Diffstat (limited to 'utils')
-rwxr-xr-x  utils/abi-compat.sh  213
-rwxr-xr-x  utils/checkstyle.py  884
-rw-r--r--  utils/codegen/controls.py  142
-rwxr-xr-x  utils/codegen/gen-controls.py  109
-rwxr-xr-x  utils/codegen/gen-formats.py  121
-rwxr-xr-x  utils/codegen/gen-gst-controls.py  182
-rwxr-xr-x  utils/codegen/gen-header.sh  26
-rwxr-xr-x  utils/codegen/gen-ipa-pub-key.py  48
-rwxr-xr-x  utils/codegen/gen-tp-header.py  37
-rwxr-xr-x  utils/codegen/ipc/extract-docs.py  79
-rwxr-xr-x  utils/codegen/ipc/generate.py  36
-rw-r--r--  utils/codegen/ipc/generators/__init__.py  0
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl  42
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl  44
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl  56
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/meson.build  14
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl  92
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl  255
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl  132
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl  246
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl  45
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl  202
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl  319
-rw-r--r--  utils/codegen/ipc/generators/meson.build  3
-rw-r--r--  utils/codegen/ipc/generators/mojom_libcamera_generator.py  555
-rw-r--r--  utils/codegen/ipc/meson.build  19
-rw-r--r--  utils/codegen/ipc/mojo/README  4
-rw-r--r--  utils/codegen/ipc/mojo/public/LICENSE  27
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/.style.yapf  6
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/BUILD.gn  22
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn  131
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/README.md  1014
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py  170
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py  194
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py  34
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py  62
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py  173
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py  102
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py  254
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py  55
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py  75
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py  48
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py  135
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py  47
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni  2118
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py  424
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py  62
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py  58
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn  18
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/README.md  14
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py  204
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py  339
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py  90
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py  120
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py  84
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn  43
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py  28
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py  44
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py  37
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py  26
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py  328
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py  71
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py  2059
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py  31
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py  367
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py  253
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py  82
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py  1258
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py  141
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py  462
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py  115
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py  83
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py  376
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py  249
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py  194
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py  510
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py  1375
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py  502
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py  73
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py  186
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py  127
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py  44
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py  458
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py  30
-rwxr-xr-x  utils/codegen/ipc/parser.py  17
-rw-r--r--  utils/codegen/ipc/tools/README  4
-rw-r--r--  utils/codegen/ipc/tools/diagnosis/crbug_1001171.py  51
-rw-r--r--  utils/codegen/meson.build  19
-rwxr-xr-x  utils/gen-debug-controls.py  163
-rwxr-xr-x  utils/gen-ipa-priv-key.sh  11
-rwxr-xr-x  utils/gen-version.sh  29
-rwxr-xr-x  utils/hooks/post-commit  2
-rwxr-xr-x  utils/hooks/pre-commit  2
-rwxr-xr-x  utils/hooks/pre-push  119
-rwxr-xr-x  utils/ipu3/ipu3-capture.sh  5
-rw-r--r--  utils/ipu3/ipu3-pack.c  101
-rwxr-xr-x  utils/ipu3/ipu3-process.sh  2
-rw-r--r--  utils/ipu3/ipu3-unpack.c  7
-rw-r--r--  utils/ipu3/meson.build  3
-rw-r--r--  utils/meson.build  6
-rwxr-xr-x  utils/raspberrypi/ctt/alsc_only.py  42
-rw-r--r--  utils/raspberrypi/ctt/cac_only.py  142
-rw-r--r--  utils/raspberrypi/ctt/colors.py  30
-rwxr-xr-x  utils/raspberrypi/ctt/convert_tuning.py  120
-rwxr-xr-x  utils/raspberrypi/ctt/ctt.py  802
-rw-r--r--  utils/raspberrypi/ctt/ctt_alsc.py  308
-rw-r--r--  utils/raspberrypi/ctt/ctt_awb.py  377
-rw-r--r--  utils/raspberrypi/ctt/ctt_cac.py  228
-rw-r--r--  utils/raspberrypi/ctt/ctt_ccm.py  404
-rw-r--r--  utils/raspberrypi/ctt/ctt_config_example.json  17
-rw-r--r--  utils/raspberrypi/ctt/ctt_dots_locator.py  118
-rw-r--r--  utils/raspberrypi/ctt/ctt_geq.py  181
-rw-r--r--  utils/raspberrypi/ctt/ctt_image_load.py  455
-rw-r--r--  utils/raspberrypi/ctt/ctt_lux.py  61
-rw-r--r--  utils/raspberrypi/ctt/ctt_macbeth_locator.py  757
-rw-r--r--  utils/raspberrypi/ctt/ctt_noise.py  123
-rwxr-xr-x  utils/raspberrypi/ctt/ctt_pisp.py  805
-rwxr-xr-x  utils/raspberrypi/ctt/ctt_pretty_print_json.py  130
-rw-r--r--  utils/raspberrypi/ctt/ctt_ransac.py  71
-rw-r--r--  utils/raspberrypi/ctt/ctt_ref.pgm  5
-rw-r--r--  utils/raspberrypi/ctt/ctt_tools.py  150
-rwxr-xr-x  utils/raspberrypi/ctt/ctt_vc4.py  126
-rw-r--r--  utils/raspberrypi/ctt/ctt_visualise.py  43
-rw-r--r--  utils/raspberrypi/delayedctrls_parse.py  113
-rwxr-xr-x  utils/release.sh  46
-rwxr-xr-x  utils/rkisp1/gen-csc-table.py  215
-rwxr-xr-x  utils/rkisp1/rkisp1-capture.sh  64
-rw-r--r--  utils/run-dist.sh  11
-rwxr-xr-x  utils/semver  446
-rwxr-xr-x  utils/tracepoints/analyze-ipa-trace.py  77
-rw-r--r--  utils/tuning/README.rst  20
-rw-r--r--  utils/tuning/config-example.yaml  12
-rw-r--r--  utils/tuning/libtuning/__init__.py  13
-rw-r--r--  utils/tuning/libtuning/average.py  21
-rw-r--r--  utils/tuning/libtuning/ctt_awb.py  378
-rw-r--r--  utils/tuning/libtuning/ctt_ccm.py  408
-rw-r--r--  utils/tuning/libtuning/ctt_colors.py  30
-rw-r--r--  utils/tuning/libtuning/ctt_ransac.py  71
-rw-r--r--  utils/tuning/libtuning/generators/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/generators/generator.py  15
-rw-r--r--  utils/tuning/libtuning/generators/raspberrypi_output.py  114
-rw-r--r--  utils/tuning/libtuning/generators/yaml_output.py  127
-rw-r--r--  utils/tuning/libtuning/gradient.py  75
-rw-r--r--  utils/tuning/libtuning/image.py  140
-rw-r--r--  utils/tuning/libtuning/libtuning.py  212
-rw-r--r--  utils/tuning/libtuning/macbeth.py  537
-rw-r--r--  utils/tuning/libtuning/macbeth_ref.pgm  6
-rw-r--r--  utils/tuning/libtuning/modules/__init__.py  3
-rw-r--r--  utils/tuning/libtuning/modules/agc/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/agc/agc.py  21
-rw-r--r--  utils/tuning/libtuning/modules/agc/rkisp1.py  79
-rw-r--r--  utils/tuning/libtuning/modules/awb/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/awb/awb.py  36
-rw-r--r--  utils/tuning/libtuning/modules/awb/rkisp1.py  27
-rw-r--r--  utils/tuning/libtuning/modules/ccm/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/ccm/ccm.py  41
-rw-r--r--  utils/tuning/libtuning/modules/ccm/rkisp1.py  28
-rw-r--r--  utils/tuning/libtuning/modules/lsc/__init__.py  7
-rw-r--r--  utils/tuning/libtuning/modules/lsc/lsc.py  75
-rw-r--r--  utils/tuning/libtuning/modules/lsc/raspberrypi.py  248
-rw-r--r--  utils/tuning/libtuning/modules/lsc/rkisp1.py  116
-rw-r--r--  utils/tuning/libtuning/modules/module.py  32
-rw-r--r--  utils/tuning/libtuning/modules/static.py  24
-rw-r--r--  utils/tuning/libtuning/parsers/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/parsers/parser.py  21
-rw-r--r--  utils/tuning/libtuning/parsers/raspberrypi_parser.py  93
-rw-r--r--  utils/tuning/libtuning/parsers/yaml_parser.py  20
-rw-r--r--  utils/tuning/libtuning/smoothing.py  24
-rw-r--r--  utils/tuning/libtuning/utils.py  186
-rw-r--r--  utils/tuning/raspberrypi/__init__.py  3
-rw-r--r--  utils/tuning/raspberrypi/alsc.py  19
-rwxr-xr-x  utils/tuning/raspberrypi_alsc_only.py  23
-rw-r--r--  utils/tuning/requirements.txt  9
-rwxr-xr-x  utils/tuning/rkisp1.py  57
-rwxr-xr-x  utils/update-kernel-headers.sh  90
-rwxr-xr-x  utils/update-mojo.sh  90
180 files changed, 29834 insertions, 292 deletions
diff --git a/utils/abi-compat.sh b/utils/abi-compat.sh
new file mode 100755
index 00000000..31f61e32
--- /dev/null
+++ b/utils/abi-compat.sh
@@ -0,0 +1,213 @@
+#!/bin/bash
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Generate and compare the ABI compatibility of two libcamera versions
+
+name=$(basename "$0")
+
+usage() {
+ cat << EOF
+$name: Determine the ABI/API compatibility of two build versions
+
+ $name [--help] [--abi-dir=<PATH>] [--tmp-dir=<PATH>] ARGS
+
+The positional arguments (ARGS) determine the versions that will be compared and
+can take one of three forms:
+
+ - No positional arguments:
+ $name [optional arguments]
+
+    The current git HEAD is compared against the most recent TAG.
+
+ - One positional argument:
+      $name [optional arguments] COMMITISH
+
+    The given COMMITISH is compared against its most recent TAG.
+
+  - Two positional arguments:
+      $name [optional arguments] BASE COMMITISH
+
+ The given COMMITISH is compared against the given BASE.
+
+Optional Arguments:
+ --abi-dir <path> Use <path> for storing (or retrieving existing) ABI data
+ files
+
+ --tmp-dir <path> Specify temporary build location for building ABI data.
+ This could be a tmpfs/RAM disk to save on disk writes.
+EOF
+}
+
+dbg () {
+ echo "$@" >&2
+}
+
+die () {
+ echo "$name: $*" >&2
+ exit 1
+}
+
+describe () {
+ git describe --tags "$1" \
+ || die "Failed to describe $1"
+}
+
+prev_release () {
+ git describe --tags --abbrev=0 "$1"^ \
+ || die "Failed to identify previous release tag from $1"
+}
+
+# Make sure we exit on errors during argument parsing.
+set -Eeuo pipefail
+
+positional=()
+while [[ $# -gt 0 ]] ; do
+ option="$1"
+ shift
+
+ case $option in
+ -h|--help)
+ usage
+ exit 0
+ ;;
+
+ --abi-dir)
+ abi_dir=$1
+ shift
+ ;;
+
+ --tmp-dir)
+ tmp=$1
+ shift
+ ;;
+
+ -*)
+ die "Unrecognised argument $option"
+ ;;
+
+ *) # Parse unidentified arguments based on position.
+ positional+=("$option")
+ ;;
+ esac
+done
+set -- "${positional[@]}" # restore positional parameters.
+
+# Parse positional arguments.
+case $# in
+ 0) # Check HEAD against previous 'release'.
+ from=$(prev_release HEAD)
+ to=$(describe HEAD)
+ ;;
+
+ 1) # Check COMMIT against previous release.
+ from=$(prev_release "$1")
+ to=$(describe "$1")
+ ;;
+
+ 2) # Check ABI between FROM and TO explicitly.
+ from=$(describe "$1")
+ to=$(describe "$2")
+ ;;
+
+ *)
+ die "Invalid arguments"
+ ;;
+esac
+
+if ! which abi-compliance-checker; then
+ die "This tool requires 'abi-compliance-checker' to be installed."
+fi
+
+
+abi_dir=${abi_dir:-abi}
+tmp=${tmp:-"$abi_dir/tmp/"}
+
+echo "Validating ABI compatibility between $from and $to"
+
+mkdir -p "$abi_dir"
+mkdir -p "$tmp"
+
+# Generate an abi-compliance-checker xml description file.
+create_xml() {
+ local output="$1"
+ local version="$2"
+ local root="$3"
+
+ echo "<version>$version</version>" > "$output"
+ echo "<headers>$root/usr/local/include/</headers>" >> "$output"
+ echo "<libs>$root/usr/local/lib/</libs>" >> "$output"
+}
+
+# Check if an ABI dump file exists, and if not create one by building a minimal
+# configuration of libcamera at the specified version using a clean worktree.
+create_abi_dump() {
+ local version="$1"
+ local abi_file="$abi_dir/$version.abi.dump"
+ local worktree="$tmp/$version"
+ local build="$tmp/$version-build"
+
+ # Use a fully qualified path when calling ninja -C.
+ install=$(realpath "$tmp/$version-install")
+
+ if [[ ! -e "$abi_file" ]] ; then
+ dbg "Creating ABI dump for $version in $abi_dir"
+ git worktree add --force "$worktree" "$version"
+
+ # Generate a minimal libcamera build. "lib" and "prefix" are
+ # defined explicitly to avoid system default ambiguities.
+ meson setup "$build" "$worktree" \
+ -Dcam=disabled \
+ -Ddocumentation=disabled \
+ -Dgstreamer=disabled \
+ -Dlc-compliance=disabled \
+ -Dlibdir=lib \
+ -Dpipelines= \
+ -Dprefix=/usr/local/ \
+ -Dpycamera=disabled \
+ -Dqcam=disabled \
+ -Dtracing=disabled
+
+ ninja -C "$build"
+ DESTDIR="$install" ninja -C "$build" install
+
+ # Create an xml descriptor with parameters to generate the dump file.
+ create_xml \
+ "$install/libcamera-abi-dump.xml" \
+ "$version" \
+ "$install"
+
+ abi-compliance-checker \
+ -lib libcamera \
+ -v1 "$version" \
+ -dump "$install/libcamera-abi-dump.xml" \
+ -dump-path "$abi_file"
+
+ dbg Created "$abi_file"
+
+ dbg Removing Worktree "$worktree"
+ git worktree remove -f "$worktree"
+
+ dbg Removing "$build"
+ rm -r "$build"
+
+ dbg Removing "$install"
+ rm -r "$install"
+ fi
+}
+
+# Create the requested ABI dump files if they don't yet exist.
+create_abi_dump "$from"
+create_abi_dump "$to"
+
+# TODO: Future iterations and extensions here could add "-stdout -xml" and
+# parse the results automatically.
+abi-compliance-checker -l libcamera \
+ -old "$abi_dir/$from.abi.dump" \
+ -new "$abi_dir/$to.abi.dump"
+
+# On (far too many) occasions the tool keeps running and leaves a CPU core at
+# 100% usage, perhaps because a subprocess gets launched but never rejoined.
+# Stop them all.
+#
+# TODO: Investigate this and report upstream.
+killall abi-compliance-checker 2>/dev/null
diff --git a/utils/checkstyle.py b/utils/checkstyle.py
index b594a19a..f6229bbd 100755
--- a/utils/checkstyle.py
+++ b/utils/checkstyle.py
@@ -1,10 +1,10 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2018, Google Inc.
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# checkstyle.py - A patch style checker script based on astyle or clang-format
+# A patch style checker script based on clang-format
#
# TODO:
#
@@ -22,22 +22,7 @@ import shutil
import subprocess
import sys
-astyle_options = (
- '-n',
- '--style=linux',
- '--indent=force-tab=8',
- '--attach-namespaces',
- '--attach-extern-c',
- '--pad-oper',
- '--align-pointer=name',
- '--align-reference=name',
- '--keep-one-line-blocks',
- '--max-code-length=120'
-)
-
dependencies = {
- 'astyle': False,
- 'clang-format': False,
'git': True,
}
@@ -182,6 +167,12 @@ def parse_diff(diff):
hunk = DiffHunk(line)
elif hunk is not None:
+ # Work around https://github.com/python/cpython/issues/46395
+ # See https://www.gnu.org/software/diffutils/manual/html_node/Incomplete-Lines.html
+ if line[-1] != '\n':
+ hunk.append(line + '\n')
+ line = '\\ No newline at end of file\n'
+
hunk.append(line)
if hunk:
@@ -191,73 +182,490 @@ def parse_diff(diff):
# ------------------------------------------------------------------------------
-# Style Checkers
+# Commit, Staged Changes & Amendments
#
-_style_checkers = []
+class CommitFile:
+ def __init__(self, name):
+ info = name.split()
+ self.__status = info[0][0]
-class StyleCheckerRegistry(type):
- def __new__(cls, clsname, bases, attrs):
- newclass = super(StyleCheckerRegistry, cls).__new__(cls, clsname, bases, attrs)
- if clsname != 'StyleChecker':
- _style_checkers.append(newclass)
- return newclass
+ # For renamed files, store the new name
+ if self.__status == 'R':
+ self.__filename = info[2]
+ else:
+ self.__filename = info[1]
+
+ def __repr__(self):
+ return f'{self.__status} {self.__filename}'
+
+ @property
+ def filename(self):
+ return self.__filename
+
+ @property
+ def status(self):
+ return self.__status
-class StyleChecker(metaclass=StyleCheckerRegistry):
+class Commit:
+ def __init__(self, commit):
+ self._commit = commit
+ self._author = None
+ self._trailers = []
+ self._parse()
+
+ def _parse_commit(self):
+ # Get and parse the commit message.
+ ret = subprocess.run(['git', 'show', '--format=%H%n%an <%ae>%n%s%n%b',
+ '--no-patch', self.commit],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ lines = ret.splitlines()
+
+ self._commit = lines[0]
+ self._author = lines[1]
+ self._title = lines[2]
+ self._body = lines[3:]
+
+ # Parse the trailers. Feed git-interpret-trailers with a full commit
+ # message that includes both the title and the body, as it otherwise
+ # fails to find trailers when the body contains trailers only.
+ message = self._title + '\n\n' + '\n'.join(self._body)
+ trailers = subprocess.run(['git', 'interpret-trailers', '--parse'],
+ input=message.encode('utf-8'),
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+
+ self._trailers = trailers.splitlines()
+
+ def _parse(self):
+ self._parse_commit()
+
+ # Get the list of files. Use an empty format specifier to suppress the
+ # commit message completely.
+ ret = subprocess.run(['git', 'show', '--format=', '--name-status',
+ self.commit],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ self._files = [CommitFile(f) for f in ret.splitlines()]
+
+ def __repr__(self):
+ return '\n'.join([
+ f'commit {self.commit}',
+ f'Author: {self.author}',
+ f'',
+ f' {self.title}',
+ '',
+ '\n'.join([line and f' {line}' or '' for line in self._body]),
+ 'Trailers:',
+ ] + self.trailers)
+
+ def files(self, filter='AMR'):
+ return [f.filename for f in self._files if f.status in filter]
+
+ @property
+ def author(self):
+ return self._author
+
+ @property
+ def commit(self):
+ return self._commit
+
+ @property
+ def title(self):
+ return self._title
+
+ @property
+ def trailers(self):
+ return self._trailers
+
+ def get_diff(self, top_level, filename):
+ diff = subprocess.run(['git', 'diff', '%s~..%s' % (self.commit, self.commit),
+ '--', '%s/%s' % (top_level, filename)],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ return parse_diff(diff.splitlines(True))
+
+ def get_file(self, filename):
+ return subprocess.run(['git', 'show', '%s:%s' % (self.commit, filename)],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+
+
+class StagedChanges(Commit):
def __init__(self):
- pass
+ Commit.__init__(self, '')
+
+ def _parse(self):
+ ret = subprocess.run(['git', 'diff', '--staged', '--name-status'],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ self._title = 'Staged changes'
+ self._files = [CommitFile(f) for f in ret.splitlines()]
- #
- # Class methods
- #
+ def get_diff(self, top_level, filename):
+ diff = subprocess.run(['git', 'diff', '--staged', '--',
+ '%s/%s' % (top_level, filename)],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ return parse_diff(diff.splitlines(True))
+
+
+class Amendment(Commit):
+ def __init__(self):
+ Commit.__init__(self, 'HEAD')
+
+ def _parse(self):
+ self._parse_commit()
+
+ self._title = f'Amendment of "{self.title}"'
+
+ # Extract the list of modified files.
+ ret = subprocess.run(['git', 'diff', '--staged', '--name-status', 'HEAD~'],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ self._files = [CommitFile(f) for f in ret.splitlines()]
+
+ def get_diff(self, top_level, filename):
+ diff = subprocess.run(['git', 'diff', '--staged', 'HEAD~', '--',
+ '%s/%s' % (top_level, filename)],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ return parse_diff(diff.splitlines(True))
+
+
+# ------------------------------------------------------------------------------
+# Helpers
+#
+
+class ClassRegistry(type):
+ def __new__(cls, clsname, bases, attrs):
+ newclass = super().__new__(cls, clsname, bases, attrs)
+ if bases and bases[0] != CheckerBase:
+ base = bases[0]
+
+ if not hasattr(base, 'subclasses'):
+ base.subclasses = []
+ base.subclasses.append(newclass)
+ base.subclasses.sort(key=lambda x: getattr(x, 'priority', 0),
+ reverse=True)
+ return newclass
+
+
+class CheckerBase(metaclass=ClassRegistry):
@classmethod
- def checkers(cls, filename):
- for checker in _style_checkers:
- if checker.supports(filename):
- yield checker
+ def instances(cls, obj, names):
+ for instance in cls.subclasses:
+ if names and instance.__name__ not in names:
+ continue
+ if instance.supports(obj):
+ yield instance
@classmethod
- def supports(cls, filename):
- for pattern in cls.patterns:
- if fnmatch.fnmatch(os.path.basename(filename), pattern):
- return True
+ def supports(cls, obj):
+ if hasattr(cls, 'commit_types'):
+ return type(obj) in cls.commit_types
+
+ if hasattr(cls, 'patterns'):
+ for pattern in cls.patterns:
+ if fnmatch.fnmatch(os.path.basename(obj), pattern):
+ return True
+
return False
@classmethod
def all_patterns(cls):
patterns = set()
- for checker in _style_checkers:
- patterns.update(checker.patterns)
+ for instance in cls.subclasses:
+ if hasattr(instance, 'patterns'):
+ patterns.update(instance.patterns)
return patterns
+ @classmethod
+ def check_dependencies(cls):
+ if not hasattr(cls, 'dependencies'):
+ return []
+
+ issues = []
+
+ for command in cls.dependencies:
+ if command not in dependencies:
+ dependencies[command] = shutil.which(command)
+
+ if not dependencies[command]:
+ issues.append(CommitIssue(f'Missing {command} to run {cls.__name__}'))
+
+ return issues
+
+
+# ------------------------------------------------------------------------------
+# Commit Checkers
+#
+
+class CommitChecker(CheckerBase):
+ pass
+
+
+class CommitIssue(object):
+ def __init__(self, msg):
+ self.msg = msg
+
+ def __str__(self):
+ return f'{Colours.fg(Colours.Yellow)}{self.msg}{Colours.reset()}'
+
+
+class HeaderAddChecker(CommitChecker):
+ commit_types = (Commit, StagedChanges, Amendment)
+
+ @classmethod
+ def check(cls, commit, top_level):
+ issues = []
+
+ meson_files = [f for f in commit.files()
+ if os.path.basename(f) == 'meson.build']
+
+ for filename in commit.files('AR'):
+ if not filename.startswith('include/libcamera/') or \
+ not filename.endswith('.h'):
+ continue
+
+ meson = os.path.dirname(filename) + '/meson.build'
+ header = os.path.basename(filename)
+
+ issue = CommitIssue('Header %s added without corresponding update to %s' %
+ (filename, meson))
+
+ if meson not in meson_files:
+ issues.append(issue)
+ continue
+
+ diff = commit.get_diff(top_level, meson)
+ found = False
+
+ for hunk in diff:
+ for line in hunk.lines:
+ if line[0] != '+':
+ continue
+
+ if line.find("'%s'" % header) != -1:
+ found = True
+ break
+
+ if found:
+ break
+
+ if not found:
+ issues.append(issue)
+
+ return issues
+
+
+class TitleChecker(CommitChecker):
+ commit_types = (Commit,)
+
+ prefix_regex = re.compile(r'^([a-zA-Z0-9_.-]+: )+')
+ release_regex = re.compile(r'libcamera v[0-9]+\.[0-9]+\.[0-9]+')
+
+ @classmethod
+ def check(cls, commit, top_level):
+ title = commit.title
+
+ # Ignore release commits, they don't need a prefix.
+ if TitleChecker.release_regex.fullmatch(title):
+ return []
+
+ prefix_pos = title.find(': ')
+ if prefix_pos != -1 and prefix_pos != len(title) - 2:
+ return []
+
+ # Find prefix candidates by searching the git history
+ msgs = subprocess.run(['git', 'log', '--no-decorate', '--oneline', '-n100', '--'] + commit.files(),
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ prefixes = {}
+ prefixes_count = 0
+ for msg in msgs.splitlines():
+ prefix = TitleChecker.prefix_regex.match(msg)
+ if not prefix:
+ continue
+
+ prefix = prefix.group(0)
+ if prefix in prefixes:
+ prefixes[prefix] += 1
+ else:
+ prefixes[prefix] = 1
+
+ prefixes_count += 1
+
+ if not prefixes:
+ return [CommitIssue('Commit title is missing prefix')]
+
+ # Sort the candidates by number of occurrences and pick the best ones.
+        # When multiple prefixes are possible without a clear winner, display
+        # the most common options to the user but leave out the least likely
+        # ones to keep the message short. As a heuristic, select
+ # enough candidates to cover at least 2/3 of the possible prefixes, but
+ # never more than 4 candidates.
+ prefixes = list(prefixes.items())
+ prefixes.sort(key=lambda x: x[1], reverse=True)
+
+ candidates = []
+ candidates_count = 0
+ for prefix in prefixes:
+ candidates.append(f"`{prefix[0]}'")
+ candidates_count += prefix[1]
+ if candidates_count >= prefixes_count * 2 / 3 or \
+ len(candidates) == 4:
+ break
+
+ candidates = candidates[:-2] + [' or '.join(candidates[-2:])]
+ candidates = ', '.join(candidates)
+
+ return [CommitIssue('Commit title is missing prefix, '
+ 'possible candidates are ' + candidates)]
+
+
+class TrailersChecker(CommitChecker):
+ commit_types = (Commit,)
+
+ commit_regex = re.compile(r'[0-9a-f]{12}[0-9a-f]* \(".*"\)')
+
+ coverity_regex = re.compile(r'Coverity CID=.*')
+
+ # Simple e-mail address validator regex, with an additional trailing
+ # comment. The complexity of a full RFC6531 validator isn't worth the
+ # additional invalid addresses it would reject.
+ email_regex = re.compile(r'[^<]+ <[^@>]+@[^>]+>( # .*)?')
+
+ link_regex = re.compile(r'https?://.*')
+
+ @staticmethod
+ def validate_reported_by(value):
+ if TrailersChecker.email_regex.fullmatch(value):
+ return True
+ if TrailersChecker.coverity_regex.fullmatch(value):
+ return True
+ return False
+
+ known_trailers = {
+ 'Acked-by': email_regex,
+ 'Bug': link_regex,
+ 'Co-developed-by': email_regex,
+ 'Fixes': commit_regex,
+ 'Link': link_regex,
+ 'Reported-by': validate_reported_by,
+ 'Reviewed-by': email_regex,
+ 'Signed-off-by': email_regex,
+ 'Suggested-by': email_regex,
+ 'Tested-by': email_regex,
+ }
+
+ trailer_regex = re.compile(r'([A-Z][a-zA-Z-]*)\s*:\s*(.*)')
+
+ @classmethod
+ def check(cls, commit, top_level):
+ issues = []
+
+ sob_found = False
+
+ for trailer in commit.trailers:
+ match = TrailersChecker.trailer_regex.fullmatch(trailer)
+ if not match:
+ issues.append(CommitIssue(f"Malformed commit trailer '{trailer}'"))
+ continue
+
+ key, value = match.groups()
+
+ validator = TrailersChecker.known_trailers.get(key)
+ if not validator:
+ issues.append(CommitIssue(f"Invalid commit trailer key '{key}'"))
+ continue
+
+ if isinstance(validator, re.Pattern):
+ valid = bool(validator.fullmatch(value))
+ else:
+ valid = validator(value)
+
+ if not valid:
+ issues.append(CommitIssue(f"Malformed value '{value}' for commit trailer '{key}'"))
+ continue
+
+ if key == 'Signed-off-by':
+ if value == commit.author:
+ sob_found = True
+
+ if not sob_found:
+ issues.append(CommitIssue(f"No 'Signed-off-by' trailer matching author '{commit.author}', see Documentation/contributing.rst"))
+
+ return issues
+
+
+# ------------------------------------------------------------------------------
+# Style Checkers
+#
+
+class StyleChecker(CheckerBase):
+ pass
+
class StyleIssue(object):
- def __init__(self, line_number, line, msg):
+ def __init__(self, line_number, position, line, msg):
self.line_number = line_number
+ self.position = position
self.line = line
self.msg = msg
+ def __str__(self):
+ s = []
+ s.append(f'{Colours.fg(Colours.Yellow)}#{self.line_number}: {self.msg}{Colours.reset()}')
+ if self.line is not None:
+ s.append(f'{Colours.fg(Colours.Yellow)}+{self.line.rstrip()}{Colours.reset()}')
+
+ if self.position is not None:
+ # Align the position marker by using the original line with
+ # all characters except for tabs replaced with spaces. This
+ # ensures proper alignment regardless of how the code is
+ # indented.
+ start = self.position[0]
+ prefix = ''.join([c if c == '\t' else ' ' for c in self.line[:start]])
+ length = self.position[1] - start - 1
+ s.append(f' {prefix}^{"~" * length}')
+
+ return '\n'.join(s)
+
+
+class HexValueChecker(StyleChecker):
+ patterns = ('*.c', '*.cpp', '*.h')
+
+ regex = re.compile(r'\b0[xX][0-9a-fA-F]+\b')
+
+ @classmethod
+ def check(cls, content, line_numbers):
+ issues = []
+
+ for line_number in line_numbers:
+ line = content[line_number - 1]
+ match = HexValueChecker.regex.search(line)
+ if not match:
+ continue
+
+ value = match.group(0)
+ if value == value.lower():
+ continue
+
+ issues.append(StyleIssue(line_number, match.span(0), line,
+ f'Use lowercase hex constant {value.lower()}'))
+
+ return issues
+
class IncludeChecker(StyleChecker):
patterns = ('*.cpp', '*.h')
- headers = ('assert', 'ctype', 'errno', 'fenv', 'float', 'inttypes',
- 'limits', 'locale', 'math', 'setjmp', 'signal', 'stdarg',
- 'stddef', 'stdint', 'stdio', 'stdlib', 'string', 'time', 'uchar',
- 'wchar', 'wctype')
- include_regex = re.compile('^#include <c([a-z]*)>')
-
- def __init__(self, content):
- super().__init__()
- self.__content = content
+ headers = ('cassert', 'cctype', 'cerrno', 'cfenv', 'cfloat', 'cinttypes',
+ 'climits', 'clocale', 'csetjmp', 'csignal', 'cstdarg', 'cstddef',
+ 'cstdint', 'cstdio', 'cstdlib', 'cstring', 'ctime', 'cuchar',
+ 'cwchar', 'cwctype', 'math.h')
+ include_regex = re.compile(r'^#include <([a-z.]*)>')
- def check(self, line_numbers):
+ @classmethod
+ def check(self, content, line_numbers):
issues = []
for line_number in line_numbers:
- line = self.__content[line_number - 1]
+ line = content[line_number - 1]
match = IncludeChecker.include_regex.match(line)
if not match:
continue
@@ -266,28 +674,34 @@ class IncludeChecker(StyleChecker):
if header not in IncludeChecker.headers:
continue
- issues.append(StyleIssue(line_number, line,
- 'C compatibility header <%s.h> is preferred' % header))
+ if header.endswith('.h'):
+ header_type = 'C++'
+ header = 'c' + header[:-2]
+ else:
+ header_type = 'C compatibility'
+ header = header[1:] + '.h'
+
+ issues.append(StyleIssue(line_number, match.span(1), line,
+ f'{header_type} header <{header}> is preferred'))
return issues
class LogCategoryChecker(StyleChecker):
- log_regex = re.compile('\\bLOG\((Debug|Info|Warning|Error|Fatal)\)')
+ log_regex = re.compile(r'\bLOG\((Debug|Info|Warning|Error|Fatal)\)')
patterns = ('*.cpp',)
- def __init__(self, content):
- super().__init__()
- self.__content = content
-
- def check(self, line_numbers):
+ @classmethod
+ def check(cls, content, line_numbers):
issues = []
for line_number in line_numbers:
- line = self.__content[line_number-1]
- if not LogCategoryChecker.log_regex.search(line):
+ line = content[line_number - 1]
+ match = LogCategoryChecker.log_regex.search(line)
+ if not match:
continue
- issues.append(StyleIssue(line_number, line, 'LOG() should use categories'))
+ issues.append(StyleIssue(line_number, match.span(1), line,
+ 'LOG() should use categories'))
return issues
@@ -295,70 +709,30 @@ class LogCategoryChecker(StyleChecker):
class MesonChecker(StyleChecker):
patterns = ('meson.build',)
- def __init__(self, content):
- super().__init__()
- self.__content = content
-
- def check(self, line_numbers):
+ @classmethod
+ def check(cls, content, line_numbers):
issues = []
for line_number in line_numbers:
- line = self.__content[line_number-1]
- if line.find('\t') != -1:
- issues.append(StyleIssue(line_number, line, 'meson.build should use spaces for indentation'))
- return issues
-
-
-class Pep8Checker(StyleChecker):
- patterns = ('*.py',)
- results_regex = re.compile('stdin:([0-9]+):([0-9]+)(.*)')
-
- def __init__(self, content):
- super().__init__()
- self.__content = content
-
- def check(self, line_numbers):
- issues = []
- data = ''.join(self.__content).encode('utf-8')
-
- try:
- ret = subprocess.run(['pycodestyle', '--ignore=E501', '-'],
- input=data, stdout=subprocess.PIPE)
- except FileNotFoundError:
- issues.append(StyleIssue(0, None, "Please install pycodestyle to validate python additions"))
- return issues
-
- results = ret.stdout.decode('utf-8').splitlines()
- for item in results:
- search = re.search(Pep8Checker.results_regex, item)
- line_number = int(search.group(1))
- position = int(search.group(2))
- msg = search.group(3)
-
- if line_number in line_numbers:
- line = self.__content[line_number - 1]
- issues.append(StyleIssue(line_number, line, msg))
-
+ line = content[line_number - 1]
+ pos = line.find('\t')
+ if pos != -1:
+ issues.append(StyleIssue(line_number, [pos, pos], line,
+ 'meson.build should use spaces for indentation'))
return issues
class ShellChecker(StyleChecker):
+ dependencies = ('shellcheck',)
patterns = ('*.sh',)
- results_line_regex = re.compile('In - line ([0-9]+):')
-
- def __init__(self, content):
- super().__init__()
- self.__content = content
+ results_line_regex = re.compile(r'In - line ([0-9]+):')
- def check(self, line_numbers):
+ @classmethod
+ def check(cls, content, line_numbers):
issues = []
- data = ''.join(self.__content).encode('utf-8')
+ data = ''.join(content).encode('utf-8')
- try:
- ret = subprocess.run(['shellcheck', '-Cnever', '-'],
- input=data, stdout=subprocess.PIPE)
- except FileNotFoundError:
- issues.append(StyleIssue(0, None, "Please install shellcheck to validate shell script additions"))
- return issues
+ ret = subprocess.run(['shellcheck', '-Cnever', '-'],
+ input=data, stdout=subprocess.PIPE)
results = ret.stdout.decode('utf-8').splitlines()
for nr, item in enumerate(results):
@@ -370,11 +744,8 @@ class ShellChecker(StyleChecker):
line = results[nr + 1]
msg = results[nr + 2]
- # Determined, but not yet used
- position = msg.find('^') + 1
-
if line_number in line_numbers:
- issues.append(StyleIssue(line_number, line, msg))
+ issues.append(StyleIssue(line_number, None, line, msg))
return issues
@@ -383,67 +754,14 @@ class ShellChecker(StyleChecker):
# Formatters
#
-_formatters = []
-
-class FormatterRegistry(type):
- def __new__(cls, clsname, bases, attrs):
- newclass = super(FormatterRegistry, cls).__new__(cls, clsname, bases, attrs)
- if clsname != 'Formatter':
- _formatters.append(newclass)
- return newclass
-
-
-class Formatter(metaclass=FormatterRegistry):
- enabled = True
-
- def __init__(self):
- pass
-
- #
- # Class methods
- #
- @classmethod
- def formatters(cls, filename):
- for formatter in _formatters:
- if not cls.enabled:
- continue
- if formatter.supports(filename):
- yield formatter
-
- @classmethod
- def supports(cls, filename):
- if not cls.enabled:
- return False
- for pattern in cls.patterns:
- if fnmatch.fnmatch(os.path.basename(filename), pattern):
- return True
- return False
-
- @classmethod
- def all_patterns(cls):
- patterns = set()
- for formatter in _formatters:
- if not cls.enabled:
- continue
- patterns.update(formatter.patterns)
-
- return patterns
-
-
-class AStyleFormatter(Formatter):
- enabled = False
- patterns = ('*.c', '*.cpp', '*.h')
-
- @classmethod
- def format(cls, filename, data):
- ret = subprocess.run(['astyle', *astyle_options],
- input=data.encode('utf-8'), stdout=subprocess.PIPE)
- return ret.stdout.decode('utf-8')
+class Formatter(CheckerBase):
+ pass
class CLangFormatter(Formatter):
- enabled = False
+ dependencies = ('clang-format',)
patterns = ('*.c', '*.cpp', '*.h')
+ priority = -1
@classmethod
def format(cls, filename, data):
@@ -456,7 +774,8 @@ class CLangFormatter(Formatter):
class DoxygenFormatter(Formatter):
patterns = ('*.c', '*.cpp')
- return_regex = re.compile(' +\\* +\\\\return +[a-z]')
+ oneliner_regex = re.compile(r'^ +\* +\\(brief|param|return)\b.*\.$')
+ return_regex = re.compile(r' +\* +\\return +[a-z]')
@classmethod
def format(cls, filename, data):
@@ -471,6 +790,7 @@ class DoxygenFormatter(Formatter):
lines.append(line)
continue
+ line = cls.oneliner_regex.sub(lambda m: m.group(0)[:-1], line)
line = cls.return_regex.sub(lambda m: m.group(0)[:-1] + m.group(0)[-1].upper(), line)
if line.find('*/') != -1:
@@ -481,10 +801,42 @@ class DoxygenFormatter(Formatter):
return '\n'.join(lines)
+class DPointerFormatter(Formatter):
+ # Ensure consistent naming of variables related to the d-pointer design
+ # pattern.
+ patterns = ('*.cpp', '*.h')
+
+ # The clang formatter runs first, we can thus rely on appropriate coding
+ # style.
+ declare_regex = re.compile(r'^(\t*)(const )?([a-zA-Z0-9_]+) \*( ?const )?([a-zA-Z0-9_]+) = (LIBCAMERA_[DO]_PTR)\(([a-zA-Z0-9_]+)\);$')
+
+ @classmethod
+ def format(cls, filename, data):
+ lines = []
+
+ for line in data.split('\n'):
+ match = cls.declare_regex.match(line)
+ if match:
+ indent = match.group(1) or ''
+ const = match.group(2) or ''
+ macro = match.group(6)
+ klass = match.group(7)
+ if macro == 'LIBCAMERA_D_PTR':
+ var = 'Private *const d'
+ else:
+ var = f'{klass} *const o'
+
+ line = f'{indent}{const}{var} = {macro}({klass});'
+
+ lines.append(line)
+
+ return '\n'.join(lines)
+
+
class IncludeOrderFormatter(Formatter):
patterns = ('*.cpp', '*.h')
- include_regex = re.compile('^#include ["<]([^">]*)[">]')
+ include_regex = re.compile(r'^#include (["<])([^">]*)([">])')
@classmethod
def format(cls, filename, data):
@@ -498,7 +850,21 @@ class IncludeOrderFormatter(Formatter):
if match:
# If the current line is an #include statement, add it to the
# includes group and continue to the next line.
- includes.append((line, match.group(1)))
+ open_token = match.group(1)
+ file_name = match.group(2)
+ close_token = match.group(3)
+
+ # Ensure the "..." include style for internal headers and the
+ # <...> style for all other libcamera headers.
+ if (file_name.startswith('libcamera/internal')):
+ open_token = '"'
+ close_token = '"'
+ elif (file_name.startswith('libcamera/')):
+ open_token = '<'
+ close_token = '>'
+
+ line = f'#include {open_token}{file_name}{close_token}'
+ includes.append((line, file_name))
continue
# The current line is not an #include statement, output the sorted
@@ -522,6 +888,17 @@ class IncludeOrderFormatter(Formatter):
return '\n'.join(lines)
+class Pep8Formatter(Formatter):
+ dependencies = ('autopep8',)
+ patterns = ('*.py',)
+
+ @classmethod
+ def format(cls, filename, data):
+ ret = subprocess.run(['autopep8', '--ignore=E501', '-'],
+ input=data.encode('utf-8'), stdout=subprocess.PIPE)
+ return ret.stdout.decode('utf-8')
+
+
class StripTrailingSpaceFormatter(Formatter):
patterns = ('*.c', '*.cpp', '*.h', '*.py', 'meson.build')
@@ -537,69 +914,27 @@ class StripTrailingSpaceFormatter(Formatter):
# Style checking
#
-class Commit:
- def __init__(self, commit):
- self.commit = commit
-
- def get_info(self):
- # Get the commit title and list of files.
- ret = subprocess.run(['git', 'show', '--pretty=oneline', '--name-only',
- self.commit],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
- files = ret.splitlines()
- # Returning title and files list as a tuple
- return files[0], files[1:]
-
- def get_diff(self, top_level, filename):
- return subprocess.run(['git', 'diff', '%s~..%s' % (self.commit, self.commit),
- '--', '%s/%s' % (top_level, filename)],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
-
- def get_file(self, filename):
- return subprocess.run(['git', 'show', '%s:%s' % (self.commit, filename)],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
-
-
-class StagedChanges(Commit):
- def __init__(self):
- Commit.__init__(self, '')
-
- def get_info(self):
- ret = subprocess.run(['git', 'diff', '--staged', '--name-only'],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
- return "Staged changes", ret.splitlines()
-
- def get_diff(self, top_level, filename):
- return subprocess.run(['git', 'diff', '--staged', '--',
- '%s/%s' % (top_level, filename)],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
+def check_commit(top_level, commit, checkers):
+ issues = []
+ # Apply the commit checkers first.
+ for checker in CommitChecker.instances(commit, checkers):
+ issues_ = checker.check_dependencies()
+ if issues_:
+ issues += issues_
+ continue
-class Amendment(StagedChanges):
- def __init__(self):
- StagedChanges.__init__(self)
+ issues += checker.check(commit, top_level)
- def get_info(self):
- # Create a title using HEAD commit
- ret = subprocess.run(['git', 'show', '--pretty=oneline', '--no-patch'],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
- title = 'Amendment of ' + ret.strip()
- # Extract the list of modified files
- ret = subprocess.run(['git', 'diff', '--staged', '--name-only', 'HEAD~'],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
- return title, ret.splitlines()
+ for issue in issues:
+ print(issue)
- def get_diff(self, top_level, filename):
- return subprocess.run(['git', 'diff', '--staged', 'HEAD~', '--',
- '%s/%s' % (top_level, filename)],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
+ return len(issues)
-def check_file(top_level, commit, filename):
+def check_file(top_level, commit, filename, checkers):
# Extract the line numbers touched by the commit.
- diff = commit.get_diff(top_level, filename)
- diff = diff.splitlines(True)
- commit_diff = parse_diff(diff)
+ commit_diff = commit.get_diff(top_level, filename)
lines = []
for hunk in commit_diff:
@@ -612,9 +947,15 @@ def check_file(top_level, commit, filename):
# Format the file after the commit with all formatters and compute the diff
# between the unformatted and formatted contents.
after = commit.get_file(filename)
+ issues = []
formatted = after
- for formatter in Formatter.formatters(filename):
+ for formatter in Formatter.instances(filename, checkers):
+ issues_ = formatter.check_dependencies()
+ if issues_:
+ issues += issues_
+ continue
+
formatted = formatter.format(filename, formatted)
after = after.splitlines(True)
@@ -627,11 +968,14 @@ def check_file(top_level, commit, filename):
formatted_diff = [hunk for hunk in formatted_diff if hunk.intersects(lines)]
# Check for code issues not related to formatting.
- issues = []
- for checker in StyleChecker.checkers(filename):
- checker = checker(after)
+ for checker in StyleChecker.instances(filename, checkers):
+ issues_ = checker.check_dependencies()
+ if issues_:
+ issues += issues_
+ continue
+
for hunk in commit_diff:
- issues += checker.check(hunk.side('to').touched)
+ issues += checker.check(after, hunk.side('to').touched)
# Print the detected issues.
if len(issues) == 0 and len(formatted_diff) == 0:
@@ -645,42 +989,38 @@ def check_file(top_level, commit, filename):
print(hunk)
if len(issues):
- issues = sorted(issues, key=lambda i: i.line_number)
+ issues = sorted(issues, key=lambda i: getattr(i, 'line_number', -1))
for issue in issues:
- print('%s#%u: %s' % (Colours.fg(Colours.Yellow), issue.line_number, issue.msg))
- if issue.line is not None:
- print('+%s%s' % (issue.line.rstrip(), Colours.reset()))
+ print(issue)
return len(formatted_diff) + len(issues)
-def check_style(top_level, commit):
- title, files = commit.get_info()
-
+def check_style(top_level, commit, checkers):
+ title = commit.commit + ' ' + commit.title
separator = '-' * len(title)
print(separator)
print(title)
print(separator)
+ # Apply the commit checkers first.
+ issues = check_commit(top_level, commit, checkers)
+
# Filter out files we have no checker for.
patterns = set()
patterns.update(StyleChecker.all_patterns())
patterns.update(Formatter.all_patterns())
- files = [f for f in files if len([p for p in patterns if fnmatch.fnmatch(os.path.basename(f), p)])]
- if len(files) == 0:
- print("Commit doesn't touch source files, skipping")
- return 0
+ files = [f for f in commit.files() if len([p for p in patterns if fnmatch.fnmatch(os.path.basename(f), p)])]
- issues = 0
for f in files:
- issues += check_file(top_level, commit, f)
+ issues += check_file(top_level, commit, f, checkers)
if issues == 0:
- print("No style issue detected")
+ print('No issue detected')
else:
print('---')
- print("%u potential style %s detected, please review" % \
- (issues, 'issue' if issues == 1 else 'issues'))
+ print('%u potential %s detected, please review' %
+ (issues, 'issue' if issues == 1 else 'issues'))
return issues
@@ -723,8 +1063,8 @@ def main(argv):
# Parse command line arguments
parser = argparse.ArgumentParser()
- parser.add_argument('--formatter', '-f', type=str, choices=['astyle', 'clang-format'],
- help='Code formatter. Default to clang-format if not specified.')
+ parser.add_argument('--checkers', '-c', type=str,
+ help='Specify which checkers to run as a comma-separated list. Defaults to all checkers')
parser.add_argument('--staged', '-s', action='store_true',
help='Include the changes in the index. Defaults to False')
parser.add_argument('--amend', '-a', action='store_true',
@@ -733,30 +1073,18 @@ def main(argv):
help='Revision range (as defined by git rev-parse). Defaults to HEAD if not specified.')
args = parser.parse_args(argv[1:])
- # Check for required dependencies.
+ if args.checkers:
+ args.checkers = args.checkers.split(',')
+
+ # Check for required common dependencies.
for command, mandatory in dependencies.items():
found = shutil.which(command)
if mandatory and not found:
- print("Executable %s not found" % command)
+ print('Executable %s not found' % command)
return 1
dependencies[command] = found
- if args.formatter:
- if not args.formatter in dependencies or \
- not dependencies[args.formatter]:
- print("Formatter %s not available" % args.formatter)
- return 1
- formatter = args.formatter
- else:
- if dependencies['clang-format']:
- CLangFormatter.enabled = True
- elif dependencies['astyle']:
- AStyleFormatter.enabled = True
- else:
- print("No formatter found, please install clang-format or astyle")
- return 1
-
# Get the top level directory to pass absolute file names to git diff
# commands, in order to support execution from subdirectories of the git
# tree.
@@ -781,7 +1109,7 @@ def main(argv):
issues = 0
for commit in commits:
- issues += check_style(top_level, commit)
+ issues += check_style(top_level, commit, args.checkers)
print('')
if issues:
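
The checkstyle.py rework above replaces the separate StyleCheckerRegistry and FormatterRegistry metaclasses with a single ClassRegistry, which records every concrete checker or formatter on its base class and keeps the list sorted by an optional priority attribute. The following self-contained sketch illustrates that registration pattern in isolation; the Registry, Checker, MesonChecker and PythonChecker names below are illustrative only and are not the classes defined in checkstyle.py:

# Minimal sketch of the subclass-registry pattern used by ClassRegistry.
class Registry(type):
    def __new__(cls, clsname, bases, attrs):
        newclass = super().__new__(cls, clsname, bases, attrs)
        if bases:
            base = bases[0]
            # Collect subclasses on their immediate base class and keep them
            # ordered by a 'priority' attribute, highest first.
            if not hasattr(base, 'subclasses'):
                base.subclasses = []
            base.subclasses.append(newclass)
            base.subclasses.sort(key=lambda x: getattr(x, 'priority', 0),
                                 reverse=True)
        return newclass


class Checker(metaclass=Registry):
    pass


class MesonChecker(Checker):
    patterns = ('meson.build',)


class PythonChecker(Checker):
    patterns = ('*.py',)
    priority = 1


# No hand-maintained list is needed; PythonChecker sorts first due to its
# higher priority.
print([c.__name__ for c in Checker.subclasses])  # ['PythonChecker', 'MesonChecker']

checkstyle.py itself walks these per-base lists through CheckerBase.instances(), selecting checkers either by commit_types (commit checkers) or by filename patterns (style checkers and formatters).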
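
The new TrailersChecker validates each trailer line against a table of known keys, where every key maps either to a compiled regular expression or to a callable. A rough sketch of that lookup, reusing the trailer and e-mail regular expressions from the hunk above on two made-up trailers:

import re

email_regex = re.compile(r'[^<]+ <[^@>]+@[^>]+>( # .*)?')
trailer_regex = re.compile(r'([A-Z][a-zA-Z-]*)\s*:\s*(.*)')

# Reduced version of TrailersChecker.known_trailers.
known_trailers = {
    'Reviewed-by': email_regex,
    'Signed-off-by': email_regex,
}

for trailer in ('Signed-off-by: Jane Doe <jane@example.com>',
                'Random-tag: something'):
    match = trailer_regex.fullmatch(trailer)
    if not match:
        print(f"Malformed commit trailer '{trailer}'")
        continue

    key, value = match.groups()
    validator = known_trailers.get(key)
    if not validator:
        print(f"Invalid commit trailer key '{key}'")
    elif not validator.fullmatch(value):
        print(f"Malformed value '{value}' for commit trailer '{key}'")
    else:
        print(f'{key}: OK')

In the full checker a value may also be a plain callable such as validate_reported_by, which lets Reported-by accept either an e-mail address or a Coverity CID reference.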
diff --git a/utils/codegen/controls.py b/utils/codegen/controls.py
new file mode 100644
index 00000000..e5161048
--- /dev/null
+++ b/utils/codegen/controls.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2019, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Helper classes to handle source code generation for libcamera controls
+
+
+class ControlEnum(object):
+ def __init__(self, data):
+ self.__data = data
+
+ @property
+ def description(self):
+ """The enum description"""
+ return self.__data.get('description')
+
+ @property
+ def name(self):
+ """The enum name"""
+ return self.__data.get('name')
+
+ @property
+ def value(self):
+ """The enum value"""
+ return self.__data.get('value')
+
+
+class Control(object):
+ def __init__(self, name, data, vendor, mode):
+ self.__name = name
+ self.__data = data
+ self.__enum_values = None
+ self.__size = None
+ self.__vendor = vendor
+
+ enum_values = data.get('enum')
+ if enum_values is not None:
+ self.__enum_values = [ControlEnum(enum) for enum in enum_values]
+
+ size = self.__data.get('size')
+ if size is not None:
+ if len(size) == 0:
+ raise RuntimeError(f'Control `{self.__name}` size must have at least one dimension')
+
+ # Compute the total number of elements in the array. If any of the
+            # array dimensions is a string, the array is variable-sized.
+ num_elems = 1
+ for dim in size:
+ if type(dim) is str:
+ num_elems = 0
+ break
+
+ dim = int(dim)
+ if dim <= 0:
+ raise RuntimeError(f'Control `{self.__name}` size must have positive values only')
+
+ num_elems *= dim
+
+ self.__size = num_elems
+
+ if mode == 'properties':
+ self.__direction = 'out'
+ else:
+ direction = self.__data.get('direction')
+ if direction is None:
+ raise RuntimeError(f'Control `{self.__name}` missing required field `direction`')
+ if direction not in ['in', 'out', 'inout']:
+ raise RuntimeError(f'Control `{self.__name}` direction `{direction}` is invalid; must be one of `in`, `out`, or `inout`')
+ self.__direction = direction
+
+ @property
+ def description(self):
+ """The control description"""
+ return self.__data.get('description')
+
+ @property
+ def enum_values(self):
+ """The enum values, if the control is an enumeration"""
+ if self.__enum_values is None:
+ return
+ for enum in self.__enum_values:
+ yield enum
+
+ @property
+ def enum_values_count(self):
+ """The number of enum values, if the control is an enumeration"""
+ if self.__enum_values is None:
+ return 0
+ return len(self.__enum_values)
+
+ @property
+ def is_enum(self):
+ """Is the control an enumeration"""
+ return self.__enum_values is not None
+
+ @property
+ def vendor(self):
+ """The vendor string, or None"""
+ return self.__vendor
+
+ @property
+ def name(self):
+ """The control name (CamelCase)"""
+ return self.__name
+
+ @property
+ def type(self):
+ typ = self.__data.get('type')
+ size = self.__data.get('size')
+
+ if typ == 'string':
+ return 'std::string'
+
+ if self.__size is None:
+ return typ
+
+ if self.__size:
+ return f"Span<const {typ}, {self.__size}>"
+ else:
+ return f"Span<const {typ}>"
+
+ @property
+ def direction(self):
+ in_flag = 'ControlId::Direction::In'
+ out_flag = 'ControlId::Direction::Out'
+
+ if self.__direction == 'inout':
+ return f'{in_flag} | {out_flag}'
+ if self.__direction == 'in':
+ return in_flag
+ if self.__direction == 'out':
+ return out_flag
+
+ @property
+ def element_type(self):
+ return self.__data.get('type')
+
+ @property
+ def size(self):
+ return self.__size
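
The Control helper above wraps one entry of the control definition YAML files and exposes the pieces the generators need (name, C++ type, direction flags, enum values). A hypothetical usage sketch, assuming it is run from utils/codegen/ so that controls.py is importable, and using an abbreviated inline YAML snippet rather than the real definition files:

import yaml

from controls import Control

snippet = '''
- Brightness:
    type: float
    direction: inout
    description: Adjust the image brightness.
'''

entry = yaml.safe_load(snippet)[0]       # {'Brightness': {...}}
ctrl = Control(*entry.popitem(), vendor='libcamera', mode='controls')

print(ctrl.name)        # Brightness
print(ctrl.type)        # float
print(ctrl.direction)   # ControlId::Direction::In | ControlId::Direction::Out
print(ctrl.is_enum)     # False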
diff --git a/utils/codegen/gen-controls.py b/utils/codegen/gen-controls.py
new file mode 100755
index 00000000..59b716c1
--- /dev/null
+++ b/utils/codegen/gen-controls.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2019, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Generate control definitions from YAML
+
+import argparse
+import jinja2
+import os
+import sys
+import yaml
+
+from controls import Control
+
+
+def snake_case(s):
+ return ''.join([c.isupper() and ('_' + c) or c for c in s]).strip('_')
+
+
+def format_description(description):
+ description = description.strip('\n').split('\n')
+ for i in range(1, len(description)):
+ line = description[i]
+ description[i] = (line and ' * ' or ' *') + line
+ return '\n'.join(description)
+
+
+def extend_control(ctrl, id, ranges):
+ ctrl.id = ranges[ctrl.vendor] + id + 1
+
+ if ctrl.vendor != 'libcamera':
+ ctrl.namespace = f'{ctrl.vendor}::'
+ else:
+ ctrl.namespace = ''
+
+ return ctrl
+
+
+def main(argv):
+
+ # Parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mode', '-m', type=str, required=True, choices=['controls', 'properties'],
+ help='Mode of operation')
+ parser.add_argument('--output', '-o', metavar='file', type=str,
+ help='Output file name. Defaults to standard output if not specified.')
+ parser.add_argument('--ranges', '-r', type=str, required=True,
+ help='Control id range reservation file.')
+ parser.add_argument('--template', '-t', dest='template', type=str, required=True,
+ help='Template file name.')
+ parser.add_argument('input', type=str, nargs='+',
+ help='Input file name.')
+
+ args = parser.parse_args(argv[1:])
+
+ ranges = {}
+ with open(args.ranges, 'rb') as f:
+        data = f.read()
+ ranges = yaml.safe_load(data)['ranges']
+
+ controls = {}
+ for input in args.input:
+ data = yaml.safe_load(open(input, 'rb').read())
+
+ vendor = data['vendor']
+ if vendor not in ranges.keys():
+ raise RuntimeError(f'Control id range is not defined for vendor {vendor}')
+
+ ctrls = controls.setdefault(vendor, [])
+
+ for i, ctrl in enumerate(data['controls']):
+ ctrl = Control(*ctrl.popitem(), vendor, args.mode)
+ ctrls.append(extend_control(ctrl, i, ranges))
+
+ # Sort the vendors by range numerical value
+ controls = [[vendor, ctrls] for vendor, ctrls in controls.items()]
+ controls.sort(key=lambda item: ranges[item[0]])
+
+ filename = {
+ 'controls': 'control_ids',
+ 'properties': 'property_ids',
+ }[args.mode]
+
+ data = {
+ 'filename': filename,
+ 'mode': args.mode,
+ 'controls': controls,
+ }
+
+ env = jinja2.Environment()
+ env.filters['format_description'] = format_description
+ env.filters['snake_case'] = snake_case
+ template = env.from_string(open(args.template, 'r', encoding='utf-8').read())
+ string = template.render(data)
+
+ if args.output:
+ output = open(args.output, 'w', encoding='utf-8')
+ output.write(string)
+ output.close()
+ else:
+ sys.stdout.write(string)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
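
gen-controls.py drives the actual code generation through Jinja2: it registers format_description and snake_case as template filters and renders the template passed via --template against the parsed control data. A reduced sketch of that wiring, using an inline throwaway template string instead of the real template files:

import jinja2


def snake_case(s):
    return ''.join([c.isupper() and ('_' + c) or c for c in s]).strip('_')


env = jinja2.Environment()
env.filters['snake_case'] = snake_case

# The real invocation renders the --template file with the controls
# dictionary built in main(); this inline template only demonstrates the
# filter plumbing.
template = env.from_string('{{ name }} -> {{ name | snake_case }}')
print(template.render(name='ExposureTime'))  # ExposureTime -> Exposure_Time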
diff --git a/utils/codegen/gen-formats.py b/utils/codegen/gen-formats.py
new file mode 100755
index 00000000..0c0932a5
--- /dev/null
+++ b/utils/codegen/gen-formats.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Generate formats definitions from YAML
+
+import argparse
+import re
+import string
+import sys
+import yaml
+
+
+class DRMFourCC(object):
+ format_regex = re.compile(r"#define (DRM_FORMAT_[A-Z0-9_]+)[ \t]+fourcc_code\(('.', '.', '.', '.')\)")
+ mod_vendor_regex = re.compile(r"#define DRM_FORMAT_MOD_VENDOR_([A-Z0-9_]+)[ \t]+([0-9a-fA-Fx]+)")
+ mod_regex = re.compile(r"#define ([A-Za-z0-9_]+)[ \t]+fourcc_mod_code\(([A-Z0-9_]+), ([0-9a-fA-Fx]+)\)")
+
+ def __init__(self, filename):
+ self.formats = {}
+ self.vendors = {}
+ self.mods = {}
+
+ for line in open(filename, 'rb').readlines():
+ line = line.decode('utf-8')
+
+ match = DRMFourCC.format_regex.match(line)
+ if match:
+ format, fourcc = match.groups()
+ self.formats[format] = fourcc
+ continue
+
+ match = DRMFourCC.mod_vendor_regex.match(line)
+ if match:
+ vendor, value = match.groups()
+ self.vendors[vendor] = int(value, 0)
+ continue
+
+ match = DRMFourCC.mod_regex.match(line)
+ if match:
+ mod, vendor, value = match.groups()
+ self.mods[mod] = (vendor, int(value, 0))
+ continue
+
+ def fourcc(self, name):
+ return self.formats[name]
+
+ def mod(self, name):
+ vendor, value = self.mods[name]
+ return self.vendors[vendor], value
+
+
+def generate_h(formats, drm_fourcc):
+ template = string.Template('constexpr PixelFormat ${name}{ __fourcc(${fourcc}), __mod(${mod}) };')
+
+ fmts = []
+
+ for format in formats:
+ name, format = format.popitem()
+ fourcc = drm_fourcc.fourcc(format['fourcc'])
+ if format.get('big-endian'):
+ fourcc += '| DRM_FORMAT_BIG_ENDIAN'
+
+ data = {
+ 'name': name,
+ 'fourcc': fourcc,
+ 'mod': '0, 0',
+ }
+
+ mod = format.get('mod')
+ if mod:
+ data['mod'] = '%u, %u' % drm_fourcc.mod(mod)
+
+ fmts.append(template.substitute(data))
+
+ return {'formats': '\n'.join(fmts)}
+
+
+def fill_template(template, data):
+
+ template = open(template, 'rb').read()
+ template = template.decode('utf-8')
+ template = string.Template(template)
+ return template.substitute(data)
+
+
+def main(argv):
+
+ # Parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-o', dest='output', metavar='file', type=str,
+ help='Output file name. Defaults to standard output if not specified.')
+ parser.add_argument('input', type=str,
+ help='Input file name.')
+ parser.add_argument('template', type=str,
+ help='Template file name.')
+ parser.add_argument('drm_fourcc', type=str,
+ help='Path to drm_fourcc.h.')
+ args = parser.parse_args(argv[1:])
+
+ data = open(args.input, 'rb').read()
+ formats = yaml.safe_load(data)['formats']
+ drm_fourcc = DRMFourCC(args.drm_fourcc)
+
+ data = generate_h(formats, drm_fourcc)
+ data = fill_template(args.template, data)
+
+ if args.output:
+ output = open(args.output, 'wb')
+ output.write(data.encode('utf-8'))
+ output.close()
+ else:
+ sys.stdout.write(data)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
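
As an illustration of the DRMFourCC parser above, here is its format regex applied to a hand-written drm_fourcc.h style line (the line is synthetic, not read from the real header):

import re

format_regex = re.compile(r"#define (DRM_FORMAT_[A-Z0-9_]+)[ \t]+fourcc_code\(('.', '.', '.', '.')\)")

line = "#define DRM_FORMAT_XRGB8888      fourcc_code('X', 'R', '2', '4')"
match = format_regex.match(line)
print(match.groups())  # ('DRM_FORMAT_XRGB8888', "'X', 'R', '2', '4'")
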
diff --git a/utils/codegen/gen-gst-controls.py b/utils/codegen/gen-gst-controls.py
new file mode 100755
index 00000000..df098826
--- /dev/null
+++ b/utils/codegen/gen-gst-controls.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2019, Google Inc.
+# Copyright (C) 2024, Jaslo Ziska
+#
+# Authors:
+# Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+# Jaslo Ziska <jaslo@ziska.de>
+#
+# Generate GStreamer control properties from YAML
+
+import argparse
+import jinja2
+import re
+import sys
+import yaml
+
+from controls import Control
+
+
+exposed_controls = [
+ 'AeEnable', 'AeMeteringMode', 'AeConstraintMode', 'AeExposureMode',
+ 'ExposureValue', 'ExposureTime', 'AnalogueGain', 'AeFlickerPeriod',
+ 'Brightness', 'Contrast', 'AwbEnable', 'AwbMode', 'ColourGains',
+ 'Saturation', 'Sharpness', 'ColourCorrectionMatrix', 'ScalerCrop',
+ 'DigitalGain', 'AfMode', 'AfRange', 'AfSpeed', 'AfMetering', 'AfWindows',
+ 'LensPosition', 'Gamma',
+]
+
+
+def find_common_prefix(strings):
+ prefix = strings[0]
+
+ for string in strings[1:]:
+ while string[:len(prefix)] != prefix and prefix:
+ prefix = prefix[:len(prefix) - 1]
+ if not prefix:
+ break
+
+ return prefix
+
+
+def format_description(description):
+ # Substitute doxygen keywords \sa (see also) and \todo
+ description = re.sub(r'\\sa((?: \w+)+)',
+ lambda match: 'See also: ' + ', '.join(
+ map(kebab_case, match.group(1).strip().split(' '))
+ ) + '.', description)
+ description = re.sub(r'\\todo', 'Todo:', description)
+
+ description = description.strip().split('\n')
+ return '\n'.join([
+ '"' + line.replace('\\', r'\\').replace('"', r'\"') + ' "' for line in description if line
+ ]).rstrip()
+
+
+# Custom filter to allow indenting by a string prior to Jinja version 3.0
+#
+# This function can be removed and the calls to indent_str() replaced by the
+# built-in indent() filter once support for Jinja versions older than 3.0 is dropped
+def indent_str(s, indention):
+ s += '\n'
+
+ lines = s.splitlines()
+ rv = lines.pop(0)
+
+ if lines:
+ rv += '\n' + '\n'.join(
+ indention + line if line else line for line in lines
+ )
+
+ return rv
+
+
+def snake_case(s):
+ return ''.join([
+ c.isupper() and ('_' + c.lower()) or c for c in s
+ ]).strip('_')
+
+
+def kebab_case(s):
+ return snake_case(s).replace('_', '-')
+
+
+def extend_control(ctrl):
+ if ctrl.vendor != 'libcamera':
+ ctrl.namespace = f'{ctrl.vendor}::'
+ ctrl.vendor_prefix = f'{ctrl.vendor}-'
+ else:
+ ctrl.namespace = ''
+ ctrl.vendor_prefix = ''
+
+ ctrl.is_array = ctrl.size is not None
+
+ if ctrl.is_enum:
+ # Remove common prefix from enum variant names
+ prefix = find_common_prefix([enum.name for enum in ctrl.enum_values])
+ for enum in ctrl.enum_values:
+ enum.gst_name = kebab_case(enum.name.removeprefix(prefix))
+
+ ctrl.gtype = 'enum'
+ ctrl.default = '0'
+ elif ctrl.element_type == 'bool':
+ ctrl.gtype = 'boolean'
+ ctrl.default = 'false'
+ elif ctrl.element_type == 'float':
+ ctrl.gtype = 'float'
+ ctrl.default = '0'
+ ctrl.min = '-G_MAXFLOAT'
+ ctrl.max = 'G_MAXFLOAT'
+ elif ctrl.element_type == 'int32_t':
+ ctrl.gtype = 'int'
+ ctrl.default = '0'
+ ctrl.min = 'G_MININT'
+ ctrl.max = 'G_MAXINT'
+ elif ctrl.element_type == 'int64_t':
+ ctrl.gtype = 'int64'
+ ctrl.default = '0'
+ ctrl.min = 'G_MININT64'
+ ctrl.max = 'G_MAXINT64'
+ elif ctrl.element_type == 'uint8_t':
+ ctrl.gtype = 'uchar'
+ ctrl.default = '0'
+ ctrl.min = '0'
+ ctrl.max = 'G_MAXUINT8'
+ elif ctrl.element_type == 'Rectangle':
+ ctrl.is_rectangle = True
+ ctrl.default = '0'
+ ctrl.min = '0'
+ ctrl.max = 'G_MAXINT'
+ else:
+ raise RuntimeError(f'The type `{ctrl.element_type}` is unknown')
+
+ return ctrl
+
+
+def main(argv):
+ # Parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output', '-o', metavar='file', type=str,
+ help='Output file name. Defaults to standard output if not specified.')
+ parser.add_argument('--template', '-t', dest='template', type=str, required=True,
+ help='Template file name.')
+ parser.add_argument('input', type=str, nargs='+',
+ help='Input file name.')
+
+ args = parser.parse_args(argv[1:])
+
+ controls = {}
+ for input in args.input:
+ data = yaml.safe_load(open(input, 'rb').read())
+
+ vendor = data['vendor']
+ ctrls = controls.setdefault(vendor, [])
+
+ for ctrl in data['controls']:
+ ctrl = Control(*ctrl.popitem(), vendor, mode='controls')
+
+ if ctrl.name in exposed_controls:
+ ctrls.append(extend_control(ctrl))
+
+ data = {'controls': list(controls.items())}
+
+ env = jinja2.Environment()
+ env.filters['format_description'] = format_description
+ env.filters['indent_str'] = indent_str
+ env.filters['snake_case'] = snake_case
+ env.filters['kebab_case'] = kebab_case
+ template = env.from_string(open(args.template, 'r', encoding='utf-8').read())
+ string = template.render(data)
+
+ if args.output:
+ with open(args.output, 'w', encoding='utf-8') as output:
+ output.write(string)
+ else:
+ sys.stdout.write(string)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
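
For illustration, the naming helpers above re-implemented standalone, showing the GObject-style property and enum nicks they produce (the inputs are arbitrary examples; the helper bodies mirror the ones in the script):

def snake_case(s):
    return ''.join(['_' + c.lower() if c.isupper() else c for c in s]).strip('_')

def kebab_case(s):
    return snake_case(s).replace('_', '-')

print(snake_case('AeMeteringMode'))  # ae_metering_mode
print(kebab_case('AfWindows'))       # af-windows

# Common-prefix stripping as done in extend_control(): enum names such as
# AfModeManual and AfModeContinuous share the prefix 'AfMode', so their
# GStreamer nicks become 'manual' and 'continuous'.
prefix = 'AfMode'
print(kebab_case('AfModeContinuous'.removeprefix(prefix)))  # continuous
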
diff --git a/utils/codegen/gen-header.sh b/utils/codegen/gen-header.sh
new file mode 100755
index 00000000..c78f0859
--- /dev/null
+++ b/utils/codegen/gen-header.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+dst_file="$1"
+shift
+
+cat <<EOF > "$dst_file"
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/* This file is auto-generated, do not edit! */
+/*
+ * Copyright (C) 2018-2019, Google Inc.
+ *
+ * libcamera public API
+ */
+
+#pragma once
+
+EOF
+
+headers=$(for header in "$@" ; do
+ header=$(basename "$header")
+ echo "$header"
+done | sort)
+
+for header in $headers ; do
+ echo "#include <libcamera/$header>" >> "$dst_file"
+done
diff --git a/utils/codegen/gen-ipa-pub-key.py b/utils/codegen/gen-ipa-pub-key.py
new file mode 100755
index 00000000..dc3e7d5f
--- /dev/null
+++ b/utils/codegen/gen-ipa-pub-key.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Generate the IPA module signing public key
+
+import string
+import subprocess
+import sys
+
+
+def main(argv):
+ if len(argv) != 4:
+ print('Usage: %s priv-key template output' % argv[0])
+ return 1
+
+ priv_key = argv[1]
+ template = argv[2]
+ output = argv[3]
+
+ try:
+ ret = subprocess.run(['openssl', 'rsa', '-pubout', '-in', priv_key,
+ '-outform', 'DER'],
+ stdout=subprocess.PIPE)
+ except FileNotFoundError:
+ print('Please install openssl to sign IPA modules')
+ return 1
+
+ ipa_key = ['0x%02x' % c for c in ret.stdout]
+ ipa_key = [', '.join(ipa_key[bound:bound + 8]) for bound in range(0, len(ipa_key), 8)]
+ ipa_key = ',\n\t'.join(ipa_key)
+ data = {'ipa_key': ipa_key}
+
+ template = open(template, 'rb').read()
+ template = template.decode('utf-8')
+ template = string.Template(template)
+
+ f = open(output, 'wb')
+ f.write(template.substitute(data).encode('utf-8'))
+ f.close()
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
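
For illustration, the byte formatting above applied to fake key material (the real input is the DER-encoded public key printed by openssl):

der = bytes(range(20))

ipa_key = ['0x%02x' % c for c in der]
ipa_key = [', '.join(ipa_key[bound:bound + 8]) for bound in range(0, len(ipa_key), 8)]
print(',\n\t'.join(ipa_key))
# 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
# 	0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
# 	0x10, 0x11, 0x12, 0x13
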
diff --git a/utils/codegen/gen-tp-header.py b/utils/codegen/gen-tp-header.py
new file mode 100755
index 00000000..6769c7ce
--- /dev/null
+++ b/utils/codegen/gen-tp-header.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Paul Elder <paul.elder@ideasonboard.com>
+#
+# Generate a header file containing LTTng tracepoints
+
+import jinja2
+import pathlib
+import os
+import sys
+
+def main(argv):
+ if len(argv) < 4:
+ print(f'Usage: {argv[0]} include_build_dir output template tp_files...')
+ return 1
+
+ output = argv[2]
+ template = argv[3]
+
+ path = pathlib.Path(output).absolute().relative_to(argv[1])
+
+ source = ''
+ for fname in argv[4:]:
+ source += open(fname, 'r', encoding='utf-8').read() + '\n\n'
+
+ template = jinja2.Template(open(template, 'r', encoding='utf-8').read())
+ string = template.render(path=path, source=source)
+
+ pathlib.Path(output).write_text(string, encoding='utf-8')
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
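
For illustration, how the path handed to the template above is computed; the directory names here are invented:

import pathlib

output = '/build/include/libcamera/internal/tracepoints.h'
include_build_dir = '/build/include'

print(pathlib.Path(output).absolute().relative_to(include_build_dir))
# libcamera/internal/tracepoints.h
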
diff --git a/utils/codegen/ipc/extract-docs.py b/utils/codegen/ipc/extract-docs.py
new file mode 100755
index 00000000..61f44cae
--- /dev/null
+++ b/utils/codegen/ipc/extract-docs.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2021, Google Inc.
+#
+# Author: Paul Elder <paul.elder@ideasonboard.com>
+#
+# Extract doxygen documentation from mojom files
+
+import argparse
+import re
+import sys
+
+regex_block_start = re.compile(r'^/\*\*$')
+regex_block_end = re.compile(r'^ \*/$')
+regex_spdx = re.compile(r'^/\* SPDX-License-Identifier: .* \*/$')
+
+
+def main(argv):
+
+ # Parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-o', dest='output', metavar='file',
+ type=argparse.FileType('w', encoding='utf-8'),
+ default=sys.stdout,
+ help='Output file name (default: standard output)')
+ parser.add_argument('input', type=str,
+ help='Input file name.')
+ args = parser.parse_args(argv[1:])
+
+ lines = open(args.input, 'r').readlines()
+ pipeline = args.input.split('/')[-1].replace('.mojom', '')
+
+ if not regex_spdx.match(lines[0]):
+ raise Exception(f'Missing SPDX license header in {args.input}')
+
+ data = lines[0]
+ data += f'''\
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * Docs file for generated {pipeline}.mojom
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+namespace libcamera {{
+
+'''
+
+ in_block = False
+ comment = ''
+ for lineno, line in enumerate(lines, start=1):
+ if regex_block_start.match(line):
+ if in_block:
+ raise SyntaxError('Expected end of comment',
+ (args.input, lineno, 1, line))
+ in_block = True
+ comment = line
+ continue
+
+ if regex_block_end.match(line):
+ if in_block:
+ comment += line
+ data += comment + '\n'
+ in_block = False
+ continue
+
+ if in_block:
+ comment += line
+
+ data += '} /* namespace libcamera */\n'
+
+ args.output.write(data)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
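
For illustration, the /** ... */ extraction loop above reduced to a standalone sketch and run on a synthetic mojom fragment (not a real libcamera interface):

import re

regex_block_start = re.compile(r'^/\*\*$')
regex_block_end = re.compile(r'^ \*/$')

lines = [
    '/**\n',
    ' * \\fn IPAExampleInterface::init()\n',
    ' * \\brief Initialise the IPA\n',
    ' */\n',
    'init() => (int32 ret);\n',
]

in_block = False
doc = ''
for line in lines:
    if regex_block_start.match(line):
        in_block = True
    if in_block:
        doc += line
    if regex_block_end.match(line):
        in_block = False

print(doc)  # only the four comment lines are kept, the declaration is dropped
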
diff --git a/utils/codegen/ipc/generate.py b/utils/codegen/ipc/generate.py
new file mode 100755
index 00000000..dfbe659b
--- /dev/null
+++ b/utils/codegen/ipc/generate.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Paul Elder <paul.elder@ideasonboard.com>
+#
+# Run mojo code generator for generating libcamera IPC files
+
+import os
+import sys
+
+sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/bindings')
+
+import mojo.public.tools.bindings.mojom_bindings_generator as generator
+
+def _GetModulePath(path, output_dir):
+ return os.path.join(output_dir, path.relative_path())
+
+
+# Disable the attribute checker to support our custom attributes. Ideally we
+# should add the attributes to the list of allowed attributes in
+# utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py,
+# but we're trying hard to use the upstream mojom as-is.
+if hasattr(generator, '_BUILTIN_CHECKS'):
+ del generator._BUILTIN_CHECKS['attributes']
+
+# Override the mojo code generator's generator list to only contain our
+# libcamera generator
+generator._BUILTIN_GENERATORS = {'libcamera': 'mojom_libcamera_generator'}
+
+# Override the mojo code generator's _GetModulePath function so that it does
+# not append the '-module' suffix when locating mojom modules. This lets us
+# pass the mojom module path as-is, without trimming the suffix afterwards.
+generator._GetModulePath = _GetModulePath
+
+generator.main()
diff --git a/utils/codegen/ipc/generators/__init__.py b/utils/codegen/ipc/generators/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/codegen/ipc/generators/__init__.py
diff --git a/utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
new file mode 100644
index 00000000..3942e570
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
@@ -0,0 +1,42 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "definition_functions.tmpl" as funcs -%}
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * libcamera core definitions for Image Processing Algorithms
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+#pragma once
+
+{% if has_map %}#include <map>{% endif %}
+{% if has_string %}#include <string>{% endif %}
+{% if has_array %}#include <vector>{% endif %}
+
+#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+
+#include <libcamera/ipa/ipa_interface.h>
+
+namespace libcamera {
+
+{# \todo Use const char * instead of std::string for strings #}
+{% for const in consts %}
+static const {{const.kind|name}} {{const.mojom_name}} = {{const.value}};
+{% endfor %}
+
+{% for enum in enums_gen_header %}
+{{funcs.define_enum(enum)}}
+{% endfor %}
+
+{%- for struct in structs_gen_header %}
+{{funcs.define_struct(struct)}}
+{% endfor %}
+
+} /* namespace libcamera */
diff --git a/utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
new file mode 100644
index 00000000..036518f6
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
@@ -0,0 +1,44 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "serializer.tmpl" as serializer -%}
+
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * Data serializer for core libcamera definitions for IPA
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+#pragma once
+
+#include <tuple>
+#include <vector>
+
+#include <libcamera/ipa/core_ipa_interface.h>
+
+#include "libcamera/internal/control_serializer.h"
+#include "libcamera/internal/ipa_data_serializer.h"
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(IPADataSerializer)
+{% for struct in structs_gen_serializer %}
+template<>
+class IPADataSerializer<{{struct|name}}>
+{
+public:
+{{- serializer.serializer(struct, "")}}
+{%- if struct|has_fd %}
+{{serializer.deserializer_fd(struct, "")}}
+{%- else %}
+{{serializer.deserializer_no_fd(struct, "")}}
+{{serializer.deserializer_fd_simple(struct, "")}}
+{%- endif %}
+};
+{% endfor %}
+
+} /* namespace libcamera */
diff --git a/utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl b/utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl
new file mode 100644
index 00000000..8b8509f3
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl
@@ -0,0 +1,56 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+
+{#
+ # \brief Generate enum definition
+ #
+ # \param enum Enum object whose definition is to be generated
+ #}
+{%- macro define_enum(enum) -%}
+enum{{" class" if enum|is_scoped}} {{enum.mojom_name}} {
+{%- for field in enum.fields %}
+ {{field.mojom_name}} = {{field.numeric_value}},
+{%- endfor %}
+};
+{%- endmacro -%}
+
+{#
+ # \brief Generate struct definition
+ #
+ # \param struct Struct object whose definition is to be generated
+ #}
+{%- macro define_struct(struct) -%}
+struct {{struct.mojom_name}}
+{
+public:
+#ifndef __DOXYGEN__
+ {{struct.mojom_name}}() {%- if struct|has_default_fields %}
+ :{% endif %}
+{%- for field in struct.fields|with_default_values -%}
+{{" " if loop.first}}{{field.mojom_name}}({{field|default_value}}){{", " if not loop.last}}
+{%- endfor %}
+ {
+ }
+
+ {{struct.mojom_name}}(
+{%- for field in struct.fields -%}
+{{"const " if not field|is_pod}}{{field|name}} {{"&" if not field|is_pod}}_{{field.mojom_name}}{{", " if not loop.last}}
+{%- endfor -%}
+)
+ :
+{%- for field in struct.fields -%}
+{{" " if loop.first}}{{field.mojom_name}}(_{{field.mojom_name}}){{", " if not loop.last}}
+{%- endfor %}
+ {
+ }
+#endif
+
+{% for field in struct.fields %}
+ {{field|name}} {{field.mojom_name}};
+{%- endfor %}
+};
+{%- endmacro -%}
+
+
diff --git a/utils/codegen/ipc/generators/libcamera_templates/meson.build b/utils/codegen/ipc/generators/libcamera_templates/meson.build
new file mode 100644
index 00000000..70664eab
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/meson.build
@@ -0,0 +1,14 @@
+# SPDX-License-Identifier: CC0-1.0
+
+mojom_template_files = files([
+ 'core_ipa_interface.h.tmpl',
+ 'core_ipa_serializer.h.tmpl',
+ 'definition_functions.tmpl',
+ 'module_ipa_interface.h.tmpl',
+ 'module_ipa_proxy.cpp.tmpl',
+ 'module_ipa_proxy.h.tmpl',
+ 'module_ipa_proxy_worker.cpp.tmpl',
+ 'module_ipa_serializer.h.tmpl',
+ 'proxy_functions.tmpl',
+ 'serializer.tmpl',
+])
diff --git a/utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
new file mode 100644
index 00000000..5d70ea6a
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
@@ -0,0 +1,92 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "definition_functions.tmpl" as funcs -%}
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * Image Processing Algorithm interface for {{module_name}}
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+#pragma once
+
+{% if has_map %}#include <map>{% endif %}
+{% if has_string %}#include <string>{% endif %}
+{% if has_array %}#include <vector>{% endif %}
+
+#include <libcamera/base/flags.h>
+#include <libcamera/base/signal.h>
+
+#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+
+#include <libcamera/ipa/core_ipa_interface.h>
+#include <libcamera/ipa/ipa_interface.h>
+
+namespace libcamera {
+{%- if has_namespace %}
+{% for ns in namespace %}
+namespace {{ns}} {
+{% endfor %}
+{%- endif %}
+
+{% for const in consts %}
+const {{const.kind|name}} {{const.mojom_name}} = {{const.value}};
+{% endfor %}
+
+enum class {{cmd_enum_name}} {
+ Exit = 0,
+{%- for method in interface_main.methods %}
+ {{method.mojom_name|cap}} = {{loop.index}},
+{%- endfor %}
+};
+
+enum class {{cmd_event_enum_name}} {
+{%- for method in interface_event.methods %}
+ {{method.mojom_name|cap}} = {{loop.index}},
+{%- endfor %}
+};
+
+{% for enum in enums %}
+{{funcs.define_enum(enum)}}
+{% endfor %}
+
+{%- for struct in structs_nonempty %}
+{{funcs.define_struct(struct)}}
+{% endfor %}
+
+{#-
+Any consts or #defines should be moved to the mojom file.
+#}
+class {{interface_name}} : public IPAInterface
+{
+public:
+{% for method in interface_main.methods %}
+ virtual {{method|method_return_value}} {{method.mojom_name}}(
+{%- for param in method|method_parameters %}
+ {{param}}{{- "," if not loop.last}}
+{%- endfor -%}
+) = 0;
+{% endfor %}
+
+{%- for method in interface_event.methods %}
+ Signal<
+{%- for param in method.parameters -%}
+ {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
+ {{- ", " if not loop.last}}
+{%- endfor -%}
+> {{method.mojom_name}};
+{% endfor -%}
+};
+
+{%- if has_namespace %}
+{% for ns in namespace|reverse %}
+} /* namespace {{ns}} */
+{% endfor %}
+{%- endif %}
+} /* namespace libcamera */
diff --git a/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
new file mode 100644
index 00000000..ce3cc5ab
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
@@ -0,0 +1,255 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "proxy_functions.tmpl" as proxy_funcs -%}
+
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * Image Processing Algorithm proxy for {{module_name}}
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+#include <libcamera/ipa/{{module_name}}_ipa_proxy.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include <libcamera/ipa/ipa_module_info.h>
+#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
+#include <libcamera/ipa/{{module_name}}_ipa_serializer.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+
+#include "libcamera/internal/control_serializer.h"
+#include "libcamera/internal/ipa_data_serializer.h"
+#include "libcamera/internal/ipa_module.h"
+#include "libcamera/internal/ipa_proxy.h"
+#include "libcamera/internal/ipc_pipe.h"
+#include "libcamera/internal/ipc_pipe_unixsocket.h"
+#include "libcamera/internal/ipc_unixsocket.h"
+#include "libcamera/internal/process.h"
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(IPAProxy)
+
+{%- if has_namespace %}
+{% for ns in namespace %}
+namespace {{ns}} {
+{% endfor %}
+{%- endif %}
+
+{{proxy_name}}::{{proxy_name}}(IPAModule *ipam, bool isolate)
+ : IPAProxy(ipam), isolate_(isolate),
+ controlSerializer_(ControlSerializer::Role::Proxy), seq_(0)
+{
+ LOG(IPAProxy, Debug)
+ << "initializing {{module_name}} proxy: loading IPA from "
+ << ipam->path();
+
+ if (isolate_) {
+ const std::string proxyWorkerPath = resolvePath("{{module_name}}_ipa_proxy");
+ if (proxyWorkerPath.empty()) {
+ LOG(IPAProxy, Error)
+ << "Failed to get proxy worker path";
+ return;
+ }
+
+ ipc_ = std::make_unique<IPCPipeUnixSocket>(ipam->path().c_str(),
+ proxyWorkerPath.c_str());
+ if (!ipc_->isConnected()) {
+ LOG(IPAProxy, Error) << "Failed to create IPCPipe";
+ return;
+ }
+
+ ipc_->recv.connect(this, &{{proxy_name}}::recvMessage);
+
+ valid_ = true;
+ return;
+ }
+
+ if (!ipam->load())
+ return;
+
+ IPAInterface *ipai = ipam->createInterface();
+ if (!ipai) {
+ LOG(IPAProxy, Error)
+ << "Failed to create IPA context for " << ipam->path();
+ return;
+ }
+
+ ipa_ = std::unique_ptr<{{interface_name}}>(static_cast<{{interface_name}} *>(ipai));
+ proxy_.setIPA(ipa_.get());
+
+{% for method in interface_event.methods %}
+ ipa_->{{method.mojom_name}}.connect(this, &{{proxy_name}}::{{method.mojom_name}}Thread);
+{%- endfor %}
+
+ valid_ = true;
+}
+
+{{proxy_name}}::~{{proxy_name}}()
+{
+ if (isolate_) {
+ IPCMessage::Header header =
+ { static_cast<uint32_t>({{cmd_enum_name}}::Exit), seq_++ };
+ IPCMessage msg(header);
+ ipc_->sendAsync(msg);
+ }
+}
+
+{% if interface_event.methods|length > 0 %}
+void {{proxy_name}}::recvMessage(const IPCMessage &data)
+{
+ size_t dataSize = data.data().size();
+ {{cmd_event_enum_name}} _cmd = static_cast<{{cmd_event_enum_name}}>(data.header().cmd);
+
+ switch (_cmd) {
+{%- for method in interface_event.methods %}
+ case {{cmd_event_enum_name}}::{{method.mojom_name|cap}}: {
+ {{method.mojom_name}}IPC(data.data().cbegin(), dataSize, data.fds());
+ break;
+ }
+{%- endfor %}
+ default:
+ LOG(IPAProxy, Error) << "Unknown command " << static_cast<uint32_t>(_cmd);
+ }
+}
+{%- endif %}
+
+{% for method in interface_main.methods %}
+{{proxy_funcs.func_sig(proxy_name, method)}}
+{
+ if (isolate_)
+ {{"return " if method|method_return_value != "void"}}{{method.mojom_name}}IPC(
+{%- for param in method|method_param_names -%}
+ {{param}}{{- ", " if not loop.last}}
+{%- endfor -%}
+);
+ else
+ {{"return " if method|method_return_value != "void"}}{{method.mojom_name}}Thread(
+{%- for param in method|method_param_names -%}
+ {{param}}{{- ", " if not loop.last}}
+{%- endfor -%}
+);
+}
+
+{{proxy_funcs.func_sig(proxy_name, method, "Thread")}}
+{
+{%- if method.mojom_name == "stop" %}
+ {{proxy_funcs.stop_thread_body()}}
+{%- elif method.mojom_name == "init" %}
+ {{ method|method_return_value + " _ret = " if method|method_return_value != "void" -}}
+ ipa_->{{method.mojom_name}}(
+ {%- for param in method|method_param_names -%}
+ {{param}}{{- ", " if not loop.last}}
+ {%- endfor -%}
+);
+
+ proxy_.moveToThread(&thread_);
+
+ return {{ "_ret" if method|method_return_value != "void" }};
+{%- elif method.mojom_name == "start" %}
+ state_ = ProxyRunning;
+ thread_.start();
+
+ {{ "return " if method|method_return_value != "void" -}}
+ proxy_.invokeMethod(&ThreadProxy::start, ConnectionTypeBlocking
+ {{- ", " if method|method_param_names}}
+ {%- for param in method|method_param_names -%}
+ {{param}}{{- ", " if not loop.last}}
+ {%- endfor -%}
+);
+{%- elif not method|is_async %}
+ {{ "return " if method|method_return_value != "void" -}}
+ ipa_->{{method.mojom_name}}(
+ {%- for param in method|method_param_names -%}
+ {{param}}{{- ", " if not loop.last}}
+ {%- endfor -%}
+);
+{% elif method|is_async %}
+ ASSERT(state_ == ProxyRunning);
+ proxy_.invokeMethod(&ThreadProxy::{{method.mojom_name}}, ConnectionTypeQueued
+ {%- for param in method|method_param_names -%}
+ , {{param}}
+ {%- endfor -%}
+);
+{%- endif %}
+}
+
+{{proxy_funcs.func_sig(proxy_name, method, "IPC")}}
+{
+{%- if method.mojom_name == "configure" %}
+ controlSerializer_.reset();
+{%- endif %}
+{%- set has_output = true if method|method_param_outputs|length > 0 or method|method_return_value != "void" %}
+{%- set cmd = cmd_enum_name + "::" + method.mojom_name|cap %}
+ IPCMessage::Header _header = { static_cast<uint32_t>({{cmd}}), seq_++ };
+ IPCMessage _ipcInputBuf(_header);
+{%- if has_output %}
+ IPCMessage _ipcOutputBuf;
+{%- endif %}
+
+{{proxy_funcs.serialize_call(method|method_param_inputs, '_ipcInputBuf.data()', '_ipcInputBuf.fds()')}}
+
+{% if method|is_async %}
+ int _ret = ipc_->sendAsync(_ipcInputBuf);
+{%- else %}
+ int _ret = ipc_->sendSync(_ipcInputBuf
+{{- ", &_ipcOutputBuf" if has_output -}}
+);
+{%- endif %}
+ if (_ret < 0) {
+ LOG(IPAProxy, Error) << "Failed to call {{method.mojom_name}}";
+{%- if method|method_return_value != "void" %}
+ return static_cast<{{method|method_return_value}}>(_ret);
+{%- else %}
+ return;
+{%- endif %}
+ }
+{% if method|method_return_value != "void" %}
+ {{method|method_return_value}} _retValue = IPADataSerializer<{{method|method_return_value}}>::deserialize(_ipcOutputBuf.data(), 0);
+
+{{proxy_funcs.deserialize_call(method|method_param_outputs, '_ipcOutputBuf.data()', '_ipcOutputBuf.fds()', init_offset = method|method_return_value|byte_width|int)}}
+
+ return _retValue;
+
+{% elif method|method_param_outputs|length > 0 %}
+{{proxy_funcs.deserialize_call(method|method_param_outputs, '_ipcOutputBuf.data()', '_ipcOutputBuf.fds()')}}
+{% endif -%}
+}
+
+{% endfor %}
+
+{% for method in interface_event.methods %}
+{{proxy_funcs.func_sig(proxy_name, method, "Thread")}}
+{
+ ASSERT(state_ != ProxyStopped);
+ {{method.mojom_name}}.emit({{method.parameters|params_comma_sep}});
+}
+
+void {{proxy_name}}::{{method.mojom_name}}IPC(
+ [[maybe_unused]] std::vector<uint8_t>::const_iterator data,
+ [[maybe_unused]] size_t dataSize,
+ [[maybe_unused]] const std::vector<SharedFD> &fds)
+{
+{%- for param in method.parameters %}
+ {{param|name}} {{param.mojom_name}};
+{%- endfor %}
+{{proxy_funcs.deserialize_call(method.parameters, 'data', 'fds', false, false, true, 'dataSize')}}
+ {{method.mojom_name}}.emit({{method.parameters|params_comma_sep}});
+}
+{% endfor %}
+
+{%- if has_namespace %}
+{% for ns in namespace|reverse %}
+} /* namespace {{ns}} */
+{% endfor %}
+{%- endif %}
+} /* namespace libcamera */
diff --git a/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
new file mode 100644
index 00000000..e213b18a
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
@@ -0,0 +1,132 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "proxy_functions.tmpl" as proxy_funcs -%}
+
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * Image Processing Algorithm proxy for {{module_name}}
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+#pragma once
+
+#include <libcamera/ipa/ipa_interface.h>
+#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
+
+#include <libcamera/base/object.h>
+#include <libcamera/base/thread.h>
+
+#include "libcamera/internal/control_serializer.h"
+#include "libcamera/internal/ipa_proxy.h"
+#include "libcamera/internal/ipc_pipe.h"
+#include "libcamera/internal/ipc_pipe_unixsocket.h"
+#include "libcamera/internal/ipc_unixsocket.h"
+
+namespace libcamera {
+{%- if has_namespace %}
+{% for ns in namespace %}
+namespace {{ns}} {
+{% endfor %}
+{%- endif %}
+
+class {{proxy_name}} : public IPAProxy, public {{interface_name}}, public Object
+{
+public:
+ {{proxy_name}}(IPAModule *ipam, bool isolate);
+ ~{{proxy_name}}();
+
+{% for method in interface_main.methods %}
+{{proxy_funcs.func_sig(proxy_name, method, "", false, true)|indent(8, true)}};
+{% endfor %}
+
+{%- for method in interface_event.methods %}
+ Signal<
+{%- for param in method.parameters -%}
+ {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
+ {{- ", " if not loop.last}}
+{%- endfor -%}
+> {{method.mojom_name}};
+{% endfor %}
+
+private:
+ void recvMessage(const IPCMessage &data);
+
+{% for method in interface_main.methods %}
+{{proxy_funcs.func_sig(proxy_name, method, "Thread", false)|indent(8, true)}};
+{{proxy_funcs.func_sig(proxy_name, method, "IPC", false)|indent(8, true)}};
+{% endfor %}
+{% for method in interface_event.methods %}
+{{proxy_funcs.func_sig(proxy_name, method, "Thread", false)|indent(8, true)}};
+ void {{method.mojom_name}}IPC(
+ std::vector<uint8_t>::const_iterator data,
+ size_t dataSize,
+ const std::vector<SharedFD> &fds);
+{% endfor %}
+
+ /* Helper class to invoke async functions in another thread. */
+ class ThreadProxy : public Object
+ {
+ public:
+ ThreadProxy()
+ : ipa_(nullptr)
+ {
+ }
+
+ void setIPA({{interface_name}} *ipa)
+ {
+ ipa_ = ipa;
+ }
+
+ void stop()
+ {
+ ipa_->stop();
+ }
+{% for method in interface_main.methods %}
+{%- if method|is_async %}
+ {{proxy_funcs.func_sig(proxy_name, method, "", false)|indent(16)}}
+ {
+ ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}});
+ }
+{%- elif method.mojom_name == "start" %}
+ {{proxy_funcs.func_sig(proxy_name, method, "", false)|indent(16)}}
+ {
+{%- if method|method_return_value != "void" %}
+ return ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}});
+{%- else %}
+ ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}}
+ {{- ", " if method|method_param_outputs|params_comma_sep -}}
+ {{- method|method_param_outputs|params_comma_sep}});
+{%- endif %}
+ }
+{%- endif %}
+{%- endfor %}
+
+ private:
+ {{interface_name}} *ipa_;
+ };
+
+ Thread thread_;
+ ThreadProxy proxy_;
+ std::unique_ptr<{{interface_name}}> ipa_;
+
+ const bool isolate_;
+
+ std::unique_ptr<IPCPipeUnixSocket> ipc_;
+
+ ControlSerializer controlSerializer_;
+
+{# \todo Move this to IPCPipe #}
+ uint32_t seq_;
+};
+
+{%- if has_namespace %}
+{% for ns in namespace|reverse %}
+} /* namespace {{ns}} */
+{% endfor %}
+{%- endif %}
+} /* namespace libcamera */
diff --git a/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
new file mode 100644
index 00000000..1f990d3f
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
@@ -0,0 +1,246 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "proxy_functions.tmpl" as proxy_funcs -%}
+
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * Image Processing Algorithm proxy worker for {{module_name}}
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+{#- \todo Split proxy worker into IPC worker and proxy worker. #}
+
+#include <algorithm>
+#include <iostream>
+#include <sys/types.h>
+#include <tuple>
+#include <unistd.h>
+
+#include <libcamera/ipa/ipa_interface.h>
+#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
+#include <libcamera/ipa/{{module_name}}_ipa_serializer.h>
+#include <libcamera/logging.h>
+
+#include <libcamera/base/event_dispatcher.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/unique_fd.h>
+
+#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/control_serializer.h"
+#include "libcamera/internal/ipa_data_serializer.h"
+#include "libcamera/internal/ipa_module.h"
+#include "libcamera/internal/ipa_proxy.h"
+#include "libcamera/internal/ipc_pipe.h"
+#include "libcamera/internal/ipc_pipe_unixsocket.h"
+#include "libcamera/internal/ipc_unixsocket.h"
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY({{proxy_worker_name}})
+
+{%- if has_namespace %}
+{% for ns in namespace -%}
+using namespace {{ns}};
+{% endfor %}
+{%- endif %}
+
+class {{proxy_worker_name}}
+{
+public:
+ {{proxy_worker_name}}()
+ : ipa_(nullptr),
+ controlSerializer_(ControlSerializer::Role::Worker),
+ exit_(false) {}
+
+ ~{{proxy_worker_name}}() {}
+
+ void readyRead()
+ {
+ IPCUnixSocket::Payload _message;
+ int _retRecv = socket_.receive(&_message);
+ if (_retRecv) {
+ LOG({{proxy_worker_name}}, Error)
+ << "Receive message failed: " << _retRecv;
+ return;
+ }
+
+ IPCMessage _ipcMessage(_message);
+
+ {{cmd_enum_name}} _cmd = static_cast<{{cmd_enum_name}}>(_ipcMessage.header().cmd);
+
+ switch (_cmd) {
+ case {{cmd_enum_name}}::Exit: {
+ exit_ = true;
+ break;
+ }
+
+{% for method in interface_main.methods %}
+ case {{cmd_enum_name}}::{{method.mojom_name|cap}}: {
+{%- if method.mojom_name == "configure" %}
+ controlSerializer_.reset();
+{%- endif %}
+ {{proxy_funcs.deserialize_call(method|method_param_inputs, '_ipcMessage.data()', '_ipcMessage.fds()', false, true)|indent(16, true)}}
+{% for param in method|method_param_outputs %}
+ {{param|name}} {{param.mojom_name}};
+{% endfor %}
+{%- if method|method_return_value != "void" %}
+ {{method|method_return_value}} _callRet =
+{%- endif -%}
+ ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}}
+{{- ", " if method|method_param_outputs|params_comma_sep -}}
+{%- for param in method|method_param_outputs -%}
+&{{param.mojom_name}}{{", " if not loop.last}}
+{%- endfor -%}
+);
+{% if not method|is_async %}
+ IPCMessage::Header header = { _ipcMessage.header().cmd, _ipcMessage.header().cookie };
+ IPCMessage _response(header);
+{%- if method|method_return_value != "void" %}
+ std::vector<uint8_t> _callRetBuf;
+ std::tie(_callRetBuf, std::ignore) =
+ IPADataSerializer<{{method|method_return_value}}>::serialize(_callRet);
+ _response.data().insert(_response.data().end(), _callRetBuf.cbegin(), _callRetBuf.cend());
+{%- endif %}
+ {{proxy_funcs.serialize_call(method|method_param_outputs, "_response.data()", "_response.fds()")|indent(16, true)}}
+ int _ret = socket_.send(_response.payload());
+ if (_ret < 0) {
+ LOG({{proxy_worker_name}}, Error)
+ << "Reply to {{method.mojom_name}}() failed: " << _ret;
+ }
+ LOG({{proxy_worker_name}}, Debug) << "Done replying to {{method.mojom_name}}()";
+{%- endif %}
+ break;
+ }
+{% endfor %}
+ default:
+ LOG({{proxy_worker_name}}, Error) << "Unknown command " << _ipcMessage.header().cmd;
+ }
+ }
+
+ int init(std::unique_ptr<IPAModule> &ipam, UniqueFD socketfd)
+ {
+ if (socket_.bind(std::move(socketfd)) < 0) {
+ LOG({{proxy_worker_name}}, Error)
+ << "IPC socket binding failed";
+ return EXIT_FAILURE;
+ }
+ socket_.readyRead.connect(this, &{{proxy_worker_name}}::readyRead);
+
+ ipa_ = dynamic_cast<{{interface_name}} *>(ipam->createInterface());
+ if (!ipa_) {
+ LOG({{proxy_worker_name}}, Error)
+ << "Failed to create IPA interface instance";
+ return EXIT_FAILURE;
+ }
+{% for method in interface_event.methods %}
+ ipa_->{{method.mojom_name}}.connect(this, &{{proxy_worker_name}}::{{method.mojom_name}});
+{%- endfor %}
+ return 0;
+ }
+
+ void run()
+ {
+ EventDispatcher *dispatcher = Thread::current()->eventDispatcher();
+ while (!exit_)
+ dispatcher->processEvents();
+ }
+
+ void cleanup()
+ {
+ delete ipa_;
+ socket_.close();
+ }
+
+private:
+
+{% for method in interface_event.methods %}
+{{proxy_funcs.func_sig(proxy_name, method, "", false)|indent(8, true)}}
+ {
+ IPCMessage::Header header = {
+ static_cast<uint32_t>({{cmd_event_enum_name}}::{{method.mojom_name|cap}}),
+ 0
+ };
+ IPCMessage _message(header);
+
+ {{proxy_funcs.serialize_call(method|method_param_inputs, "_message.data()", "_message.fds()")}}
+
+ int _ret = socket_.send(_message.payload());
+ if (_ret < 0)
+ LOG({{proxy_worker_name}}, Error)
+ << "Sending event {{method.mojom_name}}() failed: " << _ret;
+
+ LOG({{proxy_worker_name}}, Debug) << "{{method.mojom_name}} done";
+ }
+{% endfor %}
+
+ {{interface_name}} *ipa_;
+ IPCUnixSocket socket_;
+
+ ControlSerializer controlSerializer_;
+
+ bool exit_;
+};
+
+int main(int argc, char **argv)
+{
+{#- \todo Handle enabling debugging more dynamically. #}
+ /* Change the #if 0 below to #if 1 for debugging. */
+#if 0
+ std::string logPath = "/tmp/libcamera.worker." +
+ std::to_string(getpid()) + ".log";
+ logSetFile(logPath.c_str());
+#endif
+
+ if (argc < 3) {
+ LOG({{proxy_worker_name}}, Error)
+ << "Tried to start worker with no args: "
+ << "expected <path to IPA so> <fd to bind unix socket>";
+ return EXIT_FAILURE;
+ }
+
+ UniqueFD fd(std::stoi(argv[2]));
+ LOG({{proxy_worker_name}}, Info)
+ << "Starting worker for IPA module " << argv[1]
+ << " with IPC fd = " << fd.get();
+
+ std::unique_ptr<IPAModule> ipam = std::make_unique<IPAModule>(argv[1]);
+ if (!ipam->isValid() || !ipam->load()) {
+ LOG({{proxy_worker_name}}, Error)
+ << "IPAModule " << argv[1] << " isn't valid";
+ return EXIT_FAILURE;
+ }
+
+ /*
+ * Shutdown of the proxy worker can be pre-empted by events like
+ * SIGINT/SIGTERM, even before the pipeline handler can request
+ * shutdown. Hence, assign a new process group ID to prevent signals
+ * sent to the application from being delivered to the proxy.
+ */
+ if (setpgid(0, 0) < 0) {
+ int err = errno;
+ LOG({{proxy_worker_name}}, Warning)
+ << "Failed to set new gid: " << strerror(err);
+ }
+
+ {{proxy_worker_name}} proxyWorker;
+ int ret = proxyWorker.init(ipam, std::move(fd));
+ if (ret) {
+ LOG({{proxy_worker_name}}, Error)
+ << "Failed to initialize proxy worker";
+ return ret;
+ }
+
+ LOG({{proxy_worker_name}}, Debug) << "Proxy worker successfully initialized";
+
+ proxyWorker.run();
+
+ proxyWorker.cleanup();
+
+ return 0;
+}
diff --git a/utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
new file mode 100644
index 00000000..cd5a65a9
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
@@ -0,0 +1,45 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{%- import "serializer.tmpl" as serializer -%}
+
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * Image Processing Algorithm data serializer for {{module_name}}
+ *
+ * This file is auto-generated. Do not edit.
+ */
+
+#pragma once
+
+#include <tuple>
+#include <vector>
+
+#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
+#include <libcamera/ipa/core_ipa_serializer.h>
+
+#include "libcamera/internal/control_serializer.h"
+#include "libcamera/internal/ipa_data_serializer.h"
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(IPADataSerializer)
+{% for struct in structs_nonempty %}
+template<>
+class IPADataSerializer<{{struct|name_full}}>
+{
+public:
+{{- serializer.serializer(struct, namespace_str)}}
+{%- if struct|has_fd %}
+{{serializer.deserializer_fd(struct, namespace_str)}}
+{%- else %}
+{{serializer.deserializer_no_fd(struct, namespace_str)}}
+{{serializer.deserializer_fd_simple(struct, namespace_str)}}
+{%- endif %}
+};
+{% endfor %}
+
+} /* namespace libcamera */
diff --git a/utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl b/utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl
new file mode 100644
index 00000000..b5797b14
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl
@@ -0,0 +1,202 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{#
+ # \brief Generate function prototype
+ #
+ # \param class Class name
+ # \param method mojom Method object
+ # \param suffix Suffix to append to \a method function name
+ # \param need_class_name If true, generate class name with function
+ # \param override If true, generate override tag after the function prototype
+ #}
+{%- macro func_sig(class, method, suffix = "", need_class_name = true, override = false) -%}
+{{method|method_return_value}} {{class + "::" if need_class_name}}{{method.mojom_name}}{{suffix}}(
+{%- for param in method|method_parameters %}
+ {{param}}{{- "," if not loop.last}}
+{%- endfor -%}
+){{" override" if override}}
+{%- endmacro -%}
+
+{#
+ # \brief Generate function body for IPA stop() function for thread
+ #}
+{%- macro stop_thread_body() -%}
+ ASSERT(state_ != ProxyStopping);
+ if (state_ != ProxyRunning)
+ return;
+
+ state_ = ProxyStopping;
+
+ proxy_.invokeMethod(&ThreadProxy::stop, ConnectionTypeBlocking);
+
+ thread_.exit();
+ thread_.wait();
+
+ Thread::current()->dispatchMessages(Message::Type::InvokeMessage);
+
+ state_ = ProxyStopped;
+{%- endmacro -%}
+
+
+{#
+ # \brief Serialize multiple objects into data buffer and fd vector
+ #
+ # Generate code to serialize multiple objects, as specified in \a params
+ # (which are the parameters to some function), into \a buf data buffer and
+ # \a fds fd vector.
+ # This code is meant to be used by the proxy, for serializing prior to IPC calls.
+ #
+ # \todo Avoid intermediate vectors
+ #}
+{%- macro serialize_call(params, buf, fds) %}
+{%- for param in params %}
+{%- if param|is_enum %}
+ static_assert(sizeof({{param|name_full}}) <= 4);
+{%- endif %}
+ std::vector<uint8_t> {{param.mojom_name}}Buf;
+{%- if param|has_fd %}
+ std::vector<SharedFD> {{param.mojom_name}}Fds;
+ std::tie({{param.mojom_name}}Buf, {{param.mojom_name}}Fds) =
+{%- else %}
+ std::tie({{param.mojom_name}}Buf, std::ignore) =
+{%- endif %}
+{%- if param|is_flags %}
+ IPADataSerializer<{{param|name_full}}>::serialize({{param.mojom_name}}
+{%- elif param|is_enum %}
+ IPADataSerializer<uint32_t>::serialize(static_cast<uint32_t>({{param.mojom_name}})
+{%- else %}
+ IPADataSerializer<{{param|name}}>::serialize({{param.mojom_name}}
+{% endif -%}
+{{- ", &controlSerializer_" if param|needs_control_serializer -}}
+);
+{%- endfor %}
+
+{%- if params|length > 1 %}
+{%- for param in params %}
+ appendPOD<uint32_t>({{buf}}, {{param.mojom_name}}Buf.size());
+{%- if param|has_fd %}
+ appendPOD<uint32_t>({{buf}}, {{param.mojom_name}}Fds.size());
+{%- endif %}
+{%- endfor %}
+{%- endif %}
+
+{%- for param in params %}
+ {{buf}}.insert({{buf}}.end(), {{param.mojom_name}}Buf.begin(), {{param.mojom_name}}Buf.end());
+{%- endfor %}
+
+{%- for param in params %}
+{%- if param|has_fd %}
+ {{fds}}.insert({{fds}}.end(), {{param.mojom_name}}Fds.begin(), {{param.mojom_name}}Fds.end());
+{%- endif %}
+{%- endfor %}
+{%- endmacro -%}
+
+
+{#
+ # \brief Deserialize a single object from data buffer and fd vector
+ #
+ # \param pointer If true, deserialize the object into a dereferenced pointer
+ # \param iter If true, treat \a buf as an iterator instead of a vector
+ # \param data_size Variable that holds the size of the vector referenced by \a buf
+ #
+ # Generate code to deserialize a single object, as specified in \a param,
+ # from \a buf data buffer and \a fds fd vector.
+ # This code is meant to be used by macro deserialize_call.
+ #}
+{%- macro deserialize_param(param, pointer, loop, buf, fds, iter, data_size) -%}
+{{"*" if pointer}}{{param.mojom_name}} =
+{%- if param|is_flags %}
+IPADataSerializer<{{param|name_full}}>::deserialize(
+{%- elif param|is_enum %}
+static_cast<{{param|name_full}}>(IPADataSerializer<uint32_t>::deserialize(
+{%- else %}
+IPADataSerializer<{{param|name}}>::deserialize(
+{%- endif %}
+ {{buf}}{{- ".cbegin()" if not iter}} + {{param.mojom_name}}Start,
+{%- if loop.last and not iter %}
+ {{buf}}.cend()
+{%- elif not iter %}
+ {{buf}}.cbegin() + {{param.mojom_name}}Start + {{param.mojom_name}}BufSize
+{%- elif iter and loop.length == 1 %}
+ {{buf}} + {{data_size}}
+{%- else %}
+ {{buf}} + {{param.mojom_name}}Start + {{param.mojom_name}}BufSize
+{%- endif -%}
+{{- "," if param|has_fd}}
+{%- if param|has_fd %}
+ {{fds}}.cbegin() + {{param.mojom_name}}FdStart,
+{%- if loop.last %}
+ {{fds}}.cend()
+{%- else %}
+ {{fds}}.cbegin() + {{param.mojom_name}}FdStart + {{param.mojom_name}}FdsSize
+{%- endif -%}
+{%- endif -%}
+{{- "," if param|needs_control_serializer}}
+{%- if param|needs_control_serializer %}
+ &controlSerializer_
+{%- endif -%}
+){{")" if param|is_enum and not param|is_flags}};
+{%- endmacro -%}
+
+
+{#
+ # \brief Deserialize multiple objects from data buffer and fd vector
+ #
+ # \param pointer If true, deserialize objects into pointers, and add a null check.
+ # \param declare If true, declare the objects in addition to deserialization.
+ # \param iter If true, treat \a buf as an iterator instead of a vector
+ # \param data_size Variable that holds the size of the vector referenced by \a buf
+ #
+ # Generate code to deserialize multiple objects, as specified in \a params
+ # (which are the parameters to some function), from \a buf data buffer and
+ # \a fds fd vector.
+ # This code is meant to be used by the proxy, for deserializing after IPC calls.
+ #
+ # \todo Avoid intermediate vectors
+ #}
+{%- macro deserialize_call(params, buf, fds, pointer = true, declare = false, iter = false, data_size = '', init_offset = 0) -%}
+{% set ns = namespace(size_offset = init_offset) %}
+{%- if params|length > 1 %}
+{%- for param in params %}
+ [[maybe_unused]] const size_t {{param.mojom_name}}BufSize = readPOD<uint32_t>({{buf}}, {{ns.size_offset}}
+{%- if iter -%}
+, {{buf}} + {{data_size}}
+{%- endif -%}
+);
+ {%- set ns.size_offset = ns.size_offset + 4 %}
+{%- if param|has_fd %}
+ [[maybe_unused]] const size_t {{param.mojom_name}}FdsSize = readPOD<uint32_t>({{buf}}, {{ns.size_offset}}
+{%- if iter -%}
+, {{buf}} + {{data_size}}
+{%- endif -%}
+);
+ {%- set ns.size_offset = ns.size_offset + 4 %}
+{%- endif %}
+{%- endfor %}
+{%- endif %}
+{% for param in params %}
+{%- if loop.first %}
+ const size_t {{param.mojom_name}}Start = {{ns.size_offset}};
+{%- else %}
+ const size_t {{param.mojom_name}}Start = {{loop.previtem.mojom_name}}Start + {{loop.previtem.mojom_name}}BufSize;
+{%- endif %}
+{%- endfor %}
+{% for param in params|with_fds %}
+{%- if loop.first %}
+ const size_t {{param.mojom_name}}FdStart = 0;
+{%- else %}
+ const size_t {{param.mojom_name}}FdStart = {{loop.previtem.mojom_name}}FdStart + {{loop.previtem.mojom_name}}FdsSize;
+{%- endif %}
+{%- endfor %}
+{% for param in params %}
+ {%- if pointer %}
+ if ({{param.mojom_name}}) {
+{{deserialize_param(param, pointer, loop, buf, fds, iter, data_size)|indent(16, True)}}
+ }
+ {%- else %}
+ {{param|name + " " if declare}}{{deserialize_param(param, pointer, loop, buf, fds, iter, data_size)|indent(8)}}
+ {%- endif %}
+{% endfor %}
+{%- endmacro -%}
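
A rough Python sketch of the framing that serialize_call() and deserialize_call() above agree on when a call has more than one parameter: one 32-bit size per parameter (plus a 32-bit fd count for parameters that carry fds), followed by the concatenated per-parameter payloads. The byte order and helper names below are assumptions of the sketch; the generated C++ uses the appendPOD/readPOD helpers instead.

import struct

def pack(payloads):
    # Size header first, then the payloads back to back.
    header = b''.join(struct.pack('<I', len(p)) for p in payloads)
    return header + b''.join(payloads)

def unpack(buf, count):
    sizes = [struct.unpack_from('<I', buf, 4 * i)[0] for i in range(count)]
    offset = 4 * count
    out = []
    for size in sizes:
        out.append(buf[offset:offset + size])
        offset += size
    return out

wire = pack([b'\x01\x02', b'hello'])
print(unpack(wire, 2))  # [b'\x01\x02', b'hello']
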
diff --git a/utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl b/utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl
new file mode 100644
index 00000000..323e1293
--- /dev/null
+++ b/utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl
@@ -0,0 +1,319 @@
+{#-
+ # SPDX-License-Identifier: LGPL-2.1-or-later
+ # Copyright (C) 2020, Google Inc.
+-#}
+{#
+ # \brief Verify that there are enough bytes to deserialize
+ #
+ # Generate code that verifies that \a size is not greater than \a dataSize.
+ # Otherwise log an error with \a name and \a typename.
+ #}
+{%- macro check_data_size(size, dataSize, name, typename) %}
+ if ({{dataSize}} < {{size}}) {
+ LOG(IPADataSerializer, Error)
+ << "Failed to deserialize " << "{{name}}"
+ << ": not enough {{typename}}, expected "
+ << ({{size}}) << ", got " << ({{dataSize}});
+ return ret;
+ }
+{%- endmacro %}
+
+
+{#
+ # \brief Serialize a field into return vector
+ #
+ # Generate code to serialize \a field into retData, including size of the
+ # field and fds (where appropriate).
+ # This code is meant to be used by the IPADataSerializer specialization.
+ #
+ # \todo Avoid intermediate vectors
+ #}
+{%- macro serializer_field(field, namespace, loop) %}
+{%- if field|is_pod or field|is_enum %}
+ std::vector<uint8_t> {{field.mojom_name}};
+ std::tie({{field.mojom_name}}, std::ignore) =
+ {%- if field|is_pod %}
+ IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
+ {%- elif field|is_flags %}
+ IPADataSerializer<{{field|name_full}}>::serialize(data.{{field.mojom_name}});
+ {%- elif field|is_enum_scoped %}
+ IPADataSerializer<uint{{field|bit_width}}_t>::serialize(static_cast<uint{{field|bit_width}}_t>(data.{{field.mojom_name}}));
+ {%- elif field|is_enum %}
+ IPADataSerializer<uint{{field|bit_width}}_t>::serialize(data.{{field.mojom_name}});
+ {%- endif %}
+ retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
+{%- elif field|is_fd %}
+ std::vector<uint8_t> {{field.mojom_name}};
+ std::vector<SharedFD> {{field.mojom_name}}Fds;
+ std::tie({{field.mojom_name}}, {{field.mojom_name}}Fds) =
+ IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
+ retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
+ retFds.insert(retFds.end(), {{field.mojom_name}}Fds.begin(), {{field.mojom_name}}Fds.end());
+{%- elif field|is_controls %}
+ if (data.{{field.mojom_name}}.size() > 0) {
+ std::vector<uint8_t> {{field.mojom_name}};
+ std::tie({{field.mojom_name}}, std::ignore) =
+ IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}}, cs);
+ appendPOD<uint32_t>(retData, {{field.mojom_name}}.size());
+ retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
+ } else {
+ appendPOD<uint32_t>(retData, 0);
+ }
+{%- elif field|is_plain_struct or field|is_array or field|is_map or field|is_str %}
+ std::vector<uint8_t> {{field.mojom_name}};
+ {%- if field|has_fd %}
+ std::vector<SharedFD> {{field.mojom_name}}Fds;
+ std::tie({{field.mojom_name}}, {{field.mojom_name}}Fds) =
+ {%- else %}
+ std::tie({{field.mojom_name}}, std::ignore) =
+ {%- endif %}
+ {%- if field|is_array or field|is_map %}
+ IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}}, cs);
+ {%- elif field|is_str %}
+ IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
+ {%- else %}
+ IPADataSerializer<{{field|name_full}}>::serialize(data.{{field.mojom_name}}, cs);
+ {%- endif %}
+ appendPOD<uint32_t>(retData, {{field.mojom_name}}.size());
+ {%- if field|has_fd %}
+ appendPOD<uint32_t>(retData, {{field.mojom_name}}Fds.size());
+ {%- endif %}
+ retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
+ {%- if field|has_fd %}
+ retFds.insert(retFds.end(), {{field.mojom_name}}Fds.begin(), {{field.mojom_name}}Fds.end());
+ {%- endif %}
+{%- else %}
+ /* Unknown serialization for {{field.mojom_name}}. */
+{%- endif %}
+{%- endmacro %}
+
+
+{#
+ # \brief Deserialize a field into return struct
+ #
+ # Generate code to deserialize \a field into object ret.
+ # This code is meant to be used by the IPADataSerializer specialization.
+ #}
+{%- macro deserializer_field(field, namespace, loop) %}
+{% if field|is_pod or field|is_enum %}
+ {%- set field_size = (field|bit_width|int / 8)|int %}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
+ {%- if field|is_pod %}
+ ret.{{field.mojom_name}} = IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field_size}});
+ {%- elif field|is_flags %}
+ ret.{{field.mojom_name}} = IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field_size}});
+ {%- else %}
+ ret.{{field.mojom_name}} = static_cast<{{field|name_full}}>(IPADataSerializer<uint{{field|bit_width}}_t>::deserialize(m, m + {{field_size}}));
+ {%- endif %}
+ {%- if not loop.last %}
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ {%- endif %}
+{% elif field|is_fd %}
+ {%- set field_size = 4 %}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
+ ret.{{field.mojom_name}} = IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field_size}}, n, n + 1, cs);
+ {%- if not loop.last %}
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ n += ret.{{field.mojom_name}}.isValid() ? 1 : 0;
+ fdsSize -= ret.{{field.mojom_name}}.isValid() ? 1 : 0;
+ {%- endif %}
+{% elif field|is_controls %}
+ {%- set field_size = 4 %}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name + 'Size', 'data')}}
+ const size_t {{field.mojom_name}}Size = readPOD<uint32_t>(m, 0, dataEnd);
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ {%- set field_size = field.mojom_name + 'Size' -%}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
+ if ({{field.mojom_name}}Size > 0)
+ ret.{{field.mojom_name}} =
+ IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size, cs);
+ {%- if not loop.last %}
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ {%- endif %}
+{% elif field|is_plain_struct or field|is_array or field|is_map or field|is_str %}
+ {%- set field_size = 4 %}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name + 'Size', 'data')}}
+ const size_t {{field.mojom_name}}Size = readPOD<uint32_t>(m, 0, dataEnd);
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ {%- if field|has_fd %}
+ {%- set field_size = 4 %}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name + 'FdsSize', 'data')}}
+ const size_t {{field.mojom_name}}FdsSize = readPOD<uint32_t>(m, 0, dataEnd);
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ {{- check_data_size(field.mojom_name + 'FdsSize', 'fdsSize', field.mojom_name, 'fds')}}
+ {%- endif %}
+ {%- set field_size = field.mojom_name + 'Size' -%}
+ {{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
+ ret.{{field.mojom_name}} =
+ {%- if field|is_str %}
+ IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size);
+ {%- elif field|has_fd and (field|is_array or field|is_map) %}
+ IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size, n, n + {{field.mojom_name}}FdsSize, cs);
+ {%- elif field|has_fd and (not (field|is_array or field|is_map)) %}
+ IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field.mojom_name}}Size, n, n + {{field.mojom_name}}FdsSize, cs);
+ {%- elif (not field|has_fd) and (field|is_array or field|is_map) %}
+ IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size, cs);
+ {%- else %}
+ IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field.mojom_name}}Size, cs);
+ {%- endif %}
+ {%- if not loop.last %}
+ m += {{field_size}};
+ dataSize -= {{field_size}};
+ {%- if field|has_fd %}
+ n += {{field.mojom_name}}FdsSize;
+ fdsSize -= {{field.mojom_name}}FdsSize;
+ {%- endif %}
+ {%- endif %}
+{% else %}
+ /* Unknown deserialization for {{field.mojom_name}}. */
+{%- endif %}
+{%- endmacro %}
+
+
+{#
+ # \brief Serialize a struct
+ #
+ # Generate code for IPADataSerializer specialization, for serializing
+ # \a struct.
+ #}
+{%- macro serializer(struct, namespace) %}
+ static std::tuple<std::vector<uint8_t>, std::vector<SharedFD>>
+ serialize(const {{struct|name_full}} &data,
+{%- if struct|needs_control_serializer %}
+ ControlSerializer *cs)
+{%- else %}
+ [[maybe_unused]] ControlSerializer *cs = nullptr)
+{%- endif %}
+ {
+ std::vector<uint8_t> retData;
+{%- if struct|has_fd %}
+ std::vector<SharedFD> retFds;
+{%- endif %}
+{%- for field in struct.fields %}
+{{serializer_field(field, namespace, loop)}}
+{%- endfor %}
+{% if struct|has_fd %}
+ return {retData, retFds};
+{%- else %}
+ return {retData, {}};
+{%- endif %}
+ }
+{%- endmacro %}
+
+
+{#
+ # \brief Deserialize a struct that has fds
+ #
+ # Generate code for IPADataSerializer specialization, for deserializing
+ # \a struct, in the case that \a struct has file descriptors.
+ #}
+{%- macro deserializer_fd(struct, namespace) %}
+ static {{struct|name_full}}
+ deserialize(std::vector<uint8_t> &data,
+ std::vector<SharedFD> &fds,
+{%- if struct|needs_control_serializer %}
+ ControlSerializer *cs)
+{%- else %}
+ ControlSerializer *cs = nullptr)
+{%- endif %}
+ {
+ return IPADataSerializer<{{struct|name_full}}>::deserialize(data.cbegin(), data.cend(), fds.cbegin(), fds.cend(), cs);
+ }
+
+{# \todo Don't inline this function #}
+ static {{struct|name_full}}
+ deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ std::vector<SharedFD>::const_iterator fdsBegin,
+ std::vector<SharedFD>::const_iterator fdsEnd,
+{%- if struct|needs_control_serializer %}
+ ControlSerializer *cs)
+{%- else %}
+ [[maybe_unused]] ControlSerializer *cs = nullptr)
+{%- endif %}
+ {
+ {{struct|name_full}} ret;
+ std::vector<uint8_t>::const_iterator m = dataBegin;
+ std::vector<SharedFD>::const_iterator n = fdsBegin;
+
+ size_t dataSize = std::distance(dataBegin, dataEnd);
+ [[maybe_unused]] size_t fdsSize = std::distance(fdsBegin, fdsEnd);
+{%- for field in struct.fields -%}
+{{deserializer_field(field, namespace, loop)}}
+{%- endfor %}
+ return ret;
+ }
+{%- endmacro %}
+
+{#
+ # \brief Deserialize a struct that has no fds, using the fd-aware signature
+ #
+ # Generate code for IPADataSerializer specialization, for deserializing
+ # \a struct, in the case that \a struct has no file descriptors but requires
+ # deserializers with file descriptors.
+ #}
+{%- macro deserializer_fd_simple(struct, namespace) %}
+ static {{struct|name_full}}
+ deserialize(std::vector<uint8_t> &data,
+ [[maybe_unused]] std::vector<SharedFD> &fds,
+ ControlSerializer *cs = nullptr)
+ {
+ return IPADataSerializer<{{struct|name_full}}>::deserialize(data.cbegin(), data.cend(), cs);
+ }
+
+ static {{struct|name_full}}
+ deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsBegin,
+ [[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd,
+ ControlSerializer *cs = nullptr)
+ {
+ return IPADataSerializer<{{struct|name_full}}>::deserialize(dataBegin, dataEnd, cs);
+ }
+{%- endmacro %}
+
+
+{#
+ # \brief Deserialize a struct that has no fds
+ #
+ # Generate code for IPADataSerializer specialization, for deserializing
+ # \a struct, in the case that \a struct does not have file descriptors.
+ #}
+{%- macro deserializer_no_fd(struct, namespace) %}
+ static {{struct|name_full}}
+ deserialize(std::vector<uint8_t> &data,
+{%- if struct|needs_control_serializer %}
+ ControlSerializer *cs)
+{%- else %}
+ ControlSerializer *cs = nullptr)
+{%- endif %}
+ {
+ return IPADataSerializer<{{struct|name_full}}>::deserialize(data.cbegin(), data.cend(), cs);
+ }
+
+{# \todo Don't inline this function #}
+ static {{struct|name_full}}
+ deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+{%- if struct|needs_control_serializer %}
+ ControlSerializer *cs)
+{%- else %}
+ [[maybe_unused]] ControlSerializer *cs = nullptr)
+{%- endif %}
+ {
+ {{struct|name_full}} ret;
+ std::vector<uint8_t>::const_iterator m = dataBegin;
+
+ size_t dataSize = std::distance(dataBegin, dataEnd);
+{%- for field in struct.fields -%}
+{{deserializer_field(field, namespace, loop)}}
+{%- endfor %}
+ return ret;
+ }
+{%- endmacro %}
diff --git a/utils/codegen/ipc/generators/meson.build b/utils/codegen/ipc/generators/meson.build
new file mode 100644
index 00000000..504f1a46
--- /dev/null
+++ b/utils/codegen/ipc/generators/meson.build
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: CC0-1.0
+
+subdir('libcamera_templates')
diff --git a/utils/codegen/ipc/generators/mojom_libcamera_generator.py b/utils/codegen/ipc/generators/mojom_libcamera_generator.py
new file mode 100644
index 00000000..d9c620a0
--- /dev/null
+++ b/utils/codegen/ipc/generators/mojom_libcamera_generator.py
@@ -0,0 +1,555 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Paul Elder <paul.elder@ideasonboard.com>
+#
+# Generates libcamera files from a mojom.Module.
+
+import argparse
+import datetime
+import os
+import re
+
+import mojom.fileutil as fileutil
+import mojom.generate.generator as generator
+import mojom.generate.module as mojom
+from mojom.generate.template_expander import UseJinja
+
+
+GENERATOR_PREFIX = 'libcamera'
+
+_kind_to_cpp_type = {
+ mojom.BOOL: 'bool',
+ mojom.INT8: 'int8_t',
+ mojom.UINT8: 'uint8_t',
+ mojom.INT16: 'int16_t',
+ mojom.UINT16: 'uint16_t',
+ mojom.INT32: 'int32_t',
+ mojom.UINT32: 'uint32_t',
+ mojom.FLOAT: 'float',
+ mojom.INT64: 'int64_t',
+ mojom.UINT64: 'uint64_t',
+ mojom.DOUBLE: 'double',
+}
+
+_bit_widths = {
+ mojom.BOOL: '8',
+ mojom.INT8: '8',
+ mojom.UINT8: '8',
+ mojom.INT16: '16',
+ mojom.UINT16: '16',
+ mojom.INT32: '32',
+ mojom.UINT32: '32',
+ mojom.FLOAT: '32',
+ mojom.INT64: '64',
+ mojom.UINT64: '64',
+ mojom.DOUBLE: '64',
+}
+
+def ModuleName(path):
+ return path.split('/')[-1].split('.')[0]
+
+def ModuleClassName(module):
+ return re.sub(r'^IPA(.*)Interface$', lambda match: match.group(1),
+ module.interfaces[0].mojom_name)
+
+def Capitalize(name):
+ return name[0].upper() + name[1:]
+
+def ConstantStyle(name):
+ return generator.ToUpperSnakeCase(name)
+
+def Choose(cond, t, f):
+ return t if cond else f
+
+def CommaSep(l):
+ return ', '.join([m for m in l])
+
+def ParamsCommaSep(l):
+ return ', '.join([m.mojom_name for m in l])
+
+def GetDefaultValue(element):
+ if element.default is not None:
+ return element.default
+ if type(element.kind) == mojom.ValueKind:
+ return '0'
+ if IsFlags(element):
+ return ''
+ if mojom.IsEnumKind(element.kind):
+ return f'static_cast<{element.kind.mojom_name}>(0)'
+ if isinstance(element.kind, mojom.Struct) and \
+ element.kind.mojom_name == 'SharedFD':
+ return '-1'
+ return ''
+
+def HasDefaultValue(element):
+ return GetDefaultValue(element) != ''
+
+def HasDefaultFields(element):
+ return True in [HasDefaultValue(x) for x in element.fields]
+
+def GetAllTypes(element):
+ if mojom.IsArrayKind(element):
+ return GetAllTypes(element.kind)
+ if mojom.IsMapKind(element):
+ return GetAllTypes(element.key_kind) + GetAllTypes(element.value_kind)
+ if isinstance(element, mojom.Parameter):
+ return GetAllTypes(element.kind)
+ if mojom.IsEnumKind(element):
+ return [element.mojom_name]
+ if not mojom.IsStructKind(element):
+ return [element.spec]
+ if len(element.fields) == 0:
+ return [element.mojom_name]
+ ret = [GetAllTypes(x.kind) for x in element.fields]
+ ret = [x for sublist in ret for x in sublist]
+ return list(set(ret))
+
+def GetAllAttrs(element):
+ if mojom.IsArrayKind(element):
+ return GetAllAttrs(element.kind)
+ if mojom.IsMapKind(element):
+ return {**GetAllAttrs(element.key_kind), **GetAllAttrs(element.value_kind)}
+ if isinstance(element, mojom.Parameter):
+ return GetAllAttrs(element.kind)
+ if mojom.IsEnumKind(element):
+ return element.attributes if element.attributes is not None else {}
+ if mojom.IsStructKind(element) and len(element.fields) == 0:
+ return element.attributes if element.attributes is not None else {}
+ if not mojom.IsStructKind(element):
+ if hasattr(element, 'attributes'):
+ return element.attributes or {}
+ return {}
+ attrs = [(x.attributes) for x in element.fields]
+ ret = {}
+ for d in attrs:
+ ret.update(d or {})
+ if hasattr(element, 'attributes'):
+ ret.update(element.attributes or {})
+ return ret
+
+def NeedsControlSerializer(element):
+ types = GetAllTypes(element)
+ for type in ['ControlList', 'ControlInfoMap']:
+ if f'x:{type}' in types:
+ raise Exception(f'Unknown type "{type}" in {element.mojom_name}, did you mean "libcamera.{type}"?')
+ return "ControlList" in types or "ControlInfoMap" in types
+
+def HasFd(element):
+ attrs = GetAllAttrs(element)
+ if isinstance(element, mojom.Kind):
+ types = GetAllTypes(element)
+ else:
+ types = GetAllTypes(element.kind)
+ return "SharedFD" in types or (attrs is not None and "hasFd" in attrs)
+
+def WithDefaultValues(element):
+ return [x for x in element if HasDefaultValue(x)]
+
+def WithFds(element):
+ return [x for x in element if HasFd(x)]
+
+def MethodParamInputs(method):
+ return method.parameters
+
+# Return the output parameters of a method. The first response parameter is
+# skipped when it doubles as the return value (see MethodReturnValue()).
+def MethodParamOutputs(method):
+ if method.response_parameters is None:
+ return []
+
+ if MethodReturnValue(method) == 'void':
+ return method.response_parameters
+
+ if len(method.response_parameters) <= 1:
+ return []
+
+ return method.response_parameters[1:]
+
+def MethodParamsHaveFd(parameters):
+ return len([x for x in parameters if HasFd(x)]) > 0
+
+def MethodInputHasFd(method):
+ return MethodParamsHaveFd(method.parameters)
+
+def MethodOutputHasFd(method):
+ return MethodParamsHaveFd(MethodParamOutputs(method))
+
+def MethodParamNames(method):
+ params = []
+ for param in method.parameters:
+ params.append(param.mojom_name)
+ for param in MethodParamOutputs(method):
+ params.append(param.mojom_name)
+ return params
+
+def MethodParameters(method):
+ params = []
+ for param in method.parameters:
+ params.append('const %s %s%s' % (GetNameForElement(param),
+ '' if IsPod(param) or IsEnum(param) else '&',
+ param.mojom_name))
+ for param in MethodParamOutputs(method):
+ params.append(f'{GetNameForElement(param)} *{param.mojom_name}')
+ return params
+
+def MethodReturnValue(method):
+ if method.response_parameters is None or len(method.response_parameters) == 0:
+ return 'void'
+ first_output = method.response_parameters[0]
+ if ((len(method.response_parameters) == 1 and IsPod(first_output)) or
+ first_output.kind == mojom.INT32):
+ return GetNameForElement(first_output)
+ return 'void'
+
+def IsAsync(method):
+ # Events are always async
+ if re.match("^IPA.*EventInterface$", method.interface.mojom_name):
+ return True
+ elif re.match("^IPA.*Interface$", method.interface.mojom_name):
+ if method.attributes is None:
+ return False
+ elif 'async' in method.attributes and method.attributes['async']:
+ return True
+ return False
+
+def IsArray(element):
+ return mojom.IsArrayKind(element.kind)
+
+def IsControls(element):
+ return mojom.IsStructKind(element.kind) and (element.kind.mojom_name == "ControlList" or
+ element.kind.mojom_name == "ControlInfoMap")
+
+def IsEnum(element):
+ return mojom.IsEnumKind(element.kind)
+
+
+# Only works on the enum definition, not on types
+def IsScoped(element):
+ attributes = getattr(element, 'attributes', None)
+ if not attributes:
+ return False
+ return 'scopedEnum' in attributes
+
+
+def IsEnumScoped(element):
+ if not IsEnum(element):
+ return False
+ return IsScoped(element.kind)
+
+def IsFd(element):
+ return mojom.IsStructKind(element.kind) and element.kind.mojom_name == "SharedFD"
+
+
+def IsFlags(element):
+ attributes = getattr(element, 'attributes', None)
+ if not attributes:
+ return False
+ return 'flags' in attributes
+
+def IsMap(element):
+ return mojom.IsMapKind(element.kind)
+
+def IsPlainStruct(element):
+ return mojom.IsStructKind(element.kind) and not IsControls(element) and not IsFd(element)
+
+def IsPod(element):
+ return element.kind in _kind_to_cpp_type
+
+def IsStr(element):
+ return element.kind.spec == 's'
+
+def BitWidth(element):
+ if element.kind in _bit_widths:
+ return _bit_widths[element.kind]
+ if mojom.IsEnumKind(element.kind):
+ return '32'
+ return ''
+
+def ByteWidthFromCppType(t):
+ key = None
+ for mojo_type, cpp_type in _kind_to_cpp_type.items():
+ if t == cpp_type:
+ key = mojo_type
+ if key is None:
+ raise Exception('invalid type')
+ return str(int(_bit_widths[key]) // 8)
+
+# Get the type name for a given element
+def GetNameForElement(element):
+ # Flags
+ if IsFlags(element):
+ return f'Flags<{GetFullNameForElement(element.kind)}>'
+ # structs
+ if (mojom.IsEnumKind(element) or
+ mojom.IsInterfaceKind(element) or
+ mojom.IsStructKind(element)):
+ return element.mojom_name
+ # vectors
+ if (mojom.IsArrayKind(element)):
+ elem_name = GetFullNameForElement(element.kind)
+ return f'std::vector<{elem_name}>'
+ # maps
+ if (mojom.IsMapKind(element)):
+ key_name = GetFullNameForElement(element.key_kind)
+ value_name = GetFullNameForElement(element.value_kind)
+ return f'std::map<{key_name}, {value_name}>'
+ # struct fields and function parameters
+ if isinstance(element, (mojom.Field, mojom.Method, mojom.Parameter)):
+ # maps and vectors
+ if (mojom.IsArrayKind(element.kind) or mojom.IsMapKind(element.kind)):
+ return GetNameForElement(element.kind)
+ # strings
+ if (mojom.IsReferenceKind(element.kind) and element.kind.spec == 's'):
+ return 'std::string'
+ # PODs
+ if element.kind in _kind_to_cpp_type:
+ return _kind_to_cpp_type[element.kind]
+ # structs and enums
+ return element.kind.mojom_name
+ # PODs that are members of vectors/maps
+ if (hasattr(element, '__hash__') and element in _kind_to_cpp_type):
+ return _kind_to_cpp_type[element]
+ if (hasattr(element, 'spec')):
+ # strings that are members of vectors/maps
+ if (element.spec == 's'):
+ return 'std::string'
+ # structs that aren't defined in mojom that are members of vectors/maps
+ if (element.spec[0] == 'x'):
+ return element.spec.replace('x:', '').replace('.', '::')
+ if (mojom.IsInterfaceRequestKind(element) or
+ mojom.IsAssociatedKind(element) or
+ mojom.IsPendingRemoteKind(element) or
+ mojom.IsPendingReceiverKind(element) or
+ mojom.IsUnionKind(element)):
+ raise Exception('Unsupported element: %s' % element)
+ raise Exception('Unexpected element: %s' % element)
+
+def GetFullNameForElement(element):
+ name = GetNameForElement(element)
+ namespace_str = ''
+ if (mojom.IsStructKind(element) or mojom.IsEnumKind(element)):
+ namespace_str = element.module.mojom_namespace.replace('.', '::')
+ elif (hasattr(element, 'kind') and
+ (mojom.IsStructKind(element.kind) or mojom.IsEnumKind(element.kind))):
+ namespace_str = element.kind.module.mojom_namespace.replace('.', '::')
+
+ if namespace_str == '':
+ return name
+
+ if IsFlags(element):
+ return GetNameForElement(element)
+
+ return f'{namespace_str}::{name}'
+
+def ValidateZeroLength(l, s, cap=True):
+ if l is None:
+ return
+ if len(l) > 0:
+ raise Exception(f'{s.capitalize() if cap else s} should be empty')
+
+def ValidateSingleLength(l, s, cap=True):
+ if len(l) > 1:
+ raise Exception(f'Only one {s} allowed')
+ if len(l) < 1:
+ raise Exception(f'{s.capitalize() if cap else s} is required')
+
+def GetMainInterface(interfaces):
+ intf = [x for x in interfaces
+ if re.match("^IPA.*Interface", x.mojom_name) and
+ not re.match("^IPA.*EventInterface", x.mojom_name)]
+ ValidateSingleLength(intf, 'main interface')
+ return None if len(intf) == 0 else intf[0]
+
+def GetEventInterface(interfaces):
+ event = [x for x in interfaces if re.match("^IPA.*EventInterface", x.mojom_name)]
+ ValidateSingleLength(event, 'event interface')
+ return None if len(event) == 0 else event[0]
+
+def ValidateNamespace(namespace):
+ if namespace == '':
+ raise Exception('Must have a namespace')
+
+ if not re.match(r'^ipa\.[0-9A-Za-z_]+', namespace):
+ raise Exception('Namespace must be of the form "ipa.{pipeline_name}"')
+
+def ValidateInterfaces(interfaces):
+ # Validate presence of main interface
+ intf = GetMainInterface(interfaces)
+ if intf is None:
+ raise Exception('Must have main IPA interface')
+
+ # Validate presence of event interface
+ event = GetEventInterface(interfaces)
+    if event is None:
+ raise Exception('Must have event IPA interface')
+
+ # Validate required main interface functions
+ f_init = [x for x in intf.methods if x.mojom_name == 'init']
+ f_start = [x for x in intf.methods if x.mojom_name == 'start']
+ f_stop = [x for x in intf.methods if x.mojom_name == 'stop']
+
+ ValidateSingleLength(f_init, 'init()', False)
+ ValidateSingleLength(f_start, 'start()', False)
+ ValidateSingleLength(f_stop, 'stop()', False)
+
+ f_stop = f_stop[0]
+
+ # No need to validate init() and start() as they are customizable
+
+ # Validate parameters to stop()
+ ValidateZeroLength(f_stop.parameters, 'input parameter to stop()')
+    ValidateZeroLength(f_stop.response_parameters, 'output parameter from stop()')
+
+ # Validate that event interface has at least one event
+ if len(event.methods) < 1:
+ raise Exception('Event interface must have at least one event')
+
+ # Validate that all async methods don't have return values
+ intf_methods_async = [x for x in intf.methods if IsAsync(x)]
+ for method in intf_methods_async:
+ ValidateZeroLength(method.response_parameters,
+ f'{method.mojom_name} response parameters', False)
+
+ event_methods_async = [x for x in event.methods if IsAsync(x)]
+ for method in event_methods_async:
+ ValidateZeroLength(method.response_parameters,
+ f'{method.mojom_name} response parameters', False)
+
+class Generator(generator.Generator):
+ @staticmethod
+ def GetTemplatePrefix():
+ return 'libcamera_templates'
+
+ def GetFilters(self):
+ libcamera_filters = {
+ 'all_types': GetAllTypes,
+ 'bit_width': BitWidth,
+ 'byte_width' : ByteWidthFromCppType,
+ 'cap': Capitalize,
+ 'choose': Choose,
+ 'comma_sep': CommaSep,
+ 'default_value': GetDefaultValue,
+ 'has_default_fields': HasDefaultFields,
+ 'has_fd': HasFd,
+ 'is_async': IsAsync,
+ 'is_array': IsArray,
+ 'is_controls': IsControls,
+ 'is_enum': IsEnum,
+ 'is_enum_scoped': IsEnumScoped,
+ 'is_fd': IsFd,
+ 'is_flags': IsFlags,
+ 'is_map': IsMap,
+ 'is_plain_struct': IsPlainStruct,
+ 'is_pod': IsPod,
+ 'is_scoped': IsScoped,
+ 'is_str': IsStr,
+ 'method_input_has_fd': MethodInputHasFd,
+ 'method_output_has_fd': MethodOutputHasFd,
+ 'method_param_names': MethodParamNames,
+ 'method_param_inputs': MethodParamInputs,
+ 'method_param_outputs': MethodParamOutputs,
+ 'method_parameters': MethodParameters,
+ 'method_return_value': MethodReturnValue,
+ 'name': GetNameForElement,
+ 'name_full': GetFullNameForElement,
+ 'needs_control_serializer': NeedsControlSerializer,
+ 'params_comma_sep': ParamsCommaSep,
+ 'with_default_values': WithDefaultValues,
+ 'with_fds': WithFds,
+ }
+ return libcamera_filters
+
+ def _GetJinjaExports(self):
+ return {
+ 'cmd_enum_name': '_%sCmd' % self.module_name,
+ 'cmd_event_enum_name': '_%sEventCmd' % self.module_name,
+ 'consts': self.module.constants,
+ 'enums': self.module.enums,
+ 'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
+ 'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
+ 'has_string': len([x for x in self.module.kinds.keys() if x[0] == 's']) > 0,
+ 'has_namespace': self.module.mojom_namespace != '',
+ 'interface_event': GetEventInterface(self.module.interfaces),
+ 'interface_main': GetMainInterface(self.module.interfaces),
+ 'interface_name': 'IPA%sInterface' % self.module_name,
+ 'module_name': ModuleName(self.module.path),
+ 'namespace': self.module.mojom_namespace.split('.'),
+ 'namespace_str': self.module.mojom_namespace.replace('.', '::') if
+ self.module.mojom_namespace is not None else '',
+ 'proxy_name': 'IPAProxy%s' % self.module_name,
+ 'proxy_worker_name': 'IPAProxy%sWorker' % self.module_name,
+ 'structs_nonempty': [x for x in self.module.structs if len(x.fields) > 0],
+ }
+
+ def _GetJinjaExportsForCore(self):
+ return {
+ 'consts': self.module.constants,
+ 'enums_gen_header': [x for x in self.module.enums if x.attributes is None or 'skipHeader' not in x.attributes],
+ 'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
+ 'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
+ 'has_string': len([x for x in self.module.kinds.keys() if x[0] == 's']) > 0,
+ 'structs_gen_header': [x for x in self.module.structs if x.attributes is None or 'skipHeader' not in x.attributes],
+ 'structs_gen_serializer': [x for x in self.module.structs if x.attributes is None or 'skipSerdes' not in x.attributes],
+ }
+
+ @UseJinja('core_ipa_interface.h.tmpl')
+ def _GenerateCoreHeader(self):
+ return self._GetJinjaExportsForCore()
+
+ @UseJinja('core_ipa_serializer.h.tmpl')
+ def _GenerateCoreSerializer(self):
+ return self._GetJinjaExportsForCore()
+
+ @UseJinja('module_ipa_interface.h.tmpl')
+ def _GenerateDataHeader(self):
+ return self._GetJinjaExports()
+
+ @UseJinja('module_ipa_serializer.h.tmpl')
+ def _GenerateSerializer(self):
+ return self._GetJinjaExports()
+
+ @UseJinja('module_ipa_proxy.cpp.tmpl')
+ def _GenerateProxyCpp(self):
+ return self._GetJinjaExports()
+
+ @UseJinja('module_ipa_proxy.h.tmpl')
+ def _GenerateProxyHeader(self):
+ return self._GetJinjaExports()
+
+ @UseJinja('module_ipa_proxy_worker.cpp.tmpl')
+ def _GenerateProxyWorker(self):
+ return self._GetJinjaExports()
+
+ def GenerateFiles(self, unparsed_args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--libcamera_generate_core_header', action='store_true')
+ parser.add_argument('--libcamera_generate_core_serializer', action='store_true')
+ parser.add_argument('--libcamera_generate_header', action='store_true')
+ parser.add_argument('--libcamera_generate_serializer', action='store_true')
+ parser.add_argument('--libcamera_generate_proxy_cpp', action='store_true')
+ parser.add_argument('--libcamera_generate_proxy_h', action='store_true')
+ parser.add_argument('--libcamera_generate_proxy_worker', action='store_true')
+ parser.add_argument('--libcamera_output_path')
+ args = parser.parse_args(unparsed_args)
+
+ if not args.libcamera_generate_core_header and \
+ not args.libcamera_generate_core_serializer:
+ ValidateNamespace(self.module.mojom_namespace)
+ ValidateInterfaces(self.module.interfaces)
+ self.module_name = ModuleClassName(self.module)
+
+ fileutil.EnsureDirectoryExists(os.path.dirname(args.libcamera_output_path))
+
+ gen_funcs = [
+ [args.libcamera_generate_core_header, self._GenerateCoreHeader],
+ [args.libcamera_generate_core_serializer, self._GenerateCoreSerializer],
+ [args.libcamera_generate_header, self._GenerateDataHeader],
+ [args.libcamera_generate_serializer, self._GenerateSerializer],
+ [args.libcamera_generate_proxy_cpp, self._GenerateProxyCpp],
+ [args.libcamera_generate_proxy_h, self._GenerateProxyHeader],
+ [args.libcamera_generate_proxy_worker, self._GenerateProxyWorker],
+ ]
+
+ for pair in gen_funcs:
+ if pair[0]:
+ self.Write(pair[1](), args.libcamera_output_path)
diff --git a/utils/codegen/ipc/meson.build b/utils/codegen/ipc/meson.build
new file mode 100644
index 00000000..f77bf324
--- /dev/null
+++ b/utils/codegen/ipc/meson.build
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: CC0-1.0
+
+subdir('generators')
+
+py_modules += ['jinja2', 'ply']
+
+mojom_parser = find_program('./parser.py')
+
+mojom_generator = find_program('./generate.py')
+
+mojom_docs_extractor = find_program('./extract-docs.py')
+
+mojom_templates = custom_target('mojom_templates',
+ input : mojom_template_files,
+ output : 'libcamera_templates.zip',
+ command : [mojom_generator, '-o', '@OUTDIR@', 'precompile'],
+ env : py_build_env)
+
+mojom_templates_dir = meson.current_build_dir()
diff --git a/utils/codegen/ipc/mojo/README b/utils/codegen/ipc/mojo/README
new file mode 100644
index 00000000..961cabd2
--- /dev/null
+++ b/utils/codegen/ipc/mojo/README
@@ -0,0 +1,4 @@
+# SPDX-License-Identifier: CC0-1.0
+
+Files in this directory are imported from commit 9be4263648d7 of Chromium. Do not
+modify them manually.
diff --git a/utils/codegen/ipc/mojo/public/LICENSE b/utils/codegen/ipc/mojo/public/LICENSE
new file mode 100644
index 00000000..513e8a6a
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/LICENSE
@@ -0,0 +1,27 @@
+// Copyright 2014 The Chromium Authors
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/utils/codegen/ipc/mojo/public/tools/.style.yapf b/utils/codegen/ipc/mojo/public/tools/.style.yapf
new file mode 100644
index 00000000..b4ebbe24
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+
+# New directories should use a .style.yapf that does not include the following:
+column_limit = 80
+indent_width = 2
diff --git a/utils/codegen/ipc/mojo/public/tools/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/BUILD.gn
new file mode 100644
index 00000000..5328a34a
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The main target used to aggregate all unit tests for Python-based Mojo tools.
+# This is used to generate a complete isolate which can be pushed to bots to run
+# the tests.
+group("mojo_python_unittests") {
+ data = [
+ "run_all_python_unittests.py",
+ "//testing/scripts/run_isolated_script_test.py",
+ ]
+ deps = [
+ "//mojo/public/tools/bindings:tests",
+ "//mojo/public/tools/mojom:tests",
+ "//mojo/public/tools/mojom/mojom:tests",
+ ]
+ data_deps = [
+ "//testing:test_scripts_shared",
+ "//third_party/catapult/third_party/typ/",
+ ]
+}
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
new file mode 100644
index 00000000..eeca73ea
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
@@ -0,0 +1,131 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//mojo/public/tools/bindings/mojom.gni")
+import("//third_party/jinja2/jinja2.gni")
+
+action("precompile_templates") {
+ sources = mojom_generator_sources
+ sources += [
+ "$mojom_generator_root/generators/cpp_templates/cpp_macros.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/feature_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/feature_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_feature_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-features.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-shared-internal.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_data_view_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_data_view_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_macros.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_serialization_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_traits_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_traits_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/struct_unserialized_message_context.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_data_view_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_data_view_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_serialization_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_traits_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/union_traits_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/validation_macros.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/wrapper_class_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/wrapper_class_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/wrapper_class_template_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_template_definition.tmpl",
+ "$mojom_generator_root/generators/java_templates/constant_definition.tmpl",
+ "$mojom_generator_root/generators/java_templates/constants.java.tmpl",
+ "$mojom_generator_root/generators/java_templates/data_types_definition.tmpl",
+ "$mojom_generator_root/generators/java_templates/enum.java.tmpl",
+ "$mojom_generator_root/generators/java_templates/enum_definition.tmpl",
+ "$mojom_generator_root/generators/java_templates/header.java.tmpl",
+ "$mojom_generator_root/generators/java_templates/interface.java.tmpl",
+ "$mojom_generator_root/generators/java_templates/interface_definition.tmpl",
+ "$mojom_generator_root/generators/java_templates/interface_internal.java.tmpl",
+ "$mojom_generator_root/generators/java_templates/struct.java.tmpl",
+ "$mojom_generator_root/generators/java_templates/union.java.tmpl",
+ "$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
+ "$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/enum_definition_for_module.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/interface_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/interface_definition_for_module.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/module_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/mojom-lite.js.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/mojom.m.js.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/struct_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/struct_definition_for_module.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/union_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/lite/union_definition_for_module.tmpl",
+ "$mojom_generator_root/generators/js_templates/module.amd.tmpl",
+ "$mojom_generator_root/generators/js_templates/module_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/struct_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/union_definition.tmpl",
+ "$mojom_generator_root/generators/js_templates/validation_macros.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm.cc.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm.h.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm.proto.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm_from_proto_macros.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
+ "$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
+ "$mojom_generator_root/generators/ts_templates/enum_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/interface_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/struct_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/union_definition.tmpl",
+ ]
+ script = mojom_generator_script
+
+ inputs = jinja2_sources
+ outputs = [
+ "$target_gen_dir/cpp_templates.zip",
+ "$target_gen_dir/java_templates.zip",
+ "$target_gen_dir/js_templates.zip",
+ "$target_gen_dir/mojolpm_templates.zip",
+ "$target_gen_dir/ts_templates.zip",
+ ]
+ args = [
+ "-o",
+ rebase_path(target_gen_dir, root_build_dir),
+ "--use_bundled_pylibs",
+ "precompile",
+ ]
+}
+
+group("tests") {
+ data = [
+ mojom_generator_script,
+ "checks/mojom_attributes_check_unittest.py",
+ "checks/mojom_interface_feature_check_unittest.py",
+ "checks/mojom_restrictions_checks_unittest.py",
+ "mojom_bindings_generator_unittest.py",
+ "//tools/diagnosis/crbug_1001171.py",
+ "//third_party/markupsafe/",
+ ]
+ data += mojom_generator_sources
+ data += jinja2_sources
+}
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/README.md b/utils/codegen/ipc/mojo/public/tools/bindings/README.md
new file mode 100644
index 00000000..b27b2d01
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/README.md
@@ -0,0 +1,1014 @@
+# Mojom Interface Definition Language (IDL)
+This document is a subset of the [Mojo documentation](/mojo/README.md).
+
+[TOC]
+
+## Overview
+
+Mojom is the IDL for Mojo interfaces. Given a `.mojom` file, the
+[bindings
+generator](https://cs.chromium.org/chromium/src/mojo/public/tools/bindings/) can
+output bindings for any supported language: **C++**, **JavaScript**, or
+**Java**.
+
+For a trivial example, consider the following hypothetical Mojom file we write to
+`//services/widget/public/mojom/frobinator.mojom`:
+
+```
+module widget.mojom;
+
+interface Frobinator {
+ Frobinate();
+};
+```
+
+This defines a single [interface](#Interfaces) named `Frobinator` in a
+[module](#Modules) named `widget.mojom` (and thus fully qualified in Mojom as
+`widget.mojom.Frobinator`.) Note that many interfaces and/or other types of
+definitions (structs, enums, *etc.*) may be included in a single Mojom file.
+
+If we add a corresponding GN target to
+`//services/widget/public/mojom/BUILD.gn`:
+
+```
+import("mojo/public/tools/bindings/mojom.gni")
+
+mojom("mojom") {
+ sources = [
+ "frobinator.mojom",
+ ]
+}
+```
+
+and then build this target:
+
+```
+ninja -C out/r services/widget/public/mojom
+```
+
+we'll find several generated sources in our output directory:
+
+```
+out/r/gen/services/widget/public/mojom/frobinator.mojom.cc
+out/r/gen/services/widget/public/mojom/frobinator.mojom.h
+out/r/gen/services/widget/public/mojom/frobinator.mojom-shared.h
+etc...
+```
+
+Each of these generated source modules includes a set of definitions
+representing the Mojom contents in C++. You can also build or depend on suffixed
+target names to get bindings for other languages. For example,
+
+```
+ninja -C out/r services/widget/public/mojom:mojom_js
+ninja -C out/r services/widget/public/mojom:mojom_java
+```
+
+would generate JavaScript and Java bindings respectively, in the same generated
+output directory.
+
+For more details regarding the generated
+outputs please see
+[documentation for individual target languages](#Generated-Code-For-Target-Languages).
+
+## Mojom Syntax
+
+Mojom IDL allows developers to define **structs**, **unions**, **interfaces**,
+**constants**, and **enums**, all within the context of a **module**. These
+definitions are used to generate code in the supported target languages at build
+time.
+
+Mojom files may **import** other Mojom files in order to reference their
+definitions.
+
+### Primitive Types
+Mojom supports a few basic data types which may be composed into structs or used
+for message parameters.
+
+| Type | Description
+|-------------------------------|-------------------------------------------------------|
+| `bool` | Boolean type (`true` or `false`.)
+| `int8`, `uint8` | Signed or unsigned 8-bit integer.
+| `int16`, `uint16` | Signed or unsigned 16-bit integer.
+| `int32`, `uint32` | Signed or unsigned 32-bit integer.
+| `int64`, `uint64` | Signed or unsigned 64-bit integer.
+| `float`, `double` | 32- or 64-bit floating point number.
+| `string` | UTF-8 encoded string.
+| `array<T>` | Array of any Mojom type *T*; for example, `array<uint8>` or `array<array<string>>`.
+| `array<T, N>` | Fixed-length array of any Mojom type *T*. The parameter *N* must be an integral constant.
+| `map<S, T>`                   | Associative array mapping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
+| `handle` | Generic Mojo handle. May be any type of handle, including a wrapped native platform handle.
+| `handle<message_pipe>` | Generic message pipe handle.
+| `handle<shared_buffer>` | Shared buffer handle.
+| `handle<data_pipe_producer>` | Data pipe producer handle.
+| `handle<data_pipe_consumer>` | Data pipe consumer handle.
+| `handle<platform>` | A native platform/OS handle.
+| *`pending_remote<InterfaceType>`* | Any user-defined Mojom interface type. This is sugar for a strongly-typed message pipe handle which should eventually be used to make outgoing calls on the interface.
+| *`pending_receiver<InterfaceType>`* | A pending receiver for any user-defined Mojom interface type. This is sugar for a more strongly-typed message pipe handle which is expected to receive request messages and should therefore eventually be bound to an implementation of the interface.
+| *`pending_associated_remote<InterfaceType>`* | An associated interface handle. See [Associated Interfaces](#Associated-Interfaces)
+| *`pending_associated_receiver<InterfaceType>`* | A pending associated receiver. See [Associated Interfaces](#Associated-Interfaces)
+| *T*? | An optional (nullable) value. Primitive numeric types (integers, floats, booleans, and enums) are not nullable. All other types are nullable.
+
+### Modules
+
+Every Mojom file may optionally specify a single **module** to which it belongs.
+
+This is used strictly for aggregating all defined symbols therein within a
+common Mojom namespace. The specific impact this has on generated bindings code
+varies for each target language. For example, if the following Mojom is used to
+generate bindings:
+
+```
+module business.stuff;
+
+interface MoneyGenerator {
+ GenerateMoney();
+};
+```
+
+Generated C++ bindings will define a class interface `MoneyGenerator` in the
+`business::stuff` namespace, while Java bindings will define an interface
+`MoneyGenerator` in the `org.chromium.business.stuff` package. JavaScript
+bindings at this time are unaffected by module declarations.
+
+**NOTE:** By convention in the Chromium codebase, **all** Mojom files should
+declare a module name with at least (and preferably exactly) one top-level name
+as well as an inner `mojom` module suffix. *e.g.*, `chrome.mojom`,
+`business.mojom`, *etc.*
+
+This convention makes it easy to tell which symbols are generated by Mojom when
+reading non-Mojom code, and it also avoids namespace collisions in the fairly
+common scenario where you have a real C++ or Java `Foo` along with a
+corresponding Mojom `Foo` for its serialized representation.
+
+### Imports
+
+If your Mojom references definitions from other Mojom files, you must **import**
+those files. Import syntax is as follows:
+
+```
+import "services/widget/public/mojom/frobinator.mojom";
+```
+
+Import paths are always relative to the top-level directory.
+
+Note that circular imports are **not** supported.
+
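+As a sketch, an importing file might then reference the imported definitions
+like so (the `WidgetFactory` interface here is purely illustrative):
+
+```
+module widget.mojom;
+
+import "services/widget/public/mojom/frobinator.mojom";
+
+interface WidgetFactory {
+  // The imported Frobinator interface can now be referenced by name.
+  CreateFrobinator() => (pending_remote<Frobinator> frobinator);
+};
+```
+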
+### Structs
+
+Structs are defined using the **struct** keyword, and they provide a way to
+group related fields together:
+
+``` cpp
+struct StringPair {
+ string first;
+ string second;
+};
+```
+
+Struct fields may be of any of the types listed above in the
+[Primitive Types](#Primitive-Types) section.
+
+Default values may be specified as long as they are constant:
+
+``` cpp
+struct Request {
+ int32 id = -1;
+ string details;
+};
+```
+
+What follows is a fairly
+comprehensive example using the supported field types:
+
+``` cpp
+struct StringPair {
+ string first;
+ string second;
+};
+
+enum AnEnum {
+ kYes,
+ kNo
+};
+
+interface SampleInterface {
+ DoStuff();
+};
+
+struct AllTheThings {
+ // Note that these types can never be marked nullable!
+ bool boolean_value;
+ int8 signed_8bit_value = 42;
+ uint8 unsigned_8bit_value;
+ int16 signed_16bit_value;
+ uint16 unsigned_16bit_value;
+ int32 signed_32bit_value;
+ uint32 unsigned_32bit_value;
+ int64 signed_64bit_value;
+ uint64 unsigned_64bit_value;
+ float float_value_32bit;
+ double float_value_64bit;
+ AnEnum enum_value = AnEnum.kYes;
+
+ // Strings may be nullable.
+ string? maybe_a_string_maybe_not;
+
+ // Structs may contain other structs. These may also be nullable.
+ StringPair some_strings;
+ StringPair? maybe_some_more_strings;
+
+ // In fact structs can also be nested, though in practice you must always make
+ // such fields nullable -- otherwise messages would need to be infinitely long
+ // in order to pass validation!
+ AllTheThings? more_things;
+
+ // Arrays may be templated over any Mojom type, and are always nullable:
+ array<int32> numbers;
+ array<int32>? maybe_more_numbers;
+
+ // Arrays of arrays of arrays... are fine.
+ array<array<array<AnEnum>>> this_works_but_really_plz_stop;
+
+ // The element type may be nullable if it's a type which is allowed to be
+ // nullable.
+ array<AllTheThings?> more_maybe_things;
+
+ // Fixed-size arrays get some extra validation on the receiving end to ensure
+ // that the correct number of elements is always received.
+ array<uint64, 2> uuid;
+
+ // Maps follow many of the same rules as arrays. Key types may be any
+ // non-handle, non-collection type, and value types may be any supported
+ // struct field type. Maps may also be nullable.
+ map<string, int32> one_map;
+ map<AnEnum, string>? maybe_another_map;
+ map<StringPair, AllTheThings?>? maybe_a_pretty_weird_but_valid_map;
+ map<StringPair, map<int32, array<map<string, string>?>?>?> ridiculous;
+
+ // And finally, all handle types are valid as struct fields and may be
+ // nullable. Note that interfaces and interface requests (the "Foo" and
+ // "Foo&" type syntax respectively) are just strongly-typed message pipe
+ // handles.
+ handle generic_handle;
+ handle<data_pipe_consumer> reader;
+ handle<data_pipe_producer>? maybe_writer;
+ handle<shared_buffer> dumping_ground;
+ handle<message_pipe> raw_message_pipe;
+ pending_remote<SampleInterface>? maybe_a_sample_interface_client_pipe;
+ pending_receiver<SampleInterface> non_nullable_sample_pending_receiver;
+ pending_receiver<SampleInterface>? nullable_sample_pending_receiver;
+ pending_associated_remote<SampleInterface> associated_interface_client;
+ pending_associated_receiver<SampleInterface> associated_pending_receiver;
+ pending_associated_receiver<SampleInterface>? maybe_another_pending_receiver;
+};
+```
+
+For details on how all of these different types translate to usable generated
+code, see
+[documentation for individual target languages](#Generated-Code-For-Target-Languages).
+
+### Unions
+
+Mojom supports tagged unions using the **union** keyword. A union is a
+collection of fields which may take the value of any single one of those fields
+at a time. Thus they provide a way to represent a variant value type while
+minimizing storage requirements.
+
+Union fields may be of any type supported by [struct](#Structs) fields. For
+example:
+
+```cpp
+union ExampleUnion {
+ string str;
+ StringPair pair;
+ int64 id;
+ array<uint64, 2> guid;
+ SampleInterface iface;
+};
+```
+
+For details on how unions like this translate to generated bindings code, see
+[documentation for individual target languages](#Generated-Code-For-Target-Languages).
+
+### Enumeration Types
+
+Enumeration types may be defined using the **enum** keyword either directly
+within a module or nested within the namespace of some struct or interface:
+
+```
+module business.mojom;
+
+enum Department {
+ kSales = 0,
+ kDev,
+};
+
+struct Employee {
+ enum Type {
+ kFullTime,
+ kPartTime,
+ };
+
+ Type type;
+ // ...
+};
+```
+
+C++ constant-style enum value names are preferred as specified in the
+[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html#Enumerator_Names).
+
+Similar to C-style enums, individual values may be explicitly assigned within an
+enum definition. By default, values are based at zero and increment by
+1 sequentially.
+
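+For instance (a small illustrative sketch):
+
+```
+enum Mode {
+  kAuto = 0,
+  kManual = 5,
+  kNight,  // Implicitly takes the value 6, continuing from kManual.
+};
+```
+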
+The effect of nested definitions on generated bindings varies depending on the
+target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages).
+
+### Constants
+
+Constants may be defined using the **const** keyword either directly within a
+module or nested within the namespace of some struct or interface:
+
+```
+module business.mojom;
+
+const string kServiceName = "business";
+
+struct Employee {
+ const uint64 kInvalidId = 0;
+
+ enum Type {
+ kFullTime,
+ kPartTime,
+ };
+
+ uint64 id = kInvalidId;
+ Type type;
+};
+```
+
+The effect of nested definitions on generated bindings varies depending on the
+target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages).
+
+### Features
+
+Features can be declared with a `name` and `default_state` and can be attached
+in mojo to interfaces or methods using the `RuntimeFeature` attribute. If the
+feature is disabled at runtime, the method will crash and the interface will
+refuse to be bound / instantiated. Features cannot be serialized to be sent over
+IPC at this time.
+
+```
+module experimental.mojom;
+
+feature kUseElevators {
+ const string name = "UseElevators";
+ const bool default_state = false;
+}
+
+[RuntimeFeature=kUseElevators]
+interface Elevator {
+ // This interface cannot be bound or called if the feature is disabled.
+}
+
+interface Building {
+ // This method cannot be called if the feature is disabled.
+ [RuntimeFeature=kUseElevators]
+ CallElevator(int floor);
+
+ // This method can be called.
+ RingDoorbell(int volume);
+}
+```
+
+### Interfaces
+
+An **interface** is a logical bundle of parameterized request messages. Each
+request message may optionally define a parameterized response message. Here's
+an example to define an interface `Foo` with various kinds of requests:
+
+```
+interface Foo {
+ // A request which takes no arguments and expects no response.
+ MyMessage();
+
+ // A request which has some arguments and expects no response.
+ MyOtherMessage(string name, array<uint8> bytes);
+
+ // A request which expects a single-argument response.
+ MyMessageWithResponse(string command) => (bool success);
+
+ // A request which expects a response with multiple arguments.
+ MyMessageWithMoarResponse(string a, string b) => (int8 c, int8 d);
+};
+```
+
+Anything which is a valid struct field type (see [Structs](#Structs)) is also a
+valid request or response argument type. The type notation is the same for both.
+
+### Attributes
+
+Mojom definitions may have their meaning altered by **attributes**, specified
+with a syntax similar to Java or C# attributes. There are a handful of
+interesting attributes supported today; a brief combined example follows the
+list below.
+
+* **`[Sync]`**:
+ The `Sync` attribute may be specified for any interface method which expects a
+ response. This makes it so that callers of the method can wait synchronously
+ for a response. See [Synchronous
+ Calls](/mojo/public/cpp/bindings/README.md#Synchronous-Calls) in the C++
+ bindings documentation. Note that sync methods are only actually synchronous
+ when called from C++.
+
+* **`[NoInterrupt]`**:
+ When a thread is waiting for a reply to a `Sync` message, it's possible to be
+ woken up to dispatch other unrelated incoming `Sync` messages. This measure
+ helps to avoid deadlocks. If a `Sync` message is also marked as `NoInterrupt`
+ however, this behavior is disabled: instead the calling thread will only wake
+ up for the precise message being waited upon. This attribute must be used with
+ extreme caution, because it can lead to deadlocks otherwise.
+
+* **`[Default]`**:
+ The `Default` attribute may be used to specify an enumerator value or union
+ field that will be used if an `Extensible` enumeration or union does not
+ deserialize to a known value on the receiver side, i.e. the sender is using a
+ newer version of the enum or union. This allows unknown values to be mapped to
+ a well-defined value that can be appropriately handled.
+
+ Note: The `Default` field for a union must be of nullable or integral type.
+ When a union is defaulted to this field, the field takes on the default value
+ for its type: null for nullable types, and zero/false for integral types.
+
+* **`[Extensible]`**:
+ The `Extensible` attribute may be specified for any enum or union definition.
+ For enums, this essentially disables builtin range validation when receiving
+ values of the enum type in a message, allowing older bindings to tolerate
+ unrecognized values from newer versions of the enum.
+
+ If an enum value within an extensible enum definition is affixed with the
+ `Default` attribute, out-of-range values for the enum will deserialize to that
+ default value. Only one enum value may be designated as the `Default`.
+
+ Similarly, a union marked `Extensible` will deserialize to its `Default` field
+ when an unrecognized field is received. Extensible unions MUST specify exactly
+ one `Default` field, and the field must be of nullable or integral type. When
+ defaulted to this field, the value is always null/zero/false as appropriate.
+
+ An `Extensible` enumeration REQUIRES that a `Default` value be specified,
+ so all new extensible enums should specify one.
+
+* **`[Native]`**:
+ The `Native` attribute may be specified for an empty struct declaration to
+ provide a nominal bridge between Mojo IPC and legacy `IPC::ParamTraits` or
+ `IPC_STRUCT_TRAITS*` macros. See [Repurposing Legacy IPC
+ Traits](/docs/mojo_ipc_conversion.md#repurposing-and-invocations) for more
+ details. Note support for this attribute is strictly limited to C++ bindings
+ generation.
+
+* **`[MinVersion=N]`**:
+ The `MinVersion` attribute is used to specify the version at which a given
+ field, enum value, interface method, or method parameter was introduced.
+ See [Versioning](#Versioning) for more details. `MinVersion` does not apply
+ to interfaces, structs or enums, but to the fields of those types.
+ `MinVersion` is not a module-global value, but it is ok to pretend it is by
+ skipping versions when adding fields or parameters.
+
+* **`[Stable]`**:
+ The `Stable` attribute specifies that a given mojom type or interface
+ definition can be considered stable over time, meaning it is safe to use for
+ things like persistent storage or communication between independent
+ version-skewed binaries. Stable definitions may only depend on builtin mojom
+ types or other stable definitions, and changes to such definitions MUST
+ preserve backward-compatibility through appropriate use of versioning.
+ Backward-compatibility of changes is enforced in the Chromium tree using a
+ strict presubmit check. See [Versioning](#Versioning) for more details on
+ backward-compatibility constraints.
+
+* **`[Uuid=<UUID>]`**:
+ Specifies a UUID to be associated with a given interface. The UUID is intended
+ to remain stable across all changes to the interface definition, including
+ name changes. The value given for this attribute should be a standard UUID
+ string representation as specified by RFC 4122. New UUIDs can be generated
+ with common tools such as `uuidgen`.
+
+* **`[RuntimeFeature=feature]`**
+ The `RuntimeFeature` attribute should reference a mojo `feature`. If this
+ feature is enabled (e.g. using `--enable-features={feature.name}`) then the
+ interface behaves entirely as expected. If the feature is not enabled the
+ interface cannot be bound to a concrete receiver or remote - attempting to do
+ so will result in the receiver or remote being reset() to an unbound state.
+ Note that this is a different concept to the build-time `EnableIf` directive.
+ `RuntimeFeature` is currently only supported for C++ bindings and has no
+ effect for, say, Java or TypeScript bindings (see https://crbug.com/1278253).
+
+* **`[EnableIf=value]`**:
+ The `EnableIf` attribute is used to conditionally enable definitions when the
+ mojom is parsed. If the `mojom` target in the GN file does not include the
+ matching `value` in the list of `enabled_features`, the definition will be
+ disabled. This is useful for mojom definitions that only make sense on one
+ platform. Note that the `EnableIf` attribute can only be set once per
+ definition and cannot be set at the same time as `EnableIfNot`. Also be aware
+ that only one condition can be tested, `EnableIf=value,xyz` introduces a new
+ `xyz` attribute. `xyz` is not part of the `EnableIf` condition that depends
+ only on the feature `value`. Complex conditions can be introduced via
+ enabled_features in `build.gn` files.
+
+* **`[EnableIfNot=value]`**:
+ The `EnableIfNot` attribute is used to conditionally enable definitions when
+ the mojom is parsed. If the `mojom` target in the GN file includes the
+ matching `value` in the list of `enabled_features`, the definition will be
+ disabled. This is useful for mojom definitions that only make sense on all but
+ one platform. Note that the `EnableIfNot` attribute can only be set once per
+ definition and cannot be set at the same time as `EnableIf`.
+
+* **`[ServiceSandbox=value]`**:
+ The `ServiceSandbox` attribute is used in Chromium to tag which sandbox a
+ service hosting an implementation of interface will be launched in. This only
+ applies to `C++` bindings. `value` should match a constant defined in an
+ imported `sandbox.mojom.Sandbox` enum (for Chromium this is
+ `//sandbox/policy/mojom/sandbox.mojom`), such as `kService`.
+
+* **`[RequireContext=enum]`**:
+ The `RequireContext` attribute is used in Chromium to tag interfaces that
+ should be passed (as remotes or receivers) only to privileged process
+ contexts. The process context must be an enum that is imported into the
+ mojom that defines the tagged interface. `RequireContext` may be used in
+ future to DCHECK or CHECK if remotes are made available in contexts that
+ conflict with the one provided in the interface definition. Process contexts
+ are not the same as the sandbox a process is running in, but will reflect
+ the set of capabilities provided to the service.
+
+* **`[AllowedContext=enum]`**:
+ The `AllowedContext` attribute is used in Chromium to tag methods that pass
+ remotes or receivers of interfaces that are marked with a `RequireContext`
+ attribute. The enum provided on the method must be equal or better (lower
+ numerically) than the one required on the interface being passed. At present
+ failing to specify an adequate `AllowedContext` value will cause mojom
+ generation to fail at compile time. In the future, DCHECKs or CHECKs might be
+ added to enforce that the method is only called from a process context that
+ meets the given `AllowedContext` value. The enum must be of the same type as
+ that specified in the interface's `RequireContext` attribute. Adding an
+ `AllowedContext` attribute to a method is a strong indication that you need
+ a detailed security review of your design - please reach out to the security
+ team.
+
+* **`[SupportsUrgent]`**:
+ The `SupportsUrgent` attribute is used in conjunction with
+ `mojo::UrgentMessageScope` in Chromium to tag messages as having high
+ priority. The IPC layer notifies the underlying scheduler upon both receiving
+ and processing an urgent message. At present, this attribute only affects
+ channel associated messages in the renderer process.
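+
+The sketch below is illustrative only; the feature, struct, interface and UUID
+values are hypothetical. It shows how a few of the attributes above can be
+combined in a mojom file:
+
+``` cpp
+feature kUseElevator {
+  const string name = "UseElevator";
+  const bool enabled_state = false;
+};
+
+// Only generated when "is_special_build" appears in the mojom target's
+// enabled_features list.
+[EnableIf=is_special_build]
+struct DiagnosticsInfo {
+  string log;
+};
+
+[Stable, Uuid="6D63E694-8817-4C2B-8B59-6B7D73B4B6A7",
+ RuntimeFeature=kUseElevator]
+interface Elevator {
+  GoToFloor(int32 floor) => (bool reached);
+};
+```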
+
+## Generated Code For Target Languages
+
+When the bindings generator successfully processes an input Mojom file, it emits
+corresponding code for each supported target language. For more details on how
+Mojom concepts translate to a given target language, please refer to the
+bindings API documentation for that language:
+
+* [C++ Bindings](/mojo/public/cpp/bindings/README.md)
+* [JavaScript Bindings](/mojo/public/js/README.md)
+* [Java Bindings](/mojo/public/java/bindings/README.md)
+
+## Message Validation
+
+Regardless of target language, all interface messages are validated during
+deserialization before they are dispatched to a receiving implementation of the
+interface. This helps to ensure consistent validation across interfaces without
+placing the burden on developers and security reviewers every time a new message
+is added.
+
+If a message fails validation, it is never dispatched. Instead a **connection
+error** is raised on the binding object (see
+[C++ Connection Errors](/mojo/public/cpp/bindings/README.md#Connection-Errors),
+[Java Connection Errors](/mojo/public/java/bindings/README.md#Connection-Errors),
+or
+[JavaScript Connection Errors](/mojo/public/js/README.md#Connection-Errors) for
+details.)
+
+Some baseline level of validation is done automatically for primitive Mojom
+types.
+
+### Non-Nullable Objects
+
+Mojom fields or parameter values (*e.g.*, structs, interfaces, arrays, *etc.*)
+may be marked nullable in Mojom definitions (see
+[Primitive Types](#Primitive-Types).) If a field or parameter is **not** marked
+nullable but a message is received with a null value in its place, that message
+will fail validation.
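+
+As a minimal illustration (with a hypothetical struct), a message carrying a
+null `name` below would fail validation, while a null `nickname` is accepted:
+
+``` cpp
+struct Person {
+  string name;       // Non-nullable: a null value fails validation.
+  string? nickname;  // Nullable: a null value is accepted.
+};
+```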
+
+### Enums
+
+Enums declared in Mojom are automatically validated against the range of legal
+values. For example if a Mojom declares the enum:
+
+``` cpp
+enum AdvancedBoolean {
+ kTrue = 0,
+ kFalse = 1,
+ kFileNotFound = 2,
+};
+```
+
+and a message is received with the integral value 3 (or anything other than 0,
+1, or 2) in place of some `AdvancedBoolean` field or parameter, the message will
+fail validation.
+
+*** note
+NOTE: It's possible to avoid this type of validation error by explicitly marking
+an enum as [Extensible](#Attributes) if you anticipate your enum being exchanged
+between two different versions of the binding interface. See
+[Versioning](#Versioning).
+***
+
+### Other failures
+
+There are a host of internal validation errors that may occur when a malformed
+message is received, but developers should not be concerned with these
+specifically; in general they can only result from internal bindings bugs,
+compromised processes, or some remote endpoint making a dubious effort to
+manually encode their own bindings messages.
+
+### Custom Validation
+
+It's also possible for developers to define custom validation logic for specific
+Mojom struct types by exploiting the
+[type mapping](/mojo/public/cpp/bindings/README.md#Type-Mapping) system for C++
+bindings. Messages rejected by custom validation logic trigger the same
+validation failure behavior as the built-in type validation routines.
+
+## Associated Interfaces
+
+As mentioned in the [Primitive Types](#Primitive-Types) section above,
+`pending_remote` and `pending_receiver` fields and parameters may be marked as
+`associated`. This
+essentially means that they are piggy-backed on some other interface's message
+pipe.
+
+Because individual interface message pipes operate independently there can be no
+relative ordering guarantees among them. Associated interfaces are useful when
+one interface needs to guarantee strict FIFO ordering with respect to one or
+more other interfaces, as they allow interfaces to share a single pipe.
+
+Currently associated interfaces are only supported in generated C++ bindings.
+See the documentation for
+[C++ Associated Interfaces](/mojo/public/cpp/bindings/README.md#Associated-Interfaces).
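+
+For illustration, the hypothetical sketch below passes an associated remote
+over another interface's pipe so that messages on both interfaces keep their
+relative ordering:
+
+``` cpp
+interface TabState {
+  OnTitleChanged(string title);
+};
+
+interface Window {
+  // |state| shares the Window connection's message pipe, so TabState
+  // messages stay ordered relative to Window messages.
+  AddTab(pending_associated_remote<TabState> state);
+};
+```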
+
+## Versioning
+
+### Overview
+
+*** note
+**NOTE:** You don't need to worry about versioning if you don't care about
+backwards compatibility. Today, all parts of the Chrome browser are
+updated atomically and there is not yet any possibility of any two
+Chrome processes communicating with two different versions of any given Mojom
+interface. On Chrome OS, there are several places where versioning is required.
+For example,
+[ARC++](https://developer.android.com/chrome-os/intro)
+uses versioned mojo to send IPC to the Android container.
+Likewise, the
+[Lacros](/docs/lacros.md)
+browser uses versioned mojo to talk to the ash system UI.
+***
+
+Services extend their interfaces to support new features over time, and clients
+want to use those new features when they are available. If services and clients
+are not updated at the same time, it's important for them to be able to
+communicate with each other using different snapshots (versions) of their
+interfaces.
+
+This document shows how to extend Mojom interfaces in a backwards-compatible
+way. Changing interfaces in a non-backwards-compatible way is not discussed,
+because in that case communication between different interface versions is
+impossible anyway.
+
+### Versioned Structs
+
+You can use the `MinVersion` [attribute](#Attributes) to indicate the version
+in which a struct field was introduced. Assume you have the following struct:
+
+``` cpp
+struct Employee {
+ uint64 employee_id;
+ string name;
+};
+```
+
+and you would like to add birthday and nickname fields. You can add them as
+optional types with a `MinVersion` like so:
+
+``` cpp
+struct Employee {
+ uint64 employee_id;
+ string name;
+ [MinVersion=1] Date? birthday;
+ [MinVersion=1] string? nickname;
+};
+```
+
+*** note
+**NOTE:** Mojo object or handle types added with a `MinVersion` **MUST** be
+optional (nullable) or primitive. See [Primitive Types](#Primitive-Types) for
+details on nullable values.
+***
+
+By default, fields belong to version 0. New fields must be appended to the
+struct definition (*i.e.*, existing fields must not change **ordinal value**)
+with the `MinVersion` attribute set to a number greater than any previously
+existing version.
+
+The value of `MinVersion` is unrelated to ordinals. The choice of a particular
+version number is arbitrary; its only effect is to indicate that the field is
+not present before the numbered version.
+
+*** note
+**NOTE:** do not change existing fields in versioned structs, as this is
+not backwards-compatible. Instead, rename the old field to make its
+deprecation clear and add a new field with a new `MinVersion` number.
+***
+
+**Ordinal value** refers to the relative positional layout of a struct's fields
+(and an interface's methods) when encoded in a message. Implicitly, ordinal
+numbers are assigned to fields according to lexical position. In the example
+above, `employee_id` has an ordinal value of 0 and `name` has an ordinal value
+of 1.
+
+Ordinal values can be specified explicitly using **`@`** notation, subject to
+the following hard constraints:
+
+* For any given struct or interface, if any field or method explicitly specifies
+ an ordinal value, all fields or methods must explicitly specify an ordinal
+ value.
+* For an *N*-field struct, the set of explicitly assigned ordinal values must be
+ limited to the range *[0, N-1]*. Structs should include placeholder fields
+ to fill the ordinal positions of removed fields (for example "Unused_Field"
+ or "RemovedField", etc).
+
+You may reorder fields, but you must ensure that the ordinal values of existing
+fields remain unchanged. For example, the following struct remains
+backwards-compatible:
+
+``` cpp
+struct Employee {
+ uint64 employee_id@0;
+ [MinVersion=1] Date? birthday@2;
+ string name@1;
+ [MinVersion=1] string? nickname@3;
+};
+```
+
+### Versioned Interfaces
+
+There are two dimensions on which an interface can be extended:
+
+**Appending New Parameters To Existing Methods**
+: Parameter lists are treated as structs internally, so all the rules of
+ versioned structs apply to method parameter lists. The only difference is
+ that the version number is scoped to the whole interface rather than to any
+ individual parameter list.
+
+``` cpp
+// Old version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id) => (Employee? employee);
+};
+
+// New version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
+ => (Employee? employee,
+ [MinVersion=1] array<uint8>? finger_print);
+};
+```
+
+Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
+list of a request or response method to a destination using an older version of
+an interface, unrecognized fields are silently discarded.
+
+ Please note that adding a response to a message which did not previously
+ expect a response is not a backwards-compatible change.
+
+**Appending New Methods**
+: New methods can be appended to an interface. Similarly to struct fields, you
+ can also reorder methods with explicit ordinal values as long as the ordinal
+ values of existing methods are unchanged.
+
+For example:
+
+``` cpp
+// Old version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id) => (Employee? employee);
+};
+
+// New version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id) => (Employee? employee);
+
+ [MinVersion=1]
+ AttachFingerPrint(uint64 id, array<uint8> finger_print)
+ => (bool success);
+};
+```
+
+If a method call is not recognized, it is considered a validation error and the
+receiver will close its end of the interface pipe. For example, if a client on
+version 1 of the above interface sends an `AttachFingerPrint` request to an
+implementation of version 0, the client will be disconnected.
+
+Bindings target languages that support versioning expose means to query or
+assert the remote version from a client handle (*e.g.*, a
+`mojo::Remote<T>` in C++ bindings.)
+
+See
+[C++ Versioning Considerations](/mojo/public/cpp/bindings/README.md#Versioning-Considerations)
+and
+[Java Versioning Considerations](/mojo/public/java/bindings/README.md#Versioning-Considerations).
+
+### Versioned Enums
+
+**By default, enums are non-extensible**, which means that generated message
+validation code does not expect to see new values in the future. When an unknown
+value is seen for a non-extensible enum field or parameter, a validation error
+is raised.
+
+If you want an enum to be extensible in the future, you can apply the
+`[Extensible]` [attribute](#Attributes):
+
+``` cpp
+[Extensible]
+enum Department {
+ kSales,
+ kDev,
+};
+```
+
+And later you can extend this enum without breaking backwards compatibility:
+
+``` cpp
+[Extensible]
+enum Department {
+ kSales,
+ kDev,
+ [MinVersion=1] kResearch,
+};
+```
+
+*** note
+**NOTE:** For versioned enum definitions, the use of a `[MinVersion]` attribute
+is strictly for documentation purposes. It has no impact on the generated code.
+***
+
+With extensible enums, bound interface implementations may receive unknown enum
+values and will need to deal with them gracefully. See
+[C++ Versioning Considerations](/mojo/public/cpp/bindings/README.md#Versioning-Considerations)
+for details.
+
+### Renaming versioned structs
+
+It's possible to rename versioned structs by using the `[RenamedFrom]`
+attribute:
+
+``` cpp
+module asdf.mojom;
+
+// Old version:
+[Stable]
+struct OldStruct {
+};
+
+// New version:
+[Stable, RenamedFrom="asdf.mojom.OldStruct"]
+struct NewStruct {
+};
+```
+
+## Component targets
+
+If there are multiple components depending on the same mojom target within one binary,
+the target will need to be defined as `mojom_component` instead of `mojom`.
+Since `mojom` targets are generated `source_set` targets and `mojom_component` targets
+are generated `component` targets, you would use `mojom_component` in the same cases
+where you would use `component` for non-mojom files.
+*** note
+**NOTE**: by default, components for both blink and non-blink bindings are generated.
+Use the `disable_variants` target parameter to generate only non-blink bindings.
+You can also generate a `source_set` for one of the variants by defining
+[export_*](https://source.chromium.org/chromium/chromium/src/+/main:mojo/public/tools/bindings/mojom.gni;drc=739b9fbce50310c1dd2b59c279cd90a9319cb6e8;l=318)
+parameters for the `mojom_component` target.
+***
+
+## Grammar Reference
+
+Below is the (BNF-ish) context-free grammar of the Mojom language:
+
+```
+MojomFile = StatementList
+StatementList = Statement StatementList | Statement
+Statement = ModuleStatement | ImportStatement | Definition
+
+ModuleStatement = AttributeSection "module" Identifier ";"
+ImportStatement = "import" StringLiteral ";"
+Definition = Struct | Union | Interface | Enum | Feature | Const
+
+AttributeSection = <empty> | "[" AttributeList "]"
+AttributeList = <empty> | NonEmptyAttributeList
+NonEmptyAttributeList = Attribute
+ | Attribute "," NonEmptyAttributeList
+Attribute = Name
+ | Name "=" Name
+ | Name "=" Literal
+
+Struct = AttributeSection "struct" Name "{" StructBody "}" ";"
+ | AttributeSection "struct" Name ";"
+StructBody = <empty>
+ | StructBody Const
+ | StructBody Enum
+ | StructBody StructField
+StructField = AttributeSection TypeSpec Name Ordinal Default ";"
+
+Union = AttributeSection "union" Name "{" UnionBody "}" ";"
+UnionBody = <empty> | UnionBody UnionField
+UnionField = AttributeSection TypeSpec Name Ordinal ";"
+
+Interface = AttributeSection "interface" Name "{" InterfaceBody "}" ";"
+InterfaceBody = <empty>
+ | InterfaceBody Const
+ | InterfaceBody Enum
+ | InterfaceBody Method
+Method = AttributeSection Name Ordinal "(" ParameterList ")" Response ";"
+ParameterList = <empty> | NonEmptyParameterList
+NonEmptyParameterList = Parameter
+ | Parameter "," NonEmptyParameterList
+Parameter = AttributeSection TypeSpec Name Ordinal
+Response = <empty> | "=>" "(" ParameterList ")"
+
+TypeSpec = TypeName "?" | TypeName
+TypeName = BasicTypeName
+ | Array
+ | FixedArray
+ | Map
+ | InterfaceRequest
+BasicTypeName = Identifier | "associated" Identifier | HandleType | NumericType
+NumericType = "bool" | "int8" | "uint8" | "int16" | "uint16" | "int32"
+ | "uint32" | "int64" | "uint64" | "float" | "double"
+HandleType = "handle" | "handle" "<" SpecificHandleType ">"
+SpecificHandleType = "message_pipe"
+ | "shared_buffer"
+ | "data_pipe_consumer"
+ | "data_pipe_producer"
+ | "platform"
+Array = "array" "<" TypeSpec ">"
+FixedArray = "array" "<" TypeSpec "," IntConstDec ">"
+Map = "map" "<" Identifier "," TypeSpec ">"
+InterfaceRequest = Identifier "&" | "associated" Identifier "&"
+
+Ordinal = <empty> | OrdinalValue
+
+Default = <empty> | "=" Constant
+
+Enum = AttributeSection "enum" Name "{" NonEmptyEnumValueList "}" ";"
+ | AttributeSection "enum" Name "{" NonEmptyEnumValueList "," "}" ";"
+NonEmptyEnumValueList = EnumValue | NonEmptyEnumValueList "," EnumValue
+EnumValue = AttributeSection Name
+ | AttributeSection Name "=" Integer
+ | AttributeSection Name "=" Identifier
+
+; Note: `feature` is a weak keyword and can appear as, say, a struct field name.
+Feature = AttributeSection "feature" Name "{" FeatureBody "}" ";"
+ | AttributeSection "feature" Name ";"
+FeatureBody = <empty>
+ | FeatureBody FeatureField
+FeatureField = AttributeSection TypeSpec Name Default ";"
+
+Const = "const" TypeSpec Name "=" Constant ";"
+
+Constant = Literal | Identifier
+
+Identifier = Name | Name "." Identifier
+
+Literal = Integer | Float | "true" | "false" | "default" | StringLiteral
+
+Integer = IntConst | "+" IntConst | "-" IntConst
+IntConst = IntConstDec | IntConstHex
+
+Float = FloatConst | "+" FloatConst | "-" FloatConst
+
+; The rules below are for tokens matched strictly according to the given regexes
+
+Identifier = /[a-zA-Z_][0-9a-zA-Z_]*/
+IntConstDec = /0|([1-9][0-9]*)/
+IntConstHex = /0[xX][0-9a-fA-F]+/
+OrdinalValue = /@(0|([1-9][0-9]*))/
+FloatConst = ... # Imagine it's close enough to C-style float syntax.
+StringLiteral = ... # Imagine it's close enough to C-style string literals, including escapes.
+```
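+
+As a rough illustration of the grammar above, here is a small mojom file
+(with hypothetical names and import path) that exercises several of its
+productions:
+
+``` cpp
+module example.mojom;
+
+import "other/types.mojom";
+
+const uint32 kDefaultCount = 10;
+
+enum Mode {
+  kFast,
+  kSlow = 5,
+};
+
+struct Request {
+  Mode mode = Mode.kFast;
+  array<uint8>? payload;
+};
+
+interface Service {
+  Query@0(Request request) => (bool success);
+};
+```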
+
+## Additional Documentation
+
+[Mojom Message Format](https://docs.google.com/document/d/13pv9cFh5YKuBggDBQ1-AL8VReF-IYpFOFpRfvWFrwio/edit)
+: Describes the wire format used by Mojo bindings interfaces over message
+ pipes.
+
+[Input Format of Mojom Message Validation Tests](https://docs.google.com/document/d/1-y-2IYctyX2NPaLxJjpJfzVNWCC2SR2MJAD9MpIytHQ/edit)
+: Describes a text format used to facilitate bindings message validation
+ tests.
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py
new file mode 100644
index 00000000..e6e4f2c9
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py
@@ -0,0 +1,170 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate mojo attributes are allowed in Chrome before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+_COMMON_ATTRIBUTES = {
+ 'EnableIf',
+ 'EnableIfNot',
+}
+
+# For struct, union & parameter lists.
+_COMMON_FIELD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'MinVersion',
+ 'RenamedFrom',
+}
+
+# Note: `Default` goes on the default _value_, not on the enum.
+# Note: [Stable] without [Extensible] is not allowed.
+_ENUM_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Extensible',
+ 'Native',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+# TODO(crbug.com/1234883) MinVersion is not needed for EnumVal.
+_ENUMVAL_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Default',
+ 'MinVersion',
+}
+
+_INTERFACE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'RenamedFrom',
+ 'RequireContext',
+ 'RuntimeFeature',
+ 'ServiceSandbox',
+ 'Stable',
+ 'Uuid',
+}
+
+_METHOD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'AllowedContext',
+ 'MinVersion',
+ 'NoInterrupt',
+ 'RuntimeFeature',
+ 'SupportsUrgent',
+ 'Sync',
+ 'UnlimitedSize',
+}
+
+_MODULE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'JavaConstantsClassName',
+ 'JavaPackage',
+}
+
+_PARAMETER_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
+
+_STRUCT_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'CustomSerializer',
+ 'JavaClassName',
+ 'Native',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+_STRUCT_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
+
+_UNION_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Extensible',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+_UNION_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES | {
+ 'Default',
+}
+
+# TODO(https://crbug.com/1193875) empty this set and remove the allowlist.
+_STABLE_ONLY_ALLOWLISTED_ENUMS = {
+ 'crosapi.mojom.OptionalBool',
+ 'crosapi.mojom.TriState',
+}
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ def _Respell(self, allowed, attribute):
+ for a in allowed:
+ if a.lower() == attribute.lower():
+ return f" - Did you mean: {a}?"
+ return ""
+
+ def _CheckAttributes(self, context, allowed, attributes):
+ if not attributes:
+ return
+ for attribute in attributes:
+ if attribute not in allowed:
+ # Is there a close misspelling?
+ hint = self._Respell(allowed, attribute)
+ raise check.CheckException(
+ self.module,
+ f"attribute {attribute} not allowed on {context}{hint}")
+
+ def _CheckEnumAttributes(self, enum):
+ if enum.attributes:
+ self._CheckAttributes("enum", _ENUM_ATTRIBUTES, enum.attributes)
+ if 'Stable' in enum.attributes and not 'Extensible' in enum.attributes:
+ full_name = f"{self.module.mojom_namespace}.{enum.mojom_name}"
+ if full_name not in _STABLE_ONLY_ALLOWLISTED_ENUMS:
+ raise check.CheckException(
+ self.module,
+ f"[Extensible] required on [Stable] enum {full_name}")
+ for enumval in enum.fields:
+ self._CheckAttributes("enum value", _ENUMVAL_ATTRIBUTES,
+ enumval.attributes)
+
+ def _CheckInterfaceAttributes(self, interface):
+ self._CheckAttributes("interface", _INTERFACE_ATTRIBUTES,
+ interface.attributes)
+ for method in interface.methods:
+ self._CheckAttributes("method", _METHOD_ATTRIBUTES, method.attributes)
+ for param in method.parameters:
+ self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
+ param.attributes)
+ if method.response_parameters:
+ for param in method.response_parameters:
+ self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
+ param.attributes)
+ for enum in interface.enums:
+ self._CheckEnumAttributes(enum)
+
+ def _CheckModuleAttributes(self):
+ self._CheckAttributes("module", _MODULE_ATTRIBUTES, self.module.attributes)
+
+ def _CheckStructAttributes(self, struct):
+ self._CheckAttributes("struct", _STRUCT_ATTRIBUTES, struct.attributes)
+ for field in struct.fields:
+ self._CheckAttributes("struct field", _STRUCT_FIELD_ATTRIBUTES,
+ field.attributes)
+ for enum in struct.enums:
+ self._CheckEnumAttributes(enum)
+
+ def _CheckUnionAttributes(self, union):
+ self._CheckAttributes("union", _UNION_ATTRIBUTES, union.attributes)
+ for field in union.fields:
+ self._CheckAttributes("union field", _UNION_FIELD_ATTRIBUTES,
+ field.attributes)
+
+ def CheckModule(self):
+ """Note that duplicate attributes are forbidden at the parse phase.
+ We also do not need to look at the types of any parameters, as they will be
+ checked where they are defined. Consts do not have attributes so can be
+ skipped."""
+ self._CheckModuleAttributes()
+ for interface in self.module.interfaces:
+ self._CheckInterfaceAttributes(interface)
+ for enum in self.module.enums:
+ self._CheckEnumAttributes(enum)
+ for struct in self.module.structs:
+ self._CheckStructAttributes(struct)
+ for union in self.module.unions:
+ self._CheckUnionAttributes(union)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py
new file mode 100644
index 00000000..f1a50a4a
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py
@@ -0,0 +1,194 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'attributes'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def _testValid(self, filename, content):
+ self.WriteFile(filename, content)
+ self._ParseAndGenerate([filename])
+
+ def _testThrows(self, filename, content, regexp):
+ mojoms = []
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('attributes')
+ self.assertTrue(check_modules['attributes'])
+
+ def testNoAnnotations(self):
+ # Undecorated mojom should be fine.
+ self._testValid(
+ "a.mojom", """
+ module a;
+ struct Bar { int32 a; };
+ enum Hello { kValue };
+ union Thingy { Bar b; Hello hi; };
+ interface Foo {
+ Foo(int32 a, Hello hi, Thingy t) => (Bar b);
+ };
+ """)
+
+ def testValidAnnotations(self):
+ # Obviously this is meaningless and won't generate, but it should pass
+ # the attribute check's validation.
+ self._testValid(
+ "a.mojom", """
+ [JavaConstantsClassName="FakeClass",JavaPackage="org.chromium.Fake"]
+ module a;
+ [Stable, Extensible]
+ enum Hello { [Default] kValue, kValue2, [MinVersion=2] kValue3 };
+ [Native]
+ enum NativeEnum {};
+ [Stable,Extensible]
+ union Thingy { Bar b; [Default]int32 c; Hello hi; };
+
+ [Stable,RenamedFrom="module.other.Foo",
+ Uuid="4C178401-4B07-4C2E-9255-5401A943D0C7"]
+ struct Structure { Hello hi; };
+
+ [ServiceSandbox=Hello.kValue,RequireContext=Hello.kValue,Stable,
+ Uuid="2F17D7DD-865A-4B1C-9394-9C94E035E82F"]
+ interface Foo {
+ [AllowedContext=Hello.kValue]
+ Foo@0(int32 a) => (int32 b);
+ [MinVersion=2,Sync,UnlimitedSize,NoInterrupt]
+ Bar@1(int32 b, [MinVersion=2]Structure? s) => (bool c);
+ };
+
+ [RuntimeFeature=test.mojom.FeatureName]
+ interface FooFeatureControlled {};
+
+ interface FooMethodFeatureControlled {
+ [RuntimeFeature=test.mojom.FeatureName]
+ MethodWithFeature() => (bool c);
+ };
+ """)
+
+ def testWrongModuleStable(self):
+ contents = """
+ // err: module cannot be Stable
+ [Stable]
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute Stable not allowed on module')
+
+ def testWrongEnumDefault(self):
+ contents = """
+ module a;
+ // err: default should go on EnumValue not Enum.
+ [Default=kValue]
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute Default not allowed on enum')
+
+ def testWrongStructMinVersion(self):
+ contents = """
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ // err: struct cannot have MinVersion.
+ [MinVersion=2]
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute MinVersion not allowed on struct')
+
+ def testWrongMethodRequireContext(self):
+ contents = """
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ // err: RequireContext is for interfaces.
+ [RequireContext=Hello.kValue]
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext not allowed on method')
+
+ def testMisspelledSyncAttribute(self):
+ # crbug.com/1230122
+ contents = """
+ module a;
+ interface Foo {
+ // err: sync not Sync.
+ [sync]
+ Foo(int32 a) => (int32 b);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute sync not allowed.*Did you mean: Sync')
+
+ def testStableExtensibleEnum(self):
+ # crbug.com/1193875
+ contents = """
+ module a;
+ [Stable]
+ enum Foo {
+ kDefaultVal,
+ kOtherVal = 2,
+ };
+ """
+ self._testThrows('a.mojom', contents,
+ 'Extensible.*?required.*?Stable.*?enum')
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py
new file mode 100644
index 00000000..702d41c3
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Ensure no duplicate type definitions before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ def CheckModule(self):
+ kinds = dict()
+ for module in self.module.imports:
+ for kind in module.enums + module.structs + module.unions:
+ kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
+ if kind_name in kinds:
+ previous_module = kinds[kind_name]
+ if previous_module.path != module.path:
+ raise check.CheckException(
+ self.module, f"multiple-definition for type {kind_name}" +
+ f"(defined in both {previous_module} and {module})")
+ kinds[kind_name] = kind.module
+
+ for kind in self.module.enums + self.module.structs + self.module.unions:
+ kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
+ if kind_name in kinds:
+ previous_module = kinds[kind_name]
+ raise check.CheckException(
+ self.module, f"multiple-definition for type {kind_name}" +
+ f"(previous definition in {previous_module})")
+ return True
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py
new file mode 100644
index 00000000..07f51a64
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py
@@ -0,0 +1,62 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate mojo runtime feature guarded interfaces are nullable."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ # `kind` is a pending remote/receiver kind wrapping an Interface of some sort.
+ def _CheckNonNullableFeatureGuardedInterface(self, kind):
+ # Only need to validate interface if it has a RuntimeFeature
+ if not kind.kind.runtime_feature:
+ return
+ # Nullable (optional) is ok, as the receiving side expects that it might not be sent.
+ if kind.is_nullable:
+ return
+ interface = kind.kind.mojom_name
+ raise check.CheckException(
+ self.module,
+ f"interface {interface} has a RuntimeFeature but is not nullable")
+
+ # `kind` can be a lot of things, so check whether it is a remote/receiver.
+ # Arrays and maps must be recursed into.
+ def _CheckFieldOrParam(self, kind):
+ if module.IsAnyInterfaceKind(kind):
+ self._CheckNonNullableFeatureGuardedInterface(kind)
+ if module.IsArrayKind(kind):
+ self._CheckFieldOrParam(kind.kind)
+ if module.IsMapKind(kind):
+ self._CheckFieldOrParam(kind.key_kind)
+ self._CheckFieldOrParam(kind.value_kind)
+
+ def _CheckInterfaceFeatures(self, interface):
+ for method in interface.methods:
+ for param in method.parameters:
+ self._CheckFieldOrParam(param.kind)
+ if method.response_parameters:
+ for param in method.response_parameters:
+ self._CheckFieldOrParam(param.kind)
+
+ def _CheckStructFeatures(self, struct):
+ for field in struct.fields:
+ self._CheckFieldOrParam(field.kind)
+
+ def _CheckUnionFeatures(self, union):
+ for field in union.fields:
+ self._CheckFieldOrParam(field.kind)
+
+ def CheckModule(self):
+ """Validate that any runtime feature guarded interfaces that might be passed
+ over mojo are nullable."""
+ for interface in self.module.interfaces:
+ self._CheckInterfaceFeatures(interface)
+ for struct in self.module.structs:
+ self._CheckStructFeatures(struct)
+ for union in self.module.unions:
+ self._CheckUnionFeatures(union)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py
new file mode 100644
index 00000000..e96152fd
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py
@@ -0,0 +1,173 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'features'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def assertValid(self, filename, content):
+ self.WriteFile(filename, content)
+ self._ParseAndGenerate([filename])
+
+ def assertThrows(self, filename, content, regexp):
+ mojoms = []
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('features')
+ self.assertTrue(check_modules['features'])
+
+ def testNullableOk(self):
+ self.assertValid(
+ "a.mojom", """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded {
+ };
+
+ // Unguarded interfaces should be ok everywhere.
+ interface NotGuarded { };
+
+ // Optional (nullable) interfaces should be ok everywhere:
+ struct Bar {
+ pending_remote<Guarded>? remote;
+ pending_receiver<Guarded>? receiver;
+ };
+ union Thingy {
+ pending_remote<Guarded>? remote;
+ pending_receiver<Guarded>? receiver;
+ };
+ interface Foo {
+ Foo(
+ pending_remote<Guarded>? remote,
+ pending_receiver<Guarded>? receiver,
+ pending_associated_remote<Guarded>? a_remote,
+ pending_associated_receiver<Guarded>? a_receiver,
+ // Unguarded interfaces do not have to be nullable.
+ pending_remote<NotGuarded> remote,
+ pending_receiver<NotGuarded> receiver,
+ pending_associated_remote<NotGuarded> a_remote,
+ pending_associated_receiver<NotGuarded> a_receiver
+ ) => (
+ pending_remote<Guarded>? remote,
+ pending_receiver<Guarded>? receiver
+ );
+ Bar(array<pending_remote<Guarded>?> remote)
+ => (map<string, pending_receiver<Guarded>?> a);
+ };
+ """)
+
+ def testMethodParamsMustBeNullable(self):
+ prelude = """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded { };
+ """
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_remote<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(bool foo) => (pending_receiver<Guarded> a);
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_receiver<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_associated_remote<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_associated_receiver<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(array<pending_associated_receiver<Guarded>> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(map<string, pending_associated_receiver<Guarded>> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+
+ def testStructUnionMembersMustBeNullable(self):
+ prelude = """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded { };
+ """
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ struct Trial {
+ pending_remote<Guarded> a;
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ union Trial {
+ pending_remote<Guarded> a;
+ };
+ """, 'interface Guarded has a RuntimeFeature')
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py
new file mode 100644
index 00000000..d570e26c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py
@@ -0,0 +1,102 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate RequireContext and AllowedContext annotations before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ self.kind_to_interfaces = dict()
+ super(Check, self).__init__(*args, **kwargs)
+
+ def _IsPassedInterface(self, candidate):
+ if isinstance(
+ candidate.kind,
+ (module.PendingReceiver, module.PendingRemote,
+ module.PendingAssociatedReceiver, module.PendingAssociatedRemote)):
+ return True
+ return False
+
+ def _CheckInterface(self, method, param):
+ # |param| is a pending_x<Interface> so need .kind.kind to get Interface.
+ interface = param.kind.kind
+ if interface.require_context:
+ if method.allowed_context is None:
+ raise check.CheckException(
+ self.module, "method `{}` has parameter `{}` which passes interface"
+ " `{}` that requires an AllowedContext annotation but none exists.".
+ format(
+ method.mojom_name,
+ param.mojom_name,
+ interface.mojom_name,
+ ))
+ # If a string was provided, or if an enum was not imported, this will
+ # be a string and we cannot validate that it is in range.
+ if not isinstance(method.allowed_context, module.EnumValue):
+ raise check.CheckException(
+ self.module,
+ "method `{}` has AllowedContext={} which is not a valid enum value."
+ .format(method.mojom_name, method.allowed_context))
+ # EnumValue must be from the same enum to be compared.
+ if interface.require_context.enum != method.allowed_context.enum:
+ raise check.CheckException(
+ self.module, "method `{}` has parameter `{}` which passes interface"
+ " `{}` that requires AllowedContext={} but one of kind `{}` was "
+ "provided.".format(
+ method.mojom_name,
+ param.mojom_name,
+ interface.mojom_name,
+ interface.require_context.enum,
+ method.allowed_context.enum,
+ ))
+ # RestrictContext enums have most privileged field first (lowest value).
+ interface_value = interface.require_context.field.numeric_value
+ method_value = method.allowed_context.field.numeric_value
+ if interface_value < method_value:
+ raise check.CheckException(
+ self.module, "RequireContext={} > AllowedContext={} for method "
+ "`{}` which passes interface `{}`.".format(
+ interface.require_context.GetSpec(),
+ method.allowed_context.GetSpec(), method.mojom_name,
+ interface.mojom_name))
+ return True
+
+ def _GatherReferencedInterfaces(self, field):
+ key = field.kind.spec
+ # structs/unions can nest themselves so we need to bookkeep.
+ if not key in self.kind_to_interfaces:
+ # Might reference ourselves so have to create the list first.
+ self.kind_to_interfaces[key] = set()
+ for param in field.kind.fields:
+ if self._IsPassedInterface(param):
+ self.kind_to_interfaces[key].add(param)
+ elif isinstance(param.kind, (module.Struct, module.Union)):
+ for iface in self._GatherReferencedInterfaces(param):
+ self.kind_to_interfaces[key].add(iface)
+ return self.kind_to_interfaces[key]
+
+ def _CheckParams(self, method, params):
+ # Note: we have to repeat _CheckParams for each method as each might have
+ # different AllowedContext= attributes. We cannot memoize this function,
+ # but can do so for gathering referenced interfaces as their RequireContext
+ # attributes do not change.
+ for param in params:
+ if self._IsPassedInterface(param):
+ self._CheckInterface(method, param)
+ elif isinstance(param.kind, (module.Struct, module.Union)):
+ for interface in self._GatherReferencedInterfaces(param):
+ self._CheckInterface(method, interface)
+
+ def _CheckMethod(self, method):
+ if method.parameters:
+ self._CheckParams(method, method.parameters)
+ if method.response_parameters:
+ self._CheckParams(method, method.response_parameters)
+
+ def CheckModule(self):
+ for interface in self.module.interfaces:
+ for method in interface.methods:
+ self._CheckMethod(method)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py
new file mode 100644
index 00000000..a6cd71e2
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py
@@ -0,0 +1,254 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+# Mojoms that we will use in multiple tests.
+basic_mojoms = {
+ 'level.mojom':
+ """
+ module level;
+ enum Level {
+ kHighest,
+ kMiddle,
+ kLowest,
+ };
+ """,
+ 'interfaces.mojom':
+ """
+ module interfaces;
+ import "level.mojom";
+ struct Foo {int32 bar;};
+ [RequireContext=level.Level.kHighest]
+ interface High {
+ DoFoo(Foo foo);
+ };
+ [RequireContext=level.Level.kMiddle]
+ interface Mid {
+ DoFoo(Foo foo);
+ };
+ [RequireContext=level.Level.kLowest]
+ interface Low {
+ DoFoo(Foo foo);
+ };
+ """
+}
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'restrictions'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _WriteBasicMojoms(self):
+ for filename, contents in basic_mojoms.items():
+ self.WriteFile(filename, contents)
+ return list(basic_mojoms.keys())
+
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('restrictions')
+ self.assertTrue(check_modules['restrictions'])
+
+ def testValidAnnotations(self):
+ mojoms = self._WriteBasicMojoms()
+
+ a = 'a.mojom'
+ self.WriteFile(
+ a, """
+ module a;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ [AllowedContext=level.Level.kHighest]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ interface PassesMedium {
+ [AllowedContext=level.Level.kMiddle]
+ DoMedium(pending_receiver<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumRem(pending_remote<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumAssoc(pending_associated_receiver<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumAssocRem(pending_associated_remote<interfaces.Mid> hi);
+ };
+ interface PassesLow {
+ [AllowedContext=level.Level.kLowest]
+ DoLow(pending_receiver<interfaces.Low> hi);
+ };
+
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ interface PassesNestedHigh {
+ [AllowedContext=level.Level.kHighest]
+ DoNestedHigh(Two two);
+ };
+
+ // Allowed as PassesHigh is not itself restricted.
+ interface PassesPassesHigh {
+ DoPass(pending_receiver<PassesHigh> hiho);
+ };
+ """)
+ mojoms.append(a)
+ self._ParseAndGenerate(mojoms)
+
+ def _testThrows(self, filename, content, regexp):
+ mojoms = self._WriteBasicMojoms()
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testMissingAnnotation(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ // err: missing annotation.
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testAllowTooLow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ // err: level is worse than required.
+ [AllowedContext=level.Level.kMiddle]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
+
+ def testWrongEnumInAllow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ enum Blah {
+ kZero,
+ };
+ interface PassesHigh {
+ // err: different enums.
+ [AllowedContext=Blah.kZero]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'but one of kind')
+
+ def testNotAnEnumInAllow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ interface PassesHigh {
+ // err: not an enum.
+ [AllowedContext=doopdedoo.mojom.kWhatever]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'not a valid enum value')
+
+ def testMissingAllowedForNestedStructs(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ interface PassesNestedHigh {
+ // err: missing annotation.
+ DoNestedHigh(Two two);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testMissingAllowedForNestedUnions(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ union Three {One one; Two two; };
+ interface PassesNestedHigh {
+ // err: missing annotation.
+ DoNestedHigh(Three three);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testMultipleInterfacesThrows(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ interface PassesMultipleInterfaces {
+ [AllowedContext=level.Level.kMiddle]
+ DoMultiple(
+ pending_remote<interfaces.Mid> mid,
+ pending_receiver<interfaces.High> hi,
+ One one
+ );
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
+
+ def testMultipleInterfacesAllowed(self):
+ """Multiple interfaces can be passed, all satisfy the level."""
+ mojoms = self._WriteBasicMojoms()
+
+ b = "b.mojom"
+ self.WriteFile(
+ b, """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ interface PassesMultipleInterfaces {
+ [AllowedContext=level.Level.kHighest]
+ DoMultiple(
+ pending_receiver<interfaces.High> hi,
+ pending_remote<interfaces.Mid> mid,
+ One one
+ );
+ };
+ """)
+ mojoms.append(b)
+ self._ParseAndGenerate(mojoms)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
new file mode 100755
index 00000000..4dd26d4a
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This utility concatenates several files into one. On Unix-like systems
+# it is equivalent to:
+# cat file1 file2 file3 ...files... > target
+#
+# The reason for writing a separate utility is that 'cat' is not available
+# on all supported build platforms, but Python is, and hence this provides
+# us with an easy and uniform way of doing this on all platforms.
+
+# for py2/py3 compatibility
+from __future__ import print_function
+
+import optparse
+import sys
+
+
+def Concatenate(filenames):
+ """Concatenate files.
+
+ Args:
+ filenames: Array of file names.
+ The last name is the target; all earlier ones are sources.
+
+ Returns:
+ True, if the operation was successful.
+ """
+ if len(filenames) < 2:
+ print("An error occurred generating %s:\nNothing to do." % filenames[-1])
+ return False
+
+ try:
+ with open(filenames[-1], "wb") as target:
+ for filename in filenames[:-1]:
+ with open(filename, "rb") as current:
+ target.write(current.read())
+ return True
+ except IOError as e:
+ print("An error occurred when writing %s:\n%s" % (filenames[-1], e))
+ return False
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.set_usage("""Concatenate several files into one.
+ Equivalent to: cat file1 ... > target.""")
+ (_options, args) = parser.parse_args()
+ sys.exit(0 if Concatenate(args) else 1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
new file mode 100755
index 00000000..7d56c9f9
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple utility which concatenates a set of files into a single output file
+while also stripping any goog.provide or goog.require lines. This allows us to
+provide a very primitive sort of "compilation" without any extra toolchain
+support and without having to modify otherwise compilable sources in the tree
+which use these directives.
+
+goog.provide lines are replaced with an equivalent invocation of
+mojo.internal.exportModule, which accomplishes essentially the same thing in an
+uncompiled context. A singular exception is made for the 'mojo.internal' export,
+which is instead replaced with an inlined assignment to initialize the
+namespace.
+"""
+
+from __future__ import print_function
+
+import optparse
+import re
+import sys
+
+
+_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
+_MOJO_EXPORT_MODULE_SYMBOL = "mojo.internal.exportModule"
+
+
+def FilterLine(filename, line, output):
+ if line.startswith("goog.require"):
+ return
+
+ if line.startswith("goog.provide"):
+ match = re.match(r"goog.provide\('([^']+)'\);", line)
+ if not match:
+ print("Invalid goog.provide line in %s:\n%s" % (filename, line))
+ sys.exit(1)
+
+ module_name = match.group(1)
+ if module_name == _MOJO_INTERNAL_MODULE_NAME:
+ output.write("self.mojo = { internal: {} };")
+ else:
+ output.write("%s('%s');\n" % (_MOJO_EXPORT_MODULE_SYMBOL, module_name))
+ return
+
+ output.write(line)
+
+def ConcatenateAndReplaceExports(filenames):
+ if len(filenames) < 2:
+ print("At least two filenames (one input and the output) are required.")
+ return False
+
+ try:
+ with open(filenames[-1], "w") as target:
+ for filename in filenames[:-1]:
+ with open(filename, "r") as current:
+ for line in current.readlines():
+ FilterLine(filename, line, target)
+ return True
+ except IOError as e:
+ print("Error generating %s\n: %s" % (filenames[-1], e))
+ return False
+
+def main():
+ parser = optparse.OptionParser()
+ parser.set_usage("""file1 [file2...] outfile
+ Concatenate several files into one, stripping Closure provide and
+ require directives along the way.""")
+ (_, args) = parser.parse_args()
+ sys.exit(0 if ConcatenateAndReplaceExports(args) else 1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py b/utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py
new file mode 100644
index 00000000..c6daff03
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py
@@ -0,0 +1,48 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generates a list of all files in a directory.
+
+This script takes in a directory and an output file name as input.
+It then reads the directory and creates a list of all file names
+in that directory. The list is written to the output file.
+There is also an option to pass in '-p' or '--pattern'
+which will check each file name against a regular expression
+pattern that is passed in. Only files which match the regex
+will be written to the list.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+
+from optparse import OptionParser
+
+sys.path.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
+
+from mojom.generate.generator import WriteFile
+
+
+def main():
+ parser = OptionParser()
+ parser.add_option('-d', '--directory', help='Read files from DIRECTORY')
+ parser.add_option('-o', '--output', help='Write list to FILE')
+ parser.add_option('-p',
+ '--pattern',
+ help='Only read files whose names match PATTERN',
+ default=".")
+ (options, _) = parser.parse_args()
+ pattern = re.compile(options.pattern)
+ files = [f for f in os.listdir(options.directory) if pattern.match(f)]
+
+ contents = '\n'.join(f for f in files) + '\n'
+ WriteFile(contents, options.output)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py b/utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
new file mode 100755
index 00000000..4a53e2bf
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generates a JSON typemap from its command-line arguments and dependencies.
+
+Each typemap should be specified in an command-line argument of the form
+key=value, with an argument of "--start-typemap" preceding each typemap.
+
+For example,
+generate_type_mappings.py --output=foo.typemap --start-typemap \\
+ public_headers=foo.h traits_headers=foo_traits.h \\
+ type_mappings=mojom.Foo=FooImpl
+
+generates a foo.typemap containing
+{
+ "c++": {
+ "mojom.Foo": {
+ "typename": "FooImpl",
+ "traits_headers": [
+ "foo_traits.h"
+ ],
+ "public_headers": [
+ "foo.h"
+ ]
+ }
+ }
+}
+
+Then,
+generate_type_mappings.py --dependency foo.typemap --output=bar.typemap \\
+ --start-typemap public_headers=bar.h traits_headers=bar_traits.h \\
+ type_mappings=mojom.Bar=BarImpl
+
+generates a bar.typemap containing
+{
+ "c++": {
+ "mojom.Bar": {
+ "typename": "BarImpl",
+ "traits_headers": [
+ "bar_traits.h"
+ ],
+ "public_headers": [
+ "bar.h"
+ ]
+ },
+ "mojom.Foo": {
+ "typename": "FooImpl",
+ "traits_headers": [
+ "foo_traits.h"
+ ],
+ "public_headers": [
+ "foo.h"
+ ]
+ }
+ }
+}
+"""
+
+import argparse
+import json
+import os
+import re
+import sys
+
+sys.path.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
+
+from mojom.generate.generator import WriteFile
+
+def ReadTypemap(path):
+ with open(path) as f:
+ return json.load(f)['c++']
+
+
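+# Illustrative shape of the JSON consumed by LoadCppTypemapConfig() below (a
+# sketch inferred from the parsing logic; the type names and header paths
+# shown are hypothetical):
+#
+# [
+#   {
+#     "types": [
+#       { "mojom": "foo.mojom.Bar", "cpp": "::foo::Bar", "move_only": true }
+#     ],
+#     "traits_headers": [ "foo/bar_mojom_traits.h" ],
+#     "traits_private_headers": []
+#   }
+# ]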
+def LoadCppTypemapConfig(path):
+ configs = {}
+ with open(path) as f:
+ for config in json.load(f):
+ for entry in config['types']:
+ configs[entry['mojom']] = {
+ 'typename': entry['cpp'],
+ 'forward_declaration': entry.get('forward_declaration', None),
+ 'public_headers': config.get('traits_headers', []),
+ 'traits_headers': config.get('traits_private_headers', []),
+ 'copyable_pass_by_value': entry.get('copyable_pass_by_value',
+ False),
+ 'default_constructible': entry.get('default_constructible', True),
+ 'force_serialize': entry.get('force_serialize', False),
+ 'hashable': entry.get('hashable', False),
+ 'move_only': entry.get('move_only', False),
+ 'nullable_is_same_type': entry.get('nullable_is_same_type', False),
+ 'non_copyable_non_movable': False,
+ }
+ return configs
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument(
+ '--dependency',
+ type=str,
+ action='append',
+ default=[],
+ help=('A path to another JSON typemap to merge into the output. '
+ 'This may be repeated to merge multiple typemaps.'))
+ parser.add_argument(
+ '--cpp-typemap-config',
+ type=str,
+ action='store',
+ dest='cpp_config_path',
+ help=('A path to a single JSON-formatted typemap config as emitted by '
+ 'GN when processing a mojom_cpp_typemap build rule.'))
+ parser.add_argument('--output',
+ type=str,
+ required=True,
+ help='The path to which to write the generated JSON.')
+ params, _ = parser.parse_known_args()
+ typemaps = {}
+ if params.cpp_config_path:
+ typemaps = LoadCppTypemapConfig(params.cpp_config_path)
+ missing = [path for path in params.dependency if not os.path.exists(path)]
+ if missing:
+ raise IOError('Missing dependencies: %s' % ', '.join(missing))
+ for path in params.dependency:
+ typemaps.update(ReadTypemap(path))
+
+ WriteFile(json.dumps({'c++': typemaps}, indent=2), params.output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py b/utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
new file mode 100755
index 00000000..cefee7a4
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This utility minifies JS files with terser.
+#
+# Instance of 'node' has no 'RunNode' member (no-member)
+# pylint: disable=no-member
+
+import argparse
+import os
+import sys
+
+_HERE_PATH = os.path.dirname(__file__)
+_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
+_CWD = os.getcwd()
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
+import node
+import node_modules
+
+
+def MinifyFile(input_file, output_file):
+ node.RunNode([
+ node_modules.PathToTerser(), input_file, '--mangle', '--compress',
+ '--comments', 'false', '--output', output_file
+ ])
+
+
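+# Illustrative invocation (hypothetical paths, based on the --input/--output
+# flags parsed in main() below):
+#
+#   minify_with_terser.py --input gen/foo.rollup.js --output gen/foo.min.js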
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input', required=True)
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args(argv)
+
+ # Delete the output file if it already exists. It may be a sym link to the
+ # input, because in non-optimized/pre-Terser builds the input file is copied
+ # to the output location with gn copy().
+ out_path = os.path.join(_CWD, args.output)
+ if os.path.exists(out_path):
+ os.remove(out_path)
+
+ MinifyFile(os.path.join(_CWD, args.input), out_path)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni b/utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
new file mode 100644
index 00000000..3f6e54e0
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
@@ -0,0 +1,2118 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/closure_compiler/closure_args.gni")
+import("//third_party/closure_compiler/compile_js.gni")
+import("//third_party/protobuf/proto_library.gni")
+import("//ui/webui/resources/tools/generate_grd.gni")
+import("//ui/webui/webui_features.gni")
+
+import("//build/config/cast.gni")
+
+# TODO(rockot): Maybe we can factor these dependencies out of //mojo. They're
+# used to conditionally enable message ID scrambling in a way which is
+# consistent across toolchains and which is affected by branded vs non-branded
+# Chrome builds. Ideally we could create some generic knobs here that could be
+# flipped elsewhere though.
+import("//build/config/chrome_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/features.gni")
+import("//build/config/nacl/config.gni")
+import("//build/toolchain/kythe.gni")
+import("//components/nacl/features.gni")
+import("//third_party/jinja2/jinja2.gni")
+import("//third_party/ply/ply.gni")
+import("//tools/ipc_fuzzer/ipc_fuzzer.gni")
+declare_args() {
+ # Indicates whether typemapping should be supported in this build
+ # configuration. This may be disabled when building external projects which
+ # depend on //mojo but which do not need/want all of the Chromium tree
+ # dependencies that come with typemapping.
+ #
+ # Note that (perhaps obviously) a huge amount of Chromium code will not build
+ # with typemapping disabled, so it is never valid to set this to |false| in
+ # any Chromium build configuration.
+ enable_mojom_typemapping = true
+
+ # Controls message ID scrambling behavior. If |true|, message IDs are
+ # scrambled (i.e. randomized based on the contents of //chrome/VERSION) on
+ # non-Chrome OS desktop platforms. Enabled on official builds by default.
+ # Set to |true| to enable message ID scrambling on a specific build.
+ # See also `enable_scrambled_message_ids` below for more details.
+ enable_mojom_message_id_scrambling = is_official_build
+
+ # Enables generating javascript fuzzing-related code and the bindings for the
+ # MojoLPM fuzzer targets. Off by default.
+ enable_mojom_fuzzer = false
+
+ # Enables Closure compilation of generated JS lite bindings. In environments
+ # where compilation is supported, any mojom target "foo" will also have a
+ # corresponding "foo_js_library_for_compile" target generated.
+ if (is_chromeos_ash) {
+ enable_mojom_closure_compile = enable_js_type_check && optimize_webui
+ }
+}
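+
+# The arguments declared above are regular GN build args and can be overridden
+# from a build's args.gn. Illustrative (hypothetical) values:
+#
+#   enable_mojom_fuzzer = true
+#   enable_mojom_message_id_scrambling = false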
+
+# Closure libraries are needed for mojom_closure_compile, and when
+# js_type_check is enabled on Ash.
+if (is_chromeos_ash) {
+ generate_mojom_closure_libraries =
+ enable_mojom_closure_compile || enable_js_type_check
+} else {
+ generate_mojom_closure_libraries = false
+}
+
+# NOTE: We would like to avoid scrambling message IDs where it doesn't add
+# value, so we limit the behavior to desktop builds for now. There is some
+# redundancy in the conditions here, but it is tolerated for clarity:
+# We're explicit about Mac, Windows, and Linux desktop support, but it's
+# also necessary to ensure that bindings in alternate toolchains (e.g.
+# NaCl IRT) are always consistent with the default toolchain; for that
+# reason we always enable scrambling within NaCl toolchains when possible,
+# as well as within the default toolchain when NaCl is enabled.
+#
+# Finally, because we *cannot* enable scrambling on Chrome OS (it would break
+# ARC) we have to explicitly opt out there even when NaCl is enabled (and
+# consequently also when building for NaCl toolchains.) For this reason we
+# check |target_os| explicitly, as it's consistent across all toolchains.
+#
+# TODO(crbug.com/1052397): Remove !chromeos_is_browser_only once
+# lacros-chrome switches to target_os="chromeos"
+enable_scrambled_message_ids =
+ enable_mojom_message_id_scrambling &&
+ (is_mac || is_win || (is_linux && !is_castos) ||
+ ((enable_nacl || is_nacl) &&
+ (target_os != "chromeos" && !chromeos_is_browser_only)))
+
+_mojom_tools_root = "//mojo/public/tools"
+_mojom_library_root = "$_mojom_tools_root/mojom/mojom"
+mojom_parser_script = "$_mojom_tools_root/mojom/mojom_parser.py"
+mojom_parser_sources = [
+ "$_mojom_library_root/__init__.py",
+ "$_mojom_library_root/error.py",
+ "$_mojom_library_root/fileutil.py",
+ "$_mojom_library_root/generate/__init__.py",
+ "$_mojom_library_root/generate/check.py",
+ "$_mojom_library_root/generate/generator.py",
+ "$_mojom_library_root/generate/module.py",
+ "$_mojom_library_root/generate/pack.py",
+ "$_mojom_library_root/generate/template_expander.py",
+ "$_mojom_library_root/generate/translate.py",
+ "$_mojom_library_root/parse/__init__.py",
+ "$_mojom_library_root/parse/ast.py",
+ "$_mojom_library_root/parse/conditional_features.py",
+ "$_mojom_library_root/parse/lexer.py",
+ "$_mojom_library_root/parse/parser.py",
+ "//tools/diagnosis/crbug_1001171.py",
+]
+
+mojom_generator_root = "$_mojom_tools_root/bindings"
+mojom_generator_script = "$mojom_generator_root/mojom_bindings_generator.py"
+mojom_generator_sources =
+ mojom_parser_sources + [
+ "$mojom_generator_root/checks/__init__.py",
+ "$mojom_generator_root/checks/mojom_attributes_check.py",
+ "$mojom_generator_root/checks/mojom_definitions_check.py",
+ "$mojom_generator_root/checks/mojom_interface_feature_check.py",
+ "$mojom_generator_root/checks/mojom_restrictions_check.py",
+ "$mojom_generator_root/generators/__init__.py",
+ "$mojom_generator_root/generators/cpp_util.py",
+ "$mojom_generator_root/generators/mojom_cpp_generator.py",
+ "$mojom_generator_root/generators/mojom_java_generator.py",
+ "$mojom_generator_root/generators/mojom_js_generator.py",
+ "$mojom_generator_root/generators/mojom_mojolpm_generator.py",
+ "$mojom_generator_root/generators/mojom_ts_generator.py",
+ "$mojom_generator_script",
+ "//build/action_helpers.py",
+ "//build/gn_helpers.py",
+ "//build/zip_helpers.py",
+ ]
+
+if (enable_scrambled_message_ids) {
+ declare_args() {
+ # The path to a file whose contents can be used as the basis for a message
+ # ID scrambling salt.
+ mojom_message_id_salt_path = "//chrome/VERSION"
+ }
+
+ assert(mojom_message_id_salt_path != "")
+ message_scrambling_args = [
+ "--scrambled_message_id_salt_path",
+ rebase_path(mojom_message_id_salt_path, root_build_dir),
+ ]
+ message_scrambling_inputs = [ mojom_message_id_salt_path ]
+} else {
+ message_scrambling_args = []
+ message_scrambling_inputs = []
+}
+
+# Generates targets for building C++, JavaScript and Java bindings from mojom
+# files. The output files will go under the generated file directory tree with
+# the same path as each input file.
+#
+# Other targets should depend on one of these generated targets (where "foo"
+# is the target name):
+#
+# foo
+# C++ bindings.
+#
+# foo_blink
+# C++ bindings using Blink standard types.
+#
+# foo_java
+# Java bindings.
+#
+# foo_js
+# JavaScript bindings; used as compile-time dependency.
+#
+# foo_js_data_deps
+# JavaScript bindings; used as run-time dependency.
+#
+# Parameters:
+#
+# sources (optional if one of the deps sets listed below is present)
+# List of source .mojom files to compile.
+#
+# deps (optional)
+# Note: this can contain only other mojom targets.
+#
+# DEPRECATED: This is synonymous with public_deps because all mojom
+# dependencies must be public by design. Please use public_deps.
+#
+# public_deps (optional)
+# Note: this can contain only other mojom targets.
+#
+# parser_deps (optional)
+# List of non-mojom targets required for the mojom sources to be parsed.
+#
+# import_dirs (optional)
+# List of import directories that will get added when processing sources.
+#
+# input_root_override (optional)
+# Root path for the .mojom files used to generate the namespaces for
+# interfaces. Useful with targets outside //, e.g. in parent directories
+#       above "//". The default input root is "//".
+#       Example: Vivaldi's source root is "//vivaldi/",
+#       and "//vivaldi/chromium/" is "//".
+#       In such cases, not using this argument leads to the output files being
+# located in different directories than expected.
+#
+# testonly (optional)
+#
+# visibility (optional)
+#
+# visibility_blink (optional)
+# The value to use for visibility for the blink variant. If unset,
+# |visibility| is used.
+#
+# cpp_only (optional)
+# If set to true, only the C++ bindings targets will be generated.
+#
+# NOTE: If the global |enable_mojom_fuzzer| build arg is true, JS bindings
+# will still be generated even when |cpp_only| is set to |true|, unless
+#       you also set |enable_js_fuzzing| to |false| in your mojom target.
+#
+# cpp_typemaps (optional)
+# A list of typemaps to be applied to the generated C++ bindings for this
+# mojom target. Note that this only applies to the non-Blink variant of
+# generated C++ bindings.
+#
+# Every typemap is a GN scope describing how one or more mojom types maps
+# to a non-mojom C++ type, including necessary deps and headers required
+# for the mapping to work. See the Typemaps section below.
+#
+# blink_cpp_typemaps (optional)
+# Same as above, but for the Blink variant of generated C++ bindings.
+#
+# cpp_proxy_target (optional)
+# The name of a target which all C++ dependencies will link against
+# instead of linking directly against this mojom target's generated C++
+#      sources. Normally, when declaring the mojom("foo") target, GN
+# emits a source_set or component target named "foo" which encompasses the
+# default variant of generated C++ bindings. This changes that to instead
+# emit a group("foo") which merely forwards public_deps to the named
+# `cpp_proxy_target`. That target must in turn depend on
+# "foo_cpp_sources".
+#
+# This is useful primarily in conjunction with export_define et al to
+# embed generated C++ bindings within an existing component target.
+#
+# blink_cpp_proxy_target (optional)
+# Same concept as `cpp_proxy_target` above, but affects the generated
+# "foo_blink" Blink-variant C++ bindings.
+#
+# cpp_configs (optional)
+# A list of extra configs to apply to the default variant of generated C++
+# bindings.
+#
+# blink_cpp_configs (optional)
+# A list of extra configs to apply to the Blink variant of generated C++
+# bindings.
+#
+# mojom_source_deps (optional)
+# A list of mojoms this target depends upon. This is equivalent to
+# public_deps except that the C++ bindings depend on each of the named
+# "foo" targets' "foo_cpp_sources" rather than on foo's
+# `cpp_proxy_target`. It only makes sense to use this for dependencies
+# that set `cpp_proxy_target`, and only when the dependent mojom() would
+# otherwise have circular dependencies with that proxy target.
+#
+# mojom_blink_source_deps (optional)
+# Same as above but depends on "foo_blink_cpp_sources" and is used for
+# dependencies that specify a `blink_cpp_proxy_target`.
+#
+# generate_java (optional)
+# If set to true, Java bindings are generated for Android builds. If
+# |cpp_only| is set to true, it overrides this to prevent generation of
+# Java bindings.
+#
+# enable_js_fuzzing (optional)
+# Enables generation of javascript fuzzing sources for the target if the
+# global build arg |enable_mojom_fuzzer| is also set to |true|.
+# Defaults to |true|. If JS fuzzing generation is enabled for a target,
+# the target will always generate JS bindings even if |cpp_only| is set to
+# |true|. See note above.
+#
+# enable_mojolpm_fuzzing (optional)
+# Enables generation of fuzzing sources for the target if the global build
+# arg |enable_mojom_fuzzer| is also set to |true|. Defaults to |true|.
+#
+# support_lazy_serialization (optional)
+# If set to |true|, generated C++ bindings will effectively prefer to
+# transmit messages in an unserialized form when going between endpoints
+# in the same process. This avoids the runtime cost of serialization,
+#       deserialization, and validation logic at the expense of increased
+# code size. Defaults to |false|.
+#
+# disable_variants (optional)
+# If |true|, no variant sources will be generated for the target. Defaults
+# to |false|.
+#
+# disallow_native_types (optional)
+# If set to |true|, mojoms in this target may not apply the [Native]
+# attribute to struct or enum declarations. This avoids emitting code
+# which depends on legacy IPC serialization. Default is |false|, meaning
+# [Native] types are allowed.
+#
+# disallow_interfaces (optional)
+# If set to |true|, mojoms in this target may not define interfaces.
+# Generates bindings with a smaller set of dependencies. Defaults to
+# |false|.
+#
+# scramble_message_ids (optional)
+# If set to |true| (the default), generated mojom interfaces will use
+# scrambled ordinal identifiers in encoded messages.
+#
+# component_output_prefix (optional)
+# The prefix to use for the output_name of any component library emitted
+# for generated C++ bindings. If this is omitted, C++ bindings targets are
+# emitted as source_sets instead. Because this controls the name of the
+# output shared library binary in the root output directory, it must be
+# unique across the entire build configuration.
+#
+# This is required if |component_macro_prefix| is specified.
+#
+# component_macro_prefix (optional)
+# This specifies a macro prefix to use for component export macros and
+# should therefore be globally unique in the project. For example if this
+# is "FOO_BAR", then the generated C++ sources will be built with
+# IS_FOO_BAR_{suffix}_IMPL defined, and the generated public headers will
+# annotate public symbol definitions with
+# COMPONENT_EXPORT(FOO_BAR_{suffix}). "suffix" in this case depends on
+# which internal subtarget is generating the code (e.g. "SHARED", or a
+# variant name like "BLINK").
+#
+# enabled_features (optional)
+# Definitions in a mojom file can be guarded by an EnableIf attribute. If
+# the value specified by the attribute does not match any items in the
+# list of enabled_features, the definition will be disabled, with no code
+# emitted for it.
+#
+# generate_closure_exports (optional)
+#       Generated JS lite bindings will use goog.provide and goog.require
+#       annotations to export their symbols and import core Mojo bindings support
+# and other mojom dependency modules. Use this if you plan to compile your
+# bindings into a larger JS binary. Defaults to |false|, instead
+# generating JS lite bindings which assume they will be manually loaded in
+# correct dependency order. Note that this only has an effect if
+# the |enable_mojom_closure_compile| global arg is set to |true| as well.
+#
+# generate_webui_js_bindings (optional)
+# Generate WebUI bindings in JavaScript rather than TypeScript. Defaults
+# to false. ChromeOS only parameter.
+#
+# generate_legacy_js_bindings (optional)
+# Generate js_data_deps target containing legacy JavaScript bindings files
+# for Blink tests and other non-WebUI users when generating TypeScript
+# bindings for WebUI. Ignored if generate_webui_js_bindings is set to
+# true.
+#
+# js_generate_struct_deserializers (optional)
+#       Generates JS deserialize methods for structs.
+#
+# extra_cpp_template_paths (optional)
+# List of extra C++ templates that are used to generate additional source
+# and/or header files. The templates should end with extension ".tmpl".
+#
+# webui_module_path (optional)
+# The path or URL at which modules generated by this target will be
+# accessible to WebUI pages. This may either be an absolute path or
+# a full URL path starting with "chrome://resources/mojo". If this path
+# is not specified, WebUI bindings will not be generated.
+#
+# If an absolute path, a WebUI page may only import these modules if
+# they are added to that page's data source (usually by adding the
+# modules to the mojo_files list for build_webui(), or by listing the
+# files as inputs to the page's ts_library() and/or generate_grd() build
+#       steps).
+#
+#       If this is instead a URL string starting with
+# "chrome://resources/mojo", the resulting bindings files should
+# be added to one of the lists in ui/webui/resources/mojo/BUILD.gn,
+# at which point they will be made available to all WebUI pages at the
+# given URL.
+#
+# Note: WebUI module bindings are generated in TypeScript by default,
+# unless |generate_webui_js_bindings| is specified as true.
+#
+# The following parameters are used to support the component build. They are
+# needed so that bindings which are linked with a component can use the same
+# export settings for classes. The first three are for the chromium variant, and
+# the last three are for the blink variant. These parameters can also override
+# |component_macro_prefix| for a specific variant, allowing e.g. one variant
+# to be linked into a larger non-mojom component target, while all other
+# variants get their own unique component target.
+# export_class_attribute (optional)
+# The attribute to add to the class declaration. e.g. "CONTENT_EXPORT"
+# export_define (optional)
+# A define to be added to the source_set which is needed by the export
+# header. e.g. "CONTENT_IMPLEMENTATION=1"
+# export_header (optional)
+# A header to be added to the generated bindings to support the component
+# build. e.g. "content/common/content_export.h"
+# export_class_attribute_blink (optional)
+# export_define_blink (optional)
+# export_header_blink (optional)
+# These three parameters are the blink variants of the previous 3.
+#
+# The following parameters are used to correct component build dependencies.
+# They are needed so mojom-mojom dependencies follow the rule that dependencies
+# on a source set in another component are replaced by a dependency on the
+# containing component. The first two are for the chromium variant; the other
+# two are for the blink variant.
+# overridden_deps (optional)
+# The list of mojom deps to be overridden.
+# component_deps (optional)
+# The list of component deps to add to replace overridden_deps.
+# overridden_deps_blink (optional)
+# component_deps_blink (optional)
+# These two parameters are the blink variants of the previous two.
+#
+# check_includes_blink (optional)
+# Overrides the check_includes variable for the blink variant.
+# If check_includes_blink is not defined, the check_includes variable
+# retains its original value.
+#
+# Typemaps
+# ========
+# The cpp_typemaps and blink_cpp_typemaps each specify an optional list of
+# typemapping configurations. Each configuration is a GN scope with metadata
+# describing what and how to map.
+#
+# Typemap scope parameters:
+# types
+# A list of type specifications for this typemap. Each type specification
+# is a nested GN scope which can be expressed with the following syntax:
+#
+# {
+# mojom = "foo.mojom.Bar"
+# cpp = "::foo::LegitBar"
+# move_only = true
+# # etc...
+# }
+#
+# Each type specification supports the following values:
+#
+# mojom (required)
+# The fully qualified name of a mojom type to be mapped. This is a
+# string like "foo.mojom.Bar".
+#
+# cpp (required)
+# The fully qualified name of the C++ type to which the mojom type
+# should be mapped in generated bindings. This is a string like
+# "::base::Value" or "std::vector<::base::Value>".
+#
+# copyable_pass_by_value (optional)
+# A boolean value (default false) which effectively indicates
+# whether the C++ type is very cheap to copy. If so, generated
+# bindings will pass by value but not use std::move() at call sites.
+#
+# default_constructible (optional)
+# A boolean value (default true) which indicates whether the C++
+# type is default constructible. If a C++ type is not default
+# constructible (e.g. the implementor of the type prefers not to
+# publicly expose a default constructor that creates an object in an
+#           invalid state), Mojo will instead construct the C++ type with an
+# argument of the type `mojo::DefaultConstruct::Tag` (essentially a
+# passkey-like type specifically for this use case).
+#
+# force_serialize (optional)
+# A boolean value (default false) which disables lazy serialization
+# of the typemapped type if lazy serialization is enabled for the
+# mojom target applying this typemap.
+#
+# forward_declaration (optional)
+# A forward declaration of the C++ type, which bindings that don't
+# need the full type definition can use to reduce the size of
+# the generated code. This is a string like
+# "namespace base { class Value; }".
+#
+# hashable (optional)
+# A boolean value (default false) indicating whether the C++ type is
+#           hashable. Set this to true only if the type really is hashable AND
+#           hashing is needed (i.e. the type is used as the key of a mojom map).
+#
+# move_only (optional)
+# A boolean value (default false) which indicates whether the C++
+# type is move-only. If true, generated bindings will pass the type
+# by value and use std::move() at call sites.
+#
+# nullable_is_same_type (optional)
+# A boolean value (default false) which indicates that the C++ type
+# has some baked-in semantic notion of a "null" state. If true, the
+# traits for the type must define IsNull and SetToNull methods.
+#
+# When false, nullable fields are represented by wrapping the C++
+# type with absl::optional, and null values are simply
+# absl::nullopt.
+#
+# Additional typemap scope parameters:
+#
+# traits_headers (optional)
+# Headers which must be included in the generated mojom in order for
+# serialization to be possible. This generally means including at least
+# the header for the corresponding mojom traits definitions.
+#
+# traits_private_headers (optional)
+# Headers which must be included in generated C++ serialization code for
+# a mojom using the typemap. This should be used only when including a
+# header in |traits_headers| is problematic for compilation, as is
+# sometimes the case with legacy IPC message headers.
+#
+# traits_sources (optional)
+# The references to the source files (typically a single .cc and .h file)
+#       defining an appropriate set of EnumTraits or StructTraits, etc. for
+#       the type-mapping. Using this will cause the listed sources to be
+# integrated directly into the dependent mojom's generated type-mapping
+# targets.
+#
+# Prefer using |traits_public_deps| over inlined |traits_sources|, as this
+# will generally lead to easier build maintenance over time.
+#
+# NOTE: If a typemap is shared by Blink and non-Blink bindings, you cannot
+# use this and MUST use |traits_public_deps| to reference traits built
+# within a separate target.
+#
+# traits_deps / traits_public_deps (optional)
+# Any dependencies of sources in |traits_headers| or |traits_sources| must
+# be listed here.
+#
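+# Example (an illustrative sketch only; the target, source, type and
+# dependency names below are hypothetical):
+#
+#   mojom("interfaces") {
+#     sources = [ "frobinator.mojom" ]
+#     public_deps = [ "//url/mojom:url_mojom_gurl" ]
+#     cpp_typemaps = [
+#       {
+#         types = [
+#           {
+#             mojom = "foo.mojom.Frobinator"
+#             cpp = "::foo::Frobinator"
+#             move_only = true
+#           },
+#         ]
+#         traits_headers = [ "//foo/frobinator_mojom_traits.h" ]
+#         traits_public_deps = [ "//foo:frobinator_traits" ]
+#       },
+#     ]
+#   }
+#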
+template("mojom") {
+ assert(
+ defined(invoker.sources) || defined(invoker.deps) ||
+ defined(invoker.public_deps),
+ "\"sources\" or \"deps\" must be defined for the $target_name template.")
+
+ if (defined(invoker.export_class_attribute) ||
+ defined(invoker.export_define) || defined(invoker.export_header)) {
+ assert(defined(invoker.export_class_attribute))
+ assert(defined(invoker.export_define) || defined(invoker.cpp_configs))
+ assert(defined(invoker.export_header))
+ }
+ if (defined(invoker.export_class_attribute_blink) ||
+ defined(invoker.export_define_blink) ||
+ defined(invoker.export_header_blink)) {
+ assert(defined(invoker.export_class_attribute_blink))
+ assert(defined(invoker.export_define_blink) ||
+ defined(invoker.blink_cpp_configs))
+ assert(defined(invoker.export_header_blink))
+
+ # Not all platforms use the Blink variant, so make sure GN doesn't complain
+ # about these values being inconsequential.
+ not_needed(invoker,
+ [
+ "export_class_attribute_blink",
+ "export_define_blink",
+ "export_header_blink",
+ ])
+ }
+ if (defined(invoker.overridden_deps) || defined(invoker.component_deps)) {
+ assert(defined(invoker.overridden_deps))
+ assert(defined(invoker.component_deps))
+ }
+
+ if (defined(invoker.overridden_deps_blink) ||
+ defined(invoker.component_deps_blink)) {
+ assert(defined(invoker.overridden_deps_blink))
+ assert(defined(invoker.component_deps_blink))
+ }
+
+ # Type-mapping may be disabled or we may not generate C++ bindings.
+ not_needed(invoker,
+ [
+ "cpp_typemaps",
+ "blink_cpp_typemaps",
+ ])
+
+ require_full_cpp_deps =
+ !defined(invoker.disallow_native_types) ||
+ !invoker.disallow_native_types || !defined(invoker.disallow_interfaces) ||
+ !invoker.disallow_interfaces
+
+ all_deps = []
+ mojom_cpp_deps = []
+ if (defined(invoker.deps)) {
+ all_deps += invoker.deps
+ mojom_cpp_deps += invoker.deps
+ }
+ if (defined(invoker.public_deps)) {
+ all_deps += invoker.public_deps
+ mojom_cpp_deps += invoker.public_deps
+ }
+ if (defined(invoker.mojom_source_deps)) {
+ all_deps += invoker.mojom_source_deps
+ }
+ if (defined(invoker.mojom_blink_source_deps)) {
+ all_deps += invoker.mojom_blink_source_deps
+ }
+ not_needed([ "mojom_deps" ])
+
+ if (defined(invoker.component_macro_prefix)) {
+ assert(defined(invoker.component_output_prefix))
+ }
+
+ group("${target_name}__is_mojom") {
+ }
+
+ # Explicitly ensure that all dependencies (invoker.deps and
+ # invoker.public_deps) are mojom targets.
+ group("${target_name}__check_deps_are_all_mojom") {
+ deps = []
+ foreach(d, all_deps) {
+ name = get_label_info(d, "label_no_toolchain")
+ toolchain = get_label_info(d, "toolchain")
+ deps += [ "${name}__is_mojom(${toolchain})" ]
+ }
+ }
+
+ sources_list = []
+ if (defined(invoker.sources)) {
+ sources_list = invoker.sources
+ }
+
+ # Listed sources may be relative to the current target dir, or they may be
+ # absolute paths, including paths to generated mojom files. While those are
+ # fine as-is for input references, deriving output paths can be more subtle.
+ #
+ # Here we rewrite all source paths to be relative to the root build dir and
+ # strip any root_gen_dir prefixes.
+ #
+ # So for a target in //foo/bar with:
+ #
+ # sources = [
+ # "a.mojom",
+ # "b/c.mojom",
+ # "//baz/d.mojom",
+ # "$target_gen_dir/e.mojom",
+ # ]
+ #
+ # output_file_base_paths will be:
+ #
+ # [
+ # "foo/bar/a.mojom",
+ # "foo/bar/b/c.mojom",
+ # "baz/d.mojom",
+ # "foo/bar/e.mojom",
+ # ]
+ #
+ # This result is essentially a list of base filename paths which are suitable
+ # for the naming of any generated output files derived from their
+ # corresponding input mojoms. These paths are always considered to be relative
+ # to root_gen_dir.
+ if (defined(invoker.input_root_override)) {
+ source_abspaths = rebase_path(sources_list, invoker.input_root_override)
+ } else {
+ source_abspaths = rebase_path(sources_list, "//")
+ }
+ output_file_base_paths = []
+ foreach(path, source_abspaths) {
+ output_file_base_paths +=
+ [ string_replace(path, rebase_path(root_gen_dir, "//") + "/", "") ]
+ }
+
+ # Sanity check that either all input files have a .mojom extension, or
+ # all input files have a .test-mojom extension AND |testonly| is |true|.
+ sources_list_filenames =
+ process_file_template(sources_list, "{{source_file_part}}")
+ sources_list_filenames_with_mojom_extension =
+ process_file_template(sources_list, "{{source_name_part}}.mojom")
+ if (sources_list_filenames != sources_list_filenames_with_mojom_extension) {
+ sources_list_filenames_with_test_mojom_extension =
+ process_file_template(sources_list, "{{source_name_part}}.test-mojom")
+ if (sources_list_filenames ==
+ sources_list_filenames_with_test_mojom_extension) {
+ assert(
+ defined(invoker.testonly) && invoker.testonly,
+ "mojom targets for .test-mojom files must set |testonly| to |true|")
+ } else {
+ assert(
+ false,
+ "One or more mojom files has an invalid extension. The only " +
+ "allowed extensions are .mojom and .test-mojom, and any given " +
+ "mojom target must use one or the other exclusively.")
+ }
+ }
+
+ build_metadata_filename = "$target_gen_dir/$target_name.build_metadata"
+ build_metadata = {
+ }
+ build_metadata.sources = rebase_path(sources_list, target_gen_dir)
+ build_metadata.deps = []
+ foreach(dep, all_deps) {
+ dep_target_gen_dir = get_label_info(dep, "target_gen_dir")
+ dep_name = get_label_info(dep, "name")
+ build_metadata.deps +=
+ [ rebase_path("$dep_target_gen_dir/$dep_name.build_metadata",
+ target_gen_dir) ]
+ }
+ write_file(build_metadata_filename, build_metadata, "json")
+
+ generate_js_fuzzing =
+ (!defined(invoker.enable_js_fuzzing) || invoker.enable_js_fuzzing) &&
+ enable_mojom_fuzzer && (!defined(invoker.testonly) || !invoker.testonly)
+
+ generate_mojolpm_fuzzing =
+ (!defined(invoker.enable_mojolpm_fuzzing) ||
+ invoker.enable_mojolpm_fuzzing) && enable_mojom_fuzzer &&
+ (!defined(invoker.testonly) || !invoker.testonly)
+
+ parser_target_name = "${target_name}__parser"
+ parser_deps = []
+ foreach(dep, all_deps) {
+ _label = get_label_info(dep, "label_no_toolchain")
+ parser_deps += [ "${_label}__parser" ]
+ }
+ if (defined(invoker.parser_deps)) {
+ parser_deps += invoker.parser_deps
+ }
+ if (sources_list == []) {
+ # Even without sources we generate a parser target to at least forward
+ # other parser dependencies.
+ group(parser_target_name) {
+ public_deps = parser_deps
+ }
+ } else {
+ enabled_features = []
+ if (defined(invoker.enabled_features)) {
+ enabled_features += invoker.enabled_features
+ }
+ if (is_posix) {
+ enabled_features += [ "is_posix" ]
+ }
+ if (is_android) {
+ enabled_features += [ "is_android" ]
+ } else if (is_chromeos_ash) {
+ enabled_features += [
+ "is_chromeos",
+ "is_chromeos_ash",
+ ]
+ } else if (is_chromeos_lacros) {
+ enabled_features += [
+ "is_chromeos",
+ "is_chromeos_lacros",
+ ]
+ } else if (is_fuchsia) {
+ enabled_features += [ "is_fuchsia" ]
+ } else if (is_ios) {
+ enabled_features += [ "is_ios" ]
+ } else if (is_linux) {
+ enabled_features += [ "is_linux" ]
+ } else if (is_mac) {
+ enabled_features += [ "is_mac" ]
+ } else if (is_win) {
+ enabled_features += [ "is_win" ]
+ }
+
+ if (is_apple) {
+ enabled_features += [ "is_apple" ]
+ }
+
+ action(parser_target_name) {
+ allow_remote = true
+ custom_processor = "mojom_parser"
+ script = mojom_parser_script
+ inputs = mojom_parser_sources + ply_sources + [ build_metadata_filename ]
+ sources = sources_list
+ public_deps = parser_deps
+ outputs = []
+ foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ outputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+ }
+
+ filelist = []
+ foreach(source, sources_list) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
+
+ args = [
+ # Resolve relative input mojom paths against both the root src dir and
+ # the root gen dir.
+ "--input-root",
+ rebase_path("//.", root_build_dir),
+ "--input-root",
+ rebase_path(root_gen_dir, root_build_dir),
+
+ "--output-root",
+ rebase_path(root_gen_dir, root_build_dir),
+
+ "--mojom-file-list=" + rebase_path(rsp_file, root_build_dir),
+
+ "--check-imports",
+ rebase_path(build_metadata_filename, root_build_dir),
+ ]
+
+ if (defined(invoker.input_root_override)) {
+ args += [
+ "--input-root",
+ rebase_path(invoker.input_root_override, root_build_dir),
+ ]
+ }
+
+ foreach(enabled_feature, enabled_features) {
+ args += [
+ "--enable-feature",
+ enabled_feature,
+ ]
+ }
+
+ if (defined(invoker.webui_module_path)) {
+ args += [
+ "--add-module-metadata",
+ "webui_module_path=${invoker.webui_module_path}",
+ ]
+ if (defined(invoker.generate_webui_js_bindings) &&
+ invoker.generate_webui_js_bindings) {
+ args += [
+ "--add-module-metadata",
+ "generate_webui_js=True",
+ ]
+ }
+ }
+ }
+ }
+
+ generator_cpp_message_ids_target_name = "${target_name}__generate_message_ids"
+
+ # Generate code that is shared by different variants.
+ if (sources_list != []) {
+ base_dir = "//"
+ if (defined(invoker.input_root_override)) {
+ base_dir = invoker.input_root_override
+ }
+
+ common_generator_args = [
+ "--use_bundled_pylibs",
+ "-o",
+ rebase_path(root_gen_dir, root_build_dir),
+ "generate",
+ "-d",
+ rebase_path(base_dir, root_build_dir),
+ "-I",
+ rebase_path("//", root_build_dir),
+ "--bytecode_path",
+ rebase_path("$root_gen_dir/mojo/public/tools/bindings", root_build_dir),
+ ]
+ if (defined(invoker.input_root_override)) {
+ common_generator_args += [
+ "-I",
+ rebase_path(invoker.input_root_override, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.disallow_native_types) &&
+ invoker.disallow_native_types) {
+ common_generator_args += [ "--disallow_native_types" ]
+ }
+
+ if (defined(invoker.disallow_interfaces) && invoker.disallow_interfaces) {
+ common_generator_args += [ "--disallow_interfaces" ]
+ }
+
+ if (defined(invoker.import_dirs)) {
+ foreach(import_dir, invoker.import_dirs) {
+ common_generator_args += [
+ "-I",
+ rebase_path(import_dir, root_build_dir),
+ ]
+ }
+ }
+
+ if (defined(invoker.component_macro_prefix)) {
+ shared_component_export_macro =
+ "COMPONENT_EXPORT(${invoker.component_macro_prefix}_SHARED)"
+ shared_component_impl_macro =
+ "IS_${invoker.component_macro_prefix}_SHARED_IMPL"
+ shared_component_output_name = "${invoker.component_output_prefix}_shared"
+ } else if (defined(invoker.export_class_attribute_shared) ||
+ defined(invoker.export_class_attribute)) {
+ if (defined(invoker.export_class_attribute_shared)) {
+ assert(defined(invoker.export_header_shared))
+ shared_component_export_macro = invoker.export_class_attribute_shared
+ shared_component_impl_macro = invoker.export_define_shared
+ } else {
+ assert(!defined(invoker.export_header_shared))
+
+ # If no explicit shared attribute/define was provided by the invoker,
+ # we derive some reasonable settings from the default variant.
+ shared_component_export_macro = "COMPONENT_EXPORT(MOJOM_SHARED_" +
+ invoker.export_class_attribute + ")"
+ shared_component_impl_macro =
+ "IS_MOJOM_SHARED_" + invoker.export_class_attribute + "_IMPL"
+ }
+
+ if (defined(invoker.component_output_prefix)) {
+ shared_component_output_name =
+ "${invoker.component_output_prefix}_shared"
+ } else {
+ shared_component_output_name = "${target_name}_shared"
+ }
+ }
+
+ action(generator_cpp_message_ids_target_name) {
+ allow_remote = true
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources = sources_list +
+ [ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip" ]
+ deps = [
+ ":$parser_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+ if (defined(invoker.parser_deps)) {
+ deps += invoker.parser_deps
+ }
+ outputs = []
+ args = common_generator_args
+ filelist = []
+ foreach(source, sources_list) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ }
+ foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+ outputs += [ "$root_gen_dir/$base_path-shared-message-ids.h" ]
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
+
+ args += [
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
+ "--generate_non_variant_code",
+ "--generate_message_ids",
+ "-g",
+ "c++",
+ ]
+
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
+ }
+ }
+
+ generator_shared_target_name = "${target_name}_shared__generator"
+
+ action(generator_shared_target_name) {
+ allow_remote = true
+ visibility = [ ":*" ]
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources = sources_list +
+ [ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip" ]
+ deps = [
+ ":$parser_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+ if (defined(invoker.parser_deps)) {
+ deps += invoker.parser_deps
+ }
+
+ outputs = []
+ args = common_generator_args
+ filelist = []
+ foreach(source, sources_list) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ }
+ foreach(base_path, output_file_base_paths) {
+ # Need the mojom-module as an input to this action.
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+
+ outputs += [
+ "$root_gen_dir/$base_path-features.h",
+ "$root_gen_dir/$base_path-params-data.h",
+ "$root_gen_dir/$base_path-shared-internal.h",
+ "$root_gen_dir/$base_path-shared.cc",
+ "$root_gen_dir/$base_path-shared.h",
+ ]
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
+
+ args += [
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
+ "--generate_non_variant_code",
+ "-g",
+ "c++",
+ ]
+
+ if (defined(shared_component_export_macro)) {
+ args += [
+ "--export_attribute",
+ shared_component_export_macro,
+ "--export_header",
+ "base/component_export.h",
+ ]
+ }
+
+ # Enable adding annotations to generated C++ headers that are used for
+ # cross-references in CodeSearch.
+ if (enable_kythe_annotations) {
+ args += [ "--enable_kythe_annotations" ]
+ }
+ }
+ } else {
+ group(generator_cpp_message_ids_target_name) {
+ }
+ }
+
+ shared_cpp_sources_target_name = "${target_name}_shared_cpp_sources"
+ source_set(shared_cpp_sources_target_name) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
+ deps = []
+ public_deps = []
+ if (output_file_base_paths != []) {
+ sources = []
+ foreach(base_path, output_file_base_paths) {
+ sources += [
+ "$root_gen_dir/$base_path-features.h",
+ "$root_gen_dir/$base_path-params-data.h",
+ "$root_gen_dir/$base_path-shared-internal.h",
+ "$root_gen_dir/$base_path-shared.cc",
+ "$root_gen_dir/$base_path-shared.h",
+ ]
+ }
+ public_deps += [ ":$generator_shared_target_name" ]
+ }
+ if (require_full_cpp_deps) {
+ public_deps += [ "//mojo/public/cpp/bindings" ]
+ } else {
+ public_deps += [ "//mojo/public/cpp/bindings:bindings_base" ]
+ }
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append shared_cpp_sources_suffix
+ # to get the cpp dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps += [ "${full_name}_shared" ]
+ }
+ if (defined(shared_component_impl_macro)) {
+ defines = [ shared_component_impl_macro ]
+ }
+ }
+
+ shared_cpp_library_target_name = "${target_name}_shared"
+ if (defined(shared_component_output_name)) {
+ component(shared_cpp_library_target_name) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ output_name = "$shared_component_output_name"
+ public_deps = [ ":$shared_cpp_sources_target_name" ]
+ }
+ } else {
+ group(shared_cpp_library_target_name) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ public_deps = [ ":$shared_cpp_sources_target_name" ]
+ }
+ }
+
+ if (generate_mojolpm_fuzzing) {
+ # This block generates the proto files used for the MojoLPM fuzzer,
+ # and the corresponding proto targets that will be linked in the fuzzer
+ # targets. These are independent of the typemappings, and can be done
+ # separately here.
+
+ generator_mojolpm_proto_target_name =
+ "${target_name}_mojolpm_proto_generator"
+
+ action(generator_mojolpm_proto_target_name) {
+ allow_remote = true
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources =
+ invoker.sources + [
+ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip",
+ "$root_gen_dir/mojo/public/tools/bindings/mojolpm_templates.zip",
+ ]
+ deps = [
+ ":$parser_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+
+ outputs = []
+ args = common_generator_args
+ filelist = []
+
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources = filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+
+ foreach(source, non_gen_sources) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ inputs += [ "$target_gen_dir/$source-module" ]
+ outputs += [ "$target_gen_dir/$source.mojolpm.proto" ]
+ }
+
+ foreach(source, gen_sources) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+
+ # For generated files, we assume they're in the target_gen_dir or a
+ # sub-folder of it. Rebase the path so we can get the relative location.
+ source_file = rebase_path(source, target_gen_dir)
+ inputs += [ "$target_gen_dir/$source_file-module" ]
+ outputs += [ "$target_gen_dir/$source_file.mojolpm.proto" ]
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
+
+ args += [
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
+ "--generate_non_variant_code",
+ "-g",
+ "mojolpm",
+ ]
+ }
+
+ mojolpm_proto_target_name = "${target_name}_mojolpm_proto"
+ if (defined(invoker.sources)) {
+ proto_library(mojolpm_proto_target_name) {
+ testonly = true
+ generate_python = false
+
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources = filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+ sources = process_file_template(
+ non_gen_sources,
+ [ "{{source_gen_dir}}/{{source_file_part}}.mojolpm.proto" ])
+ sources += process_file_template(
+ gen_sources,
+ [ "{{source_dir}}/{{source_file_part}}.mojolpm.proto" ])
+
+ import_dirs = [ "//" ]
+ proto_in_dir = "${root_gen_dir}"
+ proto_out_dir = "."
+ proto_deps = [ ":$generator_mojolpm_proto_target_name" ]
+ link_deps = [ "//mojo/public/tools/fuzzers:mojolpm_proto" ]
+
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append mojolpm_proto_suffix
+ # to get the proto dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ proto_deps += [ "${full_name}_mojolpm_proto" ]
+ link_deps += [ "${full_name}_mojolpm_proto" ]
+ }
+ }
+ } else {
+ group(mojolpm_proto_target_name) {
+ testonly = true
+ public_deps = [ "//mojo/public/tools/fuzzers:mojolpm_proto" ]
+ if (defined(generator_shared_target_name)) {
+ public_deps += [ ":$generator_shared_target_name" ]
+ }
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append mojolpm_proto_suffix
+ # to get the proto dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps += [ "${full_name}_mojolpm_proto" ]
+ }
+ }
+ }
+ }
+
+ # Generate code for variants.
+ default_variant = {
+ component_macro_suffix = ""
+ }
+ if ((!defined(invoker.disable_variants) || !invoker.disable_variants) &&
+ use_blink) {
+ blink_variant = {
+ variant = "blink"
+ component_macro_suffix = "_BLINK"
+ for_blink = true
+ }
+ enabled_configurations = [
+ default_variant,
+ blink_variant,
+ ]
+ } else {
+ enabled_configurations = [ default_variant ]
+ }
+ foreach(bindings_configuration, enabled_configurations) {
+ cpp_only = false
+ if (defined(invoker.cpp_only)) {
+ cpp_only = invoker.cpp_only
+ }
+ variant_suffix = ""
+ if (defined(bindings_configuration.variant)) {
+ variant = bindings_configuration.variant
+ variant_suffix = "_${variant}"
+ cpp_only = true
+ }
+
+ cpp_typemap_configs = []
+ export_defines = []
+ export_defines_overridden = false
+ force_source_set = false
+ proxy_target = ""
+ extra_configs = []
+ output_visibility = []
+ output_visibility = [ "*" ]
+ cpp_source_deps = []
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ if (defined(invoker.blink_cpp_typemaps)) {
+ cpp_typemap_configs = invoker.blink_cpp_typemaps
+ }
+ if (defined(invoker.export_define_blink)) {
+ export_defines_overridden = true
+ export_defines = [ invoker.export_define_blink ]
+ force_source_set = true
+ }
+ if (defined(invoker.blink_cpp_configs)) {
+ extra_configs += invoker.blink_cpp_configs
+ }
+ if (defined(invoker.blink_cpp_proxy_target)) {
+ proxy_target = invoker.blink_cpp_proxy_target
+ }
+ if (defined(invoker.visibility_blink)) {
+ output_visibility = []
+ output_visibility = invoker.visibility_blink
+ }
+ if (defined(invoker.mojom_blink_source_deps)) {
+ cpp_source_deps = invoker.mojom_blink_source_deps
+ }
+ } else {
+ if (defined(invoker.cpp_typemaps)) {
+ cpp_typemap_configs = invoker.cpp_typemaps
+ }
+ if (defined(invoker.export_define)) {
+ export_defines_overridden = true
+ export_defines = [ invoker.export_define ]
+ force_source_set = true
+ }
+ if (defined(invoker.cpp_configs)) {
+ extra_configs += invoker.cpp_configs
+ }
+ if (defined(invoker.cpp_proxy_target)) {
+ proxy_target = invoker.cpp_proxy_target
+ }
+ if (defined(invoker.visibility)) {
+ output_visibility = []
+ output_visibility = invoker.visibility
+ }
+ if (defined(invoker.mojom_source_deps)) {
+ cpp_source_deps = invoker.mojom_source_deps
+ }
+ }
+ not_needed([ "cpp_typemap_configs" ])
+ if (proxy_target != "") {
+ group("${target_name}${variant_suffix}__has_cpp_proxy") {
+ }
+ }
+
+ if (!export_defines_overridden && defined(invoker.component_macro_prefix)) {
+ output_name_override =
+ "${invoker.component_output_prefix}${variant_suffix}"
+ export_defines =
+ [ "IS_${invoker.component_macro_prefix}" +
+ "${bindings_configuration.component_macro_suffix}_IMPL" ]
+ }
+
+ generate_java = false
+ if (!cpp_only && defined(invoker.generate_java)) {
+ generate_java = invoker.generate_java
+ }
+ type_mappings_target_name = "${target_name}${variant_suffix}__type_mappings"
+ type_mappings_path =
+ "$target_gen_dir/${target_name}${variant_suffix}__type_mappings"
+ if (sources_list != []) {
+ export_args = []
+ export_args_overridden = false
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ if (defined(invoker.export_class_attribute_blink)) {
+ export_args_overridden = true
+ export_args += [
+ "--export_attribute",
+ invoker.export_class_attribute_blink,
+ "--export_header",
+ invoker.export_header_blink,
+ ]
+ }
+ } else if (defined(invoker.export_class_attribute)) {
+ export_args_overridden = true
+ export_args += [
+ "--export_attribute",
+ invoker.export_class_attribute,
+ "--export_header",
+ invoker.export_header,
+ ]
+ }
+ if (!export_args_overridden && defined(invoker.component_macro_prefix)) {
+ export_args += [
+ "--export_attribute",
+ "COMPONENT_EXPORT(${invoker.component_macro_prefix}" +
+ "${bindings_configuration.component_macro_suffix})",
+ "--export_header",
+ "base/component_export.h",
+ ]
+ }
+
+ generator_cpp_output_suffixes = []
+ variant_dash_suffix = ""
+ if (defined(variant)) {
+ variant_dash_suffix = "-${variant}"
+ }
+ generator_cpp_output_suffixes += [
+ "${variant_dash_suffix}-forward.h",
+ "${variant_dash_suffix}-import-headers.h",
+ "${variant_dash_suffix}-test-utils.h",
+ "${variant_dash_suffix}.cc",
+ "${variant_dash_suffix}.h",
+ ]
+
+ generator_target_name = "${target_name}${variant_suffix}__generator"
+
+ # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
+ action(generator_target_name) {
+ allow_remote = true
+ visibility = [ ":*" ]
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources =
+ sources_list + [
+ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip",
+ type_mappings_path,
+ ]
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
+ sources += [
+ "$root_gen_dir/mojo/public/tools/bindings/mojolpm_templates.zip",
+ ]
+ }
+ deps = [
+ ":$parser_target_name",
+ ":$type_mappings_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+
+ if (defined(invoker.parser_deps)) {
+ deps += invoker.parser_deps
+ }
+ outputs = []
+ args = common_generator_args + export_args
+ filelist = []
+ foreach(source, sources_list) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ }
+ foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+
+ outputs += [
+ "$root_gen_dir/${base_path}${variant_dash_suffix}-forward.h",
+ "$root_gen_dir/${base_path}${variant_dash_suffix}-import-headers.h",
+ "$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.h",
+ "$root_gen_dir/${base_path}${variant_dash_suffix}.cc",
+ "$root_gen_dir/${base_path}${variant_dash_suffix}.h",
+ ]
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
+ outputs += [
+ "$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.cc",
+ "$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.h",
+ ]
+ }
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
+ args += [
+ "--filelist=" + rebase_path("$rsp_file", root_build_dir),
+ "-g",
+ ]
+
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
+ args += [ "c++,mojolpm" ]
+ } else {
+ args += [ "c++" ]
+ }
+
+ if (defined(bindings_configuration.variant)) {
+ args += [
+ "--variant",
+ bindings_configuration.variant,
+ ]
+ }
+
+ args += [
+ "--typemap",
+ rebase_path(type_mappings_path, root_build_dir),
+ ]
+
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ args += [ "--for_blink" ]
+ }
+
+ if (defined(invoker.support_lazy_serialization) &&
+ invoker.support_lazy_serialization) {
+ args += [ "--support_lazy_serialization" ]
+ }
+
+ if (enable_kythe_annotations) {
+ args += [ "--enable_kythe_annotations" ]
+ }
+
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
+ }
+
+ if (defined(invoker.extra_cpp_template_paths)) {
+ foreach(extra_cpp_template, invoker.extra_cpp_template_paths) {
+ args += [
+ "--extra_cpp_template_paths",
+ rebase_path(extra_cpp_template, root_build_dir),
+ ]
+ inputs += [ extra_cpp_template ]
+
+ assert(
+ get_path_info(extra_cpp_template, "extension") == "tmpl",
+ "--extra_cpp_template_paths only accepts template files ending in extension .tmpl")
+ foreach(base_path, output_file_base_paths) {
+ template_file_name = get_path_info("$extra_cpp_template", "name")
+ outputs += [ "$root_gen_dir/${base_path}${variant_dash_suffix}-${template_file_name}" ]
+ }
+ }
+ }
+ }
+ }
+
+ # Write the typemapping configuration for this target out to a file to be
+ # validated by a Python script. This helps catch mistakes that can't
+ # be caught by logic in GN.
+ _typemap_config_filename =
+ "$target_gen_dir/${target_name}${variant_suffix}.typemap_config"
+ _typemap_stamp_filename = "${_typemap_config_filename}.validated"
+ _typemap_validator_target_name = "${type_mappings_target_name}__validator"
+ _rebased_typemap_configs = []
+ foreach(config, cpp_typemap_configs) {
+ _rebased_config = {
+ }
+ _rebased_config = config
+ if (defined(config.traits_headers)) {
+ _rebased_config.traits_headers = []
+ _rebased_config.traits_headers =
+ rebase_path(config.traits_headers, "//")
+ }
+ if (defined(config.traits_private_headers)) {
+ _rebased_config.traits_private_headers = []
+ _rebased_config.traits_private_headers =
+ rebase_path(config.traits_private_headers, "//")
+ }
+ _rebased_typemap_configs += [ _rebased_config ]
+ }
+ write_file(_typemap_config_filename, _rebased_typemap_configs, "json")
+ _mojom_target_name = target_name
+
+ action(_typemap_validator_target_name) {
+ allow_remote = true
+ script = "$mojom_generator_root/validate_typemap_config.py"
+ inputs = [ _typemap_config_filename ]
+ outputs = [ _typemap_stamp_filename ]
+ args = [
+ get_label_info(_mojom_target_name, "label_no_toolchain"),
+ rebase_path(_typemap_config_filename, root_build_dir),
+ rebase_path(_typemap_stamp_filename, root_build_dir),
+ ]
+ }
+
+ action(type_mappings_target_name) {
+ allow_remote = true
+ inputs =
+ mojom_generator_sources + jinja2_sources + [ _typemap_stamp_filename ]
+ outputs = [ type_mappings_path ]
+ script = "$mojom_generator_root/generate_type_mappings.py"
+ deps = [ ":$_typemap_validator_target_name" ]
+ args = [
+ "--output",
+ rebase_path(type_mappings_path, root_build_dir),
+ ]
+
+ sources = []
+ foreach(d, all_deps) {
+ name = get_label_info(d, "label_no_toolchain")
+ toolchain = get_label_info(d, "toolchain")
+ dependency_output = "${name}${variant_suffix}__type_mappings"
+ dependency_target = "${dependency_output}(${toolchain})"
+ deps += [ dependency_target ]
+ dependency_output_dir =
+ get_label_info(dependency_output, "target_gen_dir")
+ dependency_name = get_label_info(dependency_output, "name")
+ dependency_path = "$dependency_output_dir/${dependency_name}"
+ sources += [ dependency_path ]
+ args += [
+ "--dependency",
+ rebase_path(dependency_path, root_build_dir),
+ ]
+ }
+
+ # Newer GN-based typemaps are aggregated into a single config.
+ inputs += [ _typemap_config_filename ]
+ args += [
+ "--cpp-typemap-config",
+ rebase_path(_typemap_config_filename, root_build_dir),
+ ]
+ }
+
+ group("${target_name}${variant_suffix}_headers") {
+ public_deps = []
+ if (sources_list != []) {
+ public_deps += [
+ ":$generator_cpp_message_ids_target_name",
+ ":$generator_shared_target_name",
+ ":$generator_target_name",
+ ]
+ }
+ foreach(d, all_deps) {
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps += [ "${full_name}${variant_suffix}_headers" ]
+ }
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ public_deps += [ "//mojo/public/cpp/bindings:wtf_support" ]
+ }
+ }
+
+ js_data_deps_target_name = target_name + "_js_data_deps"
+ not_needed([ "js_data_deps_target_name" ])
+
+ if (!force_source_set && defined(invoker.component_macro_prefix)) {
+ sources_target_type = "component"
+ } else {
+ sources_target_type = "source_set"
+ }
+
+ output_target_name = "${target_name}${variant_suffix}"
+ if (proxy_target != "") {
+ group(output_target_name) {
+ public_deps = [ proxy_target ]
+ visibility = output_visibility
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ }
+ sources_target_name = "${output_target_name}_cpp_sources"
+ } else {
+ sources_target_name = output_target_name
+ }
+
+ target(sources_target_type, sources_target_name) {
+ if (defined(output_name_override)) {
+ output_name = output_name_override
+ }
+ visibility = output_visibility + [
+ ":$output_target_name",
+ ":${target_name}_mojolpm",
+ ]
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ defines = export_defines
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
+ configs += extra_configs
+ if (output_file_base_paths != []) {
+ sources = []
+ foreach(base_path, output_file_base_paths) {
+ foreach(suffix, generator_cpp_output_suffixes) {
+ sources += [ "$root_gen_dir/${base_path}$suffix" ]
+ }
+ }
+ }
+ deps = [
+ ":$generator_cpp_message_ids_target_name",
+ "//mojo/public/cpp/bindings:struct_traits",
+ "//mojo/public/interfaces/bindings:bindings_headers",
+ ]
+ public_deps = [
+ ":$shared_cpp_library_target_name",
+ "//base",
+ ]
+ if (require_full_cpp_deps) {
+ public_deps += [ "//mojo/public/cpp/bindings" ]
+ } else {
+ public_deps += [ "//mojo/public/cpp/bindings:bindings_base" ]
+ }
+
+ if (sources_list != []) {
+ public_deps += [ ":$generator_target_name" ]
+ }
+ foreach(d, mojom_cpp_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append variant_suffix to
+ # get the cpp dependency name.
+ full_name = get_label_info(d, "label_no_toolchain")
+ public_deps += [ "${full_name}${variant_suffix}" ]
+ }
+ foreach(d, cpp_source_deps) {
+ full_name = get_label_info(d, "label_no_toolchain")
+ public_deps += [
+ "${full_name}${variant_suffix}__has_cpp_proxy",
+ "${full_name}${variant_suffix}_cpp_sources",
+ ]
+ }
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ if (defined(invoker.overridden_deps_blink)) {
+ foreach(d, invoker.overridden_deps_blink) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append variant_suffix
+ # to get the cpp dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps -= [ "${full_name}${variant_suffix}" ]
+ }
+ public_deps += invoker.component_deps_blink
+ }
+ if (defined(invoker.check_includes_blink)) {
+ check_includes = invoker.check_includes_blink
+ }
+ } else {
+ if (defined(invoker.check_includes_blink)) {
+ not_needed(invoker, [ "check_includes_blink" ])
+ }
+ if (defined(invoker.overridden_deps)) {
+ foreach(d, invoker.overridden_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append variant_suffix
+ # to get the cpp dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps -= [ "${full_name}${variant_suffix}" ]
+ }
+ public_deps += invoker.component_deps
+ }
+ }
+ foreach(config, cpp_typemap_configs) {
+ if (defined(config.traits_sources)) {
+ sources += config.traits_sources
+ }
+ if (defined(config.traits_deps)) {
+ deps += config.traits_deps
+ }
+ if (defined(config.traits_public_deps)) {
+ public_deps += config.traits_public_deps
+ }
+ }
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ public_deps += [ "//mojo/public/cpp/bindings:wtf_support" ]
+ }
+ }
+
+ if (generate_mojolpm_fuzzing && !defined(variant)) {
+    # This block contains the C++ targets for the MojoLPM fuzzer; we need to
+    # do this here so that we can use the typemap configuration for the
+    # empty-variant Mojo target.
+
+ mojolpm_target_name = "${target_name}_mojolpm"
+ mojolpm_generator_target_name = "${target_name}__generator"
+ source_set(mojolpm_target_name) {
+ # There are still a few missing header dependencies between mojo targets
+ # with typemaps and the dependencies of their typemap headers. It would
+ # be good to enable include checking for these in the future though.
+ check_includes = false
+ testonly = true
+ if (defined(invoker.sources)) {
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources =
+ filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+ sources = process_file_template(
+ non_gen_sources,
+ [
+ "{{source_gen_dir}}/{{source_file_part}}-mojolpm.cc",
+ "{{source_gen_dir}}/{{source_file_part}}-mojolpm.h",
+ ])
+ sources += process_file_template(
+ gen_sources,
+ [
+ "{{source_dir}}/{{source_file_part}}-mojolpm.cc",
+ "{{source_dir}}/{{source_file_part}}-mojolpm.h",
+ ])
+ deps = [ ":$output_target_name" ]
+ } else {
+ sources = []
+ deps = []
+ }
+
+ public_deps = [
+ ":$generator_shared_target_name",
+
+ # NB: hardcoded dependency on the no-variant variant generator, since
+ # mojolpm only uses the no-variant type.
+ ":$mojolpm_generator_target_name",
+ ":$mojolpm_proto_target_name",
+ "//base",
+ "//mojo/public/tools/fuzzers:mojolpm",
+ ]
+
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append variant_suffix to
+ # get the cpp dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps += [ "${full_name}_mojolpm" ]
+ }
+
+ foreach(config, cpp_typemap_configs) {
+ if (defined(config.traits_deps)) {
+ deps += config.traits_deps
+ }
+ if (defined(config.traits_public_deps)) {
+ public_deps += config.traits_public_deps
+ }
+ }
+ }
+ }
+
+ if (generate_java && is_android) {
+ import("//build/config/android/rules.gni")
+
+ java_generator_target_name = target_name + "_java__generator"
+ if (sources_list != []) {
+ action(java_generator_target_name) {
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources = sources_list
+ deps = [
+ ":$parser_target_name",
+ ":$type_mappings_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+ outputs = []
+ args = common_generator_args
+ filelist = []
+ foreach(source, sources_list) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ }
+ foreach(base_path, output_file_base_paths) {
+ outputs += [ "$root_gen_dir/$base_path.srcjar" ]
+ }
+
+ response_file_contents = filelist
+
+ args += [
+ "--filelist={{response_file_name}}",
+ "-g",
+ "java",
+ ]
+
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
+ }
+ }
+ } else {
+ group(java_generator_target_name) {
+ }
+ }
+
+ java_srcjar_target_name = target_name + "_java_sources"
+
+ action(java_srcjar_target_name) {
+ script = "//build/android/gyp/zip.py"
+ inputs = []
+ if (output_file_base_paths != []) {
+ foreach(base_path, output_file_base_paths) {
+ inputs += [ "$root_gen_dir/${base_path}.srcjar" ]
+ }
+ }
+ output = "$target_gen_dir/$target_name.srcjar"
+ outputs = [ output ]
+ rebase_inputs = rebase_path(inputs, root_build_dir)
+ rebase_output = rebase_path(output, root_build_dir)
+ args = [
+ "--input-zips=$rebase_inputs",
+ "--output=$rebase_output",
+ ]
+ deps = []
+ if (sources_list != []) {
+ deps = [ ":$java_generator_target_name" ]
+ }
+ }
+
+ java_target_name = target_name + "_java"
+ android_library(java_target_name) {
+ forward_variables_from(invoker, [ "enable_bytecode_checks" ])
+ deps = [
+ "//mojo/public/java:bindings_java",
+ "//mojo/public/java:system_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
+ ]
+
+ # Disable warnings/checks on these generated files.
+ chromium_code = false
+
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append "_java" to get the java
+ # dependency name.
+ full_name = get_label_info(d, "label_no_toolchain")
+ deps += [ "${full_name}_java" ]
+ }
+
+ srcjar_deps = [ ":$java_srcjar_target_name" ]
+ }
+ }
+ }
+
+ if (defined(invoker.generate_webui_js_bindings)) {
+ assert(is_chromeos_ash,
+ "generate_webui_js_bindings can only be used on ChromeOS Ash")
+ assert(invoker.generate_webui_js_bindings,
+ "generate_webui_js_bindings should be set to true or removed")
+ }
+
+ use_typescript_for_target = defined(invoker.webui_module_path) &&
+ !defined(invoker.generate_webui_js_bindings)
+
+ generate_legacy_js = !use_typescript_for_target ||
+ (defined(invoker.generate_legacy_js_bindings) &&
+ invoker.generate_legacy_js_bindings)
+
+ if (!use_typescript_for_target &&
+ defined(invoker.generate_legacy_js_bindings)) {
+ not_needed(invoker, [ "generate_legacy_js_bindings" ])
+ }
+
+ # Targets needed by both TS and JS bindings targets. These are needed
+ # unconditionally for JS bindings targets, and are needed for TS bindings
+ # targets when generate_legacy_js_bindings is true. This option is provided
+ # since the legacy bindings are needed by Blink tests and non-Chromium users,
+ # which are not expected to migrate to modules or TypeScript.
+ if (generate_legacy_js && (generate_js_fuzzing ||
+ !defined(invoker.cpp_only) || !invoker.cpp_only)) {
+ if (sources_list != []) {
+ generator_js_target_name = "${target_name}_js__generator"
+
+ action(generator_js_target_name) {
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources = sources_list
+ deps = [
+ ":$parser_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+ if (defined(invoker.parser_deps)) {
+ deps += invoker.parser_deps
+ }
+ outputs = []
+ args = common_generator_args
+ filelist = []
+ foreach(source, sources_list) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ }
+ foreach(base_path, output_file_base_paths) {
+ outputs += [
+ "$root_gen_dir/$base_path.js",
+ "$root_gen_dir/$base_path.m.js",
+ "$root_gen_dir/$base_path-lite.js",
+ "$root_gen_dir/$base_path-lite-for-compile.js",
+ ]
+
+ if (defined(invoker.webui_module_path) &&
+ !use_typescript_for_target) {
+ outputs += [ "$root_gen_dir/mojom-webui/$base_path-webui.js" ]
+ }
+ }
+
+ response_file_contents = filelist
+
+ args += [
+ "--filelist={{response_file_name}}",
+ "-g",
+ "javascript",
+ ]
+
+ if (defined(invoker.js_generate_struct_deserializers) &&
+ invoker.js_generate_struct_deserializers) {
+ args += [ "--js_generate_struct_deserializers" ]
+ }
+
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
+ }
+
+ if (generate_js_fuzzing) {
+ args += [ "--generate_fuzzing" ]
+ }
+ }
+ }
+
+ js_target_name = target_name + "_js"
+ group(js_target_name) {
+ public_deps = []
+ if (sources_list != []) {
+ public_deps += [ ":$generator_js_target_name" ]
+ }
+
+ foreach(d, all_deps) {
+ full_name = get_label_info(d, "label_no_toolchain")
+ public_deps += [ "${full_name}_js" ]
+ }
+ }
+
+ group(js_data_deps_target_name) {
+ deps = []
+ if (sources_list != []) {
+ data = []
+ foreach(base_path, output_file_base_paths) {
+ data += [
+ "$root_gen_dir/${base_path}.js",
+ "$root_gen_dir/${base_path}.m.js",
+ "$root_gen_dir/${base_path}-lite.js",
+ ]
+ }
+ deps += [ ":$generator_js_target_name" ]
+ }
+
+ if (defined(invoker.disallow_native_types) &&
+ invoker.disallow_native_types) {
+ data_deps = []
+ } else {
+ data_deps = [ "//mojo/public/js:bindings_module" ]
+ }
+ foreach(d, all_deps) {
+ full_name = get_label_info(d, "label_no_toolchain")
+ data_deps += [ "${full_name}_js_data_deps" ]
+ }
+ }
+ }
+
+ # js_library() closure compiler targets, primarily used on ChromeOS. Only
+ # generate these targets if the mojom target is not C++ only and is not using
+ # TypeScript.
+ if (generate_mojom_closure_libraries &&
+ (!defined(invoker.cpp_only) || !invoker.cpp_only) && generate_legacy_js) {
+ js_library_for_compile_target_name = "${target_name}_js_library_for_compile"
+ if (sources_list != []) {
+ js_library(js_library_for_compile_target_name) {
+ extra_public_deps = [ ":$generator_js_target_name" ]
+ sources = []
+ foreach(base_path, output_file_base_paths) {
+ sources += [ "$root_gen_dir/${base_path}-lite-for-compile.js" ]
+ }
+ externs_list = [
+ "${externs_path}/mojo_core.js",
+ "${externs_path}/pending.js",
+ ]
+ deps = []
+ if (!defined(invoker.disallow_native_types)) {
+ deps += [ "//mojo/public/js:bindings_lite_sources" ]
+ }
+ foreach(d, all_deps) {
+ full_name = get_label_info(d, "label_no_toolchain")
+ deps += [ "${full_name}_js_library_for_compile" ]
+ }
+ }
+ } else {
+ group(js_library_for_compile_target_name) {
+ }
+ }
+
+ # WebUI specific closure targets, not needed by targets that are generating
+ # TypeScript WebUI bindings or by legacy-only targets.
+ if (defined(invoker.webui_module_path) && !use_typescript_for_target) {
+ webui_js_target_name = "${target_name}_webui_js"
+ if (sources_list != []) {
+ js_library(webui_js_target_name) {
+ extra_public_deps = [ ":$generator_js_target_name" ]
+ sources = []
+ foreach(base_path, output_file_base_paths) {
+ sources += [ "$root_gen_dir/mojom-webui/${base_path}-webui.js" ]
+ }
+ externs_list = [
+ "${externs_path}/mojo_core.js",
+ "${externs_path}/pending.js",
+ ]
+ if (defined(invoker.disallow_native_types) &&
+ invoker.disallow_native_types) {
+ deps = []
+ } else {
+ deps = [ "//mojo/public/js:bindings_uncompiled" ]
+ }
+ foreach(d, all_deps) {
+ full_name = get_label_info(d, "label_no_toolchain")
+ deps += [ "${full_name}_webui_js" ]
+ }
+ }
+ } else {
+ group(webui_js_target_name) {
+ }
+ }
+
+ webui_grdp_target_name = "${target_name}_webui_grdp"
+ out_grd = "$target_gen_dir/${target_name}_webui_resources.grdp"
+ grd_prefix = "${target_name}_webui"
+ generate_grd(webui_grdp_target_name) {
+ grd_prefix = grd_prefix
+ out_grd = out_grd
+
+ deps = [ ":$webui_js_target_name" ]
+
+ input_files = []
+ foreach(base_path, output_file_base_paths) {
+ input_files += [ "${base_path}-webui.js" ]
+ }
+
+ input_files_base_dir =
+ rebase_path("$root_gen_dir/mojom-webui", "$root_build_dir")
+ }
+ }
+ }
+ if ((generate_js_fuzzing || !defined(invoker.cpp_only) ||
+ !invoker.cpp_only) && use_typescript_for_target) {
+ if (sources_list != []) {
+ source_filelist = []
+ foreach(source, sources_list) {
+ source_filelist += [ rebase_path(source, root_build_dir) ]
+ }
+
+ # Generate Typescript bindings.
+ generator_ts_target_name = "${target_name}_ts__generator"
+
+ action(generator_ts_target_name) {
+ script = mojom_generator_script
+ inputs = mojom_generator_sources + jinja2_sources
+ sources = sources_list
+ deps = [
+ ":$parser_target_name",
+ "//mojo/public/tools/bindings:precompile_templates",
+ ]
+
+ outputs = []
+ foreach(base_path, output_file_base_paths) {
+ outputs += [ "$root_gen_dir/$base_path-webui.ts" ]
+ }
+ args = common_generator_args
+ response_file_contents = source_filelist
+
+ args += [
+ "--filelist={{response_file_name}}",
+ "-g",
+ "typescript",
+ ]
+
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
+ }
+
+ if (defined(invoker.js_generate_struct_deserializers) &&
+ invoker.js_generate_struct_deserializers) {
+ args += [ "--js_generate_struct_deserializers" ]
+ }
+
+ # TODO(crbug.com/1007587): Support scramble_message_ids if above is
+ # insufficient.
+ # TODO(crbug.com/1007591): Support generate_fuzzing.
+ }
+ }
+ }
+}
+
+# A helper for the mojom() template above when component libraries are desired
+# for generated C++ bindings units. Supports all the same arguments as mojom()
+# except for the optional |component_output_prefix| and |component_macro_prefix|
+# arguments. These are instead shortened to |output_prefix| and |macro_prefix|
+# and are *required*.
+template("mojom_component") {
+ assert(defined(invoker.output_prefix) && defined(invoker.macro_prefix))
+
+ mojom(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "output_prefix",
+ "macro_prefix",
+ ])
+ component_output_prefix = invoker.output_prefix
+ component_macro_prefix = invoker.macro_prefix
+ }
+}
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
new file mode 100755
index 00000000..8c641c2a
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
@@ -0,0 +1,424 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The frontend for the Mojo bindings system."""
+
+from __future__ import print_function
+
+import argparse
+
+import hashlib
+import importlib
+import json
+import os
+import pprint
+import re
+import struct
+import sys
+
+# Disable lint check for finding modules:
+# pylint: disable=F0401
+
+def _GetDirAbove(dirname):
+ """Returns the directory "above" this file containing |dirname| (which must
+ also be "above" this file)."""
+ path = os.path.abspath(__file__)
+ while True:
+ path, tail = os.path.split(path)
+ assert tail
+ if tail == dirname:
+ return path
+
+
+sys.path.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
+
+from mojom.error import Error
+import mojom.fileutil as fileutil
+from mojom.generate.module import Module
+from mojom.generate import template_expander
+from mojom.generate import translate
+from mojom.generate.generator import WriteFile
+
+sys.path.append(
+ os.path.join(_GetDirAbove("mojo"), "tools", "diagnosis"))
+import crbug_1001171
+
+
+_BUILTIN_GENERATORS = {
+ "c++": "mojom_cpp_generator",
+ "javascript": "mojom_js_generator",
+ "java": "mojom_java_generator",
+ "mojolpm": "mojom_mojolpm_generator",
+ "typescript": "mojom_ts_generator",
+}
+
+_BUILTIN_CHECKS = {
+ "attributes": "mojom_attributes_check",
+ "definitions": "mojom_definitions_check",
+ "features": "mojom_interface_feature_check",
+ "restrictions": "mojom_restrictions_check",
+}
+
+
+def LoadGenerators(generators_string):
+ if not generators_string:
+ return {} # No generators.
+
+ generators = {}
+ for generator_name in [s.strip() for s in generators_string.split(",")]:
+ language = generator_name.lower()
+ if language not in _BUILTIN_GENERATORS:
+ print("Unknown generator name %s" % generator_name)
+ sys.exit(1)
+ generator_module = importlib.import_module(
+ "generators.%s" % _BUILTIN_GENERATORS[language])
+ generators[language] = generator_module
+ return generators
+
+
+def LoadChecks(checks_string):
+ if not checks_string:
+ return {} # No checks.
+
+ checks = {}
+ for check_name in [s.strip() for s in checks_string.split(",")]:
+ check = check_name.lower()
+ if check not in _BUILTIN_CHECKS:
+ print("Unknown check name %s" % check_name)
+ sys.exit(1)
+ check_module = importlib.import_module("checks.%s" % _BUILTIN_CHECKS[check])
+ checks[check] = check_module
+ return checks
+
+
+def MakeImportStackMessage(imported_filename_stack):
+ """Make a (human-readable) message listing a chain of imports. (Returned
+ string begins with a newline (if nonempty) and does not end with one.)"""
+ return ''.join(
+ reversed(["\n %s was imported by %s" % (a, b) for (a, b) in \
+ zip(imported_filename_stack[1:], imported_filename_stack)]))
+
+
+class RelativePath:
+ """Represents a path relative to the source tree or generated output dir."""
+
+ def __init__(self, path, source_root, output_dir):
+ self.path = path
+ if path.startswith(source_root):
+ self.root = source_root
+ elif path.startswith(output_dir):
+ self.root = output_dir
+ else:
+ raise Exception("Invalid input path %s" % path)
+
+ def relative_path(self):
+ return os.path.relpath(
+ os.path.abspath(self.path), os.path.abspath(self.root))
+
+
+def _GetModulePath(path, output_dir):
+ return os.path.join(output_dir, path.relative_path() + '-module')
+
+
+def ScrambleMethodOrdinals(interfaces, salt):
+ already_generated = set()
+ for interface in interfaces:
+ i = 0
+ already_generated.clear()
+ for method in interface.methods:
+ if method.explicit_ordinal is not None:
+ continue
+ while True:
+ i = i + 1
+ if i == 1000000:
+ raise Exception("Could not generate %d method ordinals for %s" %
+ (len(interface.methods), interface.mojom_name))
+ # Generate a scrambled method.ordinal value. The algorithm doesn't have
+ # to be very strong, cryptographically. It just needs to be non-trivial
+ # to guess the results without the secret salt, in order to make it
+ # harder for a compromised process to send fake Mojo messages.
+ sha256 = hashlib.sha256(salt)
+ sha256.update(interface.mojom_name.encode('utf-8'))
+ sha256.update(str(i).encode('utf-8'))
+ # Take the first 4 bytes as a little-endian uint32.
+ ordinal = struct.unpack('<L', sha256.digest()[:4])[0]
+ # Trim to 31 bits, so it always fits into a Java (signed) int.
+ ordinal = ordinal & 0x7fffffff
+ if ordinal in already_generated:
+ continue
+ already_generated.add(ordinal)
+ method.ordinal = ordinal
+ method.ordinal_comment = (
+ 'The %s value is based on sha256(salt + "%s%d").' %
+ (ordinal, interface.mojom_name, i))
+ break
+
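+# For reference only (not used by the generator; it just restates the loop
+# above): the scrambled ordinal for attempt i on an interface named |name|
+# can be computed standalone as
+#
+#   digest = hashlib.sha256(salt + name.encode('utf-8') +
+#                           str(i).encode('utf-8')).digest()
+#   ordinal = struct.unpack('<L', digest[:4])[0] & 0x7fffffff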
+
+def ReadFileContents(filename):
+ with open(filename, 'rb') as f:
+ return f.read()
+
+
+class MojomProcessor:
+ """Takes parsed mojom modules and generates language bindings from them.
+
+ Attributes:
+ _processed_files: {Dict[str, mojom.generate.module.Module]} Mapping from
+ relative mojom filename paths to the module AST for that mojom file.
+ """
+ def __init__(self, should_generate):
+ self._should_generate = should_generate
+ self._processed_files = {}
+ self._typemap = {}
+
+ def LoadTypemaps(self, typemaps):
+ # Support some very simple single-line comments in typemap JSON.
+ comment_expr = r"^\s*//.*$"
+ def no_comments(line):
+ return not re.match(comment_expr, line)
+ for filename in typemaps:
+ with open(filename) as f:
+ typemaps = json.loads("".join(filter(no_comments, f.readlines())))
+ for language, typemap in typemaps.items():
+ language_map = self._typemap.get(language, {})
+ language_map.update(typemap)
+ self._typemap[language] = language_map
+ if 'c++' in self._typemap:
+ self._typemap['mojolpm'] = self._typemap['c++']
+
+ def _GenerateModule(self, args, remaining_args, check_modules,
+ generator_modules, rel_filename, imported_filename_stack):
+ # Return the already-generated module.
+ if rel_filename.path in self._processed_files:
+ return self._processed_files[rel_filename.path]
+
+ if rel_filename.path in imported_filename_stack:
+ print("%s: Error: Circular dependency" % rel_filename.path + \
+ MakeImportStackMessage(imported_filename_stack + [rel_filename.path]))
+ sys.exit(1)
+
+ module_path = _GetModulePath(rel_filename, args.output_dir)
+ with open(module_path, 'rb') as f:
+ module = Module.Load(f)
+
+ if args.scrambled_message_id_salt_paths:
+ salt = b''.join(
+ map(ReadFileContents, args.scrambled_message_id_salt_paths))
+ ScrambleMethodOrdinals(module.interfaces, salt)
+
+ if self._should_generate(rel_filename.path):
+ # Run checks on module first.
+ for check_module in check_modules.values():
+ checker = check_module.Check(module)
+ checker.CheckModule()
+ # Then run generation.
+ for language, generator_module in generator_modules.items():
+ generator = generator_module.Generator(
+ module, args.output_dir, typemap=self._typemap.get(language, {}),
+ variant=args.variant, bytecode_path=args.bytecode_path,
+ for_blink=args.for_blink,
+ js_generate_struct_deserializers=\
+ args.js_generate_struct_deserializers,
+ export_attribute=args.export_attribute,
+ export_header=args.export_header,
+ generate_non_variant_code=args.generate_non_variant_code,
+ support_lazy_serialization=args.support_lazy_serialization,
+ disallow_native_types=args.disallow_native_types,
+ disallow_interfaces=args.disallow_interfaces,
+ generate_message_ids=args.generate_message_ids,
+ generate_fuzzing=args.generate_fuzzing,
+ enable_kythe_annotations=args.enable_kythe_annotations,
+ extra_cpp_template_paths=args.extra_cpp_template_paths,
+ generate_extra_cpp_only=args.generate_extra_cpp_only)
+ filtered_args = []
+ if hasattr(generator_module, 'GENERATOR_PREFIX'):
+ prefix = '--' + generator_module.GENERATOR_PREFIX + '_'
+ filtered_args = [arg for arg in remaining_args
+ if arg.startswith(prefix)]
+ generator.GenerateFiles(filtered_args)
+
+ # Save result.
+ self._processed_files[rel_filename.path] = module
+ return module
+
+
+def _Generate(args, remaining_args):
+ if args.variant == "none":
+ args.variant = None
+
+ for idx, import_dir in enumerate(args.import_directories):
+ tokens = import_dir.split(":")
+ if len(tokens) >= 2:
+ args.import_directories[idx] = RelativePath(tokens[0], tokens[1],
+ args.output_dir)
+ else:
+ args.import_directories[idx] = RelativePath(tokens[0], args.depth,
+ args.output_dir)
+ generator_modules = LoadGenerators(args.generators_string)
+ check_modules = LoadChecks(args.checks_string)
+
+ fileutil.EnsureDirectoryExists(args.output_dir)
+
+ processor = MojomProcessor(lambda filename: filename in args.filename)
+ processor.LoadTypemaps(set(args.typemaps))
+
+ if args.filelist:
+ with open(args.filelist) as f:
+ args.filename.extend(f.read().split())
+
+ for filename in args.filename:
+ processor._GenerateModule(
+ args, remaining_args, check_modules, generator_modules,
+ RelativePath(filename, args.depth, args.output_dir), [])
+
+ return 0
+
+
+def _Precompile(args, _):
+ generator_modules = LoadGenerators(",".join(_BUILTIN_GENERATORS.keys()))
+
+ template_expander.PrecompileTemplates(generator_modules, args.output_dir)
+ return 0
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Generate bindings from mojom files.")
+ parser.add_argument("--use_bundled_pylibs", action="store_true",
+ help="use Python modules bundled in the SDK")
+ parser.add_argument(
+ "-o",
+ "--output_dir",
+ dest="output_dir",
+ default=".",
+ help="output directory for generated files")
+
+ subparsers = parser.add_subparsers()
+
+ generate_parser = subparsers.add_parser(
+ "generate", description="Generate bindings from mojom files.")
+ generate_parser.add_argument("filename", nargs="*",
+ help="mojom input file")
+ generate_parser.add_argument("--filelist", help="mojom input file list")
+ generate_parser.add_argument("-d", "--depth", dest="depth", default=".",
+ help="depth from source root")
+ generate_parser.add_argument("-g",
+ "--generators",
+ dest="generators_string",
+ metavar="GENERATORS",
+ default="c++,javascript,java,mojolpm",
+ help="comma-separated list of generators")
+ generate_parser.add_argument("-c",
+ "--checks",
+ dest="checks_string",
+ metavar="CHECKS",
+ default=",".join(_BUILTIN_CHECKS.keys()),
+ help="comma-separated list of checks")
+ generate_parser.add_argument(
+ "--gen_dir", dest="gen_directories", action="append", metavar="directory",
+ default=[], help="add a directory to be searched for the syntax trees.")
+ generate_parser.add_argument(
+ "-I", dest="import_directories", action="append", metavar="directory",
+ default=[],
+ help="add a directory to be searched for import files. The depth from "
+ "source root can be specified for each import by appending it after "
+ "a colon")
+ generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP",
+ default=[], dest="typemaps",
+ help="apply TYPEMAP to generated output")
+ generate_parser.add_argument("--variant", dest="variant", default=None,
+ help="output a named variant of the bindings")
+ generate_parser.add_argument(
+ "--bytecode_path", required=True, help=(
+ "the path from which to load template bytecode; to generate template "
+ "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename(
+ sys.argv[0])))
+ generate_parser.add_argument("--for_blink", action="store_true",
+ help="Use WTF types as generated types for mojo "
+ "string/array/map.")
+ generate_parser.add_argument(
+ "--js_generate_struct_deserializers", action="store_true",
+ help="Generate javascript deserialize methods for structs in "
+ "mojom-lite.js file")
+ generate_parser.add_argument(
+ "--export_attribute", default="",
+ help="Optional attribute to specify on class declaration to export it "
+ "for the component build.")
+ generate_parser.add_argument(
+ "--export_header", default="",
+ help="Optional header to include in the generated headers to support the "
+ "component build.")
+ generate_parser.add_argument(
+ "--generate_non_variant_code", action="store_true",
+ help="Generate code that is shared by different variants.")
+ generate_parser.add_argument(
+ "--scrambled_message_id_salt_path",
+ dest="scrambled_message_id_salt_paths",
+ help="If non-empty, the path to a file whose contents should be used as"
+ "a salt for generating scrambled message IDs. If this switch is specified"
+ "more than once, the contents of all salt files are concatenated to form"
+ "the salt value.", default=[], action="append")
+ generate_parser.add_argument(
+ "--support_lazy_serialization",
+ help="If set, generated bindings will serialize lazily when possible.",
+ action="store_true")
+ generate_parser.add_argument(
+ "--extra_cpp_template_paths",
+ dest="extra_cpp_template_paths",
+ action="append",
+ metavar="path_to_template",
+ default=[],
+ help="Provide a path to a new template (.tmpl) that is used to generate "
+ "additional C++ source/header files ")
+ generate_parser.add_argument(
+ "--generate_extra_cpp_only",
+ help="If set and extra_cpp_template_paths provided, will only generate"
+ "extra_cpp_template related C++ bindings",
+ action="store_true")
+ generate_parser.add_argument(
+ "--disallow_native_types",
+ help="Disallows the [Native] attribute to be specified on structs or "
+ "enums within the mojom file.", action="store_true")
+ generate_parser.add_argument(
+ "--disallow_interfaces",
+ help="Disallows interface definitions within the mojom file. It is an "
+ "error to specify this flag when processing a mojom file which defines "
+ "any interface.", action="store_true")
+ generate_parser.add_argument(
+ "--generate_message_ids",
+ help="Generates only the message IDs header for C++ bindings. Note that "
+ "this flag only matters if --generate_non_variant_code is also "
+ "specified.", action="store_true")
+ generate_parser.add_argument(
+ "--generate_fuzzing",
+ action="store_true",
+ help="Generates additional bindings for fuzzing in JS.")
+ generate_parser.add_argument(
+ "--enable_kythe_annotations",
+ action="store_true",
+ help="Adds annotations for kythe metadata generation.")
+
+ generate_parser.set_defaults(func=_Generate)
+
+ precompile_parser = subparsers.add_parser("precompile",
+ description="Precompile templates for the mojom bindings generator.")
+ precompile_parser.set_defaults(func=_Precompile)
+
+ args, remaining_args = parser.parse_known_args()
+ return args.func(args, remaining_args)
+
+
+if __name__ == "__main__":
+ with crbug_1001171.DumpStateOnLookupError():
+ ret = main()
+ # Exit without running GC, which can save multiple seconds due to the large
+  # number of objects created. But flushing is necessary as os._exit doesn't
+  # do that.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(ret)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
new file mode 100644
index 00000000..761922b6
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
@@ -0,0 +1,62 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from mojom_bindings_generator import MakeImportStackMessage
+from mojom_bindings_generator import ScrambleMethodOrdinals
+
+
+class FakeIface:
+ def __init__(self):
+ self.mojom_name = None
+ self.methods = None
+
+
+class FakeMethod:
+ def __init__(self, explicit_ordinal=None):
+ self.explicit_ordinal = explicit_ordinal
+ self.ordinal = explicit_ordinal
+ self.ordinal_comment = None
+
+
+class MojoBindingsGeneratorTest(unittest.TestCase):
+ """Tests mojo_bindings_generator."""
+
+ def testMakeImportStackMessage(self):
+ """Tests MakeImportStackMessage()."""
+ self.assertEqual(MakeImportStackMessage(["x"]), "")
+ self.assertEqual(MakeImportStackMessage(["x", "y"]),
+ "\n y was imported by x")
+ self.assertEqual(MakeImportStackMessage(["x", "y", "z"]),
+ "\n z was imported by y\n y was imported by x")
+
+ def testScrambleMethodOrdinals(self):
+ """Tests ScrambleMethodOrdinals()."""
+ interface = FakeIface()
+ interface.mojom_name = 'RendererConfiguration'
+ interface.methods = [
+ FakeMethod(),
+ FakeMethod(),
+ FakeMethod(),
+ FakeMethod(explicit_ordinal=42)
+ ]
+ ScrambleMethodOrdinals([interface], "foo".encode('utf-8'))
+ # These next three values are hard-coded. If the generation algorithm
+    # changes from being based on sha256(salt + interface.mojom_name + str(i)),
+    # then these numbers will obviously need to change too.
+ #
+ # Note that hashlib.sha256('fooRendererConfiguration1').digest()[:4] is
+ # '\xa5\xbc\xf9\xca' and that hex(1257880741) = '0x4af9bca5'. The
+ # difference in 0x4a vs 0xca is because we only take 31 bits.
+ self.assertEqual(interface.methods[0].ordinal, 1257880741)
+ self.assertEqual(interface.methods[1].ordinal, 631133653)
+ self.assertEqual(interface.methods[2].ordinal, 549336076)
+
+ # Explicit method ordinals should not be scrambled.
+ self.assertEqual(interface.methods[3].ordinal, 42)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py b/utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
new file mode 100755
index 00000000..6bb7a209
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import re
+import sys
+
+
+def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
+ _SUPPORTED_CONFIG_KEYS = set([
+ 'types', 'traits_headers', 'traits_private_headers', 'traits_sources',
+ 'traits_deps', 'traits_public_deps'
+ ])
+ _SUPPORTED_TYPE_KEYS = set([
+ 'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
+ 'move_only', 'nullable_is_same_type', 'forward_declaration',
+ 'default_constructible'
+ ])
+ with open(config_filename, 'r') as f:
+ for config in json.load(f):
+ for key in config.keys():
+ if key not in _SUPPORTED_CONFIG_KEYS:
+ raise ValueError('Invalid typemap property "%s" when processing %s' %
+ (key, target_name))
+
+ types = config.get('types')
+ if not types:
+ raise ValueError('Typemap for %s must specify at least one type to map'
+ % target_name)
+
+ for entry in types:
+ for key in entry.keys():
+ if key not in _SUPPORTED_TYPE_KEYS:
+ raise IOError(
+ 'Invalid type property "%s" in typemap for "%s" on target %s' %
+ (key, entry.get('mojom', '(unknown)'), target_name))
+
+ with open(out_filename, 'w') as f:
+ f.truncate(0)
+
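+# Purely illustrative (not part of the upstream tool): a config that passes the
+# checks above could look like the JSON below. All type names, headers and
+# build targets are hypothetical; only the keys come from the supported sets.
+#
+# [
+#   {
+#     "types": [
+#       { "mojom": "foo.mojom.Bar", "cpp": "::foo::Bar", "move_only": true }
+#     ],
+#     "traits_headers": [ "foo/bar_mojom_traits.h" ],
+#     "traits_public_deps": [ "//foo:bar_traits" ]
+#   }
+# ]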
+
+def main():
+ parser = argparse.ArgumentParser()
+ _, args = parser.parse_known_args()
+ if len(args) != 3:
+ print('Usage: validate_typemap_config.py target_name config_filename '
+ 'stamp_filename')
+ sys.exit(1)
+
+ CheckCppTypemapConfigs(args[0], args[1], args[2])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
new file mode 100644
index 00000000..eafb95a1
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+group("tests") {
+ data = [
+ "check_stable_mojom_compatibility_unittest.py",
+ "check_stable_mojom_compatibility.py",
+ "const_unittest.py",
+ "enum_unittest.py",
+ "feature_unittest.py",
+ "mojom_parser_test_case.py",
+ "mojom_parser_unittest.py",
+ "mojom_parser.py",
+ "stable_attribute_unittest.py",
+ "version_compatibility_unittest.py",
+ ]
+}
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/README.md b/utils/codegen/ipc/mojo/public/tools/mojom/README.md
new file mode 100644
index 00000000..e5d17ab0
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/README.md
@@ -0,0 +1,14 @@
+# The Mojom Parser
+
+The Mojom format is an interface definition language (IDL) for describing
+interprocess communication (IPC) messages and data types for use with the
+low-level cross-platform
+[Mojo IPC library](https://chromium.googlesource.com/chromium/src/+/main/mojo/public/c/system/README.md).
+
+This directory consists of a `mojom` Python module, its tests, and supporting
+command-line tools. The Python module implements the parser used by the
+command-line tools and exposes an API to help external bindings generators emit
+useful code from the parser's outputs.
+
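+As a minimal, non-authoritative sketch (assuming a `foo.mojom` file with no
+imports), a bindings generator can parse a source file and translate it into a
+`Module` roughly as follows:
+
+```python
+from mojom.parse import parser
+from mojom.generate import translate
+
+with open('foo.mojom') as f:
+    # Parse the mojom source into an AST.
+    ast = parser.Parse(f.read(), 'foo.mojom')
+
+# The third argument maps already-translated imports; it is empty here because
+# foo.mojom is assumed not to import anything.
+module = translate.OrderedModule(ast, 'foo.mojom', {})
+print([struct.mojom_name for struct in module.structs])
+```
+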
+TODO(https://crbug.com/1060464): Fill out this documentation once the library
+and tools have stabilized.
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
new file mode 100755
index 00000000..35cd1cfd
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Verifies backward-compatibility of mojom type changes.
+
+Given a set of pre- and post-diff mojom file contents, and a root directory
+for a project, this tool verifies that any changes to [Stable] mojom types are
+backward-compatible with the previous version.
+
+This can be used e.g. by a presubmit check to prevent developers from making
+breaking changes to stable mojoms."""
+
+import argparse
+import io
+import json
+import os
+import os.path
+import sys
+
+from mojom.generate import module
+from mojom.generate import translate
+from mojom.parse import parser
+
+# pylint: disable=raise-missing-from
+
+
+class ParseError(Exception):
+ pass
+
+
+def _ValidateDelta(root, delta):
+ """Parses all modified mojoms (including all transitive mojom dependencies,
+ even if unmodified) to perform backward-compatibility checks on any types
+ marked with the [Stable] attribute.
+
+ Note that unlike the normal build-time parser in mojom_parser.py, this does
+ not produce or rely on cached module translations, but instead parses the full
+ transitive closure of a mojom's input dependencies all at once.
+ """
+
+ translate.is_running_backwards_compatibility_check_hack = True
+
+ # First build a map of all files covered by the delta
+ affected_files = set()
+ old_files = {}
+ new_files = {}
+ for change in delta:
+ # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
+ filename = change['filename'].replace('\\', '/')
+ affected_files.add(filename)
+ if change['old']:
+ old_files[filename] = change['old']
+ if change['new']:
+ new_files[filename] = change['new']
+
+ # Parse and translate all mojoms relevant to the delta, including transitive
+ # imports that weren't modified.
+ unmodified_modules = {}
+
+ def parseMojom(mojom, file_overrides, override_modules):
+ if mojom in unmodified_modules or mojom in override_modules:
+ return
+
+ contents = file_overrides.get(mojom)
+ if contents:
+ modules = override_modules
+ else:
+ modules = unmodified_modules
+ with io.open(os.path.join(root, mojom), encoding='utf-8') as f:
+ contents = f.read()
+
+ try:
+ ast = parser.Parse(contents, mojom)
+ except Exception as e:
+ raise ParseError('encountered exception {0} while parsing {1}'.format(
+ e, mojom))
+
+ # Files which are generated at compile time can't be checked by this script
+ # (at the moment) since they may not exist in the output directory.
+ generated_files_to_skip = {
+ ('third_party/blink/public/mojom/runtime_feature_state/'
+ 'runtime_feature.mojom'),
+ ('third_party/blink/public/mojom/origin_trial_feature/'
+ 'origin_trial_feature.mojom'),
+ }
+
+ ast.import_list.items = [
+ x for x in ast.import_list.items
+ if x.import_filename not in generated_files_to_skip
+ ]
+
+ for imp in ast.import_list:
+ if (not file_overrides.get(imp.import_filename)
+ and not os.path.exists(os.path.join(root, imp.import_filename))):
+ # Speculatively construct a path prefix to locate the import_filename
+ mojom_path = os.path.dirname(os.path.normpath(mojom)).split(os.sep)
+ test_prefix = ''
+ for path_component in mojom_path:
+ test_prefix = os.path.join(test_prefix, path_component)
+ test_import_filename = os.path.join(test_prefix, imp.import_filename)
+ if os.path.exists(os.path.join(root, test_import_filename)):
+ imp.import_filename = test_import_filename
+ break
+ parseMojom(imp.import_filename, file_overrides, override_modules)
+
+ # Now that the transitive set of dependencies has been imported and parsed
+ # above, translate each mojom AST into a Module so that all types are fully
+ # defined and can be inspected.
+ all_modules = {}
+ all_modules.update(unmodified_modules)
+ all_modules.update(override_modules)
+ modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)
+
+ old_modules = {}
+ for mojom in old_files:
+ parseMojom(mojom, old_files, old_modules)
+ new_modules = {}
+ for mojom in new_files:
+ parseMojom(mojom, new_files, new_modules)
+
+ # At this point we have a complete set of translated Modules from both the
+ # pre- and post-diff mojom contents. Now we can analyze backward-compatibility
+ # of the deltas.
+ #
+ # Note that for backward-compatibility checks we only care about types which
+ # were marked [Stable] before the diff. Types newly marked as [Stable] are not
+ # checked.
+ def collectTypes(modules):
+ types = {}
+ for m in modules.values():
+ for kinds in (m.enums, m.structs, m.unions, m.interfaces):
+ for kind in kinds:
+ types[kind.qualified_name] = kind
+ return types
+
+ old_types = collectTypes(old_modules)
+ new_types = collectTypes(new_modules)
+
+ # Collect any renamed types so they can be compared accordingly.
+ renamed_types = {}
+ for name, kind in new_types.items():
+ old_name = kind.attributes and kind.attributes.get('RenamedFrom')
+ if old_name:
+ renamed_types[old_name] = name
+
+ for qualified_name, kind in old_types.items():
+ if not kind.stable:
+ continue
+
+ new_name = renamed_types.get(qualified_name, qualified_name)
+ if new_name not in new_types:
+ raise Exception(
+ 'Stable type %s appears to be deleted by this change. If it was '
+ 'renamed, please add a [RenamedFrom] attribute to the new type. This '
+ 'can be deleted by a subsequent change.' % qualified_name)
+
+ checker = module.BackwardCompatibilityChecker()
+ try:
+ if not checker.IsBackwardCompatible(new_types[new_name], kind):
+ raise Exception(
+ 'Stable type %s appears to have changed in a way which '
+ 'breaks backward-compatibility. Please fix!\n\nIf you '
+ 'believe this assessment to be incorrect, please file a '
+ 'Chromium bug against the "Internals>Mojo>Bindings" '
+ 'component.' % qualified_name)
+ except Exception as e:
+ raise Exception(
+ 'Stable type %s appears to have changed in a way which '
+ 'breaks backward-compatibility: \n\n%s.\nPlease fix!\n\nIf you '
+ 'believe this assessment to be incorrect, please file a '
+ 'Chromium bug against the "Internals>Mojo>Bindings" '
+ 'component.' % (qualified_name, e))
+
+
+def Run(command_line, delta=None):
+ """Runs the tool with the given command_line. Normally this will read the
+ change description from stdin as a JSON-encoded list, but tests may pass a
+ delta directly for convenience."""
+ arg_parser = argparse.ArgumentParser(
+ description='Verifies backward-compatibility of mojom type changes.',
+ epilog="""
+This tool reads a change description from stdin and verifies that all modified
+[Stable] mojom types will retain backward-compatibility. The change description
+must be a JSON-encoded list of objects, each with a "filename" key (path to a
+changed mojom file, relative to ROOT); an "old" key whose value is a string of
+the full file contents before the change, or null if the file is being added;
+and a "new" key whose value is a string of the full file contents after the
+change, or null if the file is being deleted.""")
+ arg_parser.add_argument(
+ '--src-root',
+ required=True,
+ action='store',
+ metavar='ROOT',
+ help='The root of the source tree in which the checked mojoms live.')
+
+ args, _ = arg_parser.parse_known_args(command_line)
+ if not delta:
+ delta = json.load(sys.stdin)
+ _ValidateDelta(args.src_root, delta)
+
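+# Purely illustrative (the format is described in the epilog above, but the
+# file name and contents here are hypothetical): a change description piped to
+# this script on stdin could look like
+#
+#   [{"filename": "foo/foo.mojom",
+#     "old": "[Stable] struct S {};",
+#     "new": "[Stable] struct S { [MinVersion=1] int32 x; };"}]
+#
+# invoked e.g. as: check_stable_mojom_compatibility.py --src-root SRC < delta.json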
+
+if __name__ == '__main__':
+ Run(sys.argv[1:])
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
new file mode 100755
index 00000000..06769c95
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import os.path
+import shutil
+import tempfile
+import unittest
+
+import check_stable_mojom_compatibility
+
+from mojom.generate import module
+
+
+class Change:
+ """Helper to clearly define a mojom file delta to be analyzed."""
+
+ def __init__(self, filename, old=None, new=None):
+ """If old is None, this is a file addition. If new is None, this is a file
+ deletion. Otherwise it's a file change."""
+ self.filename = filename
+ self.old = old
+ self.new = new
+
+
+class UnchangedFile(Change):
+ def __init__(self, filename, contents):
+ super().__init__(filename, old=contents, new=contents)
+
+
+class CheckStableMojomCompatibilityTest(unittest.TestCase):
+ """Tests covering the behavior of the compatibility checking tool. Note that
+ details of different compatibility checks and relevant failure modes are NOT
+ covered by these tests. Those are instead covered by unittests in
+ version_compatibility_unittest.py. Additionally, the tests which ensure a
+ given set of [Stable] mojom definitions are indeed plausibly stable (i.e. they
+ have no unstable dependencies) are covered by stable_attribute_unittest.py.
+
+ These tests cover higher-level concerns of the compatibility checking tool,
+  like file or symbol renames, changes spread over multiple files, etc."""
+
+ def verifyBackwardCompatibility(self, changes):
+ """Helper for implementing assertBackwardCompatible and
+ assertNotBackwardCompatible"""
+
+ temp_dir = tempfile.mkdtemp()
+ for change in changes:
+ if change.old:
+ # Populate the old file on disk in our temporary fake source root
+ file_path = os.path.join(temp_dir, change.filename)
+ dir_path = os.path.dirname(file_path)
+ if not os.path.exists(dir_path):
+ os.makedirs(dir_path)
+ with open(file_path, 'w') as f:
+ f.write(change.old)
+
+ delta = []
+ for change in changes:
+ if change.old != change.new:
+ delta.append({
+ 'filename': change.filename,
+ 'old': change.old,
+ 'new': change.new
+ })
+
+ try:
+ check_stable_mojom_compatibility.Run(['--src-root', temp_dir],
+ delta=delta)
+ finally:
+ shutil.rmtree(temp_dir)
+
+ def assertBackwardCompatible(self, changes):
+ self.verifyBackwardCompatibility(changes)
+
+ def assertNotBackwardCompatible(self, changes):
+ try:
+ self.verifyBackwardCompatibility(changes)
+ except Exception:
+ return
+
+ raise Exception('Change unexpectedly passed a backward-compatibility check')
+
+ def testBasicCompatibility(self):
+ """Minimal smoke test to verify acceptance of a simple valid change."""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Stable] struct S {};',
+ new='[Stable] struct S { [MinVersion=1] int32 x; };')
+ ])
+
+ def testBasicIncompatibility(self):
+ """Minimal smoke test to verify rejection of a simple invalid change."""
+ self.assertNotBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Stable] struct S {};',
+ new='[Stable] struct S { int32 x; };')
+ ])
+
+ def testIgnoreIfNotStable(self):
+ """We don't care about types not marked [Stable]"""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='struct S {};',
+ new='struct S { int32 x; };')
+ ])
+
+ def testRename(self):
+ """We can do checks for renamed types."""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Stable] struct S {};',
+ new='[Stable, RenamedFrom="S"] struct T {};')
+ ])
+ self.assertNotBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Stable] struct S {};',
+ new='[Stable, RenamedFrom="S"] struct T { int32 x; };')
+ ])
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Stable] struct S {};',
+ new="""\
+ [Stable, RenamedFrom="S"]
+ struct T { [MinVersion=1] int32 x; };
+ """)
+ ])
+
+ def testNewlyStable(self):
+ """We don't care about types newly marked as [Stable]."""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='struct S {};',
+ new='[Stable] struct S { int32 x; };')
+ ])
+
+ def testFileRename(self):
+ """Make sure we can still do compatibility checks after a file rename."""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
+ Change('bar/bar.mojom',
+ old=None,
+ new='[Stable] struct S { [MinVersion=1] int32 x; };')
+ ])
+ self.assertNotBackwardCompatible([
+ Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
+ Change('bar/bar.mojom', old=None, new='[Stable] struct S { int32 x; };')
+ ])
+
+ def testWithImport(self):
+ """Ensure that cross-module dependencies do not break the compatibility
+ checking tool."""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old="""\
+ module foo;
+ [Stable] struct S {};
+ """,
+ new="""\
+ module foo;
+ [Stable] struct S { [MinVersion=2] int32 x; };
+ """),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; [MinVersion=1] int32 y; };
+ """)
+ ])
+
+ def testWithMovedDefinition(self):
+ """If a definition moves from one file to another, we should still be able
+ to check compatibility accurately."""
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old="""\
+ module foo;
+ [Stable] struct S {};
+ """,
+ new="""\
+ module foo;
+ """),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable, RenamedFrom="foo.S"] struct S {
+ [MinVersion=2] int32 x;
+ };
+ [Stable] struct T { S s; [MinVersion=1] int32 y; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ Change('foo/foo.mojom',
+ old="""\
+ module foo;
+ [Stable] struct S {};
+ """,
+ new="""\
+ module foo;
+ """),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable, RenamedFrom="foo.S"] struct S { int32 x; };
+ [Stable] struct T { S s; [MinVersion=1] int32 y; };
+ """)
+ ])
+
+ def testWithUnmodifiedImport(self):
+ """Unchanged files in the filesystem are still parsed by the compatibility
+ checking tool if they're imported by a changed file."""
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; [MinVersion=1] int32 x; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; int32 x; };
+ """)
+ ])
+
+ def testWithPartialImport(self):
+ """The compatibility checking tool correctly parses imports with partial
+ paths."""
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('foo/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('foo/bar.mojom',
+ old="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ def testNewEnumDefault(self):
+ # Should be backwards compatible since it does not affect the wire format.
+ # This specific case also checks that the backwards compatibility checker
+ # does not throw an error due to the older version of the enum not
+ # specifying [Default].
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Extensible] enum E { One };',
+ new='[Extensible] enum E { [Default] One };')
+ ])
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Extensible] enum E { [Default] One, Two, };',
+ new='[Extensible] enum E { One, [Default] Two, };')
+ ])
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
new file mode 100644
index 00000000..e8ed36a7
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
@@ -0,0 +1,90 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+from mojom.generate import module as mojom
+
+
+class ConstTest(MojomParserTestCase):
+ """Tests constant parsing behavior."""
+
+ def testLiteralInt(self):
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'const int32 k = 42;')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(1, len(a.constants))
+ self.assertEqual('k', a.constants[0].mojom_name)
+ self.assertEqual('42', a.constants[0].value)
+
+ def testLiteralFloat(self):
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'const float k = 42.5;')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(1, len(a.constants))
+ self.assertEqual('k', a.constants[0].mojom_name)
+ self.assertEqual('42.5', a.constants[0].value)
+
+ def testLiteralString(self):
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'const string k = "woot";')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(1, len(a.constants))
+ self.assertEqual('k', a.constants[0].mojom_name)
+ self.assertEqual('"woot"', a.constants[0].value)
+
+ def testEnumConstant(self):
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'module a; enum E { kA = 41, kB };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, """\
+ import "a.mojom";
+ const a.E kE1 = a.E.kB;
+
+ // We also allow value names to be unqualified, implying scope from the
+ // constant's type.
+ const a.E kE2 = kB;
+ """)
+ self.ParseMojoms([a_mojom, b_mojom])
+ a = self.LoadModule(a_mojom)
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(1, len(a.enums))
+ self.assertEqual('E', a.enums[0].mojom_name)
+ self.assertEqual(2, len(b.constants))
+ self.assertEqual('kE1', b.constants[0].mojom_name)
+ self.assertEqual(a.enums[0], b.constants[0].kind)
+ self.assertEqual(a.enums[0].fields[1], b.constants[0].value.field)
+ self.assertEqual(42, b.constants[0].value.field.numeric_value)
+ self.assertEqual('kE2', b.constants[1].mojom_name)
+ self.assertEqual(a.enums[0].fields[1], b.constants[1].value.field)
+ self.assertEqual(42, b.constants[1].value.field.numeric_value)
+
+ def testConstantReference(self):
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'const int32 kA = 42; const int32 kB = kA;')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(2, len(a.constants))
+ self.assertEqual('kA', a.constants[0].mojom_name)
+ self.assertEqual('42', a.constants[0].value)
+ self.assertEqual('kB', a.constants[1].mojom_name)
+ self.assertEqual('42', a.constants[1].value)
+
+ def testImportedConstantReference(self):
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'const int32 kA = 42;')
+ b_mojom = 'b.mojom'
+ self.WriteFile(b_mojom, 'import "a.mojom"; const int32 kB = kA;')
+ self.ParseMojoms([a_mojom, b_mojom])
+ a = self.LoadModule(a_mojom)
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(1, len(a.constants))
+ self.assertEqual(1, len(b.constants))
+ self.assertEqual('kA', a.constants[0].mojom_name)
+ self.assertEqual('42', a.constants[0].value)
+ self.assertEqual('kB', b.constants[0].mojom_name)
+ self.assertEqual('42', b.constants[0].value)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
new file mode 100644
index 00000000..9269cde5
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
@@ -0,0 +1,120 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class EnumTest(MojomParserTestCase):
+ """Tests enum parsing behavior."""
+
+ def testExplicitValues(self):
+ """Verifies basic parsing of assigned integral values."""
+ types = self.ExtractTypes('enum E { kFoo=0, kBar=2, kBaz };')
+ self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
+ self.assertEqual(0, types['E'].fields[0].numeric_value)
+ self.assertEqual('kBar', types['E'].fields[1].mojom_name)
+ self.assertEqual(2, types['E'].fields[1].numeric_value)
+ self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
+ self.assertEqual(3, types['E'].fields[2].numeric_value)
+
+ def testImplicitValues(self):
+ """Verifies basic automatic assignment of integral values at parse time."""
+ types = self.ExtractTypes('enum E { kFoo, kBar, kBaz };')
+ self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
+ self.assertEqual(0, types['E'].fields[0].numeric_value)
+ self.assertEqual('kBar', types['E'].fields[1].mojom_name)
+ self.assertEqual(1, types['E'].fields[1].numeric_value)
+ self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
+ self.assertEqual(2, types['E'].fields[2].numeric_value)
+
+ def testSameEnumReference(self):
+ """Verifies that an enum value can be assigned from the value of another
+ field within the same enum."""
+ types = self.ExtractTypes('enum E { kA, kB, kFirst=kA };')
+ self.assertEqual('kA', types['E'].fields[0].mojom_name)
+ self.assertEqual(0, types['E'].fields[0].numeric_value)
+ self.assertEqual('kB', types['E'].fields[1].mojom_name)
+ self.assertEqual(1, types['E'].fields[1].numeric_value)
+ self.assertEqual('kFirst', types['E'].fields[2].mojom_name)
+ self.assertEqual(0, types['E'].fields[2].numeric_value)
+
+ def testSameModuleOtherEnumReference(self):
+ """Verifies that an enum value can be assigned from the value of a field
+ in another enum within the same module."""
+ types = self.ExtractTypes('enum E { kA, kB }; enum F { kA = E.kB };')
+ self.assertEqual(1, types['F'].fields[0].numeric_value)
+
+ def testImportedEnumReference(self):
+ """Verifies that an enum value can be assigned from the value of a field
+ in another enum within a different module."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'module a; enum E { kFoo=42, kBar };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(b_mojom,
+ 'module b; import "a.mojom"; enum F { kFoo = a.E.kBar };')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+
+ self.assertEqual('F', b.enums[0].mojom_name)
+ self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
+ self.assertEqual(43, b.enums[0].fields[0].numeric_value)
+
+ def testConstantReference(self):
+ """Verifies that an enum value can be assigned from the value of an
+ integral constant within the same module."""
+ types = self.ExtractTypes('const int32 kFoo = 42; enum E { kA = kFoo };')
+ self.assertEqual(42, types['E'].fields[0].numeric_value)
+
+ def testInvalidConstantReference(self):
+ """Verifies that enum values cannot be assigned from the value of
+ non-integral constants."""
+ with self.assertRaisesRegexp(ValueError, 'not an integer'):
+ self.ExtractTypes('const float kFoo = 1.0; enum E { kA = kFoo };')
+ with self.assertRaisesRegexp(ValueError, 'not an integer'):
+ self.ExtractTypes('const double kFoo = 1.0; enum E { kA = kFoo };')
+ with self.assertRaisesRegexp(ValueError, 'not an integer'):
+ self.ExtractTypes('const string kFoo = "lol"; enum E { kA = kFoo };')
+
+ def testImportedConstantReference(self):
+ """Verifies that an enum value can be assigned from the value of an integral
+ constant within an imported module."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'module a; const int32 kFoo = 37;')
+ b_mojom = 'b.mojom'
+ self.WriteFile(b_mojom,
+ 'module b; import "a.mojom"; enum F { kFoo = a.kFoo };')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+
+ self.assertEqual('F', b.enums[0].mojom_name)
+ self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
+ self.assertEqual(37, b.enums[0].fields[0].numeric_value)
+
+ def testEnumAttributesAreEnums(self):
+ """Verifies that enum values in attributes are really enum types."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'module a; enum E { kFoo, kBar };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, 'module b;'
+ 'import "a.mojom";'
+ '[MooCow=a.E.kFoo]'
+ 'interface Foo { Foo(); };')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(b.interfaces[0].attributes['MooCow'].mojom_name, 'kFoo')
+
+ def testConstantAttributes(self):
+ """Verifies that constants as attributes are translated to the constant."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(
+ a_mojom, 'module a;'
+ 'enum E { kFoo, kBar };'
+ 'const E kB = E.kFoo;'
+ '[Attr=kB] interface Hello { Foo(); };')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(a.interfaces[0].attributes['Attr'].mojom_name, 'kB')
+    self.assertEqual(a.interfaces[0].attributes['Attr'].value.mojom_name,
+                     'kFoo')
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py
new file mode 100644
index 00000000..5f014e1c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py
@@ -0,0 +1,84 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FeatureTest(MojomParserTestCase):
+ """Tests feature parsing behavior."""
+ def testFeatureOff(self):
+ """Verifies basic parsing of feature types."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ [AttributeOne=ValueOne]
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_DISABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = false;
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('false', types['kFeature'].constants[1].value)
+
+ def testFeatureOn(self):
+ """Verifies basic parsing of feature types."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_ENABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = true;
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('true', types['kFeature'].constants[1].value)
+
+ def testFeatureWeakKeyword(self):
+ """Verifies that `feature` is a weak keyword."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ [AttributeOne=ValueOne]
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_DISABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = false;
+ };
+ struct MyStruct {
+ bool feature = true;
+ };
+ interface InterfaceName {
+ Method(string feature) => (int32 feature);
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('false', types['kFeature'].constants[1].value)
+
+ def testFeatureAttributesAreFeatures(self):
+ """Verifies that feature values in attributes are really feature types."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(
+ a_mojom, 'module a;'
+ 'feature F { const string name = "f";'
+ 'const bool default_state = false; };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, 'module b;'
+ 'import "a.mojom";'
+ 'feature G'
+ '{const string name = "g"; const bool default_state = false;};'
+ '[Attri=a.F] interface Foo { Foo(); };'
+ '[Boink=G] interface Bar {};')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(b.interfaces[0].attributes['Attri'].mojom_name, 'F')
+ self.assertEqual(b.interfaces[1].attributes['Boink'].mojom_name, 'G')
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
new file mode 100644
index 00000000..a0edf0eb
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
@@ -0,0 +1,43 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+group("mojom") {
+ data = [
+ "__init__.py",
+ "error.py",
+ "fileutil.py",
+ "generate/__init__.py",
+ "generate/check.py",
+ "generate/generator.py",
+ "generate/module.py",
+ "generate/pack.py",
+ "generate/template_expander.py",
+ "generate/translate.py",
+ "parse/__init__.py",
+ "parse/ast.py",
+ "parse/conditional_features.py",
+ "parse/lexer.py",
+ "parse/parser.py",
+
+ # Third-party module dependencies
+ "//third_party/jinja2/",
+ "//third_party/ply/",
+ ]
+}
+
+group("tests") {
+ data = [
+ "fileutil_unittest.py",
+ "generate/generator_unittest.py",
+ "generate/module_unittest.py",
+ "generate/pack_unittest.py",
+ "generate/translate_unittest.py",
+ "parse/ast_unittest.py",
+ "parse/conditional_features_unittest.py",
+ "parse/lexer_unittest.py",
+ "parse/parser_unittest.py",
+ ]
+
+ public_deps = [ ":mojom" ]
+}
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
new file mode 100644
index 00000000..dd53b835
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
@@ -0,0 +1,28 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Error(Exception):
+ """Base class for Mojo IDL bindings parser/generator errors."""
+
+ def __init__(self, filename, message, lineno=None, addenda=None, **kwargs):
+ """|filename| is the (primary) file which caused the error, |message| is the
+ error message, |lineno| is the 1-based line number (or |None| if not
+ applicable/available), and |addenda| is a list of additional lines to append
+ to the final error message."""
+ Exception.__init__(self, **kwargs)
+ self.filename = filename
+ self.message = message
+ self.lineno = lineno
+ self.addenda = addenda
+
+ def __str__(self):
+ if self.lineno:
+ s = "%s:%d: Error: %s" % (self.filename, self.lineno, self.message)
+ else:
+ s = "%s: Error: %s" % (self.filename, self.message)
+ return "\n".join([s] + self.addenda) if self.addenda else s
+
+ def __repr__(self):
+ return str(self)
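For illustration, a minimal sketch (assuming the mojom package is importable) of how Error renders a diagnostic:

    from mojom.error import Error

    err = Error('foo.mojom', 'unexpected token', lineno=3,
                addenda=['  near "struct"'])
    print(err)
    # foo.mojom:3: Error: unexpected token
    #   near "struct"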
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
new file mode 100644
index 00000000..124f12c1
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
@@ -0,0 +1,44 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import os.path
+import sys
+
+
+def _GetDirAbove(dirname):
+ """Returns the directory "above" this file containing |dirname| (which must
+ also be "above" this file)."""
+ path = os.path.abspath(__file__)
+ while True:
+ path, tail = os.path.split(path)
+ if not tail:
+ return None
+ if tail == dirname:
+ return path
+
+
+def EnsureDirectoryExists(path, always_try_to_create=False):
+ """A wrapper for os.makedirs that does not error if the directory already
+ exists. A different process could be racing to create this directory."""
+
+ if not os.path.exists(path) or always_try_to_create:
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ # There may have been a race to create this directory.
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def AddLocalRepoThirdPartyDirToModulePath():
+ """Helper function to find the top-level directory of this script's repository
+ assuming the script falls somewhere within a 'mojo' directory, and insert the
+ top-level 'third_party' directory early in the module search path. Used to
+ ensure that third-party dependencies provided within the repository itself
+ (e.g. Chromium sources include snapshots of jinja2 and ply) are preferred over
+ locally installed system library packages."""
+ toplevel_dir = _GetDirAbove('mojo')
+ if toplevel_dir:
+ sys.path.insert(1, os.path.join(toplevel_dir, 'third_party'))
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
new file mode 100644
index 00000000..c93d2289
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import shutil
+import tempfile
+import unittest
+
+from mojom import fileutil
+
+class FileUtilTest(unittest.TestCase):
+ def testEnsureDirectoryExists(self):
+ """Test that EnsureDirectoryExists functions correctly."""
+
+ temp_dir = tempfile.mkdtemp()
+ try:
+ self.assertTrue(os.path.exists(temp_dir))
+
+ # Directory does not exist, yet.
+ full = os.path.join(temp_dir, "foo", "bar")
+ self.assertFalse(os.path.exists(full))
+
+ # Create the directory.
+ fileutil.EnsureDirectoryExists(full)
+ self.assertTrue(os.path.exists(full))
+
+ # Trying to create it again does not cause an error.
+ fileutil.EnsureDirectoryExists(full)
+ self.assertTrue(os.path.exists(full))
+
+ # Bypass check for directory existence to tickle error handling that
+ # occurs in response to a race.
+ fileutil.EnsureDirectoryExists(full, always_try_to_create=True)
+ self.assertTrue(os.path.exists(full))
+ finally:
+ shutil.rmtree(temp_dir)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py
new file mode 100644
index 00000000..1efe2022
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Code shared by the various pre-generation mojom checkers."""
+
+
+class CheckException(Exception):
+ def __init__(self, module, message):
+ self.module = module
+ self.message = message
+ super().__init__(self.message)
+
+ def __str__(self):
+ return "Failed mojo pre-generation check for {}:\n{}".format(
+ self.module.path, self.message)
+
+
+class Check:
+ def __init__(self, module):
+ self.module = module
+
+ def CheckModule(self):
+ """ Subclass should return True if its Checks pass, and throw an
+ exception otherwise. CheckModule will be called immediately before
+ mojom.generate.Generator.GenerateFiles()"""
+ raise NotImplementedError("Subclasses must override/implement this method")
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
new file mode 100644
index 00000000..96fe3a2d
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
@@ -0,0 +1,328 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Code shared by the various language-specific code generators."""
+
+from __future__ import print_function
+
+from functools import partial
+import os.path
+import re
+
+from mojom import fileutil
+from mojom.generate import module as mojom
+from mojom.generate import pack
+
+
+def ExpectedArraySize(kind):
+ if mojom.IsArrayKind(kind):
+ return kind.length
+ return None
+
+
+def SplitCamelCase(identifier):
+ """Splits a camel-cased |identifier| and returns a list of lower-cased
+ strings.
+ """
+ # Add underscores after uppercase letters when appropriate. An uppercase
+ # letter is considered the end of a word if it is followed by an upper and a
+ # lower. E.g. URLLoaderFactory -> URL_LoaderFactory
+ identifier = re.sub('([A-Z][0-9]*)(?=[A-Z][0-9]*[a-z])', r'\1_', identifier)
+ # Add underscores after lowercase letters when appropriate. A lowercase letter
+ # is considered the end of a word if it is followed by an upper.
+ # E.g. URLLoaderFactory -> URLLoader_Factory
+ identifier = re.sub('([a-z][0-9]*)(?=[A-Z])', r'\1_', identifier)
+ return [x.lower() for x in identifier.split('_')]
+
+
+def ToCamel(identifier, lower_initial=False, digits_split=False, delimiter='_'):
+ """Splits |identifier| using |delimiter|, makes the first character of each
+ word uppercased (but makes the first character of the first word lowercased
+ if |lower_initial| is set to True), and joins the words. Please note that for
+ each word, all the characters except the first one are untouched.
+ """
+ result = ''
+ capitalize_next = True
+ for i in range(len(identifier)):
+ if identifier[i] == delimiter:
+ capitalize_next = True
+ elif digits_split and identifier[i].isdigit():
+ capitalize_next = True
+ result += identifier[i]
+ elif capitalize_next:
+ capitalize_next = False
+ result += identifier[i].upper()
+ else:
+ result += identifier[i]
+
+ if lower_initial and result:
+ result = result[0].lower() + result[1:]
+
+ return result
+
+
+def _ToSnakeCase(identifier, upper=False):
+ """Splits camel-cased |identifier| into lower case words, removes the first
+ word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
+ "URL_LOADER_FACTORY" if upper, otherwise "url_loader_factory".
+ """
+ words = SplitCamelCase(identifier)
+ if words[0] == 'k' and len(words) > 1:
+ words = words[1:]
+
+ # Variables cannot start with a digit
+ if (words[0][0].isdigit()):
+ words[0] = '_' + words[0]
+
+
+ if upper:
+ words = map(lambda x: x.upper(), words)
+
+ return '_'.join(words)
+
+
+def ToUpperSnakeCase(identifier):
+ """Splits camel-cased |identifier| into lower case words, removes the first
+ word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
+ "URL_LOADER_FACTORY".
+ """
+ return _ToSnakeCase(identifier, upper=True)
+
+
+def ToLowerSnakeCase(identifier):
+ """Splits camel-cased |identifier| into lower case words, removes the first
+ word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
+ "url_loader_factory".
+ """
+ return _ToSnakeCase(identifier, upper=False)
+
+
+class Stylizer:
+ """Stylizers specify naming rules to map mojom names to names in generated
+ code. For example, if you would like method_name in mojom to be mapped to
+ MethodName in the generated code, you need to define a subclass of Stylizer
+ and override StylizeMethod to do the conversion."""
+
+ def StylizeConstant(self, mojom_name):
+ return mojom_name
+
+ def StylizeField(self, mojom_name):
+ return mojom_name
+
+ def StylizeStruct(self, mojom_name):
+ return mojom_name
+
+ def StylizeUnion(self, mojom_name):
+ return mojom_name
+
+ def StylizeParameter(self, mojom_name):
+ return mojom_name
+
+ def StylizeMethod(self, mojom_name):
+ return mojom_name
+
+ def StylizeInterface(self, mojom_name):
+ return mojom_name
+
+ def StylizeEnumField(self, mojom_name):
+ return mojom_name
+
+ def StylizeEnum(self, mojom_name):
+ return mojom_name
+
+ def StylizeFeature(self, mojom_name):
+ return mojom_name
+
+ def StylizeModule(self, mojom_namespace):
+ return mojom_namespace
+
+
+def WriteFile(contents, full_path):
+  # If |contents| is the same as the existing file content, skip the update.
+ if not isinstance(contents, bytes):
+ data = contents.encode('utf8')
+ else:
+ data = contents
+
+ if os.path.isfile(full_path):
+ with open(full_path, 'rb') as destination_file:
+ if destination_file.read() == data:
+ return
+
+ # Make sure the containing directory exists.
+ full_dir = os.path.dirname(full_path)
+ fileutil.EnsureDirectoryExists(full_dir)
+
+ # Dump the data to disk.
+ with open(full_path, 'wb') as f:
+ f.write(data)
+
+
+def AddComputedData(module):
+ """Adds computed data to the given module. The data is computed once and
+ used repeatedly in the generation process."""
+
+ def _AddStructComputedData(exported, struct):
+ struct.packed = pack.PackedStruct(struct)
+ struct.bytes = pack.GetByteLayout(struct.packed)
+ struct.versions = pack.GetVersionInfo(struct.packed)
+ struct.exported = exported
+
+ def _AddInterfaceComputedData(interface):
+ interface.version = 0
+ for method in interface.methods:
+ # this field is never scrambled
+ method.sequential_ordinal = method.ordinal
+
+ if method.min_version is not None:
+ interface.version = max(interface.version, method.min_version)
+
+ method.param_struct = _GetStructFromMethod(method)
+ if interface.stable:
+ method.param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
+ if method.explicit_ordinal is None:
+ raise Exception(
+ 'Stable interfaces must declare explicit method ordinals. The '
+ 'method %s on stable interface %s does not declare an explicit '
+ 'ordinal.' % (method.mojom_name, interface.qualified_name))
+ interface.version = max(interface.version,
+ method.param_struct.versions[-1].version)
+
+ if method.response_parameters is not None:
+ method.response_param_struct = _GetResponseStructFromMethod(method)
+ if interface.stable:
+ method.response_param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
+ interface.version = max(
+ interface.version,
+ method.response_param_struct.versions[-1].version)
+ else:
+ method.response_param_struct = None
+
+ def _GetStructFromMethod(method):
+ """Converts a method's parameters into the fields of a struct."""
+ params_class = "%s_%s_Params" % (method.interface.mojom_name,
+ method.mojom_name)
+ struct = mojom.Struct(params_class,
+ module=method.interface.module,
+ attributes={})
+ for param in method.parameters:
+ struct.AddField(
+ param.mojom_name,
+ param.kind,
+ param.ordinal,
+ attributes=param.attributes)
+ _AddStructComputedData(False, struct)
+ return struct
+
+ def _GetResponseStructFromMethod(method):
+ """Converts a method's response_parameters into the fields of a struct."""
+ params_class = "%s_%s_ResponseParams" % (method.interface.mojom_name,
+ method.mojom_name)
+ struct = mojom.Struct(params_class,
+ module=method.interface.module,
+ attributes={})
+ for param in method.response_parameters:
+ struct.AddField(
+ param.mojom_name,
+ param.kind,
+ param.ordinal,
+ attributes=param.attributes)
+ _AddStructComputedData(False, struct)
+ return struct
+
+ for struct in module.structs:
+ _AddStructComputedData(True, struct)
+ for interface in module.interfaces:
+ _AddInterfaceComputedData(interface)
+
+
+class Generator:
+ # Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
+ # files to stdout.
+ def __init__(self,
+ module,
+ output_dir=None,
+ typemap=None,
+ variant=None,
+ bytecode_path=None,
+ for_blink=False,
+ js_generate_struct_deserializers=False,
+ export_attribute=None,
+ export_header=None,
+ generate_non_variant_code=False,
+ support_lazy_serialization=False,
+ disallow_native_types=False,
+ disallow_interfaces=False,
+ generate_message_ids=False,
+ generate_fuzzing=False,
+ enable_kythe_annotations=False,
+ extra_cpp_template_paths=None,
+ generate_extra_cpp_only=False):
+ self.module = module
+ self.output_dir = output_dir
+ self.typemap = typemap or {}
+ self.variant = variant
+ self.bytecode_path = bytecode_path
+ self.for_blink = for_blink
+ self.js_generate_struct_deserializers = js_generate_struct_deserializers
+ self.export_attribute = export_attribute
+ self.export_header = export_header
+ self.generate_non_variant_code = generate_non_variant_code
+ self.support_lazy_serialization = support_lazy_serialization
+ self.disallow_native_types = disallow_native_types
+ self.disallow_interfaces = disallow_interfaces
+ self.generate_message_ids = generate_message_ids
+ self.generate_fuzzing = generate_fuzzing
+ self.enable_kythe_annotations = enable_kythe_annotations
+ self.extra_cpp_template_paths = extra_cpp_template_paths
+ self.generate_extra_cpp_only = generate_extra_cpp_only
+
+ def Write(self, contents, filename):
+ if self.output_dir is None:
+ print(contents)
+ return
+ full_path = os.path.join(self.output_dir, filename)
+ WriteFile(contents, full_path)
+
+ def OptimizeEmpty(self, contents):
+ # Look for .cc files that contain no actual code. There are many of these
+ # and they collectively take a while to compile.
+ lines = contents.splitlines()
+
+ for line in lines:
+ if line.startswith('#') or line.startswith('//'):
+ continue
+ if re.match(r'namespace .* {', line) or re.match(r'}.*//.*namespace',
+ line):
+ continue
+ if line.strip():
+ # There is some actual code - return the unmodified contents.
+ return contents
+
+ # If we reach here then we have a .cc file with no actual code. The
+ # includes are therefore unneeded and can be removed.
+ new_lines = [line for line in lines if not line.startswith('#include')]
+ if len(new_lines) < len(lines):
+ new_lines.append('')
+ new_lines.append('// Includes removed due to no code being generated.')
+ return '\n'.join(new_lines)
+
+ def WriteWithComment(self, contents, filename):
+ generator_name = "mojom_bindings_generator.py"
+ comment = r"// %s is auto generated by %s, do not edit" % (filename,
+ generator_name)
+    contents = comment + '\n' + '\n' + contents
+ if filename.endswith('.cc'):
+ contents = self.OptimizeEmpty(contents)
+ self.Write(contents, filename)
+
+ def GenerateFiles(self, args):
+ raise NotImplementedError("Subclasses must override/implement this method")
+
+ def GetJinjaParameters(self):
+ """Returns default constructor parameters for the jinja environment."""
+ return {}
+
+ def GetGlobals(self):
+ """Returns global mappings for the template generation."""
+ return {}
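As the Stylizer docstring notes, language backends subclass it to map mojom names onto generated-code names. A minimal sketch (illustrative; the class name is made up) using the string helpers above:

    from mojom.generate import generator

    class UpperCamelStylizer(generator.Stylizer):
      def StylizeMethod(self, mojom_name):
        return generator.ToCamel(mojom_name)           # method_name -> MethodName

      def StylizeConstant(self, mojom_name):
        return generator.ToUpperSnakeCase(mojom_name)  # kSomeValue -> SOME_VALUE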
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
new file mode 100644
index 00000000..7143e07c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
@@ -0,0 +1,71 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import importlib.util
+import os.path
+import sys
+import unittest
+
+def _GetDirAbove(dirname):
+ """Returns the directory "above" this file containing |dirname| (which must
+ also be "above" this file)."""
+ path = os.path.abspath(__file__)
+ while True:
+ path, tail = os.path.split(path)
+ assert tail
+ if tail == dirname:
+ return path
+
+
+try:
+ importlib.util.find_spec("mojom")
+except ImportError:
+ sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
+from mojom.generate import generator
+
+class StringManipulationTest(unittest.TestCase):
+ """generator contains some string utilities, this tests only those."""
+
+ def testSplitCamelCase(self):
+ self.assertEquals(["camel", "case"], generator.SplitCamelCase("CamelCase"))
+ self.assertEquals(["url", "loader", "factory"],
+ generator.SplitCamelCase('URLLoaderFactory'))
+ self.assertEquals(["get99", "entries"],
+ generator.SplitCamelCase('Get99Entries'))
+ self.assertEquals(["get99entries"],
+ generator.SplitCamelCase('Get99entries'))
+
+ def testToCamel(self):
+ self.assertEquals("CamelCase", generator.ToCamel("camel_case"))
+ self.assertEquals("CAMELCASE", generator.ToCamel("CAMEL_CASE"))
+ self.assertEquals("camelCase",
+ generator.ToCamel("camel_case", lower_initial=True))
+ self.assertEquals("CamelCase", generator.ToCamel(
+ "camel case", delimiter=' '))
+ self.assertEquals("CaMelCaSe", generator.ToCamel("caMel_caSe"))
+ self.assertEquals("L2Tp", generator.ToCamel("l2tp", digits_split=True))
+ self.assertEquals("l2tp", generator.ToCamel("l2tp", lower_initial=True))
+
+ def testToSnakeCase(self):
+ self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCase"))
+ self.assertEquals("snake_case", generator.ToLowerSnakeCase("snakeCase"))
+ self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCASE"))
+ self.assertEquals("snake_d3d11_case",
+ generator.ToLowerSnakeCase("SnakeD3D11Case"))
+ self.assertEquals("snake_d3d11_case",
+ generator.ToLowerSnakeCase("SnakeD3d11Case"))
+ self.assertEquals("snake_d3d11_case",
+ generator.ToLowerSnakeCase("snakeD3d11Case"))
+ self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase"))
+ self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase"))
+ self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE"))
+ self.assertEquals("SNAKE_D3D11_CASE",
+ generator.ToUpperSnakeCase("SnakeD3D11Case"))
+ self.assertEquals("SNAKE_D3D11_CASE",
+ generator.ToUpperSnakeCase("SnakeD3d11Case"))
+ self.assertEquals("SNAKE_D3D11_CASE",
+ generator.ToUpperSnakeCase("snakeD3d11Case"))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
new file mode 100644
index 00000000..ca71059d
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
@@ -0,0 +1,2059 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This module's classes provide an interface to mojo modules. Modules are
+# collections of interfaces and structs to be used by mojo ipc clients and
+# servers.
+#
+# A simple interface would be created this way:
+# module = mojom.generate.module.Module('Foo')
+# interface = module.AddInterface('Bar')
+# method = interface.AddMethod('Tat', 0)
+# method.AddParameter('baz', 0, mojom.INT32)
+
+import pickle
+from collections import OrderedDict
+from uuid import UUID
+
+# pylint: disable=raise-missing-from
+
+
+class BackwardCompatibilityChecker:
+ """Used for memoization while recursively checking two type definitions for
+ backward-compatibility."""
+
+ def __init__(self):
+ self._cache = {}
+
+ def IsBackwardCompatible(self, new_kind, old_kind):
+ key = (new_kind, old_kind)
+ result = self._cache.get(key)
+ if result is None:
+ # Assume they're compatible at first to effectively ignore recursive
+ # checks between these types, e.g. if both kinds are a struct or union
+ # that references itself in a field.
+ self._cache[key] = True
+ result = new_kind.IsBackwardCompatible(old_kind, self)
+ self._cache[key] = result
+ return result
+
+
+# We use our own version of __repr__ when displaying the AST, as the
+# AST currently doesn't capture which nodes are reference (e.g. to
+# types) and which nodes are definitions. This allows us to e.g. print
+# the definition of a struct when it's defined inside a module, but
+# only print its name when it's referenced in e.g. a method parameter.
+def Repr(obj, as_ref=True):
+ """A version of __repr__ that can distinguish references.
+
+ Sometimes we like to print an object's full representation
+ (e.g. with its fields) and sometimes we just want to reference an
+ object that was printed in full elsewhere. This function allows us
+ to make that distinction.
+
+ Args:
+ obj: The object whose string representation we compute.
+ as_ref: If True, use the short reference representation.
+
+ Returns:
+ A str representation of |obj|.
+ """
+ if hasattr(obj, 'Repr'):
+ return obj.Repr(as_ref=as_ref)
+ # Since we cannot implement Repr for existing container types, we
+ # handle them here.
+ if isinstance(obj, list):
+ if not obj:
+ return '[]'
+ return ('[\n%s\n]' %
+ (',\n'.join(' %s' % Repr(elem, as_ref).replace('\n', '\n ')
+ for elem in obj)))
+ if isinstance(obj, dict):
+ if not obj:
+ return '{}'
+ return ('{\n%s\n}' % (',\n'.join(' %s: %s' %
+ (Repr(key, as_ref).replace('\n', '\n '),
+ Repr(val, as_ref).replace('\n', '\n '))
+ for key, val in obj.items())))
+ return repr(obj)
+
+
+def GenericRepr(obj, names):
+ """Compute generic Repr for |obj| based on the attributes in |names|.
+
+ Args:
+ obj: The object to compute a Repr for.
+ names: A dict from attribute names to include, to booleans
+ specifying whether those attributes should be shown as
+ references or not.
+
+ Returns:
+ A str representation of |obj|.
+ """
+
+ def ReprIndent(name, as_ref):
+ return ' %s=%s' % (name, Repr(getattr(obj, name), as_ref).replace(
+ '\n', '\n '))
+
+ return '%s(\n%s\n)' % (obj.__class__.__name__, ',\n'.join(
+ ReprIndent(name, as_ref) for (name, as_ref) in names.items()))
+
+
+class Kind:
+ """Kind represents a type (e.g. int8, string).
+
+ Attributes:
+ spec: A string uniquely identifying the type. May be None.
+ module: {Module} The defining module. Set to None for built-in types.
+ parent_kind: The enclosing type. For example, an enum defined
+ inside an interface has that interface as its parent. May be None.
+ is_nullable: True if the type is nullable.
+ """
+
+ def __init__(self, spec=None, is_nullable=False, module=None):
+ self.spec = spec
+ self.module = module
+ self.parent_kind = None
+ self.is_nullable = is_nullable
+ self.shared_definition = {}
+
+ @classmethod
+ def AddSharedProperty(cls, name):
+ """Adds a property |name| to |cls|, which accesses the corresponding item in
+ |shared_definition|.
+
+    The reason for adding this indirection is to enable sharing the definition
+    between a reference kind and its nullable variation. For example:
+ a = Struct('test_struct_1')
+ b = a.MakeNullableKind()
+ a.name = 'test_struct_2'
+ print(b.name) # Outputs 'test_struct_2'.
+ """
+ def Get(self):
+ try:
+ return self.shared_definition[name]
+ except KeyError: # Must raise AttributeError if property doesn't exist.
+ raise AttributeError
+
+ def Set(self, value):
+ self.shared_definition[name] = value
+
+ setattr(cls, name, property(Get, Set))
+
+ def Repr(self, as_ref=True):
+ # pylint: disable=unused-argument
+ return '<%s spec=%r is_nullable=%r>' % (self.__class__.__name__, self.spec,
+ self.is_nullable)
+
+ def __repr__(self):
+ # Gives us a decent __repr__ for all kinds.
+ return self.Repr()
+
+ def __eq__(self, rhs):
+ # pylint: disable=unidiomatic-typecheck
+ return (type(self) == type(rhs)
+ and (self.spec, self.parent_kind, self.is_nullable)
+ == (rhs.spec, rhs.parent_kind, rhs.is_nullable))
+
+ def __hash__(self):
+ # TODO(crbug.com/1060471): Remove this and other __hash__ methods on Kind
+ # and its subclasses. This is to support existing generator code which uses
+ # some primitive Kinds as dict keys. The default hash (object identity)
+ # breaks these dicts when a pickled Module instance is unpickled and used
+ # during a subsequent run of the parser.
+ return hash((self.spec, self.parent_kind, self.is_nullable))
+
+ # pylint: disable=unused-argument
+ def IsBackwardCompatible(self, rhs, checker):
+ return self == rhs
+
+
+class ValueKind(Kind):
+ """ValueKind represents values that aren't reference kinds.
+
+ The primary difference is the wire representation for nullable value kinds
+ still reserves space for the value type itself, even if that value itself
+ is logically null.
+ """
+ def __init__(self, spec=None, is_nullable=False, module=None):
+ assert spec is None or is_nullable == spec.startswith('?')
+ Kind.__init__(self, spec, is_nullable, module)
+
+ def MakeNullableKind(self):
+ assert not self.is_nullable
+
+ if self == BOOL:
+ return NULLABLE_BOOL
+ if self == INT8:
+ return NULLABLE_INT8
+ if self == INT16:
+ return NULLABLE_INT16
+ if self == INT32:
+ return NULLABLE_INT32
+ if self == INT64:
+ return NULLABLE_INT64
+ if self == UINT8:
+ return NULLABLE_UINT8
+ if self == UINT16:
+ return NULLABLE_UINT16
+ if self == UINT32:
+ return NULLABLE_UINT32
+ if self == UINT64:
+ return NULLABLE_UINT64
+ if self == FLOAT:
+ return NULLABLE_FLOAT
+ if self == DOUBLE:
+ return NULLABLE_DOUBLE
+
+ nullable_kind = type(self)()
+ nullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ nullable_kind.spec = '?' + self.spec
+ nullable_kind.is_nullable = True
+ nullable_kind.parent_kind = self.parent_kind
+ nullable_kind.module = self.module
+
+ return nullable_kind
+
+ def MakeUnnullableKind(self):
+ assert self.is_nullable
+
+ if self == NULLABLE_BOOL:
+ return BOOL
+ if self == NULLABLE_INT8:
+ return INT8
+ if self == NULLABLE_INT16:
+ return INT16
+ if self == NULLABLE_INT32:
+ return INT32
+ if self == NULLABLE_INT64:
+ return INT64
+ if self == NULLABLE_UINT8:
+ return UINT8
+ if self == NULLABLE_UINT16:
+ return UINT16
+ if self == NULLABLE_UINT32:
+ return UINT32
+ if self == NULLABLE_UINT64:
+ return UINT64
+ if self == NULLABLE_FLOAT:
+ return FLOAT
+ if self == NULLABLE_DOUBLE:
+ return DOUBLE
+
+ nullable_kind = type(self)()
+ nullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ nullable_kind.spec = self.spec[1:]
+ nullable_kind.is_nullable = False
+ nullable_kind.parent_kind = self.parent_kind
+ nullable_kind.module = self.module
+
+ return nullable_kind
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, ValueKind) and super().__eq__(rhs))
+
+ def __hash__(self): # pylint: disable=useless-super-delegation
+ return super().__hash__()
+
+
+class ReferenceKind(Kind):
+ """ReferenceKind represents pointer and handle types.
+
+ A type is nullable if null (for pointer types) or invalid handle (for handle
+ types) is a legal value for the type.
+ """
+
+ def __init__(self, spec=None, is_nullable=False, module=None):
+ assert spec is None or is_nullable == spec.startswith('?')
+ Kind.__init__(self, spec, is_nullable, module)
+
+ def MakeNullableKind(self):
+ assert not self.is_nullable
+
+ if self == STRING:
+ return NULLABLE_STRING
+ if self == HANDLE:
+ return NULLABLE_HANDLE
+ if self == DCPIPE:
+ return NULLABLE_DCPIPE
+ if self == DPPIPE:
+ return NULLABLE_DPPIPE
+ if self == MSGPIPE:
+ return NULLABLE_MSGPIPE
+ if self == SHAREDBUFFER:
+ return NULLABLE_SHAREDBUFFER
+ if self == PLATFORMHANDLE:
+ return NULLABLE_PLATFORMHANDLE
+
+ nullable_kind = type(self)()
+ nullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ nullable_kind.spec = '?' + self.spec
+ nullable_kind.is_nullable = True
+ nullable_kind.parent_kind = self.parent_kind
+ nullable_kind.module = self.module
+
+ return nullable_kind
+
+ def MakeUnnullableKind(self):
+ assert self.is_nullable
+
+ if self == NULLABLE_STRING:
+ return STRING
+ if self == NULLABLE_HANDLE:
+ return HANDLE
+ if self == NULLABLE_DCPIPE:
+ return DCPIPE
+ if self == NULLABLE_DPPIPE:
+ return DPPIPE
+ if self == NULLABLE_MSGPIPE:
+ return MSGPIPE
+ if self == NULLABLE_SHAREDBUFFER:
+ return SHAREDBUFFER
+ if self == NULLABLE_PLATFORMHANDLE:
+ return PLATFORMHANDLE
+
+ unnullable_kind = type(self)()
+ unnullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ assert self.spec[0] == '?'
+ unnullable_kind.spec = self.spec[1:]
+ unnullable_kind.is_nullable = False
+ unnullable_kind.parent_kind = self.parent_kind
+ unnullable_kind.module = self.module
+
+ return unnullable_kind
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, ReferenceKind) and super().__eq__(rhs))
+
+ def __hash__(self): # pylint: disable=useless-super-delegation
+ return super().__hash__()
+
+
+# Initialize the set of primitive types. These can be accessed by clients.
+BOOL = ValueKind('b')
+INT8 = ValueKind('i8')
+INT16 = ValueKind('i16')
+INT32 = ValueKind('i32')
+INT64 = ValueKind('i64')
+UINT8 = ValueKind('u8')
+UINT16 = ValueKind('u16')
+UINT32 = ValueKind('u32')
+UINT64 = ValueKind('u64')
+FLOAT = ValueKind('f')
+DOUBLE = ValueKind('d')
+NULLABLE_BOOL = ValueKind('?b', True)
+NULLABLE_INT8 = ValueKind('?i8', True)
+NULLABLE_INT16 = ValueKind('?i16', True)
+NULLABLE_INT32 = ValueKind('?i32', True)
+NULLABLE_INT64 = ValueKind('?i64', True)
+NULLABLE_UINT8 = ValueKind('?u8', True)
+NULLABLE_UINT16 = ValueKind('?u16', True)
+NULLABLE_UINT32 = ValueKind('?u32', True)
+NULLABLE_UINT64 = ValueKind('?u64', True)
+NULLABLE_FLOAT = ValueKind('?f', True)
+NULLABLE_DOUBLE = ValueKind('?d', True)
+STRING = ReferenceKind('s')
+HANDLE = ReferenceKind('h')
+DCPIPE = ReferenceKind('h:d:c')
+DPPIPE = ReferenceKind('h:d:p')
+MSGPIPE = ReferenceKind('h:m')
+SHAREDBUFFER = ReferenceKind('h:s')
+PLATFORMHANDLE = ReferenceKind('h:p')
+NULLABLE_STRING = ReferenceKind('?s', True)
+NULLABLE_HANDLE = ReferenceKind('?h', True)
+NULLABLE_DCPIPE = ReferenceKind('?h:d:c', True)
+NULLABLE_DPPIPE = ReferenceKind('?h:d:p', True)
+NULLABLE_MSGPIPE = ReferenceKind('?h:m', True)
+NULLABLE_SHAREDBUFFER = ReferenceKind('?h:s', True)
+NULLABLE_PLATFORMHANDLE = ReferenceKind('?h:p', True)
+
+# Collection of all Primitive types
+PRIMITIVES = (
+ BOOL,
+ INT8,
+ INT16,
+ INT32,
+ INT64,
+ UINT8,
+ UINT16,
+ UINT32,
+ UINT64,
+ FLOAT,
+ DOUBLE,
+ NULLABLE_BOOL,
+ NULLABLE_INT8,
+ NULLABLE_INT16,
+ NULLABLE_INT32,
+ NULLABLE_INT64,
+ NULLABLE_UINT8,
+ NULLABLE_UINT16,
+ NULLABLE_UINT32,
+ NULLABLE_UINT64,
+ NULLABLE_FLOAT,
+ NULLABLE_DOUBLE,
+ STRING,
+ HANDLE,
+ DCPIPE,
+ DPPIPE,
+ MSGPIPE,
+ SHAREDBUFFER,
+ PLATFORMHANDLE,
+ NULLABLE_STRING,
+ NULLABLE_HANDLE,
+ NULLABLE_DCPIPE,
+ NULLABLE_DPPIPE,
+ NULLABLE_MSGPIPE,
+ NULLABLE_SHAREDBUFFER,
+ NULLABLE_PLATFORMHANDLE,
+)
+
+ATTRIBUTE_MIN_VERSION = 'MinVersion'
+ATTRIBUTE_DEFAULT = 'Default'
+ATTRIBUTE_EXTENSIBLE = 'Extensible'
+ATTRIBUTE_NO_INTERRUPT = 'NoInterrupt'
+ATTRIBUTE_STABLE = 'Stable'
+ATTRIBUTE_SUPPORTS_URGENT = 'SupportsUrgent'
+ATTRIBUTE_SYNC = 'Sync'
+ATTRIBUTE_UNLIMITED_SIZE = 'UnlimitedSize'
+ATTRIBUTE_UUID = 'Uuid'
+ATTRIBUTE_SERVICE_SANDBOX = 'ServiceSandbox'
+ATTRIBUTE_REQUIRE_CONTEXT = 'RequireContext'
+ATTRIBUTE_ALLOWED_CONTEXT = 'AllowedContext'
+ATTRIBUTE_RUNTIME_FEATURE = 'RuntimeFeature'
+
+
+class NamedValue:
+ def __init__(self, module, parent_kind, mojom_name):
+ self.module = module
+ self.parent_kind = parent_kind
+ self.mojom_name = mojom_name
+
+ def GetSpec(self):
+ return (self.module.GetNamespacePrefix() +
+ (self.parent_kind and
+ (self.parent_kind.mojom_name + '.') or "") + self.mojom_name)
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, NamedValue)
+ and (self.parent_kind, self.mojom_name) == (rhs.parent_kind,
+ rhs.mojom_name))
+
+ def __hash__(self):
+ return hash((self.parent_kind, self.mojom_name))
+
+
+class BuiltinValue:
+ def __init__(self, value):
+ self.value = value
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, BuiltinValue) and self.value == rhs.value
+
+
+class ConstantValue(NamedValue):
+ def __init__(self, module, parent_kind, constant):
+ NamedValue.__init__(self, module, parent_kind, constant.mojom_name)
+ self.constant = constant
+
+ @property
+ def name(self):
+ return self.constant.name
+
+
+class EnumValue(NamedValue):
+ def __init__(self, module, enum, field):
+ NamedValue.__init__(self, module, enum.parent_kind, field.mojom_name)
+ self.field = field
+ self.enum = enum
+
+ def GetSpec(self):
+ return (self.module.GetNamespacePrefix() +
+ (self.parent_kind and (self.parent_kind.mojom_name + '.') or "") +
+ self.enum.mojom_name + '.' + self.mojom_name)
+
+ @property
+ def name(self):
+ return self.field.name
+
+
+class Constant:
+ def __init__(self, mojom_name=None, kind=None, value=None, parent_kind=None):
+ self.mojom_name = mojom_name
+ self.name = None
+ self.kind = kind
+ self.value = value
+ self.parent_kind = parent_kind
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeConstant(self.mojom_name)
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, Constant)
+ and (self.mojom_name, self.kind, self.value,
+ self.parent_kind) == (rhs.mojom_name, rhs.kind, rhs.value,
+ rhs.parent_kind))
+
+
+class Field:
+ def __init__(self,
+ mojom_name=None,
+ kind=None,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ if self.__class__.__name__ == 'Field':
+ raise Exception()
+ self.mojom_name = mojom_name
+ self.name = None
+ self.kind = kind
+ self.ordinal = ordinal
+ self.default = default
+ self.attributes = attributes
+
+ def Repr(self, as_ref=True):
+ # pylint: disable=unused-argument
+ # Fields are only referenced by objects which define them and thus
+ # they are always displayed as non-references.
+ return GenericRepr(self, {'mojom_name': False, 'kind': True})
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeField(self.mojom_name)
+
+ @property
+ def min_version(self):
+ return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
+ if self.attributes else None
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, Field)
+ and (self.mojom_name, self.kind, self.ordinal, self.default,
+ self.attributes) == (rhs.mojom_name, rhs.kind, rhs.ordinal,
+ rhs.default, rhs.attributes))
+
+ def __hash__(self):
+ return hash((self.mojom_name, self.kind, self.ordinal, self.default))
+
+
+class StructField(Field):
+ def __hash__(self):
+ return super(Field, self).__hash__()
+
+
+class UnionField(Field):
+ def __init__(self,
+ mojom_name=None,
+ kind=None,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ Field.__init__(self, mojom_name, kind, ordinal, default, attributes)
+
+ @property
+ def is_default(self):
+ return self.attributes.get(ATTRIBUTE_DEFAULT, False) \
+ if self.attributes else False
+
+
+def _IsFieldBackwardCompatible(new_field, old_field, checker):
+ if (new_field.min_version or 0) != (old_field.min_version or 0):
+ return False
+
+ return checker.IsBackwardCompatible(new_field.kind, old_field.kind)
+
+
+class Feature(ReferenceKind):
+ """A runtime enabled feature defined from mojom.
+
+ Attributes:
+ mojom_name: {str} The name of the feature type as defined in mojom.
+ name: {str} The stylized name. (Note: not the "name" used by FeatureList.)
+ constants: {List[Constant]} The constants defined in the feature scope.
+ attributes: {dict} Additional information about the feature.
+ """
+
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ReferenceKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.constants = []
+ self.attributes = attributes
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeFeature(self.mojom_name)
+ for constant in self.constants:
+ constant.Stylize(stylizer)
+
+
+class Struct(ReferenceKind):
+ """A struct with typed fields.
+
+ Attributes:
+ mojom_name: {str} The name of the struct type as defined in mojom.
+ name: {str} The stylized name.
+ native_only: {bool} Does the struct have a body (i.e. any fields) or is it
+ purely a native struct.
+ custom_serializer: {bool} Should we generate a serializer for the struct or
+ will one be provided by non-generated code.
+ fields: {List[StructField]} The members of the struct.
+ enums: {List[Enum]} The enums defined in the struct scope.
+ constants: {List[Constant]} The constants defined in the struct scope.
+ attributes: {dict} Additional information about the struct, such as
+ if it's a native struct.
+ """
+
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('native_only')
+ Kind.AddSharedProperty('custom_serializer')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('enums')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ReferenceKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.native_only = False
+ self.custom_serializer = False
+ self.fields = []
+ self.enums = []
+ self.constants = []
+ self.attributes = attributes
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s mojom_name=%r module=%s>' % (self.__class__.__name__,
+ self.mojom_name,
+ Repr(self.module, as_ref=True))
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'fields': False,
+ 'module': True
+ })
+
+ def AddField(self,
+ mojom_name,
+ kind,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ field = StructField(mojom_name, kind, ordinal, default, attributes)
+ self.fields.append(field)
+ return field
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeStruct(self.mojom_name)
+ for field in self.fields:
+ field.Stylize(stylizer)
+ for enum in self.enums:
+ enum.Stylize(stylizer)
+ for constant in self.constants:
+ constant.Stylize(stylizer)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ """This struct is backward-compatible with rhs (older_struct) if and only if
+ all of the following conditions hold:
+ - Any newly added field is tagged with a [MinVersion] attribute specifying
+ a version number greater than all previously used [MinVersion]
+ attributes within the struct.
+ - All fields present in rhs remain present in the new struct,
+ with the same ordinal position, same optional or non-optional status,
+ same (or backward-compatible) type and where applicable, the same
+ [MinVersion] attribute value.
+ - All [MinVersion] attributes must be non-decreasing in ordinal order.
+ - All reference-typed (string, array, map, struct, or union) fields tagged
+ with a [MinVersion] greater than zero must be optional.
+ """
+
+ def buildOrdinalFieldMap(struct):
+ fields_by_ordinal = {}
+ for field in struct.fields:
+ if field.ordinal in fields_by_ordinal:
+ raise Exception('Multiple fields with ordinal %s in struct %s.' %
+ (field.ordinal, struct.mojom_name))
+ fields_by_ordinal[field.ordinal] = field
+ return fields_by_ordinal
+
+ new_fields = buildOrdinalFieldMap(self)
+ old_fields = buildOrdinalFieldMap(rhs)
+ if len(new_fields) < len(old_fields):
+ # At least one field was removed, which is not OK.
+ return False
+
+ # If there are N fields, existing ordinal values must exactly cover the
+ # range from 0 to N-1.
+ num_old_ordinals = len(old_fields)
+ max_old_min_version = 0
+ for ordinal in range(num_old_ordinals):
+ new_field = new_fields[ordinal]
+ old_field = old_fields[ordinal]
+ if (old_field.min_version or 0) > max_old_min_version:
+ max_old_min_version = old_field.min_version
+ if not _IsFieldBackwardCompatible(new_field, old_field, checker):
+ # Type or min-version mismatch between old and new versions of the same
+ # ordinal field.
+ return False
+
+ # At this point we know all old fields are intact in the new struct
+ # definition. Now verify that all new fields have a high enough min version
+ # and are appropriately optional where required.
+ num_new_ordinals = len(new_fields)
+ last_min_version = max_old_min_version
+ for ordinal in range(num_old_ordinals, num_new_ordinals):
+ new_field = new_fields[ordinal]
+ min_version = new_field.min_version or 0
+ if min_version <= max_old_min_version:
+ # A new field is being added to an existing version, which is not OK.
+ return False
+ if min_version < last_min_version:
+ # The [MinVersion] of a field cannot be lower than the [MinVersion] of
+ # a field with lower ordinal value.
+ return False
+ if IsReferenceKind(new_field.kind) and not IsNullableKind(new_field.kind):
+ # New fields whose type can be nullable MUST be nullable.
+ return False
+
+ return True
+
+ @property
+ def stable(self):
+ return self.attributes.get(ATTRIBUTE_STABLE, False) \
+ if self.attributes else False
+
+ @property
+ def qualified_name(self):
+ if self.parent_kind:
+ prefix = self.parent_kind.qualified_name + '.'
+ else:
+ prefix = self.module.GetNamespacePrefix()
+ return '%s%s' % (prefix, self.mojom_name)
+
+ def _tuple(self):
+ return (self.mojom_name, self.native_only, self.fields, self.constants,
+ self.attributes)
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, Struct) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
+
+ def __hash__(self):
+ return id(self)
+
+
+class Union(ReferenceKind):
+ """A union of several kinds.
+
+ Attributes:
+ mojom_name: {str} The name of the union type as defined in mojom.
+ name: {str} The stylized name.
+ fields: {List[UnionField]} The members of the union.
+ attributes: {dict} Additional information about the union, such as
+ which Java class name to use to represent it in the generated
+ bindings.
+ """
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('default_field')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ReferenceKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.fields = []
+ self.attributes = attributes
+ self.default_field = None
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s spec=%r is_nullable=%r fields=%s>' % (
+ self.__class__.__name__, self.spec, self.is_nullable, Repr(
+ self.fields))
+ return GenericRepr(self, {'fields': True, 'is_nullable': False})
+
+ def AddField(self, mojom_name, kind, ordinal=None, attributes=None):
+ field = UnionField(mojom_name, kind, ordinal, None, attributes)
+ self.fields.append(field)
+ return field
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeUnion(self.mojom_name)
+ for field in self.fields:
+ field.Stylize(stylizer)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ """This union is backward-compatible with rhs (older_union) if and only if
+ all of the following conditions hold:
+ - Any newly added field is tagged with a [MinVersion] attribute specifying
+ a version number greater than all previously used [MinVersion]
+ attributes within the union.
+ - All fields present in rhs remain present in the new union,
+ with the same ordinal value, same optional or non-optional status,
+ same (or backward-compatible) type, and where applicable, the same
+ [MinVersion] attribute value.
+ """
+
+ def buildOrdinalFieldMap(union):
+ fields_by_ordinal = {}
+ for field in union.fields:
+ if field.ordinal in fields_by_ordinal:
+ raise Exception('Multiple fields with ordinal %s in union %s.' %
+ (field.ordinal, union.mojom_name))
+ fields_by_ordinal[field.ordinal] = field
+ return fields_by_ordinal
+
+ new_fields = buildOrdinalFieldMap(self)
+ old_fields = buildOrdinalFieldMap(rhs)
+ if len(new_fields) < len(old_fields):
+ # At least one field was removed, which is not OK.
+ return False
+
+ max_old_min_version = 0
+ for ordinal, old_field in old_fields.items():
+ new_field = new_fields.get(ordinal)
+ if not new_field:
+ # A field was removed, which is not OK.
+ return False
+ if not _IsFieldBackwardCompatible(new_field, old_field, checker):
+ # A field changed its type or MinVersion, which is not OK.
+ return False
+ old_min_version = old_field.min_version or 0
+ if old_min_version > max_old_min_version:
+ max_old_min_version = old_min_version
+
+ new_ordinals = set(new_fields.keys()) - set(old_fields.keys())
+ for ordinal in new_ordinals:
+ if (new_fields[ordinal].min_version or 0) <= max_old_min_version:
+ # New fields must use a MinVersion greater than any old fields.
+ return False
+
+ return True
+
+ @property
+ def extensible(self):
+ return self.attributes.get(ATTRIBUTE_EXTENSIBLE, False) \
+ if self.attributes else False
+
+ @property
+ def stable(self):
+ return self.attributes.get(ATTRIBUTE_STABLE, False) \
+ if self.attributes else False
+
+ @property
+ def qualified_name(self):
+ if self.parent_kind:
+ prefix = self.parent_kind.qualified_name + '.'
+ else:
+ prefix = self.module.GetNamespacePrefix()
+ return '%s%s' % (prefix, self.mojom_name)
+
+ def _tuple(self):
+ return (self.mojom_name, self.fields, self.attributes)
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, Union) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
+
+ def __hash__(self):
+ return id(self)
+
+
+class Array(ReferenceKind):
+ """An array.
+
+ Attributes:
+ kind: {Kind} The type of the elements. May be None.
+ length: The number of elements. None if unknown.
+ """
+
+ Kind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('length')
+
+ def __init__(self, kind=None, length=None):
+ if kind is not None:
+ if length is not None:
+ spec = 'a%d:%s' % (length, kind.spec)
+ else:
+ spec = 'a:%s' % kind.spec
+
+ ReferenceKind.__init__(self, spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+ self.length = length
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s spec=%r is_nullable=%r kind=%s length=%r>' % (
+ self.__class__.__name__, self.spec, self.is_nullable, Repr(
+ self.kind), self.length)
+ return GenericRepr(self, {
+ 'kind': True,
+ 'length': False,
+ 'is_nullable': False
+ })
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, Array)
+ and (self.kind, self.length) == (rhs.kind, rhs.length))
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return (isinstance(rhs, Array) and self.length == rhs.length
+ and checker.IsBackwardCompatible(self.kind, rhs.kind))
+
+
+class Map(ReferenceKind):
+ """A map.
+
+ Attributes:
+ key_kind: {Kind} The type of the keys. May be None.
+ value_kind: {Kind} The type of the elements. May be None.
+ """
+ Kind.AddSharedProperty('key_kind')
+ Kind.AddSharedProperty('value_kind')
+
+ def __init__(self, key_kind=None, value_kind=None):
+ if (key_kind is not None and value_kind is not None):
+ ReferenceKind.__init__(
+ self, 'm[' + key_kind.spec + '][' + value_kind.spec + ']')
+ if IsNullableKind(key_kind):
+ raise Exception("Nullable kinds cannot be keys in maps.")
+ if IsAnyHandleKind(key_kind):
+ raise Exception("Handles cannot be keys in maps.")
+ if IsAnyInterfaceKind(key_kind):
+ raise Exception("Interfaces cannot be keys in maps.")
+ if IsArrayKind(key_kind):
+ raise Exception("Arrays cannot be keys in maps.")
+ else:
+ ReferenceKind.__init__(self)
+
+ self.key_kind = key_kind
+ self.value_kind = value_kind
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s spec=%r is_nullable=%r key_kind=%s value_kind=%s>' % (
+ self.__class__.__name__, self.spec, self.is_nullable,
+ Repr(self.key_kind), Repr(self.value_kind))
+ return GenericRepr(self, {'key_kind': True, 'value_kind': True})
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, Map) and
+ (self.key_kind, self.value_kind) == (rhs.key_kind, rhs.value_kind))
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return (isinstance(rhs, Map)
+ and checker.IsBackwardCompatible(self.key_kind, rhs.key_kind)
+ and checker.IsBackwardCompatible(self.value_kind, rhs.value_kind))
+
+
+class PendingRemote(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, Interface):
+ raise Exception(
+ 'pending_remote<T> requires T to be an interface type. Got %r' %
+ kind.spec)
+ ReferenceKind.__init__(self, 'rmt:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, PendingRemote) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return (isinstance(rhs, PendingRemote)
+ and checker.IsBackwardCompatible(self.kind, rhs.kind))
+
+
+class PendingReceiver(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, Interface):
+ raise Exception(
+ 'pending_receiver<T> requires T to be an interface type. Got %r' %
+ kind.spec)
+ ReferenceKind.__init__(self, 'rcv:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, PendingReceiver) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return isinstance(rhs, PendingReceiver) and checker.IsBackwardCompatible(
+ self.kind, rhs.kind)
+
+
+class PendingAssociatedRemote(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, Interface):
+ raise Exception(
+ 'pending_associated_remote<T> requires T to be an interface ' +
+ 'type. Got %r' % kind.spec)
+ ReferenceKind.__init__(self, 'rma:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, PendingAssociatedRemote) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return isinstance(rhs,
+ PendingAssociatedRemote) and checker.IsBackwardCompatible(
+ self.kind, rhs.kind)
+
+
+class PendingAssociatedReceiver(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, Interface):
+ raise Exception(
+ 'pending_associated_receiver<T> requires T to be an interface ' +
+ 'type. Got %r' % kind.spec)
+ ReferenceKind.__init__(self, 'rca:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, PendingAssociatedReceiver) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return isinstance(
+ rhs, PendingAssociatedReceiver) and checker.IsBackwardCompatible(
+ self.kind, rhs.kind)
+
+
+class InterfaceRequest(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, Interface):
+ raise Exception(
+ "Interface request requires %r to be an interface." % kind.spec)
+ ReferenceKind.__init__(self, 'r:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, InterfaceRequest) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return isinstance(rhs, InterfaceRequest) and checker.IsBackwardCompatible(
+ self.kind, rhs.kind)
+
+
+class AssociatedInterfaceRequest(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, InterfaceRequest):
+ raise Exception(
+ "Associated interface request requires %r to be an interface "
+ "request." % kind.spec)
+ assert not kind.is_nullable
+ ReferenceKind.__init__(self, 'asso:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind.kind if kind is not None else None
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, AssociatedInterfaceRequest) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return isinstance(
+ rhs, AssociatedInterfaceRequest) and checker.IsBackwardCompatible(
+ self.kind, rhs.kind)
+
+
+class Parameter:
+ def __init__(self,
+ mojom_name=None,
+ kind=None,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ self.mojom_name = mojom_name
+ self.name = None
+ self.ordinal = ordinal
+ self.kind = kind
+ self.default = default
+ self.attributes = attributes
+
+ def Repr(self, as_ref=True):
+ # pylint: disable=unused-argument
+ return '<%s mojom_name=%r kind=%s>' % (
+ self.__class__.__name__, self.mojom_name, self.kind.Repr(as_ref=True))
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeParameter(self.mojom_name)
+
+ @property
+ def min_version(self):
+ return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
+ if self.attributes else None
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, Parameter)
+ and (self.mojom_name, self.ordinal, self.kind, self.default,
+ self.attributes) == (rhs.mojom_name, rhs.ordinal, rhs.kind,
+ rhs.default, rhs.attributes))
+
+
+class Method:
+ def __init__(self, interface, mojom_name, ordinal=None, attributes=None):
+ self.interface = interface
+ self.mojom_name = mojom_name
+ self.name = None
+ self.explicit_ordinal = ordinal
+ self.ordinal = ordinal
+ self.parameters = []
+ self.param_struct = None
+ self.response_parameters = None
+ self.response_param_struct = None
+ self.attributes = attributes
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'parameters': True,
+ 'response_parameters': True
+ })
+
+ def AddParameter(self,
+ mojom_name,
+ kind,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ parameter = Parameter(mojom_name, kind, ordinal, default, attributes)
+ self.parameters.append(parameter)
+ return parameter
+
+ def AddResponseParameter(self,
+ mojom_name,
+ kind,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ if self.response_parameters is None:
+ self.response_parameters = []
+ parameter = Parameter(mojom_name, kind, ordinal, default, attributes)
+ self.response_parameters.append(parameter)
+ return parameter
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeMethod(self.mojom_name)
+ for param in self.parameters:
+ param.Stylize(stylizer)
+ if self.response_parameters is not None:
+ for param in self.response_parameters:
+ param.Stylize(stylizer)
+
+ if self.param_struct:
+ self.param_struct.Stylize(stylizer)
+ if self.response_param_struct:
+ self.response_param_struct.Stylize(stylizer)
+
+ @property
+ def min_version(self):
+ return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
+ if self.attributes else None
+
+ @property
+ def sync(self):
+ return self.attributes.get(ATTRIBUTE_SYNC) \
+ if self.attributes else None
+
+ @property
+ def allow_interrupt(self):
+ return not self.attributes.get(ATTRIBUTE_NO_INTERRUPT) \
+ if self.attributes else True
+
+ @property
+ def unlimited_message_size(self):
+ return self.attributes.get(ATTRIBUTE_UNLIMITED_SIZE) \
+ if self.attributes else False
+
+ @property
+ def allowed_context(self):
+ return self.attributes.get(ATTRIBUTE_ALLOWED_CONTEXT) \
+ if self.attributes else None
+
+ @property
+ def supports_urgent(self):
+ return self.attributes.get(ATTRIBUTE_SUPPORTS_URGENT) \
+ if self.attributes else None
+
+ @property
+ def runtime_feature(self):
+ if not self.attributes:
+ return None
+ runtime_feature = self.attributes.get(ATTRIBUTE_RUNTIME_FEATURE, None)
+ if runtime_feature is None:
+ return None
+ if not isinstance(runtime_feature, Feature):
+ raise Exception("RuntimeFeature attribute on %s must be a feature." %
+ self.name)
+ return runtime_feature
+
+ def _tuple(self):
+ return (self.mojom_name, self.ordinal, self.parameters,
+ self.response_parameters, self.attributes)
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, Method) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
+
+
+class Interface(ReferenceKind):
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('methods')
+ Kind.AddSharedProperty('enums')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ReferenceKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.methods = []
+ self.enums = []
+ self.constants = []
+ self.attributes = attributes
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'attributes': False,
+ 'methods': False
+ })
+
+ def AddMethod(self, mojom_name, ordinal=None, attributes=None):
+ method = Method(self, mojom_name, ordinal, attributes)
+ self.methods.append(method)
+ return method
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeInterface(self.mojom_name)
+ for method in self.methods:
+ method.Stylize(stylizer)
+ for enum in self.enums:
+ enum.Stylize(stylizer)
+ for constant in self.constants:
+ constant.Stylize(stylizer)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ """This interface is backward-compatible with rhs (older_interface) if and
+ only if all of the following conditions hold:
+ - All defined methods in rhs (when identified by ordinal) have
+ backward-compatible definitions in this interface. For each method this
+ means:
+ - The parameter list is backward-compatible, according to backward-
+ compatibility rules for structs, where each parameter is essentially
+ a struct field.
+ - If the old method definition does not specify a reply message, the
+ new method definition must not specify a reply message.
+ - If the old method definition specifies a reply message, the new
+ method definition must also specify a reply message with a parameter
+ list that is backward-compatible according to backward-compatibility
+ rules for structs.
+ - All newly introduced methods in this interface have a [MinVersion]
+ attribute specifying a version greater than the [MinVersion] of any
+ method in rhs.
+ """
+
+ def buildOrdinalMethodMap(interface):
+ methods_by_ordinal = {}
+ for method in interface.methods:
+ if method.ordinal in methods_by_ordinal:
+ raise Exception('Multiple methods with ordinal %s in interface %s.' %
+ (method.ordinal, interface.mojom_name))
+ methods_by_ordinal[method.ordinal] = method
+ return methods_by_ordinal
+
+ new_methods = buildOrdinalMethodMap(self)
+ old_methods = buildOrdinalMethodMap(rhs)
+ max_old_min_version = 0
+ for ordinal, old_method in old_methods.items():
+ new_method = new_methods.get(ordinal)
+ if not new_method:
+ # A method was removed, which is not OK.
+ return False
+
+ if not checker.IsBackwardCompatible(new_method.param_struct,
+ old_method.param_struct):
+ # The parameter list is not backward-compatible, which is not OK.
+ return False
+
+ if old_method.response_param_struct is None:
+ if new_method.response_param_struct is not None:
+ # A reply was added to a message which didn't have one before, and
+ # this is not OK.
+ return False
+ else:
+ if new_method.response_param_struct is None:
+ # A reply was removed from a message, which is not OK.
+ return False
+ if not checker.IsBackwardCompatible(new_method.response_param_struct,
+ old_method.response_param_struct):
+ # The new message's reply is not backward-compatible with the old
+ # message's reply, which is not OK.
+ return False
+
+ if (old_method.min_version or 0) > max_old_min_version:
+ max_old_min_version = old_method.min_version
+
+ # All the old methods are compatible with their new counterparts. Now verify
+ # that newly added methods are properly versioned.
+ new_ordinals = set(new_methods.keys()) - set(old_methods.keys())
+ for ordinal in new_ordinals:
+ new_method = new_methods[ordinal]
+ if (new_method.min_version or 0) <= max_old_min_version:
+ # A method was added to an existing version, which is not OK.
+ return False
+
+ return True
+
+ @property
+ def service_sandbox(self):
+ if not self.attributes:
+ return None
+ service_sandbox = self.attributes.get(ATTRIBUTE_SERVICE_SANDBOX, None)
+ if service_sandbox is None:
+ return None
+ # Constants are only allowed to refer to an enum here, so replace.
+ if isinstance(service_sandbox, Constant):
+ service_sandbox = service_sandbox.value
+ if not isinstance(service_sandbox, EnumValue):
+ raise Exception("ServiceSandbox attribute on %s must be an enum value." %
+ self.module.name)
+ return service_sandbox
+
+ @property
+ def runtime_feature(self):
+ if not self.attributes:
+ return None
+ runtime_feature = self.attributes.get(ATTRIBUTE_RUNTIME_FEATURE, None)
+ if runtime_feature is None:
+ return None
+ if not isinstance(runtime_feature, Feature):
+ raise Exception("RuntimeFeature attribute on %s must be a feature." %
+ self.name)
+ return runtime_feature
+
+ @property
+ def require_context(self):
+ if not self.attributes:
+ return None
+ return self.attributes.get(ATTRIBUTE_REQUIRE_CONTEXT, None)
+
+ @property
+ def stable(self):
+ return self.attributes.get(ATTRIBUTE_STABLE, False) \
+ if self.attributes else False
+
+ @property
+ def qualified_name(self):
+ if self.parent_kind:
+ prefix = self.parent_kind.qualified_name + '.'
+ else:
+ prefix = self.module.GetNamespacePrefix()
+ return '%s%s' % (prefix, self.mojom_name)
+
+ def _tuple(self):
+ return (self.mojom_name, self.methods, self.enums, self.constants,
+ self.attributes)
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, Interface) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
+
+ @property
+ def uuid(self):
+ uuid_str = self.attributes.get(ATTRIBUTE_UUID) if self.attributes else None
+ if uuid_str is None:
+ return None
+
+ try:
+ u = UUID(uuid_str)
+ except:
+ raise ValueError('Invalid format for Uuid attribute on interface {}. '
+ 'Expected standard RFC 4122 string representation of '
+ 'a UUID.'.format(self.mojom_name))
+ return (int(u.hex[:16], 16), int(u.hex[16:], 16))
+
+ def __hash__(self):
+ return id(self)
+
+
+class AssociatedInterface(ReferenceKind):
+ Kind.AddSharedProperty('kind')
+
+ def __init__(self, kind=None):
+ if kind is not None:
+ if not isinstance(kind, Interface):
+ raise Exception(
+ "Associated interface requires %r to be an interface." % kind.spec)
+ assert not kind.is_nullable
+ ReferenceKind.__init__(self, 'asso:' + kind.spec)
+ else:
+ ReferenceKind.__init__(self)
+ self.kind = kind
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, AssociatedInterface) and self.kind == rhs.kind
+
+ def __hash__(self):
+ return id(self)
+
+ def IsBackwardCompatible(self, rhs, checker):
+ return isinstance(rhs,
+ AssociatedInterface) and checker.IsBackwardCompatible(
+ self.kind, rhs.kind)
+
+
+class EnumField:
+ def __init__(self,
+ mojom_name=None,
+ value=None,
+ attributes=None,
+ numeric_value=None):
+ self.mojom_name = mojom_name
+ self.name = None
+ self.value = value
+ self.attributes = attributes
+ self.numeric_value = numeric_value
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeEnumField(self.mojom_name)
+
+ @property
+ def default(self):
+ return self.attributes.get(ATTRIBUTE_DEFAULT, False) \
+ if self.attributes else False
+
+ @property
+ def min_version(self):
+ return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
+ if self.attributes else None
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, EnumField)
+ and (self.mojom_name, self.value, self.attributes,
+ self.numeric_value) == (rhs.mojom_name, rhs.value,
+ rhs.attributes, rhs.numeric_value))
+
+
+class Enum(ValueKind):
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('native_only')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('min_value')
+ Kind.AddSharedProperty('max_value')
+ Kind.AddSharedProperty('default_field')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ValueKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.native_only = False
+ self.fields = []
+ self.attributes = attributes
+ self.min_value = None
+ self.max_value = None
+ self.default_field = None
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
+ return GenericRepr(self, {'mojom_name': False, 'fields': False})
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeEnum(self.mojom_name)
+ for field in self.fields:
+ field.Stylize(stylizer)
+
+ @property
+ def extensible(self):
+ return self.attributes.get(ATTRIBUTE_EXTENSIBLE, False) \
+ if self.attributes else False
+
+ @property
+ def stable(self):
+ return self.attributes.get(ATTRIBUTE_STABLE, False) \
+ if self.attributes else False
+
+ @property
+ def qualified_name(self):
+ if self.parent_kind:
+ prefix = self.parent_kind.qualified_name + '.'
+ else:
+ prefix = self.module.GetNamespacePrefix()
+ return '%s%s' % (prefix, self.mojom_name)
+
+ # pylint: disable=unused-argument
+ def IsBackwardCompatible(self, rhs, checker):
+ """This enum is backward-compatible with rhs (older_enum) if and only if one
+ of the following conditions holds:
+ - Neither enum is [Extensible] and both have the exact same set of valid
+ numeric values. Field names and aliases for the same numeric value do
+ not affect compatibility.
+ - rhs is [Extensible], and for every version defined by
+ rhs, this enum has the exact same set of valid numeric values.
+ """
+
+ def buildVersionFieldMap(enum):
+ fields_by_min_version = {}
+ for field in enum.fields:
+ if field.min_version not in fields_by_min_version:
+ fields_by_min_version[field.min_version] = set()
+ fields_by_min_version[field.min_version].add(field.numeric_value)
+ return fields_by_min_version
+
+ old_fields = buildVersionFieldMap(rhs)
+ new_fields = buildVersionFieldMap(self)
+
+ if new_fields.keys() != old_fields.keys() and not rhs.extensible:
+ raise Exception("Non-extensible enum cannot be modified")
+
+ for min_version, valid_values in old_fields.items():
+ if min_version not in new_fields:
+ raise Exception('New values added to an extensible enum '
+ 'do not specify MinVersion: %s' % new_fields)
+
+ if (new_fields[min_version] != valid_values):
+ if (len(new_fields[min_version]) < len(valid_values)):
+ raise Exception('Removing values for an existing MinVersion %s '
+ 'is not allowed' % min_version)
+
+ raise Exception(
+ 'New values don\'t match old values '
+ 'for an existing MinVersion %s,'
+ ' please specify MinVersion equal to "Next version" '
+ 'in the enum description'
+ ' for the following values:\n%s' %
+ (min_version, new_fields[min_version].difference(valid_values)))
+ return True
+
+ def _tuple(self):
+ return (self.mojom_name, self.native_only, self.fields, self.attributes,
+ self.min_value, self.max_value, self.default_field)
+
+ def __eq__(self, rhs):
+ return isinstance(rhs, Enum) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
+
+ def __hash__(self):
+ return id(self)
+
+
+class Module:
+ def __init__(self, path=None, mojom_namespace=None, attributes=None):
+ self.path = path
+ self.mojom_namespace = mojom_namespace
+ self.namespace = None
+ self.structs = []
+ self.unions = []
+ self.interfaces = []
+ self.enums = []
+ self.features = []
+ self.constants = []
+ self.kinds = OrderedDict()
+ self.attributes = attributes
+ self.imports = []
+ self.imported_kinds = OrderedDict()
+ self.metadata = OrderedDict()
+
+ def __repr__(self):
+ # Gives us a decent __repr__ for modules.
+ return self.Repr()
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, Module)
+ and (self.path, self.attributes, self.mojom_namespace, self.imports,
+ self.constants, self.enums, self.structs, self.unions,
+ self.interfaces, self.features)
+ == (rhs.path, rhs.attributes, rhs.mojom_namespace, rhs.imports,
+ rhs.constants, rhs.enums, rhs.structs, rhs.unions,
+ rhs.interfaces, rhs.features))
+
+ def __hash__(self):
+ return id(self)
+
+ def Repr(self, as_ref=True):
+ if as_ref:
+ return '<%s path=%r mojom_namespace=%r>' % (
+ self.__class__.__name__, self.path, self.mojom_namespace)
+ return GenericRepr(
+ self, {
+ 'path': False,
+ 'mojom_namespace': False,
+ 'attributes': False,
+ 'structs': False,
+ 'interfaces': False,
+ 'unions': False,
+ 'features': False,
+ })
+
+ def GetNamespacePrefix(self):
+ return '%s.' % self.mojom_namespace if self.mojom_namespace else ''
+
+ def AddInterface(self, mojom_name, attributes=None):
+ interface = Interface(mojom_name, self, attributes)
+ self.interfaces.append(interface)
+ return interface
+
+ def AddStruct(self, mojom_name, attributes=None):
+ struct = Struct(mojom_name, self, attributes)
+ self.structs.append(struct)
+ return struct
+
+ def AddUnion(self, mojom_name, attributes=None):
+ union = Union(mojom_name, self, attributes)
+ self.unions.append(union)
+ return union
+
+ def AddFeature(self, mojom_name, attributes=None):
+ feature = Feature(mojom_name, self, attributes)
+ self.features.append(feature)
+ return feature
+
+ def Stylize(self, stylizer):
+ self.namespace = stylizer.StylizeModule(self.mojom_namespace)
+ for struct in self.structs:
+ struct.Stylize(stylizer)
+ for union in self.unions:
+ union.Stylize(stylizer)
+ for interface in self.interfaces:
+ interface.Stylize(stylizer)
+ for enum in self.enums:
+ enum.Stylize(stylizer)
+ for constant in self.constants:
+ constant.Stylize(stylizer)
+ for feature in self.features:
+ feature.Stylize(stylizer)
+
+ for imported_module in self.imports:
+ imported_module.Stylize(stylizer)
+
+ def Dump(self, f):
+ pickle.dump(self, f)
+
+ @classmethod
+ def Load(cls, f):
+ result = pickle.load(f)
+ assert isinstance(result, Module)
+ return result
+
+
+def IsBoolKind(kind):
+ return kind.spec == BOOL.spec or kind.spec == NULLABLE_BOOL.spec
+
+
+def IsFloatKind(kind):
+ return kind.spec == FLOAT.spec or kind.spec == NULLABLE_FLOAT.spec
+
+
+def IsDoubleKind(kind):
+ return kind.spec == DOUBLE.spec or kind.spec == NULLABLE_DOUBLE.spec
+
+
+def IsIntegralKind(kind):
+ return (kind.spec == BOOL.spec or kind.spec == INT8.spec
+ or kind.spec == INT16.spec or kind.spec == INT32.spec
+ or kind.spec == INT64.spec or kind.spec == UINT8.spec
+ or kind.spec == UINT16.spec or kind.spec == UINT32.spec
+ or kind.spec == UINT64.spec or kind.spec == NULLABLE_BOOL.spec
+ or kind.spec == NULLABLE_INT8.spec or kind.spec == NULLABLE_INT16.spec
+ or kind.spec == NULLABLE_INT32.spec
+ or kind.spec == NULLABLE_INT64.spec
+ or kind.spec == NULLABLE_UINT8.spec
+ or kind.spec == NULLABLE_UINT16.spec
+ or kind.spec == NULLABLE_UINT32.spec
+ or kind.spec == NULLABLE_UINT64.spec)
+
+
+def IsStringKind(kind):
+ return kind.spec == STRING.spec or kind.spec == NULLABLE_STRING.spec
+
+
+def IsGenericHandleKind(kind):
+ return kind.spec == HANDLE.spec or kind.spec == NULLABLE_HANDLE.spec
+
+
+def IsDataPipeConsumerKind(kind):
+ return kind.spec == DCPIPE.spec or kind.spec == NULLABLE_DCPIPE.spec
+
+
+def IsDataPipeProducerKind(kind):
+ return kind.spec == DPPIPE.spec or kind.spec == NULLABLE_DPPIPE.spec
+
+
+def IsMessagePipeKind(kind):
+ return kind.spec == MSGPIPE.spec or kind.spec == NULLABLE_MSGPIPE.spec
+
+
+def IsSharedBufferKind(kind):
+ return (kind.spec == SHAREDBUFFER.spec
+ or kind.spec == NULLABLE_SHAREDBUFFER.spec)
+
+
+def IsPlatformHandleKind(kind):
+ return (kind.spec == PLATFORMHANDLE.spec
+ or kind.spec == NULLABLE_PLATFORMHANDLE.spec)
+
+
+def IsStructKind(kind):
+ return isinstance(kind, Struct)
+
+
+def IsUnionKind(kind):
+ return isinstance(kind, Union)
+
+
+def IsArrayKind(kind):
+ return isinstance(kind, Array)
+
+
+def IsFeatureKind(kind):
+ return isinstance(kind, Feature)
+
+
+def IsInterfaceKind(kind):
+ return isinstance(kind, Interface)
+
+
+def IsAssociatedInterfaceKind(kind):
+ return isinstance(kind, AssociatedInterface)
+
+
+def IsInterfaceRequestKind(kind):
+ return isinstance(kind, InterfaceRequest)
+
+
+def IsAssociatedInterfaceRequestKind(kind):
+ return isinstance(kind, AssociatedInterfaceRequest)
+
+
+def IsPendingRemoteKind(kind):
+ return isinstance(kind, PendingRemote)
+
+
+def IsPendingReceiverKind(kind):
+ return isinstance(kind, PendingReceiver)
+
+
+def IsPendingAssociatedRemoteKind(kind):
+ return isinstance(kind, PendingAssociatedRemote)
+
+
+def IsPendingAssociatedReceiverKind(kind):
+ return isinstance(kind, PendingAssociatedReceiver)
+
+
+def IsEnumKind(kind):
+ return isinstance(kind, Enum)
+
+
+def IsValueKind(kind):
+ return isinstance(kind, ValueKind)
+
+
+def IsReferenceKind(kind):
+ return isinstance(kind, ReferenceKind)
+
+
+def IsNullableKind(kind):
+ return kind.is_nullable
+
+
+def IsMapKind(kind):
+ return isinstance(kind, Map)
+
+
+def IsObjectKind(kind):
+ return IsPointerKind(kind) or IsUnionKind(kind)
+
+
+def IsPointerKind(kind):
+ return (IsStructKind(kind) or IsArrayKind(kind) or IsStringKind(kind)
+ or IsMapKind(kind))
+
+
+# Note that this does not include any interface kinds.
+def IsAnyHandleKind(kind):
+ return (IsGenericHandleKind(kind) or IsDataPipeConsumerKind(kind)
+ or IsDataPipeProducerKind(kind) or IsMessagePipeKind(kind)
+ or IsSharedBufferKind(kind) or IsPlatformHandleKind(kind))
+
+
+def IsAnyInterfaceKind(kind):
+ return (IsInterfaceKind(kind) or IsInterfaceRequestKind(kind)
+ or IsAssociatedKind(kind) or IsPendingRemoteKind(kind)
+ or IsPendingReceiverKind(kind))
+
+
+def IsAnyHandleOrInterfaceKind(kind):
+ return IsAnyHandleKind(kind) or IsAnyInterfaceKind(kind)
+
+
+def IsAssociatedKind(kind):
+ return (IsAssociatedInterfaceKind(kind)
+ or IsAssociatedInterfaceRequestKind(kind)
+ or IsPendingAssociatedRemoteKind(kind)
+ or IsPendingAssociatedReceiverKind(kind))
+
+
+def HasCallbacks(interface):
+ for method in interface.methods:
+ if method.response_parameters is not None:
+ return True
+ return False
+
+
+# Finds out whether an interface passes associated interfaces and associated
+# interface requests.
+def PassesAssociatedKinds(interface):
+ visited_kinds = set()
+ for method in interface.methods:
+ if MethodPassesAssociatedKinds(method, visited_kinds):
+ return True
+ return False
+
+
+def _AnyMethodParameterRecursive(method, predicate, visited_kinds=None):
+ def _HasProperty(kind):
+ if kind in visited_kinds:
+ # No need to examine the kind again.
+ return False
+ visited_kinds.add(kind)
+ if predicate(kind):
+ return True
+ if IsArrayKind(kind):
+ return _HasProperty(kind.kind)
+ if IsStructKind(kind) or IsUnionKind(kind):
+ for field in kind.fields:
+ if _HasProperty(field.kind):
+ return True
+ if IsMapKind(kind):
+ if _HasProperty(kind.key_kind) or _HasProperty(kind.value_kind):
+ return True
+ return False
+
+ if visited_kinds is None:
+ visited_kinds = set()
+
+ for param in method.parameters:
+ if _HasProperty(param.kind):
+ return True
+ if method.response_parameters is not None:
+ for param in method.response_parameters:
+ if _HasProperty(param.kind):
+ return True
+ return False
+
+
+# Finds out whether a method passes associated interfaces and associated
+# interface requests.
+def MethodPassesAssociatedKinds(method, visited_kinds=None):
+ return _AnyMethodParameterRecursive(
+ method, IsAssociatedKind, visited_kinds=visited_kinds)
+
+
+# Determines whether a method passes interfaces.
+def MethodPassesInterfaces(method):
+ return _AnyMethodParameterRecursive(method, IsInterfaceKind)
+
+
+def GetSyncMethodOrdinals(interface):
+ return [method.ordinal for method in interface.methods if method.sync]
+
+
+def HasUninterruptableMethods(interface):
+ for method in interface.methods:
+ if not method.allow_interrupt:
+ return True
+ return False
+
+
+def ContainsHandlesOrInterfaces(kind):
+ """Check if the kind contains any handles.
+
+ This check is recursive so it checks all struct fields, containers elements,
+ etc.
+
+ Args:
+ struct: {Kind} The kind to check.
+
+ Returns:
+ {bool}: True if the kind contains handles.
+ """
+ # We remember the types we already checked to avoid infinite recursion when
+ # checking recursive (or mutually recursive) types:
+ checked = set()
+
+ def Check(kind):
+ if kind.spec in checked:
+ return False
+ checked.add(kind.spec)
+ if IsStructKind(kind):
+ return any(Check(field.kind) for field in kind.fields)
+ if IsUnionKind(kind):
+ return any(Check(field.kind) for field in kind.fields)
+ if IsAnyHandleKind(kind):
+ return True
+ if IsAnyInterfaceKind(kind):
+ return True
+ if IsArrayKind(kind):
+ return Check(kind.kind)
+ if IsMapKind(kind):
+ return Check(kind.key_kind) or Check(kind.value_kind)
+ return False
+
+ return Check(kind)
+
+
+def ContainsNativeTypes(kind):
+ """Check if the kind contains any native type (struct or enum).
+
+ This check is recursive so it checks all struct fields, scoped interface
+ enums, etc.
+
+ Args:
+ kind: {Kind} The kind to check.
+
+ Returns:
+ {bool}: True if the kind contains native types.
+ """
+ # We remember the types we already checked to avoid infinite recursion when
+ # checking recursive (or mutually recursive) types:
+ checked = set()
+
+ def Check(kind):
+ if kind.spec in checked:
+ return False
+ checked.add(kind.spec)
+ if IsEnumKind(kind):
+ return kind.native_only
+ if IsStructKind(kind):
+ if kind.native_only:
+ return True
+ if any(enum.native_only for enum in kind.enums):
+ return True
+ return any(Check(field.kind) for field in kind.fields)
+ if IsUnionKind(kind):
+ return any(Check(field.kind) for field in kind.fields)
+ if IsInterfaceKind(kind):
+ return any(enum.native_only for enum in kind.enums)
+ if IsArrayKind(kind):
+ return Check(kind.kind)
+ if IsMapKind(kind):
+ return Check(kind.key_kind) or Check(kind.value_kind)
+ return False
+
+ return Check(kind)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
new file mode 100644
index 00000000..2a4e852c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import unittest
+
+from mojom.generate import module as mojom
+
+
+class ModuleTest(unittest.TestCase):
+ def testNonInterfaceAsInterfaceRequest(self):
+ """Tests that a non-interface cannot be used for interface requests."""
+ module = mojom.Module('test_module', 'test_namespace')
+ struct = mojom.Struct('TestStruct', module=module)
+ with self.assertRaises(Exception) as e:
+ mojom.InterfaceRequest(struct)
+ self.assertEqual(
+ e.exception.__str__(),
+ 'Interface request requires \'x:TestStruct\' to be an interface.')
+
+ def testNonInterfaceAsAssociatedInterface(self):
+ """Tests that a non-interface type cannot be used for associated interfaces.
+ """
+ module = mojom.Module('test_module', 'test_namespace')
+ struct = mojom.Struct('TestStruct', module=module)
+ with self.assertRaises(Exception) as e:
+ mojom.AssociatedInterface(struct)
+ self.assertEqual(
+ e.exception.__str__(),
+ 'Associated interface requires \'x:TestStruct\' to be an interface.')
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
new file mode 100644
index 00000000..61240426
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
@@ -0,0 +1,367 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+from mojom.generate import module as mojom
+
+# This module provides a mechanism for determining the packed order and offsets
+# of a mojom.Struct.
+#
+# ps = pack.PackedStruct(struct)
+# ps.packed_fields will access a list of PackedField objects, each of which
+# will have an offset, a size and a bit (for mojom.BOOLs).
+
+# Size of struct header in bytes: num_bytes [4B] + version [4B].
+HEADER_SIZE = 8
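+# Worked example (illustrative): a struct whose only field is an int32 has a
+# 4-byte payload padded up to 8, so its serialized size is
+# HEADER_SIZE + 8 = 16 bytes, as the version-info unit tests exercise.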
+
+
+class PackedField:
+ kind_to_size = {
+ mojom.BOOL: 1,
+ mojom.INT8: 1,
+ mojom.UINT8: 1,
+ mojom.INT16: 2,
+ mojom.UINT16: 2,
+ mojom.INT32: 4,
+ mojom.UINT32: 4,
+ mojom.FLOAT: 4,
+ mojom.HANDLE: 4,
+ mojom.MSGPIPE: 4,
+ mojom.SHAREDBUFFER: 4,
+ mojom.PLATFORMHANDLE: 4,
+ mojom.DCPIPE: 4,
+ mojom.DPPIPE: 4,
+ mojom.NULLABLE_HANDLE: 4,
+ mojom.NULLABLE_MSGPIPE: 4,
+ mojom.NULLABLE_SHAREDBUFFER: 4,
+ mojom.NULLABLE_PLATFORMHANDLE: 4,
+ mojom.NULLABLE_DCPIPE: 4,
+ mojom.NULLABLE_DPPIPE: 4,
+ mojom.INT64: 8,
+ mojom.UINT64: 8,
+ mojom.DOUBLE: 8,
+ mojom.STRING: 8,
+ mojom.NULLABLE_STRING: 8
+ }
+
+ @classmethod
+ def GetSizeForKind(cls, kind):
+ if isinstance(kind, (mojom.Array, mojom.Map, mojom.Struct, mojom.Interface,
+ mojom.AssociatedInterface, mojom.PendingRemote,
+ mojom.PendingAssociatedRemote)):
+ return 8
+ if isinstance(kind, mojom.Union):
+ return 16
+ if isinstance(kind, (mojom.InterfaceRequest, mojom.PendingReceiver)):
+ kind = mojom.MSGPIPE
+ if isinstance(
+ kind,
+ (mojom.AssociatedInterfaceRequest, mojom.PendingAssociatedReceiver)):
+ return 4
+ if isinstance(kind, mojom.Enum):
+ # TODO(mpcomplete): what about big enums?
+ return cls.kind_to_size[mojom.INT32]
+ if kind not in cls.kind_to_size:
+ raise Exception("Undefined type: %s. Did you forget to import the file "
+ "containing the definition?" % kind.spec)
+ return cls.kind_to_size[kind]
+
+ @classmethod
+ def GetAlignmentForKind(cls, kind):
+ if isinstance(kind, (mojom.Interface, mojom.AssociatedInterface,
+ mojom.PendingRemote, mojom.PendingAssociatedRemote)):
+ return 4
+ if isinstance(kind, mojom.Union):
+ return 8
+ return cls.GetSizeForKind(kind)
+
+ def __init__(self,
+ field,
+ index,
+ ordinal,
+ original_field=None,
+ sub_ordinal=None,
+ linked_value_packed_field=None):
+ """
+ Args:
+ field: the original field.
+ index: the position of the original field in the struct.
+ ordinal: the ordinal of the field for serialization.
+ original_field: See below.
+ sub_ordinal: See below.
+ linked_value_packed_field: See below.
+
+ original_field, sub_ordinal, and linked_value_packed_field are used to
+ support nullable ValueKind fields. For legacy reasons, nullable ValueKind
+ fields actually generate two PackedFields. This allows:
+
+ - backwards compatibility prior to Mojo support for nullable ValueKinds.
+ - correct packing of fields for the aforementioned backwards compatibility.
+
+ When translating Fields to PackedFields, the original field is turned into
+ two PackedFields: the first PackedField always has type mojom.BOOL, while
+ the second PackedField has the non-nullable version of the field's kind.
+
+ When constructing these PackedFields, original_field references the field
+ as defined in the mojom; the name as defined in the mojom will be used for
+ all layers above the wire/data layer.
+
+ sub_ordinal is used to sort the two PackedFields correctly with respect to
+ each other: the first mojom.BOOL field always has sub_ordinal 0, while the
+ second field always has sub_ordinal 1.
+
+ Finally, linked_value_packed_field is used by the serialization and
+ deserialization helpers, which generally just iterate over a PackedStruct's
+ PackedField's in ordinal order. This allows the helpers to easily reference
+ any related PackedFields rather than having to lookup related PackedFields
+ by index while iterating.
+ """
+ self.field = field
+ self.index = index
+ self.ordinal = ordinal
+ self.original_field = original_field
+ self.sub_ordinal = sub_ordinal
+ self.linked_value_packed_field = linked_value_packed_field
+ self.size = self.GetSizeForKind(self.field.kind)
+ self.alignment = self.GetAlignmentForKind(self.field.kind)
+ self.offset = None
+ self.bit = None
+ self.min_version = None
+
+
+def GetPad(offset, alignment):
+ """Returns the pad necessary to reserve space so that |offset + pad| equals to
+ some multiple of |alignment|."""
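+ # Worked example (illustrative): GetPad(offset=9, alignment=8) == 7, which
+ # pads offset 9 up to the next multiple of 8 (16).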
+ return (alignment - (offset % alignment)) % alignment
+
+
+def GetFieldOffset(field, last_field):
+ """Returns a 2-tuple of the field offset and bit (for BOOLs)."""
+ if (field.field.kind == mojom.BOOL and last_field.field.kind == mojom.BOOL
+ and last_field.bit < 7):
+ return (last_field.offset, last_field.bit + 1)
+
+ offset = last_field.offset + last_field.size
+ pad = GetPad(offset, field.alignment)
+ return (offset + pad, 0)
+
+
+def GetPayloadSizeUpToField(field):
+ """Returns the payload size (not including struct header) if |field| is the
+ last field.
+ """
+ if not field:
+ return 0
+ offset = field.offset + field.size
+ pad = GetPad(offset, 8)
+ return offset + pad
+
+
+def IsNullableValueKindPackedField(field):
+ """Returns true if `field` is derived from a nullable ValueKind field.
+
+ Nullable ValueKind fields often require special handling in the bindings due
+ to the way the implementation is constrained for wire compatibility.
+ """
+ assert isinstance(field, PackedField)
+ return field.sub_ordinal is not None
+
+
+def IsPrimaryNullableValueKindPackedField(field):
+ """Returns true if `field` is derived from a nullable ValueKind mojom field
+ and is the "primary" field.
+
+ The primary field is a bool PackedField that controls if the field should be
+ considered as present or not; it will have a reference to the PackedField that
+ holds the actual value representation if considered present.
+
+ Bindings code that translates between the wire protocol and the higher layers
+ can use this to simplify mapping multiple PackedFields to the single field
+ that is logically exposed to bindings consumers.
+ """
+ assert isinstance(field, PackedField)
+ return field.linked_value_packed_field is not None
+
+
+class PackedStruct:
+ def __init__(self, struct):
+ self.struct = struct
+ # |packed_fields| contains all the fields, in increasing offset order.
+ self.packed_fields = []
+ # |packed_fields_in_ordinal_order| refers to the same fields as
+ # |packed_fields|, but in ordinal order.
+ self.packed_fields_in_ordinal_order = []
+
+ # No fields.
+ if (len(struct.fields) == 0):
+ return
+
+ # Start by sorting by ordinal.
+ src_fields = self.packed_fields_in_ordinal_order
+ ordinal = 0
+ for index, field in enumerate(struct.fields):
+ if field.ordinal is not None:
+ ordinal = field.ordinal
+ # Nullable value types are a bit weird: they generate two PackedFields
+ # despite being a single ValueKind. This is for wire compatibility to
+ # ease the transition from legacy mojom syntax where nullable value types
+ # were not supported.
+ if isinstance(field.kind, mojom.ValueKind) and field.kind.is_nullable:
+ # The suffixes intentionally use Unicode codepoints which are considered
+ # valid C++/Java/JavaScript identifiers, yet are unlikely to be used in
+ # actual user code.
+ has_value_field = copy.copy(field)
+ has_value_field.name = f'{field.mojom_name}_$flag'
+ has_value_field.kind = mojom.BOOL
+
+ value_field = copy.copy(field)
+ value_field.name = f'{field.mojom_name}_$value'
+ value_field.kind = field.kind.MakeUnnullableKind()
+
+ value_packed_field = PackedField(value_field,
+ index,
+ ordinal,
+ original_field=field,
+ sub_ordinal=1,
+ linked_value_packed_field=None)
+ has_value_packed_field = PackedField(
+ has_value_field,
+ index,
+ ordinal,
+ original_field=field,
+ sub_ordinal=0,
+ linked_value_packed_field=value_packed_field)
+ src_fields.append(has_value_packed_field)
+ src_fields.append(value_packed_field)
+ else:
+ src_fields.append(PackedField(field, index, ordinal))
+ ordinal += 1
+ src_fields.sort(key=lambda field: (field.ordinal, field.sub_ordinal))
+
+ # Set |min_version| for each field.
+ next_min_version = 0
+ for packed_field in src_fields:
+ if packed_field.field.min_version is None:
+ assert next_min_version == 0
+ else:
+ assert packed_field.field.min_version >= next_min_version
+ next_min_version = packed_field.field.min_version
+ packed_field.min_version = next_min_version
+
+ if (packed_field.min_version != 0
+ and mojom.IsReferenceKind(packed_field.field.kind)
+ and not packed_field.field.kind.is_nullable):
+ raise Exception(
+ "Non-nullable reference fields are only allowed in version 0 of a "
+ "struct. %s.%s is defined with [MinVersion=%d]." %
+ (self.struct.name, packed_field.field.name,
+ packed_field.min_version))
+
+ src_field = src_fields[0]
+ src_field.offset = 0
+ src_field.bit = 0
+ dst_fields = self.packed_fields
+ dst_fields.append(src_field)
+
+ # Then find first slot that each field will fit.
+ for src_field in src_fields[1:]:
+ last_field = dst_fields[0]
+ for i in range(1, len(dst_fields)):
+ next_field = dst_fields[i]
+ offset, bit = GetFieldOffset(src_field, last_field)
+ if offset + src_field.size <= next_field.offset:
+ # Found hole.
+ src_field.offset = offset
+ src_field.bit = bit
+ dst_fields.insert(i, src_field)
+ break
+ last_field = next_field
+ if src_field.offset is None:
+ # Add to end
+ src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
+ dst_fields.append(src_field)
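+ # Illustrative example (mirrors the pack unit tests): fields declared as
+ # (int8, int32, uint8) end up at offsets 0, 4 and 1 respectively; the uint8
+ # is placed in the padding hole left before the 4-byte-aligned int32.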
+
+
+class ByteInfo:
+ def __init__(self):
+ self.is_padding = False
+ self.packed_fields = []
+
+
+def GetByteLayout(packed_struct):
+ total_payload_size = GetPayloadSizeUpToField(
+ packed_struct.packed_fields[-1] if packed_struct.packed_fields else None)
+ byte_info = [ByteInfo() for i in range(total_payload_size)]
+
+ limit_of_previous_field = 0
+ for packed_field in packed_struct.packed_fields:
+ for i in range(limit_of_previous_field, packed_field.offset):
+ byte_info[i].is_padding = True
+ byte_info[packed_field.offset].packed_fields.append(packed_field)
+ limit_of_previous_field = packed_field.offset + packed_field.size
+
+ for i in range(limit_of_previous_field, len(byte_info)):
+ byte_info[i].is_padding = True
+
+ for byte in byte_info:
+ # A given byte cannot both be padding and have fields packed into it.
+ assert not (byte.is_padding and byte.packed_fields)
+
+ return byte_info
+
+
+class VersionInfo:
+ def __init__(self, version, num_fields, num_packed_fields, num_bytes):
+ self.version = version
+ self.num_fields = num_fields
+ self.num_packed_fields = num_packed_fields
+ self.num_bytes = num_bytes
+
+
+def GetVersionInfo(packed_struct):
+ """Get version information for a struct.
+
+ Args:
+ packed_struct: A PackedStruct instance.
+
+ Returns:
+ A non-empty list of VersionInfo instances, sorted by version in increasing
+ order.
+ Note: The version numbers may not be consecutive.
+ """
+ versions = []
+ last_version = 0
+ last_num_fields = 0
+ last_num_packed_fields = 0
+ last_payload_size = 0
+
+ for packed_field in packed_struct.packed_fields_in_ordinal_order:
+ if packed_field.min_version != last_version:
+ versions.append(
+ VersionInfo(last_version, last_num_fields, last_num_packed_fields,
+ last_payload_size + HEADER_SIZE))
+ last_version = packed_field.min_version
+
+ # Nullable numeric fields (e.g. `int32?`) expand to two packed fields, so to
+ # avoid double-counting, only increment if the field is:
+ # - not used for representing a nullable value kind field, or
+ # - the primary field representing the nullable value kind field.
+ last_num_fields += 1 if (
+ not IsNullableValueKindPackedField(packed_field)
+ or IsPrimaryNullableValueKindPackedField(packed_field)) else 0
+
+ last_num_packed_fields += 1
+
+ # The fields are iterated in ordinal order here. However, the size of a
+ # version is determined by the last field of that version in pack order,
+ # instead of ordinal order. Therefore, we need to calculate the max value.
+ last_payload_size = max(GetPayloadSizeUpToField(packed_field),
+ last_payload_size)
+
+ assert len(
+ versions) == 0 or last_num_packed_fields != versions[-1].num_packed_fields
+ versions.append(
+ VersionInfo(last_version, last_num_fields, last_num_packed_fields,
+ last_payload_size + HEADER_SIZE))
+ return versions
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
new file mode 100644
index 00000000..7d8e4e01
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
@@ -0,0 +1,253 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import unittest
+
+from mojom.generate import module as mojom
+from mojom.generate import pack
+
+
+class PackTest(unittest.TestCase):
+ def testOrdinalOrder(self):
+ struct = mojom.Struct('test')
+ struct.AddField('testfield1', mojom.INT32, 2)
+ struct.AddField('testfield2', mojom.INT32, 1)
+ ps = pack.PackedStruct(struct)
+
+ self.assertEqual(2, len(ps.packed_fields))
+ self.assertEqual('testfield2', ps.packed_fields[0].field.mojom_name)
+ self.assertEqual('testfield1', ps.packed_fields[1].field.mojom_name)
+
+ def testZeroFields(self):
+ struct = mojom.Struct('test')
+ ps = pack.PackedStruct(struct)
+ self.assertEqual(0, len(ps.packed_fields))
+
+ def testOneField(self):
+ struct = mojom.Struct('test')
+ struct.AddField('testfield1', mojom.INT8)
+ ps = pack.PackedStruct(struct)
+ self.assertEqual(1, len(ps.packed_fields))
+
+ def _CheckPackSequence(self, kinds, fields, offsets):
+ """Checks the pack order and offsets of a sequence of mojom.Kinds.
+
+ Args:
+ kinds: A sequence of mojom.Kinds that specify the fields that are to be
+ created.
+ fields: The expected order of the resulting fields, with the integer "1"
+ first.
+ offsets: The expected order of offsets, with the integer "0" first.
+ """
+ struct = mojom.Struct('test')
+ index = 1
+ for kind in kinds:
+ struct.AddField('%d' % index, kind)
+ index += 1
+ ps = pack.PackedStruct(struct)
+ num_fields = len(ps.packed_fields)
+ self.assertEqual(len(kinds), num_fields)
+ for i in range(num_fields):
+ self.assertEqual('%d' % fields[i], ps.packed_fields[i].field.mojom_name)
+ self.assertEqual(offsets[i], ps.packed_fields[i].offset)
+
+ def testPaddingPackedInOrder(self):
+ return self._CheckPackSequence((mojom.INT8, mojom.UINT8, mojom.INT32),
+ (1, 2, 3), (0, 1, 4))
+
+ def testPaddingPackedOutOfOrder(self):
+ return self._CheckPackSequence((mojom.INT8, mojom.INT32, mojom.UINT8),
+ (1, 3, 2), (0, 1, 4))
+
+ def testPaddingPackedOverflow(self):
+ kinds = (mojom.INT8, mojom.INT32, mojom.INT16, mojom.INT8, mojom.INT8)
+ # The two 1-byte fields should be packed together first, followed by the
+ # int16, then the int32.
+ fields = (1, 4, 3, 2, 5)
+ offsets = (0, 1, 2, 4, 8)
+ return self._CheckPackSequence(kinds, fields, offsets)
+
+ def testNullableTypes(self):
+ kinds = (mojom.STRING.MakeNullableKind(), mojom.HANDLE.MakeNullableKind(),
+ mojom.Struct('test_struct').MakeNullableKind(),
+ mojom.DCPIPE.MakeNullableKind(), mojom.Array().MakeNullableKind(),
+ mojom.DPPIPE.MakeNullableKind(),
+ mojom.Array(length=5).MakeNullableKind(),
+ mojom.MSGPIPE.MakeNullableKind(),
+ mojom.Interface('test_interface').MakeNullableKind(),
+ mojom.SHAREDBUFFER.MakeNullableKind(),
+ mojom.InterfaceRequest().MakeNullableKind())
+ fields = (1, 2, 4, 3, 5, 6, 8, 7, 9, 10, 11)
+ offsets = (0, 8, 12, 16, 24, 32, 36, 40, 48, 56, 60)
+ return self._CheckPackSequence(kinds, fields, offsets)
+
+ def testAllTypes(self):
+ return self._CheckPackSequence(
+ (mojom.BOOL, mojom.INT8, mojom.STRING, mojom.UINT8, mojom.INT16,
+ mojom.DOUBLE, mojom.UINT16, mojom.INT32, mojom.UINT32, mojom.INT64,
+ mojom.FLOAT, mojom.STRING, mojom.HANDLE, mojom.UINT64,
+ mojom.Struct('test'), mojom.Array(), mojom.STRING.MakeNullableKind()),
+ (1, 2, 4, 5, 7, 3, 6, 8, 9, 10, 11, 13, 12, 14, 15, 16, 17, 18),
+ (0, 1, 2, 4, 6, 8, 16, 24, 28, 32, 40, 44, 48, 56, 64, 72, 80, 88))
+
+ def testPaddingPackedOutOfOrderByOrdinal(self):
+ struct = mojom.Struct('test')
+ struct.AddField('testfield1', mojom.INT8)
+ struct.AddField('testfield3', mojom.UINT8, 3)
+ struct.AddField('testfield2', mojom.INT32, 2)
+ ps = pack.PackedStruct(struct)
+ self.assertEqual(3, len(ps.packed_fields))
+
+ # Second byte should be packed in behind first, altering order.
+ self.assertEqual('testfield1', ps.packed_fields[0].field.mojom_name)
+ self.assertEqual('testfield3', ps.packed_fields[1].field.mojom_name)
+ self.assertEqual('testfield2', ps.packed_fields[2].field.mojom_name)
+
+ # Second byte should be packed with first.
+ self.assertEqual(0, ps.packed_fields[0].offset)
+ self.assertEqual(1, ps.packed_fields[1].offset)
+ self.assertEqual(4, ps.packed_fields[2].offset)
+
+ def testBools(self):
+ struct = mojom.Struct('test')
+ struct.AddField('bit0', mojom.BOOL)
+ struct.AddField('bit1', mojom.BOOL)
+ struct.AddField('int', mojom.INT32)
+ struct.AddField('bit2', mojom.BOOL)
+ struct.AddField('bit3', mojom.BOOL)
+ struct.AddField('bit4', mojom.BOOL)
+ struct.AddField('bit5', mojom.BOOL)
+ struct.AddField('bit6', mojom.BOOL)
+ struct.AddField('bit7', mojom.BOOL)
+ struct.AddField('bit8', mojom.BOOL)
+ ps = pack.PackedStruct(struct)
+ self.assertEqual(10, len(ps.packed_fields))
+
+ # First 8 bits packed together.
+ for i in range(8):
+ pf = ps.packed_fields[i]
+ self.assertEqual(0, pf.offset)
+ self.assertEqual("bit%d" % i, pf.field.mojom_name)
+ self.assertEqual(i, pf.bit)
+
+ # Ninth bit goes into second byte.
+ self.assertEqual("bit8", ps.packed_fields[8].field.mojom_name)
+ self.assertEqual(1, ps.packed_fields[8].offset)
+ self.assertEqual(0, ps.packed_fields[8].bit)
+
+ # int comes last.
+ self.assertEqual("int", ps.packed_fields[9].field.mojom_name)
+ self.assertEqual(4, ps.packed_fields[9].offset)
+
+ def testMinVersion(self):
+ """Tests that |min_version| is properly set for packed fields."""
+ struct = mojom.Struct('test')
+ struct.AddField('field_2', mojom.BOOL, 2)
+ struct.AddField('field_0', mojom.INT32, 0)
+ struct.AddField('field_1', mojom.INT64, 1)
+ ps = pack.PackedStruct(struct)
+
+ self.assertEqual('field_0', ps.packed_fields[0].field.mojom_name)
+ self.assertEqual('field_2', ps.packed_fields[1].field.mojom_name)
+ self.assertEqual('field_1', ps.packed_fields[2].field.mojom_name)
+
+ self.assertEqual(0, ps.packed_fields[0].min_version)
+ self.assertEqual(0, ps.packed_fields[1].min_version)
+ self.assertEqual(0, ps.packed_fields[2].min_version)
+
+ struct.fields[0].attributes = {'MinVersion': 1}
+ ps = pack.PackedStruct(struct)
+
+ self.assertEqual(0, ps.packed_fields[0].min_version)
+ self.assertEqual(1, ps.packed_fields[1].min_version)
+ self.assertEqual(0, ps.packed_fields[2].min_version)
+
+ def testGetVersionInfoEmptyStruct(self):
+ """Tests that pack.GetVersionInfo() never returns an empty list, even for
+ empty structs.
+ """
+ struct = mojom.Struct('test')
+ ps = pack.PackedStruct(struct)
+
+ versions = pack.GetVersionInfo(ps)
+ self.assertEqual(1, len(versions))
+ self.assertEqual(0, versions[0].version)
+ self.assertEqual(0, versions[0].num_fields)
+ self.assertEqual(8, versions[0].num_bytes)
+
+ def testGetVersionInfoComplexOrder(self):
+ """Tests pack.GetVersionInfo() using a struct whose definition order,
+ ordinal order and pack order for fields are all different.
+ """
+ struct = mojom.Struct('test')
+ struct.AddField(
+ 'field_3', mojom.BOOL, ordinal=3, attributes={'MinVersion': 3})
+ struct.AddField('field_0', mojom.INT32, ordinal=0)
+ struct.AddField(
+ 'field_1', mojom.INT64, ordinal=1, attributes={'MinVersion': 2})
+ struct.AddField(
+ 'field_2', mojom.INT64, ordinal=2, attributes={'MinVersion': 3})
+ ps = pack.PackedStruct(struct)
+
+ versions = pack.GetVersionInfo(ps)
+ self.assertEqual(3, len(versions))
+
+ self.assertEqual(0, versions[0].version)
+ self.assertEqual(1, versions[0].num_fields)
+ self.assertEqual(16, versions[0].num_bytes)
+
+ self.assertEqual(2, versions[1].version)
+ self.assertEqual(2, versions[1].num_fields)
+ self.assertEqual(24, versions[1].num_bytes)
+
+ self.assertEqual(3, versions[2].version)
+ self.assertEqual(4, versions[2].num_fields)
+ self.assertEqual(32, versions[2].num_bytes)
+
+ def testGetVersionInfoPackedStruct(self):
+ """Tests that pack.GetVersionInfo() correctly sets version, num_fields,
+ and num_packed_fields for a packed struct.
+ """
+ struct = mojom.Struct('test')
+ struct.AddField('field_0', mojom.BOOL, ordinal=0)
+ struct.AddField('field_1',
+ mojom.NULLABLE_BOOL,
+ ordinal=1,
+ attributes={'MinVersion': 1})
+ struct.AddField('field_2',
+ mojom.NULLABLE_BOOL,
+ ordinal=2,
+ attributes={'MinVersion': 2})
+ ps = pack.PackedStruct(struct)
+ versions = pack.GetVersionInfo(ps)
+
+ self.assertEqual(3, len(versions))
+ self.assertEqual(0, versions[0].version)
+ self.assertEqual(1, versions[1].version)
+ self.assertEqual(2, versions[2].version)
+ self.assertEqual(1, versions[0].num_fields)
+ self.assertEqual(2, versions[1].num_fields)
+ self.assertEqual(3, versions[2].num_fields)
+ self.assertEqual(1, versions[0].num_packed_fields)
+ self.assertEqual(3, versions[1].num_packed_fields)
+ self.assertEqual(5, versions[2].num_packed_fields)
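+    # As the counts above show, each NULLABLE_BOOL field contributes two
+    # packed fields (a flag recording whether a value is present, plus the
+    # value itself), which is why num_packed_fields grows faster than
+    # num_fields.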
+
+ def testInterfaceAlignment(self):
+ """Tests that interfaces are aligned on 4-byte boundaries, although the size
+ of an interface is 8 bytes.
+ """
+ kinds = (mojom.INT32, mojom.Interface('test_interface'))
+ fields = (1, 2)
+ offsets = (0, 4)
+ self._CheckPackSequence(kinds, fields, offsets)
+
+ def testAssociatedInterfaceAlignment(self):
+ """Tests that associated interfaces are aligned on 4-byte boundaries,
+ although the size of an associated interface is 8 bytes.
+ """
+ kinds = (mojom.INT32,
+ mojom.AssociatedInterface(mojom.Interface('test_interface')))
+ fields = (1, 2)
+ offsets = (0, 4)
+ self._CheckPackSequence(kinds, fields, offsets)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
new file mode 100644
index 00000000..807e2a4f
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
@@ -0,0 +1,82 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Based on third_party/WebKit/Source/build/scripts/template_expander.py.
+
+import os.path
+import sys
+
+from mojom import fileutil
+
+fileutil.AddLocalRepoThirdPartyDirToModulePath()
+import jinja2
+
+
+def ApplyTemplate(mojo_generator, path_to_template, params, **kwargs):
+ loader = jinja2.ModuleLoader(
+ os.path.join(mojo_generator.bytecode_path,
+ "%s.zip" % mojo_generator.GetTemplatePrefix()))
+ final_kwargs = dict(mojo_generator.GetJinjaParameters())
+ final_kwargs.update(kwargs)
+
+ jinja_env = jinja2.Environment(
+ loader=loader, keep_trailing_newline=True, **final_kwargs)
+ jinja_env.globals.update(mojo_generator.GetGlobals())
+ jinja_env.filters.update(mojo_generator.GetFilters())
+ template = jinja_env.get_template(path_to_template)
+ return template.render(params)
+
+
+def UseJinja(path_to_template, **kwargs):
+ def RealDecorator(generator):
+ def GeneratorInternal(*args, **kwargs2):
+ parameters = generator(*args, **kwargs2)
+ return ApplyTemplate(args[0], path_to_template, parameters, **kwargs)
+
+ GeneratorInternal.__name__ = generator.__name__
+ return GeneratorInternal
+
+ return RealDecorator
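+# Illustrative usage sketch (the method and template names below are
+# hypothetical, not taken from this tree): a generator method decorated with
+# UseJinja returns the template parameters, and the decorator renders them
+# through the named precompiled template, e.g.
+#
+#   @UseJinja('module.h.tmpl')
+#   def _GenerateModuleHeader(self):
+#     return {'module': self.module}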
+
+
+def ApplyImportedTemplate(mojo_generator, path_to_template, filename, params,
+ **kwargs):
+ loader = jinja2.FileSystemLoader(searchpath=path_to_template)
+ final_kwargs = dict(mojo_generator.GetJinjaParameters())
+ final_kwargs.update(kwargs)
+
+ jinja_env = jinja2.Environment(
+ loader=loader, keep_trailing_newline=True, **final_kwargs)
+ jinja_env.globals.update(mojo_generator.GetGlobals())
+ jinja_env.filters.update(mojo_generator.GetFilters())
+ template = jinja_env.get_template(filename)
+ return template.render(params)
+
+
+def UseJinjaForImportedTemplate(func):
+ def wrapper(*args, **kwargs):
+ parameters = func(*args, **kwargs)
+ path_to_template = args[1]
+ filename = args[2]
+ return ApplyImportedTemplate(args[0], path_to_template, filename,
+ parameters)
+
+ wrapper.__name__ = func.__name__
+ return wrapper
+
+
+def PrecompileTemplates(generator_modules, output_dir):
+ for module in generator_modules.values():
+ generator = module.Generator(None)
+ jinja_env = jinja2.Environment(
+ loader=jinja2.FileSystemLoader([
+ os.path.join(
+ os.path.dirname(module.__file__), generator.GetTemplatePrefix())
+ ]))
+ jinja_env.filters.update(generator.GetFilters())
+ jinja_env.compile_templates(os.path.join(
+ output_dir, "%s.zip" % generator.GetTemplatePrefix()),
+ extensions=["tmpl"],
+ zip="stored",
+ ignore_errors=False)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
new file mode 100644
index 00000000..83bb297f
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
@@ -0,0 +1,1258 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Convert parse tree to AST.
+
+This module converts the parse tree to the AST we use for code generation. The
+main entry point is OrderedModule, which gets passed the parser
+representation of a mojom file. When called it's assumed that all imports have
+already been parsed and converted to ASTs before.
+"""
+
+import itertools
+import os
+import re
+
+from collections import OrderedDict
+from mojom.generate import generator
+from mojom.generate import module as mojom
+from mojom.parse import ast
+
+
+is_running_backwards_compatibility_check_hack = False
+
+### DO NOT ADD ENTRIES TO THIS LIST. ###
+_EXTENSIBLE_ENUMS_MISSING_DEFAULT = (
+ 'x:arc.keymaster.mojom.Algorithm',
+ 'x:arc.keymaster.mojom.Digest',
+ 'x:arc.keymaster.mojom.SignatureResult',
+ 'x:arc.mojom.AccessibilityActionType',
+ 'x:arc.mojom.AccessibilityBooleanProperty',
+ 'x:arc.mojom.AccessibilityEventIntListProperty',
+ 'x:arc.mojom.AccessibilityEventIntProperty',
+ 'x:arc.mojom.AccessibilityEventStringProperty',
+ 'x:arc.mojom.AccessibilityEventType',
+ 'x:arc.mojom.AccessibilityFilterType',
+ 'x:arc.mojom.AccessibilityIntListProperty',
+ 'x:arc.mojom.AccessibilityIntProperty',
+ 'x:arc.mojom.AccessibilityLiveRegionType',
+ 'x:arc.mojom.AccessibilityNotificationStateType',
+ 'x:arc.mojom.AccessibilityRangeType',
+ 'x:arc.mojom.AccessibilitySelectionMode',
+ 'x:arc.mojom.AccessibilityStringListProperty',
+ 'x:arc.mojom.AccessibilityStringProperty',
+ 'x:arc.mojom.AccessibilityWindowBooleanProperty',
+ 'x:arc.mojom.AccessibilityWindowIntListProperty',
+ 'x:arc.mojom.AccessibilityWindowIntProperty',
+ 'x:arc.mojom.AccessibilityWindowStringProperty',
+ 'x:arc.mojom.AccessibilityWindowType',
+ 'x:arc.mojom.AccountCheckStatus',
+ 'x:arc.mojom.AccountUpdateType',
+ 'x:arc.mojom.ActionType',
+ 'x:arc.mojom.Algorithm',
+ 'x:arc.mojom.AndroidIdSource',
+ 'x:arc.mojom.AnrSource',
+ 'x:arc.mojom.AnrType',
+ 'x:arc.mojom.AppDiscoveryRequestState',
+ 'x:arc.mojom.AppKillType',
+ 'x:arc.mojom.AppPermission',
+ 'x:arc.mojom.AppPermissionGroup',
+ 'x:arc.mojom.AppReinstallState',
+ 'x:arc.mojom.AppShortcutItemType',
+ 'x:arc.mojom.ArcAuthCodeStatus',
+ 'x:arc.mojom.ArcClipboardDragDropEvent',
+ 'x:arc.mojom.ArcCorePriAbiMigEvent',
+ 'x:arc.mojom.ArcDnsQuery',
+ 'x:arc.mojom.ArcImageCopyPasteCompatAction',
+ 'x:arc.mojom.ArcNetworkError',
+ 'x:arc.mojom.ArcNetworkEvent',
+ 'x:arc.mojom.ArcNotificationEvent',
+ 'x:arc.mojom.ArcNotificationExpandState',
+ 'x:arc.mojom.ArcNotificationPriority',
+ 'x:arc.mojom.ArcNotificationRemoteInputState',
+ 'x:arc.mojom.ArcNotificationShownContents',
+ 'x:arc.mojom.ArcNotificationStyle',
+ 'x:arc.mojom.ArcNotificationType',
+ 'x:arc.mojom.ArcPipEvent',
+ 'x:arc.mojom.ArcResizeLockState',
+ 'x:arc.mojom.ArcSignInSuccess',
+ 'x:arc.mojom.ArcTimerResult',
+ 'x:arc.mojom.AudioSwitch',
+ 'x:arc.mojom.BluetoothAclState',
+ 'x:arc.mojom.BluetoothAdapterState',
+ 'x:arc.mojom.BluetoothAdvertisingDataType',
+ 'x:arc.mojom.BluetoothBondState',
+ 'x:arc.mojom.BluetoothDeviceType',
+ 'x:arc.mojom.BluetoothDiscoveryState',
+ 'x:arc.mojom.BluetoothGattDBAttributeType',
+ 'x:arc.mojom.BluetoothGattStatus',
+ 'x:arc.mojom.BluetoothPropertyType',
+ 'x:arc.mojom.BluetoothScanMode',
+ 'x:arc.mojom.BluetoothSdpAttributeType',
+ 'x:arc.mojom.BluetoothSocketType',
+ 'x:arc.mojom.BluetoothStatus',
+ 'x:arc.mojom.BootType',
+ 'x:arc.mojom.CaptionTextShadowType',
+ 'x:arc.mojom.ChangeType',
+ 'x:arc.mojom.ChromeAccountType',
+ 'x:arc.mojom.ChromeApp',
+ 'x:arc.mojom.ChromePage',
+ 'x:arc.mojom.ClockId',
+ 'x:arc.mojom.CloudProvisionFlowError',
+ 'x:arc.mojom.CommandResultType',
+ 'x:arc.mojom.CompanionLibApiId',
+ 'x:arc.mojom.ConnectionStateType',
+ 'x:arc.mojom.ContentChangeType',
+ 'x:arc.mojom.CpuRestrictionState',
+ 'x:arc.mojom.CursorCoordinateSpace',
+ 'x:arc.mojom.DataRestoreStatus',
+ 'x:arc.mojom.DecoderStatus',
+ 'x:arc.mojom.DeviceType',
+ 'x:arc.mojom.Digest',
+ 'x:arc.mojom.DisplayWakeLockType',
+ 'x:arc.mojom.EapMethod',
+ 'x:arc.mojom.EapPhase2Method',
+ 'x:arc.mojom.FileSelectorEventType',
+ 'x:arc.mojom.GMSCheckInError',
+ 'x:arc.mojom.GMSSignInError',
+ 'x:arc.mojom.GeneralSignInError',
+ 'x:arc.mojom.GetNetworksRequestType',
+ 'x:arc.mojom.HalPixelFormat',
+ 'x:arc.mojom.IPAddressType',
+ 'x:arc.mojom.InstallErrorReason',
+ 'x:arc.mojom.KeyFormat',
+ 'x:arc.mojom.KeyManagement',
+ 'x:arc.mojom.KeyPurpose',
+ 'x:arc.mojom.KeymasterError',
+ 'x:arc.mojom.MainAccountHashMigrationStatus',
+ 'x:arc.mojom.MainAccountResolutionStatus',
+ 'x:arc.mojom.ManagementChangeStatus',
+ 'x:arc.mojom.ManagementState',
+ 'x:arc.mojom.MessageCenterVisibility',
+ 'x:arc.mojom.MetricsType',
+ 'x:arc.mojom.MountEvent',
+ 'x:arc.mojom.NativeBridgeType',
+ 'x:arc.mojom.NetworkResult',
+ 'x:arc.mojom.NetworkType',
+ 'x:arc.mojom.OemCryptoAlgorithm',
+ 'x:arc.mojom.OemCryptoCipherMode',
+ 'x:arc.mojom.OemCryptoHdcpCapability',
+ 'x:arc.mojom.OemCryptoLicenseType',
+ 'x:arc.mojom.OemCryptoPrivateKey',
+ 'x:arc.mojom.OemCryptoProvisioningMethod',
+ 'x:arc.mojom.OemCryptoResult',
+ 'x:arc.mojom.OemCryptoRsaPaddingScheme',
+ 'x:arc.mojom.OemCryptoUsageEntryStatus',
+ 'x:arc.mojom.Padding',
+ 'x:arc.mojom.PaiFlowState',
+ 'x:arc.mojom.PatternType',
+ 'x:arc.mojom.PressureLevel',
+ 'x:arc.mojom.PrintColorMode',
+ 'x:arc.mojom.PrintContentType',
+ 'x:arc.mojom.PrintDuplexMode',
+ 'x:arc.mojom.PrinterStatus',
+ 'x:arc.mojom.ProcessState',
+ 'x:arc.mojom.PurchaseState',
+ 'x:arc.mojom.ReauthReason',
+ 'x:arc.mojom.ScaleFactor',
+ 'x:arc.mojom.SecurityType',
+ 'x:arc.mojom.SegmentStyle',
+ 'x:arc.mojom.SelectFilesActionType',
+ 'x:arc.mojom.SetNativeChromeVoxResponse',
+ 'x:arc.mojom.ShowPackageInfoPage',
+ 'x:arc.mojom.SpanType',
+ 'x:arc.mojom.SupportedLinkChangeSource',
+ 'x:arc.mojom.TetheringClientState',
+ 'x:arc.mojom.TextInputType',
+ 'x:arc.mojom.TtsEventType',
+ 'x:arc.mojom.VideoCodecProfile',
+ 'x:arc.mojom.VideoDecodeAccelerator.Result',
+ 'x:arc.mojom.VideoEncodeAccelerator.Error',
+ 'x:arc.mojom.VideoFrameStorageType',
+ 'x:arc.mojom.VideoPixelFormat',
+ 'x:arc.mojom.WakefulnessMode',
+ 'x:arc.mojom.WebApkInstallResult',
+ 'x:ash.ime.mojom.InputFieldType',
+ 'x:ash.ime.mojom.PersonalizationMode',
+ 'x:ash.language.mojom.FeatureId',
+ 'x:blink.mojom.ScrollRestorationType',
+ 'x:chromeos.cdm.mojom.CdmKeyStatus',
+ 'x:chromeos.cdm.mojom.CdmMessageType',
+ 'x:chromeos.cdm.mojom.CdmSessionType',
+ 'x:chromeos.cdm.mojom.DecryptStatus',
+ 'x:chromeos.cdm.mojom.EmeInitDataType',
+ 'x:chromeos.cdm.mojom.EncryptionScheme',
+ 'x:chromeos.cdm.mojom.HdcpVersion',
+ 'x:chromeos.cdm.mojom.OutputProtection.LinkType',
+ 'x:chromeos.cdm.mojom.OutputProtection.ProtectionType',
+ 'x:chromeos.cdm.mojom.PromiseException',
+ 'x:chromeos.cfm.mojom.EnqueuePriority',
+ 'x:chromeos.cfm.mojom.LoggerErrorCode',
+ 'x:chromeos.cfm.mojom.LoggerState',
+ 'x:chromeos.cros_healthd.mojom.CryptoAlgorithm',
+ 'x:chromeos.cros_healthd.mojom.EncryptionState',
+ 'x:chromeos.machine_learning.mojom.AnnotationUsecase',
+ 'x:chromeos.machine_learning.mojom.BuiltinModelId',
+ 'x:chromeos.machine_learning.mojom.CreateGraphExecutorResult',
+ 'x:chromeos.machine_learning.mojom.DocumentScannerResultStatus',
+ 'x:chromeos.machine_learning.mojom.EndpointReason',
+ 'x:chromeos.machine_learning.mojom.EndpointerType',
+ 'x:chromeos.machine_learning.mojom.ExecuteResult',
+ 'x:chromeos.machine_learning.mojom.GrammarCheckerResult.Status',
+ 'x:chromeos.machine_learning.mojom.HandwritingRecognizerResult.Status',
+ 'x:chromeos.machine_learning.mojom.LoadHandwritingModelResult',
+ 'x:chromeos.machine_learning.mojom.LoadModelResult',
+ 'x:chromeos.machine_learning.mojom.Rotation',
+ 'x:chromeos.network_config.mojom.ConnectionStateType',
+ 'x:chromeos.network_config.mojom.DeviceStateType',
+ 'x:chromeos.network_config.mojom.IPConfigType',
+ 'x:chromeos.network_config.mojom.NetworkType',
+ 'x:chromeos.network_config.mojom.OncSource',
+ 'x:chromeos.network_config.mojom.PolicySource',
+ 'x:chromeos.network_config.mojom.PortalState',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdEvent',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdWebRequestHttpMethod',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdWebRequestStatus',
+ 'x:cros.mojom.CameraClientType',
+ 'x:cros.mojom.CameraMetadataSectionStart',
+ 'x:cros.mojom.CameraMetadataTag',
+ 'x:cros.mojom.HalPixelFormat',
+ 'x:crosapi.mojom.AllowedPaths',
+ 'x:crosapi.mojom.BrowserAppInstanceType',
+ 'x:crosapi.mojom.CreationResult',
+ 'x:crosapi.mojom.DeviceAccessResultCode',
+ 'x:crosapi.mojom.DeviceMode',
+ 'x:crosapi.mojom.DlpRestrictionLevel',
+ 'x:crosapi.mojom.ExoImeSupport',
+ 'x:crosapi.mojom.FullscreenVisibility',
+ 'x:crosapi.mojom.GoogleServiceAuthError.State',
+ 'x:crosapi.mojom.IsInstallableResult',
+ 'x:crosapi.mojom.KeyTag',
+ 'x:crosapi.mojom.KeystoreSigningAlgorithmName',
+ 'x:crosapi.mojom.KeystoreType',
+ 'x:crosapi.mojom.LacrosFeedbackSource',
+ 'x:crosapi.mojom.MemoryPressureLevel',
+ 'x:crosapi.mojom.MetricsReportingManaged',
+ 'x:crosapi.mojom.NotificationType',
+ 'x:crosapi.mojom.OndeviceHandwritingSupport',
+ 'x:crosapi.mojom.OpenResult',
+ 'x:crosapi.mojom.PolicyDomain',
+ 'x:crosapi.mojom.RegistrationCodeType',
+ 'x:crosapi.mojom.ScaleFactor',
+ 'x:crosapi.mojom.SearchResult.OptionalBool',
+ 'x:crosapi.mojom.SelectFileDialogType',
+ 'x:crosapi.mojom.SelectFileResult',
+ 'x:crosapi.mojom.SharesheetResult',
+ 'x:crosapi.mojom.TouchEventType',
+ 'x:crosapi.mojom.VideoRotation',
+ 'x:crosapi.mojom.WallpaperLayout',
+ 'x:crosapi.mojom.WebAppInstallResultCode',
+ 'x:crosapi.mojom.WebAppUninstallResultCode',
+ 'x:device.mojom.HidBusType',
+ 'x:device.mojom.WakeLockReason',
+ 'x:device.mojom.WakeLockType',
+ 'x:drivefs.mojom.DialogReason.Type',
+ 'x:drivefs.mojom.DriveError.Type',
+ 'x:drivefs.mojom.DriveFsDelegate.ExtensionConnectionStatus',
+ 'x:drivefs.mojom.FileMetadata.CanPinStatus',
+ 'x:drivefs.mojom.FileMetadata.Type',
+ 'x:drivefs.mojom.ItemEventReason',
+ 'x:drivefs.mojom.MirrorPathStatus',
+ 'x:drivefs.mojom.MirrorSyncStatus',
+ 'x:drivefs.mojom.QueryParameters.SortField',
+ 'x:fuzz.mojom.FuzzEnum',
+ 'x:media.mojom.FillLightMode',
+ 'x:media.mojom.MeteringMode',
+ 'x:media.mojom.PowerLineFrequency',
+ 'x:media.mojom.RedEyeReduction',
+ 'x:media.mojom.ResolutionChangePolicy',
+ 'x:media.mojom.VideoCaptureApi',
+ 'x:media.mojom.VideoCaptureBufferType',
+ 'x:media.mojom.VideoCaptureError',
+ 'x:media.mojom.VideoCaptureFrameDropReason',
+ 'x:media.mojom.VideoCapturePixelFormat',
+ 'x:media.mojom.VideoCaptureTransportType',
+ 'x:media.mojom.VideoFacingMode',
+ 'x:media_session.mojom.AudioFocusType',
+ 'x:media_session.mojom.CameraState',
+ 'x:media_session.mojom.EnforcementMode',
+ 'x:media_session.mojom.MediaAudioVideoState',
+ 'x:media_session.mojom.MediaImageBitmapColorType',
+ 'x:media_session.mojom.MediaPictureInPictureState',
+ 'x:media_session.mojom.MediaPlaybackState',
+ 'x:media_session.mojom.MediaSession.SuspendType',
+ 'x:media_session.mojom.MediaSessionAction',
+ 'x:media_session.mojom.MediaSessionImageType',
+ 'x:media_session.mojom.MediaSessionInfo.SessionState',
+ 'x:media_session.mojom.MicrophoneState',
+ 'x:ml.model_loader.mojom.ComputeResult',
+ 'x:ml.model_loader.mojom.CreateModelLoaderResult',
+ 'x:ml.model_loader.mojom.LoadModelResult',
+ 'x:mojo.test.AnExtensibleEnum',
+ 'x:mojo.test.EnumB',
+ 'x:mojo.test.ExtensibleEmptyEnum',
+ 'x:mojo.test.enum_default_unittest.mojom.ExtensibleEnumWithoutDefault',
+ 'x:network.mojom.WebSandboxFlags',
+ 'x:payments.mojom.BillingResponseCode',
+ 'x:payments.mojom.CreateDigitalGoodsResponseCode',
+ 'x:payments.mojom.ItemType',
+ 'x:printing.mojom.PrinterType',
+ 'x:ui.mojom.KeyboardCode',
+)
+### DO NOT ADD ENTRIES TO THIS LIST. ###
+
+
+def _DuplicateName(values):
+ """Returns the 'mojom_name' of the first entry in |values| whose 'mojom_name'
+ has already been encountered. If there are no duplicates, returns None."""
+ names = set()
+ for value in values:
+ if value.mojom_name in names:
+ return value.mojom_name
+ names.add(value.mojom_name)
+ return None
+
+
+def _ElemsOfType(elems, elem_type, scope):
+ """Find all elements of the given type.
+
+ Args:
+ elems: {Sequence[Any]} Sequence of elems.
+ elem_type: {Type[C]} Extract all elems of this type.
+ scope: {str} The name of the surrounding scope (e.g. struct
+ definition). Used in error messages.
+
+ Returns:
+ {List[C]} All elems of matching type.
+ """
+ assert isinstance(elem_type, type)
+ result = [elem for elem in elems if isinstance(elem, elem_type)]
+ duplicate_name = _DuplicateName(result)
+ if duplicate_name:
+ raise Exception('Names in mojom must be unique within a scope. The name '
+ '"%s" is used more than once within the scope "%s".' %
+ (duplicate_name, scope))
+ return result
+
+
+def _ProcessElements(scope, elements, operations_by_type):
+ """Iterates over the given elements, running a function from
+ operations_by_type for any element that matches a key in that dict. The scope
+ is the name of the surrounding scope, such as a filename or struct name, used
+ only in error messages."""
+ names_in_this_scope = set()
+ for element in elements:
+ # pylint: disable=unidiomatic-typecheck
+ element_type = type(element)
+ if element_type in operations_by_type:
+ if element.mojom_name in names_in_this_scope:
+ raise Exception('Names must be unique within a scope. The name "%s" is '
+ 'used more than once within the scope "%s".' %
+                        (element.mojom_name, scope))
+      names_in_this_scope.add(element.mojom_name)
+      operations_by_type[element_type](element)
+
+
+def _MapKind(kind):
+ map_to_kind = {
+ 'bool': 'b',
+ 'int8': 'i8',
+ 'int16': 'i16',
+ 'int32': 'i32',
+ 'int64': 'i64',
+ 'uint8': 'u8',
+ 'uint16': 'u16',
+ 'uint32': 'u32',
+ 'uint64': 'u64',
+ 'float': 'f',
+ 'double': 'd',
+ 'string': 's',
+ 'handle': 'h',
+ 'handle<data_pipe_consumer>': 'h:d:c',
+ 'handle<data_pipe_producer>': 'h:d:p',
+ 'handle<message_pipe>': 'h:m',
+ 'handle<shared_buffer>': 'h:s',
+ 'handle<platform>': 'h:p'
+ }
+ if kind.endswith('?'):
+ base_kind = _MapKind(kind[0:-1])
+ return '?' + base_kind
+ if kind.endswith('}'):
+ lbracket = kind.rfind('{')
+ value = kind[0:lbracket]
+ return 'm[' + _MapKind(kind[lbracket + 1:-1]) + '][' + _MapKind(value) + ']'
+ if kind.endswith(']'):
+ lbracket = kind.rfind('[')
+ typename = kind[0:lbracket]
+ return 'a' + kind[lbracket + 1:-1] + ':' + _MapKind(typename)
+ if kind.startswith('asso<'):
+ assert kind.endswith('>')
+ return 'asso:' + _MapKind(kind[5:-1])
+ if kind.startswith('rmt<'):
+ assert kind.endswith('>')
+ return 'rmt:' + _MapKind(kind[4:-1])
+ if kind.startswith('rcv<'):
+ assert kind.endswith('>')
+ return 'rcv:' + _MapKind(kind[4:-1])
+ if kind.startswith('rma<'):
+ assert kind.endswith('>')
+ return 'rma:' + _MapKind(kind[4:-1])
+ if kind.startswith('rca<'):
+ assert kind.endswith('>')
+ return 'rca:' + _MapKind(kind[4:-1])
+ if kind in map_to_kind:
+ return map_to_kind[kind]
+ return 'x:' + kind
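+# Example translations, matching the expectations in translate_unittest.py:
+#   'int32[]'              -> 'a:i32'
+#   'uint8{string}'        -> 'm[s][u8]'   (key spec first, then value spec)
+#   'asso<SomeInterface>?' -> '?asso:x:SomeInterface'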
+
+
+def _MapAttributeValue(module, kind, value):
+ # True/False/None
+ if value is None:
+ return value
+ if not isinstance(value, str):
+ return value
+ # Is the attribute value the name of a feature?
+ try:
+ # Features cannot be nested in other types, so lookup in the global scope.
+ trial = _LookupKind(module.kinds, 'x:' + value,
+ _GetScopeForKind(module, kind))
+ if isinstance(trial, mojom.Feature):
+ return trial
+ except ValueError:
+ pass
+ # Is the attribute value a constant or enum value?
+ try:
+ trial = _LookupValue(module, None, None, ('IDENTIFIER', value))
+ if isinstance(trial, mojom.ConstantValue):
+ return trial.constant
+ if isinstance(trial, mojom.EnumValue):
+ return trial
+ except ValueError:
+ pass
+ # If not a referenceable mojo type - return as a string.
+ return value
+
+
+def _AttributeListToDict(module, kind, attribute_list):
+ if attribute_list is None:
+ return None
+ assert isinstance(attribute_list, ast.AttributeList)
+ attributes = dict()
+ for attribute in attribute_list:
+ if attribute.key in attributes:
+ raise Exception("Duplicate key (%s) in attribute list" % attribute.key)
+ attributes[attribute.key] = _MapAttributeValue(module, kind,
+ attribute.value)
+ return attributes
+
+
+builtin_values = frozenset([
+ "double.INFINITY", "double.NEGATIVE_INFINITY", "double.NAN",
+ "float.INFINITY", "float.NEGATIVE_INFINITY", "float.NAN"
+])
+
+
+def _IsBuiltinValue(value):
+ return value in builtin_values
+
+
+def _LookupKind(kinds, spec, scope):
+ """Tries to find which Kind a spec refers to, given the scope in which its
+ referenced. Starts checking from the narrowest scope to most general. For
+ example, given a struct field like
+ Foo.Bar x;
+ Foo.Bar could refer to the type 'Bar' in the 'Foo' namespace, or an inner
+ type 'Bar' in the struct 'Foo' in the current namespace.
+
+ |scope| is a tuple that looks like (namespace, struct/interface), referring
+ to the location where the type is referenced."""
+ if spec.startswith('x:'):
+ mojom_name = spec[2:]
+ for i in range(len(scope), -1, -1):
+ test_spec = 'x:'
+ if i > 0:
+ test_spec += '.'.join(scope[:i]) + '.'
+ test_spec += mojom_name
+ kind = kinds.get(test_spec)
+ if kind:
+ return kind
+
+ return kinds.get(spec)
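+# For example, with scope ('foo', 'Struct') and spec 'x:Bar', the loop above
+# probes 'x:foo.Struct.Bar', then 'x:foo.Bar', and finally 'x:Bar'.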
+
+
+def _GetScopeForKind(module, kind):
+ """For a given kind, returns a tuple of progressively more specific names
+ used to qualify the kind. For example if kind is an enum named Bar nested in a
+ struct Foo within module 'foo', this would return ('foo', 'Foo', 'Bar')"""
+ if isinstance(kind, mojom.Enum) and kind.parent_kind:
+ # Enums may be nested in other kinds.
+ return _GetScopeForKind(module, kind.parent_kind) + (kind.mojom_name, )
+
+ module_fragment = (module.mojom_namespace, ) if module.mojom_namespace else ()
+ kind_fragment = (kind.mojom_name, ) if kind else ()
+ return module_fragment + kind_fragment
+
+
+def _LookupValueInScope(module, kind, identifier):
+ """Given a kind and an identifier, this attempts to resolve the given
+ identifier to a concrete NamedValue within the scope of the given kind."""
+ scope = _GetScopeForKind(module, kind)
+ for i in reversed(range(len(scope) + 1)):
+ qualified_name = '.'.join(scope[:i] + (identifier, ))
+ value = module.values.get(qualified_name)
+ if value:
+ return value
+ return None
+
+
+def _LookupValue(module, parent_kind, implied_kind, ast_leaf_node):
+ """Resolves a leaf node in the form ('IDENTIFIER', 'x') to a constant value
+ identified by 'x' in some mojom definition. parent_kind is used as context
+ when resolving the identifier. If the given leaf node is not an IDENTIFIER
+ (e.g. already a constant value), it is returned as-is.
+
+ If implied_kind is provided, the parsed identifier may also be resolved within
+ its scope as fallback. This can be useful for more concise value references
+ when assigning enum-typed constants or field values."""
+ if not isinstance(ast_leaf_node, tuple) or ast_leaf_node[0] != 'IDENTIFIER':
+ return ast_leaf_node
+
+ # First look for a known user-defined identifier to resolve this within the
+ # enclosing scope.
+ identifier = ast_leaf_node[1]
+
+ value = _LookupValueInScope(module, parent_kind, identifier)
+ if value:
+ return value
+
+ # Next look in the scope of implied_kind, if provided.
+ value = (implied_kind and implied_kind.module and _LookupValueInScope(
+ implied_kind.module, implied_kind, identifier))
+ if value:
+ return value
+
+ # Fall back on defined builtin symbols
+ if _IsBuiltinValue(identifier):
+ return mojom.BuiltinValue(identifier)
+
+ raise ValueError('Unknown identifier %s' % identifier)
+
+
+def _Kind(kinds, spec, scope):
+ """Convert a type name into a mojom.Kind object.
+
+ As a side-effect this function adds the result to 'kinds'.
+
+ Args:
+ kinds: {Dict[str, mojom.Kind]} All known kinds up to this point, indexed by
+ their names.
+ spec: {str} A name uniquely identifying a type.
+ scope: {Tuple[str, str]} A tuple that looks like (namespace,
+ struct/interface), referring to the location where the type is
+ referenced.
+
+ Returns:
+ {mojom.Kind} The type corresponding to 'spec'.
+ """
+ kind = _LookupKind(kinds, spec, scope)
+ if kind:
+ return kind
+
+ if spec.startswith('?'):
+ kind = _Kind(kinds, spec[1:], scope)
+ kind = kind.MakeNullableKind()
+ elif spec.startswith('a:'):
+ kind = mojom.Array(_Kind(kinds, spec[2:], scope))
+ elif spec.startswith('asso:'):
+ inner_kind = _Kind(kinds, spec[5:], scope)
+ if isinstance(inner_kind, mojom.InterfaceRequest):
+ kind = mojom.AssociatedInterfaceRequest(inner_kind)
+ else:
+ kind = mojom.AssociatedInterface(inner_kind)
+ elif spec.startswith('a'):
+ colon = spec.find(':')
+ length = int(spec[1:colon])
+ kind = mojom.Array(_Kind(kinds, spec[colon + 1:], scope), length)
+ elif spec.startswith('r:'):
+ kind = mojom.InterfaceRequest(_Kind(kinds, spec[2:], scope))
+ elif spec.startswith('rmt:'):
+ kind = mojom.PendingRemote(_Kind(kinds, spec[4:], scope))
+ elif spec.startswith('rcv:'):
+ kind = mojom.PendingReceiver(_Kind(kinds, spec[4:], scope))
+ elif spec.startswith('rma:'):
+ kind = mojom.PendingAssociatedRemote(_Kind(kinds, spec[4:], scope))
+ elif spec.startswith('rca:'):
+ kind = mojom.PendingAssociatedReceiver(_Kind(kinds, spec[4:], scope))
+ elif spec.startswith('m['):
+ # Isolate the two types from their brackets.
+
+ # It is not allowed to use map as key, so there shouldn't be nested ']'s
+ # inside the key type spec.
+ key_end = spec.find(']')
+ assert key_end != -1 and key_end < len(spec) - 1
+ assert spec[key_end + 1] == '[' and spec[-1] == ']'
+
+ first_kind = spec[2:key_end]
+ second_kind = spec[key_end + 2:-1]
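+    # e.g. for 'm[s][a:u8]' (a uint8[]{string} in mojom syntax), first_kind
+    # is 's' (the key spec) and second_kind is 'a:u8' (the value spec).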
+
+ kind = mojom.Map(
+ _Kind(kinds, first_kind, scope), _Kind(kinds, second_kind, scope))
+ else:
+ kind = mojom.Kind(spec)
+
+ kinds[spec] = kind
+ return kind
+
+
+def _Import(module, import_module):
+ # Copy the struct kinds from our imports into the current module.
+ importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface,
+ mojom.Feature)
+ for kind in import_module.kinds.values():
+ if (isinstance(kind, importable_kinds)
+ and kind.module.path == import_module.path):
+ module.kinds[kind.spec] = kind
+ # Ditto for values.
+ for value in import_module.values.values():
+ if value.module.path == import_module.path:
+ module.values[value.GetSpec()] = value
+
+ return import_module
+
+
+def _Feature(module, parsed_feature):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_feature: {ast.Feature} Parsed feature.
+
+ Returns:
+ {mojom.Feature} AST feature.
+ """
+ feature = mojom.Feature(module=module)
+ feature.mojom_name = parsed_feature.mojom_name
+ feature.spec = 'x:' + module.GetNamespacePrefix() + feature.mojom_name
+ module.kinds[feature.spec] = feature
+ feature.constants = []
+ _ProcessElements(
+ parsed_feature.mojom_name, parsed_feature.body, {
+ ast.Const:
+ lambda const: feature.constants.append(
+ _Constant(module, const, feature)),
+ })
+
+ feature.attributes = _AttributeListToDict(module, feature,
+ parsed_feature.attribute_list)
+ return feature
+
+
+def _Struct(module, parsed_struct):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_struct: {ast.Struct} Parsed struct.
+
+ Returns:
+ {mojom.Struct} AST struct.
+ """
+ struct = mojom.Struct(module=module)
+ struct.mojom_name = parsed_struct.mojom_name
+ struct.native_only = parsed_struct.body is None
+ struct.spec = 'x:' + module.GetNamespacePrefix() + struct.mojom_name
+ module.kinds[struct.spec] = struct
+ struct.enums = []
+ struct.constants = []
+ struct.fields_data = []
+ if not struct.native_only:
+ _ProcessElements(
+ parsed_struct.mojom_name, parsed_struct.body, {
+ ast.Enum:
+ lambda enum: struct.enums.append(_Enum(module, enum, struct)),
+ ast.Const:
+ lambda const: struct.constants.append(
+ _Constant(module, const, struct)),
+ ast.StructField:
+ struct.fields_data.append,
+ })
+
+ struct.attributes = _AttributeListToDict(module, struct,
+ parsed_struct.attribute_list)
+
+ # Enforce that a [Native] attribute is set to make native-only struct
+ # declarations more explicit.
+ if struct.native_only:
+ if not struct.attributes or not struct.attributes.get('Native', False):
+ raise Exception("Native-only struct declarations must include a " +
+ "Native attribute.")
+
+ if struct.attributes and struct.attributes.get('CustomSerializer', False):
+ struct.custom_serializer = True
+
+ return struct
+
+
+def _Union(module, parsed_union):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_union: {ast.Union} Parsed union.
+
+ Returns:
+ {mojom.Union} AST union.
+ """
+ union = mojom.Union(module=module)
+ union.mojom_name = parsed_union.mojom_name
+ union.spec = 'x:' + module.GetNamespacePrefix() + union.mojom_name
+ module.kinds[union.spec] = union
+ # Stash fields parsed_union here temporarily.
+ union.fields_data = []
+ _ProcessElements(parsed_union.mojom_name, parsed_union.body,
+ {ast.UnionField: union.fields_data.append})
+ union.attributes = _AttributeListToDict(module, union,
+ parsed_union.attribute_list)
+ return union
+
+
+def _StructField(module, parsed_field, struct):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_field: {ast.StructField} Parsed struct field.
+ struct: {mojom.Struct} Struct this field belongs to.
+
+ Returns:
+ {mojom.StructField} AST struct field.
+ """
+ field = mojom.StructField()
+ field.mojom_name = parsed_field.mojom_name
+ field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename),
+ (module.mojom_namespace, struct.mojom_name))
+ field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
+ field.default = _LookupValue(module, struct, field.kind,
+ parsed_field.default_value)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
+ return field
+
+
+def _UnionField(module, parsed_field, union):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_field: {ast.UnionField} Parsed union field.
+    union: {mojom.Union} Union this field belongs to.
+
+ Returns:
+    {mojom.UnionField} AST union field.
+ """
+ field = mojom.UnionField()
+ field.mojom_name = parsed_field.mojom_name
+ # Disallow unions from being self-recursive.
+ parsed_typename = parsed_field.typename
+ if parsed_typename.endswith('?'):
+ parsed_typename = parsed_typename[:-1]
+ assert parsed_typename != union.mojom_name
+ field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename),
+ (module.mojom_namespace, union.mojom_name))
+ field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
+ field.default = None
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
+ if field.is_default and not mojom.IsNullableKind(field.kind) and \
+ not mojom.IsIntegralKind(field.kind):
+ raise Exception(
+ '[Default] field for union %s must be nullable or integral type.' %
+ union.mojom_name)
+ return field
+
+
+def _Parameter(module, parsed_param, interface):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_param: {ast.Parameter} Parsed parameter.
+    interface: {mojom.Interface} Interface this parameter belongs to.
+
+ Returns:
+ {mojom.Parameter} AST parameter.
+ """
+ parameter = mojom.Parameter()
+ parameter.mojom_name = parsed_param.mojom_name
+ parameter.kind = _Kind(module.kinds, _MapKind(parsed_param.typename),
+ (module.mojom_namespace, interface.mojom_name))
+ parameter.ordinal = (parsed_param.ordinal.value
+ if parsed_param.ordinal else None)
+ parameter.default = None # TODO(tibell): We never have these. Remove field?
+ parameter.attributes = _AttributeListToDict(module, parameter,
+ parsed_param.attribute_list)
+ return parameter
+
+
+def _Method(module, parsed_method, interface):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_method: {ast.Method} Parsed method.
+ interface: {mojom.Interface} Interface this method belongs to.
+
+ Returns:
+ {mojom.Method} AST method.
+ """
+ method = mojom.Method(
+ interface,
+ parsed_method.mojom_name,
+ ordinal=parsed_method.ordinal.value if parsed_method.ordinal else None)
+ method.parameters = list(
+ map(lambda parameter: _Parameter(module, parameter, interface),
+ parsed_method.parameter_list))
+ if parsed_method.response_parameter_list is not None:
+ method.response_parameters = list(
+ map(lambda parameter: _Parameter(module, parameter, interface),
+ parsed_method.response_parameter_list))
+ method.attributes = _AttributeListToDict(module, method,
+ parsed_method.attribute_list)
+
+ # Enforce that only methods with response can have a [Sync] attribute.
+ if method.sync and method.response_parameters is None:
+ raise Exception("Only methods with response can include a [Sync] "
+ "attribute. If no response parameters are needed, you "
+ "could use an empty response parameter list, i.e., "
+ "\"=> ()\".")
+ # And only methods with the [Sync] attribute can specify [NoInterrupt].
+ if not method.allow_interrupt and not method.sync:
+ raise Exception("Only [Sync] methods can be marked [NoInterrupt].")
+
+ return method
+
+
+def _Interface(module, parsed_iface):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_iface: {ast.Interface} Parsed interface.
+
+ Returns:
+ {mojom.Interface} AST interface.
+ """
+ interface = mojom.Interface(module=module)
+ interface.mojom_name = parsed_iface.mojom_name
+ interface.spec = 'x:' + module.GetNamespacePrefix() + interface.mojom_name
+ module.kinds[interface.spec] = interface
+ interface.attributes = _AttributeListToDict(module, interface,
+ parsed_iface.attribute_list)
+ interface.enums = []
+ interface.constants = []
+ interface.methods_data = []
+ _ProcessElements(
+ parsed_iface.mojom_name, parsed_iface.body, {
+ ast.Enum:
+ lambda enum: interface.enums.append(_Enum(module, enum, interface)),
+ ast.Const:
+ lambda const: interface.constants.append(
+ _Constant(module, const, interface)),
+ ast.Method:
+ interface.methods_data.append,
+ })
+ return interface
+
+
+def _EnumField(module, enum, parsed_field):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ enum: {mojom.Enum} Enum this field belongs to.
+ parsed_field: {ast.EnumValue} Parsed enum value.
+
+ Returns:
+ {mojom.EnumField} AST enum field.
+ """
+ field = mojom.EnumField()
+ field.mojom_name = parsed_field.mojom_name
+ field.value = _LookupValue(module, enum, None, parsed_field.value)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
+ value = mojom.EnumValue(module, enum, field)
+ module.values[value.GetSpec()] = value
+ return field
+
+
+def _ResolveNumericEnumValues(enum):
+ """
+ Given a reference to a mojom.Enum, resolves and assigns the numeric value of
+ each field, and also computes the min_value and max_value of the enum.
+ """
+
+  # Track the previously assigned value so that implicit enumerators can
+  # continue from it.
+ prev_value = -1
+ min_value = None
+ max_value = None
+ for field in enum.fields:
+ # This enum value is +1 the previous enum value (e.g: BEGIN).
+ if field.value is None:
+ prev_value += 1
+
+ # Integral value (e.g: BEGIN = -0x1).
+ elif isinstance(field.value, str):
+ prev_value = int(field.value, 0)
+
+ # Reference to a previous enum value (e.g: INIT = BEGIN).
+ elif isinstance(field.value, mojom.EnumValue):
+ prev_value = field.value.field.numeric_value
+ elif isinstance(field.value, mojom.ConstantValue):
+ constant = field.value.constant
+ kind = constant.kind
+ if not mojom.IsIntegralKind(kind) or mojom.IsBoolKind(kind):
+ raise ValueError('Enum values must be integers. %s is not an integer.' %
+ constant.mojom_name)
+ prev_value = int(constant.value, 0)
+ else:
+ raise Exception('Unresolved enum value for %s' % field.value.GetSpec())
+
+ if prev_value in (-128, -127):
+ raise Exception(f'{field.mojom_name} in {enum.spec} has the value '
+ f'{prev_value}, which is reserved for WTF::HashTrait\'s '
+ 'default enum specialization and may not be used.')
+ field.numeric_value = prev_value
+ if min_value is None or prev_value < min_value:
+ min_value = prev_value
+ if max_value is None or prev_value > max_value:
+ max_value = prev_value
+
+ enum.min_value = min_value
+ enum.max_value = max_value
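+# Worked example: for `enum E { A, B = 4, C };` the loop above assigns A=0
+# (implicit values continue from prev_value, which starts at -1), B=4 and C=5,
+# leaving min_value=0 and max_value=5.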
+
+
+def _Enum(module, parsed_enum, parent_kind):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_enum: {ast.Enum} Parsed enum.
+
+ Returns:
+ {mojom.Enum} AST enum.
+ """
+ enum = mojom.Enum(module=module)
+ enum.mojom_name = parsed_enum.mojom_name
+ enum.native_only = parsed_enum.enum_value_list is None
+ mojom_name = enum.mojom_name
+ if parent_kind:
+ mojom_name = parent_kind.mojom_name + '.' + mojom_name
+ enum.spec = 'x:%s.%s' % (module.mojom_namespace, mojom_name)
+ enum.parent_kind = parent_kind
+ enum.attributes = _AttributeListToDict(module, enum,
+ parsed_enum.attribute_list)
+
+ if not enum.native_only:
+ enum.fields = list(
+ map(lambda field: _EnumField(module, enum, field),
+ parsed_enum.enum_value_list))
+ _ResolveNumericEnumValues(enum)
+ # TODO(https://crbug.com/731893): Require a default value to be
+ # specified.
+ for field in enum.fields:
+ if field.default:
+ if not enum.extensible:
+ raise Exception(
+ f'Non-extensible enum {enum.spec} may not specify a default')
+ if enum.default_field is not None:
+ raise Exception(f'Multiple [Default] enumerators in enum {enum.spec}')
+ enum.default_field = field
+ # While running the backwards compatibility check, ignore errors because the
+ # old version of the enum might not specify [Default].
+ if (enum.extensible and enum.default_field is None
+ and enum.spec not in _EXTENSIBLE_ENUMS_MISSING_DEFAULT
+ and not is_running_backwards_compatibility_check_hack):
+ raise Exception(
+ f'Extensible enum {enum.spec} must specify a [Default] enumerator')
+
+ module.kinds[enum.spec] = enum
+
+ # Enforce that a [Native] attribute is set to make native-only enum
+ # declarations more explicit.
+ if enum.native_only:
+ if not enum.attributes or not enum.attributes.get('Native', False):
+ raise Exception("Native-only enum declarations must include a " +
+ "Native attribute.")
+
+ return enum
+
+
+def _Constant(module, parsed_const, parent_kind):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_const: {ast.Const} Parsed constant.
+
+ Returns:
+ {mojom.Constant} AST constant.
+ """
+ constant = mojom.Constant()
+ constant.mojom_name = parsed_const.mojom_name
+ if parent_kind:
+ scope = (module.mojom_namespace, parent_kind.mojom_name)
+ else:
+ scope = (module.mojom_namespace, )
+ # TODO(mpcomplete): maybe we should only support POD kinds.
+ constant.kind = _Kind(module.kinds, _MapKind(parsed_const.typename), scope)
+ constant.parent_kind = parent_kind
+ constant.value = _LookupValue(module, parent_kind, constant.kind,
+ parsed_const.value)
+
+ # Iteratively resolve this constant reference to a concrete value
+ while isinstance(constant.value, mojom.ConstantValue):
+ constant.value = constant.value.constant.value
+
+ value = mojom.ConstantValue(module, parent_kind, constant)
+ module.values[value.GetSpec()] = value
+ return constant
+
+
+def _CollectReferencedKinds(module, all_defined_kinds):
+ """
+ Takes a {mojom.Module} object and a list of all defined kinds within that
+ module, and enumerates the complete dict of user-defined mojom types
+ (as {mojom.Kind} objects) referenced by the module's own defined kinds (i.e.
+  as types of struct fields, union fields, or interface method parameters).
+  The returned dict is keyed by kind spec.
+ """
+
+ def extract_referenced_user_kinds(kind):
+ if mojom.IsArrayKind(kind):
+ return extract_referenced_user_kinds(kind.kind)
+ if mojom.IsMapKind(kind):
+ return (extract_referenced_user_kinds(kind.key_kind) +
+ extract_referenced_user_kinds(kind.value_kind))
+ if (mojom.IsInterfaceRequestKind(kind) or mojom.IsAssociatedKind(kind)
+ or mojom.IsPendingRemoteKind(kind)
+ or mojom.IsPendingReceiverKind(kind)):
+ return [kind.kind]
+ if mojom.IsStructKind(kind):
+ return [kind]
+ if (mojom.IsInterfaceKind(kind) or mojom.IsEnumKind(kind)
+ or mojom.IsUnionKind(kind)):
+ return [kind]
+ return []
+
+ def sanitize_kind(kind):
+ """Removes nullability from a kind"""
+ if kind.spec.startswith('?'):
+ return _Kind(module.kinds, kind.spec[1:], (module.mojom_namespace, ''))
+ return kind
+
+ referenced_user_kinds = {}
+ for defined_kind in all_defined_kinds:
+ if mojom.IsStructKind(defined_kind) or mojom.IsUnionKind(defined_kind):
+ for field in defined_kind.fields:
+ for referenced_kind in extract_referenced_user_kinds(field.kind):
+ sanitized_kind = sanitize_kind(referenced_kind)
+ referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
+
+ # Also scan for references in parameter lists
+ for interface in module.interfaces:
+ for method in interface.methods:
+ for param in itertools.chain(method.parameters or [],
+ method.response_parameters or []):
+ for referenced_kind in extract_referenced_user_kinds(param.kind):
+ sanitized_kind = sanitize_kind(referenced_kind)
+ referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
+ # Consts can reference imported enums.
+ for const in module.constants:
+ if not const.kind in mojom.PRIMITIVES:
+ sanitized_kind = sanitize_kind(const.kind)
+ referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
+
+ return referenced_user_kinds
+
+
+def _AssignDefaultOrdinals(items):
+ """Assigns default ordinal values to a sequence of items if necessary."""
+ next_ordinal = 0
+ for item in items:
+ if item.ordinal is not None:
+ next_ordinal = item.ordinal + 1
+ else:
+ item.ordinal = next_ordinal
+ next_ordinal += 1
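+# For example, items declared with ordinals (None, None, 5, None) end up with
+# ordinals (0, 1, 5, 6): explicit ordinals are preserved and implicit ones
+# continue counting from the last assigned value.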
+
+
+def _AssertTypeIsStable(kind):
+ """Raises an error if a type is not stable, meaning it is composed of at least
+ one type that is not marked [Stable]."""
+
+ def assertDependencyIsStable(dependency):
+ if (mojom.IsEnumKind(dependency) or mojom.IsStructKind(dependency)
+ or mojom.IsUnionKind(dependency) or mojom.IsInterfaceKind(dependency)):
+ if not dependency.stable:
+ raise Exception(
+ '%s is marked [Stable] but cannot be stable because it depends on '
+ '%s, which is not marked [Stable].' %
+ (kind.mojom_name, dependency.mojom_name))
+ elif mojom.IsArrayKind(dependency) or mojom.IsAnyInterfaceKind(dependency):
+ assertDependencyIsStable(dependency.kind)
+ elif mojom.IsMapKind(dependency):
+ assertDependencyIsStable(dependency.key_kind)
+ assertDependencyIsStable(dependency.value_kind)
+
+ if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
+ for field in kind.fields:
+ assertDependencyIsStable(field.kind)
+ elif mojom.IsInterfaceKind(kind):
+ for method in kind.methods:
+ for param in method.param_struct.fields:
+ assertDependencyIsStable(param.kind)
+ if method.response_param_struct:
+ for response_param in method.response_param_struct.fields:
+ assertDependencyIsStable(response_param.kind)
+
+
+def _AssertStructIsValid(kind):
+ expected_ordinals = set(range(0, len(kind.fields)))
+ ordinals = set(map(lambda field: field.ordinal, kind.fields))
+ if ordinals != expected_ordinals:
+ raise Exception(
+ 'Structs must use contiguous ordinals starting from 0. ' +
+ '{} is missing the following ordinals: {}.'.format(
+ kind.mojom_name, ', '.join(map(str, expected_ordinals - ordinals))))
+
+
+def _Module(tree, path, imports):
+ """
+ Args:
+ tree: {ast.Mojom} The parse tree.
+ path: {str} The path to the mojom file.
+ imports: {Dict[str, mojom.Module]} Mapping from filenames, as they appear in
+ the import list, to already processed modules. Used to process imports.
+
+ Returns:
+ {mojom.Module} An AST for the mojom.
+ """
+ module = mojom.Module(path=path)
+ module.kinds = {}
+ for kind in mojom.PRIMITIVES:
+ module.kinds[kind.spec] = kind
+
+ module.values = {}
+
+ module.mojom_namespace = tree.module.mojom_namespace[1] if tree.module else ''
+ # Imports must come first, because they add to module.kinds which is used
+  # by the others.
+ module.imports = [
+ _Import(module, imports[imp.import_filename]) for imp in tree.import_list
+ ]
+ if tree.module and tree.module.attribute_list:
+ assert isinstance(tree.module.attribute_list, ast.AttributeList)
+ # TODO(vtl): Check for duplicate keys here.
+ module.attributes = dict((attribute.key, attribute.value)
+ for attribute in tree.module.attribute_list)
+
+ filename = os.path.basename(path)
+ # First pass collects kinds.
+ module.constants = []
+ module.enums = []
+ module.structs = []
+ module.unions = []
+ module.interfaces = []
+ module.features = []
+
+ _ProcessElements(
+ filename, tree.definition_list, {
+ ast.Const:
+ lambda const: module.constants.append(_Constant(module, const, None)),
+ ast.Enum:
+ lambda enum: module.enums.append(_Enum(module, enum, None)),
+ ast.Struct:
+ lambda struct: module.structs.append(_Struct(module, struct)),
+ ast.Union:
+ lambda union: module.unions.append(_Union(module, union)),
+ ast.Interface:
+ lambda interface: module.interfaces.append(
+ _Interface(module, interface)),
+ ast.Feature:
+ lambda feature: module.features.append(_Feature(module, feature)),
+ })
+
+ # Second pass expands fields and methods. This allows fields and parameters
+ # to refer to kinds defined anywhere in the mojom.
+ all_defined_kinds = {}
+ for struct in module.structs:
+ struct.fields = list(
+ map(lambda field: _StructField(module, field, struct),
+ struct.fields_data))
+ _AssignDefaultOrdinals(struct.fields)
+ del struct.fields_data
+ all_defined_kinds[struct.spec] = struct
+ for enum in struct.enums:
+ all_defined_kinds[enum.spec] = enum
+
+ for feature in module.features:
+ all_defined_kinds[feature.spec] = feature
+
+ for union in module.unions:
+ union.fields = list(
+ map(lambda field: _UnionField(module, field, union), union.fields_data))
+ _AssignDefaultOrdinals(union.fields)
+ for field in union.fields:
+ if field.is_default:
+ if union.default_field is not None:
+ raise Exception('Multiple [Default] fields in union %s.' %
+ union.mojom_name)
+ union.default_field = field
+ del union.fields_data
+ all_defined_kinds[union.spec] = union
+ if union.extensible and union.default_field is None:
+ raise Exception('Extensible union %s must specify a [Default] field' %
+ union.mojom_name)
+
+ for interface in module.interfaces:
+ interface.methods = list(
+ map(lambda method: _Method(module, method, interface),
+ interface.methods_data))
+ _AssignDefaultOrdinals(interface.methods)
+ del interface.methods_data
+ all_defined_kinds[interface.spec] = interface
+ for enum in interface.enums:
+ all_defined_kinds[enum.spec] = enum
+ for enum in module.enums:
+ all_defined_kinds[enum.spec] = enum
+
+ all_referenced_kinds = _CollectReferencedKinds(module,
+ all_defined_kinds.values())
+ imported_kind_specs = set(all_referenced_kinds.keys()).difference(
+ set(all_defined_kinds.keys()))
+ module.imported_kinds = OrderedDict((spec, all_referenced_kinds[spec])
+ for spec in sorted(imported_kind_specs))
+
+ generator.AddComputedData(module)
+ for iface in module.interfaces:
+ for method in iface.methods:
+ if method.param_struct:
+ _AssignDefaultOrdinals(method.param_struct.fields)
+ if method.response_param_struct:
+ _AssignDefaultOrdinals(method.response_param_struct.fields)
+
+ # Ensure that all types marked [Stable] are actually stable. Enums are
+ # automatically OK since they don't depend on other definitions.
+ for kinds in (module.structs, module.unions, module.interfaces):
+ for kind in kinds:
+ if kind.stable:
+ _AssertTypeIsStable(kind)
+
+ for kind in module.structs:
+ _AssertStructIsValid(kind)
+
+ return module
+
+
+def OrderedModule(tree, path, imports):
+ """Convert parse tree to AST module.
+
+ Args:
+ tree: {ast.Mojom} The parse tree.
+ path: {str} The path to the mojom file.
+ imports: {Dict[str, mojom.Module]} Mapping from filenames, as they appear in
+ the import list, to already processed modules. Used to process imports.
+
+ Returns:
+ {mojom.Module} An AST for the mojom.
+ """
+ module = _Module(tree, path, imports)
+ return module
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
new file mode 100644
index 00000000..b4fea924
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
@@ -0,0 +1,141 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from mojom.generate import module as mojom
+from mojom.generate import translate
+from mojom.parse import ast
+
+class TranslateTest(unittest.TestCase):
+ """Tests |parser.Parse()|."""
+
+ def testSimpleArray(self):
+ """Tests a simple int32[]."""
+ # pylint: disable=W0212
+ self.assertEquals(translate._MapKind("int32[]"), "a:i32")
+
+ def testAssociativeArray(self):
+ """Tests a simple uint8{string}."""
+ # pylint: disable=W0212
+ self.assertEquals(translate._MapKind("uint8{string}"), "m[s][u8]")
+
+ def testLeftToRightAssociativeArray(self):
+ """Makes sure that parsing is done from right to left on the internal kinds
+ in the presence of an associative array."""
+ # pylint: disable=W0212
+ self.assertEquals(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")
+
+ def testTranslateSimpleUnions(self):
+ """Makes sure that a simple union is translated correctly."""
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "SomeUnion", None,
+ ast.UnionBody([
+ ast.UnionField("a", None, None, "int32"),
+ ast.UnionField("b", None, None, "string")
+ ]))
+ ])
+
+ translation = translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertEqual(1, len(translation.unions))
+
+ union = translation.unions[0]
+ self.assertTrue(isinstance(union, mojom.Union))
+ self.assertEqual("SomeUnion", union.mojom_name)
+ self.assertEqual(2, len(union.fields))
+ self.assertEqual("a", union.fields[0].mojom_name)
+ self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
+ self.assertEqual("b", union.fields[1].mojom_name)
+ self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)
+
+ def testMapKindRaisesWithDuplicate(self):
+ """Verifies _MapTreeForType() raises when passed two values with the same
+ name."""
+ methods = [
+ ast.Method('dup', None, None, ast.ParameterList(), None),
+ ast.Method('dup', None, None, ast.ParameterList(), None)
+ ]
+ with self.assertRaises(Exception):
+ translate._ElemsOfType(methods, ast.Method, 'scope')
+
+ def testAssociatedKinds(self):
+ """Tests type spec translation of associated interfaces and requests."""
+ # pylint: disable=W0212
+    self.assertEqual(
+        translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
+    self.assertEqual(translate._MapKind("rca<SomeInterface>?"),
+                     "?rca:x:SomeInterface")
+
+ def testSelfRecursiveUnions(self):
+ """Verifies _UnionField() raises when a union is self-recursive."""
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union("SomeUnion", None,
+ ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion")]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "SomeUnion", None,
+ ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion?")]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ def testDuplicateAttributesException(self):
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "FakeUnion",
+ ast.AttributeList([
+ ast.Attribute("key1", "value"),
+ ast.Attribute("key1", "value")
+ ]),
+ ast.UnionBody([
+ ast.UnionField("a", None, None, "int32"),
+ ast.UnionField("b", None, None, "string")
+ ]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ def testEnumWithReservedValues(self):
+ """Verifies that assigning reserved values to enumerators fails."""
+ # -128 is reserved for the empty representation in WTF::HashTraits.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kReserved', None, '-128'),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
+
+ # -127 is reserved for the deleted representation in WTF::HashTraits.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kReserved', None, '-127'),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
+
+ # Implicitly assigning a reserved value should also fail.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kNotReserved', None, '-129'),
+ ast.EnumValue('kImplicitlyReserved', None, None),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
new file mode 100644
index 00000000..aae9cdb6
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
@@ -0,0 +1,462 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Node classes for the AST for a Mojo IDL file."""
+
+# Note: For convenience of testing, you probably want to define __eq__() methods
+# for all node types; it's okay to be slightly lax (e.g., not compare filename
+# and lineno). You may also define __repr__() to help with analyzing test
+# failures, especially for more complex types.
+
+import os.path
+
+
+# Instance of 'NodeListBase' has no '_list_item_type' member (no-member)
+# pylint: disable=no-member
+
+
+class NodeBase:
+ """Base class for nodes in the AST."""
+
+ def __init__(self, filename=None, lineno=None):
+ self.filename = filename
+ self.lineno = lineno
+
+ def __eq__(self, other):
+ # We want strict comparison of the two object's types. Disable pylint's
+ # insistence upon recommending isinstance().
+ # pylint: disable=unidiomatic-typecheck
+ return type(self) == type(other)
+
+ # Make != the inverse of ==. (Subclasses shouldn't have to override this.)
+ def __ne__(self, other):
+ return not self == other
+
+
+# TODO(vtl): Some of this is complicated enough that it should be tested.
+class NodeListBase(NodeBase):
+ """Represents a list of other nodes, all having the same type. (This is meant
+ to be subclassed, with subclasses defining _list_item_type to be the class (or
+ classes, in a tuple) of the members of the list.)"""
+
+ def __init__(self, item_or_items=None, **kwargs):
+ super().__init__(**kwargs)
+ self.items = []
+ if item_or_items is None:
+ pass
+ elif isinstance(item_or_items, list):
+ for item in item_or_items:
+ assert isinstance(item, self._list_item_type)
+ self.Append(item)
+ else:
+ assert isinstance(item_or_items, self._list_item_type)
+ self.Append(item_or_items)
+
+ # Support iteration. For everything else, users should just access |items|
+ # directly. (We intentionally do NOT supply |__len__()| or |__bool__()|, so
+ # |bool(NodeListBase())| is true.)
+ def __iter__(self):
+ return self.items.__iter__()
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.items == other.items
+
+ # Implement this so that on failure, we get slightly more sensible output.
+ def __repr__(self):
+ return self.__class__.__name__ + "([" + \
+ ", ".join([repr(elem) for elem in self.items]) + "])"
+
+ def Insert(self, item):
+ """Inserts item at the front of the list."""
+
+ assert isinstance(item, self._list_item_type)
+ self.items.insert(0, item)
+ self._UpdateFilenameAndLineno()
+
+ def Append(self, item):
+ """Appends item to the end of the list."""
+
+ assert isinstance(item, self._list_item_type)
+ self.items.append(item)
+ self._UpdateFilenameAndLineno()
+
+ def _UpdateFilenameAndLineno(self):
+ if self.items:
+ self.filename = self.items[0].filename
+ self.lineno = self.items[0].lineno
+
+
+class Definition(NodeBase):
+ """Represents a definition of anything that has a global name (e.g., enums,
+ enum values, consts, structs, struct fields, interfaces). (This does not
+ include parameter definitions.) This class is meant to be subclassed."""
+
+ def __init__(self, mojom_name, **kwargs):
+ assert isinstance(mojom_name, str)
+ NodeBase.__init__(self, **kwargs)
+ self.mojom_name = mojom_name
+
+
+################################################################################
+
+
+class Attribute(NodeBase):
+ """Represents an attribute."""
+
+ def __init__(self, key, value, **kwargs):
+ assert isinstance(key, str)
+ super().__init__(**kwargs)
+ self.key = key
+ self.value = value
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.key == other.key and \
+ self.value == other.value
+
+
+class AttributeList(NodeListBase):
+ """Represents a list attributes."""
+
+ _list_item_type = Attribute
+
+
+class Const(Definition):
+ """Represents a const definition."""
+
+ def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ # The typename is currently passed through as a string.
+ assert isinstance(typename, str)
+ # The value is either a literal (currently passed through as a string) or a
+ # "wrapped identifier".
+ assert isinstance(value, (tuple, str))
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.typename = typename
+ self.value = value
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.typename == other.typename and \
+ self.value == other.value
+
+
+class Enum(Definition):
+ """Represents an enum definition."""
+
+ def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.enum_value_list = enum_value_list
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.enum_value_list == other.enum_value_list
+
+
+class EnumValue(Definition):
+ """Represents a definition of an enum value."""
+
+ def __init__(self, mojom_name, attribute_list, value, **kwargs):
+ # The optional value is either an int (which is currently a string) or a
+ # "wrapped identifier".
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert value is None or isinstance(value, (tuple, str))
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.value = value
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.value == other.value
+
+
+class EnumValueList(NodeListBase):
+ """Represents a list of enum value definitions (i.e., the "body" of an enum
+ definition)."""
+
+ _list_item_type = EnumValue
+
+
+class Feature(Definition):
+ """Represents a runtime feature definition."""
+ def __init__(self, mojom_name, attribute_list, body, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(body, FeatureBody) or body is None
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.body = body
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.body == other.body
+
+ def __repr__(self):
+ return "Feature(mojom_name = %s, attribute_list = %s, body = %s)" % (
+ self.mojom_name, self.attribute_list, self.body)
+
+
+# This needs to be declared after |Const|.
+class FeatureBody(NodeListBase):
+ """Represents the body of (i.e., list of definitions inside) a feature."""
+
+ # Features are compile time helpers so all fields are initializers/consts
+ # for the underlying platform feature type.
+ _list_item_type = (Const)
+
+
+class Import(NodeBase):
+ """Represents an import statement."""
+
+ def __init__(self, attribute_list, import_filename, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(import_filename, str)
+ super().__init__(**kwargs)
+ self.attribute_list = attribute_list
+ # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
+ self.import_filename = os.path.normpath(import_filename).replace('\\', '/')
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.import_filename == other.import_filename
+
+
+class ImportList(NodeListBase):
+ """Represents a list (i.e., sequence) of import statements."""
+
+ _list_item_type = Import
+
+
+class Interface(Definition):
+ """Represents an interface definition."""
+
+ def __init__(self, mojom_name, attribute_list, body, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(body, InterfaceBody)
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.body = body
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.body == other.body
+
+
+class Method(Definition):
+ """Represents a method definition."""
+
+ def __init__(self, mojom_name, attribute_list, ordinal, parameter_list,
+ response_parameter_list, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert ordinal is None or isinstance(ordinal, Ordinal)
+ assert isinstance(parameter_list, ParameterList)
+ assert response_parameter_list is None or \
+ isinstance(response_parameter_list, ParameterList)
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.ordinal = ordinal
+ self.parameter_list = parameter_list
+ self.response_parameter_list = response_parameter_list
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.ordinal == other.ordinal and \
+ self.parameter_list == other.parameter_list and \
+ self.response_parameter_list == other.response_parameter_list
+
+
+# This needs to be declared after |Method|.
+class InterfaceBody(NodeListBase):
+ """Represents the body of (i.e., list of definitions inside) an interface."""
+
+ _list_item_type = (Const, Enum, Method)
+
+
+class Module(NodeBase):
+ """Represents a module statement."""
+
+ def __init__(self, mojom_namespace, attribute_list, **kwargs):
+ # |mojom_namespace| is either None or a "wrapped identifier".
+ assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ super().__init__(**kwargs)
+ self.mojom_namespace = mojom_namespace
+ self.attribute_list = attribute_list
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.mojom_namespace == other.mojom_namespace and \
+ self.attribute_list == other.attribute_list
+
+
+class Mojom(NodeBase):
+ """Represents an entire .mojom file. (This is the root node.)"""
+
+ def __init__(self, module, import_list, definition_list, **kwargs):
+ assert module is None or isinstance(module, Module)
+ assert isinstance(import_list, ImportList)
+ assert isinstance(definition_list, list)
+ super().__init__(**kwargs)
+ self.module = module
+ self.import_list = import_list
+ self.definition_list = definition_list
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.module == other.module and \
+ self.import_list == other.import_list and \
+ self.definition_list == other.definition_list
+
+ def __repr__(self):
+ return "%s(%r, %r, %r)" % (self.__class__.__name__, self.module,
+ self.import_list, self.definition_list)
+
+
+class Ordinal(NodeBase):
+ """Represents an ordinal value labeling, e.g., a struct field."""
+
+ def __init__(self, value, **kwargs):
+ assert isinstance(value, int)
+ super().__init__(**kwargs)
+ self.value = value
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.value == other.value
+
+
+class Parameter(NodeBase):
+ """Represents a method request or response parameter."""
+
+ def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
+ assert isinstance(mojom_name, str)
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert ordinal is None or isinstance(ordinal, Ordinal)
+ assert isinstance(typename, str)
+ super().__init__(**kwargs)
+ self.mojom_name = mojom_name
+ self.attribute_list = attribute_list
+ self.ordinal = ordinal
+ self.typename = typename
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.mojom_name == other.mojom_name and \
+ self.attribute_list == other.attribute_list and \
+ self.ordinal == other.ordinal and \
+ self.typename == other.typename
+
+
+class ParameterList(NodeListBase):
+ """Represents a list of (method request or response) parameters."""
+
+ _list_item_type = Parameter
+
+
+class Struct(Definition):
+ """Represents a struct definition."""
+
+ def __init__(self, mojom_name, attribute_list, body, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(body, StructBody) or body is None
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.body = body
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.body == other.body
+
+ def __repr__(self):
+ return "Struct(mojom_name = %s, attribute_list = %s, body = %s)" % (
+ self.mojom_name, self.attribute_list, self.body)
+
+
+class StructField(Definition):
+ """Represents a struct field definition."""
+
+ def __init__(self, mojom_name, attribute_list, ordinal, typename,
+ default_value, **kwargs):
+ assert isinstance(mojom_name, str)
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert ordinal is None or isinstance(ordinal, Ordinal)
+ assert isinstance(typename, str)
+ # The optional default value is currently either a value as a string or a
+ # "wrapped identifier".
+ assert default_value is None or isinstance(default_value, (str, tuple))
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.ordinal = ordinal
+ self.typename = typename
+ self.default_value = default_value
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.ordinal == other.ordinal and \
+ self.typename == other.typename and \
+ self.default_value == other.default_value
+
+ def __repr__(self):
+ return ("StructField(mojom_name = %s, attribute_list = %s, ordinal = %s, "
+ "typename = %s, default_value = %s") % (
+ self.mojom_name, self.attribute_list, self.ordinal,
+ self.typename, self.default_value)
+
+
+# This needs to be declared after |StructField|.
+class StructBody(NodeListBase):
+ """Represents the body of (i.e., list of definitions inside) a struct."""
+
+ _list_item_type = (Const, Enum, StructField)
+
+
+class Union(Definition):
+ """Represents a union definition."""
+
+ def __init__(self, mojom_name, attribute_list, body, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(body, UnionBody)
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.body = body
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.body == other.body
+
+
+class UnionField(Definition):
+ """Represents a union field definition."""
+
+ def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
+ assert isinstance(mojom_name, str)
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert ordinal is None or isinstance(ordinal, Ordinal)
+ assert isinstance(typename, str)
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.ordinal = ordinal
+ self.typename = typename
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.ordinal == other.ordinal and \
+ self.typename == other.typename
+
+
+class UnionBody(NodeListBase):
+ """Represents the body of (i.e., list of fields inside) a union."""
+
+ _list_item_type = UnionField
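A rough sketch of the NodeListBase subclassing pattern described above (illustrative only, not part of the patch; it assumes the package is importable as mojom.parse, as the unit tests below arrange):

from mojom.parse import ast

# Build a small enum out of the node classes defined in ast.py.
values = ast.EnumValueList([
    ast.EnumValue('kFirst', None, '0'),
    ast.EnumValue('kSecond', None, None),
])
enum = ast.Enum('MyEnum', None, values)

# Node lists iterate over their items and compare by value, ignoring
# filename/lineno, so structurally identical trees compare equal.
assert list(values) == values.items
assert enum == ast.Enum('MyEnum', None, ast.EnumValueList([
    ast.EnumValue('kFirst', None, '0'),
    ast.EnumValue('kSecond', None, None),
]))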
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
new file mode 100644
index 00000000..b289f7b1
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
@@ -0,0 +1,115 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from mojom.parse import ast
+
+class _TestNode(ast.NodeBase):
+ """Node type for tests."""
+
+ def __init__(self, value, **kwargs):
+ super().__init__(**kwargs)
+ self.value = value
+
+ def __eq__(self, other):
+ return super().__eq__(other) and self.value == other.value
+
+class _TestNodeList(ast.NodeListBase):
+ """Node list type for tests."""
+
+ _list_item_type = _TestNode
+
+class ASTTest(unittest.TestCase):
+ """Tests various AST classes."""
+
+ def testNodeBase(self):
+ # Test |__eq__()|; this is only used for testing, where we want to do
+ # comparison by value and ignore filenames/line numbers (for convenience).
+ node1 = ast.NodeBase(filename="hello.mojom", lineno=123)
+ node2 = ast.NodeBase()
+ self.assertEquals(node1, node2)
+ self.assertEquals(node2, node1)
+
+ # Check that |__ne__()| just defers to |__eq__()| properly.
+ self.assertFalse(node1 != node2)
+ self.assertFalse(node2 != node1)
+
+ # Check that |filename| and |lineno| are set properly (and are None by
+ # default).
+ self.assertEquals(node1.filename, "hello.mojom")
+ self.assertEquals(node1.lineno, 123)
+ self.assertIsNone(node2.filename)
+ self.assertIsNone(node2.lineno)
+
+ # |NodeBase|'s |__eq__()| should compare types (and a subclass's |__eq__()|
+ # should first defer to its superclass's).
+ node3 = _TestNode(123)
+ self.assertNotEqual(node1, node3)
+ self.assertNotEqual(node3, node1)
+ # Also test |__eq__()| directly.
+ self.assertFalse(node1 == node3)
+ self.assertFalse(node3 == node1)
+
+ node4 = _TestNode(123, filename="world.mojom", lineno=123)
+ self.assertEquals(node4, node3)
+ node5 = _TestNode(456)
+ self.assertNotEquals(node5, node4)
+
+ def testNodeListBase(self):
+ node1 = _TestNode(1, filename="foo.mojom", lineno=1)
+ # Equal to, but not the same as, |node1|:
+ node1b = _TestNode(1, filename="foo.mojom", lineno=1)
+ node2 = _TestNode(2, filename="foo.mojom", lineno=2)
+
+ nodelist1 = _TestNodeList() # Contains: (empty).
+ self.assertEquals(nodelist1, nodelist1)
+ self.assertEquals(nodelist1.items, [])
+ self.assertIsNone(nodelist1.filename)
+ self.assertIsNone(nodelist1.lineno)
+
+ nodelist2 = _TestNodeList(node1) # Contains: 1.
+ self.assertEquals(nodelist2, nodelist2)
+ self.assertEquals(nodelist2.items, [node1])
+ self.assertNotEqual(nodelist2, nodelist1)
+ self.assertEquals(nodelist2.filename, "foo.mojom")
+ self.assertEquals(nodelist2.lineno, 1)
+
+ nodelist3 = _TestNodeList([node2]) # Contains: 2.
+ self.assertEquals(nodelist3.items, [node2])
+ self.assertNotEqual(nodelist3, nodelist1)
+ self.assertNotEqual(nodelist3, nodelist2)
+ self.assertEquals(nodelist3.filename, "foo.mojom")
+ self.assertEquals(nodelist3.lineno, 2)
+
+ nodelist1.Append(node1b) # Contains: 1.
+ self.assertEquals(nodelist1.items, [node1])
+ self.assertEquals(nodelist1, nodelist2)
+ self.assertNotEqual(nodelist1, nodelist3)
+ self.assertEquals(nodelist1.filename, "foo.mojom")
+ self.assertEquals(nodelist1.lineno, 1)
+
+ nodelist1.Append(node2) # Contains: 1, 2.
+ self.assertEquals(nodelist1.items, [node1, node2])
+ self.assertNotEqual(nodelist1, nodelist2)
+ self.assertNotEqual(nodelist1, nodelist3)
+ self.assertEquals(nodelist1.lineno, 1)
+
+ nodelist2.Append(node2) # Contains: 1, 2.
+ self.assertEquals(nodelist2.items, [node1, node2])
+ self.assertEquals(nodelist2, nodelist1)
+ self.assertNotEqual(nodelist2, nodelist3)
+ self.assertEquals(nodelist2.lineno, 1)
+
+ nodelist3.Insert(node1) # Contains: 1, 2.
+ self.assertEquals(nodelist3.items, [node1, node2])
+ self.assertEquals(nodelist3, nodelist1)
+ self.assertEquals(nodelist3, nodelist2)
+ self.assertEquals(nodelist3.lineno, 1)
+
+ # Test iteration:
+ i = 1
+ for item in nodelist1:
+ self.assertEquals(item.value, i)
+ i += 1
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
new file mode 100644
index 00000000..9687edbf
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
@@ -0,0 +1,83 @@
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers for processing conditionally enabled features in a mojom."""
+
+from mojom.error import Error
+from mojom.parse import ast
+
+
+class EnableIfError(Error):
+ """ Class for errors from ."""
+
+ def __init__(self, filename, message, lineno=None):
+ Error.__init__(self, filename, message, lineno=lineno, addenda=None)
+
+
+def _IsEnabled(definition, enabled_features):
+ """Returns true if a definition is enabled.
+
+ A definition is enabled if it has no EnableIf/EnableIfNot attribute.
+ It is retained if it has an EnableIf attribute whose value is in
+ enabled_features, or an EnableIfNot attribute whose value is not in
+ enabled_features.
+ """
+ if not hasattr(definition, "attribute_list"):
+ return True
+ if not definition.attribute_list:
+ return True
+
+ already_defined = False
+ for a in definition.attribute_list:
+ if a.key == 'EnableIf' or a.key == 'EnableIfNot':
+ if already_defined:
+ raise EnableIfError(
+ definition.filename,
+ "EnableIf/EnableIfNot attribute may only be set once per field.",
+ definition.lineno)
+ already_defined = True
+
+ for attribute in definition.attribute_list:
+ if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
+ return False
+ if attribute.key == 'EnableIfNot' and attribute.value in enabled_features:
+ return False
+ return True
+
+
+def _FilterDisabledFromNodeList(node_list, enabled_features):
+ if not node_list:
+ return
+ assert isinstance(node_list, ast.NodeListBase)
+ node_list.items = [
+ item for item in node_list.items if _IsEnabled(item, enabled_features)
+ ]
+ for item in node_list.items:
+ _FilterDefinition(item, enabled_features)
+
+
+def _FilterDefinition(definition, enabled_features):
+ """Filters definitions with a body."""
+ if isinstance(definition, ast.Enum):
+ _FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
+ elif isinstance(definition, ast.Method):
+ _FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
+ _FilterDisabledFromNodeList(definition.response_parameter_list,
+ enabled_features)
+ elif isinstance(definition,
+ (ast.Interface, ast.Struct, ast.Union, ast.Feature)):
+ _FilterDisabledFromNodeList(definition.body, enabled_features)
+
+
+def RemoveDisabledDefinitions(mojom, enabled_features):
+ """Removes conditionally disabled definitions from a Mojom node."""
+ mojom.import_list = ast.ImportList([
+ imported_file for imported_file in mojom.import_list
+ if _IsEnabled(imported_file, enabled_features)
+ ])
+ mojom.definition_list = [
+ definition for definition in mojom.definition_list
+ if _IsEnabled(definition, enabled_features)
+ ]
+ for definition in mojom.definition_list:
+ _FilterDefinition(definition, enabled_features)
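A minimal usage sketch (illustrative only, not part of the patch; module paths assumed importable as in the unit tests below): RemoveDisabledDefinitions() is applied to a parsed tree together with the set of enabled feature names.

from mojom.parse import conditional_features, parser

source = """
    [EnableIf=blue]
    const int32 kOnlyWhenBlue = 1;
    [EnableIfNot=blue]
    const int32 kOnlyWithoutBlue = 2;
"""
tree = parser.Parse(source, "example.mojom")
conditional_features.RemoveDisabledDefinitions(tree, {'blue'})

# The EnableIf=blue constant is kept, the EnableIfNot=blue one is dropped.
assert [d.mojom_name for d in tree.definition_list] == ['kOnlyWhenBlue']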
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
new file mode 100644
index 00000000..cca1764b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
@@ -0,0 +1,376 @@
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import importlib.util
+import os
+import sys
+import unittest
+
+def _GetDirAbove(dirname):
+ """Returns the directory "above" this file containing |dirname| (which must
+ also be "above" this file)."""
+ path = os.path.abspath(__file__)
+ while True:
+ path, tail = os.path.split(path)
+ assert tail
+ if tail == dirname:
+ return path
+
+try:
+ importlib.util.find_spec("mojom")
+except ImportError:
+ sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
+import mojom.parse.ast as ast
+import mojom.parse.conditional_features as conditional_features
+import mojom.parse.parser as parser
+
+ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
+
+class ConditionalFeaturesTest(unittest.TestCase):
+ """Tests |mojom.parse.conditional_features|."""
+
+ def parseAndAssertEqual(self, source, expected_source):
+ definition = parser.Parse(source, "my_file.mojom")
+ conditional_features.RemoveDisabledDefinitions(definition, ENABLED_FEATURES)
+ expected = parser.Parse(expected_source, "my_file.mojom")
+ self.assertEquals(definition, expected)
+
+ def testFilterConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIf=blue]
+ const int kMyConst1 = 1;
+ [EnableIf=orange]
+ const double kMyConst2 = 2;
+ const int kMyConst3 = 3;
+ """
+ expected_source = """
+ [EnableIf=blue]
+ const int kMyConst1 = 1;
+ const int kMyConst3 = 3;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
+ def testFilterIfNotConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIfNot=blue]
+ const int kMyConst1 = 1;
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIf=blue]
+ const int kMyConst3 = 3;
+ [EnableIfNot=blue]
+ const int kMyConst4 = 4;
+ [EnableIfNot=purple]
+ const int kMyConst5 = 5;
+ """
+ expected_source = """
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIf=blue]
+ const int kMyConst3 = 3;
+ [EnableIfNot=purple]
+ const int kMyConst5 = 5;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
+ def testFilterIfNotMultipleConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIfNot=blue]
+ const int kMyConst1 = 1;
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIfNot=orange]
+ const int kMyConst3 = 3;
+ """
+ expected_source = """
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIfNot=orange]
+ const int kMyConst3 = 3;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
+ def testFilterEnum(self):
+ """Test that EnumValues are correctly filtered from an Enum."""
+ enum_source = """
+ enum MyEnum {
+ [EnableIf=purple]
+ VALUE1,
+ [EnableIf=blue]
+ VALUE2,
+ VALUE3,
+ };
+ """
+ expected_source = """
+ enum MyEnum {
+ [EnableIf=blue]
+ VALUE2,
+ VALUE3
+ };
+ """
+ self.parseAndAssertEqual(enum_source, expected_source)
+
+ def testFilterImport(self):
+ """Test that imports are correctly filtered from a Mojom."""
+ import_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ import "bar.mojom";
+ [EnableIf=purple]
+ import "baz.mojom";
+ """
+ expected_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ import "bar.mojom";
+ """
+ self.parseAndAssertEqual(import_source, expected_source)
+
+ def testFilterIfNotImport(self):
+ """Test that imports are correctly filtered from a Mojom."""
+ import_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ [EnableIfNot=purple]
+ import "bar.mojom";
+ [EnableIfNot=green]
+ import "baz.mojom";
+ """
+ expected_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ [EnableIfNot=purple]
+ import "bar.mojom";
+ """
+ self.parseAndAssertEqual(import_source, expected_source)
+
+ def testFilterInterface(self):
+ """Test that definitions are correctly filtered from an Interface."""
+ interface_source = """
+ interface MyInterface {
+ [EnableIf=blue]
+ enum MyEnum {
+ [EnableIf=purple]
+ VALUE1,
+ VALUE2,
+ };
+ [EnableIf=blue]
+ const int32 kMyConst = 123;
+ [EnableIf=purple]
+ MyMethod();
+ };
+ """
+ expected_source = """
+ interface MyInterface {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE2,
+ };
+ [EnableIf=blue]
+ const int32 kMyConst = 123;
+ };
+ """
+ self.parseAndAssertEqual(interface_source, expected_source)
+
+ def testFilterMethod(self):
+ """Test that Parameters are correctly filtered from a Method."""
+ method_source = """
+ interface MyInterface {
+ [EnableIf=blue]
+ MyMethod([EnableIf=purple] int32 a) => ([EnableIf=red] int32 b);
+ };
+ """
+ expected_source = """
+ interface MyInterface {
+ [EnableIf=blue]
+ MyMethod() => ([EnableIf=red] int32 b);
+ };
+ """
+ self.parseAndAssertEqual(method_source, expected_source)
+
+ def testFilterStruct(self):
+ """Test that definitions are correctly filtered from a Struct."""
+ struct_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ [EnableIf=purple]
+ VALUE2,
+ };
+ [EnableIf=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIf=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ [EnableIf=orange]
+ double f;
+ };
+ """
+ expected_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ };
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ };
+ """
+ self.parseAndAssertEqual(struct_source, expected_source)
+
+ def testFilterIfNotStruct(self):
+ """Test that definitions are correctly filtered from a Struct."""
+ struct_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ [EnableIfNot=red]
+ VALUE2,
+ };
+ [EnableIfNot=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIfNot=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ [EnableIfNot=red]
+ double f;
+ };
+ """
+ expected_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ };
+ [EnableIfNot=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIfNot=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ };
+ """
+ self.parseAndAssertEqual(struct_source, expected_source)
+
+ def testFilterUnion(self):
+ """Test that UnionFields are correctly filtered from a Union."""
+ union_source = """
+ union MyUnion {
+ [EnableIf=yellow]
+ int32 a;
+ [EnableIf=red]
+ bool b;
+ };
+ """
+ expected_source = """
+ union MyUnion {
+ [EnableIf=red]
+ bool b;
+ };
+ """
+ self.parseAndAssertEqual(union_source, expected_source)
+
+ def testSameNameFields(self):
+ mojom_source = """
+ enum Foo {
+ [EnableIf=red]
+ VALUE1 = 5,
+ [EnableIf=yellow]
+ VALUE1 = 6,
+ };
+ [EnableIf=red]
+ const double kMyConst = 1.23;
+ [EnableIf=yellow]
+ const double kMyConst = 4.56;
+ """
+ expected_source = """
+ enum Foo {
+ [EnableIf=red]
+ VALUE1 = 5,
+ };
+ [EnableIf=red]
+ const double kMyConst = 1.23;
+ """
+ self.parseAndAssertEqual(mojom_source, expected_source)
+
+ def testFeaturesWithEnableIf(self):
+ mojom_source = """
+ feature Foo {
+ const string name = "FooFeature";
+ [EnableIf=red]
+ const bool default_state = false;
+ [EnableIf=yellow]
+ const bool default_state = true;
+ };
+ """
+ expected_source = """
+ feature Foo {
+ const string name = "FooFeature";
+ [EnableIf=red]
+ const bool default_state = false;
+ };
+ """
+ self.parseAndAssertEqual(mojom_source, expected_source)
+
+ def testMultipleEnableIfs(self):
+ source = """
+ enum Foo {
+ [EnableIf=red,EnableIf=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
+
+ def testMixedEnableIfAndEnableIfNot(self):
+ source = """
+ enum Foo {
+ [EnableIf=red,EnableIfNot=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
+
+ def testMultipleEnableIfNots(self):
+ source = """
+ enum Foo {
+ [EnableIfNot=red,EnableIfNot=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
new file mode 100644
index 00000000..00136a8b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
@@ -0,0 +1,249 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import sys
+
+from mojom import fileutil
+from mojom.error import Error
+
+fileutil.AddLocalRepoThirdPartyDirToModulePath()
+from ply.lex import TOKEN
+
+
+class LexError(Error):
+ """Class for errors from the lexer."""
+
+ def __init__(self, filename, message, lineno):
+ Error.__init__(self, filename, message, lineno=lineno)
+
+
+# We have methods which look like they could be functions:
+# pylint: disable=R0201
+class Lexer:
+ def __init__(self, filename):
+ self.filename = filename
+
+ ######################-- PRIVATE --######################
+
+ ##
+ ## Internal auxiliary methods
+ ##
+ def _error(self, msg, token):
+ raise LexError(self.filename, msg, token.lineno)
+
+ ##
+ ## Reserved keywords
+ ##
+ keywords = (
+ 'HANDLE',
+ 'IMPORT',
+ 'MODULE',
+ 'STRUCT',
+ 'UNION',
+ 'INTERFACE',
+ 'ENUM',
+ 'CONST',
+ 'TRUE',
+ 'FALSE',
+ 'DEFAULT',
+ 'ARRAY',
+ 'MAP',
+ 'ASSOCIATED',
+ 'PENDING_REMOTE',
+ 'PENDING_RECEIVER',
+ 'PENDING_ASSOCIATED_REMOTE',
+ 'PENDING_ASSOCIATED_RECEIVER',
+ 'FEATURE',
+ )
+
+ keyword_map = {}
+ for keyword in keywords:
+ keyword_map[keyword.lower()] = keyword
+
+ ##
+ ## All the tokens recognized by the lexer
+ ##
+ tokens = keywords + (
+ # Identifiers
+ 'NAME',
+
+ # Constants
+ 'ORDINAL',
+ 'INT_CONST_DEC',
+ 'INT_CONST_HEX',
+ 'FLOAT_CONST',
+
+ # String literals
+ 'STRING_LITERAL',
+
+ # Operators
+ 'MINUS',
+ 'PLUS',
+ 'QSTN',
+
+ # Assignment
+ 'EQUALS',
+
+ # Request / response
+ 'RESPONSE',
+
+ # Delimiters
+ 'LPAREN',
+ 'RPAREN', # ( )
+ 'LBRACKET',
+ 'RBRACKET', # [ ]
+ 'LBRACE',
+ 'RBRACE', # { }
+ 'LANGLE',
+ 'RANGLE', # < >
+ 'SEMI', # ;
+ 'COMMA',
+ 'DOT' # , .
+ )
+
+ ##
+ ## Regexes for use in tokens
+ ##
+
+ # valid C identifiers (K&R2: A.2.3)
+ identifier = r'[a-zA-Z_][0-9a-zA-Z_]*'
+
+ hex_prefix = '0[xX]'
+ hex_digits = '[0-9a-fA-F]+'
+
+ # integer constants (K&R2: A.2.5.1)
+ decimal_constant = '0|([1-9][0-9]*)'
+ hex_constant = hex_prefix + hex_digits
+ # Don't allow octal constants (even invalid octal).
+ octal_constant_disallowed = '0[0-9]+'
+
+ # character constants (K&R2: A.2.5.2)
+ # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
+ # directives with Windows paths as filenames (..\..\dir\file)
+ # For the same reason, decimal_escape allows all digit sequences. We want to
+ # parse all correct code, even if it means to sometimes parse incorrect
+ # code.
+ #
+ simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
+ decimal_escape = r"""(\d+)"""
+ hex_escape = r"""(x[0-9a-fA-F]+)"""
+ bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+
+ escape_sequence = \
+ r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
+
+ # string literals (K&R2: A.2.6)
+ string_char = r"""([^"\\\n]|""" + escape_sequence + ')'
+ string_literal = '"' + string_char + '*"'
+ bad_string_literal = '"' + string_char + '*' + bad_escape + string_char + '*"'
+
+ # floating constants (K&R2: A.2.5.3)
+ exponent_part = r"""([eE][-+]?[0-9]+)"""
+ fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
+ floating_constant = \
+ '(((('+fractional_constant+')'+ \
+ exponent_part+'?)|([0-9]+'+exponent_part+')))'
+
+ # Ordinals
+ ordinal = r'@[0-9]+'
+ missing_ordinal_value = r'@'
+ # Don't allow ordinal values in octal (even invalid octal, like 09) or
+ # hexadecimal.
+ octal_or_hex_ordinal_disallowed = (
+ r'@((0[0-9]+)|(' + hex_prefix + hex_digits + '))')
+
+ ##
+ ## Rules for the normal state
+ ##
+ t_ignore = ' \t\r'
+
+ # Newlines
+ def t_NEWLINE(self, t):
+ r'\n+'
+ t.lexer.lineno += len(t.value)
+
+ # Operators
+ t_MINUS = r'-'
+ t_PLUS = r'\+'
+ t_QSTN = r'\?'
+
+ # =
+ t_EQUALS = r'='
+
+ # =>
+ t_RESPONSE = r'=>'
+
+ # Delimiters
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_LBRACKET = r'\['
+ t_RBRACKET = r'\]'
+ t_LBRACE = r'\{'
+ t_RBRACE = r'\}'
+ t_LANGLE = r'<'
+ t_RANGLE = r'>'
+ t_COMMA = r','
+ t_DOT = r'\.'
+ t_SEMI = r';'
+
+ t_STRING_LITERAL = string_literal
+
+ # The following floating and integer constants are defined as
+ # functions to impose a strict order (otherwise, decimal
+ # is placed before the others because its regex is longer,
+ # and this is bad)
+ #
+ @TOKEN(floating_constant)
+ def t_FLOAT_CONST(self, t):
+ return t
+
+ @TOKEN(hex_constant)
+ def t_INT_CONST_HEX(self, t):
+ return t
+
+ @TOKEN(octal_constant_disallowed)
+ def t_OCTAL_CONSTANT_DISALLOWED(self, t):
+ msg = "Octal values not allowed"
+ self._error(msg, t)
+
+ @TOKEN(decimal_constant)
+ def t_INT_CONST_DEC(self, t):
+ return t
+
+ # unmatched string literals are caught by the preprocessor
+
+ @TOKEN(bad_string_literal)
+ def t_BAD_STRING_LITERAL(self, t):
+ msg = "String contains invalid escape code"
+ self._error(msg, t)
+
+ # Handle ordinal-related tokens in the right order:
+ @TOKEN(octal_or_hex_ordinal_disallowed)
+ def t_OCTAL_OR_HEX_ORDINAL_DISALLOWED(self, t):
+ msg = "Octal and hexadecimal ordinal values not allowed"
+ self._error(msg, t)
+
+ @TOKEN(ordinal)
+ def t_ORDINAL(self, t):
+ return t
+
+ @TOKEN(missing_ordinal_value)
+ def t_BAD_ORDINAL(self, t):
+ msg = "Missing ordinal value"
+ self._error(msg, t)
+
+ @TOKEN(identifier)
+ def t_NAME(self, t):
+ t.type = self.keyword_map.get(t.value, "NAME")
+ return t
+
+ # Ignore C and C++ style comments
+ def t_COMMENT(self, t):
+ r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
+ t.lexer.lineno += t.value.count("\n")
+
+ def t_error(self, t):
+ msg = "Illegal character %s" % repr(t.value[0])
+ self._error(msg, t)
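A minimal tokenizing sketch (illustrative only, not part of the patch; ply and the mojom package are assumed to be on the module path, as the unit tests below arrange):

from ply import lex
import mojom.parse.lexer

lexer = lex.lex(object=mojom.parse.lexer.Lexer("example.mojom"))
lexer.input("const int32 kAnswer = 42;")
print([(tok.type, tok.value) for tok in iter(lexer.token, None)])
# [('CONST', 'const'), ('NAME', 'int32'), ('NAME', 'kAnswer'),
#  ('EQUALS', '='), ('INT_CONST_DEC', '42'), ('SEMI', ';')]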
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
new file mode 100644
index 00000000..bc9f8354
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
@@ -0,0 +1,194 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import importlib.util
+import os.path
+import sys
+import unittest
+
+def _GetDirAbove(dirname):
+ """Returns the directory "above" this file containing |dirname| (which must
+ also be "above" this file)."""
+ path = os.path.abspath(__file__)
+ while True:
+ path, tail = os.path.split(path)
+ assert tail
+ if tail == dirname:
+ return path
+
+sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
+from ply import lex
+
+try:
+ importlib.util.find_spec("mojom")
+except ImportError:
+ sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
+import mojom.parse.lexer
+
+# This (monkey-patching LexToken to make comparison value-based) is evil, but
+# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
+# for object identity.)
+def _LexTokenEq(self, other):
+ return self.type == other.type and self.value == other.value and \
+ self.lineno == other.lineno and self.lexpos == other.lexpos
+
+
+setattr(lex.LexToken, '__eq__', _LexTokenEq)
+
+
+def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
+ """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
+ but lexpos is 0-based.)"""
+ rv = lex.LexToken()
+ rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
+ return rv
+
+
+def _MakeLexTokenForKeyword(keyword, **kwargs):
+ """Makes a LexToken for the given keyword."""
+ return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
+
+
+class LexerTest(unittest.TestCase):
+ """Tests |mojom.parse.lexer.Lexer|."""
+
+ def __init__(self, *args, **kwargs):
+ unittest.TestCase.__init__(self, *args, **kwargs)
+ # Clone all lexer instances from this one, since making a lexer is slow.
+ self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
+
+ def testValidKeywords(self):
+ """Tests valid keywords."""
+ self.assertEquals(
+ self._SingleTokenForInput("handle"), _MakeLexTokenForKeyword("handle"))
+ self.assertEquals(
+ self._SingleTokenForInput("import"), _MakeLexTokenForKeyword("import"))
+ self.assertEquals(
+ self._SingleTokenForInput("module"), _MakeLexTokenForKeyword("module"))
+ self.assertEquals(
+ self._SingleTokenForInput("struct"), _MakeLexTokenForKeyword("struct"))
+ self.assertEquals(
+ self._SingleTokenForInput("union"), _MakeLexTokenForKeyword("union"))
+ self.assertEquals(
+ self._SingleTokenForInput("interface"),
+ _MakeLexTokenForKeyword("interface"))
+ self.assertEquals(
+ self._SingleTokenForInput("enum"), _MakeLexTokenForKeyword("enum"))
+ self.assertEquals(
+ self._SingleTokenForInput("const"), _MakeLexTokenForKeyword("const"))
+ self.assertEquals(
+ self._SingleTokenForInput("true"), _MakeLexTokenForKeyword("true"))
+ self.assertEquals(
+ self._SingleTokenForInput("false"), _MakeLexTokenForKeyword("false"))
+ self.assertEquals(
+ self._SingleTokenForInput("default"),
+ _MakeLexTokenForKeyword("default"))
+ self.assertEquals(
+ self._SingleTokenForInput("array"), _MakeLexTokenForKeyword("array"))
+ self.assertEquals(
+ self._SingleTokenForInput("map"), _MakeLexTokenForKeyword("map"))
+ self.assertEquals(
+ self._SingleTokenForInput("associated"),
+ _MakeLexTokenForKeyword("associated"))
+
+ def testValidIdentifiers(self):
+ """Tests identifiers."""
+ self.assertEquals(
+ self._SingleTokenForInput("abcd"), _MakeLexToken("NAME", "abcd"))
+ self.assertEquals(
+ self._SingleTokenForInput("AbC_d012_"),
+ _MakeLexToken("NAME", "AbC_d012_"))
+ self.assertEquals(
+ self._SingleTokenForInput("_0123"), _MakeLexToken("NAME", "_0123"))
+
+ def testInvalidIdentifiers(self):
+ with self.assertRaisesRegexp(
+ mojom.parse.lexer.LexError,
+ r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
+ self._TokensForInput("$abc")
+ with self.assertRaisesRegexp(
+ mojom.parse.lexer.LexError,
+ r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
+ self._TokensForInput("a$bc")
+
+ def testDecimalIntegerConstants(self):
+ self.assertEquals(
+ self._SingleTokenForInput("0"), _MakeLexToken("INT_CONST_DEC", "0"))
+ self.assertEquals(
+ self._SingleTokenForInput("1"), _MakeLexToken("INT_CONST_DEC", "1"))
+ self.assertEquals(
+ self._SingleTokenForInput("123"), _MakeLexToken("INT_CONST_DEC", "123"))
+ self.assertEquals(
+ self._SingleTokenForInput("10"), _MakeLexToken("INT_CONST_DEC", "10"))
+
+ def testValidTokens(self):
+ """Tests valid tokens (which aren't tested elsewhere)."""
+ # Keywords tested in |testValidKeywords|.
+ # NAME tested in |testValidIdentifiers|.
+ self.assertEquals(
+ self._SingleTokenForInput("@123"), _MakeLexToken("ORDINAL", "@123"))
+ self.assertEquals(
+ self._SingleTokenForInput("456"), _MakeLexToken("INT_CONST_DEC", "456"))
+ self.assertEquals(
+ self._SingleTokenForInput("0x01aB2eF3"),
+ _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
+ self.assertEquals(
+ self._SingleTokenForInput("123.456"),
+ _MakeLexToken("FLOAT_CONST", "123.456"))
+ self.assertEquals(
+ self._SingleTokenForInput("\"hello\""),
+ _MakeLexToken("STRING_LITERAL", "\"hello\""))
+ self.assertEquals(
+ self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
+ self.assertEquals(
+ self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
+ self.assertEquals(
+ self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
+ self.assertEquals(
+ self._SingleTokenForInput("="), _MakeLexToken("EQUALS", "="))
+ self.assertEquals(
+ self._SingleTokenForInput("=>"), _MakeLexToken("RESPONSE", "=>"))
+ self.assertEquals(
+ self._SingleTokenForInput("("), _MakeLexToken("LPAREN", "("))
+ self.assertEquals(
+ self._SingleTokenForInput(")"), _MakeLexToken("RPAREN", ")"))
+ self.assertEquals(
+ self._SingleTokenForInput("["), _MakeLexToken("LBRACKET", "["))
+ self.assertEquals(
+ self._SingleTokenForInput("]"), _MakeLexToken("RBRACKET", "]"))
+ self.assertEquals(
+ self._SingleTokenForInput("{"), _MakeLexToken("LBRACE", "{"))
+ self.assertEquals(
+ self._SingleTokenForInput("}"), _MakeLexToken("RBRACE", "}"))
+ self.assertEquals(
+ self._SingleTokenForInput("<"), _MakeLexToken("LANGLE", "<"))
+ self.assertEquals(
+ self._SingleTokenForInput(">"), _MakeLexToken("RANGLE", ">"))
+ self.assertEquals(
+ self._SingleTokenForInput(";"), _MakeLexToken("SEMI", ";"))
+ self.assertEquals(
+ self._SingleTokenForInput(","), _MakeLexToken("COMMA", ","))
+ self.assertEquals(self._SingleTokenForInput("."), _MakeLexToken("DOT", "."))
+
+ def _TokensForInput(self, input_string):
+ """Gets a list of tokens for the given input string."""
+ lexer = self._zygote_lexer.clone()
+ lexer.input(input_string)
+ rv = []
+ while True:
+ tok = lexer.token()
+ if not tok:
+ return rv
+ rv.append(tok)
+
+ def _SingleTokenForInput(self, input_string):
+ """Gets the single token for the given input string. (Raises an exception if
+ the input string does not result in exactly one token.)"""
+ toks = self._TokensForInput(input_string)
+ assert len(toks) == 1
+ return toks[0]
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
new file mode 100644
index 00000000..1dffd98b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
@@ -0,0 +1,510 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generates a syntax tree from a Mojo IDL file."""
+
+# Breaking parser stanzas is unhelpful so allow longer lines.
+# pylint: disable=line-too-long
+
+import os.path
+import sys
+
+from mojom import fileutil
+from mojom.error import Error
+from mojom.parse import ast
+from mojom.parse.lexer import Lexer
+
+fileutil.AddLocalRepoThirdPartyDirToModulePath()
+from ply import lex
+from ply import yacc
+
+_MAX_ORDINAL_VALUE = 0xffffffff
+_MAX_ARRAY_SIZE = 0xffffffff
+
+
+class ParseError(Error):
+ """Class for errors from the parser."""
+
+ def __init__(self, filename, message, lineno=None, snippet=None):
+ Error.__init__(
+ self,
+ filename,
+ message,
+ lineno=lineno,
+ addenda=([snippet] if snippet else None))
+
+
+# We have methods which look like they could be functions:
+# pylint: disable=R0201
+class Parser:
+ def __init__(self, lexer, source, filename):
+ self.tokens = lexer.tokens
+ self.source = source
+ self.filename = filename
+
+ # Names of functions
+ #
+ # In general, we name functions after the left-hand-side of the rule(s) that
+ # they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
+ #
+ # There may be multiple functions handling rules for the same left-hand-side;
+ # then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
+ # where N is a number (numbered starting from 1). Note that using multiple
+ # functions is actually more efficient than having single functions handle
+ # multiple rules (and, e.g., distinguishing them by examining |len(p)|).
+ #
+ # It's also possible to have a function handling multiple rules with different
+ # left-hand-sides. We do not do this.
+ #
+ # See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.
+
+ # TODO(vtl): Get rid of the braces in the module "statement". (Consider
+ # renaming "module" -> "package".) Then we'll be able to have a single rule
+ # for root (by making module "optional").
+ def p_root_1(self, p):
+ """root : """
+ p[0] = ast.Mojom(None, ast.ImportList(), [])
+
+ def p_root_2(self, p):
+ """root : root module"""
+ if p[1].module is not None:
+ raise ParseError(
+ self.filename,
+ "Multiple \"module\" statements not allowed:",
+ p[2].lineno,
+ snippet=self._GetSnippet(p[2].lineno))
+ if p[1].import_list.items or p[1].definition_list:
+ raise ParseError(
+ self.filename,
+ "\"module\" statements must precede imports and definitions:",
+ p[2].lineno,
+ snippet=self._GetSnippet(p[2].lineno))
+ p[0] = p[1]
+ p[0].module = p[2]
+
+ def p_root_3(self, p):
+ """root : root import"""
+ if p[1].definition_list:
+ raise ParseError(
+ self.filename,
+ "\"import\" statements must precede definitions:",
+ p[2].lineno,
+ snippet=self._GetSnippet(p[2].lineno))
+ p[0] = p[1]
+ p[0].import_list.Append(p[2])
+
+ def p_root_4(self, p):
+ """root : root definition"""
+ p[0] = p[1]
+ p[0].definition_list.append(p[2])
+
+ def p_import(self, p):
+ """import : attribute_section IMPORT STRING_LITERAL SEMI"""
+ # 'eval' the literal to strip the quotes.
+ # TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
+ p[0] = ast.Import(
+ p[1], eval(p[3]), filename=self.filename, lineno=p.lineno(2))
+
+ def p_module(self, p):
+ """module : attribute_section MODULE identifier_wrapped SEMI"""
+ p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))
+
+ def p_definition(self, p):
+ """definition : struct
+ | union
+ | interface
+ | enum
+ | const
+ | feature"""
+ p[0] = p[1]
+
+ def p_attribute_section_1(self, p):
+ """attribute_section : """
+ p[0] = None
+
+ def p_attribute_section_2(self, p):
+ """attribute_section : LBRACKET attribute_list RBRACKET"""
+ p[0] = p[2]
+
+ def p_attribute_list_1(self, p):
+ """attribute_list : """
+ p[0] = ast.AttributeList()
+
+ def p_attribute_list_2(self, p):
+ """attribute_list : nonempty_attribute_list"""
+ p[0] = p[1]
+
+ def p_nonempty_attribute_list_1(self, p):
+ """nonempty_attribute_list : attribute"""
+ p[0] = ast.AttributeList(p[1])
+
+ def p_nonempty_attribute_list_2(self, p):
+ """nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
+ p[0] = p[1]
+ p[0].Append(p[3])
+
+ def p_attribute_1(self, p):
+ """attribute : name_wrapped EQUALS identifier_wrapped"""
+ p[0] = ast.Attribute(p[1],
+ p[3][1],
+ filename=self.filename,
+ lineno=p.lineno(1))
+
+ def p_attribute_2(self, p):
+ """attribute : name_wrapped EQUALS evaled_literal
+ | name_wrapped EQUALS name_wrapped"""
+ p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
+
+ def p_attribute_3(self, p):
+ """attribute : name_wrapped"""
+ p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))
+
+ def p_evaled_literal(self, p):
+ """evaled_literal : literal"""
+ # 'eval' the literal to strip the quotes. Handle keywords "true" and "false"
+ # specially since they cannot directly be evaluated to python boolean
+ # values.
+ if p[1] == "true":
+ p[0] = True
+ elif p[1] == "false":
+ p[0] = False
+ else:
+ p[0] = eval(p[1])
+
+ def p_struct_1(self, p):
+ """struct : attribute_section STRUCT name_wrapped LBRACE struct_body RBRACE SEMI"""
+ p[0] = ast.Struct(p[3], p[1], p[5])
+
+ def p_struct_2(self, p):
+ """struct : attribute_section STRUCT name_wrapped SEMI"""
+ p[0] = ast.Struct(p[3], p[1], None)
+
+ def p_struct_body_1(self, p):
+ """struct_body : """
+ p[0] = ast.StructBody()
+
+ def p_struct_body_2(self, p):
+ """struct_body : struct_body const
+ | struct_body enum
+ | struct_body struct_field"""
+ p[0] = p[1]
+ p[0].Append(p[2])
+
+ def p_struct_field(self, p):
+ """struct_field : attribute_section typename name_wrapped ordinal default SEMI"""
+ p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])
+
+ def p_feature(self, p):
+ """feature : attribute_section FEATURE NAME LBRACE feature_body RBRACE SEMI"""
+ p[0] = ast.Feature(p[3], p[1], p[5])
+
+ def p_feature_body_1(self, p):
+ """feature_body : """
+ p[0] = ast.FeatureBody()
+
+ def p_feature_body_2(self, p):
+ """feature_body : feature_body const"""
+ p[0] = p[1]
+ p[0].Append(p[2])
+
+ def p_union(self, p):
+ """union : attribute_section UNION name_wrapped LBRACE union_body RBRACE SEMI"""
+ p[0] = ast.Union(p[3], p[1], p[5])
+
+ def p_union_body_1(self, p):
+ """union_body : """
+ p[0] = ast.UnionBody()
+
+ def p_union_body_2(self, p):
+ """union_body : union_body union_field"""
+ p[0] = p[1]
+ p[1].Append(p[2])
+
+ def p_union_field(self, p):
+ """union_field : attribute_section typename name_wrapped ordinal SEMI"""
+ p[0] = ast.UnionField(p[3], p[1], p[4], p[2])
+
+ def p_default_1(self, p):
+ """default : """
+ p[0] = None
+
+ def p_default_2(self, p):
+ """default : EQUALS constant"""
+ p[0] = p[2]
+
+ def p_interface(self, p):
+ """interface : attribute_section INTERFACE name_wrapped LBRACE interface_body RBRACE SEMI"""
+ p[0] = ast.Interface(p[3], p[1], p[5])
+
+ def p_interface_body_1(self, p):
+ """interface_body : """
+ p[0] = ast.InterfaceBody()
+
+ def p_interface_body_2(self, p):
+ """interface_body : interface_body const
+ | interface_body enum
+ | interface_body method"""
+ p[0] = p[1]
+ p[0].Append(p[2])
+
+ def p_response_1(self, p):
+ """response : """
+ p[0] = None
+
+ def p_response_2(self, p):
+ """response : RESPONSE LPAREN parameter_list RPAREN"""
+ p[0] = p[3]
+
+ def p_method(self, p):
+ """method : attribute_section name_wrapped ordinal LPAREN parameter_list RPAREN response SEMI"""
+ p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])
+
+ def p_parameter_list_1(self, p):
+ """parameter_list : """
+ p[0] = ast.ParameterList()
+
+ def p_parameter_list_2(self, p):
+ """parameter_list : nonempty_parameter_list"""
+ p[0] = p[1]
+
+ def p_nonempty_parameter_list_1(self, p):
+ """nonempty_parameter_list : parameter"""
+ p[0] = ast.ParameterList(p[1])
+
+ def p_nonempty_parameter_list_2(self, p):
+ """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
+ p[0] = p[1]
+ p[0].Append(p[3])
+
+ def p_parameter(self, p):
+ """parameter : attribute_section typename name_wrapped ordinal"""
+ p[0] = ast.Parameter(
+ p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))
+
+ def p_typename(self, p):
+ """typename : nonnullable_typename QSTN
+ | nonnullable_typename"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = p[1] + "?"
+
+ def p_nonnullable_typename(self, p):
+ """nonnullable_typename : basictypename
+ | array
+ | fixed_array
+ | associative_array"""
+ p[0] = p[1]
+
+ def p_basictypename(self, p):
+ """basictypename : remotetype
+ | receivertype
+ | associatedremotetype
+ | associatedreceivertype
+ | identifier
+ | ASSOCIATED identifier
+ | handletype"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = "asso<" + p[2] + ">"
+
+ def p_remotetype(self, p):
+ """remotetype : PENDING_REMOTE LANGLE identifier RANGLE"""
+ p[0] = "rmt<%s>" % p[3]
+
+ def p_receivertype(self, p):
+ """receivertype : PENDING_RECEIVER LANGLE identifier RANGLE"""
+ p[0] = "rcv<%s>" % p[3]
+
+ def p_associatedremotetype(self, p):
+ """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier RANGLE"""
+ p[0] = "rma<%s>" % p[3]
+
+ def p_associatedreceivertype(self, p):
+ """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier RANGLE"""
+ p[0] = "rca<%s>" % p[3]
+
+ def p_handletype(self, p):
+ """handletype : HANDLE
+ | HANDLE LANGLE name_wrapped RANGLE"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if p[3] not in ('data_pipe_consumer', 'data_pipe_producer',
+ 'message_pipe', 'shared_buffer', 'platform'):
+ # Note: We don't enable tracking of line numbers for everything, so we
+ # can't use |p.lineno(3)|.
+ raise ParseError(
+ self.filename,
+ "Invalid handle type %r:" % p[3],
+ lineno=p.lineno(1),
+ snippet=self._GetSnippet(p.lineno(1)))
+ p[0] = "handle<" + p[3] + ">"
+
+ def p_array(self, p):
+ """array : ARRAY LANGLE typename RANGLE"""
+ p[0] = p[3] + "[]"
+
+ def p_fixed_array(self, p):
+ """fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
+ value = int(p[5])
+ if value == 0 or value > _MAX_ARRAY_SIZE:
+ raise ParseError(
+ self.filename,
+ "Fixed array size %d invalid:" % value,
+ lineno=p.lineno(5),
+ snippet=self._GetSnippet(p.lineno(5)))
+ p[0] = p[3] + "[" + p[5] + "]"
+
+ def p_associative_array(self, p):
+ """associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
+ p[0] = p[5] + "{" + p[3] + "}"
+
+ def p_ordinal_1(self, p):
+ """ordinal : """
+ p[0] = None
+
+ def p_ordinal_2(self, p):
+ """ordinal : ORDINAL"""
+ value = int(p[1][1:])
+ if value > _MAX_ORDINAL_VALUE:
+ raise ParseError(
+ self.filename,
+ "Ordinal value %d too large:" % value,
+ lineno=p.lineno(1),
+ snippet=self._GetSnippet(p.lineno(1)))
+ p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
+
+ def p_enum_1(self, p):
+ """enum : attribute_section ENUM name_wrapped LBRACE enum_value_list RBRACE SEMI
+ | attribute_section ENUM name_wrapped LBRACE \
+ nonempty_enum_value_list COMMA RBRACE SEMI"""
+ p[0] = ast.Enum(
+ p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))
+
+ def p_enum_2(self, p):
+ """enum : attribute_section ENUM name_wrapped SEMI"""
+ p[0] = ast.Enum(
+ p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))
+
+ def p_enum_value_list_1(self, p):
+ """enum_value_list : """
+ p[0] = ast.EnumValueList()
+
+ def p_enum_value_list_2(self, p):
+ """enum_value_list : nonempty_enum_value_list"""
+ p[0] = p[1]
+
+ def p_nonempty_enum_value_list_1(self, p):
+ """nonempty_enum_value_list : enum_value"""
+ p[0] = ast.EnumValueList(p[1])
+
+ def p_nonempty_enum_value_list_2(self, p):
+ """nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
+ p[0] = p[1]
+ p[0].Append(p[3])
+
+ def p_enum_value(self, p):
+ """enum_value : attribute_section name_wrapped
+ | attribute_section name_wrapped EQUALS int
+ | attribute_section name_wrapped EQUALS identifier_wrapped"""
+ p[0] = ast.EnumValue(
+ p[2],
+ p[1],
+ p[4] if len(p) == 5 else None,
+ filename=self.filename,
+ lineno=p.lineno(2))
+
+ def p_const(self, p):
+ """const : attribute_section CONST typename name_wrapped EQUALS constant SEMI"""
+ p[0] = ast.Const(p[4], p[1], p[3], p[6])
+
+ def p_constant(self, p):
+ """constant : literal
+ | identifier_wrapped"""
+ p[0] = p[1]
+
+ def p_identifier_wrapped(self, p):
+ """identifier_wrapped : identifier"""
+ p[0] = ('IDENTIFIER', p[1])
+
+ # TODO(vtl): Make this produce a "wrapped" identifier (probably as an
+ # |ast.Identifier|, to be added) and get rid of identifier_wrapped.
+ def p_identifier(self, p):
+ """identifier : name_wrapped
+ | name_wrapped DOT identifier"""
+ p[0] = ''.join(p[1:])
+
+ # Allow 'feature' to be a name literal not just a keyword.
+ def p_name_wrapped(self, p):
+ """name_wrapped : NAME
+ | FEATURE"""
+ p[0] = p[1]
+
+ def p_literal(self, p):
+ """literal : int
+ | float
+ | TRUE
+ | FALSE
+ | DEFAULT
+ | STRING_LITERAL"""
+ p[0] = p[1]
+
+ def p_int(self, p):
+ """int : int_const
+ | PLUS int_const
+ | MINUS int_const"""
+ p[0] = ''.join(p[1:])
+
+ def p_int_const(self, p):
+ """int_const : INT_CONST_DEC
+ | INT_CONST_HEX"""
+ p[0] = p[1]
+
+ def p_float(self, p):
+ """float : FLOAT_CONST
+ | PLUS FLOAT_CONST
+ | MINUS FLOAT_CONST"""
+ p[0] = ''.join(p[1:])
+
+ def p_error(self, e):
+ if e is None:
+ # Unexpected EOF.
+ # TODO(vtl): Can we figure out what's missing?
+ raise ParseError(self.filename, "Unexpected end of file")
+
+ if e.value == 'feature':
+ raise ParseError(self.filename,
+ "`feature` is reserved for a future mojom keyword",
+ lineno=e.lineno,
+ snippet=self._GetSnippet(e.lineno))
+
+ raise ParseError(
+ self.filename,
+ "Unexpected %r:" % e.value,
+ lineno=e.lineno,
+ snippet=self._GetSnippet(e.lineno))
+
+ def _GetSnippet(self, lineno):
+ return self.source.split('\n')[lineno - 1]
+
+
+def Parse(source, filename):
+ """Parse source file to AST.
+
+ Args:
+ source: The source text as a str (Python 2 or 3) or unicode (Python 2).
+ filename: The filename that |source| originates from.
+
+ Returns:
+ The AST as a mojom.parse.ast.Mojom object.
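+
+  Example (illustrative; mirrors testTrivialValidSource in parser_unittest):
+    tree = Parse("module my_module;", "my_file.mojom")
+    # tree == ast.Mojom(ast.Module(('IDENTIFIER', 'my_module'), None),
+    #                   ast.ImportList(), [])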
+ """
+ lexer = Lexer(filename)
+ parser = Parser(lexer, source, filename)
+
+ lex.lex(object=lexer)
+ yacc.yacc(module=parser, debug=0, write_tables=0)
+
+ tree = yacc.parse(source)
+ return tree
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
new file mode 100644
index 00000000..0a26307b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
@@ -0,0 +1,1375 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from mojom.parse import ast
+from mojom.parse import lexer
+from mojom.parse import parser
+
+class ParserTest(unittest.TestCase):
+ """Tests |parser.Parse()|."""
+
+ def testTrivialValidSource(self):
+ """Tests a trivial, but valid, .mojom source."""
+
+ source = """\
+ // This is a comment.
+
+ module my_module;
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testSourceWithCrLfs(self):
+ """Tests a .mojom source with CR-LFs instead of LFs."""
+
+ source = "// This is a comment.\r\n\r\nmodule my_module;\r\n"
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testUnexpectedEOF(self):
+ """Tests a "truncated" .mojom source."""
+
+ source = """\
+ // This is a comment.
+
+ module my_module
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom: Error: Unexpected end of file$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testCommentLineNumbers(self):
+ """Tests that line numbers are correctly tracked when comments are
+ present."""
+
+ source1 = """\
+ // Isolated C++-style comments.
+
+ // Foo.
+ asdf1
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:4: Error: Unexpected 'asdf1':\n *asdf1$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ // Consecutive C++-style comments.
+ // Foo.
+ // Bar.
+
+ struct Yada { // Baz.
+ // Quux.
+ int32 x;
+ };
+
+ asdf2
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:10: Error: Unexpected 'asdf2':\n *asdf2$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ /* Single-line C-style comments. */
+ /* Foobar. */
+
+ /* Baz. */
+ asdf3
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:5: Error: Unexpected 'asdf3':\n *asdf3$"):
+ parser.Parse(source3, "my_file.mojom")
+
+ source4 = """\
+ /* Multi-line C-style comments.
+ */
+ /*
+ Foo.
+ Bar.
+ */
+
+ /* Baz
+ Quux. */
+ asdf4
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:10: Error: Unexpected 'asdf4':\n *asdf4$"):
+ parser.Parse(source4, "my_file.mojom")
+
+ def testSimpleStruct(self):
+ """Tests a simple .mojom source that just defines a struct."""
+
+ source = """\
+ module my_module;
+
+ struct MyStruct {
+ int32 a;
+ double b;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a', None, None, 'int32', None),
+ ast.StructField('b', None, None, 'double', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testSimpleStructWithoutModule(self):
+ """Tests a simple struct without an explict module statement."""
+
+ source = """\
+ struct MyStruct {
+ int32 a;
+ double b;
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a', None, None, 'int32', None),
+ ast.StructField('b', None, None, 'double', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testValidStructDefinitions(self):
+ """Tests all types of definitions that can occur in a struct."""
+
+ source = """\
+ struct MyStruct {
+ enum MyEnum { VALUE };
+ const double kMyConst = 1.23;
+ int32 a;
+ SomeOtherStruct b; // Invalidity detected at another stage.
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.Enum('MyEnum', None,
+ ast.EnumValueList(ast.EnumValue('VALUE', None, None))),
+ ast.Const('kMyConst', None, 'double', '1.23'),
+ ast.StructField('a', None, None, 'int32', None),
+ ast.StructField('b', None, None, 'SomeOtherStruct', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidStructDefinitions(self):
+ """Tests that definitions that aren't allowed in a struct are correctly
+ detected."""
+
+ source1 = """\
+ struct MyStruct {
+ MyMethod(int32 a);
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\(':\n"
+ r" *MyMethod\(int32 a\);$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ struct MyStruct {
+ struct MyInnerStruct {
+ int32 a;
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
+ r" *struct MyInnerStruct {$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ struct MyStruct {
+ interface MyInterface {
+ MyMethod(int32 a);
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Unexpected 'interface':\n"
+ r" *interface MyInterface {$"):
+ parser.Parse(source3, "my_file.mojom")
+
+ def testMissingModuleName(self):
+ """Tests an (invalid) .mojom with a missing module name."""
+
+ source1 = """\
+ // Missing module name.
+ module ;
+ struct MyStruct {
+ int32 a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Unexpected ';':\n *module ;$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ # Another similar case, but make sure that line-number tracking/reporting
+ # is correct.
+ source2 = """\
+ module
+ // This line intentionally left unblank.
+
+ struct MyStruct {
+ int32 a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'struct':\n"
+ r" *struct MyStruct {$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ def testMultipleModuleStatements(self):
+ """Tests an (invalid) .mojom with multiple module statements."""
+
+ source = """\
+ module foo;
+ module bar;
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Multiple \"module\" statements not "
+ r"allowed:\n *module bar;$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testModuleStatementAfterImport(self):
+ """Tests an (invalid) .mojom with a module statement after an import."""
+
+ source = """\
+ import "foo.mojom";
+ module foo;
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: \"module\" statements must precede imports "
+ r"and definitions:\n *module foo;$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testModuleStatementAfterDefinition(self):
+ """Tests an (invalid) .mojom with a module statement after a definition."""
+
+ source = """\
+ struct MyStruct {
+ int32 a;
+ };
+ module foo;
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:4: Error: \"module\" statements must precede imports "
+ r"and definitions:\n *module foo;$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testImportStatementAfterDefinition(self):
+ """Tests an (invalid) .mojom with an import statement after a definition."""
+
+ source = """\
+ struct MyStruct {
+ int32 a;
+ };
+ import "foo.mojom";
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:4: Error: \"import\" statements must precede "
+ r"definitions:\n *import \"foo.mojom\";$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testEnums(self):
+ """Tests that enum statements are correctly parsed."""
+
+ source = """\
+ module my_module;
+ enum MyEnum1 { VALUE1, VALUE2 }; // No trailing comma.
+ enum MyEnum2 {
+ VALUE1 = -1,
+ VALUE2 = 0,
+ VALUE3 = + 987, // Check that space is allowed.
+ VALUE4 = 0xAF12,
+ VALUE5 = -0x09bcd,
+ VALUE6 = VALUE5,
+ VALUE7, // Leave trailing comma.
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Enum(
+ 'MyEnum1', None,
+ ast.EnumValueList([
+ ast.EnumValue('VALUE1', None, None),
+ ast.EnumValue('VALUE2', None, None)
+ ])),
+ ast.Enum(
+ 'MyEnum2', None,
+ ast.EnumValueList([
+ ast.EnumValue('VALUE1', None, '-1'),
+ ast.EnumValue('VALUE2', None, '0'),
+ ast.EnumValue('VALUE3', None, '+987'),
+ ast.EnumValue('VALUE4', None, '0xAF12'),
+ ast.EnumValue('VALUE5', None, '-0x09bcd'),
+ ast.EnumValue('VALUE6', None, ('IDENTIFIER', 'VALUE5')),
+ ast.EnumValue('VALUE7', None, None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidEnumInitializers(self):
+ """Tests that invalid enum initializers are correctly detected."""
+
+ # Floating point value.
+ source2 = "enum MyEnum { VALUE = 0.123 };"
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '0\.123':\n"
+ r"enum MyEnum { VALUE = 0\.123 };$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ # Boolean value.
+ source2 = "enum MyEnum { VALUE = true };"
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected 'true':\n"
+ r"enum MyEnum { VALUE = true };$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ def testConsts(self):
+ """Tests some constants and struct members initialized with them."""
+
+ source = """\
+ module my_module;
+
+ struct MyStruct {
+ const int8 kNumber = -1;
+ int8 number@0 = kNumber;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.Const('kNumber', None, 'int8', '-1'),
+ ast.StructField('number', None, ast.Ordinal(0), 'int8',
+ ('IDENTIFIER', 'kNumber'))
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testNoConditionals(self):
+ """Tests that ?: is not allowed."""
+
+ source = """\
+ module my_module;
+
+ enum MyEnum {
+ MY_ENUM_1 = 1 ? 2 : 3
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected '\?':\n"
+ r" *MY_ENUM_1 = 1 \? 2 : 3$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testSimpleOrdinals(self):
+ """Tests that (valid) ordinal values are scanned correctly."""
+
+ source = """\
+ module my_module;
+
+ // This isn't actually valid .mojom, but the problem (missing ordinals)
+ // should be handled at a different level.
+ struct MyStruct {
+ int32 a0@0;
+ int32 a1@1;
+ int32 a2@2;
+ int32 a9@9;
+ int32 a10 @10;
+ int32 a11 @11;
+ int32 a29 @29;
+ int32 a1234567890 @1234567890;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a0', None, ast.Ordinal(0), 'int32', None),
+ ast.StructField('a1', None, ast.Ordinal(1), 'int32', None),
+ ast.StructField('a2', None, ast.Ordinal(2), 'int32', None),
+ ast.StructField('a9', None, ast.Ordinal(9), 'int32', None),
+ ast.StructField('a10', None, ast.Ordinal(10), 'int32',
+ None),
+ ast.StructField('a11', None, ast.Ordinal(11), 'int32',
+ None),
+ ast.StructField('a29', None, ast.Ordinal(29), 'int32',
+ None),
+ ast.StructField('a1234567890', None,
+ ast.Ordinal(1234567890), 'int32', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidOrdinals(self):
+ """Tests that (lexically) invalid ordinals are correctly detected."""
+
+ source1 = """\
+ module my_module;
+
+ struct MyStruct {
+ int32 a_missing@;
+ };
+ """
+ with self.assertRaisesRegexp(
+ lexer.LexError, r"^my_file\.mojom:4: Error: Missing ordinal value$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ module my_module;
+
+ struct MyStruct {
+ int32 a_octal@01;
+ };
+ """
+ with self.assertRaisesRegexp(
+ lexer.LexError, r"^my_file\.mojom:4: Error: "
+ r"Octal and hexadecimal ordinal values not allowed$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ module my_module; struct MyStruct { int32 a_invalid_octal@08; };
+ """
+ with self.assertRaisesRegexp(
+ lexer.LexError, r"^my_file\.mojom:1: Error: "
+ r"Octal and hexadecimal ordinal values not allowed$"):
+ parser.Parse(source3, "my_file.mojom")
+
+ source4 = "module my_module; struct MyStruct { int32 a_hex@0x1aB9; };"
+ with self.assertRaisesRegexp(
+ lexer.LexError, r"^my_file\.mojom:1: Error: "
+ r"Octal and hexadecimal ordinal values not allowed$"):
+ parser.Parse(source4, "my_file.mojom")
+
+ source5 = "module my_module; struct MyStruct { int32 a_hex@0X0; };"
+ with self.assertRaisesRegexp(
+ lexer.LexError, r"^my_file\.mojom:1: Error: "
+ r"Octal and hexadecimal ordinal values not allowed$"):
+ parser.Parse(source5, "my_file.mojom")
+
+ source6 = """\
+ struct MyStruct {
+ int32 a_too_big@999999999999;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: "
+ r"Ordinal value 999999999999 too large:\n"
+ r" *int32 a_too_big@999999999999;$"):
+ parser.Parse(source6, "my_file.mojom")
+
+ def testNestedNamespace(self):
+ """Tests that "nested" namespaces work."""
+
+ source = """\
+ module my.mod;
+
+ struct MyStruct {
+ int32 a;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my.mod'), None), ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody(ast.StructField('a', None, None, 'int32', None)))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testValidHandleTypes(self):
+ """Tests (valid) handle types."""
+
+ source = """\
+ struct MyStruct {
+ handle a;
+ handle<data_pipe_consumer> b;
+ handle <data_pipe_producer> c;
+ handle < message_pipe > d;
+ handle
+ < shared_buffer
+ > e;
+ handle
+ <platform
+
+ > f;
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a', None, None, 'handle', None),
+ ast.StructField('b', None, None, 'handle<data_pipe_consumer>',
+ None),
+ ast.StructField('c', None, None, 'handle<data_pipe_producer>',
+ None),
+ ast.StructField('d', None, None, 'handle<message_pipe>', None),
+ ast.StructField('e', None, None, 'handle<shared_buffer>', None),
+ ast.StructField('f', None, None, 'handle<platform>', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidHandleType(self):
+ """Tests an invalid (unknown) handle type."""
+
+ source = """\
+ struct MyStruct {
+ handle<wtf_is_this> foo;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: "
+ r"Invalid handle type 'wtf_is_this':\n"
+ r" *handle<wtf_is_this> foo;$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testValidDefaultValues(self):
+ """Tests default values that are valid (to the parser)."""
+
+ source = """\
+ struct MyStruct {
+ int16 a0 = 0;
+ uint16 a1 = 0x0;
+ uint16 a2 = 0x00;
+ uint16 a3 = 0x01;
+ uint16 a4 = 0xcd;
+ int32 a5 = 12345;
+ int64 a6 = -12345;
+ int64 a7 = +12345;
+ uint32 a8 = 0x12cd3;
+ uint32 a9 = -0x12cD3;
+ uint32 a10 = +0x12CD3;
+ bool a11 = true;
+ bool a12 = false;
+ float a13 = 1.2345;
+ float a14 = -1.2345;
+ float a15 = +1.2345;
+ float a16 = 123.;
+ float a17 = .123;
+ double a18 = 1.23E10;
+ double a19 = 1.E-10;
+ double a20 = .5E+10;
+ double a21 = -1.23E10;
+ double a22 = +.123E10;
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a0', None, None, 'int16', '0'),
+ ast.StructField('a1', None, None, 'uint16', '0x0'),
+ ast.StructField('a2', None, None, 'uint16', '0x00'),
+ ast.StructField('a3', None, None, 'uint16', '0x01'),
+ ast.StructField('a4', None, None, 'uint16', '0xcd'),
+ ast.StructField('a5', None, None, 'int32', '12345'),
+ ast.StructField('a6', None, None, 'int64', '-12345'),
+ ast.StructField('a7', None, None, 'int64', '+12345'),
+ ast.StructField('a8', None, None, 'uint32', '0x12cd3'),
+ ast.StructField('a9', None, None, 'uint32', '-0x12cD3'),
+ ast.StructField('a10', None, None, 'uint32', '+0x12CD3'),
+ ast.StructField('a11', None, None, 'bool', 'true'),
+ ast.StructField('a12', None, None, 'bool', 'false'),
+ ast.StructField('a13', None, None, 'float', '1.2345'),
+ ast.StructField('a14', None, None, 'float', '-1.2345'),
+ ast.StructField('a15', None, None, 'float', '+1.2345'),
+ ast.StructField('a16', None, None, 'float', '123.'),
+ ast.StructField('a17', None, None, 'float', '.123'),
+ ast.StructField('a18', None, None, 'double', '1.23E10'),
+ ast.StructField('a19', None, None, 'double', '1.E-10'),
+ ast.StructField('a20', None, None, 'double', '.5E+10'),
+ ast.StructField('a21', None, None, 'double', '-1.23E10'),
+ ast.StructField('a22', None, None, 'double', '+.123E10')
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testValidFixedSizeArray(self):
+ """Tests parsing a fixed size array."""
+
+ source = """\
+ struct MyStruct {
+ array<int32> normal_array;
+ array<int32, 1> fixed_size_array_one_entry;
+ array<int32, 10> fixed_size_array_ten_entries;
+ array<array<array<int32, 1>>, 2> nested_arrays;
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('normal_array', None, None, 'int32[]', None),
+ ast.StructField('fixed_size_array_one_entry', None, None,
+ 'int32[1]', None),
+ ast.StructField('fixed_size_array_ten_entries', None, None,
+ 'int32[10]', None),
+ ast.StructField('nested_arrays', None, None, 'int32[1][][2]',
+ None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testValidNestedArray(self):
+ """Tests parsing a nested array."""
+
+ source = "struct MyStruct { array<array<int32>> nested_array; };"
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody(
+ ast.StructField('nested_array', None, None, 'int32[][]', None)))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidFixedArraySize(self):
+ """Tests that invalid fixed array bounds are correctly detected."""
+
+ source1 = """\
+ struct MyStruct {
+ array<int32, 0> zero_size_array;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Fixed array size 0 invalid:\n"
+ r" *array<int32, 0> zero_size_array;$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ struct MyStruct {
+ array<int32, 999999999999> too_big_array;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Fixed array size 999999999999 invalid:\n"
+ r" *array<int32, 999999999999> too_big_array;$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ struct MyStruct {
+ array<int32, abcdefg> not_a_number;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'abcdefg':\n"
+ r" *array<int32, abcdefg> not_a_number;"):
+ parser.Parse(source3, "my_file.mojom")
+
+ def testValidAssociativeArrays(self):
+ """Tests that we can parse valid associative array structures."""
+
+ source1 = "struct MyStruct { map<string, uint8> data; };"
+ expected1 = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody(
+ [ast.StructField('data', None, None, 'uint8{string}', None)]))
+ ])
+ self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
+
+ source2 = "interface MyInterface { MyMethod(map<string, uint8> a); };"
+ expected2 = ast.Mojom(None, ast.ImportList(), [
+ ast.Interface(
+ 'MyInterface', None,
+ ast.InterfaceBody(
+ ast.Method(
+ 'MyMethod', None, None,
+ ast.ParameterList(
+ ast.Parameter('a', None, None, 'uint8{string}')),
+ None)))
+ ])
+ self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
+
+ source3 = "struct MyStruct { map<string, array<uint8>> data; };"
+ expected3 = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody(
+ [ast.StructField('data', None, None, 'uint8[]{string}', None)]))
+ ])
+ self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
+
+ def testValidMethod(self):
+ """Tests parsing method declarations."""
+
+ source1 = "interface MyInterface { MyMethod(int32 a); };"
+ expected1 = ast.Mojom(None, ast.ImportList(), [
+ ast.Interface(
+ 'MyInterface', None,
+ ast.InterfaceBody(
+ ast.Method(
+ 'MyMethod', None, None,
+ ast.ParameterList(ast.Parameter('a', None, None, 'int32')),
+ None)))
+ ])
+ self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
+
+ source2 = """\
+ interface MyInterface {
+ MyMethod1@0(int32 a@0, int64 b@1);
+ MyMethod2@1() => ();
+ };
+ """
+ expected2 = ast.Mojom(None, ast.ImportList(), [
+ ast.Interface(
+ 'MyInterface', None,
+ ast.InterfaceBody([
+ ast.Method(
+ 'MyMethod1', None, ast.Ordinal(0),
+ ast.ParameterList([
+ ast.Parameter('a', None, ast.Ordinal(0), 'int32'),
+ ast.Parameter('b', None, ast.Ordinal(1), 'int64')
+ ]), None),
+ ast.Method('MyMethod2', None, ast.Ordinal(1),
+ ast.ParameterList(), ast.ParameterList())
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
+
+ source3 = """\
+ interface MyInterface {
+ MyMethod(string a) => (int32 a, bool b);
+ };
+ """
+ expected3 = ast.Mojom(None, ast.ImportList(), [
+ ast.Interface(
+ 'MyInterface', None,
+ ast.InterfaceBody(
+ ast.Method(
+ 'MyMethod', None, None,
+ ast.ParameterList(ast.Parameter('a', None, None, 'string')),
+ ast.ParameterList([
+ ast.Parameter('a', None, None, 'int32'),
+ ast.Parameter('b', None, None, 'bool')
+ ]))))
+ ])
+ self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
+
+ def testInvalidMethods(self):
+ """Tests that invalid method declarations are correctly detected."""
+
+ # No trailing commas.
+ source1 = """\
+ interface MyInterface {
+ MyMethod(string a,);
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\)':\n"
+ r" *MyMethod\(string a,\);$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ # No leading commas.
+ source2 = """\
+ interface MyInterface {
+ MyMethod(, string a);
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected ',':\n"
+ r" *MyMethod\(, string a\);$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ def testValidInterfaceDefinitions(self):
+ """Tests all types of definitions that can occur in an interface."""
+
+ source = """\
+ interface MyInterface {
+ enum MyEnum { VALUE };
+ const int32 kMyConst = 123;
+ MyMethod(int32 x) => (MyEnum y);
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Interface(
+ 'MyInterface', None,
+ ast.InterfaceBody([
+ ast.Enum('MyEnum', None,
+ ast.EnumValueList(ast.EnumValue('VALUE', None, None))),
+ ast.Const('kMyConst', None, 'int32', '123'),
+ ast.Method(
+ 'MyMethod', None, None,
+ ast.ParameterList(ast.Parameter('x', None, None, 'int32')),
+ ast.ParameterList(ast.Parameter('y', None, None, 'MyEnum')))
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidInterfaceDefinitions(self):
+ """Tests that definitions that aren't allowed in an interface are correctly
+ detected."""
+
+ source1 = """\
+ interface MyInterface {
+ struct MyStruct {
+ int32 a;
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
+ r" *struct MyStruct {$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ interface MyInterface {
+ interface MyInnerInterface {
+ MyMethod(int32 x);
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Unexpected 'interface':\n"
+ r" *interface MyInnerInterface {$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ interface MyInterface {
+ int32 my_field;
+ };
+ """
+ # The parser thinks that "int32" is a plausible name for a method, so it's
+ # "my_field" that gives it away.
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'my_field':\n"
+ r" *int32 my_field;$"):
+ parser.Parse(source3, "my_file.mojom")
+
+ def testValidAttributes(self):
+ """Tests parsing attributes (and attribute lists)."""
+
+ # Note: We use structs because they have (optional) attribute lists.
+
+ # Empty attribute list.
+ source1 = "[] struct MyStruct {};"
+ expected1 = ast.Mojom(
+ None, ast.ImportList(),
+ [ast.Struct('MyStruct', ast.AttributeList(), ast.StructBody())])
+ self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
+
+ # One-element attribute list, with name value.
+ source2 = "[MyAttribute=MyName] struct MyStruct {};"
+ expected2 = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct('MyStruct',
+ ast.AttributeList(ast.Attribute("MyAttribute", "MyName")),
+ ast.StructBody())
+ ])
+ self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
+
+ # Two-element attribute list, with one string value and one integer value.
+ source3 = "[MyAttribute1 = \"hello\", MyAttribute2 = 5] struct MyStruct {};"
+ expected3 = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct',
+ ast.AttributeList([
+ ast.Attribute("MyAttribute1", "hello"),
+ ast.Attribute("MyAttribute2", 5)
+ ]), ast.StructBody())
+ ])
+ self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
+
+ # Various places that attribute list is allowed.
+ source4 = """\
+ [Attr0=0] module my_module;
+
+ [Attr1=1] import "my_import";
+
+ [Attr2=2] struct MyStruct {
+ [Attr3=3] int32 a;
+ };
+ [Attr4=4] union MyUnion {
+ [Attr5=5] int32 a;
+ };
+        [Attr6=6] enum MyEnum {
+          [Attr7=7] VALUE
+        };
+ [Attr8=8] interface MyInterface {
+ [Attr9=9] MyMethod([Attr10=10] int32 a) => ([Attr11=11] bool b);
+ };
+ [Attr12=12] const double kMyConst = 1.23;
+ """
+ expected4 = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'),
+ ast.AttributeList([ast.Attribute("Attr0", 0)])),
+ ast.ImportList(
+ ast.Import(
+ ast.AttributeList([ast.Attribute("Attr1", 1)]), "my_import")),
+ [
+ ast.Struct(
+ 'MyStruct', ast.AttributeList(ast.Attribute("Attr2", 2)),
+ ast.StructBody(
+ ast.StructField(
+ 'a', ast.AttributeList([ast.Attribute("Attr3", 3)]),
+ None, 'int32', None))),
+ ast.Union(
+ 'MyUnion', ast.AttributeList(ast.Attribute("Attr4", 4)),
+ ast.UnionBody(
+ ast.UnionField(
+ 'a', ast.AttributeList([ast.Attribute("Attr5", 5)]),
+ None, 'int32'))),
+ ast.Enum(
+ 'MyEnum', ast.AttributeList(ast.Attribute("Attr6", 6)),
+ ast.EnumValueList(
+ ast.EnumValue(
+ 'VALUE', ast.AttributeList([ast.Attribute("Attr7", 7)]),
+ None))),
+ ast.Interface(
+ 'MyInterface', ast.AttributeList(ast.Attribute("Attr8", 8)),
+ ast.InterfaceBody(
+ ast.Method(
+ 'MyMethod', ast.AttributeList(
+ ast.Attribute("Attr9", 9)), None,
+ ast.ParameterList(
+ ast.Parameter(
+ 'a',
+ ast.AttributeList([ast.Attribute("Attr10", 10)
+ ]), None, 'int32')),
+ ast.ParameterList(
+ ast.Parameter(
+ 'b',
+ ast.AttributeList([ast.Attribute("Attr11", 11)
+ ]), None, 'bool'))))),
+ ast.Const('kMyConst', ast.AttributeList(
+ ast.Attribute("Attr12", 12)), 'double', '1.23')
+ ])
+ self.assertEquals(parser.Parse(source4, "my_file.mojom"), expected4)
+
+ # TODO(vtl): Boolean attributes don't work yet. (In fact, we just |eval()|
+ # literal (non-name) values, which is extremely dubious.)
+
+ def testInvalidAttributes(self):
+ """Tests that invalid attributes and attribute lists are correctly
+ detected."""
+
+ # Trailing commas not allowed.
+ source1 = "[MyAttribute=MyName,] struct MyStruct {};"
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '\]':\n"
+ r"\[MyAttribute=MyName,\] struct MyStruct {};$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ # Missing value.
+ source2 = "[MyAttribute=] struct MyStruct {};"
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '\]':\n"
+ r"\[MyAttribute=\] struct MyStruct {};$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ # Missing key.
+ source3 = "[=MyName] struct MyStruct {};"
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '=':\n"
+ r"\[=MyName\] struct MyStruct {};$"):
+ parser.Parse(source3, "my_file.mojom")
+
+ def testValidImports(self):
+ """Tests parsing import statements."""
+
+ # One import (no module statement).
+ source1 = "import \"somedir/my.mojom\";"
+ expected1 = ast.Mojom(None,
+ ast.ImportList(ast.Import(None, "somedir/my.mojom")),
+ [])
+ self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
+
+ # Two imports (no module statement).
+ source2 = """\
+ import "somedir/my1.mojom";
+ import "somedir/my2.mojom";
+ """
+ expected2 = ast.Mojom(
+ None,
+ ast.ImportList([
+ ast.Import(None, "somedir/my1.mojom"),
+ ast.Import(None, "somedir/my2.mojom")
+ ]), [])
+ self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
+
+ # Imports with module statement.
+ source3 = """\
+ module my_module;
+ import "somedir/my1.mojom";
+ import "somedir/my2.mojom";
+ """
+ expected3 = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None),
+ ast.ImportList([
+ ast.Import(None, "somedir/my1.mojom"),
+ ast.Import(None, "somedir/my2.mojom")
+ ]), [])
+ self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
+
+ def testInvalidImports(self):
+ """Tests that invalid import statements are correctly detected."""
+
+ source1 = """\
+ // Make the error occur on line 2.
+ import invalid
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'invalid':\n"
+ r" *import invalid$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ import // Missing string.
+ struct MyStruct {
+ int32 a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
+ r" *struct MyStruct {$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ import "foo.mojom" // Missing semicolon.
+ struct MyStruct {
+ int32 a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
+ r" *struct MyStruct {$"):
+ parser.Parse(source3, "my_file.mojom")
+
+ def testValidNullableTypes(self):
+ """Tests parsing nullable types."""
+
+ source = """\
+ struct MyStruct {
+ int32? a; // This is actually invalid, but handled at a different
+ // level.
+ string? b;
+ array<int32> ? c;
+ array<string ? > ? d;
+ array<array<int32>?>? e;
+ array<int32, 1>? f;
+ array<string?, 1>? g;
+ some_struct? h;
+ handle? i;
+ handle<data_pipe_consumer>? j;
+ handle<data_pipe_producer>? k;
+ handle<message_pipe>? l;
+ handle<shared_buffer>? m;
+ pending_receiver<some_interface>? n;
+ handle<platform>? o;
+ };
+ """
+ expected = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a', None, None, 'int32?', None),
+ ast.StructField('b', None, None, 'string?', None),
+ ast.StructField('c', None, None, 'int32[]?', None),
+ ast.StructField('d', None, None, 'string?[]?', None),
+ ast.StructField('e', None, None, 'int32[]?[]?', None),
+ ast.StructField('f', None, None, 'int32[1]?', None),
+ ast.StructField('g', None, None, 'string?[1]?', None),
+ ast.StructField('h', None, None, 'some_struct?', None),
+ ast.StructField('i', None, None, 'handle?', None),
+ ast.StructField('j', None, None, 'handle<data_pipe_consumer>?',
+ None),
+ ast.StructField('k', None, None, 'handle<data_pipe_producer>?',
+ None),
+ ast.StructField('l', None, None, 'handle<message_pipe>?', None),
+ ast.StructField('m', None, None, 'handle<shared_buffer>?',
+ None),
+ ast.StructField('n', None, None, 'rcv<some_interface>?', None),
+ ast.StructField('o', None, None, 'handle<platform>?', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
+
+ def testInvalidNullableTypes(self):
+ """Tests that invalid nullable types are correctly detected."""
+ source1 = """\
+ struct MyStruct {
+ string?? a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\?':\n"
+ r" *string\?\? a;$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ struct MyStruct {
+ handle?<data_pipe_consumer> a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '<':\n"
+ r" *handle\?<data_pipe_consumer> a;$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ def testSimpleUnion(self):
+ """Tests a simple .mojom source that just defines a union."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ int32 a;
+ double b;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Union(
+ 'MyUnion', None,
+ ast.UnionBody([
+ ast.UnionField('a', None, None, 'int32'),
+ ast.UnionField('b', None, None, 'double')
+ ]))
+ ])
+ actual = parser.Parse(source, "my_file.mojom")
+ self.assertEquals(actual, expected)
+
+ def testUnionWithOrdinals(self):
+ """Test that ordinals are assigned to fields."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ int32 a @10;
+ double b @30;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Union(
+ 'MyUnion', None,
+ ast.UnionBody([
+ ast.UnionField('a', None, ast.Ordinal(10), 'int32'),
+ ast.UnionField('b', None, ast.Ordinal(30), 'double')
+ ]))
+ ])
+ actual = parser.Parse(source, "my_file.mojom")
+ self.assertEquals(actual, expected)
+
+ def testUnionWithStructMembers(self):
+ """Test that struct members are accepted."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ SomeStruct s;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Union(
+ 'MyUnion', None,
+ ast.UnionBody([ast.UnionField('s', None, None, 'SomeStruct')]))
+ ])
+ actual = parser.Parse(source, "my_file.mojom")
+ self.assertEquals(actual, expected)
+
+ def testUnionWithArrayMember(self):
+ """Test that array members are accepted."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ array<int32> a;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Union(
+ 'MyUnion', None,
+ ast.UnionBody([ast.UnionField('a', None, None, 'int32[]')]))
+ ])
+ actual = parser.Parse(source, "my_file.mojom")
+ self.assertEquals(actual, expected)
+
+ def testUnionWithMapMember(self):
+ """Test that map members are accepted."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ map<int32, string> m;
+ };
+ """
+ expected = ast.Mojom(
+ ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
+ ast.Union(
+ 'MyUnion', None,
+ ast.UnionBody(
+ [ast.UnionField('m', None, None, 'string{int32}')]))
+ ])
+ actual = parser.Parse(source, "my_file.mojom")
+ self.assertEquals(actual, expected)
+
+ def testUnionDisallowNestedStruct(self):
+ """Tests that structs cannot be nested in unions."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ struct MyStruct {
+ int32 a;
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'struct':\n"
+ r" *struct MyStruct {$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testUnionDisallowNestedInterfaces(self):
+ """Tests that interfaces cannot be nested in unions."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ interface MyInterface {
+ MyMethod(int32 a);
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:4: Error: Unexpected 'interface':\n"
+ r" *interface MyInterface {$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testUnionDisallowNestedUnion(self):
+ """Tests that unions cannot be nested in unions."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ union MyOtherUnion {
+ int32 a;
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'union':\n"
+ r" *union MyOtherUnion {$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testUnionDisallowNestedEnum(self):
+ """Tests that enums cannot be nested in unions."""
+ source = """\
+ module my_module;
+
+ union MyUnion {
+ enum MyEnum {
+ A,
+ };
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'enum':\n"
+ r" *enum MyEnum {$"):
+ parser.Parse(source, "my_file.mojom")
+
+ def testValidAssociatedKinds(self):
+ """Tests parsing associated interfaces and requests."""
+ source1 = """\
+ struct MyStruct {
+ associated MyInterface a;
+ pending_associated_receiver<MyInterface> b;
+ associated MyInterface? c;
+ pending_associated_receiver<MyInterface>? d;
+ };
+ """
+ expected1 = ast.Mojom(None, ast.ImportList(), [
+ ast.Struct(
+ 'MyStruct', None,
+ ast.StructBody([
+ ast.StructField('a', None, None, 'asso<MyInterface>', None),
+ ast.StructField('b', None, None, 'rca<MyInterface>', None),
+ ast.StructField('c', None, None, 'asso<MyInterface>?', None),
+ ast.StructField('d', None, None, 'rca<MyInterface>?', None)
+ ]))
+ ])
+ self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
+
+ source2 = """\
+ interface MyInterface {
+ MyMethod(associated A a) =>(pending_associated_receiver<B> b);
+ };"""
+ expected2 = ast.Mojom(None, ast.ImportList(), [
+ ast.Interface(
+ 'MyInterface', None,
+ ast.InterfaceBody(
+ ast.Method(
+ 'MyMethod', None, None,
+ ast.ParameterList(ast.Parameter('a', None, None,
+ 'asso<A>')),
+ ast.ParameterList(ast.Parameter('b', None, None,
+ 'rca<B>')))))
+ ])
+ self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
+
+ def testInvalidAssociatedKinds(self):
+ """Tests that invalid associated interfaces and requests are correctly
+ detected."""
+ source1 = """\
+ struct MyStruct {
+ associated associated SomeInterface a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError,
+ r"^my_file\.mojom:2: Error: Unexpected 'associated':\n"
+ r" *associated associated SomeInterface a;$"):
+ parser.Parse(source1, "my_file.mojom")
+
+ source2 = """\
+ struct MyStruct {
+ associated handle a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'handle':\n"
+ r" *associated handle a;$"):
+ parser.Parse(source2, "my_file.mojom")
+
+ source3 = """\
+ struct MyStruct {
+ associated? MyInterface& a;
+ };
+ """
+ with self.assertRaisesRegexp(
+ parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\?':\n"
+ r" *associated\? MyInterface& a;$"):
+ parser.Parse(source3, "my_file.mojom")
+
+if __name__ == "__main__":
+ unittest.main()
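+
+# These tests can be run directly, e.g. with
+#   python3 -m unittest mojom.parse.parser_unittest
+# from the tools/mojom directory (assuming the mojom package is importable
+# from there, as the imports above require).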
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
new file mode 100755
index 00000000..9693090e
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
@@ -0,0 +1,502 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Parses mojom IDL files.
+
+This script parses one or more input mojom files and produces corresponding
+module files fully describing the definitions contained within each mojom. The
+module data is pickled and can be easily consumed by other tools to, e.g.,
+generate usable language bindings.
+"""
+
+import argparse
+import builtins
+import codecs
+import errno
+import json
+import logging
+import multiprocessing
+import os
+import os.path
+import sys
+import traceback
+from collections import defaultdict
+
+from mojom.generate import module
+from mojom.generate import translate
+from mojom.parse import parser
+from mojom.parse import conditional_features
+
+
+# Disable this for easier debugging.
+_ENABLE_MULTIPROCESSING = True
+
+# https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
+if __name__ == '__main__' and sys.platform == 'darwin':
+ multiprocessing.set_start_method('fork')
+_MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
+
+
+def _ResolveRelativeImportPath(path, roots):
+ """Attempts to resolve a relative import path against a set of possible roots.
+
+ Args:
+ path: The relative import path to resolve.
+ roots: A list of absolute paths which will be checked in descending length
+ order for a match against path.
+
+ Returns:
+ A normalized absolute path combining one of the roots with the input path if
+ and only if such a file exists.
+
+ Raises:
+ ValueError: The path could not be resolved against any of the given roots.
+ """
+ for root in reversed(sorted(roots, key=len)):
+ abs_path = os.path.join(root, path)
+ if os.path.isfile(abs_path):
+ return os.path.normcase(os.path.normpath(abs_path))
+
+ raise ValueError('"%s" does not exist in any of %s' % (path, roots))
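+
+# For example (hypothetical paths), with roots = ['/src', '/src/out/gen'],
+# _ResolveRelativeImportPath('foo/foo.mojom', roots) checks
+# '/src/out/gen/foo/foo.mojom' first (longest root), then
+# '/src/foo/foo.mojom', and raises ValueError if neither file exists.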
+
+
+def RebaseAbsolutePath(path, roots):
+ """Rewrites an absolute file path as relative to an absolute directory path in
+ roots.
+
+ Args:
+ path: The absolute path of an existing file.
+ roots: A list of absolute directory paths. The given path argument must fall
+ within one of these directories.
+
+ Returns:
+ A path equivalent to the input path, but relative to one of the provided
+ roots. If the input path falls within multiple roots, the longest root is
+ chosen (and thus the shortest relative path is returned).
+
+ Paths returned by this method always use forward slashes as a separator to
+ mirror mojom import syntax.
+
+ Raises:
+ ValueError if the given path does not fall within any of the listed roots.
+ """
+ assert os.path.isabs(path)
+ assert os.path.isfile(path)
+ assert all(map(os.path.isabs, roots))
+
+ sorted_roots = list(reversed(sorted(roots, key=len)))
+
+ def try_rebase_path(path, root):
+ head, rebased_path = os.path.split(path)
+ while head != root:
+ head, tail = os.path.split(head)
+ if not tail:
+ return None
+ rebased_path = os.path.join(tail, rebased_path)
+ return rebased_path
+
+ for root in sorted_roots:
+ relative_path = try_rebase_path(path, root)
+ if relative_path:
+ # TODO(crbug.com/953884): Use pathlib for this kind of thing once we're
+ # fully migrated to Python 3.
+ return relative_path.replace('\\', '/')
+
+ raise ValueError('%s does not fall within any of %s' % (path, sorted_roots))
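+
+# For example (hypothetical paths), with roots = ['/src', '/src/gen'],
+# RebaseAbsolutePath('/src/gen/foo/foo.mojom', roots) returns 'foo/foo.mojom':
+# the longest matching root is preferred, and the result always uses forward
+# slashes to mirror mojom import syntax.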
+
+
+def _GetModuleFilename(mojom_filename):
+ return mojom_filename + '-module'
+
+
+def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
+ dependencies, loaded_modules, module_metadata):
+ """Recursively ensures that a module and its dependencies are loaded.
+
+ Args:
+ mojom_abspath: An absolute file path pointing to a mojom file to load.
+ module_path: The relative path used to identify mojom_abspath.
+ abs_paths: A mapping from module paths to absolute file paths for all
+ inputs given to this execution of the script.
+ asts: A map from each input mojom's absolute path to its parsed AST.
+ dependencies: A mapping of which input mojoms depend on each other, indexed
+ by absolute file path.
+ loaded_modules: A mapping of all modules loaded so far, including non-input
+ modules that were pulled in as transitive dependencies of the inputs.
+ module_metadata: Metadata to be attached to every module loaded by this
+ helper.
+
+ Returns:
+ None
+
+ On return, loaded_modules will be populated with the loaded input mojom's
+ Module as well as the Modules of all of its transitive dependencies."""
+
+ if mojom_abspath in loaded_modules:
+ # Already done.
+ return
+
+ for dep_abspath, dep_path in sorted(dependencies[mojom_abspath]):
+ if dep_abspath not in loaded_modules:
+ _EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
+ loaded_modules, module_metadata)
+
+ imports = {}
+ for imp in asts[mojom_abspath].import_list:
+ path = imp.import_filename
+ imports[path] = loaded_modules[abs_paths[path]]
+ loaded_modules[mojom_abspath] = translate.OrderedModule(
+ asts[mojom_abspath], module_path, imports)
+ loaded_modules[mojom_abspath].metadata = dict(module_metadata)
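+
+# For example (hypothetical inputs): if a.mojom imports b.mojom and both are
+# inputs, _EnsureInputLoaded(a, ...) first recurses into b.mojom, so b's
+# Module is already present in loaded_modules when a's Module is translated
+# with b listed among its imports.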
+
+
+def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
+ allowed_imports = set()
+ processed_deps = set()
+
+ def collect(metadata_filename):
+ processed_deps.add(metadata_filename)
+
+ # Paths in the metadata file are relative to the metadata file's dir.
+ metadata_dir = os.path.abspath(os.path.dirname(metadata_filename))
+
+ def to_abs(s):
+ return os.path.normpath(os.path.join(metadata_dir, s))
+
+ with open(metadata_filename) as f:
+ metadata = json.load(f)
+ allowed_imports.update(
+ [os.path.normcase(to_abs(s)) for s in metadata['sources']])
+ for dep_metadata in metadata['deps']:
+ dep_metadata = to_abs(dep_metadata)
+ if dep_metadata not in processed_deps:
+ collect(dep_metadata)
+
+ collect(build_metadata_filename)
+ return allowed_imports
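+
+# The metadata files consumed above are JSON of the following shape
+# (illustrative contents; paths are relative to the metadata file itself):
+#
+#   {
+#     "sources": ["foo/foo.mojom", "foo/bar.mojom"],
+#     "deps": ["../base/base_build_metadata.json"]
+#   }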
+
+
+# multiprocessing helper.
+def _ParseAstHelper(mojom_abspath, enabled_features):
+ with codecs.open(mojom_abspath, encoding='utf-8') as f:
+ ast = parser.Parse(f.read(), mojom_abspath)
+ conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
+ return mojom_abspath, ast
+
+
+# multiprocessing helper.
+def _SerializeHelper(mojom_abspath, mojom_path):
+ module_path = os.path.join(_SerializeHelper.output_root_path,
+ _GetModuleFilename(mojom_path))
+ module_dir = os.path.dirname(module_path)
+ if not os.path.exists(module_dir):
+ try:
+ # Python 2 doesn't support exist_ok on makedirs(), so we just ignore
+ # that failure if it happens. It's possible during build due to races
+ # among build steps with module outputs in the same directory.
+ os.makedirs(module_dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ with open(module_path, 'wb') as f:
+ _SerializeHelper.loaded_modules[mojom_abspath].Dump(f)
+
+
+class _ExceptionWrapper:
+ def __init__(self):
+ # Do not capture exception object to ensure pickling works.
+ self.formatted_trace = traceback.format_exc()
+
+
+class _FuncWrapper:
+ """Marshals exceptions and spreads args."""
+
+ def __init__(self, func):
+ self._func = func
+
+ def __call__(self, args):
+    # multiprocessing does not gracefully handle exceptions.
+ # https://crbug.com/1219044
+ try:
+ return self._func(*args)
+ except: # pylint: disable=bare-except
+ return _ExceptionWrapper()
+
+
+def _Shard(target_func, arg_list, processes=None):
+ arg_list = list(arg_list)
+ if processes is None:
+ processes = multiprocessing.cpu_count()
+ # Seems optimal to have each process perform at least 2 tasks.
+ processes = min(processes, len(arg_list) // 2)
+
+ if sys.platform == 'win32':
+ # TODO(crbug.com/1190269) - we can't use more than 56
+ # cores on Windows or Python3 may hang.
+ processes = min(processes, 56)
+
+ # Don't spin up processes unless there is enough work to merit doing so.
+ if not _ENABLE_MULTIPROCESSING or processes < 2:
+ for arg_tuple in arg_list:
+ yield target_func(*arg_tuple)
+ return
+
+ pool = multiprocessing.Pool(processes=processes)
+ try:
+ wrapped_func = _FuncWrapper(target_func)
+ for result in pool.imap_unordered(wrapped_func, arg_list):
+ if isinstance(result, _ExceptionWrapper):
+ sys.stderr.write(result.formatted_trace)
+ sys.exit(1)
+ yield result
+ finally:
+ pool.close()
+ pool.join() # Needed on Windows to avoid WindowsError during terminate.
+ pool.terminate()
+
+
+def _ParseMojoms(mojom_files,
+ input_root_paths,
+ output_root_path,
+ module_root_paths,
+ enabled_features,
+ module_metadata,
+ allowed_imports=None):
+ """Parses a set of mojom files and produces serialized module outputs.
+
+ Args:
+ mojom_files: A list of mojom files to process. Paths must be absolute paths
+ which fall within one of the input or output root paths.
+ input_root_paths: A list of absolute filesystem paths which may be used to
+ resolve relative mojom file paths.
+    output_root_path: An absolute filesystem path which will serve as the root
+ for all emitted artifacts. Artifacts produced from a given mojom file
+ are based on the mojom's relative path, rebased onto this path.
+ Additionally, the script expects this root to contain already-generated
+ modules for any transitive dependencies not listed in mojom_files.
+ module_root_paths: A list of absolute filesystem paths which contain
+ already-generated modules for any non-transitive dependencies.
+ enabled_features: A list of enabled feature names, controlling which AST
+ nodes are filtered by [EnableIf] or [EnableIfNot] attributes.
+ module_metadata: A list of 2-tuples representing metadata key-value pairs to
+ attach to each compiled module output.
+
+ Returns:
+ None.
+
+ Upon completion, a mojom-module file will be saved for each input mojom.
+ """
+ assert input_root_paths
+ assert output_root_path
+
+ loaded_mojom_asts = {}
+ loaded_modules = {}
+ input_dependencies = defaultdict(set)
+ mojom_files_to_parse = dict((os.path.normcase(abs_path),
+ RebaseAbsolutePath(abs_path, input_root_paths))
+ for abs_path in mojom_files)
+ abs_paths = dict(
+ (path, abs_path) for abs_path, path in mojom_files_to_parse.items())
+
+ logging.info('Parsing %d .mojom into ASTs', len(mojom_files_to_parse))
+ map_args = ((mojom_abspath, enabled_features)
+ for mojom_abspath in mojom_files_to_parse)
+ for mojom_abspath, ast in _Shard(_ParseAstHelper, map_args):
+ loaded_mojom_asts[mojom_abspath] = ast
+
+ logging.info('Processing dependencies')
+ for mojom_abspath, ast in sorted(loaded_mojom_asts.items()):
+ invalid_imports = []
+ for imp in ast.import_list:
+ import_abspath = _ResolveRelativeImportPath(imp.import_filename,
+ input_root_paths)
+ if allowed_imports and import_abspath not in allowed_imports:
+ invalid_imports.append(imp.import_filename)
+
+ abs_paths[imp.import_filename] = import_abspath
+ if import_abspath in mojom_files_to_parse:
+ # This import is in the input list, so we're going to translate it
+ # into a module below; however it's also a dependency of another input
+ # module. We retain record of dependencies to help with input
+ # processing later.
+ input_dependencies[mojom_abspath].add(
+ (import_abspath, imp.import_filename))
+ elif import_abspath not in loaded_modules:
+ # We have an import that isn't being parsed right now. It must already
+ # be parsed and have a module file sitting in a corresponding output
+ # location.
+ module_path = _GetModuleFilename(imp.import_filename)
+ module_abspath = _ResolveRelativeImportPath(
+ module_path, module_root_paths + [output_root_path])
+ with open(module_abspath, 'rb') as module_file:
+ loaded_modules[import_abspath] = module.Module.Load(module_file)
+
+ if invalid_imports:
+ raise ValueError(
+ '\nThe file %s imports the following files not allowed by build '
+ 'dependencies:\n\n%s\n' % (mojom_abspath, '\n'.join(invalid_imports)))
+ logging.info('Loaded %d modules from dependencies', len(loaded_modules))
+
+ # At this point all transitive imports not listed as inputs have been loaded
+ # and we have a complete dependency tree of the unprocessed inputs. Now we can
+ # load all the inputs, resolving dependencies among them recursively as we go.
+ logging.info('Ensuring inputs are loaded')
+ num_existing_modules_loaded = len(loaded_modules)
+ for mojom_abspath, mojom_path in mojom_files_to_parse.items():
+ _EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts,
+ input_dependencies, loaded_modules, module_metadata)
+ assert (num_existing_modules_loaded +
+ len(mojom_files_to_parse) == len(loaded_modules))
+
+ # Now we have fully translated modules for every input and every transitive
+ # dependency. We can dump the modules to disk for other tools to use.
+ logging.info('Serializing %d modules', len(mojom_files_to_parse))
+
+  # Windows does not use fork() for multiprocessing, so we'd need to pass
+  # loaded_modules via IPC rather than via globals. Doing so is slower than
+  # not using multiprocessing.
+ _SerializeHelper.loaded_modules = loaded_modules
+ _SerializeHelper.output_root_path = output_root_path
+ # Doesn't seem to help past 4. Perhaps IO bound here?
+ processes = 4 if _MULTIPROCESSING_USES_FORK else 0
+ map_args = mojom_files_to_parse.items()
+ for _ in _Shard(_SerializeHelper, map_args, processes=processes):
+ pass
+
+
+def Run(command_line):
+ debug_logging = os.environ.get('MOJOM_PARSER_DEBUG', '0') != '0'
+ logging.basicConfig(level=logging.DEBUG if debug_logging else logging.WARNING,
+ format='%(levelname).1s %(relativeCreated)6d %(message)s')
+ logging.info('Started (%s)', os.path.basename(sys.argv[0]))
+
+ arg_parser = argparse.ArgumentParser(
+ description="""
+Parses one or more mojom files and produces corresponding module outputs fully
+describing the definitions therein. The output is exhaustive, stable, and
+sufficient for another tool to consume and emit e.g. usable language
+bindings based on the original mojoms.""",
+ epilog="""
+Note that each transitive import dependency reachable from the input mojoms must
+either also be listed as an input or must have its corresponding compiled module
+already present in the provided output root.""")
+
+ arg_parser.add_argument(
+ '--input-root',
+ default=[],
+ action='append',
+ metavar='ROOT',
+ dest='input_root_paths',
+ help='Adds ROOT to the set of root paths against which relative input '
+ 'paths should be resolved. Provided root paths are always searched '
+ 'in order from longest absolute path to shortest.')
+ arg_parser.add_argument(
+ '--output-root',
+ action='store',
+ required=True,
+ dest='output_root_path',
+ metavar='ROOT',
+ help='Use ROOT as the root path in which the parser should emit compiled '
+ 'modules for each processed input mojom. The path of emitted module is '
+ 'based on the relative input path, rebased onto this root. Note that '
+ 'ROOT is also searched for existing modules of any transitive imports '
+ 'which were not included in the set of inputs.')
+ arg_parser.add_argument(
+ '--module-root',
+ default=[],
+ action='append',
+ metavar='ROOT',
+ dest='module_root_paths',
+ help='Adds ROOT to the set of root paths to search for existing modules '
+ 'of non-transitive imports. Provided root paths are always searched in '
+ 'order from longest absolute path to shortest.')
+ arg_parser.add_argument(
+ '--mojoms',
+ nargs='+',
+ dest='mojom_files',
+ default=[],
+ metavar='MOJOM_FILE',
+ help='Input mojom filename(s). Each filename must be either an absolute '
+ 'path which falls within one of the given input or output roots, or a '
+ 'relative path the parser will attempt to resolve using each of those '
+ 'roots in unspecified order.')
+ arg_parser.add_argument(
+ '--mojom-file-list',
+ action='store',
+ metavar='LIST_FILENAME',
+ help='Input file whose contents are a list of mojoms to process. This '
+ 'may be provided in lieu of --mojoms to avoid hitting command line '
+    'length limitations')
+ arg_parser.add_argument(
+ '--enable-feature',
+ dest='enabled_features',
+ default=[],
+ action='append',
+ metavar='FEATURE',
+ help='Enables a named feature when parsing the given mojoms. Features '
+ 'are identified by arbitrary string values. Specifying this flag with a '
+ 'given FEATURE name will cause the parser to process any syntax elements '
+ 'tagged with an [EnableIf=FEATURE] or [EnableIfNot] attribute. If this '
+ 'flag is not provided for a given FEATURE, such tagged elements are '
+ 'discarded by the parser and will not be present in the compiled output.')
+ arg_parser.add_argument(
+ '--check-imports',
+ dest='build_metadata_filename',
+ action='store',
+ metavar='METADATA_FILENAME',
+ help='Instructs the parser to check imports against a set of allowed '
+ 'imports. Allowed imports are based on build metadata within '
+ 'METADATA_FILENAME. This is a JSON file with a `sources` key listing '
+ 'paths to the set of input mojom files being processed by this parser '
+ 'run, and a `deps` key listing paths to metadata files for any '
+ 'dependencies of these inputs. This feature can be used to implement '
+ 'build-time dependency checking for mojom imports, where each build '
+ 'metadata file corresponds to a build target in the dependency graph of '
+ 'a typical build system.')
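+ # An illustrative METADATA_FILENAME (hypothetical paths):
+ #
+ #   {
+ #     "sources": ["foo/foo.mojom", "foo/bar.mojom"],
+ #     "deps": ["gen/base/base.build_metadata"]
+ #   }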
+ arg_parser.add_argument(
+ '--add-module-metadata',
+ dest='module_metadata',
+ default=[],
+ action='append',
+ metavar='KEY=VALUE',
+ help='Adds a metadata key-value pair to the output module. This can be '
+ 'used by build toolchains to augment parsed mojom modules with product-'
+ 'specific metadata for later extraction and use by custom bindings '
+ 'generators.')
+
+ args, _ = arg_parser.parse_known_args(command_line)
+ if args.mojom_file_list:
+ with open(args.mojom_file_list) as f:
+ args.mojom_files.extend(f.read().split())
+
+ if not args.mojom_files:
+ raise ValueError(
+ 'Must list at least one mojom file via --mojoms or --mojom-file-list')
+
+ mojom_files = list(map(os.path.abspath, args.mojom_files))
+ input_roots = list(map(os.path.abspath, args.input_root_paths))
+ output_root = os.path.abspath(args.output_root_path)
+ module_roots = list(map(os.path.abspath, args.module_root_paths))
+
+ if args.build_metadata_filename:
+ allowed_imports = _CollectAllowedImportsFromBuildMetadata(
+ args.build_metadata_filename)
+ else:
+ allowed_imports = None
+
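+ # Each KEY=VALUE pair given via --add-module-metadata becomes a (key, value)
+ # tuple, e.g. a hypothetical 'origin=libcamera' yields ('origin', 'libcamera').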
+ module_metadata = list(
+ map(lambda kvp: tuple(kvp.split('=')), args.module_metadata))
+ _ParseMojoms(mojom_files, input_roots, output_root, module_roots,
+ args.enabled_features, module_metadata, allowed_imports)
+ logging.info('Finished')
+
+
+if __name__ == '__main__':
+ Run(sys.argv[1:])
+ # Exit without running GC, which can save multiple seconds due to the large
+ # number of objects created. But flushing is necessary, as os._exit doesn't do
+ # that.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(0)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
new file mode 100644
index 00000000..f0ee6966
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
@@ -0,0 +1,73 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import os.path
+import shutil
+import tempfile
+import unittest
+
+import mojom_parser
+
+from mojom.generate import module
+
+
+class MojomParserTestCase(unittest.TestCase):
+ """Tests covering the behavior defined by the main mojom_parser.py script.
+ This includes behavior around input and output path manipulation, dependency
+ resolution, and module serialization and deserialization."""
+
+ def __init__(self, method_name):
+ super().__init__(method_name)
+ self._temp_dir = None
+
+ def setUp(self):
+ self._temp_dir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ shutil.rmtree(self._temp_dir)
+ self._temp_dir = None
+
+ def GetPath(self, path):
+ assert not os.path.isabs(path)
+ return os.path.join(self._temp_dir, path)
+
+ def GetModulePath(self, path):
+ assert not os.path.isabs(path)
+ return os.path.join(self.GetPath('out'), path) + '-module'
+
+ def WriteFile(self, path, contents):
+ full_path = self.GetPath(path)
+ dirname = os.path.dirname(full_path)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ with open(full_path, 'w') as f:
+ f.write(contents)
+
+ def LoadModule(self, mojom_path):
+ with open(self.GetModulePath(mojom_path), 'rb') as f:
+ return module.Module.Load(f)
+
+ def ParseMojoms(self, mojoms, metadata=None):
+ """Parse all input mojoms relative the temp dir."""
+ out_dir = self.GetPath('out')
+ args = [
+ '--input-root', self._temp_dir, '--input-root', out_dir,
+ '--output-root', out_dir, '--mojoms'
+ ] + list(map(lambda mojom: os.path.join(self._temp_dir, mojom), mojoms))
+ if metadata:
+ args.extend(['--check-imports', self.GetPath(metadata)])
+ mojom_parser.Run(args)
+
+ def ExtractTypes(self, mojom):
+ filename = 'test.mojom'
+ self.WriteFile(filename, mojom)
+ self.ParseMojoms([filename])
+ m = self.LoadModule(filename)
+ definitions = {}
+ for kinds in (m.enums, m.structs, m.unions, m.interfaces, m.features):
+ for kind in kinds:
+ definitions[kind.mojom_name] = kind
+ return definitions
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
new file mode 100644
index 00000000..353a2b6e
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
@@ -0,0 +1,186 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class MojomParserTest(MojomParserTestCase):
+ """Tests covering the behavior defined by the main mojom_parser.py script.
+ This includes behavior around input and output path manipulation, dependency
+ resolution, and module serialization and deserialization."""
+
+ def testBasicParse(self):
+ """Basic test to verify that we can parse a mojom file and get a module."""
+ mojom = 'foo/bar.mojom'
+ self.WriteFile(
+ mojom, """\
+ module test;
+ enum TestEnum { kFoo };
+ """)
+ self.ParseMojoms([mojom])
+
+ m = self.LoadModule(mojom)
+ self.assertEqual('foo/bar.mojom', m.path)
+ self.assertEqual('test', m.mojom_namespace)
+ self.assertEqual(1, len(m.enums))
+
+ def testBasicParseWithAbsolutePaths(self):
+ """Verifies that we can parse a mojom file given an absolute path input."""
+ mojom = 'foo/bar.mojom'
+ self.WriteFile(
+ mojom, """\
+ module test;
+ enum TestEnum { kFoo };
+ """)
+ self.ParseMojoms([self.GetPath(mojom)])
+
+ m = self.LoadModule(mojom)
+ self.assertEqual('foo/bar.mojom', m.path)
+ self.assertEqual('test', m.mojom_namespace)
+ self.assertEqual(1, len(m.enums))
+
+ def testImport(self):
+ """Verify imports within the same set of mojom inputs."""
+ a = 'a.mojom'
+ b = 'b.mojom'
+ self.WriteFile(
+ a, """\
+ module a;
+ import "b.mojom";
+ struct Foo { b.Bar bar; };""")
+ self.WriteFile(b, """\
+ module b;
+ struct Bar {};""")
+ self.ParseMojoms([a, b])
+
+ ma = self.LoadModule(a)
+ mb = self.LoadModule(b)
+ self.assertEqual('a.mojom', ma.path)
+ self.assertEqual('b.mojom', mb.path)
+ self.assertEqual(1, len(ma.imports))
+ self.assertEqual(mb, ma.imports[0])
+
+ def testPreProcessedImport(self):
+ """Verify imports processed by a previous parser execution can be loaded
+ properly when parsing a dependent mojom."""
+ a = 'a.mojom'
+ self.WriteFile(a, """\
+ module a;
+ struct Bar {};""")
+ self.ParseMojoms([a])
+
+ b = 'b.mojom'
+ self.WriteFile(
+ b, """\
+ module b;
+ import "a.mojom";
+ struct Foo { a.Bar bar; };""")
+ self.ParseMojoms([b])
+
+ def testMissingImport(self):
+ """Verify that an import fails if the imported mojom does not exist."""
+ a = 'a.mojom'
+ self.WriteFile(
+ a, """\
+ module a;
+ import "non-existent.mojom";
+ struct Bar {};""")
+ with self.assertRaisesRegexp(ValueError, "does not exist"):
+ self.ParseMojoms([a])
+
+ def testUnparsedImport(self):
+ """Verify that an import fails if the imported mojom is not in the set of
+ mojoms provided to the parser on this execution AND there is no pre-existing
+ parsed output module already on disk for it."""
+ a = 'a.mojom'
+ b = 'b.mojom'
+ self.WriteFile(a, """\
+ module a;
+ struct Bar {};""")
+ self.WriteFile(
+ b, """\
+ module b;
+ import "a.mojom";
+ struct Foo { a.Bar bar; };""")
+
+ # a.mojom has not been parsed yet, so its import will fail when processing
+ # b.mojom here.
+ with self.assertRaisesRegexp(ValueError, "does not exist"):
+ self.ParseMojoms([b])
+
+ def testCheckImportsBasic(self):
+ """Verify that the parser can handle --check-imports with a valid set of
+ inputs, including support for transitive dependency resolution."""
+ a = 'a.mojom'
+ a_metadata = 'out/a.build_metadata'
+ b = 'b.mojom'
+ b_metadata = 'out/b.build_metadata'
+ c = 'c.mojom'
+ c_metadata = 'out/c.build_metadata'
+ self.WriteFile(a_metadata,
+ json.dumps({
+ "sources": [self.GetPath(a)],
+ "deps": []
+ }))
+ self.WriteFile(
+ b_metadata,
+ json.dumps({
+ "sources": [self.GetPath(b)],
+ "deps": [self.GetPath(a_metadata)]
+ }))
+ self.WriteFile(
+ c_metadata,
+ json.dumps({
+ "sources": [self.GetPath(c)],
+ "deps": [self.GetPath(b_metadata)]
+ }))
+ self.WriteFile(a, """\
+ module a;
+ struct Bar {};""")
+ self.WriteFile(
+ b, """\
+ module b;
+ import "a.mojom";
+ struct Foo { a.Bar bar; };""")
+ self.WriteFile(
+ c, """\
+ module c;
+ import "a.mojom";
+ import "b.mojom";
+ struct Baz { b.Foo foo; };""")
+ self.ParseMojoms([a], metadata=a_metadata)
+ self.ParseMojoms([b], metadata=b_metadata)
+ self.ParseMojoms([c], metadata=c_metadata)
+
+ def testCheckImportsMissing(self):
+ """Verify that the parser rejects valid input mojoms when imports don't
+ agree with build metadata given via --check-imports."""
+ a = 'a.mojom'
+ a_metadata = 'out/a.build_metadata'
+ b = 'b.mojom'
+ b_metadata = 'out/b.build_metadata'
+ self.WriteFile(a_metadata,
+ json.dumps({
+ "sources": [self.GetPath(a)],
+ "deps": []
+ }))
+ self.WriteFile(b_metadata,
+ json.dumps({
+ "sources": [self.GetPath(b)],
+ "deps": []
+ }))
+ self.WriteFile(a, """\
+ module a;
+ struct Bar {};""")
+ self.WriteFile(
+ b, """\
+ module b;
+ import "a.mojom";
+ struct Foo { a.Bar bar; };""")
+
+ self.ParseMojoms([a], metadata=a_metadata)
+ with self.assertRaisesRegexp(ValueError, "not allowed by build"):
+ self.ParseMojoms([b], metadata=b_metadata)
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
new file mode 100644
index 00000000..d10d69c6
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
@@ -0,0 +1,127 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+from mojom.generate import module
+
+
+class StableAttributeTest(MojomParserTestCase):
+ """Tests covering usage of the [Stable] attribute."""
+
+ def testStableAttributeTagging(self):
+ """Verify that we recognize the [Stable] attribute on relevant definitions
+ and the resulting parser outputs are tagged accordingly."""
+ mojom = 'test.mojom'
+ self.WriteFile(
+ mojom, """\
+ [Stable] enum TestEnum { kFoo };
+ enum UnstableEnum { kBar };
+ [Stable] struct TestStruct { TestEnum a; };
+ struct UnstableStruct { UnstableEnum a; };
+ [Stable] union TestUnion { TestEnum a; TestStruct b; };
+ union UnstableUnion { UnstableEnum a; UnstableStruct b; };
+ [Stable] interface TestInterface { Foo@0(TestUnion x) => (); };
+ interface UnstableInterface { Foo(UnstableUnion x) => (); };
+ """)
+ self.ParseMojoms([mojom])
+
+ m = self.LoadModule(mojom)
+ self.assertEqual(2, len(m.enums))
+ self.assertTrue(m.enums[0].stable)
+ self.assertFalse(m.enums[1].stable)
+ self.assertEqual(2, len(m.structs))
+ self.assertTrue(m.structs[0].stable)
+ self.assertFalse(m.structs[1].stable)
+ self.assertEqual(2, len(m.unions))
+ self.assertTrue(m.unions[0].stable)
+ self.assertFalse(m.unions[1].stable)
+ self.assertEqual(2, len(m.interfaces))
+ self.assertTrue(m.interfaces[0].stable)
+ self.assertFalse(m.interfaces[1].stable)
+
+ def testStableStruct(self):
+ """A [Stable] struct is valid if all its fields are also stable."""
+ self.ExtractTypes('[Stable] struct S {};')
+ self.ExtractTypes('[Stable] struct S { int32 x; bool b; };')
+ self.ExtractTypes('[Stable] enum E { A }; [Stable] struct S { E e; };')
+ self.ExtractTypes('[Stable] struct S {}; [Stable] struct T { S s; };')
+ self.ExtractTypes(
+ '[Stable] struct S {}; [Stable] struct T { array<S> ss; };')
+ self.ExtractTypes(
+ '[Stable] interface F {}; [Stable] struct T { pending_remote<F> f; };')
+
+ with self.assertRaisesRegexp(Exception, 'because it depends on E'):
+ self.ExtractTypes('enum E { A }; [Stable] struct S { E e; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on X'):
+ self.ExtractTypes('struct X {}; [Stable] struct S { X x; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on T'):
+ self.ExtractTypes('struct T {}; [Stable] struct S { array<T> xs; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on T'):
+ self.ExtractTypes('struct T {}; [Stable] struct S { map<int32, T> xs; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on T'):
+ self.ExtractTypes('struct T {}; [Stable] struct S { map<T, int32> xs; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on F'):
+ self.ExtractTypes(
+ 'interface F {}; [Stable] struct S { pending_remote<F> f; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on F'):
+ self.ExtractTypes(
+ 'interface F {}; [Stable] struct S { pending_receiver<F> f; };')
+
+ def testStableUnion(self):
+ """A [Stable] union is valid if all its fields' types are also stable."""
+ self.ExtractTypes('[Stable] union U {};')
+ self.ExtractTypes('[Stable] union U { int32 x; bool b; };')
+ self.ExtractTypes('[Stable] enum E { A }; [Stable] union U { E e; };')
+ self.ExtractTypes('[Stable] struct S {}; [Stable] union U { S s; };')
+ self.ExtractTypes(
+ '[Stable] struct S {}; [Stable] union U { array<S> ss; };')
+ self.ExtractTypes(
+ '[Stable] interface F {}; [Stable] union U { pending_remote<F> f; };')
+
+ with self.assertRaisesRegexp(Exception, 'because it depends on E'):
+ self.ExtractTypes('enum E { A }; [Stable] union U { E e; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on X'):
+ self.ExtractTypes('struct X {}; [Stable] union U { X x; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on T'):
+ self.ExtractTypes('struct T {}; [Stable] union U { array<T> xs; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on T'):
+ self.ExtractTypes('struct T {}; [Stable] union U { map<int32, T> xs; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on T'):
+ self.ExtractTypes('struct T {}; [Stable] union U { map<T, int32> xs; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on F'):
+ self.ExtractTypes(
+ 'interface F {}; [Stable] union U { pending_remote<F> f; };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on F'):
+ self.ExtractTypes(
+ 'interface F {}; [Stable] union U { pending_receiver<F> f; };')
+
+ def testStableInterface(self):
+ """A [Stable] interface is valid if all its methods' parameter types are
+ stable, including response parameters where applicable."""
+ self.ExtractTypes('[Stable] interface F {};')
+ self.ExtractTypes('[Stable] interface F { A@0(int32 x); };')
+ self.ExtractTypes('[Stable] interface F { A@0(int32 x) => (bool b); };')
+ self.ExtractTypes("""\
+ [Stable] enum E { A, B, C };
+ [Stable] struct S {};
+ [Stable] interface F { A@0(E e, S s) => (bool b, array<S> s); };
+ """)
+
+ with self.assertRaisesRegexp(Exception, 'because it depends on E'):
+ self.ExtractTypes(
+ 'enum E { A, B, C }; [Stable] interface F { A@0(E e); };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on E'):
+ self.ExtractTypes(
+ 'enum E { A, B, C }; [Stable] interface F { A@0(int32 x) => (E e); };'
+ )
+ with self.assertRaisesRegexp(Exception, 'because it depends on S'):
+ self.ExtractTypes(
+ 'struct S {}; [Stable] interface F { A@0(int32 x) => (S s); };')
+ with self.assertRaisesRegexp(Exception, 'because it depends on S'):
+ self.ExtractTypes(
+ 'struct S {}; [Stable] interface F { A@0(S s) => (bool b); };')
+
+ with self.assertRaisesRegexp(Exception, 'explicit method ordinals'):
+ self.ExtractTypes('[Stable] interface F { A() => (); };')
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
new file mode 100644
index 00000000..6b2525e5
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
@@ -0,0 +1,44 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class UnionTest(MojomParserTestCase):
+ """Tests union parsing behavior."""
+
+ def testExtensibleMustHaveDefault(self):
+ """Verifies that extensible unions must have a default field."""
+ mojom = 'foo.mojom'
+ self.WriteFile(mojom, 'module foo; [Extensible] union U { bool x; };')
+ with self.assertRaisesRegexp(Exception, 'must specify a \[Default\]'):
+ self.ParseMojoms([mojom])
+
+ def testExtensibleSingleDefault(self):
+ """Verifies that extensible unions must not have multiple default fields."""
+ mojom = 'foo.mojom'
+ self.WriteFile(
+ mojom, """\
+ module foo;
+ [Extensible] union U {
+ [Default] bool x;
+ [Default] bool y;
+ };
+ """)
+ with self.assertRaisesRegexp(Exception, 'Multiple \[Default\] fields'):
+ self.ParseMojoms([mojom])
+
+ def testExtensibleDefaultTypeValid(self):
+ """Verifies that an extensible union's default field must be nullable or
+ integral type."""
+ mojom = 'foo.mojom'
+ self.WriteFile(
+ mojom, """\
+ module foo;
+ [Extensible] union U {
+ [Default] handle<message_pipe> p;
+ };
+ """)
+ with self.assertRaisesRegexp(Exception, 'must be nullable or integral'):
+ self.ParseMojoms([mojom])
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
new file mode 100644
index 00000000..45e45ec5
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
@@ -0,0 +1,458 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom.generate import module
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class VersionCompatibilityTest(MojomParserTestCase):
+ """Tests covering compatibility between two versions of the same mojom type
+ definition. This coverage ensures that we can reliably detect unsafe changes
+ to definitions that are expected to tolerate version skew in production
+ environments."""
+
+ def _GetTypeCompatibilityMap(self, old_mojom, new_mojom):
+ """Helper to support the implementation of assertBackwardCompatible and
+ assertNotBackwardCompatible."""
+
+ old = self.ExtractTypes(old_mojom)
+ new = self.ExtractTypes(new_mojom)
+ self.assertEqual(set(old.keys()), set(new.keys()),
+ 'Old and new test mojoms should use the same type names.')
+
+ checker = module.BackwardCompatibilityChecker()
+ compatibility_map = {}
+ for name in old:
+ try:
+ compatibility_map[name] = checker.IsBackwardCompatible(
+ new[name], old[name])
+ except Exception:
+ compatibility_map[name] = False
+ return compatibility_map
+
+ def assertBackwardCompatible(self, old_mojom, new_mojom):
+ compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
+ for name, compatible in compatibility_map.items():
+ if not compatible:
+ raise AssertionError(
+ 'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
+ 'The new definition of %s should pass a backward-compatibility '
+ 'check, but it does not.' % (old_mojom, new_mojom, name))
+
+ def assertNotBackwardCompatible(self, old_mojom, new_mojom):
+ compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
+ if all(compatibility_map.values()):
+ raise AssertionError(
+ 'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
+ 'The new mojom should fail a backward-compatibility check, but it '
+ 'does not.' % (old_mojom, new_mojom))
+
+ def testNewNonExtensibleEnumValue(self):
+ """Adding a value to a non-extensible enum breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
+ 'enum E { kFoo, kBar, kBaz };')
+
+ def testNewNonExtensibleEnumValueWithMinVersion(self):
+ """Adding a value to a non-extensible enum breaks backward-compatibility,
+ even with a new [MinVersion] specified for the value."""
+ self.assertNotBackwardCompatible(
+ 'enum E { kFoo, kBar };', 'enum E { kFoo, kBar, [MinVersion=1] kBaz };')
+
+ def testNewValueInExistingVersion(self):
+ """Adding a value to an existing version is not allowed, even if the old
+ enum was marked [Extensible]. Note that it is irrelevant whether or not the
+ new enum is marked [Extensible]."""
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ 'enum E { kFoo, kBar, kBaz };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ '[Extensible] enum E { [Default] kFoo, kBar, kBaz };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kFoo, [MinVersion=1] kBar };',
+ 'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
+
+ def testEnumValueRemoval(self):
+ """Removal of an enum value is never valid even for [Extensible] enums."""
+ self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
+ 'enum E { kFoo };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ '[Extensible] enum E { [Default] kFoo };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
+ '[Extensible] enum E { [Default] kA, };')
+ self.assertNotBackwardCompatible(
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB,
+ [MinVersion=1] kZ };""",
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };')
+
+ def testNewExtensibleEnumValueWithMinVersion(self):
+ """Adding a new and properly [MinVersion]'d value to an [Extensible] enum
+ is a backward-compatible change. Note that it is irrelevant whether or not
+ the new enum is marked [Extensible]."""
+ self.assertBackwardCompatible('[Extensible] enum E { [Default] kA, kB };',
+ 'enum E { kA, kB, [MinVersion=1] kC };')
+ self.assertBackwardCompatible(
+ '[Extensible] enum E { [Default] kA, kB };',
+ '[Extensible] enum E { [Default] kA, kB, [MinVersion=1] kC };')
+ self.assertBackwardCompatible(
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB,
+ [MinVersion=2] kC };""")
+
+ def testRenameEnumValue(self):
+ """Renaming an enum value does not affect backward-compatibility. Only
+ the numeric value is relevant."""
+ self.assertBackwardCompatible('enum E { kA, kB };', 'enum E { kX, kY };')
+
+ def testAddEnumValueAlias(self):
+ """Adding new enum fields does not affect backward-compatibility if it does
+ not introduce any new numeric values."""
+ self.assertBackwardCompatible(
+ 'enum E { kA, kB };', 'enum E { kA, kB, kC = kA, kD = 1, kE = kD };')
+
+ def testEnumIdentity(self):
+ """An unchanged enum is obviously backward-compatible."""
+ self.assertBackwardCompatible('enum E { kA, kB, kC };',
+ 'enum E { kA, kB, kC };')
+
+ def testNewStructFieldUnversioned(self):
+ """Adding a new field to a struct without a new (i.e. higher than any
+ existing version) [MinVersion] tag breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('struct S { string a; };',
+ 'struct S { string a; string b; };')
+
+ def testStructFieldRemoval(self):
+ """Removing a field from a struct breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('struct S { string a; string b; };',
+ 'struct S { string a; };')
+
+ def testStructFieldTypeChange(self):
+ """Changing the type of an existing field always breaks
+ backward-compatibility."""
+ self.assertNotBackwardCompatible('struct S { string a; };',
+ 'struct S { array<int32> a; };')
+
+ def testStructFieldBecomingOptional(self):
+ """Changing a field from non-optional to optional breaks
+ backward-compatibility."""
+ self.assertNotBackwardCompatible('struct S { string a; };',
+ 'struct S { string? a; };')
+
+ def testStructFieldBecomingNonOptional(self):
+ """Changing a field from optional to non-optional breaks
+ backward-compatibility."""
+ self.assertNotBackwardCompatible('struct S { string? a; };',
+ 'struct S { string a; };')
+
+ def testStructFieldOrderChange(self):
+ """Changing the order of fields breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('struct S { string a; bool b; };',
+ 'struct S { bool b; string a; };')
+ self.assertNotBackwardCompatible('struct S { string a@0; bool b@1; };',
+ 'struct S { string a@1; bool b@0; };')
+
+ def testStructFieldMinVersionChange(self):
+ """Changing the MinVersion of a field breaks backward-compatibility."""
+ self.assertNotBackwardCompatible(
+ 'struct S { string a; [MinVersion=1] string? b; };',
+ 'struct S { string a; [MinVersion=2] string? b; };')
+
+ def testStructFieldTypeChange(self):
+ """If a struct field's own type definition changes, the containing struct
+ is backward-compatible if and only if the field type's change is
+ backward-compatible."""
+ self.assertBackwardCompatible(
+ 'struct S {}; struct T { S s; };',
+ 'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
+ self.assertBackwardCompatible(
+ '[Extensible] enum E { [Default] kA }; struct S { E e; };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB };
+ struct S { E e; };""")
+ self.assertNotBackwardCompatible(
+ 'struct S {}; struct T { S s; };',
+ 'struct S { int32 x; }; struct T { S s; };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kA }; struct S { E e; };',
+ '[Extensible] enum E { [Default] kA, kB }; struct S { E e; };')
+
+ def testNewStructFieldWithInvalidMinVersion(self):
+ """Adding a new field using an existing MinVersion breaks backward-
+ compatibility."""
+ self.assertNotBackwardCompatible(
+ """\
+ struct S {
+ string a;
+ [MinVersion=1] string? b;
+ };
+ """, """\
+ struct S {
+ string a;
+ [MinVersion=1] string? b;
+ [MinVersion=1] string? c;
+ };""")
+
+ def testNewStructFieldWithValidMinVersion(self):
+ """Adding a new field is safe if tagged with a MinVersion greater than any
+ previously used MinVersion in the struct."""
+ self.assertBackwardCompatible(
+ 'struct S { int32 a; };',
+ 'struct S { int32 a; [MinVersion=1] int32 b; };')
+ self.assertBackwardCompatible(
+ 'struct S { int32 a; [MinVersion=1] int32 b; };',
+ 'struct S { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
+
+ def testNewStructFieldNullableReference(self):
+ """Adding a new nullable reference-typed field is fine if versioned
+ properly."""
+ self.assertBackwardCompatible(
+ 'struct S { int32 a; };',
+ 'struct S { int32 a; [MinVersion=1] string? b; };')
+
+ def testStructFieldRename(self):
+ """Renaming a field has no effect on backward-compatibility."""
+ self.assertBackwardCompatible('struct S { int32 x; bool b; };',
+ 'struct S { int32 a; bool b; };')
+
+ def testStructFieldReorderWithExplicitOrdinals(self):
+ """Reordering fields has no effect on backward-compatibility when field
+ ordinals are explicitly labeled and remain unchanged."""
+ self.assertBackwardCompatible('struct S { bool b@1; int32 a@0; };',
+ 'struct S { int32 a@0; bool b@1; };')
+
+ def testNewUnionFieldUnversioned(self):
+ """Adding a new field to a union without a new (i.e. higher than any
+ existing version) [MinVersion] tag breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('union U { string a; };',
+ 'union U { string a; string b; };')
+
+ def testUnionFieldRemoval(self):
+ """Removing a field from a union breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('union U { string a; string b; };',
+ 'union U { string a; };')
+
+ def testUnionFieldTypeChange(self):
+ """Changing the type of an existing field always breaks
+ backward-compatibility."""
+ self.assertNotBackwardCompatible('union U { string a; };',
+ 'union U { array<int32> a; };')
+
+ def testUnionFieldBecomingOptional(self):
+ """Changing a field from non-optional to optional breaks
+ backward-compatibility."""
+ self.assertNotBackwardCompatible('union U { string a; };',
+ 'union U { string? a; };')
+
+ def testFieldNestedTypeChanged(self):
+ """Changing the definition of a nested type within a field (such as an array
+ element or interface endpoint type) should only break backward-compatibility
+ if the changes to that type are not backward-compatible."""
+ self.assertBackwardCompatible(
+ """\
+ struct S { string a; };
+ struct T { array<S> ss; };
+ """, """\
+ struct S {
+ string a;
+ [MinVersion=1] string? b;
+ };
+ struct T { array<S> ss; };
+ """)
+ self.assertBackwardCompatible(
+ """\
+ interface F { Do(); };
+ struct S { pending_receiver<F> r; };
+ """, """\
+ interface F {
+ Do();
+ [MinVersion=1] Say();
+ };
+ struct S { pending_receiver<F> r; };
+ """)
+
+ def testRecursiveTypeChange(self):
+ """Recursive types do not break the compatibility checker."""
+ self.assertBackwardCompatible(
+ """\
+ struct S {
+ string a;
+ array<S> others;
+ };""", """\
+ struct S {
+ string a;
+ array<S> others;
+ [MinVersion=1] string? b;
+ };""")
+
+ def testUnionFieldBecomingNonOptional(self):
+ """Changing a field from optional to non-optional breaks
+ backward-compatibility."""
+ self.assertNotBackwardCompatible('union U { string? a; };',
+ 'union U { string a; };')
+
+ def testUnionFieldOrderChange(self):
+ """Changing the order of fields breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('union U { string a; bool b; };',
+ 'union U { bool b; string a; };')
+ self.assertNotBackwardCompatible('union U { string a@0; bool b@1; };',
+ 'union U { string a@1; bool b@0; };')
+
+ def testUnionFieldMinVersionChange(self):
+ """Changing the MinVersion of a field breaks backward-compatibility."""
+ self.assertNotBackwardCompatible(
+ 'union U { string a; [MinVersion=1] string b; };',
+ 'union U { string a; [MinVersion=2] string b; };')
+
+ def testUnionFieldTypeChange(self):
+ """If a union field's own type definition changes, the containing union
+ is backward-compatible if and only if the field type's change is
+ backward-compatible."""
+ self.assertBackwardCompatible(
+ 'struct S {}; union U { S s; };',
+ 'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
+ self.assertBackwardCompatible(
+ '[Extensible] enum E { [Default] kA }; union U { E e; };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB };
+ union U { E e; };""")
+ self.assertNotBackwardCompatible(
+ 'struct S {}; union U { S s; };',
+ 'struct S { int32 x; }; union U { S s; };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kA }; union U { E e; };',
+ '[Extensible] enum E { [Default] kA, kB }; union U { E e; };')
+
+ def testNewUnionFieldWithInvalidMinVersion(self):
+ """Adding a new field using an existing MinVersion breaks backward-
+ compatibility."""
+ self.assertNotBackwardCompatible(
+ """\
+ union U {
+ string a;
+ [MinVersion=1] string b;
+ };
+ """, """\
+ union U {
+ string a;
+ [MinVersion=1] string b;
+ [MinVersion=1] string c;
+ };""")
+
+ def testNewUnionFieldWithValidMinVersion(self):
+ """Adding a new field is safe if tagged with a MinVersion greater than any
+ previously used MinVersion in the union."""
+ self.assertBackwardCompatible(
+ 'union U { int32 a; };',
+ 'union U { int32 a; [MinVersion=1] int32 b; };')
+ self.assertBackwardCompatible(
+ 'union U { int32 a; [MinVersion=1] int32 b; };',
+ 'union U { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
+
+ def testUnionFieldRename(self):
+ """Renaming a field has no effect on backward-compatibility."""
+ self.assertBackwardCompatible('union U { int32 x; bool b; };',
+ 'union U { int32 a; bool b; };')
+
+ def testUnionFieldReorderWithExplicitOrdinals(self):
+ """Reordering fields has no effect on backward-compatibility when field
+ ordinals are explicitly labeled and remain unchanged."""
+ self.assertBackwardCompatible('union U { bool b@1; int32 a@0; };',
+ 'union U { int32 a@0; bool b@1; };')
+
+ def testNewInterfaceMethodUnversioned(self):
+ """Adding a new method to an interface without a new (i.e. higher than any
+ existing version) [MinVersion] tag breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('interface F { A(); };',
+ 'interface F { A(); B(); };')
+
+ def testInterfaceMethodRemoval(self):
+ """Removing a method from an interface breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('interface F { A(); B(); };',
+ 'interface F { A(); };')
+
+ def testInterfaceMethodParamsChanged(self):
+ """Changes to the parameter list are only backward-compatible if they meet
+ backward-compatibility requirements of an equivalent struct definition."""
+ self.assertNotBackwardCompatible('interface F { A(); };',
+ 'interface F { A(int32 x); };')
+ self.assertNotBackwardCompatible('interface F { A(int32 x); };',
+ 'interface F { A(bool x); };')
+ self.assertNotBackwardCompatible(
+ 'interface F { A(int32 x, [MinVersion=1] string? s); };', """\
+ interface F {
+ A(int32 x, [MinVersion=1] string? s, [MinVersion=1] int32 y);
+ };""")
+
+ self.assertBackwardCompatible('interface F { A(int32 x); };',
+ 'interface F { A(int32 a); };')
+ self.assertBackwardCompatible(
+ 'interface F { A(int32 x); };',
+ 'interface F { A(int32 x, [MinVersion=1] string? s); };')
+
+ self.assertBackwardCompatible(
+ 'struct S {}; interface F { A(S s); };',
+ 'struct S { [MinVersion=1] int32 x; }; interface F { A(S s); };')
+ self.assertBackwardCompatible(
+ 'struct S {}; struct T {}; interface F { A(S s); };',
+ 'struct S {}; struct T {}; interface F { A(T s); };')
+ self.assertNotBackwardCompatible(
+ 'struct S {}; struct T { int32 x; }; interface F { A(S s); };',
+ 'struct S {}; struct T { int32 x; }; interface F { A(T t); };')
+
+ def testInterfaceMethodReplyAdded(self):
+ """Adding a reply to a message breaks backward-compatibilty."""
+ self.assertNotBackwardCompatible('interface F { A(); };',
+ 'interface F { A() => (); };')
+
+ def testInterfaceMethodReplyRemoved(self):
+ """Removing a reply from a message breaks backward-compatibility."""
+ self.assertNotBackwardCompatible('interface F { A() => (); };',
+ 'interface F { A(); };')
+
+ def testInterfaceMethodReplyParamsChanged(self):
+ """Similar to request parameters, a change to reply parameters is considered
+ backward-compatible if it meets the same backward-compatibility
+ requirements imposed on equivalent struct changes."""
+ self.assertNotBackwardCompatible('interface F { A() => (); };',
+ 'interface F { A() => (int32 x); };')
+ self.assertNotBackwardCompatible('interface F { A() => (int32 x); };',
+ 'interface F { A() => (); };')
+ self.assertNotBackwardCompatible('interface F { A() => (bool x); };',
+ 'interface F { A() => (int32 x); };')
+
+ self.assertBackwardCompatible('interface F { A() => (int32 a); };',
+ 'interface F { A() => (int32 x); };')
+ self.assertBackwardCompatible(
+ 'interface F { A() => (int32 x); };',
+ 'interface F { A() => (int32 x, [MinVersion=1] string? s); };')
+
+ def testNewInterfaceMethodWithInvalidMinVersion(self):
+ """Adding a new method to an existing version is not backward-compatible."""
+ self.assertNotBackwardCompatible(
+ """\
+ interface F {
+ A();
+ [MinVersion=1] B();
+ };
+ """, """\
+ interface F {
+ A();
+ [MinVersion=1] B();
+ [MinVersion=1] C();
+ };
+ """)
+
+ def testNewInterfaceMethodWithValidMinVersion(self):
+ """Adding a new method is fine as long as its MinVersion exceeds that of any
+ method on the old interface definition."""
+ self.assertBackwardCompatible('interface F { A(); };',
+ 'interface F { A(); [MinVersion=1] B(); };')
diff --git a/utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py b/utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
new file mode 100755
index 00000000..98bce18c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import sys
+
+_TOOLS_DIR = os.path.dirname(__file__)
+_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
+_BINDINGS_DIR = os.path.join(_TOOLS_DIR, 'bindings')
+_SRC_DIR = os.path.join(_TOOLS_DIR, os.path.pardir, os.path.pardir,
+ os.path.pardir)
+
+# Ensure that the mojom library is discoverable.
+sys.path.append(_MOJOM_DIR)
+sys.path.append(_BINDINGS_DIR)
+
+# Help Python find typ in //third_party/catapult/third_party/typ/
+sys.path.append(
+ os.path.join(_SRC_DIR, 'third_party', 'catapult', 'third_party', 'typ'))
+import typ
+
+
+def Main():
+ return typ.main(top_level_dirs=[_MOJOM_DIR, _BINDINGS_DIR])
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/utils/codegen/ipc/parser.py b/utils/codegen/ipc/parser.py
new file mode 100755
index 00000000..8e70322d
--- /dev/null
+++ b/utils/codegen/ipc/parser.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Paul Elder <paul.elder@ideasonboard.com>
+#
+# Run mojo parser with python3
+
+import os
+import sys
+
+# Make sure that mojom_parser.py can import mojom
+sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/mojom')
+
+import mojo.public.tools.mojom.mojom_parser as parser
+
+parser.Run(sys.argv[1:])
diff --git a/utils/codegen/ipc/tools/README b/utils/codegen/ipc/tools/README
new file mode 100644
index 00000000..961cabd2
--- /dev/null
+++ b/utils/codegen/ipc/tools/README
@@ -0,0 +1,4 @@
+# SPDX-License-Identifier: CC0-1.0
+
+Files in this directory are imported from 9be4263648d7 of Chromium. Do not
+modify them manually.
diff --git a/utils/codegen/ipc/tools/diagnosis/crbug_1001171.py b/utils/codegen/ipc/tools/diagnosis/crbug_1001171.py
new file mode 100644
index 00000000..40900d10
--- /dev/null
+++ b/utils/codegen/ipc/tools/diagnosis/crbug_1001171.py
@@ -0,0 +1,51 @@
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper context wrapper for diagnosing crbug.com/1001171.
+
+This module and all uses thereof can and should be removed once
+crbug.com/1001171 has been resolved.
+"""
+
+from __future__ import print_function
+
+import contextlib
+import os
+import sys
+
+
+@contextlib.contextmanager
+def DumpStateOnLookupError():
+ """Prints potentially useful state info in the event of a LookupError."""
+ try:
+ yield
+ except LookupError:
+ print('LookupError diagnosis for crbug.com/1001171:')
+ for path_index, path_entry in enumerate(sys.path):
+ desc = 'unknown'
+ if not os.path.exists(path_entry):
+ desc = 'missing'
+ elif os.path.islink(path_entry):
+ desc = 'link -> %s' % os.path.realpath(path_entry)
+ elif os.path.isfile(path_entry):
+ desc = 'file'
+ elif os.path.isdir(path_entry):
+ desc = 'dir'
+ print(' sys.path[%d]: %s (%s)' % (path_index, path_entry, desc))
+
+ real_path_entry = os.path.realpath(path_entry)
+ if (path_entry.endswith(os.path.join('lib', 'python2.7'))
+ and os.path.isdir(real_path_entry)):
+ encodings_dir = os.path.realpath(
+ os.path.join(real_path_entry, 'encodings'))
+ if os.path.exists(encodings_dir):
+ if os.path.isdir(encodings_dir):
+ print(' %s contents: %s' % (encodings_dir,
+ str(os.listdir(encodings_dir))))
+ else:
+ print(' %s exists but is not a directory' % encodings_dir)
+ else:
+ print(' %s missing' % encodings_dir)
+
+ raise
diff --git a/utils/codegen/meson.build b/utils/codegen/meson.build
new file mode 100644
index 00000000..904dd66d
--- /dev/null
+++ b/utils/codegen/meson.build
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: CC0-1.0
+
+## Code generation
+
+py_build_env = environment()
+# \todo Investigate usage of PYTHONPYCACHEPREFIX for Python >= 3.8
+py_build_env.set('PYTHONDONTWRITEBYTECODE', '1')
+py_build_env.prepend('PYTHONPATH', meson.current_source_dir())
+
+py_modules += ['jinja2', 'yaml']
+
+gen_controls = files('gen-controls.py')
+gen_formats = files('gen-formats.py')
+gen_gst_controls = files('gen-gst-controls.py')
+gen_header = files('gen-header.sh')
+gen_ipa_pub_key = files('gen-ipa-pub-key.py')
+gen_tracepoints = files('gen-tp-header.py')
+
+subdir('ipc')
diff --git a/utils/gen-debug-controls.py b/utils/gen-debug-controls.py
new file mode 100755
index 00000000..272597f4
--- /dev/null
+++ b/utils/gen-debug-controls.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2024, Google Inc.
+#
+# Author: Stefan Klug <stefan.klug@ideasonboard.com>
+#
+# This script looks for occurrences of the debug metadata controls in the source
+# tree and updates src/libcamera/control_ids_debug.yaml accordingly. It is meant
+# to be used during development to ease updating of the yaml file while
+# debugging.
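+#
+# The script takes no arguments; paths are resolved relative to the location of
+# this script, so it can be run from anywhere inside the source tree.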
+
+import argparse
+import logging
+import os
+import re
+import sys
+from dataclasses import dataclass
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
+
+try:
+ import ruamel.yaml as ruyaml
+except ImportError:
+ logger.error(
+ 'Failed to import ruamel.yaml. Please install the ruamel.yaml package.')
+ sys.exit(1)
+
+@dataclass
+class FoundMatch:
+ file: os.PathLike
+ whole_match: str
+ line: int
+ type: str
+ name: str
+ size: str = None
+
+
+def get_control_name(control):
+ k = list(control.keys())
+ if len(k) != 1:
+ raise Exception(f"Can't handle control entry with {len(k)} keys")
+ return k[0]
+
+
+def find_debug_controls(dir):
+ extensions = ['.cpp', '.h']
+ files = [p for p in dir.rglob('*') if p.suffix in extensions]
+
+ # The following regex was tested on
+ # set<Span<type>>( controls::debug::something , static_cast<type>(var) )
+ # set<>( controls::debug::something , static_cast<type>(var) )
+ # set( controls::debug::something , static_cast<type> (var) )
+ exp = re.compile(r'set' # set function
+ r'(?:\<((?:[^)(])*)\>)?' # followed by an optional template param
+ r'\(\s*controls::debug::(\w+)\s*,' # referencing a debug control
+ )
+ matches = []
+ for p in files:
+ with p.open('r') as f:
+ for idx, line in enumerate(f):
+ match = exp.search(line)
+ if match:
+ m = FoundMatch(file=p, line=idx, type=match.group(1),
+ name=match.group(2), whole_match=match.group(0))
+ if m.type is not None and m.type.startswith('Span'):
+ # Simple span type detection treating the last word
+ # inside <> as type.
+ r = re.match(r'Span<(?:.*\s+)(.*)>', m.type)
+ m.type = r.group(1)
+ m.size = '[n]'
+ matches.append(m)
+ return matches
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='Automatically updates control_ids_debug.yaml')
+ parser.parse_args(argv[1:])
+
+ yaml = ruyaml.YAML()
+ root_dir = Path(__file__).resolve().parent.parent
+ ctrl_file = root_dir.joinpath('src/libcamera/control_ids_debug.yaml')
+
+ matches = find_debug_controls(root_dir.joinpath('src'))
+
+ doc = yaml.load(ctrl_file)
+
+ controls = doc['controls']
+
+ # Create a map of names in the existing yaml for easier updating.
+ controls_map = {}
+ for control in controls:
+ for k, v in control.items():
+ controls_map[k] = v
+
+ obsolete_names = list(controls_map.keys())
+
+ for m in matches:
+ if not m.type:
+ p = m.file.relative_to(Path.cwd(), walk_up=True)
+ logger.warning(
+ f'{p}:{m.line + 1}: Failed to deduce type from {m.whole_match} ... skipping')
+ continue
+
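+ # The emitted YAML entry for a control looks roughly like this (name and
+ # location are illustrative):
+ #
+ # - MyDebugValue:
+ #     type: int32_t
+ #     direction: out
+ #     description: Debug control MyDebugValue found in src/ipa/foo.cpp:42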
+ p = m.file.relative_to(root_dir)
+ desc = {'type': m.type,
+ 'direction': 'out',
+ 'description': f'Debug control {m.name} found in {p}:{m.line}'}
+ if m.size is not None:
+ desc['size'] = m.size
+
+ if m.name in controls_map:
+ # Can't use == for modified check because of the special yaml dicts.
+ update_needed = False
+ if list(controls_map[m.name].keys()) != list(desc.keys()):
+ update_needed = True
+ else:
+ for k, v in controls_map[m.name].items():
+ if v != desc[k]:
+ update_needed = True
+ break
+
+ if update_needed:
+ logger.info(f"Update control '{m.name}'")
+ controls_map[m.name].clear()
+ controls_map[m.name].update(desc)
+
+ obsolete_names.remove(m.name)
+ else:
+ logger.info(f"Add control '{m.name}'")
+ insert_before = len(controls)
+ for idx, control in enumerate(controls):
+ if get_control_name(control).lower() > m.name.lower():
+ insert_before = idx
+ break
+ controls.insert(insert_before, {m.name: desc})
+
+ # Remove elements from controls without recreating the list (to keep
+ # comments etc.).
+ idx = 0
+ while idx < len(controls):
+ name = get_control_name(controls[idx])
+ if name in obsolete_names:
+ logger.info(f"Remove control '{name}'")
+ controls.pop(idx)
+ else:
+ idx += 1
+
+ with ctrl_file.open('w') as f:
+ # Ruyaml loses the header.
+ f.write(("# SPDX-License-Identifier: LGPL-2.1-or-later\n"
+ "#\n"
+ "# This file was generated by utils/gen-debug-controls.py\n"
+ "#\n"))
+ yaml.dump(doc, f)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/utils/gen-ipa-priv-key.sh b/utils/gen-ipa-priv-key.sh
new file mode 100755
index 00000000..2ca7b883
--- /dev/null
+++ b/utils/gen-ipa-priv-key.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Generate an RSA private key to sign IPA modules
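+#
+# Usage: gen-ipa-priv-key.sh <output-key-file>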
+
+key="$1"
+
+openssl genpkey -algorithm RSA -out "${key}" -pkeyopt rsa_keygen_bits:2048
diff --git a/utils/gen-version.sh b/utils/gen-version.sh
index 7f7872ce..1b818e9e 100755
--- a/utils/gen-version.sh
+++ b/utils/gen-version.sh
@@ -4,10 +4,26 @@
# Generate a version string using git describe
build_dir="$1"
+src_dir="$2"
+project_version="$3"
+
+# If .tarball-version exists, output the version string from the file and exit.
+# This file is auto-generated on a 'meson dist' command from the run-dist.sh
+# script.
+if [ -n "$src_dir" ] && [ -f "$src_dir"/.tarball-version ]
+then
+ cat "$src_dir"/.tarball-version
+ exit 0
+fi
# Bail out if the directory isn't under git control
-src_dir=$(git rev-parse --git-dir 2>&1) || exit 1
-src_dir=$(readlink -f "$src_dir/..")
+git_dir=$(git rev-parse --git-dir 2>&1) || exit 1
+
+# Derive the source directory from the git directory if not specified.
+if [ -z "$src_dir" ]
+then
+ src_dir=$(readlink -f "$git_dir/..")
+fi
# Get a short description from the tree.
version=$(git describe --abbrev=8 --match "v[0-9]*" 2>/dev/null)
@@ -26,7 +42,14 @@ if [ -z "$build_dir" ] || (echo "$build_dir" | grep -q "$src_dir")
then
git update-index --refresh > /dev/null 2>&1
fi
-git diff-index --quiet HEAD || version="$version-dirty"
+git diff-index --quiet HEAD || version="$version-dirty ($(date +%Y-%m-%dT%H:%M:%S%Z))"
+
+# If a project version is provided, use it to replace the version number.
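+# For instance, a hypothetical "v0.3.0-120-g12345678" combined with project
+# version "0.3.1" becomes "v0.3.1-120-g12345678".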
+if [ -n "$project_version" ]
+then
+ version=$(echo "$version" | sed -e 's/^[^-]*-//')
+ version="v$project_version-$version"
+fi
# Replace first '-' with a '+' to denote build metadata, strip the 'g' in front
# of the git SHA1 and remove the initial 'v'.
diff --git a/utils/hooks/post-commit b/utils/hooks/post-commit
index c8b1739a..f28cbd8d 100755
--- a/utils/hooks/post-commit
+++ b/utils/hooks/post-commit
@@ -1,5 +1,7 @@
#!/bin/sh
+# SPDX-License-Identifier: GPL-2.0-or-later
+
# Execute the checkstyle script after committing any code. This allows the
# commit to succeed, but ensures that the developer is aware of any potential
# issues immediately, and can resolve them and fix rapidly with:
diff --git a/utils/hooks/pre-commit b/utils/hooks/pre-commit
index 47fcbc81..7a4cb625 100755
--- a/utils/hooks/pre-commit
+++ b/utils/hooks/pre-commit
@@ -1,5 +1,7 @@
#!/bin/sh
+# SPDX-License-Identifier: GPL-2.0-or-later
+
# Execute the checkstyle script before committing any code. This will fail the
# commit in case of style issues, ensuring that the developer will notice them.
# The pre-commit hook can be bypassed with git commit -n to ignore selective
diff --git a/utils/hooks/pre-push b/utils/hooks/pre-push
new file mode 100755
index 00000000..68dcbd0c
--- /dev/null
+++ b/utils/hooks/pre-push
@@ -0,0 +1,119 @@
+#!/bin/bash
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+
+# A hook script to prevent pushing unsuitable commits to the master or
+# integration branches. Criteria to determine unsuitable commits are listed
+# below.
+#
+# Information about the commits which are being pushed is supplied as lines to
+# the standard input in the form:
+#
+# <local ref> <local sha1> <remote ref> <remote sha1>
+
+z40=0000000000000000000000000000000000000000
+
+remote_name="$1"
+remote_url="$2"
+
+while read -r local_ref local_sha remote_ref remote_sha
+do
+ case "$remote_ref" in
+ refs/heads/master)
+ ;;
+ refs/heads/integration/*)
+ ;;
+ *)
+ continue
+ esac
+
+ # If the remote branch gets deleted, there's nothing to check.
+ if [ "$local_sha" = $z40 ]
+ then
+ continue
+ fi
+
+ # Check if we are creating a new branch or updating an existing one.
+ if [ "$remote_sha" = $z40 ]
+ then
+ if [ "$remote_ref" = "refs/heads/master" ]
+ then
+ # There are known invalid commits in the full history,
+ # skip the checks if we are pushing the master branch
+ # (for instance to an empty repository).
+ continue
+ else
+ # We're pushing a new integration branch, check all
+ # commits on top of the master branch.
+ range="remotes/$remote_name/master..$local_sha"
+ fi
+ else
+ # Update to existing branch, examine new commits only.
+ range="$remote_sha..$local_sha"
+ fi
+
+ #
+ # Find invalid commits.
+ #
+ errors=0
+ for commit in $(git rev-list "$range")
+ do
+ msg=$(git cat-file commit "$commit")
+
+ # 1. The commit message shall not contain a local changelog.
+ if echo -E "$msg" | grep -q '^--- *$'
+ then
+ echo >&2 "Found local changelog in commit $commit"
+ errors=$((errors+1))
+ fi
+
+ # 2. The commit message shall have Signed-off-by lines
+ # corresponding to the committer, author, and all co-developers.
+ committer=$(echo "$msg" | grep '^committer ' | head -1 | \
+ cut -d ' ' -f 2- | rev | cut -d ' ' -f 3- | rev)
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${committer}"
+ then
+ echo >&2 "Missing committer Signed-off-by in commit $commit"
+ errors=$((errors+1))
+ fi
+
+ author=$(echo "$msg" | grep '^author ' | head -1 | \
+ cut -d ' ' -f 2- | rev | cut -d ' ' -f 3- | rev)
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${author}"
+ then
+ echo >&2 "Missing author Signed-off-by in commit $commit"
+ errors=$((errors+1))
+ fi
+
+ while read -r codev
+ do
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${codev}"
+ then
+ echo >&2 "Missing co-developer '${codev}' Signed-off-by in commit $commit"
+ errors=$((errors+1))
+ fi
+ done < <(echo "$msg" | grep '^Co-developed-by: ' | cut -d ' ' -f 2-)
+
+ # 3. A Reviewed-by or Acked-by is required.
+ if ! echo -E "$msg" | grep -q '^\(Reviewed\|Acked\)-by: '
+ then
+ echo >&2 "No Reviewed-by or Acked-by in commit $commit"
+ errors=$((errors+1))
+ fi
+
+ # 4. The commit message shall not contain a Change-Id.
+ if echo -E "$msg" | grep -q '^Change-Id:'
+ then
+ echo >&2 "Found Change-Id in commit $commit"
+ errors=$((errors+1))
+ fi
+ done
+
+ if [ $errors != 0 ]
+ then
+ echo >&2 "Found $errors errors in $local_ref, not pushing"
+ exit 1
+ fi
+done
+
+exit 0
diff --git a/utils/ipu3/ipu3-capture.sh b/utils/ipu3/ipu3-capture.sh
index ba6147b4..004a92b0 100755
--- a/utils/ipu3/ipu3-capture.sh
+++ b/utils/ipu3/ipu3-capture.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipu3-capture.sh - Capture raw frames from cameras based on the Intel IPU3
+# Capture raw frames from cameras based on the Intel IPU3
#
# The scripts makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
@@ -63,7 +63,8 @@ parse_pipeline() {
if (sensor) {
gsub(\".*fmt:\", \"\");
gsub(\"[] ].*\", \"\");
- gsub(\"/\", \" \");
+ sub(\"/\", \" \");
+ sub(\"@[0-9]+/[0-9]+\", \"\");
format=\$0;
}
}
diff --git a/utils/ipu3/ipu3-pack.c b/utils/ipu3/ipu3-pack.c
new file mode 100644
index 00000000..23d2db8b
--- /dev/null
+++ b/utils/ipu3/ipu3-pack.c
@@ -0,0 +1,101 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * ipu3-pack - Convert unpacked RAW10 Bayer data to the IPU3 packed Bayer formats
+ *
+ * Copyright 2022 Umang Jain <umang.jain@ideasonboard.com>
+ */
+#define _GNU_SOURCE
+
+#include <errno.h>
+#include <fcntl.h>
+#include <libgen.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+static void usage(char *argv0)
+{
+ printf("Usage: %s input-file output-file\n", basename(argv0));
+ printf("Convert unpacked RAW10 Bayer data to the IPU3 packed Bayer formats\n");
+ printf("If the output-file '-', output data will be written to standard output\n");
+}
+
+int main(int argc, char *argv[])
+{
+ int in_fd;
+ int out_fd;
+ int ret;
+
+ if (argc != 3) {
+ usage(argv[0]);
+ return 1;
+ }
+
+ in_fd = open(argv[1], O_RDONLY);
+ if (in_fd == -1) {
+ fprintf(stderr, "Failed to open input file '%s': %s\n",
+ argv[1], strerror(errno));
+ return 1;
+ }
+
+ if (strcmp(argv[2], "-") == 0) {
+ out_fd = STDOUT_FILENO;
+ } else {
+ out_fd = open(argv[2], O_WRONLY | O_TRUNC | O_CREAT, 0644);
+ if (out_fd == -1) {
+ fprintf(stderr, "Failed to open output file '%s': %s\n",
+ argv[2], strerror(errno));
+ close(in_fd);
+ return 1;
+ }
+ }
+
+ while (1) {
+ uint16_t in_data[25];
+ uint8_t out_data[32];
+ unsigned int i;
+
+ ret = read(in_fd, in_data, sizeof(in_data));
+ if (ret < 0) {
+ fprintf(stderr, "Failed to read input data: %s\n",
+ strerror(errno));
+ goto done;
+ }
+
+ if ((unsigned)ret < sizeof(in_data)) {
+ if (ret != 0)
+ fprintf(stderr, "%u bytes of stray data at end of input\n",
+ ret);
+ goto done;
+ }
+
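+ /*
+  * Pack 25 10-bit pixels into a 32-byte block, LSB first: bytes 0-29 carry
+  * pixels 0-23, byte 30 and the two LSBs of byte 31 carry pixel 24, and the
+  * top six bits of byte 31 are padding.
+  */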
+ for (i = 0; i < 30; ++i) {
+ unsigned int index = (i * 8) / 10;
+ unsigned int msb_shift = (i * 8) % 10;
+ unsigned int lsb_shift = 10 - msb_shift;
+
+ out_data[i] = ((in_data[index] >> msb_shift) & 0xff)
+ | ((in_data[index+1] << lsb_shift) & 0xff);
+ }
+
+ out_data[30] = (in_data[24] >> 0) & 0xff;
+ out_data[31] = (in_data[24] >> 8) & 0x03;
+
+ ret = write(out_fd, out_data, sizeof(out_data));
+ if (ret < 0) {
+ fprintf(stderr, "Failed to write output data: %s\n",
+ strerror(errno));
+ goto done;
+ }
+ }
+
+done:
+ close(in_fd);
+ if (out_fd != STDOUT_FILENO)
+ close(out_fd);
+
+ return ret ? 1 : 0;
+}
diff --git a/utils/ipu3/ipu3-process.sh b/utils/ipu3/ipu3-process.sh
index bb4abbe8..25bc849f 100755
--- a/utils/ipu3/ipu3-process.sh
+++ b/utils/ipu3/ipu3-process.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipu3-process.sh - Process raw frames with the Intel IPU3
+# Process raw frames with the Intel IPU3
#
# The scripts makes use of the following tools, which are expected to be
# found in $PATH:
diff --git a/utils/ipu3/ipu3-unpack.c b/utils/ipu3/ipu3-unpack.c
index 2dce1038..6ee8c45a 100644
--- a/utils/ipu3/ipu3-unpack.c
+++ b/utils/ipu3/ipu3-unpack.c
@@ -8,6 +8,7 @@
#include <errno.h>
#include <fcntl.h>
+#include <libgen.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
@@ -15,7 +16,7 @@
#include <sys/types.h>
#include <unistd.h>
-static void usage(const char *argv0)
+static void usage(char *argv0)
{
printf("Usage: %s input-file output-file\n", basename(argv0));
printf("Unpack the IPU3 raw Bayer format to 16-bit Bayer\n");
@@ -78,8 +79,8 @@ int main(int argc, char *argv[])
}
ret = write(out_fd, out_data, 50);
- if (ret < -1) {
- fprintf(stderr, "Failed to read input data: %s\n",
+ if (ret == -1) {
+ fprintf(stderr, "Failed to write output data: %s\n",
strerror(errno));
goto done;
}
diff --git a/utils/ipu3/meson.build b/utils/ipu3/meson.build
index 49c45856..c92cc658 100644
--- a/utils/ipu3/meson.build
+++ b/utils/ipu3/meson.build
@@ -1 +1,4 @@
+# SPDX-License-Identifier: CC0-1.0
+
+ipu3_pack = executable('ipu3-pack', 'ipu3-pack.c')
ipu3_unpack = executable('ipu3-unpack', 'ipu3-unpack.c')
diff --git a/utils/meson.build b/utils/meson.build
index f434c79c..95d657ac 100644
--- a/utils/meson.build
+++ b/utils/meson.build
@@ -1 +1,7 @@
+# SPDX-License-Identifier: CC0-1.0
+
+subdir('codegen')
subdir('ipu3')
+
+## Module signing
+gen_ipa_priv_key = files('gen-ipa-priv-key.sh')
diff --git a/utils/raspberrypi/ctt/alsc_only.py b/utils/raspberrypi/ctt/alsc_only.py
new file mode 100755
index 00000000..a521c4ad
--- /dev/null
+++ b/utils/raspberrypi/ctt/alsc_only.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2022, Raspberry Pi Ltd
+#
+# alsc tuning tool
+
+import sys
+
+from ctt import *
+from ctt_tools import parse_input
+
+if __name__ == '__main__':
+ """
+ initialise calibration
+ """
+ if len(sys.argv) == 1:
+ print("""
+ PiSP Lens Shading Camera Tuning Tool version 1.0
+
+ Required Arguments:
+ '-i' : Calibration image directory.
+ '-o' : Name of output json file.
+
+ Optional Arguments:
+ '-t' : Target platform - 'pisp' or 'vc4'. Default 'vc4'
+ '-c' : Config file for the CTT. If not passed, default parameters used.
+ '-l' : Name of output log file. If not passed, 'ctt_log.txt' used.
+ """)
+ quit(0)
+ else:
+ """
+ parse input arguments
+ """
+ json_output, directory, config, log_output, target = parse_input()
+ if target == 'pisp':
+ from ctt_pisp import json_template, grid_size
+ elif target == 'vc4':
+ from ctt_vc4 import json_template, grid_size
+
+ run_ctt(json_output, directory, config, log_output, json_template, grid_size, target, alsc_only=True)
diff --git a/utils/raspberrypi/ctt/cac_only.py b/utils/raspberrypi/ctt/cac_only.py
new file mode 100644
index 00000000..1c0a8193
--- /dev/null
+++ b/utils/raspberrypi/ctt/cac_only.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2023, Raspberry Pi (Trading) Ltd.
+#
+# cac_only.py - cac tuning tool
+
+
+# This file allows you to tune only the chromatic aberration correction.
+# Specify any number of input files on the command line; the tool iterates through
+# them and generates an averaged CAC table from all the input images, which you can
+# then paste into your tuning file.
+
+# It takes .dng files of the dots grid produced by the camera module and calculates
+# the chromatic aberration of each dot. It then works out where each dot was in the
+# image, and uses that to output a table of the shifts across the whole image.
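+#
+# Example invocation (hypothetical file names; any argument containing '.dng'
+# is treated as an input image):
+#     python3 cac_only.py dots_0.dng dots_90.dng -o cac.json -p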
+
+from PIL import Image
+import numpy as np
+import rawpy
+import sys
+import getopt
+
+from ctt_cac import *
+
+
+def cac(filelist, output_filepath, plot_results=False):
+ np.set_printoptions(precision=3)
+ np.set_printoptions(suppress=True)
+
+ # Create arrays to hold all the dots data and their colour offsets
+ red_shift = [] # Format is: [[Dot Center X, Dot Center Y, x shift, y shift]]
+ blue_shift = []
+ # Iterate through the files
+ # Multiple files are recommended to average out the lens aberration through rotations
+ for file in filelist:
+ print("\n Processing file " + str(file))
+ # Read the raw RGB values from the .dng file
+ with rawpy.imread(file) as raw:
+ rgb = raw.postprocess()
+ sizes = (raw.sizes)
+
+ image_size = [sizes[2], sizes[3]] # Image size, X, Y
+ # Create a colour copy of the RGB values to use later in the calibration
+ imout = Image.new(mode="RGB", size=image_size)
+ rgb_image = np.array(imout)
+ # The rgb values need reshaping from a 1d array to a 3d array to be worked with easily
+ rgb.reshape((image_size[0], image_size[1], 3))
+ rgb_image = rgb
+
+ # Pass the RGB image through to the dots locating program
+ # Returns an array of the dots (colour rectangles around the dots), and an array of their locations
+ print("Finding dots")
+ dots, dots_locations = find_dots_locations(rgb_image)
+
+ # Now, analyse each dot. Work out the centroid of each colour channel, and use that to work out
+ # by how far the chromatic aberration has shifted each channel
+ print('Dots found: ' + str(len(dots)))
+
+ for dot, dot_location in zip(dots, dots_locations):
+ if len(dot) > 0:
+ if (dot_location[0] > 0) and (dot_location[1] > 0):
+ ret = analyse_dot(dot, dot_location)
+ red_shift.append(ret[0])
+ blue_shift.append(ret[1])
+
+ # Take our arrays of red shifts and locations, push them through to be interpolated into a 9x9 matrix
+ # for the CAC block to handle and then store these as a .json file to be added to the camera
+ # tuning file
+ print("\nCreating output grid")
+ rx, ry, bx, by = shifts_to_yaml(red_shift, blue_shift, image_size)
+
+ print("CAC correction complete!")
+
+ # The json format that we then paste into the tuning file (manually)
+ sample = '''
+ {
+ "rpi.cac" :
+ {
+ "strength": 1.0,
+ "lut_rx" : [
+ rx_vals
+ ],
+ "lut_ry" : [
+ ry_vals
+ ],
+ "lut_bx" : [
+ bx_vals
+ ],
+ "lut_by" : [
+ by_vals
+ ]
+ }
+ }
+ '''
+
+ # The code below may look incorrect; however, the PiSP (standard) dimensions are flipped in comparison to
+ # PIL image coordinate directions, hence why rx -> ry. Also, the shifts calculated are colour shifts,
+ # and the PiSP block asks for the values it should shift by (hence the * -1, to convert a colour shift into a pixel shift)
+ sample = sample.replace("rx_vals", pprint_array(ry * -1))
+ sample = sample.replace("ry_vals", pprint_array(rx * -1))
+ sample = sample.replace("bx_vals", pprint_array(by * -1))
+ sample = sample.replace("by_vals", pprint_array(bx * -1))
+ print("Successfully converted to JSON")
+ f = open(str(output_filepath), "w+")
+ f.write(sample)
+ f.close()
+ print("Successfully written to json file")
+ '''
+ If you wish to see a plot of the colour channel shifts, add the -p or --plots option
+ Can be a quick way of validating if the data/dots you've got are good, or if you need to
+ change some parameters/take some better images
+ '''
+ if plot_results:
+ plot_shifts(red_shift, blue_shift)
+
+
+if __name__ == "__main__":
+ argv = sys.argv
+ # Detect the input and output file paths
+ arg_output = "output.json"
+ arg_help = "{0} -i <input> -o <output> -p <plot results>".format(argv[0])
+ opts, args = getopt.getopt(argv[1:], "hi:o:p", ["help", "input=", "output=", "plot"])
+
+ output_location = 0
+ input_location = 0
+ filelist = []
+ plot_results = False
+ for i in range(len(argv)):
+ if ("-h") in argv[i]:
+ print(arg_help) # print the help message
+ sys.exit(2)
+ if "-o" in argv[i]:
+ output_location = i
+ if ".dng" in argv[i]:
+ filelist.append(argv[i])
+ if "-p" in argv[i]:
+ plot_results = True
+
+ arg_output = argv[output_location + 1]
+ cac(filelist, arg_output, plot_results)
diff --git a/utils/raspberrypi/ctt/colors.py b/utils/raspberrypi/ctt/colors.py
new file mode 100644
index 00000000..cb4d236b
--- /dev/null
+++ b/utils/raspberrypi/ctt/colors.py
@@ -0,0 +1,30 @@
+# Program to convert from RGB to LAB color space
+def RGB_to_LAB(RGB): # where RGB is a 1x3 array, e.g. RGB = [100, 255, 230]
+ num = 0
+ XYZ = [0, 0, 0]
+ # convert the R, G, B values to X, Y, Z
+ X = RGB[0] * 0.4124 + RGB[1] * 0.3576 + RGB[2] * 0.1805
+ Y = RGB[0] * 0.2126 + RGB[1] * 0.7152 + RGB[2] * 0.0722
+ Z = RGB[0] * 0.0193 + RGB[1] * 0.1192 + RGB[2] * 0.9505
+
+ XYZ[0] = X / 255 * 100
+ XYZ[1] = Y / 255 * 100 # XYZ Must be in range 0 -> 100, so scale down from 255
+ XYZ[2] = Z / 255 * 100
+ XYZ[0] = XYZ[0] / 95.047 # ref_X = 95.047 Observer= 2°, Illuminant= D65
+ XYZ[1] = XYZ[1] / 100.0 # ref_Y = 100.000
+ XYZ[2] = XYZ[2] / 108.883 # ref_Z = 108.883
+ num = 0
+ for value in XYZ:
+ if value > 0.008856:
+ value = value ** (0.3333333333333333)
+ else:
+ value = (7.787 * value) + (16 / 116)
+ XYZ[num] = value
+ num = num + 1
+
+ # L, A, B, values calculated below
+ L = (116 * XYZ[1]) - 16
+ a = 500 * (XYZ[0] - XYZ[1])
+ b = 200 * (XYZ[1] - XYZ[2])
+
+ return [L, a, b]
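+
+
+# Quick sanity check (not executed): with the D65 reference white used above,
+# pure white maps to approximately L = 100, a = 0, b = 0, i.e.
+# RGB_to_LAB([255, 255, 255]) ~ [100.0, 0.0, 0.0]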
diff --git a/utils/raspberrypi/ctt/convert_tuning.py b/utils/raspberrypi/ctt/convert_tuning.py
new file mode 100755
index 00000000..83cf69d4
--- /dev/null
+++ b/utils/raspberrypi/ctt/convert_tuning.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Script to convert version 1.0 Raspberry Pi camera tuning files to version 2.0.
+#
+# Copyright 2022 Raspberry Pi Ltd
+
+import argparse
+import json
+import numpy as np
+import sys
+
+from ctt_pretty_print_json import pretty_print
+from ctt_pisp import grid_size as grid_size_pisp
+from ctt_pisp import json_template as json_template_pisp
+from ctt_vc4 import grid_size as grid_size_vc4
+from ctt_vc4 import json_template as json_template_vc4
+
+
+def interp_2d(in_ls, src_w, src_h, dst_w, dst_h):
+
+ out_ls = np.zeros((dst_h, dst_w))
+ for i in range(src_h):
+ out_ls[i] = np.interp(np.linspace(0, dst_w - 1, dst_w),
+ np.linspace(0, dst_w - 1, src_w),
+ in_ls[i])
+ for i in range(dst_w):
+ out_ls[:,i] = np.interp(np.linspace(0, dst_h - 1, dst_h),
+ np.linspace(0, dst_h - 1, src_h),
+ out_ls[:src_h, i])
+ return out_ls
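+
+# For illustration (not called anywhere): interp_2d(np.ones((12, 16)), 16, 12, 32, 24)
+# returns a 24x32 array of ones, i.e. resampling an already-uniform grid leaves its
+# values unchanged.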
+
+
+def convert_target(in_json: dict, target: str):
+
+ src_w, src_h = grid_size_pisp if target == 'vc4' else grid_size_vc4
+ dst_w, dst_h = grid_size_vc4 if target == 'vc4' else grid_size_pisp
+ json_template = json_template_vc4 if target == 'vc4' else json_template_pisp
+
+ # ALSC grid sizes
+ alsc = next(algo for algo in in_json['algorithms'] if 'rpi.alsc' in algo)['rpi.alsc']
+ for colour in ['calibrations_Cr', 'calibrations_Cb']:
+ if colour not in alsc:
+ continue
+ for temperature in alsc[colour]:
+ in_ls = np.reshape(temperature['table'], (src_h, src_w))
+ out_ls = interp_2d(in_ls, src_w, src_h, dst_w, dst_h)
+ temperature['table'] = np.round(out_ls.flatten(), 3).tolist()
+
+ if 'luminance_lut' in alsc:
+ in_ls = np.reshape(alsc['luminance_lut'], (src_h, src_w))
+ out_ls = interp_2d(in_ls, src_w, src_h, dst_w, dst_h)
+ alsc['luminance_lut'] = np.round(out_ls.flatten(), 3).tolist()
+
+ # Denoise blocks
+ for i, algo in enumerate(in_json['algorithms']):
+ if list(algo.keys())[0] == 'rpi.sdn':
+ in_json['algorithms'][i] = {'rpi.denoise': json_template['rpi.sdn'] if target == 'vc4' else json_template['rpi.denoise']}
+ break
+
+ # AGC mode weights
+ agc = next(algo for algo in in_json['algorithms'] if 'rpi.agc' in algo)['rpi.agc']
+ if 'channels' in agc:
+ for i, channel in enumerate(agc['channels']):
+ target_agc_metering = json_template['rpi.agc']['channels'][i]['metering_modes']
+ for mode, v in channel['metering_modes'].items():
+ v['weights'] = target_agc_metering[mode]['weights']
+ else:
+ for mode, v in agc["metering_modes"].items():
+ target_agc_metering = json_template['rpi.agc']['channels'][0]['metering_modes']
+ v['weights'] = target_agc_metering[mode]['weights']
+
+ # HDR
+ if target == 'pisp':
+ for i, algo in enumerate(in_json['algorithms']):
+ if list(algo.keys())[0] == 'rpi.hdr':
+ in_json['algorithms'][i] = {'rpi.hdr': json_template['rpi.hdr']}
+
+ return in_json
+
+
+def convert_v2(in_json: dict, target: str) -> str:
+
+ if 'version' in in_json.keys() and in_json['version'] == 1.0:
+ converted = {
+ 'version': 2.0,
+ 'target': target,
+ 'algorithms': [{algo: config} for algo, config in in_json.items()]
+ }
+ else:
+ converted = in_json
+
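+ # e.g. a hypothetical v1.0 file of the form {"rpi.black_level": {...}, "rpi.awb": {...}}
+ # becomes {"version": 2.0, "target": target,
+ # "algorithms": [{"rpi.black_level": {...}}, {"rpi.awb": {...}}]}
+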
+ # Convert between vc4 <-> pisp targets. This is a best effort thing.
+ if converted['target'] != target:
+ converted = convert_target(converted, target)
+ converted['target'] = target
+
+ grid_size = grid_size_vc4[0] if target == 'vc4' else grid_size_pisp[0]
+ return pretty_print(converted, custom_elems={'table': grid_size, 'luminance_lut': grid_size})
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=
+ 'Convert the format of the Raspberry Pi camera tuning file from v1.0 to v2.0 and/or the vc4 <-> pisp targets.\n')
+ parser.add_argument('input', type=str, help='Input tuning file.')
+ parser.add_argument('-t', '--target', type=str, help='Target platform.',
+ choices=['pisp', 'vc4'], default='vc4')
+ parser.add_argument('output', type=str, nargs='?',
+ help='Output converted tuning file. If not provided, the input file will be updated in-place.',
+ default=None)
+ args = parser.parse_args()
+
+ with open(args.input, 'r') as f:
+ in_json = json.load(f)
+
+ out_json = convert_v2(in_json, args.target)
+
+ with open(args.output if args.output is not None else args.input, 'w') as f:
+ f.write(out_json)
diff --git a/utils/raspberrypi/ctt/ctt.py b/utils/raspberrypi/ctt/ctt.py
new file mode 100755
index 00000000..96f1b5e6
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt.py
@@ -0,0 +1,802 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool
+
+import os
+import sys
+from ctt_image_load import *
+from ctt_cac import *
+from ctt_ccm import *
+from ctt_awb import *
+from ctt_alsc import *
+from ctt_lux import *
+from ctt_noise import *
+from ctt_geq import *
+from ctt_pretty_print_json import pretty_print
+import random
+import json
+import re
+
+"""
+This file houses the camera object, which is used to perform the calibrations.
+The camera object houses all the calibration images as attributes in three lists:
+ - imgs (macbeth charts)
+ - imgs_alsc (alsc correction images)
+ - imgs_cac (cac correction images)
+Various calibrations are methods of the camera object, and the output is stored
+in a dictionary called self.json.
+Once all the calibration has been completed, the Camera.json is written into a
+json file.
+The camera object initialises its json dictionary by reading from a pre-written
+blank json file. This has been done to avoid reproducing the entire json file
+in the code here, thereby avoiding unnecessary clutter.
+"""
+
+
+"""
+Get the colour and lux values from the filename of each individual image
+"""
+def get_col_lux(string):
+ """
+ Extract colour and lux values from filename
+ """
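+ # e.g. 'macbeth_3000k_1000l.dng' yields (3000, 1000); files that are not
+ # named this way fall through to the defaults handled below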
+ col = re.search(r'([0-9]+)[kK](\.(jpg|jpeg|brcm|dng)|_.*\.(jpg|jpeg|brcm|dng))$', string)
+ lux = re.search(r'([0-9]+)[lL](\.(jpg|jpeg|brcm|dng)|_.*\.(jpg|jpeg|brcm|dng))$', string)
+ try:
+ col = col.group(1)
+ except AttributeError:
+ """
+ Catch error if images labelled incorrectly and pass reasonable defaults
+ """
+ return None, None
+ try:
+ lux = lux.group(1)
+ except AttributeError:
+ """
+ Catch error if images labelled incorrectly and pass reasonable defaults
+ Still returns colour if that has been found.
+ """
+ return col, None
+ return int(col), int(lux)
+
+
+"""
+Camera object that is the backbone of the tuning tool.
+Input is the desired path of the output json.
+"""
+class Camera:
+ def __init__(self, jfile, json):
+ self.path = os.path.dirname(os.path.expanduser(__file__)) + '/'
+ if self.path == '/':
+ self.path = ''
+ self.imgs = []
+ self.imgs_alsc = []
+ self.imgs_cac = []
+ self.log = 'Log created : ' + time.asctime(time.localtime(time.time()))
+ self.log_separator = '\n'+'-'*70+'\n'
+ self.jf = jfile
+ """
+ initial json dict populated by uncalibrated values
+ """
+ self.json = json
+
+ """
+ Perform colour correction calibrations by comparing macbeth patch colours
+ to standard macbeth chart colours.
+ """
+ def ccm_cal(self, do_alsc_colour, grid_size):
+ if 'rpi.ccm' in self.disable:
+ return 1
+ print('\nStarting CCM calibration')
+ self.log_new_sec('CCM')
+ """
+ if image is greyscale then CCM makes no sense
+ """
+ if self.grey:
+ print('\nERROR: Can\'t do CCM on greyscale image!')
+ self.log += '\nERROR: Cannot perform CCM calibration '
+ self.log += 'on greyscale image!\nCCM aborted!'
+ del self.json['rpi.ccm']
+ return 0
+ a = time.time()
+ """
+ Check if alsc tables have been generated, if not then do ccm without
+ alsc
+ """
+ if ("rpi.alsc" not in self.disable) and do_alsc_colour:
+ """
+ case where ALSC colour has been done, so no errors should be
+ expected...
+ """
+ try:
+ cal_cr_list = self.json['rpi.alsc']['calibrations_Cr']
+ cal_cb_list = self.json['rpi.alsc']['calibrations_Cb']
+ self.log += '\nALSC tables found successfully'
+ except KeyError:
+ cal_cr_list, cal_cb_list = None, None
+ print('WARNING! No ALSC tables found for CCM!')
+ print('Performing CCM calibrations without ALSC correction...')
+ self.log += '\nWARNING: No ALSC tables found.\nCCM calibration '
+ self.log += 'performed without ALSC correction...'
+ else:
+ """
+ case where config options result in CCM done without ALSC colour tables
+ """
+ cal_cr_list, cal_cb_list = None, None
+ self.log += '\nWARNING: No ALSC tables found.\nCCM calibration '
+ self.log += 'performed without ALSC correction...'
+
+ """
+ Do CCM calibration
+ """
+ try:
+ ccms = ccm(self, cal_cr_list, cal_cb_list, grid_size)
+ except ArithmeticError:
+ print('ERROR: Matrix is singular!\nTake new pictures and try again...')
+ self.log += '\nERROR: Singular matrix encountered during fit!'
+ self.log += '\nCCM aborted!'
+ return 1
+ """
+ Write output to json
+ """
+ self.json['rpi.ccm']['ccms'] = ccms
+ self.log += '\nCCM calibration written to json file'
+ print('Finished CCM calibration')
+
+ """
+ Perform chromatic aberration correction using multiple dots images.
+ """
+ def cac_cal(self, do_alsc_colour):
+ if 'rpi.cac' in self.disable:
+ return 1
+ print('\nStarting CAC calibration')
+ self.log_new_sec('CAC')
+ """
+ check if cac images have been taken
+ """
+ if len(self.imgs_cac) == 0:
+ print('\nError:\nNo cac calibration images found')
+ self.log += '\nERROR: No CAC calibration images found!'
+ self.log += '\nCAC calibration aborted!'
+ return 1
+ """
+ if image is greyscale then CAC makes no sense
+ """
+ if self.grey:
+ print('\nERROR: Can\'t do CAC on greyscale image!')
+ self.log += '\nERROR: Cannot perform CAC calibration '
+ self.log += 'on greyscale image!\nCAC aborted!'
+ del self.json['rpi.cac']
+ return 0
+ a = time.time()
+ """
+ Check if camera is greyscale or color. If not greyscale, then perform cac
+ """
+ if do_alsc_colour:
+ """
+ Here we have a color sensor. Perform cac
+ """
+ try:
+ cacs = cac(self)
+ except ArithmeticError:
+ print('ERROR: Matrix is singular!\nTake new pictures and try again...')
+ self.log += '\nERROR: Singular matrix encountered during fit!'
+ self.log += '\nCAC aborted!'
+ return 1
+ else:
+ """
+ case where config options suggest greyscale camera. No point in doing CAC
+ """
+ self.log += '\nWARNING: Greyscale camera configured.'
+ self.log += '\nCAC calibration skipped!'
+ return 1
+
+ """
+ Write output to json
+ """
+ self.json['rpi.cac']['cac'] = cacs
+ self.log += '\nCAC calibration written to json file'
+ print('Finished CAC calibration')
+
+
+ """
+ Auto white balance calibration produces a colour curve for
+ various colour temperatures, as well as providing a maximum 'wiggle room'
+ distance from this curve (transverse_neg/pos).
+ """
+ def awb_cal(self, greyworld, do_alsc_colour, grid_size):
+ if 'rpi.awb' in self.disable:
+ return 1
+ print('\nStarting AWB calibration')
+ self.log_new_sec('AWB')
+ """
+ if image is greyscale then AWB makes no sense
+ """
+ if self.grey:
+ print('\nERROR: Can\'t do AWB on greyscale image!')
+ self.log += '\nERROR: Cannot perform AWB calibration '
+ self.log += 'on greyscale image!\nAWB aborted!'
+ del self.json['rpi.awb']
+ return 0
+ """
+ optional set greyworld (e.g. for noir cameras)
+ """
+ if greyworld:
+ self.json['rpi.awb']['bayes'] = 0
+ self.log += '\nGreyworld set'
+ """
+ Check if alsc tables have been generated, if not then do awb without
+ alsc correction
+ """
+ if ("rpi.alsc" not in self.disable) and do_alsc_colour:
+ try:
+ cal_cr_list = self.json['rpi.alsc']['calibrations_Cr']
+ cal_cb_list = self.json['rpi.alsc']['calibrations_Cb']
+ self.log += '\nALSC tables found successfully'
+ except KeyError:
+ cal_cr_list, cal_cb_list = None, None
+ print('ERROR, no ALSC calibrations found for AWB')
+ print('Performing AWB without ALSC tables')
+ self.log += '\nWARNING: No ALSC tables found.\nAWB calibration '
+ self.log += 'performed without ALSC correction...'
+ else:
+ cal_cr_list, cal_cb_list = None, None
+ self.log += '\nWARNING: No ALSC tables found.\nAWB calibration '
+ self.log += 'performed without ALSC correction...'
+ """
+ call calibration function
+ """
+ plot = "rpi.awb" in self.plot
+ awb_out = awb(self, cal_cr_list, cal_cb_list, plot, grid_size)
+ ct_curve, transverse_neg, transverse_pos = awb_out
+ """
+ write output to json
+ """
+ self.json['rpi.awb']['ct_curve'] = ct_curve
+ self.json['rpi.awb']['sensitivity_r'] = 1.0
+ self.json['rpi.awb']['sensitivity_b'] = 1.0
+ self.json['rpi.awb']['transverse_pos'] = transverse_pos
+ self.json['rpi.awb']['transverse_neg'] = transverse_neg
+ self.log += '\nAWB calibration written to json file'
+ print('Finished AWB calibration')
+
+ """
+ Auto lens shading correction completely mitigates the effects of lens shading for each
+ colour channel separately, and then partially corrects for vignetting.
+ The extent of the correction depends on the 'luminance_strength' parameter.
+ """
+ def alsc_cal(self, luminance_strength, do_alsc_colour, grid_size, max_gain=8.0):
+ if 'rpi.alsc' in self.disable:
+ return 1
+ print('\nStarting ALSC calibration')
+ self.log_new_sec('ALSC')
+ """
+ check if alsc images have been taken
+ """
+ if len(self.imgs_alsc) == 0:
+ print('\nError:\nNo alsc calibration images found')
+ self.log += '\nERROR: No ALSC calibration images found!'
+ self.log += '\nALSC calibration aborted!'
+ return 1
+ self.json['rpi.alsc']['luminance_strength'] = luminance_strength
+ if self.grey and do_alsc_colour:
+ print('Greyscale camera so only luminance_lut calculated')
+ do_alsc_colour = False
+ self.log += '\nWARNING: ALSC colour correction cannot be done on '
+ self.log += 'greyscale image!\nALSC colour corrections forced off!'
+ """
+ call calibration function
+ """
+ plot = "rpi.alsc" in self.plot
+ alsc_out = alsc_all(self, do_alsc_colour, plot, grid_size, max_gain=max_gain)
+ cal_cr_list, cal_cb_list, luminance_lut, av_corn = alsc_out
+ """
+ write output to json and finish if not do_alsc_colour
+ """
+ if not do_alsc_colour:
+ self.json['rpi.alsc']['luminance_lut'] = luminance_lut
+ self.json['rpi.alsc']['n_iter'] = 0
+ self.log += '\nALSC calibrations written to json file'
+ self.log += '\nNo colour calibrations performed'
+ print('Finished ALSC calibrations')
+ return 1
+
+ self.json['rpi.alsc']['calibrations_Cr'] = cal_cr_list
+ self.json['rpi.alsc']['calibrations_Cb'] = cal_cb_list
+ self.json['rpi.alsc']['luminance_lut'] = luminance_lut
+ self.log += '\nALSC colour and luminance tables written to json file'
+
+ """
+ The sigmas determine the strength of the adaptive algorithm that
+ cleans up any lens shading that has slipped through the alsc. These are
+ determined by measuring a 'worst-case' difference between two alsc tables
+ that are adjacent in colour space. If, however, only one colour
+ temperature has been provided, then this difference cannot be computed
+ as only one table is available.
+ To determine the sigmas you would have to estimate the error of an alsc
+ table with only the image it was taken on as a check. To avoid circularity,
+ default exaggerated sigmas are used, which can result in too much alsc and
+ is therefore not advised.
+ In general, just take another alsc picture at another colour temperature!
+ """
+
+ if len(self.imgs_alsc) == 1:
+ self.json['rpi.alsc']['sigma'] = 0.005
+ self.json['rpi.alsc']['sigma_Cb'] = 0.005
+ print('\nWarning:\nOnly one alsc calibration found'
+ '\nStandard sigmas used for adaptive algorithm.')
+ print('Finished ALSC calibrations')
+ self.log += '\nWARNING: Only one colour temperature found in '
+ self.log += 'calibration images.\nStandard sigmas used for adaptive '
+ self.log += 'algorithm!'
+ return 1
+
+ """
+ obtain worst-case scenario residual sigmas
+ """
+ sigma_r, sigma_b = get_sigma(self, cal_cr_list, cal_cb_list, grid_size)
+ """
+ write output to json
+ """
+ self.json['rpi.alsc']['sigma'] = np.round(sigma_r, 5)
+ self.json['rpi.alsc']['sigma_Cb'] = np.round(sigma_b, 5)
+ self.log += '\nCalibrated sigmas written to json file'
+ print('Finished ALSC calibrations')
+
+ """
+ Green equalisation fixes problems caused by discrepancies in green
+ channels. This is done by measuring the effect on macbeth chart patches,
+ which ideally would have the same green values throughout.
+ An upper bound linear model is fit, fixing a threshold for the green
+ differences that are corrected.
+ """
+ def geq_cal(self):
+ if 'rpi.geq' in self.disable:
+ return 1
+ print('\nStarting GEQ calibrations')
+ self.log_new_sec('GEQ')
+ """
+ perform calibration
+ """
+ plot = 'rpi.geq' in self.plot
+ slope, offset = geq_fit(self, plot)
+ """
+ write output to json
+ """
+ self.json['rpi.geq']['offset'] = offset
+ self.json['rpi.geq']['slope'] = slope
+ self.log += '\nGEQ calibrations written to json file'
+ print('Finished GEQ calibrations')
+
+ """
+ Lux calibrations allow the lux level of a scene to be estimated by a ratio
+ calculation. Lux values are used in the pipeline for algorithms such as AGC
+ and AWB
+ """
+ def lux_cal(self):
+ if 'rpi.lux' in self.disable:
+ return 1
+ print('\nStarting LUX calibrations')
+ self.log_new_sec('LUX')
+ """
+ The lux calibration is done on a single image. For best effects, the
+ image with lux level closest to 1000 is chosen.
+ """
+ luxes = [Img.lux for Img in self.imgs]
+ argmax = luxes.index(min(luxes, key=lambda l: abs(1000-l)))
+ Img = self.imgs[argmax]
+ self.log += '\nLux found closest to 1000: {} lx'.format(Img.lux)
+ self.log += '\nImage used: ' + Img.name
+ if Img.lux < 50:
+ self.log += '\nWARNING: Low lux could cause inaccurate calibrations!'
+ """
+ do calibration
+ """
+ lux_out, shutter_speed, gain = lux(self, Img)
+ """
+ write output to json
+ """
+ self.json['rpi.lux']['reference_shutter_speed'] = shutter_speed
+ self.json['rpi.lux']['reference_gain'] = gain
+ self.json['rpi.lux']['reference_lux'] = Img.lux
+ self.json['rpi.lux']['reference_Y'] = lux_out
+ self.log += '\nLUX calibrations written to json file'
+ print('Finished LUX calibrations')
+
+ """
+ Noise calibration attempts to describe the noise profile of the sensor. The
+ calibration is run on macbeth images and the final output is taken as the average.
+ """
+ def noise_cal(self):
+ if 'rpi.noise' in self.disable:
+ return 1
+ print('\nStarting NOISE calibrations')
+ self.log_new_sec('NOISE')
+ """
+ run calibration on all images and sort by slope.
+ """
+ plot = "rpi.noise" in self.plot
+ noise_out = sorted([noise(self, Img, plot) for Img in self.imgs], key=lambda x: x[0])
+ self.log += '\nFinished processing images'
+ """
+ take the average of the interquartile
+ """
+ length = len(noise_out)
+ noise_out = np.mean(noise_out[length//4:1+3*length//4], axis=0)
+ self.log += '\nAverage noise profile: constant = {} '.format(int(noise_out[1]))
+ self.log += 'slope = {:.3f}'.format(noise_out[0])
+ """
+ write to json
+ """
+ self.json['rpi.noise']['reference_constant'] = int(noise_out[1])
+ self.json['rpi.noise']['reference_slope'] = round(noise_out[0], 3)
+ self.log += '\nNOISE calibrations written to json'
+ print('Finished NOISE calibrations')
+
+ """
+ Removes json entries that are turned off
+ """
+ def json_remove(self, disable):
+ self.log_new_sec('Disabling Options', cal=False)
+ if len(self.disable) == 0:
+ self.log += '\nNothing disabled!'
+ return 1
+ for key in disable:
+ try:
+ del self.json[key]
+ self.log += '\nDisabled: ' + key
+ except KeyError:
+ self.log += '\nERROR: ' + key + ' not found!'
+ """
+ writes the json dictionary to the raw json file, then makes it pretty
+ """
+ def write_json(self, version=2.0, target='bcm2835', grid_size=(16, 12)):
+ """
+ Write json dictionary to file using our version 2 format
+ """
+
+ out_json = {
+ "version": version,
+ 'target': target if target != 'vc4' else 'bcm2835',
+ "algorithms": [{name: data} for name, data in self.json.items()],
+ }
+
+ with open(self.jf, 'w') as f:
+ f.write(pretty_print(out_json,
+ custom_elems={'table': grid_size[0], 'luminance_lut': grid_size[0]}))
+
+ """
+ add a new section to the log file
+ """
+ def log_new_sec(self, section, cal=True):
+ self.log += '\n'+self.log_separator
+ self.log += section
+ if cal:
+ self.log += ' Calibration'
+ self.log += self.log_separator
+
+ """
+ write script arguments to log file
+ """
+ def log_user_input(self, json_output, directory, config, log_output):
+ self.log_new_sec('User Arguments', cal=False)
+ self.log += '\nJson file output: ' + json_output
+ self.log += '\nCalibration images directory: ' + directory
+ if config is None:
+ self.log += '\nNo configuration file input... using default options'
+ elif config is False:
+ self.log += '\nWARNING: Invalid configuration file path...'
+ self.log += ' using default options'
+ elif config is True:
+ self.log += '\nWARNING: Invalid syntax in configuration file...'
+ self.log += ' using default options'
+ else:
+ self.log += '\nConfiguration file: ' + config
+ if log_output is None:
+ self.log += '\nNo log file path input... using default: ctt_log.txt'
+ else:
+ self.log += '\nLog file output: ' + log_output
+
+ # if log_output
+
+ """
+ write log file
+ """
+ def write_log(self, filename):
+ if filename is None:
+ filename = 'ctt_log.txt'
+ self.log += '\n' + self.log_separator
+ with open(filename, 'w') as logfile:
+ logfile.write(self.log)
+
+ """
+ Add all images from directory, pass into relevant list of images and
+ extract lux and temperature values.
+ """
+ def add_imgs(self, directory, mac_config, blacklevel=-1):
+ self.log_new_sec('Image Loading', cal=False)
+ img_suc_msg = 'Image loaded successfully!'
+ print('\n\nLoading images from '+directory)
+ self.log += '\nDirectory: ' + directory
+ """
+ get list of files
+ """
+ filename_list = get_photos(directory)
+ print("Files found: {}".format(len(filename_list)))
+ self.log += '\nFiles found: {}'.format(len(filename_list))
+ """
+ iterate over files
+ """
+ filename_list.sort()
+ for filename in filename_list:
+ address = directory + filename
+ print('\nLoading image: '+filename)
+ self.log += '\n\nImage: ' + filename
+ """
+ obtain colour and lux value
+ """
+ col, lux = get_col_lux(filename)
+ """
+ Check if image is an alsc calibration image
+ """
+ if 'alsc' in filename:
+ Img = load_image(self, address, mac=False)
+ self.log += '\nIdentified as an ALSC image'
+ """
+ check if image data has been successfully unpacked
+ """
+ if Img == 0:
+ print('\nDISCARDED')
+ self.log += '\nImage discarded!'
+ continue
+ """
+ check that image colour temperature has been successfully obtained
+ """
+ elif col is not None:
+ """
+ if successful, append to list and continue to next image
+ """
+ Img.col = col
+ Img.name = filename
+ self.log += '\nColour temperature: {} K'.format(col)
+ self.imgs_alsc.append(Img)
+ if blacklevel != -1:
+ Img.blacklevel_16 = blacklevel
+ print(img_suc_msg)
+ continue
+ else:
+ print('Error! No colour temperature found!')
+ self.log += '\nWARNING: Error reading colour temperature'
+ self.log += '\nImage discarded!'
+ print('DISCARDED')
+ elif 'cac' in filename:
+ Img = load_image(self, address, mac=False)
+ self.log += '\nIdentified as a CAC image'
+ Img.name = filename
+ self.log += '\nColour temperature: {} K'.format(col)
+ self.imgs_cac.append(Img)
+ if blacklevel != -1:
+ Img.blacklevel_16 = blacklevel
+ print(img_suc_msg)
+ continue
+ else:
+ self.log += '\nIdentified as macbeth chart image'
+ """
+ if image isn't an alsc correction then it must have a lux and a
+ colour temperature value to be useful
+ """
+ if lux is None:
+ print('DISCARDED')
+ self.log += '\nWARNING: Error reading lux value'
+ self.log += '\nImage discarded!'
+ continue
+ Img = load_image(self, address, mac_config)
+ """
+ check that image data has been successfully unpacked
+ """
+ if Img == 0:
+ print('DISCARDED')
+ self.log += '\nImage discarded!'
+ continue
+ else:
+ """
+ if successful, append to list and continue to next image
+ """
+ Img.col, Img.lux = col, lux
+ Img.name = filename
+ self.log += '\nColour temperature: {} K'.format(col)
+ self.log += '\nLux value: {} lx'.format(lux)
+ if blacklevel != -1:
+ Img.blacklevel_16 = blacklevel
+ print(img_suc_msg)
+ self.imgs.append(Img)
+
+ print('\nFinished loading images')
+
+ """
+ Check that usable images have been found
+ Possible errors include:
+ - no macbeth chart
+ - incorrect filename/extension
+ - images from different cameras
+ """
+ def check_imgs(self, macbeth=True):
+ self.log += '\n\nImages found:'
+ self.log += '\nMacbeth : {}'.format(len(self.imgs))
+ self.log += '\nALSC : {} '.format(len(self.imgs_alsc))
+ self.log += '\nCAC: {} '.format(len(self.imgs_cac))
+ self.log += '\n\nCamera metadata'
+ """
+ check usable images found
+ """
+ if len(self.imgs) == 0 and macbeth:
+ print('\nERROR: No usable macbeth chart images found')
+ self.log += '\nERROR: No usable macbeth chart images found'
+ return 0
+ elif len(self.imgs) == 0 and len(self.imgs_alsc) == 0 and len(self.imgs_cac) == 0:
+ print('\nERROR: No usable images found')
+ self.log += '\nERROR: No usable images found'
+ return 0
+ """
+ Double check that every image has come from the same camera...
+ """
+ all_imgs = self.imgs + self.imgs_alsc + self.imgs_cac
+ camNames = list(set([Img.camName for Img in all_imgs]))
+ patterns = list(set([Img.pattern for Img in all_imgs]))
+ sigbitss = list(set([Img.sigbits for Img in all_imgs]))
+ blacklevels = list(set([Img.blacklevel_16 for Img in all_imgs]))
+ sizes = list(set([(Img.w, Img.h) for Img in all_imgs]))
+
+ if 1:
+ self.grey = (patterns[0] == 128)
+ self.blacklevel_16 = blacklevels[0]
+ self.log += '\nName: {}'.format(camNames[0])
+ self.log += '\nBayer pattern case: {}'.format(patterns[0])
+ if self.grey:
+ self.log += '\nGreyscale camera identified'
+ self.log += '\nSignificant bits: {}'.format(sigbitss[0])
+ self.log += '\nBlacklevel: {}'.format(blacklevels[0])
+ self.log += '\nImage size: w = {} h = {}'.format(sizes[0][0], sizes[0][1])
+ return 1
+ else:
+ print('\nERROR: Images from different cameras')
+ self.log += '\nERROR: Images are from different cameras'
+ return 0
+
+
+def run_ctt(json_output, directory, config, log_output, json_template, grid_size, target, alsc_only=False):
+ """
+ check input files are jsons
+ """
+ if json_output[-5:] != '.json':
+ raise ArgError('\n\nError: Output must be a json file!')
+ if config is not None:
+ """
+ check if config file is actually a json
+ """
+ if config[-5:] != '.json':
+ raise ArgError('\n\nError: Config file must be a json file!')
+ """
+ read configurations
+ """
+ try:
+ with open(config, 'r') as config_json:
+ configs = json.load(config_json)
+ except FileNotFoundError:
+ configs = {}
+ config = False
+ except json.decoder.JSONDecodeError:
+ configs = {}
+ config = True
+
+ else:
+ configs = {}
+ """
+ load configurations from config file, if not given then set default
+ """
+ disable = get_config(configs, "disable", [], 'list')
+ plot = get_config(configs, "plot", [], 'list')
+ awb_d = get_config(configs, "awb", {}, 'dict')
+ greyworld = get_config(awb_d, "greyworld", 0, 'bool')
+ alsc_d = get_config(configs, "alsc", {}, 'dict')
+ do_alsc_colour = get_config(alsc_d, "do_alsc_colour", 1, 'bool')
+ luminance_strength = get_config(alsc_d, "luminance_strength", 0.8, 'num')
+ lsc_max_gain = get_config(alsc_d, "max_gain", 8.0, 'num')
+ blacklevel = get_config(configs, "blacklevel", -1, 'num')
+ macbeth_d = get_config(configs, "macbeth", {}, 'dict')
+ mac_small = get_config(macbeth_d, "small", 0, 'bool')
+ mac_show = get_config(macbeth_d, "show", 0, 'bool')
+ mac_config = (mac_small, mac_show)
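+ """
+ a hypothetical config file covering the options above might look like:
+ {
+ "disable": ["rpi.geq"],
+ "plot": ["rpi.alsc"],
+ "awb": {"greyworld": 0},
+ "alsc": {"do_alsc_colour": 1, "luminance_strength": 0.8, "max_gain": 8.0},
+ "blacklevel": 4096,
+ "macbeth": {"small": 0, "show": 0}
+ }
+ """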
+ print("Read lsc_max_gain", lsc_max_gain)
+
+ if blacklevel < -1 or blacklevel >= 2**16:
+ print('\nInvalid blacklevel, defaulted to 64')
+ blacklevel = -1
+
+ if luminance_strength < 0 or luminance_strength > 1:
+ print('\nInvalid luminance_strength, defaulted to 0.5')
+ luminance_strength = 0.5
+
+ """
+ sanitise directory path
+ """
+ if directory[-1] != '/':
+ directory += '/'
+ """
+ initialise tuning tool and load images
+ """
+ try:
+ Cam = Camera(json_output, json=json_template)
+ Cam.log_user_input(json_output, directory, config, log_output)
+ if alsc_only:
+ disable = set(Cam.json.keys()).symmetric_difference({"rpi.alsc"})
+ Cam.disable = disable
+ Cam.plot = plot
+ Cam.add_imgs(directory, mac_config, blacklevel)
+ except FileNotFoundError:
+ raise ArgError('\n\nError: Input image directory not found!')
+
+ """
+ perform calibrations as long as check_imgs returns True.
+ If alsc is activated then it must be done before awb and ccm, since the alsc
+ tables are used in the awb and ccm calibrations.
+ ccm also technically does an awb, but it measures this from the macbeth
+ chart in the image rather than using calibration data.
+ """
+ if Cam.check_imgs(macbeth=not alsc_only):
+ if not alsc_only:
+ Cam.json['rpi.black_level']['black_level'] = Cam.blacklevel_16
+ Cam.json_remove(disable)
+ print('\nSTARTING CALIBRATIONS')
+ Cam.alsc_cal(luminance_strength, do_alsc_colour, grid_size, max_gain=lsc_max_gain)
+ Cam.geq_cal()
+ Cam.lux_cal()
+ Cam.noise_cal()
+ if "rpi.cac" in json_template:
+ Cam.cac_cal(do_alsc_colour)
+ Cam.awb_cal(greyworld, do_alsc_colour, grid_size)
+ Cam.ccm_cal(do_alsc_colour, grid_size)
+
+ print('\nFINISHED CALIBRATIONS')
+ Cam.write_json(target=target, grid_size=grid_size)
+ Cam.write_log(log_output)
+ print('\nCalibrations written to: '+json_output)
+ if log_output is None:
+ log_output = 'ctt_log.txt'
+ print('Log file written to: '+log_output)
+ pass
+ else:
+ Cam.write_log(log_output)
+
+if __name__ == '__main__':
+ """
+ initialise calibration
+ """
+ if len(sys.argv) == 1:
+ print("""
+ PiSP Tuning Tool version 1.0
+ Required Arguments:
+ '-i' : Calibration image directory.
+ '-o' : Name of output json file.
+
+ Optional Arguments:
+ '-t' : Target platform - 'pisp' or 'vc4'. Default 'vc4'
+ '-c' : Config file for the CTT. If not passed, default parameters used.
+ '-l' : Name of output log file. If not passed, 'ctt_log.txt' used.
+ """)
+ quit(0)
+ else:
+ """
+ parse input arguments
+ """
+ json_output, directory, config, log_output, target = parse_input()
+ if target == 'pisp':
+ from ctt_pisp import json_template, grid_size
+ elif target == 'vc4':
+ from ctt_vc4 import json_template, grid_size
+
+ run_ctt(json_output, directory, config, log_output, json_template, grid_size, target)
diff --git a/utils/raspberrypi/ctt/ctt_alsc.py b/utils/raspberrypi/ctt/ctt_alsc.py
new file mode 100644
index 00000000..1d94dfa5
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_alsc.py
@@ -0,0 +1,308 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for ALSC (auto lens shading correction)
+
+from ctt_image_load import *
+import matplotlib.pyplot as plt
+from matplotlib import cm
+from mpl_toolkits.mplot3d import Axes3D
+
+
+"""
+perform alsc calibration on a set of images
+"""
+def alsc_all(Cam, do_alsc_colour, plot, grid_size=(16, 12), max_gain=8.0):
+ imgs_alsc = Cam.imgs_alsc
+ grid_w, grid_h = grid_size
+ """
+ create list of colour temperatures and associated calibration tables
+ """
+ list_col = []
+ list_cr = []
+ list_cb = []
+ list_cg = []
+ for Img in imgs_alsc:
+ col, cr, cb, cg, size = alsc(Cam, Img, do_alsc_colour, plot, grid_size=grid_size, max_gain=max_gain)
+ list_col.append(col)
+ list_cr.append(cr)
+ list_cb.append(cb)
+ list_cg.append(cg)
+ Cam.log += '\n'
+ Cam.log += '\nFinished processing images'
+ w, h, dx, dy = size
+ Cam.log += '\nChannel dimensions: w = {} h = {}'.format(int(w), int(h))
+ Cam.log += '\nGrid rectangle size: w = {} h = {}'.format(dx, dy)
+
+ """
+ convert to numpy array for data manipulation
+ """
+ list_col = np.array(list_col)
+ list_cr = np.array(list_cr)
+ list_cb = np.array(list_cb)
+ list_cg = np.array(list_cg)
+
+ cal_cr_list = []
+ cal_cb_list = []
+
+ """
+ only do colour calculations if required
+ """
+ if do_alsc_colour:
+ Cam.log += '\nALSC colour tables'
+ for ct in sorted(set(list_col)):
+ Cam.log += '\nColour temperature: {} K'.format(ct)
+ """
+ average tables for the same colour temperature
+ """
+ indices = np.where(list_col == ct)
+ ct = int(ct)
+ t_r = np.mean(list_cr[indices], axis=0)
+ t_b = np.mean(list_cb[indices], axis=0)
+ """
+ force numbers to be stored to 3dp.... :(
+ """
+ t_r = np.where((100*t_r) % 1 <= 0.05, t_r+0.001, t_r)
+ t_b = np.where((100*t_b) % 1 <= 0.05, t_b+0.001, t_b)
+ t_r = np.where((100*t_r) % 1 >= 0.95, t_r-0.001, t_r)
+ t_b = np.where((100*t_b) % 1 >= 0.95, t_b-0.001, t_b)
+ t_r = np.round(t_r, 3)
+ t_b = np.round(t_b, 3)
+ r_corners = (t_r[0], t_r[grid_w - 1], t_r[-1], t_r[-grid_w])
+ b_corners = (t_b[0], t_b[grid_w - 1], t_b[-1], t_b[-grid_w])
+ middle_pos = (grid_h // 2 - 1) * grid_w + grid_w - 1
+ r_cen = t_r[middle_pos]+t_r[middle_pos + 1]+t_r[middle_pos + grid_w]+t_r[middle_pos + grid_w + 1]
+ r_cen = round(r_cen/4, 3)
+ b_cen = t_b[middle_pos]+t_b[middle_pos + 1]+t_b[middle_pos + grid_w]+t_b[middle_pos + grid_w + 1]
+ b_cen = round(b_cen/4, 3)
+ Cam.log += '\nRed table corners: {}'.format(r_corners)
+ Cam.log += '\nRed table centre: {}'.format(r_cen)
+ Cam.log += '\nBlue table corners: {}'.format(b_corners)
+ Cam.log += '\nBlue table centre: {}'.format(b_cen)
+ cr_dict = {
+ 'ct': ct,
+ 'table': list(t_r)
+ }
+ cb_dict = {
+ 'ct': ct,
+ 'table': list(t_b)
+ }
+ cal_cr_list.append(cr_dict)
+ cal_cb_list.append(cb_dict)
+ Cam.log += '\n'
+ else:
+ cal_cr_list, cal_cb_list = None, None
+
+ """
+ average all values for luminance shading and return one table for all temperatures
+ """
+ lum_lut = np.mean(list_cg, axis=0)
+ lum_lut = np.where((100*lum_lut) % 1 <= 0.05, lum_lut+0.001, lum_lut)
+ lum_lut = np.where((100*lum_lut) % 1 >= 0.95, lum_lut-0.001, lum_lut)
+ lum_lut = list(np.round(lum_lut, 3))
+
+ """
+ calculate average corner for lsc gain calculation further on
+ """
+ corners = (lum_lut[0], lum_lut[15], lum_lut[-1], lum_lut[-16])
+ Cam.log += '\nLuminance table corners: {}'.format(corners)
+ l_cen = lum_lut[5*16+7]+lum_lut[5*16+8]+lum_lut[6*16+7]+lum_lut[6*16+8]
+ l_cen = round(l_cen/4, 3)
+ Cam.log += '\nLuminance table centre: {}'.format(l_cen)
+ av_corn = np.sum(corners)/4
+
+ return cal_cr_list, cal_cb_list, lum_lut, av_corn
+
+
+"""
+calculate g/r and g/b for 32x32 points arranged in a grid for a single image
+"""
+def alsc(Cam, Img, do_alsc_colour, plot=False, grid_size=(16, 12), max_gain=8.0):
+ Cam.log += '\nProcessing image: ' + Img.name
+ grid_w, grid_h = grid_size
+ """
+ get channel in correct order
+ """
+ channels = [Img.channels[i] for i in Img.order]
+ """
+ calculate size of single rectangle.
+ -(-(w-1)//grid_w) is a ceiling division. w-1 is to deal robustly with the case
+ where w is a multiple of grid_w.
+ """
+ w, h = Img.w/2, Img.h/2
+ dx, dy = int(-(-(w-1)//grid_w)), int(-(-(h-1)//grid_h))
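+ # e.g. a 2028-pixel-wide channel with grid_w = 16 gives dx = -(-2027 // 16) = 127,
+ # i.e. 15 full columns of 127 pixels and a final, narrower column of 123 pixels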
+ """
+ average the green channels into one
+ """
+ av_ch_g = np.mean((channels[1:3]), axis=0)
+ if do_alsc_colour:
+ """
+ obtain grid_w x grid_h grid of intensities for each channel and subtract black level
+ """
+ g = get_grid(av_ch_g, dx, dy, grid_size) - Img.blacklevel_16
+ r = get_grid(channels[0], dx, dy, grid_size) - Img.blacklevel_16
+ b = get_grid(channels[3], dx, dy, grid_size) - Img.blacklevel_16
+ """
+ calculate ratios as 32 bit in order to be supported by medianBlur function
+ """
+ cr = np.reshape(g/r, (grid_h, grid_w)).astype('float32')
+ cb = np.reshape(g/b, (grid_h, grid_w)).astype('float32')
+ cg = np.reshape(1/g, (grid_h, grid_w)).astype('float32')
+ """
+ median blur to remove peaks and save as float 64
+ """
+ cr = cv2.medianBlur(cr, 3).astype('float64')
+ cr = cr/np.min(cr) # gain tables are easier for humans to read if the minimum is 1.0
+ cb = cv2.medianBlur(cb, 3).astype('float64')
+ cb = cb/np.min(cb)
+ cg = cv2.medianBlur(cg, 3).astype('float64')
+ cg = cg/np.min(cg)
+ cg = [min(v, max_gain) for v in cg.flatten()] # never exceed the max luminance gain
+
+ """
+ debugging code showing 2D surface plot of vignetting. Quite useful
+ for a sanity check
+ """
+ if plot:
+ hf = plt.figure(figsize=(8, 8))
+ ha = hf.add_subplot(311, projection='3d')
+ """
+ note Y is plotted as -Y so plot has same axes as image
+ """
+ X, Y = np.meshgrid(range(grid_w), range(grid_h))
+ ha.plot_surface(X, -Y, cr, cmap=cm.coolwarm, linewidth=0)
+ ha.set_title('ALSC Plot\nImg: {}\n\ncr'.format(Img.str))
+ hb = hf.add_subplot(312, projection='3d')
+ hb.plot_surface(X, -Y, cb, cmap=cm.coolwarm, linewidth=0)
+ hb.set_title('cb')
+ hc = hf.add_subplot(313, projection='3d')
+ hc.plot_surface(X, -Y, np.reshape(cg, (grid_h, grid_w)), cmap=cm.coolwarm, linewidth=0)
+ hc.set_title('g')
+ # print(Img.str)
+ plt.show()
+
+ return Img.col, cr.flatten(), cb.flatten(), cg, (w, h, dx, dy)
+
+ else:
+ """
+ only perform calculations for luminance shading
+ """
+ g = get_grid(av_ch_g, dx, dy, grid_size) - Img.blacklevel_16
+ cg = np.reshape(1/g, (grid_h, grid_w)).astype('float32')
+ cg = cv2.medianBlur(cg, 3).astype('float64')
+ cg = cg/np.min(cg)
+ cg = [min(v, max_gain) for v in cg.flatten()] # never exceed the max luminance gain
+
+ if plot:
+ hf = plt.figure(figsize=(8, 8))
+ ha = hf.add_subplot(1, 1, 1, projection='3d')
+ X, Y = np.meshgrid(range(grid_w), range(grid_h))
+ ha.plot_surface(X, -Y, np.reshape(cg, (grid_h, grid_w)), cmap=cm.coolwarm, linewidth=0)
+ ha.set_title('ALSC Plot (Luminance only!)\nImg: {}\n\ncg'.format(Img.str))
+ plt.show()
+
+ return Img.col, None, None, cg, (w, h, dx, dy)
+
+
+"""
+Compresses channel down to a grid of the requested size
+"""
+def get_grid(chan, dx, dy, grid_size):
+ grid_w, grid_h = grid_size
+ grid = []
+ """
+ since the right and bottom borders will not necessarily have rectangles of
+ dimension dx x dy, the last row and column are handled separately.
+ """
+ for i in range(grid_h - 1):
+ for j in range(grid_w - 1):
+ grid.append(np.mean(chan[dy*i:dy*(1+i), dx*j:dx*(1+j)]))
+ grid.append(np.mean(chan[dy*i:dy*(1+i), (grid_w - 1)*dx:]))
+ for j in range(grid_w - 1):
+ grid.append(np.mean(chan[(grid_h - 1)*dy:, dx*j:dx*(1+j)]))
+ grid.append(np.mean(chan[(grid_h - 1)*dy:, (grid_w - 1)*dx:]))
+ """
+ return as np.array, ready for further manipulation
+ """
+ return np.array(grid)
+
+
+"""
+obtains sigmas for red and blue, effectively a measure of the 'error'
+"""
+def get_sigma(Cam, cal_cr_list, cal_cb_list, grid_size):
+ Cam.log += '\nCalculating sigmas'
+ """
+ provided the colour alsc tables were generated for at least two different
+ colour temperatures, sigma is calculated by comparing calibration tables
+ adjacent in colour space
+ """
+ """
+ create list of colour temperatures
+ """
+ cts = [cal['ct'] for cal in cal_cr_list]
+ # print(cts)
+ """
+ calculate sigmas for each adjacent cts and return worst one
+ """
+ sigma_rs = []
+ sigma_bs = []
+ for i in range(len(cts)-1):
+ sigma_rs.append(calc_sigma(cal_cr_list[i]['table'], cal_cr_list[i+1]['table'], grid_size))
+ sigma_bs.append(calc_sigma(cal_cb_list[i]['table'], cal_cb_list[i+1]['table'], grid_size))
+ Cam.log += '\nColour temperature interval {} - {} K'.format(cts[i], cts[i+1])
+ Cam.log += '\nSigma red: {}'.format(sigma_rs[-1])
+ Cam.log += '\nSigma blue: {}'.format(sigma_bs[-1])
+
+ """
+ return maximum sigmas, not necessarily from the same colour temperature
+ interval
+ """
+ sigma_r = max(sigma_rs) if sigma_rs else 0.005
+ sigma_b = max(sigma_bs) if sigma_bs else 0.005
+ Cam.log += '\nMaximum sigmas: Red = {} Blue = {}'.format(sigma_r, sigma_b)
+
+ # print(sigma_rs, sigma_bs)
+ # print(sigma_r, sigma_b)
+ return sigma_r, sigma_b
+
+
+"""
+calculate sigma from two adjacent gain tables
+"""
+def calc_sigma(g1, g2, grid_size):
+ grid_w, grid_h = grid_size
+ """
+ reshape into grid_h x grid_w matrices
+ """
+ g1 = np.reshape(g1, (grid_h, grid_w))
+ g2 = np.reshape(g2, (grid_h, grid_w))
+ """
+ take the ratio of the two gain tables
+ """
+ gg = g1/g2
+ if np.mean(gg) < 1:
+ gg = 1/gg
+ """
+ for each internal patch, compute average difference between it and its 4
+ neighbours, then append to list
+ """
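+ # e.g. for two identical tables gg is 1.0 everywhere, every neighbour
+ # difference is 0 and the returned sigma is 0.0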
+ diffs = []
+ for i in range(grid_h - 2):
+ for j in range(grid_w - 2):
+ """
+ note indexing is incremented by 1 since all patches on borders are
+ not counted
+ """
+ diff = np.abs(gg[i+1][j+1]-gg[i][j+1])
+ diff += np.abs(gg[i+1][j+1]-gg[i+2][j+1])
+ diff += np.abs(gg[i+1][j+1]-gg[i+1][j])
+ diff += np.abs(gg[i+1][j+1]-gg[i+1][j+2])
+ diffs.append(diff/4)
+
+ """
+ return mean difference
+ """
+ mean_diff = np.mean(diffs)
+ return(np.round(mean_diff, 5))
diff --git a/utils/raspberrypi/ctt/ctt_awb.py b/utils/raspberrypi/ctt/ctt_awb.py
new file mode 100644
index 00000000..4af1fe41
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_awb.py
@@ -0,0 +1,377 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for AWB
+
+from ctt_image_load import *
+import matplotlib.pyplot as plt
+from bisect import bisect_left
+from scipy.optimize import fmin
+
+
+"""
+obtain piecewise linear approximation for colour curve
+"""
+def awb(Cam, cal_cr_list, cal_cb_list, plot, grid_size):
+ imgs = Cam.imgs
+ """
+ condense alsc calibration tables into one dictionary
+ """
+ if cal_cr_list is None:
+ colour_cals = None
+ else:
+ colour_cals = {}
+ for cr, cb in zip(cal_cr_list, cal_cb_list):
+ cr_tab = cr['table']
+ cb_tab = cb['table']
+ """
+ normalise tables so min value is 1
+ """
+ cr_tab = cr_tab/np.min(cr_tab)
+ cb_tab = cb_tab/np.min(cb_tab)
+ colour_cals[cr['ct']] = [cr_tab, cb_tab]
+ """
+ obtain data from greyscale macbeth patches
+ """
+ rb_raw = []
+ rbs_hat = []
+ for Img in imgs:
+ Cam.log += '\nProcessing '+Img.name
+ """
+ get greyscale patches with alsc applied if alsc enabled.
+ Note: if alsc is disabled then colour_cals will be set to None and the
+ function will just return the greyscale patches
+ """
+ r_patchs, b_patchs, g_patchs = get_alsc_patches(Img, colour_cals, grid_size=grid_size)
+ """
+ calculate ratio of r, b to g
+ """
+ r_g = np.mean(r_patchs/g_patchs)
+ b_g = np.mean(b_patchs/g_patchs)
+ Cam.log += '\n r : {:.4f} b : {:.4f}'.format(r_g, b_g)
+ """
+ The curve tends to be better behaved in so-called hatspace.
+ R, B, G represent the individual channels. The colour curve is plotted in
+ r, b space, where:
+ r = R/G
+ b = B/G
+ This will be referred to as dehatspace... (sorry)
+ Hatspace is defined as:
+ r_hat = R/(R+B+G)
+ b_hat = B/(R+B+G)
+ To convert from dehatspace to hatspace (hat operation):
+ r_hat = r/(1+r+b)
+ b_hat = b/(1+r+b)
+ To convert from hatspace to dehatspace (dehat operation):
+ r = r_hat/(1-r_hat-b_hat)
+ b = b_hat/(1-r_hat-b_hat)
+ Proof is left as an exercise to the reader...
+ Throughout the code, r and b are sometimes referred to as r_g and b_g
+ as a reminder that they are ratios
+ """
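+ # e.g. r = 0.5, b = 0.25 hat to r_hat = 0.5/1.75 ~ 0.2857 and b_hat = 0.25/1.75 ~ 0.1429,
+ # and dehatting gives back r = 0.2857/(1 - 0.2857 - 0.1429) ~ 0.5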
+ r_g_hat = r_g/(1+r_g+b_g)
+ b_g_hat = b_g/(1+r_g+b_g)
+ Cam.log += '\n r_hat : {:.4f} b_hat : {:.4f}'.format(r_g_hat, b_g_hat)
+ rbs_hat.append((r_g_hat, b_g_hat, Img.col))
+ rb_raw.append((r_g, b_g))
+ Cam.log += '\n'
+
+ Cam.log += '\nFinished processing images'
+ """
+ sort all lists simultaneously by r_hat
+ """
+ rbs_zip = list(zip(rbs_hat, rb_raw))
+ rbs_zip.sort(key=lambda x: x[0][0])
+ rbs_hat, rb_raw = list(zip(*rbs_zip))
+ """
+ unzip tuples ready for processing
+ """
+ rbs_hat = list(zip(*rbs_hat))
+ rb_raw = list(zip(*rb_raw))
+ """
+ fit a quadratic curve to r_g_hat and b_g_hat
+ """
+ a, b, c = np.polyfit(rbs_hat[0], rbs_hat[1], 2)
+ Cam.log += '\nFit quadratic curve in hatspace'
+ """
+ the algorithm now approximates the shortest distance from each point to the
+ curve in dehatspace. Since the fit is done in hatspace, it is easier to
+ find the actual shortest distance in hatspace and use the projection back
+ into dehatspace as an overestimate.
+ The distance will be used for two things:
+ 1) In the case that colour temperature does not strictly decrease with
+ increasing r/g, the closest point to the line will be chosen out of an
+ increasing pair of colours.
+
+ 2) To calculate transverse negative and positive, the maximum positive
+ and negative distance from the line are chosen. This benefits from the
+ overestimate as the transverse pos/neg are upper bound values.
+ """
+ """
+ define fit function
+ """
+ def f(x):
+ return a*x**2 + b*x + c
+ """
+ iterate over points (R, B are x and y coordinates of points) and calculate
+ distance to line in dehatspace
+ """
+ dists = []
+ for i, (R, B) in enumerate(zip(rbs_hat[0], rbs_hat[1])):
+ """
+ define function to minimise as square distance between datapoint and
+ point on curve. Squaring is monotonic so minimising radius squared is
+ equivalent to minimising radius
+ """
+ def f_min(x):
+ y = f(x)
+ return((x-R)**2+(y-B)**2)
+ """
+ perform optimisation with scipy.optimize.fmin
+ """
+ x_hat = fmin(f_min, R, disp=0)[0]
+ y_hat = f(x_hat)
+ """
+ dehat
+ """
+ x = x_hat/(1-x_hat-y_hat)
+ y = y_hat/(1-x_hat-y_hat)
+ rr = R/(1-R-B)
+ bb = B/(1-R-B)
+ """
+ calculate euclidean distance in dehatspace
+ """
+ dist = ((x-rr)**2+(y-bb)**2)**0.5
+ """
+ return negative if point is below the fit curve
+ """
+ if (x+y) > (rr+bb):
+ dist *= -1
+ dists.append(dist)
+ Cam.log += '\nFound closest point on fit line to each point in dehatspace'
+ """
+ calculate wiggle factors in awb. 10% added since this is an upper bound
+ """
+ transverse_neg = - np.min(dists) * 1.1
+ transverse_pos = np.max(dists) * 1.1
+ Cam.log += '\nTransverse pos : {:.5f}'.format(transverse_pos)
+ Cam.log += '\nTransverse neg : {:.5f}'.format(transverse_neg)
+ """
+ set minimum transverse wiggles to 0.01.
+ Wiggle factors dictate how far off of the curve the algorithm searches. 0.01
+ is a suitable minimum that gives better results for lighting conditions not
+ within the calibration dataset. Anything less will generalise poorly.
+ """
+ if transverse_pos < 0.01:
+ transverse_pos = 0.01
+ Cam.log += '\nForced transverse pos to 0.01'
+ if transverse_neg < 0.01:
+ transverse_neg = 0.01
+ Cam.log += '\nForced transverse neg to 0.01'
+
+ """
+ generate new b_hat values at each r_hat according to fit
+ """
+ r_hat_fit = np.array(rbs_hat[0])
+ b_hat_fit = a*r_hat_fit**2 + b*r_hat_fit + c
+ """
+ transform from hatspace to dehatspace
+ """
+ r_fit = r_hat_fit/(1-r_hat_fit-b_hat_fit)
+ b_fit = b_hat_fit/(1-r_hat_fit-b_hat_fit)
+ c_fit = np.round(rbs_hat[2], 0)
+ """
+ round to 4dp
+ """
+ r_fit = np.where((1000*r_fit) % 1 <= 0.05, r_fit+0.0001, r_fit)
+ r_fit = np.where((1000*r_fit) % 1 >= 0.95, r_fit-0.0001, r_fit)
+ b_fit = np.where((1000*b_fit) % 1 <= 0.05, b_fit+0.0001, b_fit)
+ b_fit = np.where((1000*b_fit) % 1 >= 0.95, b_fit-0.0001, b_fit)
+ r_fit = np.round(r_fit, 4)
+ b_fit = np.round(b_fit, 4)
+ """
+ The following code ensures that colour temperature decreases with
+ increasing r/g
+ """
+ """
+ iterate backwards over list for easier indexing
+ """
+ i = len(c_fit) - 1
+ while i > 0:
+ if c_fit[i] > c_fit[i-1]:
+ Cam.log += '\nColour temperature increase found\n'
+ Cam.log += '{} K at r = {} to '.format(c_fit[i-1], r_fit[i-1])
+ Cam.log += '{} K at r = {}'.format(c_fit[i], r_fit[i])
+ """
+ if colour temperature increases then discard point furthest from
+ the transformed fit (dehatspace)
+ """
+ error_1 = abs(dists[i-1])
+ error_2 = abs(dists[i])
+ Cam.log += '\nDistances from fit:\n'
+ Cam.log += '{} K : {:.5f} , '.format(c_fit[i], error_1)
+ Cam.log += '{} K : {:.5f}'.format(c_fit[i-1], error_2)
+ """
+ find bad index
+ note that in python false = 0 and true = 1
+ """
+ bad = i - (error_1 < error_2)
+ Cam.log += '\nPoint at {} K deleted as '.format(c_fit[bad])
+ Cam.log += 'it is furthest from fit'
+ """
+ delete bad point
+ """
+ r_fit = np.delete(r_fit, bad)
+ b_fit = np.delete(b_fit, bad)
+ c_fit = np.delete(c_fit, bad).astype(np.uint16)
+ """
+ note that if a point has been discarded then the length has decreased
+ by one, meaning that decreasing the index by one will reassess the kept
+ point against the next point. It is therefore possible, in theory, for
+ two adjacent points to be discarded, although probably rare
+ """
+ i -= 1
+
+ """
+ return formatted ct curve, ordered by increasing colour temperature
+ """
+ ct_curve = list(np.array(list(zip(b_fit, r_fit, c_fit))).flatten())[::-1]
+ Cam.log += '\nFinal CT curve:'
+ for i in range(len(ct_curve)//3):
+ j = 3*i
+ Cam.log += '\n ct: {} '.format(ct_curve[j])
+ Cam.log += ' r: {} '.format(ct_curve[j+1])
+ Cam.log += ' b: {} '.format(ct_curve[j+2])
+
+ """
+ plotting code for debug
+ """
+ if plot:
+ x = np.linspace(np.min(rbs_hat[0]), np.max(rbs_hat[0]), 100)
+ y = a*x**2 + b*x + c
+ plt.subplot(2, 1, 1)
+ plt.title('hatspace')
+ plt.plot(rbs_hat[0], rbs_hat[1], ls='--', color='blue')
+ plt.plot(x, y, color='green', ls='-')
+ plt.scatter(rbs_hat[0], rbs_hat[1], color='red')
+ for i, ct in enumerate(rbs_hat[2]):
+ plt.annotate(str(ct), (rbs_hat[0][i], rbs_hat[1][i]))
+ plt.xlabel('$\\hat{r}$')
+ plt.ylabel('$\\hat{b}$')
+ """
+ optionally set axes equal to shortest distance so the line really does
+ look perpendicular and everybody is happy
+ """
+ # ax = plt.gca()
+ # ax.set_aspect('equal')
+ plt.grid()
+ plt.subplot(2, 1, 2)
+ plt.title('dehatspace - indoors?')
+ plt.plot(r_fit, b_fit, color='blue')
+ plt.scatter(rb_raw[0], rb_raw[1], color='green')
+ plt.scatter(r_fit, b_fit, color='red')
+ for i, ct in enumerate(c_fit):
+ plt.annotate(str(ct), (r_fit[i], b_fit[i]))
+ plt.xlabel('$r$')
+ plt.ylabel('$b$')
+ optionally set axes equal to shortest distance so the line really does
+ look perpendicular and everybody is happy
+ looks perpendicular and everybody is happy
+ """
+ # ax = plt.gca()
+ # ax.set_aspect('equal')
+ plt.subplots_adjust(hspace=0.5)
+ plt.grid()
+ plt.show()
+ """
+ end of plotting code
+ """
+ return(ct_curve, np.round(transverse_pos, 5), np.round(transverse_neg, 5))
+
+
+"""
+obtain greyscale patches and perform alsc colour correction
+"""
+def get_alsc_patches(Img, colour_cals, grey=True, grid_size=(16, 12)):
+ """
+ get patch centre coordinates, image colour and the actual
+ patches for each channel, remembering to subtract blacklevel
+ If grey then only greyscale patches considered
+ """
+ grid_w, grid_h = grid_size
+ if grey:
+ cen_coords = Img.cen_coords[3::4]
+ col = Img.col
+ patches = [np.array(Img.patches[i]) for i in Img.order]
+ r_patchs = patches[0][3::4] - Img.blacklevel_16
+ b_patchs = patches[3][3::4] - Img.blacklevel_16
+ """
+ note the two green channels are averaged
+ """
+ g_patchs = (patches[1][3::4]+patches[2][3::4])/2 - Img.blacklevel_16
+ else:
+ cen_coords = Img.cen_coords
+ col = Img.col
+ patches = [np.array(Img.patches[i]) for i in Img.order]
+ r_patchs = patches[0] - Img.blacklevel_16
+ b_patchs = patches[3] - Img.blacklevel_16
+ g_patchs = (patches[1]+patches[2])/2 - Img.blacklevel_16
+
+ if colour_cals is None:
+ return r_patchs, b_patchs, g_patchs
+ """
+ find where image colour fits in alsc colour calibration tables
+ """
+ cts = list(colour_cals.keys())
+ pos = bisect_left(cts, col)
+ """
+ if img colour is below minimum or above maximum alsc calibration colour, simply
+ pick extreme closest to img colour
+ """
+ if pos % len(cts) == 0:
+ """
+ this works because -0 = 0 = first and -1 = last index
+ """
+ col_tabs = np.array(colour_cals[cts[-pos//len(cts)]])
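+ # Worked example (illustrative values): with cts = [2500, 5000, 8000],
+ # col below 2500 gives pos = 0 -> -0//3 == 0 -> first table,
+ # col above 8000 gives pos = 3 -> -3//3 == -1 -> last table.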
+ """
+ else, perform linear interpolation between existing alsc colour
+ calibration tables
+ """
+ else:
+ bef = cts[pos-1]
+ aft = cts[pos]
+ da = col-bef
+ db = aft-col
+ bef_tabs = np.array(colour_cals[bef])
+ aft_tabs = np.array(colour_cals[aft])
+ col_tabs = (bef_tabs*db + aft_tabs*da)/(da+db)
+ col_tabs = np.reshape(col_tabs, (2, grid_h, grid_w))
+ """
+ calculate dx, dy used to calculate alsc table
+ """
+ w, h = Img.w/2, Img.h/2
+ dx, dy = int(-(-(w-1)//grid_w)), int(-(-(h-1)//grid_h))
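+ # (the -(-a//b) idiom is just ceiling division, i.e. dx = ceil((w-1)/grid_w))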
+ """
+ make list of pairs of gains for each patch by selecting the correct value
+ in alsc colour calibration table
+ """
+ patch_gains = []
+ for cen in cen_coords:
+ x, y = cen[0]//dx, cen[1]//dy
+ # We could probably do with some better spatial interpolation here?
+ col_gains = (col_tabs[0][y][x], col_tabs[1][y][x])
+ patch_gains.append(col_gains)
+
+ """
+ multiply the r and b channels in each patch by the respective gain, finally
+ performing the alsc colour correction
+ """
+ for i, gains in enumerate(patch_gains):
+ r_patchs[i] = r_patchs[i] * gains[0]
+ b_patchs[i] = b_patchs[i] * gains[1]
+
+ """
+ return greyscale patches, g channel and correct r, b channels
+ """
+ return r_patchs, b_patchs, g_patchs
diff --git a/utils/raspberrypi/ctt/ctt_cac.py b/utils/raspberrypi/ctt/ctt_cac.py
new file mode 100644
index 00000000..5a4c5101
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_cac.py
@@ -0,0 +1,228 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2023, Raspberry Pi Ltd
+#
+# ctt_cac.py - CAC (Chromatic Aberration Correction) tuning tool
+
+from PIL import Image
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib import cm
+
+from ctt_dots_locator import find_dots_locations
+
+
+# This is the wrapper file that creates a JSON entry for you to append
+# to your camera tuning file.
+# It calculates the chromatic aberration at different points throughout
+# the image and uses that to produce a matrix that can then be used
+# in the camera tuning files to correct this aberration.
+
+
+def pprint_array(array):
+ # Function to print the array in a tidier format
+ output = ""
+ for i in range(len(array)):
+ for j in range(len(array[0])):
+ output += str(round(array[i, j], 2)) + ", "
+ # Add the necessary indentation to the array
+ output += "\n "
+ # Cut off the end of the array (nicely formats it)
+ return output[:-22]
+
+
+def plot_shifts(red_shifts, blue_shifts):
+ # If users want, they can pass a command line option to show the shifts on a graph
+ # Can be useful to check that the functions are all working, and that the sample
+ # images are doing the right thing
+ Xs = np.array(red_shifts)[:, 0]
+ Ys = np.array(red_shifts)[:, 1]
+ Zs = np.array(red_shifts)[:, 2]
+ Zs2 = np.array(red_shifts)[:, 3]
+ Zs3 = np.array(blue_shifts)[:, 2]
+ Zs4 = np.array(blue_shifts)[:, 3]
+
+ fig, axs = plt.subplots(2, 2)
+ ax = fig.add_subplot(2, 2, 1, projection='3d')
+ ax.scatter(Xs, Ys, Zs, cmap=cm.jet, linewidth=0)
+ ax.set_title('Red X Shift')
+ ax = fig.add_subplot(2, 2, 2, projection='3d')
+ ax.scatter(Xs, Ys, Zs2, cmap=cm.jet, linewidth=0)
+ ax.set_title('Red Y Shift')
+ ax = fig.add_subplot(2, 2, 3, projection='3d')
+ ax.scatter(Xs, Ys, Zs3, cmap=cm.jet, linewidth=0)
+ ax.set_title('Blue X Shift')
+ ax = fig.add_subplot(2, 2, 4, projection='3d')
+ ax.scatter(Xs, Ys, Zs4, cmap=cm.jet, linewidth=0)
+ ax.set_title('Blue Y Shift')
+ fig.tight_layout()
+ plt.show()
+
+
+def shifts_to_yaml(red_shift, blue_shift, image_dimensions, output_grid_size=9):
+ # Convert the shifts to a numpy array for easier handling and initialise other variables
+ red_shifts = np.array(red_shift)
+ blue_shifts = np.array(blue_shift)
+ # create a grid that's smaller than the output grid, which we then interpolate from to get the output values
+ xrgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ xbgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ yrgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ ybgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+
+ xrsgrid = []
+ xbsgrid = []
+ yrsgrid = []
+ ybsgrid = []
+ xg = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ yg = np.zeros((output_grid_size - 1, output_grid_size - 1))
+
+ # Format the grids - numpy doesn't work for this, it wants a
+ # nice uniformly spaced grid, which we don't know if we have yet, hence the rather mundane setup
+ for x in range(output_grid_size - 1):
+ xrsgrid.append([])
+ yrsgrid.append([])
+ xbsgrid.append([])
+ ybsgrid.append([])
+ for y in range(output_grid_size - 1):
+ xrsgrid[x].append([])
+ yrsgrid[x].append([])
+ xbsgrid[x].append([])
+ ybsgrid[x].append([])
+
+ image_size = (image_dimensions[0], image_dimensions[1])
+ gridxsize = image_size[0] / (output_grid_size - 1)
+ gridysize = image_size[1] / (output_grid_size - 1)
+
+ # Iterate through each dot and its shift values, and put these into the correct grid location
+ for red_shift in red_shifts:
+ xgridloc = int(red_shift[0] / gridxsize)
+ ygridloc = int(red_shift[1] / gridysize)
+ xrsgrid[xgridloc][ygridloc].append(red_shift[2])
+ yrsgrid[xgridloc][ygridloc].append(red_shift[3])
+
+ for blue_shift in blue_shifts:
+ xgridloc = int(blue_shift[0] / gridxsize)
+ ygridloc = int(blue_shift[1] / gridysize)
+ xbsgrid[xgridloc][ygridloc].append(blue_shift[2])
+ ybsgrid[xgridloc][ygridloc].append(blue_shift[3])
+
+ # Now calculate the average pixel shift for each square in the grid
+ for x in range(output_grid_size - 1):
+ for y in range(output_grid_size - 1):
+ xrgrid[x, y] = np.mean(xrsgrid[x][y])
+ yrgrid[x, y] = np.mean(yrsgrid[x][y])
+ xbgrid[x, y] = np.mean(xbsgrid[x][y])
+ ybgrid[x, y] = np.mean(ybsgrid[x][y])
+
+ # Next, we start to interpolate the central points of the grid that gets passed to the tuning file
+ input_grids = np.array([xrgrid, yrgrid, xbgrid, ybgrid])
+ output_grids = np.zeros((4, output_grid_size, output_grid_size))
+
+ # Interpolate the centre of the grid
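+ # Each interior output point is taken as the mean of the four input-grid
+ # cells that surround it.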
+ output_grids[:, 1:-1, 1:-1] = (input_grids[:, 1:, :-1] + input_grids[:, 1:, 1:] + input_grids[:, :-1, 1:] + input_grids[:, :-1, :-1]) / 4
+
+ # Edge cases:
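+ # The edges are extrapolated linearly: take the mean of the two nearest
+ # input cells and push it outwards, away from the adjacent interior output
+ # value, by the same distance again (i.e. 2 * mean - interior).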
+ output_grids[:, 1:-1, 0] = ((input_grids[:, :-1, 0] + input_grids[:, 1:, 0]) / 2 - output_grids[:, 1:-1, 1]) * 2 + output_grids[:, 1:-1, 1]
+ output_grids[:, 1:-1, -1] = ((input_grids[:, :-1, 7] + input_grids[:, 1:, 7]) / 2 - output_grids[:, 1:-1, -2]) * 2 + output_grids[:, 1:-1, -2]
+ output_grids[:, 0, 1:-1] = ((input_grids[:, 0, :-1] + input_grids[:, 0, 1:]) / 2 - output_grids[:, 1, 1:-1]) * 2 + output_grids[:, 1, 1:-1]
+ output_grids[:, -1, 1:-1] = ((input_grids[:, 7, :-1] + input_grids[:, 7, 1:]) / 2 - output_grids[:, -2, 1:-1]) * 2 + output_grids[:, -2, 1:-1]
+
+ # Corner Cases:
+ output_grids[:, 0, 0] = (output_grids[:, 0, 1] - output_grids[:, 1, 1]) + (output_grids[:, 1, 0] - output_grids[:, 1, 1]) + output_grids[:, 1, 1]
+ output_grids[:, 0, -1] = (output_grids[:, 0, -2] - output_grids[:, 1, -2]) + (output_grids[:, 1, -1] - output_grids[:, 1, -2]) + output_grids[:, 1, -2]
+ output_grids[:, -1, 0] = (output_grids[:, -1, 1] - output_grids[:, -2, 1]) + (output_grids[:, -2, 0] - output_grids[:, -2, 1]) + output_grids[:, -2, 1]
+ output_grids[:, -1, -1] = (output_grids[:, -2, -1] - output_grids[:, -2, -2]) + (output_grids[:, -1, -2] - output_grids[:, -2, -2]) + output_grids[:, -2, -2]
+
+ # Below, we swap the x and the y coordinates, and also multiply by a factor of -1
+ # This is due to the PiSP (standard) dimensions being flipped in comparison to
+ # PIL image coordinate directions, hence why xr -> yr. Also, the shifts calculated are colour shifts,
+ # and the PiSP block asks for the values it should shift by (hence the * -1, to convert from colour shift to a pixel shift)
+
+ output_grid_yr, output_grid_xr, output_grid_yb, output_grid_xb = output_grids * -1
+ return output_grid_xr, output_grid_yr, output_grid_xb, output_grid_yb
+
+
+def analyse_dot(dot, dot_location=[0, 0]):
+ # Scan through the dot, calculate the centroid of each colour channel by doing:
+ # pixel channel brightness * distance from top left corner
+ # Sum these, and divide by the sum of each channel's brightnesses to get a centroid for each channel
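+ # Illustrative example: for a single row [0, 2, 2] the weighted coordinate is
+ # np.dot([0, 2, 2], np.arange(3)) / sum([0, 2, 2]) == (0*0 + 2*1 + 2*2) / 4 == 1.5,
+ # i.e. the brightness-weighted centre of that row; the code below does the
+ # same thing summed over the whole 2-D channel.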
+ red_channel = np.array(dot)[:, :, 0]
+ y_num_pixels = len(red_channel[0])
+ x_num_pixels = len(red_channel)
+ yred_weight = np.sum(np.dot(red_channel, np.arange(y_num_pixels)))
+ xred_weight = np.sum(np.dot(np.arange(x_num_pixels), red_channel))
+ red_sum = np.sum(red_channel)
+
+ green_channel = np.array(dot)[:, :, 1]
+ ygreen_weight = np.sum(np.dot(green_channel, np.arange(y_num_pixels)))
+ xgreen_weight = np.sum(np.dot(np.arange(x_num_pixels), green_channel))
+ green_sum = np.sum(green_channel)
+
+ blue_channel = np.array(dot)[:, :, 2]
+ yblue_weight = np.sum(np.dot(blue_channel, np.arange(y_num_pixels)))
+ xblue_weight = np.sum(np.dot(np.arange(x_num_pixels), blue_channel))
+ blue_sum = np.sum(blue_channel)
+
+ # We return this structure. It contains 2 arrays that contain:
+ # the locations of the dot center, along with the channel shifts in the x and y direction:
+ # [ [red_center_x, red_center_y, red_x_shift, red_y_shift], [blue_center_x, blue_center_y, blue_x_shift, blue_y_shift] ]
+
+ return [[int(dot_location[0]) + int(len(dot) / 2), int(dot_location[1]) + int(len(dot[0]) / 2), xred_weight / red_sum - xgreen_weight / green_sum, yred_weight / red_sum - ygreen_weight / green_sum], [dot_location[0] + int(len(dot) / 2), dot_location[1] + int(len(dot[0]) / 2), xblue_weight / blue_sum - xgreen_weight / green_sum, yblue_weight / blue_sum - ygreen_weight / green_sum]]
+
+
+def cac(Cam):
+ filelist = Cam.imgs_cac
+
+ Cam.log += '\nCAC analysing files: {}'.format(str(filelist))
+ np.set_printoptions(precision=3)
+ np.set_printoptions(suppress=True)
+
+ # Create arrays to hold all the dots data and their colour offsets
+ red_shift = [] # Format is: [[Dot Center X, Dot Center Y, x shift, y shift]]
+ blue_shift = []
+ # Iterate through the files
+ # Multiple files are recommended to average out the lens aberration through rotations
+ for file in filelist:
+ Cam.log += '\nCAC processing file'
+ print("\n Processing file")
+ # Read the raw RGB values
+ rgb = file.rgb
+ image_size = [file.h, file.w] # Image size (height, width)
+ # Create a colour copy of the RGB values to use later in the calibration
+ imout = Image.new(mode="RGB", size=image_size)
+ rgb_image = np.array(imout)
+ # The rgb values need reshaping from a 1d array to a 3d array to be worked with easily
+ rgb = rgb.reshape((image_size[0], image_size[1], 3))
+ rgb_image = rgb
+
+ # Pass the RGB image through to the dots locating program
+ # Returns an array of the dots (colour rectangles around the dots), and an array of their locations
+ print("Finding dots")
+ Cam.log += '\nFinding dots'
+ dots, dots_locations = find_dots_locations(rgb_image)
+
+ # Now, analyse each dot. Work out the centroid of each colour channel, and use that to work out
+ # by how far the chromatic aberration has shifted each channel
+ Cam.log += '\nDots found: {}'.format(str(len(dots)))
+ print('Dots found: ' + str(len(dots)))
+
+ for dot, dot_location in zip(dots, dots_locations):
+ if len(dot) > 0:
+ if (dot_location[0] > 0) and (dot_location[1] > 0):
+ ret = analyse_dot(dot, dot_location)
+ red_shift.append(ret[0])
+ blue_shift.append(ret[1])
+
+ # Take our arrays of red shifts and locations, push them through to be interpolated into a 9x9 matrix
+ # for the CAC block to handle and then store these as a .json file to be added to the camera
+ # tuning file
+ print("\nCreating output grid")
+ Cam.log += '\nCreating output grid'
+ rx, ry, bx, by = shifts_to_yaml(red_shift, blue_shift, image_size)
+
+ print("CAC correction complete!")
+ Cam.log += '\nCAC correction complete!'
+
+ # Give the JSON dict back to the main ctt program
+ return {"strength": 1.0, "lut_rx": list(rx.round(2).reshape(81)), "lut_ry": list(ry.round(2).reshape(81)), "lut_bx": list(bx.round(2).reshape(81)), "lut_by": list(by.round(2).reshape(81))}
diff --git a/utils/raspberrypi/ctt/ctt_ccm.py b/utils/raspberrypi/ctt/ctt_ccm.py
new file mode 100644
index 00000000..07c943a8
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_ccm.py
@@ -0,0 +1,404 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for CCM (colour correction matrix)
+
+from ctt_image_load import *
+from ctt_awb import get_alsc_patches
+import colors
+from scipy.optimize import minimize
+from ctt_visualise import visualise_macbeth_chart
+import numpy as np
+"""
+takes 8-bit macbeth chart values, degammas and returns 16 bit
+"""
+
+'''
+This program has several options for deriving the color matrix.
+The first is average. This minimises the average delta E across all patches of
+the macbeth chart. Testing across all cameras yielded this as the most color
+accurate and vivid. Other options are available however.
+Maximum minimises the maximum Delta E of the patches. It iterates through till
+a minimum maximum is found (so that there is
+not one patch that deviates wildly.)
+This yields generally good results but overall the colors are less accurate
+Have a fiddle with maximum and see what you think.
+The final option allows you to select the patches for which to average across.
+This means that you can bias certain patches, for instance if you want the
+reds to be more accurate.
+'''
+
+matrix_selection_types = ["average", "maximum", "patches"]
+typenum = 0 # select from array above, 0 = average, 1 = maximum, 2 = patches
+test_patches = [1, 2, 5, 8, 9, 12, 14]
+
+'''
+Enter patches to test for. Can also be entered twice if you
+would like twice as much bias on one patch.
+'''
+
+
+def degamma(x):
+ x = x / ((2 ** 8) - 1) # takes 255 and scales it down to one
+ x = np.where(x < 0.04045, x / 12.92, ((x + 0.055) / 1.055) ** 2.4)
+ x = x * ((2 ** 16) - 1) # takes one and scales up to 65535, 16 bit color
+ return x
+
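+# A rough usage sketch (values are illustrative and approximate): an 8-bit mid
+# grey of 128 maps to roughly 0.216 in linear space, i.e. about 14150 on the
+# 16-bit scale:
+#     degamma(np.array([128]))  ->  array([~14150.])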
+
+def gamma(x):
+ # Take 3 long array of color values and gamma them
+ return [((colour / 255) ** (1 / 2.4) * 1.055 - 0.055) * 255 for colour in x]
+
+
+"""
+Finds colour correction matrices for a list of images
+"""
+
+
+def ccm(Cam, cal_cr_list, cal_cb_list, grid_size):
+ global matrix_selection_types, typenum
+ imgs = Cam.imgs
+ """
+ standard macbeth chart colour values
+ """
+ m_rgb = np.array([ # these are in RGB
+ [116, 81, 67], # dark skin
+ [199, 147, 129], # light skin
+ [91, 122, 156], # blue sky
+ [90, 108, 64], # foliage
+ [130, 128, 176], # blue flower
+ [92, 190, 172], # bluish green
+ [224, 124, 47], # orange
+ [68, 91, 170], # purplish blue
+ [198, 82, 97], # moderate red
+ [94, 58, 106], # purple
+ [159, 189, 63], # yellow green
+ [230, 162, 39], # orange yellow
+ [35, 63, 147], # blue
+ [67, 149, 74], # green
+ [180, 49, 57], # red
+ [238, 198, 20], # yellow
+ [193, 84, 151], # magenta
+ [0, 136, 170], # cyan (goes out of gamut)
+ [245, 245, 243], # white 9.5
+ [200, 202, 202], # neutral 8
+ [161, 163, 163], # neutral 6.5
+ [121, 121, 122], # neutral 5
+ [82, 84, 86], # neutral 3.5
+ [49, 49, 51] # black 2
+ ])
+ """
+ convert reference colours from srgb to rgb
+ """
+ m_srgb = degamma(m_rgb) # now in 16 bit color.
+
+ # Produce array of LAB values for ideal color chart
+ m_lab = [colors.RGB_to_LAB(color / 256) for color in m_srgb]
+
+ """
+ reorder reference values to match how patches are ordered
+ """
+ m_srgb = np.array([m_srgb[i::6] for i in range(6)]).reshape((24, 3))
+ m_lab = np.array([m_lab[i::6] for i in range(6)]).reshape((24, 3))
+ m_rgb = np.array([m_rgb[i::6] for i in range(6)]).reshape((24, 3))
+ """
+ reformat alsc correction tables or set colour_cals to None if alsc is
+ deactivated
+ """
+ if cal_cr_list is None:
+ colour_cals = None
+ else:
+ colour_cals = {}
+ for cr, cb in zip(cal_cr_list, cal_cb_list):
+ cr_tab = cr['table']
+ cb_tab = cb['table']
+ """
+ normalise tables so min value is 1
+ """
+ cr_tab = cr_tab / np.min(cr_tab)
+ cb_tab = cb_tab / np.min(cb_tab)
+ colour_cals[cr['ct']] = [cr_tab, cb_tab]
+
+ """
+ for each image, perform awb and alsc corrections.
+ Then calculate the colour correction matrix for that image, recording the
+ ccm and the colour temperature.
+ """
+ ccm_tab = {}
+ for Img in imgs:
+ Cam.log += '\nProcessing image: ' + Img.name
+ """
+ get macbeth patches with alsc applied if alsc enabled.
+ Note: if alsc is disabled then colour_cals will be set to None and
+ the function will simply return the macbeth patches
+ """
+ r, b, g = get_alsc_patches(Img, colour_cals, grey=False, grid_size=grid_size)
+ """
+ do awb
+ Note: awb is done by measuring the macbeth chart in the image, rather
+ than from the awb calibration. This is done so the awb will be perfect
+ and the ccm matrices will be more accurate.
+ """
+ r_greys, b_greys, g_greys = r[3::4], b[3::4], g[3::4]
+ r_g = np.mean(r_greys / g_greys)
+ b_g = np.mean(b_greys / g_greys)
+ r = r / r_g
+ b = b / b_g
+ """
+ normalise brightness wrt reference macbeth colours and then average
+ each channel for each patch
+ """
+ gain = np.mean(m_srgb) / np.mean((r, g, b))
+ Cam.log += '\nGain with respect to standard colours: {:.3f}'.format(gain)
+ r = np.mean(gain * r, axis=1)
+ b = np.mean(gain * b, axis=1)
+ g = np.mean(gain * g, axis=1)
+ """
+ calculate ccm matrix
+ """
+ # ==== All of the below should be in sRGB ====
+ sumde = 0
+ ccm = do_ccm(r, g, b, m_srgb)
+ # This is the initial guess that our optimisation code works with.
+ original_ccm = ccm
+ r1 = ccm[0]
+ r2 = ccm[1]
+ g1 = ccm[3]
+ g2 = ccm[4]
+ b1 = ccm[6]
+ b2 = ccm[7]
+ '''
+ THE COLOR MATRIX LOOKS AS BELOW
+ R1 R2 R3   Rval   OutR
+ G1 G2 G3 * Gval = OutG
+ B1 B2 B3   Bval   OutB
+ We will be optimising 6 elements and working out the third element of each row using 1 - r1 - r2 = r3
+ '''
+
+ x0 = [r1, r2, g1, g2, b1, b2]
+ '''
+ We use our old CCM as the initial guess for the program to find the
+ optimised matrix
+ '''
+ result = minimize(guess, x0, args=(r, g, b, m_lab), tol=0.01)
+ '''
+ This produces a color matrix which has the lowest delta E possible,
+ based off the input data. Note it is impossible for this to reach
+ zero since the input data is imperfect
+ '''
+
+ Cam.log += ("\n \n Optimised Matrix Below: \n \n")
+ [r1, r2, g1, g2, b1, b2] = result.x
+ # The new, optimised color correction matrix values
+ optimised_ccm = [r1, r2, (1 - r1 - r2), g1, g2, (1 - g1 - g2), b1, b2, (1 - b1 - b2)]
+
+ # This is the optimised Color Matrix (greys are preserved as each row sums to 1)
+ Cam.log += str(optimised_ccm)
+ Cam.log += "\n Old Color Correction Matrix Below \n"
+ Cam.log += str(ccm)
+
+ formatted_ccm = np.array(original_ccm).reshape((3, 3))
+
+ '''
+ below is a whole load of code that then applies the latest color
+ matrix, and returns LAB values for color. This can then be used
+ to calculate the final delta E
+ '''
+ optimised_ccm_rgb = [] # Original Color Corrected Matrix RGB / LAB
+ optimised_ccm_lab = []
+
+ formatted_optimised_ccm = np.array(optimised_ccm).reshape((3, 3))
+ after_gamma_rgb = []
+ after_gamma_lab = []
+
+ for RGB in zip(r, g, b):
+ ccm_applied_rgb = np.dot(formatted_ccm, (np.array(RGB) / 256))
+ optimised_ccm_rgb.append(gamma(ccm_applied_rgb))
+ optimised_ccm_lab.append(colors.RGB_to_LAB(ccm_applied_rgb))
+
+ optimised_ccm_applied_rgb = np.dot(formatted_optimised_ccm, np.array(RGB) / 256)
+ after_gamma_rgb.append(gamma(optimised_ccm_applied_rgb))
+ after_gamma_lab.append(colors.RGB_to_LAB(optimised_ccm_applied_rgb))
+ '''
+ Gamma After RGB / LAB - not used in calculations, only used for visualisation
+ We now want to spit out some data that shows
+ how the optimisation has improved the color matrices
+ '''
+ Cam.log += "Here are the Improvements"
+
+ # CALCULATE WORST CASE delta e
+ old_worst_delta_e = 0
+ before_average = transform_and_evaluate(formatted_ccm, r, g, b, m_lab)
+ new_worst_delta_e = 0
+ after_average = transform_and_evaluate(formatted_optimised_ccm, r, g, b, m_lab)
+ for i in range(24):
+ old_delta_e = deltae(optimised_ccm_lab[i], m_lab[i]) # Current Old Delta E
+ new_delta_e = deltae(after_gamma_lab[i], m_lab[i]) # Current New Delta E
+ if old_delta_e > old_worst_delta_e:
+ old_worst_delta_e = old_delta_e
+ if new_delta_e > new_worst_delta_e:
+ new_worst_delta_e = new_delta_e
+
+ Cam.log += "Before color correction matrix was optimised, we got an average delta E of " + str(before_average) + " and a maximum delta E of " + str(old_worst_delta_e)
+ Cam.log += "After color correction matrix was optimised, we got an average delta E of " + str(after_average) + " and a maximum delta E of " + str(new_worst_delta_e)
+
+ visualise_macbeth_chart(m_rgb, optimised_ccm_rgb, after_gamma_rgb, str(Img.col) + str(matrix_selection_types[typenum]))
+ '''
+ The program will also save some visualisations of improvements.
+ Very pretty to look at. Top rectangle is ideal, Left square is
+ before optimisation, right square is after.
+ '''
+
+ """
+ if a ccm has already been calculated for that temperature then don't
+ overwrite but save both. They will then be averaged later on
+ """ # Now going to use optimised color matrix, optimised_ccm
+ if Img.col in ccm_tab.keys():
+ ccm_tab[Img.col].append(optimised_ccm)
+ else:
+ ccm_tab[Img.col] = [optimised_ccm]
+ Cam.log += '\n'
+
+ Cam.log += '\nFinished processing images'
+ """
+ average any ccms that share a colour temperature
+ """
+ for k, v in ccm_tab.items():
+ tab = np.mean(v, axis=0)
+ tab = np.where((10000 * tab) % 1 <= 0.05, tab + 0.00001, tab)
+ tab = np.where((10000 * tab) % 1 >= 0.95, tab - 0.00001, tab)
+ ccm_tab[k] = list(np.round(tab, 5))
+ Cam.log += '\nMatrix calculated for colour temperature of {} K'.format(k)
+
+ """
+ return all ccms with respective colour temperature in the correct format,
+ sorted by their colour temperature
+ """
+ sorted_ccms = sorted(ccm_tab.items(), key=lambda kv: kv[0])
+ ccms = []
+ for i in sorted_ccms:
+ ccms.append({
+ 'ct': i[0],
+ 'ccm': i[1]
+ })
+ return ccms
+
+
+def guess(x0, r, g, b, m_lab): # provides a method of numerical feedback for the optimisation code
+ [r1, r2, g1, g2, b1, b2] = x0
+ ccm = np.array([r1, r2, (1 - r1 - r2),
+ g1, g2, (1 - g1 - g2),
+ b1, b2, (1 - b1 - b2)]).reshape((3, 3)) # format the matrix correctly
+ return transform_and_evaluate(ccm, r, g, b, m_lab)
+
+
+def transform_and_evaluate(ccm, r, g, b, m_lab): # Transforms colors to LAB and applies the correction matrix
+ # create list of matrix changed colors
+ realrgb = []
+ for RGB in zip(r, g, b):
+ rgb_post_ccm = np.dot(ccm, np.array(RGB) / 256) # This is RGB values after the color correction matrix has been applied
+ realrgb.append(colors.RGB_to_LAB(rgb_post_ccm))
+ # now compare that with m_lab and return numeric result, averaged for each patch
+ return (sumde(realrgb, m_lab) / 24) # returns an average result of delta E
+
+
+def sumde(listA, listB):
+ global typenum, test_patches
+ sumde = 0
+ maxde = 0
+ patchde = [] # Create array of the delta E values for each patch. useful for optimisation of certain patches
+ for listA_item, listB_item in zip(listA, listB):
+ if maxde < (deltae(listA_item, listB_item)):
+ maxde = deltae(listA_item, listB_item)
+ patchde.append(deltae(listA_item, listB_item))
+ sumde += deltae(listA_item, listB_item)
+ '''
+ The different options specified at the start allow for
+ the maximum to be returned, average or specific patches
+ '''
+ if typenum == 0:
+ return sumde
+ if typenum == 1:
+ return maxde
+ if typenum == 2:
+ output = sum([patchde[test_patch] for test_patch in test_patches])
+ # Selects only certain patches and returns the output for them
+ return output
+
+
+"""
+calculates the ccm for an individual image.
+ccms are calculated in rgb space, and are fit by hand. Although it is a 3x3
+matrix, each row must add up to 1 in order to conserve greyness, simplifying
+calculation.
+The initial CCM is calculated in RGB, and then optimised in LAB color space
+This simplifies the initial calculation but then gets us the accuracy of
+using LAB color space.
+"""
+
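+# A quick sanity check of the row-sum property (illustrative sketch; assumes
+# you already have patch data r, g, b and reference values m_srgb in scope):
+#     ccm = np.array(do_ccm(r, g, b, m_srgb)).reshape((3, 3))
+#     assert np.allclose(ccm.sum(axis=1), 1.0)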
+
+def do_ccm(r, g, b, m_srgb):
+ rb = r-b
+ gb = g-b
+ rb_2s = (rb * rb)
+ rb_gbs = (rb * gb)
+ gb_2s = (gb * gb)
+
+ r_rbs = rb * (m_srgb[..., 0] - b)
+ r_gbs = gb * (m_srgb[..., 0] - b)
+ g_rbs = rb * (m_srgb[..., 1] - b)
+ g_gbs = gb * (m_srgb[..., 1] - b)
+ b_rbs = rb * (m_srgb[..., 2] - b)
+ b_gbs = gb * (m_srgb[..., 2] - b)
+
+ """
+ Obtain least squares fit
+ """
+ rb_2 = np.sum(rb_2s)
+ gb_2 = np.sum(gb_2s)
+ rb_gb = np.sum(rb_gbs)
+ r_rb = np.sum(r_rbs)
+ r_gb = np.sum(r_gbs)
+ g_rb = np.sum(g_rbs)
+ g_gb = np.sum(g_gbs)
+ b_rb = np.sum(b_rbs)
+ b_gb = np.sum(b_gbs)
+
+ det = rb_2 * gb_2 - rb_gb * rb_gb
+
+ """
+ Raise error if matrix is singular...
+ This shouldn't really happen with real data but if it does just take new
+ pictures and try again, not much else to be done unfortunately...
+ """
+ if det < 0.001:
+ raise ArithmeticError
+
+ r_a = (gb_2 * r_rb - rb_gb * r_gb) / det
+ r_b = (rb_2 * r_gb - rb_gb * r_rb) / det
+ """
+ Last row can be calculated by knowing the sum must be 1
+ """
+ r_c = 1 - r_a - r_b
+
+ g_a = (gb_2 * g_rb - rb_gb * g_gb) / det
+ g_b = (rb_2 * g_gb - rb_gb * g_rb) / det
+ g_c = 1 - g_a - g_b
+
+ b_a = (gb_2 * b_rb - rb_gb * b_gb) / det
+ b_b = (rb_2 * b_gb - rb_gb * b_rb) / det
+ b_c = 1 - b_a - b_b
+
+ """
+ format ccm
+ """
+ ccm = [r_a, r_b, r_c, g_a, g_b, g_c, b_a, b_b, b_c]
+
+ return ccm
+
+
+def deltae(colorA, colorB):
+ return ((colorA[0] - colorB[0]) ** 2 + (colorA[1] - colorB[1]) ** 2 + (colorA[2] - colorB[2]) ** 2) ** 0.5
+ # return ((colorA[1]-colorB[1]) ** 2 + (colorA[2]-colorB[2]) ** 2) ** 0.5
+ # UNCOMMENT IF YOU WANT TO NEGLECT LUMINANCE FROM CALCULATION OF DELTA E
diff --git a/utils/raspberrypi/ctt/ctt_config_example.json b/utils/raspberrypi/ctt/ctt_config_example.json
new file mode 100644
index 00000000..1105862c
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_config_example.json
@@ -0,0 +1,17 @@
+{
+ "disable": [],
+ "plot": [],
+ "alsc": {
+ "do_alsc_colour": 1,
+ "luminance_strength": 0.8,
+ "max_gain": 8.0
+ },
+ "awb": {
+ "greyworld": 0
+ },
+ "blacklevel": -1,
+ "macbeth": {
+ "small": 0,
+ "show": 0
+ }
+}
diff --git a/utils/raspberrypi/ctt/ctt_dots_locator.py b/utils/raspberrypi/ctt/ctt_dots_locator.py
new file mode 100644
index 00000000..4945c04b
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_dots_locator.py
@@ -0,0 +1,118 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2023, Raspberry Pi Ltd
+#
+# ctt_dots_locator.py - Used by CAC algorithm to convert image to set of dots
+
+'''
+This file takes the black and white version of the image, along with
+the color version. It then locates the black dots on the image by
+thresholding dark pixels.
+In a rather fun way, the algorithm bounces around the thresholded area in a random path
+We then use the maximum and minimum of these paths to determine the dot shape and size
+This info is then used to return colored dots and locations back to the main file
+'''
+
+import numpy as np
+import random
+from PIL import Image, ImageEnhance, ImageFilter
+
+
+def find_dots_locations(rgb_image, color_threshold=100, dots_edge_avoid=75, image_edge_avoid=10, search_path_length=500, grid_scan_step_size=10, logfile=open("log.txt", "a+")):
+ # Initialise some starting variables
+ pixels = Image.fromarray(rgb_image)
+ pixels = pixels.convert("L")
+ enhancer = ImageEnhance.Contrast(pixels)
+ im_output = enhancer.enhance(1.4)
+ # We smooth it slightly to make it easier for the dot recognition program to locate the dots
+ im_output = im_output.filter(ImageFilter.GaussianBlur(radius=2))
+ bw_image = np.array(im_output)
+
+ location = [0, 0]
+ dots = []
+ dots_location = []
+ # the program takes away the edges - we don't want a dot that is half a circle, the
+ # centroids would all be wrong
+ for x in range(dots_edge_avoid, len(bw_image) - dots_edge_avoid, grid_scan_step_size):
+ for y in range(dots_edge_avoid, len(bw_image[0]) - dots_edge_avoid, grid_scan_step_size):
+ location = [x, y]
+ scrap_dot = False # A variable used to make sure that this is a valid dot
+ if (bw_image[location[0], location[1]] < color_threshold) and not (scrap_dot):
+ heading = "south" # Define a starting direction to move in
+ coords = []
+ for i in range(search_path_length): # Creates a path of length `search_path_length`. This turns out to always be enough to work out the rough shape of the dot.
+ # Now make sure that the thresholded area doesn't come within 10 pixels of the edge of the image; this ensures we capture all the CA
+ if ((image_edge_avoid < location[0] < len(bw_image) - image_edge_avoid) and (image_edge_avoid < location[1] < len(bw_image[0]) - image_edge_avoid)) and not (scrap_dot):
+ if heading == "south":
+ if bw_image[location[0] + 1, location[1]] < color_threshold:
+ # Here, notice it does not go south, but actually goes southeast
+ # This is crucial in ensuring that we make our way around the majority of the dot
+ location[0] = location[0] + 1
+ location[1] = location[1] + 1
+ heading = "south"
+ else:
+ # This happens when we reach a thresholded edge. We now randomly change direction and keep searching
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "west"
+ if dir == 2:
+ heading = "east"
+
+ if heading == "east":
+ if bw_image[location[0], location[1] + 1] < color_threshold:
+ location[1] = location[1] + 1
+ heading = "east"
+ else:
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "north"
+ if dir == 2:
+ heading = "south"
+
+ if heading == "west":
+ if bw_image[location[0], location[1] - 1] < color_threshold:
+ location[1] = location[1] - 1
+ heading = "west"
+ else:
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "north"
+ if dir == 2:
+ heading = "south"
+
+ if heading == "north":
+ if bw_image[location[0] - 1, location[1]] < color_threshold:
+ location[0] = location[0] - 1
+ heading = "north"
+ else:
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "west"
+ if dir == 2:
+ heading = "east"
+ # Log where our particle travels across the dot
+ coords.append([location[0], location[1]])
+ else:
+ scrap_dot = True # We just don't have enough space around the dot, discard this one, and move on
+ if not scrap_dot:
+ # Get the size of the box surrounding the dot
+ x_coords = np.array(coords)[:, 0]
+ y_coords = np.array(coords)[:, 1]
+ hsquaresize = max(list(x_coords)) - min(list(x_coords))
+ vsquaresize = max(list(y_coords)) - min(list(y_coords))
+ # Create the bounding coordinates of the rectangle surrounding the dot
+ # Program uses the dotsize + half of the dotsize to ensure we get all that color fringing
+ extra_space_factor = 0.45
+ top_left_x = (min(list(x_coords)) - int(hsquaresize * extra_space_factor))
+ btm_right_x = max(list(x_coords)) + int(hsquaresize * extra_space_factor)
+ top_left_y = (min(list(y_coords)) - int(vsquaresize * extra_space_factor))
+ btm_right_y = max(list(y_coords)) + int(vsquaresize * extra_space_factor)
+ # Overwrite the area of the dot to ensure we don't use it again
+ bw_image[top_left_x:btm_right_x, top_left_y:btm_right_y] = 255
+ # Add the color version of the dot to the list to send off, along with some coordinates.
+ dots.append(rgb_image[top_left_x:btm_right_x, top_left_y:btm_right_y])
+ dots_location.append([top_left_x, top_left_y])
+ else:
+ # Dot was too close to the image border to be useable
+ pass
+ return dots, dots_location
diff --git a/utils/raspberrypi/ctt/ctt_geq.py b/utils/raspberrypi/ctt/ctt_geq.py
new file mode 100644
index 00000000..5a91ebb4
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_geq.py
@@ -0,0 +1,181 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for GEQ (green equalisation)
+
+from ctt_tools import *
+import matplotlib.pyplot as plt
+import scipy.optimize as optimize
+
+
+"""
+Uses green differences in macbeth patches to fit green equalisation threshold
+model. Ideally, all macbeth chart centres would fall below the threshold as
+these should be corrected by geq.
+"""
+def geq_fit(Cam, plot):
+ imgs = Cam.imgs
+ """
+ green equalisation to mitigate mazing.
+ Fits geq model by looking at difference
+ between greens in macbeth patches
+ """
+ geqs = np.array([geq(Cam, Img)*Img.againQ8_norm for Img in imgs])
+ Cam.log += '\nProcessed all images'
+ geqs = geqs.reshape((-1, 2))
+ """
+ data is sorted by green difference and top half is selected since higher
+ green difference data define the decision boundary.
+ """
+ geqs = np.array(sorted(geqs, key=lambda r: np.abs((r[1]-r[0])/r[0])))
+
+ length = len(geqs)
+ g0 = geqs[length//2:, 0]
+ g1 = geqs[length//2:, 1]
+ gdiff = np.abs(g0-g1)
+ """
+ find linear fit by minimising asymmetric least square errors
+ in order to cover most of the macbeth images.
+ the philosophy here is that every macbeth patch should fall within the
+ threshold, hence the upper bound approach
+ """
+ def f(params):
+ m, c = params
+ a = gdiff - (m*g0+c)
+ """
+ asymmetric square error returns:
+ 1.95 * a**2 if a is positive
+ 0.05 * a**2 if a is negative
+ """
+ return(np.sum(a**2+0.95*np.abs(a)*a))
+
+ initial_guess = [0.01, 500]
+ """
+ Nelder-Mead is usually not the most desirable optimisation method
+ but has been chosen here due to its robustness to undifferentiability
+ (is that a word?)
+ """
+ result = optimize.minimize(f, initial_guess, method='Nelder-Mead')
+ """
+ need to check if the fit worked correctly
+ """
+ if result.success:
+ slope, offset = result.x
+ Cam.log += '\nFit result: slope = {:.5f} '.format(slope)
+ Cam.log += 'offset = {}'.format(int(offset))
+ """
+ optional plotting code
+ """
+ if plot:
+ x = np.linspace(max(g0)*1.1, 100)
+ y = slope*x + offset
+ plt.title('GEQ Asymmetric \'Upper Bound\' Fit')
+ plt.plot(x, y, color='red', ls='--', label='fit')
+ plt.scatter(g0, gdiff, color='b', label='data')
+ plt.ylabel('Difference in green channels')
+ plt.xlabel('Green value')
+
+ """
+ This asymmetric upper bound fit gives values of the correct order of magnitude.
+ The pipeline approximates a 1st derivative of a gaussian with some
+ linear piecewise functions, introducing arbitrary cutoffs. For
+ pessimistic geq, the model parameters have been increased by a
+ scaling factor/constant.
+
+ Feel free to tune these or edit the json files directly if you
+ believe there are still mazing effects left (threshold too low) or if you
+ think it is being overcorrected (threshold too high).
+ We have gone for a one size fits most approach that will produce
+ acceptable results in most applications.
+ """
+ slope *= 1.5
+ offset += 201
+ Cam.log += '\nFit after correction factors: slope = {:.5f}'.format(slope)
+ Cam.log += ' offset = {}'.format(int(offset))
+ """
+ clamp offset at 0 due to pipeline considerations
+ """
+ if offset < 0:
+ Cam.log += '\nOffset raised to 0'
+ offset = 0
+ """
+ optional plotting code
+ """
+ if plot:
+ y2 = slope*x + offset
+ plt.plot(x, y2, color='green', ls='--', label='scaled fit')
+ plt.grid()
+ plt.legend()
+ plt.show()
+
+ """
+ the case where for some reason the fit didn't work correctly
+
+ Transpose the data and then do a least squares linear fit. Transposing the
+ data makes the fit robust to many patches having the same green difference,
+ since they then only contribute to one error minimisation, instead of
+ dragging the entire linear fit down.
+ """
+
+ else:
+ print('\nError! Couldn\'t fit asymmetric least squares')
+ print(result.message)
+ Cam.log += '\nWARNING: Asymmetric least squares fit failed! '
+ Cam.log += 'Standard fit used instead; this could possibly lead to worse results'
+ fit = np.polyfit(gdiff, g0, 1)
+ offset, slope = -fit[1]/fit[0], 1/fit[0]
+ Cam.log += '\nFit result: slope = {:.5f} '.format(slope)
+ Cam.log += 'offset = {}'.format(int(offset))
+ """
+ optional plotting code
+ """
+ if plot:
+ x = np.linspace(max(g0)*1.1, 100)
+ y = slope*x + offset
+ plt.title('GEQ Linear Fit')
+ plt.plot(x, y, color='red', ls='--', label='fit')
+ plt.scatter(g0, gdiff, color='b', label='data')
+ plt.ylabel('Difference in green channels')
+ plt.xlabel('Green value')
+ """
+ Scaling factors (see previous justification)
+ The model here will not be an upper bound so scaling factors have
+ been increased.
+ This method of deriving geq model parameters is extremely arbitrary
+ and undesirable.
+ """
+ slope *= 2.5
+ offset += 301
+ Cam.log += '\nFit after correction factors: slope = {:.5f}'.format(slope)
+ Cam.log += ' offset = {}'.format(int(offset))
+
+ if offset < 0:
+ Cam.log += '\nOffset raised to 0'
+ offset = 0
+
+ """
+ optional plotting code
+ """
+ if plot:
+ y2 = slope*x + offset
+ plt.plot(x, y2, color='green', ls='--', label='scaled fit')
+ plt.legend()
+ plt.grid()
+ plt.show()
+
+ return round(slope, 5), int(offset)
+
+
+""""
+Return green channels of macbeth patches
+returns g0, g1 where
+> g0 is green next to red
+> g1 is green next to blue
+"""
+def geq(Cam, Img):
+ Cam.log += '\nProcessing image {}'.format(Img.name)
+ patches = [Img.patches[i] for i in Img.order][1:3]
+ g_patches = np.array([(np.mean(patches[0][i]), np.mean(patches[1][i])) for i in range(24)])
+ Cam.log += '\n'
+ return(g_patches)
diff --git a/utils/raspberrypi/ctt/ctt_image_load.py b/utils/raspberrypi/ctt/ctt_image_load.py
new file mode 100644
index 00000000..531de328
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_image_load.py
@@ -0,0 +1,455 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019-2020, Raspberry Pi Ltd
+#
+# camera tuning tool image loading
+
+from ctt_tools import *
+from ctt_macbeth_locator import *
+import json
+import pyexiv2 as pyexif
+import rawpy as raw
+
+
+"""
+The Image class loads an image from raw data and extracts its metadata.
+
+Once the image has been extracted from the data, it finds 24 16x16 patches for
+each channel, centred on the macbeth chart squares
+"""
+class Image:
+ def __init__(self, buf):
+ self.buf = buf
+ self.patches = None
+ self.saturated = False
+
+ '''
+ obtain metadata from buffer
+ '''
+ def get_meta(self):
+ self.ver = ba_to_b(self.buf[4:5])
+ self.w = ba_to_b(self.buf[0xd0:0xd2])
+ self.h = ba_to_b(self.buf[0xd2:0xd4])
+ self.pad = ba_to_b(self.buf[0xd4:0xd6])
+ self.fmt = self.buf[0xf5]
+ self.sigbits = 2*self.fmt + 4
+ self.pattern = self.buf[0xf4]
+ self.exposure = ba_to_b(self.buf[0x90:0x94])
+ self.againQ8 = ba_to_b(self.buf[0x94:0x96])
+ self.againQ8_norm = self.againQ8/256
+ camName = self.buf[0x10:0x10+128]
+ camName_end = camName.find(0x00)
+ self.camName = self.buf[0x10:0x10+128][:camName_end].decode()
+
+ """
+ Channel order depending on bayer pattern
+ """
+ bayer_case = {
+ 0: (0, 1, 2, 3), # red
+ 1: (2, 0, 3, 1), # green next to red
+ 2: (3, 2, 1, 0), # green next to blue
+ 3: (1, 0, 3, 2), # blue
+ 128: (0, 1, 2, 3) # arbitrary order for greyscale case
+ }
+ self.order = bayer_case[self.pattern]
+
+ '''
+ manual blacklevel - not robust
+ '''
+ if 'ov5647' in self.camName:
+ self.blacklevel = 16
+ else:
+ self.blacklevel = 64
+ self.blacklevel_16 = self.blacklevel << (6)
+ return 1
+
+ '''
+ print metadata for debug
+ '''
+ def print_meta(self):
+ print('\nData:')
+ print(' ver = {}'.format(self.ver))
+ print(' w = {}'.format(self.w))
+ print(' h = {}'.format(self.h))
+ print(' pad = {}'.format(self.pad))
+ print(' fmt = {}'.format(self.fmt))
+ print(' sigbits = {}'.format(self.sigbits))
+ print(' pattern = {}'.format(self.pattern))
+ print(' exposure = {}'.format(self.exposure))
+ print(' againQ8 = {}'.format(self.againQ8))
+ print(' againQ8_norm = {}'.format(self.againQ8_norm))
+ print(' camName = {}'.format(self.camName))
+ print(' blacklevel = {}'.format(self.blacklevel))
+ print(' blacklevel_16 = {}'.format(self.blacklevel_16))
+
+ return 1
+
+ """
+ get image from raw scanline data
+ """
+ def get_image(self, raw):
+ self.dptr = []
+ """
+ check if data is 10 or 12 bits
+ """
+ if self.sigbits == 10:
+ """
+ calc length of scanline
+ """
+ lin_len = ((((((self.w+self.pad+3)>>2)) * 5)+31)>>5) * 32
+ """
+ stack scan lines into matrix
+ """
+ raw = np.array(raw).reshape(-1, lin_len).astype(np.int64)[:self.h, ...]
+ """
+ separate the 5 bytes in each package, stopping when w is satisfied
+ """
+ ba0 = raw[..., 0:5*((self.w+3)>>2):5]
+ ba1 = raw[..., 1:5*((self.w+3)>>2):5]
+ ba2 = raw[..., 2:5*((self.w+3)>>2):5]
+ ba3 = raw[..., 3:5*((self.w+3)>>2):5]
+ ba4 = raw[..., 4:5*((self.w+3)>>2):5]
+ """
+ assemble 10 bit numbers
+ """
+ ch0 = np.left_shift((np.left_shift(ba0, 2) + (ba4 % 4)), 6)
+ ch1 = np.left_shift((np.left_shift(ba1, 2) + (np.right_shift(ba4, 2) % 4)), 6)
+ ch2 = np.left_shift((np.left_shift(ba2, 2) + (np.right_shift(ba4, 4) % 4)), 6)
+ ch3 = np.left_shift((np.left_shift(ba3, 2) + (np.right_shift(ba4, 6) % 4)), 6)
+ """
+ interleave bits
+ """
+ mat = np.empty((self.h, self.w), dtype=ch0.dtype)
+
+ mat[..., 0::4] = ch0
+ mat[..., 1::4] = ch1
+ mat[..., 2::4] = ch2
+ mat[..., 3::4] = ch3
+
+ """
+ There is some memory leaking somewhere in the code. This code here
+ seemed to make things good enough that the code would run for
+ reasonable numbers of images, however this is technically just a
+ workaround. (sorry)
+ """
+ ba0, ba1, ba2, ba3, ba4 = None, None, None, None, None
+ del ba0, ba1, ba2, ba3, ba4
+ ch0, ch1, ch2, ch3 = None, None, None, None
+ del ch0, ch1, ch2, ch3
+
+ """
+ same as before but 12 bit case
+ """
+ elif self.sigbits == 12:
+ lin_len = ((((((self.w+self.pad+1)>>1)) * 3)+31)>>5) * 32
+ raw = np.array(raw).reshape(-1, lin_len).astype(np.int64)[:self.h, ...]
+ ba0 = raw[..., 0:3*((self.w+1)>>1):3]
+ ba1 = raw[..., 1:3*((self.w+1)>>1):3]
+ ba2 = raw[..., 2:3*((self.w+1)>>1):3]
+ ch0 = np.left_shift((np.left_shift(ba0, 4) + ba2 % 16), 4)
+ ch1 = np.left_shift((np.left_shift(ba1, 4) + (np.right_shift(ba2, 4)) % 16), 4)
+ mat = np.empty((self.h, self.w), dtype=ch0.dtype)
+ mat[..., 0::2] = ch0
+ mat[..., 1::2] = ch1
+
+ else:
+ """
+ data is neither 10 nor 12 bit, or the data is incorrect
+ """
+ print('ERROR: wrong bit format, only 10 or 12 bit supported')
+ return 0
+
+ """
+ separate bayer channels
+ """
+ c0 = mat[0::2, 0::2]
+ c1 = mat[0::2, 1::2]
+ c2 = mat[1::2, 0::2]
+ c3 = mat[1::2, 1::2]
+ self.channels = [c0, c1, c2, c3]
+ return 1
+
+ """
+ obtain 16x16 patch centred at macbeth square centre for each channel
+ """
+ def get_patches(self, cen_coords, size=16):
+ """
+ obtain channel widths and heights
+ """
+ ch_w, ch_h = self.w, self.h
+ cen_coords = list(np.array((cen_coords[0])).astype(np.int32))
+ self.cen_coords = cen_coords
+ """
+ squares are ordered by stacking macbeth chart columns from
+ left to right. Some useful patch indices:
+ white = 3
+ black = 23
+ 'reds' = 9, 10
+ 'blues' = 2, 5, 8, 20, 22
+ 'greens' = 6, 12, 17
+ greyscale = 3, 7, 11, 15, 19, 23
+ """
+ all_patches = []
+ for ch in self.channels:
+ ch_patches = []
+ for cen in cen_coords:
+ '''
+ macbeth centre is placed at top left of central 2x2 patch
+ to account for rounding
+ Patch pixels are sorted by pixel brightness so spatial
+ information is lost.
+ '''
+ patch = ch[cen[1]-7:cen[1]+9, cen[0]-7:cen[0]+9].flatten()
+ patch.sort()
+ if patch[-5] == (2**self.sigbits-1)*2**(16-self.sigbits):
+ self.saturated = True
+ ch_patches.append(patch)
+ # print('\nNew Patch\n')
+ all_patches.append(ch_patches)
+ # print('\n\nNew Channel\n\n')
+ self.patches = all_patches
+ return 1
+
+
+def brcm_load_image(Cam, im_str):
+ """
+ Load image where raw data and metadata is in the BRCM format
+ """
+ try:
+ """
+ create byte array
+ """
+ with open(im_str, 'rb') as image:
+ f = image.read()
+ b = bytearray(f)
+ """
+ return error if incorrect image address
+ """
+ except FileNotFoundError:
+ print('\nERROR:\nInvalid image address')
+ Cam.log += '\nWARNING: Invalid image address'
+ return 0
+
+ """
+ return error if problem reading file
+ """
+ if f is None:
+ print('\nERROR:\nProblem reading file')
+ Cam.log += '\nWARNING: Problem reading file'
+ return 0
+
+ # print('\nLooking for EOI and BRCM header')
+ """
+ find end of image followed by BRCM header by turning
+ bytearray into hex string and string matching with regexp
+ """
+ start = -1
+ match = bytearray(b'\xff\xd9@BRCM')
+ match_str = binascii.hexlify(match)
+ b_str = binascii.hexlify(b)
+ """
+ note index is divided by two to go from string to hex
+ """
+ indices = [m.start()//2 for m in re.finditer(match_str, b_str)]
+ # print(indices)
+ try:
+ start = indices[0] + 3
+ except IndexError:
+ print('\nERROR:\nNo Broadcom header found')
+ Cam.log += '\nWARNING: No Broadcom header found!'
+ return 0
+ """
+ extract data after header
+ """
+ # print('\nExtracting data after header')
+ buf = b[start:start+32768]
+ Img = Image(buf)
+ Img.str = im_str
+ # print('Data found successfully')
+
+ """
+ obtain metadata
+ """
+ # print('\nReading metadata')
+ Img.get_meta()
+ Cam.log += '\nExposure : {} us'.format(Img.exposure)
+ Cam.log += '\nNormalised gain : {}'.format(Img.againQ8_norm)
+ # print('Metadata read successfully')
+
+ """
+ obtain raw image data
+ """
+ # print('\nObtaining raw image data')
+ raw = b[start+32768:]
+ Img.get_image(raw)
+ """
+ delete raw to stop memory errors
+ """
+ raw = None
+ del raw
+ # print('Raw image data obtained successfully')
+
+ return Img
+
+
+def dng_load_image(Cam, im_str):
+ try:
+ Img = Image(None)
+
+ # RawPy doesn't load all the image tags that we need, so we use py3exiv2
+ metadata = pyexif.ImageMetadata(im_str)
+ metadata.read()
+
+ Img.ver = 100 # random value
+ """
+ The DNG and TIFF/EP specifications use different IFDs to store the raw
+ image data and the Exif tags. DNG stores them in a SubIFD and in an Exif
+ IFD respectively (named "SubImage1" and "Photo" by pyexiv2), while
+ TIFF/EP stores them both in IFD0 (name "Image"). Both are used in "DNG"
+ files, with libcamera-apps following the DNG recommendation and
+ applications based on picamera2 following TIFF/EP.
+
+ This code detects which tags are being used, and therefore extracts the
+ correct values.
+ """
+ try:
+ Img.w = metadata['Exif.SubImage1.ImageWidth'].value
+ subimage = "SubImage1"
+ photo = "Photo"
+ except KeyError:
+ Img.w = metadata['Exif.Image.ImageWidth'].value
+ subimage = "Image"
+ photo = "Image"
+ Img.pad = 0
+ Img.h = metadata[f'Exif.{subimage}.ImageLength'].value
+ white = metadata[f'Exif.{subimage}.WhiteLevel'].value
+ Img.sigbits = int(white).bit_length()
+ Img.fmt = (Img.sigbits - 4) // 2
+ Img.exposure = int(metadata[f'Exif.{photo}.ExposureTime'].value * 1000000)
+ Img.againQ8 = metadata[f'Exif.{photo}.ISOSpeedRatings'].value * 256 / 100
+ Img.againQ8_norm = Img.againQ8 / 256
+ Img.camName = metadata['Exif.Image.Model'].value
+ Img.blacklevel = int(metadata[f'Exif.{subimage}.BlackLevel'].value[0])
+ Img.blacklevel_16 = Img.blacklevel << (16 - Img.sigbits)
+ bayer_case = {
+ '0 1 1 2': (0, (0, 1, 2, 3)),
+ '1 2 0 1': (1, (2, 0, 3, 1)),
+ '2 1 1 0': (2, (3, 2, 1, 0)),
+ '1 0 2 1': (3, (1, 0, 3, 2))
+ }
+ cfa_pattern = metadata[f'Exif.{subimage}.CFAPattern'].value
+ Img.pattern = bayer_case[cfa_pattern][0]
+ Img.order = bayer_case[cfa_pattern][1]
+
+ # Now use RawPy to get the raw Bayer pixels
+ raw_im = raw.imread(im_str)
+ raw_data = raw_im.raw_image
+ shift = 16 - Img.sigbits
+ c0 = np.left_shift(raw_data[0::2, 0::2].astype(np.int64), shift)
+ c1 = np.left_shift(raw_data[0::2, 1::2].astype(np.int64), shift)
+ c2 = np.left_shift(raw_data[1::2, 0::2].astype(np.int64), shift)
+ c3 = np.left_shift(raw_data[1::2, 1::2].astype(np.int64), shift)
+ Img.channels = [c0, c1, c2, c3]
+ Img.rgb = raw_im.postprocess()
+
+ except Exception:
+ print("\nERROR: failed to load DNG file", im_str)
+ print("Either file does not exist or is incompatible")
+ Cam.log += '\nERROR: DNG file does not exist or is incompatible'
+ raise
+
+ return Img
+
+
+'''
+load image from file location and perform calibration
+check correct filetype
+
+mac boolean is true if image is expected to contain macbeth chart and false
+if not (alsc images don't have macbeth charts)
+'''
+def load_image(Cam, im_str, mac_config=None, show=False, mac=True, show_meta=False):
+ """
+ check image is correct filetype
+ """
+ if '.jpg' in im_str or '.jpeg' in im_str or '.brcm' in im_str or '.dng' in im_str:
+ if '.dng' in im_str:
+ Img = dng_load_image(Cam, im_str)
+ else:
+ Img = brcm_load_image(Cam, im_str)
+ """
+ handle errors smoothly if loading image failed
+ """
+ if Img == 0:
+ return 0
+ if show_meta:
+ Img.print_meta()
+
+ if mac:
+ """
+ find macbeth centres, discarding images that are too dark or light
+ """
+ av_chan = (np.mean(np.array(Img.channels), axis=0)/(2**16))
+ av_val = np.mean(av_chan)
+ # print(av_val)
+ if av_val < Img.blacklevel_16/(2**16)+1/64:
+ macbeth = None
+ print('\nError: Image too dark!')
+ Cam.log += '\nWARNING: Image too dark!'
+ else:
+ macbeth = find_macbeth(Cam, av_chan, mac_config)
+
+ """
+ if no macbeth found return error
+ """
+ if macbeth is None:
+ print('\nERROR: No macbeth chart found')
+ return 0
+ mac_cen_coords = macbeth[1]
+ # print('\nMacbeth centres located successfully')
+
+ """
+ obtain image patches
+ """
+ # print('\nObtaining image patches')
+ Img.get_patches(mac_cen_coords)
+ if Img.saturated:
+ print('\nERROR: Macbeth patches have saturated')
+ Cam.log += '\nWARNING: Macbeth patches have saturated!'
+ return 0
+
+ """
+ clear memory
+ """
+ Img.buf = None
+ del Img.buf
+
+ # print('Image patches obtained successfully')
+
+ """
+ optional debug
+ """
+ if show and __name__ == '__main__':
+ copy = sum(Img.channels)/2**18
+ copy = np.reshape(copy, (Img.h//2, Img.w//2)).astype(np.float64)
+ copy, _ = reshape(copy, 800)
+ represent(copy)
+
+ return Img
+
+ """
+ return error if incorrect filetype
+ """
+ else:
+ # print('\nERROR:\nInvalid file extension')
+ return 0
+
+
+"""
+bytearray splice to number little endian
+"""
+def ba_to_b(b):
+ total = 0
+ for i in range(len(b)):
+ total += 256**i * b[i]
+ return total
diff --git a/utils/raspberrypi/ctt/ctt_lux.py b/utils/raspberrypi/ctt/ctt_lux.py
new file mode 100644
index 00000000..46be1512
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_lux.py
@@ -0,0 +1,61 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for lux level
+
+from ctt_tools import *
+
+
+"""
+Find lux values from metadata and calculate Y
+"""
+def lux(Cam, Img):
+ shutter_speed = Img.exposure
+ gain = Img.againQ8_norm
+ aperture = 1
+ Cam.log += '\nShutter speed = {}'.format(shutter_speed)
+ Cam.log += '\nGain = {}'.format(gain)
+ Cam.log += '\nAperture = {}'.format(aperture)
+ patches = [Img.patches[i] for i in Img.order]
+ channels = [Img.channels[i] for i in Img.order]
+ return lux_calc(Cam, Img, patches, channels), shutter_speed, gain
+
+
+"""
+perform lux calibration on bayer channels
+"""
+def lux_calc(Cam, Img, patches, channels):
+ """
+ find means color channels on grey patches
+ """
+ ap_r = np.mean(patches[0][3::4])
+ ap_g = (np.mean(patches[1][3::4])+np.mean(patches[2][3::4]))/2
+ ap_b = np.mean(patches[3][3::4])
+ Cam.log += '\nAverage channel values on grey patches:'
+ Cam.log += '\nRed = {:.0f} Green = {:.0f} Blue = {:.0f}'.format(ap_r, ap_g, ap_b)
+ # print(ap_r, ap_g, ap_b)
+ """
+ calculate channel gains
+ """
+ gr = ap_g/ap_r
+ gb = ap_g/ap_b
+ Cam.log += '\nChannel gains: Red = {:.3f} Blue = {:.3f}'.format(gr, gb)
+
+ """
+ find the mean of each colour channel over the image and scale by gain
+ note greens are averaged together (treated as one channel)
+ """
+ a_r = np.mean(channels[0])*gr
+ a_g = (np.mean(channels[1])+np.mean(channels[2]))/2
+ a_b = np.mean(channels[3])*gb
+ Cam.log += '\nAverage channel values over entire image scaled by channel gains:'
+ Cam.log += '\nRed = {:.0f} Green = {:.0f} Blue = {:.0f}'.format(a_r, a_g, a_b)
+ # print(a_r, a_g, a_b)
+ """
+ Calculate y with top row of yuv matrix
+ """
+ y = 0.299*a_r + 0.587*a_g + 0.114*a_b
+ Cam.log += '\nY value calculated: {}'.format(int(y))
+ # print(y)
+ return int(y)
diff --git a/utils/raspberrypi/ctt/ctt_macbeth_locator.py b/utils/raspberrypi/ctt/ctt_macbeth_locator.py
new file mode 100644
index 00000000..f22dbf31
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_macbeth_locator.py
@@ -0,0 +1,757 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool Macbeth chart locator
+
+from ctt_ransac import *
+from ctt_tools import *
+import warnings
+
+"""
+NOTE: some custom functions have been used here to make the code more readable.
+These are defined in tools.py if they are needed for reference.
+"""
+
+
+"""
+Some inconsistencies between packages cause runtime warnings when running
+the clustering algorithm. This catches these warnings so they don't flood the
+output to the console
+"""
+def fxn():
+ warnings.warn("runtime", RuntimeWarning)
+
+
+"""
+Define the success message
+"""
+success_msg = 'Macbeth chart located successfully'
+
+def find_macbeth(Cam, img, mac_config=(0, 0)):
+ small_chart, show = mac_config
+ print('Locating macbeth chart')
+ Cam.log += '\nLocating macbeth chart'
+ """
+ catch the warnings
+ """
+ warnings.simplefilter("ignore")
+ fxn()
+
+ """
+ Reference macbeth chart is created that will be correlated with the located
+ macbeth chart guess to produce a confidence value for the match.
+ """
+ ref = cv2.imread(Cam.path + 'ctt_ref.pgm', flags=cv2.IMREAD_GRAYSCALE)
+ ref_w = 120
+ ref_h = 80
+ rc1 = (0, 0)
+ rc2 = (0, ref_h)
+ rc3 = (ref_w, ref_h)
+ rc4 = (ref_w, 0)
+ ref_corns = np.array((rc1, rc2, rc3, rc4), np.float32)
+ ref_data = (ref, ref_w, ref_h, ref_corns)
+
+ """
+ locate macbeth chart
+ """
+ cor, mac, coords, msg = get_macbeth_chart(img, ref_data)
+
+ # Keep a list that will include this and any brightened up versions of
+ # the image for reuse.
+ all_images = [img]
+
+ """
+ The following bits of code try to fix common problems with simple
+ techniques.
+ If now or at any point the best correlation is above 0.75, then
+ nothing more is tried as this is a high enough confidence to ensure
+ reliable macbeth square centre placement.
+ """
+
+ """
+ brighten image 2x
+ """
+ if cor < 0.75:
+ a = 2
+ img_br = cv2.convertScaleAbs(img, alpha=a, beta=0)
+ all_images.append(img_br)
+ cor_b, mac_b, coords_b, msg_b = get_macbeth_chart(img_br, ref_data)
+ if cor_b > cor:
+ cor, mac, coords, msg = cor_b, mac_b, coords_b, msg_b
+
+ """
+ brighten image 4x
+ """
+ if cor < 0.75:
+ a = 4
+ img_br = cv2.convertScaleAbs(img, alpha=a, beta=0)
+ all_images.append(img_br)
+ cor_b, mac_b, coords_b, msg_b = get_macbeth_chart(img_br, ref_data)
+ if cor_b > cor:
+ cor, mac, coords, msg = cor_b, mac_b, coords_b, msg_b
+
+ """
+ In case macbeth chart is too small, take a selection of the image and
+ attempt to locate macbeth chart within that. The scale increment is
+ root 2
+ """
+ """
+ These variables will be used to transform the found coordinates at smaller
+ scales back into the original. If ii is still -1 after this section that
+ means it was not successful
+ """
+ ii = -1
+ w_best = 0
+ h_best = 0
+ d_best = 100
+ """
+ d_best records the scale of the best match. Macbeth charts are only looked
+ for at one scale increment smaller than the current best match in order to avoid
+ unnecessarily searching for macbeth charts at small scales.
+ If a macbeth chart has already been found then set d_best to 0
+ """
+ if cor != 0:
+ d_best = 0
+
+ """
+ scale 3/2 (approx root2)
+ """
+ if cor < 0.75:
+ imgs = []
+ """
+ get size of image
+ """
+ shape = list(img.shape[:2])
+ w, h = shape
+ """
+ set dimensions of the subselection and the step along each axis between
+ selections
+ """
+ w_sel = int(2*w/3)
+ h_sel = int(2*h/3)
+ w_inc = int(w/6)
+ h_inc = int(h/6)
+ """
+ for each subselection, look for a macbeth chart
+ loop over this and any brightened up images that we made to increase the
+ likelihood of success
+ """
+ for img_br in all_images:
+ for i in range(3):
+ for j in range(3):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img_br[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ """
+ if the correlation is better than the best then record the
+ scale and current subselection at which macbeth chart was
+ found. Also record the coordinates, macbeth chart and message.
+ """
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 1
+
+ """
+ scale 2
+ """
+ if cor < 0.75:
+ imgs = []
+ shape = list(img.shape[:2])
+ w, h = shape
+ w_sel = int(w/2)
+ h_sel = int(h/2)
+ w_inc = int(w/8)
+ h_inc = int(h/8)
+ # Again, loop over any brightened up images as well
+ for img_br in all_images:
+ for i in range(5):
+ for j in range(5):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img_br[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 2
+
+ """
+ The following code checks for macbeth charts at even smaller scales. This
+ slows the code down significantly and is therefore disabled by default,
+ but it is not unusably slow, so it might be useful if the macbeth chart
+ is too small to be picked up by the current subselections.
+ Use this for macbeth charts with side lengths around 1/5 of the image
+ dimensions (or smaller). It is, however, recommended that the macbeth
+ chart take up as large a proportion of the image as possible.
+ """
+
+ if small_chart:
+
+ if cor < 0.75 and d_best > 1:
+ imgs = []
+ shape = list(img.shape[:2])
+ w, h = shape
+ w_sel = int(w/3)
+ h_sel = int(h/3)
+ w_inc = int(w/12)
+ h_inc = int(h/12)
+ for i in range(9):
+ for j in range(9):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 3
+
+ if cor < 0.75 and d_best > 2:
+ imgs = []
+ shape = list(img.shape[:2])
+ w, h = shape
+ w_sel = int(w/4)
+ h_sel = int(h/4)
+ w_inc = int(w/16)
+ h_inc = int(h/16)
+ for i in range(13):
+ for j in range(13):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+
+ """
+ Transform coordinates from subselection to original image
+ """
+ if ii != -1:
+ for a in range(len(coords)):
+ for b in range(len(coords[a][0])):
+ coords[a][0][b][1] += ii*w_best
+ coords[a][0][b][0] += jj*h_best
+
+ """
+ initialise coords_fit variable
+ """
+ coords_fit = None
+ # print('correlation: {}'.format(cor))
+ """
+ print error or success message
+ """
+ print(msg)
+ Cam.log += '\n' + str(msg)
+ if msg == success_msg:
+ coords_fit = coords
+ Cam.log += '\nMacbeth chart vertices:\n'
+ Cam.log += '{}'.format(2*np.round(coords_fit[0][0], 0))
+ """
+ if correlation is lower than 0.75 there may be a risk of macbeth chart
+ corners not having been located properly. It might be worth running
+ with show set to true to check where the macbeth chart centres have
+ been located.
+ """
+ print('Confidence: {:.3f}'.format(cor))
+ Cam.log += '\nConfidence: {:.3f}'.format(cor)
+ if cor < 0.75:
+ print('Caution: Low confidence guess!')
+ Cam.log += '\nWARNING: Low confidence guess!'
+ # cv2.imshow('MacBeth', mac)
+ # represent(mac, 'MacBeth chart')
+
+ """
+ extract data from coords_fit and plot on original image
+ """
+ if show and coords_fit is not None:
+ copy = img.copy()
+ verts = coords_fit[0][0]
+ cents = coords_fit[1][0]
+
+ """
+ draw circles at vertices of macbeth chart
+ """
+ for vert in verts:
+ p = tuple(np.round(vert).astype(np.int32))
+ cv2.circle(copy, p, 10, 1, -1)
+ """
+ draw circles at centres of squares
+ """
+ for i in range(len(cents)):
+ cent = cents[i]
+ p = tuple(np.round(cent).astype(np.int32))
+ """
+ draw a black circle on the white square, a white circle on the black square
+ and a grey circle everywhere else.
+ """
+ if i == 3:
+ cv2.circle(copy, p, 8, 0, -1)
+ elif i == 23:
+ cv2.circle(copy, p, 8, 1, -1)
+ else:
+ cv2.circle(copy, p, 8, 0.5, -1)
+ copy, _ = reshape(copy, 400)
+ represent(copy)
+
+ return(coords_fit)
+
+
+def get_macbeth_chart(img, ref_data):
+ """
+ function returns coordinates of macbeth chart vertices and square centres,
+ along with an error/success message for debugging purposes. Additionally,
+ it scores the match with a confidence value.
+
+ Brief explanation of the macbeth chart locating algorithm:
+ - Find rectangles within image
+ - Take rectangles within percentage offset of median perimeter. The
+ assumption is that these will be the macbeth squares
+ - For each potential square, find the 24 possible macbeth centre locations
+ that would produce a square in that location
+ - Find clusters of potential macbeth chart centres to find the potential
+ macbeth centres with the most votes, i.e. the most likely ones
+ - For each potential macbeth centre, use the centres of the squares that
+ voted for it to find macbeth chart corners
+ - For each set of corners, transform the possible match into normalised
+ space and correlate with a reference chart to evaluate the match
+ - Select the highest correlation as the macbeth chart match, returning the
+ correlation as the confidence score
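+ - The function returns a tuple of (confidence, normalised chart image,
+ (vertex, centre) coordinates, message); the coordinates are rescaled
+ to the size of the image passed in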
+ """
+
+ """
+ get reference macbeth chart data
+ """
+ (ref, ref_w, ref_h, ref_corns) = ref_data
+
+ """
+ the code will raise and catch a MacbethError in case of a problem, trying
+ to give some likely reasons why the problem occurred, hence the try/except
+ """
+ try:
+ """
+ obtain image, convert to grayscale and normalise
+ """
+ src = img
+ src, factor = reshape(src, 200)
+ original = src.copy()
+ a = 125/np.average(src)
+ src_norm = cv2.convertScaleAbs(src, alpha=a, beta=0)
+ """
+ This code checks if there are separate colour channels. In the past the
+ macbeth locator ran on jpgs and this makes it robust to different
+ filetypes. Note that running it on a jpg has 4x the pixels of the
+ average bayer channel so coordinates must be doubled.
+
+ This is best done in img_load.py in the get_patches method. The
+ coordinates and image width, height must be divided by two if the
+ macbeth locator has been run on a demosaicked image.
+ """
+ if len(src_norm.shape) == 3:
+ src_bw = cv2.cvtColor(src_norm, cv2.COLOR_BGR2GRAY)
+ else:
+ src_bw = src_norm
+ original_bw = src_bw.copy()
+ """
+ obtain image edges
+ """
+ sigma = 2
+ src_bw = cv2.GaussianBlur(src_bw, (0, 0), sigma)
+ t1, t2 = 50, 100
+ edges = cv2.Canny(src_bw, t1, t2)
+ """
+ dilate edges to prevent self-intersections in contours
+ """
+ k_size = 2
+ kernel = np.ones((k_size, k_size))
+ its = 1
+ edges = cv2.dilate(edges, kernel, iterations=its)
+ """
+ find Contours in image
+ """
+ conts, _ = cv2.findContours(edges, cv2.RETR_TREE,
+ cv2.CHAIN_APPROX_NONE)
+ if len(conts) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo contours found in image\n'
+ 'Possible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ )
+ """
+ find quadrilateral contours
+ """
+ epsilon = 0.07
+ conts_per = []
+ for i in range(len(conts)):
+ per = cv2.arcLength(conts[i], True)
+ poly = cv2.approxPolyDP(conts[i], epsilon*per, True)
+ if len(poly) == 4 and cv2.isContourConvex(poly):
+ conts_per.append((poly, per))
+
+ if len(conts_per) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo quadrilateral contours found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+
+ """
+ sort contours by perimeter and keep those with a perimeter within a percentage of the median
+ """
+ conts_per = sorted(conts_per, key=lambda x: x[1])
+ med_per = conts_per[int(len(conts_per)/2)][1]
+ side = med_per/4
+ perc = 0.1
+ med_low, med_high = med_per*(1-perc), med_per*(1+perc)
+ squares = []
+ for i in conts_per:
+ if med_low <= i[1] and med_high >= i[1]:
+ squares.append(i[0])
+
+ """
+ obtain coordinates of normalised macbeth chart and squares
+ """
+ square_verts, mac_norm = get_square_verts(0.06)
+ """
+ for each square guess, find 24 possible macbeth chart centres
+ """
+ mac_mids = []
+ squares_raw = []
+ for i in range(len(squares)):
+ square = squares[i]
+ squares_raw.append(square)
+ """
+ convert quads to rotated rectangles. This is required as the
+ 'squares' are usually quite irregular quadrilaterals, so performing
+ a transform would result in exaggerated warping and inaccurate
+ macbeth chart centre placement
+ """
+ rect = cv2.minAreaRect(square)
+ square = cv2.boxPoints(rect).astype(np.float32)
+ """
+ reorder vertices to prevent 'hourglass shape'
+ """
+ square = sorted(square, key=lambda x: x[0])
+ square_1 = sorted(square[:2], key=lambda x: x[1])
+ square_2 = sorted(square[2:], key=lambda x: -x[1])
+ square = np.array(np.concatenate((square_1, square_2)), np.float32)
+ square = np.reshape(square, (4, 2)).astype(np.float32)
+ squares[i] = square
+ """
+ find 24 possible macbeth chart centres by transforming normalised
+ macbeth square vertices onto candidate square vertices found in image
+ """
+ for j in range(len(square_verts)):
+ verts = square_verts[j]
+ p_mat = cv2.getPerspectiveTransform(verts, square)
+ mac_guess = cv2.perspectiveTransform(mac_norm, p_mat)
+ mac_guess = np.round(mac_guess).astype(np.int32)
+ """
+ keep only if candidate macbeth is within image border
+ (deprecated)
+ """
+ in_border = True
+ # for p in mac_guess[0]:
+ # pptest = cv2.pointPolygonTest(
+ # img_con,
+ # tuple(p),
+ # False
+ # )
+ # if pptest == -1:
+ # in_border = False
+ # break
+
+ if in_border:
+ mac_mid = np.mean(mac_guess,
+ axis=1)
+ mac_mids.append([mac_mid, (i, j)])
+
+ if len(mac_mids) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo possible macbeth charts found within image'
+ '\nPossible problems:\n'
+ '- Part of the macbeth chart is outside the image\n'
+ '- Quadrilaterals in image background\n'
+ )
+
+ """
+ reshape data
+ """
+ for i in range(len(mac_mids)):
+ mac_mids[i][0] = mac_mids[i][0][0]
+
+ """
+ find where midpoints cluster to identify most likely macbeth centres
+ """
+ clustering = cluster.AgglomerativeClustering(
+ n_clusters=None,
+ compute_full_tree=True,
+ distance_threshold=side*2
+ )
+ mac_mids_list = [x[0] for x in mac_mids]
+
+ if len(mac_mids_list) == 1:
+ """
+ special case of only one valid centre found (probably not needed)
+ """
+ clus_list = []
+ clus_list.append([mac_mids, len(mac_mids)])
+
+ else:
+ clustering.fit(mac_mids_list)
+ # try:
+ # clustering.fit(mac_mids_list)
+ # except RuntimeWarning as error:
+ # return(0, None, None, error)
+
+ """
+ create list of all clusters
+ """
+ clus_list = []
+ if clustering.n_clusters_ > 1:
+ for i in range(clustering.labels_.max()+1):
+ indices = [j for j, x in enumerate(clustering.labels_) if x == i]
+ clus = []
+ for index in indices:
+ clus.append(mac_mids[index])
+ clus_list.append([clus, len(clus)])
+ clus_list.sort(key=lambda x: -x[1])
+
+ elif clustering.n_clusters_ == 1:
+ """
+ special case of only one cluster found
+ """
+ # print('only 1 cluster')
+ clus_list.append([mac_mids, len(mac_mids)])
+ else:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo clusters found'
+ '\nPossible problems:\n'
+ '- NA\n'
+ )
+
+ """
+ keep only clusters with enough votes
+ """
+ clus_len_max = clus_list[0][1]
+ clus_tol = 0.7
+ for i in range(len(clus_list)):
+ if clus_list[i][1] < clus_len_max * clus_tol:
+ clus_list = clus_list[:i]
+ break
+ cent = np.mean(clus_list[i][0], axis=0)[0]
+ clus_list[i].append(cent)
+
+ """
+ represent most popular cluster centroids
+ """
+ # copy = original_bw.copy()
+ # copy = cv2.cvtColor(copy, cv2.COLOR_GRAY2RGB)
+ # copy = cv2.resize(copy, None, fx=2, fy=2)
+ # for clus in clus_list:
+ # centroid = tuple(2*np.round(clus[2]).astype(np.int32))
+ # cv2.circle(copy, centroid, 7, (255, 0, 0), -1)
+ # cv2.circle(copy, centroid, 2, (0, 0, 255), -1)
+ # represent(copy)
+
+ """
+ get centres of each normalised square
+ """
+ reference = get_square_centres(0.06)
+
+ """
+ for each possible macbeth chart, transform image into
+ normalised space and find correlation with reference
+ """
+ max_cor = 0
+ best_map = None
+ best_fit = None
+ best_cen_fit = None
+ best_ref_mat = None
+
+ for clus in clus_list:
+ clus = clus[0]
+ sq_cents = []
+ ref_cents = []
+ i_list = [p[1][0] for p in clus]
+ for point in clus:
+ i, j = point[1]
+ """
+ remove any square that voted for two different points within
+ the same cluster. This causes the same point in the image to be
+ mapped to two different reference square centres, resulting in
+ a very distorted perspective transform since cv2.findHomography
+ simply minimises error.
+ This phenomenon is not particularly likely to occur due to the
+ enforced distance threshold in the clustering fit but it is
+ best to keep this in just in case.
+ """
+ if i_list.count(i) == 1:
+ square = squares_raw[i]
+ sq_cent = np.mean(square, axis=0)
+ ref_cent = reference[j]
+ sq_cents.append(sq_cent)
+ ref_cents.append(ref_cent)
+
+ """
+ At least four squares need to have voted for a centre in
+ order for a transform to be found
+ """
+ if len(sq_cents) < 4:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNot enough squares found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is too dark or bright\n'
+ )
+
+ ref_cents = np.array(ref_cents)
+ sq_cents = np.array(sq_cents)
+ """
+ find best fit transform from normalised centres to image
+ """
+ h_mat, mask = cv2.findHomography(ref_cents, sq_cents)
+ if h_mat is None:
+ raise MacbethError(
+ '\nERROR\n'
+ )
+
+ """
+ transform normalised corners and centres into image space
+ """
+ mac_fit = cv2.perspectiveTransform(mac_norm, h_mat)
+ mac_cen_fit = cv2.perspectiveTransform(np.array([reference]), h_mat)
+ """
+ transform located corners into reference space
+ """
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+ """
+ normalise brightness
+ """
+ a = 125/np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+ """
+ find correlation with bw reference macbeth
+ """
+ cor = correlate(map_to_ref, ref)
+ """
+ keep only if best correlation
+ """
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit
+ best_cen_fit = mac_cen_fit
+ best_ref_mat = ref_mat
+
+ """
+ rotate macbeth by pi and recorrelate in case macbeth chart is
+ upside-down
+ """
+ mac_fit_inv = np.array(
+ ([[mac_fit[0][2], mac_fit[0][3],
+ mac_fit[0][0], mac_fit[0][1]]])
+ )
+ mac_cen_fit_inv = np.flip(mac_cen_fit, axis=1)
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit_inv,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+ a = 125/np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+ cor = correlate(map_to_ref, ref)
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit_inv
+ best_cen_fit = mac_cen_fit_inv
+ best_ref_mat = ref_mat
+
+ """
+ Check best match is above threshold
+ """
+ cor_thresh = 0.6
+ if max_cor < cor_thresh:
+ raise MacbethError(
+ '\nWARNING: Correlation too low'
+ '\nPossible problems:\n'
+ '- Bad lighting conditions\n'
+ '- Macbeth chart is occluded\n'
+ '- Background is too noisy\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+ """
+ Following code is mostly representation for debugging purposes
+ """
+
+ """
+ draw macbeth corners and centres on image
+ """
+ copy = original.copy()
+ copy = cv2.resize(original, None, fx=2, fy=2)
+ # print('correlation = {}'.format(round(max_cor, 2)))
+ for point in best_fit[0]:
+ point = np.array(point, np.float32)
+ point = tuple(2*np.round(point).astype(np.int32))
+ cv2.circle(copy, point, 4, (255, 0, 0), -1)
+ for point in best_cen_fit[0]:
+ point = np.array(point, np.float32)
+ point = tuple(2*np.round(point).astype(np.int32))
+ cv2.circle(copy, point, 4, (0, 0, 255), -1)
+ copy = copy.copy()
+ cv2.circle(copy, point, 4, (0, 0, 255), -1)
+
+ """
+ represent coloured macbeth in reference space
+ """
+ best_map_col = cv2.warpPerspective(
+ original, best_ref_mat, (ref_w, ref_h)
+ )
+ best_map_col = cv2.resize(
+ best_map_col, None, fx=4, fy=4
+ )
+ a = 125/np.average(best_map_col)
+ best_map_col_norm = cv2.convertScaleAbs(
+ best_map_col, alpha=a, beta=0
+ )
+ # cv2.imshow('Macbeth', best_map_col)
+ # represent(copy)
+
+ """
+ rescale coordinates to original image size
+ """
+ fit_coords = (best_fit/factor, best_cen_fit/factor)
+
+ return(max_cor, best_map_col_norm, fit_coords, success_msg)
+
+ """
+ catch macbeth errors and continue with code
+ """
+ except MacbethError as error:
+ return(0, None, None, error)
diff --git a/utils/raspberrypi/ctt/ctt_noise.py b/utils/raspberrypi/ctt/ctt_noise.py
new file mode 100644
index 00000000..0b18d83f
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_noise.py
@@ -0,0 +1,123 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool noise calibration
+
+from ctt_image_load import *
+import matplotlib.pyplot as plt
+
+
+"""
+Find noise standard deviation and fit to model:
+
+ noise std = a + b*sqrt(pixel mean)
+"""
+def noise(Cam, Img, plot):
+ Cam.log += '\nProcessing image: {}'.format(Img.name)
+ stds = []
+ means = []
+ """
+ iterate through macbeth square patches
+ """
+ for ch_patches in Img.patches:
+ for patch in ch_patches:
+ """
+ renormalise patch
+ """
+ patch = np.array(patch)
+ patch = (patch-Img.blacklevel_16)/Img.againQ8_norm
+ std = np.std(patch)
+ mean = np.mean(patch)
+ stds.append(std)
+ means.append(mean)
+
+ """
+ clean data and ensure all means are above 0
+ """
+ stds = np.array(stds)
+ means = np.array(means)
+ means = np.clip(np.array(means), 0, None)
+ sq_means = np.sqrt(means)
+
+ """
+ least squares fit model
+ """
+ fit = np.polyfit(sq_means, stds, 1)
+ Cam.log += '\nBlack level = {}'.format(Img.blacklevel_16)
+ Cam.log += '\nNoise profile: offset = {}'.format(int(fit[1]))
+ Cam.log += ' slope = {:.3f}'.format(fit[0])
+ """
+ remove any values further than std from the fit
+
+ anomalies most likely caused by:
+ > uncharacteristically noisy white patch
+ > saturation in the white patch
+ """
+ fit_score = np.abs(stds - fit[0]*sq_means - fit[1])
+ fit_std = np.std(stds)
+ fit_score_norm = fit_score - fit_std
+ anom_ind = np.where(fit_score_norm > 1)
+ fit_score_norm.sort()
+ sq_means_clean = np.delete(sq_means, anom_ind)
+ stds_clean = np.delete(stds, anom_ind)
+ removed = len(stds) - len(stds_clean)
+ if removed != 0:
+ Cam.log += '\nIdentified and removed {} anomalies.'.format(removed)
+ Cam.log += '\nRecalculating fit'
+ """
+ recalculate fit with outliers removed
+ """
+ fit = np.polyfit(sq_means_clean, stds_clean, 1)
+ Cam.log += '\nNoise profile: offset = {}'.format(int(fit[1]))
+ Cam.log += ' slope = {:.3f}'.format(fit[0])
+
+ """
+ if the fit constant is < 0 then force the fit through 0 by
+ dividing by sq_means and fitting a polynomial of order 0
+ """
+ corrected = 0
+ if fit[1] < 0:
+ corrected = 1
+ ones = np.ones(len(means))
+ y_data = stds/sq_means
+ fit2 = np.polyfit(ones, y_data, 0)
+ Cam.log += '\nOffset below zero. Fit recalculated with zero offset'
+ Cam.log += '\nNoise profile: offset = 0'
+ Cam.log += ' slope = {:.3f}'.format(fit2[0])
+ # print('new fit')
+ # print(fit2)
+
+ """
+ plot fit for debug
+ """
+ if plot:
+ x = np.arange(sq_means.max()//0.88)
+ fit_plot = x*fit[0] + fit[1]
+ plt.scatter(sq_means, stds, label='data', color='blue')
+ plt.scatter(sq_means[anom_ind], stds[anom_ind], color='orange', label='anomalies')
+ plt.plot(x, fit_plot, label='fit', color='red', ls=':')
+ if fit[1] < 0:
+ fit_plot_2 = x*fit2[0]
+ plt.plot(x, fit_plot_2, label='fit 0 intercept', color='green', ls='--')
+ plt.plot(0, 0)
+ plt.title('Noise Plot\nImg: {}'.format(Img.str))
+ plt.legend(loc='upper left')
+ plt.xlabel('Sqrt Pixel Value')
+ plt.ylabel('Noise Standard Deviation')
+ plt.grid()
+ plt.show()
+ """
+ End of plotting code
+ """
+
+ """
+ format output to include forced 0 constant
+ """
+ Cam.log += '\n'
+ if corrected:
+ fit = [fit2[0], 0]
+ return fit
+
+ else:
+ return fit
diff --git a/utils/raspberrypi/ctt/ctt_pisp.py b/utils/raspberrypi/ctt/ctt_pisp.py
new file mode 100755
index 00000000..a59b053c
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_pisp.py
@@ -0,0 +1,805 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# ctt_pisp.py - camera tuning tool data for PiSP platforms
+
+
+json_template = {
+ "rpi.black_level": {
+ "black_level": 4096
+ },
+ "rpi.lux": {
+ "reference_shutter_speed": 10000,
+ "reference_gain": 1,
+ "reference_aperture": 1.0
+ },
+ "rpi.dpc": {
+ "strength": 1
+ },
+ "rpi.noise": {
+ },
+ "rpi.geq": {
+ },
+ "rpi.denoise":
+ {
+ "normal":
+ {
+ "sdn":
+ {
+ "deviation": 1.6,
+ "strength": 0.5,
+ "deviation2": 3.2,
+ "deviation_no_tdn": 3.2,
+ "strength_no_tdn": 0.75
+ },
+ "cdn":
+ {
+ "deviation": 200,
+ "strength": 0.3
+ },
+ "tdn":
+ {
+ "deviation": 0.8,
+ "threshold": 0.05
+ }
+ },
+ "hdr":
+ {
+ "sdn":
+ {
+ "deviation": 1.6,
+ "strength": 0.5,
+ "deviation2": 3.2,
+ "deviation_no_tdn": 3.2,
+ "strength_no_tdn": 0.75
+ },
+ "cdn":
+ {
+ "deviation": 200,
+ "strength": 0.3
+ },
+ "tdn":
+ {
+ "deviation": 1.3,
+ "threshold": 0.1
+ }
+ },
+ "night":
+ {
+ "sdn":
+ {
+ "deviation": 1.6,
+ "strength": 0.5,
+ "deviation2": 3.2,
+ "deviation_no_tdn": 3.2,
+ "strength_no_tdn": 0.75
+ },
+ "cdn":
+ {
+ "deviation": 200,
+ "strength": 0.3
+ },
+ "tdn":
+ {
+ "deviation": 1.3,
+ "threshold": 0.1
+ }
+ }
+ },
+ "rpi.awb": {
+ "priors": [
+ {"lux": 0, "prior": [2000, 1.0, 3000, 0.0, 13000, 0.0]},
+ {"lux": 800, "prior": [2000, 0.0, 6000, 2.0, 13000, 2.0]},
+ {"lux": 1500, "prior": [2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0]}
+ ],
+ "modes": {
+ "auto": {"lo": 2500, "hi": 7700},
+ "incandescent": {"lo": 2500, "hi": 3000},
+ "tungsten": {"lo": 3000, "hi": 3500},
+ "fluorescent": {"lo": 4000, "hi": 4700},
+ "indoor": {"lo": 3000, "hi": 5000},
+ "daylight": {"lo": 5500, "hi": 6500},
+ "cloudy": {"lo": 7000, "hi": 8000}
+ },
+ "bayes": 1
+ },
+ "rpi.agc":
+ {
+ "channels":
+ [
+ {
+ "comment": "Channel 0 is normal AGC",
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 10000, 30000, 60000, 66666 ],
+ "gain": [ 1.0, 1.5, 2.0, 4.0, 8.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 5000, 10000, 20000, 60000 ],
+ "gain": [ 1.0, 1.5, 2.0, 4.0, 8.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 10000, 30000, 60000, 90000, 120000 ],
+ "gain": [ 1.0, 1.5, 2.0, 4.0, 8.0, 12.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ }
+ ],
+ "highlight": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.8,
+ 1000, 0.8
+ ]
+ },
+ ],
+ "shadows": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.5,
+ "y_target":
+ [
+ 0, 0.17,
+ 1000, 0.17
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.165,
+ 10000, 0.17
+ ]
+ },
+ {
+ "comment": "Channel 1 is the HDR short channel",
+ "desaturate": 0,
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 20000, 60000 ],
+ "gain": [ 1.0, 1.0, 1.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 20000, 60000 ],
+ "gain": [ 1.0, 1.0, 1.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 20000, 60000 ],
+ "gain": [ 1.0, 1.0, 1.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.7,
+ 1000, 0.7
+ ]
+ },
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.2,
+ "y_target":
+ [
+ 0, 0.002,
+ 1000, 0.002
+ ]
+ }
+ ],
+ "highlight": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.7,
+ 1000, 0.7
+ ]
+ },
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.2,
+ "y_target":
+ [
+ 0, 0.002,
+ 1000, 0.002
+ ]
+ }
+ ],
+ "shadows": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.7,
+ 1000, 0.7
+ ]
+ },
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.2,
+ "y_target":
+ [
+ 0, 0.002,
+ 1000, 0.002
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.165,
+ 10000, 0.17
+ ]
+ },
+ {
+ "comment": "Channel 2 is the HDR long channel",
+ "desaturate": 0,
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 20000, 30000, 60000 ],
+ "gain": [ 1.0, 2.0, 4.0, 8.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 20000, 30000, 60000 ],
+ "gain": [ 1.0, 2.0, 4.0, 8.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 20000, 30000, 60000 ],
+ "gain": [ 1.0, 2.0, 4.0, 8.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ ],
+ "highlight": [
+ ],
+ "shadows": [
+ ]
+ },
+ "channel_constraints":
+ [
+ {
+ "bound": "UPPER",
+ "channel": 4,
+ "factor": 8
+ },
+ {
+ "bound": "LOWER",
+ "channel": 4,
+ "factor": 2
+ }
+ ],
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.165,
+ 10000, 0.17
+ ]
+ },
+ {
+ "comment": "Channel 3 is the night mode channel",
+ "base_ev": 0.33,
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 20000, 66666 ],
+ "gain": [ 1.0, 2.0, 4.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 20000, 33333 ],
+ "gain": [ 1.0, 2.0, 4.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 20000, 66666, 120000 ],
+ "gain": [ 1.0, 2.0, 4.0, 4.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ }
+ ],
+ "highlight": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.8,
+ 1000, 0.8
+ ]
+ }
+ ],
+ "shadows": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.16,
+ 10000, 0.17
+ ]
+ }
+ ]
+ },
+ "rpi.alsc": {
+ 'omega': 1.3,
+ 'n_iter': 100,
+ 'luminance_strength': 0.8,
+ },
+ "rpi.contrast": {
+ "ce_enable": 1,
+ "gamma_curve": [
+ 0, 0,
+ 1024, 5040,
+ 2048, 9338,
+ 3072, 12356,
+ 4096, 15312,
+ 5120, 18051,
+ 6144, 20790,
+ 7168, 23193,
+ 8192, 25744,
+ 9216, 27942,
+ 10240, 30035,
+ 11264, 32005,
+ 12288, 33975,
+ 13312, 35815,
+ 14336, 37600,
+ 15360, 39168,
+ 16384, 40642,
+ 18432, 43379,
+ 20480, 45749,
+ 22528, 47753,
+ 24576, 49621,
+ 26624, 51253,
+ 28672, 52698,
+ 30720, 53796,
+ 32768, 54876,
+ 36864, 57012,
+ 40960, 58656,
+ 45056, 59954,
+ 49152, 61183,
+ 53248, 62355,
+ 57344, 63419,
+ 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.ccm": {
+ },
+ "rpi.cac": {
+ },
+ "rpi.sharpen": {
+ "threshold": 0.25,
+ "limit": 1.0,
+ "strength": 1.0
+ },
+ "rpi.hdr":
+ {
+ "Off":
+ {
+ "cadence": [ 0 ]
+ },
+ "MultiExposureUnmerged":
+ {
+ "cadence": [ 1, 2 ],
+ "channel_map": { "short": 1, "long": 2 }
+ },
+ "SingleExposure":
+ {
+ "cadence": [1],
+ "channel_map": { "short": 1 },
+ "spatial_gain": 2.0,
+ "tonemap_enable": 1
+ },
+ "MultiExposure":
+ {
+ "cadence": [1, 2],
+ "channel_map": { "short": 1, "long": 2 },
+ "stitch_enable": 1,
+ "spatial_gain": 2.0,
+ "tonemap_enable": 1
+ },
+ "Night":
+ {
+ "cadence": [ 3 ],
+ "channel_map": { "night": 3 },
+ "tonemap_enable": 1,
+ "tonemap":
+ [
+ 0, 0,
+ 5000, 20000,
+ 10000, 30000,
+ 20000, 47000,
+ 30000, 55000,
+ 65535, 65535
+ ]
+ }
+ }
+}
+
+grid_size = (32, 32)
diff --git a/utils/raspberrypi/ctt/ctt_pretty_print_json.py b/utils/raspberrypi/ctt/ctt_pretty_print_json.py
new file mode 100755
index 00000000..a4cae62d
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_pretty_print_json.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright 2022 Raspberry Pi Ltd
+#
+# Script to pretty print a Raspberry Pi tuning config JSON structure in
+# version 2.0 and later formats.
+
+import argparse
+import json
+import textwrap
+
+
+class Encoder(json.JSONEncoder):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.indentation_level = 0
+ self.hard_break = 120
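+ # Number of elements to print per row for known array-valued keys
+ # (tables, matrices, weight lists and so on).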
+ self.custom_elems = {
+ 'weights': 15,
+ 'table': 16,
+ 'luminance_lut': 16,
+ 'ct_curve': 3,
+ 'ccm': 3,
+ 'lut_rx': 9,
+ 'lut_bx': 9,
+ 'lut_by': 9,
+ 'lut_ry': 9,
+ 'gamma_curve': 2,
+ 'y_target': 2,
+ 'prior': 2,
+ 'tonemap': 2
+ }
+
+ def encode(self, o, node_key=None):
+ if isinstance(o, (list, tuple)):
+ # Check if we are a flat list of numbers.
+ if not any(isinstance(el, (list, tuple, dict)) for el in o):
+ s = ', '.join(json.dumps(el) for el in o)
+ if node_key in self.custom_elems.keys():
+ # Special case handling to specify number of elements in a row for tables, ccm, etc.
+ self.indentation_level += 1
+ sl = s.split(', ')
+ num = self.custom_elems[node_key]
+ chunk = [self.indent_str + ', '.join(sl[x:x + num]) for x in range(0, len(sl), num)]
+ t = ',\n'.join(chunk)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ elif len(s) > self.hard_break - len(self.indent_str):
+ # Break a long list with wraps.
+ self.indentation_level += 1
+ t = textwrap.fill(s, self.hard_break, break_long_words=False,
+ initial_indent=self.indent_str, subsequent_indent=self.indent_str)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ else:
+ # Smaller lists can remain on a single line.
+ output = f' [ {s} ]'
+ return output
+ else:
+ # Sub-structures in the list case.
+ self.indentation_level += 1
+ output = [self.indent_str + self.encode(el) for el in o]
+ self.indentation_level -= 1
+ output = ',\n'.join(output)
+ return f' [\n{output}\n{self.indent_str}]'
+
+ elif isinstance(o, dict):
+ self.indentation_level += 1
+ output = []
+ for k, v in o.items():
+ if isinstance(v, dict) and len(v) == 0:
+ # Empty config block special case.
+ output.append(self.indent_str + f'{json.dumps(k)}: {{ }}')
+ else:
+ # Only linebreak if the next node is a config block.
+ sep = f'\n{self.indent_str}' if isinstance(v, dict) else ''
+ output.append(self.indent_str + f'{json.dumps(k)}:{sep}{self.encode(v, k)}')
+ output = ',\n'.join(output)
+ self.indentation_level -= 1
+ return f'{{\n{output}\n{self.indent_str}}}'
+
+ else:
+ return ' ' + json.dumps(o)
+
+ @property
+ def indent_str(self) -> str:
+ return ' ' * self.indentation_level * self.indent
+
+ def iterencode(self, o, **kwargs):
+ return self.encode(o)
+
+
+def pretty_print(in_json: dict, custom_elems={}) -> str:
+
+ if 'version' not in in_json or \
+ 'target' not in in_json or \
+ 'algorithms' not in in_json or \
+ in_json['version'] < 2.0:
+ raise RuntimeError('Incompatible JSON dictionary has been provided')
+
+ encoder = Encoder(indent=4, sort_keys=False)
+ encoder.custom_elems |= custom_elems
+ return encoder.encode(in_json) #json.dumps(in_json, cls=Encoder, indent=4, sort_keys=False)
+
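+# Example usage (illustrative file name):
+#
+#     with open('imx477.json', 'r') as f:
+#         print(pretty_print(json.load(f)))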
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=
+ 'Prettify a version 2.0 camera tuning config JSON file.')
+ parser.add_argument('-t', '--target', type=str, help='Target platform', choices=['pisp', 'vc4'], default='vc4')
+ parser.add_argument('input', type=str, help='Input tuning file.')
+ parser.add_argument('output', type=str, nargs='?',
+ help='Output converted tuning file. If not provided, the input file will be updated in-place.',
+ default=None)
+ args = parser.parse_args()
+
+ with open(args.input, 'r') as f:
+ in_json = json.load(f)
+
+ if args.target == 'pisp':
+ from ctt_pisp import grid_size
+ elif args.target == 'vc4':
+ from ctt_vc4 import grid_size
+
+ out_json = pretty_print(in_json, custom_elems={'table': grid_size[0], 'luminance_lut': grid_size[0]})
+
+ with open(args.output if args.output is not None else args.input, 'w') as f:
+ f.write(out_json)
diff --git a/utils/raspberrypi/ctt/ctt_ransac.py b/utils/raspberrypi/ctt/ctt_ransac.py
new file mode 100644
index 00000000..01bba302
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_ransac.py
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool RANSAC selector for Macbeth chart locator
+
+import numpy as np
+
+scale = 2
+
+
+"""
+constructs normalised macbeth chart corners for ransac algorithm
+"""
+def get_square_verts(c_err=0.05, scale=scale):
+ """
+ define macbeth chart corners
+ """
+ b_bord_x, b_bord_y = scale*8.5, scale*13
+ s_bord = 6*scale
+ side = 41*scale
+ x_max = side*6 + 5*s_bord + 2*b_bord_x
+ y_max = side*4 + 3*s_bord + 2*b_bord_y
+ c1 = (0, 0)
+ c2 = (0, y_max)
+ c3 = (x_max, y_max)
+ c4 = (x_max, 0)
+ mac_norm = np.array((c1, c2, c3, c4), np.float32)
+ mac_norm = np.array([mac_norm])
+
+ square_verts = []
+ square_0 = np.array(((0, 0), (0, side),
+ (side, side), (side, 0)), np.float32)
+ offset_0 = np.array((b_bord_x, b_bord_y), np.float32)
+ c_off = side * c_err
+ offset_cont = np.array(((c_off, c_off), (c_off, -c_off),
+ (-c_off, -c_off), (-c_off, c_off)), np.float32)
+ square_0 += offset_0
+ square_0 += offset_cont
+ """
+ define macbeth square corners
+ """
+ for i in range(6):
+ shift_i = np.array(((i*side, 0), (i*side, 0),
+ (i*side, 0), (i*side, 0)), np.float32)
+ shift_bord = np.array(((i*s_bord, 0), (i*s_bord, 0),
+ (i*s_bord, 0), (i*s_bord, 0)), np.float32)
+ square_i = square_0 + shift_i + shift_bord
+ for j in range(4):
+ shift_j = np.array(((0, j*side), (0, j*side),
+ (0, j*side), (0, j*side)), np.float32)
+ shift_bord = np.array(((0, j*s_bord),
+ (0, j*s_bord), (0, j*s_bord),
+ (0, j*s_bord)), np.float32)
+ square_j = square_i + shift_j + shift_bord
+ square_verts.append(square_j)
+ # print('square_verts')
+ # print(square_verts)
+ return np.array(square_verts, np.float32), mac_norm
+
+
+def get_square_centres(c_err=0.05, scale=scale):
+ """
+ define macbeth square centres
+ """
+ verts, mac_norm = get_square_verts(c_err, scale=scale)
+
+ centres = np.mean(verts, axis=1)
+ # print('centres')
+ # print(centres)
+ return np.array(centres, np.float32)
diff --git a/utils/raspberrypi/ctt/ctt_ref.pgm b/utils/raspberrypi/ctt/ctt_ref.pgm
new file mode 100644
index 00000000..9b9f4920
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_ref.pgm
@@ -0,0 +1,5 @@
+P5
+# Reference macbeth chart
+120 80
+255
+  !#!" #!"&&$#$#'"%&#+2///..../.........-()))))))))))))))))))(((-,*)'(&)#($%(%"###""!%""&"&&!$" #!$ !"! $&**" !#5.,%+,-5"0<HBAA54" %##((()*+,---.........+*)))))))))))))))-.,,--+))('((''('%'%##"!""!"!""""#!   ! %‚/vÀ¯z:òøßãLñ©û¶ÑÔcÒ,!#""%%''')**+)-../..../.-*)))))))))))))**,,)**'(''&'((&&%%##$! !!!! ! !  !  5*"-)&7(1.75Rnge`\`$ ""!"%%%'')())++--/---,-..,-.,++**))))())*)*)''%'%&%&'&%%"""""        !  !!$&$$&##(+*,,/10122126545./66402006486869650*.1.***)*+)()&((('('##)('&%%&%$$$#$%$%$ (((*))('((('('(&%V0;>>;@@>@AAAACBCB=&<­·³µ¶¾¿ÃÇÇÆÇËÒÐÇÄ<5x–•ŠŽŒŠ‰„„„„|64RYVTSRRRMMNLKJJLH+&0gijgdeffmmnpnkji`#3™ ª¦¨¨£Ÿ›››š–—™šbY! 3FHHIIIHIJIIJHIII@#?¾ÈÊÍÏÑÔÖØÚÚÚÛßáßÔ=7}—š˜———˜—˜˜——˜——‘:5Wcbcbdcb`^^`^^_^Y,'6‰ŽŒ‰ˆˆˆ‡†…„††„‚r'<½ÆÅÅÅÄÂÀ¿¾¾¼»¼¼µl%2FHHIIHJJJJJJIIJI?%;ÁÌÌÒÓÖØÙÛÛÜÜÞßâãÕ>7|•™™ž™—˜˜˜—™™™š˜–;8Xfeeegeccb`^aba]Z+)<Ž“’‘‹Š‰‰‰‰ˆ†r)>¿ÇÇÇÆÅÅÄÂÁÁÀ¾¾¼·q#3GHIIIIJIIJJIHIJI@&5ÁÎÑÔÕØÙÚÜÜÞßßßàâ×=8~”•˜™š›šš™›šœ››“;8Zgghggedbdcbda^\Z+(;““’‘‘Ž‹‹ŠŠ‰ˆy)9¿ÈÈÈÇÇÅÄÂÁÁÀ¿½½¹z"3GIIJJJJJKJJJJJJJ@'4ÂÑÔÔÙÚÛÜÞÝßßààààØ>9|”—–—™ššš™›œŸ¥ ž˜=8Zhighgeeeedeca__[/)B’–•••“‘ŽŒŒŒŒŠv&:ÁÊÊÊÊÆÆÆÂÁÂÂÁ¿¿º|#3GJJIIJKKKJJJKKJK@&6ÆÒ××ÙÛÛÞÞßààààààÖ>9~”———˜˜—™šžž    ˜<8Yghegggffihccab^\/*C“™˜—––””’‘‘Žz'9ÄÍËÈÈÇÇÆÆÄÂÂÀÀ¿»‚$  6IKJJMMMKMKKMKKMLC&2É××ÙÛÜßÞàááâââââÖ@9•——˜˜™˜˜š››žŸžž—<9Yghhhhijiegdcebc^0)G—›š™˜˜˜–•“’‘Ž(7ÃÍÌËÊÈÇÇÅÆÄÂÂÂÁº‰% 6JLMMNMMKMMNMMMMMD&2ÊÙÙÛÝßßßààáââáãâÖ@:~”—™™š™™››žžžžž—=9Xfghhjiigdgddedc`1)M—œ›š˜™—•”‘’‘Ž}(:ÄÐÍÌËÊÇÆÆÆÅÂÄÁ¾& "8LNOONNOMONNMMNOND'3ÍÛÛÞßàààáââãâåãå×@;–˜˜™žŸŸ  ¡¡  —=:Ziiigheegegegggdc1,Q›ŸŸž›šš˜––““‘~)8ÂÍÎÌËÊÊÈÆÆÆÆÄÆÇÁ•%# "9NNNPPPQOOOOONNOOD'0ÎÜÜßßáàáââååäãåæ×?;–˜—™šœžŸ¡¡ ¡Ÿ  ™=;[iigeeegghgdedgea0-P› ¡ žš˜—–•”(8ÃÏÎÎÌÊÈÈÇÇÇÆÈÇÆÃ' "#$:NNOQPPRPQPOOPQPPD*1ÐßßàààâãããåææåææÛA;‚˜™™šœžžŸ  Ÿž Ÿ—;:Yfghgghgghghhdggc3.\¡£¡  Ÿœœš˜—•’‘~);ÅÎÎÑÐÌËÊÇÈÉÊÊÇŤ(&%%;OQQQRSSRPQQQQSQQF)3ÓßàááãâãåææææææçÜB<ƒ™šœœžžžžŸ žŸ Ÿž—=:Wfhghhhihggghfhee4/f ¥¤¢¡¡ŸŸš˜—””’‘‚*:ÇÏÍÍÎÎÍÌÉÈËÊÈÆÆÃ¤&%%%?RSSSSSTTTTSSSTTRE)5ÕàááãâäåæåæçççèèÛB=„šœœžŸ Ÿ ¡ žŸŸŸ˜@:Ygiihhiiiihihiiif72p £¤¤£ ŸŸœœ™—–•’‘}(9ÇÎÏÎÍÍÍÍÍËÌÊÈÈÇÆ©'#%&?TUTTTUUQSTTTTTVSF*3ÕàãâãäåæææçççèééßF>†žž  ¡¡£ £¡¡¡ Ÿ˜A;[ghjiihiiiihihije50r¢¦¥¥££ Ÿžœš™—–““‚)6ÈÏÏÎÌÎÎÌÏÏËÊÊÈÈÆ«& &#%?SVVVUUUUUTUUVVUUG*5ÖãããåæææçèèèèééëßF=…ŸŸ¢££££ ¡¡  £ ˜A;Yhijiiijjiiiiijje81t¦¦¦¥¥£¡ Ÿ›˜——•’~)5ÇÑÑÏÎËÍÍÑÑÌËÈÈÉÆ°' '$$=OQRRQQPRSRSSSSSSG+6ËÙÙÜÛÜÞÝßààààáããÙD@‚š›œŸœžœ›š”?;Wefgggggfffgeeefc41xŸžŸž››š˜•”’‘ŽŒ{*5¾ÈÈÇÅÃÃÄÄÃÂÂÂÀ¿¼«( &&&'++++,,*-,-00-0100*-SUX\]]`_ffgiooopo=;X\bedbadbca`]\]ZZ;;<::8:;9983433110/-,...1//12410/..--+)"",---,-./,,.-/-0-( &&%+/0103322011223233)(34534767::;;==:=B9;BFGEEGIKJKIJGIJCD=<:76566554111/0/1.*+00233300/00//..,+*#")(*)++,++))*++**'!!&$*w³½¾¿Â¼ÀÀ¼¼·¹¹¸´²Ž1-_addc`ceccdccedbb?A|ŒŒ‘‹ŒŒ‹ŠŠ‰‰ˆB>=>?@@?====;<:;:<:11r‹ŒŽ“–““•–˜™Ž+.’—”™ ¥¢¡¤žšŸŸœ( !'%*zÀÇÆÆÇÇÊÊÈÈÈÊËËËÉ 42gjmllklomooonpopmHG‘©¬«««¬©«««ª««ª©£D>AEDEFEECEECCCDDEC46µåçèçççæåäãáàÞÜÚ׿0:Î×Ö×××ÖÕÒÓÏÐÐÍÍѾ,!!&&,|ÂÇÇÇÇÇÇËËÇÈÊËËÍÊ¡61inknnoopoppoqqrqoEE”¬­­­®®®­®®¯­®®­¥FACGFFFFFFDFDDDDDDC57¹íñïîîíííëéçæãáßÝÄ09ÓÛÛÛÛÚÙØ×ÖÕÔÔÒÔÒÁ+!"%%-~ÀÆÈÊÇÇÈÉÌÌÊÊËÌÌÊ¡42inopppppoqqqrrsrnAB“«®®­®®®®®±­®¬°­¥C?DGGGGFFFFDFFDDEDC48ºíððïïîîíìëèçæãáßÅ1;ÔÞÞÝÜÚÚÙÙ×ÕÕÔÕÔÒÁ+!!"#*|¿ÄÉÊÈÈÈÈÉÍÉÈËÍÍÊ¡62imoppppqqqqrtrqtrGD•¬®®­°®°°°±±°®®­§H?CGGGGGGGGFFFFFFDB38»îðïïïïîíììëèçæâàÅ1<ÖààßÞÞÜÚÚÙÙÙ××ÔÔ½, !)}¿ÃÈÈÊÇÈÈËÎËÊËÌÍË¢63mooppqqqqqqrrtvtoDH—­­®±®°±°­°®­±°°¦JACHHGGHGGFFFDDGGFD29ÀðóòðïïïîííìêéèæâÆ3>ÖááààßÞÜÛÙÙÙØ×Ø×½, $){¼ÂÅÆÉÇÈÆËËÌÊËÊÍË¢53jpppqprqrrrttuvuo>H˜®°®±²±±°°°±°±°°ªJAFHHHHHGGHGGFGGFFE28ÁðôòòððïïîíëìëéçãÇ3:×ãáááßÞÝÛÛÚÙÚÚÚÚ½- "*{¸ÁÁÅÆÇÆÆÊËÌÉÊËÎÌ£53loqpqsqrrrtrutsvrAH—«®®±±°°°®±±±®­°©HCGHIHHHHHHGFGHGGGD5;ÀðóóòñððïîííìëëèäÇ28ØäãááààßÞÜÛÛÛÚÚÚÀ, 
+}¹¾ÀÂÂÅÅÅÇÉÍËÊËÌÊ¡52mqoqpqrttttttuurpFI–®°±°±±²°±±°±±¯°§OCEHHIHHHHGHGGFFIGF8<ÃðòòóóòððïíîìììéæÍ48ÚçåããáààßÝÜÜÜÜÛÛ¿, (|º¼¾ÀÀÃÄÄÆÇÍËÊÊËÊ¢41krqpqqqrrtrtuvtuoEH—­°°²±±±±¯²²®²±®«PBHHIIIHIIHIHGHGHHE7<ÃðóóòñððððïíííìêçÑ58ÜèæåãââáßßÜÞÜÞÞÚÄ* (zºº»¾ÀÂÂÂÄÄÇËÈÊËÊ¡63kpqprqqstttutrvvoFO˜¯°¯°±±°±±±±±°±²©LEHHIIHIHHHIGHGIHGF4=ÅñóóóððððïïîìíëéèÓ5<ÞêèçåââááßÞÞßßßÚÇ* 'zº½º»¾ÁÀÂÂÄÅÊÇÈÊÈ¡62lppqrqrrrtttuttvpAG›¯°±°±°°°°°±±°±±«MGHIIIIHIIIHHIIJHHG4<ÃñóóóðòððîîïííëéèÓ4<ÞëêççæãâáàßàÞÞÛØÇ+ !){º¼º»¾½ÀÁÁÂÄÉÇÇÉÈ 62jopqqqqqrtttutttrEH™¯±°°°¯°°±±²²°±±ªOHFIIIIIJIIIIHIHIHI7>ÅðôóòòòïðïîîíììëèÒ5;àíêèææãâáâßßÜÛÙÖÇ, !)z¼¾¼¹»»ÁÁ¿ÁÁÈÇÆÆÆŸ53lppqqrqrtttuuuutsFI™®±²±±±±²²²±°¯±²«RHGJIJHJKJJJIIIIIIH9>ÂñôôôòóððïîííìëééÓ5;àìééèææäááßÜÛØ×ÔÇ+  !({»¿¸º½½¿¾¿¾ÀÅÆÄÆÅœ41joppprqrrrutttvvrIH’­±°°°±±±²²°±²±±ªTHCJJJJJIJIJJIJJJIH7=ÂòôòóóñðïîîííììéèÒ5;ßìêêèæåäâàÞÛÙÖ×ÕÇ+ (u±±®¯±³µ²´´µº»¸»º‘65gjlmmmnoopnpprpqoIH¦©ª©«ªªª«¬«ªª¨ª¤OIBIJJJIJJJJIIIHHHG89ºåççæçåäããâáßàßÝÜÈ29ÔàßÝÛÛÙØÕÓÑÎÌÈÌʾ' "&,-*)-01/,0/12102-+04448789<>>??AFAD@DBCIJNRWTSUXT[WUQUOKFEBBABA?>>=<<;;67942:<<<>9999864565363&(13335422./1/-+..+ !"&$$""$"&$%'()(''*+-0124688:<>>??A>?EBCHKOLJLNOSQOXQQVMLACGHGHIGFHGDCCBB@??7432233210111.,++,++%(++)*(''%%%$$#%&$# ")0/001120024455520+-U]`addcdhefeekecYGFJRXYYVWWZWVXXVZTOBF}™œšœžœ›š™–™K7Ybccddfeg`^]^]\[Z[*)OTTPPQPOKOLLJJLIK  !1;:9:<<===;=???A@9*/„Ž‘’”•”––—™™š››’FJmxyxwyzzzxyzzz{zxLOÉÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿú]=‹§©¨¦§¥¦¤¤¢¡¡¡ ›Ž.-‹’’Œ‰‡…‚€€€y# !!2><=;==>=<<>@@@@A9-0‡‘‘”—˜˜™—š›žŸ —IKnz||{|{||{}}~}}{zLOÌÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿý]>ެ­¬««¨ª¨§¦¥¥££¡¡–..Œ––”“Ž‹‹‰‡…………„~% $2==;<>>?===>@A@AB;+1…Ž‘“•—™™˜˜™œžŸŸ—JJo{|y{||}{||}}}}}yMTÎÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿý_>ެ­¬«ª©©¦¦¦¤¤££¢ ”-.–”‘‘ŽŠŠ‰…„…„…„}# %2<=;=<@?>==>?A@AA9+3…Ž‘“–——˜™šœšœœžž•FMlz{{y|}}}}||}|}}{MTÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd>«¬«ªªª¨§¦¦¤¤¤¡  ”-,Œ“‘’Ї†……„„„…# %1<<<;==<<=>?A?@AA:,3†Ž‘’•——˜˜šœšœœž–INo{{y{||||}|}}|~}{RTÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=Š©­¬«©©§¦¦¥¤¤£¡ Ÿ—/-‹’‘‹‹‰ˆ…………ƒƒ„}#!$0<<<=<<==>A@@>@AA:-2†‘“’–——™™šš™œ›œ—HInzz{{||{{}~~}}|}zMRÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=‰ª«ªªª§¥¥¥£¤¡¡  ”++ˆŽŽ‹ŠŠ‰………„„„ƒ„~# "$/;<==>;===@@@@>AA:+2†Ž’’“•—–™˜šœ™œ–KHn||y|||||{}~}|}|xMSÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=†©©ª©©§¦¦¥¤£¡Ÿ žœ’+,‡‹Š‰ˆ††…„„„„ƒ}# ! "/:<=>@<<>=@@@@@AA;-3„’’•–˜˜š™šššœ›˜MFs||{{{y}z}}|}|}}yMWÏÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc>„©ª©§¦¦¥¤£¡£  ŸŸ›’,)…ŒŠ‹‰ˆ‡†…„„„ƒƒƒ|! !1;>?>><<>@>>=>ABB;,0ƒŽ‘’––™™™™ššœœ›˜LHr{|{|}|y|}}}}}zNXÎÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc?„©«ª§§¦¥¥££   žžšŽ()„‹ŠŠ‰ˆ…†„„„„„‚ƒƒz# $/;;<=;<>>=>>>@@BB:,1†‘“•–—˜™šœšœšž˜IInyz||||||{||}{~|{NVÏÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc;§¨¨¦¦¦¤££¡¡ŸŸœš“('ƒŠŠˆ‰ˆ……ƒ„ƒƒƒƒ‚€}# $0:<==<;>@>>>>@ABB:,/„‘““–˜™™™šœšš—HLlx|}y{y{|y{|}}}}yMRÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd>~¥§¦¦§¥¤££¢ ŸžššŽ*(ƒŠ‡‡ˆˆ„ƒ‚„ƒƒ‚‚‚y" !&3:;<<;==@@=>AABBA;-3†‘“‘”–—˜˜™šœœš›–KLqz{|||y{}|}{}|~{zRQÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüc9w¤§¦¥¦¤¢£  Ÿžžš™Ž)'ˆ†……„…„ƒƒƒ‚€€€€y" !%1<<;=>===<=@@ABBC<.5†’’•–—™˜™™œœž•IIlz{|}~~~|}{||~}}zMUÌÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüd;p¤¦¥¥¤¤£¤¢ Ÿž›š˜)$€ˆˆ…„„„…‚‚€€€x" $2===<==@=<>=ABBBC?/0ˆ‘’•••˜—˜™™š™œž˜IGkz}}{||}{||y||}zyOVÊÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüc7o¢¥¥¤££¡¤¡žŸš™š˜‘'&~‡„„„„ƒ…„€~€z"#"#/;<:<<?>;===@?AAA>07‹‹Ž’’’”“•–—•‘GGgwxz{yyxyzzyz{yuuHO½ùûüüüüüüûûûúúúúò\8v›žœ›š™˜—•••”‘†'$w~~}|||{~|{zxxxxv!"""'*+(+)*))()+,,.../0398;=<=>DCCDDCBBDHBCJMMLMPNPOJPKPSJDICCNMPONMNNOKHIFDBHE3/46433323.....*+,)( !##!!!!!$#$$#$#&"!!"(+**,,*+.//1478:<:33ACDFGGIIHIJLPKNMQFIPTTRVXVUXUUTXUSTNEGGFDEFAA>==;94877520-,))*(((('&$#!!" 
&%'FQPQR]dqŒ˜£«¹ÍàðÈ=FñûüÿÿÿÿÿÿÿÿÿÿÿÿÿúQN·èììêìæéììêéëëéêáLEœ˜…znki^[YTPUOS;.%-/12322221/10//,/%#0¯ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿß@QýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQMÁðôõôóôóôõõôõôôóæKE„¨©¨§§¤¥¥¢¤£ žžž˜H01NNQOQQOOMNNLKLJGB'&/¸ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâAWþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿOLÀñóôôôóóóõôôõóôòèKE„¦¨©©§ª©ª¦¨§¥¢¤¢œF-,PQQPQPPQPOONMNNKE''0·ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáCZþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿRMÁñôóóòòôòôõóôôóòåJE‚¥©¬¬©ª©ª§¥¥¤¤¤¢™F,*NSQPPQOOOOMNNMKID('2·ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD[þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQKÀðòòòóóòóõóõóòòðæIF€§©ª©§©§©¥¤¤¤¤¡ ˜F,*NPPPPPPNOONMMMJIF!'(2¶ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáF]þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿRL¿íððòðòòóóòòñïòðäHD£¦©©§¨¦¦¦¤¤¤¤¢ ˜F+%MPPPPOOONONNMMKID)*4¸ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD^þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿPL¿ìðïïòòððòòðïðòïäIC€¢¦¨¨¥¦¥§¥¤££¡ŸŸ—F+&NPOOOPPOONMMKMKHD**6ºÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD_þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQJ¾ëïïïòðððððïðïîïãFC~¢§¥¥¦¦¦¤££¤¡   ˜F,'MPOOOOONONNKKIIIG,+7»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD^ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQI¾êîîîïðïðïððïïïîâEB|£¥¤££¤¤¤£¤¢Ÿ ŸŸ—E+&MONOOONNNNKMJKJHH,-8¹ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàD]þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿPI¼éíìîîîîñðòóóöù÷èHE¥¨§¥¥¤¤£¡£¡  žŸ—C,#LOOOONONNNKKKMKJF,*6»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáCaýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMH»éììíîðððôóõöööõçIF‚©ª§¦¦¥££¢ Ÿžž Ÿ–D*%KONOMNMMKMKJJJIJE,,6»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâB^ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMG¹èììîðòóòóóóóòóôéHB}£©¦¦§¥¤¤¢ŸŸžžš”D+&LONOOONNMMMMKLKIA,,6ºÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàA\ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMF¹éìííïòóôððôöõööêIE¦ª©¦§¨§§¡¡Ÿš™”E+&LNNMONNMMKKKKKIHF --6¹ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿßA[üÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþKF¶çìðïððïðóöõöõùúîJC©«­«¦§¦¥¤¡¤žžš—F*&LMONMNMNKKJMKJJIF **5»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿß>WüÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþKE¶èïíðîðóöõøòùóöôçF?}¨©²¯¬¬©¥¤¤£žœ˜˜‘C*%KONNNJKKKMKJKJKID,*4¶ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿØ<WöþÿÿÿÿÿÿÿÿÿÿÿÿÿÿøMA°áäååçêêïêëëåæéçÝGCxž¨¦ ©¥¤ šœ¡˜•’ŠB)%HKLKKJJJKIHIHHFGC!()*q ¡š›šš™““’‘‘’‹‹o39v|}wwwwwwrqtuspn=9^gadcfgce`dbUY[\^>;DIJDB?FEGE=7>8634.(&&(%&*&%%'+*)+*#%()''03364443233222243/-+133423333423766645789:><<<;<;<?=?;<<:78673/001113--.-+*)&&#"&$#%&""$!! ))+rbPpAD9-*******+*++)++--.//./.0/21453469:=;98<;<>=;><7766666741012.-13/-+-/(''&&&%%&$.%0()-%-#-#' #&(% )))hn›YQgÛ7(*))))*)**,--....../0/0001357666::;;>?>AA866666666656565300/20/.-*)(('((&&%)d=yoP¼<Ñ?ßFQFx;§2»1«0))*RQ.0*,,5*(*))))*,**,+/.../...02/22224456468;:>BB;>;:76666666666755303033/,.-*(())('&')#)"##(+$+*#)) & 
diff --git a/utils/raspberrypi/ctt/ctt_tools.py b/utils/raspberrypi/ctt/ctt_tools.py
new file mode 100644
index 00000000..50b01ecf
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_tools.py
@@ -0,0 +1,150 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool miscellaneous
+
+import time
+import re
+import binascii
+import os
+import cv2
+import numpy as np
+import imutils
+import sys
+import matplotlib.pyplot as plt
+from sklearn import cluster as cluster
+from sklearn.neighbors import NearestCentroid as get_centroids
+
+"""
+This file contains some useful tools, the details of which aren't important to
+understanding the code. They are collated here to attempt to improve code
+readability in the main files.
+"""
+
+
+"""
+obtain a config value, unless it doesn't exist, in which case pick the default.
+Furthermore, it can check that the input is of the correct type.
+"""
+def get_config(dictt, key, default, ttype):
+ try:
+ val = dictt[key]
+ if ttype == 'string':
+ val = str(val)
+ elif ttype == 'num':
+ if 'int' not in str(type(val)):
+ if 'float' not in str(type(val)):
+ raise ValueError
+ elif ttype == 'dict':
+ if not isinstance(val, dict):
+ raise ValueError
+ elif ttype == 'list':
+ if not isinstance(val, list):
+ raise ValueError
+ elif ttype == 'bool':
+ val = int(bool(val))
+ else:
+ val = dictt[key]
+ except (KeyError, ValueError):
+ val = default
+ return val
+
+
+"""
+argument parser
+"""
+def parse_input():
+ arguments = sys.argv[1:]
+ if len(arguments) % 2 != 0:
+ raise ArgError('\n\nERROR! Enter a value for each argument passed.')
+ params = arguments[0::2]
+ vals = arguments[1::2]
+ args_dict = dict(zip(params, vals))
+ json_output = get_config(args_dict, '-o', None, 'string')
+ directory = get_config(args_dict, '-i', None, 'string')
+ config = get_config(args_dict, '-c', None, 'string')
+ log_path = get_config(args_dict, '-l', None, 'string')
+ target = get_config(args_dict, '-t', "vc4", 'string')
+ if directory is None:
+ raise ArgError('\n\nERROR! No input directory given.')
+ if json_output is None:
+ raise ArgError('\n\nERROR! No output json given.')
+ return json_output, directory, config, log_path, target
+
+
+"""
+custom arg and macbeth error class
+"""
+class ArgError(Exception):
+ pass
+class MacbethError(Exception):
+ pass
+
+
+"""
+correlation function to quantify match
+"""
+def correlate(im1, im2):
+ f1 = im1.flatten()
+ f2 = im2.flatten()
+ cor = np.corrcoef(f1, f2)
+ return cor[0][1]
+
+
+"""
+get list of files from directory
+"""
+def get_photos(directory='photos'):
+ filename_list = []
+ for filename in os.listdir(directory):
+ if 'jp' in filename or '.dng' in filename:
+ filename_list.append(filename)
+ return filename_list
+
+
+"""
+display image for debugging... read at your own risk...
+"""
+def represent(img, name='image'):
+ # if type(img) == tuple or type(img) == list:
+ # for i in range(len(img)):
+ # name = 'image {}'.format(i)
+ # cv2.imshow(name, img[i])
+ # else:
+ # cv2.imshow(name, img)
+ # cv2.waitKey(0)
+ # cv2.destroyAllWindows()
+ # return 0
+ """
+ code above displays using opencv, but this doesn't catch users pressing 'x'
+ with their mouse to close the window.... therefore matplotlib is used....
+ (thanks a lot opencv)
+ """
+ grid = plt.GridSpec(22, 1)
+ plt.subplot(grid[:19, 0])
+ plt.imshow(img, cmap='gray')
+ plt.axis('off')
+ plt.subplot(grid[21, 0])
+ plt.title('press \'q\' to continue')
+ plt.axis('off')
+ plt.show()
+
+ # f = plt.figure()
+ # ax = f.add_subplot(211)
+ # ax2 = f.add_subplot(122)
+ # ax.imshow(img, cmap='gray')
+ # ax.axis('off')
+ # ax2.set_figheight(2)
+ # ax2.title('press \'q\' to continue')
+ # ax2.axis('off')
+ # plt.show()
+
+
+"""
+reshape image to fixed width without distorting
+returns image and scale factor
+"""
+def reshape(img, width):
+ factor = width/img.shape[0]
+ return cv2.resize(img, None, fx=factor, fy=factor), factor
diff --git a/utils/raspberrypi/ctt/ctt_vc4.py b/utils/raspberrypi/ctt/ctt_vc4.py
new file mode 100755
index 00000000..7154e110
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_vc4.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# ctt_vc4.py - camera tuning tool data for VC4 platforms
+
+
+json_template = {
+ "rpi.black_level": {
+ "black_level": 4096
+ },
+ "rpi.dpc": {
+ },
+ "rpi.lux": {
+ "reference_shutter_speed": 10000,
+ "reference_gain": 1,
+ "reference_aperture": 1.0
+ },
+ "rpi.noise": {
+ },
+ "rpi.geq": {
+ },
+ "rpi.sdn": {
+ },
+ "rpi.awb": {
+ "priors": [
+ {"lux": 0, "prior": [2000, 1.0, 3000, 0.0, 13000, 0.0]},
+ {"lux": 800, "prior": [2000, 0.0, 6000, 2.0, 13000, 2.0]},
+ {"lux": 1500, "prior": [2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0]}
+ ],
+ "modes": {
+ "auto": {"lo": 2500, "hi": 8000},
+ "incandescent": {"lo": 2500, "hi": 3000},
+ "tungsten": {"lo": 3000, "hi": 3500},
+ "fluorescent": {"lo": 4000, "hi": 4700},
+ "indoor": {"lo": 3000, "hi": 5000},
+ "daylight": {"lo": 5500, "hi": 6500},
+ "cloudy": {"lo": 7000, "hi": 8600}
+ },
+ "bayes": 1
+ },
+ "rpi.agc": {
+ "metering_modes": {
+ "centre-weighted": {
+ "weights": [3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0]
+ },
+ "spot": {
+ "weights": [2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
+ },
+ "matrix": {
+ "weights": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
+ }
+ },
+ "exposure_modes": {
+ "normal": {
+ "shutter": [100, 10000, 30000, 60000, 120000],
+ "gain": [1.0, 2.0, 4.0, 6.0, 6.0]
+ },
+ "short": {
+ "shutter": [100, 5000, 10000, 20000, 120000],
+ "gain": [1.0, 2.0, 4.0, 6.0, 6.0]
+ }
+ },
+ "constraint_modes": {
+ "normal": [
+ {"bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.5, 1000, 0.5]}
+ ],
+ "highlight": [
+ {"bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.5, 1000, 0.5]},
+ {"bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.8, 1000, 0.8]}
+ ]
+ },
+ "y_target": [0, 0.16, 1000, 0.165, 10000, 0.17]
+ },
+ "rpi.alsc": {
+ 'omega': 1.3,
+ 'n_iter': 100,
+ 'luminance_strength': 0.7,
+ },
+ "rpi.contrast": {
+ "ce_enable": 1,
+ "gamma_curve": [
+ 0, 0,
+ 1024, 5040,
+ 2048, 9338,
+ 3072, 12356,
+ 4096, 15312,
+ 5120, 18051,
+ 6144, 20790,
+ 7168, 23193,
+ 8192, 25744,
+ 9216, 27942,
+ 10240, 30035,
+ 11264, 32005,
+ 12288, 33975,
+ 13312, 35815,
+ 14336, 37600,
+ 15360, 39168,
+ 16384, 40642,
+ 18432, 43379,
+ 20480, 45749,
+ 22528, 47753,
+ 24576, 49621,
+ 26624, 51253,
+ 28672, 52698,
+ 30720, 53796,
+ 32768, 54876,
+ 36864, 57012,
+ 40960, 58656,
+ 45056, 59954,
+ 49152, 61183,
+ 53248, 62355,
+ 57344, 63419,
+ 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.ccm": {
+ },
+ "rpi.sharpen": {
+ }
+}
+
+grid_size = (16, 12)
diff --git a/utils/raspberrypi/ctt/ctt_visualise.py b/utils/raspberrypi/ctt/ctt_visualise.py
new file mode 100644
index 00000000..ed2339fd
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_visualise.py
@@ -0,0 +1,43 @@
+"""
+Some code that will save virtual macbeth charts showing the difference between optimised and non-optimised matrices
+
+The function creates an image 1550 pixels wide by 1050 pixels tall and fills it with patches which are 200x200 pixels in size
+Each patch contains the ideal color, the color from the original matrix, and the color from the final matrix
+_________________
+| |
+| Ideal Color |
+|_______________|
+| Old | new |
+| Color | Color |
+|_______|_______|
+
+A nice way of showing how the optimisation changes the colors and the color matrices
+"""
+import numpy as np
+from PIL import Image
+
+
+def visualise_macbeth_chart(macbeth_rgb, original_rgb, new_rgb, output_filename):
+ image = np.zeros((1050, 1550, 3), dtype=np.uint8)
+ colorindex = -1
+ for y in range(6):
+ for x in range(4): # Creates 6 x 4 grid of macbeth chart
+ colorindex += 1
+ xlocation = 50 + 250 * x # Means there is 50px of black gap between each square, more like the real macbeth chart.
+ ylocation = 50 + 250 * y
+ for g in range(200):
+ for i in range(100):
+ image[xlocation + i, ylocation + g] = macbeth_rgb[colorindex]
+ xlocation = 150 + 250 * x
+ ylocation = 50 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = original_rgb[colorindex] # Smaller squares below to compare the old colors with the new ones
+ xlocation = 150 + 250 * x
+ ylocation = 150 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = new_rgb[colorindex]
+
+ img = Image.fromarray(image, 'RGB')
+ img.save(str(output_filename) + 'Generated Macbeth Chart.png')
diff --git a/utils/raspberrypi/delayedctrls_parse.py b/utils/raspberrypi/delayedctrls_parse.py
new file mode 100644
index 00000000..1decf73f
--- /dev/null
+++ b/utils/raspberrypi/delayedctrls_parse.py
@@ -0,0 +1,113 @@
+# SPDX-License-Identifier: BSD-2-Clause
+
+import re
+import sys
+import os
+
+if len(sys.argv) != 2:
+ print("Usage: {} <infile>".format(sys.argv[0]))
+ sys.exit()
+
+infile = sys.argv[1]
+insplit = os.path.splitext(infile)
+outfile = insplit[0] + '_parsed' + insplit[1]
+
+frame_re = re.compile(r'frame (\d+) started')
+
+delays = {
+ 'Analogue Gain': 1,
+ 'Exposure': 2,
+ 'Vertical Blanking': 2
+}
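+
+# For example, with the 'Exposure' delay of 2 above, a value written ('Write')
+# at frame N is expected to be read back ('Get') at frame N + 2; the checks at
+# the end of this script verify exactly that relationship.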
+
+ctrl_action = {
+ 'Write': {},
+ 'Get': {},
+ 'Queue': {},
+ 'No-op': {}
+}
+
+ctrl_re = {
+ 'Write': re.compile(r'Setting (.*?) to (\d+) at index (\d+)'),
+ 'No-op': re.compile(r'Queue is empty, (.*?) (.*?) (.*?)'),
+ 'Get': re.compile(r'Reading (.*?) to (\d+) at index (\d+)'),
+ 'Queue': re.compile(r'Queuing (.*?) to (\d+) at index (\d+)')
+}
+
+frame_num = -1
+
+max_delay = 0
+for k, d in delays.items():
+ if max_delay < d:
+ max_delay = d
+
+with open(infile) as f:
+ lines = f.readlines()
+
+for line in lines:
+ r = frame_re.search(line)
+ if r:
+ frame_num = int(r.group(1))
+
+ for (key, regex) in ctrl_re.items():
+ r = regex.search(line)
+ if r:
+ ctrl_action[key][(frame_num, r.group(1))] = (r.group(2), r.group(3))
+
+with open(outfile, 'wt') as f:
+ queueIndex = 1
+ f.write('{:<10}{:<15}{:<12}{:<18}{}\n'.format('Frame', 'Action', 'Gain', 'Exposure', 'Vblank'))
+ for frame in range(0, frame_num + 1):
+ for (k, a) in ctrl_action.items():
+ row = '{:<10}{:<10}'.format(frame, k)
+
+ for c in delays.keys():
+ # Tabulate all results
+ row += '{:>5} {:<10}'.format(a[(frame, c)][0] if (frame, c) in a.keys() else '---',
+ '[' + (a[(frame, c)][1] if (frame, c) in a.keys() else '-') + ']')
+
+ f.write(row.strip() + '\n')
+
+# Test the write -> get matches the set delay.
+for (frame, c) in ctrl_action['Write'].keys():
+ set_value = ctrl_action['Write'][(frame, c)][0]
+ delay_frame = frame + delays[c]
+ if (delay_frame <= frame_num):
+ if (delay_frame, c) in ctrl_action['Get']:
+ get_value = ctrl_action['Get'][(delay_frame, c)][0]
+ if get_value != set_value:
+ print('Error: {} written at frame {} to value {} != {} at frame {}'
+ .format(c, frame, set_value, get_value, delay_frame))
+ else:
+ print('Warning: {} written at frame {} to value {} did not get logged on frame {} - dropped frame?'
+ .format(c, frame, set_value, delay_frame))
+
+# Test the queue -> write matches the set delay.
+for (frame, c) in ctrl_action['Queue'].keys():
+ set_value = ctrl_action['Queue'][(frame, c)][0]
+ delay_frame = frame + max_delay - delays[c] + 1
+ if (delay_frame <= frame_num):
+ if (delay_frame, c) in ctrl_action['Write']:
+ write_value = ctrl_action['Write'][(delay_frame, c)][0]
+ if write_value != set_value:
+ print('Info: {} queued at frame {} to value {} != {} written at frame {}'
+ ' - lagging behind or double queue on a single frame!'
+ .format(c, frame, set_value, write_value, delay_frame))
+ else:
+ print('Warning: {} queued at frame {} to value {} did not get logged on frame {} - dropped frame?'
+ .format(c, frame, set_value, delay_frame))
+
+# Test the get -> write matches the set delay going backwards.
+for (frame, c) in ctrl_action['Get'].keys():
+ get_value = ctrl_action['Get'][(frame, c)][0]
+ delay_frame = frame - delays[c]
+ if (delay_frame >= 6):
+ if (delay_frame, c) in ctrl_action['Write']:
+ write_value = ctrl_action['Write'][(delay_frame, c)][0]
+ if get_value != write_value:
+ print('Info: {} got at frame {} to value {} != {} written at frame {}'
+ ' - lagging behind or double queue on a single frame!'
+ .format(c, frame, get_value, write_value, delay_frame))
+ else:
+ print('Warning: {} got at frame {} to value {} did not get written on frame {}'
+ .format(c, frame, get_value, delay_frame))
diff --git a/utils/release.sh b/utils/release.sh
new file mode 100755
index 00000000..8cc85859
--- /dev/null
+++ b/utils/release.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Prepare a project release
+
+set -e
+
+# Abort if we are not within the project root or the tree is not clean.
+if [ ! -e utils/gen-version.sh ] || [ ! -e .git ]; then
+ echo "This release script must be run from the root of libcamera git tree."
+ exit 1
+fi
+
+if ! git diff-index --quiet HEAD; then
+ echo "Tree must be clean to release."
+ exit 1
+fi
+
+# Identify current version components
+version=$(./utils/gen-version.sh)
+
+# Decide if we are here to bump major, minor, or patch release.
+case $1 in
+ major|minor|patch)
+ bump=$1;
+ ;;
+ *)
+ echo "You must specify the version bump level: (major, minor, patch)"
+ exit 1
+ ;;
+esac
+
+new_version=$(./utils/semver bump "$bump" "$version")
+
+echo "Bumping $bump"
+echo " Existing version is: $version"
+echo " New version is : $new_version"
+
+# Patch in the version to our meson.build
+sed -i -E "s/ version : '.*',/ version : '$new_version',/" meson.build
+
+# Commit the update
+git commit meson.build -esm "libcamera v$new_version"
+
+# Create a tag from that commit
+git show -s --format=%B | git tag "v$new_version" -s -F -
diff --git a/utils/rkisp1/gen-csc-table.py b/utils/rkisp1/gen-csc-table.py
new file mode 100755
index 00000000..ffc0370a
--- /dev/null
+++ b/utils/rkisp1/gen-csc-table.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2022, Ideas on Board Oy
+#
+# Generate color space conversion table coefficients with configurable
+# fixed-point precision
+
+import argparse
+import enum
+import numpy as np
+import sys
+
+
+encodings = {
+ 'rec601': [
+ [ 0.299, 0.587, 0.114 ],
+ [ -0.299 / 1.772, -0.587 / 1.772, 0.886 / 1.772 ],
+ [ 0.701 / 1.402, -0.587 / 1.402, -0.114 / 1.402 ]
+ ],
+ 'rec709': [
+ [ 0.2126, 0.7152, 0.0722 ],
+ [ -0.2126 / 1.8556, -0.7152 / 1.8556, 0.9278 / 1.8556 ],
+ [ 0.7874 / 1.5748, -0.7152 / 1.5748, -0.0722 / 1.5748 ]
+ ],
+ 'rec2020': [
+ [ 0.2627, 0.6780, 0.0593 ],
+ [ -0.2627 / 1.8814, -0.6780 / 1.8814, 0.9407 / 1.8814 ],
+ [ 0.7373 / 1.4746, -0.6780 / 1.4746, -0.0593 / 1.4746 ],
+ ],
+ 'smpte240m': [
+ [ 0.2122, 0.7013, 0.0865 ],
+ [ -0.2122 / 1.8270, -0.7013 / 1.8270, 0.9135 / 1.8270 ],
+ [ 0.7878 / 1.5756, -0.7013 / 1.5756, -0.0865 / 1.5756 ],
+ ],
+}
+
+
+class Precision(object):
+ def __init__(self, precision):
+ if precision[0].upper() != 'Q':
+ raise RuntimeError(f'Invalid precision `{precision}`')
+ prec = precision[1:].split('.')
+ if len(prec) != 2:
+ raise RuntimeError(f'Invalid precision `{precision}`')
+
+ self.__prec = [int(v) for v in prec]
+
+ @property
+ def integer(self):
+ return self.__prec[0]
+
+ @property
+ def fractional(self):
+ return self.__prec[1]
+
+ @property
+ def total(self):
+ # Add 1 for the sign bit
+ return self.__prec[0] + self.__prec[1] + 1
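+
+# For illustration: Precision('Q1.7') gives integer=1, fractional=7 and
+# total=9 bits, the extra bit being the sign bit excluded from the Q notation.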
+
+
+class Quantization(enum.Enum):
+ FULL = 0
+ LIMITED = 1
+
+
+def scale_coeff(coeff, quantization, luma):
+ """Scale a coefficient to the output range dictated by the quantization.
+
+ Parameters
+ ----------
+ coeff : float
+ The CSC matrix coefficient to scale
+ quantization : Quantization
+ The quantization, either FULL or LIMITED
+ luma : bool
+ True if the coefficient corresponds to a luma value, False otherwise
+ """
+
+ # Assume the input range is 8 bits. The output range is set by the
+ # quantization and differs between luma and chroma components for limited
+ # range.
+ in_range = 255 - 0
+ if quantization == Quantization.FULL:
+ out_range = 255 - 0
+ elif luma:
+ out_range = 235 - 16
+ else:
+ out_range = 240 - 16
+
+ return coeff * out_range / in_range
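+
+ # Worked example (illustrative numbers): for the luma coefficient 0.299
+ # with limited-range quantization, out_range = 235 - 16 = 219, so the
+ # scaled value is 0.299 * 219 / 255, which is about 0.2568.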
+
+
+def round_array(values):
+ """Round a list of signed floating point values to the closest integer while
+ preserving the (rounded) value of the sum of all elements.
+ """
+
+ # Calculate the rounding error as the difference between the rounded sum of
+ # values and the sum of rounded values. This is by definition an integer
+ # (positive or negative), which indicates how many values will need to be
+ # 'flipped' to the opposite rounding.
+ rounded_values = [round(value) for value in values]
+ sum_values = round(sum(values))
+ sum_error = sum_values - sum(rounded_values)
+
+ if sum_error == 0:
+ return rounded_values
+
+ # The next step is to distribute the error among the values, in a way that
+ # will minimize the relative error introduced in individual values. We
+ # extend the values list with the rounded value and original index for each
+ # element, and sort by rounding error. Then we modify the elements with the
+ # highest or lowest error, depending on whether the sum error is negative
+ # or positive.
+
+ values = [[value, round(value), index] for index, value in enumerate(values)]
+ values.sort(key=lambda v: v[1] - v[0])
+
+ # It could also be argued that the key for the sort order should not be the
+ # absolute rounding error but the relative error, as the impact of identical
+ # rounding errors will differ for coefficients with widely different values.
+ # This is a topic for further research.
+ #
+ # values.sort(key=lambda v: (v[1] - v[0]) / abs(v[0]))
+
+ if sum_error > 0:
+ for i in range(sum_error):
+ values[i][1] += 1
+ else:
+ for i in range(-sum_error):
+ values[len(values) - i - 1][1] -= 1
+
+ # Finally, sort back by index, make sure the total rounding error is now 0,
+ # and return the rounded values.
+ values.sort(key=lambda v: v[2])
+ values = [value[1] for value in values]
+ assert(sum(values) == sum_values)
+
+ return values
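+
+# Worked example (illustrative): for the input [1.4, 1.4, 1.2] the rounded sum
+# is 4 but the individually rounded values [1, 1, 1] only sum to 3, so one
+# element with the largest downward rounding error is flipped, giving [2, 1, 1].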
+
+
+def main(argv):
+
+ # Parse command line arguments.
+ parser = argparse.ArgumentParser(
+ description='Generate color space conversion table coefficients with '
+ 'configurable fixed-point precision.'
+ )
+ parser.add_argument('--invert', '-i', action='store_true',
+ help='Invert the color space conversion (YUV -> RGB)')
+ parser.add_argument('--precision', '-p', default='Q1.7',
+ help='The output fixed point precision in Q notation (sign bit excluded)')
+ parser.add_argument('--quantization', '-q', choices=['full', 'limited'],
+ default='limited', help='Quantization range')
+ parser.add_argument('encoding', choices=encodings.keys(), help='YCbCr encoding')
+ args = parser.parse_args(argv[1:])
+
+ try:
+ precision = Precision(args.precision)
+ except Exception:
+ print(f'Invalid precision `{args.precision}`')
+ return 1
+
+ encoding = encodings[args.encoding]
+ quantization = Quantization[args.quantization.upper()]
+
+ # Scale and round the encoding coefficients based on the precision and
+ # quantization range.
+ luma = True
+ scaled_coeffs = []
+ for line in encoding:
+ line = [scale_coeff(coeff, quantization, luma) for coeff in line]
+ scaled_coeffs.append(line)
+ luma = False
+
+ if args.invert:
+ scaled_coeffs = np.linalg.inv(scaled_coeffs)
+
+ rounded_coeffs = []
+ for line in scaled_coeffs:
+ line = [coeff * (1 << precision.fractional) for coeff in line]
+ # For the RGB to YUV conversion, use a rounding method that preserves
+ # the rounded sum of each line to avoid biases and overflow, as the sum
+ # of luma and chroma coefficients should be 1.0 and 0.0 respectively
+ # (in full range). For the YUV to RGB conversion, there is no such
+ # constraint, so use simple rounding.
+ if args.invert:
+ line = [round(coeff) for coeff in line]
+ else:
+ line = round_array(line)
+
+ # Convert coefficients to the number of bits selected by the precision.
+ # Negative values will be turned into positive integers using 2's
+ # complement.
+ line = [coeff & ((1 << precision.total) - 1) for coeff in line]
+ rounded_coeffs.append(line)
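+
+ # For illustration: with the default Q1.7 precision (9 bits including the
+ # sign bit), a coefficient of -33 would be stored as -33 & 0x1ff = 0x1df.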
+
+ # Print the result as C code.
+ nbits = 1 << (precision.total - 1).bit_length()
+ nbytes = nbits // 4
+ print(f'static const u{nbits} {"yuv2rgb" if args.invert else "rgb2yuv"}_{args.encoding}_{quantization.name.lower()}_coeffs[] = {{')
+
+ for line in rounded_coeffs:
+ line = [f'0x{coeff:0{nbytes}x}' for coeff in line]
+
+ print(f'\t{", ".join(line)},')
+
+ print('};')
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/utils/rkisp1/rkisp1-capture.sh b/utils/rkisp1/rkisp1-capture.sh
index 4d09f5d5..d767e31d 100755
--- a/utils/rkisp1/rkisp1-capture.sh
+++ b/utils/rkisp1/rkisp1-capture.sh
@@ -4,8 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# rkisp-capture.sh - Capture processed frames from cameras based on the
-# Rockchip ISP1
+# Capture processed frames from cameras based on the Rockchip ISP1
#
# The scripts makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
@@ -14,6 +13,37 @@
# - raw2rgbpnm (from git://git.retiisi.org.uk/~sailus/raw2rgbpnm.git)
# - yavta (from git://git.ideasonboard.org/yavta.git)
+# Return the entity connected to a given pad
+# $1: The pad, expressed as "entity":index
+mc_remote_entity() {
+ local entity="${1%:*}"
+ local pad="${1#*:}"
+
+ ${mediactl} -p | awk '
+/^- entity / {
+ in_entity=0
+}
+
+/^- entity [0-9]+: '"${entity}"' / {
+ in_entity=1
+}
+
+/^[ \t]+pad/ {
+ in_pad=0
+}
+
+/^[ \t]+pad'"${pad}"': / {
+ in_pad=1
+}
+
+/^[ \t]+(<-|->) "[^"]+"/ {
+ if (in_entity && in_pad) {
+ print gensub(/^[^"]+"([^"]+)":([0-9]+).*$/, "\\1", "g")
+ exit
+ }
+}'
+}
+
# Locate the sensor entity
find_sensor() {
local bus
@@ -28,6 +58,17 @@ find_sensor() {
echo "$sensor_name $bus"
}
+# Locate the CSI-2 receiver
+find_csi2_rx() {
+ local sensor_name=$1
+ local csi2_rx
+
+ csi2_rx=$(mc_remote_entity "$sensor_name:0")
+ if [ "$csi2_rx" != rkisp1_isp ] ; then
+ echo "$csi2_rx"
+ fi
+}
+
# Locate the media device
find_media_device() {
local mdev
@@ -51,7 +92,7 @@ get_sensor_format() {
local format
local sensor=$1
- format=$($mediactl --get-v4l2 "'$sensor':0" | sed 's/\[\([^ ]*\).*/\1/')
+ format=$($mediactl --get-v4l2 "'$sensor':0" | grep 'fmt:' | sed 's/.*\(fmt:\S*\).*/\1/')
sensor_mbus_code=$(echo $format | sed 's/fmt:\([A-Z0-9_]*\).*/\1/')
sensor_size=$(echo $format | sed 's/[^\/]*\/\([0-9x]*\).*/\1/')
@@ -63,15 +104,27 @@ configure_pipeline() {
local format="fmt:$sensor_mbus_code/$sensor_size"
local capture_mbus_code=$1
local capture_size=$2
+ local csi2_rx
echo "Configuring pipeline for $sensor in $format"
+ csi2_rx=$(find_csi2_rx "$sensor")
+
$mediactl -r
- $mediactl -l "'$sensor':0 -> 'rkisp1_isp':0 [1]"
+ if [ -n "$csi2_rx" ] ; then
+ $mediactl -l "'$sensor':0 -> '$csi2_rx':0 [1]"
+ $mediactl -l "'$csi2_rx':1 -> 'rkisp1_isp':0 [1]"
+ else
+ $mediactl -l "'$sensor':0 -> 'rkisp1_isp':0 [1]"
+ fi
$mediactl -l "'rkisp1_isp':2 -> 'rkisp1_resizer_mainpath':0 [1]"
$mediactl -V "\"$sensor\":0 [$format]"
+ if [ -n "$csi2_rx" ] ; then
+ $mediactl -V "'$csi2_rx':0 [$format]"
+ $mediactl -V "'$csi2_rx':1 [$format]"
+ fi
$mediactl -V "'rkisp1_isp':0 [$format crop:(0,0)/$sensor_size]"
$mediactl -V "'rkisp1_isp':2 [fmt:$capture_mbus_code/$sensor_size crop:(0,0)/$sensor_size]"
$mediactl -V "'rkisp1_resizer_mainpath':0 [fmt:$capture_mbus_code/$sensor_size crop:(0,0)/$sensor_size]"
@@ -88,6 +141,7 @@ capture_frames() {
if [[ $save_file -eq 1 ]]; then
file_op="--file=/tmp/frame-#.bin"
+ rm -f /tmp/frame-*.bin
fi
yavta -c$frame_count -n5 -I -f $capture_format -s $capture_size \
@@ -170,7 +224,7 @@ mediactl="media-ctl -d $mdev"
get_sensor_format "$sensor"
if [[ $raw == true ]] ; then
- capture_format=$(echo $sensor_mbus_code | sed 's/_[0-9X]$//')
+ capture_format=$(echo $sensor_mbus_code | sed 's/_[0-9X]*$//')
capture_mbus_code=$sensor_mbus_code
else
capture_format=YUYV
diff --git a/utils/run-dist.sh b/utils/run-dist.sh
new file mode 100644
index 00000000..e89c3733
--- /dev/null
+++ b/utils/run-dist.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# On a meson dist run, generate the version string and store it in a file.
+# This will later be picked up by the utils/gen-version.sh script and used
+# instead of re-generating it. This way, if we are not building in the upstream
+# git source tree, the upstream version information will be preserved.
+
+cd "$MESON_SOURCE_ROOT" || return
+./utils/gen-version.sh > "$MESON_DIST_ROOT"/.tarball-version
diff --git a/utils/semver b/utils/semver
new file mode 100755
index 00000000..a1604250
--- /dev/null
+++ b/utils/semver
@@ -0,0 +1,446 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: Apache-2.0
+
+set -o errexit -o nounset -o pipefail
+
+NAT='0|[1-9][0-9]*'
+ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*'
+IDENT="$NAT|$ALPHANUM"
+FIELD='[0-9A-Za-z-]+'
+
+SEMVER_REGEX="\
+^[vV]?\
+($NAT)\\.($NAT)\\.($NAT)\
+(\\-(${IDENT})(\\.(${IDENT}))*)?\
+(\\+${FIELD}(\\.${FIELD})*)?$"
+
+PROG=semver
+PROG_VERSION="3.4.0"
+
+USAGE="\
+Usage:
+ $PROG bump major <version>
+ $PROG bump minor <version>
+ $PROG bump patch <version>
+ $PROG bump prerel|prerelease [<prerel>] <version>
+ $PROG bump build <build> <version>
+ $PROG bump release <version>
+ $PROG get major <version>
+ $PROG get minor <version>
+ $PROG get patch <version>
+ $PROG get prerel|prerelease <version>
+ $PROG get build <version>
+ $PROG get release <version>
+ $PROG compare <version> <other_version>
+ $PROG diff <version> <other_version>
+ $PROG validate <version>
+ $PROG --help
+ $PROG --version
+
+Arguments:
+ <version> A version must match the following regular expression:
+ \"${SEMVER_REGEX}\"
+ In English:
+ -- The version must match X.Y.Z[-PRERELEASE][+BUILD]
+ where X, Y and Z are non-negative integers.
+ -- PRERELEASE is a dot separated sequence of non-negative integers and/or
+ identifiers composed of alphanumeric characters and hyphens (with
+ at least one non-digit). Numeric identifiers must not have leading
+ zeros. A hyphen (\"-\") introduces this optional part.
+ -- BUILD is a dot separated sequence of identifiers composed of alphanumeric
+ characters and hyphens. A plus (\"+\") introduces this optional part.
+
+ <other_version> See <version> definition.
+
+ <prerel> A string as defined by PRERELEASE above. Or, it can be a PRERELEASE
+ prototype string followed by a dot.
+
+ <build> A string as defined by BUILD above.
+
+Options:
+ -v, --version Print the version of this tool.
+ -h, --help Print this help message.
+
+Commands:
+ bump Bump by one of major, minor, patch; zeroing or removing
+ subsequent parts. \"bump prerel\" (or its synonym \"bump prerelease\")
+ sets the PRERELEASE part and removes any BUILD part. A trailing dot
+ in the <prerel> argument introduces an incrementing numeric field
+ which is added or bumped. If no <prerel> argument is provided, an
+ incrementing numeric field is introduced/bumped. \"bump build\" sets
+ the BUILD part. \"bump release\" removes any PRERELEASE or BUILD parts.
+ The bumped version is written to stdout.
+
+ get Extract given part of <version>, where part is one of major, minor,
+ patch, prerel (alternatively: prerelease), build, or release.
+
+ compare Compare <version> with <other_version>, output to stdout the
+ following values: -1 if <other_version> is newer, 0 if equal, 1 if
+ older. The BUILD part is not used in comparisons.
+
+ diff Compare <version> with <other_version>, output to stdout the
+ difference between two versions by the release type (MAJOR, MINOR,
+ PATCH, PRERELEASE, BUILD).
+
+ validate Validate if <version> follows the SEMVER pattern (see <version>
+ definition). Print 'valid' to stdout if the version is valid, otherwise
+ print 'invalid'.
+
+See also:
+ https://semver.org -- Semantic Versioning 2.0.0"
+
+function error {
+ echo -e "$1" >&2
+ exit 1
+}
+
+function usage_help {
+ error "$USAGE"
+}
+
+function usage_version {
+ echo -e "${PROG}: $PROG_VERSION"
+ exit 0
+}
+
+# normalize the "part" keywords to a canonical string. At present,
+# only "prerelease" is normalized to "prerel".
+
+function normalize_part {
+ if [ "$1" == "prerelease" ]
+ then
+ echo "prerel"
+ else
+ echo "$1"
+ fi
+}
+
+function validate_version {
+ local version=$1
+ if [[ "$version" =~ $SEMVER_REGEX ]]; then
+ # if a second argument is passed, store the result in var named by $2
+ if [ "$#" -eq "2" ]; then
+ local major=${BASH_REMATCH[1]}
+ local minor=${BASH_REMATCH[2]}
+ local patch=${BASH_REMATCH[3]}
+ local prere=${BASH_REMATCH[4]}
+ local build=${BASH_REMATCH[8]}
+ eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")"
+ else
+ echo "$version"
+ fi
+ else
+ error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information."
+ fi
+}
+
+function is_nat {
+ [[ "$1" =~ ^($NAT)$ ]]
+}
+
+function is_null {
+ [ -z "$1" ]
+}
+
+function order_nat {
+ [ "$1" -lt "$2" ] && { echo -1 ; return ; }
+ [ "$1" -gt "$2" ] && { echo 1 ; return ; }
+ echo 0
+}
+
+function order_string {
+ [[ $1 < $2 ]] && { echo -1 ; return ; }
+ [[ $1 > $2 ]] && { echo 1 ; return ; }
+ echo 0
+}
+
+# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them
+# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1)
+# is less-than, equal, or greater-than the right array ($2). The longer array
+# is considered greater-than the shorter if the shorter is a prefix of the longer.
+#
+function compare_fields {
+ local l="$1[@]"
+ local r="$2[@]"
+ local leftfield=( "${!l}" )
+ local rightfield=( "${!r}" )
+ local left
+ local right
+
+ local i=$(( -1 ))
+ local order=$(( 0 ))
+
+ while true
+ do
+ [ $order -ne 0 ] && { echo $order ; return ; }
+
+ : $(( i++ ))
+ left="${leftfield[$i]}"
+ right="${rightfield[$i]}"
+
+ is_null "$left" && is_null "$right" && { echo 0 ; return ; }
+ is_null "$left" && { echo -1 ; return ; }
+ is_null "$right" && { echo 1 ; return ; }
+
+ is_nat "$left" && is_nat "$right" && { order=$(order_nat "$left" "$right") ; continue ; }
+ is_nat "$left" && { echo -1 ; return ; }
+ is_nat "$right" && { echo 1 ; return ; }
+ { order=$(order_string "$left" "$right") ; continue ; }
+ done
+}
+
+# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array
+function compare_version {
+ local order
+ validate_version "$1" V
+ validate_version "$2" V_
+
+ # compare major, minor, patch
+
+ local left=( "${V[0]}" "${V[1]}" "${V[2]}" )
+ local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" )
+
+ order=$(compare_fields left right)
+ [ "$order" -ne 0 ] && { echo "$order" ; return ; }
+
+ # compare pre-release ids when M.m.p are equal
+
+ local prerel="${V[3]:1}"
+ local prerel_="${V_[3]:1}"
+ local left=( ${prerel//./ } )
+ local right=( ${prerel_//./ } )
+
+ # if left and right have no pre-release part, then left equals right
+ # if only one of left/right has pre-release part, that one is less than simple M.m.p
+
+ [ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; }
+ [ -z "$prerel" ] && { echo 1 ; return ; }
+ [ -z "$prerel_" ] && { echo -1 ; return ; }
+
+ # otherwise, compare the pre-release id's
+
+ compare_fields left right
+}
+
+# render_prerel -- return a prerel field with a trailing numeric string
+# usage: render_prerel numeric [prefix-string]
+#
+function render_prerel {
+ if [ -z "$2" ]
+ then
+ echo "${1}"
+ else
+ echo "${2}${1}"
+ fi
+}
+
+# extract_prerel -- extract prefix and trailing numeric portions of a pre-release part
+# usage: extract_prerel prerel prerel_parts
+# The prefix and trailing numeric parts are returned in "prerel_parts".
+#
+PREFIX_ALPHANUM='[.0-9A-Za-z-]*[.A-Za-z-]'
+DIGITS='[0-9][0-9]*'
+EXTRACT_REGEX="^(${PREFIX_ALPHANUM})*(${DIGITS})$"
+
+function extract_prerel {
+ local prefix; local numeric;
+
+ if [[ "$1" =~ $EXTRACT_REGEX ]]
+ then # found prefix and trailing numeric parts
+ prefix="${BASH_REMATCH[1]}"
+ numeric="${BASH_REMATCH[2]}"
+ else # no numeric part
+ prefix="${1}"
+ numeric=
+ fi
+
+ eval "$2=(\"$prefix\" \"$numeric\")"
+}
+
+# bump_prerel -- return the new pre-release part based on previous pre-release part
+# and prototype for bump
+# usage: bump_prerel proto previous
+#
+function bump_prerel {
+ local proto; local prev_prefix; local prev_numeric;
+
+ # case one: no trailing dot in prototype => simply replace previous with proto
+ if [[ ! ( "$1" =~ \.$ ) ]]
+ then
+ echo "$1"
+ return
+ fi
+
+ proto="${1%.}" # discard trailing dot marker from prototype
+
+ extract_prerel "${2#-}" prerel_parts # extract parts of previous pre-release
+# shellcheck disable=SC2154
+ prev_prefix="${prerel_parts[0]}"
+ prev_numeric="${prerel_parts[1]}"
+
+ # case two: bump or append numeric to previous pre-release part
+ if [ "$proto" == "+" ] # dummy "+" indicates no prototype argument provided
+ then
+ if [ -n "$prev_numeric" ]
+ then
+ : $(( ++prev_numeric )) # previous pre-release is already numbered, bump it
+ render_prerel "$prev_numeric" "$prev_prefix"
+ else
+ render_prerel 1 "$prev_prefix" # append starting number
+ fi
+ return
+ fi
+
+ # case three: set, bump, or append using prototype prefix
+ if [ "$prev_prefix" != "$proto" ]
+ then
+ render_prerel 1 "$proto" # proto not same pre-release; set and start at '1'
+ elif [ -n "$prev_numeric" ]
+ then
+ : $(( ++prev_numeric )) # pre-release is numbered; bump it
+ render_prerel "$prev_numeric" "$prev_prefix"
+ else
+ render_prerel 1 "$prev_prefix" # start pre-release at number '1'
+ fi
+}
+
+function command_bump {
+ local new; local version; local sub_version; local command;
+
+ command="$(normalize_part "$1")"
+
+ case $# in
+ 2) case "$command" in
+ major|minor|patch|prerel|release) sub_version="+."; version=$2;;
+ *) usage_help;;
+ esac ;;
+ 3) case "$command" in
+ prerel|build) sub_version=$2 version=$3 ;;
+ *) usage_help;;
+ esac ;;
+ *) usage_help;;
+ esac
+
+ validate_version "$version" parts
+ # shellcheck disable=SC2154
+ local major="${parts[0]}"
+ local minor="${parts[1]}"
+ local patch="${parts[2]}"
+ local prere="${parts[3]}"
+ local build="${parts[4]}"
+
+ case "$command" in
+ major) new="$((major + 1)).0.0";;
+ minor) new="${major}.$((minor + 1)).0";;
+ patch) new="${major}.${minor}.$((patch + 1))";;
+ release) new="${major}.${minor}.${patch}";;
+ prerel) new=$(validate_version "${major}.${minor}.${patch}-$(bump_prerel "$sub_version" "$prere")");;
+ build) new=$(validate_version "${major}.${minor}.${patch}${prere}+${sub_version}");;
+ *) usage_help ;;
+ esac
+
+ echo "$new"
+ exit 0
+}
+
+function command_compare {
+ local v; local v_;
+
+ case $# in
+ 2) v=$(validate_version "$1"); v_=$(validate_version "$2") ;;
+ *) usage_help ;;
+ esac
+
+ set +u # need unset array element to evaluate to null
+ compare_version "$v" "$v_"
+ exit 0
+}
+
+function command_diff {
+ validate_version "$1" v1_parts
+ # shellcheck disable=SC2154
+ local v1_major="${v1_parts[0]}"
+ local v1_minor="${v1_parts[1]}"
+ local v1_patch="${v1_parts[2]}"
+ local v1_prere="${v1_parts[3]}"
+ local v1_build="${v1_parts[4]}"
+
+ validate_version "$2" v2_parts
+ # shellcheck disable=SC2154
+ local v2_major="${v2_parts[0]}"
+ local v2_minor="${v2_parts[1]}"
+ local v2_patch="${v2_parts[2]}"
+ local v2_prere="${v2_parts[3]}"
+ local v2_build="${v2_parts[4]}"
+
+ if [ "${v1_major}" != "${v2_major}" ]; then
+ echo "major"
+ elif [ "${v1_minor}" != "${v2_minor}" ]; then
+ echo "minor"
+ elif [ "${v1_patch}" != "${v2_patch}" ]; then
+ echo "patch"
+ elif [ "${v1_prere}" != "${v2_prere}" ]; then
+ echo "prerelease"
+ elif [ "${v1_build}" != "${v2_build}" ]; then
+ echo "build"
+ fi
+}
+
+# shellcheck disable=SC2034
+function command_get {
+ local part version
+
+ if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then
+ usage_help
+ exit 0
+ fi
+
+ part="$1"
+ version="$2"
+
+ validate_version "$version" parts
+ local major="${parts[0]}"
+ local minor="${parts[1]}"
+ local patch="${parts[2]}"
+ local prerel="${parts[3]:1}"
+ local build="${parts[4]:1}"
+ local release="${major}.${minor}.${patch}"
+
+ part="$(normalize_part "$part")"
+
+ case "$part" in
+ major|minor|patch|release|prerel|build) echo "${!part}" ;;
+ *) usage_help ;;
+ esac
+
+ exit 0
+}
+
+function command_validate {
+ if [[ "$#" -ne "1" ]]; then
+ usage_help
+ fi
+
+ if [[ "$1" =~ $SEMVER_REGEX ]]; then
+ echo "valid"
+ else
+ echo "invalid"
+ fi
+
+ exit 0
+}
+
+case $# in
+ 0) echo "Unknown command: $*"; usage_help;;
+esac
+
+case $1 in
+ --help|-h) echo -e "$USAGE"; exit 0;;
+ --version|-v) usage_version ;;
+ bump) shift; command_bump "$@";;
+ get) shift; command_get "$@";;
+ compare) shift; command_compare "$@";;
+ diff) shift; command_diff "$@";;
+ validate) shift; command_validate "$@";;
+ *) echo "Unknown arguments: $*"; usage_help;;
+esac
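+
+# Illustrative invocations (examples only, output shown after '->'):
+#   semver bump minor 0.3.2           -> 0.4.0
+#   semver compare 1.2.3 1.2.4        -> -1
+#   semver get prerel 1.0.0-rc.1+b42  -> rc.1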
diff --git a/utils/tracepoints/analyze-ipa-trace.py b/utils/tracepoints/analyze-ipa-trace.py
new file mode 100755
index 00000000..92e8a235
--- /dev/null
+++ b/utils/tracepoints/analyze-ipa-trace.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020, Google Inc.
+#
+# Author: Paul Elder <paul.elder@ideasonboard.com>
+#
+# Example of how to extract information from libcamera lttng traces
+
+import argparse
+import bt2
+import statistics as stats
+import sys
+
+# pipeline -> {function -> stack(timestamps)}
+timestamps = {}
+
+# pipeline:function -> samples[]
+samples = {}
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='A simple analysis script to get statistics on time taken for IPA calls')
+ parser.add_argument('-p', '--pipeline', type=str,
+ help='Name of pipeline to filter for')
+ parser.add_argument('trace_path', type=str,
+ help='Path to lttng trace (eg. ~/lttng-traces/demo-20201029-184003)')
+ args = parser.parse_args(argv[1:])
+
+ traces = bt2.TraceCollectionMessageIterator(args.trace_path)
+ for msg in traces:
+ if type(msg) is not bt2._EventMessageConst or \
+ 'pipeline_name' not in msg.event.payload_field or \
+ (args.pipeline is not None and \
+ msg.event.payload_field['pipeline_name'] != args.pipeline):
+ continue
+
+ pipeline = msg.event.payload_field['pipeline_name']
+ event = msg.event.name
+ func = msg.event.payload_field['function_name']
+ timestamp_ns = msg.default_clock_snapshot.ns_from_origin
+
+ if event == 'libcamera:ipa_call_begin':
+ if pipeline not in timestamps:
+ timestamps[pipeline] = {}
+ if func not in timestamps[pipeline]:
+ timestamps[pipeline][func] = []
+ timestamps[pipeline][func].append(timestamp_ns)
+
+ if event == 'libcamera:ipa_call_end':
+ ts = timestamps[pipeline][func].pop()
+ key = f'{pipeline}:{func}'
+ if key not in samples:
+ samples[key] = []
+ samples[key].append(timestamp_ns - ts)
+
+ # Compute stats
+ rows = []
+ rows.append(['pipeline:function', 'min', 'max', 'mean', 'stddev'])
+ for k, v in samples.items():
+ mean = int(stats.mean(v))
+ stddev = int(stats.stdev(v))
+ minv = min(v)
+ maxv = max(v)
+ rows.append([k, str(minv), str(maxv), str(mean), str(stddev)])
+
+ # Get maximum string width for every column
+ widths = []
+ for i in range(len(rows[0])):
+ widths.append(max([len(row[i]) for row in rows]))
+
+ # Print stats table
+ for row in rows:
+ fmt = [row[i].rjust(widths[i]) for i in range(1, 5)]
+ print('{} {} {} {} {}'.format(row[0].ljust(widths[0]), *fmt))
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/utils/tuning/README.rst b/utils/tuning/README.rst
new file mode 100644
index 00000000..89a1d61e
--- /dev/null
+++ b/utils/tuning/README.rst
@@ -0,0 +1,20 @@
+.. SPDX-License-Identifier: CC-BY-SA-4.0
+
+libcamera tuning tools
+======================
+
+.. Note:: The tuning tools are still very much work in progress. If in doubt,
+ please ask on the mailing list.
+
+.. todo::
+ Write documentation
+
+Installation of dependencies
+----------------------------
+
+::
+ # Using a venv
+ python3 -m venv venv
+ . ./venv/bin/activate
+ pip3 install -r requirements.txt
+
diff --git a/utils/tuning/config-example.yaml b/utils/tuning/config-example.yaml
new file mode 100644
index 00000000..1b7f52cd
--- /dev/null
+++ b/utils/tuning/config-example.yaml
@@ -0,0 +1,12 @@
+general:
+ disable: []
+ plot: []
+ alsc:
+ do_alsc_colour: 1
+ luminance_strength: 0.5
+ awb:
+ greyworld: 0
+ macbeth:
+ small: 1
+ show: 0
+# blacklevel: 32
\ No newline at end of file
diff --git a/utils/tuning/libtuning/__init__.py b/utils/tuning/libtuning/__init__.py
new file mode 100644
index 00000000..93049976
--- /dev/null
+++ b/utils/tuning/libtuning/__init__.py
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.utils import *
+from libtuning.libtuning import *
+
+from libtuning.image import *
+from libtuning.macbeth import *
+
+from libtuning.average import *
+from libtuning.gradient import *
+from libtuning.smoothing import *
diff --git a/utils/tuning/libtuning/average.py b/utils/tuning/libtuning/average.py
new file mode 100644
index 00000000..c41075a1
--- /dev/null
+++ b/utils/tuning/libtuning/average.py
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Wrapper for numpy averaging functions to enable duck-typing
+
+import numpy as np
+
+
+# @brief Wrapper for np averaging functions so that they can be duck-typed
+class Average(object):
+ def __init__(self):
+ pass
+
+ def average(self, np_array):
+ raise NotImplementedError
+
+
+class Mean(Average):
+ def average(self, np_array):
+ return np.mean(np_array)
diff --git a/utils/tuning/libtuning/ctt_awb.py b/utils/tuning/libtuning/ctt_awb.py
new file mode 100644
index 00000000..240f37e6
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_awb.py
@@ -0,0 +1,378 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for AWB
+
+import logging
+
+import matplotlib.pyplot as plt
+from bisect import bisect_left
+from scipy.optimize import fmin
+import numpy as np
+
+from .image import Image
+
+logger = logging.getLogger(__name__)
+
+"""
+obtain piecewise linear approximation for colour curve
+"""
+def awb(imgs, cal_cr_list, cal_cb_list, plot):
+ """
+ condense alsc calibration tables into one dictionary
+ """
+ if cal_cr_list is None:
+ colour_cals = None
+ else:
+ colour_cals = {}
+ for cr, cb in zip(cal_cr_list, cal_cb_list):
+ cr_tab = cr['table']
+ cb_tab = cb['table']
+ """
+ normalise tables so min value is 1
+ """
+ cr_tab = cr_tab/np.min(cr_tab)
+ cb_tab = cb_tab/np.min(cb_tab)
+ colour_cals[cr['ct']] = [cr_tab, cb_tab]
+ """
+ obtain data from greyscale macbeth patches
+ """
+ rb_raw = []
+ rbs_hat = []
+ for Img in imgs:
+ logger.info(f'Processing {Img.name}')
+ """
+ get greyscale patches with alsc applied if alsc enabled.
+ Note: if alsc is disabled then colour_cals will be set to None and the
+ function will just return the greyscale patches
+ """
+ r_patchs, b_patchs, g_patchs = get_alsc_patches(Img, colour_cals)
+ """
+ calculate ratio of r, b to g
+ """
+ r_g = np.mean(r_patchs/g_patchs)
+ b_g = np.mean(b_patchs/g_patchs)
+ logger.info(f' r : {r_g:.4f} b : {b_g:.4f}')
+ """
+ The curve tends to be better behaved in so-called hatspace.
+ R, B, G represent the individual channels. The colour curve is plotted in
+ r, b space, where:
+ r = R/G
+ b = B/G
+ This will be referred to as dehatspace... (sorry)
+ Hatspace is defined as:
+ r_hat = R/(R+B+G)
+ b_hat = B/(R+B+G)
+ To convert from dehatspace to hatspace (hat operation):
+ r_hat = r/(1+r+b)
+ b_hat = b/(1+r+b)
+ To convert from hatspace to dehatspace (dehat operation):
+ r = r_hat/(1-r_hat-b_hat)
+ b = b_hat/(1-r_hat-b_hat)
+ Proof is left as an exercise to the reader...
+ Throughout the code, r and b are sometimes referred to as r_g and b_g
+ as a reminder that they are ratios
+ """
+ r_g_hat = r_g/(1+r_g+b_g)
+ b_g_hat = b_g/(1+r_g+b_g)
+ logger.info(f' r_hat : {r_g_hat:.4f} b_hat : {b_g_hat:.4f}')
+ rbs_hat.append((r_g_hat, b_g_hat, Img.color))
+ rb_raw.append((r_g, b_g))
+
+ logger.info('Finished processing images')
+ """
+ sort all lists simultaneously by r_hat
+ """
+ rbs_zip = list(zip(rbs_hat, rb_raw))
+ rbs_zip.sort(key=lambda x: x[0][0])
+ rbs_hat, rb_raw = list(zip(*rbs_zip))
+ """
+ unzip tuples ready for processing
+ """
+ rbs_hat = list(zip(*rbs_hat))
+ rb_raw = list(zip(*rb_raw))
+ """
+ fit a quadratic curve to r_g_hat and b_g_hat
+ """
+ a, b, c = np.polyfit(rbs_hat[0], rbs_hat[1], 2)
+ logger.info('Fit quadratic curve in hatspace')
+ """
+ the algorithm now approximates the shortest distance from each point to the
+ curve in dehatspace. Since the fit is done in hatspace, it is easier to
+ find the actual shortest distance in hatspace and use the projection back
+ into dehatspace as an overestimate.
+ The distance will be used for two things:
+ 1) In the case that colour temperature does not strictly decrease with
+ increasing r/g, the closest point to the line will be chosen out of an
+ increasing pair of colours.
+
+ 2) To calculate transverse negative and positive, the maximum positive
+ and negative distance from the line are chosen. This benefits from the
+ overestimate as the transverse pos/neg are upper bound values.
+ """
+ """
+ define fit function
+ """
+ def f(x):
+ return a*x**2 + b*x + c
+ """
+ iterate over points (R, B are x and y coordinates of points) and calculate
+ distance to line in dehatspace
+ """
+ dists = []
+ for i, (R, B) in enumerate(zip(rbs_hat[0], rbs_hat[1])):
+ """
+ define function to minimise as square distance between datapoint and
+ point on curve. Squaring is monotonic so minimising radius squared is
+ equivalent to minimising radius
+ """
+ def f_min(x):
+ y = f(x)
+ return((x-R)**2+(y-B)**2)
+ """
+ perform optimisation with scipy.optimize.fmin
+ """
+ x_hat = fmin(f_min, R, disp=0)[0]
+ y_hat = f(x_hat)
+ """
+ dehat
+ """
+ x = x_hat/(1-x_hat-y_hat)
+ y = y_hat/(1-x_hat-y_hat)
+ rr = R/(1-R-B)
+ bb = B/(1-R-B)
+ """
+ calculate euclidean distance in dehatspace
+ """
+ dist = ((x-rr)**2+(y-bb)**2)**0.5
+ """
+ return negative if point is below the fit curve
+ """
+ if (x+y) > (rr+bb):
+ dist *= -1
+ dists.append(dist)
+ logger.info('Found closest point on fit line to each point in dehatspace')
+ """
+ calculate wiggle factors in awb. 10% added since this is an upper bound
+ """
+ transverse_neg = - np.min(dists) * 1.1
+ transverse_pos = np.max(dists) * 1.1
+ logger.info(f'Transverse pos : {transverse_pos:.5f}')
+ logger.info(f'Transverse neg : {transverse_neg:.5f}')
+ """
+ set minimum transverse wiggles to 0.01.
+ Wiggle factors dictate how far off of the curve the algorithm searches. 0.01
+ is a suitable minimum that gives better results for lighting conditions not
+ within the calibration dataset. Anything less will generalise poorly.
+ """
+ if transverse_pos < 0.01:
+ transverse_pos = 0.01
+ logger.info('Forced transverse pos to 0.01')
+ if transverse_neg < 0.01:
+ transverse_neg = 0.01
+ logger.info('Forced transverse neg to 0.01')
+
+ """
+ generate new b_hat values at each r_hat according to fit
+ """
+ r_hat_fit = np.array(rbs_hat[0])
+ b_hat_fit = a*r_hat_fit**2 + b*r_hat_fit + c
+ """
+ transform from hatspace to dehatspace
+ """
+ r_fit = r_hat_fit/(1-r_hat_fit-b_hat_fit)
+ b_fit = b_hat_fit/(1-r_hat_fit-b_hat_fit)
+ c_fit = np.round(rbs_hat[2], 0)
+ """
+ round to 4dp
+ """
+ r_fit = np.where((1000*r_fit) % 1 <= 0.05, r_fit+0.0001, r_fit)
+ r_fit = np.where((1000*r_fit) % 1 >= 0.95, r_fit-0.0001, r_fit)
+ b_fit = np.where((1000*b_fit) % 1 <= 0.05, b_fit+0.0001, b_fit)
+ b_fit = np.where((1000*b_fit) % 1 >= 0.95, b_fit-0.0001, b_fit)
+ r_fit = np.round(r_fit, 4)
+ b_fit = np.round(b_fit, 4)
+ """
+ The following code ensures that colour temperature decreases with
+ increasing r/g
+ """
+ """
+ iterate backwards over list for easier indexing
+ """
+ i = len(c_fit) - 1
+ while i > 0:
+ if c_fit[i] > c_fit[i-1]:
+ logger.info('Colour temperature increase found')
+ logger.info(f'{c_fit[i - 1]} K at r = {r_fit[i - 1]} to ')
+ logger.info(f'{c_fit[i]} K at r = {r_fit[i]}')
+ """
+ if colour temperature increases then discard point furthest from
+ the transformed fit (dehatspace)
+ """
+ error_1 = abs(dists[i-1])
+ error_2 = abs(dists[i])
+ logger.info('Distances from fit:')
+ logger.info(f'{c_fit[i]} K : {error_1:.5f}')
+ logger.info(f'{c_fit[i - 1]} K : {error_2:.5f}')
+ """
+ find bad index
+ note that in python false = 0 and true = 1
+ """
+ bad = i - (error_1 < error_2)
+ logger.info(f'Point at {c_fit[bad]} K deleted as ')
+ logger.info('it is furthest from fit')
+ """
+ delete bad point
+ """
+ r_fit = np.delete(r_fit, bad)
+ b_fit = np.delete(b_fit, bad)
+ c_fit = np.delete(c_fit, bad).astype(np.uint16)
+ """
+ note that if a point has been discarded then the length has decreased
+ by one, meaning that decreasing the index by one will reassess the kept
+ point against the next point. It is therefore possible, in theory, for
+ two adjacent points to be discarded, although probably rare
+ """
+ i -= 1
+
+ """
+ return formatted ct curve, ordered by increasing colour temperature
+ """
+ ct_curve = list(np.array(list(zip(b_fit, r_fit, c_fit))).flatten())[::-1]
+ logger.info('Final CT curve:')
+ for i in range(len(ct_curve)//3):
+ j = 3*i
+ logger.info(f' ct: {ct_curve[j]} ')
+ logger.info(f' r: {ct_curve[j + 1]} ')
+ logger.info(f' b: {ct_curve[j + 2]} ')
+
+ """
+ plotting code for debug
+ """
+ if plot:
+ x = np.linspace(np.min(rbs_hat[0]), np.max(rbs_hat[0]), 100)
+ y = a*x**2 + b*x + c
+ plt.subplot(2, 1, 1)
+ plt.title('hatspace')
+ plt.plot(rbs_hat[0], rbs_hat[1], ls='--', color='blue')
+ plt.plot(x, y, color='green', ls='-')
+ plt.scatter(rbs_hat[0], rbs_hat[1], color='red')
+ for i, ct in enumerate(rbs_hat[2]):
+ plt.annotate(str(ct), (rbs_hat[0][i], rbs_hat[1][i]))
+ plt.xlabel('$\\hat{r}$')
+ plt.ylabel('$\\hat{b}$')
+ """
+ optionally set axes equal to the shortest distance so the line really does
+ look perpendicular and everybody is happy
+ """
+ # ax = plt.gca()
+ # ax.set_aspect('equal')
+ plt.grid()
+ plt.subplot(2, 1, 2)
+ plt.title('dehatspace - indoors?')
+ plt.plot(r_fit, b_fit, color='blue')
+ plt.scatter(rb_raw[0], rb_raw[1], color='green')
+ plt.scatter(r_fit, b_fit, color='red')
+ for i, ct in enumerate(c_fit):
+ plt.annotate(str(ct), (r_fit[i], b_fit[i]))
+ plt.xlabel('$r$')
+ plt.ylabel('$b$')
+ """
+ optionally set axes equal to the shortest distance so the line really does
+ look perpendicular and everybody is happy
+ """
+ # ax = plt.gca()
+ # ax.set_aspect('equal')
+ plt.subplots_adjust(hspace=0.5)
+ plt.grid()
+ plt.show()
+ """
+ end of plotting code
+ """
+ return(ct_curve, np.round(transverse_pos, 5), np.round(transverse_neg, 5))
+
+
+"""
+obtain greyscale patches and perform alsc colour correction
+"""
+def get_alsc_patches(Img, colour_cals, grey=True):
+ """
+ get patch centre coordinates, image colour and the actual
+ patches for each channel, remembering to subtract blacklevel
+ If grey then only greyscale patches considered
+ """
+ patches = Img.patches
+ if grey:
+ cen_coords = Img.cen_coords[3::4]
+ col = Img.color
+ r_patchs = patches[0][3::4] - Img.blacklevel_16
+ b_patchs = patches[3][3::4] - Img.blacklevel_16
+ """
+ note two green channels are averages
+ """
+ g_patchs = (patches[1][3::4]+patches[2][3::4])/2 - Img.blacklevel_16
+ else:
+ cen_coords = Img.cen_coords
+ col = Img.color
+ r_patchs = patches[0] - Img.blacklevel_16
+ b_patchs = patches[3] - Img.blacklevel_16
+ g_patchs = (patches[1]+patches[2])/2 - Img.blacklevel_16
+
+ if colour_cals is None:
+ return r_patchs, b_patchs, g_patchs
+ """
+ find where image colour fits in alsc colour calibration tables
+ """
+ cts = list(colour_cals.keys())
+ pos = bisect_left(cts, col)
+ """
+ if img colour is below minimum or above maximum alsc calibration colour, simply
+ pick extreme closest to img colour
+ """
+ if pos % len(cts) == 0:
+ """
+ this works because -0 = 0 = first and -1 = last index
+ """
+ col_tabs = np.array(colour_cals[cts[-pos//len(cts)]])
+ """
+ else, perform linear interpolation between existing alsc colour
+ calibration tables
+ """
+ else:
+ bef = cts[pos-1]
+ aft = cts[pos]
+ da = col-bef
+ db = aft-col
+ bef_tabs = np.array(colour_cals[bef])
+ aft_tabs = np.array(colour_cals[aft])
+ col_tabs = (bef_tabs*db + aft_tabs*da)/(da+db)
+ col_tabs = np.reshape(col_tabs, (2, 12, 16))
+ """
+ calculate dx, dy used to calculate alsc table
+ """
+ w, h = Img.w/2, Img.h/2
+ dx, dy = int(-(-(w-1)//16)), int(-(-(h-1)//12))
+ """
+ make list of pairs of gains for each patch by selecting the correct value
+ in alsc colour calibration table
+ """
+ patch_gains = []
+ for cen in cen_coords:
+ x, y = cen[0]//dx, cen[1]//dy
+ # We could probably do with some better spatial interpolation here?
+ col_gains = (col_tabs[0][y][x], col_tabs[1][y][x])
+ patch_gains.append(col_gains)
+
+ """
+ multiply the r and b channels in each patch by the respective gain, finally
+ performing the alsc colour correction
+ """
+ for i, gains in enumerate(patch_gains):
+ r_patchs[i] = r_patchs[i] * gains[0]
+ b_patchs[i] = b_patchs[i] * gains[1]
+
+ """
+ return greyscale patches, g channel and correct r, b channels
+ """
+ return r_patchs, b_patchs, g_patchs
diff --git a/utils/tuning/libtuning/ctt_ccm.py b/utils/tuning/libtuning/ctt_ccm.py
new file mode 100644
index 00000000..2e87a667
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_ccm.py
@@ -0,0 +1,408 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for CCM (colour correction matrix)
+
+import logging
+
+import numpy as np
+from scipy.optimize import minimize
+
+from . import ctt_colors as colors
+from .image import Image
+from .ctt_awb import get_alsc_patches
+from .utils import visualise_macbeth_chart
+
+logger = logging.getLogger(__name__)
+
+"""
+takes 8-bit macbeth chart values, degammas them and returns 16-bit values
+"""
+
+'''
+This program has several options for deriving the color matrix.
+The first is average. This minimises the average delta E across all patches of
+the macbeth chart. Testing across all cameras yielded this as the most color
+accurate and vivid. Other options are available, however.
+Maximum minimises the maximum delta E of the patches. It iterates until a
+minimum maximum is found (so that no single patch deviates wildly).
+This yields generally good results, but overall the colors are less accurate.
+Have a fiddle with maximum and see what you think.
+The final option allows you to select the patches to average across.
+This means that you can bias certain patches, for instance if you want the
+reds to be more accurate.
+'''
+
+matrix_selection_types = ["average", "maximum", "patches"]
+typenum = 0 # select from array above, 0 = average, 1 = maximum, 2 = patches
+test_patches = [1, 2, 5, 8, 9, 12, 14]
+
+'''
+Enter patches to test for. Can also be entered twice if you
+would like twice as much bias on one patch.
+'''
+
+
+def degamma(x):
+ x = x / ((2 ** 8) - 1) # takes 255 and scales it down to one
+ x = np.where(x < 0.04045, x / 12.92, ((x + 0.055) / 1.055) ** 2.4)
+ x = x * ((2 ** 16) - 1) # takes one and scales up to 65535, 16 bit color
+ return x
+
+
+def gamma(x):
+ # Take a 3-element array of color values and gamma them
+ return [((colour / 255) ** (1 / 2.4) * 1.055 - 0.055) * 255 for colour in x]
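+
+# Worked example (illustrative): degamma(128) maps mid-grey through
+# ((128/255 + 0.055) / 1.055) ** 2.4, roughly 0.216, and scales it to the
+# 16-bit range, giving a value of about 14150.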
+
+
+"""
+Finds colour correction matrices for a list of images
+"""
+
+
+def ccm(imgs, cal_cr_list, cal_cb_list):
+ global matrix_selection_types, typenum
+ """
+ standard macbeth chart colour values
+ """
+ m_rgb = np.array([ # these are in RGB
+ [116, 81, 67], # dark skin
+ [199, 147, 129], # light skin
+ [91, 122, 156], # blue sky
+ [90, 108, 64], # foliage
+ [130, 128, 176], # blue flower
+ [92, 190, 172], # bluish green
+ [224, 124, 47], # orange
+ [68, 91, 170], # purplish blue
+ [198, 82, 97], # moderate red
+ [94, 58, 106], # purple
+ [159, 189, 63], # yellow green
+ [230, 162, 39], # orange yellow
+ [35, 63, 147], # blue
+ [67, 149, 74], # green
+ [180, 49, 57], # red
+ [238, 198, 20], # yellow
+ [193, 84, 151], # magenta
+ [0, 136, 170], # cyan (goes out of gamut)
+ [245, 245, 243], # white 9.5
+ [200, 202, 202], # neutral 8
+ [161, 163, 163], # neutral 6.5
+ [121, 121, 122], # neutral 5
+ [82, 84, 86], # neutral 3.5
+ [49, 49, 51] # black 2
+ ])
+ """
+ convert reference colours from srgb to rgb
+ """
+ m_srgb = degamma(m_rgb) # now in 16 bit color.
+
+ # Produce array of LAB values for ideal color chart
+ m_lab = [colors.RGB_to_LAB(color / 256) for color in m_srgb]
+
+ """
+ reorder reference values to match how patches are ordered
+ """
+ m_srgb = np.array([m_srgb[i::6] for i in range(6)]).reshape((24, 3))
+ m_lab = np.array([m_lab[i::6] for i in range(6)]).reshape((24, 3))
+ m_rgb = np.array([m_rgb[i::6] for i in range(6)]).reshape((24, 3))
+ """
+ reformat alsc correction tables or set colour_cals to None if alsc is
+ deactivated
+ """
+ if cal_cr_list is None:
+ colour_cals = None
+ else:
+ colour_cals = {}
+ for cr, cb in zip(cal_cr_list, cal_cb_list):
+ cr_tab = cr['table']
+ cb_tab = cb['table']
+ """
+ normalise tables so min value is 1
+ """
+ cr_tab = cr_tab / np.min(cr_tab)
+ cb_tab = cb_tab / np.min(cb_tab)
+ colour_cals[cr['ct']] = [cr_tab, cb_tab]
+
+ """
+ for each image, perform awb and alsc corrections.
+ Then calculate the colour correction matrix for that image, recording the
+ ccm and the colour temperature.
+ """
+ ccm_tab = {}
+ for Img in imgs:
+ logger.info('Processing image: ' + Img.name)
+ """
+ get macbeth patches with alsc applied if alsc enabled.
+ Note: if alsc is disabled then colour_cals will be set to None and the
+ function will simply return the macbeth patches
+ """
+ r, b, g = get_alsc_patches(Img, colour_cals, grey=False)
+ # 256 values for each patch of sRGB values
+
+ """
+ do awb
+ Note: awb is done by measuring the macbeth chart in the image, rather
+ than from the awb calibration. This is done so the awb will be perfect
+ and the ccm matrices will be more accurate.
+ """
+ r_greys, b_greys, g_greys = r[3::4], b[3::4], g[3::4]
+ r_g = np.mean(r_greys / g_greys)
+ b_g = np.mean(b_greys / g_greys)
+ r = r / r_g
+ b = b / b_g
+ """
+ normalise brightness wrt reference macbeth colours and then average
+ each channel for each patch
+ """
+ gain = np.mean(m_srgb) / np.mean((r, g, b))
+ logger.info(f'Gain with respect to standard colours: {gain:.3f}')
+ r = np.mean(gain * r, axis=1)
+ b = np.mean(gain * b, axis=1)
+ g = np.mean(gain * g, axis=1)
+ """
+ calculate ccm matrix
+ """
+ # ==== All of the below should be in sRGB ====
+ sumde = 0
+ ccm = do_ccm(r, g, b, m_srgb)
+ # This is the initial guess that our optimisation code works with.
+ original_ccm = ccm
+ r1 = ccm[0]
+ r2 = ccm[1]
+ g1 = ccm[3]
+ g2 = ccm[4]
+ b1 = ccm[6]
+ b2 = ccm[7]
+ '''
+ COLOR MATRIX LOOKS AS BELOW
+ R1 R2 R3 Rval Outr
+ G1 G2 G3 * Gval = G
+ B1 B2 B3 Bval B
+ Will be optimising 6 elements and working out the third element using 1-r1-r2 = r3
+ '''
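+ # Illustrative example: if the optimiser proposes r1 = 1.8 and r2 = -0.5,
+ # the dependent element is r3 = 1 - 1.8 - (-0.5) = -0.3, so the row still
+ # sums to 1 and greys are preserved.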
+
+ x0 = [r1, r2, g1, g2, b1, b2]
+ '''
+ We use our old CCM as the initial guess for the program to find the
+ optimised matrix
+ '''
+ result = minimize(guess, x0, args=(r, g, b, m_lab), tol=0.01)
+ '''
+ This produces a color matrix which has the lowest delta E possible,
+ based off the input data. Note it is impossible for this to reach
+ zero since the input data is imperfect
+ '''
+
+ [r1, r2, g1, g2, b1, b2] = result.x
+ # The new, optimised color correction matrix values
+ # This is the optimised Color Matrix (preserving greys by summing rows up to 1)
+ optimised_ccm = [r1, r2, (1 - r1 - r2), g1, g2, (1 - g1 - g2), b1, b2, (1 - b1 - b2)]
+
+ logger.info(f'Optimized Matrix: {np.round(optimised_ccm, 4)}')
+ logger.info(f'Old Matrix: {np.round(ccm, 4)}')
+
+ formatted_ccm = np.array(original_ccm).reshape((3, 3))
+
+ '''
+ below is a whole load of code that then applies the latest color
+ matrix, and returns LAB values for color. This can then be used
+ to calculate the final delta E
+ '''
+ optimised_ccm_rgb = [] # Original Color Corrected Matrix RGB / LAB
+ optimised_ccm_lab = []
+
+ formatted_optimised_ccm = np.array(optimised_ccm).reshape((3, 3))
+ after_gamma_rgb = []
+ after_gamma_lab = []
+
+ for RGB in zip(r, g, b):
+ ccm_applied_rgb = np.dot(formatted_ccm, (np.array(RGB) / 256))
+ optimised_ccm_rgb.append(gamma(ccm_applied_rgb))
+ optimised_ccm_lab.append(colors.RGB_to_LAB(ccm_applied_rgb))
+
+ optimised_ccm_applied_rgb = np.dot(formatted_optimised_ccm, np.array(RGB) / 256)
+ after_gamma_rgb.append(gamma(optimised_ccm_applied_rgb))
+ after_gamma_lab.append(colors.RGB_to_LAB(optimised_ccm_applied_rgb))
+ '''
+ Gamma After RGB / LAB - not used in calculations, only used for visualisation
+ We now want to spit out some data that shows
+ how the optimisation has improved the color matrices
+ '''
+ logger.info("Here are the Improvements")
+
+ # CALCULATE WORST CASE delta e
+ old_worst_delta_e = 0
+ before_average = transform_and_evaluate(formatted_ccm, r, g, b, m_lab)
+ new_worst_delta_e = 0
+ after_average = transform_and_evaluate(formatted_optimised_ccm, r, g, b, m_lab)
+ for i in range(24):
+ old_delta_e = deltae(optimised_ccm_lab[i], m_lab[i]) # Current Old Delta E
+ new_delta_e = deltae(after_gamma_lab[i], m_lab[i]) # Current New Delta E
+ if old_delta_e > old_worst_delta_e:
+ old_worst_delta_e = old_delta_e
+ if new_delta_e > new_worst_delta_e:
+ new_worst_delta_e = new_delta_e
+
+ logger.info(f'delta E optimized: average: {after_average:.2f} max:{new_worst_delta_e:.2f}')
+ logger.info(f'delta E old: average: {before_average:.2f} max:{old_worst_delta_e:.2f}')
+
+ visualise_macbeth_chart(m_rgb, optimised_ccm_rgb, after_gamma_rgb, str(Img.color) + str(matrix_selection_types[typenum]))
+    '''
+    The program also saves visualisations of the improvements. Very
+    pretty to look at. The top rectangle is the ideal colour, the left
+    square is before optimisation and the right square is after.
+    '''
+
+    """
+    If a ccm has already been calculated for this colour temperature then
+    don't overwrite it; keep both. They will be averaged later on.
+    """  # From here on we use the optimised color matrix, optimised_ccm
+ if Img.color in ccm_tab.keys():
+ ccm_tab[Img.color].append(optimised_ccm)
+ else:
+ ccm_tab[Img.color] = [optimised_ccm]
+
+ logger.info('Finished processing images')
+ """
+ average any ccms that share a colour temperature
+ """
+ for k, v in ccm_tab.items():
+ tab = np.mean(v, axis=0)
+ tab = np.where((10000 * tab) % 1 <= 0.05, tab + 0.00001, tab)
+ tab = np.where((10000 * tab) % 1 >= 0.95, tab - 0.00001, tab)
+ ccm_tab[k] = list(np.round(tab, 5))
+ logger.info(f'Matrix calculated for colour temperature of {k} K')
+
+ """
+ return all ccms with respective colour temperature in the correct format,
+ sorted by their colour temperature
+ """
+ sorted_ccms = sorted(ccm_tab.items(), key=lambda kv: kv[0])
+ ccms = []
+ for i in sorted_ccms:
+ ccms.append({
+ 'ct': i[0],
+ 'ccm': i[1]
+ })
+ return ccms
+
+
+def guess(x0, r, g, b, m_lab): # provides a method of numerical feedback for the optimisation code
+ [r1, r2, g1, g2, b1, b2] = x0
+ ccm = np.array([r1, r2, (1 - r1 - r2),
+ g1, g2, (1 - g1 - g2),
+ b1, b2, (1 - b1 - b2)]).reshape((3, 3)) # format the matrix correctly
+ return transform_and_evaluate(ccm, r, g, b, m_lab)
+
+
+def transform_and_evaluate(ccm, r, g, b, m_lab):  # Applies the correction matrix and converts the result to LAB
+ # create list of matrix changed colors
+ realrgb = []
+ for RGB in zip(r, g, b):
+ rgb_post_ccm = np.dot(ccm, np.array(RGB) / 256) # This is RGB values after the color correction matrix has been applied
+ realrgb.append(colors.RGB_to_LAB(rgb_post_ccm))
+ # now compare that with m_lab and return numeric result, averaged for each patch
+ return (sumde(realrgb, m_lab) / 24) # returns an average result of delta E
+
+
+def sumde(listA, listB):
+ global typenum, test_patches
+ sumde = 0
+ maxde = 0
+ patchde = [] # Create array of the delta E values for each patch. useful for optimisation of certain patches
+ for listA_item, listB_item in zip(listA, listB):
+ if maxde < (deltae(listA_item, listB_item)):
+ maxde = deltae(listA_item, listB_item)
+ patchde.append(deltae(listA_item, listB_item))
+ sumde += deltae(listA_item, listB_item)
+ '''
+ The different options specified at the start allow for
+ the maximum to be returned, average or specific patches
+ '''
+ if typenum == 0:
+ return sumde
+ if typenum == 1:
+ return maxde
+ if typenum == 2:
+ output = sum([patchde[test_patch] for test_patch in test_patches])
+ # Selects only certain patches and returns the output for them
+ return output
+
+
+"""
+calculates the ccm for an individual image.
+ccms are calculated in rgb space, and are fit by hand. Although it is a 3x3
+matrix, each row must add up to 1 in order to conserve greyness, simplifying
+calculation.
+The initial CCM is calculated in RGB, and then optimised in LAB color space
+This simplifies the initial calculation but then gets us the accuracy of
+using LAB color space.
+"""
+
+
+def do_ccm(r, g, b, m_srgb):
+ rb = r-b
+ gb = g-b
+ rb_2s = (rb * rb)
+ rb_gbs = (rb * gb)
+ gb_2s = (gb * gb)
+
+ r_rbs = rb * (m_srgb[..., 0] - b)
+ r_gbs = gb * (m_srgb[..., 0] - b)
+ g_rbs = rb * (m_srgb[..., 1] - b)
+ g_gbs = gb * (m_srgb[..., 1] - b)
+ b_rbs = rb * (m_srgb[..., 2] - b)
+ b_gbs = gb * (m_srgb[..., 2] - b)
+
+ """
+ Obtain least squares fit
+ """
+ rb_2 = np.sum(rb_2s)
+ gb_2 = np.sum(gb_2s)
+ rb_gb = np.sum(rb_gbs)
+ r_rb = np.sum(r_rbs)
+ r_gb = np.sum(r_gbs)
+ g_rb = np.sum(g_rbs)
+ g_gb = np.sum(g_gbs)
+ b_rb = np.sum(b_rbs)
+ b_gb = np.sum(b_gbs)
+
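+    # For each output channel the least-squares fit reduces to a 2x2 system
+    # of normal equations in the two free row coefficients (a, b), e.g. for
+    # the red row:
+    #     [ rb_2   rb_gb ] [ a ]   [ r_rb ]
+    #     [ rb_gb  gb_2  ] [ b ] = [ r_gb ]
+    # which is solved below by Cramer's rule using the determinant det.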
+ det = rb_2 * gb_2 - rb_gb * rb_gb
+
+ """
+ Raise error if matrix is singular...
+ This shouldn't really happen with real data but if it does just take new
+ pictures and try again, not much else to be done unfortunately...
+ """
+ if det < 0.001:
+        raise ArithmeticError('Matrix in the CCM least-squares fit is singular')
+
+ r_a = (gb_2 * r_rb - rb_gb * r_gb) / det
+ r_b = (rb_2 * r_gb - rb_gb * r_rb) / det
+ """
+ Last row can be calculated by knowing the sum must be 1
+ """
+ r_c = 1 - r_a - r_b
+
+ g_a = (gb_2 * g_rb - rb_gb * g_gb) / det
+ g_b = (rb_2 * g_gb - rb_gb * g_rb) / det
+ g_c = 1 - g_a - g_b
+
+ b_a = (gb_2 * b_rb - rb_gb * b_gb) / det
+ b_b = (rb_2 * b_gb - rb_gb * b_rb) / det
+ b_c = 1 - b_a - b_b
+
+ """
+ format ccm
+ """
+ ccm = [r_a, r_b, r_c, g_a, g_b, g_c, b_a, b_b, b_c]
+
+ return ccm
+
+
+def deltae(colorA, colorB):
+ return ((colorA[0] - colorB[0]) ** 2 + (colorA[1] - colorB[1]) ** 2 + (colorA[2] - colorB[2]) ** 2) ** 0.5
+    # return ((colorA[1] - colorB[1]) ** 2 + (colorA[2] - colorB[2]) ** 2) ** 0.5
+ # UNCOMMENT IF YOU WANT TO NEGLECT LUMINANCE FROM CALCULATION OF DELTA E
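+    # The value above is the Euclidean distance in LAB space, i.e. the
+    # CIE76 delta E; more elaborate formulas (CIE94, CIEDE2000) exist but
+    # are not used here.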
diff --git a/utils/tuning/libtuning/ctt_colors.py b/utils/tuning/libtuning/ctt_colors.py
new file mode 100644
index 00000000..cb4d236b
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_colors.py
@@ -0,0 +1,30 @@
+# Program to convert from RGB to LAB color space
+def RGB_to_LAB(RGB):  # where RGB is a 1x3 array, e.g. RGB = [100, 255, 230]
+ num = 0
+ XYZ = [0, 0, 0]
+    # Convert R, G, B to X, Y, Z
+ X = RGB[0] * 0.4124 + RGB[1] * 0.3576 + RGB[2] * 0.1805
+ Y = RGB[0] * 0.2126 + RGB[1] * 0.7152 + RGB[2] * 0.0722
+ Z = RGB[0] * 0.0193 + RGB[1] * 0.1192 + RGB[2] * 0.9505
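+    # The coefficients above are the standard sRGB -> XYZ (D65) conversion
+    # matrix; the RGB input is assumed to already be linear (no gamma
+    # applied before this point).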
+
+ XYZ[0] = X / 255 * 100
+ XYZ[1] = Y / 255 * 100 # XYZ Must be in range 0 -> 100, so scale down from 255
+ XYZ[2] = Z / 255 * 100
+ XYZ[0] = XYZ[0] / 95.047 # ref_X = 95.047 Observer= 2°, Illuminant= D65
+ XYZ[1] = XYZ[1] / 100.0 # ref_Y = 100.000
+ XYZ[2] = XYZ[2] / 108.883 # ref_Z = 108.883
+ num = 0
+ for value in XYZ:
+ if value > 0.008856:
+ value = value ** (0.3333333333333333)
+ else:
+ value = (7.787 * value) + (16 / 116)
+ XYZ[num] = value
+ num = num + 1
+
+ # L, A, B, values calculated below
+ L = (116 * XYZ[1]) - 16
+ a = 500 * (XYZ[0] - XYZ[1])
+ b = 200 * (XYZ[1] - XYZ[2])
+
+ return [L, a, b]
diff --git a/utils/tuning/libtuning/ctt_ransac.py b/utils/tuning/libtuning/ctt_ransac.py
new file mode 100644
index 00000000..01bba302
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_ransac.py
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool RANSAC selector for Macbeth chart locator
+
+import numpy as np
+
+scale = 2
+
+
+"""
+constructs normalised macbeth chart corners for ransac algorithm
+"""
+def get_square_verts(c_err=0.05, scale=scale):
+ """
+ define macbeth chart corners
+ """
+ b_bord_x, b_bord_y = scale*8.5, scale*13
+ s_bord = 6*scale
+ side = 41*scale
+ x_max = side*6 + 5*s_bord + 2*b_bord_x
+ y_max = side*4 + 3*s_bord + 2*b_bord_y
+ c1 = (0, 0)
+ c2 = (0, y_max)
+ c3 = (x_max, y_max)
+ c4 = (x_max, 0)
+ mac_norm = np.array((c1, c2, c3, c4), np.float32)
+ mac_norm = np.array([mac_norm])
+
+ square_verts = []
+ square_0 = np.array(((0, 0), (0, side),
+ (side, side), (side, 0)), np.float32)
+ offset_0 = np.array((b_bord_x, b_bord_y), np.float32)
+ c_off = side * c_err
+ offset_cont = np.array(((c_off, c_off), (c_off, -c_off),
+ (-c_off, -c_off), (-c_off, c_off)), np.float32)
+ square_0 += offset_0
+ square_0 += offset_cont
+ """
+ define macbeth square corners
+ """
+ for i in range(6):
+ shift_i = np.array(((i*side, 0), (i*side, 0),
+ (i*side, 0), (i*side, 0)), np.float32)
+ shift_bord = np.array(((i*s_bord, 0), (i*s_bord, 0),
+ (i*s_bord, 0), (i*s_bord, 0)), np.float32)
+ square_i = square_0 + shift_i + shift_bord
+ for j in range(4):
+ shift_j = np.array(((0, j*side), (0, j*side),
+ (0, j*side), (0, j*side)), np.float32)
+ shift_bord = np.array(((0, j*s_bord),
+ (0, j*s_bord), (0, j*s_bord),
+ (0, j*s_bord)), np.float32)
+ square_j = square_i + shift_j + shift_bord
+ square_verts.append(square_j)
+ # print('square_verts')
+ # print(square_verts)
+ return np.array(square_verts, np.float32), mac_norm
+
+
+def get_square_centres(c_err=0.05, scale=scale):
+ """
+ define macbeth square centres
+ """
+ verts, mac_norm = get_square_verts(c_err, scale=scale)
+
+ centres = np.mean(verts, axis=1)
+ # print('centres')
+ # print(centres)
+ return np.array(centres, np.float32)
diff --git a/utils/tuning/libtuning/generators/__init__.py b/utils/tuning/libtuning/generators/__init__.py
new file mode 100644
index 00000000..f28b6149
--- /dev/null
+++ b/utils/tuning/libtuning/generators/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.generators.raspberrypi_output import RaspberryPiOutput
+from libtuning.generators.yaml_output import YamlOutput
diff --git a/utils/tuning/libtuning/generators/generator.py b/utils/tuning/libtuning/generators/generator.py
new file mode 100644
index 00000000..77a8ba4a
--- /dev/null
+++ b/utils/tuning/libtuning/generators/generator.py
@@ -0,0 +1,15 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Base class for a generator to convert dict to tuning file
+
+from pathlib import Path
+
+
+class Generator(object):
+ def __init__(self):
+ pass
+
+ def write(self, output_path: Path, output_dict: dict, output_order: list):
+ raise NotImplementedError
diff --git a/utils/tuning/libtuning/generators/raspberrypi_output.py b/utils/tuning/libtuning/generators/raspberrypi_output.py
new file mode 100644
index 00000000..47b49059
--- /dev/null
+++ b/utils/tuning/libtuning/generators/raspberrypi_output.py
@@ -0,0 +1,114 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright 2022 Raspberry Pi Ltd
+#
+# Generate tuning file in Raspberry Pi's json format
+#
+# (Copied from ctt_pretty_print_json.py)
+
+from .generator import Generator
+
+import json
+from pathlib import Path
+import textwrap
+
+
+class Encoder(json.JSONEncoder):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.indentation_level = 0
+ self.hard_break = 120
+ self.custom_elems = {
+ 'table': 16,
+ 'luminance_lut': 16,
+ 'ct_curve': 3,
+ 'ccm': 3,
+ 'gamma_curve': 2,
+ 'y_target': 2,
+ 'prior': 2
+ }
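+        # The values above are the number of elements to print per row for
+        # those keys, e.g. a 9-entry 'ccm' list is written as three rows of
+        # three values.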
+
+ def encode(self, o, node_key=None):
+ if isinstance(o, (list, tuple)):
+ # Check if we are a flat list of numbers.
+ if not any(isinstance(el, (list, tuple, dict)) for el in o):
+ s = ', '.join(json.dumps(el) for el in o)
+ if node_key in self.custom_elems.keys():
+ # Special case handling to specify number of elements in a row for tables, ccm, etc.
+ self.indentation_level += 1
+ sl = s.split(', ')
+ num = self.custom_elems[node_key]
+ chunk = [self.indent_str + ', '.join(sl[x:x + num]) for x in range(0, len(sl), num)]
+ t = ',\n'.join(chunk)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ elif len(s) > self.hard_break - len(self.indent_str):
+ # Break a long list with wraps.
+ self.indentation_level += 1
+ t = textwrap.fill(s, self.hard_break, break_long_words=False,
+ initial_indent=self.indent_str, subsequent_indent=self.indent_str)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ else:
+ # Smaller lists can remain on a single line.
+ output = f' [ {s} ]'
+ return output
+ else:
+ # Sub-structures in the list case.
+ self.indentation_level += 1
+ output = [self.indent_str + self.encode(el) for el in o]
+ self.indentation_level -= 1
+ output = ',\n'.join(output)
+ return f' [\n{output}\n{self.indent_str}]'
+
+ elif isinstance(o, dict):
+ self.indentation_level += 1
+ output = []
+ for k, v in o.items():
+ if isinstance(v, dict) and len(v) == 0:
+ # Empty config block special case.
+ output.append(self.indent_str + f'{json.dumps(k)}: {{ }}')
+ else:
+ # Only linebreak if the next node is a config block.
+ sep = f'\n{self.indent_str}' if isinstance(v, dict) else ''
+ output.append(self.indent_str + f'{json.dumps(k)}:{sep}{self.encode(v, k)}')
+ output = ',\n'.join(output)
+ self.indentation_level -= 1
+ return f'{{\n{output}\n{self.indent_str}}}'
+
+ else:
+ return ' ' + json.dumps(o)
+
+ @property
+ def indent_str(self) -> str:
+ return ' ' * self.indentation_level * self.indent
+
+ def iterencode(self, o, **kwargs):
+ return self.encode(o)
+
+
+class RaspberryPiOutput(Generator):
+ def __init__(self):
+ super().__init__()
+
+ def _pretty_print(self, in_json: dict) -> str:
+
+ if 'version' not in in_json or \
+ 'target' not in in_json or \
+ 'algorithms' not in in_json or \
+ in_json['version'] < 2.0:
+ raise RuntimeError('Incompatible JSON dictionary has been provided')
+
+ return json.dumps(in_json, cls=Encoder, indent=4, sort_keys=False)
+
+ def write(self, output_file: Path, output_dict: dict, output_order: list):
+ # Write json dictionary to file using ctt's version 2 format
+ out_json = {
+ "version": 2.0,
+ 'target': 'bcm2835',
+ "algorithms": [{f'{module.out_name}': output_dict[module]} for module in output_order]
+ }
+
+ with open(output_file, 'w') as f:
+ f.write(self._pretty_print(out_json))
diff --git a/utils/tuning/libtuning/generators/yaml_output.py b/utils/tuning/libtuning/generators/yaml_output.py
new file mode 100644
index 00000000..c490081d
--- /dev/null
+++ b/utils/tuning/libtuning/generators/yaml_output.py
@@ -0,0 +1,127 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright 2022 Paul Elder <paul.elder@ideasonboard.com>
+#
+# Generate tuning file in YAML format
+
+from .generator import Generator
+
+from numbers import Number
+from pathlib import Path
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+class YamlOutput(Generator):
+ def __init__(self):
+ super().__init__()
+
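+    # Render a list of numbers as YAML flow-sequence text: lists that fit
+    # within line_wrap stay on one line, e.g. "[ 1, 2, 3 ]", while longer
+    # lists are wrapped over several indented lines between '[' and ']'.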
+ def _stringify_number_list(self, listt: list):
+ line_wrap = 80
+
+ line = '[ ' + ', '.join([str(x) for x in listt]) + ' ]'
+ if len(line) <= line_wrap:
+ return [line]
+
+ out_lines = ['[']
+ line = ' '
+ for x in listt:
+ x_str = str(x)
+ # If the first number is longer than line_wrap, it'll add an extra line
+ if len(line) + len(x_str) > line_wrap:
+ out_lines.append(line)
+ line = f' {x_str},'
+ continue
+ line += f' {x_str},'
+ out_lines.append(line)
+ out_lines.append(']')
+
+ return out_lines
+
+ # @return Array of lines, and boolean of if all elements were numbers
+ def _stringify_list(self, listt: list):
+ out_lines = []
+
+        all_numbers = all(isinstance(x, Number) for x in listt)
+
+ if all_numbers:
+ return self._stringify_number_list(listt), True
+
+ for value in listt:
+ if isinstance(value, Number):
+ out_lines.append(f'- {str(value)}')
+ elif isinstance(value, str):
+ out_lines.append(f'- "{value}"')
+ elif isinstance(value, list):
+ lines, all_numbers = self._stringify_list(value)
+
+ if all_numbers:
+ out_lines.append( f'- {lines[0]}')
+ out_lines += [f' {line}' for line in lines[1:]]
+ else:
+ out_lines.append( f'-')
+ out_lines += [f' {line}' for line in lines]
+ elif isinstance(value, dict):
+ lines = self._stringify_dict(value)
+ out_lines.append( f'- {lines[0]}')
+ out_lines += [f' {line}' for line in lines[1:]]
+
+ return out_lines, False
+
+ def _stringify_dict(self, dictt: dict):
+ out_lines = []
+
+ for key in dictt:
+ value = dictt[key]
+
+ if isinstance(value, Number):
+ out_lines.append(f'{key}: {str(value)}')
+ elif isinstance(value, str):
+ out_lines.append(f'{key}: "{value}"')
+ elif isinstance(value, list):
+ lines, all_numbers = self._stringify_list(value)
+
+ if all_numbers:
+ out_lines.append( f'{key}: {lines[0]}')
+ out_lines += [f'{" " * (len(key) + 2)}{line}' for line in lines[1:]]
+ else:
+ out_lines.append( f'{key}:')
+ out_lines += [f' {line}' for line in lines]
+ elif isinstance(value, dict):
+ lines = self._stringify_dict(value)
+ out_lines.append( f'{key}:')
+ out_lines += [f' {line}' for line in lines]
+
+ return out_lines
+
+ def write(self, output_file: Path, output_dict: dict, output_order: list):
+ out_lines = [
+ '%YAML 1.1',
+ '---',
+ 'version: 1',
+ # No need to condition this, as libtuning already guarantees that
+ # we have at least one module. Even if the module has no output,
+            # its presence is meaningful.
+ 'algorithms:'
+ ]
+
+ for module in output_order:
+ if module not in output_dict:
+ continue
+
+ out_lines.append(f' - {module.out_name}:')
+
+ if len(output_dict[module]) == 0:
+ continue
+
+ if not isinstance(output_dict[module], dict):
+ logger.error(f'Error: Output of {module.type} is not a dictionary')
+ continue
+
+ lines = self._stringify_dict(output_dict[module])
+ out_lines += [f' {line}' for line in lines]
+
+ with open(output_file, 'w', encoding='utf-8') as f:
+ for line in out_lines:
+ f.write(f'{line}\n')
diff --git a/utils/tuning/libtuning/gradient.py b/utils/tuning/libtuning/gradient.py
new file mode 100644
index 00000000..b643f502
--- /dev/null
+++ b/utils/tuning/libtuning/gradient.py
@@ -0,0 +1,75 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Gradients that can be used to distribute or map numbers
+
+import libtuning as lt
+
+import math
+from numbers import Number
+
+
+# @brief Gradient for how to allocate pixels to sectors
+# @description There are no parameters for the gradients as the domain is the
+# number of pixels and the range is the number of sectors, and
+# there is only one curve that has a startpoint and endpoint at
+# (0, 0) and at (#pixels, #sectors). The exception is for curves
+# that *do* have multiple solutions for only two points, such as
+# gaussian, and curves of higher polynomial orders if we had them.
+#
+# \todo There will probably be a helper in the Gradient class, as I have a
+# feeling that all the other curves (besides Linear and Gaussian) can be
+# implemented in the same way.
+class Gradient(object):
+ def __init__(self):
+ pass
+
+ # @brief Distribute pixels into sectors (only in one dimension)
+ # @param domain Number of pixels
+ # @param sectors Number of sectors
+ # @return A list of number of pixels in each sector
+ def distribute(self, domain: list, sectors: list) -> list:
+ raise NotImplementedError
+
+ # @brief Map a number on a curve
+ # @param domain Domain of the curve
+ # @param rang Range of the curve
+ # @param x Input on the domain of the curve
+ # @return y from the range of the curve
+ def map(self, domain: tuple, rang: tuple, x: Number) -> Number:
+ raise NotImplementedError
+
+
+class Linear(Gradient):
+ # @param remainder Mode of handling remainder
+ def __init__(self, remainder: lt.Remainder = lt.Remainder.Float):
+ self.remainder = remainder
+
+ def distribute(self, domain: list, sectors: list) -> list:
+ size = domain / sectors
+ rem = domain % sectors
+
+ if rem == 0:
+ return [int(size)] * sectors
+
+ size = math.ceil(size)
+ rem = domain % size
+ output_sectors = [int(size)] * (sectors - 1)
+
+ if self.remainder == lt.Remainder.Float:
+ size = domain / sectors
+ output_sectors = [size] * sectors
+ elif self.remainder == lt.Remainder.DistributeFront:
+ output_sectors.append(int(rem))
+ elif self.remainder == lt.Remainder.DistributeBack:
+ output_sectors.insert(0, int(rem))
+ else:
+ raise ValueError
+
+ return output_sectors
+
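+    # Informal example: map((0, 100), (0, 1), 25) returns 0.25, i.e. plain
+    # linear interpolation between the endpoints of the domain and range.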
+ def map(self, domain: tuple, rang: tuple, x: Number) -> Number:
+ m = (rang[1] - rang[0]) / (domain[1] - domain[0])
+ b = rang[0] - m * domain[0]
+ return m * x + b
diff --git a/utils/tuning/libtuning/image.py b/utils/tuning/libtuning/image.py
new file mode 100644
index 00000000..ecd334bd
--- /dev/null
+++ b/utils/tuning/libtuning/image.py
@@ -0,0 +1,140 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# Container for an image and associated metadata
+
+import binascii
+import numpy as np
+from pathlib import Path
+import pyexiv2 as pyexif
+import rawpy as raw
+import re
+
+import libtuning as lt
+import libtuning.utils as utils
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class Image:
+ def __init__(self, path: Path):
+ self.path = path
+ self.lsc_only = False
+ self.color = -1
+ self.lux = -1
+ self.macbeth = None
+
+ try:
+ self._load_metadata_exif()
+ except Exception as e:
+ logger.error(f'Failed to load metadata from {self.path}: {e}')
+ raise e
+
+ try:
+ self._read_image_dng()
+ except Exception as e:
+ logger.error(f'Failed to load image data from {self.path}: {e}')
+ raise e
+
+ @property
+ def name(self):
+ return self.path.name
+
+ # May raise KeyError as there are too many to check
+ def _load_metadata_exif(self):
+ # RawPy doesn't load all the image tags that we need, so we use py3exiv2
+ metadata = pyexif.ImageMetadata(str(self.path))
+ metadata.read()
+
+ # The DNG and TIFF/EP specifications use different IFDs to store the
+ # raw image data and the Exif tags. DNG stores them in a SubIFD and in
+ # an Exif IFD respectively (named "SubImage1" and "Photo" by pyexiv2),
+ # while TIFF/EP stores them both in IFD0 (name "Image"). Both are used
+ # in "DNG" files, with libcamera-apps following the DNG recommendation
+ # and applications based on picamera2 following TIFF/EP.
+ #
+ # This code detects which tags are being used, and therefore extracts the
+ # correct values.
+ try:
+ self.w = metadata['Exif.SubImage1.ImageWidth'].value
+ subimage = 'SubImage1'
+ photo = 'Photo'
+ except KeyError:
+ self.w = metadata['Exif.Image.ImageWidth'].value
+ subimage = 'Image'
+ photo = 'Image'
+ self.pad = 0
+ self.h = metadata[f'Exif.{subimage}.ImageLength'].value
+ white = metadata[f'Exif.{subimage}.WhiteLevel'].value
+ self.sigbits = int(white).bit_length()
+ self.fmt = (self.sigbits - 4) // 2
+ self.exposure = int(metadata[f'Exif.{photo}.ExposureTime'].value * 1000000)
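+        # Gain is derived from the ISO rating: since againQ8 = ISO * 256 / 100,
+        # ISO 100 corresponds to an analogue gain of 1.0 and 'Q8' means the
+        # gain is stored as a fixed-point value scaled by 256 (againQ8_norm
+        # is the plain floating-point gain).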
+ self.againQ8 = metadata[f'Exif.{photo}.ISOSpeedRatings'].value * 256 / 100
+ self.againQ8_norm = self.againQ8 / 256
+ self.camName = metadata['Exif.Image.Model'].value
+ self.blacklevel = int(metadata[f'Exif.{subimage}.BlackLevel'].value[0])
+ self.blacklevel_16 = self.blacklevel << (16 - self.sigbits)
+
+ # Channel order depending on bayer pattern
+ # The key is the order given by exif, where 0 is R, 1 is G, and 2 is B
+ # The value is the index where the color can be found, where the first
+        # is R, then GR, then GB, then B.
+ bayer_case = {
+ '0 1 1 2': (lt.Color.R, lt.Color.GR, lt.Color.GB, lt.Color.B),
+ '1 2 0 1': (lt.Color.GB, lt.Color.B, lt.Color.R, lt.Color.GR),
+ '2 1 1 0': (lt.Color.B, lt.Color.GB, lt.Color.GR, lt.Color.R),
+ '1 0 2 1': (lt.Color.GR, lt.Color.R, lt.Color.B, lt.Color.GB)
+ }
+ # Note: This needs to be in IFD0
+ cfa_pattern = metadata[f'Exif.{subimage}.CFAPattern'].value
+ self.order = bayer_case[cfa_pattern]
+
+ def _read_image_dng(self):
+ raw_im = raw.imread(str(self.path))
+ raw_data = raw_im.raw_image
+ shift = 16 - self.sigbits
+ c0 = np.left_shift(raw_data[0::2, 0::2].astype(np.int64), shift)
+ c1 = np.left_shift(raw_data[0::2, 1::2].astype(np.int64), shift)
+ c2 = np.left_shift(raw_data[1::2, 0::2].astype(np.int64), shift)
+ c3 = np.left_shift(raw_data[1::2, 1::2].astype(np.int64), shift)
+ self.channels = [c0, c1, c2, c3]
+ # Reorder the channels into R, GR, GB, B
+ self.channels = [self.channels[i] for i in self.order]
+
+ # \todo Move this to macbeth.py
+ def get_patches(self, cen_coords, size=16):
+ saturated = False
+
+ # Obtain channel widths and heights
+ ch_w, ch_h = self.w, self.h
+ cen_coords = list(np.array((cen_coords[0])).astype(np.int32))
+ self.cen_coords = cen_coords
+
+ # Squares are ordered by stacking macbeth chart columns from left to
+ # right. Some useful patch indices:
+ # white = 3
+ # black = 23
+ # 'reds' = 9, 10
+ # 'blues' = 2, 5, 8, 20, 22
+ # 'greens' = 6, 12, 17
+ # greyscale = 3, 7, 11, 15, 19, 23
+ all_patches = []
+ for ch in self.channels:
+ ch_patches = []
+ for cen in cen_coords:
+ # Macbeth centre is placed at top left of central 2x2 patch to
+ # account for rounding. Patch pixels are sorted by pixel
+ # brightness so spatial information is lost.
+ patch = ch[cen[1] - 7:cen[1] + 9, cen[0] - 7:cen[0] + 9].flatten()
+ patch.sort()
+ if patch[-5] == (2**self.sigbits - 1) * 2**(16 - self.sigbits):
+ saturated = True
+ ch_patches.append(patch)
+
+ all_patches.append(ch_patches)
+
+ self.patches = np.array(all_patches)
+
+ return not saturated
diff --git a/utils/tuning/libtuning/libtuning.py b/utils/tuning/libtuning/libtuning.py
new file mode 100644
index 00000000..bac57323
--- /dev/null
+++ b/utils/tuning/libtuning/libtuning.py
@@ -0,0 +1,212 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# An infrastructure for camera tuning tools
+
+import argparse
+import logging
+
+import libtuning as lt
+import libtuning.utils as utils
+
+from enum import Enum, IntEnum
+
+logger = logging.getLogger(__name__)
+
+class Color(IntEnum):
+ R = 0
+ GR = 1
+ GB = 2
+ B = 3
+
+
+class Debug(Enum):
+ Plot = 1
+
+
+# @brief What to do with the leftover pixels after dividing them into ALSC
+# sectors, when the division gradient is uniform
+# @var Float Force floating point division so all sectors divide equally
+# @var DistributeFront Divide the remainder equally (until running out,
+# obviously) into the existing sectors, starting from the front
+# @var DistributeBack Same as DistributeFront but starting from the back
+class Remainder(Enum):
+ Float = 0
+ DistributeFront = 1
+ DistributeBack = 2
+
+
+# @brief A helper class to contain a default value for a module configuration
+# parameter
+class Param(object):
+ # @var Required The value contained in this instance is irrelevant, and the
+ # value must be provided by the tuning configuration file.
+ # @var Optional If the value is not provided by the tuning configuration
+ # file, then the value contained in this instance will be used instead.
+ # @var Hardcode The value contained in this instance will be used
+ class Mode(Enum):
+ Required = 0
+ Optional = 1
+ Hardcode = 2
+
+ # @param name Name of the parameter. Shall match the name used in the
+ # configuration file for the parameter
+ # @param required Whether or not a value is required in the config
+ # parameter of get_value()
+ # @param val Default value (only relevant if mode is Optional)
+ def __init__(self, name: str, required: Mode, val=None):
+ self.name = name
+ self.__required = required
+ self.val = val
+
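+    # Informal usage sketch (the parameter name here is purely illustrative):
+    #     size = Param('sector_size', Param.Mode.Optional, val=16)
+    #     value = size.get_value(config)  # falls back to 16 if absent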
+ def get_value(self, config: dict):
+ if self.__required is self.Mode.Hardcode:
+ return self.val
+
+ if self.__required is self.Mode.Required and self.name not in config:
+ raise ValueError(f'Parameter {self.name} is required but not provided in the configuration')
+
+        return config[self.name] if self.name in config else self.val
+
+ @property
+ def required(self):
+ return self.__required is self.Mode.Required
+
+ # @brief Used by libtuning to auto-generate help information for the tuning
+ # script on the available parameters for the configuration file
+ # \todo Implement this
+ @property
+ def info(self):
+ raise NotImplementedError
+
+
+class Tuner(object):
+
+ # External functions
+
+ def __init__(self, platform_name):
+ self.name = platform_name
+ self.modules = []
+ self.parser = None
+ self.generator = None
+ self.output_order = []
+ self.config = {}
+ self.output = {}
+
+ def add(self, module):
+ if isinstance(module, list):
+ self.modules.extend(module)
+ else:
+ self.modules.append(module)
+
+ def set_input_parser(self, parser):
+ self.parser = parser
+
+ def set_output_formatter(self, output):
+ self.generator = output
+
+ def set_output_order(self, modules):
+ self.output_order = modules
+
+ # @brief Convert classes in self.output_order to the instances in self.modules
+ def _prepare_output_order(self):
+ output_order = self.output_order
+ self.output_order = []
+ for module_type in output_order:
+ modules = [module for module in self.modules if module.type == module_type.type]
+ if len(modules) > 1:
+ logger.error(f'Multiple modules found for module type "{module_type.type}"')
+ return False
+ if len(modules) < 1:
+ logger.error(f'No module found for module type "{module_type.type}"')
+ return False
+ self.output_order.append(modules[0])
+
+ return True
+
+ # \todo Validate parser and generator at Tuner construction time?
+ def _validate_settings(self):
+ if self.parser is None:
+ logger.error('Missing parser')
+ return False
+
+ if self.generator is None:
+ logger.error('Missing generator')
+ return False
+
+ if len(self.modules) == 0:
+ logger.error('No modules added')
+ return False
+
+ if len(self.output_order) != len(self.modules):
+ logger.error('Number of outputs does not match number of modules')
+ return False
+
+ return True
+
+ def _process_args(self, argv, platform_name):
+ parser = argparse.ArgumentParser(description=f'Camera Tuning for {platform_name}')
+ parser.add_argument('-i', '--input', type=str, required=True,
+ help='''Directory containing calibration images (required).
+ Images for ALSC must be named "alsc_{Color Temperature}k_1[u].dng",
+ and all other images must be named "{Color Temperature}k_{Lux Level}l.dng"''')
+ parser.add_argument('-o', '--output', type=str, required=True,
+ help='Output file (required)')
+ # It is not our duty to scan all modules to figure out their default
+ # options, so simply return an empty configuration if none is provided.
+ parser.add_argument('-c', '--config', type=str, default='',
+ help='Config file (optional)')
+ # \todo Check if we really need this or if stderr is good enough, or if
+ # we want a better logging infrastructure with log levels
+ parser.add_argument('-l', '--log', type=str, default=None,
+ help='Output log file (optional)')
+ return parser.parse_args(argv[1:])
+
+ def run(self, argv):
+ args = self._process_args(argv, self.name)
+ if args is None:
+ return -1
+
+ if not self._validate_settings():
+ return -1
+
+ if not self._prepare_output_order():
+ return -1
+
+ if len(args.config) > 0:
+ self.config, disable = self.parser.parse(args.config, self.modules)
+ else:
+ self.config = {'general': {}}
+ disable = []
+
+ # Remove disabled modules
+ for module in disable:
+ if module in self.modules:
+ self.modules.remove(module)
+
+ for module in self.modules:
+ if not module.validate_config(self.config):
+ logger.error(f'Config is invalid for module {module.type}')
+ return -1
+
+ has_lsc = any(isinstance(m, lt.modules.lsc.LSC) for m in self.modules)
+        # Check whether the only module present is an LSC module
+ has_only_lsc = has_lsc and len(self.modules) == 1
+
+ images = utils.load_images(args.input, self.config, not has_only_lsc, has_lsc)
+ if images is None or len(images) == 0:
+            logger.error('No images were found, or they could not be loaded')
+ return -1
+
+ # Do the tuning
+ for module in self.modules:
+ out = module.process(self.config, images, self.output)
+ if out is None:
+ logger.warning(f'Module {module.hr_name} failed to process...')
+ continue
+ self.output[module] = out
+
+ self.generator.write(args.output, self.output, self.output_order)
+
+ return 0
diff --git a/utils/tuning/libtuning/macbeth.py b/utils/tuning/libtuning/macbeth.py
new file mode 100644
index 00000000..4a2006b0
--- /dev/null
+++ b/utils/tuning/libtuning/macbeth.py
@@ -0,0 +1,537 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2024, Ideas on Board Oy
+#
+# Locate and extract Macbeth charts from images
+# (Copied from: ctt_macbeth_locator.py)
+
+# \todo Add debugging
+
+import cv2
+import os
+from pathlib import Path
+import numpy as np
+import warnings
+import logging
+from sklearn import cluster as cluster
+
+from .ctt_ransac import get_square_verts, get_square_centres
+from .image import Image
+
+logger = logging.getLogger(__name__)
+
+
+class MacbethError(Exception):
+ pass
+
+
+# Reshape an image to a fixed size without distorting it; returns the
+# resized image and the scale factor
+def reshape(img, width):
+ factor = width / img.shape[0]
+ return cv2.resize(img, None, fx=factor, fy=factor), factor
+
+
+# Correlation function to quantify match
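+# (It returns the Pearson correlation coefficient of the two flattened
+# images, in the range [-1, 1].)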
+def correlate(im1, im2):
+ f1 = im1.flatten()
+ f2 = im2.flatten()
+ cor = np.corrcoef(f1, f2)
+ return cor[0][1]
+
+
+# @brief Compute coordinates of macbeth chart vertices and square centres
+# @return (max_cor, best_map_col_norm, fit_coords, success)
+#
+# Also returns an error/success message for debugging purposes. Additionally,
+# it scores the match with a confidence value.
+#
+# Brief explanation of the macbeth chart locating algorithm:
+# - Find rectangles within image
+# - Take rectangles within percentage offset of median perimeter. The
+# assumption is that these will be the macbeth squares
+# - For each potential square, find the 24 possible macbeth centre locations
+# that would produce a square in that location
+# - Find clusters of potential macbeth chart centres to find the potential
+# macbeth centres with the most votes, i.e. the most likely ones
+# - For each potential macbeth centre, use the centres of the squares that
+# voted for it to find macbeth chart corners
+# - For each set of corners, transform the possible match into normalised
+# space and correlate with a reference chart to evaluate the match
+# - Select the highest correlation as the macbeth chart match, returning the
+# correlation as the confidence score
+#
+# \todo Clean this up
+def get_macbeth_chart(img, ref_data):
+ ref, ref_w, ref_h, ref_corns = ref_data
+
+ # The code will raise and catch a MacbethError in case of a problem, trying
+    # to give some likely reasons why the problem occurred, hence the try/except
+ try:
+ # Obtain image, convert to grayscale and normalise
+ src = img
+ src, factor = reshape(src, 200)
+ original = src.copy()
+ a = 125 / np.average(src)
+ src_norm = cv2.convertScaleAbs(src, alpha=a, beta=0)
+
+        # This code checks if there are separate colour channels. In the past the
+ # macbeth locator ran on jpgs and this makes it robust to different
+ # filetypes. Note that running it on a jpg has 4x the pixels of the
+ # average bayer channel so coordinates must be doubled.
+
+ # This is best done in img_load.py in the get_patches method. The
+ # coordinates and image width, height must be divided by two if the
+ # macbeth locator has been run on a demosaicked image.
+ if len(src_norm.shape) == 3:
+ src_bw = cv2.cvtColor(src_norm, cv2.COLOR_BGR2GRAY)
+ else:
+ src_bw = src_norm
+ original_bw = src_bw.copy()
+
+ # Obtain image edges
+ sigma = 2
+ src_bw = cv2.GaussianBlur(src_bw, (0, 0), sigma)
+ t1, t2 = 50, 100
+ edges = cv2.Canny(src_bw, t1, t2)
+
+ # Dilate edges to prevent self-intersections in contours
+ k_size = 2
+ kernel = np.ones((k_size, k_size))
+ its = 1
+ edges = cv2.dilate(edges, kernel, iterations=its)
+
+ # Find contours in image
+ conts, _ = cv2.findContours(edges, cv2.RETR_TREE,
+ cv2.CHAIN_APPROX_NONE)
+ if len(conts) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo contours found in image\n'
+ 'Possible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ )
+
+ # Find quadrilateral contours
+ epsilon = 0.07
+ conts_per = []
+ for i in range(len(conts)):
+ per = cv2.arcLength(conts[i], True)
+ poly = cv2.approxPolyDP(conts[i], epsilon * per, True)
+ if len(poly) == 4 and cv2.isContourConvex(poly):
+ conts_per.append((poly, per))
+
+ if len(conts_per) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo quadrilateral contours found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+
+ # Sort contours by perimeter and get perimeters within percent of median
+ conts_per = sorted(conts_per, key=lambda x: x[1])
+ med_per = conts_per[int(len(conts_per) / 2)][1]
+ side = med_per / 4
+ perc = 0.1
+ med_low, med_high = med_per * (1 - perc), med_per * (1 + perc)
+ squares = []
+ for i in conts_per:
+ if med_low <= i[1] and med_high >= i[1]:
+ squares.append(i[0])
+
+        # Obtain coordinates of the normalised macbeth chart and squares
+ square_verts, mac_norm = get_square_verts(0.06)
+ # For each square guess, find 24 possible macbeth chart centres
+ mac_mids = []
+ squares_raw = []
+ for i in range(len(squares)):
+ square = squares[i]
+ squares_raw.append(square)
+
+ # Convert quads to rotated rectangles. This is required as the
+            # 'squares' are usually quite irregular quadrilaterals, so
+ # performing a transform would result in exaggerated warping and
+ # inaccurate macbeth chart centre placement
+ rect = cv2.minAreaRect(square)
+ square = cv2.boxPoints(rect).astype(np.float32)
+
+ # Reorder vertices to prevent 'hourglass shape'
+ square = sorted(square, key=lambda x: x[0])
+ square_1 = sorted(square[:2], key=lambda x: x[1])
+ square_2 = sorted(square[2:], key=lambda x: -x[1])
+ square = np.array(np.concatenate((square_1, square_2)), np.float32)
+ square = np.reshape(square, (4, 2)).astype(np.float32)
+ squares[i] = square
+
+            # Find 24 possible macbeth chart centres by transforming the normalised
+ # macbeth square vertices onto candidate square vertices found in image
+ for j in range(len(square_verts)):
+ verts = square_verts[j]
+ p_mat = cv2.getPerspectiveTransform(verts, square)
+ mac_guess = cv2.perspectiveTransform(mac_norm, p_mat)
+ mac_guess = np.round(mac_guess).astype(np.int32)
+
+ mac_mid = np.mean(mac_guess, axis=1)
+ mac_mids.append([mac_mid, (i, j)])
+
+ if len(mac_mids) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo possible macbeth charts found within image'
+ '\nPossible problems:\n'
+ '- Part of the macbeth chart is outside the image\n'
+ '- Quadrilaterals in image background\n'
+ )
+
+ # Reshape data
+ for i in range(len(mac_mids)):
+ mac_mids[i][0] = mac_mids[i][0][0]
+
+ # Find where midpoints cluster to identify most likely macbeth centres
+ clustering = cluster.AgglomerativeClustering(
+ n_clusters=None,
+ compute_full_tree=True,
+ distance_threshold=side * 2
+ )
+ mac_mids_list = [x[0] for x in mac_mids]
+
+ if len(mac_mids_list) == 1:
+ # Special case of only one valid centre found (probably not needed)
+ clus_list = []
+ clus_list.append([mac_mids, len(mac_mids)])
+
+ else:
+ clustering.fit(mac_mids_list)
+
+ # Create list of all clusters
+ clus_list = []
+ if clustering.n_clusters_ > 1:
+ for i in range(clustering.labels_.max() + 1):
+ indices = [j for j, x in enumerate(clustering.labels_) if x == i]
+ clus = []
+ for index in indices:
+ clus.append(mac_mids[index])
+ clus_list.append([clus, len(clus)])
+ clus_list.sort(key=lambda x: -x[1])
+
+ elif clustering.n_clusters_ == 1:
+ # Special case of only one cluster found
+ clus_list.append([mac_mids, len(mac_mids)])
+ else:
+ raise MacbethError(
+                    '\nWARNING: No macbeth chart found!'
+ '\nNo clusters found'
+ '\nPossible problems:\n'
+ '- NA\n'
+ )
+
+ # Keep only clusters with enough votes
+ clus_len_max = clus_list[0][1]
+ clus_tol = 0.7
+ for i in range(len(clus_list)):
+ if clus_list[i][1] < clus_len_max * clus_tol:
+ clus_list = clus_list[:i]
+ break
+ cent = np.mean(clus_list[i][0], axis=0)[0]
+ clus_list[i].append(cent)
+
+ # Get centres of each normalised square
+ reference = get_square_centres(0.06)
+
+ # For each possible macbeth chart, transform image into
+ # normalised space and find correlation with reference
+ max_cor = 0
+ best_map = None
+ best_fit = None
+ best_cen_fit = None
+ best_ref_mat = None
+
+ for clus in clus_list:
+ clus = clus[0]
+ sq_cents = []
+ ref_cents = []
+ i_list = [p[1][0] for p in clus]
+ for point in clus:
+ i, j = point[1]
+
+ # Remove any square that voted for two different points within
+ # the same cluster. This causes the same point in the image to be
+ # mapped to two different reference square centres, resulting in
+ # a very distorted perspective transform since cv2.findHomography
+ # simply minimises error.
+ # This phenomenon is not particularly likely to occur due to the
+ # enforced distance threshold in the clustering fit but it is
+ # best to keep this in just in case.
+ if i_list.count(i) == 1:
+ square = squares_raw[i]
+ sq_cent = np.mean(square, axis=0)
+ ref_cent = reference[j]
+ sq_cents.append(sq_cent)
+ ref_cents.append(ref_cent)
+
+ # At least four squares need to have voted for a centre in
+ # order for a transform to be found
+ if len(sq_cents) < 4:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNot enough squares found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is occluded\n'
+                    '- Macbeth chart is too dark or bright\n'
+ )
+
+ ref_cents = np.array(ref_cents)
+ sq_cents = np.array(sq_cents)
+
+ # Find best fit transform from normalised centres to image
+ h_mat, mask = cv2.findHomography(ref_cents, sq_cents)
+            if h_mat is None:
+                raise MacbethError(
+                    '\nERROR: could not compute a homography for the candidate centres\n'
+                )
+
+ # Transform normalised corners and centres into image space
+ mac_fit = cv2.perspectiveTransform(mac_norm, h_mat)
+ mac_cen_fit = cv2.perspectiveTransform(np.array([reference]), h_mat)
+
+ # Transform located corners into reference space
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+
+            # Normalise brightness
+ a = 125 / np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+
+ # Find correlation with bw reference macbeth
+ cor = correlate(map_to_ref, ref)
+
+ # Keep only if best correlation
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit
+ best_cen_fit = mac_cen_fit
+ best_ref_mat = ref_mat
+
+ # Rotate macbeth by pi and recorrelate in case macbeth chart is
+ # upside-down
+ mac_fit_inv = np.array(
+ ([[mac_fit[0][2], mac_fit[0][3],
+ mac_fit[0][0], mac_fit[0][1]]])
+ )
+ mac_cen_fit_inv = np.flip(mac_cen_fit, axis=1)
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit_inv,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+ a = 125 / np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+ cor = correlate(map_to_ref, ref)
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit_inv
+ best_cen_fit = mac_cen_fit_inv
+ best_ref_mat = ref_mat
+
+ # Check best match is above threshold
+ cor_thresh = 0.6
+ if max_cor < cor_thresh:
+ raise MacbethError(
+ '\nWARNING: Correlation too low'
+ '\nPossible problems:\n'
+ '- Bad lighting conditions\n'
+ '- Macbeth chart is occluded\n'
+ '- Background is too noisy\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+
+ # Represent coloured macbeth in reference space
+ best_map_col = cv2.warpPerspective(
+ original, best_ref_mat, (ref_w, ref_h)
+ )
+ best_map_col = cv2.resize(
+ best_map_col, None, fx=4, fy=4
+ )
+ a = 125 / np.average(best_map_col)
+ best_map_col_norm = cv2.convertScaleAbs(
+ best_map_col, alpha=a, beta=0
+ )
+
+ # Rescale coordinates to original image size
+ fit_coords = (best_fit / factor, best_cen_fit / factor)
+
+ return (max_cor, best_map_col_norm, fit_coords, True)
+
+ # Catch macbeth errors and continue with code
+ except MacbethError as error:
+        # \todo: This happens so many times in a normal run that it shadows
+ # all the relevant output
+ # logger.warning(error)
+ return (0, None, None, False)
+
+
+def find_macbeth(img, mac_config):
+ small_chart = mac_config['small']
+ show = mac_config['show']
+
+ # Catch the warnings
+ warnings.simplefilter("ignore")
+ warnings.warn("runtime", RuntimeWarning)
+
+ # Reference macbeth chart is created that will be correlated with the
+ # located macbeth chart guess to produce a confidence value for the match.
+ script_dir = Path(os.path.realpath(os.path.dirname(__file__)))
+ macbeth_ref_path = script_dir.joinpath('macbeth_ref.pgm')
+ ref = cv2.imread(str(macbeth_ref_path), flags=cv2.IMREAD_GRAYSCALE)
+ ref_w = 120
+ ref_h = 80
+ rc1 = (0, 0)
+ rc2 = (0, ref_h)
+ rc3 = (ref_w, ref_h)
+ rc4 = (ref_w, 0)
+ ref_corns = np.array((rc1, rc2, rc3, rc4), np.float32)
+ ref_data = (ref, ref_w, ref_h, ref_corns)
+
+ # Locate macbeth chart
+ cor, mac, coords, ret = get_macbeth_chart(img, ref_data)
+
+ # Following bits of code try to fix common problems with simple techniques.
+    # If now or at any point the best correlation is above 0.75, then
+ # nothing more is tried as this is a high enough confidence to ensure
+ # reliable macbeth square centre placement.
+
+ # Keep a list that will include this and any brightened up versions of
+ # the image for reuse.
+ all_images = [img]
+
+ for brightness in [2, 4]:
+ if cor >= 0.75:
+ break
+ img_br = cv2.convertScaleAbs(img, alpha=brightness, beta=0)
+ all_images.append(img_br)
+ cor_b, mac_b, coords_b, ret_b = get_macbeth_chart(img_br, ref_data)
+ if cor_b > cor:
+ cor, mac, coords, ret = cor_b, mac_b, coords_b, ret_b
+
+ # In case macbeth chart is too small, take a selection of the image and
+ # attempt to locate macbeth chart within that. The scale increment is
+ # root 2
+
+ # These variables will be used to transform the found coordinates at
+ # smaller scales back into the original. If ii is still -1 after this
+ # section that means it was not successful
+ ii = -1
+ w_best = 0
+ h_best = 0
+ d_best = 100
+
+ # d_best records the scale of the best match. Macbeth charts are only looked
+ # for at one scale increment smaller than the current best match in order to avoid
+    # unnecessarily searching for macbeth charts at small scales.
+    # If a macbeth chart has already been found then set d_best to 0
+ if cor != 0:
+ d_best = 0
+
+ for index, pair in enumerate([{'sel': 2 / 3, 'inc': 1 / 6},
+ {'sel': 1 / 2, 'inc': 1 / 8},
+ {'sel': 1 / 3, 'inc': 1 / 12},
+ {'sel': 1 / 4, 'inc': 1 / 16}]):
+ if cor >= 0.75:
+ break
+
+ # Check if we need to check macbeth charts at even smaller scales. This
+ # slows the code down significantly and has therefore been omitted by
+ # default, however it is not unusably slow so might be useful if the
+        # macbeth chart is too small to be picked up by the current
+        # subselections. Use this for macbeth charts with side lengths around
+        # 1/5 of the image dimensions (and smaller...?). It is, however,
+        # recommended that the macbeth chart takes up as large a proportion
+        # of the image as possible.
+ if index >= 2 and (not small_chart or d_best <= index - 1):
+ break
+
+ w, h = list(img.shape[:2])
+ # Set dimensions of the subselection and the step along each axis
+ # between selections
+ w_sel = int(w * pair['sel'])
+ h_sel = int(h * pair['sel'])
+ w_inc = int(w * pair['inc'])
+ h_inc = int(h * pair['inc'])
+
+ loop = int(((1 - pair['sel']) / pair['inc']) + 1)
+ # For each subselection, look for a macbeth chart
+ for img_br in all_images:
+ for i in range(loop):
+ for j in range(loop):
+ w_s, h_s = i * w_inc, j * h_inc
+ img_sel = img_br[w_s:w_s + w_sel, h_s:h_s + h_sel]
+ cor_ij, mac_ij, coords_ij, ret_ij = get_macbeth_chart(img_sel, ref_data)
+
+ # If the correlation is better than the best then record the
+ # scale and current subselection at which macbeth chart was
+ # found. Also record the coordinates, macbeth chart and message.
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, ret = mac_ij, coords_ij, ret_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = index + 1
+
+ # Transform coordinates from subselection to original image
+ if ii != -1:
+ for a in range(len(coords)):
+ for b in range(len(coords[a][0])):
+ coords[a][0][b][1] += ii * w_best
+ coords[a][0][b][0] += jj * h_best
+
+ if not ret:
+ return None
+
+ coords_fit = coords
+ if cor < 0.75:
+ logger.warning(f'Low confidence {cor:.3f} for macbeth chart')
+
+ if show:
+ draw_macbeth_results(img, coords_fit)
+
+ return coords_fit
+
+
+def locate_macbeth(image: Image, config: dict):
+ # Find macbeth centres
+ av_chan = (np.mean(np.array(image.channels), axis=0) / (2**16))
+ av_val = np.mean(av_chan)
+ if av_val < image.blacklevel_16 / (2**16) + 1 / 64:
+ logger.warning(f'Image {image.path.name} too dark')
+ return None
+
+ macbeth = find_macbeth(av_chan, config['general']['macbeth'])
+
+ if macbeth is None:
+ logger.warning(f'No macbeth chart found in {image.path.name}')
+ return None
+
+ mac_cen_coords = macbeth[1]
+ if not image.get_patches(mac_cen_coords):
+ logger.warning(f'Macbeth patches have saturated in {image.path.name}')
+ return None
+
+ image.macbeth = macbeth
+
+ return macbeth
diff --git a/utils/tuning/libtuning/macbeth_ref.pgm b/utils/tuning/libtuning/macbeth_ref.pgm
new file mode 100644
index 00000000..089ea91f
--- /dev/null
+++ b/utils/tuning/libtuning/macbeth_ref.pgm
@@ -0,0 +1,6 @@
+P5
+# SPDX-License-Identifier: BSD-2-Clause
+# Reference macbeth chart
+120 80
+255
+  !#!" #!"&&$#$#'"%&#+2///..../.........-()))))))))))))))))))(((-,*)'(&)#($%(%"###""!%""&"&&!$" #!$ !"! $&**" !#5.,%+,-5"0<HBAA54" %##((()*+,---.........+*)))))))))))))))-.,,--+))('((''('%'%##"!""!"!""""#!   ! %‚/vÀ¯z:òøßãLñ©û¶ÑÔcÒ,!#""%%''')**+)-../..../.-*)))))))))))))**,,)**'(''&'((&&%%##$! !!!! ! !  !  5*"-)&7(1.75Rnge`\`$ ""!"%%%'')())++--/---,-..,-.,++**))))())*)*)''%'%&%&'&%%"""""        !  !!$&$$&##(+*,,/10122126545./66402006486869650*.1.***)*+)()&((('('##)('&%%&%$$$#$%$%$ (((*))('((('('(&%V0;>>;@@>@AAAACBCB=&<­·³µ¶¾¿ÃÇÇÆÇËÒÐÇÄ<5x–•ŠŽŒŠ‰„„„„|64RYVTSRRRMMNLKJJLH+&0gijgdeffmmnpnkji`#3™ ª¦¨¨£Ÿ›››š–—™šbY! 3FHHIIIHIJIIJHIII@#?¾ÈÊÍÏÑÔÖØÚÚÚÛßáßÔ=7}—š˜———˜—˜˜——˜——‘:5Wcbcbdcb`^^`^^_^Y,'6‰ŽŒ‰ˆˆˆ‡†…„††„‚r'<½ÆÅÅÅÄÂÀ¿¾¾¼»¼¼µl%2FHHIIHJJJJJJIIJI?%;ÁÌÌÒÓÖØÙÛÛÜÜÞßâãÕ>7|•™™ž™—˜˜˜—™™™š˜–;8Xfeeegeccb`^aba]Z+)<Ž“’‘‹Š‰‰‰‰ˆ†r)>¿ÇÇÇÆÅÅÄÂÁÁÀ¾¾¼·q#3GHIIIIJIIJJIHIJI@&5ÁÎÑÔÕØÙÚÜÜÞßßßàâ×=8~”•˜™š›šš™›šœ››“;8Zgghggedbdcbda^\Z+(;““’‘‘Ž‹‹ŠŠ‰ˆy)9¿ÈÈÈÇÇÅÄÂÁÁÀ¿½½¹z"3GIIJJJJJKJJJJJJJ@'4ÂÑÔÔÙÚÛÜÞÝßßààààØ>9|”—–—™ššš™›œŸ¥ ž˜=8Zhighgeeeedeca__[/)B’–•••“‘ŽŒŒŒŒŠv&:ÁÊÊÊÊÆÆÆÂÁÂÂÁ¿¿º|#3GJJIIJKKKJJJKKJK@&6ÆÒ××ÙÛÛÞÞßààààààÖ>9~”———˜˜—™šžž    ˜<8Yghegggffihccab^\/*C“™˜—––””’‘‘Žz'9ÄÍËÈÈÇÇÆÆÄÂÂÀÀ¿»‚$  6IKJJMMMKMKKMKKMLC&2É××ÙÛÜßÞàááâââââÖ@9•——˜˜™˜˜š››žŸžž—<9Yghhhhijiegdcebc^0)G—›š™˜˜˜–•“’‘Ž(7ÃÍÌËÊÈÇÇÅÆÄÂÂÂÁº‰% 6JLMMNMMKMMNMMMMMD&2ÊÙÙÛÝßßßààáââáãâÖ@:~”—™™š™™››žžžžž—=9Xfghhjiigdgddedc`1)M—œ›š˜™—•”‘’‘Ž}(:ÄÐÍÌËÊÇÆÆÆÅÂÄÁ¾& "8LNOONNOMONNMMNOND'3ÍÛÛÞßàààáââãâåãå×@;–˜˜™žŸŸ  ¡¡  —=:Ziiigheegegegggdc1,Q›ŸŸž›šš˜––““‘~)8ÂÍÎÌËÊÊÈÆÆÆÆÄÆÇÁ•%# "9NNNPPPQOOOOONNOOD'0ÎÜÜßßáàáââååäãåæ×?;–˜—™šœžŸ¡¡ ¡Ÿ  ™=;[iigeeegghgdedgea0-P› ¡ žš˜—–•”(8ÃÏÎÎÌÊÈÈÇÇÇÆÈÇÆÃ' "#$:NNOQPPRPQPOOPQPPD*1ÐßßàààâãããåææåææÛA;‚˜™™šœžžŸ  Ÿž Ÿ—;:Yfghgghgghghhdggc3.\¡£¡  Ÿœœš˜—•’‘~);ÅÎÎÑÐÌËÊÇÈÉÊÊÇŤ(&%%;OQQQRSSRPQQQQSQQF)3ÓßàááãâãåææææææçÜB<ƒ™šœœžžžžŸ žŸ Ÿž—=:Wfhghhhihggghfhee4/f ¥¤¢¡¡ŸŸš˜—””’‘‚*:ÇÏÍÍÎÎÍÌÉÈËÊÈÆÆÃ¤&%%%?RSSSSSTTTTSSSTTRE)5ÕàááãâäåæåæçççèèÛB=„šœœžŸ Ÿ ¡ žŸŸŸ˜@:Ygiihhiiiihihiiif72p £¤¤£ ŸŸœœ™—–•’‘}(9ÇÎÏÎÍÍÍÍÍËÌÊÈÈÇÆ©'#%&?TUTTTUUQSTTTTTVSF*3ÕàãâãäåæææçççèééßF>†žž  ¡¡£ £¡¡¡ Ÿ˜A;[ghjiihiiiihihije50r¢¦¥¥££ Ÿžœš™—–““‚)6ÈÏÏÎÌÎÎÌÏÏËÊÊÈÈÆ«& &#%?SVVVUUUUUTUUVVUUG*5ÖãããåæææçèèèèééëßF=…ŸŸ¢££££ ¡¡  £ ˜A;Yhijiiijjiiiiijje81t¦¦¦¥¥£¡ Ÿ›˜——•’~)5ÇÑÑÏÎËÍÍÑÑÌËÈÈÉÆ°' '$$=OQRRQQPRSRSSSSSSG+6ËÙÙÜÛÜÞÝßààààáããÙD@‚š›œŸœžœ›š”?;Wefgggggfffgeeefc41xŸžŸž››š˜•”’‘ŽŒ{*5¾ÈÈÇÅÃÃÄÄÃÂÂÂÀ¿¼«( &&&'++++,,*-,-00-0100*-SUX\]]`_ffgiooopo=;X\bedbadbca`]\]ZZ;;<::8:;9983433110/-,...1//12410/..--+)"",---,-./,,.-/-0-( &&%+/0103322011223233)(34534767::;;==:=B9;BFGEEGIKJKIJGIJCD=<:76566554111/0/1.*+00233300/00//..,+*#")(*)++,++))*++**'!!&$*w³½¾¿Â¼ÀÀ¼¼·¹¹¸´²Ž1-_addc`ceccdccedbb?A|ŒŒ‘‹ŒŒ‹ŠŠ‰‰ˆB>=>?@@?====;<:;:<:11r‹ŒŽ“–““•–˜™Ž+.’—”™ ¥¢¡¤žšŸŸœ( !'%*zÀÇÆÆÇÇÊÊÈÈÈÊËËËÉ 42gjmllklomooonpopmHG‘©¬«««¬©«««ª««ª©£D>AEDEFEECEECCCDDEC46µåçèçççæåäãáàÞÜÚ׿0:Î×Ö×××ÖÕÒÓÏÐÐÍÍѾ,!!&&,|ÂÇÇÇÇÇÇËËÇÈÊËËÍÊ¡61inknnoopoppoqqrqoEE”¬­­­®®®­®®¯­®®­¥FACGFFFFFFDFDDDDDDC57¹íñïîîíííëéçæãáßÝÄ09ÓÛÛÛÛÚÙØ×ÖÕÔÔÒÔÒÁ+!"%%-~ÀÆÈÊÇÇÈÉÌÌÊÊËÌÌÊ¡42inopppppoqqqrrsrnAB“«®®­®®®®®±­®¬°­¥C?DGGGGFFFFDFFDDEDC48ºíððïïîîíìëèçæãáßÅ1;ÔÞÞÝÜÚÚÙÙ×ÕÕÔÕÔÒÁ+!!"#*|¿ÄÉÊÈÈÈÈÉÍÉÈËÍÍÊ¡62imoppppqqqqrtrqtrGD•¬®®­°®°°°±±°®®­§H?CGGGGGGGGFFFFFFDB38»îðïïïïîíììëèçæâàÅ1<ÖààßÞÞÜÚÚÙÙÙ××ÔÔ½, !)}¿ÃÈÈÊÇÈÈËÎËÊËÌÍË¢63mooppqqqqqqrrtvtoDH—­­®±®°±°­°®­±°°¦JACHHGGHGGFFFDDGGFD29ÀðóòðïïïîííìêéèæâÆ3>ÖááààßÞÜÛÙÙÙØ×Ø×½, $){¼ÂÅÆÉÇÈÆËËÌÊËÊÍË¢53jpppqprqrrrttuvuo>H˜®°®±²±±°°°±°±°°ªJAFHHHHHGGHGGFGGFFE28ÁðôòòððïïîíëìëéçãÇ3:×ãáááßÞÝÛÛÚÙÚÚÚÚ½- "*{¸ÁÁÅÆÇÆÆÊËÌÉÊËÎÌ£53loqpqsqrrrtrutsvrAH—«®®±±°°°®±±±®­°©HCGHIHHHHHHGFGHGGGD5;ÀðóóòñððïîííìëëèäÇ28ØäãááààßÞÜÛÛÛÚÚÚÀ, 
+}¹¾ÀÂÂÅÅÅÇÉÍËÊËÌÊ¡52mqoqpqrttttttuurpFI–®°±°±±²°±±°±±¯°§OCEHHIHHHHGHGGFFIGF8<ÃðòòóóòððïíîìììéæÍ48ÚçåããáààßÝÜÜÜÜÛÛ¿, (|º¼¾ÀÀÃÄÄÆÇÍËÊÊËÊ¢41krqpqqqrrtrtuvtuoEH—­°°²±±±±¯²²®²±®«PBHHIIIHIIHIHGHGHHE7<ÃðóóòñððððïíííìêçÑ58ÜèæåãââáßßÜÞÜÞÞÚÄ* (zºº»¾ÀÂÂÂÄÄÇËÈÊËÊ¡63kpqprqqstttutrvvoFO˜¯°¯°±±°±±±±±°±²©LEHHIIHIHHHIGHGIHGF4=ÅñóóóððððïïîìíëéèÓ5<ÞêèçåââááßÞÞßßßÚÇ* 'zº½º»¾ÁÀÂÂÄÅÊÇÈÊÈ¡62lppqrqrrrtttuttvpAG›¯°±°±°°°°°±±°±±«MGHIIIIHIIIHHIIJHHG4<ÃñóóóðòððîîïííëéèÓ4<ÞëêççæãâáàßàÞÞÛØÇ+ !){º¼º»¾½ÀÁÁÂÄÉÇÇÉÈ 62jopqqqqqrtttutttrEH™¯±°°°¯°°±±²²°±±ªOHFIIIIIJIIIIHIHIHI7>ÅðôóòòòïðïîîíììëèÒ5;àíêèææãâáâßßÜÛÙÖÇ, !)z¼¾¼¹»»ÁÁ¿ÁÁÈÇÆÆÆŸ53lppqqrqrtttuuuutsFI™®±²±±±±²²²±°¯±²«RHGJIJHJKJJJIIIIIIH9>ÂñôôôòóððïîííìëééÓ5;àìééèææäááßÜÛØ×ÔÇ+  !({»¿¸º½½¿¾¿¾ÀÅÆÄÆÅœ41joppprqrrrutttvvrIH’­±°°°±±±²²°±²±±ªTHCJJJJJIJIJJIJJJIH7=ÂòôòóóñðïîîííììéèÒ5;ßìêêèæåäâàÞÛÙÖ×ÕÇ+ (u±±®¯±³µ²´´µº»¸»º‘65gjlmmmnoopnpprpqoIH¦©ª©«ªªª«¬«ªª¨ª¤OIBIJJJIJJJJIIIHHHG89ºåççæçåäããâáßàßÝÜÈ29ÔàßÝÛÛÙØÕÓÑÎÌÈÌʾ' "&,-*)-01/,0/12102-+04448789<>>??AFAD@DBCIJNRWTSUXT[WUQUOKFEBBABA?>>=<<;;67942:<<<>9999864565363&(13335422./1/-+..+ !"&$$""$"&$%'()(''*+-0124688:<>>??A>?EBCHKOLJLNOSQOXQQVMLACGHGHIGFHGDCCBB@??7432233210111.,++,++%(++)*(''%%%$$#%&$# ")0/001120024455520+-U]`addcdhefeekecYGFJRXYYVWWZWVXXVZTOBF}™œšœžœ›š™–™K7Ybccddfeg`^]^]\[Z[*)OTTPPQPOKOLLJJLIK  !1;:9:<<===;=???A@9*/„Ž‘’”•”––—™™š››’FJmxyxwyzzzxyzzz{zxLOÉÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿú]=‹§©¨¦§¥¦¤¤¢¡¡¡ ›Ž.-‹’’Œ‰‡…‚€€€y# !!2><=;==>=<<>@@@@A9-0‡‘‘”—˜˜™—š›žŸ —IKnz||{|{||{}}~}}{zLOÌÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿý]>ެ­¬««¨ª¨§¦¥¥££¡¡–..Œ––”“Ž‹‹‰‡…………„~% $2==;<>>?===>@A@AB;+1…Ž‘“•—™™˜˜™œžŸŸ—JJo{|y{||}{||}}}}}yMTÎÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿý_>ެ­¬«ª©©¦¦¦¤¤££¢ ”-.–”‘‘ŽŠŠ‰…„…„…„}# %2<=;=<@?>==>?A@AA9+3…Ž‘“–——˜™šœšœœžž•FMlz{{y|}}}}||}|}}{MTÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd>«¬«ªªª¨§¦¦¤¤¤¡  ”-,Œ“‘’Ї†……„„„…# %1<<<;==<<=>?A?@AA:,3†Ž‘’•——˜˜šœšœœž–INo{{y{||||}|}}|~}{RTÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=Š©­¬«©©§¦¦¥¤¤£¡ Ÿ—/-‹’‘‹‹‰ˆ…………ƒƒ„}#!$0<<<=<<==>A@@>@AA:-2†‘“’–——™™šš™œ›œ—HInzz{{||{{}~~}}|}zMRÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=‰ª«ªªª§¥¥¥£¤¡¡  ”++ˆŽŽ‹ŠŠ‰………„„„ƒ„~# "$/;<==>;===@@@@>AA:+2†Ž’’“•—–™˜šœ™œ–KHn||y|||||{}~}|}|xMSÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=†©©ª©©§¦¦¥¤£¡Ÿ žœ’+,‡‹Š‰ˆ††…„„„„ƒ}# ! "/:<=>@<<>=@@@@@AA;-3„’’•–˜˜š™šššœ›˜MFs||{{{y}z}}|}|}}yMWÏÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc>„©ª©§¦¦¥¤£¡£  ŸŸ›’,)…ŒŠ‹‰ˆ‡†…„„„ƒƒƒ|! !1;>?>><<>@>>=>ABB;,0ƒŽ‘’––™™™™ššœœ›˜LHr{|{|}|y|}}}}}zNXÎÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc?„©«ª§§¦¥¥££   žžšŽ()„‹ŠŠ‰ˆ…†„„„„„‚ƒƒz# $/;;<=;<>>=>>>@@BB:,1†‘“•–—˜™šœšœšž˜IInyz||||||{||}{~|{NVÏÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc;§¨¨¦¦¦¤££¡¡ŸŸœš“('ƒŠŠˆ‰ˆ……ƒ„ƒƒƒƒ‚€}# $0:<==<;>@>>>>@ABB:,/„‘““–˜™™™šœšš—HLlx|}y{y{|y{|}}}}yMRÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd>~¥§¦¦§¥¤££¢ ŸžššŽ*(ƒŠ‡‡ˆˆ„ƒ‚„ƒƒ‚‚‚y" !&3:;<<;==@@=>AABBA;-3†‘“‘”–—˜˜™šœœš›–KLqz{|||y{}|}{}|~{zRQÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüc9w¤§¦¥¦¤¢£  Ÿžžš™Ž)'ˆ†……„…„ƒƒƒ‚€€€€y" !%1<<;=>===<=@@ABBC<.5†’’•–—™˜™™œœž•IIlz{|}~~~|}{||~}}zMUÌÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüd;p¤¦¥¥¤¤£¤¢ Ÿž›š˜)$€ˆˆ…„„„…‚‚€€€x" $2===<==@=<>=ABBBC?/0ˆ‘’•••˜—˜™™š™œž˜IGkz}}{||}{||y||}zyOVÊÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüc7o¢¥¥¤££¡¤¡žŸš™š˜‘'&~‡„„„„ƒ…„€~€z"#"#/;<:<<?>;===@?AAA>07‹‹Ž’’’”“•–—•‘GGgwxz{yyxyzzyz{yuuHO½ùûüüüüüüûûûúúúúò\8v›žœ›š™˜—•••”‘†'$w~~}|||{~|{zxxxxv!"""'*+(+)*))()+,,.../0398;=<=>DCCDDCBBDHBCJMMLMPNPOJPKPSJDICCNMPONMNNOKHIFDBHE3/46433323.....*+,)( !##!!!!!$#$$#$#&"!!"(+**,,*+.//1478:<:33ACDFGGIIHIJLPKNMQFIPTTRVXVUXUUTXUSTNEGGFDEFAA>==;94877520-,))*(((('&$#!!" 
&%'FQPQR]dqŒ˜£«¹ÍàðÈ=FñûüÿÿÿÿÿÿÿÿÿÿÿÿÿúQN·èììêìæéììêéëëéêáLEœ˜…znki^[YTPUOS;.%-/12322221/10//,/%#0¯ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿß@QýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQMÁðôõôóôóôõõôõôôóæKE„¨©¨§§¤¥¥¢¤£ žžž˜H01NNQOQQOOMNNLKLJGB'&/¸ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâAWþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿOLÀñóôôôóóóõôôõóôòèKE„¦¨©©§ª©ª¦¨§¥¢¤¢œF-,PQQPQPPQPOONMNNKE''0·ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáCZþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿRMÁñôóóòòôòôõóôôóòåJE‚¥©¬¬©ª©ª§¥¥¤¤¤¢™F,*NSQPPQOOOOMNNMKID('2·ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD[þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQKÀðòòòóóòóõóõóòòðæIF€§©ª©§©§©¥¤¤¤¤¡ ˜F,*NPPPPPPNOONMMMJIF!'(2¶ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáF]þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿRL¿íððòðòòóóòòñïòðäHD£¦©©§¨¦¦¦¤¤¤¤¢ ˜F+%MPPPPOOONONNMMKID)*4¸ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD^þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿPL¿ìðïïòòððòòðïðòïäIC€¢¦¨¨¥¦¥§¥¤££¡ŸŸ—F+&NPOOOPPOONMMKMKHD**6ºÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD_þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQJ¾ëïïïòðððððïðïîïãFC~¢§¥¥¦¦¦¤££¤¡   ˜F,'MPOOOOONONNKKIIIG,+7»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD^ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQI¾êîîîïðïðïððïïïîâEB|£¥¤££¤¤¤£¤¢Ÿ ŸŸ—E+&MONOOONNNNKMJKJHH,-8¹ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàD]þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿPI¼éíìîîîîñðòóóöù÷èHE¥¨§¥¥¤¤£¡£¡  žŸ—C,#LOOOONONNNKKKMKJF,*6»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáCaýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMH»éììíîðððôóõöööõçIF‚©ª§¦¦¥££¢ Ÿžž Ÿ–D*%KONOMNMMKMKJJJIJE,,6»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâB^ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMG¹èììîðòóòóóóóòóôéHB}£©¦¦§¥¤¤¢ŸŸžžš”D+&LONOOONNMMMMKLKIA,,6ºÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàA\ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMF¹éìííïòóôððôöõööêIE¦ª©¦§¨§§¡¡Ÿš™”E+&LNNMONNMMKKKKKIHF --6¹ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿßA[üÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþKF¶çìðïððïðóöõöõùúîJC©«­«¦§¦¥¤¡¤žžš—F*&LMONMNMNKKJMKJJIF **5»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿß>WüÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþKE¶èïíðîðóöõøòùóöôçF?}¨©²¯¬¬©¥¤¤£žœ˜˜‘C*%KONNNJKKKMKJKJKID,*4¶ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿØ<WöþÿÿÿÿÿÿÿÿÿÿÿÿÿÿøMA°áäååçêêïêëëåæéçÝGCxž¨¦ ©¥¤ šœ¡˜•’ŠB)%HKLKKJJJKIHIHHFGC!()*q ¡š›šš™““’‘‘’‹‹o39v|}wwwwwwrqtuspn=9^gadcfgce`dbUY[\^>;DIJDB?FEGE=7>8634.(&&(%&*&%%'+*)+*#%()''03364443233222243/-+133423333423766645789:><<<;<;<?=?;<<:78673/001113--.-+*)&&#"&$#%&""$!! ))+rbPpAD9-*******+*++)++--.//./.0/21453469:=;98<;<>=;><7766666741012.-13/-+-/(''&&&%%&$.%0()-%-#-#' #&(% )))hn›YQgÛ7(*))))*)**,--....../0/0001357666::;;>?>AA866666666656565300/20/.-*)(('((&&%)d=yoP¼<Ñ?ßFQFx;§2»1«0))*RQ.0*,,5*(*))))*,**,+/.../...02/22224456468;:>BB;>;:76666666666755303033/,.-*(())('&')#)"##(+$+*#)) & 
diff --git a/utils/tuning/libtuning/modules/__init__.py b/utils/tuning/libtuning/modules/__init__.py
new file mode 100644
index 00000000..9ccabb0e
--- /dev/null
+++ b/utils/tuning/libtuning/modules/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
diff --git a/utils/tuning/libtuning/modules/agc/__init__.py b/utils/tuning/libtuning/modules/agc/__init__.py
new file mode 100644
index 00000000..4db9ca37
--- /dev/null
+++ b/utils/tuning/libtuning/modules/agc/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.modules.agc.agc import AGC
+from libtuning.modules.agc.rkisp1 import AGCRkISP1
diff --git a/utils/tuning/libtuning/modules/agc/agc.py b/utils/tuning/libtuning/modules/agc/agc.py
new file mode 100644
index 00000000..9c8899ba
--- /dev/null
+++ b/utils/tuning/libtuning/modules/agc/agc.py
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+
+from ..module import Module
+
+import libtuning as lt
+
+
+class AGC(Module):
+ type = 'agc'
+ hr_name = 'AGC (Base)'
+ out_name = 'GenericAGC'
+
+ # \todo Add sector shapes and stuff just like lsc
+ def __init__(self, *,
+ debug: list):
+ super().__init__()
+
+ self.debug = debug
diff --git a/utils/tuning/libtuning/modules/agc/rkisp1.py b/utils/tuning/libtuning/modules/agc/rkisp1.py
new file mode 100644
index 00000000..2dad3a09
--- /dev/null
+++ b/utils/tuning/libtuning/modules/agc/rkisp1.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+#
+# rkisp1.py - AGC module for tuning rkisp1
+
+from .agc import AGC
+
+import libtuning as lt
+
+
+class AGCRkISP1(AGC):
+ hr_name = 'AGC (RkISP1)'
+ out_name = 'Agc'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't actually need anything from the config file
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def _generate_metering_modes(self) -> dict:
+ centre_weighted = [
+ 0, 0, 0, 0, 0,
+ 0, 6, 8, 6, 0,
+ 0, 8, 16, 8, 0,
+ 0, 6, 8, 6, 0,
+ 0, 0, 0, 0, 0
+ ]
+
+ spot = [
+ 0, 0, 0, 0, 0,
+ 0, 2, 4, 2, 0,
+ 0, 4, 16, 4, 0,
+ 0, 2, 4, 2, 0,
+ 0, 0, 0, 0, 0
+ ]
+
+ matrix = [1 for i in range(0, 25)]
+
+ return {
+ 'MeteringCentreWeighted': centre_weighted,
+ 'MeteringSpot': spot,
+ 'MeteringMatrix': matrix
+ }
+
+ def _generate_exposure_modes(self) -> dict:
+ normal = {'exposureTime': [100, 10000, 30000, 60000, 120000],
+ 'gain': [2.0, 4.0, 6.0, 6.0, 6.0]}
+ short = {'exposureTime': [100, 5000, 10000, 20000, 120000],
+ 'gain': [2.0, 4.0, 6.0, 6.0, 6.0]}
+
+ return {'ExposureNormal': normal, 'ExposureShort': short}
+
+ def _generate_constraint_modes(self) -> dict:
+ normal = {'lower': {'qLo': 0.98, 'qHi': 1.0, 'yTarget': 0.5}}
+ highlight = {
+ 'lower': {'qLo': 0.98, 'qHi': 1.0, 'yTarget': 0.5},
+ 'upper': {'qLo': 0.98, 'qHi': 1.0, 'yTarget': 0.8}
+ }
+
+ return {'ConstraintNormal': normal, 'ConstraintHighlight': highlight}
+
+ def _generate_y_target(self) -> float:
+ return 0.5
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ output['AeMeteringMode'] = self._generate_metering_modes()
+ output['AeExposureMode'] = self._generate_exposure_modes()
+ output['AeConstraintMode'] = self._generate_constraint_modes()
+ output['relativeLuminanceTarget'] = self._generate_y_target()
+
+ # \todo Debug functionality
+
+ return output
diff --git a/utils/tuning/libtuning/modules/awb/__init__.py b/utils/tuning/libtuning/modules/awb/__init__.py
new file mode 100644
index 00000000..2d67f10c
--- /dev/null
+++ b/utils/tuning/libtuning/modules/awb/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas On Board
+
+from libtuning.modules.awb.awb import AWB
+from libtuning.modules.awb.rkisp1 import AWBRkISP1
diff --git a/utils/tuning/libtuning/modules/awb/awb.py b/utils/tuning/libtuning/modules/awb/awb.py
new file mode 100644
index 00000000..c154cf3b
--- /dev/null
+++ b/utils/tuning/libtuning/modules/awb/awb.py
@@ -0,0 +1,36 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas On Board
+
+import logging
+
+from ..module import Module
+
+from libtuning.ctt_awb import awb
+import numpy as np
+
+logger = logging.getLogger(__name__)
+
+
+class AWB(Module):
+ type = 'awb'
+ hr_name = 'AWB (Base)'
+ out_name = 'GenericAWB'
+
+ def __init__(self, *, debug: list):
+ super().__init__()
+
+ self.debug = debug
+
+ def do_calculation(self, images):
+ logger.info('Starting AWB calculation')
+
+ imgs = [img for img in images if img.macbeth is not None]
+
+ gains, _, _ = awb(imgs, None, None, False)
+ gains = np.reshape(gains, (-1, 3))
+
+ return [{
+ 'ct': int(v[0]),
+ 'gains': [float(1.0 / v[1]), float(1.0 / v[2])]
+ } for v in gains]
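For illustration only (not part of the patch), the list that do_calculation() builds, and that the RkISP1 AWB module below stores under 'colourGains', is shaped roughly like the following sketch; the numbers are invented:

# Hypothetical output of AWB.do_calculation(); the values are made up.
colour_gains = [
    {'ct': 2800, 'gains': [0.7, 2.3]},
    {'ct': 5500, 'gains': [1.5, 1.4]},
]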
diff --git a/utils/tuning/libtuning/modules/awb/rkisp1.py b/utils/tuning/libtuning/modules/awb/rkisp1.py
new file mode 100644
index 00000000..0c95843b
--- /dev/null
+++ b/utils/tuning/libtuning/modules/awb/rkisp1.py
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas On Board
+#
+# AWB module for tuning rkisp1
+
+from .awb import AWB
+
+import libtuning as lt
+
+
+class AWBRkISP1(AWB):
+ hr_name = 'AWB (RkISP1)'
+ out_name = 'Awb'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ output['colourGains'] = self.do_calculation(images)
+
+ return output
diff --git a/utils/tuning/libtuning/modules/ccm/__init__.py b/utils/tuning/libtuning/modules/ccm/__init__.py
new file mode 100644
index 00000000..322602af
--- /dev/null
+++ b/utils/tuning/libtuning/modules/ccm/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.modules.ccm.ccm import CCM
+from libtuning.modules.ccm.rkisp1 import CCMRkISP1
diff --git a/utils/tuning/libtuning/modules/ccm/ccm.py b/utils/tuning/libtuning/modules/ccm/ccm.py
new file mode 100644
index 00000000..18702f8d
--- /dev/null
+++ b/utils/tuning/libtuning/modules/ccm/ccm.py
@@ -0,0 +1,41 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+# Copyright (C) 2024, Ideas on Board
+#
+# Base Ccm tuning module
+
+from ..module import Module
+
+from libtuning.ctt_ccm import ccm
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class CCM(Module):
+ type = 'ccm'
+ hr_name = 'CCM (Base)'
+ out_name = 'GenericCCM'
+
+ def __init__(self, debug: list):
+ super().__init__()
+
+ self.debug = debug
+
+ def do_calibration(self, images):
+ logger.info('Starting CCM calibration')
+
+ imgs = [img for img in images if img.macbeth is not None]
+
+ # \todo Take LSC calibration results into account.
+ cal_cr_list = None
+ cal_cb_list = None
+
+ try:
+ ccms = ccm(imgs, cal_cr_list, cal_cb_list)
+ except ArithmeticError:
+ logger.error('CCM calibration failed')
+ return None
+
+ return ccms
diff --git a/utils/tuning/libtuning/modules/ccm/rkisp1.py b/utils/tuning/libtuning/modules/ccm/rkisp1.py
new file mode 100644
index 00000000..be0252d9
--- /dev/null
+++ b/utils/tuning/libtuning/modules/ccm/rkisp1.py
@@ -0,0 +1,28 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+# Copyright (C) 2024, Ideas on Board
+#
+# Ccm module for tuning rkisp1
+
+from .ccm import CCM
+
+
+class CCMRkISP1(CCM):
+ hr_name = 'Crosstalk Correction (RkISP1)'
+ out_name = 'Ccm'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't need anything from the config file.
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ ccms = self.do_calibration(images)
+ output['ccms'] = ccms
+
+ return output
diff --git a/utils/tuning/libtuning/modules/lsc/__init__.py b/utils/tuning/libtuning/modules/lsc/__init__.py
new file mode 100644
index 00000000..0ba4411b
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/__init__.py
@@ -0,0 +1,7 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.modules.lsc.lsc import LSC
+from libtuning.modules.lsc.raspberrypi import ALSCRaspberryPi
+from libtuning.modules.lsc.rkisp1 import LSCRkISP1
diff --git a/utils/tuning/libtuning/modules/lsc/lsc.py b/utils/tuning/libtuning/modules/lsc/lsc.py
new file mode 100644
index 00000000..e0ca22eb
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/lsc.py
@@ -0,0 +1,75 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from ..module import Module
+
+import libtuning as lt
+import libtuning.utils as utils
+
+import numpy as np
+
+
+class LSC(Module):
+ type = 'lsc'
+ hr_name = 'LSC (Base)'
+ out_name = 'GenericLSC'
+
+ def __init__(self, *,
+ debug: list,
+ sector_shape: tuple,
+ sector_x_gradient: lt.Gradient,
+ sector_y_gradient: lt.Gradient,
+ sector_average_function: lt.Average,
+ smoothing_function: lt.Smoothing):
+ super().__init__()
+
+ self.debug = debug
+
+ self.sector_shape = sector_shape
+ self.sector_x_gradient = sector_x_gradient
+ self.sector_y_gradient = sector_y_gradient
+ self.sector_average_function = sector_average_function
+
+ self.smoothing_function = smoothing_function
+
+ def _enumerate_lsc_images(self, images):
+ for image in images:
+ if image.lsc_only:
+ yield image
+
+ def _get_grid(self, channel, img_w, img_h):
+ # List of number of pixels in each sector
+ sectors_x = self.sector_x_gradient.distribute(img_w / 2, self.sector_shape[0])
+ sectors_y = self.sector_y_gradient.distribute(img_h / 2, self.sector_shape[1])
+
+ grid = []
+
+ r = 0
+ for y in sectors_y:
+ c = 0
+ for x in sectors_x:
+ grid.append(self.sector_average_function.average(channel[r:r + y, c:c + x]))
+ c += x
+ r += y
+
+ return np.array(grid)
+
+ def _lsc_single_channel(self, channel: np.array,
+ image: lt.Image, green_grid: np.array = None):
+ grid = self._get_grid(channel, image.w, image.h)
+ # Clamp the values to a small positive, so that the following 1/grid
+ # doesn't produce negative results.
+ grid = np.maximum(grid - image.blacklevel_16, 0.1)
+
+ if green_grid is None:
+ table = np.reshape(1 / grid, self.sector_shape[::-1])
+ else:
+ table = np.reshape(green_grid / grid, self.sector_shape[::-1])
+ table = self.smoothing_function.smoothing(table)
+
+ if green_grid is None:
+ table = table / np.min(table)
+
+ return table, grid
diff --git a/utils/tuning/libtuning/modules/lsc/raspberrypi.py b/utils/tuning/libtuning/modules/lsc/raspberrypi.py
new file mode 100644
index 00000000..99bc4fe6
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/raspberrypi.py
@@ -0,0 +1,248 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# ALSC module for tuning Raspberry Pi
+
+from .lsc import LSC
+
+import libtuning as lt
+import libtuning.utils as utils
+
+from numbers import Number
+import numpy as np
+import logging
+
+logger = logging.getLogger(__name__)
+
+class ALSCRaspberryPi(LSC):
+ # Override the type name so that the parser can match the entry in the
+ # config file.
+ type = 'alsc'
+ hr_name = 'ALSC (Raspberry Pi)'
+ out_name = 'rpi.alsc'
+ compatible = ['raspberrypi']
+
+ def __init__(self, *,
+ do_color: lt.Param,
+ luminance_strength: lt.Param,
+ **kwargs):
+ super().__init__(**kwargs)
+
+ self.do_color = do_color
+ self.luminance_strength = luminance_strength
+
+ self.output_range = (0, 3.999)
+
+ def validate_config(self, config: dict) -> bool:
+ if self not in config:
+ logger.error(f'{self.type} not in config')
+ return False
+
+ valid = True
+
+ conf = config[self]
+
+ lum_key = self.luminance_strength.name
+ color_key = self.do_color.name
+
+ if lum_key not in conf and self.luminance_strength.required:
+ logger.error(f'{lum_key} is not in config')
+ valid = False
+
+ if lum_key in conf and (conf[lum_key] < 0 or conf[lum_key] > 1):
+ logger.warning(f'{lum_key} is not in range [0, 1]; defaulting to 0.5')
+
+ if color_key not in conf and self.do_color.required:
+ logger.error(f'{color_key} is not in config')
+ valid = False
+
+ return valid
+
+ # @return Image color temperature, flattened array of red calibration table
+ # (containing {sector size} elements), flattened array of blue
+ # calibration table, flattened array of green calibration
+ # table
+
+ def _do_single_alsc(self, image: lt.Image, do_alsc_colour: bool):
+ average_green = np.mean((image.channels[lt.Color.GR:lt.Color.GB + 1]), axis=0)
+
+ cg, g = self._lsc_single_channel(average_green, image)
+
+ if not do_alsc_colour:
+ return image.color, None, None, cg.flatten()
+
+ cr, _ = self._lsc_single_channel(image.channels[lt.Color.R], image, g)
+ cb, _ = self._lsc_single_channel(image.channels[lt.Color.B], image, g)
+
+ # \todo implement debug
+
+ return image.color, cr.flatten(), cb.flatten(), cg.flatten()
+
+ # @return Red shading table, Blue shading table, Green shading table,
+ # number of images processed
+
+ def _do_all_alsc(self, images: list, do_alsc_colour: bool, general_conf: dict) -> (list, list, list, int):
+ # List of colour temperatures
+ list_col = []
+ # Associated calibration tables
+ list_cr = []
+ list_cb = []
+ list_cg = []
+ count = 0
+ for image in self._enumerate_lsc_images(images):
+ col, cr, cb, cg = self._do_single_alsc(image, do_alsc_colour)
+ list_col.append(col)
+ list_cr.append(cr)
+ list_cb.append(cb)
+ list_cg.append(cg)
+ count += 1
+
+ # Convert to numpy array for data manipulation
+ list_col = np.array(list_col)
+ list_cr = np.array(list_cr)
+ list_cb = np.array(list_cb)
+ list_cg = np.array(list_cg)
+
+ cal_cr_list = []
+ cal_cb_list = []
+
+ # Note: The calculation of the average corners and centre of the shading
+ # tables, which ctt performed, has been removed as it was unused
+
+ # Average all values for luminance shading and return one table for all temperatures
+ lum_lut = list(np.round(np.mean(list_cg, axis=0), 3))
+
+ if not do_alsc_colour:
+ return None, None, lum_lut, count
+
+ for ct in sorted(set(list_col)):
+ # Average tables for the same colour temperature
+ indices = np.where(list_col == ct)
+ ct = int(ct)
+ t_r = np.round(np.mean(list_cr[indices], axis=0), 3)
+ t_b = np.round(np.mean(list_cb[indices], axis=0), 3)
+
+ cr_dict = {
+ 'ct': ct,
+ 'table': list(t_r)
+ }
+ cb_dict = {
+ 'ct': ct,
+ 'table': list(t_b)
+ }
+ cal_cr_list.append(cr_dict)
+ cal_cb_list.append(cb_dict)
+
+ return cal_cr_list, cal_cb_list, lum_lut, count
+
+ # @brief Calculate sigma from two adjacent gain tables
+ def _calcSigma(self, g1, g2):
+ g1 = np.reshape(g1, self.sector_shape[::-1])
+ g2 = np.reshape(g2, self.sector_shape[::-1])
+
+ # Apply gains to gain table
+ gg = g1 / g2
+ if np.mean(gg) < 1:
+ gg = 1 / gg
+
+ # For each internal patch, compute average difference between it and
+ # its 4 neighbours, then append to list
+ diffs = []
+ for i in range(self.sector_shape[1] - 2):
+ for j in range(self.sector_shape[0] - 2):
+ # Indexing is incremented by 1 since all patches on borders are
+ # not counted
+ diff = np.abs(gg[i + 1][j + 1] - gg[i][j + 1])
+ diff += np.abs(gg[i + 1][j + 1] - gg[i + 2][j + 1])
+ diff += np.abs(gg[i + 1][j + 1] - gg[i + 1][j])
+ diff += np.abs(gg[i + 1][j + 1] - gg[i + 1][j + 2])
+ diffs.append(diff / 4)
+
+ mean_diff = np.mean(diffs)
+ return np.round(mean_diff, 5)
+
+ # @brief Obtains sigmas for red and blue, effectively a measure of the
+ # 'error'
+ def _get_sigma(self, cal_cr_list, cal_cb_list):
+ # Provided that colour alsc tables were generated for at least two different
+ # colour temperatures, sigma is calculated by comparing calibration tables
+ # at adjacent colour temperatures
+
+ color_temps = [cal['ct'] for cal in cal_cr_list]
+
+ # Calculate sigmas for each adjacent color_temps and return worst one
+ sigma_rs = []
+ sigma_bs = []
+ for i in range(len(color_temps) - 1):
+ sigma_rs.append(self._calcSigma(cal_cr_list[i]['table'], cal_cr_list[i + 1]['table']))
+ sigma_bs.append(self._calcSigma(cal_cb_list[i]['table'], cal_cb_list[i + 1]['table']))
+
+ # Return maximum sigmas, not necessarily from the same colour
+ # temperature interval
+ sigma_r = max(sigma_rs) if sigma_rs else 0.005
+ sigma_b = max(sigma_bs) if sigma_bs else 0.005
+
+ return sigma_r, sigma_b
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {
+ 'omega': 1.3,
+ 'n_iter': 100,
+ 'luminance_strength': 0.7
+ }
+
+ conf = config[self]
+ general_conf = config['general']
+
+ do_alsc_colour = self.do_color.get_value(conf)
+
+ # \todo I have no idea where this input parameter is used
+ luminance_strength = self.luminance_strength.get_value(conf)
+ if luminance_strength < 0 or luminance_strength > 1:
+ luminance_strength = 0.5
+
+ output['luminance_strength'] = luminance_strength
+
+ # \todo Validate images from greyscale camera and force greyscale mode
+ # \todo Debug functionality
+
+ alsc_out = self._do_all_alsc(images, do_alsc_colour, general_conf)
+ # \todo Handle the second green lut
+ cal_cr_list, cal_cb_list, luminance_lut, count = alsc_out
+
+ if not do_alsc_colour:
+ output['luminance_lut'] = luminance_lut
+ output['n_iter'] = 0
+ return output
+
+ output['calibrations_Cr'] = cal_cr_list
+ output['calibrations_Cb'] = cal_cb_list
+ output['luminance_lut'] = luminance_lut
+
+ # The sigmas determine the strength of the adaptive algorithm, which
+ # cleans up any lens shading that has slipped through the alsc. These
+ # are determined by measuring a 'worst-case' difference between two
+ # alsc tables that are adjacent in colour space. If, however, only one
+ # colour temperature has been provided, then this difference cannot be
+ # computed as only one table is available.
+ # To determine the sigmas you would have to estimate the error of an
+ # alsc table with only the image it was taken on as a check. To avoid
+ # that circularity, default exaggerated sigmas are used instead, which
+ # can result in too much alsc and is therefore not advised.
+ # In general, just take another alsc picture at another colour
+ # temperature!
+
+ if count == 1:
+ output['sigma'] = 0.005
+ output['sigma_Cb'] = 0.005
+ logger.warning('Only one alsc calibration found; standard sigmas used for adaptive algorithm.')
+ return output
+
+ # Obtain worst-case scenario residual sigmas
+ sigma_r, sigma_b = self._get_sigma(cal_cr_list, cal_cb_list)
+ output['sigma'] = np.round(sigma_r, 5)
+ output['sigma_Cb'] = np.round(sigma_b, 5)
+
+ return output
diff --git a/utils/tuning/libtuning/modules/lsc/rkisp1.py b/utils/tuning/libtuning/modules/lsc/rkisp1.py
new file mode 100644
index 00000000..c02b2306
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/rkisp1.py
@@ -0,0 +1,116 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# LSC module for tuning rkisp1
+
+from .lsc import LSC
+
+import libtuning as lt
+import libtuning.utils as utils
+
+from numbers import Number
+import numpy as np
+
+
+class LSCRkISP1(LSC):
+ hr_name = 'LSC (RkISP1)'
+ out_name = 'LensShadingCorrection'
+ # \todo Not sure if this is useful. Probably will remove later.
+ compatible = ['rkisp1']
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't actually need anything from the config file
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ # @return Image color temperature, flattened array of red calibration table
+ # (containing {sector size} elements), flattened array of blue
+ # calibration table, flattened array of (red's) green calibration
+ # table, flattened array of (blue's) green calibration table
+
+ def _do_single_lsc(self, image: lt.Image):
+ # Perform LSC on each colour channel independently. A future enhancement
+ # worth investigating would be splitting the luminance and chrominance
+ # LSC as done by Raspberry Pi.
+ cgr, _ = self._lsc_single_channel(image.channels[lt.Color.GR], image)
+ cgb, _ = self._lsc_single_channel(image.channels[lt.Color.GB], image)
+ cr, _ = self._lsc_single_channel(image.channels[lt.Color.R], image)
+ cb, _ = self._lsc_single_channel(image.channels[lt.Color.B], image)
+
+ return image.color, cr.flatten(), cb.flatten(), cgr.flatten(), cgb.flatten()
+
+ # @return List of dictionaries of color temperature, red table, red's green
+ # table, blue's green table, and blue table
+
+ def _do_all_lsc(self, images: list) -> list:
+ output_list = []
+ output_map_func = lt.gradient.Linear().map
+
+ # List of colour temperatures
+ list_col = []
+ # Associated calibration tables
+ list_cr = []
+ list_cb = []
+ list_cgr = []
+ list_cgb = []
+ for image in self._enumerate_lsc_images(images):
+ col, cr, cb, cgr, cgb = self._do_single_lsc(image)
+ list_col.append(col)
+ list_cr.append(cr)
+ list_cb.append(cb)
+ list_cgr.append(cgr)
+ list_cgb.append(cgb)
+
+ # Convert to numpy array for data manipulation
+ list_col = np.array(list_col)
+ list_cr = np.array(list_cr)
+ list_cb = np.array(list_cb)
+ list_cgr = np.array(list_cgr)
+ list_cgb = np.array(list_cgb)
+
+ for color_temperature in sorted(set(list_col)):
+ # Average tables for the same colour temperature
+ indices = np.where(list_col == color_temperature)
+ color_temperature = int(color_temperature)
+
+ tables = []
+ for lis in [list_cr, list_cgr, list_cgb, list_cb]:
+ table = np.mean(lis[indices], axis=0)
+ table = output_map_func((1, 4), (1024, 4096), table)
+ table = np.clip(table, 1024, 4095)
+ table = np.round(table).astype('int32').tolist()
+ tables.append(table)
+
+ entry = {
+ 'ct': color_temperature,
+ 'r': tables[0],
+ 'gr': tables[1],
+ 'gb': tables[2],
+ 'b': tables[3],
+ }
+
+ output_list.append(entry)
+
+ return output_list
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ # \todo This should actually come from self.sector_{x,y}_gradient
+ size_gradient = lt.gradient.Linear(lt.Remainder.Float)
+ output['x-size'] = size_gradient.distribute(0.5, 8)
+ output['y-size'] = size_gradient.distribute(0.5, 8)
+
+ output['sets'] = self._do_all_lsc(images)
+
+ if len(output['sets']) == 0:
+ return None
+
+ # \todo Validate images from greyscale camera and force greyscale mode
+ # \todo Debug functionality
+
+ return output
diff --git a/utils/tuning/libtuning/modules/module.py b/utils/tuning/libtuning/modules/module.py
new file mode 100644
index 00000000..de624384
--- /dev/null
+++ b/utils/tuning/libtuning/modules/module.py
@@ -0,0 +1,32 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Base class for algorithm-specific tuning modules
+
+
+# @var type Type of the module. Defined in the base module.
+# @var out_name The key that will be used for the algorithm in the algorithms
+# dictionary in the tuning output file
+# @var hr_name Human-readable module name, mostly for debugging
+class Module(object):
+ type = 'base'
+ hr_name = 'Base Module'
+ out_name = 'GenericAlgorithm'
+
+ def __init__(self):
+ pass
+
+ def validate_config(self, config: dict) -> bool:
+ raise NotImplementedError
+
+ # @brief Do the module's processing
+ # @param config Full configuration from the input configuration file
+ # @param images List of images to process
+ # @param outputs The outputs of all modules that were executed before this
+ # module. Note that this is an input parameter, and the
+ # output of this module should be returned directly
+ # @return Result of the module's processing. It may be empty. None
+ # indicates failure and that the result should not be used.
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ raise NotImplementedError
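As a sketch of how this interface is meant to be used (not part of the patch; the module name and output keys below are invented for illustration):

from libtuning.modules.module import Module


class MyAlgorithm(Module):
    type = 'my_algorithm'        # key looked up in the input config file
    hr_name = 'My Algorithm'     # human-readable name, mostly for logs
    out_name = 'MyAlgorithm'     # key written to the tuning output file

    def validate_config(self, config: dict) -> bool:
        # A real module would check its own entry in the config here.
        return True

    def process(self, config: dict, images: list, outputs: dict) -> dict:
        # Return the dict that ends up under out_name in the tuning file;
        # returning None signals failure.
        return {'enabled': True}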
diff --git a/utils/tuning/libtuning/modules/static.py b/utils/tuning/libtuning/modules/static.py
new file mode 100644
index 00000000..4d0f7e18
--- /dev/null
+++ b/utils/tuning/libtuning/modules/static.py
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas on Board
+#
+# Module implementation for static data
+
+from .module import Module
+
+
+# This module can be used in cases where the tuning file should contain
+# static data.
+class StaticModule(Module):
+ def __init__(self, out_name: str, output: dict = {}):
+ super().__init__()
+ self.out_name = out_name
+ self.hr_name = f'Static {out_name}'
+ self.type = f'static_{out_name}'
+ self.output = output
+
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ return self.output
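Usage is a one-liner per static algorithm, as the rkisp1 tuning script later in this patch shows:

from libtuning.modules.static import StaticModule

blc = StaticModule('BlackLevelCorrection')
gamma_out = StaticModule('GammaOutCorrection', {'gamma': 2.2})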
diff --git a/utils/tuning/libtuning/parsers/__init__.py b/utils/tuning/libtuning/parsers/__init__.py
new file mode 100644
index 00000000..022c1e5d
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.parsers.raspberrypi_parser import RaspberryPiParser
+from libtuning.parsers.yaml_parser import YamlParser
diff --git a/utils/tuning/libtuning/parsers/parser.py b/utils/tuning/libtuning/parsers/parser.py
new file mode 100644
index 00000000..0c3944c7
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/parser.py
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Base class for a parser for a specific format of config file
+
+class Parser(object):
+ def __init__(self):
+ pass
+
+ # @brief Parse a config file into a config dict
+ # @details The config dict shall have one key 'general' with a dict value
+ # for general configuration options, and all other entries shall
+ # have the module as the key with its configuration options (as a
+ # dict) as the value. The config dict shall prune entries that are
+ # for modules that are not in @a modules.
+ # @param config (str) Path to config file
+ # @param modules (list) List of modules
+ # @return (dict, list) Configuration and list of modules to disable
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ raise NotImplementedError
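To make the contract above concrete, the pair returned by parse() is shaped roughly like the following sketch; the module instance and the 'blacklevel' and 'some_option' values are purely illustrative:

from libtuning.modules.module import Module

# Hypothetical module instance, used only to show that per-module options are
# keyed by the instance itself rather than by its name.
module = Module()

config = {
    'general': {'blacklevel': 64},
    module: {'some_option': 0.5},
}
disabled = []   # modules that the config file asks to disable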
diff --git a/utils/tuning/libtuning/parsers/raspberrypi_parser.py b/utils/tuning/libtuning/parsers/raspberrypi_parser.py
new file mode 100644
index 00000000..f1da4592
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/raspberrypi_parser.py
@@ -0,0 +1,93 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Parser for Raspberry Pi config file format
+
+from .parser import Parser
+
+import json
+import numbers
+
+import libtuning.utils as utils
+
+
+class RaspberryPiParser(Parser):
+ def __init__(self):
+ super().__init__()
+
+ # The strings in the 'disable' and 'plot' lists are formatted as
+ # 'rpi.{module_name}'.
+ # @brief Enumerate the entries of @a listt as modules, yielding those whose
+ # name exists in @a dictt and is the name of a valid module in @a modules
+ def _enumerate_rpi_modules(self, listt, dictt, modules):
+ for x in listt:
+ name = x.replace('rpi.', '')
+ if name not in dictt:
+ continue
+ module = utils.get_module_by_type_name(modules, name)
+ if module is not None:
+ yield module
+
+ def _valid_macbeth_option(self, value):
+ if not isinstance(value, dict):
+ return False
+
+ if list(value.keys()) != ['small', 'show']:
+ return False
+
+ for val in value.values():
+ if not isinstance(val, numbers.Number):
+ return False
+
+ return True
+
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ with open(config_file, 'r') as config_json:
+ config = json.load(config_json)
+
+ disable = []
+ for module in self._enumerate_rpi_modules(config['disable'], config, modules):
+ disable.append(module)
+ # Remove the disabled module's config too
+ config.pop(module.type)
+ config.pop('disable')
+
+ # In the raspberrypi config format, 'plot' maps to a list of module
+ # names that should be plotted. In libtuning each module carries its
+ # own plot information, so do this conversion.
+
+ for module in self._enumerate_rpi_modules(config['plot'], config, modules):
+ # It's fine to set the value of a potentially disabled module, as
+ # the object still exists at this point
+ module.appendValue('debug', 'plot')
+ config.pop('plot')
+
+ # Convert the keys from module name to module instance
+
+ new_config = {}
+
+ for module_name in config:
+ module = utils.get_module_by_type_name(modules, module_name)
+ if module is not None:
+ new_config[module] = config[module_name]
+
+ new_config['general'] = {}
+
+ if 'blacklevel' in config:
+ if not isinstance(config['blacklevel'], numbers.Number):
+ raise TypeError('Config "blacklevel" must be a number')
+ # Raspberry Pi's ctt config has magic blacklevel value -1 to mean
+ # "get it from the image metadata". Since we already do that in
+ # Image, don't save it to the config here.
+ if config['blacklevel'] >= 0:
+ new_config['general']['blacklevel'] = config['blacklevel']
+
+ if 'macbeth' in config:
+ if not self._valid_macbeth_option(config['macbeth']):
+ raise TypeError('Config "macbeth" must be a dict: {"small": number, "show": number}')
+ new_config['general']['macbeth'] = config['macbeth']
+ else:
+ new_config['general']['macbeth'] = {'small': 0, 'show': 0}
+
+ return new_config, disable
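For reference, the ctt-style input this parser expects looks roughly like the following once json.load() has turned it into a dict; the values are invented for illustration:

rpi_config = {
    'disable': [],                       # e.g. ['rpi.alsc'] to skip a module
    'plot': ['rpi.alsc'],                # modules whose debug plots are wanted
    'blacklevel': -1,                    # -1 means "take it from the image metadata"
    'macbeth': {'small': 0, 'show': 0},
    'alsc': {'do_alsc_colour': 1, 'luminance_strength': 0.5},
}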
diff --git a/utils/tuning/libtuning/parsers/yaml_parser.py b/utils/tuning/libtuning/parsers/yaml_parser.py
new file mode 100644
index 00000000..1fa6b7a8
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/yaml_parser.py
@@ -0,0 +1,20 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Parser for YAML format config file
+
+from .parser import Parser
+import yaml
+
+
+class YamlParser(Parser):
+ def __init__(self):
+ super().__init__()
+
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ # Dummy implementation that just reads the file
+ with open(config_file, 'r') as f:
+ config = yaml.safe_load(f)
+
+ return config, []
diff --git a/utils/tuning/libtuning/smoothing.py b/utils/tuning/libtuning/smoothing.py
new file mode 100644
index 00000000..de4d920c
--- /dev/null
+++ b/utils/tuning/libtuning/smoothing.py
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Wrapper for cv2 smoothing functions to enable duck-typing
+
+import cv2
+
+
+# @brief Wrapper for cv2 smoothing functions so that they can be duck-typed
+class Smoothing(object):
+ def __init__(self):
+ pass
+
+ def smoothing(self, src):
+ raise NotImplementedError
+
+
+class MedianBlur(Smoothing):
+ def __init__(self, ksize):
+ self.ksize = ksize
+
+ def smoothing(self, src):
+ return cv2.medianBlur(src.astype('float32'), self.ksize).astype('float64')
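Because the wrapper only requires a smoothing(src) method, other cv2 filters can be duck-typed the same way; a hypothetical Gaussian variant (not part of this patch) could look like this:

import cv2

from libtuning.smoothing import Smoothing


class GaussianBlur(Smoothing):
    def __init__(self, ksize, sigma=0):
        self.ksize = ksize
        self.sigma = sigma

    def smoothing(self, src):
        return cv2.GaussianBlur(src.astype('float32'),
                                (self.ksize, self.ksize),
                                self.sigma).astype('float64')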
diff --git a/utils/tuning/libtuning/utils.py b/utils/tuning/libtuning/utils.py
new file mode 100644
index 00000000..e35cf409
--- /dev/null
+++ b/utils/tuning/libtuning/utils.py
@@ -0,0 +1,186 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Utilities for libtuning
+
+import cv2
+import decimal
+import math
+import numpy as np
+import os
+from pathlib import Path
+import re
+import sys
+import logging
+
+import libtuning as lt
+from libtuning.image import Image
+from .macbeth import locate_macbeth
+
+logger = logging.getLogger(__name__)
+
+# Utility functions
+
+
+def get_module_by_type_name(modules, name):
+ for module in modules:
+ if module.type == name:
+ return module
+ return None
+
+
+# Private utility functions
+
+
+def _list_image_files(directory):
+ d = Path(directory)
+ files = [d.joinpath(f) for f in os.listdir(d)
+ if re.search(r'\.(jpe?g$)|(dng$)', f)]
+ files.sort()
+ return files
+
+
+def _parse_image_filename(fn: Path):
+ lsc_only = False
+ color_temperature = None
+ lux = None
+
+ parts = fn.stem.split('_')
+ for part in parts:
+ if part == 'alsc':
+ lsc_only = True
+ continue
+ r = re.match(r'(\d+)[kK]', part)
+ if r:
+ color_temperature = int(r.group(1))
+ continue
+ r = re.match(r'(\d+)[lLuU]', part)
+ if r:
+ lux = int(r.group(1))
+
+ if color_temperature is None:
+ logger.error(f'The file name of "{fn.name}" does not contain a color temperature')
+
+ if lux is None and lsc_only is False:
+ logger.error(f'The file name of "{fn.name}" must either contain alsc or a lux level')
+
+ return color_temperature, lux, lsc_only
+
+
+# \todo Implement this from check_imgs() in ctt.py
+def _validate_images(images):
+ return True
+
+
+# Public utility functions
+
+
+# @brief Load images into a single list of Image instances
+# @param input_dir Directory from which to load image files
+# @param config Configuration dictionary
+# @param load_nonlsc Whether or not to load non-lsc images
+# @param load_lsc Whether or not to load lsc-only images
+# @return A list of Image instances
+def load_images(input_dir: str, config: dict, load_nonlsc: bool, load_lsc: bool) -> list:
+ files = _list_image_files(input_dir)
+ if len(files) == 0:
+ logger.error(f'No images found in {input_dir}')
+ return None
+
+ images = []
+ for f in files:
+ color, lux, lsc_only = _parse_image_filename(f)
+
+ if color is None:
+ logger.warning(f'Ignoring "{f.name}" as it has no associated color temperature')
+ continue
+
+ logger.info(f'Process image "{f.name}" (color={color}, lux={lux}, lsc_only={lsc_only})')
+
+ # Skip lsc image if we don't need it
+ if lsc_only and not load_lsc:
+ logger.warning(f'Skipping {f.name} as this tuner has no LSC module')
+ continue
+
+ # Skip non-lsc image if we don't need it
+ if not lsc_only and not load_nonlsc:
+ logger.warning(f'Skipping {f.name} as this tuner only has an LSC module')
+ continue
+
+ # Load image
+ try:
+ image = Image(f)
+ except Exception as e:
+ logger.error(f'Failed to load image {f.name}: {e}')
+ continue
+
+ # Populate simple fields
+ image.lsc_only = lsc_only
+ image.color = color
+ image.lux = lux
+
+ # The black level comes from the TIFF tags, but it can be overridden by
+ # the config file.
+ if 'blacklevel' in config['general']:
+ image.blacklevel_16 = config['general']['blacklevel']
+
+ if lsc_only:
+ images.append(image)
+ continue
+
+ # Handle macbeth
+ macbeth = locate_macbeth(image, config)
+ if macbeth is None:
+ continue
+
+ images.append(image)
+
+ if not _validate_images(images):
+ return None
+
+ return images
+
+
+
+"""
+Some code that will save virtual macbeth charts showing the difference between optimised and non-optimised matrices
+
+The function creates an image that is 1550 pixels wide by 1050 pixels high, and fills it with patches which are 200x200 pixels in size
+Each patch contains the ideal color, the color from the original matrix, and the color from the final matrix
+_________________
+|               |
+|  Ideal Color  |
+|_______________|
+|  Old  |  New  |
+| Color | Color |
+|_______|_______|
+
+A nice way of showing how the optimisation changes the colors and the color matrices
+"""
+def visualise_macbeth_chart(macbeth_rgb, original_rgb, new_rgb, output_filename):
+ image = np.zeros((1050, 1550, 3), dtype=np.uint8)
+ colorindex = -1
+ for y in range(6):
+ for x in range(4): # Creates 6 x 4 grid of macbeth chart
+ colorindex += 1
+ xlocation = 50 + 250 * x # Means there is 50px of black gap between each square, more like the real macbeth chart.
+ ylocation = 50 + 250 * y
+ for g in range(200):
+ for i in range(100):
+ image[xlocation + i, ylocation + g] = macbeth_rgb[colorindex]
+ xlocation = 150 + 250 * x
+ ylocation = 50 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = original_rgb[colorindex] # Smaller squares below to compare the old colors with the new ones
+ xlocation = 150 + 250 * x
+ ylocation = 150 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = new_rgb[colorindex]
+
+ im_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
+ cv2.imwrite(f'{output_filename} Generated Macbeth Chart.png', im_bgr)
+
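As a quick illustration of the file name convention implemented by _parse_image_filename() above: an 'alsc' token marks an LSC-only image, a number followed by 'k'/'K' gives the colour temperature, and a number followed by 'l'/'L'/'u'/'U' gives the lux level. The file names below are invented:

from pathlib import Path

from libtuning.utils import _parse_image_filename

_parse_image_filename(Path('macbeth_3000k_800l.dng'))  # -> (3000, 800, False)
_parse_image_filename(Path('alsc_5000k.dng'))          # -> (5000, None, True)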
diff --git a/utils/tuning/raspberrypi/__init__.py b/utils/tuning/raspberrypi/__init__.py
new file mode 100644
index 00000000..9ccabb0e
--- /dev/null
+++ b/utils/tuning/raspberrypi/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
diff --git a/utils/tuning/raspberrypi/alsc.py b/utils/tuning/raspberrypi/alsc.py
new file mode 100644
index 00000000..ba8fc9e1
--- /dev/null
+++ b/utils/tuning/raspberrypi/alsc.py
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# ALSC module instance for Raspberry Pi tuning scripts
+
+import libtuning as lt
+from libtuning.modules.lsc import ALSCRaspberryPi
+
+ALSC = \
+ ALSCRaspberryPi(do_color=lt.Param('do_alsc_colour', lt.Param.Mode.Optional, True),
+ luminance_strength=lt.Param('luminance_strength', lt.Param.Mode.Optional, 0.5),
+ debug=[lt.Debug.Plot],
+ sector_shape=(16, 12),
+ sector_x_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_y_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_average_function=lt.average.Mean(),
+ smoothing_function=lt.smoothing.MedianBlur(3),
+ )
diff --git a/utils/tuning/raspberrypi_alsc_only.py b/utils/tuning/raspberrypi_alsc_only.py
new file mode 100755
index 00000000..777d8007
--- /dev/null
+++ b/utils/tuning/raspberrypi_alsc_only.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Tuning script for raspberrypi, ALSC only
+
+import sys
+
+import libtuning as lt
+from libtuning.parsers import RaspberryPiParser
+from libtuning.generators import RaspberryPiOutput
+
+from raspberrypi.alsc import ALSC
+
+tuner = lt.Tuner('Raspberry Pi (ALSC only)')
+tuner.add(ALSC)
+tuner.set_input_parser(RaspberryPiParser())
+tuner.set_output_formatter(RaspberryPiOutput())
+tuner.set_output_order([ALSC])
+
+if __name__ == '__main__':
+ sys.exit(tuner.run(sys.argv))
diff --git a/utils/tuning/requirements.txt b/utils/tuning/requirements.txt
new file mode 100644
index 00000000..3705769b
--- /dev/null
+++ b/utils/tuning/requirements.txt
@@ -0,0 +1,9 @@
+coloredlogs
+matplotlib
+numpy
+opencv-python
+py3exiv2
+pyyaml
+rawpy
+scikit-learn
+scipy
diff --git a/utils/tuning/rkisp1.py b/utils/tuning/rkisp1.py
new file mode 100755
index 00000000..9f40fd8b
--- /dev/null
+++ b/utils/tuning/rkisp1.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+# Copyright (C) 2024, Ideas On Board
+#
+# Tuning script for rkisp1
+
+import coloredlogs
+import logging
+import sys
+
+import libtuning as lt
+from libtuning.parsers import YamlParser
+from libtuning.generators import YamlOutput
+from libtuning.modules.lsc import LSCRkISP1
+from libtuning.modules.agc import AGCRkISP1
+from libtuning.modules.awb import AWBRkISP1
+from libtuning.modules.ccm import CCMRkISP1
+from libtuning.modules.static import StaticModule
+
+coloredlogs.install(level=logging.INFO, fmt='%(name)s %(levelname)s %(message)s')
+
+agc = AGCRkISP1(debug=[lt.Debug.Plot])
+awb = AWBRkISP1(debug=[lt.Debug.Plot])
+blc = StaticModule('BlackLevelCorrection')
+ccm = CCMRkISP1(debug=[lt.Debug.Plot])
+color_processing = StaticModule('ColorProcessing')
+filter = StaticModule('Filter')
+gamma_out = StaticModule('GammaOutCorrection', {'gamma': 2.2})
+lsc = LSCRkISP1(debug=[lt.Debug.Plot],
+ # This is for the actual LSC tuning, and is part of the base LSC
+ # module. rkisp1's table sector sizes (16x16 programmed as mirrored
+ # 8x8) are separate, and are hardcoded in its specific LSC tuning
+ # module.
+ sector_shape=(17, 17),
+
+ sector_x_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_y_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+
+ # This is the function that will be used to average the pixels in
+ # each sector. This can also be a custom function.
+ sector_average_function=lt.average.Mean(),
+
+ # This is the function that will be used to smooth the color ratio
+ # values. This can also be a custom function.
+ smoothing_function=lt.smoothing.MedianBlur(3),)
+
+tuner = lt.Tuner('RkISP1')
+tuner.add([agc, awb, blc, ccm, color_processing, filter, gamma_out, lsc])
+tuner.set_input_parser(YamlParser())
+tuner.set_output_formatter(YamlOutput())
+tuner.set_output_order([agc, awb, blc, ccm, color_processing,
+ filter, gamma_out, lsc])
+
+if __name__ == '__main__':
+ sys.exit(tuner.run(sys.argv))
diff --git a/utils/update-kernel-headers.sh b/utils/update-kernel-headers.sh
new file mode 100755
index 00000000..9a64dfb5
--- /dev/null
+++ b/utils/update-kernel-headers.sh
@@ -0,0 +1,90 @@
+#!/bin/sh
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Update the kernel headers copy from a kernel source tree
+
+if [ $# != 1 ] ; then
+ echo "Usage: $0 <kernel dir>"
+ exit 1
+fi
+
+header_dir="$(dirname "$(realpath "$0")")/../include/linux"
+kernel_dir="$(realpath "$1")"
+
+# Bail out if the directory doesn't contain kernel sources
+line=$(head -3 "${kernel_dir}/Kbuild" 2>/dev/null | tail -1)
+if [ "$line" != "# Kbuild for top-level directory of the kernel" ] ; then
+ echo "Directory ${kernel_dir} doesn't contain a kernel source tree"
+ exit 1
+fi
+
+if [ ! -e "${kernel_dir}/.git" ] ; then
+ echo "Directory ${kernel_dir} doesn't contain a git tree"
+ exit 1
+fi
+
+# Check the kernel version, and reject dirty trees
+version=$(git -C "${kernel_dir}" describe --dirty)
+echo $version
+if echo "${version}" | grep -q dirty ; then
+ echo "Kernel tree in ${kernel_dir} is dirty"
+ exit 1
+fi
+
+# Install the headers to a temporary directory
+install_dir=$(mktemp -d)
+if [ ! -d "${install_dir}" ] ; then
+ echo "Failed to create temporary directory"
+ exit 1
+fi
+
+trap "rm -rf ${install_dir}" EXIT
+
+set -e
+make -C "${kernel_dir}" O="${install_dir}" headers_install
+set +e
+
+# Copy the headers
+headers="
+ drm/drm_fourcc.h
+ linux/dma-buf.h
+ linux/dma-heap.h
+ linux/media-bus-format.h
+ linux/media.h
+ linux/rkisp1-config.h
+ linux/udmabuf.h
+ linux/v4l2-common.h
+ linux/v4l2-controls.h
+ linux/v4l2-mediabus.h
+ linux/v4l2-subdev.h
+ linux/videodev2.h
+"
+
+for header in $headers ; do
+ name=$(basename "${header}")
+ cp "${install_dir}/usr/include/${header}" "${header_dir}/${name}"
+done
+
+# The IPU3 header is a special case, as it's stored in staging. Handle it
+# manually.
+(cd "${install_dir}" ; "${kernel_dir}/scripts/headers_install.sh" \
+ "${kernel_dir}/drivers/staging/media/ipu3/include/uapi/intel-ipu3.h" \
+ "${header_dir}/intel-ipu3.h")
+
+# Update the README file
+cat <<EOF > "${header_dir}/README"
+# SPDX-License-Identifier: CC0-1.0
+
+Files in this directory are imported from ${version} of the Linux kernel. Do not
+modify them manually.
+EOF
+
+# Cleanup
+rm -rf "${install_dir}"
+
+cat <<EOF
+----------------------------------------------------------------------
+Kernel headers updated. Please review and up-port local changes before
+committing.
+----------------------------------------------------------------------
+EOF
diff --git a/utils/update-mojo.sh b/utils/update-mojo.sh
new file mode 100755
index 00000000..09c8ff5b
--- /dev/null
+++ b/utils/update-mojo.sh
@@ -0,0 +1,90 @@
+#!/bin/sh
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Update mojo copy from a chromium source tree
+
+set -e
+
+if [ $# != 1 ] ; then
+ echo "Usage: $0 <chromium dir>"
+ exit 1
+fi
+
+ipc_dir="$(dirname "$(realpath "$0")")/ipc"
+chromium_dir="$(realpath "$1")"
+
+cd "${ipc_dir}/../../"
+
+# Reject dirty libcamera trees
+if [ -n "$(git status --porcelain -uno)" ] ; then
+ echo "libcamera tree is dirty"
+ exit 1
+fi
+
+if [ ! -d "${chromium_dir}/mojo" ] ; then
+ echo "Directory ${chromium_dir} doesn't contain mojo"
+ exit 1
+fi
+
+if [ ! -d "${chromium_dir}/.git" ] ; then
+ echo "Directory ${chromium_dir} doesn't contain a git tree"
+ exit 1
+fi
+
+# Get the chromium commit id
+version=$(git -C "${chromium_dir}" rev-parse --short HEAD)
+
+# Reject dirty chromium trees
+if [ -n "$(git -C "${chromium_dir}" status --porcelain)" ] ; then
+ echo "Chromium tree in ${chromium_dir} is dirty"
+ exit 1
+fi
+
+# Remove the previously imported files.
+rm -rf utils/ipc/mojo/
+rm -rf utils/ipc/tools/
+
+# Copy the diagnosis file
+mkdir -p utils/ipc/tools/diagnosis/
+cp "${chromium_dir}/tools/diagnosis/crbug_1001171.py" utils/ipc/tools/diagnosis/
+
+# Copy the rest of mojo
+mkdir -p utils/ipc/mojo/public/
+cp "${chromium_dir}/mojo/public/LICENSE" utils/ipc/mojo/public/
+
+(
+ cd "${chromium_dir}" || exit
+ find ./mojo/public/tools -type f \
+ -not -path "*/generators/*" \
+ -not -path "*/fuzzers/*" \
+ -exec cp --parents "{}" "${ipc_dir}" ";"
+)
+
+# Update the README files
+readme=$(cat <<EOF
+# SPDX-License-Identifier: CC0-1.0
+
+Files in this directory are imported from ${version} of Chromium. Do not
+modify them manually.
+EOF
+)
+
+echo "$readme" > utils/ipc/mojo/README
+echo "$readme" > utils/ipc/tools/README
+
+# Commit the update. Use 'git commit -n' to avoid checkstyle pre-commit hook
+# failures, as mojo doesn't comply with the Python coding style enforced by
+# checkstyle.py.
+git add utils/ipc/mojo/
+git add utils/ipc/tools/
+
+echo "utils: ipc: Update mojo
+
+Update mojo from commit
+
+$(git -C "${chromium_dir}" show --pretty='%H "%s"' --no-patch)
+
+from the Chromium repository.
+
+The update-mojo.sh script was used for this update." | \
+git commit -n -s -F -