Diffstat (limited to 'utils')
-rwxr-xr-x  utils/abi-compat.sh | 212
-rwxr-xr-x  utils/checkstyle.py | 238
-rwxr-xr-x  utils/gen-controls.py | 301
-rwxr-xr-x  utils/gen-formats.py | 2
-rwxr-xr-x  utils/gen-header.sh | 2
-rwxr-xr-x  utils/gen-ipa-priv-key.sh | 2
-rwxr-xr-x  utils/gen-ipa-pub-key.py | 2
-rwxr-xr-x  utils/gen-version.sh | 8
-rwxr-xr-x  utils/hooks/pre-push | 12
-rwxr-xr-x  utils/ipc/extract-docs.py | 10
-rwxr-xr-x  utils/ipc/generate.py | 14
-rw-r--r--  utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl | 4
-rw-r--r--  utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl | 2
-rw-r--r--  utils/ipc/generators/libcamera_templates/definition_functions.tmpl | 2
-rw-r--r--  utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl | 4
-rw-r--r--  utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl | 10
-rw-r--r--  utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl | 5
-rw-r--r--  utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl | 2
-rw-r--r--  utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl | 2
-rw-r--r--  utils/ipc/generators/libcamera_templates/proxy_functions.tmpl | 22
-rw-r--r--  utils/ipc/generators/libcamera_templates/serializer.tmpl | 6
-rw-r--r--  utils/ipc/generators/mojom_libcamera_generator.py | 49
-rw-r--r--  utils/ipc/mojo/README | 2
-rw-r--r--  utils/ipc/mojo/public/LICENSE | 2
-rw-r--r--  utils/ipc/mojo/public/tools/BUILD.gn | 8
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/BUILD.gn | 36
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/README.md | 239
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/__init__.py | 0
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py | 170
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py | 194
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py | 34
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py | 62
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py | 173
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py | 102
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py | 254
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni | 51
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/compile_typescript.py | 27
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/concatenate-files.py | 5
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py | 10
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py | 36
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py | 2
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py | 4
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/minify_with_terser.py | 47
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/mojom.gni | 845
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py | 62
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py | 6
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py | 119
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py | 5
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/BUILD.gn | 18
-rwxr-xr-x  utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py | 69
-rwxr-xr-x  utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py | 87
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/const_unittest.py | 2
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/enum_unittest.py | 30
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/feature_unittest.py | 84
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn | 3
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/error.py | 2
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py | 3
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py | 7
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/check.py | 26
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py | 93
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py | 11
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py | 9
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py | 787
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py | 2
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py | 151
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py | 30
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py | 2
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py | 464
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py | 82
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py | 145
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py | 12
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py | 21
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py | 155
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py | 8
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py | 10
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py | 108
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py | 39
-rwxr-xr-x  utils/ipc/mojo/public/tools/mojom/mojom_parser.py | 119
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py | 6
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py | 31
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py | 2
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/union_unittest.py | 44
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py | 73
-rwxr-xr-x  utils/ipc/mojo/public/tools/run_all_python_unittests.py | 8
-rwxr-xr-x  utils/ipc/parser.py | 4
-rw-r--r--  utils/ipc/tools/README | 2
-rw-r--r--  utils/ipc/tools/diagnosis/crbug_1001171.py | 2
-rwxr-xr-x  utils/ipu3/ipu3-capture.sh | 5
-rw-r--r--  utils/ipu3/ipu3-pack.c | 101
-rwxr-xr-x  utils/ipu3/ipu3-process.sh | 2
-rw-r--r--  utils/ipu3/ipu3-unpack.c | 7
-rw-r--r--  utils/ipu3/meson.build | 1
-rwxr-xr-x  utils/raspberrypi/ctt/alsc_only.py | 34
-rw-r--r--  utils/raspberrypi/ctt/colors.py | 30
-rwxr-xr-x  utils/raspberrypi/ctt/convert_tuning.py | 46
-rwxr-xr-x  utils/raspberrypi/ctt/ctt.py | 41
-rw-r--r--  utils/raspberrypi/ctt/ctt_alsc.py | 6
-rw-r--r--  utils/raspberrypi/ctt/ctt_awb.py | 4
-rw-r--r--  utils/raspberrypi/ctt/ctt_ccm.py | 262
-rw-r--r--  utils/raspberrypi/ctt/ctt_geq.py | 4
-rw-r--r--  utils/raspberrypi/ctt/ctt_image_load.py | 41
-rw-r--r--  utils/raspberrypi/ctt/ctt_lux.py | 4
-rw-r--r--  utils/raspberrypi/ctt/ctt_macbeth_locator.py | 73
-rw-r--r--  utils/raspberrypi/ctt/ctt_noise.py | 4
-rwxr-xr-x [-rw-r--r--]  utils/raspberrypi/ctt/ctt_pretty_print_json.py | 194
-rw-r--r--  utils/raspberrypi/ctt/ctt_ransac.py | 4
-rw-r--r--  utils/raspberrypi/ctt/ctt_tools.py | 4
-rw-r--r--  utils/raspberrypi/ctt/ctt_visualise.py | 43
-rw-r--r--  utils/raspberrypi/delayedctrls_parse.py | 2
-rwxr-xr-x  utils/release.sh | 46
-rwxr-xr-x  utils/rkisp1/gen-csc-table.py | 215
-rwxr-xr-x  utils/rkisp1/rkisp1-capture.sh | 64
-rwxr-xr-x  utils/semver | 446
-rwxr-xr-x  utils/tracepoints/analyze-ipa-trace.py | 2
-rwxr-xr-x  utils/tracepoints/gen-tp-header.py | 15
-rw-r--r--  utils/tuning/README.rst | 11
-rw-r--r--  utils/tuning/libtuning/__init__.py | 13
-rw-r--r--  utils/tuning/libtuning/average.py | 21
-rw-r--r--  utils/tuning/libtuning/generators/__init__.py | 6
-rw-r--r--  utils/tuning/libtuning/generators/generator.py | 15
-rw-r--r--  utils/tuning/libtuning/generators/raspberrypi_output.py | 114
-rw-r--r--  utils/tuning/libtuning/generators/yaml_output.py | 123
-rw-r--r--  utils/tuning/libtuning/gradient.py | 75
-rw-r--r--  utils/tuning/libtuning/image.py | 136
-rw-r--r--  utils/tuning/libtuning/libtuning.py | 208
-rw-r--r--  utils/tuning/libtuning/macbeth.py | 516
-rw-r--r--  utils/tuning/libtuning/macbeth_ref.pgm | 6
-rw-r--r--  utils/tuning/libtuning/modules/__init__.py | 3
-rw-r--r--  utils/tuning/libtuning/modules/lsc/__init__.py | 7
-rw-r--r--  utils/tuning/libtuning/modules/lsc/lsc.py | 72
-rw-r--r--  utils/tuning/libtuning/modules/lsc/raspberrypi.py | 246
-rw-r--r--  utils/tuning/libtuning/modules/lsc/rkisp1.py | 112
-rw-r--r--  utils/tuning/libtuning/modules/module.py | 32
-rw-r--r--  utils/tuning/libtuning/parsers/__init__.py | 6
-rw-r--r--  utils/tuning/libtuning/parsers/parser.py | 21
-rw-r--r--  utils/tuning/libtuning/parsers/raspberrypi_parser.py | 93
-rw-r--r--  utils/tuning/libtuning/parsers/yaml_parser.py | 17
-rw-r--r--  utils/tuning/libtuning/smoothing.py | 24
-rw-r--r--  utils/tuning/libtuning/utils.py | 125
-rw-r--r--  utils/tuning/raspberrypi/__init__.py | 3
-rw-r--r--  utils/tuning/raspberrypi/alsc.py | 19
-rwxr-xr-x  utils/tuning/raspberrypi_alsc_only.py | 23
-rwxr-xr-x  utils/tuning/rkisp1.py | 40
-rwxr-xr-x  utils/update-kernel-headers.sh | 2
-rwxr-xr-x  utils/update-mojo.sh | 52
145 files changed, 8203 insertions, 1795 deletions
diff --git a/utils/abi-compat.sh b/utils/abi-compat.sh
new file mode 100755
index 00000000..c936ac05
--- /dev/null
+++ b/utils/abi-compat.sh
@@ -0,0 +1,212 @@
+#!/bin/bash
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Generate and compare the ABI compatibility of two libcamera versions
+
+name=$(basename "$0")
+
+usage() {
+ cat << EOF
+$name: Determine the ABI/API compatibility of two build versions
+
+ $name [--help] [--abi-dir=<PATH>] [--tmp-dir=<PATH>] ARGS
+
+The positional arguments (ARGS) determine the versions that will be compared and
+can be given in three variants:
+
+ - No positional arguments:
+ $name [optional arguments]
+
+ The current git HEAD is compared against the most recent TAG.
+
+ - One positional argument:
+ $name [optional arguments] COMMITISH
+
+ The given COMMITISH is compared against its most recent TAG.
+
+ - Two positional arguments:
+ $name [optional arguments] BASE COMMITISH
+
+ The given COMMITISH is compared against the given BASE.
+
+Optional Arguments:
+ --abi-dir <path> Use <path> for storing (or retrieving existing) ABI data
+ files
+
+ --tmp-dir <path> Specify temporary build location for building ABI data.
+ This could be a tmpfs/RAM disk to save on disk writes.
+EOF
+}
+
+dbg () {
+ echo "$@" >&2
+}
+
+die () {
+ echo "$name: $*" >&2
+ exit 1
+}
+
+describe () {
+ git describe --tags "$1" \
+ || die "Failed to describe $1"
+}
+
+prev_release () {
+ git describe --tags --abbrev=0 "$1"^ \
+ || die "Failed to identify previous release tag from $1"
+}
+
+# Make sure we exit on errors during argument parsing.
+set -Eeuo pipefail
+
+positional=()
+while [[ $# -gt 0 ]] ; do
+ option="$1"
+ shift
+
+ case $option in
+ -h|--help)
+ usage
+ exit 0
+ ;;
+
+ --abi-dir)
+ abi_dir=$1
+ shift
+ ;;
+
+ --tmp-dir)
+ tmp=$1
+ shift
+ ;;
+
+ -*)
+ die "Unrecognised argument $option"
+ ;;
+
+ *) # Parse unidentified arguments based on position.
+ positional+=("$option")
+ ;;
+ esac
+done
+set -- "${positional[@]}" # restore positional parameters.
+
+# Parse positional arguments.
+case $# in
+ 0) # Check HEAD against previous 'release'.
+ from=$(prev_release HEAD)
+ to=$(describe HEAD)
+ ;;
+
+ 1) # Check COMMIT against previous release.
+ from=$(prev_release "$1")
+ to=$(describe "$1")
+ ;;
+
+ 2) # Check ABI between FROM and TO explicitly.
+ from=$(describe "$1")
+ to=$(describe "$2")
+ ;;
+
+ *)
+ die "Invalid arguments"
+ ;;
+esac
+
+if ! which abi-compliance-checker; then
+ die "This tool requires 'abi-compliance-checker' to be installed."
+fi
+
+
+abi_dir=${abi_dir:-abi}
+tmp=${tmp:-"$abi_dir/tmp/"}
+
+echo "Validating ABI compatibility between $from and $to"
+
+mkdir -p "$abi_dir"
+mkdir -p "$tmp"
+
+# Generate an abi-compliance-checker xml description file.
+create_xml() {
+ local output="$1"
+ local version="$2"
+ local root="$3"
+
+ echo "<version>$version</version>" > "$output"
+ echo "<headers>$root/usr/local/include/</headers>" >> "$output"
+ echo "<libs>$root/usr/local/lib/</libs>" >> "$output"
+}
+
+# Check if an ABI dump file exists, and if not create one by building a minimal
+# configuration of libcamera at the specified version using a clean worktree.
+create_abi_dump() {
+ local version="$1"
+ local abi_file="$abi_dir/$version.abi.dump"
+ local worktree="$tmp/$version"
+ local build="$tmp/$version-build"
+
+ # Use a fully qualified path when calling ninja -C.
+ install=$(realpath "$tmp/$version-install")
+
+ if [[ ! -e "$abi_file" ]] ; then
+ dbg "Creating ABI dump for $version in $abi_dir"
+ git worktree add --force "$worktree" "$version"
+
+ # Generate a minimal libcamera build. "lib" and "prefix" are
+ # defined explicitly to avoid system default ambiguities.
+ meson setup "$build" "$worktree" \
+ -Dlibdir=lib \
+ -Dprefix=/usr/local/ \
+ -Ddocumentation=disabled \
+ -Dcam=disabled \
+ -Dqcam=disabled \
+ -Dgstreamer=disabled \
+ -Dlc-compliance=disabled \
+ -Dtracing=disabled \
+ -Dpipelines=
+
+ ninja -C "$build"
+ DESTDIR="$install" ninja -C "$build" install
+
+ # Create an xml descriptor with parameters to generate the dump file.
+ create_xml \
+ "$install/libcamera-abi-dump.xml" \
+ "$version" \
+ "$install"
+
+ abi-compliance-checker \
+ -lib libcamera \
+ -v1 "$version" \
+ -dump "$install/libcamera-abi-dump.xml" \
+ -dump-path "$abi_file"
+
+ dbg Created "$abi_file"
+
+ dbg Removing Worktree "$worktree"
+ git worktree remove -f "$worktree"
+
+ dbg Removing "$build"
+ rm -r "$build"
+
+ dbg Removing "$install"
+ rm -r "$install"
+ fi
+}
+
+# Create the requested ABI dump files if they don't yet exist.
+create_abi_dump "$from"
+create_abi_dump "$to"
+
+# TODO: Future iterations and extensions here could add "-stdout -xml" and
+# parse the results automatically.
+abi-compliance-checker -l libcamera \
+ -old "$abi_dir/$from.abi.dump" \
+ -new "$abi_dir/$to.abi.dump"
+
+# On (far too many) occasions, the tool keeps running, leaving a CPU core at
+# 100% usage. Perhaps some subprocess gets launched but never rejoined. Stop
+# them all.
+#
+# TODO: Investigate this and report upstream.
+killall abi-compliance-checker 2>/dev/null
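
All three invocation variants reduce to the same two git queries: `git describe` to name the version under test, and `git describe --abbrev=0` on the parent commit to find the previous release. A minimal Python sketch of that resolution, assuming it runs inside a libcamera checkout with release tags (the function names here are illustrative, not part of the script):

    import subprocess

    def git_describe(rev):
        # Equivalent of the script's describe(): a human-readable name
        # such as 'v0.3.0-37-g0123abcd' for the given commit.
        return subprocess.run(['git', 'describe', '--tags', rev],
                              capture_output=True, text=True,
                              check=True).stdout.strip()

    def git_prev_release(rev):
        # Equivalent of prev_release(): the most recent tag reachable
        # from the parent of rev, i.e. the previous release.
        return subprocess.run(['git', 'describe', '--tags', '--abbrev=0',
                               rev + '^'], capture_output=True, text=True,
                              check=True).stdout.strip()

    # No positional arguments: compare HEAD against its previous release.
    frm, to = git_prev_release('HEAD'), git_describe('HEAD')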
diff --git a/utils/checkstyle.py b/utils/checkstyle.py
index f0248d65..4e287b2e 100755
--- a/utils/checkstyle.py
+++ b/utils/checkstyle.py
@@ -1,10 +1,10 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2018, Google Inc.
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# checkstyle.py - A patch style checker script based on clang-format
+# A patch style checker script based on clang-format
#
# TODO:
#
@@ -168,6 +168,12 @@ def parse_diff(diff):
hunk = DiffHunk(line)
elif hunk is not None:
+ # Work around https://github.com/python/cpython/issues/46395
+ # See https://www.gnu.org/software/diffutils/manual/html_node/Incomplete-Lines.html
+ if line[-1] != '\n':
+ hunk.append(line + '\n')
+ line = '\\ No newline at end of file\n'
+
hunk.append(line)
if hunk:
@@ -191,6 +197,9 @@ class CommitFile:
else:
self.__filename = info[1]
+ def __repr__(self):
+ return f'{self.__status} {self.__filename}'
+
@property
def filename(self):
return self.__filename
@@ -203,16 +212,30 @@ class CommitFile:
class Commit:
def __init__(self, commit):
self.commit = commit
+ self._trailers = []
self._parse()
+ def _parse_trailers(self, lines):
+ for index in range(1, len(lines)):
+ line = lines[index]
+ if not line:
+ break
+
+ self._trailers.append(line)
+
+ return index
+
def _parse(self):
# Get the commit title and list of files.
- ret = subprocess.run(['git', 'show', '--pretty=oneline', '--name-status',
+ ret = subprocess.run(['git', 'show', '--format=%s%n%(trailers:only,unfold)', '--name-status',
self.commit],
stdout=subprocess.PIPE).stdout.decode('utf-8')
- files = ret.splitlines()
- self._files = [CommitFile(f) for f in files[1:]]
- self._title = files[0]
+ lines = ret.splitlines()
+
+ self._title = lines[0]
+
+ index = self._parse_trailers(lines)
+ self._files = [CommitFile(f) for f in lines[index:] if f]
def files(self, filter='AMR'):
return [f.filename for f in self._files if f.status in filter]
@@ -221,6 +244,10 @@ class Commit:
def title(self):
return self._title
+ @property
+ def trailers(self):
+ return self._trailers
+
def get_diff(self, top_level, filename):
diff = subprocess.run(['git', 'diff', '%s~..%s' % (self.commit, self.commit),
'--', '%s/%s' % (top_level, filename)],
@@ -249,15 +276,21 @@ class StagedChanges(Commit):
return parse_diff(diff.splitlines(True))
-class Amendment(StagedChanges):
+class Amendment(Commit):
def __init__(self):
- StagedChanges.__init__(self)
+ Commit.__init__(self, '')
def _parse(self):
- # Create a title using HEAD commit
- ret = subprocess.run(['git', 'show', '--pretty=oneline', '--no-patch'],
+ # Create a title using HEAD commit and parse the trailers.
+ ret = subprocess.run(['git', 'show', '--format=%H %s%n%(trailers:only,unfold)',
+ '--no-patch'],
stdout=subprocess.PIPE).stdout.decode('utf-8')
- self._title = 'Amendment of ' + ret.strip()
+ lines = ret.splitlines()
+
+ self._title = 'Amendment of ' + lines[0].strip()
+
+ self._parse_trailers(lines)
+
# Extract the list of modified files
ret = subprocess.run(['git', 'diff', '--staged', '--name-status', 'HEAD~'],
stdout=subprocess.PIPE).stdout.decode('utf-8')
@@ -298,8 +331,10 @@ class CommitChecker(metaclass=ClassRegistry):
# Class methods
#
@classmethod
- def checkers(cls):
+ def checkers(cls, names):
for checker in cls.subclasses:
+ if names and checker.__name__ not in names:
+ continue
yield checker
@@ -313,7 +348,7 @@ class HeaderAddChecker(CommitChecker):
def check(cls, commit, top_level):
issues = []
- meson_files = [f for f in commit.files('M')
+ meson_files = [f for f in commit.files()
if os.path.basename(f) == 'meson.build']
for filename in commit.files('AR'):
@@ -352,6 +387,137 @@ class HeaderAddChecker(CommitChecker):
return issues
+class TitleChecker(CommitChecker):
+ prefix_regex = re.compile(r'^([a-zA-Z0-9_.-]+: )+')
+ release_regex = re.compile(r'libcamera v[0-9]+\.[0-9]+\.[0-9]+')
+
+ @classmethod
+ def check(cls, commit, top_level):
+ title = commit.title
+
+ # Skip the check when validating staged changes (as done through a
+ # pre-commit hook) as there is no title to check in that case.
+ if isinstance(commit, StagedChanges):
+ return []
+
+ # Ignore release commits, they don't need a prefix.
+ if TitleChecker.release_regex.fullmatch(title):
+ return []
+
+ prefix_pos = title.find(': ')
+ if prefix_pos != -1 and prefix_pos != len(title) - 2:
+ return []
+
+ # Find prefix candidates by searching the git history
+ msgs = subprocess.run(['git', 'log', '--no-decorate', '--oneline', '-n100', '--'] + commit.files(),
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ prefixes = {}
+ prefixes_count = 0
+ for msg in msgs.splitlines():
+ prefix = TitleChecker.prefix_regex.match(msg)
+ if not prefix:
+ continue
+
+ prefix = prefix.group(0)
+ if prefix in prefixes:
+ prefixes[prefix] += 1
+ else:
+ prefixes[prefix] = 1
+
+ prefixes_count += 1
+
+ if not prefixes:
+ return [CommitIssue('Commit title is missing prefix')]
+
+ # Sort the candidates by number of occurrences and pick the best ones.
+ # When multiple prefixes are possible without a clear winner, we want to
+ # display the most common options to the user, while leaving out the most
+ # unlikely ones to avoid overly long messages. As a heuristic, select
+ # enough candidates to cover at least 2/3 of the possible prefixes, but
+ # never more than 4 candidates.
+ prefixes = list(prefixes.items())
+ prefixes.sort(key=lambda x: x[1], reverse=True)
+
+ candidates = []
+ candidates_count = 0
+ for prefix in prefixes:
+ candidates.append(f"`{prefix[0]}'")
+ candidates_count += prefix[1]
+ if candidates_count >= prefixes_count * 2 / 3 or \
+ len(candidates) == 4:
+ break
+
+ candidates = candidates[:-2] + [' or '.join(candidates[-2:])]
+ candidates = ', '.join(candidates)
+
+ return [CommitIssue('Commit title is missing prefix, '
+ 'possible candidates are ' + candidates)]
+
+
+class TrailersChecker(CommitChecker):
+ commit_regex = re.compile(r'[0-9a-f]{12}[0-9a-f]* \(".*"\)')
+
+ coverity_regex = re.compile(r'Coverity CID=.*')
+
+ # Simple e-mail address validator regex, with an additional trailing
+ # comment. The complexity of a full RFC6531 validator isn't worth the
+ # additional invalid addresses it would reject.
+ email_regex = re.compile(r'[^<]+ <[^@>]+@[^>]+>( # .*)?')
+
+ link_regex = re.compile(r'https?://.*')
+
+ @staticmethod
+ def validate_reported_by(value):
+ if TrailersChecker.email_regex.fullmatch(value):
+ return True
+ if TrailersChecker.coverity_regex.fullmatch(value):
+ return True
+ return False
+
+ known_trailers = {
+ 'Acked-by': email_regex,
+ 'Bug': link_regex,
+ 'Co-developed-by': email_regex,
+ 'Fixes': commit_regex,
+ 'Link': link_regex,
+ 'Reported-by': validate_reported_by,
+ 'Reviewed-by': email_regex,
+ 'Signed-off-by': email_regex,
+ 'Suggested-by': email_regex,
+ 'Tested-by': email_regex,
+ }
+
+ trailer_regex = re.compile(r'([A-Z][a-zA-Z-]*)\s*:\s*(.*)')
+
+ @classmethod
+ def check(cls, commit, top_level):
+ issues = []
+
+ for trailer in commit.trailers:
+ match = TrailersChecker.trailer_regex.fullmatch(trailer)
+ if not match:
+ issues.append(CommitIssue(f"Malformed commit trailer '{trailer}'"))
+ continue
+
+ key, value = match.groups()
+
+ validator = TrailersChecker.known_trailers.get(key)
+ if not validator:
+ issues.append(CommitIssue(f"Invalid commit trailer key '{key}'"))
+ continue
+
+ if isinstance(validator, re.Pattern):
+ valid = bool(validator.fullmatch(value))
+ else:
+ valid = validator(value)
+
+ if not valid:
+ issues.append(CommitIssue(f"Malformed value '{value}' for commit trailer '{key}'"))
+ continue
+
+ return issues
+
+
# ------------------------------------------------------------------------------
# Style Checkers
#
@@ -366,8 +532,10 @@ class StyleChecker(metaclass=ClassRegistry):
# Class methods
#
@classmethod
- def checkers(cls, filename):
+ def checkers(cls, filename, names):
for checker in cls.subclasses:
+ if names and checker.__name__ not in names:
+ continue
if checker.supports(filename):
yield checker
@@ -401,7 +569,7 @@ class IncludeChecker(StyleChecker):
'limits', 'locale', 'setjmp', 'signal', 'stdarg', 'stddef',
'stdint', 'stdio', 'stdlib', 'string', 'time', 'uchar', 'wchar',
'wctype')
- include_regex = re.compile('^#include <c([a-z]*)>')
+ include_regex = re.compile(r'^#include <c([a-z]*)>')
def __init__(self, content):
super().__init__()
@@ -427,7 +595,7 @@ class IncludeChecker(StyleChecker):
class LogCategoryChecker(StyleChecker):
- log_regex = re.compile('\\bLOG\((Debug|Info|Warning|Error|Fatal)\)')
+ log_regex = re.compile(r'\bLOG\((Debug|Info|Warning|Error|Fatal)\)')
patterns = ('*.cpp',)
def __init__(self, content):
@@ -464,7 +632,7 @@ class MesonChecker(StyleChecker):
class Pep8Checker(StyleChecker):
patterns = ('*.py',)
- results_regex = re.compile('stdin:([0-9]+):([0-9]+)(.*)')
+ results_regex = re.compile(r'stdin:([0-9]+):([0-9]+)(.*)')
def __init__(self, content):
super().__init__()
@@ -497,7 +665,7 @@ class Pep8Checker(StyleChecker):
class ShellChecker(StyleChecker):
patterns = ('*.sh',)
- results_line_regex = re.compile('In - line ([0-9]+):')
+ results_line_regex = re.compile(r'In - line ([0-9]+):')
def __init__(self, content):
super().__init__()
@@ -547,8 +715,10 @@ class Formatter(metaclass=ClassRegistry):
# Class methods
#
@classmethod
- def formatters(cls, filename):
+ def formatters(cls, filename, names):
for formatter in cls.subclasses:
+ if names and formatter.__name__ not in names:
+ continue
if formatter.supports(filename):
yield formatter
@@ -583,7 +753,8 @@ class CLangFormatter(Formatter):
class DoxygenFormatter(Formatter):
patterns = ('*.c', '*.cpp')
- return_regex = re.compile(' +\\* +\\\\return +[a-z]')
+ oneliner_regex = re.compile(r'^ +\* +\\(brief|param|return)\b.*\.$')
+ return_regex = re.compile(r' +\* +\\return +[a-z]')
@classmethod
def format(cls, filename, data):
@@ -598,6 +769,7 @@ class DoxygenFormatter(Formatter):
lines.append(line)
continue
+ line = cls.oneliner_regex.sub(lambda m: m.group(0)[:-1], line)
line = cls.return_regex.sub(lambda m: m.group(0)[:-1] + m.group(0)[-1].upper(), line)
if line.find('*/') != -1:
@@ -643,7 +815,7 @@ class DPointerFormatter(Formatter):
class IncludeOrderFormatter(Formatter):
patterns = ('*.cpp', '*.h')
- include_regex = re.compile('^#include (["<])([^">]*)([">])')
+ include_regex = re.compile(r'^#include (["<])([^">]*)([">])')
@classmethod
def format(cls, filename, data):
@@ -710,7 +882,7 @@ class StripTrailingSpaceFormatter(Formatter):
# Style checking
#
-def check_file(top_level, commit, filename):
+def check_file(top_level, commit, filename, checkers):
# Extract the line numbers touched by the commit.
commit_diff = commit.get_diff(top_level, filename)
@@ -727,7 +899,7 @@ def check_file(top_level, commit, filename):
after = commit.get_file(filename)
formatted = after
- for formatter in Formatter.formatters(filename):
+ for formatter in Formatter.formatters(filename, checkers):
formatted = formatter.format(filename, formatted)
after = after.splitlines(True)
@@ -741,7 +913,7 @@ def check_file(top_level, commit, filename):
# Check for code issues not related to formatting.
issues = []
- for checker in StyleChecker.checkers(filename):
+ for checker in StyleChecker.checkers(filename, checkers):
checker = checker(after)
for hunk in commit_diff:
issues += checker.check(hunk.side('to').touched)
@@ -769,16 +941,17 @@ def check_file(top_level, commit, filename):
return len(formatted_diff) + len(issues)
-def check_style(top_level, commit):
- separator = '-' * len(commit.title)
+def check_style(top_level, commit, checkers):
+ title = commit.commit + ' ' + commit.title
+ separator = '-' * len(title)
print(separator)
- print(commit.title)
+ print(title)
print(separator)
issues = 0
# Apply the commit checkers first.
- for checker in CommitChecker.checkers():
+ for checker in CommitChecker.checkers(checkers):
for issue in checker.check(commit, top_level):
print('%s%s%s' % (Colours.fg(Colours.Yellow), issue.msg, Colours.reset()))
issues += 1
@@ -790,7 +963,7 @@ def check_style(top_level, commit):
files = [f for f in commit.files() if len([p for p in patterns if fnmatch.fnmatch(os.path.basename(f), p)])]
for f in files:
- issues += check_file(top_level, commit, f)
+ issues += check_file(top_level, commit, f, checkers)
if issues == 0:
print('No issue detected')
@@ -840,6 +1013,8 @@ def main(argv):
# Parse command line arguments
parser = argparse.ArgumentParser()
+ parser.add_argument('--checkers', '-c', type=str,
+ help='Specify which checkers to run as a comma-separated list. Defaults to all checkers')
parser.add_argument('--staged', '-s', action='store_true',
help='Include the changes in the index. Defaults to False')
parser.add_argument('--amend', '-a', action='store_true',
@@ -848,6 +1023,9 @@ def main(argv):
help='Revision range (as defined by git rev-parse). Defaults to HEAD if not specified.')
args = parser.parse_args(argv[1:])
+ if args.checkers:
+ args.checkers = args.checkers.split(',')
+
# Check for required dependencies.
for command, mandatory in dependencies.items():
found = shutil.which(command)
@@ -881,7 +1059,7 @@ def main(argv):
issues = 0
for commit in commits:
- issues += check_style(top_level, commit)
+ issues += check_style(top_level, commit, args.checkers)
print('')
if issues:
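
The new TrailersChecker maps each known trailer key to either a compiled regex or a callable and dispatches on that type in check(). A condensed sketch of the same flow, reusing two of the regexes added above and limited to a single key for brevity:

    import re

    email_regex = re.compile(r'[^<]+ <[^@>]+@[^>]+>( # .*)?')
    trailer_regex = re.compile(r'([A-Z][a-zA-Z-]*)\s*:\s*(.*)')
    known_trailers = {'Signed-off-by': email_regex}

    def check_trailer(trailer):
        # Split 'Key: value' and validate the value against the key's rule.
        match = trailer_regex.fullmatch(trailer)
        if not match:
            return f"Malformed commit trailer '{trailer}'"
        key, value = match.groups()
        validator = known_trailers.get(key)
        if not validator:
            return f"Invalid commit trailer key '{key}'"
        if not validator.fullmatch(value):
            return f"Malformed value '{value}' for commit trailer '{key}'"
        return None

    print(check_trailer('Signed-off-by: Jane Doe <jane@example.com>'))  # None
    print(check_trailer('Signed-off-by: Jane Doe'))  # Malformed value ...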
diff --git a/utils/gen-controls.py b/utils/gen-controls.py
index 3f99b5e2..56315f50 100755
--- a/utils/gen-controls.py
+++ b/utils/gen-controls.py
@@ -4,12 +4,113 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# gen-controls.py - Generate control definitions from YAML
+# Generate control definitions from YAML
import argparse
+from functools import reduce
+import operator
import string
import sys
import yaml
+import os
+
+
+class ControlEnum(object):
+ def __init__(self, data):
+ self.__data = data
+
+ @property
+ def description(self):
+ """The enum description"""
+ return self.__data.get('description')
+
+ @property
+ def name(self):
+ """The enum name"""
+ return self.__data.get('name')
+
+ @property
+ def value(self):
+ """The enum value"""
+ return self.__data.get('value')
+
+
+class Control(object):
+ def __init__(self, name, data, vendor):
+ self.__name = name
+ self.__data = data
+ self.__enum_values = None
+ self.__size = None
+ self.__vendor = vendor
+
+ enum_values = data.get('enum')
+ if enum_values is not None:
+ self.__enum_values = [ControlEnum(enum) for enum in enum_values]
+
+ size = self.__data.get('size')
+ if size is not None:
+ if len(size) == 0:
+ raise RuntimeError(f'Control `{self.__name}` size must have at least one dimension')
+
+ # Compute the total number of elements in the array. If any of the
+ # array dimensions is a string, the array is variable-sized.
+ num_elems = 1
+ for dim in size:
+ if type(dim) is str:
+ num_elems = 0
+ break
+
+ dim = int(dim)
+ if dim <= 0:
+ raise RuntimeError(f'Control `{self.__name}` size must have positive values only')
+
+ num_elems *= dim
+
+ self.__size = num_elems
+
+ @property
+ def description(self):
+ """The control description"""
+ return self.__data.get('description')
+
+ @property
+ def enum_values(self):
+ """The enum values, if the control is an enumeration"""
+ if self.__enum_values is None:
+ return
+ for enum in self.__enum_values:
+ yield enum
+
+ @property
+ def is_enum(self):
+ """Is the control an enumeration"""
+ return self.__enum_values is not None
+
+ @property
+ def vendor(self):
+ """The vendor string, or None"""
+ return self.__vendor
+
+ @property
+ def name(self):
+ """The control name (CamelCase)"""
+ return self.__name
+
+ @property
+ def type(self):
+ typ = self.__data.get('type')
+ size = self.__data.get('size')
+
+ if typ == 'string':
+ return 'std::string'
+
+ if self.__size is None:
+ return typ
+
+ if self.__size:
+ return f"Span<const {typ}, {self.__size}>"
+ else:
+ return f"Span<const {typ}>"
def snake_case(s):
@@ -39,47 +140,45 @@ ${description}
*/''')
enum_values_start = string.Template('''extern const std::array<const ControlValue, ${size}> ${name}Values = {''')
enum_values_values = string.Template('''\tstatic_cast<int32_t>(${name}),''')
+ name_value_map_doc = string.Template('''/**
+ * \\var ${name}NameValueMap
+ * \\brief Map of all supported $name value names (as std::string) to their values
+ */''')
+ name_value_map_start = string.Template('''extern const std::map<std::string, ${type}> ${name}NameValueMap = {''')
+ name_value_values = string.Template('''\t{ "${name}", ${name} },''')
- ctrls_doc = []
- ctrls_def = []
- draft_ctrls_doc = []
- draft_ctrls_def = []
+ ctrls_doc = {}
+ ctrls_def = {}
ctrls_map = []
for ctrl in controls:
- name, ctrl = ctrl.popitem()
- id_name = snake_case(name).upper()
+ id_name = snake_case(ctrl.name).upper()
- ctrl_type = ctrl['type']
- if ctrl_type == 'string':
- ctrl_type = 'std::string'
- elif ctrl.get('size'):
- ctrl_type = 'Span<const %s>' % ctrl_type
+ vendor = ctrl.vendor
+ if vendor not in ctrls_doc:
+ ctrls_doc[vendor] = []
+ ctrls_def[vendor] = []
info = {
- 'name': name,
- 'type': ctrl_type,
- 'description': format_description(ctrl['description']),
+ 'name': ctrl.name,
+ 'type': ctrl.type,
+ 'description': format_description(ctrl.description),
'id_name': id_name,
}
- target_doc = ctrls_doc
- target_def = ctrls_def
- if ctrl.get('draft'):
- target_doc = draft_ctrls_doc
- target_def = draft_ctrls_def
+ target_doc = ctrls_doc[vendor]
+ target_def = ctrls_def[vendor]
- enum = ctrl.get('enum')
- if enum:
+ if ctrl.is_enum:
enum_doc = []
enum_doc.append(enum_doc_start_template.substitute(info))
num_entries = 0
- for entry in enum:
+ for enum in ctrl.enum_values:
value_info = {
- 'name': name,
- 'value': entry['name'],
- 'description': format_description(entry['description']),
+ 'name': ctrl.name,
+ 'value': enum.name,
+ 'description': format_description(enum.description),
}
enum_doc.append(enum_doc_value_template.substitute(value_info))
num_entries += 1
@@ -90,75 +189,100 @@ ${description}
values_info = {
'name': info['name'],
+ 'type': ctrl.type,
'size': num_entries,
}
target_doc.append(enum_values_doc.substitute(values_info))
target_def.append(enum_values_start.substitute(values_info))
- for entry in enum:
+ for enum in ctrl.enum_values:
value_info = {
- 'name': entry['name']
+ 'name': enum.name
}
target_def.append(enum_values_values.substitute(value_info))
target_def.append("};")
+ target_doc.append(name_value_map_doc.substitute(values_info))
+ target_def.append(name_value_map_start.substitute(values_info))
+ for enum in ctrl.enum_values:
+ value_info = {
+ 'name': enum.name
+ }
+ target_def.append(name_value_values.substitute(value_info))
+ target_def.append("};")
+
target_doc.append(doc_template.substitute(info))
target_def.append(def_template.substitute(info))
- if ctrl.get('draft'):
- name = 'draft::' + name
+ vendor_ns = vendor + '::' if vendor != "libcamera" else ''
+ ctrls_map.append('\t{ ' + vendor_ns + id_name + ', &' + vendor_ns + ctrl.name + ' },')
- ctrls_map.append('\t{ ' + id_name + ', &' + name + ' },')
+ vendor_ctrl_doc_sub = []
+ vendor_ctrl_template = string.Template('''
+/**
+ * \\brief Namespace for ${vendor} controls
+ */
+namespace ${vendor} {
+
+${vendor_controls_str}
+
+} /* namespace ${vendor} */''')
+
+ for vendor in [v for v in ctrls_doc.keys() if v not in ['libcamera']]:
+ vendor_ctrl_doc_sub.append(vendor_ctrl_template.substitute({'vendor': vendor, 'vendor_controls_str': '\n\n'.join(ctrls_doc[vendor])}))
+
+ vendor_ctrl_def_sub = []
+ for vendor in [v for v in ctrls_def.keys() if v not in ['libcamera']]:
+ vendor_ctrl_def_sub.append(vendor_ctrl_template.substitute({'vendor': vendor, 'vendor_controls_str': '\n'.join(ctrls_def[vendor])}))
return {
- 'controls_doc': '\n\n'.join(ctrls_doc),
- 'controls_def': '\n'.join(ctrls_def),
- 'draft_controls_doc': '\n\n'.join(draft_ctrls_doc),
- 'draft_controls_def': '\n\n'.join(draft_ctrls_def),
+ 'controls_doc': '\n\n'.join(ctrls_doc['libcamera']),
+ 'controls_def': '\n'.join(ctrls_def['libcamera']),
'controls_map': '\n'.join(ctrls_map),
+ 'vendor_controls_doc': '\n'.join(vendor_ctrl_doc_sub),
+ 'vendor_controls_def': '\n'.join(vendor_ctrl_def_sub),
}
-def generate_h(controls):
+def generate_h(controls, mode, ranges):
enum_template_start = string.Template('''enum ${name}Enum {''')
enum_value_template = string.Template('''\t${name} = ${value},''')
enum_values_template = string.Template('''extern const std::array<const ControlValue, ${size}> ${name}Values;''')
+ name_value_map_template = string.Template('''extern const std::map<std::string, ${type}> ${name}NameValueMap;''')
template = string.Template('''extern const Control<${type}> ${name};''')
- ctrls = []
- draft_ctrls = []
- ids = []
- id_value = 1
+ ctrls = {}
+ ids = {}
+ id_value = {}
for ctrl in controls:
- name, ctrl = ctrl.popitem()
- id_name = snake_case(name).upper()
+ id_name = snake_case(ctrl.name).upper()
- ids.append('\t' + id_name + ' = ' + str(id_value) + ',')
+ vendor = ctrl.vendor
+ if vendor not in ctrls:
+ if vendor not in ranges.keys():
+ raise RuntimeError(f'Control id range is not defined for vendor {vendor}')
+ id_value[vendor] = ranges[vendor] + 1
+ ids[vendor] = []
+ ctrls[vendor] = []
- ctrl_type = ctrl['type']
- if ctrl_type == 'string':
- ctrl_type = 'std::string'
- elif ctrl.get('size'):
- ctrl_type = 'Span<const %s>' % ctrl_type
+ target_ids = ids[vendor]
+ target_ids.append('\t' + id_name + ' = ' + str(id_value[vendor]) + ',')
info = {
- 'name': name,
- 'type': ctrl_type,
+ 'name': ctrl.name,
+ 'type': ctrl.type,
}
- target_ctrls = ctrls
- if ctrl.get('draft'):
- target_ctrls = draft_ctrls
+ target_ctrls = ctrls[vendor]
- enum = ctrl.get('enum')
- if enum:
+ if ctrl.is_enum:
target_ctrls.append(enum_template_start.substitute(info))
num_entries = 0
- for entry in enum:
+ for enum in ctrl.enum_values:
value_info = {
- 'name': entry['name'],
- 'value': entry['value'],
+ 'name': enum.name,
+ 'value': enum.value,
}
target_ctrls.append(enum_value_template.substitute(value_info))
num_entries += 1
@@ -166,17 +290,41 @@ def generate_h(controls):
values_info = {
'name': info['name'],
+ 'type': ctrl.type,
'size': num_entries,
}
target_ctrls.append(enum_values_template.substitute(values_info))
+ target_ctrls.append(name_value_map_template.substitute(values_info))
target_ctrls.append(template.substitute(info))
- id_value += 1
+ id_value[vendor] += 1
+
+ vendor_template = string.Template('''
+namespace ${vendor} {
+
+#define LIBCAMERA_HAS_${vendor_def}_VENDOR_${mode}
+
+enum {
+${vendor_enums}
+};
+
+${vendor_controls}
+
+} /* namespace ${vendor} */
+''')
+
+ vendor_sub = []
+ for vendor in [v for v in ctrls.keys() if v != 'libcamera']:
+ vendor_sub.append(vendor_template.substitute({'mode': mode.upper(),
+ 'vendor': vendor,
+ 'vendor_def': vendor.upper(),
+ 'vendor_enums': '\n'.join(ids[vendor]),
+ 'vendor_controls': '\n'.join(ctrls[vendor])}))
return {
- 'ids': '\n'.join(ids),
- 'controls': '\n'.join(ctrls),
- 'draft_controls': '\n'.join(draft_ctrls)
+ 'ids': '\n'.join(ids['libcamera']),
+ 'controls': '\n'.join(ctrls['libcamera']),
+ 'vendor_controls': '\n'.join(vendor_sub)
}
@@ -192,21 +340,36 @@ def main(argv):
# Parse command line arguments
parser = argparse.ArgumentParser()
- parser.add_argument('-o', dest='output', metavar='file', type=str,
+ parser.add_argument('--mode', '-m', type=str, required=True, choices=['controls', 'properties'],
+ help='Mode of operation')
+ parser.add_argument('--output', '-o', metavar='file', type=str,
help='Output file name. Defaults to standard output if not specified.')
- parser.add_argument('input', type=str,
- help='Input file name.')
- parser.add_argument('template', type=str,
+ parser.add_argument('--ranges', '-r', type=str, required=True,
+ help='Control id range reservation file.')
+ parser.add_argument('--template', '-t', dest='template', type=str, required=True,
help='Template file name.')
+ parser.add_argument('input', type=str, nargs='+',
+ help='Input file name.')
+
args = parser.parse_args(argv[1:])
- data = open(args.input, 'rb').read()
- controls = yaml.safe_load(data)['controls']
+ ranges = {}
+ with open(args.ranges, 'rb') as f:
+ data = f.read()
+ ranges = yaml.safe_load(data)['ranges']
+
+ controls = []
+ for input in args.input:
+ with open(input, 'rb') as f:
+ data = f.read()
+ vendor = yaml.safe_load(data)['vendor']
+ ctrls = yaml.safe_load(data)['controls']
+ controls = controls + [Control(*ctrl.popitem(), vendor) for ctrl in ctrls]
if args.template.endswith('.cpp.in'):
data = generate_cpp(controls)
elif args.template.endswith('.h.in'):
- data = generate_h(controls)
+ data = generate_h(controls, args.mode, ranges)
else:
raise RuntimeError('Unknown template type')
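
The new Control class derives the C++ type from the YAML 'size' entry: no size yields the scalar type, an all-integer size yields a fixed-extent Span, and any string dimension marks the array as variable-sized. A sketch of the three cases, assuming the Control class above is in scope and using hypothetical control entries:

    # Hypothetical control entries exercising the three size cases.
    entries = [
        ('Brightness', {'type': 'float', 'description': 'Brightness'}),
        ('ColourGains', {'type': 'float', 'size': [2],
                         'description': 'Red and blue gains'}),
        ('LensShadingTable', {'type': 'float', 'size': ['n'],
                              'description': 'Variable-size table'}),
    ]

    for name, data in entries:
        c = Control(name, data, 'libcamera')
        print(c.name, '->', c.type)
    # Brightness -> float
    # ColourGains -> Span<const float, 2>
    # LensShadingTable -> Span<const float>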
diff --git a/utils/gen-formats.py b/utils/gen-formats.py
index da79a8bb..0c0932a5 100755
--- a/utils/gen-formats.py
+++ b/utils/gen-formats.py
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# gen-formats.py - Generate formats definitions from YAML
+# Generate formats definitions from YAML
import argparse
import re
diff --git a/utils/gen-header.sh b/utils/gen-header.sh
index 8b66c5dd..d4692758 100755
--- a/utils/gen-header.sh
+++ b/utils/gen-header.sh
@@ -9,7 +9,7 @@ cat <<EOF > "$dst_file"
/*
* Copyright (C) 2018-2019, Google Inc.
*
- * libcamera.h - libcamera public API
+ * libcamera public API
*/
#pragma once
diff --git a/utils/gen-ipa-priv-key.sh b/utils/gen-ipa-priv-key.sh
index 919751f2..2ca7b883 100755
--- a/utils/gen-ipa-priv-key.sh
+++ b/utils/gen-ipa-priv-key.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# gen-ipa-priv-key.sh - Generate an RSA private key to sign IPA modules
+# Generate an RSA private key to sign IPA modules
key="$1"
diff --git a/utils/gen-ipa-pub-key.py b/utils/gen-ipa-pub-key.py
index a4a1f7b7..dc3e7d5f 100755
--- a/utils/gen-ipa-pub-key.py
+++ b/utils/gen-ipa-pub-key.py
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipa-gen-key.py - Generate the IPA module signing public key
+# Generate the IPA module signing public key
import string
import subprocess
diff --git a/utils/gen-version.sh b/utils/gen-version.sh
index eb7c7268..e1f7ca7b 100755
--- a/utils/gen-version.sh
+++ b/utils/gen-version.sh
@@ -5,6 +5,7 @@
build_dir="$1"
src_dir="$2"
+project_version="$3"
# If .tarball-version exists, output the version string from the file and exit.
# This file is auto-generated on a 'meson dist' command from the run-dist.sh
@@ -43,6 +44,13 @@ then
fi
git diff-index --quiet HEAD || version="$version-dirty ($(date --iso-8601=seconds))"
+# If a project version is provided, use it to replace the version number.
+if [ -n "$project_version" ]
+then
+ version=$(echo "$version" | sed -e 's/^[^-]*-//')
+ version="v$project_version-$version"
+fi
+
# Replace first '-' with a '+' to denote build metadata, strip the 'g' in front
# of the git SHA1 and remove the initial 'v'.
version=$(echo "$version" | sed -e 's/-/+/' | sed -e 's/-g/-/' | cut -c 2-)
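
The new project_version handling prepends "v$project_version" before the existing rewrite runs, so the final sed pipeline applies either way. That pipeline performs three single substitutions; a Python equivalent applied to a typical 'git describe' string (the example value is illustrative):

    version = 'v0.3.0-37-g0123abcd'
    version = version.replace('-', '+', 1)   # first '-' starts the build metadata
    version = version.replace('-g', '-', 1)  # strip the 'g' in front of the SHA1
    version = version[1:]                    # remove the initial 'v'
    print(version)  # 0.3.0+37-0123abcd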
diff --git a/utils/hooks/pre-push b/utils/hooks/pre-push
index 90ffdf6f..9918b286 100755
--- a/utils/hooks/pre-push
+++ b/utils/hooks/pre-push
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
# SPDX-License-Identifier: GPL-2.0-or-later
@@ -61,7 +61,7 @@ do
msg=$(git cat-file commit "$commit")
# 1. The commit message shall not contain a local changelog.
- if echo "$msg" | grep -q '^--- *$'
+ if echo -E "$msg" | grep -q '^--- *$'
then
echo >&2 "Found local changelog in commit $commit"
errors=$((errors+1))
@@ -71,7 +71,7 @@ do
# corresponding the committer and the author.
committer=$(echo "$msg" | grep '^committer ' | head -1 | \
cut -d ' ' -f 2- | rev | cut -d ' ' -f 3- | rev)
- if ! echo "$msg" | grep -F -q "Signed-off-by: ${committer}"
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${committer}"
then
echo >&2 "Missing committer Signed-off-by in commit $commit"
errors=$((errors+1))
@@ -79,21 +79,21 @@ do
author=$(echo "$msg" | grep '^author ' | head -1 | \
cut -d ' ' -f 2- | rev | cut -d ' ' -f 3- | rev)
- if ! echo "$msg" | grep -F -q "Signed-off-by: ${author}"
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${author}"
then
echo >&2 "Missing author Signed-off-by in commit $commit"
errors=$((errors+1))
fi
# 3. A Reviewed-by or Acked-by is required.
- if ! echo "$msg" | grep -q '^\(Reviewed\|Acked\)-by: '
+ if ! echo -E "$msg" | grep -q '^\(Reviewed\|Acked\)-by: '
then
echo >&2 "No Reviewed-by or Acked-by in commit $commit"
errors=$((errors+1))
fi
# 4. The commit message shall not contain a Change-Id.
- if echo "$msg" | grep -q '^Change-Id:'
+ if echo -E "$msg" | grep -q '^Change-Id:'
then
echo >&2 "Found Change-Id in commit $commit"
errors=$((errors+1))
diff --git a/utils/ipc/extract-docs.py b/utils/ipc/extract-docs.py
index 8f7fff9f..61f44cae 100755
--- a/utils/ipc/extract-docs.py
+++ b/utils/ipc/extract-docs.py
@@ -4,15 +4,15 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# extract-docs.py - Extract doxygen documentation from mojom files
+# Extract doxygen documentation from mojom files
import argparse
import re
import sys
-regex_block_start = re.compile('^\/\*\*$')
-regex_block_end = re.compile('^ \*\/$')
-regex_spdx = re.compile('^\/\* SPDX-License-Identifier: .* \*\/$')
+regex_block_start = re.compile(r'^/\*\*$')
+regex_block_end = re.compile(r'^ \*/$')
+regex_spdx = re.compile(r'^/\* SPDX-License-Identifier: .* \*/$')
def main(argv):
@@ -38,7 +38,7 @@ def main(argv):
/*
* Copyright (C) 2021, Google Inc.
*
- * {pipeline}_ipa_interface.cpp - Docs file for generated {pipeline}.mojom
+ * Docs file for generated {pipeline}.mojom
*
* This file is auto-generated. Do not edit.
*/
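
The regex changes here are purely about raw strings: '\/' is not a recognized escape sequence in an ordinary Python string literal (it is kept as-is but triggers a deprecation warning on recent Pythons), and '/' needs no escaping in a regular expression anyway. A quick check of the updated patterns:

    import re

    # With raw strings the backslashes reach the regex engine untouched,
    # so '\*' still escapes the literal asterisk.
    regex_block_start = re.compile(r'^/\*\*$')
    regex_block_end = re.compile(r'^ \*/$')

    assert regex_block_start.match('/**')
    assert regex_block_end.match(' */')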
diff --git a/utils/ipc/generate.py b/utils/ipc/generate.py
index 8771e0a6..c2b3fcb7 100755
--- a/utils/ipc/generate.py
+++ b/utils/ipc/generate.py
@@ -4,7 +4,7 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# generate.py - Run mojo code generator for generating libcamera IPC files
+# Run mojo code generator for generating libcamera IPC files
import os
import sys
@@ -12,10 +12,20 @@ import sys
# TODO set sys.pycache_prefix for >= python3.8
sys.dont_write_bytecode = True
+sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/bindings')
+
import mojo.public.tools.bindings.mojom_bindings_generator as generator
def _GetModulePath(path, output_dir):
- return os.path.join(output_dir, path.relative_path())
+ return os.path.join(output_dir, path.relative_path())
+
+
+# Disable the attribute checker to support our custom attributes. Ideally we
+# should add the attributes to the list of allowed attributes in
+# utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py, but
+# we're trying hard to use the upstream mojom as-is.
+if hasattr(generator, '_BUILTIN_CHECKS'):
+ del generator._BUILTIN_CHECKS['attributes']
# Override the mojo code generator's generator list to only contain our
# libcamera generator
diff --git a/utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl b/utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
index a565b59a..7f2d0810 100644
--- a/utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
+++ b/utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
@@ -7,7 +7,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * core_ipa_interface.h - libcamera core definitions for Image Processing Algorithms
+ * libcamera core definitions for Image Processing Algorithms
*
* This file is auto-generated. Do not edit.
*/
@@ -26,7 +26,7 @@ namespace libcamera {
static const {{const.kind|name}} {{const.mojom_name}} = {{const.value}};
{% endfor %}
-{% for enum in enums %}
+{% for enum in enums_gen_header %}
{{funcs.define_enum(enum)}}
{% endfor %}
diff --git a/utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl b/utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
index 5738a1aa..036518f6 100644
--- a/utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
+++ b/utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * core_ipa_serializer.h - Data serializer for core libcamera definitions for IPA
+ * Data serializer for core libcamera definitions for IPA
*
* This file is auto-generated. Do not edit.
*/
diff --git a/utils/ipc/generators/libcamera_templates/definition_functions.tmpl b/utils/ipc/generators/libcamera_templates/definition_functions.tmpl
index 94bb4918..8b8509f3 100644
--- a/utils/ipc/generators/libcamera_templates/definition_functions.tmpl
+++ b/utils/ipc/generators/libcamera_templates/definition_functions.tmpl
@@ -9,7 +9,7 @@
# \param enum Enum object whose definition is to be generated
#}
{%- macro define_enum(enum) -%}
-enum {{enum.mojom_name}} {
+enum{{" class" if enum|is_scoped}} {{enum.mojom_name}} {
{%- for field in enum.fields %}
{{field.mojom_name}} = {{field.numeric_value}},
{%- endfor %}
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl b/utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
index 415ec283..4d88a3d7 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
+++ b/utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
@@ -7,7 +7,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_interface.h - Image Processing Algorithm interface for {{module_name}}
+ * Image Processing Algorithm interface for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
@@ -69,7 +69,7 @@ public:
{%- for method in interface_event.methods %}
Signal<
{%- for param in method.parameters -%}
- {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod}}
+ {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
{{- ", " if not loop.last}}
{%- endfor -%}
> {{method.mojom_name}};
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl b/utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
index c37c4941..ce3cc5ab 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
+++ b/utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_proxy.cpp - Image Processing Algorithm proxy for {{module_name}}
+ * Image Processing Algorithm proxy for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
@@ -175,9 +175,9 @@ void {{proxy_name}}::recvMessage(const IPCMessage &data)
);
{% elif method|is_async %}
ASSERT(state_ == ProxyRunning);
- proxy_.invokeMethod(&ThreadProxy::{{method.mojom_name}}, ConnectionTypeQueued,
+ proxy_.invokeMethod(&ThreadProxy::{{method.mojom_name}}, ConnectionTypeQueued
{%- for param in method|method_param_names -%}
- {{param}}{{- ", " if not loop.last}}
+ , {{param}}
{%- endfor -%}
);
{%- endif %}
@@ -235,8 +235,8 @@ void {{proxy_name}}::recvMessage(const IPCMessage &data)
}
void {{proxy_name}}::{{method.mojom_name}}IPC(
- std::vector<uint8_t>::const_iterator data,
- size_t dataSize,
+ [[maybe_unused]] std::vector<uint8_t>::const_iterator data,
+ [[maybe_unused]] size_t dataSize,
[[maybe_unused]] const std::vector<SharedFD> &fds)
{
{%- for param in method.parameters %}
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl b/utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
index c308dd10..e213b18a 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
+++ b/utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_proxy.h - Image Processing Algorithm proxy for {{module_name}}
+ * Image Processing Algorithm proxy for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
@@ -18,6 +18,7 @@
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
+#include <libcamera/base/object.h>
#include <libcamera/base/thread.h>
#include "libcamera/internal/control_serializer.h"
@@ -46,7 +47,7 @@ public:
{%- for method in interface_event.methods %}
Signal<
{%- for param in method.parameters -%}
- {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod}}
+ {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
{{- ", " if not loop.last}}
{%- endfor -%}
> {{method.mojom_name}};
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl b/utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
index b65dc4cf..1f990d3f 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
+++ b/utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_proxy_worker.cpp - Image Processing Algorithm proxy worker for {{module_name}}
+ * Image Processing Algorithm proxy worker for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl b/utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
index 8b709705..cd5a65a9 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
+++ b/utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_serializer.h - Image Processing Algorithm data serializer for {{module_name}}
+ * Image Processing Algorithm data serializer for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
diff --git a/utils/ipc/generators/libcamera_templates/proxy_functions.tmpl b/utils/ipc/generators/libcamera_templates/proxy_functions.tmpl
index bac826a7..b5797b14 100644
--- a/utils/ipc/generators/libcamera_templates/proxy_functions.tmpl
+++ b/utils/ipc/generators/libcamera_templates/proxy_functions.tmpl
@@ -52,6 +52,9 @@
#}
{%- macro serialize_call(params, buf, fds) %}
{%- for param in params %}
+{%- if param|is_enum %}
+ static_assert(sizeof({{param|name_full}}) <= 4);
+{%- endif %}
std::vector<uint8_t> {{param.mojom_name}}Buf;
{%- if param|has_fd %}
std::vector<SharedFD> {{param.mojom_name}}Fds;
@@ -59,7 +62,13 @@
{%- else %}
std::tie({{param.mojom_name}}Buf, std::ignore) =
{%- endif %}
+{%- if param|is_flags %}
+ IPADataSerializer<{{param|name_full}}>::serialize({{param.mojom_name}}
+{%- elif param|is_enum %}
+ IPADataSerializer<uint32_t>::serialize(static_cast<uint32_t>({{param.mojom_name}})
+{%- else %}
IPADataSerializer<{{param|name}}>::serialize({{param.mojom_name}}
+{% endif -%}
{{- ", &controlSerializer_" if param|needs_control_serializer -}}
);
{%- endfor %}
@@ -97,7 +106,14 @@
# This code is meant to be used by macro deserialize_call.
#}
{%- macro deserialize_param(param, pointer, loop, buf, fds, iter, data_size) -%}
-{{"*" if pointer}}{{param.mojom_name}} = IPADataSerializer<{{param|name}}>::deserialize(
+{{"*" if pointer}}{{param.mojom_name}} =
+{%- if param|is_flags %}
+IPADataSerializer<{{param|name_full}}>::deserialize(
+{%- elif param|is_enum %}
+static_cast<{{param|name_full}}>(IPADataSerializer<uint32_t>::deserialize(
+{%- else %}
+IPADataSerializer<{{param|name}}>::deserialize(
+{%- endif %}
{{buf}}{{- ".cbegin()" if not iter}} + {{param.mojom_name}}Start,
{%- if loop.last and not iter %}
{{buf}}.cend()
@@ -121,7 +137,7 @@
{%- if param|needs_control_serializer %}
&controlSerializer_
{%- endif -%}
-);
+){{")" if param|is_enum and not param|is_flags}};
{%- endmacro -%}
@@ -170,7 +186,7 @@
{% for param in params|with_fds %}
{%- if loop.first %}
const size_t {{param.mojom_name}}FdStart = 0;
-{%- elif not loop.last %}
+{%- else %}
const size_t {{param.mojom_name}}FdStart = {{loop.previtem.mojom_name}}FdStart + {{loop.previtem.mojom_name}}FdsSize;
{%- endif %}
{%- endfor %}
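
With this change, plain enum parameters cross the IPC boundary as a fixed-width uint32_t (hence the static_assert that the enum fits in 4 bytes), with the cast undone on deserialization, while [flags] parameters keep their dedicated Flags<> serializer. A loose Python analogy of that wire format, using a made-up enum value:

    import struct

    AE_MODE_ON = 1  # hypothetical enum value

    wire = struct.pack('<I', AE_MODE_ON)   # serialized as uint32_t
    value = struct.unpack('<I', wire)[0]   # deserialized; C++ then casts
    assert value == AE_MODE_ON             # back to the enum type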
diff --git a/utils/ipc/generators/libcamera_templates/serializer.tmpl b/utils/ipc/generators/libcamera_templates/serializer.tmpl
index 77bae36f..323e1293 100644
--- a/utils/ipc/generators/libcamera_templates/serializer.tmpl
+++ b/utils/ipc/generators/libcamera_templates/serializer.tmpl
@@ -34,6 +34,10 @@
std::tie({{field.mojom_name}}, std::ignore) =
{%- if field|is_pod %}
IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
+ {%- elif field|is_flags %}
+ IPADataSerializer<{{field|name_full}}>::serialize(data.{{field.mojom_name}});
+ {%- elif field|is_enum_scoped %}
+ IPADataSerializer<uint{{field|bit_width}}_t>::serialize(static_cast<uint{{field|bit_width}}_t>(data.{{field.mojom_name}}));
{%- elif field|is_enum %}
IPADataSerializer<uint{{field|bit_width}}_t>::serialize(data.{{field.mojom_name}});
{%- endif %}
@@ -96,6 +100,8 @@
{{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
{%- if field|is_pod %}
ret.{{field.mojom_name}} = IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field_size}});
+ {%- elif field|is_flags %}
+ ret.{{field.mojom_name}} = IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field_size}});
{%- else %}
ret.{{field.mojom_name}} = static_cast<{{field|name_full}}>(IPADataSerializer<uint{{field|bit_width}}_t>::deserialize(m, m + {{field_size}}));
{%- endif %}
diff --git a/utils/ipc/generators/mojom_libcamera_generator.py b/utils/ipc/generators/mojom_libcamera_generator.py
index 753bfc73..b8209e51 100644
--- a/utils/ipc/generators/mojom_libcamera_generator.py
+++ b/utils/ipc/generators/mojom_libcamera_generator.py
@@ -4,7 +4,7 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# mojom_libcamera_generator.py - Generates libcamera files from a mojom.Module.
+# Generates libcamera files from a mojom.Module.
import argparse
import datetime
@@ -72,8 +72,10 @@ def ParamsCommaSep(l):
def GetDefaultValue(element):
if element.default is not None:
return element.default
- if type(element.kind) == mojom.Kind:
+ if type(element.kind) == mojom.ValueKind:
return '0'
+ if IsFlags(element):
+ return ''
if mojom.IsEnumKind(element.kind):
return f'static_cast<{element.kind.mojom_name}>(0)'
if isinstance(element.kind, mojom.Struct) and \
@@ -184,7 +186,7 @@ def MethodParameters(method):
params = []
for param in method.parameters:
params.append('const %s %s%s' % (GetNameForElement(param),
- '&' if not IsPod(param) else '',
+ '' if IsPod(param) or IsEnum(param) else '&',
param.mojom_name))
for param in MethodParamOutputs(method):
params.append(f'{GetNameForElement(param)} *{param.mojom_name}')
@@ -220,9 +222,30 @@ def IsControls(element):
def IsEnum(element):
return mojom.IsEnumKind(element.kind)
+
+# Only works on the enum definition, not on types
+def IsScoped(element):
+ attributes = getattr(element, 'attributes', None)
+ if not attributes:
+ return False
+ return 'scopedEnum' in attributes
+
+
+def IsEnumScoped(element):
+ if not IsEnum(element):
+ return False
+ return IsScoped(element.kind)
+
def IsFd(element):
return mojom.IsStructKind(element.kind) and element.kind.mojom_name == "SharedFD"
+
+def IsFlags(element):
+ attributes = getattr(element, 'attributes', None)
+ if not attributes:
+ return False
+ return 'flags' in attributes
+
def IsMap(element):
return mojom.IsMapKind(element.kind)
@@ -251,9 +274,11 @@ def ByteWidthFromCppType(t):
raise Exception('invalid type')
return str(int(_bit_widths[key]) // 8)
-
# Get the type name for a given element
def GetNameForElement(element):
+    # [flags] fields and parameters are wrapped in the Flags<E> template,
+    # e.g. a [flags] parameter of enum E in module ipa.foo -> Flags<ipa::foo::E>
+ if IsFlags(element):
+ return f'Flags<{GetFullNameForElement(element.kind)}>'
# structs
if (mojom.IsEnumKind(element) or
mojom.IsInterfaceKind(element) or
@@ -302,15 +327,18 @@ def GetNameForElement(element):
def GetFullNameForElement(element):
name = GetNameForElement(element)
namespace_str = ''
- if mojom.IsStructKind(element):
+ if (mojom.IsStructKind(element) or mojom.IsEnumKind(element)):
namespace_str = element.module.mojom_namespace.replace('.', '::')
elif (hasattr(element, 'kind') and
- (mojom.IsStructKind(element.kind) or
- mojom.IsEnumKind(element.kind))):
+ (mojom.IsStructKind(element.kind) or mojom.IsEnumKind(element.kind))):
namespace_str = element.kind.module.mojom_namespace.replace('.', '::')
if namespace_str == '':
return name
+
+ if IsFlags(element):
+ return GetNameForElement(element)
+
return f'{namespace_str}::{name}'
def ValidateZeroLength(l, s, cap=True):
@@ -341,7 +369,7 @@ def ValidateNamespace(namespace):
if namespace == '':
raise Exception('Must have a namespace')
- if not re.match('^ipa\.[0-9A-Za-z_]+', namespace):
+ if not re.match(r'^ipa\.[0-9A-Za-z_]+', namespace):
raise Exception('Namespace must be of the form "ipa.{pipeline_name}"')
def ValidateInterfaces(interfaces):
@@ -407,10 +435,13 @@ class Generator(generator.Generator):
'is_array': IsArray,
'is_controls': IsControls,
'is_enum': IsEnum,
+ 'is_enum_scoped': IsEnumScoped,
'is_fd': IsFd,
+ 'is_flags': IsFlags,
'is_map': IsMap,
'is_plain_struct': IsPlainStruct,
'is_pod': IsPod,
+ 'is_scoped': IsScoped,
'is_str': IsStr,
'method_input_has_fd': MethodInputHasFd,
'method_output_has_fd': MethodOutputHasFd,
@@ -452,7 +483,7 @@ class Generator(generator.Generator):
def _GetJinjaExportsForCore(self):
return {
'consts': self.module.constants,
- 'enums': self.module.enums,
+ 'enums_gen_header': [x for x in self.module.enums if x.attributes is None or 'skipHeader' not in x.attributes],
'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
'structs_gen_header': [x for x in self.module.structs if x.attributes is None or 'skipHeader' not in x.attributes],
diff --git a/utils/ipc/mojo/README b/utils/ipc/mojo/README
index d5c24fc3..961cabd2 100644
--- a/utils/ipc/mojo/README
+++ b/utils/ipc/mojo/README
@@ -1,4 +1,4 @@
# SPDX-License-Identifier: CC0-1.0
-Files in this directory are imported from 9c138d992bfc of Chromium. Do not
+Files in this directory are imported from 9be4263648d7 of Chromium. Do not
modify them manually.
diff --git a/utils/ipc/mojo/public/LICENSE b/utils/ipc/mojo/public/LICENSE
index 972bb2ed..513e8a6a 100644
--- a/utils/ipc/mojo/public/LICENSE
+++ b/utils/ipc/mojo/public/LICENSE
@@ -1,4 +1,4 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
+// Copyright 2014 The Chromium Authors
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
diff --git a/utils/ipc/mojo/public/tools/BUILD.gn b/utils/ipc/mojo/public/tools/BUILD.gn
index eb6391a6..5328a34a 100644
--- a/utils/ipc/mojo/public/tools/BUILD.gn
+++ b/utils/ipc/mojo/public/tools/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -10,7 +10,11 @@ group("mojo_python_unittests") {
"run_all_python_unittests.py",
"//testing/scripts/run_isolated_script_test.py",
]
- deps = [ "//mojo/public/tools/mojom/mojom:tests" ]
+ deps = [
+ "//mojo/public/tools/bindings:tests",
+ "//mojo/public/tools/mojom:tests",
+ "//mojo/public/tools/mojom/mojom:tests",
+ ]
data_deps = [
"//testing:test_scripts_shared",
"//third_party/catapult/third_party/typ/",
diff --git a/utils/ipc/mojo/public/tools/bindings/BUILD.gn b/utils/ipc/mojo/public/tools/bindings/BUILD.gn
index 3e242532..eeca73ea 100644
--- a/utils/ipc/mojo/public/tools/bindings/BUILD.gn
+++ b/utils/ipc/mojo/public/tools/bindings/BUILD.gn
@@ -1,24 +1,27 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import("//build/config/python.gni")
import("//mojo/public/tools/bindings/mojom.gni")
import("//third_party/jinja2/jinja2.gni")
-# TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
-python2_action("precompile_templates") {
+action("precompile_templates") {
sources = mojom_generator_sources
sources += [
+ "$mojom_generator_root/generators/cpp_templates/cpp_macros.tmpl",
"$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
"$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/feature_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/feature_definition.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_feature_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-features.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
@@ -26,7 +29,6 @@ python2_action("precompile_templates") {
"$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
- "$mojom_generator_root/generators/cpp_templates/module-test-utils.cc.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
"$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
@@ -65,9 +67,6 @@ python2_action("precompile_templates") {
"$mojom_generator_root/generators/java_templates/struct.java.tmpl",
"$mojom_generator_root/generators/java_templates/union.java.tmpl",
"$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
- "$mojom_generator_root/generators/js_templates/externs/interface_definition.tmpl",
- "$mojom_generator_root/generators/js_templates/externs/module.externs.tmpl",
- "$mojom_generator_root/generators/js_templates/externs/struct_definition.tmpl",
"$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
"$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
"$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
@@ -93,8 +92,11 @@ python2_action("precompile_templates") {
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
+ "$mojom_generator_root/generators/ts_templates/enum_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/interface_definition.tmpl",
"$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
- "$mojom_generator_root/generators/ts_templates/mojom.tmpl",
+ "$mojom_generator_root/generators/ts_templates/struct_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/union_definition.tmpl",
]
script = mojom_generator_script
@@ -102,8 +104,8 @@ python2_action("precompile_templates") {
outputs = [
"$target_gen_dir/cpp_templates.zip",
"$target_gen_dir/java_templates.zip",
- "$target_gen_dir/mojolpm_templates.zip",
"$target_gen_dir/js_templates.zip",
+ "$target_gen_dir/mojolpm_templates.zip",
"$target_gen_dir/ts_templates.zip",
]
args = [
@@ -113,3 +115,17 @@ python2_action("precompile_templates") {
"precompile",
]
}
+
+group("tests") {
+ data = [
+ mojom_generator_script,
+ "checks/mojom_attributes_check_unittest.py",
+ "checks/mojom_interface_feature_check_unittest.py",
+ "checks/mojom_restrictions_checks_unittest.py",
+ "mojom_bindings_generator_unittest.py",
+ "//tools/diagnosis/crbug_1001171.py",
+ "//third_party/markupsafe/",
+ ]
+ data += mojom_generator_sources
+ data += jinja2_sources
+}
diff --git a/utils/ipc/mojo/public/tools/bindings/README.md b/utils/ipc/mojo/public/tools/bindings/README.md
index 43882450..b27b2d01 100644
--- a/utils/ipc/mojo/public/tools/bindings/README.md
+++ b/utils/ipc/mojo/public/tools/bindings/README.md
@@ -96,7 +96,7 @@ for message parameters.
| `string` | UTF-8 encoded string.
| `array<T>` | Array of any Mojom type *T*; for example, `array<uint8>` or `array<array<string>>`.
| `array<T, N>` | Fixed-length array of any Mojom type *T*. The parameter *N* must be an integral constant.
-| `map<S, T>` | Associated array maping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
+| `map<S, T>` | Associative array mapping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
| `handle` | Generic Mojo handle. May be any type of handle, including a wrapped native platform handle.
| `handle<message_pipe>` | Generic message pipe handle.
| `handle<shared_buffer>` | Shared buffer handle.
@@ -188,8 +188,8 @@ struct StringPair {
};
enum AnEnum {
- YES,
- NO
+ kYes,
+ kNo
};
interface SampleInterface {
@@ -209,7 +209,7 @@ struct AllTheThings {
uint64 unsigned_64bit_value;
float float_value_32bit;
double float_value_64bit;
- AnEnum enum_value = AnEnum.YES;
+ AnEnum enum_value = AnEnum.kYes;
// Strings may be nullable.
string? maybe_a_string_maybe_not;
@@ -300,14 +300,14 @@ within a module or nested within the namespace of some struct or interface:
module business.mojom;
enum Department {
- SALES = 0,
- DEV,
+ kSales = 0,
+ kDev,
};
struct Employee {
enum Type {
- FULL_TIME,
- PART_TIME,
+ kFullTime,
+ kPartTime,
};
Type type;
@@ -315,6 +315,9 @@ struct Employee {
};
```
+C++ constant-style enum value names are preferred as specified in the
+[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html#Enumerator_Names).
+
Similar to C-style enums, individual values may be explicitly assigned within an
enum definition. By default, values are based at zero and increment by
1 sequentially.
@@ -336,8 +339,8 @@ struct Employee {
const uint64 kInvalidId = 0;
enum Type {
- FULL_TIME,
- PART_TIME,
+ kFullTime,
+ kPartTime,
};
uint64 id = kInvalidId;
@@ -348,6 +351,37 @@ struct Employee {
The effect of nested definitions on generated bindings varies depending on the
target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages).
+### Features
+
+Features can be declared with a `name` and `default_state` and can be attached
+in mojo to interfaces or methods using the `RuntimeFeature` attribute. If the
+feature is disabled at runtime, calling the method will crash and the interface
+will refuse to be bound / instantiated. Features cannot be serialized to be
+sent over IPC at this time.
+
+```
+module experimental.mojom;
+
+feature kUseElevators {
+ const string name = "UseElevators";
+ const bool default_state = false;
+};
+
+[RuntimeFeature=kUseElevators]
+interface Elevator {
+ // This interface cannot be bound or called if the feature is disabled.
+};
+
+interface Building {
+ // This method cannot be called if the feature is disabled.
+ [RuntimeFeature=kUseElevators]
+  CallElevator(int32 floor);
+
+ // This method can be called.
+  RingDoorbell(int32 volume);
+};
+```
+
### Interfaces
An **interface** is a logical bundle of parameterized request messages. Each
@@ -396,20 +430,33 @@ interesting attributes supported today.
extreme caution, because it can lead to deadlocks otherwise.
* **`[Default]`**:
- The `Default` attribute may be used to specify an enumerator value that
- will be used if an `Extensible` enumeration does not deserialize to a known
- value on the receiver side, i.e. the sender is using a newer version of the
- enum. This allows unknown values to be mapped to a well-defined value that can
- be appropriately handled.
+ The `Default` attribute may be used to specify an enumerator value or union
+ field that will be used if an `Extensible` enumeration or union does not
+ deserialize to a known value on the receiver side, i.e. the sender is using a
+ newer version of the enum or union. This allows unknown values to be mapped to
+ a well-defined value that can be appropriately handled.
+
+ Note: The `Default` field for a union must be of nullable or integral type.
+ When a union is defaulted to this field, the field takes on the default value
+ for its type: null for nullable types, and zero/false for integral types.
* **`[Extensible]`**:
- The `Extensible` attribute may be specified for any enum definition. This
- essentially disables builtin range validation when receiving values of the
- enum type in a message, allowing older bindings to tolerate unrecognized
- values from newer versions of the enum.
+ The `Extensible` attribute may be specified for any enum or union definition.
+ For enums, this essentially disables builtin range validation when receiving
+ values of the enum type in a message, allowing older bindings to tolerate
+ unrecognized values from newer versions of the enum.
- Note: in the future, an `Extensible` enumeration will require that a `Default`
- enumerator value also be specified.
+ If an enum value within an extensible enum definition is affixed with the
+ `Default` attribute, out-of-range values for the enum will deserialize to that
+ default value. Only one enum value may be designated as the `Default`.
+
+ Similarly, a union marked `Extensible` will deserialize to its `Default` field
+ when an unrecognized field is received. Extensible unions MUST specify exactly
+ one `Default` field, and the field must be of nullable or integral type. When
+  defaulted to this field, the value is always null/zero/false as appropriate
+  (see the union sketch after this list).
+
+ An `Extensible` enumeration REQUIRES that a `Default` value be specified,
+ so all new extensible enums should specify one.
* **`[Native]`**:
The `Native` attribute may be specified for an empty struct declaration to
@@ -422,7 +469,10 @@ interesting attributes supported today.
* **`[MinVersion=N]`**:
The `MinVersion` attribute is used to specify the version at which a given
field, enum value, interface method, or method parameter was introduced.
- See [Versioning](#Versioning) for more details.
+  See [Versioning](#Versioning) for more details. `MinVersion` does not apply
+  to interfaces, structs or enums themselves, but to the fields of those
+  types. `MinVersion` is not a module-global value, but it is fine to treat it
+  as one by skipping version numbers when adding fields or parameters.
* **`[Stable]`**:
The `Stable` attribute specifies that a given mojom type or interface
@@ -442,13 +492,73 @@ interesting attributes supported today.
string representation as specified by RFC 4122. New UUIDs can be generated
with common tools such as `uuidgen`.
+* **`[RuntimeFeature=feature]`**
+ The `RuntimeFeature` attribute should reference a mojo `feature`. If this
+ feature is enabled (e.g. using `--enable-features={feature.name}`) then the
+ interface behaves entirely as expected. If the feature is not enabled the
+ interface cannot be bound to a concrete receiver or remote - attempting to do
+ so will result in the receiver or remote being reset() to an unbound state.
+ Note that this is a different concept to the build-time `EnableIf` directive.
+ `RuntimeFeature` is currently only supported for C++ bindings and has no
+ effect for, say, Java or TypeScript bindings (see https://crbug.com/1278253).
+
* **`[EnableIf=value]`**:
The `EnableIf` attribute is used to conditionally enable definitions when the
mojom is parsed. If the `mojom` target in the GN file does not include the
matching `value` in the list of `enabled_features`, the definition will be
disabled. This is useful for mojom definitions that only make sense on one
platform. Note that the `EnableIf` attribute can only be set once per
- definition.
+  definition and cannot be set at the same time as `EnableIfNot`. Also be aware
+  that only one condition can be tested: `EnableIf=value,xyz` introduces a new
+  `xyz` attribute that is not part of the `EnableIf` condition, which depends
+  only on the feature `value`. Complex conditions can be built up via
+  `enabled_features` in `build.gn` files (see the sketch after this list).
+
+* **`[EnableIfNot=value]`**:
+ The `EnableIfNot` attribute is used to conditionally enable definitions when
+ the mojom is parsed. If the `mojom` target in the GN file includes the
+ matching `value` in the list of `enabled_features`, the definition will be
+  disabled. This is useful for mojom definitions that make sense on all but
+ one platform. Note that the `EnableIfNot` attribute can only be set once per
+ definition and cannot be set at the same time as `EnableIf`.
+
+* **`[ServiceSandbox=value]`**:
+ The `ServiceSandbox` attribute is used in Chromium to tag which sandbox a
+ service hosting an implementation of interface will be launched in. This only
+ applies to `C++` bindings. `value` should match a constant defined in an
+ imported `sandbox.mojom.Sandbox` enum (for Chromium this is
+ `//sandbox/policy/mojom/sandbox.mojom`), such as `kService`.
+
+* **`[RequireContext=enum]`**:
+ The `RequireContext` attribute is used in Chromium to tag interfaces that
+ should be passed (as remotes or receivers) only to privileged process
+ contexts. The process context must be an enum that is imported into the
+ mojom that defines the tagged interface. `RequireContext` may be used in
+ future to DCHECK or CHECK if remotes are made available in contexts that
+ conflict with the one provided in the interface definition. Process contexts
+ are not the same as the sandbox a process is running in, but will reflect
+ the set of capabilities provided to the service.
+
+* **`[AllowedContext=enum]`**:
+ The `AllowedContext` attribute is used in Chromium to tag methods that pass
+ remotes or receivers of interfaces that are marked with a `RequireContext`
+ attribute. The enum provided on the method must be equal or better (lower
+ numerically) than the one required on the interface being passed. At present
+ failing to specify an adequate `AllowedContext` value will cause mojom
+ generation to fail at compile time. In future DCHECKs or CHECKs might be
+ added to enforce that method is only called from a process context that meets
+  the given `AllowedContext` value. The enum must be of the same type as that
+ specified in the interface's `RequireContext` attribute. Adding an
+ `AllowedContext` attribute to a method is a strong indication that you need
+ a detailed security review of your design - please reach out to the security
+ team.
+
+* **`[SupportsUrgent]`**:
+ The `SupportsUrgent` attribute is used in conjunction with
+ `mojo::UrgentMessageScope` in Chromium to tag messages as having high
+ priority. The IPC layer notifies the underlying scheduler upon both receiving
+ and processing an urgent message. At present, this attribute only affects
+ channel associated messages in the renderer process.
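+
+As an illustration of an `Extensible` union with a `Default` field (a minimal
+sketch; `DataBlob` is a hypothetical struct defined elsewhere):
+
+``` cpp
+[Extensible]
+union Response {
+  // Unrecognized fields from a newer sender deserialize to this field,
+  // which then takes the default value for its type (zero here).
+  [Default] int32 error_code;
+  DataBlob? payload;
+};
+```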
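+
+And a minimal sketch of pairing `EnableIf` with `EnableIfNot` to select one of
+two definitions at parse time, assuming a hypothetical `is_android` feature
+listed in the GN target's `enabled_features`:
+
+``` cpp
+// Exactly one of these definitions is enabled when the mojom is parsed.
+[EnableIf=is_android]
+const int32 kMaxConnections = 4;
+
+[EnableIfNot=is_android]
+const int32 kMaxConnections = 16;
+```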
## Generated Code For Target Languages
@@ -495,9 +605,9 @@ values. For example if a Mojom declares the enum:
``` cpp
enum AdvancedBoolean {
- TRUE = 0,
- FALSE = 1,
- FILE_NOT_FOUND = 2,
+ kTrue = 0,
+ kFalse = 1,
+ kFileNotFound = 2,
};
```
@@ -550,10 +660,16 @@ See the documentation for
*** note
**NOTE:** You don't need to worry about versioning if you don't care about
-backwards compatibility. Specifically, all parts of Chrome are updated
-atomically today and there is not yet any possibility of any two Chrome
-processes communicating with two different versions of any given Mojom
-interface.
+backwards compatibility. Today, all parts of the Chrome browser are
+updated atomically and there is not yet any possibility of any two
+Chrome processes communicating with two different versions of any given Mojom
+interface. On Chrome OS, there are several places where versioning is required.
+For example,
+[ARC++](https://developer.android.com/chrome-os/intro)
+uses versioned mojo to send IPC to the Android container.
+Likewise, the
+[Lacros](/docs/lacros.md)
+browser uses versioned mojo to talk to the ash system UI.
***
Services extend their interfaces to support new features over time, and clients
@@ -593,8 +709,8 @@ struct Employee {
*** note
**NOTE:** Mojo object or handle types added with a `MinVersion` **MUST** be
-optional (nullable). See [Primitive Types](#Primitive-Types) for details on
-nullable values.
+optional (nullable) or primitive. See [Primitive Types](#Primitive-Types) for
+details on nullable values.
***
By default, fields belong to version 0. New fields must be appended to the
@@ -624,10 +740,10 @@ the following hard constraints:
* For any given struct or interface, if any field or method explicitly specifies
an ordinal value, all fields or methods must explicitly specify an ordinal
value.
-* For an *N*-field struct or *N*-method interface, the set of explicitly
- assigned ordinal values must be limited to the range *[0, N-1]*. Interfaces
- should include placeholder methods to fill the ordinal positions of removed
- methods (for example "Unused_Message_7@7()" or "RemovedMessage@42()", etc).
+* For an *N*-field struct, the set of explicitly assigned ordinal values must be
+ limited to the range *[0, N-1]*. Structs should include placeholder fields
+ to fill the ordinal positions of removed fields (for example "Unused_Field"
+ or "RemovedField", etc).
You may reorder fields, but you must ensure that the ordinal values of existing
fields remain unchanged. For example, the following struct remains
@@ -652,6 +768,24 @@ There are two dimensions on which an interface can be extended
that the version number is scoped to the whole interface rather than to any
individual parameter list.
+``` cpp
+// Old version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id) => (Employee? employee);
+};
+
+// New version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
+ => (Employee? employee,
+ [MinVersion=1] array<uint8>? finger_print);
+};
+```
+
+Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
+list of a request or response method to a destination using an older version of
+an interface, unrecognized fields are silently discarded.
+
Please note that adding a response to a message which did not previously
expect a response is not a backwards-compatible change.
@@ -664,17 +798,12 @@ For example:
``` cpp
// Old version:
interface HumanResourceDatabase {
- AddEmployee(Employee employee) => (bool success);
QueryEmployee(uint64 id) => (Employee? employee);
};
// New version:
interface HumanResourceDatabase {
- AddEmployee(Employee employee) => (bool success);
-
- QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
- => (Employee? employee,
- [MinVersion=1] array<uint8>? finger_print);
+ QueryEmployee(uint64 id) => (Employee? employee);
[MinVersion=1]
AttachFingerPrint(uint64 id, array<uint8> finger_print)
@@ -682,10 +811,7 @@ interface HumanResourceDatabase {
};
```
-Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
-list of a request or response method to a destination using an older version of
-an interface, unrecognized fields are silently discarded. However, if the method
-call itself is not recognized, it is considered a validation error and the
+If a method call is not recognized, it is considered a validation error and the
receiver will close its end of the interface pipe. For example, if a client on
version 1 of the above interface sends an `AttachFingerPrint` request to an
implementation of version 0, the client will be disconnected.
@@ -712,8 +838,8 @@ If you want an enum to be extensible in the future, you can apply the
``` cpp
[Extensible]
enum Department {
- SALES,
- DEV,
+ kSales,
+ kDev,
};
```
@@ -722,9 +848,9 @@ And later you can extend this enum without breaking backwards compatibility:
``` cpp
[Extensible]
enum Department {
- SALES,
- DEV,
- [MinVersion=1] RESEARCH,
+ kSales,
+ kDev,
+ [MinVersion=1] kResearch,
};
```
@@ -782,7 +908,7 @@ Statement = ModuleStatement | ImportStatement | Definition
ModuleStatement = AttributeSection "module" Identifier ";"
ImportStatement = "import" StringLiteral ";"
-Definition = Struct Union Interface Enum Const
+Definition = Struct Union Interface Enum Feature Const
AttributeSection = <empty> | "[" AttributeList "]"
AttributeList = <empty> | NonEmptyAttributeList
@@ -809,7 +935,7 @@ InterfaceBody = <empty>
| InterfaceBody Const
| InterfaceBody Enum
| InterfaceBody Method
-Method = AttributeSection Name Ordinal "(" ParamterList ")" Response ";"
+Method = AttributeSection Name Ordinal "(" ParameterList ")" Response ";"
ParameterList = <empty> | NonEmptyParameterList
NonEmptyParameterList = Parameter
| Parameter "," NonEmptyParameterList
@@ -847,6 +973,13 @@ EnumValue = AttributeSection Name
| AttributeSection Name "=" Integer
| AttributeSection Name "=" Identifier
+; Note: `feature` is a weak keyword and can appear as, say, a struct field name.
+Feature = AttributeSection "feature" Name "{" FeatureBody "}" ";"
+ | AttributeSection "feature" Name ";"
+FeatureBody = <empty>
+ | FeatureBody FeatureField
+FeatureField = AttributeSection TypeSpec Name Default ";"
+
Const = "const" TypeSpec Name "=" Constant ";"
Constant = Literal | Identifier ";"
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/__init__.py b/utils/ipc/mojo/public/tools/bindings/checks/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/__init__.py
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py
new file mode 100644
index 00000000..e6e4f2c9
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py
@@ -0,0 +1,170 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate mojo attributes are allowed in Chrome before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+_COMMON_ATTRIBUTES = {
+ 'EnableIf',
+ 'EnableIfNot',
+}
+
+# For struct, union & parameter lists.
+_COMMON_FIELD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'MinVersion',
+ 'RenamedFrom',
+}
+
+# Note: `Default` goes on the default _value_, not on the enum.
+# Note: [Stable] without [Extensible] is not allowed.
+_ENUM_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Extensible',
+ 'Native',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+# TODO(crbug.com/1234883) MinVersion is not needed for EnumVal.
+_ENUMVAL_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Default',
+ 'MinVersion',
+}
+
+_INTERFACE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'RenamedFrom',
+ 'RequireContext',
+ 'RuntimeFeature',
+ 'ServiceSandbox',
+ 'Stable',
+ 'Uuid',
+}
+
+_METHOD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'AllowedContext',
+ 'MinVersion',
+ 'NoInterrupt',
+ 'RuntimeFeature',
+ 'SupportsUrgent',
+ 'Sync',
+ 'UnlimitedSize',
+}
+
+_MODULE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'JavaConstantsClassName',
+ 'JavaPackage',
+}
+
+_PARAMETER_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
+
+_STRUCT_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'CustomSerializer',
+ 'JavaClassName',
+ 'Native',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+_STRUCT_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
+
+_UNION_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Extensible',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+_UNION_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES | {
+ 'Default',
+}
+
+# TODO(https://crbug.com/1193875) empty this set and remove the allowlist.
+_STABLE_ONLY_ALLOWLISTED_ENUMS = {
+ 'crosapi.mojom.OptionalBool',
+ 'crosapi.mojom.TriState',
+}
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ def _Respell(self, allowed, attribute):
+ for a in allowed:
+ if a.lower() == attribute.lower():
+ return f" - Did you mean: {a}?"
+ return ""
+
+ def _CheckAttributes(self, context, allowed, attributes):
+ if not attributes:
+ return
+ for attribute in attributes:
+ if not attribute in allowed:
+ # Is there a close misspelling?
+ hint = self._Respell(allowed, attribute)
+ raise check.CheckException(
+ self.module,
+ f"attribute {attribute} not allowed on {context}{hint}")
+
+ def _CheckEnumAttributes(self, enum):
+ if enum.attributes:
+ self._CheckAttributes("enum", _ENUM_ATTRIBUTES, enum.attributes)
+ if 'Stable' in enum.attributes and not 'Extensible' in enum.attributes:
+ full_name = f"{self.module.mojom_namespace}.{enum.mojom_name}"
+ if full_name not in _STABLE_ONLY_ALLOWLISTED_ENUMS:
+ raise check.CheckException(
+ self.module,
+ f"[Extensible] required on [Stable] enum {full_name}")
+ for enumval in enum.fields:
+ self._CheckAttributes("enum value", _ENUMVAL_ATTRIBUTES,
+ enumval.attributes)
+
+ def _CheckInterfaceAttributes(self, interface):
+ self._CheckAttributes("interface", _INTERFACE_ATTRIBUTES,
+ interface.attributes)
+ for method in interface.methods:
+ self._CheckAttributes("method", _METHOD_ATTRIBUTES, method.attributes)
+ for param in method.parameters:
+ self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
+ param.attributes)
+ if method.response_parameters:
+ for param in method.response_parameters:
+ self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
+ param.attributes)
+ for enum in interface.enums:
+ self._CheckEnumAttributes(enum)
+
+ def _CheckModuleAttributes(self):
+ self._CheckAttributes("module", _MODULE_ATTRIBUTES, self.module.attributes)
+
+ def _CheckStructAttributes(self, struct):
+ self._CheckAttributes("struct", _STRUCT_ATTRIBUTES, struct.attributes)
+ for field in struct.fields:
+ self._CheckAttributes("struct field", _STRUCT_FIELD_ATTRIBUTES,
+ field.attributes)
+ for enum in struct.enums:
+ self._CheckEnumAttributes(enum)
+
+ def _CheckUnionAttributes(self, union):
+ self._CheckAttributes("union", _UNION_ATTRIBUTES, union.attributes)
+ for field in union.fields:
+ self._CheckAttributes("union field", _UNION_FIELD_ATTRIBUTES,
+ field.attributes)
+
+ def CheckModule(self):
+ """Note that duplicate attributes are forbidden at the parse phase.
+ We also do not need to look at the types of any parameters, as they will be
+ checked where they are defined. Consts do not have attributes so can be
+ skipped."""
+ self._CheckModuleAttributes()
+ for interface in self.module.interfaces:
+ self._CheckInterfaceAttributes(interface)
+ for enum in self.module.enums:
+ self._CheckEnumAttributes(enum)
+ for struct in self.module.structs:
+ self._CheckStructAttributes(struct)
+ for union in self.module.unions:
+ self._CheckUnionAttributes(union)
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py
new file mode 100644
index 00000000..f1a50a4a
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py
@@ -0,0 +1,194 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'attributes'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def _testValid(self, filename, content):
+ self.WriteFile(filename, content)
+ self._ParseAndGenerate([filename])
+
+ def _testThrows(self, filename, content, regexp):
+ mojoms = []
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('attributes')
+ self.assertTrue(check_modules['attributes'])
+
+ def testNoAnnotations(self):
+ # Undecorated mojom should be fine.
+ self._testValid(
+ "a.mojom", """
+ module a;
+ struct Bar { int32 a; };
+ enum Hello { kValue };
+ union Thingy { Bar b; Hello hi; };
+ interface Foo {
+ Foo(int32 a, Hello hi, Thingy t) => (Bar b);
+ };
+ """)
+
+ def testValidAnnotations(self):
+ # Obviously this is meaningless and won't generate, but it should pass
+ # the attribute check's validation.
+ self._testValid(
+ "a.mojom", """
+ [JavaConstantsClassName="FakeClass",JavaPackage="org.chromium.Fake"]
+ module a;
+ [Stable, Extensible]
+ enum Hello { [Default] kValue, kValue2, [MinVersion=2] kValue3 };
+ [Native]
+ enum NativeEnum {};
+ [Stable,Extensible]
+ union Thingy { Bar b; [Default]int32 c; Hello hi; };
+
+ [Stable,RenamedFrom="module.other.Foo",
+ Uuid="4C178401-4B07-4C2E-9255-5401A943D0C7"]
+ struct Structure { Hello hi; };
+
+ [ServiceSandbox=Hello.kValue,RequireContext=Hello.kValue,Stable,
+ Uuid="2F17D7DD-865A-4B1C-9394-9C94E035E82F"]
+ interface Foo {
+ [AllowedContext=Hello.kValue]
+ Foo@0(int32 a) => (int32 b);
+ [MinVersion=2,Sync,UnlimitedSize,NoInterrupt]
+ Bar@1(int32 b, [MinVersion=2]Structure? s) => (bool c);
+ };
+
+ [RuntimeFeature=test.mojom.FeatureName]
+ interface FooFeatureControlled {};
+
+ interface FooMethodFeatureControlled {
+ [RuntimeFeature=test.mojom.FeatureName]
+ MethodWithFeature() => (bool c);
+ };
+ """)
+
+ def testWrongModuleStable(self):
+ contents = """
+ // err: module cannot be Stable
+ [Stable]
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute Stable not allowed on module')
+
+ def testWrongEnumDefault(self):
+ contents = """
+ module a;
+ // err: default should go on EnumValue not Enum.
+ [Default=kValue]
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute Default not allowed on enum')
+
+ def testWrongStructMinVersion(self):
+ contents = """
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ // err: struct cannot have MinVersion.
+ [MinVersion=2]
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute MinVersion not allowed on struct')
+
+ def testWrongMethodRequireContext(self):
+ contents = """
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ // err: RequireContext is for interfaces.
+ [RequireContext=Hello.kValue]
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext not allowed on method')
+
+  def testMisspelledSync(self):
+ # crbug.com/1230122
+ contents = """
+ module a;
+ interface Foo {
+ // err: sync not Sync.
+ [sync]
+ Foo(int32 a) => (int32 b);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute sync not allowed.*Did you mean: Sync')
+
+ def testStableExtensibleEnum(self):
+ # crbug.com/1193875
+ contents = """
+ module a;
+ [Stable]
+ enum Foo {
+ kDefaultVal,
+ kOtherVal = 2,
+ };
+ """
+ self._testThrows('a.mojom', contents,
+ 'Extensible.*?required.*?Stable.*?enum')
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py
new file mode 100644
index 00000000..702d41c3
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Ensure no duplicate type definitions before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ def CheckModule(self):
+ kinds = dict()
+ for module in self.module.imports:
+ for kind in module.enums + module.structs + module.unions:
+ kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
+ if kind_name in kinds:
+ previous_module = kinds[kind_name]
+ if previous_module.path != module.path:
+ raise check.CheckException(
+          self.module, f"multiple-definition for type {kind_name} " +
+ f"(defined in both {previous_module} and {module})")
+ kinds[kind_name] = kind.module
+
+ for kind in self.module.enums + self.module.structs + self.module.unions:
+ kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
+ if kind_name in kinds:
+ previous_module = kinds[kind_name]
+ raise check.CheckException(
+        self.module, f"multiple-definition for type {kind_name} " +
+ f"(previous definition in {previous_module})")
+ return True
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py
new file mode 100644
index 00000000..07f51a64
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py
@@ -0,0 +1,62 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate mojo runtime feature guarded interfaces are nullable."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+  # `kind` is an interface reference of some sort.
+ def _CheckNonNullableFeatureGuardedInterface(self, kind):
+ # Only need to validate interface if it has a RuntimeFeature
+ if not kind.kind.runtime_feature:
+ return
+    # Nullable (optional) is ok, as the receiving side expects that the
+    # interface might not be sent.
+ if kind.is_nullable:
+ return
+ interface = kind.kind.mojom_name
+ raise check.CheckException(
+ self.module,
+ f"interface {interface} has a RuntimeFeature but is not nullable")
+
+  # `kind` can be a lot of things, so check whether it is a remote/receiver.
+ # Array/Map must be recursed into.
+ def _CheckFieldOrParam(self, kind):
+ if module.IsAnyInterfaceKind(kind):
+ self._CheckNonNullableFeatureGuardedInterface(kind)
+ if module.IsArrayKind(kind):
+ self._CheckFieldOrParam(kind.kind)
+ if module.IsMapKind(kind):
+ self._CheckFieldOrParam(kind.key_kind)
+ self._CheckFieldOrParam(kind.value_kind)
+
+ def _CheckInterfaceFeatures(self, interface):
+ for method in interface.methods:
+ for param in method.parameters:
+ self._CheckFieldOrParam(param.kind)
+ if method.response_parameters:
+ for param in method.response_parameters:
+ self._CheckFieldOrParam(param.kind)
+
+ def _CheckStructFeatures(self, struct):
+ for field in struct.fields:
+ self._CheckFieldOrParam(field.kind)
+
+ def _CheckUnionFeatures(self, union):
+ for field in union.fields:
+ self._CheckFieldOrParam(field.kind)
+
+ def CheckModule(self):
+ """Validate that any runtime feature guarded interfaces that might be passed
+ over mojo are nullable."""
+ for interface in self.module.interfaces:
+ self._CheckInterfaceFeatures(interface)
+ for struct in self.module.structs:
+ self._CheckStructFeatures(struct)
+ for union in self.module.unions:
+ self._CheckUnionFeatures(union)
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py
new file mode 100644
index 00000000..e96152fd
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py
@@ -0,0 +1,173 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'features'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def assertValid(self, filename, content):
+ self.WriteFile(filename, content)
+ self._ParseAndGenerate([filename])
+
+ def assertThrows(self, filename, content, regexp):
+ mojoms = []
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('features')
+ self.assertTrue(check_modules['features'])
+
+ def testNullableOk(self):
+ self.assertValid(
+ "a.mojom", """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded {
+ };
+
+ // Unguarded interfaces should be ok everywhere.
+ interface NotGuarded { };
+
+ // Optional (nullable) interfaces should be ok everywhere:
+ struct Bar {
+ pending_remote<Guarded>? remote;
+ pending_receiver<Guarded>? receiver;
+ };
+ union Thingy {
+ pending_remote<Guarded>? remote;
+ pending_receiver<Guarded>? receiver;
+ };
+ interface Foo {
+ Foo(
+ pending_remote<Guarded>? remote,
+ pending_receiver<Guarded>? receiver,
+ pending_associated_remote<Guarded>? a_remote,
+ pending_associated_receiver<Guarded>? a_receiver,
+ // Unguarded interfaces do not have to be nullable.
+            pending_remote<NotGuarded> remote2,
+            pending_receiver<NotGuarded> receiver2,
+            pending_associated_remote<NotGuarded> a_remote2,
+            pending_associated_receiver<NotGuarded> a_receiver2
+ ) => (
+ pending_remote<Guarded>? remote,
+ pending_receiver<Guarded>? receiver
+ );
+ Bar(array<pending_remote<Guarded>?> remote)
+ => (map<string, pending_receiver<Guarded>?> a);
+ };
+ """)
+
+ def testMethodParamsMustBeNullable(self):
+ prelude = """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded { };
+ """
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_remote<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(bool foo) => (pending_receiver<Guarded> a);
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_receiver<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_associated_remote<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_associated_receiver<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(array<pending_associated_receiver<Guarded>> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(map<string, pending_associated_receiver<Guarded>> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+
+ def testStructUnionMembersMustBeNullable(self):
+ prelude = """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded { };
+ """
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ struct Trial {
+ pending_remote<Guarded> a;
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ union Trial {
+ pending_remote<Guarded> a;
+ };
+ """, 'interface Guarded has a RuntimeFeature')
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py
new file mode 100644
index 00000000..d570e26c
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py
@@ -0,0 +1,102 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate RequireContext and AllowedContext annotations before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ self.kind_to_interfaces = dict()
+ super(Check, self).__init__(*args, **kwargs)
+
+ def _IsPassedInterface(self, candidate):
+ if isinstance(
+ candidate.kind,
+ (module.PendingReceiver, module.PendingRemote,
+ module.PendingAssociatedReceiver, module.PendingAssociatedRemote)):
+ return True
+ return False
+
+ def _CheckInterface(self, method, param):
+ # |param| is a pending_x<Interface> so need .kind.kind to get Interface.
+ interface = param.kind.kind
+ if interface.require_context:
+ if method.allowed_context is None:
+ raise check.CheckException(
+ self.module, "method `{}` has parameter `{}` which passes interface"
+ " `{}` that requires an AllowedContext annotation but none exists.".
+ format(
+ method.mojom_name,
+ param.mojom_name,
+ interface.mojom_name,
+ ))
+ # If a string was provided, or if an enum was not imported, this will
+ # be a string and we cannot validate that it is in range.
+ if not isinstance(method.allowed_context, module.EnumValue):
+ raise check.CheckException(
+ self.module,
+ "method `{}` has AllowedContext={} which is not a valid enum value."
+ .format(method.mojom_name, method.allowed_context))
+ # EnumValue must be from the same enum to be compared.
+ if interface.require_context.enum != method.allowed_context.enum:
+ raise check.CheckException(
+ self.module, "method `{}` has parameter `{}` which passes interface"
+ " `{}` that requires AllowedContext={} but one of kind `{}` was "
+ "provided.".format(
+ method.mojom_name,
+ param.mojom_name,
+ interface.mojom_name,
+ interface.require_context.enum,
+ method.allowed_context.enum,
+ ))
+ # RestrictContext enums have most privileged field first (lowest value).
+ interface_value = interface.require_context.field.numeric_value
+ method_value = method.allowed_context.field.numeric_value
+ if interface_value < method_value:
+ raise check.CheckException(
+ self.module, "RequireContext={} > AllowedContext={} for method "
+ "`{}` which passes interface `{}`.".format(
+ interface.require_context.GetSpec(),
+ method.allowed_context.GetSpec(), method.mojom_name,
+ interface.mojom_name))
+ return True
+
+ def _GatherReferencedInterfaces(self, field):
+ key = field.kind.spec
+ # structs/unions can nest themselves so we need to bookkeep.
+ if not key in self.kind_to_interfaces:
+      # Might reference ourselves, so create the set first.
+ self.kind_to_interfaces[key] = set()
+ for param in field.kind.fields:
+ if self._IsPassedInterface(param):
+ self.kind_to_interfaces[key].add(param)
+ elif isinstance(param.kind, (module.Struct, module.Union)):
+ for iface in self._GatherReferencedInterfaces(param):
+ self.kind_to_interfaces[key].add(iface)
+ return self.kind_to_interfaces[key]
+
+ def _CheckParams(self, method, params):
+ # Note: we have to repeat _CheckParams for each method as each might have
+ # different AllowedContext= attributes. We cannot memoize this function,
+ # but can do so for gathering referenced interfaces as their RequireContext
+ # attributes do not change.
+ for param in params:
+ if self._IsPassedInterface(param):
+ self._CheckInterface(method, param)
+ elif isinstance(param.kind, (module.Struct, module.Union)):
+ for interface in self._GatherReferencedInterfaces(param):
+ self._CheckInterface(method, interface)
+
+ def _CheckMethod(self, method):
+ if method.parameters:
+ self._CheckParams(method, method.parameters)
+ if method.response_parameters:
+ self._CheckParams(method, method.response_parameters)
+
+ def CheckModule(self):
+ for interface in self.module.interfaces:
+ for method in interface.methods:
+ self._CheckMethod(method)
diff --git a/utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py b/utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py
new file mode 100644
index 00000000..a6cd71e2
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py
@@ -0,0 +1,254 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+# Mojoms that we will use in multiple tests.
+basic_mojoms = {
+ 'level.mojom':
+ """
+ module level;
+ enum Level {
+ kHighest,
+ kMiddle,
+ kLowest,
+ };
+ """,
+ 'interfaces.mojom':
+ """
+ module interfaces;
+ import "level.mojom";
+ struct Foo {int32 bar;};
+ [RequireContext=level.Level.kHighest]
+ interface High {
+ DoFoo(Foo foo);
+ };
+ [RequireContext=level.Level.kMiddle]
+ interface Mid {
+ DoFoo(Foo foo);
+ };
+ [RequireContext=level.Level.kLowest]
+ interface Low {
+ DoFoo(Foo foo);
+ };
+ """
+}
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'restrictions'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _WriteBasicMojoms(self):
+ for filename, contents in basic_mojoms.items():
+ self.WriteFile(filename, contents)
+ return list(basic_mojoms.keys())
+
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('restrictions')
+ self.assertTrue(check_modules['restrictions'])
+
+ def testValidAnnotations(self):
+ mojoms = self._WriteBasicMojoms()
+
+ a = 'a.mojom'
+ self.WriteFile(
+ a, """
+ module a;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ [AllowedContext=level.Level.kHighest]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ interface PassesMedium {
+ [AllowedContext=level.Level.kMiddle]
+ DoMedium(pending_receiver<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumRem(pending_remote<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumAssoc(pending_associated_receiver<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumAssocRem(pending_associated_remote<interfaces.Mid> hi);
+ };
+ interface PassesLow {
+ [AllowedContext=level.Level.kLowest]
+ DoLow(pending_receiver<interfaces.Low> hi);
+ };
+
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ interface PassesNestedHigh {
+ [AllowedContext=level.Level.kHighest]
+ DoNestedHigh(Two two);
+ };
+
+ // Allowed as PassesHigh is not itself restricted.
+ interface PassesPassesHigh {
+ DoPass(pending_receiver<PassesHigh> hiho);
+ };
+ """)
+ mojoms.append(a)
+ self._ParseAndGenerate(mojoms)
+
+ def _testThrows(self, filename, content, regexp):
+ mojoms = self._WriteBasicMojoms()
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testMissingAnnotation(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ // err: missing annotation.
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testAllowTooLow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ // err: level is worse than required.
+ [AllowedContext=level.Level.kMiddle]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
+
+ def testWrongEnumInAllow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ enum Blah {
+ kZero,
+ };
+ interface PassesHigh {
+ // err: different enums.
+ [AllowedContext=Blah.kZero]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'but one of kind')
+
+ def testNotAnEnumInAllow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ interface PassesHigh {
+ // err: not an enum.
+ [AllowedContext=doopdedoo.mojom.kWhatever]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'not a valid enum value')
+
+ def testMissingAllowedForNestedStructs(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ interface PassesNestedHigh {
+ // err: missing annotation.
+ DoNestedHigh(Two two);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testMissingAllowedForNestedUnions(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ union Three {One one; Two two; };
+ interface PassesNestedHigh {
+ // err: missing annotation.
+ DoNestedHigh(Three three);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testMultipleInterfacesThrows(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ interface PassesMultipleInterfaces {
+ [AllowedContext=level.Level.kMiddle]
+ DoMultiple(
+ pending_remote<interfaces.Mid> mid,
+ pending_receiver<interfaces.High> hi,
+ One one
+ );
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
+
+ def testMultipleInterfacesAllowed(self):
+ """Multiple interfaces can be passed, all satisfy the level."""
+ mojoms = self._WriteBasicMojoms()
+
+ b = "b.mojom"
+ self.WriteFile(
+ b, """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ interface PassesMultipleInterfaces {
+ [AllowedContext=level.Level.kHighest]
+ DoMultiple(
+ pending_receiver<interfaces.High> hi,
+ pending_remote<interfaces.Mid> mid,
+ One one
+ );
+ };
+ """)
+ mojoms.append(b)
+ self._ParseAndGenerate(mojoms)
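Taken together, the tests above pin down the contract the restrictions check enforces: passing a receiver or remote of a RequireContext-annotated interface, directly or nested through structs and unions, requires an AllowedContext annotation from the same enum at a level at least as privileged as the requirement. A minimal Python sketch of that comparison, assuming Level ordinals grow with privilege (names hypothetical; this is not the mojom_restrictions_check implementation):

    class CheckException(Exception):
        pass

    def check_allowed_context(method, allowed, required):
        # `allowed` and `required` are hypothetical (enum, value, ordinal)
        # triples standing in for the parsed AllowedContext/RequireContext
        # annotations; ordinals are assumed to increase with privilege.
        if required is None:
            return  # The passed interface is not restricted.
        if allowed is None:
            raise CheckException(
                '%s: passing this interface requires AllowedContext' % method)
        if allowed[0] != required[0]:
            raise CheckException('%s: AllowedContext is %s but one of kind %s '
                                 'is expected' % (method, allowed[1], required[0]))
        if required[2] > allowed[2]:
            raise CheckException('%s: RequireContext=%s > AllowedContext=%s'
                                 % (method, required[1], allowed[1]))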
diff --git a/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni b/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
deleted file mode 100644
index d8a13874..00000000
--- a/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-_typemap_imports = [
- "//chrome/chrome_cleaner/mojom/typemaps/typemaps.gni",
- "//chrome/common/importer/typemaps.gni",
- "//chrome/common/media_router/mojom/typemaps.gni",
- "//chrome/typemaps.gni",
- "//chromecast/typemaps.gni",
- "//chromeos/typemaps.gni",
- "//chromeos/components/multidevice/mojom/typemaps.gni",
- "//chromeos/services/cros_healthd/public/mojom/typemaps.gni",
- "//chromeos/services/device_sync/public/mojom/typemaps.gni",
- "//chromeos/services/network_config/public/mojom/typemaps.gni",
- "//chromeos/services/secure_channel/public/mojom/typemaps.gni",
- "//components/arc/mojom/typemaps.gni",
- "//components/chromeos_camera/common/typemaps.gni",
- "//components/services/storage/public/cpp/filesystem/typemaps.gni",
- "//components/sync/mojom/typemaps.gni",
- "//components/typemaps.gni",
- "//content/browser/typemaps.gni",
- "//content/public/common/typemaps.gni",
- "//sandbox/mac/mojom/typemaps.gni",
- "//services/media_session/public/cpp/typemaps.gni",
- "//services/proxy_resolver/public/cpp/typemaps.gni",
- "//services/resource_coordinator/public/cpp/typemaps.gni",
- "//services/service_manager/public/cpp/typemaps.gni",
- "//services/tracing/public/mojom/typemaps.gni",
-]
-
-_typemaps = []
-foreach(typemap_import, _typemap_imports) {
- # Avoid reassignment error by assigning to empty scope first.
- _imported = {
- }
- _imported = read_file(typemap_import, "scope")
- _typemaps += _imported.typemaps
-}
-
-typemaps = []
-foreach(typemap, _typemaps) {
- typemaps += [
- {
- filename = typemap
- config = read_file(typemap, "scope")
- },
- ]
-}
-
-component_macro_suffix = ""
diff --git a/utils/ipc/mojo/public/tools/bindings/compile_typescript.py b/utils/ipc/mojo/public/tools/bindings/compile_typescript.py
deleted file mode 100644
index a978901b..00000000
--- a/utils/ipc/mojo/public/tools/bindings/compile_typescript.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-import argparse
-
-_HERE_PATH = os.path.dirname(__file__)
-_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
-
-sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
-import node
-import node_modules
-
-def main(argv):
- parser = argparse.ArgumentParser()
- parser.add_argument('--tsconfig_path', required=True)
- args = parser.parse_args(argv)
-
- result = node.RunNode([node_modules.PathToTypescript()] +
- ['--project', args.tsconfig_path])
- if len(result) != 0:
- raise RuntimeError('Failed to compile Typescript: \n%s' % result)
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/utils/ipc/mojo/public/tools/bindings/concatenate-files.py b/utils/ipc/mojo/public/tools/bindings/concatenate-files.py
index 48bc66fd..4dd26d4a 100755
--- a/utils/ipc/mojo/public/tools/bindings/concatenate-files.py
+++ b/utils/ipc/mojo/public/tools/bindings/concatenate-files.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
@@ -15,6 +15,7 @@
from __future__ import print_function
import optparse
+import sys
def Concatenate(filenames):
@@ -47,7 +48,7 @@ def main():
parser.set_usage("""Concatenate several files into one.
Equivalent to: cat file1 ... > target.""")
(_options, args) = parser.parse_args()
- exit(0 if Concatenate(args) else 1)
+ sys.exit(0 if Concatenate(args) else 1)
if __name__ == "__main__":
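The exit() to sys.exit() change here (and in the following file) is a correctness fix, not just style: the bare exit() builtin is installed by the site module for interactive use and is not guaranteed to exist when Python runs with -S or in frozen environments, while sys.exit() always is. The conventional pattern:

    import sys

    def main():
        # Do the work, then report success or failure as an exit status.
        ok = True
        return 0 if ok else 1

    if __name__ == '__main__':
        sys.exit(main())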
diff --git a/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py b/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
index be8985ce..7d56c9f9 100755
--- a/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
+++ b/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -20,6 +20,7 @@ from __future__ import print_function
import optparse
import re
+import sys
_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
@@ -31,10 +32,10 @@ def FilterLine(filename, line, output):
return
if line.startswith("goog.provide"):
- match = re.match("goog.provide\('([^']+)'\);", line)
+ match = re.match(r"goog.provide\('([^']+)'\);", line)
if not match:
print("Invalid goog.provide line in %s:\n%s" % (filename, line))
- exit(1)
+ sys.exit(1)
module_name = match.group(1)
if module_name == _MOJO_INTERNAL_MODULE_NAME:
@@ -67,7 +68,8 @@ def main():
Concatenate several files into one, stripping Closure provide and
require directives along the way.""")
(_, args) = parser.parse_args()
- exit(0 if ConcatenateAndReplaceExports(args) else 1)
+ sys.exit(0 if ConcatenateAndReplaceExports(args) else 1)
+
if __name__ == "__main__":
main()
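The added r prefix on the goog.provide pattern is likewise a small correctness fix: in a plain string literal, \( is an invalid escape that currently passes through with a DeprecationWarning (a SyntaxWarning in newer Pythons) and may eventually become an error. A quick illustration:

    import re

    # Both match today, but only the raw string is future-proof.
    assert re.match("goog.provide\\('([^']+)'\\);", "goog.provide('a.b');")
    assert re.match(r"goog.provide\('([^']+)'\);", "goog.provide('a.b');")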
diff --git a/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py b/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
deleted file mode 100755
index 7ac4af5f..00000000
--- a/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import print_function
-
-import sys
-
-# This utility converts mojom dependencies into their corresponding typemap
-# paths and formats them to be consumed by generate_type_mappings.py.
-
-
-def FormatTypemap(typemap_filename):
- # A simple typemap is valid Python with a minor alteration.
- with open(typemap_filename) as f:
- typemap_content = f.read().replace('=\n', '=')
- typemap = {}
- exec typemap_content in typemap
-
- for header in typemap.get('public_headers', []):
- yield 'public_headers=%s' % header
- for header in typemap.get('traits_headers', []):
- yield 'traits_headers=%s' % header
- for header in typemap.get('type_mappings', []):
- yield 'type_mappings=%s' % header
-
-
-def main():
- typemaps = sys.argv[1:]
- print(' '.join('--start-typemap %s' % ' '.join(FormatTypemap(typemap))
- for typemap in typemaps))
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py b/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
index 8b78d092..c6daff03 100644
--- a/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
+++ b/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
@@ -1,4 +1,4 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a list of all files in a directory.
diff --git a/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py b/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
index a0096649..4a53e2bf 100755
--- a/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
+++ b/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a JSON typemap from its command-line arguments and dependencies.
@@ -82,10 +82,12 @@ def LoadCppTypemapConfig(path):
for entry in config['types']:
configs[entry['mojom']] = {
'typename': entry['cpp'],
+ 'forward_declaration': entry.get('forward_declaration', None),
'public_headers': config.get('traits_headers', []),
'traits_headers': config.get('traits_private_headers', []),
'copyable_pass_by_value': entry.get('copyable_pass_by_value',
False),
+ 'default_constructible': entry.get('default_constructible', True),
'force_serialize': entry.get('force_serialize', False),
'hashable': entry.get('hashable', False),
'move_only': entry.get('move_only', False),
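For reference, the shape of the JSON config consumed by LoadCppTypemapConfig(), including the two newly honored per-type keys; this entry is a hand-written illustration, not a file from the tree:

    config = {
        'types': [{
            'mojom': 'mojo_base.mojom.Value',  # hypothetical entry
            'cpp': '::base::Value',
            'forward_declaration': 'namespace base { class Value; }',
            'default_constructible': False,
            'move_only': True,
        }],
        'traits_headers': ['base/values.h'],
    }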
diff --git a/utils/ipc/mojo/public/tools/bindings/minify_with_terser.py b/utils/ipc/mojo/public/tools/bindings/minify_with_terser.py
new file mode 100755
index 00000000..cefee7a4
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/bindings/minify_with_terser.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This utility minifies JS files with terser.
+#
+# Instance of 'node' has no 'RunNode' member (no-member)
+# pylint: disable=no-member
+
+import argparse
+import os
+import sys
+
+_HERE_PATH = os.path.dirname(__file__)
+_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
+_CWD = os.getcwd()
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
+import node
+import node_modules
+
+
+def MinifyFile(input_file, output_file):
+ node.RunNode([
+ node_modules.PathToTerser(), input_file, '--mangle', '--compress',
+ '--comments', 'false', '--output', output_file
+ ])
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input', required=True)
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args(argv)
+
+ # Delete the output file if it already exists. It may be a symlink to the
+ # input, because in non-optimized/pre-Terser builds the input file is copied
+ # to the output location with gn copy().
+ out_path = os.path.join(_CWD, args.output)
+ if os.path.exists(out_path):
+ os.remove(out_path)
+
+ MinifyFile(os.path.join(_CWD, args.input), out_path)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
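Both paths are resolved against the current working directory (normally the build output directory); a usage sketch with hypothetical file names:

    import subprocess, sys

    # Hypothetical inputs; run from the build output directory so the
    # relative paths resolve the same way the script expects.
    subprocess.check_call([
        sys.executable, 'minify_with_terser.py',
        '--input', 'gen/foo/foo.mojom-webui.js',
        '--output', 'gen/foo/foo.mojom-webui.min.js',
    ])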
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom.gni b/utils/ipc/mojo/public/tools/bindings/mojom.gni
index fe2a1da3..3f6e54e0 100644
--- a/utils/ipc/mojo/public/tools/bindings/mojom.gni
+++ b/utils/ipc/mojo/public/tools/bindings/mojom.gni
@@ -1,25 +1,28 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import("//build/config/python.gni")
import("//third_party/closure_compiler/closure_args.gni")
import("//third_party/closure_compiler/compile_js.gni")
import("//third_party/protobuf/proto_library.gni")
+import("//ui/webui/resources/tools/generate_grd.gni")
import("//ui/webui/webui_features.gni")
+import("//build/config/cast.gni")
+
# TODO(rockot): Maybe we can factor these dependencies out of //mojo. They're
# used to conditionally enable message ID scrambling in a way which is
# consistent across toolchains and which is affected by branded vs non-branded
# Chrome builds. Ideally we could create some generic knobs here that could be
# flipped elsewhere though.
import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/features.gni")
import("//build/config/nacl/config.gni")
import("//build/toolchain/kythe.gni")
import("//components/nacl/features.gni")
import("//third_party/jinja2/jinja2.gni")
+import("//third_party/ply/ply.gni")
import("//tools/ipc_fuzzer/ipc_fuzzer.gni")
declare_args() {
# Indicates whether typemapping should be supported in this build
@@ -34,21 +37,30 @@ declare_args() {
# Controls message ID scrambling behavior. If |true|, message IDs are
# scrambled (i.e. randomized based on the contents of //chrome/VERSION) on
- # non-Chrome OS desktop platforms. Set to |false| to disable message ID
- # scrambling on all platforms.
- enable_mojom_message_id_scrambling = true
+ # non-Chrome OS desktop platforms. Enabled on official builds by default.
+ # Set to |true| to enable message ID scrambling on a specific build.
+ # See also `enable_scrambled_message_ids` below for more details.
+ enable_mojom_message_id_scrambling = is_official_build
+
+ # Enables generating javascript fuzzing-related code and the bindings for the
+ # MojoLPM fuzzer targets. Off by default.
+ enable_mojom_fuzzer = false
# Enables Closure compilation of generated JS lite bindings. In environments
# where compilation is supported, any mojom target "foo" will also have a
# corresponding "foo_js_library_for_compile" target generated.
- enable_mojom_closure_compile = enable_js_type_check && optimize_webui
-
- # Enables generating Typescript bindings and compiling them to JS bindings.
- enable_typescript_bindings = false
+ if (is_chromeos_ash) {
+ enable_mojom_closure_compile = enable_js_type_check && optimize_webui
+ }
+}
- # Enables generating javascript fuzzing-related code and the bindings for the
- # MojoLPM fuzzer targets. Off by default.
- enable_mojom_fuzzer = false
+# Closure libraries are needed for mojom_closure_compile, and when
+# js_type_check is enabled on Ash.
+if (is_chromeos_ash) {
+ generate_mojom_closure_libraries =
+ enable_mojom_closure_compile || enable_js_type_check
+} else {
+ generate_mojom_closure_libraries = false
}
# NOTE: We would like to avoid scrambling message IDs where it doesn't add
@@ -69,9 +81,8 @@ declare_args() {
# lacros-chrome switches to target_os="chromeos"
enable_scrambled_message_ids =
enable_mojom_message_id_scrambling &&
- (is_mac || is_win ||
- (is_linux && !is_chromeos_ash && !is_chromecast && !is_chromeos_lacros) ||
- ((enable_nacl || is_nacl || is_nacl_nonsfi) &&
+ (is_mac || is_win || (is_linux && !is_castos) ||
+ ((enable_nacl || is_nacl) &&
(target_os != "chromeos" && !chromeos_is_browser_only)))
_mojom_tools_root = "//mojo/public/tools"
@@ -80,7 +91,9 @@ mojom_parser_script = "$_mojom_tools_root/mojom/mojom_parser.py"
mojom_parser_sources = [
"$_mojom_library_root/__init__.py",
"$_mojom_library_root/error.py",
+ "$_mojom_library_root/fileutil.py",
"$_mojom_library_root/generate/__init__.py",
+ "$_mojom_library_root/generate/check.py",
"$_mojom_library_root/generate/generator.py",
"$_mojom_library_root/generate/module.py",
"$_mojom_library_root/generate/pack.py",
@@ -88,21 +101,32 @@ mojom_parser_sources = [
"$_mojom_library_root/generate/translate.py",
"$_mojom_library_root/parse/__init__.py",
"$_mojom_library_root/parse/ast.py",
+ "$_mojom_library_root/parse/conditional_features.py",
"$_mojom_library_root/parse/lexer.py",
"$_mojom_library_root/parse/parser.py",
+ "//tools/diagnosis/crbug_1001171.py",
]
mojom_generator_root = "$_mojom_tools_root/bindings"
mojom_generator_script = "$mojom_generator_root/mojom_bindings_generator.py"
mojom_generator_sources =
mojom_parser_sources + [
+ "$mojom_generator_root/checks/__init__.py",
+ "$mojom_generator_root/checks/mojom_attributes_check.py",
+ "$mojom_generator_root/checks/mojom_definitions_check.py",
+ "$mojom_generator_root/checks/mojom_interface_feature_check.py",
+ "$mojom_generator_root/checks/mojom_restrictions_check.py",
+ "$mojom_generator_root/generators/__init__.py",
"$mojom_generator_root/generators/cpp_util.py",
"$mojom_generator_root/generators/mojom_cpp_generator.py",
"$mojom_generator_root/generators/mojom_java_generator.py",
- "$mojom_generator_root/generators/mojom_mojolpm_generator.py",
"$mojom_generator_root/generators/mojom_js_generator.py",
+ "$mojom_generator_root/generators/mojom_mojolpm_generator.py",
"$mojom_generator_root/generators/mojom_ts_generator.py",
"$mojom_generator_script",
+ "//build/action_helpers.py",
+ "//build/gn_helpers.py",
+ "//build/zip_helpers.py",
]
if (enable_scrambled_message_ids) {
@@ -243,12 +267,16 @@ if (enable_scrambled_message_ids) {
# |cpp_only| is set to true, it overrides this to prevent generation of
# Java bindings.
#
-# enable_fuzzing (optional)
+# enable_js_fuzzing (optional)
+# Enables generation of javascript fuzzing sources for the target if the
+# global build arg |enable_mojom_fuzzer| is also set to |true|.
+# Defaults to |true|. If JS fuzzing generation is enabled for a target,
+# the target will always generate JS bindings even if |cpp_only| is set to
+# |true|. See note above.
+#
+# enable_mojolpm_fuzzing (optional)
# Enables generation of fuzzing sources for the target if the global build
-# arg |enable_mojom_fuzzer| is also set to |true|. Defaults to |true|. If
-# fuzzing generation is enabled for a target, the target will always
-# generate JS bindings even if |cpp_only| is set to |true|. See note
-# above.
+# arg |enable_mojom_fuzzer| is also set to |true|. Defaults to |true|.
#
# support_lazy_serialization (optional)
# If set to |true|, generated C++ bindings will effectively prefer to
@@ -310,8 +338,15 @@ if (enable_scrambled_message_ids) {
# correct dependency order. Note that this only has an effect if
# the |enable_mojom_closure_compile| global arg is set to |true| as well.
#
-# use_typescript_sources (optional)
-# Uses the Typescript generator to generate JavaScript bindings.
+# generate_webui_js_bindings (optional)
+# Generate WebUI bindings in JavaScript rather than TypeScript. Defaults
+# to false. ChromeOS-only parameter.
+#
+# generate_legacy_js_bindings (optional)
+# Generate js_data_deps target containing legacy JavaScript bindings files
+# for Blink tests and other non-WebUI users when generating TypeScript
+# bindings for WebUI. Ignored if generate_webui_js_bindings is set to
+# true.
#
# js_generate_struct_deserializers (optional)
# Generates JS deserialize methods for structs.
@@ -323,17 +358,23 @@ if (enable_scrambled_message_ids) {
# webui_module_path (optional)
# The path or URL at which modules generated by this target will be
# accessible to WebUI pages. This may either be an absolute path or
-# a full URL path starting with "chrome://resources/mojo".
+# a full URL path starting with "chrome://resources/mojo". If this path
+# is not specified, WebUI bindings will not be generated.
#
# If an absolute path, a WebUI page may only import these modules if
-# they are manually packaged and mapped independently by that page's
-# WebUIDataSource. The mapped path must match the path given here.
+# they are added to that page's data source (usually by adding the
+# modules to the mojo_files list for build_webui(), or by listing the
+# files as inputs to the page's ts_library() and/or generate_grd() build
+# steps).
#
+# If this is instead a URL string starting with
-# "chrome://resources/mojo", the generated resources must be added to
-# content_resources.grd and registered with
-# content::SharedResourcesDataSource with a corresponding path, at which
-# point they will be made available to all WebUI pages at the given URL.
+# "chrome://resources/mojo", the resulting bindings files should
+# be added to one of the lists in ui/webui/resources/mojo/BUILD.gn,
+# at which point they will be made available to all WebUI pages at the
+# given URL.
+#
+# Note: WebUI module bindings are generated in TypeScript by default,
+# unless |generate_webui_js_bindings| is specified as true.
#
# The following parameters are used to support the component build. They are
# needed so that bindings which are linked with a component can use the same
@@ -402,16 +443,41 @@ if (enable_scrambled_message_ids) {
# should be mapped in generated bindings. This is a string like
# "::base::Value" or "std::vector<::base::Value>".
#
-# move_only (optional)
-# A boolean value (default false) which indicates whether the C++
-# type is move-only. If true, generated bindings will pass the type
-# by value and use std::move() at call sites.
-#
# copyable_pass_by_value (optional)
# A boolean value (default false) which effectively indicates
# whether the C++ type is very cheap to copy. If so, generated
# bindings will pass by value but not use std::move() at call sites.
#
+# default_constructible (optional)
+# A boolean value (default true) which indicates whether the C++
+# type is default constructible. If a C++ type is not default
+# constructible (e.g. the implementor of the type prefers not to
+# publicly expose a default constructor that creates an object in an
+# invalid state), Mojo will instead construct the C++ type with an
+# argument of the type `mojo::DefaultConstruct::Tag` (essentially a
+# passkey-like type specifically for this use case).
+#
+# force_serialize (optional)
+# A boolean value (default false) which disables lazy serialization
+# of the typemapped type if lazy serialization is enabled for the
+# mojom target applying this typemap.
+#
+# forward_declaration (optional)
+# A forward declaration of the C++ type, which bindings that don't
+# need the full type definition can use to reduce the size of
+# the generated code. This is a string like
+# "namespace base { class Value; }".
+#
+# hashable (optional)
+# A boolean value (default false) indicating whether the C++ type is
+# hashable. Set to true only if the type is hashable AND hashing is
+# needed (i.e. you need to use the type as the key of a mojom map).
+#
+# move_only (optional)
+# A boolean value (default false) which indicates whether the C++
+# type is move-only. If true, generated bindings will pass the type
+# by value and use std::move() at call sites.
+#
# nullable_is_same_type (optional)
# A boolean value (default false) which indicates that the C++ type
# has some baked-in semantic notion of a "null" state. If true, the
@@ -421,16 +487,6 @@ if (enable_scrambled_message_ids) {
# type with absl::optional, and null values are simply
# absl::nullopt.
#
-# hashable (optional)
-# A boolean value (default false) indicating whether the C++ type is
-# hashable. Set to true if true AND needed (i.e. you need to use the
-# type as the key of a mojom map).
-#
-# force_serialize (optional)
-# A boolean value (default false) which disables lazy serialization
-# of the typemapped type if lazy serialization is enabled for the
-# mojom target applying this typemap.
-#
# Additional typemap scope parameters:
#
# traits_headers (optional)
@@ -621,20 +677,26 @@ template("mojom") {
build_metadata_filename = "$target_gen_dir/$target_name.build_metadata"
build_metadata = {
}
- build_metadata.sources = rebase_path(sources_list)
+ build_metadata.sources = rebase_path(sources_list, target_gen_dir)
build_metadata.deps = []
foreach(dep, all_deps) {
dep_target_gen_dir = get_label_info(dep, "target_gen_dir")
dep_name = get_label_info(dep, "name")
build_metadata.deps +=
- [ rebase_path("$dep_target_gen_dir/$dep_name.build_metadata") ]
+ [ rebase_path("$dep_target_gen_dir/$dep_name.build_metadata",
+ target_gen_dir) ]
}
write_file(build_metadata_filename, build_metadata, "json")
- generate_fuzzing =
- (!defined(invoker.enable_fuzzing) || invoker.enable_fuzzing) &&
+ generate_js_fuzzing =
+ (!defined(invoker.enable_js_fuzzing) || invoker.enable_js_fuzzing) &&
enable_mojom_fuzzer && (!defined(invoker.testonly) || !invoker.testonly)
+ generate_mojolpm_fuzzing =
+ (!defined(invoker.enable_mojolpm_fuzzing) ||
+ invoker.enable_mojolpm_fuzzing) && enable_mojom_fuzzer &&
+ (!defined(invoker.testonly) || !invoker.testonly)
+
parser_target_name = "${target_name}__parser"
parser_deps = []
foreach(dep, all_deps) {
@@ -665,30 +727,34 @@ template("mojom") {
"is_chromeos",
"is_chromeos_ash",
]
+ } else if (is_chromeos_lacros) {
+ enabled_features += [
+ "is_chromeos",
+ "is_chromeos_lacros",
+ ]
} else if (is_fuchsia) {
enabled_features += [ "is_fuchsia" ]
} else if (is_ios) {
enabled_features += [ "is_ios" ]
- } else if (is_linux || is_chromeos_lacros) {
+ } else if (is_linux) {
enabled_features += [ "is_linux" ]
- if (is_chromeos_lacros) {
- enabled_features += [
- "is_chromeos",
- "is_chromeos_lacros",
- ]
- }
} else if (is_mac) {
enabled_features += [ "is_mac" ]
} else if (is_win) {
enabled_features += [ "is_win" ]
}
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(parser_target_name) {
+ if (is_apple) {
+ enabled_features += [ "is_apple" ]
+ }
+
+ action(parser_target_name) {
+ allow_remote = true
+ custom_processor = "mojom_parser"
script = mojom_parser_script
- inputs = mojom_parser_sources + [ build_metadata_filename ]
+ inputs = mojom_parser_sources + ply_sources + [ build_metadata_filename ]
sources = sources_list
- deps = parser_deps
+ public_deps = parser_deps
outputs = []
foreach(base_path, output_file_base_paths) {
filename = get_path_info(base_path, "file")
@@ -698,31 +764,35 @@ template("mojom") {
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path(source) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
- response_file_contents = filelist
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args = [
# Resolve relative input mojom paths against both the root src dir and
# the root gen dir.
"--input-root",
- rebase_path("//."),
+ rebase_path("//.", root_build_dir),
"--input-root",
- rebase_path(root_gen_dir),
+ rebase_path(root_gen_dir, root_build_dir),
"--output-root",
- rebase_path(root_gen_dir),
+ rebase_path(root_gen_dir, root_build_dir),
- "--mojom-file-list={{response_file_name}}",
+ "--mojom-file-list=" + rebase_path(rsp_file, root_build_dir),
"--check-imports",
- rebase_path(build_metadata_filename),
+ rebase_path(build_metadata_filename, root_build_dir),
]
if (defined(invoker.input_root_override)) {
args += [
"--input-root",
- rebase_path(invoker.input_root_override),
+ rebase_path(invoker.input_root_override, root_build_dir),
]
}
@@ -738,6 +808,13 @@ template("mojom") {
"--add-module-metadata",
"webui_module_path=${invoker.webui_module_path}",
]
+ if (defined(invoker.generate_webui_js_bindings) &&
+ invoker.generate_webui_js_bindings) {
+ args += [
+ "--add-module-metadata",
+ "generate_webui_js=True",
+ ]
+ }
}
}
}
@@ -819,11 +896,12 @@ template("mojom") {
}
}
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_cpp_message_ids_target_name) {
+ action(generator_cpp_message_ids_target_name) {
+ allow_remote = true
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = sources_list
+ sources = sources_list +
+ [ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip" ]
deps = [
":$parser_target_name",
"//mojo/public/tools/bindings:precompile_templates",
@@ -835,16 +913,22 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
outputs += [ "$root_gen_dir/$base_path-shared-message-ids.h" ]
}
- response_file_contents = filelist
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
"--generate_non_variant_code",
"--generate_message_ids",
"-g",
@@ -860,12 +944,13 @@ template("mojom") {
generator_shared_target_name = "${target_name}_shared__generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_shared_target_name) {
+ action(generator_shared_target_name) {
+ allow_remote = true
visibility = [ ":*" ]
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = sources_list
+ sources = sources_list +
+ [ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip" ]
deps = [
":$parser_target_name",
"//mojo/public/tools/bindings:precompile_templates",
@@ -878,10 +963,16 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
+ # Need the mojom-module as an input to this action.
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+
outputs += [
+ "$root_gen_dir/$base_path-features.h",
"$root_gen_dir/$base_path-params-data.h",
"$root_gen_dir/$base_path-shared-internal.h",
"$root_gen_dir/$base_path-shared.cc",
@@ -889,10 +980,13 @@ template("mojom") {
]
}
- response_file_contents = filelist
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
"--generate_non_variant_code",
"-g",
"c++",
@@ -923,12 +1017,14 @@ template("mojom") {
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
deps = []
public_deps = []
if (output_file_base_paths != []) {
sources = []
foreach(base_path, output_file_base_paths) {
sources += [
+ "$root_gen_dir/$base_path-features.h",
"$root_gen_dir/$base_path-params-data.h",
"$root_gen_dir/$base_path-shared-internal.h",
"$root_gen_dir/$base_path-shared.cc",
@@ -972,7 +1068,7 @@ template("mojom") {
}
}
- if (generate_fuzzing) {
+ if (generate_mojolpm_fuzzing) {
# This block generates the proto files used for the MojoLPM fuzzer,
# and the corresponding proto targets that will be linked in the fuzzer
# targets. These are independent of the typemappings, and can be done
@@ -981,11 +1077,15 @@ template("mojom") {
generator_mojolpm_proto_target_name =
"${target_name}_mojolpm_proto_generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_mojolpm_proto_target_name) {
+ action(generator_mojolpm_proto_target_name) {
+ allow_remote = true
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = invoker.sources
+ sources =
+ invoker.sources + [
+ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip",
+ "$root_gen_dir/mojo/public/tools/bindings/mojolpm_templates.zip",
+ ]
deps = [
":$parser_target_name",
"//mojo/public/tools/bindings:precompile_templates",
@@ -994,15 +1094,37 @@ template("mojom") {
outputs = []
args = common_generator_args
filelist = []
- foreach(source, invoker.sources) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources = filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+
+ foreach(source, non_gen_sources) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ inputs += [ "$target_gen_dir/$source-module" ]
outputs += [ "$target_gen_dir/$source.mojolpm.proto" ]
}
- response_file_contents = filelist
+ foreach(source, gen_sources) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+
+ # For generated files, we assume they're in the target_gen_dir or a
+ # sub-folder of it. Rebase the path so we can get the relative location.
+ source_file = rebase_path(source, target_gen_dir)
+ inputs += [ "$target_gen_dir/$source_file-module" ]
+ outputs += [ "$target_gen_dir/$source_file.mojolpm.proto" ]
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
"--generate_non_variant_code",
"-g",
"mojolpm",
@@ -1014,9 +1136,20 @@ template("mojom") {
proto_library(mojolpm_proto_target_name) {
testonly = true
generate_python = false
+
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources = filter_include(invoker.sources, [ gen_dir_path_wildcard ])
sources = process_file_template(
- invoker.sources,
+ non_gen_sources,
[ "{{source_gen_dir}}/{{source_file_part}}.mojolpm.proto" ])
+ sources += process_file_template(
+ gen_sources,
+ [ "{{source_dir}}/{{source_file_part}}.mojolpm.proto" ])
+
import_dirs = [ "//" ]
proto_in_dir = "${root_gen_dir}"
proto_out_dir = "."
@@ -1055,7 +1188,7 @@ template("mojom") {
component_macro_suffix = ""
}
if ((!defined(invoker.disable_variants) || !invoker.disable_variants) &&
- !is_ios) {
+ use_blink) {
blink_variant = {
variant = "blink"
component_macro_suffix = "_BLINK"
@@ -1149,39 +1282,6 @@ template("mojom") {
"${bindings_configuration.component_macro_suffix}_IMPL" ]
}
- export_args = []
- export_args_overridden = false
- if (defined(bindings_configuration.for_blink) &&
- bindings_configuration.for_blink) {
- if (defined(invoker.export_class_attribute_blink)) {
- export_args_overridden = true
- export_args += [
- "--export_attribute",
- invoker.export_class_attribute_blink,
- "--export_header",
- invoker.export_header_blink,
- ]
- }
- } else if (defined(invoker.export_class_attribute)) {
- export_args_overridden = true
- export_args += [
- "--export_attribute",
- invoker.export_class_attribute,
- "--export_header",
- invoker.export_header,
- ]
- }
-
- if (!export_args_overridden && defined(invoker.component_macro_prefix)) {
- export_args += [
- "--export_attribute",
- "COMPONENT_EXPORT(${invoker.component_macro_prefix}" +
- "${bindings_configuration.component_macro_suffix})",
- "--export_header",
- "base/component_export.h",
- ]
- }
-
generate_java = false
if (!cpp_only && defined(invoker.generate_java)) {
generate_java = invoker.generate_java
@@ -1190,6 +1290,38 @@ template("mojom") {
type_mappings_path =
"$target_gen_dir/${target_name}${variant_suffix}__type_mappings"
if (sources_list != []) {
+ export_args = []
+ export_args_overridden = false
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ if (defined(invoker.export_class_attribute_blink)) {
+ export_args_overridden = true
+ export_args += [
+ "--export_attribute",
+ invoker.export_class_attribute_blink,
+ "--export_header",
+ invoker.export_header_blink,
+ ]
+ }
+ } else if (defined(invoker.export_class_attribute)) {
+ export_args_overridden = true
+ export_args += [
+ "--export_attribute",
+ invoker.export_class_attribute,
+ "--export_header",
+ invoker.export_header,
+ ]
+ }
+ if (!export_args_overridden && defined(invoker.component_macro_prefix)) {
+ export_args += [
+ "--export_attribute",
+ "COMPONENT_EXPORT(${invoker.component_macro_prefix}" +
+ "${bindings_configuration.component_macro_suffix})",
+ "--export_header",
+ "base/component_export.h",
+ ]
+ }
+
generator_cpp_output_suffixes = []
variant_dash_suffix = ""
if (defined(variant)) {
@@ -1198,7 +1330,6 @@ template("mojom") {
generator_cpp_output_suffixes += [
"${variant_dash_suffix}-forward.h",
"${variant_dash_suffix}-import-headers.h",
- "${variant_dash_suffix}-test-utils.cc",
"${variant_dash_suffix}-test-utils.h",
"${variant_dash_suffix}.cc",
"${variant_dash_suffix}.h",
@@ -1207,16 +1338,28 @@ template("mojom") {
generator_target_name = "${target_name}${variant_suffix}__generator"
# TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_target_name) {
+ action(generator_target_name) {
+ allow_remote = true
visibility = [ ":*" ]
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = sources_list
+ sources =
+ sources_list + [
+ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip",
+ type_mappings_path,
+ ]
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
+ sources += [
+ "$root_gen_dir/mojo/public/tools/bindings/mojolpm_templates.zip",
+ ]
+ }
deps = [
":$parser_target_name",
":$type_mappings_target_name",
"//mojo/public/tools/bindings:precompile_templates",
]
+
if (defined(invoker.parser_deps)) {
deps += invoker.parser_deps
}
@@ -1224,18 +1367,22 @@ template("mojom") {
args = common_generator_args + export_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+
outputs += [
"$root_gen_dir/${base_path}${variant_dash_suffix}-forward.h",
"$root_gen_dir/${base_path}${variant_dash_suffix}-import-headers.h",
- "$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.cc",
"$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.h",
"$root_gen_dir/${base_path}${variant_dash_suffix}.cc",
"$root_gen_dir/${base_path}${variant_dash_suffix}.h",
]
- if (generate_fuzzing && !defined(bindings_configuration.variant)) {
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
outputs += [
"$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.cc",
"$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.h",
@@ -1243,14 +1390,17 @@ template("mojom") {
}
}
- response_file_contents = filelist
-
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path("$rsp_file", root_build_dir),
"-g",
]
- if (generate_fuzzing && !defined(bindings_configuration.variant)) {
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
args += [ "c++,mojolpm" ]
} else {
args += [ "c++" ]
@@ -1294,6 +1444,8 @@ template("mojom") {
"--extra_cpp_template_paths",
rebase_path(extra_cpp_template, root_build_dir),
]
+ inputs += [ extra_cpp_template ]
+
assert(
get_path_info(extra_cpp_template, "extension") == "tmpl",
"--extra_cpp_template_paths only accepts template files ending in extension .tmpl")
@@ -1306,62 +1458,6 @@ template("mojom") {
}
}
- if (generate_fuzzing && !defined(variant)) {
- # This block contains the C++ targets for the MojoLPM fuzzer, we need to
- # do this here so that we can use the typemap configuration for the
- # empty-variant Mojo target.
-
- mojolpm_target_name = "${target_name}_mojolpm"
- mojolpm_generator_target_name = "${target_name}__generator"
- source_set(mojolpm_target_name) {
- # There are still a few missing header dependencies between mojo targets
- # with typemaps and the dependencies of their typemap headers. It would
- # be good to enable include checking for these in the future though.
- check_includes = false
- testonly = true
- if (defined(invoker.sources)) {
- sources = process_file_template(
- invoker.sources,
- [
- "{{source_gen_dir}}/{{source_file_part}}-mojolpm.cc",
- "{{source_gen_dir}}/{{source_file_part}}-mojolpm.h",
- ])
- deps = []
- } else {
- sources = []
- deps = []
- }
-
- public_deps = [
- ":$generator_shared_target_name",
-
- # NB: hardcoded dependency on the no-variant variant generator, since
- # mojolpm only uses the no-variant type.
- ":$mojolpm_generator_target_name",
- ":$mojolpm_proto_target_name",
- "//base",
- "//mojo/public/tools/fuzzers:mojolpm",
- ]
-
- foreach(d, all_deps) {
- # Resolve the name, so that a target //mojo/something becomes
- # //mojo/something:something and we can append variant_suffix to
- # get the cpp dependency name.
- full_name = get_label_info("$d", "label_no_toolchain")
- public_deps += [ "${full_name}_mojolpm" ]
- }
-
- foreach(config, cpp_typemap_configs) {
- if (defined(config.traits_deps)) {
- deps += config.traits_deps
- }
- if (defined(config.traits_public_deps)) {
- public_deps += config.traits_public_deps
- }
- }
- }
- }
-
# Write the typemapping configuration for this target out to a file to be
# validated by a Python script. This helps catch mistakes that can't
# be caught by logic in GN.
@@ -1389,20 +1485,20 @@ template("mojom") {
write_file(_typemap_config_filename, _rebased_typemap_configs, "json")
_mojom_target_name = target_name
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(_typemap_validator_target_name) {
+ action(_typemap_validator_target_name) {
+ allow_remote = true
script = "$mojom_generator_root/validate_typemap_config.py"
inputs = [ _typemap_config_filename ]
outputs = [ _typemap_stamp_filename ]
args = [
get_label_info(_mojom_target_name, "label_no_toolchain"),
- rebase_path(_typemap_config_filename),
- rebase_path(_typemap_stamp_filename),
+ rebase_path(_typemap_config_filename, root_build_dir),
+ rebase_path(_typemap_stamp_filename, root_build_dir),
]
}
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(type_mappings_target_name) {
+ action(type_mappings_target_name) {
+ allow_remote = true
inputs =
mojom_generator_sources + jinja2_sources + [ _typemap_stamp_filename ]
outputs = [ type_mappings_path ]
@@ -1413,6 +1509,7 @@ template("mojom") {
rebase_path(type_mappings_path, root_build_dir),
]
+ sources = []
foreach(d, all_deps) {
name = get_label_info(d, "label_no_toolchain")
toolchain = get_label_info(d, "toolchain")
@@ -1422,12 +1519,11 @@ template("mojom") {
dependency_output_dir =
get_label_info(dependency_output, "target_gen_dir")
dependency_name = get_label_info(dependency_output, "name")
- dependency_path =
- rebase_path("$dependency_output_dir/${dependency_name}",
- root_build_dir)
+ dependency_path = "$dependency_output_dir/${dependency_name}"
+ sources += [ dependency_path ]
args += [
"--dependency",
- dependency_path,
+ rebase_path(dependency_path, root_build_dir),
]
}
@@ -1485,11 +1581,15 @@ template("mojom") {
if (defined(output_name_override)) {
output_name = output_name_override
}
- visibility = output_visibility + [ ":$output_target_name" ]
+ visibility = output_visibility + [
+ ":$output_target_name",
+ ":${target_name}_mojolpm",
+ ]
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
defines = export_defines
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
configs += extra_configs
if (output_file_base_paths != []) {
sources = []
@@ -1578,13 +1678,81 @@ template("mojom") {
}
}
+ if (generate_mojolpm_fuzzing && !defined(variant)) {
+ # This block contains the C++ targets for the MojoLPM fuzzer; we need to
+ # do this here so that we can use the typemap configuration for the
+ # empty-variant Mojo target.
+
+ mojolpm_target_name = "${target_name}_mojolpm"
+ mojolpm_generator_target_name = "${target_name}__generator"
+ source_set(mojolpm_target_name) {
+ # There are still a few missing header dependencies between mojo targets
+ # with typemaps and the dependencies of their typemap headers. It would
+ # be good to enable include checking for these in the future though.
+ check_includes = false
+ testonly = true
+ if (defined(invoker.sources)) {
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources =
+ filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+ sources = process_file_template(
+ non_gen_sources,
+ [
+ "{{source_gen_dir}}/{{source_file_part}}-mojolpm.cc",
+ "{{source_gen_dir}}/{{source_file_part}}-mojolpm.h",
+ ])
+ sources += process_file_template(
+ gen_sources,
+ [
+ "{{source_dir}}/{{source_file_part}}-mojolpm.cc",
+ "{{source_dir}}/{{source_file_part}}-mojolpm.h",
+ ])
+ deps = [ ":$output_target_name" ]
+ } else {
+ sources = []
+ deps = []
+ }
+
+ public_deps = [
+ ":$generator_shared_target_name",
+
+ # NB: hardcoded dependency on the no-variant variant generator, since
+ # mojolpm only uses the no-variant type.
+ ":$mojolpm_generator_target_name",
+ ":$mojolpm_proto_target_name",
+ "//base",
+ "//mojo/public/tools/fuzzers:mojolpm",
+ ]
+
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append variant_suffix to
+ # get the cpp dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps += [ "${full_name}_mojolpm" ]
+ }
+
+ foreach(config, cpp_typemap_configs) {
+ if (defined(config.traits_deps)) {
+ deps += config.traits_deps
+ }
+ if (defined(config.traits_public_deps)) {
+ public_deps += config.traits_public_deps
+ }
+ }
+ }
+ }
+
if (generate_java && is_android) {
import("//build/config/android/rules.gni")
java_generator_target_name = target_name + "_java__generator"
if (sources_list != []) {
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(java_generator_target_name) {
+ action(java_generator_target_name) {
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
sources = sources_list
@@ -1597,7 +1765,7 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
outputs += [ "$root_gen_dir/$base_path.srcjar" ]
@@ -1624,8 +1792,7 @@ template("mojom") {
java_srcjar_target_name = target_name + "_java_sources"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(java_srcjar_target_name) {
+ action(java_srcjar_target_name) {
script = "//build/android/gyp/zip.py"
inputs = []
if (output_file_base_paths != []) {
@@ -1651,7 +1818,6 @@ template("mojom") {
android_library(java_target_name) {
forward_variables_from(invoker, [ "enable_bytecode_checks" ])
deps = [
- "//base:base_java",
"//mojo/public/java:bindings_java",
"//mojo/public/java:system_java",
"//third_party/androidx:androidx_annotation_annotation_java",
@@ -1673,21 +1839,36 @@ template("mojom") {
}
}
- use_typescript_for_target =
- enable_typescript_bindings && defined(invoker.use_typescript_sources) &&
- invoker.use_typescript_sources
+ if (defined(invoker.generate_webui_js_bindings)) {
+ assert(is_chromeos_ash,
+ "generate_webui_js_bindings can only be used on ChromeOS Ash")
+ assert(invoker.generate_webui_js_bindings,
+ "generate_webui_js_bindings should be set to true or removed")
+ }
+
+ use_typescript_for_target = defined(invoker.webui_module_path) &&
+ !defined(invoker.generate_webui_js_bindings)
- if (!use_typescript_for_target && defined(invoker.use_typescript_sources)) {
- not_needed(invoker, [ "use_typescript_sources" ])
+ generate_legacy_js = !use_typescript_for_target ||
+ (defined(invoker.generate_legacy_js_bindings) &&
+ invoker.generate_legacy_js_bindings)
+
+ if (!use_typescript_for_target &&
+ defined(invoker.generate_legacy_js_bindings)) {
+ not_needed(invoker, [ "generate_legacy_js_bindings" ])
}
- if ((generate_fuzzing || !defined(invoker.cpp_only) || !invoker.cpp_only) &&
- !use_typescript_for_target) {
+ # Targets needed by both TS and JS bindings targets. These are needed
+ # unconditionally for JS bindings targets, and are needed for TS bindings
+ # targets when generate_legacy_js_bindings is true. This option is provided
+ # since the legacy bindings are needed by Blink tests and non-Chromium users,
+ # which are not expected to migrate to modules or TypeScript.
+ if (generate_legacy_js && (generate_js_fuzzing ||
+ !defined(invoker.cpp_only) || !invoker.cpp_only)) {
if (sources_list != []) {
generator_js_target_name = "${target_name}_js__generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_js_target_name) {
+ action(generator_js_target_name) {
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
sources = sources_list
@@ -1702,19 +1883,18 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
outputs += [
"$root_gen_dir/$base_path.js",
- "$root_gen_dir/$base_path.externs.js",
"$root_gen_dir/$base_path.m.js",
"$root_gen_dir/$base_path-lite.js",
- "$root_gen_dir/$base_path.html",
"$root_gen_dir/$base_path-lite-for-compile.js",
]
- if (defined(invoker.webui_module_path)) {
+ if (defined(invoker.webui_module_path) &&
+ !use_typescript_for_target) {
outputs += [ "$root_gen_dir/mojom-webui/$base_path-webui.js" ]
}
}
@@ -1725,7 +1905,6 @@ template("mojom") {
"--filelist={{response_file_name}}",
"-g",
"javascript",
- "--js_bindings_mode=new",
]
if (defined(invoker.js_generate_struct_deserializers) &&
@@ -1739,7 +1918,7 @@ template("mojom") {
args += message_scrambling_args
}
- if (generate_fuzzing) {
+ if (generate_js_fuzzing) {
args += [ "--generate_fuzzing" ]
}
}
@@ -1783,31 +1962,13 @@ template("mojom") {
data_deps += [ "${full_name}_js_data_deps" ]
}
}
+ }
- js_library_target_name = "${target_name}_js_library"
- if (sources_list != []) {
- js_library(js_library_target_name) {
- extra_public_deps = [ ":$generator_js_target_name" ]
- sources = []
- foreach(base_path, output_file_base_paths) {
- sources += [ "$root_gen_dir/${base_path}-lite.js" ]
- }
- externs_list = [
- "${externs_path}/mojo_core.js",
- "${externs_path}/pending.js",
- ]
-
- deps = []
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- deps += [ "${full_name}_js_library" ]
- }
- }
- } else {
- group(js_library_target_name) {
- }
- }
-
+ # js_library() closure compiler targets, primarily used on ChromeOS. Only
+ # generate these targets if the mojom target is not C++ only and is not using
+ # TypeScript.
+ if (generate_mojom_closure_libraries &&
+ (!defined(invoker.cpp_only) || !invoker.cpp_only) && generate_legacy_js) {
js_library_for_compile_target_name = "${target_name}_js_library_for_compile"
if (sources_list != []) {
js_library(js_library_for_compile_target_name) {
@@ -1834,35 +1995,9 @@ template("mojom") {
}
}
- js_modules_target_name = "${target_name}_js_modules"
- if (sources_list != []) {
- js_library(js_modules_target_name) {
- extra_public_deps = [ ":$generator_js_target_name" ]
- sources = []
- foreach(base_path, output_file_base_paths) {
- sources += [ "$root_gen_dir/${base_path}.m.js" ]
- }
- externs_list = [
- "${externs_path}/mojo_core.js",
- "${externs_path}/pending.js",
- ]
- if (defined(invoker.disallow_native_types) &&
- invoker.disallow_native_types) {
- deps = []
- } else {
- deps = [ "//mojo/public/js:bindings_uncompiled" ]
- }
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- deps += [ "${full_name}_js_modules" ]
- }
- }
- } else {
- group(js_modules_target_name) {
- }
- }
-
- if (defined(invoker.webui_module_path)) {
+ # WebUI-specific closure targets, not needed by targets that are generating
+ # TypeScript WebUI bindings or by legacy-only targets.
+ if (defined(invoker.webui_module_path) && !use_typescript_for_target) {
webui_js_target_name = "${target_name}_webui_js"
if (sources_list != []) {
js_library(webui_js_target_name) {
@@ -1890,46 +2025,38 @@ template("mojom") {
group(webui_js_target_name) {
}
}
- }
- }
- if ((generate_fuzzing || !defined(invoker.cpp_only) || !invoker.cpp_only) &&
- use_typescript_for_target) {
- generator_js_target_names = []
- source_filelist = []
- foreach(source, sources_list) {
- source_filelist += [ rebase_path("$source", root_build_dir) ]
- }
- dependency_types = [
- {
- name = "regular"
- ts_extension = ".ts"
- js_extension = ".js"
- },
- {
- name = "es_modules"
- ts_extension = ".m.ts"
- js_extension = ".m.js"
- },
- ]
+ webui_grdp_target_name = "${target_name}_webui_grdp"
+ out_grd = "$target_gen_dir/${target_name}_webui_resources.grdp"
+ grd_prefix = "${target_name}_webui"
+ generate_grd(webui_grdp_target_name) {
+ grd_prefix = grd_prefix
+ out_grd = out_grd
- foreach(dependency_type, dependency_types) {
- ts_outputs = []
- js_outputs = []
+ deps = [ ":$webui_js_target_name" ]
- foreach(base_path, output_file_base_paths) {
- ts_outputs +=
- [ "$root_gen_dir/$base_path-lite${dependency_type.ts_extension}" ]
- js_outputs +=
- [ "$root_gen_dir/$base_path-lite${dependency_type.js_extension}" ]
+ input_files = []
+ foreach(base_path, output_file_base_paths) {
+ input_files += [ "${base_path}-webui.js" ]
+ }
+
+ input_files_base_dir =
+ rebase_path("$root_gen_dir/mojom-webui", "$root_build_dir")
+ }
+ }
+ }
+ if ((generate_js_fuzzing || !defined(invoker.cpp_only) ||
+ !invoker.cpp_only) && use_typescript_for_target) {
+ if (sources_list != []) {
+ source_filelist = []
+ foreach(source, sources_list) {
+ source_filelist += [ rebase_path(source, root_build_dir) ]
}
# Generate Typescript bindings.
- generator_ts_target_name =
- "${target_name}_${dependency_type.name}__ts__generator"
+ generator_ts_target_name = "${target_name}_ts__generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_ts_target_name) {
+ action(generator_ts_target_name) {
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
sources = sources_list
@@ -1938,7 +2065,10 @@ template("mojom") {
"//mojo/public/tools/bindings:precompile_templates",
]
- outputs = ts_outputs
+ outputs = []
+ foreach(base_path, output_file_base_paths) {
+ outputs += [ "$root_gen_dir/$base_path-webui.ts" ]
+ }
args = common_generator_args
response_file_contents = source_filelist
@@ -1948,97 +2078,20 @@ template("mojom") {
"typescript",
]
- if (dependency_type.name == "es_modules") {
- args += [ "--ts_use_es_modules" ]
- }
-
- # TODO(crbug.com/1007587): Support scramble_message_ids.
- # TODO(crbug.com/1007591): Support generate_fuzzing.
- }
-
- # Create tsconfig.json for the generated Typescript.
- tsconfig_filename =
- "$target_gen_dir/$target_name-${dependency_type.name}-tsconfig.json"
- tsconfig = {
- }
- tsconfig.compilerOptions = {
- composite = true
- target = "es6"
- module = "es6"
- lib = [
- "es6",
- "esnext.bigint",
- ]
- strict = true
- }
- tsconfig.files = []
- foreach(base_path, output_file_base_paths) {
- tsconfig.files += [ rebase_path(
- "$root_gen_dir/$base_path-lite${dependency_type.ts_extension}",
- target_gen_dir,
- root_gen_dir) ]
- }
- tsconfig.references = []
-
- # Get tsconfigs for deps.
- foreach(d, all_deps) {
- dep_target_gen_dir = rebase_path(get_label_info(d, "target_gen_dir"))
- dep_name = get_label_info(d, "name")
- reference = {
- }
- reference.path = "$dep_target_gen_dir/$dep_name-${dependency_type.name}-tsconfig.json"
- tsconfig.references += [ reference ]
- }
- write_file(tsconfig_filename, tsconfig, "json")
-
- # Compile previously generated Typescript to Javascript.
- generator_js_target_name =
- "${target_name}_${dependency_type.name}__js__generator"
- generator_js_target_names += [ generator_js_target_name ]
-
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_js_target_name) {
- script = "$mojom_generator_root/compile_typescript.py"
- sources = ts_outputs
- outputs = js_outputs
- public_deps = [ ":$generator_ts_target_name" ]
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- public_deps +=
- [ "${full_name}_${dependency_type.name}__js__generator" ]
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
}
- absolute_tsconfig_path =
- rebase_path(tsconfig_filename, "", target_gen_dir)
- args = [ "--tsconfig_path=$absolute_tsconfig_path" ]
- }
- }
-
- js_target_name = target_name + "_js"
- group(js_target_name) {
- public_deps = []
- if (sources_list != []) {
- foreach(generator_js_target_name, generator_js_target_names) {
- public_deps += [ ":$generator_js_target_name" ]
+ if (defined(invoker.js_generate_struct_deserializers) &&
+ invoker.js_generate_struct_deserializers) {
+ args += [ "--js_generate_struct_deserializers" ]
}
- }
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- public_deps += [ "${full_name}_js" ]
- }
- }
-
- group(js_data_deps_target_name) {
- data = js_outputs
- deps = []
- foreach(generator_js_target_name, generator_js_target_names) {
- deps += [ ":$generator_js_target_name" ]
- }
- data_deps = []
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- data_deps += [ "${full_name}_js_data_deps" ]
+ # TODO(crbug.com/1007587): Support scramble_message_ids if above is
+ # insufficient.
+ # TODO(crbug.com/1007591): Support generate_fuzzing.
}
}
}
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
index da9efc71..8c641c2a 100755
--- a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
+++ b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -57,10 +57,17 @@ _BUILTIN_GENERATORS = {
"typescript": "mojom_ts_generator",
}
+_BUILTIN_CHECKS = {
+ "attributes": "mojom_attributes_check",
+ "definitions": "mojom_definitions_check",
+ "features": "mojom_interface_feature_check",
+ "restrictions": "mojom_restrictions_check",
+}
+
def LoadGenerators(generators_string):
if not generators_string:
- return [] # No generators.
+ return {} # No generators.
generators = {}
for generator_name in [s.strip() for s in generators_string.split(",")]:
@@ -74,6 +81,21 @@ def LoadGenerators(generators_string):
return generators
+def LoadChecks(checks_string):
+ if not checks_string:
+ return {} # No checks.
+
+ checks = {}
+ for check_name in [s.strip() for s in checks_string.split(",")]:
+ check = check_name.lower()
+ if check not in _BUILTIN_CHECKS:
+ print("Unknown check name %s" % check_name)
+ sys.exit(1)
+ check_module = importlib.import_module("checks.%s" % _BUILTIN_CHECKS[check])
+ checks[check] = check_module
+ return checks
+
+
def MakeImportStackMessage(imported_filename_stack):
"""Make a (human-readable) message listing a chain of imports. (Returned
string begins with a newline (if nonempty) and does not end with one.)"""
@@ -82,7 +104,7 @@ def MakeImportStackMessage(imported_filename_stack):
zip(imported_filename_stack[1:], imported_filename_stack)]))
-class RelativePath(object):
+class RelativePath:
"""Represents a path relative to the source tree or generated output dir."""
def __init__(self, path, source_root, output_dir):
@@ -142,7 +164,7 @@ def ReadFileContents(filename):
return f.read()
-class MojomProcessor(object):
+class MojomProcessor:
"""Takes parsed mojom modules and generates language bindings from them.
Attributes:
@@ -169,8 +191,8 @@ class MojomProcessor(object):
if 'c++' in self._typemap:
self._typemap['mojolpm'] = self._typemap['c++']
- def _GenerateModule(self, args, remaining_args, generator_modules,
- rel_filename, imported_filename_stack):
+ def _GenerateModule(self, args, remaining_args, check_modules,
+ generator_modules, rel_filename, imported_filename_stack):
# Return the already-generated module.
if rel_filename.path in self._processed_files:
return self._processed_files[rel_filename.path]
@@ -190,12 +212,16 @@ class MojomProcessor(object):
ScrambleMethodOrdinals(module.interfaces, salt)
if self._should_generate(rel_filename.path):
+ # Run checks on module first.
+ for check_module in check_modules.values():
+ checker = check_module.Check(module)
+ checker.CheckModule()
+ # Then run generation.
for language, generator_module in generator_modules.items():
generator = generator_module.Generator(
module, args.output_dir, typemap=self._typemap.get(language, {}),
variant=args.variant, bytecode_path=args.bytecode_path,
for_blink=args.for_blink,
- js_bindings_mode=args.js_bindings_mode,
js_generate_struct_deserializers=\
args.js_generate_struct_deserializers,
export_attribute=args.export_attribute,
@@ -234,6 +260,7 @@ def _Generate(args, remaining_args):
args.import_directories[idx] = RelativePath(tokens[0], args.depth,
args.output_dir)
generator_modules = LoadGenerators(args.generators_string)
+ check_modules = LoadChecks(args.checks_string)
fileutil.EnsureDirectoryExists(args.output_dir)
@@ -246,7 +273,7 @@ def _Generate(args, remaining_args):
for filename in args.filename:
processor._GenerateModule(
- args, remaining_args, generator_modules,
+ args, remaining_args, check_modules, generator_modules,
RelativePath(filename, args.depth, args.output_dir), [])
return 0
@@ -286,6 +313,12 @@ def main():
metavar="GENERATORS",
default="c++,javascript,java,mojolpm",
help="comma-separated list of generators")
+ generate_parser.add_argument("-c",
+ "--checks",
+ dest="checks_string",
+ metavar="CHECKS",
+ default=",".join(_BUILTIN_CHECKS.keys()),
+ help="comma-separated list of checks")
generate_parser.add_argument(
"--gen_dir", dest="gen_directories", action="append", metavar="directory",
default=[], help="add a directory to be searched for the syntax trees.")
@@ -309,11 +342,6 @@ def main():
help="Use WTF types as generated types for mojo "
"string/array/map.")
generate_parser.add_argument(
- "--js_bindings_mode", choices=["new", "old"], default="old",
- help="This option only affects the JavaScript bindings. The value could "
- "be \"new\" to generate new-style lite JS bindings in addition to the "
- "old, or \"old\" to only generate old bindings.")
- generate_parser.add_argument(
"--js_generate_struct_deserializers", action="store_true",
help="Generate javascript deserialize methods for structs in "
"mojom-lite.js file")
@@ -387,4 +415,10 @@ def main():
if __name__ == "__main__":
with crbug_1001171.DumpStateOnLookupError():
- sys.exit(main())
+ ret = main()
+ # Exit without running GC, which can save multiple seconds due to the large
+ # number of objects created. But flushing is necessary, as os._exit doesn't
+ # do that.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(ret)
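For orientation, the new pre-generation checks mirror the generator loading path: the --checks string is split on commas, each name is resolved through _BUILTIN_CHECKS to a module under checks/, and each resulting module's Check class is instantiated per parsed mojom module before any generator runs. A minimal sketch of that flow (the driver function is hypothetical; the names otherwise follow the diff above):

    import importlib

    def RunChecks(module, checks_string):
        # Resolve check names exactly as LoadChecks() does above; assumes
        # _BUILTIN_CHECKS is in scope and every name is known.
        check_modules = {}
        for name in (s.strip().lower() for s in checks_string.split(',')):
            check_modules[name] = importlib.import_module(
                'checks.%s' % _BUILTIN_CHECKS[name])
        # Each module exposes a Check class (see generate/check.py later in
        # this patch); CheckModule() raises on failure.
        for check_module in check_modules.values():
            check_module.Check(module).CheckModule()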
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
index bddbe3f4..761922b6 100644
--- a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
+++ b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,13 +8,13 @@ from mojom_bindings_generator import MakeImportStackMessage
from mojom_bindings_generator import ScrambleMethodOrdinals
-class FakeIface(object):
+class FakeIface:
def __init__(self):
self.mojom_name = None
self.methods = None
-class FakeMethod(object):
+class FakeMethod:
def __init__(self, explicit_ordinal=None):
self.explicit_ordinal = explicit_ordinal
self.ordinal = explicit_ordinal
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py b/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
deleted file mode 100755
index 15f0e3ba..00000000
--- a/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Downgrades *.mojom files to the old mojo types for remotes and receivers."""
-
-import argparse
-import fnmatch
-import os
-import re
-import shutil
-import sys
-import tempfile
-
-# List of patterns and replacements to match and use against the contents of a
-# mojo file. Each replacement string will be used with Python string's format()
-# function, so the '{}' substring is used to mark where the mojo type should go.
-_MOJO_REPLACEMENTS = {
- r'pending_remote': r'{}',
- r'pending_receiver': r'{}&',
- r'pending_associated_remote': r'associated {}',
- r'pending_associated_receiver': r'associated {}&',
-}
-
-# Pre-compiled regular expression that matches against any of the replacements.
-_REGEXP_PATTERN = re.compile(
- r'|'.join(
- ['{}\s*<\s*(.*?)\s*>'.format(k) for k in _MOJO_REPLACEMENTS.keys()]),
- flags=re.DOTALL)
-
-
-def ReplaceFunction(match_object):
- """Returns the right replacement for the string matched against the regexp."""
- for index, (match, repl) in enumerate(_MOJO_REPLACEMENTS.items(), 1):
- if match_object.group(0).startswith(match):
- return repl.format(match_object.group(index))
-
-
-def DowngradeFile(path, output_dir=None):
- """Downgrades the mojom file specified by |path| to the old mojo types.
-
- Optionally pass |output_dir| to place the result under a separate output
- directory, preserving the relative path to the file included in |path|.
- """
- # Use a temporary file to dump the new contents after replacing the patterns.
- with open(path) as src_mojo_file:
- with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_mojo_file:
- tmp_contents = _REGEXP_PATTERN.sub(ReplaceFunction, src_mojo_file.read())
- tmp_mojo_file.write(tmp_contents)
-
- # Files should be placed in the desired output directory
- if output_dir:
- output_filepath = os.path.join(output_dir, os.path.basename(path))
- if not os.path.exists(output_dir):
- os.makedirs(output_dir)
- else:
- output_filepath = path
-
- # Write the new contents preserving the original file's attributes.
- shutil.copystat(path, tmp_mojo_file.name)
- shutil.move(tmp_mojo_file.name, output_filepath)
-
- # Make sure to "touch" the new file so that access, modify and change times
- # are always newer than the source file's, otherwise Modify time will be kept
- # as per the call to shutil.copystat(), causing unnecessary generations of the
- # output file in subsequent builds due to ninja considering it dirty.
- os.utime(output_filepath, None)
-
-
-def DowngradeDirectory(path, output_dir=None):
- """Downgrades mojom files inside directory |path| to the old mojo types.
-
- Optionally pass |output_dir| to place the result under a separate output
- directory, preserving the relative path to the file included in |path|.
- """
- # We don't have recursive glob.glob() nor pathlib.Path.rglob() in Python 2.7
- mojom_filepaths = []
- for dir_path, _, filenames in os.walk(path):
- for filename in fnmatch.filter(filenames, "*mojom"):
- mojom_filepaths.append(os.path.join(dir_path, filename))
-
- for path in mojom_filepaths:
- absolute_dirpath = os.path.dirname(os.path.abspath(path))
- if output_dir:
- dest_dirpath = output_dir + absolute_dirpath
- else:
- dest_dirpath = absolute_dirpath
- DowngradeFile(path, dest_dirpath)
-
-
-def DowngradePath(src_path, output_dir=None):
- """Downgrades the mojom files pointed by |src_path| to the old mojo types.
-
- Optionally pass |output_dir| to place the result under a separate output
- directory, preserving the relative path to the file included in |path|.
- """
- if os.path.isdir(src_path):
- DowngradeDirectory(src_path, output_dir)
- elif os.path.isfile(src_path):
- DowngradeFile(src_path, output_dir)
- else:
- print(">>> {} not pointing to a valid file or directory".format(src_path))
- sys.exit(1)
-
-
-def main():
- parser = argparse.ArgumentParser(
- description="Downgrade *.mojom files to use the old mojo types.")
- parser.add_argument(
- "srcpath", help="path to the file or directory to apply the conversion")
- parser.add_argument(
- "--outdir", help="the directory to place the converted file(s) under")
- args = parser.parse_args()
-
- DowngradePath(args.srcpath, args.outdir)
-
-
-if __name__ == "__main__":
- sys.exit(main())
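The deleted downgrader was a pure textual transform: each new-style handle type is matched together with its template argument, and the argument is re-emitted in the legacy syntax (pending_receiver<Foo> becomes Foo&, pending_associated_remote<Foo> becomes associated Foo, and so on). A condensed, runnable sketch of the removed behaviour:

    import re

    _MOJO_REPLACEMENTS = {
        r'pending_remote': r'{}',
        r'pending_receiver': r'{}&',
        r'pending_associated_remote': r'associated {}',
        r'pending_associated_receiver': r'associated {}&',
    }
    _PATTERN = re.compile('|'.join(
        r'{}\s*<\s*(.*?)\s*>'.format(k) for k in _MOJO_REPLACEMENTS),
        flags=re.DOTALL)

    def _Replace(m):
        # Each alternative contributes one capture group, numbered in dict
        # insertion order, exactly as in ReplaceFunction() above.
        for i, (prefix, repl) in enumerate(_MOJO_REPLACEMENTS.items(), 1):
            if m.group(0).startswith(prefix):
                return repl.format(m.group(i))

    print(_PATTERN.sub(_Replace, 'Bind(pending_receiver<Foo> r);'))
    # Prints: Bind(Foo& r);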
diff --git a/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py b/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
index f1783d59..6bb7a209 100755
--- a/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
+++ b/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -17,7 +17,8 @@ def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
])
_SUPPORTED_TYPE_KEYS = set([
'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
- 'move_only', 'nullable_is_same_type'
+ 'move_only', 'nullable_is_same_type', 'forward_declaration',
+ 'default_constructible'
])
with open(config_filename, 'r') as f:
for config in json.load(f):
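The two new keys extend the per-type schema accepted by CheckCppTypemapConfigs(); any key outside _SUPPORTED_TYPE_KEYS is rejected. A small illustration of the validation rule (the entry values are invented for the example):

    _SUPPORTED_TYPE_KEYS = {
        'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize',
        'hashable', 'move_only', 'nullable_is_same_type',
        'forward_declaration', 'default_constructible'
    }

    entry = {
        'mojom': 'example.mojom.Rect',  # illustrative values only
        'cpp': '::example::Rect',
        'default_constructible': False,
    }
    unknown = set(entry) - _SUPPORTED_TYPE_KEYS
    assert not unknown, 'unsupported typemap keys: %s' % sorted(unknown)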
diff --git a/utils/ipc/mojo/public/tools/mojom/BUILD.gn b/utils/ipc/mojo/public/tools/mojom/BUILD.gn
new file mode 100644
index 00000000..eafb95a1
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/mojom/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+group("tests") {
+ data = [
+ "check_stable_mojom_compatibility_unittest.py",
+ "check_stable_mojom_compatibility.py",
+ "const_unittest.py",
+ "enum_unittest.py",
+ "feature_unittest.py",
+ "mojom_parser_test_case.py",
+ "mojom_parser_unittest.py",
+ "mojom_parser.py",
+ "stable_attribute_unittest.py",
+ "version_compatibility_unittest.py",
+ ]
+}
diff --git a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
index 08bd672f..35cd1cfd 100755
--- a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
+++ b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Verifies backward-compatibility of mojom type changes.
@@ -12,20 +12,18 @@ This can be used e.g. by a presubmit check to prevent developers from making
breaking changes to stable mojoms."""
import argparse
-import errno
import io
import json
import os
import os.path
-import shutil
-import six
import sys
-import tempfile
from mojom.generate import module
from mojom.generate import translate
from mojom.parse import parser
+# pylint: disable=raise-missing-from
+
class ParseError(Exception):
pass
@@ -41,6 +39,8 @@ def _ValidateDelta(root, delta):
transitive closure of a mojom's input dependencies all at once.
"""
+ translate.is_running_backwards_compatibility_check_hack = True
+
# First build a map of all files covered by the delta
affected_files = set()
old_files = {}
@@ -73,11 +73,35 @@ def _ValidateDelta(root, delta):
try:
ast = parser.Parse(contents, mojom)
except Exception as e:
- six.reraise(
- ParseError,
- 'encountered exception {0} while parsing {1}'.format(e, mojom),
- sys.exc_info()[2])
+ raise ParseError('encountered exception {0} while parsing {1}'.format(
+ e, mojom))
+
+ # Files which are generated at compile time can't be checked by this script
+ # (at the moment) since they may not exist in the output directory.
+ generated_files_to_skip = {
+ ('third_party/blink/public/mojom/runtime_feature_state/'
+ 'runtime_feature.mojom'),
+ ('third_party/blink/public/mojom/origin_trial_feature/'
+ 'origin_trial_feature.mojom'),
+ }
+
+ ast.import_list.items = [
+ x for x in ast.import_list.items
+ if x.import_filename not in generated_files_to_skip
+ ]
+
for imp in ast.import_list:
+ if (not file_overrides.get(imp.import_filename)
+ and not os.path.exists(os.path.join(root, imp.import_filename))):
+ # Speculatively construct a path prefix to locate the import_filename
+ mojom_path = os.path.dirname(os.path.normpath(mojom)).split(os.sep)
+ test_prefix = ''
+ for path_component in mojom_path:
+ test_prefix = os.path.join(test_prefix, path_component)
+ test_import_filename = os.path.join(test_prefix, imp.import_filename)
+ if os.path.exists(os.path.join(root, test_import_filename)):
+ imp.import_filename = test_import_filename
+ break
parseMojom(imp.import_filename, file_overrides, override_modules)
# Now that the transitive set of dependencies has been imported and parsed
@@ -89,10 +113,10 @@ def _ValidateDelta(root, delta):
modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)
old_modules = {}
- for mojom in old_files.keys():
+ for mojom in old_files:
parseMojom(mojom, old_files, old_modules)
new_modules = {}
- for mojom in new_files.keys():
+ for mojom in new_files:
parseMojom(mojom, new_files, new_modules)
# At this point we have a complete set of translated Modules from both the
@@ -132,12 +156,21 @@ def _ValidateDelta(root, delta):
'can be deleted by a subsequent change.' % qualified_name)
checker = module.BackwardCompatibilityChecker()
- if not checker.IsBackwardCompatible(new_types[new_name], kind):
- raise Exception('Stable type %s appears to have changed in a way which '
- 'breaks backward-compatibility. Please fix!\n\nIf you '
- 'believe this assessment to be incorrect, please file a '
- 'Chromium bug against the "Internals>Mojo>Bindings" '
- 'component.' % qualified_name)
+ try:
+ if not checker.IsBackwardCompatible(new_types[new_name], kind):
+ raise Exception(
+ 'Stable type %s appears to have changed in a way which '
+ 'breaks backward-compatibility. Please fix!\n\nIf you '
+ 'believe this assessment to be incorrect, please file a '
+ 'Chromium bug against the "Internals>Mojo>Bindings" '
+ 'component.' % qualified_name)
+ except Exception as e:
+ raise Exception(
+ 'Stable type %s appears to have changed in a way which '
+ 'breaks backward-compatibility: \n\n%s.\nPlease fix!\n\nIf you '
+ 'believe this assessment to be incorrect, please file a '
+ 'Chromium bug against the "Internals>Mojo>Bindings" '
+ 'component.' % (qualified_name, e))
def Run(command_line, delta=None):
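The import-resolution hunk above deserves a gloss: when a mojom imports by a partial path (e.g. import "foo.mojom" from inside foo/), the checker now walks the importing file's directory components, prepending ever longer prefixes until the import resolves under the root. A standalone sketch of that search, under the same assumptions as the diff:

    import os

    def ResolvePartialImport(root, mojom, import_filename):
        # Try 'a', 'a/b', 'a/b/c', ... as prefixes for the import path.
        test_prefix = ''
        for component in os.path.dirname(os.path.normpath(mojom)).split(os.sep):
            test_prefix = os.path.join(test_prefix, component)
            candidate = os.path.join(test_prefix, import_filename)
            if os.path.exists(os.path.join(root, candidate)):
                return candidate   # e.g. 'foo.mojom' -> 'foo/foo.mojom'
        return import_filename     # fall through: leave the import as-is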
diff --git a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
index 9f51ea77..06769c95 100755
--- a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -15,7 +15,7 @@ import check_stable_mojom_compatibility
from mojom.generate import module
-class Change(object):
+class Change:
"""Helper to clearly define a mojom file delta to be analyzed."""
def __init__(self, filename, old=None, new=None):
@@ -28,7 +28,7 @@ class Change(object):
class UnchangedFile(Change):
def __init__(self, filename, contents):
- super(UnchangedFile, self).__init__(filename, old=contents, new=contents)
+ super().__init__(filename, old=contents, new=contents)
class CheckStableMojomCompatibilityTest(unittest.TestCase):
@@ -258,3 +258,82 @@ class CheckStableMojomCompatibilityTest(unittest.TestCase):
[Stable] struct T { foo.S s; int32 x; };
""")
])
+
+ def testWithPartialImport(self):
+ """The compatibility checking tool correctly parses imports with partial
+ paths."""
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('foo/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('foo/bar.mojom',
+ old="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ def testNewEnumDefault(self):
+ # Should be backwards compatible since it does not affect the wire format.
+ # This specific case also checks that the backwards compatibility checker
+ # does not throw an error due to the older version of the enum not
+ # specifying [Default].
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Extensible] enum E { One };',
+ new='[Extensible] enum E { [Default] One };')
+ ])
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Extensible] enum E { [Default] One, Two, };',
+ new='[Extensible] enum E { One, [Default] Two, };')
+ ])
diff --git a/utils/ipc/mojo/public/tools/mojom/const_unittest.py b/utils/ipc/mojo/public/tools/mojom/const_unittest.py
index cb42dfac..e8ed36a7 100644
--- a/utils/ipc/mojo/public/tools/mojom/const_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/const_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/enum_unittest.py b/utils/ipc/mojo/public/tools/mojom/enum_unittest.py
index d9005078..9269cde5 100644
--- a/utils/ipc/mojo/public/tools/mojom/enum_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/enum_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -90,3 +90,31 @@ class EnumTest(MojomParserTestCase):
self.assertEqual('F', b.enums[0].mojom_name)
self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
self.assertEqual(37, b.enums[0].fields[0].numeric_value)
+
+ def testEnumAttributesAreEnums(self):
+ """Verifies that enum values in attributes are really enum types."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'module a; enum E { kFoo, kBar };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, 'module b;'
+ 'import "a.mojom";'
+ '[MooCow=a.E.kFoo]'
+ 'interface Foo { Foo(); };')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(b.interfaces[0].attributes['MooCow'].mojom_name, 'kFoo')
+
+ def testConstantAttributes(self):
+ """Verifies that constants as attributes are translated to the constant."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(
+ a_mojom, 'module a;'
+ 'enum E { kFoo, kBar };'
+ 'const E kB = E.kFoo;'
+ '[Attr=kB] interface Hello { Foo(); };')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(a.interfaces[0].attributes['Attr'].mojom_name, 'kB')
+ self.assertEqual(a.interfaces[0].attributes['Attr'].value.mojom_name,
+ 'kFoo')
diff --git a/utils/ipc/mojo/public/tools/mojom/feature_unittest.py b/utils/ipc/mojo/public/tools/mojom/feature_unittest.py
new file mode 100644
index 00000000..5f014e1c
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/mojom/feature_unittest.py
@@ -0,0 +1,84 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FeatureTest(MojomParserTestCase):
+ """Tests feature parsing behavior."""
+ def testFeatureOff(self):
+ """Verifies basic parsing of feature types."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ [AttributeOne=ValueOne]
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_DISABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = false;
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('false', types['kFeature'].constants[1].value)
+
+ def testFeatureOn(self):
+ """Verifies basic parsing of feature types."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_ENABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = true;
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('true', types['kFeature'].constants[1].value)
+
+ def testFeatureWeakKeyword(self):
+ """Verifies that `feature` is a weak keyword."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ [AttributeOne=ValueOne]
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_DISABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = false;
+ };
+ struct MyStruct {
+ bool feature = true;
+ };
+ interface InterfaceName {
+ Method(string feature) => (int32 feature);
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('false', types['kFeature'].constants[1].value)
+
+ def testFeatureAttributesAreFeatures(self):
+ """Verifies that feature values in attributes are really feature types."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(
+ a_mojom, 'module a;'
+ 'feature F { const string name = "f";'
+ 'const bool default_state = false; };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, 'module b;'
+ 'import "a.mojom";'
+ 'feature G'
+ '{const string name = "g"; const bool default_state = false;};'
+ '[Attri=a.F] interface Foo { Foo(); };'
+ '[Boink=G] interface Bar {};')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(b.interfaces[0].attributes['Attri'].mojom_name, 'F')
+ self.assertEqual(b.interfaces[1].attributes['Boink'].mojom_name, 'G')
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn b/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
index 51facc0c..a0edf0eb 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,6 +8,7 @@ group("mojom") {
"error.py",
"fileutil.py",
"generate/__init__.py",
+ "generate/check.py",
"generate/generator.py",
"generate/module.py",
"generate/pack.py",
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/error.py b/utils/ipc/mojo/public/tools/mojom/mojom/error.py
index 8a1e03da..dd53b835 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/error.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/error.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
index bf626f54..124f12c1 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
@@ -1,9 +1,8 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import errno
-import imp
import os.path
import sys
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
index ff5753a2..c93d2289 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
@@ -1,20 +1,17 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
import os.path
import shutil
-import sys
import tempfile
import unittest
from mojom import fileutil
-
class FileUtilTest(unittest.TestCase):
def testEnsureDirectoryExists(self):
- """Test that EnsureDirectoryExists fuctions correctly."""
+ """Test that EnsureDirectoryExists functions correctly."""
temp_dir = tempfile.mkdtemp()
try:
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/check.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/check.py
new file mode 100644
index 00000000..1efe2022
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/check.py
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Code shared by the various pre-generation mojom checkers."""
+
+
+class CheckException(Exception):
+ def __init__(self, module, message):
+ self.module = module
+ self.message = message
+ super().__init__(self.message)
+
+ def __str__(self):
+ return "Failed mojo pre-generation check for {}:\n{}".format(
+ self.module.path, self.message)
+
+
+class Check:
+ def __init__(self, module):
+ self.module = module
+
+ def CheckModule(self):
+ """ Subclass should return True if its Checks pass, and throw an
+ exception otherwise. CheckModule will be called immediately before
+ mojom.generate.Generator.GenerateFiles()"""
+ raise NotImplementedError("Subclasses must override/implement this method")
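Each builtin check listed earlier subclasses this base: a checker receives the translated module, walks it, and raises CheckException with a human-readable message on violation. A hypothetical subclass, just to show the contract (the real checkers live under bindings/checks/):

    from mojom.generate import check

    class InterfacesHaveAttributesCheck(check.Check):
        # Invented example; not one of the shipped checks.
        def CheckModule(self):
            for interface in self.module.interfaces:
                if not interface.attributes:
                    raise check.CheckException(
                        self.module,
                        'interface %s has no attributes' % interface.mojom_name)
            return True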
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
deleted file mode 100644
index 0dfd996e..00000000
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Resolves the values used for constants and enums."""
-
-from itertools import ifilter
-
-from mojom.generate import module as mojom
-
-
-def ResolveConstants(module, expression_to_text):
- in_progress = set()
- computed = set()
-
- def GetResolvedValue(named_value):
- assert isinstance(named_value, (mojom.EnumValue, mojom.ConstantValue))
- if isinstance(named_value, mojom.EnumValue):
- field = next(
- ifilter(lambda field: field.name == named_value.name,
- named_value.enum.fields), None)
- if not field:
- raise RuntimeError(
- 'Unable to get computed value for field %s of enum %s' %
- (named_value.name, named_value.enum.name))
- if field not in computed:
- ResolveEnum(named_value.enum)
- return field.resolved_value
- else:
- ResolveConstant(named_value.constant)
- named_value.resolved_value = named_value.constant.resolved_value
- return named_value.resolved_value
-
- def ResolveConstant(constant):
- if constant in computed:
- return
- if constant in in_progress:
- raise RuntimeError('Circular dependency for constant: %s' % constant.name)
- in_progress.add(constant)
- if isinstance(constant.value, (mojom.EnumValue, mojom.ConstantValue)):
- resolved_value = GetResolvedValue(constant.value)
- else:
- resolved_value = expression_to_text(constant.value)
- constant.resolved_value = resolved_value
- in_progress.remove(constant)
- computed.add(constant)
-
- def ResolveEnum(enum):
- def ResolveEnumField(enum, field, default_value):
- if field in computed:
- return
- if field in in_progress:
- raise RuntimeError('Circular dependency for enum: %s' % enum.name)
- in_progress.add(field)
- if field.value:
- if isinstance(field.value, mojom.EnumValue):
- resolved_value = GetResolvedValue(field.value)
- elif isinstance(field.value, str):
- resolved_value = int(field.value, 0)
- else:
- raise RuntimeError('Unexpected value: %s' % field.value)
- else:
- resolved_value = default_value
- field.resolved_value = resolved_value
- in_progress.remove(field)
- computed.add(field)
-
- current_value = 0
- for field in enum.fields:
- ResolveEnumField(enum, field, current_value)
- current_value = field.resolved_value + 1
-
- for constant in module.constants:
- ResolveConstant(constant)
-
- for enum in module.enums:
- ResolveEnum(enum)
-
- for struct in module.structs:
- for constant in struct.constants:
- ResolveConstant(constant)
- for enum in struct.enums:
- ResolveEnum(enum)
- for field in struct.fields:
- if isinstance(field.default, (mojom.ConstantValue, mojom.EnumValue)):
- field.default.resolved_value = GetResolvedValue(field.default)
-
- for interface in module.interfaces:
- for constant in interface.constants:
- ResolveConstant(constant)
- for enum in interface.enums:
- ResolveEnum(enum)
-
- return module
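The deleted resolver's core enum rule is easy to state: an explicit field value (a numeric literal, or a reference to another enum value) fixes the counter, and each subsequent implicit field takes the previous value plus one, with the in_progress set guarding against circular references. A stripped-down sketch of just the literal case (omitting EnumValue references and cycle detection):

    def ResolveEnumValues(fields):
        # Fields carry an optional .value string, as in the deleted module.
        current = 0
        for field in fields:
            field.resolved_value = (int(field.value, 0)
                                    if field.value else current)
            current = field.resolved_value + 1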
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
index 4a1c73fc..96fe3a2d 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code shared by the various language-specific code generators."""
@@ -97,7 +97,7 @@ def ToLowerSnakeCase(identifier):
return _ToSnakeCase(identifier, upper=False)
-class Stylizer(object):
+class Stylizer:
"""Stylizers specify naming rules to map mojom names to names in generated
code. For example, if you would like method_name in mojom to be mapped to
MethodName in the generated code, you need to define a subclass of Stylizer
@@ -130,6 +130,9 @@ class Stylizer(object):
def StylizeEnum(self, mojom_name):
return mojom_name
+ def StylizeFeature(self, mojom_name):
+ return mojom_name
+
def StylizeModule(self, mojom_namespace):
return mojom_namespace
@@ -233,7 +236,7 @@ def AddComputedData(module):
_AddInterfaceComputedData(interface)
-class Generator(object):
+class Generator:
# Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
# files to stdout.
def __init__(self,
@@ -243,7 +246,6 @@ class Generator(object):
variant=None,
bytecode_path=None,
for_blink=False,
- js_bindings_mode="new",
js_generate_struct_deserializers=False,
export_attribute=None,
export_header=None,
@@ -262,7 +264,6 @@ class Generator(object):
self.variant = variant
self.bytecode_path = bytecode_path
self.for_blink = for_blink
- self.js_bindings_mode = js_bindings_mode
self.js_generate_struct_deserializers = js_generate_struct_deserializers
self.export_attribute = export_attribute
self.export_header = export_header
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
index 32c884a8..7143e07c 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
@@ -1,13 +1,12 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
+import importlib.util
import os.path
import sys
import unittest
-
def _GetDirAbove(dirname):
"""Returns the directory "above" this file containing |dirname| (which must
also be "above" this file)."""
@@ -20,12 +19,11 @@ def _GetDirAbove(dirname):
try:
- imp.find_module("mojom")
+ importlib.util.find_spec("mojom")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
from mojom.generate import generator
-
class StringManipulationTest(unittest.TestCase):
"""generator contains some string utilities, this tests only those."""
@@ -69,6 +67,5 @@ class StringManipulationTest(unittest.TestCase):
self.assertEquals("SNAKE_D3D11_CASE",
generator.ToUpperSnakeCase("snakeD3d11Case"))
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
index 9bdb28e0..ca71059d 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -12,15 +12,14 @@
# method = interface.AddMethod('Tat', 0)
# method.AddParameter('baz', 0, mojom.INT32)
-import sys
-if sys.version_info.major == 2:
- import cPickle as pickle
-else:
- import pickle
+import pickle
+from collections import OrderedDict
from uuid import UUID
+# pylint: disable=raise-missing-from
-class BackwardCompatibilityChecker(object):
+
+class BackwardCompatibilityChecker:
"""Used for memoization while recursively checking two type definitions for
backward-compatibility."""
@@ -64,23 +63,20 @@ def Repr(obj, as_ref=True):
return obj.Repr(as_ref=as_ref)
# Since we cannot implement Repr for existing container types, we
# handle them here.
- elif isinstance(obj, list):
+ if isinstance(obj, list):
if not obj:
return '[]'
- else:
- return ('[\n%s\n]' % (',\n'.join(
- ' %s' % Repr(elem, as_ref).replace('\n', '\n ')
- for elem in obj)))
- elif isinstance(obj, dict):
+ return ('[\n%s\n]' %
+ (',\n'.join(' %s' % Repr(elem, as_ref).replace('\n', '\n ')
+ for elem in obj)))
+ if isinstance(obj, dict):
if not obj:
return '{}'
- else:
- return ('{\n%s\n}' % (',\n'.join(
- ' %s: %s' % (Repr(key, as_ref).replace('\n', '\n '),
- Repr(val, as_ref).replace('\n', '\n '))
- for key, val in obj.items())))
- else:
- return repr(obj)
+ return ('{\n%s\n}' % (',\n'.join(' %s: %s' %
+ (Repr(key, as_ref).replace('\n', '\n '),
+ Repr(val, as_ref).replace('\n', '\n '))
+ for key, val in obj.items())))
+ return repr(obj)
def GenericRepr(obj, names):
@@ -104,7 +100,7 @@ def GenericRepr(obj, names):
ReprIndent(name, as_ref) for (name, as_ref) in names.items()))
-class Kind(object):
+class Kind:
"""Kind represents a type (e.g. int8, string).
Attributes:
@@ -112,16 +108,43 @@ class Kind(object):
module: {Module} The defining module. Set to None for built-in types.
parent_kind: The enclosing type. For example, an enum defined
inside an interface has that interface as its parent. May be None.
+ is_nullable: True if the type is nullable.
"""
- def __init__(self, spec=None, module=None):
+ def __init__(self, spec=None, is_nullable=False, module=None):
self.spec = spec
self.module = module
self.parent_kind = None
+ self.is_nullable = is_nullable
+ self.shared_definition = {}
+
+ @classmethod
+ def AddSharedProperty(cls, name):
+ """Adds a property |name| to |cls|, which accesses the corresponding item in
+ |shared_definition|.
+
+ The reason for adding such indirection is to enable sharing the
+ definition between a reference kind and its nullable variant. For example:
+ a = Struct('test_struct_1')
+ b = a.MakeNullableKind()
+ a.name = 'test_struct_2'
+ print(b.name) # Outputs 'test_struct_2'.
+ """
+ def Get(self):
+ try:
+ return self.shared_definition[name]
+ except KeyError: # Must raise AttributeError if property doesn't exist.
+ raise AttributeError
+
+ def Set(self, value):
+ self.shared_definition[name] = value
+
+ setattr(cls, name, property(Get, Set))
def Repr(self, as_ref=True):
# pylint: disable=unused-argument
- return '<%s spec=%r>' % (self.__class__.__name__, self.spec)
+ return '<%s spec=%r is_nullable=%r>' % (self.__class__.__name__, self.spec,
+ self.is_nullable)
def __repr__(self):
# Gives us a decent __repr__ for all kinds.
@@ -130,7 +153,8 @@ class Kind(object):
def __eq__(self, rhs):
# pylint: disable=unidiomatic-typecheck
return (type(self) == type(rhs)
- and (self.spec, self.parent_kind) == (rhs.spec, rhs.parent_kind))
+ and (self.spec, self.parent_kind, self.is_nullable)
+ == (rhs.spec, rhs.parent_kind, rhs.is_nullable))
def __hash__(self):
# TODO(crbug.com/1060471): Remove this and other __hash__ methods on Kind
@@ -138,32 +162,113 @@ class Kind(object):
# some primitive Kinds as dict keys. The default hash (object identity)
# breaks these dicts when a pickled Module instance is unpickled and used
# during a subsequent run of the parser.
- return hash((self.spec, self.parent_kind))
+ return hash((self.spec, self.parent_kind, self.is_nullable))
# pylint: disable=unused-argument
def IsBackwardCompatible(self, rhs, checker):
return self == rhs
+class ValueKind(Kind):
+ """ValueKind represents values that aren't reference kinds.
+
+ The primary difference is that the wire representation for nullable value
+ kinds still reserves space for the value itself, even when the value is
+ logically null.
+ """
+ def __init__(self, spec=None, is_nullable=False, module=None):
+ assert spec is None or is_nullable == spec.startswith('?')
+ Kind.__init__(self, spec, is_nullable, module)
+
+ def MakeNullableKind(self):
+ assert not self.is_nullable
+
+ if self == BOOL:
+ return NULLABLE_BOOL
+ if self == INT8:
+ return NULLABLE_INT8
+ if self == INT16:
+ return NULLABLE_INT16
+ if self == INT32:
+ return NULLABLE_INT32
+ if self == INT64:
+ return NULLABLE_INT64
+ if self == UINT8:
+ return NULLABLE_UINT8
+ if self == UINT16:
+ return NULLABLE_UINT16
+ if self == UINT32:
+ return NULLABLE_UINT32
+ if self == UINT64:
+ return NULLABLE_UINT64
+ if self == FLOAT:
+ return NULLABLE_FLOAT
+ if self == DOUBLE:
+ return NULLABLE_DOUBLE
+
+ nullable_kind = type(self)()
+ nullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ nullable_kind.spec = '?' + self.spec
+ nullable_kind.is_nullable = True
+ nullable_kind.parent_kind = self.parent_kind
+ nullable_kind.module = self.module
+
+ return nullable_kind
+
+ def MakeUnnullableKind(self):
+ assert self.is_nullable
+
+ if self == NULLABLE_BOOL:
+ return BOOL
+ if self == NULLABLE_INT8:
+ return INT8
+ if self == NULLABLE_INT16:
+ return INT16
+ if self == NULLABLE_INT32:
+ return INT32
+ if self == NULLABLE_INT64:
+ return INT64
+ if self == NULLABLE_UINT8:
+ return UINT8
+ if self == NULLABLE_UINT16:
+ return UINT16
+ if self == NULLABLE_UINT32:
+ return UINT32
+ if self == NULLABLE_UINT64:
+ return UINT64
+ if self == NULLABLE_FLOAT:
+ return FLOAT
+ if self == NULLABLE_DOUBLE:
+ return DOUBLE
+
+ unnullable_kind = type(self)()
+ unnullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ unnullable_kind.spec = self.spec[1:]
+ unnullable_kind.is_nullable = False
+ unnullable_kind.parent_kind = self.parent_kind
+ unnullable_kind.module = self.module
+
+ return unnullable_kind
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, ValueKind) and super().__eq__(rhs))
+
+ def __hash__(self): # pylint: disable=useless-super-delegation
+ return super().__hash__()
+
+
class ReferenceKind(Kind):
"""ReferenceKind represents pointer and handle types.
A type is nullable if null (for pointer types) or invalid handle (for handle
types) is a legal value for the type.
-
- Attributes:
- is_nullable: True if the type is nullable.
"""
def __init__(self, spec=None, is_nullable=False, module=None):
assert spec is None or is_nullable == spec.startswith('?')
- Kind.__init__(self, spec, module)
- self.is_nullable = is_nullable
- self.shared_definition = {}
-
- def Repr(self, as_ref=True):
- return '<%s spec=%r is_nullable=%r>' % (self.__class__.__name__, self.spec,
- self.is_nullable)
+ Kind.__init__(self, spec, is_nullable, module)
def MakeNullableKind(self):
assert not self.is_nullable
@@ -193,55 +298,65 @@ class ReferenceKind(Kind):
return nullable_kind
- @classmethod
- def AddSharedProperty(cls, name):
- """Adds a property |name| to |cls|, which accesses the corresponding item in
- |shared_definition|.
-
- The reason of adding such indirection is to enable sharing definition
- between a reference kind and its nullable variation. For example:
- a = Struct('test_struct_1')
- b = a.MakeNullableKind()
- a.name = 'test_struct_2'
- print(b.name) # Outputs 'test_struct_2'.
- """
-
- def Get(self):
- try:
- return self.shared_definition[name]
- except KeyError: # Must raise AttributeError if property doesn't exist.
- raise AttributeError
-
- def Set(self, value):
- self.shared_definition[name] = value
+ def MakeUnnullableKind(self):
+ assert self.is_nullable
+
+ if self == NULLABLE_STRING:
+ return STRING
+ if self == NULLABLE_HANDLE:
+ return HANDLE
+ if self == NULLABLE_DCPIPE:
+ return DCPIPE
+ if self == NULLABLE_DPPIPE:
+ return DPPIPE
+ if self == NULLABLE_MSGPIPE:
+ return MSGPIPE
+ if self == NULLABLE_SHAREDBUFFER:
+ return SHAREDBUFFER
+ if self == NULLABLE_PLATFORMHANDLE:
+ return PLATFORMHANDLE
+
+ unnullable_kind = type(self)()
+ unnullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ assert self.spec[0] == '?'
+ unnullable_kind.spec = self.spec[1:]
+ unnullable_kind.is_nullable = False
+ unnullable_kind.parent_kind = self.parent_kind
+ unnullable_kind.module = self.module
- setattr(cls, name, property(Get, Set))
+ return unnullable_kind
def __eq__(self, rhs):
- return (isinstance(rhs, ReferenceKind)
- and super(ReferenceKind, self).__eq__(rhs)
- and self.is_nullable == rhs.is_nullable)
+ return (isinstance(rhs, ReferenceKind) and super().__eq__(rhs))
- def __hash__(self):
- return hash((super(ReferenceKind, self).__hash__(), self.is_nullable))
-
- def IsBackwardCompatible(self, rhs, checker):
- return (super(ReferenceKind, self).IsBackwardCompatible(rhs, checker)
- and self.is_nullable == rhs.is_nullable)
+ def __hash__(self): # pylint: disable=useless-super-delegation
+ return super().__hash__()
# Initialize the set of primitive types. These can be accessed by clients.
-BOOL = Kind('b')
-INT8 = Kind('i8')
-INT16 = Kind('i16')
-INT32 = Kind('i32')
-INT64 = Kind('i64')
-UINT8 = Kind('u8')
-UINT16 = Kind('u16')
-UINT32 = Kind('u32')
-UINT64 = Kind('u64')
-FLOAT = Kind('f')
-DOUBLE = Kind('d')
+BOOL = ValueKind('b')
+INT8 = ValueKind('i8')
+INT16 = ValueKind('i16')
+INT32 = ValueKind('i32')
+INT64 = ValueKind('i64')
+UINT8 = ValueKind('u8')
+UINT16 = ValueKind('u16')
+UINT32 = ValueKind('u32')
+UINT64 = ValueKind('u64')
+FLOAT = ValueKind('f')
+DOUBLE = ValueKind('d')
+NULLABLE_BOOL = ValueKind('?b', True)
+NULLABLE_INT8 = ValueKind('?i8', True)
+NULLABLE_INT16 = ValueKind('?i16', True)
+NULLABLE_INT32 = ValueKind('?i32', True)
+NULLABLE_INT64 = ValueKind('?i64', True)
+NULLABLE_UINT8 = ValueKind('?u8', True)
+NULLABLE_UINT16 = ValueKind('?u16', True)
+NULLABLE_UINT32 = ValueKind('?u32', True)
+NULLABLE_UINT64 = ValueKind('?u64', True)
+NULLABLE_FLOAT = ValueKind('?f', True)
+NULLABLE_DOUBLE = ValueKind('?d', True)
STRING = ReferenceKind('s')
HANDLE = ReferenceKind('h')
DCPIPE = ReferenceKind('h:d:c')
@@ -270,6 +385,17 @@ PRIMITIVES = (
UINT64,
FLOAT,
DOUBLE,
+ NULLABLE_BOOL,
+ NULLABLE_INT8,
+ NULLABLE_INT16,
+ NULLABLE_INT32,
+ NULLABLE_INT64,
+ NULLABLE_UINT8,
+ NULLABLE_UINT16,
+ NULLABLE_UINT32,
+ NULLABLE_UINT64,
+ NULLABLE_FLOAT,
+ NULLABLE_DOUBLE,
STRING,
HANDLE,
DCPIPE,
@@ -291,12 +417,17 @@ ATTRIBUTE_DEFAULT = 'Default'
ATTRIBUTE_EXTENSIBLE = 'Extensible'
ATTRIBUTE_NO_INTERRUPT = 'NoInterrupt'
ATTRIBUTE_STABLE = 'Stable'
+ATTRIBUTE_SUPPORTS_URGENT = 'SupportsUrgent'
ATTRIBUTE_SYNC = 'Sync'
ATTRIBUTE_UNLIMITED_SIZE = 'UnlimitedSize'
ATTRIBUTE_UUID = 'Uuid'
+ATTRIBUTE_SERVICE_SANDBOX = 'ServiceSandbox'
+ATTRIBUTE_REQUIRE_CONTEXT = 'RequireContext'
+ATTRIBUTE_ALLOWED_CONTEXT = 'AllowedContext'
+ATTRIBUTE_RUNTIME_FEATURE = 'RuntimeFeature'
-class NamedValue(object):
+class NamedValue:
def __init__(self, module, parent_kind, mojom_name):
self.module = module
self.parent_kind = parent_kind
@@ -316,7 +447,7 @@ class NamedValue(object):
return hash((self.parent_kind, self.mojom_name))
-class BuiltinValue(object):
+class BuiltinValue:
def __init__(self, value):
self.value = value
@@ -350,7 +481,7 @@ class EnumValue(NamedValue):
return self.field.name
-class Constant(object):
+class Constant:
def __init__(self, mojom_name=None, kind=None, value=None, parent_kind=None):
self.mojom_name = mojom_name
self.name = None
@@ -368,7 +499,7 @@ class Constant(object):
rhs.parent_kind))
-class Field(object):
+class Field:
def __init__(self,
mojom_name=None,
kind=None,
@@ -414,7 +545,18 @@ class StructField(Field):
class UnionField(Field):
- pass
+ def __init__(self,
+ mojom_name=None,
+ kind=None,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ Field.__init__(self, mojom_name, kind, ordinal, default, attributes)
+
+ @property
+ def is_default(self):
+ return self.attributes.get(ATTRIBUTE_DEFAULT, False) \
+ if self.attributes else False
def _IsFieldBackwardCompatible(new_field, old_field, checker):
@@ -424,6 +566,38 @@ def _IsFieldBackwardCompatible(new_field, old_field, checker):
return checker.IsBackwardCompatible(new_field.kind, old_field.kind)
+class Feature(ReferenceKind):
+ """A runtime enabled feature defined from mojom.
+
+ Attributes:
+ mojom_name: {str} The name of the feature type as defined in mojom.
+ name: {str} The stylized name. (Note: not the "name" used by FeatureList.)
+ constants: {List[Constant]} The constants defined in the feature scope.
+ attributes: {dict} Additional information about the feature.
+ """
+
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ReferenceKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.constants = []
+ self.attributes = attributes
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeFeature(self.mojom_name)
+ for constant in self.constants:
+ constant.Stylize(stylizer)
+
+
class Struct(ReferenceKind):
"""A struct with typed fields.
@@ -441,14 +615,14 @@ class Struct(ReferenceKind):
if it's a native struct.
"""
- ReferenceKind.AddSharedProperty('mojom_name')
- ReferenceKind.AddSharedProperty('name')
- ReferenceKind.AddSharedProperty('native_only')
- ReferenceKind.AddSharedProperty('custom_serializer')
- ReferenceKind.AddSharedProperty('fields')
- ReferenceKind.AddSharedProperty('enums')
- ReferenceKind.AddSharedProperty('constants')
- ReferenceKind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('native_only')
+ Kind.AddSharedProperty('custom_serializer')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('enums')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
def __init__(self, mojom_name=None, module=None, attributes=None):
if mojom_name is not None:
@@ -470,12 +644,11 @@ class Struct(ReferenceKind):
return '<%s mojom_name=%r module=%s>' % (self.__class__.__name__,
self.mojom_name,
Repr(self.module, as_ref=True))
- else:
- return GenericRepr(self, {
- 'mojom_name': False,
- 'fields': False,
- 'module': True
- })
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'fields': False,
+ 'module': True
+ })
def AddField(self,
mojom_name,
@@ -496,13 +669,13 @@ class Struct(ReferenceKind):
for constant in self.constants:
constant.Stylize(stylizer)
- def IsBackwardCompatible(self, older_struct, checker):
- """This struct is backward-compatible with older_struct if and only if all
- of the following conditions hold:
+ def IsBackwardCompatible(self, rhs, checker):
+ """This struct is backward-compatible with rhs (older_struct) if and only if
+ all of the following conditions hold:
- Any newly added field is tagged with a [MinVersion] attribute specifying
a version number greater than all previously used [MinVersion]
attributes within the struct.
- - All fields present in older_struct remain present in the new struct,
+ - All fields present in rhs remain present in the new struct,
with the same ordinal position, same optional or non-optional status,
same (or backward-compatible) type and where applicable, the same
[MinVersion] attribute value.
@@ -521,7 +694,7 @@ class Struct(ReferenceKind):
return fields_by_ordinal
new_fields = buildOrdinalFieldMap(self)
- old_fields = buildOrdinalFieldMap(older_struct)
+ old_fields = buildOrdinalFieldMap(rhs)
if len(new_fields) < len(old_fields):
# At least one field was removed, which is not OK.
return False
@@ -574,11 +747,18 @@ class Struct(ReferenceKind):
prefix = self.module.GetNamespacePrefix()
return '%s%s' % (prefix, self.mojom_name)
+ def _tuple(self):
+ return (self.mojom_name, self.native_only, self.fields, self.constants,
+ self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Struct) and
- (self.mojom_name, self.native_only, self.fields, self.constants,
- self.attributes) == (rhs.mojom_name, rhs.native_only, rhs.fields,
- rhs.constants, rhs.attributes))
+ return isinstance(rhs, Struct) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
def __hash__(self):
return id(self)
@@ -595,10 +775,11 @@ class Union(ReferenceKind):
which Java class name to use to represent it in the generated
bindings.
"""
- ReferenceKind.AddSharedProperty('mojom_name')
- ReferenceKind.AddSharedProperty('name')
- ReferenceKind.AddSharedProperty('fields')
- ReferenceKind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('default_field')
def __init__(self, mojom_name=None, module=None, attributes=None):
if mojom_name is not None:
@@ -610,14 +791,14 @@ class Union(ReferenceKind):
self.name = None
self.fields = []
self.attributes = attributes
+ self.default_field = None
def Repr(self, as_ref=True):
if as_ref:
return '<%s spec=%r is_nullable=%r fields=%s>' % (
self.__class__.__name__, self.spec, self.is_nullable, Repr(
self.fields))
- else:
- return GenericRepr(self, {'fields': True, 'is_nullable': False})
+ return GenericRepr(self, {'fields': True, 'is_nullable': False})
def AddField(self, mojom_name, kind, ordinal=None, attributes=None):
field = UnionField(mojom_name, kind, ordinal, None, attributes)
@@ -629,13 +810,13 @@ class Union(ReferenceKind):
for field in self.fields:
field.Stylize(stylizer)
- def IsBackwardCompatible(self, older_union, checker):
- """This union is backward-compatible with older_union if and only if all
- of the following conditions hold:
+ def IsBackwardCompatible(self, rhs, checker):
+ """This union is backward-compatible with rhs (older_union) if and only if
+ all of the following conditions hold:
- Any newly added field is tagged with a [MinVersion] attribute specifying
a version number greater than all previously used [MinVersion]
attributes within the union.
- - All fields present in older_union remain present in the new union,
+ - All fields present in rhs remain present in the new union,
with the same ordinal value, same optional or non-optional status,
same (or backward-compatible) type, and where applicable, the same
[MinVersion] attribute value.
@@ -651,7 +832,7 @@ class Union(ReferenceKind):
return fields_by_ordinal
new_fields = buildOrdinalFieldMap(self)
- old_fields = buildOrdinalFieldMap(older_union)
+ old_fields = buildOrdinalFieldMap(rhs)
if len(new_fields) < len(old_fields):
# At least one field was removed, which is not OK.
return False
@@ -678,6 +859,11 @@ class Union(ReferenceKind):
return True
@property
+ def extensible(self):
+ return self.attributes.get(ATTRIBUTE_EXTENSIBLE, False) \
+ if self.attributes else False
+
+ @property
def stable(self):
return self.attributes.get(ATTRIBUTE_STABLE, False) \
if self.attributes else False
@@ -690,10 +876,17 @@ class Union(ReferenceKind):
prefix = self.module.GetNamespacePrefix()
return '%s%s' % (prefix, self.mojom_name)
+ def _tuple(self):
+ return (self.mojom_name, self.fields, self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Union) and
- (self.mojom_name, self.fields,
- self.attributes) == (rhs.mojom_name, rhs.fields, rhs.attributes))
+ return isinstance(rhs, Union) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
def __hash__(self):
return id(self)
@@ -707,8 +900,8 @@ class Array(ReferenceKind):
length: The number of elements. None if unknown.
"""
- ReferenceKind.AddSharedProperty('kind')
- ReferenceKind.AddSharedProperty('length')
+ Kind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('length')
def __init__(self, kind=None, length=None):
if kind is not None:
@@ -728,12 +921,11 @@ class Array(ReferenceKind):
return '<%s spec=%r is_nullable=%r kind=%s length=%r>' % (
self.__class__.__name__, self.spec, self.is_nullable, Repr(
self.kind), self.length)
- else:
- return GenericRepr(self, {
- 'kind': True,
- 'length': False,
- 'is_nullable': False
- })
+ return GenericRepr(self, {
+ 'kind': True,
+ 'length': False,
+ 'is_nullable': False
+ })
def __eq__(self, rhs):
return (isinstance(rhs, Array)
@@ -754,8 +946,8 @@ class Map(ReferenceKind):
key_kind: {Kind} The type of the keys. May be None.
value_kind: {Kind} The type of the elements. May be None.
"""
- ReferenceKind.AddSharedProperty('key_kind')
- ReferenceKind.AddSharedProperty('value_kind')
+ Kind.AddSharedProperty('key_kind')
+ Kind.AddSharedProperty('value_kind')
def __init__(self, key_kind=None, value_kind=None):
if (key_kind is not None and value_kind is not None):
@@ -780,8 +972,7 @@ class Map(ReferenceKind):
return '<%s spec=%r is_nullable=%r key_kind=%s value_kind=%s>' % (
self.__class__.__name__, self.spec, self.is_nullable,
Repr(self.key_kind), Repr(self.value_kind))
- else:
- return GenericRepr(self, {'key_kind': True, 'value_kind': True})
+ return GenericRepr(self, {'key_kind': True, 'value_kind': True})
def __eq__(self, rhs):
return (isinstance(rhs, Map) and
@@ -797,7 +988,7 @@ class Map(ReferenceKind):
class PendingRemote(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -822,7 +1013,7 @@ class PendingRemote(ReferenceKind):
class PendingReceiver(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -847,7 +1038,7 @@ class PendingReceiver(ReferenceKind):
class PendingAssociatedRemote(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -873,7 +1064,7 @@ class PendingAssociatedRemote(ReferenceKind):
class PendingAssociatedReceiver(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -899,7 +1090,7 @@ class PendingAssociatedReceiver(ReferenceKind):
class InterfaceRequest(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -923,7 +1114,7 @@ class InterfaceRequest(ReferenceKind):
class AssociatedInterfaceRequest(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -949,7 +1140,7 @@ class AssociatedInterfaceRequest(ReferenceKind):
self.kind, rhs.kind)
-class Parameter(object):
+class Parameter:
def __init__(self,
mojom_name=None,
kind=None,
@@ -983,7 +1174,7 @@ class Parameter(object):
rhs.default, rhs.attributes))
-class Method(object):
+class Method:
def __init__(self, interface, mojom_name, ordinal=None, attributes=None):
self.interface = interface
self.mojom_name = mojom_name
@@ -999,12 +1190,11 @@ class Method(object):
def Repr(self, as_ref=True):
if as_ref:
return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
- else:
- return GenericRepr(self, {
- 'mojom_name': False,
- 'parameters': True,
- 'response_parameters': True
- })
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'parameters': True,
+ 'response_parameters': True
+ })
def AddParameter(self,
mojom_name,
@@ -1061,21 +1251,49 @@ class Method(object):
return self.attributes.get(ATTRIBUTE_UNLIMITED_SIZE) \
if self.attributes else False
+ @property
+ def allowed_context(self):
+ return self.attributes.get(ATTRIBUTE_ALLOWED_CONTEXT) \
+ if self.attributes else None
+
+ @property
+ def supports_urgent(self):
+ return self.attributes.get(ATTRIBUTE_SUPPORTS_URGENT) \
+ if self.attributes else None
+
+ @property
+ def runtime_feature(self):
+ if not self.attributes:
+ return None
+ runtime_feature = self.attributes.get(ATTRIBUTE_RUNTIME_FEATURE, None)
+ if runtime_feature is None:
+ return None
+ if not isinstance(runtime_feature, Feature):
+ raise Exception("RuntimeFeature attribute on %s must be a feature." %
+ self.name)
+ return runtime_feature
+
+ def _tuple(self):
+ return (self.mojom_name, self.ordinal, self.parameters,
+ self.response_parameters, self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Method) and
- (self.mojom_name, self.ordinal, self.parameters,
- self.response_parameters,
- self.attributes) == (rhs.mojom_name, rhs.ordinal, rhs.parameters,
- rhs.response_parameters, rhs.attributes))
+ return isinstance(rhs, Method) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
class Interface(ReferenceKind):
- ReferenceKind.AddSharedProperty('mojom_name')
- ReferenceKind.AddSharedProperty('name')
- ReferenceKind.AddSharedProperty('methods')
- ReferenceKind.AddSharedProperty('enums')
- ReferenceKind.AddSharedProperty('constants')
- ReferenceKind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('methods')
+ Kind.AddSharedProperty('enums')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
def __init__(self, mojom_name=None, module=None, attributes=None):
if mojom_name is not None:
@@ -1093,12 +1311,11 @@ class Interface(ReferenceKind):
def Repr(self, as_ref=True):
if as_ref:
return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
- else:
- return GenericRepr(self, {
- 'mojom_name': False,
- 'attributes': False,
- 'methods': False
- })
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'attributes': False,
+ 'methods': False
+ })
def AddMethod(self, mojom_name, ordinal=None, attributes=None):
method = Method(self, mojom_name, ordinal, attributes)
@@ -1114,10 +1331,10 @@ class Interface(ReferenceKind):
for constant in self.constants:
constant.Stylize(stylizer)
- def IsBackwardCompatible(self, older_interface, checker):
- """This interface is backward-compatible with older_interface if and only
- if all of the following conditions hold:
- - All defined methods in older_interface (when identified by ordinal) have
+ def IsBackwardCompatible(self, rhs, checker):
+ """This interface is backward-compatible with rhs (older_interface) if and
+ only if all of the following conditions hold:
+ - All defined methods in rhs (when identified by ordinal) have
backward-compatible definitions in this interface. For each method this
means:
- The parameter list is backward-compatible, according to backward-
@@ -1131,7 +1348,7 @@ class Interface(ReferenceKind):
rules for structs.
- All newly introduced methods in this interface have a [MinVersion]
attribute specifying a version greater than any method in
- older_interface.
+ rhs.
"""
def buildOrdinalMethodMap(interface):
@@ -1144,7 +1361,7 @@ class Interface(ReferenceKind):
return methods_by_ordinal
new_methods = buildOrdinalMethodMap(self)
- old_methods = buildOrdinalMethodMap(older_interface)
+ old_methods = buildOrdinalMethodMap(rhs)
max_old_min_version = 0
for ordinal, old_method in old_methods.items():
new_method = new_methods.get(ordinal)
@@ -1187,6 +1404,39 @@ class Interface(ReferenceKind):
return True
@property
+ def service_sandbox(self):
+ if not self.attributes:
+ return None
+ service_sandbox = self.attributes.get(ATTRIBUTE_SERVICE_SANDBOX, None)
+ if service_sandbox is None:
+ return None
+ # Constants are only allowed to refer to an enum here, so replace.
+ if isinstance(service_sandbox, Constant):
+ service_sandbox = service_sandbox.value
+ if not isinstance(service_sandbox, EnumValue):
+ raise Exception("ServiceSandbox attribute on %s must be an enum value." %
+ self.module.name)
+ return service_sandbox
+
+ @property
+ def runtime_feature(self):
+ if not self.attributes:
+ return None
+ runtime_feature = self.attributes.get(ATTRIBUTE_RUNTIME_FEATURE, None)
+ if runtime_feature is None:
+ return None
+ if not isinstance(runtime_feature, Feature):
+ raise Exception("RuntimeFeature attribute on %s must be a feature." %
+ self.name)
+ return runtime_feature
+
+ @property
+ def require_context(self):
+ if not self.attributes:
+ return None
+ return self.attributes.get(ATTRIBUTE_REQUIRE_CONTEXT, None)
+
+ @property
def stable(self):
return self.attributes.get(ATTRIBUTE_STABLE, False) \
if self.attributes else False
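
    service_sandbox, runtime_feature and require_context all follow the same
    guarded-lookup shape: return None when there are no attributes, then
    validate the resolved attribute's type. A condensed sketch of that shape
    (the helper name is hypothetical):

        def _typed_attribute(kind, key, expected_type):
            # Condensed form of the three property bodies above.
            if not kind.attributes:
                return None
            value = kind.attributes.get(key, None)
            if value is None:
                return None
            if not isinstance(value, expected_type):
                raise Exception('%s attribute must be a %s.'
                                % (key, expected_type.__name__))
            return value
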
@@ -1199,11 +1449,18 @@ class Interface(ReferenceKind):
prefix = self.module.GetNamespacePrefix()
return '%s%s' % (prefix, self.mojom_name)
+ def _tuple(self):
+ return (self.mojom_name, self.methods, self.enums, self.constants,
+ self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Interface)
- and (self.mojom_name, self.methods, self.enums, self.constants,
- self.attributes) == (rhs.mojom_name, rhs.methods, rhs.enums,
- rhs.constants, rhs.attributes))
+ return isinstance(rhs, Interface) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
@property
def uuid(self):
@@ -1224,7 +1481,7 @@ class Interface(ReferenceKind):
class AssociatedInterface(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -1249,7 +1506,7 @@ class AssociatedInterface(ReferenceKind):
self.kind, rhs.kind)
-class EnumField(object):
+class EnumField:
def __init__(self,
mojom_name=None,
value=None,
@@ -1281,16 +1538,25 @@ class EnumField(object):
rhs.attributes, rhs.numeric_value))
-class Enum(Kind):
+class Enum(ValueKind):
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('native_only')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('min_value')
+ Kind.AddSharedProperty('max_value')
+ Kind.AddSharedProperty('default_field')
+
def __init__(self, mojom_name=None, module=None, attributes=None):
- self.mojom_name = mojom_name
- self.name = None
- self.native_only = False
if mojom_name is not None:
spec = 'x:' + mojom_name
else:
spec = None
- Kind.__init__(self, spec, module)
+ ValueKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.native_only = False
self.fields = []
self.attributes = attributes
self.min_value = None
@@ -1300,8 +1566,7 @@ class Enum(Kind):
def Repr(self, as_ref=True):
if as_ref:
return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
- else:
- return GenericRepr(self, {'mojom_name': False, 'fields': False})
+ return GenericRepr(self, {'mojom_name': False, 'fields': False})
def Stylize(self, stylizer):
self.name = stylizer.StylizeEnum(self.mojom_name)
@@ -1327,14 +1592,14 @@ class Enum(Kind):
return '%s%s' % (prefix, self.mojom_name)
# pylint: disable=unused-argument
- def IsBackwardCompatible(self, older_enum, checker):
- """This enum is backward-compatible with older_enum if and only if one of
- the following conditions holds:
+ def IsBackwardCompatible(self, rhs, checker):
+ """This enum is backward-compatible with rhs (older_enum) if and only if one
+ of the following conditions holds:
- Neither enum is [Extensible] and both have the exact same set of valid
numeric values. Field names and aliases for the same numeric value do
not affect compatibility.
- - older_enum is [Extensible], and for every version defined by
- older_enum, this enum has the exact same set of valid numeric values.
+ - rhs is [Extensible], and for every version defined by
+ rhs, this enum has the exact same set of valid numeric values.
"""
def buildVersionFieldMap(enum):
@@ -1345,32 +1610,49 @@ class Enum(Kind):
fields_by_min_version[field.min_version].add(field.numeric_value)
return fields_by_min_version
- old_fields = buildVersionFieldMap(older_enum)
+ old_fields = buildVersionFieldMap(rhs)
new_fields = buildVersionFieldMap(self)
- if new_fields.keys() != old_fields.keys() and not older_enum.extensible:
- return False
+ if new_fields.keys() != old_fields.keys() and not rhs.extensible:
+ raise Exception("Non-extensible enum cannot be modified")
for min_version, valid_values in old_fields.items():
- if (min_version not in new_fields
- or new_fields[min_version] != valid_values):
- return False
+ if min_version not in new_fields:
+ raise Exception('New values added to an extensible enum '
+ 'do not specify MinVersion: %s' % new_fields)
+
+ if (new_fields[min_version] != valid_values):
+ if (len(new_fields[min_version]) < len(valid_values)):
+ raise Exception('Removing values for an existing MinVersion %s '
+ 'is not allowed' % min_version)
+      raise Exception(
+          'New values don\'t match old values '
+          'for an existing MinVersion %s, '
+          'please specify MinVersion equal to "Next version" '
+          'in the enum description '
+          'for the following values:\n%s' %
+          (min_version, new_fields[min_version].difference(valid_values)))
return True
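
    A worked instance of the extensible-enum rule above, with field maps of
    {min_version: set(numeric values)} as built by buildVersionFieldMap(); the
    checker below is a simplified sketch that only reports a boolean instead of
    raising:

        def versions_compatible(new_fields, old_fields):
            # old_fields comes from the [Extensible] rhs; every old version
            # must survive unchanged in new_fields.
            for min_version, valid_values in old_fields.items():
                if min_version not in new_fields:
                    return False
                if new_fields[min_version] != valid_values:
                    return False
            return True

        assert versions_compatible({0: {0, 1}, 2: {5}}, {0: {0, 1}})  # value added at v2
        assert not versions_compatible({0: {0, 1, 5}}, {0: {0, 1}})   # v0 mutated
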
+ def _tuple(self):
+ return (self.mojom_name, self.native_only, self.fields, self.attributes,
+ self.min_value, self.max_value, self.default_field)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Enum) and
- (self.mojom_name, self.native_only, self.fields, self.attributes,
- self.min_value, self.max_value,
- self.default_field) == (rhs.mojom_name, rhs.native_only,
- rhs.fields, rhs.attributes, rhs.min_value,
- rhs.max_value, rhs.default_field))
+ return isinstance(rhs, Enum) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
def __hash__(self):
return id(self)
-class Module(object):
+class Module:
def __init__(self, path=None, mojom_namespace=None, attributes=None):
self.path = path
self.mojom_namespace = mojom_namespace
@@ -1379,24 +1661,26 @@ class Module(object):
self.unions = []
self.interfaces = []
self.enums = []
+ self.features = []
self.constants = []
- self.kinds = {}
+ self.kinds = OrderedDict()
self.attributes = attributes
self.imports = []
- self.imported_kinds = {}
- self.metadata = {}
+ self.imported_kinds = OrderedDict()
+ self.metadata = OrderedDict()
def __repr__(self):
# Gives us a decent __repr__ for modules.
return self.Repr()
def __eq__(self, rhs):
- return (isinstance(rhs, Module) and
- (self.path, self.attributes, self.mojom_namespace, self.imports,
- self.constants, self.enums, self.structs, self.unions,
- self.interfaces) == (rhs.path, rhs.attributes, rhs.mojom_namespace,
- rhs.imports, rhs.constants, rhs.enums,
- rhs.structs, rhs.unions, rhs.interfaces))
+ return (isinstance(rhs, Module)
+ and (self.path, self.attributes, self.mojom_namespace, self.imports,
+ self.constants, self.enums, self.structs, self.unions,
+ self.interfaces, self.features)
+ == (rhs.path, rhs.attributes, rhs.mojom_namespace, rhs.imports,
+ rhs.constants, rhs.enums, rhs.structs, rhs.unions,
+ rhs.interfaces, rhs.features))
def __hash__(self):
return id(self)
@@ -1405,16 +1689,16 @@ class Module(object):
if as_ref:
return '<%s path=%r mojom_namespace=%r>' % (
self.__class__.__name__, self.path, self.mojom_namespace)
- else:
- return GenericRepr(
- self, {
- 'path': False,
- 'mojom_namespace': False,
- 'attributes': False,
- 'structs': False,
- 'interfaces': False,
- 'unions': False
- })
+ return GenericRepr(
+ self, {
+ 'path': False,
+ 'mojom_namespace': False,
+ 'attributes': False,
+ 'structs': False,
+ 'interfaces': False,
+ 'unions': False,
+ 'features': False,
+ })
def GetNamespacePrefix(self):
return '%s.' % self.mojom_namespace if self.mojom_namespace else ''
@@ -1434,6 +1718,11 @@ class Module(object):
self.unions.append(union)
return union
+ def AddFeature(self, mojom_name, attributes=None):
+ feature = Feature(mojom_name, self, attributes)
+ self.features.append(feature)
+ return feature
+
def Stylize(self, stylizer):
self.namespace = stylizer.StylizeModule(self.mojom_namespace)
for struct in self.structs:
@@ -1446,12 +1735,14 @@ class Module(object):
enum.Stylize(stylizer)
for constant in self.constants:
constant.Stylize(stylizer)
+ for feature in self.features:
+ feature.Stylize(stylizer)
for imported_module in self.imports:
imported_module.Stylize(stylizer)
def Dump(self, f):
- pickle.dump(self, f, 2)
+ pickle.dump(self, f)
@classmethod
def Load(cls, f):
@@ -1461,15 +1752,15 @@ class Module(object):
def IsBoolKind(kind):
- return kind.spec == BOOL.spec
+ return kind.spec == BOOL.spec or kind.spec == NULLABLE_BOOL.spec
def IsFloatKind(kind):
- return kind.spec == FLOAT.spec
+ return kind.spec == FLOAT.spec or kind.spec == NULLABLE_FLOAT.spec
def IsDoubleKind(kind):
- return kind.spec == DOUBLE.spec
+ return kind.spec == DOUBLE.spec or kind.spec == NULLABLE_DOUBLE.spec
def IsIntegralKind(kind):
@@ -1477,7 +1768,14 @@ def IsIntegralKind(kind):
or kind.spec == INT16.spec or kind.spec == INT32.spec
or kind.spec == INT64.spec or kind.spec == UINT8.spec
or kind.spec == UINT16.spec or kind.spec == UINT32.spec
- or kind.spec == UINT64.spec)
+ or kind.spec == UINT64.spec or kind.spec == NULLABLE_BOOL.spec
+ or kind.spec == NULLABLE_INT8.spec or kind.spec == NULLABLE_INT16.spec
+ or kind.spec == NULLABLE_INT32.spec
+ or kind.spec == NULLABLE_INT64.spec
+ or kind.spec == NULLABLE_UINT8.spec
+ or kind.spec == NULLABLE_UINT16.spec
+ or kind.spec == NULLABLE_UINT32.spec
+ or kind.spec == NULLABLE_UINT64.spec)
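
    Each primitive predicate now also accepts the '?'-prefixed nullable twin of
    its spec. A self-contained sketch; the 'b'/'?b' spec strings are
    assumptions for illustration (the real constants are defined at the top of
    module.py, outside this diff):

        class K:
            def __init__(self, spec, is_nullable=False):
                self.spec = spec
                self.is_nullable = is_nullable

        BOOL, NULLABLE_BOOL = K('b'), K('?b', is_nullable=True)

        def is_bool_kind(kind):
            return kind.spec in (BOOL.spec, NULLABLE_BOOL.spec)

        assert is_bool_kind(NULLABLE_BOOL) and NULLABLE_BOOL.is_nullable
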
def IsStringKind(kind):
@@ -1522,6 +1820,10 @@ def IsArrayKind(kind):
return isinstance(kind, Array)
+def IsFeatureKind(kind):
+ return isinstance(kind, Feature)
+
+
def IsInterfaceKind(kind):
return isinstance(kind, Interface)
@@ -1558,12 +1860,16 @@ def IsEnumKind(kind):
return isinstance(kind, Enum)
+def IsValueKind(kind):
+ return isinstance(kind, ValueKind)
+
+
def IsReferenceKind(kind):
return isinstance(kind, ReferenceKind)
def IsNullableKind(kind):
- return IsReferenceKind(kind) and kind.is_nullable
+ return kind.is_nullable
def IsMapKind(kind):
@@ -1664,11 +1970,8 @@ def MethodPassesInterfaces(method):
return _AnyMethodParameterRecursive(method, IsInterfaceKind)
-def HasSyncMethods(interface):
- for method in interface.methods:
- if method.sync:
- return True
- return False
+def GetSyncMethodOrdinals(interface):
+ return [method.ordinal for method in interface.methods if method.sync]
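
    Callers that only need the old boolean can still recover it from the list;
    a usage sketch:

        # Equivalent of the removed HasSyncMethods(interface):
        has_sync_methods = bool(GetSyncMethodOrdinals(interface))
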
def HasUninterruptableMethods(interface):
@@ -1700,18 +2003,17 @@ def ContainsHandlesOrInterfaces(kind):
checked.add(kind.spec)
if IsStructKind(kind):
return any(Check(field.kind) for field in kind.fields)
- elif IsUnionKind(kind):
+ if IsUnionKind(kind):
return any(Check(field.kind) for field in kind.fields)
- elif IsAnyHandleKind(kind):
+ if IsAnyHandleKind(kind):
return True
- elif IsAnyInterfaceKind(kind):
+ if IsAnyInterfaceKind(kind):
return True
- elif IsArrayKind(kind):
+ if IsArrayKind(kind):
return Check(kind.kind)
- elif IsMapKind(kind):
+ if IsMapKind(kind):
return Check(kind.key_kind) or Check(kind.value_kind)
- else:
- return False
+ return False
return Check(kind)
@@ -1738,21 +2040,20 @@ def ContainsNativeTypes(kind):
checked.add(kind.spec)
if IsEnumKind(kind):
return kind.native_only
- elif IsStructKind(kind):
+ if IsStructKind(kind):
if kind.native_only:
return True
if any(enum.native_only for enum in kind.enums):
return True
return any(Check(field.kind) for field in kind.fields)
- elif IsUnionKind(kind):
+ if IsUnionKind(kind):
return any(Check(field.kind) for field in kind.fields)
- elif IsInterfaceKind(kind):
+ if IsInterfaceKind(kind):
return any(enum.native_only for enum in kind.enums)
- elif IsArrayKind(kind):
+ if IsArrayKind(kind):
return Check(kind.kind)
- elif IsMapKind(kind):
+ if IsMapKind(kind):
return Check(kind.key_kind) or Check(kind.value_kind)
- else:
- return False
+ return False
return Check(kind)
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
index e8fd4936..2a4e852c 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
index 88b77c98..61240426 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
@@ -1,7 +1,8 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import copy
from mojom.generate import module as mojom
# This module provides a mechanism for determining the packed order and offsets
@@ -15,7 +16,7 @@ from mojom.generate import module as mojom
HEADER_SIZE = 8
-class PackedField(object):
+class PackedField:
kind_to_size = {
mojom.BOOL: 1,
mojom.INT8: 1,
@@ -75,18 +76,55 @@ class PackedField(object):
return 8
return cls.GetSizeForKind(kind)
- def __init__(self, field, index, ordinal):
+ def __init__(self,
+ field,
+ index,
+ ordinal,
+ original_field=None,
+ sub_ordinal=None,
+ linked_value_packed_field=None):
"""
Args:
field: the original field.
index: the position of the original field in the struct.
ordinal: the ordinal of the field for serialization.
+ original_field: See below.
+ sub_ordinal: See below.
+ linked_value_packed_field: See below.
+
+ original_field, sub_ordinal, and linked_value_packed_field are used to
+ support nullable ValueKind fields. For legacy reasons, nullable ValueKind
+ fields actually generate two PackedFields. This allows:
+
+ - backwards compatibility prior to Mojo support for nullable ValueKinds.
+ - correct packing of fields for the aforementioned backwards compatibility.
+
+ When translating Fields to PackedFields, the original field is turned into
+ two PackedFields: the first PackedField always has type mojom.BOOL, while
+ the second PackedField has the non-nullable version of the field's kind.
+
+ When constructing these PackedFields, original_field references the field
+ as defined in the mojom; the name as defined in the mojom will be used for
+ all layers above the wire/data layer.
+
+ sub_ordinal is used to sort the two PackedFields correctly with respect to
+ each other: the first mojom.BOOL field always has sub_ordinal 0, while the
+ second field always has sub_ordinal 1.
+
+    Finally, linked_value_packed_field is used by the serialization and
+    deserialization helpers, which generally just iterate over a PackedStruct's
+    PackedFields in ordinal order. This allows the helpers to easily reference
+    any related PackedFields rather than having to look them up by index while
+    iterating.
"""
self.field = field
self.index = index
self.ordinal = ordinal
- self.size = self.GetSizeForKind(field.kind)
- self.alignment = self.GetAlignmentForKind(field.kind)
+ self.original_field = original_field
+ self.sub_ordinal = sub_ordinal
+ self.linked_value_packed_field = linked_value_packed_field
+ self.size = self.GetSizeForKind(self.field.kind)
+ self.alignment = self.GetAlignmentForKind(self.field.kind)
self.offset = None
self.bit = None
self.min_version = None
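
    To make the docstring concrete, a hypothetical field `int32? score` expands
    as sketched below (suffixes follow the _$flag/_$value scheme used by
    PackedStruct further down):

        # score_$flag  -> kind mojom.BOOL,  sub_ordinal 0,
        #                 linked_value_packed_field -> score_$value
        # score_$value -> kind mojom.INT32, sub_ordinal 1
        # Both PackedFields keep the original field's index and ordinal, and
        # original_field points back at the `score` field from the mojom.
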
@@ -120,7 +158,33 @@ def GetPayloadSizeUpToField(field):
return offset + pad
-class PackedStruct(object):
+def IsNullableValueKindPackedField(field):
+ """Returns true if `field` is derived from a nullable ValueKind field.
+
+ Nullable ValueKind fields often require special handling in the bindings due
+ to the way the implementation is constrained for wire compatibility.
+ """
+ assert isinstance(field, PackedField)
+ return field.sub_ordinal is not None
+
+
+def IsPrimaryNullableValueKindPackedField(field):
+ """Returns true if `field` is derived from a nullable ValueKind mojom field
+ and is the "primary" field.
+
+  The primary field is a bool PackedField that controls whether the field is
+  considered present; it holds a reference to the PackedField that carries the
+  actual value representation when the field is present.
+
+ Bindings code that translates between the wire protocol and the higher layers
+ can use this to simplify mapping multiple PackedFields to the single field
+ that is logically exposed to bindings consumers.
+ """
+ assert isinstance(field, PackedField)
+ return field.linked_value_packed_field is not None
+
+
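
    A sketch of how bindings-layer code can fold the wire-level pair back into
    one logical field using the two predicates above:

        def logical_fields(packed_struct):
            # Yields one entry per field as written in the mojom.
            for pf in packed_struct.packed_fields_in_ordinal_order:
                if not IsNullableValueKindPackedField(pf):
                    yield pf.field           # ordinary field
                elif IsPrimaryNullableValueKindPackedField(pf):
                    yield pf.original_field  # nullable value kind, reported once
                # The _$value PackedField is skipped here; it stays reachable
                # through pf.linked_value_packed_field on the primary field.
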
+class PackedStruct:
def __init__(self, struct):
self.struct = struct
# |packed_fields| contains all the fields, in increasing offset order.
@@ -139,9 +203,41 @@ class PackedStruct(object):
for index, field in enumerate(struct.fields):
if field.ordinal is not None:
ordinal = field.ordinal
- src_fields.append(PackedField(field, index, ordinal))
+ # Nullable value types are a bit weird: they generate two PackedFields
+ # despite being a single ValueKind. This is for wire compatibility to
+ # ease the transition from legacy mojom syntax where nullable value types
+ # were not supported.
+ if isinstance(field.kind, mojom.ValueKind) and field.kind.is_nullable:
+ # The suffixes intentionally use Unicode codepoints which are considered
+ # valid C++/Java/JavaScript identifiers, yet are unlikely to be used in
+ # actual user code.
+ has_value_field = copy.copy(field)
+ has_value_field.name = f'{field.mojom_name}_$flag'
+ has_value_field.kind = mojom.BOOL
+
+ value_field = copy.copy(field)
+ value_field.name = f'{field.mojom_name}_$value'
+ value_field.kind = field.kind.MakeUnnullableKind()
+
+ value_packed_field = PackedField(value_field,
+ index,
+ ordinal,
+ original_field=field,
+ sub_ordinal=1,
+ linked_value_packed_field=None)
+ has_value_packed_field = PackedField(
+ has_value_field,
+ index,
+ ordinal,
+ original_field=field,
+ sub_ordinal=0,
+ linked_value_packed_field=value_packed_field)
+ src_fields.append(has_value_packed_field)
+ src_fields.append(value_packed_field)
+ else:
+ src_fields.append(PackedField(field, index, ordinal))
ordinal += 1
- src_fields.sort(key=lambda field: field.ordinal)
+ src_fields.sort(key=lambda field: (field.ordinal, field.sub_ordinal))
# Set |min_version| for each field.
next_min_version = 0
@@ -156,10 +252,11 @@ class PackedStruct(object):
if (packed_field.min_version != 0
and mojom.IsReferenceKind(packed_field.field.kind)
and not packed_field.field.kind.is_nullable):
- raise Exception("Non-nullable fields are only allowed in version 0 of "
- "a struct. %s.%s is defined with [MinVersion=%d]." %
- (self.struct.name, packed_field.field.name,
- packed_field.min_version))
+ raise Exception(
+ "Non-nullable reference fields are only allowed in version 0 of a "
+ "struct. %s.%s is defined with [MinVersion=%d]." %
+ (self.struct.name, packed_field.field.name,
+ packed_field.min_version))
src_field = src_fields[0]
src_field.offset = 0
@@ -186,7 +283,7 @@ class PackedStruct(object):
dst_fields.append(src_field)
-class ByteInfo(object):
+class ByteInfo:
def __init__(self):
self.is_padding = False
self.packed_fields = []
@@ -214,10 +311,11 @@ def GetByteLayout(packed_struct):
return byte_info
-class VersionInfo(object):
- def __init__(self, version, num_fields, num_bytes):
+class VersionInfo:
+ def __init__(self, version, num_fields, num_packed_fields, num_bytes):
self.version = version
self.num_fields = num_fields
+ self.num_packed_fields = num_packed_fields
self.num_bytes = num_bytes
@@ -235,24 +333,35 @@ def GetVersionInfo(packed_struct):
versions = []
last_version = 0
last_num_fields = 0
+ last_num_packed_fields = 0
last_payload_size = 0
for packed_field in packed_struct.packed_fields_in_ordinal_order:
if packed_field.min_version != last_version:
versions.append(
- VersionInfo(last_version, last_num_fields,
+ VersionInfo(last_version, last_num_fields, last_num_packed_fields,
last_payload_size + HEADER_SIZE))
last_version = packed_field.min_version
- last_num_fields += 1
+ # Nullable numeric fields (e.g. `int32?`) expand to two packed fields, so to
+ # avoid double-counting, only increment if the field is:
+ # - not used for representing a nullable value kind field, or
+ # - the primary field representing the nullable value kind field.
+ last_num_fields += 1 if (
+ not IsNullableValueKindPackedField(packed_field)
+ or IsPrimaryNullableValueKindPackedField(packed_field)) else 0
+
+ last_num_packed_fields += 1
+
# The fields are iterated in ordinal order here. However, the size of a
# version is determined by the last field of that version in pack order,
# instead of ordinal order. Therefore, we need to calculate the max value.
- last_payload_size = max(
- GetPayloadSizeUpToField(packed_field), last_payload_size)
+ last_payload_size = max(GetPayloadSizeUpToField(packed_field),
+ last_payload_size)
- assert len(versions) == 0 or last_num_fields != versions[-1].num_fields
+ assert len(
+ versions) == 0 or last_num_packed_fields != versions[-1].num_packed_fields
versions.append(
- VersionInfo(last_version, last_num_fields,
+ VersionInfo(last_version, last_num_fields, last_num_packed_fields,
last_payload_size + HEADER_SIZE))
return versions
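
    The counting rule above works out as follows for the struct built in
    testGetVersionInfoPackedStruct further down (bool, bool?, bool? at versions
    0, 1, 2):

        # version 0: field_0 (bool)    -> num_fields=1, num_packed_fields=1
        # version 1: + field_1 (bool?) -> num_fields=2, num_packed_fields=3
        # version 2: + field_2 (bool?) -> num_fields=3, num_packed_fields=5
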
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
index 98c705ad..7d8e4e01 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -205,6 +205,34 @@ class PackTest(unittest.TestCase):
self.assertEqual(4, versions[2].num_fields)
self.assertEqual(32, versions[2].num_bytes)
+ def testGetVersionInfoPackedStruct(self):
+ """Tests that pack.GetVersionInfo() correctly sets version, num_fields,
+ and num_packed_fields for a packed struct.
+ """
+ struct = mojom.Struct('test')
+ struct.AddField('field_0', mojom.BOOL, ordinal=0)
+ struct.AddField('field_1',
+ mojom.NULLABLE_BOOL,
+ ordinal=1,
+ attributes={'MinVersion': 1})
+ struct.AddField('field_2',
+ mojom.NULLABLE_BOOL,
+ ordinal=2,
+ attributes={'MinVersion': 2})
+ ps = pack.PackedStruct(struct)
+ versions = pack.GetVersionInfo(ps)
+
+ self.assertEqual(3, len(versions))
+ self.assertEqual(0, versions[0].version)
+ self.assertEqual(1, versions[1].version)
+ self.assertEqual(2, versions[2].version)
+ self.assertEqual(1, versions[0].num_fields)
+ self.assertEqual(2, versions[1].num_fields)
+ self.assertEqual(3, versions[2].num_fields)
+ self.assertEqual(1, versions[0].num_packed_fields)
+ self.assertEqual(3, versions[1].num_packed_fields)
+ self.assertEqual(5, versions[2].num_packed_fields)
+
def testInterfaceAlignment(self):
"""Tests that interfaces are aligned on 4-byte boundaries, although the size
of an interface is 8 bytes.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
index 0da90058..807e2a4f 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
index 7580b780..83bb297f 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Convert parse tree to AST.
@@ -12,17 +12,294 @@ already been parsed and converted to ASTs before.
import itertools
import os
import re
-import sys
+from collections import OrderedDict
from mojom.generate import generator
from mojom.generate import module as mojom
from mojom.parse import ast
-def _IsStrOrUnicode(x):
- if sys.version_info[0] < 3:
- return isinstance(x, (unicode, str))
- return isinstance(x, str)
+is_running_backwards_compatibility_check_hack = False
+
+### DO NOT ADD ENTRIES TO THIS LIST. ###
+_EXTENSIBLE_ENUMS_MISSING_DEFAULT = (
+ 'x:arc.keymaster.mojom.Algorithm',
+ 'x:arc.keymaster.mojom.Digest',
+ 'x:arc.keymaster.mojom.SignatureResult',
+ 'x:arc.mojom.AccessibilityActionType',
+ 'x:arc.mojom.AccessibilityBooleanProperty',
+ 'x:arc.mojom.AccessibilityEventIntListProperty',
+ 'x:arc.mojom.AccessibilityEventIntProperty',
+ 'x:arc.mojom.AccessibilityEventStringProperty',
+ 'x:arc.mojom.AccessibilityEventType',
+ 'x:arc.mojom.AccessibilityFilterType',
+ 'x:arc.mojom.AccessibilityIntListProperty',
+ 'x:arc.mojom.AccessibilityIntProperty',
+ 'x:arc.mojom.AccessibilityLiveRegionType',
+ 'x:arc.mojom.AccessibilityNotificationStateType',
+ 'x:arc.mojom.AccessibilityRangeType',
+ 'x:arc.mojom.AccessibilitySelectionMode',
+ 'x:arc.mojom.AccessibilityStringListProperty',
+ 'x:arc.mojom.AccessibilityStringProperty',
+ 'x:arc.mojom.AccessibilityWindowBooleanProperty',
+ 'x:arc.mojom.AccessibilityWindowIntListProperty',
+ 'x:arc.mojom.AccessibilityWindowIntProperty',
+ 'x:arc.mojom.AccessibilityWindowStringProperty',
+ 'x:arc.mojom.AccessibilityWindowType',
+ 'x:arc.mojom.AccountCheckStatus',
+ 'x:arc.mojom.AccountUpdateType',
+ 'x:arc.mojom.ActionType',
+ 'x:arc.mojom.Algorithm',
+ 'x:arc.mojom.AndroidIdSource',
+ 'x:arc.mojom.AnrSource',
+ 'x:arc.mojom.AnrType',
+ 'x:arc.mojom.AppDiscoveryRequestState',
+ 'x:arc.mojom.AppKillType',
+ 'x:arc.mojom.AppPermission',
+ 'x:arc.mojom.AppPermissionGroup',
+ 'x:arc.mojom.AppReinstallState',
+ 'x:arc.mojom.AppShortcutItemType',
+ 'x:arc.mojom.ArcAuthCodeStatus',
+ 'x:arc.mojom.ArcClipboardDragDropEvent',
+ 'x:arc.mojom.ArcCorePriAbiMigEvent',
+ 'x:arc.mojom.ArcDnsQuery',
+ 'x:arc.mojom.ArcImageCopyPasteCompatAction',
+ 'x:arc.mojom.ArcNetworkError',
+ 'x:arc.mojom.ArcNetworkEvent',
+ 'x:arc.mojom.ArcNotificationEvent',
+ 'x:arc.mojom.ArcNotificationExpandState',
+ 'x:arc.mojom.ArcNotificationPriority',
+ 'x:arc.mojom.ArcNotificationRemoteInputState',
+ 'x:arc.mojom.ArcNotificationShownContents',
+ 'x:arc.mojom.ArcNotificationStyle',
+ 'x:arc.mojom.ArcNotificationType',
+ 'x:arc.mojom.ArcPipEvent',
+ 'x:arc.mojom.ArcResizeLockState',
+ 'x:arc.mojom.ArcSignInSuccess',
+ 'x:arc.mojom.ArcTimerResult',
+ 'x:arc.mojom.AudioSwitch',
+ 'x:arc.mojom.BluetoothAclState',
+ 'x:arc.mojom.BluetoothAdapterState',
+ 'x:arc.mojom.BluetoothAdvertisingDataType',
+ 'x:arc.mojom.BluetoothBondState',
+ 'x:arc.mojom.BluetoothDeviceType',
+ 'x:arc.mojom.BluetoothDiscoveryState',
+ 'x:arc.mojom.BluetoothGattDBAttributeType',
+ 'x:arc.mojom.BluetoothGattStatus',
+ 'x:arc.mojom.BluetoothPropertyType',
+ 'x:arc.mojom.BluetoothScanMode',
+ 'x:arc.mojom.BluetoothSdpAttributeType',
+ 'x:arc.mojom.BluetoothSocketType',
+ 'x:arc.mojom.BluetoothStatus',
+ 'x:arc.mojom.BootType',
+ 'x:arc.mojom.CaptionTextShadowType',
+ 'x:arc.mojom.ChangeType',
+ 'x:arc.mojom.ChromeAccountType',
+ 'x:arc.mojom.ChromeApp',
+ 'x:arc.mojom.ChromePage',
+ 'x:arc.mojom.ClockId',
+ 'x:arc.mojom.CloudProvisionFlowError',
+ 'x:arc.mojom.CommandResultType',
+ 'x:arc.mojom.CompanionLibApiId',
+ 'x:arc.mojom.ConnectionStateType',
+ 'x:arc.mojom.ContentChangeType',
+ 'x:arc.mojom.CpuRestrictionState',
+ 'x:arc.mojom.CursorCoordinateSpace',
+ 'x:arc.mojom.DataRestoreStatus',
+ 'x:arc.mojom.DecoderStatus',
+ 'x:arc.mojom.DeviceType',
+ 'x:arc.mojom.Digest',
+ 'x:arc.mojom.DisplayWakeLockType',
+ 'x:arc.mojom.EapMethod',
+ 'x:arc.mojom.EapPhase2Method',
+ 'x:arc.mojom.FileSelectorEventType',
+ 'x:arc.mojom.GMSCheckInError',
+ 'x:arc.mojom.GMSSignInError',
+ 'x:arc.mojom.GeneralSignInError',
+ 'x:arc.mojom.GetNetworksRequestType',
+ 'x:arc.mojom.HalPixelFormat',
+ 'x:arc.mojom.IPAddressType',
+ 'x:arc.mojom.InstallErrorReason',
+ 'x:arc.mojom.KeyFormat',
+ 'x:arc.mojom.KeyManagement',
+ 'x:arc.mojom.KeyPurpose',
+ 'x:arc.mojom.KeymasterError',
+ 'x:arc.mojom.MainAccountHashMigrationStatus',
+ 'x:arc.mojom.MainAccountResolutionStatus',
+ 'x:arc.mojom.ManagementChangeStatus',
+ 'x:arc.mojom.ManagementState',
+ 'x:arc.mojom.MessageCenterVisibility',
+ 'x:arc.mojom.MetricsType',
+ 'x:arc.mojom.MountEvent',
+ 'x:arc.mojom.NativeBridgeType',
+ 'x:arc.mojom.NetworkResult',
+ 'x:arc.mojom.NetworkType',
+ 'x:arc.mojom.OemCryptoAlgorithm',
+ 'x:arc.mojom.OemCryptoCipherMode',
+ 'x:arc.mojom.OemCryptoHdcpCapability',
+ 'x:arc.mojom.OemCryptoLicenseType',
+ 'x:arc.mojom.OemCryptoPrivateKey',
+ 'x:arc.mojom.OemCryptoProvisioningMethod',
+ 'x:arc.mojom.OemCryptoResult',
+ 'x:arc.mojom.OemCryptoRsaPaddingScheme',
+ 'x:arc.mojom.OemCryptoUsageEntryStatus',
+ 'x:arc.mojom.Padding',
+ 'x:arc.mojom.PaiFlowState',
+ 'x:arc.mojom.PatternType',
+ 'x:arc.mojom.PressureLevel',
+ 'x:arc.mojom.PrintColorMode',
+ 'x:arc.mojom.PrintContentType',
+ 'x:arc.mojom.PrintDuplexMode',
+ 'x:arc.mojom.PrinterStatus',
+ 'x:arc.mojom.ProcessState',
+ 'x:arc.mojom.PurchaseState',
+ 'x:arc.mojom.ReauthReason',
+ 'x:arc.mojom.ScaleFactor',
+ 'x:arc.mojom.SecurityType',
+ 'x:arc.mojom.SegmentStyle',
+ 'x:arc.mojom.SelectFilesActionType',
+ 'x:arc.mojom.SetNativeChromeVoxResponse',
+ 'x:arc.mojom.ShowPackageInfoPage',
+ 'x:arc.mojom.SpanType',
+ 'x:arc.mojom.SupportedLinkChangeSource',
+ 'x:arc.mojom.TetheringClientState',
+ 'x:arc.mojom.TextInputType',
+ 'x:arc.mojom.TtsEventType',
+ 'x:arc.mojom.VideoCodecProfile',
+ 'x:arc.mojom.VideoDecodeAccelerator.Result',
+ 'x:arc.mojom.VideoEncodeAccelerator.Error',
+ 'x:arc.mojom.VideoFrameStorageType',
+ 'x:arc.mojom.VideoPixelFormat',
+ 'x:arc.mojom.WakefulnessMode',
+ 'x:arc.mojom.WebApkInstallResult',
+ 'x:ash.ime.mojom.InputFieldType',
+ 'x:ash.ime.mojom.PersonalizationMode',
+ 'x:ash.language.mojom.FeatureId',
+ 'x:blink.mojom.ScrollRestorationType',
+ 'x:chromeos.cdm.mojom.CdmKeyStatus',
+ 'x:chromeos.cdm.mojom.CdmMessageType',
+ 'x:chromeos.cdm.mojom.CdmSessionType',
+ 'x:chromeos.cdm.mojom.DecryptStatus',
+ 'x:chromeos.cdm.mojom.EmeInitDataType',
+ 'x:chromeos.cdm.mojom.EncryptionScheme',
+ 'x:chromeos.cdm.mojom.HdcpVersion',
+ 'x:chromeos.cdm.mojom.OutputProtection.LinkType',
+ 'x:chromeos.cdm.mojom.OutputProtection.ProtectionType',
+ 'x:chromeos.cdm.mojom.PromiseException',
+ 'x:chromeos.cfm.mojom.EnqueuePriority',
+ 'x:chromeos.cfm.mojom.LoggerErrorCode',
+ 'x:chromeos.cfm.mojom.LoggerState',
+ 'x:chromeos.cros_healthd.mojom.CryptoAlgorithm',
+ 'x:chromeos.cros_healthd.mojom.EncryptionState',
+ 'x:chromeos.machine_learning.mojom.AnnotationUsecase',
+ 'x:chromeos.machine_learning.mojom.BuiltinModelId',
+ 'x:chromeos.machine_learning.mojom.CreateGraphExecutorResult',
+ 'x:chromeos.machine_learning.mojom.DocumentScannerResultStatus',
+ 'x:chromeos.machine_learning.mojom.EndpointReason',
+ 'x:chromeos.machine_learning.mojom.EndpointerType',
+ 'x:chromeos.machine_learning.mojom.ExecuteResult',
+ 'x:chromeos.machine_learning.mojom.GrammarCheckerResult.Status',
+ 'x:chromeos.machine_learning.mojom.HandwritingRecognizerResult.Status',
+ 'x:chromeos.machine_learning.mojom.LoadHandwritingModelResult',
+ 'x:chromeos.machine_learning.mojom.LoadModelResult',
+ 'x:chromeos.machine_learning.mojom.Rotation',
+ 'x:chromeos.network_config.mojom.ConnectionStateType',
+ 'x:chromeos.network_config.mojom.DeviceStateType',
+ 'x:chromeos.network_config.mojom.IPConfigType',
+ 'x:chromeos.network_config.mojom.NetworkType',
+ 'x:chromeos.network_config.mojom.OncSource',
+ 'x:chromeos.network_config.mojom.PolicySource',
+ 'x:chromeos.network_config.mojom.PortalState',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdEvent',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdWebRequestHttpMethod',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdWebRequestStatus',
+ 'x:cros.mojom.CameraClientType',
+ 'x:cros.mojom.CameraMetadataSectionStart',
+ 'x:cros.mojom.CameraMetadataTag',
+ 'x:cros.mojom.HalPixelFormat',
+ 'x:crosapi.mojom.AllowedPaths',
+ 'x:crosapi.mojom.BrowserAppInstanceType',
+ 'x:crosapi.mojom.CreationResult',
+ 'x:crosapi.mojom.DeviceAccessResultCode',
+ 'x:crosapi.mojom.DeviceMode',
+ 'x:crosapi.mojom.DlpRestrictionLevel',
+ 'x:crosapi.mojom.ExoImeSupport',
+ 'x:crosapi.mojom.FullscreenVisibility',
+ 'x:crosapi.mojom.GoogleServiceAuthError.State',
+ 'x:crosapi.mojom.IsInstallableResult',
+ 'x:crosapi.mojom.KeyTag',
+ 'x:crosapi.mojom.KeystoreSigningAlgorithmName',
+ 'x:crosapi.mojom.KeystoreType',
+ 'x:crosapi.mojom.LacrosFeedbackSource',
+ 'x:crosapi.mojom.MemoryPressureLevel',
+ 'x:crosapi.mojom.MetricsReportingManaged',
+ 'x:crosapi.mojom.NotificationType',
+ 'x:crosapi.mojom.OndeviceHandwritingSupport',
+ 'x:crosapi.mojom.OpenResult',
+ 'x:crosapi.mojom.PolicyDomain',
+ 'x:crosapi.mojom.RegistrationCodeType',
+ 'x:crosapi.mojom.ScaleFactor',
+ 'x:crosapi.mojom.SearchResult.OptionalBool',
+ 'x:crosapi.mojom.SelectFileDialogType',
+ 'x:crosapi.mojom.SelectFileResult',
+ 'x:crosapi.mojom.SharesheetResult',
+ 'x:crosapi.mojom.TouchEventType',
+ 'x:crosapi.mojom.VideoRotation',
+ 'x:crosapi.mojom.WallpaperLayout',
+ 'x:crosapi.mojom.WebAppInstallResultCode',
+ 'x:crosapi.mojom.WebAppUninstallResultCode',
+ 'x:device.mojom.HidBusType',
+ 'x:device.mojom.WakeLockReason',
+ 'x:device.mojom.WakeLockType',
+ 'x:drivefs.mojom.DialogReason.Type',
+ 'x:drivefs.mojom.DriveError.Type',
+ 'x:drivefs.mojom.DriveFsDelegate.ExtensionConnectionStatus',
+ 'x:drivefs.mojom.FileMetadata.CanPinStatus',
+ 'x:drivefs.mojom.FileMetadata.Type',
+ 'x:drivefs.mojom.ItemEventReason',
+ 'x:drivefs.mojom.MirrorPathStatus',
+ 'x:drivefs.mojom.MirrorSyncStatus',
+ 'x:drivefs.mojom.QueryParameters.SortField',
+ 'x:fuzz.mojom.FuzzEnum',
+ 'x:media.mojom.FillLightMode',
+ 'x:media.mojom.MeteringMode',
+ 'x:media.mojom.PowerLineFrequency',
+ 'x:media.mojom.RedEyeReduction',
+ 'x:media.mojom.ResolutionChangePolicy',
+ 'x:media.mojom.VideoCaptureApi',
+ 'x:media.mojom.VideoCaptureBufferType',
+ 'x:media.mojom.VideoCaptureError',
+ 'x:media.mojom.VideoCaptureFrameDropReason',
+ 'x:media.mojom.VideoCapturePixelFormat',
+ 'x:media.mojom.VideoCaptureTransportType',
+ 'x:media.mojom.VideoFacingMode',
+ 'x:media_session.mojom.AudioFocusType',
+ 'x:media_session.mojom.CameraState',
+ 'x:media_session.mojom.EnforcementMode',
+ 'x:media_session.mojom.MediaAudioVideoState',
+ 'x:media_session.mojom.MediaImageBitmapColorType',
+ 'x:media_session.mojom.MediaPictureInPictureState',
+ 'x:media_session.mojom.MediaPlaybackState',
+ 'x:media_session.mojom.MediaSession.SuspendType',
+ 'x:media_session.mojom.MediaSessionAction',
+ 'x:media_session.mojom.MediaSessionImageType',
+ 'x:media_session.mojom.MediaSessionInfo.SessionState',
+ 'x:media_session.mojom.MicrophoneState',
+ 'x:ml.model_loader.mojom.ComputeResult',
+ 'x:ml.model_loader.mojom.CreateModelLoaderResult',
+ 'x:ml.model_loader.mojom.LoadModelResult',
+ 'x:mojo.test.AnExtensibleEnum',
+ 'x:mojo.test.EnumB',
+ 'x:mojo.test.ExtensibleEmptyEnum',
+ 'x:mojo.test.enum_default_unittest.mojom.ExtensibleEnumWithoutDefault',
+ 'x:network.mojom.WebSandboxFlags',
+ 'x:payments.mojom.BillingResponseCode',
+ 'x:payments.mojom.CreateDigitalGoodsResponseCode',
+ 'x:payments.mojom.ItemType',
+ 'x:printing.mojom.PrinterType',
+ 'x:ui.mojom.KeyboardCode',
+)
+### DO NOT ADD ENTRIES TO THIS LIST. ###
def _DuplicateName(values):
@@ -98,12 +375,6 @@ def _MapKind(kind):
}
if kind.endswith('?'):
base_kind = _MapKind(kind[0:-1])
- # NOTE: This doesn't rule out enum types. Those will be detected later, when
- # cross-reference is established.
- reference_kinds = ('m', 's', 'h', 'a', 'r', 'x', 'asso', 'rmt', 'rcv',
- 'rma', 'rca')
- if re.split('[^a-z]', base_kind, 1)[0] not in reference_kinds:
- raise Exception('A type (spec "%s") cannot be made nullable' % base_kind)
return '?' + base_kind
if kind.endswith('}'):
lbracket = kind.rfind('{')
@@ -113,8 +384,6 @@ def _MapKind(kind):
lbracket = kind.rfind('[')
typename = kind[0:lbracket]
return 'a' + kind[lbracket + 1:-1] + ':' + _MapKind(typename)
- if kind.endswith('&'):
- return 'r:' + _MapKind(kind[0:-1])
if kind.startswith('asso<'):
assert kind.endswith('>')
return 'asso:' + _MapKind(kind[5:-1])
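
    For reference, some spec strings _MapKind() produces; the first two are
    asserted by translate_unittest.py at the end of this diff, the last is the
    user-defined-type fallback:

        # 'asso<SomeInterface>?' -> '?asso:x:SomeInterface'
        # 'rca<SomeInterface>?'  -> '?rca:x:SomeInterface'
        # 'SomeStruct'           -> 'x:SomeStruct'
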
@@ -135,13 +404,45 @@ def _MapKind(kind):
return 'x:' + kind
-def _AttributeListToDict(attribute_list):
+def _MapAttributeValue(module, kind, value):
+ # True/False/None
+ if value is None:
+ return value
+ if not isinstance(value, str):
+ return value
+ # Is the attribute value the name of a feature?
+ try:
+ # Features cannot be nested in other types, so lookup in the global scope.
+ trial = _LookupKind(module.kinds, 'x:' + value,
+ _GetScopeForKind(module, kind))
+ if isinstance(trial, mojom.Feature):
+ return trial
+ except ValueError:
+ pass
+ # Is the attribute value a constant or enum value?
+ try:
+ trial = _LookupValue(module, None, None, ('IDENTIFIER', value))
+ if isinstance(trial, mojom.ConstantValue):
+ return trial.constant
+ if isinstance(trial, mojom.EnumValue):
+ return trial
+ except ValueError:
+ pass
+ # If not a referenceable mojo type - return as a string.
+ return value
+
+
+def _AttributeListToDict(module, kind, attribute_list):
if attribute_list is None:
return None
assert isinstance(attribute_list, ast.AttributeList)
- # TODO(vtl): Check for duplicate keys here.
- return dict(
- [(attribute.key, attribute.value) for attribute in attribute_list])
+ attributes = dict()
+ for attribute in attribute_list:
+ if attribute.key in attributes:
+ raise Exception("Duplicate key (%s) in attribute list" % attribute.key)
+ attributes[attribute.key] = _MapAttributeValue(module, kind,
+ attribute.value)
+ return attributes
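
    Attribute values now resolve to a Feature, Constant or EnumValue before
    falling back to plain strings, and duplicate keys raise (exercised by
    testDuplicateAttributesException at the end of this diff). Reduced to its
    core, the duplicate check behaves like this sketch:

        def dedup_attributes(pairs):
            attributes = {}
            for key, value in pairs:
                if key in attributes:
                    raise Exception('Duplicate key (%s) in attribute list' % key)
                attributes[key] = value
            return attributes

        dedup_attributes([('key1', 'v')])        # fine
        # dedup_attributes([('key1', 'v')] * 2)  # raises
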
builtin_values = frozenset([
@@ -257,7 +558,8 @@ def _Kind(kinds, spec, scope):
return kind
if spec.startswith('?'):
- kind = _Kind(kinds, spec[1:], scope).MakeNullableKind()
+ kind = _Kind(kinds, spec[1:], scope)
+ kind = kind.MakeNullableKind()
elif spec.startswith('a:'):
kind = mojom.Array(_Kind(kinds, spec[2:], scope))
elif spec.startswith('asso:'):
@@ -303,7 +605,8 @@ def _Kind(kinds, spec, scope):
def _Import(module, import_module):
# Copy the struct kinds from our imports into the current module.
- importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface)
+ importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface,
+ mojom.Feature)
for kind in import_module.kinds.values():
if (isinstance(kind, importable_kinds)
and kind.module.path == import_module.path):
@@ -316,6 +619,32 @@ def _Import(module, import_module):
return import_module
+def _Feature(module, parsed_feature):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_feature: {ast.Feature} Parsed feature.
+
+ Returns:
+ {mojom.Feature} AST feature.
+ """
+ feature = mojom.Feature(module=module)
+ feature.mojom_name = parsed_feature.mojom_name
+ feature.spec = 'x:' + module.GetNamespacePrefix() + feature.mojom_name
+ module.kinds[feature.spec] = feature
+ feature.constants = []
+ _ProcessElements(
+ parsed_feature.mojom_name, parsed_feature.body, {
+ ast.Const:
+ lambda const: feature.constants.append(
+ _Constant(module, const, feature)),
+ })
+
+ feature.attributes = _AttributeListToDict(module, feature,
+ parsed_feature.attribute_list)
+ return feature
+
+
def _Struct(module, parsed_struct):
"""
Args:
@@ -345,7 +674,8 @@ def _Struct(module, parsed_struct):
struct.fields_data.append,
})
- struct.attributes = _AttributeListToDict(parsed_struct.attribute_list)
+ struct.attributes = _AttributeListToDict(module, struct,
+ parsed_struct.attribute_list)
# Enforce that a [Native] attribute is set to make native-only struct
# declarations more explicit.
@@ -377,7 +707,8 @@ def _Union(module, parsed_union):
union.fields_data = []
_ProcessElements(parsed_union.mojom_name, parsed_union.body,
{ast.UnionField: union.fields_data.append})
- union.attributes = _AttributeListToDict(parsed_union.attribute_list)
+ union.attributes = _AttributeListToDict(module, union,
+ parsed_union.attribute_list)
return union
@@ -398,7 +729,8 @@ def _StructField(module, parsed_field, struct):
field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
field.default = _LookupValue(module, struct, field.kind,
parsed_field.default_value)
- field.attributes = _AttributeListToDict(parsed_field.attribute_list)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
return field
@@ -414,11 +746,22 @@ def _UnionField(module, parsed_field, union):
"""
field = mojom.UnionField()
field.mojom_name = parsed_field.mojom_name
+ # Disallow unions from being self-recursive.
+ parsed_typename = parsed_field.typename
+ if parsed_typename.endswith('?'):
+ parsed_typename = parsed_typename[:-1]
+ assert parsed_typename != union.mojom_name
field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename),
(module.mojom_namespace, union.mojom_name))
field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
field.default = None
- field.attributes = _AttributeListToDict(parsed_field.attribute_list)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
+ if field.is_default and not mojom.IsNullableKind(field.kind) and \
+ not mojom.IsIntegralKind(field.kind):
+ raise Exception(
+        '[Default] field for union %s must be of nullable or integral type.' %
+ union.mojom_name)
return field
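
    The [Default] constraint at the end of _UnionField() admits exactly two
    shapes of field; a sketch in mojom terms (type names hypothetical):

        # [Default] int32 a;        -> accepted (integral)
        # [Default] SomeStruct? s;  -> accepted (nullable)
        # [Default] SomeStruct t;   -> raises (neither nullable nor integral)
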
@@ -439,7 +782,8 @@ def _Parameter(module, parsed_param, interface):
parameter.ordinal = (parsed_param.ordinal.value
if parsed_param.ordinal else None)
parameter.default = None # TODO(tibell): We never have these. Remove field?
- parameter.attributes = _AttributeListToDict(parsed_param.attribute_list)
+ parameter.attributes = _AttributeListToDict(module, parameter,
+ parsed_param.attribute_list)
return parameter
@@ -464,7 +808,8 @@ def _Method(module, parsed_method, interface):
method.response_parameters = list(
map(lambda parameter: _Parameter(module, parameter, interface),
parsed_method.response_parameter_list))
- method.attributes = _AttributeListToDict(parsed_method.attribute_list)
+ method.attributes = _AttributeListToDict(module, method,
+ parsed_method.attribute_list)
# Enforce that only methods with response can have a [Sync] attribute.
if method.sync and method.response_parameters is None:
@@ -492,7 +837,8 @@ def _Interface(module, parsed_iface):
interface.mojom_name = parsed_iface.mojom_name
interface.spec = 'x:' + module.GetNamespacePrefix() + interface.mojom_name
module.kinds[interface.spec] = interface
- interface.attributes = _AttributeListToDict(parsed_iface.attribute_list)
+ interface.attributes = _AttributeListToDict(module, interface,
+ parsed_iface.attribute_list)
interface.enums = []
interface.constants = []
interface.methods_data = []
@@ -522,7 +868,8 @@ def _EnumField(module, enum, parsed_field):
field = mojom.EnumField()
field.mojom_name = parsed_field.mojom_name
field.value = _LookupValue(module, enum, None, parsed_field.value)
- field.attributes = _AttributeListToDict(parsed_field.attribute_list)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
value = mojom.EnumValue(module, enum, field)
module.values[value.GetSpec()] = value
return field
@@ -544,7 +891,7 @@ def _ResolveNumericEnumValues(enum):
prev_value += 1
# Integral value (e.g: BEGIN = -0x1).
- elif _IsStrOrUnicode(field.value):
+ elif isinstance(field.value, str):
prev_value = int(field.value, 0)
# Reference to a previous enum value (e.g: INIT = BEGIN).
@@ -560,7 +907,10 @@ def _ResolveNumericEnumValues(enum):
else:
raise Exception('Unresolved enum value for %s' % field.value.GetSpec())
- #resolved_enum_values[field.mojom_name] = prev_value
+ if prev_value in (-128, -127):
+ raise Exception(f'{field.mojom_name} in {enum.spec} has the value '
+ f'{prev_value}, which is reserved for WTF::HashTrait\'s '
+ 'default enum specialization and may not be used.')
field.numeric_value = prev_value
if min_value is None or prev_value < min_value:
min_value = prev_value
@@ -588,7 +938,8 @@ def _Enum(module, parsed_enum, parent_kind):
mojom_name = parent_kind.mojom_name + '.' + mojom_name
enum.spec = 'x:%s.%s' % (module.mojom_namespace, mojom_name)
enum.parent_kind = parent_kind
- enum.attributes = _AttributeListToDict(parsed_enum.attribute_list)
+ enum.attributes = _AttributeListToDict(module, enum,
+ parsed_enum.attribute_list)
if not enum.native_only:
enum.fields = list(
@@ -600,11 +951,18 @@ def _Enum(module, parsed_enum, parent_kind):
for field in enum.fields:
if field.default:
if not enum.extensible:
- raise Exception('Non-extensible enums may not specify a default')
- if enum.default_field is not None:
raise Exception(
- 'Only one enumerator value may be specified as the default')
+ f'Non-extensible enum {enum.spec} may not specify a default')
+ if enum.default_field is not None:
+ raise Exception(f'Multiple [Default] enumerators in enum {enum.spec}')
enum.default_field = field
+ # While running the backwards compatibility check, ignore errors because the
+ # old version of the enum might not specify [Default].
+ if (enum.extensible and enum.default_field is None
+ and enum.spec not in _EXTENSIBLE_ENUMS_MISSING_DEFAULT
+ and not is_running_backwards_compatibility_check_hack):
+ raise Exception(
+ f'Extensible enum {enum.spec} must specify a [Default] enumerator')
module.kinds[enum.spec] = enum
@@ -696,6 +1054,11 @@ def _CollectReferencedKinds(module, all_defined_kinds):
for referenced_kind in extract_referenced_user_kinds(param.kind):
sanitized_kind = sanitize_kind(referenced_kind)
referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
+ # Consts can reference imported enums.
+ for const in module.constants:
+ if not const.kind in mojom.PRIMITIVES:
+ sanitized_kind = sanitize_kind(const.kind)
+ referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
return referenced_user_kinds
@@ -741,6 +1104,16 @@ def _AssertTypeIsStable(kind):
assertDependencyIsStable(response_param.kind)
+def _AssertStructIsValid(kind):
+ expected_ordinals = set(range(0, len(kind.fields)))
+ ordinals = set(map(lambda field: field.ordinal, kind.fields))
+ if ordinals != expected_ordinals:
+ raise Exception(
+ 'Structs must use contiguous ordinals starting from 0. ' +
+ '{} is missing the following ordinals: {}.'.format(
+ kind.mojom_name, ', '.join(map(str, expected_ordinals - ordinals))))
+
+
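
    A worked instance of the ordinal check above: a struct whose three fields
    use ordinals {0, 1, 3} reports ordinal 2 as missing:

        field_ordinals = {0, 1, 3}                 # three fields
        expected = set(range(len(field_ordinals)))  # {0, 1, 2}
        missing = expected - field_ordinals         # {2}
        # -> 'Structs must use contiguous ordinals starting from 0. '
        #    '<name> is missing the following ordinals: 2.'
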
def _Module(tree, path, imports):
"""
Args:
@@ -778,6 +1151,8 @@ def _Module(tree, path, imports):
module.structs = []
module.unions = []
module.interfaces = []
+ module.features = []
+
_ProcessElements(
filename, tree.definition_list, {
ast.Const:
@@ -791,6 +1166,8 @@ def _Module(tree, path, imports):
ast.Interface:
lambda interface: module.interfaces.append(
_Interface(module, interface)),
+ ast.Feature:
+ lambda feature: module.features.append(_Feature(module, feature)),
})
# Second pass expands fields and methods. This allows fields and parameters
@@ -806,12 +1183,24 @@ def _Module(tree, path, imports):
for enum in struct.enums:
all_defined_kinds[enum.spec] = enum
+ for feature in module.features:
+ all_defined_kinds[feature.spec] = feature
+
for union in module.unions:
union.fields = list(
map(lambda field: _UnionField(module, field, union), union.fields_data))
_AssignDefaultOrdinals(union.fields)
+ for field in union.fields:
+ if field.is_default:
+ if union.default_field is not None:
+ raise Exception('Multiple [Default] fields in union %s.' %
+ union.mojom_name)
+ union.default_field = field
del union.fields_data
all_defined_kinds[union.spec] = union
+ if union.extensible and union.default_field is None:
+ raise Exception('Extensible union %s must specify a [Default] field' %
+ union.mojom_name)
for interface in module.interfaces:
interface.methods = list(
@@ -829,8 +1218,8 @@ def _Module(tree, path, imports):
all_defined_kinds.values())
imported_kind_specs = set(all_referenced_kinds.keys()).difference(
set(all_defined_kinds.keys()))
- module.imported_kinds = dict(
- (spec, all_referenced_kinds[spec]) for spec in imported_kind_specs)
+ module.imported_kinds = OrderedDict((spec, all_referenced_kinds[spec])
+ for spec in sorted(imported_kind_specs))
generator.AddComputedData(module)
for iface in module.interfaces:
@@ -847,6 +1236,9 @@ def _Module(tree, path, imports):
if kind.stable:
_AssertTypeIsStable(kind)
+ for kind in module.structs:
+ _AssertStructIsValid(kind)
+
return module
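
A minimal, self-contained sketch of the rule _AssertStructIsValid enforces above (helper name hypothetical; the real check walks mojom field objects):

def assert_contiguous_ordinals(struct_name, ordinals):
    # Structs must use contiguous ordinals starting from 0.
    expected = set(range(len(ordinals)))
    missing = expected - set(ordinals)
    if missing:
        raise Exception(
            'Structs must use contiguous ordinals starting from 0. '
            '%s is missing the following ordinals: %s.' %
            (struct_name, ', '.join(map(str, sorted(missing)))))

assert_contiguous_ordinals('MyStruct', [0, 1, 2])    # passes
# assert_contiguous_ordinals('MyStruct', [0, 2, 2])  # raises: missing ordinal 1
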
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
index 19905c8a..b4fea924 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
@@ -1,17 +1,13 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
-import os.path
-import sys
import unittest
from mojom.generate import module as mojom
from mojom.generate import translate
from mojom.parse import ast
-
class TranslateTest(unittest.TestCase):
"""Tests |parser.Parse()|."""
@@ -69,5 +65,77 @@ class TranslateTest(unittest.TestCase):
# pylint: disable=W0212
self.assertEquals(
translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
- self.assertEquals(
- translate._MapKind("asso<SomeInterface&>?"), "?asso:r:x:SomeInterface")
+ self.assertEquals(translate._MapKind("rca<SomeInterface>?"),
+ "?rca:x:SomeInterface")
+
+ def testSelfRecursiveUnions(self):
+ """Verifies _UnionField() raises when a union is self-recursive."""
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union("SomeUnion", None,
+ ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion")]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "SomeUnion", None,
+ ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion?")]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ def testDuplicateAttributesException(self):
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "FakeUnion",
+ ast.AttributeList([
+ ast.Attribute("key1", "value"),
+ ast.Attribute("key1", "value")
+ ]),
+ ast.UnionBody([
+ ast.UnionField("a", None, None, "int32"),
+ ast.UnionField("b", None, None, "string")
+ ]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ def testEnumWithReservedValues(self):
+ """Verifies that assigning reserved values to enumerators fails."""
+ # -128 is reserved for the empty representation in WTF::HashTraits.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kReserved', None, '-128'),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
+
+ # -127 is reserved for the deleted representation in WTF::HashTraits.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kReserved', None, '-127'),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
+
+ # Implicitly assigning a reserved value should also fail.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kNotReserved', None, '-129'),
+ ast.EnumValue('kImplicitlyReserved', None, None),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
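
A distilled sketch of the invariant these tests exercise (constant name hypothetical): mojom rejects the two enumerator values that WTF::HashTraits reserves for its empty (-128) and deleted (-127) representations, whether assigned explicitly or reached implicitly.

_WTF_HASH_TRAITS_RESERVED = (-128, -127)

def check_enum_value(enum_name, field_name, value):
    if value in _WTF_HASH_TRAITS_RESERVED:
        raise Exception(
            "%s in %s has the value %d, which is reserved for "
            "WTF::HashTrait's default enum specialization and may not be "
            "used." % (field_name, enum_name, value))
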
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
index 1f0db200..aae9cdb6 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Node classes for the AST for a Mojo IDL file."""
@@ -8,17 +8,14 @@
# and lineno). You may also define __repr__() to help with analyzing test
# failures, especially for more complex types.
+import os.path
-import sys
+# Instance of 'NodeListBase' has no '_list_item_type' member (no-member)
+# pylint: disable=no-member
-def _IsStrOrUnicode(x):
- if sys.version_info[0] < 3:
- return isinstance(x, (unicode, str))
- return isinstance(x, str)
-
-class NodeBase(object):
+class NodeBase:
"""Base class for nodes in the AST."""
def __init__(self, filename=None, lineno=None):
@@ -43,7 +40,7 @@ class NodeListBase(NodeBase):
classes, in a tuple) of the members of the list.)"""
def __init__(self, item_or_items=None, **kwargs):
- super(NodeListBase, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.items = []
if item_or_items is None:
pass
@@ -62,7 +59,7 @@ class NodeListBase(NodeBase):
return self.items.__iter__()
def __eq__(self, other):
- return super(NodeListBase, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.items == other.items
# Implement this so that on failure, we get slightly more sensible output.
@@ -96,7 +93,7 @@ class Definition(NodeBase):
include parameter definitions.) This class is meant to be subclassed."""
def __init__(self, mojom_name, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
NodeBase.__init__(self, **kwargs)
self.mojom_name = mojom_name
@@ -108,13 +105,13 @@ class Attribute(NodeBase):
"""Represents an attribute."""
def __init__(self, key, value, **kwargs):
- assert _IsStrOrUnicode(key)
- super(Attribute, self).__init__(**kwargs)
+ assert isinstance(key, str)
+ super().__init__(**kwargs)
self.key = key
self.value = value
def __eq__(self, other):
- return super(Attribute, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.key == other.key and \
self.value == other.value
@@ -131,17 +128,17 @@ class Const(Definition):
def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
# The typename is currently passed through as a string.
- assert _IsStrOrUnicode(typename)
+ assert isinstance(typename, str)
# The value is either a literal (currently passed through as a string) or a
# "wrapped identifier".
- assert _IsStrOrUnicode or isinstance(value, tuple)
- super(Const, self).__init__(mojom_name, **kwargs)
+ assert isinstance(value, (tuple, str))
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.typename = typename
self.value = value
def __eq__(self, other):
- return super(Const, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.typename == other.typename and \
self.value == other.value
@@ -153,12 +150,12 @@ class Enum(Definition):
def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
- super(Enum, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.enum_value_list = enum_value_list
def __eq__(self, other):
- return super(Enum, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.enum_value_list == other.enum_value_list
@@ -170,13 +167,13 @@ class EnumValue(Definition):
# The optional value is either an int (which is current a string) or a
# "wrapped identifier".
assert attribute_list is None or isinstance(attribute_list, AttributeList)
- assert value is None or _IsStrOrUnicode(value) or isinstance(value, tuple)
- super(EnumValue, self).__init__(mojom_name, **kwargs)
+ assert value is None or isinstance(value, (tuple, str))
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.value = value
def __eq__(self, other):
- return super(EnumValue, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.value == other.value
@@ -188,18 +185,47 @@ class EnumValueList(NodeListBase):
_list_item_type = EnumValue
+class Feature(Definition):
+ """Represents a runtime feature definition."""
+ def __init__(self, mojom_name, attribute_list, body, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(body, FeatureBody) or body is None
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.body = body
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.body == other.body
+
+ def __repr__(self):
+ return "Feature(mojom_name = %s, attribute_list = %s, body = %s)" % (
+ self.mojom_name, self.attribute_list, self.body)
+
+
+# This needs to be declared after |Const|.
+class FeatureBody(NodeListBase):
+ """Represents the body of (i.e., list of definitions inside) a feature."""
+
+ # Features are compile time helpers so all fields are initializers/consts
+ # for the underlying platform feature type.
+ _list_item_type = (Const)
+
+
class Import(NodeBase):
"""Represents an import statement."""
def __init__(self, attribute_list, import_filename, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
- assert _IsStrOrUnicode(import_filename)
- super(Import, self).__init__(**kwargs)
+ assert isinstance(import_filename, str)
+ super().__init__(**kwargs)
self.attribute_list = attribute_list
- self.import_filename = import_filename
+ # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
+ self.import_filename = os.path.normpath(import_filename).replace('\\', '/')
def __eq__(self, other):
- return super(Import, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.import_filename == other.import_filename
@@ -216,12 +242,12 @@ class Interface(Definition):
def __init__(self, mojom_name, attribute_list, body, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert isinstance(body, InterfaceBody)
- super(Interface, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.body = body
def __eq__(self, other):
- return super(Interface, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.body == other.body
@@ -236,14 +262,14 @@ class Method(Definition):
assert isinstance(parameter_list, ParameterList)
assert response_parameter_list is None or \
isinstance(response_parameter_list, ParameterList)
- super(Method, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.ordinal = ordinal
self.parameter_list = parameter_list
self.response_parameter_list = response_parameter_list
def __eq__(self, other):
- return super(Method, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
self.parameter_list == other.parameter_list and \
@@ -264,12 +290,12 @@ class Module(NodeBase):
# |mojom_namespace| is either none or a "wrapped identifier".
assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
- super(Module, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.mojom_namespace = mojom_namespace
self.attribute_list = attribute_list
def __eq__(self, other):
- return super(Module, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.mojom_namespace == other.mojom_namespace and \
self.attribute_list == other.attribute_list
@@ -281,13 +307,13 @@ class Mojom(NodeBase):
assert module is None or isinstance(module, Module)
assert isinstance(import_list, ImportList)
assert isinstance(definition_list, list)
- super(Mojom, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.module = module
self.import_list = import_list
self.definition_list = definition_list
def __eq__(self, other):
- return super(Mojom, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.module == other.module and \
self.import_list == other.import_list and \
self.definition_list == other.definition_list
@@ -302,11 +328,11 @@ class Ordinal(NodeBase):
def __init__(self, value, **kwargs):
assert isinstance(value, int)
- super(Ordinal, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.value = value
def __eq__(self, other):
- return super(Ordinal, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.value == other.value
@@ -314,18 +340,18 @@ class Parameter(NodeBase):
"""Represents a method request or response parameter."""
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert ordinal is None or isinstance(ordinal, Ordinal)
- assert _IsStrOrUnicode(typename)
- super(Parameter, self).__init__(**kwargs)
+ assert isinstance(typename, str)
+ super().__init__(**kwargs)
self.mojom_name = mojom_name
self.attribute_list = attribute_list
self.ordinal = ordinal
self.typename = typename
def __eq__(self, other):
- return super(Parameter, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.mojom_name == other.mojom_name and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
@@ -344,42 +370,51 @@ class Struct(Definition):
def __init__(self, mojom_name, attribute_list, body, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert isinstance(body, StructBody) or body is None
- super(Struct, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.body = body
def __eq__(self, other):
- return super(Struct, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.body == other.body
+ def __repr__(self):
+ return "Struct(mojom_name = %s, attribute_list = %s, body = %s)" % (
+ self.mojom_name, self.attribute_list, self.body)
+
class StructField(Definition):
"""Represents a struct field definition."""
def __init__(self, mojom_name, attribute_list, ordinal, typename,
default_value, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert ordinal is None or isinstance(ordinal, Ordinal)
- assert _IsStrOrUnicode(typename)
+ assert isinstance(typename, str)
# The optional default value is currently either a value as a string or a
# "wrapped identifier".
- assert default_value is None or _IsStrOrUnicode(default_value) or \
- isinstance(default_value, tuple)
- super(StructField, self).__init__(mojom_name, **kwargs)
+ assert default_value is None or isinstance(default_value, (str, tuple))
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.ordinal = ordinal
self.typename = typename
self.default_value = default_value
def __eq__(self, other):
- return super(StructField, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
self.typename == other.typename and \
self.default_value == other.default_value
+ def __repr__(self):
+ return ("StructField(mojom_name = %s, attribute_list = %s, ordinal = %s, "
+ "typename = %s, default_value = %s") % (
+ self.mojom_name, self.attribute_list, self.ordinal,
+ self.typename, self.default_value)
+
# This needs to be declared after |StructField|.
class StructBody(NodeListBase):
@@ -394,29 +429,29 @@ class Union(Definition):
def __init__(self, mojom_name, attribute_list, body, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert isinstance(body, UnionBody)
- super(Union, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.body = body
def __eq__(self, other):
- return super(Union, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.body == other.body
class UnionField(Definition):
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert ordinal is None or isinstance(ordinal, Ordinal)
- assert _IsStrOrUnicode(typename)
- super(UnionField, self).__init__(mojom_name, **kwargs)
+ assert isinstance(typename, str)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.ordinal = ordinal
self.typename = typename
def __eq__(self, other):
- return super(UnionField, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
self.typename == other.typename
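
As a quick illustration of the new nodes, a minimal sketch (assuming the mojom package is importable, as in the unittests) of the tree a feature block produces: a Feature whose FeatureBody holds Const definitions.

from mojom.parse import ast

# feature Foo { const string name = "FooFeature"; };
body = ast.FeatureBody(ast.Const('name', None, 'string', '"FooFeature"'))
feature = ast.Feature('Foo', None, body)
assert feature.mojom_name == 'Foo'
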
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
index 62798631..b289f7b1 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
@@ -1,32 +1,26 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
-import os.path
-import sys
import unittest
from mojom.parse import ast
-
class _TestNode(ast.NodeBase):
"""Node type for tests."""
def __init__(self, value, **kwargs):
- super(_TestNode, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.value = value
def __eq__(self, other):
- return super(_TestNode, self).__eq__(other) and self.value == other.value
-
+ return super().__eq__(other) and self.value == other.value
class _TestNodeList(ast.NodeListBase):
"""Node list type for tests."""
_list_item_type = _TestNode
-
class ASTTest(unittest.TestCase):
"""Tests various AST classes."""
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
index 3cb73c5d..9687edbf 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helpers for processing conditionally enabled features in a mojom."""
@@ -17,8 +17,10 @@ class EnableIfError(Error):
def _IsEnabled(definition, enabled_features):
"""Returns true if a definition is enabled.
- A definition is enabled if it has no EnableIf attribute, or if the value of
- the EnableIf attribute is in enabled_features.
+ A definition is enabled if it has no EnableIf/EnableIfNot attribute. It is
+ retained if it has an EnableIf attribute whose value is in enabled_features,
+ or an EnableIfNot attribute whose value is not in enabled_features.
"""
if not hasattr(definition, "attribute_list"):
return True
@@ -27,17 +29,19 @@ def _IsEnabled(definition, enabled_features):
already_defined = False
for a in definition.attribute_list:
- if a.key == 'EnableIf':
+ if a.key == 'EnableIf' or a.key == 'EnableIfNot':
if already_defined:
raise EnableIfError(
definition.filename,
- "EnableIf attribute may only be defined once per field.",
+ "EnableIf/EnableIfNot attribute may only be set once per field.",
definition.lineno)
already_defined = True
for attribute in definition.attribute_list:
if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
return False
+ if attribute.key == 'EnableIfNot' and attribute.value in enabled_features:
+ return False
return True
@@ -56,15 +60,12 @@ def _FilterDefinition(definition, enabled_features):
"""Filters definitions with a body."""
if isinstance(definition, ast.Enum):
_FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
- elif isinstance(definition, ast.Interface):
- _FilterDisabledFromNodeList(definition.body, enabled_features)
elif isinstance(definition, ast.Method):
_FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
_FilterDisabledFromNodeList(definition.response_parameter_list,
enabled_features)
- elif isinstance(definition, ast.Struct):
- _FilterDisabledFromNodeList(definition.body, enabled_features)
- elif isinstance(definition, ast.Union):
+ elif isinstance(definition,
+ (ast.Interface, ast.Struct, ast.Union, ast.Feature)):
_FilterDisabledFromNodeList(definition.body, enabled_features)
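
A standalone sketch of the retention rule the docstring above describes (function and argument names hypothetical):

def is_enabled(attributes, enabled_features):
    # Drop a definition if an EnableIf value is missing from
    # enabled_features, or an EnableIfNot value is present in it.
    for key, value in attributes:
        if key == 'EnableIf' and value not in enabled_features:
            return False
        if key == 'EnableIfNot' and value in enabled_features:
            return False
    return True

assert is_enabled([], {'red'})
assert is_enabled([('EnableIf', 'red')], {'red'})
assert not is_enabled([('EnableIf', 'blue')], {'red'})
assert not is_enabled([('EnableIfNot', 'red')], {'red'})
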
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
index aa609be7..cca1764b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
@@ -1,13 +1,12 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
+import importlib.util
import os
import sys
import unittest
-
def _GetDirAbove(dirname):
"""Returns the directory "above" this file containing |dirname| (which must
also be "above" this file)."""
@@ -18,9 +17,8 @@ def _GetDirAbove(dirname):
if tail == dirname:
return path
-
try:
- imp.find_module('mojom')
+ importlib.util.find_spec("mojom")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
import mojom.parse.ast as ast
@@ -29,7 +27,6 @@ import mojom.parse.parser as parser
ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
-
class ConditionalFeaturesTest(unittest.TestCase):
"""Tests |mojom.parse.conditional_features|."""
@@ -55,6 +52,48 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(const_source, expected_source)
+ def testFilterIfNotConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIfNot=blue]
+ const int kMyConst1 = 1;
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIf=blue]
+ const int kMyConst3 = 3;
+ [EnableIfNot=blue]
+ const int kMyConst4 = 4;
+ [EnableIfNot=purple]
+ const int kMyConst5 = 5;
+ """
+ expected_source = """
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIf=blue]
+ const int kMyConst3 = 3;
+ [EnableIfNot=purple]
+ const int kMyConst5 = 5;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
+ def testFilterIfNotMultipleConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIfNot=blue]
+ const int kMyConst1 = 1;
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIfNot=orange]
+ const int kMyConst3 = 3;
+ """
+ expected_source = """
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIfNot=orange]
+ const int kMyConst3 = 3;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
def testFilterEnum(self):
"""Test that EnumValues are correctly filtered from an Enum."""
enum_source = """
@@ -91,6 +130,24 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(import_source, expected_source)
+ def testFilterIfNotImport(self):
+ """Test that imports are correctly filtered from a Mojom."""
+ import_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ [EnableIfNot=purple]
+ import "bar.mojom";
+ [EnableIfNot=green]
+ import "baz.mojom";
+ """
+ expected_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ [EnableIfNot=purple]
+ import "bar.mojom";
+ """
+ self.parseAndAssertEqual(import_source, expected_source)
+
def testFilterInterface(self):
"""Test that definitions are correctly filtered from an Interface."""
interface_source = """
@@ -175,6 +232,50 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(struct_source, expected_source)
+ def testFilterIfNotStruct(self):
+ """Test that definitions are correctly filtered from a Struct."""
+ struct_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ [EnableIfNot=red]
+ VALUE2,
+ };
+ [EnableIfNot=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIfNot=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ [EnableIfNot=red]
+ double f;
+ };
+ """
+ expected_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ };
+ [EnableIfNot=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIfNot=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ };
+ """
+ self.parseAndAssertEqual(struct_source, expected_source)
+
def testFilterUnion(self):
"""Test that UnionFields are correctly filtered from a Union."""
union_source = """
@@ -216,6 +317,25 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(mojom_source, expected_source)
+ def testFeaturesWithEnableIf(self):
+ mojom_source = """
+ feature Foo {
+ const string name = "FooFeature";
+ [EnableIf=red]
+ const bool default_state = false;
+ [EnableIf=yellow]
+ const bool default_state = true;
+ };
+ """
+ expected_source = """
+ feature Foo {
+ const string name = "FooFeature";
+ [EnableIf=red]
+ const bool default_state = false;
+ };
+ """
+ self.parseAndAssertEqual(mojom_source, expected_source)
+
def testMultipleEnableIfs(self):
source = """
enum Foo {
@@ -228,6 +348,29 @@ class ConditionalFeaturesTest(unittest.TestCase):
conditional_features.RemoveDisabledDefinitions,
definition, ENABLED_FEATURES)
+ def testEnableIfAndEnableIfNot(self):
+ source = """
+ enum Foo {
+ [EnableIf=red,EnableIfNot=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
+
+ def testMultipleEnableIfNots(self):
+ source = """
+ enum Foo {
+ [EnableIfNot=red,EnableIfNot=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
if __name__ == '__main__':
unittest.main()
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
index 3e084bbf..00136a8b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
@@ -1,8 +1,7 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
import os.path
import sys
@@ -22,7 +21,7 @@ class LexError(Error):
# We have methods which look like they could be functions:
# pylint: disable=R0201
-class Lexer(object):
+class Lexer:
def __init__(self, filename):
self.filename = filename
@@ -56,6 +55,7 @@ class Lexer(object):
'PENDING_RECEIVER',
'PENDING_ASSOCIATED_REMOTE',
'PENDING_ASSOCIATED_RECEIVER',
+ 'FEATURE',
)
keyword_map = {}
@@ -81,7 +81,6 @@ class Lexer(object):
# Operators
'MINUS',
'PLUS',
- 'AMP',
'QSTN',
# Assignment
@@ -168,7 +167,6 @@ class Lexer(object):
# Operators
t_MINUS = r'-'
t_PLUS = r'\+'
- t_AMP = r'&'
t_QSTN = r'\?'
# =
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
index eadc6587..bc9f8354 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
@@ -1,13 +1,12 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
+import importlib.util
import os.path
import sys
import unittest
-
def _GetDirAbove(dirname):
"""Returns the directory "above" this file containing |dirname| (which must
also be "above" this file)."""
@@ -18,17 +17,15 @@ def _GetDirAbove(dirname):
if tail == dirname:
return path
-
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex
try:
- imp.find_module("mojom")
+ importlib.util.find_spec("mojom")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer
-
# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
@@ -146,7 +143,6 @@ class LexerTest(unittest.TestCase):
self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
self.assertEquals(
self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
- self.assertEquals(self._SingleTokenForInput("&"), _MakeLexToken("AMP", "&"))
self.assertEquals(
self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
self.assertEquals(
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
index b3b803d6..1dffd98b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
@@ -1,8 +1,11 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a syntax tree from a Mojo IDL file."""
+# Breaking parser stanzas is unhelpful so allow longer lines.
+# pylint: disable=line-too-long
+
import os.path
import sys
@@ -33,7 +36,7 @@ class ParseError(Error):
# We have methods which look like they could be functions:
# pylint: disable=R0201
-class Parser(object):
+class Parser:
def __init__(self, lexer, source, filename):
self.tokens = lexer.tokens
self.source = source
@@ -111,7 +114,8 @@ class Parser(object):
| union
| interface
| enum
- | const"""
+ | const
+ | feature"""
p[0] = p[1]
def p_attribute_section_1(self, p):
@@ -140,12 +144,19 @@ class Parser(object):
p[0].Append(p[3])
def p_attribute_1(self, p):
- """attribute : NAME EQUALS evaled_literal
- | NAME EQUALS NAME"""
- p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
+ """attribute : name_wrapped EQUALS identifier_wrapped"""
+ p[0] = ast.Attribute(p[1],
+ p[3][1],
+ filename=self.filename,
+ lineno=p.lineno(1))
def p_attribute_2(self, p):
- """attribute : NAME"""
+ """attribute : name_wrapped EQUALS evaled_literal
+ | name_wrapped EQUALS name_wrapped"""
+ p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
+
+ def p_attribute_3(self, p):
+ """attribute : name_wrapped"""
p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))
def p_evaled_literal(self, p):
@@ -161,11 +172,11 @@ class Parser(object):
p[0] = eval(p[1])
def p_struct_1(self, p):
- """struct : attribute_section STRUCT NAME LBRACE struct_body RBRACE SEMI"""
+ """struct : attribute_section STRUCT name_wrapped LBRACE struct_body RBRACE SEMI"""
p[0] = ast.Struct(p[3], p[1], p[5])
def p_struct_2(self, p):
- """struct : attribute_section STRUCT NAME SEMI"""
+ """struct : attribute_section STRUCT name_wrapped SEMI"""
p[0] = ast.Struct(p[3], p[1], None)
def p_struct_body_1(self, p):
@@ -180,11 +191,24 @@ class Parser(object):
p[0].Append(p[2])
def p_struct_field(self, p):
- """struct_field : attribute_section typename NAME ordinal default SEMI"""
+ """struct_field : attribute_section typename name_wrapped ordinal default SEMI"""
p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])
+ def p_feature(self, p):
+ """feature : attribute_section FEATURE NAME LBRACE feature_body RBRACE SEMI"""
+ p[0] = ast.Feature(p[3], p[1], p[5])
+
+ def p_feature_body_1(self, p):
+ """feature_body : """
+ p[0] = ast.FeatureBody()
+
+ def p_feature_body_2(self, p):
+ """feature_body : feature_body const"""
+ p[0] = p[1]
+ p[0].Append(p[2])
+
def p_union(self, p):
- """union : attribute_section UNION NAME LBRACE union_body RBRACE SEMI"""
+ """union : attribute_section UNION name_wrapped LBRACE union_body RBRACE SEMI"""
p[0] = ast.Union(p[3], p[1], p[5])
def p_union_body_1(self, p):
@@ -197,7 +221,7 @@ class Parser(object):
p[1].Append(p[2])
def p_union_field(self, p):
- """union_field : attribute_section typename NAME ordinal SEMI"""
+ """union_field : attribute_section typename name_wrapped ordinal SEMI"""
p[0] = ast.UnionField(p[3], p[1], p[4], p[2])
def p_default_1(self, p):
@@ -209,8 +233,7 @@ class Parser(object):
p[0] = p[2]
def p_interface(self, p):
- """interface : attribute_section INTERFACE NAME LBRACE interface_body \
- RBRACE SEMI"""
+ """interface : attribute_section INTERFACE name_wrapped LBRACE interface_body RBRACE SEMI"""
p[0] = ast.Interface(p[3], p[1], p[5])
def p_interface_body_1(self, p):
@@ -233,8 +256,7 @@ class Parser(object):
p[0] = p[3]
def p_method(self, p):
- """method : attribute_section NAME ordinal LPAREN parameter_list RPAREN \
- response SEMI"""
+ """method : attribute_section name_wrapped ordinal LPAREN parameter_list RPAREN response SEMI"""
p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])
def p_parameter_list_1(self, p):
@@ -255,7 +277,7 @@ class Parser(object):
p[0].Append(p[3])
def p_parameter(self, p):
- """parameter : attribute_section typename NAME ordinal"""
+ """parameter : attribute_section typename name_wrapped ordinal"""
p[0] = ast.Parameter(
p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))
@@ -271,8 +293,7 @@ class Parser(object):
"""nonnullable_typename : basictypename
| array
| fixed_array
- | associative_array
- | interfacerequest"""
+ | associative_array"""
p[0] = p[1]
def p_basictypename(self, p):
@@ -297,18 +318,16 @@ class Parser(object):
p[0] = "rcv<%s>" % p[3]
def p_associatedremotetype(self, p):
- """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier \
- RANGLE"""
+ """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier RANGLE"""
p[0] = "rma<%s>" % p[3]
def p_associatedreceivertype(self, p):
- """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier \
- RANGLE"""
+ """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier RANGLE"""
p[0] = "rca<%s>" % p[3]
def p_handletype(self, p):
"""handletype : HANDLE
- | HANDLE LANGLE NAME RANGLE"""
+ | HANDLE LANGLE name_wrapped RANGLE"""
if len(p) == 2:
p[0] = p[1]
else:
@@ -342,14 +361,6 @@ class Parser(object):
"""associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
p[0] = p[5] + "{" + p[3] + "}"
- def p_interfacerequest(self, p):
- """interfacerequest : identifier AMP
- | ASSOCIATED identifier AMP"""
- if len(p) == 3:
- p[0] = p[1] + "&"
- else:
- p[0] = "asso<" + p[2] + "&>"
-
def p_ordinal_1(self, p):
"""ordinal : """
p[0] = None
@@ -366,15 +377,14 @@ class Parser(object):
p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
def p_enum_1(self, p):
- """enum : attribute_section ENUM NAME LBRACE enum_value_list \
- RBRACE SEMI
- | attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
- COMMA RBRACE SEMI"""
+ """enum : attribute_section ENUM name_wrapped LBRACE enum_value_list RBRACE SEMI
+ | attribute_section ENUM name_wrapped LBRACE \
+ nonempty_enum_value_list COMMA RBRACE SEMI"""
p[0] = ast.Enum(
p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))
def p_enum_2(self, p):
- """enum : attribute_section ENUM NAME SEMI"""
+ """enum : attribute_section ENUM name_wrapped SEMI"""
p[0] = ast.Enum(
p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))
@@ -396,9 +406,9 @@ class Parser(object):
p[0].Append(p[3])
def p_enum_value(self, p):
- """enum_value : attribute_section NAME
- | attribute_section NAME EQUALS int
- | attribute_section NAME EQUALS identifier_wrapped"""
+ """enum_value : attribute_section name_wrapped
+ | attribute_section name_wrapped EQUALS int
+ | attribute_section name_wrapped EQUALS identifier_wrapped"""
p[0] = ast.EnumValue(
p[2],
p[1],
@@ -407,7 +417,7 @@ class Parser(object):
lineno=p.lineno(2))
def p_const(self, p):
- """const : attribute_section CONST typename NAME EQUALS constant SEMI"""
+ """const : attribute_section CONST typename name_wrapped EQUALS constant SEMI"""
p[0] = ast.Const(p[4], p[1], p[3], p[6])
def p_constant(self, p):
@@ -422,10 +432,16 @@ class Parser(object):
# TODO(vtl): Make this produce a "wrapped" identifier (probably as an
# |ast.Identifier|, to be added) and get rid of identifier_wrapped.
def p_identifier(self, p):
- """identifier : NAME
- | NAME DOT identifier"""
+ """identifier : name_wrapped
+ | name_wrapped DOT identifier"""
p[0] = ''.join(p[1:])
+ # Allow 'feature' to be a name literal, not just a keyword.
+ def p_name_wrapped(self, p):
+ """name_wrapped : NAME
+ | FEATURE"""
+ p[0] = p[1]
+
def p_literal(self, p):
"""literal : int
| float
@@ -458,6 +474,12 @@ class Parser(object):
# TODO(vtl): Can we figure out what's missing?
raise ParseError(self.filename, "Unexpected end of file")
+ if e.value == 'feature':
+ raise ParseError(self.filename,
+ "`feature` is reserved for a future mojom keyword",
+ lineno=e.lineno,
+ snippet=self._GetSnippet(e.lineno))
+
raise ParseError(
self.filename,
"Unexpected %r:" % e.value,
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
index 6d6b7153..0a26307b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
@@ -1,17 +1,13 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
-import os.path
-import sys
import unittest
from mojom.parse import ast
from mojom.parse import lexer
from mojom.parse import parser
-
class ParserTest(unittest.TestCase):
"""Tests |parser.Parse()|."""
@@ -1086,7 +1082,7 @@ class ParserTest(unittest.TestCase):
handle<data_pipe_producer>? k;
handle<message_pipe>? l;
handle<shared_buffer>? m;
- some_interface&? n;
+ pending_receiver<some_interface>? n;
handle<platform>? o;
};
"""
@@ -1110,7 +1106,7 @@ class ParserTest(unittest.TestCase):
ast.StructField('l', None, None, 'handle<message_pipe>?', None),
ast.StructField('m', None, None, 'handle<shared_buffer>?',
None),
- ast.StructField('n', None, None, 'some_interface&?', None),
+ ast.StructField('n', None, None, 'rcv<some_interface>?', None),
ast.StructField('o', None, None, 'handle<platform>?', None)
]))
])
@@ -1138,16 +1134,6 @@ class ParserTest(unittest.TestCase):
r" *handle\?<data_pipe_consumer> a;$"):
parser.Parse(source2, "my_file.mojom")
- source3 = """\
- struct MyStruct {
- some_interface?& a;
- };
- """
- with self.assertRaisesRegexp(
- parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '&':\n"
- r" *some_interface\?& a;$"):
- parser.Parse(source3, "my_file.mojom")
-
def testSimpleUnion(self):
"""Tests a simple .mojom source that just defines a union."""
source = """\
@@ -1317,9 +1303,9 @@ class ParserTest(unittest.TestCase):
source1 = """\
struct MyStruct {
associated MyInterface a;
- associated MyInterface& b;
+ pending_associated_receiver<MyInterface> b;
associated MyInterface? c;
- associated MyInterface&? d;
+ pending_associated_receiver<MyInterface>? d;
};
"""
expected1 = ast.Mojom(None, ast.ImportList(), [
@@ -1327,16 +1313,16 @@ class ParserTest(unittest.TestCase):
'MyStruct', None,
ast.StructBody([
ast.StructField('a', None, None, 'asso<MyInterface>', None),
- ast.StructField('b', None, None, 'asso<MyInterface&>', None),
+ ast.StructField('b', None, None, 'rca<MyInterface>', None),
ast.StructField('c', None, None, 'asso<MyInterface>?', None),
- ast.StructField('d', None, None, 'asso<MyInterface&>?', None)
+ ast.StructField('d', None, None, 'rca<MyInterface>?', None)
]))
])
self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
source2 = """\
interface MyInterface {
- MyMethod(associated A a) =>(associated B& b);
+ MyMethod(associated A a) =>(pending_associated_receiver<B> b);
};"""
expected2 = ast.Mojom(None, ast.ImportList(), [
ast.Interface(
@@ -1344,10 +1330,10 @@ class ParserTest(unittest.TestCase):
ast.InterfaceBody(
ast.Method(
'MyMethod', None, None,
- ast.ParameterList(
- ast.Parameter('a', None, None, 'asso<A>')),
- ast.ParameterList(
- ast.Parameter('b', None, None, 'asso<B&>')))))
+ ast.ParameterList(ast.Parameter('a', None, None,
+ 'asso<A>')),
+ ast.ParameterList(ast.Parameter('b', None, None,
+ 'rca<B>')))))
])
self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
@@ -1385,6 +1371,5 @@ class ParserTest(unittest.TestCase):
r" *associated\? MyInterface& a;$"):
parser.Parse(source3, "my_file.mojom")
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser.py b/utils/ipc/mojo/public/tools/mojom/mojom_parser.py
index eb90c825..9693090e 100755
--- a/utils/ipc/mojo/public/tools/mojom/mojom_parser.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom_parser.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parses mojom IDL files.
@@ -11,6 +11,7 @@ generate usable language bindings.
"""
import argparse
+import builtins
import codecs
import errno
import json
@@ -19,6 +20,7 @@ import multiprocessing
import os
import os.path
import sys
+import traceback
from collections import defaultdict
from mojom.generate import module
@@ -28,16 +30,12 @@ from mojom.parse import conditional_features
# Disable this for easier debugging.
-# In Python 2, subprocesses just hang when exceptions are thrown :(.
-_ENABLE_MULTIPROCESSING = sys.version_info[0] > 2
+_ENABLE_MULTIPROCESSING = True
-if sys.version_info < (3, 4):
- _MULTIPROCESSING_USES_FORK = sys.platform.startswith('linux')
-else:
- # https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
- if __name__ == '__main__' and sys.platform == 'darwin':
- multiprocessing.set_start_method('fork')
- _MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
+# https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
+if __name__ == '__main__' and sys.platform == 'darwin':
+ multiprocessing.set_start_method('fork')
+_MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
def _ResolveRelativeImportPath(path, roots):
@@ -63,7 +61,7 @@ def _ResolveRelativeImportPath(path, roots):
raise ValueError('"%s" does not exist in any of %s' % (path, roots))
-def _RebaseAbsolutePath(path, roots):
+def RebaseAbsolutePath(path, roots):
"""Rewrites an absolute file path as relative to an absolute directory path in
roots.
@@ -139,7 +137,7 @@ def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
# Already done.
return
- for dep_abspath, dep_path in dependencies[mojom_abspath]:
+ for dep_abspath, dep_path in sorted(dependencies[mojom_abspath]):
if dep_abspath not in loaded_modules:
_EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
loaded_modules, module_metadata)
@@ -159,11 +157,19 @@ def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
def collect(metadata_filename):
processed_deps.add(metadata_filename)
+
+ # Paths in the metadata file are relative to the metadata file's dir.
+ metadata_dir = os.path.abspath(os.path.dirname(metadata_filename))
+
+ def to_abs(s):
+ return os.path.normpath(os.path.join(metadata_dir, s))
+
with open(metadata_filename) as f:
metadata = json.load(f)
allowed_imports.update(
- map(os.path.normcase, map(os.path.normpath, metadata['sources'])))
+ [os.path.normcase(to_abs(s)) for s in metadata['sources']])
for dep_metadata in metadata['deps']:
+ dep_metadata = to_abs(dep_metadata)
if dep_metadata not in processed_deps:
collect(dep_metadata)
@@ -172,8 +178,7 @@ def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
# multiprocessing helper.
-def _ParseAstHelper(args):
- mojom_abspath, enabled_features = args
+def _ParseAstHelper(mojom_abspath, enabled_features):
with codecs.open(mojom_abspath, encoding='utf-8') as f:
ast = parser.Parse(f.read(), mojom_abspath)
conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
@@ -181,8 +186,7 @@ def _ParseAstHelper(args):
# multiprocessing helper.
-def _SerializeHelper(args):
- mojom_abspath, mojom_path = args
+def _SerializeHelper(mojom_abspath, mojom_path):
module_path = os.path.join(_SerializeHelper.output_root_path,
_GetModuleFilename(mojom_path))
module_dir = os.path.dirname(module_path)
@@ -199,12 +203,33 @@ def _SerializeHelper(args):
_SerializeHelper.loaded_modules[mojom_abspath].Dump(f)
-def _Shard(target_func, args, processes=None):
- args = list(args)
+class _ExceptionWrapper:
+ def __init__(self):
+ # Do not capture exception object to ensure pickling works.
+ self.formatted_trace = traceback.format_exc()
+
+
+class _FuncWrapper:
+ """Marshals exceptions and spreads args."""
+
+ def __init__(self, func):
+ self._func = func
+
+ def __call__(self, args):
+ # multiprocessing does not gracefully handle exceptions.
+ # https://crbug.com/1219044
+ try:
+ return self._func(*args)
+ except: # pylint: disable=bare-except
+ return _ExceptionWrapper()
+
+
+def _Shard(target_func, arg_list, processes=None):
+ arg_list = list(arg_list)
if processes is None:
processes = multiprocessing.cpu_count()
# Seems optimal to have each process perform at least 2 tasks.
- processes = min(processes, len(args) // 2)
+ processes = min(processes, len(arg_list) // 2)
if sys.platform == 'win32':
# TODO(crbug.com/1190269) - we can't use more than 56
@@ -213,13 +238,17 @@ def _Shard(target_func, args, processes=None):
# Don't spin up processes unless there is enough work to merit doing so.
if not _ENABLE_MULTIPROCESSING or processes < 2:
- for result in map(target_func, args):
- yield result
+ for arg_tuple in arg_list:
+ yield target_func(*arg_tuple)
return
pool = multiprocessing.Pool(processes=processes)
try:
- for result in pool.imap_unordered(target_func, args):
+ wrapped_func = _FuncWrapper(target_func)
+ for result in pool.imap_unordered(wrapped_func, arg_list):
+ if isinstance(result, _ExceptionWrapper):
+ sys.stderr.write(result.formatted_trace)
+ sys.exit(1)
yield result
finally:
pool.close()
@@ -230,6 +259,7 @@ def _Shard(target_func, args, processes=None):
def _ParseMojoms(mojom_files,
input_root_paths,
output_root_path,
+ module_root_paths,
enabled_features,
module_metadata,
allowed_imports=None):
@@ -245,8 +275,10 @@ def _ParseMojoms(mojom_files,
are based on the mojom's relative path, rebased onto this path.
Additionally, the script expects this root to contain already-generated
modules for any transitive dependencies not listed in mojom_files.
+ module_root_paths: A list of absolute filesystem paths which contain
+ already-generated modules for any non-transitive dependencies.
enabled_features: A list of enabled feature names, controlling which AST
- nodes are filtered by [EnableIf] attributes.
+ nodes are filtered by [EnableIf] or [EnableIfNot] attributes.
module_metadata: A list of 2-tuples representing metadata key-value pairs to
attach to each compiled module output.
@@ -262,7 +294,7 @@ def _ParseMojoms(mojom_files,
loaded_modules = {}
input_dependencies = defaultdict(set)
mojom_files_to_parse = dict((os.path.normcase(abs_path),
- _RebaseAbsolutePath(abs_path, input_root_paths))
+ RebaseAbsolutePath(abs_path, input_root_paths))
for abs_path in mojom_files)
abs_paths = dict(
(path, abs_path) for abs_path, path in mojom_files_to_parse.items())
@@ -274,7 +306,7 @@ def _ParseMojoms(mojom_files,
loaded_mojom_asts[mojom_abspath] = ast
logging.info('Processing dependencies')
- for mojom_abspath, ast in loaded_mojom_asts.items():
+ for mojom_abspath, ast in sorted(loaded_mojom_asts.items()):
invalid_imports = []
for imp in ast.import_list:
import_abspath = _ResolveRelativeImportPath(imp.import_filename,
@@ -295,8 +327,8 @@ def _ParseMojoms(mojom_files,
# be parsed and have a module file sitting in a corresponding output
# location.
module_path = _GetModuleFilename(imp.import_filename)
- module_abspath = _ResolveRelativeImportPath(module_path,
- [output_root_path])
+ module_abspath = _ResolveRelativeImportPath(
+ module_path, module_root_paths + [output_root_path])
with open(module_abspath, 'rb') as module_file:
loaded_modules[import_abspath] = module.Module.Load(module_file)
@@ -371,6 +403,15 @@ already present in the provided output root.""")
'ROOT is also searched for existing modules of any transitive imports '
'which were not included in the set of inputs.')
arg_parser.add_argument(
+ '--module-root',
+ default=[],
+ action='append',
+ metavar='ROOT',
+ dest='module_root_paths',
+ help='Adds ROOT to the set of root paths to search for existing modules '
+ 'of non-transitive imports. Provided root paths are always searched in '
+ 'order from longest absolute path to shortest.')
+ arg_parser.add_argument(
'--mojoms',
nargs='+',
dest='mojom_files',
@@ -396,9 +437,9 @@ already present in the provided output root.""")
help='Enables a named feature when parsing the given mojoms. Features '
'are identified by arbitrary string values. Specifying this flag with a '
'given FEATURE name will cause the parser to process any syntax elements '
- 'tagged with an [EnableIf=FEATURE] attribute. If this flag is not '
- 'provided for a given FEATURE, such tagged elements are discarded by the '
- 'parser and will not be present in the compiled output.')
+ 'tagged with an [EnableIf=FEATURE] or [EnableIfNot] attribute. If this '
+ 'flag is not provided for a given FEATURE, such tagged elements are '
+ 'discarded by the parser and will not be present in the compiled output.')
arg_parser.add_argument(
'--check-imports',
dest='build_metadata_filename',
@@ -436,6 +477,7 @@ already present in the provided output root.""")
mojom_files = list(map(os.path.abspath, args.mojom_files))
input_roots = list(map(os.path.abspath, args.input_root_paths))
output_root = os.path.abspath(args.output_root_path)
+ module_roots = list(map(os.path.abspath, args.module_root_paths))
if args.build_metadata_filename:
allowed_imports = _CollectAllowedImportsFromBuildMetadata(
@@ -445,13 +487,16 @@ already present in the provided output root.""")
module_metadata = list(
map(lambda kvp: tuple(kvp.split('=')), args.module_metadata))
- _ParseMojoms(mojom_files, input_roots, output_root, args.enabled_features,
- module_metadata, allowed_imports)
+ _ParseMojoms(mojom_files, input_roots, output_root, module_roots,
+ args.enabled_features, module_metadata, allowed_imports)
logging.info('Finished')
- # Exit without running GC, which can save multiple seconds due the large
- # number of object created.
- os._exit(0)
if __name__ == '__main__':
Run(sys.argv[1:])
+ # Exit without running GC, which can save multiple seconds due to the large
+ # number of objects created. But flushing is necessary as os._exit doesn't do
+ # that.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(0)
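
A standalone sketch of the _FuncWrapper/_ExceptionWrapper pattern above (worker function hypothetical): tracebacks are formatted in the child and returned as picklable strings, since multiprocessing does not propagate exceptions gracefully.

import multiprocessing
import sys
import traceback

class ExceptionWrapper:
    def __init__(self):
        # Capture only the formatted traceback so the object stays picklable.
        self.formatted_trace = traceback.format_exc()

def divide(x, y):
    return x // y

def wrapped(args):
    try:
        return divide(*args)
    except:  # pylint: disable=bare-except
        return ExceptionWrapper()

if __name__ == '__main__':
    with multiprocessing.Pool(processes=2) as pool:
        for result in pool.imap_unordered(wrapped, [(4, 2), (1, 0)]):
            if isinstance(result, ExceptionWrapper):
                sys.stderr.write(result.formatted_trace)
                sys.exit(1)
            print(result)
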
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py b/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
index e213fbfa..f0ee6966 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -20,7 +20,7 @@ class MojomParserTestCase(unittest.TestCase):
resolution, and module serialization and deserialization."""
def __init__(self, method_name):
- super(MojomParserTestCase, self).__init__(method_name)
+ super().__init__(method_name)
self._temp_dir = None
def setUp(self):
@@ -67,7 +67,7 @@ class MojomParserTestCase(unittest.TestCase):
self.ParseMojoms([filename])
m = self.LoadModule(filename)
definitions = {}
- for kinds in (m.enums, m.structs, m.unions, m.interfaces):
+ for kinds in (m.enums, m.structs, m.unions, m.interfaces, m.features):
for kind in kinds:
definitions[kind.mojom_name] = kind
return definitions
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
index a93f34ba..353a2b6e 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
@@ -1,7 +1,9 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import json
+
from mojom_parser_test_case import MojomParserTestCase
@@ -119,15 +121,22 @@ class MojomParserTest(MojomParserTestCase):
c = 'c.mojom'
c_metadata = 'out/c.build_metadata'
self.WriteFile(a_metadata,
- '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
+ json.dumps({
+ "sources": [self.GetPath(a)],
+ "deps": []
+ }))
self.WriteFile(
b_metadata,
- '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(b),
- self.GetPath(a_metadata)))
+ json.dumps({
+ "sources": [self.GetPath(b)],
+ "deps": [self.GetPath(a_metadata)]
+ }))
self.WriteFile(
c_metadata,
- '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(c),
- self.GetPath(b_metadata)))
+ json.dumps({
+ "sources": [self.GetPath(c)],
+ "deps": [self.GetPath(b_metadata)]
+ }))
self.WriteFile(a, """\
module a;
struct Bar {};""")
@@ -154,9 +163,15 @@ class MojomParserTest(MojomParserTestCase):
b = 'b.mojom'
b_metadata = 'out/b.build_metadata'
self.WriteFile(a_metadata,
- '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
+ json.dumps({
+ "sources": [self.GetPath(a)],
+ "deps": []
+ }))
self.WriteFile(b_metadata,
- '{"sources": ["%s"], "deps": []}\n' % self.GetPath(b))
+ json.dumps({
+ "sources": [self.GetPath(b)],
+ "deps": []
+ }))
self.WriteFile(a, """\
module a;
struct Bar {};""")
diff --git a/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py b/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
index d45ec586..d10d69c6 100644
--- a/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/union_unittest.py b/utils/ipc/mojo/public/tools/mojom/union_unittest.py
new file mode 100644
index 00000000..6b2525e5
--- /dev/null
+++ b/utils/ipc/mojo/public/tools/mojom/union_unittest.py
@@ -0,0 +1,44 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class UnionTest(MojomParserTestCase):
+ """Tests union parsing behavior."""
+
+ def testExtensibleMustHaveDefault(self):
+ """Verifies that extensible unions must have a default field."""
+ mojom = 'foo.mojom'
+ self.WriteFile(mojom, 'module foo; [Extensible] union U { bool x; };')
+ with self.assertRaisesRegexp(Exception, 'must specify a \[Default\]'):
+ self.ParseMojoms([mojom])
+
+ def testExtensibleSingleDefault(self):
+ """Verifies that extensible unions must not have multiple default fields."""
+ mojom = 'foo.mojom'
+ self.WriteFile(
+ mojom, """\
+ module foo;
+ [Extensible] union U {
+ [Default] bool x;
+ [Default] bool y;
+ };
+ """)
+ with self.assertRaisesRegexp(Exception, 'Multiple \[Default\] fields'):
+ self.ParseMojoms([mojom])
+
+ def testExtensibleDefaultTypeValid(self):
+ """Verifies that an extensible union's default field must be nullable or
+ integral type."""
+ mojom = 'foo.mojom'
+ self.WriteFile(
+ mojom, """\
+ module foo;
+ [Extensible] union U {
+ [Default] handle<message_pipe> p;
+ };
+ """)
+ with self.assertRaisesRegexp(Exception, 'must be nullable or integral'):
+ self.ParseMojoms([mojom])
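
The three tests above pin down the [Extensible] union rules: exactly one field carries [Default], and that field must be of a nullable or integral type. The same checks expressed as a standalone sketch, not the real mojom validator (the field tuples are hypothetical):

    def check_extensible_union(fields):
        # fields: list of (name, type_name, is_default) tuples
        defaults = [f for f in fields if f[2]]
        if not defaults:
            raise ValueError('An extensible union must specify a [Default] field')
        if len(defaults) > 1:
            raise ValueError('Multiple [Default] fields are not allowed')
        name, type_name, _ = defaults[0]
        if not (type_name.endswith('?') or type_name == 'bool'
                or type_name.startswith(('int', 'uint'))):
            raise ValueError(f'[Default] field {name} must be nullable or integral')

    check_extensible_union([('x', 'bool', True)])  # passes
    # [('p', 'handle<message_pipe>', True)] would raise: not nullable or integral
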
diff --git a/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py b/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
index 65db4dc9..45e45ec5 100644
--- a/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
+++ b/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -23,9 +23,12 @@ class VersionCompatibilityTest(MojomParserTestCase):
checker = module.BackwardCompatibilityChecker()
compatibility_map = {}
- for name in old.keys():
- compatibility_map[name] = checker.IsBackwardCompatible(
- new[name], old[name])
+ for name in old:
+ try:
+ compatibility_map[name] = checker.IsBackwardCompatible(
+ new[name], old[name])
+ except Exception:
+ compatibility_map[name] = False
return compatibility_map
def assertBackwardCompatible(self, old_mojom, new_mojom):
@@ -60,40 +63,48 @@ class VersionCompatibilityTest(MojomParserTestCase):
"""Adding a value to an existing version is not allowed, even if the old
enum was marked [Extensible]. Note that it is irrelevant whether or not the
new enum is marked [Extensible]."""
- self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
- 'enum E { kFoo, kBar, kBaz };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kFoo, kBar };',
- '[Extensible] enum E { kFoo, kBar, kBaz };')
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ 'enum E { kFoo, kBar, kBaz };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ '[Extensible] enum E { [Default] kFoo, kBar, kBaz };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kFoo, [MinVersion=1] kBar };',
+ '[Extensible] enum E { [Default] kFoo, [MinVersion=1] kBar };',
'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
def testEnumValueRemoval(self):
"""Removal of an enum value is never valid even for [Extensible] enums."""
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
'enum E { kFoo };')
- self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
- '[Extensible] enum E { kFoo };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA, [MinVersion=1] kB };',
- '[Extensible] enum E { kA, };')
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ '[Extensible] enum E { [Default] kFoo };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
+ '[Extensible] enum E { [Default] kA, };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=1] kZ };',
- '[Extensible] enum E { kA, [MinVersion=1] kB };')
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB,
+ [MinVersion=1] kZ };""",
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };')
def testNewExtensibleEnumValueWithMinVersion(self):
"""Adding a new and properly [MinVersion]'d value to an [Extensible] enum
is a backward-compatible change. Note that it is irrelevant whether or not
the new enum is marked [Extensible]."""
- self.assertBackwardCompatible('[Extensible] enum E { kA, kB };',
+ self.assertBackwardCompatible('[Extensible] enum E { [Default] kA, kB };',
'enum E { kA, kB, [MinVersion=1] kC };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA, kB };',
- '[Extensible] enum E { kA, kB, [MinVersion=1] kC };')
+ '[Extensible] enum E { [Default] kA, kB };',
+ '[Extensible] enum E { [Default] kA, kB, [MinVersion=1] kC };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA, [MinVersion=1] kB };',
- '[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=2] kC };')
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB,
+ [MinVersion=2] kC };""")
def testRenameEnumValue(self):
"""Renaming an enum value does not affect backward-compatibility. Only
@@ -161,14 +172,17 @@ class VersionCompatibilityTest(MojomParserTestCase):
'struct S {}; struct T { S s; };',
'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA }; struct S { E e; };',
- '[Extensible] enum E { kA, [MinVersion=1] kB }; struct S { E e; };')
+ '[Extensible] enum E { [Default] kA }; struct S { E e; };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB };
+ struct S { E e; };""")
self.assertNotBackwardCompatible(
'struct S {}; struct T { S s; };',
'struct S { int32 x; }; struct T { S s; };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA }; struct S { E e; };',
- '[Extensible] enum E { kA, kB }; struct S { E e; };')
+ '[Extensible] enum E { [Default] kA }; struct S { E e; };',
+ '[Extensible] enum E { [Default] kA, kB }; struct S { E e; };')
def testNewStructFieldWithInvalidMinVersion(self):
"""Adding a new field using an existing MinVersion breaks backward-
@@ -305,14 +319,17 @@ class VersionCompatibilityTest(MojomParserTestCase):
'struct S {}; union U { S s; };',
'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA }; union U { E e; };',
- '[Extensible] enum E { kA, [MinVersion=1] kB }; union U { E e; };')
+ '[Extensible] enum E { [Default] kA }; union U { E e; };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB };
+ union U { E e; };""")
self.assertNotBackwardCompatible(
'struct S {}; union U { S s; };',
'struct S { int32 x; }; union U { S s; };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA }; union U { E e; };',
- '[Extensible] enum E { kA, kB }; union U { E e; };')
+ '[Extensible] enum E { [Default] kA }; union U { E e; };',
+ '[Extensible] enum E { [Default] kA, kB }; union U { E e; };')
def testNewUnionFieldWithInvalidMinVersion(self):
"""Adding a new field using an existing MinVersion breaks backward-
diff --git a/utils/ipc/mojo/public/tools/run_all_python_unittests.py b/utils/ipc/mojo/public/tools/run_all_python_unittests.py
index b2010958..98bce18c 100755
--- a/utils/ipc/mojo/public/tools/run_all_python_unittests.py
+++ b/utils/ipc/mojo/public/tools/run_all_python_unittests.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,11 +8,13 @@ import sys
_TOOLS_DIR = os.path.dirname(__file__)
_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
+_BINDINGS_DIR = os.path.join(_TOOLS_DIR, 'bindings')
_SRC_DIR = os.path.join(_TOOLS_DIR, os.path.pardir, os.path.pardir,
os.path.pardir)
# Ensure that the mojom library is discoverable.
sys.path.append(_MOJOM_DIR)
+sys.path.append(_BINDINGS_DIR)
# Help Python find typ in //third_party/catapult/third_party/typ/
sys.path.append(
@@ -21,7 +23,7 @@ import typ
def Main():
- return typ.main(top_level_dir=_MOJOM_DIR)
+ return typ.main(top_level_dirs=[_MOJOM_DIR, _BINDINGS_DIR])
if __name__ == '__main__':
diff --git a/utils/ipc/parser.py b/utils/ipc/parser.py
index f46820fa..cb5608b7 100755
--- a/utils/ipc/parser.py
+++ b/utils/ipc/parser.py
@@ -4,7 +4,7 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# parser.py - Run mojo parser with python3
+# Run mojo parser with python3
import os
import sys
@@ -13,7 +13,7 @@ import sys
sys.dont_write_bytecode = True
# Make sure that mojom_parser.py can import mojom
-sys.path.append(f'{os.path.dirname(__file__)}/mojo/public/tools/mojom')
+sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/mojom')
import mojo.public.tools.mojom.mojom_parser as parser
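
The switch from append() to insert(0, ...) is deliberate: sys.path is searched in order, so appending leaves the vendored mojom tree at the lowest priority and any installed package of the same name would shadow it. Roughly:

    import os
    import sys

    vendored = os.path.join(os.path.dirname(__file__), 'mojo/public/tools/mojom')

    # With append() the vendored tree is searched last; insert(0, ...) makes
    # it take priority over any same-named module elsewhere on the path.
    sys.path.insert(0, vendored)

    import mojom  # now resolves to the vendored package
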
diff --git a/utils/ipc/tools/README b/utils/ipc/tools/README
index d5c24fc3..961cabd2 100644
--- a/utils/ipc/tools/README
+++ b/utils/ipc/tools/README
@@ -1,4 +1,4 @@
# SPDX-License-Identifier: CC0-1.0
-Files in this directory are imported from 9c138d992bfc of Chromium. Do not
+Files in this directory are imported from 9be4263648d7 of Chromium. Do not
modify them manually.
diff --git a/utils/ipc/tools/diagnosis/crbug_1001171.py b/utils/ipc/tools/diagnosis/crbug_1001171.py
index 478fb8c1..40900d10 100644
--- a/utils/ipc/tools/diagnosis/crbug_1001171.py
+++ b/utils/ipc/tools/diagnosis/crbug_1001171.py
@@ -1,4 +1,4 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipu3/ipu3-capture.sh b/utils/ipu3/ipu3-capture.sh
index ba6147b4..004a92b0 100755
--- a/utils/ipu3/ipu3-capture.sh
+++ b/utils/ipu3/ipu3-capture.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipu3-capture.sh - Capture raw frames from cameras based on the Intel IPU3
+# Capture raw frames from cameras based on the Intel IPU3
#
# The script makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
@@ -63,7 +63,8 @@ parse_pipeline() {
if (sensor) {
gsub(\".*fmt:\", \"\");
gsub(\"[] ].*\", \"\");
- gsub(\"/\", \" \");
+ sub(\"/\", \" \");
+ sub(\"@[0-9]+/[0-9]+\", \"\");
format=\$0;
}
}
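
The awk change above handles pad format strings that carry a frame-rate annotation such as @30/1: the old gsub() replaced every '/', splitting inside the annotation as well, while the new code splits only the first '/' and strips the annotation. The same transformation in Python, on a hypothetical media-ctl format string:

    import re

    fmt = 'SGRBG10_1X10/2592x1944@30/1'       # hypothetical pad format
    fmt = fmt.replace('/', ' ', 1)            # sub("/", " "): first '/' only
    fmt = re.sub(r'@[0-9]+/[0-9]+', '', fmt)  # drop the '@num/den' frame rate
    assert fmt == 'SGRBG10_1X10 2592x1944'
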
diff --git a/utils/ipu3/ipu3-pack.c b/utils/ipu3/ipu3-pack.c
new file mode 100644
index 00000000..23d2db8b
--- /dev/null
+++ b/utils/ipu3/ipu3-pack.c
@@ -0,0 +1,101 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * ipu3-pack - Convert unpacked RAW10 Bayer data to the IPU3 packed Bayer formats
+ *
+ * Copyright 2022 Umang Jain <umang.jain@ideasonboard.com>
+ */
+#define _GNU_SOURCE
+
+#include <errno.h>
+#include <fcntl.h>
+#include <libgen.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+static void usage(char *argv0)
+{
+ printf("Usage: %s input-file output-file\n", basename(argv0));
+ printf("Convert unpacked RAW10 Bayer data to the IPU3 packed Bayer formats\n");
+ printf("If the output-file '-', output data will be written to standard output\n");
+}
+
+int main(int argc, char *argv[])
+{
+ int in_fd;
+ int out_fd;
+ int ret;
+
+ if (argc != 3) {
+ usage(argv[0]);
+ return 1;
+ }
+
+ in_fd = open(argv[1], O_RDONLY);
+ if (in_fd == -1) {
+ fprintf(stderr, "Failed to open input file '%s': %s\n",
+ argv[1], strerror(errno));
+ return 1;
+ }
+
+ if (strcmp(argv[2], "-") == 0) {
+ out_fd = STDOUT_FILENO;
+ } else {
+ out_fd = open(argv[2], O_WRONLY | O_TRUNC | O_CREAT, 0644);
+ if (out_fd == -1) {
+ fprintf(stderr, "Failed to open output file '%s': %s\n",
+ argv[2], strerror(errno));
+ close(in_fd);
+ return 1;
+ }
+ }
+
+ while (1) {
+ uint16_t in_data[25];
+ uint8_t out_data[32];
+ unsigned int i;
+
+ ret = read(in_fd, in_data, sizeof(in_data));
+ if (ret < 0) {
+ fprintf(stderr, "Failed to read input data: %s\n",
+ strerror(errno));
+ goto done;
+ }
+
+ if ((unsigned)ret < sizeof(in_data)) {
+ if (ret != 0)
+ fprintf(stderr, "%u bytes of stray data at end of input\n",
+ ret);
+ goto done;
+ }
+
+ for (i = 0; i < 30; ++i) {
+ unsigned int index = (i * 8) / 10;
+ unsigned int msb_shift = (i * 8) % 10;
+ unsigned int lsb_shift = 10 - msb_shift;
+
+ out_data[i] = ((in_data[index] >> msb_shift) & 0xff)
+ | ((in_data[index+1] << lsb_shift) & 0xff);
+ }
+
+ out_data[30] = (in_data[24] >> 0) & 0xff;
+ out_data[31] = (in_data[24] >> 8) & 0x03;
+
+ ret = write(out_fd, out_data, sizeof(out_data));
+ if (ret < 0) {
+ fprintf(stderr, "Failed to write output data: %s\n",
+ strerror(errno));
+ goto done;
+ }
+ }
+
+done:
+ close(in_fd);
+ if (out_fd != STDOUT_FILENO)
+ close(out_fd);
+
+ return ret ? 1 : 0;
+}
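
The packing loop above squeezes 25 input words, each carrying one 10-bit sample, into 32 output bytes: 24 samples fill the first 30 bytes back to back, and the 25th lands in the final two bytes under 6 bits of zero padding. A Python sketch mirroring the same arithmetic:

    def pack_ipu3_block(samples):
        # Pack 25 10-bit samples into the 32-byte IPU3 packed layout.
        assert len(samples) == 25
        out = bytearray(32)
        for i in range(30):
            index = (i * 8) // 10       # sample in which output byte i starts
            msb_shift = (i * 8) % 10    # bit offset within that sample
            lsb_shift = 10 - msb_shift  # bits borrowed from the next sample
            out[i] = (((samples[index] >> msb_shift) & 0xff) |
                      ((samples[index + 1] << lsb_shift) & 0xff))
        out[30] = samples[24] & 0xff         # low byte of the final sample
        out[31] = (samples[24] >> 8) & 0x03  # top 2 bits; padding stays zero
        return bytes(out)
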
diff --git a/utils/ipu3/ipu3-process.sh b/utils/ipu3/ipu3-process.sh
index bb4abbe8..25bc849f 100755
--- a/utils/ipu3/ipu3-process.sh
+++ b/utils/ipu3/ipu3-process.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipu3-process.sh - Process raw frames with the Intel IPU3
+# Process raw frames with the Intel IPU3
#
# The script makes use of the following tools, which are expected to be
# found in $PATH:
diff --git a/utils/ipu3/ipu3-unpack.c b/utils/ipu3/ipu3-unpack.c
index 2dce1038..6ee8c45a 100644
--- a/utils/ipu3/ipu3-unpack.c
+++ b/utils/ipu3/ipu3-unpack.c
@@ -8,6 +8,7 @@
#include <errno.h>
#include <fcntl.h>
+#include <libgen.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
@@ -15,7 +16,7 @@
#include <sys/types.h>
#include <unistd.h>
-static void usage(const char *argv0)
+static void usage(char *argv0)
{
printf("Usage: %s input-file output-file\n", basename(argv0));
printf("Unpack the IPU3 raw Bayer format to 16-bit Bayer\n");
@@ -78,8 +79,8 @@ int main(int argc, char *argv[])
}
ret = write(out_fd, out_data, 50);
- if (ret < -1) {
- fprintf(stderr, "Failed to read input data: %s\n",
+ if (ret == -1) {
+ fprintf(stderr, "Failed to write output data: %s\n",
strerror(errno));
goto done;
}
diff --git a/utils/ipu3/meson.build b/utils/ipu3/meson.build
index 88049f58..c92cc658 100644
--- a/utils/ipu3/meson.build
+++ b/utils/ipu3/meson.build
@@ -1,3 +1,4 @@
# SPDX-License-Identifier: CC0-1.0
+ipu3_pack = executable('ipu3-pack', 'ipu3-pack.c')
ipu3_unpack = executable('ipu3-unpack', 'ipu3-unpack.c')
diff --git a/utils/raspberrypi/ctt/alsc_only.py b/utils/raspberrypi/ctt/alsc_only.py
new file mode 100755
index 00000000..092aa40e
--- /dev/null
+++ b/utils/raspberrypi/ctt/alsc_only.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2022, Raspberry Pi (Trading) Limited
+#
+# alsc tuning tool
+
+from ctt import *
+
+
+if __name__ == '__main__':
+ """
+ initialise calibration
+ """
+ if len(sys.argv) == 1:
+ print("""
+ Pisp Camera Tuning Tool version 1.0
+
+ Required Arguments:
+ '-i' : Calibration image directory.
+ '-o' : Name of output json file.
+
+ Optional Arguments:
+ '-c' : Config file for the CTT. If not passed, default parameters used.
+ '-l' : Name of output log file. If not passed, 'ctt_log.txt' used.
+ """)
+ quit(0)
+ else:
+ """
+ parse input arguments
+ """
+ json_output, directory, config, log_output = parse_input()
+ run_ctt(json_output, directory, config, log_output, alsc_only=True)
diff --git a/utils/raspberrypi/ctt/colors.py b/utils/raspberrypi/ctt/colors.py
new file mode 100644
index 00000000..cb4d236b
--- /dev/null
+++ b/utils/raspberrypi/ctt/colors.py
@@ -0,0 +1,30 @@
+# Program to convert from RGB to LAB color space
+def RGB_to_LAB(RGB): # where RGB is a 1x3 array. e.g. RGB = [100, 255, 230]
+ num = 0
+ XYZ = [0, 0, 0]
+ # convert the three R, G, B values to X, Y, Z
+ X = RGB[0] * 0.4124 + RGB[1] * 0.3576 + RGB[2] * 0.1805
+ Y = RGB[0] * 0.2126 + RGB[1] * 0.7152 + RGB[2] * 0.0722
+ Z = RGB[0] * 0.0193 + RGB[1] * 0.1192 + RGB[2] * 0.9505
+
+ XYZ[0] = X / 255 * 100
+ XYZ[1] = Y / 255 * 100 # XYZ Must be in range 0 -> 100, so scale down from 255
+ XYZ[2] = Z / 255 * 100
+ XYZ[0] = XYZ[0] / 95.047 # ref_X = 95.047 Observer= 2°, Illuminant= D65
+ XYZ[1] = XYZ[1] / 100.0 # ref_Y = 100.000
+ XYZ[2] = XYZ[2] / 108.883 # ref_Z = 108.883
+ num = 0
+ for value in XYZ:
+ if value > 0.008856:
+ value = value ** (0.3333333333333333)
+ else:
+ value = (7.787 * value) + (16 / 116)
+ XYZ[num] = value
+ num = num + 1
+
+ # L, A, B, values calculated below
+ L = (116 * XYZ[1]) - 16
+ a = 500 * (XYZ[0] - XYZ[1])
+ b = 200 * (XYZ[1] - XYZ[2])
+
+ return [L, a, b]
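
A usage note: RGB_to_LAB() expects channel values on a 0-255 scale (ctt_ccm.py feeds it 16-bit patch values divided by 256) and returns [L, a, b] against the D65 2° reference white hard-coded above. For a neutral grey the a and b components come out close to zero:

    from colors import RGB_to_LAB

    L, a, b = RGB_to_LAB([128, 128, 128])
    print(round(a, 2), round(b, 2))  # both near 0 for a neutral grey
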
diff --git a/utils/raspberrypi/ctt/convert_tuning.py b/utils/raspberrypi/ctt/convert_tuning.py
new file mode 100755
index 00000000..f4504d45
--- /dev/null
+++ b/utils/raspberrypi/ctt/convert_tuning.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Script to convert version 1.0 Raspberry Pi camera tuning files to version 2.0.
+#
+# Copyright 2022 Raspberry Pi Ltd
+
+import argparse
+import json
+import sys
+
+from ctt_pretty_print_json import pretty_print
+
+
+def convert_v2(in_json: dict) -> str:
+
+ if 'version' in in_json.keys() and in_json['version'] != 1.0:
+ print(f'The JSON config reports version {in_json["version"]}, which is incompatible with this tool.')
+ sys.exit(-1)
+
+ converted = {
+ 'version': 2.0,
+ 'target': 'bcm2835',
+ 'algorithms': [{algo: config} for algo, config in in_json.items()]
+ }
+
+ return pretty_print(converted)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=
+ 'Convert the format of the Raspberry Pi camera tuning file from v1.0 to v2.0.\n')
+ parser.add_argument('input', type=str, help='Input tuning file.')
+ parser.add_argument('output', type=str, nargs='?',
+ help='Output converted tuning file. If not provided, the input file will be updated in-place.',
+ default=None)
+ args = parser.parse_args()
+
+ with open(args.input, 'r') as f:
+ in_json = json.load(f)
+
+ out_json = convert_v2(in_json)
+
+ with open(args.output if args.output is not None else args.input, 'w') as f:
+ f.write(out_json)
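
The conversion itself is purely structural: each top-level algorithm block of a v1.0 file becomes a single-key entry in the v2.0 algorithms list, preserving order. With a hypothetical fragment:

    # v1.0: algorithms sit at the top level of the dictionary
    v1 = {'rpi.black_level': {'black_level': 4096},
          'rpi.lux': {'reference_lux': 900}}

    # v2.0 shape produced by convert_v2()
    v2 = {'version': 2.0,
          'target': 'bcm2835',
          'algorithms': [{algo: config} for algo, config in v1.items()]}
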
diff --git a/utils/raspberrypi/ctt/ctt.py b/utils/raspberrypi/ctt/ctt.py
index 15064634..bbe960b0 100755
--- a/utils/raspberrypi/ctt/ctt.py
+++ b/utils/raspberrypi/ctt/ctt.py
@@ -2,9 +2,9 @@
#
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt.py - camera tuning tool
+# camera tuning tool
import os
import sys
@@ -15,7 +15,7 @@ from ctt_alsc import *
from ctt_lux import *
from ctt_noise import *
from ctt_geq import *
-from ctt_pretty_print_json import *
+from ctt_pretty_print_json import pretty_print
import random
import json
import re
@@ -350,7 +350,7 @@ class Camera:
alsc_out = alsc_all(self, do_alsc_colour, plot)
cal_cr_list, cal_cb_list, luminance_lut, av_corn = alsc_out
"""
- write ouput to json and finish if not do_alsc_colour
+ write output to json and finish if not do_alsc_colour
"""
if not do_alsc_colour:
self.json['rpi.alsc']['luminance_lut'] = luminance_lut
@@ -511,13 +511,17 @@ class Camera:
"""
def write_json(self):
"""
- Write json dictionary to file
+ Write json dictionary to file using our version 2 format
"""
- jstring = json.dumps(self.json, sort_keys=False)
- """
- make it pretty :)
- """
- pretty_print_json(jstring, self.jf)
+
+ out_json = {
+ "version": 2.0,
+ 'target': 'bcm2835',
+ "algorithms": [{name: data} for name, data in self.json.items()],
+ }
+
+ with open(self.jf, 'w') as f:
+ f.write(pretty_print(out_json))
"""
add a new section to the log file
@@ -664,7 +668,7 @@ class Camera:
- incorrect filename/extension
- images from different cameras
"""
- def check_imgs(self):
+ def check_imgs(self, macbeth=True):
self.log += '\n\nImages found:'
self.log += '\nMacbeth : {}'.format(len(self.imgs))
self.log += '\nALSC : {} '.format(len(self.imgs_alsc))
@@ -672,10 +676,14 @@ class Camera:
"""
check usable images found
"""
- if len(self.imgs) == 0:
+ if len(self.imgs) == 0 and macbeth:
print('\nERROR: No usable macbeth chart images found')
self.log += '\nERROR: No usable macbeth chart images found'
return 0
+ elif len(self.imgs) == 0 and len(self.imgs_alsc) == 0:
+ print('\nERROR: No usable images found')
+ self.log += '\nERROR: No usable images found'
+ return 0
"""
Double check that every image has come from the same camera...
"""
@@ -704,7 +712,7 @@ class Camera:
return 0
-def run_ctt(json_output, directory, config, log_output):
+def run_ctt(json_output, directory, config, log_output, alsc_only=False):
"""
check input files are jsons
"""
@@ -766,6 +774,8 @@ def run_ctt(json_output, directory, config, log_output):
try:
Cam = Camera(json_output)
Cam.log_user_input(json_output, directory, config, log_output)
+ if alsc_only:
+ disable = set(Cam.json.keys()).symmetric_difference({"rpi.alsc"})
Cam.disable = disable
Cam.plot = plot
Cam.add_imgs(directory, mac_config, blacklevel)
@@ -779,8 +789,9 @@ def run_ctt(json_output, directory, config, log_output):
ccm also technically does an awb but it measures this from the macbeth
chart in the image rather than using calibration data
"""
- if Cam.check_imgs():
- Cam.json['rpi.black_level']['black_level'] = Cam.blacklevel_16
+ if Cam.check_imgs(macbeth=not alsc_only):
+ if not alsc_only:
+ Cam.json['rpi.black_level']['black_level'] = Cam.blacklevel_16
Cam.json_remove(disable)
print('\nSTARTING CALIBRATIONS')
Cam.alsc_cal(luminance_strength, do_alsc_colour)
diff --git a/utils/raspberrypi/ctt/ctt_alsc.py b/utils/raspberrypi/ctt/ctt_alsc.py
index 89e86469..b0201ac4 100644
--- a/utils/raspberrypi/ctt/ctt_alsc.py
+++ b/utils/raspberrypi/ctt/ctt_alsc.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_alsc.py - camera tuning tool for ALSC (auto lens shading correction)
+# camera tuning tool for ALSC (auto lens shading correction)
from ctt_image_load import *
import matplotlib.pyplot as plt
@@ -132,7 +132,7 @@ def alsc(Cam, Img, do_alsc_colour, plot=False):
"""
average the green channels into one
"""
- av_ch_g = np.mean((channels[1:2]), axis=0)
+ av_ch_g = np.mean((channels[1:3]), axis=0)
if do_alsc_colour:
"""
obtain 16x12 grid of intensities for each channel and subtract black level
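
The one-character change above fixes a real bug: channels[1:2] selects a single plane, so the mean over axis 0 returned the first green channel untouched, while channels[1:3] takes in both greens. A small numpy illustration:

    import numpy as np

    channels = np.arange(16, dtype=float).reshape(4, 2, 2)  # [R, Gr, Gb, B]

    only_gr = np.mean(channels[1:2], axis=0)  # just the Gr plane, no averaging
    both_g = np.mean(channels[1:3], axis=0)   # element-wise mean of Gr and Gb
    assert not np.array_equal(only_gr, both_g)
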
diff --git a/utils/raspberrypi/ctt/ctt_awb.py b/utils/raspberrypi/ctt/ctt_awb.py
index 3c8cd902..5ba6f978 100644
--- a/utils/raspberrypi/ctt/ctt_awb.py
+++ b/utils/raspberrypi/ctt/ctt_awb.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_awb.py - camera tuning tool for AWB
+# camera tuning tool for AWB
from ctt_image_load import *
import matplotlib.pyplot as plt
diff --git a/utils/raspberrypi/ctt/ctt_ccm.py b/utils/raspberrypi/ctt/ctt_ccm.py
index cebecfc2..59753e33 100644
--- a/utils/raspberrypi/ctt/ctt_ccm.py
+++ b/utils/raspberrypi/ctt/ctt_ccm.py
@@ -1,32 +1,68 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_ccm.py - camera tuning tool for CCM (colour correction matrix)
+# camera tuning tool for CCM (colour correction matrix)
from ctt_image_load import *
from ctt_awb import get_alsc_patches
-
-
+import colors
+from scipy.optimize import minimize
+from ctt_visualise import visualise_macbeth_chart
+import numpy as np
"""
takes 8-bit macbeth chart values, degammas and returns 16 bit
"""
+
+'''
+This program has several options from which to derive the color matrix.
+The first is average. This minimises the average delta E across all patches of
+the macbeth chart. Testing across all cameras yielded this as the most color
+accurate and vivid. Other options are available, however.
+Maximum minimises the maximum delta E of the patches. It iterates until a
+minimum maximum is found (so that there is
+not one patch that deviates wildly).
+This yields generally good results, but overall the colors are less accurate.
+Have a fiddle with maximum and see what you think.
+The final option allows you to select the patches across which to average.
+This means that you can bias certain patches, for instance if you want the
+reds to be more accurate.
+'''
+
+matrix_selection_types = ["average", "maximum", "patches"]
+typenum = 0 # select from array above, 0 = average, 1 = maximum, 2 = patches
+test_patches = [1, 2, 5, 8, 9, 12, 14]
+
+'''
+Enter patches to test for. Can also be entered twice if you
+would like twice as much bias on one patch.
+'''
+
+
def degamma(x):
- x = x / ((2**8)-1)
- x = np.where(x < 0.04045, x/12.92, ((x+0.055)/1.055)**2.4)
- x = x * ((2**16)-1)
+ x = x / ((2 ** 8) - 1) # takes 255 and scales it down to one
+ x = np.where(x < 0.04045, x / 12.92, ((x + 0.055) / 1.055) ** 2.4)
+ x = x * ((2 ** 16) - 1) # takes one and scales up to 65535, 16 bit color
return x
+def gamma(x):
+ # Take 3 long array of color values and gamma them
+ return [((colour / 255) ** (1 / 2.4) * 1.055 - 0.055) * 255 for colour in x]
+
+
"""
Finds colour correction matrices for a list of images
"""
+
+
def ccm(Cam, cal_cr_list, cal_cb_list):
+ global matrix_selection_types, typenum
imgs = Cam.imgs
"""
standard macbeth chart colour values
"""
- m_rgb = np.array([ # these are in sRGB
+ m_rgb = np.array([ # these are in RGB
[116, 81, 67], # dark skin
[199, 147, 129], # light skin
[91, 122, 156], # blue sky
@@ -34,7 +70,7 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
[130, 128, 176], # blue flower
[92, 190, 172], # bluish green
[224, 124, 47], # orange
- [68, 91, 170], # purplish blue
+ [68, 91, 170], # purplish blue
[198, 82, 97], # moderate red
[94, 58, 106], # purple
[159, 189, 63], # yellow green
@@ -52,16 +88,20 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
[82, 84, 86], # neutral 3.5
[49, 49, 51] # black 2
])
-
"""
convert reference colours from srgb to rgb
"""
- m_srgb = degamma(m_rgb)
+ m_srgb = degamma(m_rgb) # now in 16 bit color.
+
+ # Produce array of LAB values for ideal color chart
+ m_lab = [colors.RGB_to_LAB(color / 256) for color in m_srgb]
+
"""
reorder reference values to match how patches are ordered
"""
m_srgb = np.array([m_srgb[i::6] for i in range(6)]).reshape((24, 3))
-
+ m_lab = np.array([m_lab[i::6] for i in range(6)]).reshape((24, 3))
+ m_rgb = np.array([m_rgb[i::6] for i in range(6)]).reshape((24, 3))
"""
reformat alsc correction tables or set colour_cals to None if alsc is
deactivated
@@ -76,8 +116,8 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
"""
normalise tables so min value is 1
"""
- cr_tab = cr_tab/np.min(cr_tab)
- cb_tab = cb_tab/np.min(cb_tab)
+ cr_tab = cr_tab / np.min(cr_tab)
+ cb_tab = cb_tab / np.min(cb_tab)
colour_cals[cr['ct']] = [cr_tab, cb_tab]
"""
@@ -94,6 +134,8 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
the function will simply return the macbeth patches
"""
r, b, g = get_alsc_patches(Img, colour_cals, grey=False)
+ # 256 values for each patch of sRGB values
+
"""
do awb
Note: awb is done by measuring the macbeth chart in the image, rather
@@ -101,34 +143,123 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
and the ccm matrices will be more accurate.
"""
r_greys, b_greys, g_greys = r[3::4], b[3::4], g[3::4]
- r_g = np.mean(r_greys/g_greys)
- b_g = np.mean(b_greys/g_greys)
+ r_g = np.mean(r_greys / g_greys)
+ b_g = np.mean(b_greys / g_greys)
r = r / r_g
b = b / b_g
-
"""
normalise brightness wrt reference macbeth colours and then average
each channel for each patch
"""
- gain = np.mean(m_srgb)/np.mean((r, g, b))
+ gain = np.mean(m_srgb) / np.mean((r, g, b))
Cam.log += '\nGain with respect to standard colours: {:.3f}'.format(gain)
- r = np.mean(gain*r, axis=1)
- b = np.mean(gain*b, axis=1)
- g = np.mean(gain*g, axis=1)
-
+ r = np.mean(gain * r, axis=1)
+ b = np.mean(gain * b, axis=1)
+ g = np.mean(gain * g, axis=1)
"""
calculate ccm matrix
"""
+ # ==== All of the below should be in sRGB ====
+ sumde = 0
ccm = do_ccm(r, g, b, m_srgb)
+ # This is the initial guess that our optimisation code works with.
+ original_ccm = ccm
+ r1 = ccm[0]
+ r2 = ccm[1]
+ g1 = ccm[3]
+ g2 = ccm[4]
+ b1 = ccm[6]
+ b2 = ccm[7]
+ '''
+ COLOR MATRIX LOOKS AS BELOW
+ R1 R2 R3 Rval Outr
+ G1 G2 G3 * Gval = G
+ B1 B2 B3 Bval B
+ Will be optimising 6 elements and working out the third element using 1-r1-r2 = r3
+ '''
+
+ x0 = [r1, r2, g1, g2, b1, b2]
+ '''
+ We use our old CCM as the initial guess for the program to find the
+ optimised matrix
+ '''
+ result = minimize(guess, x0, args=(r, g, b, m_lab), tol=0.01)
+ '''
+ This produces a color matrix which has the lowest delta E possible,
+ based off the input data. Note it is impossible for this to reach
+ zero since the input data is imperfect
+ '''
+
+ Cam.log += ("\n \n Optimised Matrix Below: \n \n")
+ [r1, r2, g1, g2, b1, b2] = result.x
+ # The new, optimised color correction matrix values
+ optimised_ccm = [r1, r2, (1 - r1 - r2), g1, g2, (1 - g1 - g2), b1, b2, (1 - b1 - b2)]
+
+ # This is the optimised Color Matrix (preserving greys by summing rows up to 1)
+ Cam.log += str(optimised_ccm)
+ Cam.log += "\n Old Color Correction Matrix Below \n"
+ Cam.log += str(ccm)
+
+ formatted_ccm = np.array(original_ccm).reshape((3, 3))
+
+ '''
+ below is a whole load of code that then applies the latest color
+ matrix, and returns LAB values for color. This can then be used
+ to calculate the final delta E
+ '''
+ optimised_ccm_rgb = [] # Original Color Corrected Matrix RGB / LAB
+ optimised_ccm_lab = []
+
+ formatted_optimised_ccm = np.array(optimised_ccm).reshape((3, 3))
+ after_gamma_rgb = []
+ after_gamma_lab = []
+
+ for RGB in zip(r, g, b):
+ ccm_applied_rgb = np.dot(formatted_ccm, (np.array(RGB) / 256))
+ optimised_ccm_rgb.append(gamma(ccm_applied_rgb))
+ optimised_ccm_lab.append(colors.RGB_to_LAB(ccm_applied_rgb))
+
+ optimised_ccm_applied_rgb = np.dot(formatted_optimised_ccm, np.array(RGB) / 256)
+ after_gamma_rgb.append(gamma(optimised_ccm_applied_rgb))
+ after_gamma_lab.append(colors.RGB_to_LAB(optimised_ccm_applied_rgb))
+ '''
+ Gamma After RGB / LAB - not used in calculations, only used for visualisation
+ We now want to spit out some data that shows
+ how the optimisation has improved the color matrices
+ '''
+ Cam.log += "Here are the Improvements"
+
+ # CALCULATE WORST CASE delta e
+ old_worst_delta_e = 0
+ before_average = transform_and_evaluate(formatted_ccm, r, g, b, m_lab)
+ new_worst_delta_e = 0
+ after_average = transform_and_evaluate(formatted_optimised_ccm, r, g, b, m_lab)
+ for i in range(24):
+ old_delta_e = deltae(optimised_ccm_lab[i], m_lab[i]) # Current Old Delta E
+ new_delta_e = deltae(after_gamma_lab[i], m_lab[i]) # Current New Delta E
+ if old_delta_e > old_worst_delta_e:
+ old_worst_delta_e = old_delta_e
+ if new_delta_e > new_worst_delta_e:
+ new_worst_delta_e = new_delta_e
+
+ Cam.log += "Before color correction matrix was optimised, we got an average delta E of " + str(before_average) + " and a maximum delta E of " + str(old_worst_delta_e)
+ Cam.log += "After color correction matrix was optimised, we got an average delta E of " + str(after_average) + " and a maximum delta E of " + str(new_worst_delta_e)
+
+ visualise_macbeth_chart(m_rgb, optimised_ccm_rgb, after_gamma_rgb, str(Img.col) + str(matrix_selection_types[typenum]))
+ '''
+ The program will also save some visualisations of improvements.
+ Very pretty to look at. Top rectangle is ideal, Left square is
+ before optimisation, right square is after.
+ '''
"""
if a ccm has already been calculated for that temperature then don't
overwrite but save both. They will then be averaged later on
- """
+ """ # Now going to use optimised color matrix, optimised_ccm
if Img.col in ccm_tab.keys():
- ccm_tab[Img.col].append(ccm)
+ ccm_tab[Img.col].append(optimised_ccm)
else:
- ccm_tab[Img.col] = [ccm]
+ ccm_tab[Img.col] = [optimised_ccm]
Cam.log += '\n'
Cam.log += '\nFinished processing images'
@@ -137,8 +268,8 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
"""
for k, v in ccm_tab.items():
tab = np.mean(v, axis=0)
- tab = np.where((10000*tab) % 1 <= 0.05, tab+0.00001, tab)
- tab = np.where((10000*tab) % 1 >= 0.95, tab-0.00001, tab)
+ tab = np.where((10000 * tab) % 1 <= 0.05, tab + 0.00001, tab)
+ tab = np.where((10000 * tab) % 1 >= 0.95, tab - 0.00001, tab)
ccm_tab[k] = list(np.round(tab, 5))
Cam.log += '\nMatrix calculated for colour temperature of {} K'.format(k)
@@ -156,20 +287,65 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
return ccms
+def guess(x0, r, g, b, m_lab): # provides a method of numerical feedback for the optimisation code
+ [r1, r2, g1, g2, b1, b2] = x0
+ ccm = np.array([r1, r2, (1 - r1 - r2),
+ g1, g2, (1 - g1 - g2),
+ b1, b2, (1 - b1 - b2)]).reshape((3, 3)) # format the matrix correctly
+ return transform_and_evaluate(ccm, r, g, b, m_lab)
+
+
+def transform_and_evaluate(ccm, r, g, b, m_lab): # Transforms colors to LAB and applies the correction matrix
+ # create list of matrix changed colors
+ realrgb = []
+ for RGB in zip(r, g, b):
+ rgb_post_ccm = np.dot(ccm, np.array(RGB) / 256) # This is RGB values after the color correction matrix has been applied
+ realrgb.append(colors.RGB_to_LAB(rgb_post_ccm))
+ # now compare that with m_lab and return numeric result, averaged for each patch
+ return (sumde(realrgb, m_lab) / 24) # returns an average result of delta E
+
+
+def sumde(listA, listB):
+ global typenum, test_patches
+ sumde = 0
+ maxde = 0
+ patchde = [] # Create array of the delta E values for each patch. useful for optimisation of certain patches
+ for listA_item, listB_item in zip(listA, listB):
+ if maxde < (deltae(listA_item, listB_item)):
+ maxde = deltae(listA_item, listB_item)
+ patchde.append(deltae(listA_item, listB_item))
+ sumde += deltae(listA_item, listB_item)
+ '''
+ The different options specified at the start allow for
+ the maximum to be returned, average or specific patches
+ '''
+ if typenum == 0:
+ return sumde
+ if typenum == 1:
+ return maxde
+ if typenum == 2:
+ output = sum([patchde[test_patch] for test_patch in test_patches])
+ # Selects only certain patches and returns the output for them
+ return output
+
+
"""
calculates the ccm for an individual image.
-ccms are calculate in rgb space, and are fit by hand. Although it is a 3x3
+ccms are calculated in rgb space, and are fit by hand. Although it is a 3x3
matrix, each row must add up to 1 in order to conserve greyness, simplifying
calculation.
-Should you want to fit them in another space (e.g. LAB) we wish you the best of
-luck and send us the code when you are done! :-)
+The initial CCM is calculated in RGB, and then optimised in LAB color space.
+This simplifies the initial calculation but then gets us the accuracy of
+using LAB color space.
"""
+
+
def do_ccm(r, g, b, m_srgb):
rb = r-b
gb = g-b
- rb_2s = (rb*rb)
- rb_gbs = (rb*gb)
- gb_2s = (gb*gb)
+ rb_2s = (rb * rb)
+ rb_gbs = (rb * gb)
+ gb_2s = (gb * gb)
r_rbs = rb * (m_srgb[..., 0] - b)
r_gbs = gb * (m_srgb[..., 0] - b)
@@ -191,7 +367,7 @@ def do_ccm(r, g, b, m_srgb):
b_rb = np.sum(b_rbs)
b_gb = np.sum(b_gbs)
- det = rb_2*gb_2 - rb_gb*rb_gb
+ det = rb_2 * gb_2 - rb_gb * rb_gb
"""
Raise error if matrix is singular...
@@ -201,19 +377,19 @@ def do_ccm(r, g, b, m_srgb):
if det < 0.001:
raise ArithmeticError
- r_a = (gb_2*r_rb - rb_gb*r_gb)/det
- r_b = (rb_2*r_gb - rb_gb*r_rb)/det
+ r_a = (gb_2 * r_rb - rb_gb * r_gb) / det
+ r_b = (rb_2 * r_gb - rb_gb * r_rb) / det
"""
Last row can be calculated by knowing the sum must be 1
"""
r_c = 1 - r_a - r_b
- g_a = (gb_2*g_rb - rb_gb*g_gb)/det
- g_b = (rb_2*g_gb - rb_gb*g_rb)/det
+ g_a = (gb_2 * g_rb - rb_gb * g_gb) / det
+ g_b = (rb_2 * g_gb - rb_gb * g_rb) / det
g_c = 1 - g_a - g_b
- b_a = (gb_2*b_rb - rb_gb*b_gb)/det
- b_b = (rb_2*b_gb - rb_gb*b_rb)/det
+ b_a = (gb_2 * b_rb - rb_gb * b_gb) / det
+ b_b = (rb_2 * b_gb - rb_gb * b_rb) / det
b_c = 1 - b_a - b_b
"""
@@ -222,3 +398,9 @@ def do_ccm(r, g, b, m_srgb):
ccm = [r_a, r_b, r_c, g_a, g_b, g_c, b_a, b_b, b_c]
return ccm
+
+
+def deltae(colorA, colorB):
+ return ((colorA[0] - colorB[0]) ** 2 + (colorA[1] - colorB[1]) ** 2 + (colorA[2] - colorB[2]) ** 2) ** 0.5
+ # return ((colorA[1]-colorB[1]) ** 2 + (colorA[2]-colorB[2]) ** 2) ** 0.5
+ # UNCOMMENT IF YOU WANT TO NEGLECT LUMINANCE FROM CALCULATION OF DELTA E
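
The optimisation above keeps six free matrix entries and derives the third entry of each row as 1 minus the other two, so every row sums to 1 and greys stay grey. A self-contained toy version of that setup (synthetic patches stand in for the macbeth measurements, plain Euclidean error stands in for the LAB delta E that guess() computes, and the real code seeds x0 from do_ccm() rather than the identity):

    import numpy as np
    from scipy.optimize import minimize

    rng = np.random.default_rng(0)
    target = rng.uniform(0.1, 0.9, (24, 3))      # hypothetical target patches
    true_ccm = np.array([[1.6, -0.4, -0.2],
                         [-0.3, 1.5, -0.2],
                         [0.1, -0.5, 1.4]])      # each row sums to 1
    camera = target @ np.linalg.inv(true_ccm).T  # what the sensor would report

    def cost(x, camera, target):
        r1, r2, g1, g2, b1, b2 = x
        ccm = np.array([[r1, r2, 1 - r1 - r2],
                        [g1, g2, 1 - g1 - g2],
                        [b1, b2, 1 - b1 - b2]])  # rows pinned to sum to 1
        err = camera @ ccm.T - target
        return np.mean(np.linalg.norm(err, axis=1))

    x0 = [1, 0, 0, 1, 0, 0]                      # identity as the initial guess
    result = minimize(cost, x0, args=(camera, target), tol=0.01)
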
diff --git a/utils/raspberrypi/ctt/ctt_geq.py b/utils/raspberrypi/ctt/ctt_geq.py
index 2aa668f1..5a91ebb4 100644
--- a/utils/raspberrypi/ctt/ctt_geq.py
+++ b/utils/raspberrypi/ctt/ctt_geq.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_geq.py - camera tuning tool for GEQ (green equalisation)
+# camera tuning tool for GEQ (green equalisation)
from ctt_tools import *
import matplotlib.pyplot as plt
diff --git a/utils/raspberrypi/ctt/ctt_image_load.py b/utils/raspberrypi/ctt/ctt_image_load.py
index 66adb237..d76ece73 100644
--- a/utils/raspberrypi/ctt/ctt_image_load.py
+++ b/utils/raspberrypi/ctt/ctt_image_load.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019-2020, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019-2020, Raspberry Pi Ltd
#
-# ctt_image_load.py - camera tuning tool image loading
+# camera tuning tool image loading
from ctt_tools import *
from ctt_macbeth_locator import *
@@ -301,17 +301,35 @@ def dng_load_image(Cam, im_str):
metadata.read()
Img.ver = 100 # random value
- Img.w = metadata['Exif.SubImage1.ImageWidth'].value
+ """
+ The DNG and TIFF/EP specifications use different IFDs to store the raw
+ image data and the Exif tags. DNG stores them in a SubIFD and in an Exif
+ IFD respectively (named "SubImage1" and "Photo" by pyexiv2), while
+ TIFF/EP stores them both in IFD0 (name "Image"). Both are used in "DNG"
+ files, with libcamera-apps following the DNG recommendation and
+ applications based on picamera2 following TIFF/EP.
+
+ This code detects which tags are being used, and therefore extracts the
+ correct values.
+ """
+ try:
+ Img.w = metadata['Exif.SubImage1.ImageWidth'].value
+ subimage = "SubImage1"
+ photo = "Photo"
+ except KeyError:
+ Img.w = metadata['Exif.Image.ImageWidth'].value
+ subimage = "Image"
+ photo = "Image"
Img.pad = 0
- Img.h = metadata['Exif.SubImage1.ImageLength'].value
- white = metadata['Exif.SubImage1.WhiteLevel'].value
+ Img.h = metadata[f'Exif.{subimage}.ImageLength'].value
+ white = metadata[f'Exif.{subimage}.WhiteLevel'].value
Img.sigbits = int(white).bit_length()
Img.fmt = (Img.sigbits - 4) // 2
- Img.exposure = int(metadata['Exif.Photo.ExposureTime'].value*1000000)
- Img.againQ8 = metadata['Exif.Photo.ISOSpeedRatings'].value*256/100
+ Img.exposure = int(metadata[f'Exif.{photo}.ExposureTime'].value * 1000000)
+ Img.againQ8 = metadata[f'Exif.{photo}.ISOSpeedRatings'].value * 256 / 100
Img.againQ8_norm = Img.againQ8 / 256
Img.camName = metadata['Exif.Image.Model'].value
- Img.blacklevel = int(metadata['Exif.SubImage1.BlackLevel'].value[0])
+ Img.blacklevel = int(metadata[f'Exif.{subimage}.BlackLevel'].value[0])
Img.blacklevel_16 = Img.blacklevel << (16 - Img.sigbits)
bayer_case = {
'0 1 1 2': (0, (0, 1, 2, 3)),
@@ -319,7 +337,7 @@ def dng_load_image(Cam, im_str):
'2 1 1 0': (2, (3, 2, 1, 0)),
'1 0 2 1': (3, (1, 0, 3, 2))
}
- cfa_pattern = metadata['Exif.SubImage1.CFAPattern'].value
+ cfa_pattern = metadata[f'Exif.{subimage}.CFAPattern'].value
Img.pattern = bayer_case[cfa_pattern][0]
Img.order = bayer_case[cfa_pattern][1]
@@ -358,6 +376,11 @@ def load_image(Cam, im_str, mac_config=None, show=False, mac=True, show_meta=Fal
Img = dng_load_image(Cam, im_str)
else:
Img = brcm_load_image(Cam, im_str)
+ """
+ handle errors smoothly if loading image failed
+ """
+ if Img == 0:
+ return 0
if show_meta:
Img.print_meta()
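
Condensing the detection above: probe for the DNG-style SubIFD tag first, fall back to TIFF/EP's IFD0 names, and build all later tag lookups from the chosen prefixes. As a sketch (metadata is a pyexiv2 ImageMetadata object that has already been read):

    def exif_layout(metadata):
        # Return the (subimage, photo) IFD name prefixes for this file.
        try:
            metadata['Exif.SubImage1.ImageWidth'].value
            return 'SubImage1', 'Photo'  # DNG: SubIFD plus Exif IFD
        except KeyError:
            return 'Image', 'Image'      # TIFF/EP: everything in IFD0

    # subimage, photo = exif_layout(metadata)
    # height = metadata[f'Exif.{subimage}.ImageLength'].value
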
diff --git a/utils/raspberrypi/ctt/ctt_lux.py b/utils/raspberrypi/ctt/ctt_lux.py
index 4e7785ef..46be1512 100644
--- a/utils/raspberrypi/ctt/ctt_lux.py
+++ b/utils/raspberrypi/ctt/ctt_lux.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_lux.py - camera tuning tool for lux level
+# camera tuning tool for lux level
from ctt_tools import *
diff --git a/utils/raspberrypi/ctt/ctt_macbeth_locator.py b/utils/raspberrypi/ctt/ctt_macbeth_locator.py
index cae1d334..f22dbf31 100644
--- a/utils/raspberrypi/ctt/ctt_macbeth_locator.py
+++ b/utils/raspberrypi/ctt/ctt_macbeth_locator.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_macbeth_locator.py - camera tuning tool Macbeth chart locator
+# camera tuning tool Macbeth chart locator
from ctt_ransac import *
from ctt_tools import *
@@ -57,6 +57,10 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
"""
cor, mac, coords, msg = get_macbeth_chart(img, ref_data)
+ # Keep a list that will include this and any brightened up versions of
+ # the image for reuse.
+ all_images = [img]
+
"""
following bits of code try to fix common problems with simple
techniques.
@@ -71,6 +75,7 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
if cor < 0.75:
a = 2
img_br = cv2.convertScaleAbs(img, alpha=a, beta=0)
+ all_images.append(img_br)
cor_b, mac_b, coords_b, msg_b = get_macbeth_chart(img_br, ref_data)
if cor_b > cor:
cor, mac, coords, msg = cor_b, mac_b, coords_b, msg_b
@@ -81,6 +86,7 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
if cor < 0.75:
a = 4
img_br = cv2.convertScaleAbs(img, alpha=a, beta=0)
+ all_images.append(img_br)
cor_b, mac_b, coords_b, msg_b = get_macbeth_chart(img_br, ref_data)
if cor_b > cor:
cor, mac, coords, msg = cor_b, mac_b, coords_b, msg_b
@@ -128,23 +134,26 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
h_inc = int(h/6)
"""
for each subselection, look for a macbeth chart
+ loop over this and any brightened up images that we made to increase the
+ likelihood of success
"""
- for i in range(3):
- for j in range(3):
- w_s, h_s = i*w_inc, j*h_inc
- img_sel = img[w_s:w_s+w_sel, h_s:h_s+h_sel]
- cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
- """
- if the correlation is better than the best then record the
- scale and current subselection at which macbeth chart was
- found. Also record the coordinates, macbeth chart and message.
- """
- if cor_ij > cor:
- cor = cor_ij
- mac, coords, msg = mac_ij, coords_ij, msg_ij
- ii, jj = i, j
- w_best, h_best = w_inc, h_inc
- d_best = 1
+ for img_br in all_images:
+ for i in range(3):
+ for j in range(3):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img_br[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ """
+ if the correlation is better than the best then record the
+ scale and current subselection at which macbeth chart was
+ found. Also record the coordinates, macbeth chart and message.
+ """
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 1
"""
scale 2
@@ -157,17 +166,19 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
h_sel = int(h/2)
w_inc = int(w/8)
h_inc = int(h/8)
- for i in range(5):
- for j in range(5):
- w_s, h_s = i*w_inc, j*h_inc
- img_sel = img[w_s:w_s+w_sel, h_s:h_s+h_sel]
- cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
- if cor_ij > cor:
- cor = cor_ij
- mac, coords, msg = mac_ij, coords_ij, msg_ij
- ii, jj = i, j
- w_best, h_best = w_inc, h_inc
- d_best = 2
+ # Again, loop over any brightened up images as well
+ for img_br in all_images:
+ for i in range(5):
+ for j in range(5):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img_br[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 2
"""
The following code checks for macbeth charts at even smaller scales. This
@@ -238,7 +249,7 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
print error or success message
"""
print(msg)
- Cam.log += '\n' + msg
+ Cam.log += '\n' + str(msg)
if msg == success_msg:
coords_fit = coords
Cam.log += '\nMacbeth chart vertices:\n'
@@ -606,7 +617,7 @@ def get_macbeth_chart(img, ref_data):
'\nNot enough squares found'
'\nPossible problems:\n'
'- Macbeth chart is occluded\n'
- '- Macbeth chart is too dark of bright\n'
+ '- Macbeth chart is too dark or bright\n'
)
ref_cents = np.array(ref_cents)
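
The retry strategy above boils down to keeping the original image plus the brightened copies made at each fallback stage, then running every sub-window search over all of them. A sketch of how the candidate list is built (in the real code each copy is only added once the correlation has stayed below 0.75):

    import cv2

    def candidate_images(img):
        all_images = [img]
        for alpha in (2, 4):  # the gains the locator falls back to
            all_images.append(cv2.convertScaleAbs(img, alpha=alpha, beta=0))
        return all_images
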
diff --git a/utils/raspberrypi/ctt/ctt_noise.py b/utils/raspberrypi/ctt/ctt_noise.py
index 0afcf8f8..0b18d83f 100644
--- a/utils/raspberrypi/ctt/ctt_noise.py
+++ b/utils/raspberrypi/ctt/ctt_noise.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_noise.py - camera tuning tool noise calibration
+# camera tuning tool noise calibration
from ctt_image_load import *
import matplotlib.pyplot as plt
diff --git a/utils/raspberrypi/ctt/ctt_pretty_print_json.py b/utils/raspberrypi/ctt/ctt_pretty_print_json.py
index d38ae617..3e3b8475 100644..100755
--- a/utils/raspberrypi/ctt/ctt_pretty_print_json.py
+++ b/utils/raspberrypi/ctt/ctt_pretty_print_json.py
@@ -1,106 +1,116 @@
+#!/usr/bin/env python3
+#
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright 2022 Raspberry Pi Ltd
#
-# ctt_pretty_print_json.py - camera tuning tool JSON formatter
-
-import sys
-
-
-class JSONPrettyPrinter(object):
- """
- Take a collapsed JSON file and make it more readable
- """
- def __init__(self, fout):
- self.state = {
- "indent": 0,
- "inarray": [False],
- "arraycount": [],
- "skipnewline": True,
- "need_indent": False,
- "need_space": False,
+# Script to pretty print a Raspberry Pi tuning config JSON structure in
+# version 2.0 and later formats.
+
+import argparse
+import json
+import textwrap
+
+
+class Encoder(json.JSONEncoder):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.indentation_level = 0
+ self.hard_break = 120
+ self.custom_elems = {
+ 'table': 16,
+ 'luminance_lut': 16,
+ 'ct_curve': 3,
+ 'ccm': 3,
+ 'gamma_curve': 2,
+ 'y_target': 2,
+ 'prior': 2
}
- self.fout = fout
-
- def newline(self):
- if not self.state["skipnewline"]:
- self.fout.write('\n')
- self.state["need_indent"] = True
- self.state["need_space"] = False
- self.state["skipnewline"] = True
-
- def write(self, c):
- if self.state["need_indent"]:
- self.fout.write(' ' * self.state["indent"] * 4)
- self.state["need_indent"] = False
- if self.state["need_space"]:
- self.fout.write(' ')
- self.state["need_space"] = False
- self.fout.write(c)
- self.state["skipnewline"] = False
-
- def process_char(self, c):
- if c == '{':
- self.newline()
- self.write(c)
- self.state["indent"] += 1
- self.newline()
- elif c == '}':
- self.state["indent"] -= 1
- self.newline()
- self.write(c)
- elif c == '[':
- self.newline()
- self.write(c)
- self.state["indent"] += 1
- self.newline()
- self.state["inarray"] = [True] + self.state["inarray"]
- self.state["arraycount"] = [0] + self.state["arraycount"]
- elif c == ']':
- self.state["indent"] -= 1
- self.newline()
- self.state["inarray"].pop(0)
- self.state["arraycount"].pop(0)
- self.write(c)
- elif c == ':':
- self.write(c)
- self.state["need_space"] = True
- elif c == ',':
- if not self.state["inarray"][0]:
- self.write(c)
- self.newline()
+ def encode(self, o, node_key=None):
+ if isinstance(o, (list, tuple)):
+ # Check if we are a flat list of numbers.
+ if not any(isinstance(el, (list, tuple, dict)) for el in o):
+ s = ', '.join(json.dumps(el) for el in o)
+ if node_key in self.custom_elems.keys():
+ # Special case handling to specify number of elements in a row for tables, ccm, etc.
+ self.indentation_level += 1
+ sl = s.split(', ')
+ num = self.custom_elems[node_key]
+ chunk = [self.indent_str + ', '.join(sl[x:x + num]) for x in range(0, len(sl), num)]
+ t = ',\n'.join(chunk)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ elif len(s) > self.hard_break - len(self.indent_str):
+ # Break a long list with wraps.
+ self.indentation_level += 1
+ t = textwrap.fill(s, self.hard_break, break_long_words=False,
+ initial_indent=self.indent_str, subsequent_indent=self.indent_str)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ else:
+ # Smaller lists can remain on a single line.
+ output = f' [ {s} ]'
+ return output
else:
- self.write(c)
- self.state["arraycount"][0] += 1
- if self.state["arraycount"][0] == 16:
- self.state["arraycount"][0] = 0
- self.newline()
+ # Sub-structures in the list case.
+ self.indentation_level += 1
+ output = [self.indent_str + self.encode(el) for el in o]
+ self.indentation_level -= 1
+ output = ',\n'.join(output)
+ return f' [\n{output}\n{self.indent_str}]'
+
+ elif isinstance(o, dict):
+ self.indentation_level += 1
+ output = []
+ for k, v in o.items():
+ if isinstance(v, dict) and len(v) == 0:
+ # Empty config block special case.
+ output.append(self.indent_str + f'{json.dumps(k)}: {{ }}')
else:
- self.state["need_space"] = True
- elif c.isspace():
- pass
+ # Only linebreak if the next node is a config block.
+ sep = f'\n{self.indent_str}' if isinstance(v, dict) else ''
+ output.append(self.indent_str + f'{json.dumps(k)}:{sep}{self.encode(v, k)}')
+ output = ',\n'.join(output)
+ self.indentation_level -= 1
+ return f'{{\n{output}\n{self.indent_str}}}'
+
else:
- self.write(c)
+ return ' ' + json.dumps(o)
+
+ @property
+ def indent_str(self) -> str:
+ return ' ' * self.indentation_level * self.indent
+
+ def iterencode(self, o, **kwargs):
+ return self.encode(o)
+
+
+def pretty_print(in_json: dict) -> str:
+
+ if 'version' not in in_json or \
+ 'target' not in in_json or \
+ 'algorithms' not in in_json or \
+ in_json['version'] < 2.0:
+ raise RuntimeError('Incompatible JSON dictionary has been provided')
- def print(self, string):
- for c in string:
- self.process_char(c)
- self.newline()
+ return json.dumps(in_json, cls=Encoder, indent=4, sort_keys=False)
-def pretty_print_json(str_in, output_filename):
- with open(output_filename, "w") as fout:
- printer = JSONPrettyPrinter(fout)
- printer.print(str_in)
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=
+ 'Prettify a version 2.0 camera tuning config JSON file.')
+ parser.add_argument('input', type=str, help='Input tuning file.')
+ parser.add_argument('output', type=str, nargs='?',
+ help='Output converted tuning file. If not provided, the input file will be updated in-place.',
+ default=None)
+ args = parser.parse_args()
+ with open(args.input, 'r') as f:
+ in_json = json.load(f)
-if __name__ == '__main__':
- if len(sys.argv) != 2:
- print("Usage: %s filename" % sys.argv[0])
- sys.exit(1)
+ out_json = pretty_print(in_json)
- input_filename = sys.argv[1]
- with open(input_filename, "r") as fin:
- printer = JSONPrettyPrinter(sys.stdout)
- printer.print(fin.read())
+ with open(args.output if args.output is not None else args.input, 'w') as f:
+ f.write(out_json)
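
Using the new encoder matches what pretty_print() does above: pass it through json.dumps() with indent=4, and any key listed in custom_elems has its flat numeric list wrapped at the configured row width. For example (the values are made up):

    import json
    # Encoder as defined above in ctt_pretty_print_json.py

    config = {
        'version': 2.0,
        'target': 'bcm2835',
        'algorithms': [{'rpi.ccm': {'ccms': [{'ct': 2850, 'ccm': [1.0] * 9}]}}],
    }
    print(json.dumps(config, cls=Encoder, indent=4, sort_keys=False))
    # 'ccm' is in custom_elems with a width of 3, so its 9 values print 3 per row
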
diff --git a/utils/raspberrypi/ctt/ctt_ransac.py b/utils/raspberrypi/ctt/ctt_ransac.py
index 11515a4f..01bba302 100644
--- a/utils/raspberrypi/ctt/ctt_ransac.py
+++ b/utils/raspberrypi/ctt/ctt_ransac.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_ransac.py - camera tuning tool RANSAC selector for Macbeth chart locator
+# camera tuning tool RANSAC selector for Macbeth chart locator
import numpy as np
diff --git a/utils/raspberrypi/ctt/ctt_tools.py b/utils/raspberrypi/ctt/ctt_tools.py
index 8728ff16..27c52193 100644
--- a/utils/raspberrypi/ctt/ctt_tools.py
+++ b/utils/raspberrypi/ctt/ctt_tools.py
@@ -1,8 +1,8 @@
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2019, Raspberry Pi (Trading) Limited
+# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_tools.py - camera tuning tool miscellaneous
+# camera tuning tool miscellaneous
import time
import re
diff --git a/utils/raspberrypi/ctt/ctt_visualise.py b/utils/raspberrypi/ctt/ctt_visualise.py
new file mode 100644
index 00000000..ed2339fd
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_visualise.py
@@ -0,0 +1,43 @@
+"""
+Some code that will save virtual macbeth charts that show the difference between optimised matrices and non-optimised matrices.
+
+The function creates an image that is 1550 by 1050 pixels, and fills it with patches which are 200x200 pixels in size.
+Each patch contains the ideal color, the color from the original matrix, and the color from the final matrix.
+_________________
+| |
+| Ideal Color |
+|_______________|
+| Old | new |
+| Color | Color |
+|_______|_______|
+
+A nice way of showing how the optimisation helps change the colors and the color matrices.
+"""
+import numpy as np
+from PIL import Image
+
+
+def visualise_macbeth_chart(macbeth_rgb, original_rgb, new_rgb, output_filename):
+ image = np.zeros((1050, 1550, 3), dtype=np.uint8)
+ colorindex = -1
+ for y in range(6):
+ for x in range(4): # Creates 6 x 4 grid of macbeth chart
+ colorindex += 1
+ xlocation = 50 + 250 * x # Means there is 50px of black gap between each square, more like the real macbeth chart.
+ ylocation = 50 + 250 * y
+ for g in range(200):
+ for i in range(100):
+ image[xlocation + i, ylocation + g] = macbeth_rgb[colorindex]
+ xlocation = 150 + 250 * x
+ ylocation = 50 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = original_rgb[colorindex] # Smaller squares below to compare the old colors with the new ones
+ xlocation = 150 + 250 * x
+ ylocation = 150 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = new_rgb[colorindex]
+
+ img = Image.fromarray(image, 'RGB')
+ img.save(str(output_filename) + 'Generated Macbeth Chart.png')
diff --git a/utils/raspberrypi/delayedctrls_parse.py b/utils/raspberrypi/delayedctrls_parse.py
index e38145d8..1decf73f 100644
--- a/utils/raspberrypi/delayedctrls_parse.py
+++ b/utils/raspberrypi/delayedctrls_parse.py
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: BSD-2-Clause
+
import re
import sys
import os
diff --git a/utils/release.sh b/utils/release.sh
new file mode 100755
index 00000000..8cc85859
--- /dev/null
+++ b/utils/release.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Prepare a project release
+
+set -e
+
+# Abort if we are not within the project root or the tree is not clean.
+if [ ! -e utils/gen-version.sh ] || [ ! -e .git ]; then
+ echo "This release script must be run from the root of libcamera git tree."
+ exit 1
+fi
+
+if ! git diff-index --quiet HEAD; then
+ echo "Tree must be clean to release."
+ exit 1
+fi
+
+# Identify current version components
+version=$(./utils/gen-version.sh)
+
+# Decide if we are here to bump major, minor, or patch release.
+case $1 in
+ major|minor|patch)
+ bump=$1;
+ ;;
+ *)
+ echo "You must specify the version bump level: (major, minor, patch)"
+ exit 1
+ ;;
+esac
+
+new_version=$(./utils/semver bump "$bump" "$version")
+
+echo "Bumping $bump"
+echo " Existing version is: $version"
+echo " New version is : $new_version"
+
+# Patch in the version to our meson.build
+sed -i -E "s/ version : '.*',/ version : '$new_version',/" meson.build
+
+# Commit the update
+git commit meson.build -esm "libcamera v$new_version"
+
+# Create a tag from that commit
+git show -s --format=%B | git tag "v$new_version" -s -F -
diff --git a/utils/rkisp1/gen-csc-table.py b/utils/rkisp1/gen-csc-table.py
new file mode 100755
index 00000000..ffc0370a
--- /dev/null
+++ b/utils/rkisp1/gen-csc-table.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2022, Ideas on Board Oy
+#
+# Generate color space conversion table coefficients with configurable
+# fixed-point precision
+
+import argparse
+import enum
+import numpy as np
+import sys
+
+
+encodings = {
+ 'rec601': [
+ [ 0.299, 0.587, 0.114 ],
+ [ -0.299 / 1.772, -0.587 / 1.772, 0.886 / 1.772 ],
+ [ 0.701 / 1.402, -0.587 / 1.402, -0.114 / 1.402 ]
+ ],
+ 'rec709': [
+ [ 0.2126, 0.7152, 0.0722 ],
+ [ -0.2126 / 1.8556, -0.7152 / 1.8556, 0.9278 / 1.8556 ],
+ [ 0.7874 / 1.5748, -0.7152 / 1.5748, -0.0722 / 1.5748 ]
+ ],
+ 'rec2020': [
+ [ 0.2627, 0.6780, 0.0593 ],
+ [ -0.2627 / 1.8814, -0.6780 / 1.8814, 0.9407 / 1.8814 ],
+ [ 0.7373 / 1.4746, -0.6780 / 1.4746, -0.0593 / 1.4746 ],
+ ],
+ 'smpte240m': [
+ [ 0.2122, 0.7013, 0.0865 ],
+ [ -0.2122 / 1.8270, -0.7013 / 1.8270, 0.9135 / 1.8270 ],
+ [ 0.7878 / 1.5756, -0.7013 / 1.5756, -0.0865 / 1.5756 ],
+ ],
+}
+
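As a sanity check, each luma row above sums to 1.0 and each chroma row sums to 0.0; the sum-preserving rounding below relies on this. A quick sketch using the encodings dict above:

import numpy as np

for name, matrix in encodings.items():
    m = np.array(matrix)
    assert abs(m[0].sum() - 1.0) < 1e-9  # luma row sums to 1.0
    assert abs(m[1].sum()) < 1e-9        # Cb row sums to 0.0
    assert abs(m[2].sum()) < 1e-9        # Cr row sums to 0.0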
+
+class Precision(object):
+ def __init__(self, precision):
+ if precision[0].upper() != 'Q':
+ raise RuntimeError(f'Invalid precision `{precision}`')
+ prec = precision[1:].split('.')
+ if len(prec) != 2:
+ raise RuntimeError(f'Invalid precision `{precision}`')
+
+ self.__prec = [int(v) for v in prec]
+
+ @property
+ def integer(self):
+ return self.__prec[0]
+
+ @property
+ def fractional(self):
+ return self.__prec[1]
+
+ @property
+ def total(self):
+ # Add 1 for the sign bit
+ return self.__prec[0] + self.__prec[1] + 1
+
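For instance, the script's default 'Q1.7' precision parses as one integer bit and seven fractional bits, for nine bits total once the sign bit is added:

p = Precision('Q1.7')
print(p.integer, p.fractional, p.total)  # 1 7 9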
+
+class Quantization(enum.Enum):
+ FULL = 0
+ LIMITED = 1
+
+
+def scale_coeff(coeff, quantization, luma):
+ """Scale a coefficient to the output range dictated by the quantization.
+
+ Parameters
+ ----------
+ coeff : float
+ The CSC matrix coefficient to scale
+ quantization : Quantization
+ The quantization, either FULL or LIMITED
+ luma : bool
+ True if the coefficient corresponds to a luma value, False otherwise
+ """
+
+ # Assume the input range is 8 bits. The output range is set by the
+ # quantization and differs between luma and chroma components for limited
+ # range.
+ in_range = 255 - 0
+ if quantization == Quantization.FULL:
+ out_range = 255 - 0
+ elif luma:
+ out_range = 235 - 16
+ else:
+ out_range = 240 - 16
+
+ return coeff * out_range / in_range
+
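For example, with limited-range quantization a luma coefficient is compressed by the 219/255 output-to-input ratio, while full range leaves it unchanged. A small sketch using the rec601 red coefficient:

print(scale_coeff(0.299, Quantization.LIMITED, True))  # ~0.2568 (0.299 * 219 / 255)
print(scale_coeff(0.299, Quantization.FULL, True))     # 0.299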
+
+def round_array(values):
+ """Round a list of signed floating point values to the closest integer while
+ preserving the (rounded) value of the sum of all elements.
+ """
+
+ # Calculate the rounding error as the difference between the rounded sum of
+ # values and the sum of rounded values. This is by definition an integer
+ # (positive or negative), which indicates how many values will need to be
+ # 'flipped' to the opposite rounding.
+ rounded_values = [round(value) for value in values]
+ sum_values = round(sum(values))
+ sum_error = sum_values - sum(rounded_values)
+
+ if sum_error == 0:
+ return rounded_values
+
+ # The next step is to distribute the error among the values, in a way that
+ # will minimize the relative error introduced in individual values. We
+ # extend the values list with the rounded value and original index for each
+ # element, and sort by rounding error. Then we modify the elements with the
+ # highest or lowest error, depending on whether the sum error is negative
+ # or positive.
+
+ values = [[value, round(value), index] for index, value in enumerate(values)]
+ values.sort(key=lambda v: v[1] - v[0])
+
+ # It could also be argued that the key for the sort order should not be the
+ # absolute rounding error but the relative error, as the impact of identical
+ # rounding errors will differ for coefficients with widely different values.
+ # This is a topic for further research.
+ #
+ # values.sort(key=lambda v: (v[1] - v[0]) / abs(v[0]))
+
+ if sum_error > 0:
+ for i in range(sum_error):
+ values[i][1] += 1
+ else:
+ for i in range(-sum_error):
+ values[len(values) - i - 1][1] -= 1
+
+ # Finally, sort back by index, make sure the total rounding error is now 0,
+ # and return the rounded values.
+ values.sort(key=lambda v: v[2])
+ values = [value[1] for value in values]
+ assert sum(values) == sum_values
+
+ return values
+
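A short worked example: rounding [1.4, 1.4, 1.2] element-wise gives [1, 1, 1] with sum 3, but the rounded sum of the inputs is 4, so the element with the largest downward rounding error gets flipped up:

print(round_array([1.4, 1.4, 1.2]))  # [2, 1, 1], sum preserved at 4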
+
+def main(argv):
+
+ # Parse command line arguments.
+ parser = argparse.ArgumentParser(
+ description='Generate color space conversion table coefficients with '
+ 'configurable fixed-point precision.'
+ )
+ parser.add_argument('--invert', '-i', action='store_true',
+ help='Invert the color space conversion (YUV -> RGB)')
+ parser.add_argument('--precision', '-p', default='Q1.7',
+ help='The output fixed point precision in Q notation (sign bit excluded)')
+ parser.add_argument('--quantization', '-q', choices=['full', 'limited'],
+ default='limited', help='Quantization range')
+ parser.add_argument('encoding', choices=encodings.keys(), help='YCbCr encoding')
+ args = parser.parse_args(argv[1:])
+
+ try:
+ precision = Precision(args.precision)
+ except Exception:
+ print(f'Invalid precision `{args.precision}`')
+ return 1
+
+ encoding = encodings[args.encoding]
+ quantization = Quantization[args.quantization.upper()]
+
+ # Scale and round the encoding coefficients based on the precision and
+ # quantization range.
+ luma = True
+ scaled_coeffs = []
+ for line in encoding:
+ line = [scale_coeff(coeff, quantization, luma) for coeff in line]
+ scaled_coeffs.append(line)
+ luma = False
+
+ if args.invert:
+ scaled_coeffs = np.linalg.inv(scaled_coeffs)
+
+ rounded_coeffs = []
+ for line in scaled_coeffs:
+ line = [coeff * (1 << precision.fractional) for coeff in line]
+ # For the RGB to YUV conversion, use a rounding method that preserves
+ # the rounded sum of each line to avoid biases and overflow, as the sum
+ # of luma and chroma coefficients should be 1.0 and 0.0 respectively
+ # (in full range). For the YUV to RGB conversion, there is no such
+ # constraint, so use simple rounding.
+ if args.invert:
+ line = [round(coeff) for coeff in line]
+ else:
+ line = round_array(line)
+
+ # Convert coefficients to the number of bits selected by the precision.
+ # Negative values will be turned into positive integers using 2's
+ # complement.
+ line = [coeff & ((1 << precision.total) - 1) for coeff in line]
+ rounded_coeffs.append(line)
+
+ # Print the result as C code.
+ nbits = 1 << (precision.total - 1).bit_length()
+ nbytes = nbits // 4  # number of hex digits (4 bits each) in the output
+ print(f'static const u{nbits} {"yuv2rgb" if args.invert else "rgb2yuv"}_{args.encoding}_{quantization.name.lower()}_coeffs[] = {{')
+
+ for line in rounded_coeffs:
+ line = [f'0x{coeff:0{nbytes}x}' for coeff in line]
+
+ print(f'\t{", ".join(line)},')
+
+ print('};')
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
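The masking step in main() maps negative coefficients to their two's complement representation in precision.total bits. A small sketch for Q1.7 (9 bits total):

total = 9    # Q1.7: 1 integer bit + 7 fractional bits + 1 sign bit
coeff = -38  # e.g. a negative chroma coefficient after scaling and rounding
print(hex(coeff & ((1 << total) - 1)))  # 0x1da, i.e. 512 - 38 = 474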
diff --git a/utils/rkisp1/rkisp1-capture.sh b/utils/rkisp1/rkisp1-capture.sh
index 4d09f5d5..d767e31d 100755
--- a/utils/rkisp1/rkisp1-capture.sh
+++ b/utils/rkisp1/rkisp1-capture.sh
@@ -4,8 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# rkisp-capture.sh - Capture processed frames from cameras based on the
-# Rockchip ISP1
+# Capture processed frames from cameras based on the Rockchip ISP1
#
# The script makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
@@ -14,6 +13,37 @@
# - raw2rgbpnm (from git://git.retiisi.org.uk/~sailus/raw2rgbpnm.git)
# - yavta (from git://git.ideasonboard.org/yavta.git)
+# Return the entity connected to a given pad
+# $1: The pad, expressed as "entity":index
+mc_remote_entity() {
+ local entity="${1%:*}"
+ local pad="${1#*:}"
+
+ ${mediactl} -p | awk '
+/^- entity / {
+ in_entity=0
+}
+
+/^- entity [0-9]+: '"${entity}"' / {
+ in_entity=1
+}
+
+/^[ \t]+pad/ {
+ in_pad=0
+}
+
+/^[ \t]+pad'"${pad}"': / {
+ in_pad=1
+}
+
+/^[ \t]+(<-|->) "[^"]+"/ {
+ if (in_entity && in_pad) {
+ print gensub(/^[^"]+"([^"]+)":([0-9]+).*$/, "\\1", "g")
+ exit
+ }
+}'
+}
+
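For illustration, if the topology reported by media-ctl -p links the sensor's source pad to a CSI-2 receiver, the function prints the receiver's entity name (entity and pad names here are hypothetical):

$ mc_remote_entity "imx219 4-0010:0"
csi2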
# Locate the sensor entity
find_sensor() {
local bus
@@ -28,6 +58,17 @@ find_sensor() {
echo "$sensor_name $bus"
}
+# Locate the CSI-2 receiver
+find_csi2_rx() {
+ local sensor_name=$1
+ local csi2_rx
+
+ csi2_rx=$(mc_remote_entity "$sensor_name:0")
+ if [ "$csi2_rx" != rkisp1_isp ] ; then
+ echo "$csi2_rx"
+ fi
+}
+
# Locate the media device
find_media_device() {
local mdev
@@ -51,7 +92,7 @@ get_sensor_format() {
local format
local sensor=$1
- format=$($mediactl --get-v4l2 "'$sensor':0" | sed 's/\[\([^ ]*\).*/\1/')
+ format=$($mediactl --get-v4l2 "'$sensor':0" | grep 'fmt:' | sed 's/.*\(fmt:\S*\).*/\1/')
sensor_mbus_code=$(echo $format | sed 's/fmt:\([A-Z0-9_]*\).*/\1/')
sensor_size=$(echo $format | sed 's/[^\/]*\/\([0-9x]*\).*/\1/')
@@ -63,15 +104,27 @@ configure_pipeline() {
local format="fmt:$sensor_mbus_code/$sensor_size"
local capture_mbus_code=$1
local capture_size=$2
+ local csi2_rx
echo "Configuring pipeline for $sensor in $format"
+ csi2_rx=$(find_csi2_rx "$sensor")
+
$mediactl -r
- $mediactl -l "'$sensor':0 -> 'rkisp1_isp':0 [1]"
+ if [ -n "$csi2_rx" ] ; then
+ $mediactl -l "'$sensor':0 -> '$csi2_rx':0 [1]"
+ $mediactl -l "'$csi2_rx':1 -> 'rkisp1_isp':0 [1]"
+ else
+ $mediactl -l "'$sensor':0 -> 'rkisp1_isp':0 [1]"
+ fi
$mediactl -l "'rkisp1_isp':2 -> 'rkisp1_resizer_mainpath':0 [1]"
$mediactl -V "\"$sensor\":0 [$format]"
+ if [ -n "$csi2_rx" ] ; then
+ $mediactl -V "'$csi2_rx':0 [$format]"
+ $mediactl -V "'$csi2_rx':1 [$format]"
+ fi
$mediactl -V "'rkisp1_isp':0 [$format crop:(0,0)/$sensor_size]"
$mediactl -V "'rkisp1_isp':2 [fmt:$capture_mbus_code/$sensor_size crop:(0,0)/$sensor_size]"
$mediactl -V "'rkisp1_resizer_mainpath':0 [fmt:$capture_mbus_code/$sensor_size crop:(0,0)/$sensor_size]"
@@ -88,6 +141,7 @@ capture_frames() {
if [[ $save_file -eq 1 ]]; then
file_op="--file=/tmp/frame-#.bin"
+ rm -f /tmp/frame-*.bin
fi
yavta -c$frame_count -n5 -I -f $capture_format -s $capture_size \
@@ -170,7 +224,7 @@ mediactl="media-ctl -d $mdev"
get_sensor_format "$sensor"
if [[ $raw == true ]] ; then
- capture_format=$(echo $sensor_mbus_code | sed 's/_[0-9X]$//')
+ capture_format=$(echo $sensor_mbus_code | sed 's/_[0-9X]*$//')
capture_mbus_code=$sensor_mbus_code
else
capture_format=YUYV
diff --git a/utils/semver b/utils/semver
new file mode 100755
index 00000000..a1604250
--- /dev/null
+++ b/utils/semver
@@ -0,0 +1,446 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: Apache-2.0
+
+set -o errexit -o nounset -o pipefail
+
+NAT='0|[1-9][0-9]*'
+ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*'
+IDENT="$NAT|$ALPHANUM"
+FIELD='[0-9A-Za-z-]+'
+
+SEMVER_REGEX="\
+^[vV]?\
+($NAT)\\.($NAT)\\.($NAT)\
+(\\-(${IDENT})(\\.(${IDENT}))*)?\
+(\\+${FIELD}(\\.${FIELD})*)?$"
+
+PROG=semver
+PROG_VERSION="3.4.0"
+
+USAGE="\
+Usage:
+ $PROG bump major <version>
+ $PROG bump minor <version>
+ $PROG bump patch <version>
+ $PROG bump prerel|prerelease [<prerel>] <version>
+ $PROG bump build <build> <version>
+ $PROG bump release <version>
+ $PROG get major <version>
+ $PROG get minor <version>
+ $PROG get patch <version>
+ $PROG get prerel|prerelease <version>
+ $PROG get build <version>
+ $PROG get release <version>
+ $PROG compare <version> <other_version>
+ $PROG diff <version> <other_version>
+ $PROG validate <version>
+ $PROG --help
+ $PROG --version
+
+Arguments:
+ <version> A version must match the following regular expression:
+ \"${SEMVER_REGEX}\"
+ In English:
+ -- The version must match X.Y.Z[-PRERELEASE][+BUILD]
+ where X, Y and Z are non-negative integers.
+ -- PRERELEASE is a dot separated sequence of non-negative integers and/or
+ identifiers composed of alphanumeric characters and hyphens (with
+ at least one non-digit). Numeric identifiers must not have leading
+ zeros. A hyphen (\"-\") introduces this optional part.
+ -- BUILD is a dot separated sequence of identifiers composed of alphanumeric
+ characters and hyphens. A plus (\"+\") introduces this optional part.
+
+ <other_version> See <version> definition.
+
+ <prerel> A string as defined by PRERELEASE above. Or, it can be a PRERELEASE
+ prototype string followed by a dot.
+
+ <build> A string as defined by BUILD above.
+
+Options:
+ -v, --version Print the version of this tool.
+ -h, --help Print this help message.
+
+Commands:
+ bump Bump by one of major, minor, patch; zeroing or removing
+ subsequent parts. \"bump prerel\" (or its synonym \"bump prerelease\")
+ sets the PRERELEASE part and removes any BUILD part. A trailing dot
+ in the <prerel> argument introduces an incrementing numeric field
+ which is added or bumped. If no <prerel> argument is provided, an
+ incrementing numeric field is introduced/bumped. \"bump build\" sets
+ the BUILD part. \"bump release\" removes any PRERELEASE or BUILD parts.
+ The bumped version is written to stdout.
+
+ get Extract given part of <version>, where part is one of major, minor,
+ patch, prerel (alternatively: prerelease), build, or release.
+
+ compare Compare <version> with <other_version>, output to stdout the
+ following values: -1 if <other_version> is newer, 0 if equal, 1 if
+ older. The BUILD part is not used in comparisons.
+
+ diff Compare <version> with <other_version>, output to stdout the
+ difference between two versions by the release type (MAJOR, MINOR,
+ PATCH, PRERELEASE, BUILD).
+
+ validate Validate if <version> follows the SEMVER pattern (see <version>
+ definition). Print 'valid' to stdout if the version is valid, otherwise
+ print 'invalid'.
+
+See also:
+ https://semver.org -- Semantic Versioning 2.0.0"
+
+function error {
+ echo -e "$1" >&2
+ exit 1
+}
+
+function usage_help {
+ error "$USAGE"
+}
+
+function usage_version {
+ echo -e "${PROG}: $PROG_VERSION"
+ exit 0
+}
+
+# normalize the "part" keywords to a canonical string. At present,
+# only "prerelease" is normalized to "prerel".
+
+function normalize_part {
+ if [ "$1" == "prerelease" ]
+ then
+ echo "prerel"
+ else
+ echo "$1"
+ fi
+}
+
+function validate_version {
+ local version=$1
+ if [[ "$version" =~ $SEMVER_REGEX ]]; then
+ # if a second argument is passed, store the result in var named by $2
+ if [ "$#" -eq "2" ]; then
+ local major=${BASH_REMATCH[1]}
+ local minor=${BASH_REMATCH[2]}
+ local patch=${BASH_REMATCH[3]}
+ local prere=${BASH_REMATCH[4]}
+ local build=${BASH_REMATCH[8]}
+ eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")"
+ else
+ echo "$version"
+ fi
+ else
+ error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information."
+ fi
+}
+
+function is_nat {
+ [[ "$1" =~ ^($NAT)$ ]]
+}
+
+function is_null {
+ [ -z "$1" ]
+}
+
+function order_nat {
+ [ "$1" -lt "$2" ] && { echo -1 ; return ; }
+ [ "$1" -gt "$2" ] && { echo 1 ; return ; }
+ echo 0
+}
+
+function order_string {
+ [[ $1 < $2 ]] && { echo -1 ; return ; }
+ [[ $1 > $2 ]] && { echo 1 ; return ; }
+ echo 0
+}
+
+# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them
+# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1)
+# is less-than, equal, or greater-than the right array ($2). The longer array
+# is considered greater-than the shorter if the shorter is a prefix of the longer.
+#
+function compare_fields {
+ local l="$1[@]"
+ local r="$2[@]"
+ local leftfield=( "${!l}" )
+ local rightfield=( "${!r}" )
+ local left
+ local right
+
+ local i=$(( -1 ))
+ local order=$(( 0 ))
+
+ while true
+ do
+ [ $order -ne 0 ] && { echo $order ; return ; }
+
+ : $(( i++ ))
+ left="${leftfield[$i]}"
+ right="${rightfield[$i]}"
+
+ is_null "$left" && is_null "$right" && { echo 0 ; return ; }
+ is_null "$left" && { echo -1 ; return ; }
+ is_null "$right" && { echo 1 ; return ; }
+
+ is_nat "$left" && is_nat "$right" && { order=$(order_nat "$left" "$right") ; continue ; }
+ is_nat "$left" && { echo -1 ; return ; }
+ is_nat "$right" && { echo 1 ; return ; }
+ { order=$(order_string "$left" "$right") ; continue ; }
+ done
+}
+
+# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array
+function compare_version {
+ local order
+ validate_version "$1" V
+ validate_version "$2" V_
+
+ # compare major, minor, patch
+
+ local left=( "${V[0]}" "${V[1]}" "${V[2]}" )
+ local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" )
+
+ order=$(compare_fields left right)
+ [ "$order" -ne 0 ] && { echo "$order" ; return ; }
+
+ # compare pre-release ids when M.m.p are equal
+
+ local prerel="${V[3]:1}"
+ local prerel_="${V_[3]:1}"
+ local left=( ${prerel//./ } )
+ local right=( ${prerel_//./ } )
+
+ # if left and right have no pre-release part, then left equals right
+ # if only one of left/right has pre-release part, that one is less than simple M.m.p
+
+ [ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; }
+ [ -z "$prerel" ] && { echo 1 ; return ; }
+ [ -z "$prerel_" ] && { echo -1 ; return ; }
+
+ # otherwise, compare the pre-release id's
+
+ compare_fields left right
+}
+
+# render_prerel -- return a prerel field with a trailing numeric string
+# usage: render_prerel numeric [prefix-string]
+#
+function render_prerel {
+ if [ -z "$2" ]
+ then
+ echo "${1}"
+ else
+ echo "${2}${1}"
+ fi
+}
+
+# extract_prerel -- extract prefix and trailing numeric portions of a pre-release part
+# usage: extract_prerel prerel prerel_parts
+# The prefix and trailing numeric parts are returned in "prerel_parts".
+#
+PREFIX_ALPHANUM='[.0-9A-Za-z-]*[.A-Za-z-]'
+DIGITS='[0-9][0-9]*'
+EXTRACT_REGEX="^(${PREFIX_ALPHANUM})*(${DIGITS})$"
+
+function extract_prerel {
+ local prefix; local numeric;
+
+ if [[ "$1" =~ $EXTRACT_REGEX ]]
+ then # found prefix and trailing numeric parts
+ prefix="${BASH_REMATCH[1]}"
+ numeric="${BASH_REMATCH[2]}"
+ else # no numeric part
+ prefix="${1}"
+ numeric=
+ fi
+
+ eval "$2=(\"$prefix\" \"$numeric\")"
+}
+
+# bump_prerel -- return the new pre-release part based on previous pre-release part
+# and prototype for bump
+# usage: bump_prerel proto previous
+#
+function bump_prerel {
+ local proto; local prev_prefix; local prev_numeric;
+
+ # case one: no trailing dot in prototype => simply replace previous with proto
+ if [[ ! ( "$1" =~ \.$ ) ]]
+ then
+ echo "$1"
+ return
+ fi
+
+ proto="${1%.}" # discard trailing dot marker from prototype
+
+ extract_prerel "${2#-}" prerel_parts # extract parts of previous pre-release
+# shellcheck disable=SC2154
+ prev_prefix="${prerel_parts[0]}"
+ prev_numeric="${prerel_parts[1]}"
+
+ # case two: bump or append numeric to previous pre-release part
+ if [ "$proto" == "+" ] # dummy "+" indicates no prototype argument provided
+ then
+ if [ -n "$prev_numeric" ]
+ then
+ : $(( ++prev_numeric )) # previous pre-release is already numbered, bump it
+ render_prerel "$prev_numeric" "$prev_prefix"
+ else
+ render_prerel 1 "$prev_prefix" # append starting number
+ fi
+ return
+ fi
+
+ # case three: set, bump, or append using prototype prefix
+ if [ "$prev_prefix" != "$proto" ]
+ then
+ render_prerel 1 "$proto" # proto not same pre-release; set and start at '1'
+ elif [ -n "$prev_numeric" ]
+ then
+ : $(( ++prev_numeric )) # pre-release is numbered; bump it
+ render_prerel "$prev_numeric" "$prev_prefix"
+ else
+ render_prerel 1 "$prev_prefix" # start pre-release at number '1'
+ fi
+}
+
+function command_bump {
+ local new; local version; local sub_version; local command;
+
+ command="$(normalize_part "$1")"
+
+ case $# in
+ 2) case "$command" in
+ major|minor|patch|prerel|release) sub_version="+."; version=$2;;
+ *) usage_help;;
+ esac ;;
+ 3) case "$command" in
+ prerel|build) sub_version=$2 version=$3 ;;
+ *) usage_help;;
+ esac ;;
+ *) usage_help;;
+ esac
+
+ validate_version "$version" parts
+ # shellcheck disable=SC2154
+ local major="${parts[0]}"
+ local minor="${parts[1]}"
+ local patch="${parts[2]}"
+ local prere="${parts[3]}"
+ local build="${parts[4]}"
+
+ case "$command" in
+ major) new="$((major + 1)).0.0";;
+ minor) new="${major}.$((minor + 1)).0";;
+ patch) new="${major}.${minor}.$((patch + 1))";;
+ release) new="${major}.${minor}.${patch}";;
+ prerel) new=$(validate_version "${major}.${minor}.${patch}-$(bump_prerel "$sub_version" "$prere")");;
+ build) new=$(validate_version "${major}.${minor}.${patch}${prere}+${sub_version}");;
+ *) usage_help ;;
+ esac
+
+ echo "$new"
+ exit 0
+}
+
+function command_compare {
+ local v; local v_;
+
+ case $# in
+ 2) v=$(validate_version "$1"); v_=$(validate_version "$2") ;;
+ *) usage_help ;;
+ esac
+
+ set +u # need unset array element to evaluate to null
+ compare_version "$v" "$v_"
+ exit 0
+}
+
+function command_diff {
+ validate_version "$1" v1_parts
+ # shellcheck disable=SC2154
+ local v1_major="${v1_parts[0]}"
+ local v1_minor="${v1_parts[1]}"
+ local v1_patch="${v1_parts[2]}"
+ local v1_prere="${v1_parts[3]}"
+ local v1_build="${v1_parts[4]}"
+
+ validate_version "$2" v2_parts
+ # shellcheck disable=SC2154
+ local v2_major="${v2_parts[0]}"
+ local v2_minor="${v2_parts[1]}"
+ local v2_patch="${v2_parts[2]}"
+ local v2_prere="${v2_parts[3]}"
+ local v2_build="${v2_parts[4]}"
+
+ if [ "${v1_major}" != "${v2_major}" ]; then
+ echo "major"
+ elif [ "${v1_minor}" != "${v2_minor}" ]; then
+ echo "minor"
+ elif [ "${v1_patch}" != "${v2_patch}" ]; then
+ echo "patch"
+ elif [ "${v1_prere}" != "${v2_prere}" ]; then
+ echo "prerelease"
+ elif [ "${v1_build}" != "${v2_build}" ]; then
+ echo "build"
+ fi
+}
+
+# shellcheck disable=SC2034
+function command_get {
+ local part version
+
+ if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then
+ usage_help
+ exit 0
+ fi
+
+ part="$1"
+ version="$2"
+
+ validate_version "$version" parts
+ local major="${parts[0]}"
+ local minor="${parts[1]}"
+ local patch="${parts[2]}"
+ local prerel="${parts[3]:1}"
+ local build="${parts[4]:1}"
+ local release="${major}.${minor}.${patch}"
+
+ part="$(normalize_part "$part")"
+
+ case "$part" in
+ major|minor|patch|release|prerel|build) echo "${!part}" ;;
+ *) usage_help ;;
+ esac
+
+ exit 0
+}
+
+function command_validate {
+ if [[ "$#" -ne "1" ]]; then
+ usage_help
+ fi
+
+ if [[ "$1" =~ $SEMVER_REGEX ]]; then
+ echo "valid"
+ else
+ echo "invalid"
+ fi
+
+ exit 0
+}
+
+case $# in
+ 0) echo "Unknown command: $*"; usage_help;;
+esac
+
+case $1 in
+ --help|-h) echo -e "$USAGE"; exit 0;;
+ --version|-v) usage_version ;;
+ bump) shift; command_bump "$@";;
+ get) shift; command_get "$@";;
+ compare) shift; command_compare "$@";;
+ diff) shift; command_diff "$@";;
+ validate) shift; command_validate "$@";;
+ *) echo "Unknown arguments: $*"; usage_help;;
+esac
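A few illustrative invocations (outputs follow from the rules in the usage text above):

$ ./utils/semver bump minor 0.1.2
0.2.0
$ ./utils/semver bump prerel rc. 1.2.3
1.2.3-rc1
$ ./utils/semver compare 1.0.0 1.0.1
-1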
diff --git a/utils/tracepoints/analyze-ipa-trace.py b/utils/tracepoints/analyze-ipa-trace.py
index 50fbbf42..92e8a235 100755
--- a/utils/tracepoints/analyze-ipa-trace.py
+++ b/utils/tracepoints/analyze-ipa-trace.py
@@ -4,7 +4,7 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# analyze-ipa-trace.py - Example of how to extract information from libcamera lttng traces
+# Example of how to extract information from libcamera lttng traces
import argparse
import bt2
diff --git a/utils/tracepoints/gen-tp-header.py b/utils/tracepoints/gen-tp-header.py
index bbd472d9..83606c32 100755
--- a/utils/tracepoints/gen-tp-header.py
+++ b/utils/tracepoints/gen-tp-header.py
@@ -4,26 +4,27 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# gen-tp-header.py - Generate header file to contain lttng tracepoints
+# Generate header file to contain lttng tracepoints
import datetime
import jinja2
+import pathlib
import os
import sys
def main(argv):
- if len(argv) < 3:
- print(f'Usage: {argv[0]} output template tp_files...')
+ if len(argv) < 4:
+ print(f'Usage: {argv[0]} include_build_dir output template tp_files...')
return 1
- output = argv[1]
- template = argv[2]
+ output = argv[2]
+ template = argv[3]
year = datetime.datetime.now().year
- path = output.replace('include/', '', 1)
+ path = pathlib.Path(output).absolute().relative_to(argv[1])
source = ''
- for fname in argv[3:]:
+ for fname in argv[4:]:
source += open(fname, 'r', encoding='utf-8').read() + '\n\n'
template = jinja2.Template(open(template, 'r', encoding='utf-8').read())
diff --git a/utils/tuning/README.rst b/utils/tuning/README.rst
new file mode 100644
index 00000000..ef3e6ad7
--- /dev/null
+++ b/utils/tuning/README.rst
@@ -0,0 +1,11 @@
+.. SPDX-License-Identifier: CC-BY-SA-4.0
+
+.. TODO: Write an overview of libtuning
+
+Dependencies
+------------
+
+- numpy
+- opencv-python
+- py3exiv2
+- rawpy
diff --git a/utils/tuning/libtuning/__init__.py b/utils/tuning/libtuning/__init__.py
new file mode 100644
index 00000000..93049976
--- /dev/null
+++ b/utils/tuning/libtuning/__init__.py
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.utils import *
+from libtuning.libtuning import *
+
+from libtuning.image import *
+from libtuning.macbeth import *
+
+from libtuning.average import *
+from libtuning.gradient import *
+from libtuning.smoothing import *
diff --git a/utils/tuning/libtuning/average.py b/utils/tuning/libtuning/average.py
new file mode 100644
index 00000000..c41075a1
--- /dev/null
+++ b/utils/tuning/libtuning/average.py
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Wrapper for numpy averaging functions to enable duck-typing
+
+import numpy as np
+
+
+# @brief Wrapper for np averaging functions so that they can be duck-typed
+class Average(object):
+ def __init__(self):
+ pass
+
+ def average(self, np_array):
+ raise NotImplementedError
+
+
+class Mean(Average):
+ def average(self, np_array):
+ return np.mean(np_array)
diff --git a/utils/tuning/libtuning/generators/__init__.py b/utils/tuning/libtuning/generators/__init__.py
new file mode 100644
index 00000000..f28b6149
--- /dev/null
+++ b/utils/tuning/libtuning/generators/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.generators.raspberrypi_output import RaspberryPiOutput
+from libtuning.generators.yaml_output import YamlOutput
diff --git a/utils/tuning/libtuning/generators/generator.py b/utils/tuning/libtuning/generators/generator.py
new file mode 100644
index 00000000..77a8ba4a
--- /dev/null
+++ b/utils/tuning/libtuning/generators/generator.py
@@ -0,0 +1,15 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Base class for a generator to convert dict to tuning file
+
+from pathlib import Path
+
+
+class Generator(object):
+ def __init__(self):
+ pass
+
+ def write(self, output_path: Path, output_dict: dict, output_order: list):
+ raise NotImplementedError
diff --git a/utils/tuning/libtuning/generators/raspberrypi_output.py b/utils/tuning/libtuning/generators/raspberrypi_output.py
new file mode 100644
index 00000000..47b49059
--- /dev/null
+++ b/utils/tuning/libtuning/generators/raspberrypi_output.py
@@ -0,0 +1,114 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright 2022 Raspberry Pi Ltd
+#
+# Generate tuning file in Raspberry Pi's json format
+#
+# (Copied from ctt_pretty_print_json.py)
+
+from .generator import Generator
+
+import json
+from pathlib import Path
+import textwrap
+
+
+class Encoder(json.JSONEncoder):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.indentation_level = 0
+ self.hard_break = 120
+ self.custom_elems = {
+ 'table': 16,
+ 'luminance_lut': 16,
+ 'ct_curve': 3,
+ 'ccm': 3,
+ 'gamma_curve': 2,
+ 'y_target': 2,
+ 'prior': 2
+ }
+
+ def encode(self, o, node_key=None):
+ if isinstance(o, (list, tuple)):
+ # Check if we are a flat list of numbers.
+ if not any(isinstance(el, (list, tuple, dict)) for el in o):
+ s = ', '.join(json.dumps(el) for el in o)
+ if node_key in self.custom_elems.keys():
+ # Special case handling to specify number of elements in a row for tables, ccm, etc.
+ self.indentation_level += 1
+ sl = s.split(', ')
+ num = self.custom_elems[node_key]
+ chunk = [self.indent_str + ', '.join(sl[x:x + num]) for x in range(0, len(sl), num)]
+ t = ',\n'.join(chunk)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ elif len(s) > self.hard_break - len(self.indent_str):
+ # Break a long list with wraps.
+ self.indentation_level += 1
+ t = textwrap.fill(s, self.hard_break, break_long_words=False,
+ initial_indent=self.indent_str, subsequent_indent=self.indent_str)
+ self.indentation_level -= 1
+ output = f'\n{self.indent_str}[\n{t}\n{self.indent_str}]'
+ else:
+ # Smaller lists can remain on a single line.
+ output = f' [ {s} ]'
+ return output
+ else:
+ # Sub-structures in the list case.
+ self.indentation_level += 1
+ output = [self.indent_str + self.encode(el) for el in o]
+ self.indentation_level -= 1
+ output = ',\n'.join(output)
+ return f' [\n{output}\n{self.indent_str}]'
+
+ elif isinstance(o, dict):
+ self.indentation_level += 1
+ output = []
+ for k, v in o.items():
+ if isinstance(v, dict) and len(v) == 0:
+ # Empty config block special case.
+ output.append(self.indent_str + f'{json.dumps(k)}: {{ }}')
+ else:
+ # Only linebreak if the next node is a config block.
+ sep = f'\n{self.indent_str}' if isinstance(v, dict) else ''
+ output.append(self.indent_str + f'{json.dumps(k)}:{sep}{self.encode(v, k)}')
+ output = ',\n'.join(output)
+ self.indentation_level -= 1
+ return f'{{\n{output}\n{self.indent_str}}}'
+
+ else:
+ return ' ' + json.dumps(o)
+
+ @property
+ def indent_str(self) -> str:
+ return ' ' * self.indentation_level * self.indent
+
+ def iterencode(self, o, **kwargs):
+ return self.encode(o)
+
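As a rough illustration of the encoder (hypothetical data; the 'ccm' key triggers the three-per-row layout from custom_elems above):

import json

data = {'rpi.ccm': {'ccms': [{'ct': 2850,
                              'ccm': [1.5, -0.3, -0.2,
                                      -0.4, 1.6, -0.2,
                                      -0.1, -0.4, 1.5]}]}}
print(json.dumps(data, cls=Encoder, indent=4, sort_keys=False))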
+
+class RaspberryPiOutput(Generator):
+ def __init__(self):
+ super().__init__()
+
+ def _pretty_print(self, in_json: dict) -> str:
+
+ if 'version' not in in_json or \
+ 'target' not in in_json or \
+ 'algorithms' not in in_json or \
+ in_json['version'] < 2.0:
+ raise RuntimeError('Incompatible JSON dictionary has been provided')
+
+ return json.dumps(in_json, cls=Encoder, indent=4, sort_keys=False)
+
+ def write(self, output_file: Path, output_dict: dict, output_order: list):
+ # Write json dictionary to file using ctt's version 2 format
+ out_json = {
+ 'version': 2.0,
+ 'target': 'bcm2835',
+ 'algorithms': [{f'{module.out_name}': output_dict[module]} for module in output_order]
+ }
+
+ with open(output_file, 'w') as f:
+ f.write(self._pretty_print(out_json))
diff --git a/utils/tuning/libtuning/generators/yaml_output.py b/utils/tuning/libtuning/generators/yaml_output.py
new file mode 100644
index 00000000..8f22d386
--- /dev/null
+++ b/utils/tuning/libtuning/generators/yaml_output.py
@@ -0,0 +1,123 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright 2022 Paul Elder <paul.elder@ideasonboard.com>
+#
+# Generate tuning file in YAML format
+
+from .generator import Generator
+
+from numbers import Number
+from pathlib import Path
+
+import libtuning.utils as utils
+
+
+class YamlOutput(Generator):
+ def __init__(self):
+ super().__init__()
+
+ def _stringify_number_list(self, listt: list):
+ line_wrap = 80
+
+ line = '[ ' + ', '.join([str(x) for x in listt]) + ' ]'
+ if len(line) <= line_wrap:
+ return [line]
+
+ out_lines = ['[']
+ line = ' '
+ for x in listt:
+ x_str = str(x)
+ # If the first number is longer than line_wrap, it'll add an extra line
+ if len(line) + len(x_str) > line_wrap:
+ out_lines.append(line)
+ line = f' {x_str},'
+ continue
+ line += f' {x_str},'
+ out_lines.append(line)
+ out_lines.append(']')
+
+ return out_lines
+
+ # @return Array of lines, and a boolean indicating whether all elements were numbers
+ def _stringify_list(self, listt: list):
+ out_lines = []
+
+ all_numbers = all(isinstance(x, Number) for x in listt)
+
+ if all_numbers:
+ return self._stringify_number_list(listt), True
+
+ for value in listt:
+ if isinstance(value, Number):
+ out_lines.append(f'- {str(value)}')
+ elif isinstance(value, str):
+ out_lines.append(f'- "{value}"')
+ elif isinstance(value, list):
+ lines, all_numbers = self._stringify_list(value)
+
+ if all_numbers:
+ out_lines.append(f'- {lines[0]}')
+ out_lines += [f' {line}' for line in lines[1:]]
+ else:
+ out_lines.append('-')
+ out_lines += [f' {line}' for line in lines]
+ elif isinstance(value, dict):
+ lines = self._stringify_dict(value)
+ out_lines.append(f'- {lines[0]}')
+ out_lines += [f' {line}' for line in lines[1:]]
+
+ return out_lines, False
+
+ def _stringify_dict(self, dictt: dict):
+ out_lines = []
+
+ for key in dictt:
+ value = dictt[key]
+
+ if isinstance(value, Number):
+ out_lines.append(f'{key}: {str(value)}')
+ elif isinstance(value, str):
+ out_lines.append(f'{key}: "{value}"')
+ elif isinstance(value, list):
+ lines, all_numbers = self._stringify_list(value)
+
+ if all_numbers:
+ out_lines.append(f'{key}: {lines[0]}')
+ out_lines += [f'{" " * (len(key) + 2)}{line}' for line in lines[1:]]
+ else:
+ out_lines.append(f'{key}:')
+ out_lines += [f' {line}' for line in lines]
+ elif isinstance(value, dict):
+ lines = self._stringify_dict(value)
+ out_lines.append(f'{key}:')
+ out_lines += [f' {line}' for line in lines]
+
+ return out_lines
+
+ def write(self, output_file: Path, output_dict: dict, output_order: list):
+ out_lines = [
+ '%YAML 1.1',
+ '---',
+ 'version: 1',
+ # No need to condition this, as libtuning already guarantees that
+ # we have at least one module. Even if the module has no output,
+ # its presence is meaningful.
+ 'algorithms:'
+ ]
+
+ for module in output_order:
+ out_lines.append(f' - {module.out_name}:')
+
+ if len(output_dict[module]) == 0:
+ continue
+
+ if not isinstance(output_dict[module], dict):
+ utils.eprint(f'Error: Output of {module.type} is not a dictionary')
+ continue
+
+ lines = self._stringify_dict(output_dict[module])
+ out_lines += [f' {line}' for line in lines]
+
+ with open(output_file, 'w', encoding='utf-8') as f:
+ for line in out_lines:
+ f.write(f'{line}\n')
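A minimal usage sketch, with a stand-in object for a real tuning module (real modules provide out_name and type attributes):

from types import SimpleNamespace

agc = SimpleNamespace(out_name='Agc', type='agc')
YamlOutput().write('/tmp/tuning.yaml', {agc: {'exposure': [100, 200, 400]}}, [agc])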
diff --git a/utils/tuning/libtuning/gradient.py b/utils/tuning/libtuning/gradient.py
new file mode 100644
index 00000000..b643f502
--- /dev/null
+++ b/utils/tuning/libtuning/gradient.py
@@ -0,0 +1,75 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Gradients that can be used to distribute or map numbers
+
+import libtuning as lt
+
+import math
+from numbers import Number
+
+
+# @brief Gradient for how to allocate pixels to sectors
+# @description The gradients take no parameters, as the domain is the
+# number of pixels and the range is the number of sectors, so
+# there is only one curve with start and end points at
+# (0, 0) and (#pixels, #sectors). The exception is curves
+# that *do* have multiple solutions for only two points, such as
+# gaussian, and curves of higher polynomial orders if we had them.
+#
+# \todo There will probably be a helper in the Gradient class, as I have a
+# feeling that all the other curves (besides Linear and Gaussian) can be
+# implemented in the same way.
+class Gradient(object):
+ def __init__(self):
+ pass
+
+ # @brief Distribute pixels into sectors (only in one dimension)
+ # @param domain Number of pixels
+ # @param sectors Number of sectors
+ # @return A list of number of pixels in each sector
+ def distribute(self, domain: list, sectors: list) -> list:
+ raise NotImplementedError
+
+ # @brief Map a number on a curve
+ # @param domain Domain of the curve
+ # @param rang Range of the curve
+ # @param x Input on the domain of the curve
+ # @return y from the range of the curve
+ def map(self, domain: tuple, rang: tuple, x: Number) -> Number:
+ raise NotImplementedError
+
+
+class Linear(Gradient):
+ # @param remainder Mode of handling remainder
+ def __init__(self, remainder: lt.Remainder = lt.Remainder.Float):
+ self.remainder = remainder
+
+ def distribute(self, domain: list, sectors: list) -> list:
+ size = domain / sectors
+ rem = domain % sectors
+
+ if rem == 0:
+ return [int(size)] * sectors
+
+ size = math.ceil(size)
+ rem = domain % size
+ output_sectors = [int(size)] * (sectors - 1)
+
+ if self.remainder == lt.Remainder.Float:
+ size = domain / sectors
+ output_sectors = [size] * sectors
+ elif self.remainder == lt.Remainder.DistributeFront:
+ output_sectors.append(int(rem))
+ elif self.remainder == lt.Remainder.DistributeBack:
+ output_sectors.insert(0, int(rem))
+ else:
+ raise ValueError
+
+ return output_sectors
+
+ def map(self, domain: tuple, rang: tuple, x: Number) -> Number:
+ m = (rang[1] - rang[0]) / (domain[1] - domain[0])
+ b = rang[0] - m * domain[0]
+ return m * x + b
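A short worked example of the remainder handling above, distributing 10 pixels into 4 sectors (run in this module's context, where lt and Linear are available):

print(Linear(lt.Remainder.Float).distribute(10, 4))            # [2.5, 2.5, 2.5, 2.5]
print(Linear(lt.Remainder.DistributeFront).distribute(10, 4))  # [3, 3, 3, 1]
print(Linear(lt.Remainder.DistributeBack).distribute(10, 4))   # [1, 3, 3, 3]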
diff --git a/utils/tuning/libtuning/image.py b/utils/tuning/libtuning/image.py
new file mode 100644
index 00000000..e2181b11
--- /dev/null
+++ b/utils/tuning/libtuning/image.py
@@ -0,0 +1,136 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# Container for an image and associated metadata
+
+import binascii
+import numpy as np
+from pathlib import Path
+import pyexiv2 as pyexif
+import rawpy as raw
+import re
+
+import libtuning as lt
+import libtuning.utils as utils
+
+
+class Image:
+ def __init__(self, path: Path):
+ self.path = path
+ self.lsc_only = False
+ self.color = -1
+ self.lux = -1
+
+ try:
+ self._load_metadata_exif()
+ except Exception as e:
+ utils.eprint(f'Failed to load metadata from {self.path}: {e}')
+ raise e
+
+ try:
+ self._read_image_dng()
+ except Exception as e:
+ utils.eprint(f'Failed to load image data from {self.path}: {e}')
+ raise e
+
+ @property
+ def name(self):
+ return self.path.name
+
+ # May raise KeyError as there are too many to check
+ def _load_metadata_exif(self):
+ # RawPy doesn't load all the image tags that we need, so we use py3exiv2
+ metadata = pyexif.ImageMetadata(str(self.path))
+ metadata.read()
+
+ # The DNG and TIFF/EP specifications use different IFDs to store the
+ # raw image data and the Exif tags. DNG stores them in a SubIFD and in
+ # an Exif IFD respectively (named "SubImage1" and "Photo" by pyexiv2),
+ # while TIFF/EP stores them both in IFD0 (named "Image"). Both are used
+ # in "DNG" files, with libcamera-apps following the DNG recommendation
+ # and applications based on picamera2 following TIFF/EP.
+ #
+ # This code detects which tags are being used, and therefore extracts the
+ # correct values.
+ try:
+ self.w = metadata['Exif.SubImage1.ImageWidth'].value
+ subimage = 'SubImage1'
+ photo = 'Photo'
+ except KeyError:
+ self.w = metadata['Exif.Image.ImageWidth'].value
+ subimage = 'Image'
+ photo = 'Image'
+ self.pad = 0
+ self.h = metadata[f'Exif.{subimage}.ImageLength'].value
+ white = metadata[f'Exif.{subimage}.WhiteLevel'].value
+ self.sigbits = int(white).bit_length()
+ self.fmt = (self.sigbits - 4) // 2
+ self.exposure = int(metadata[f'Exif.{photo}.ExposureTime'].value * 1000000)
+ self.againQ8 = metadata[f'Exif.{photo}.ISOSpeedRatings'].value * 256 / 100
+ self.againQ8_norm = self.againQ8 / 256
+ self.camName = metadata['Exif.Image.Model'].value
+ self.blacklevel = int(metadata[f'Exif.{subimage}.BlackLevel'].value[0])
+ self.blacklevel_16 = self.blacklevel << (16 - self.sigbits)
+
+ # Channel order depending on bayer pattern
+ # The key is the order given by exif, where 0 is R, 1 is G, and 2 is B
+ # The value is the index where the color can be found, where the first
+ # is R, then G, then G, then B.
+ bayer_case = {
+ '0 1 1 2': (lt.Color.R, lt.Color.GR, lt.Color.GB, lt.Color.B),
+ '1 2 0 1': (lt.Color.GB, lt.Color.R, lt.Color.B, lt.Color.GR),
+ '2 1 1 0': (lt.Color.B, lt.Color.GB, lt.Color.GR, lt.Color.R),
+ '1 0 2 1': (lt.Color.GR, lt.Color.R, lt.Color.B, lt.Color.GB)
+ }
+ # Note: This needs to be in IFD0
+ cfa_pattern = metadata[f'Exif.{subimage}.CFAPattern'].value
+ self.order = bayer_case[cfa_pattern]
+
+ def _read_image_dng(self):
+ raw_im = raw.imread(str(self.path))
+ raw_data = raw_im.raw_image
+ shift = 16 - self.sigbits
+ c0 = np.left_shift(raw_data[0::2, 0::2].astype(np.int64), shift)
+ c1 = np.left_shift(raw_data[0::2, 1::2].astype(np.int64), shift)
+ c2 = np.left_shift(raw_data[1::2, 0::2].astype(np.int64), shift)
+ c3 = np.left_shift(raw_data[1::2, 1::2].astype(np.int64), shift)
+ self.channels = [c0, c1, c2, c3]
+ # Reorder the channels into R, GR, GB, B
+ self.channels = [self.channels[i] for i in self.order]
+
+ # \todo Move this to macbeth.py
+ def get_patches(self, cen_coords, size=16):
+ saturated = False
+
+ # Obtain channel widths and heights
+ ch_w, ch_h = self.w, self.h
+ cen_coords = list(np.array((cen_coords[0])).astype(np.int32))
+ self.cen_coords = cen_coords
+
+ # Squares are ordered by stacking macbeth chart columns from left to
+ # right. Some useful patch indices:
+ # white = 3
+ # black = 23
+ # 'reds' = 9, 10
+ # 'blues' = 2, 5, 8, 20, 22
+ # 'greens' = 6, 12, 17
+ # greyscale = 3, 7, 11, 15, 19, 23
+ all_patches = []
+ for ch in self.channels:
+ ch_patches = []
+ for cen in cen_coords:
+ # Macbeth centre is placed at top left of central 2x2 patch to
+ # account for rounding. Patch pixels are sorted by pixel
+ # brightness so spatial information is lost.
+ patch = ch[cen[1] - 7:cen[1] + 9, cen[0] - 7:cen[0] + 9].flatten()
+ patch.sort()
+ if patch[-5] == (2**self.sigbits - 1) * 2**(16 - self.sigbits):
+ saturated = True
+ ch_patches.append(patch)
+
+ all_patches.append(ch_patches)
+
+ self.patches = all_patches
+
+ return not saturated
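A hypothetical usage sketch (the path is illustrative; loading requires a DNG carrying the Exif tags described above):

from pathlib import Path

image = Image(Path('/tmp/3000k_500l.dng'))
print(image.camName, image.w, image.h, image.exposure)
print(image.sigbits, image.blacklevel_16)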
diff --git a/utils/tuning/libtuning/libtuning.py b/utils/tuning/libtuning/libtuning.py
new file mode 100644
index 00000000..5e22288d
--- /dev/null
+++ b/utils/tuning/libtuning/libtuning.py
@@ -0,0 +1,208 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# An infrastructure for camera tuning tools
+
+import argparse
+
+import libtuning as lt
+import libtuning.utils as utils
+from libtuning.utils import eprint
+
+from enum import Enum, IntEnum
+
+
+class Color(IntEnum):
+ R = 0
+ GR = 1
+ GB = 2
+ B = 3
+
+
+class Debug(Enum):
+ Plot = 1
+
+
+# @brief What to do with the leftover pixels after dividing them into ALSC
+# sectors, when the division gradient is uniform
+# @var Float Force floating point division so all sectors divide equally
+# @var DistributeFront Divide the remainder equally among the existing
+# sectors (until it runs out), starting from the front
+# @var DistributeBack Same as DistributeFront but starting from the back
+class Remainder(Enum):
+ Float = 0
+ DistributeFront = 1
+ DistributeBack = 2
+
+
+# @brief A helper class to contain a default value for a module configuration
+# parameter
+class Param(object):
+ # @var Required The value contained in this instance is irrelevant, and the
+ # value must be provided by the tuning configuration file.
+ # @var Optional If the value is not provided by the tuning configuration
+ # file, then the value contained in this instance will be used instead.
+ # @var Hardcode The value contained in this instance will be used
+ class Mode(Enum):
+ Required = 0
+ Optional = 1
+ Hardcode = 2
+
+ # @param name Name of the parameter. Shall match the name used in the
+ # configuration file for the parameter
+ # @param required Whether or not a value is required in the config
+ # parameter of get_value()
+ # @param val Default value (only relevant if mode is Optional)
+ def __init__(self, name: str, required: Mode, val=None):
+ self.name = name
+ self.__required = required
+ self.val = val
+
+ def get_value(self, config: dict):
+ if self.__required is self.Mode.Hardcode:
+ return self.val
+
+ if self.__required is self.Mode.Required and self.name not in config:
+ raise ValueError(f'Parameter {self.name} is required but not provided in the configuration')
+
+ # Use the value from the configuration file when present, and fall back
+ # to the default otherwise (required parameters are guaranteed present
+ # by the check above)
+ return config[self.name] if self.name in config else self.val
+
+ @property
+ def required(self):
+ return self.__required is self.Mode.Required
+
+ # @brief Used by libtuning to auto-generate help information for the tuning
+ # script on the available parameters for the configuration file
+ # \todo Implement this
+ @property
+ def info(self):
+ raise NotImplementedError
+
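For example, a module might declare its configuration inputs as follows (parameter names are hypothetical):

# Required: the config file must provide it; the default is unused
do_color = Param('do_color', Param.Mode.Required)
# Optional: falls back to 16 if the config file does not specify it
sectors = Param('sectors', Param.Mode.Optional, val=16)

config = {'do_color': True}
print(do_color.get_value(config))  # True
print(sectors.get_value(config))   # 16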
+
+class Tuner(object):
+
+ # External functions
+
+ def __init__(self, platform_name):
+ self.name = platform_name
+ self.modules = []
+ self.parser = None
+ self.generator = None
+ self.output_order = []
+ self.config = {}
+ self.output = {}
+
+ def add(self, module):
+ self.modules.append(module)
+
+ def set_input_parser(self, parser):
+ self.parser = parser
+
+ def set_output_formatter(self, output):
+ self.generator = output
+
+ def set_output_order(self, modules):
+ self.output_order = modules
+
+ # @brief Convert classes in self.output_order to the instances in self.modules
+ def _prepare_output_order(self):
+ output_order = self.output_order
+ self.output_order = []
+ for module_type in output_order:
+ modules = [module for module in self.modules if module.type == module_type.type]
+ if len(modules) > 1:
+ eprint(f'Multiple modules found for module type "{module_type.type}"')
+ return False
+ if len(modules) < 1:
+ eprint(f'No module found for module type "{module_type.type}"')
+ return False
+ self.output_order.append(modules[0])
+
+ return True
+
+ # \todo Validate parser and generator at Tuner construction time?
+ def _validate_settings(self):
+ if self.parser is None:
+ eprint('Missing parser')
+ return False
+
+ if self.generator is None:
+ eprint('Missing generator')
+ return False
+
+ if len(self.modules) == 0:
+ eprint('No modules added')
+ return False
+
+ if len(self.output_order) != len(self.modules):
+ eprint('Number of outputs does not match number of modules')
+ return False
+
+ return True
+
+ def _process_args(self, argv, platform_name):
+ parser = argparse.ArgumentParser(description=f'Camera Tuning for {platform_name}')
+ parser.add_argument('-i', '--input', type=str, required=True,
+ help='''Directory containing calibration images (required).
+ Images for ALSC must be named "alsc_{Color Temperature}k_1[u].dng",
+ and all other images must be named "{Color Temperature}k_{Lux Level}l.dng"''')
+ parser.add_argument('-o', '--output', type=str, required=True,
+ help='Output file (required)')
+ # It is not our duty to scan all modules to figure out their default
+ # options, so simply return an empty configuration if none is provided.
+ parser.add_argument('-c', '--config', type=str, default='',
+ help='Config file (optional)')
+ # \todo Check if we really need this or if stderr is good enough, or if
+ # we want a better logging infrastructure with log levels
+ parser.add_argument('-l', '--log', type=str, default=None,
+ help='Output log file (optional)')
+ return parser.parse_args(argv[1:])
+
+ def run(self, argv):
+ args = self._process_args(argv, self.name)
+ if args is None:
+ return -1
+
+ if not self._validate_settings():
+ return -1
+
+ if not self._prepare_output_order():
+ return -1
+
+ if len(args.config) > 0:
+ self.config, disable = self.parser.parse(args.config, self.modules)
+ else:
+ self.config = {'general': {}}
+ disable = []
+
+ # Remove disabled modules
+ for module in disable:
+ if module in self.modules:
+ self.modules.remove(module)
+
+ for module in self.modules:
+ if not module.validate_config(self.config):
+ eprint(f'Config is invalid for module {module.type}')
+ return -1
+
+ has_lsc = any(isinstance(m, lt.modules.lsc.LSC) for m in self.modules)
+ # Only one LSC module allowed
+ has_only_lsc = has_lsc and len(self.modules) == 1
+
+ images = utils.load_images(args.input, self.config, not has_only_lsc, has_lsc)
+ if images is None or len(images) == 0:
+ eprint('No images were found, or none could be loaded')
+ return -1
+
+ # Do the tuning
+ for module in self.modules:
+ out = module.process(self.config, images, self.output)
+ if out is None:
+ eprint(f'Module {module.name} failed to process, aborting')
+ break
+ self.output[module] = out
+
+ self.generator.write(args.output, self.output, self.output_order)
+
+ return 0
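Putting it together, a platform tuning script might look roughly like this sketch (the module and parser are hypothetical stand-ins; only the generators shown in this series are assumed to exist):

import sys

import libtuning as lt
from libtuning.generators import YamlOutput
from libtuning.parsers import YamlParser  # assumed; parsers are not shown here


class ExampleModule(object):
    # Minimal no-op module, just to show the wiring
    type = 'example'
    out_name = 'Example'

    def validate_config(self, config):
        return True

    def process(self, config, images, outputs):
        return {'fixed': 1.0}


tuner = lt.Tuner('ExamplePlatform')
tuner.add(ExampleModule())
tuner.set_input_parser(YamlParser())
tuner.set_output_formatter(YamlOutput())
tuner.set_output_order([ExampleModule])
sys.exit(tuner.run(sys.argv))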
diff --git a/utils/tuning/libtuning/macbeth.py b/utils/tuning/libtuning/macbeth.py
new file mode 100644
index 00000000..e1182464
--- /dev/null
+++ b/utils/tuning/libtuning/macbeth.py
@@ -0,0 +1,516 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# Locate and extract Macbeth charts from images
+# (Copied from: ctt_macbeth_locator.py)
+
+# \todo Add debugging
+
+import cv2
+import os
+from pathlib import Path
+import numpy as np
+import warnings
+
+# The clustering step and error reporting below need these; they were missing
+# from this excerpt's imports (sklearn is an assumed dependency here)
+from sklearn import cluster
+
+from libtuning.image import Image
+from libtuning.utils import eprint
+
+
+# Minimal assumed definition of the error type raised and caught by the locator
+class MacbethError(Exception):
+ pass
+
+
+# Reshape image to a fixed width without distortion; returns the image and
+# the scale factor
+def reshape(img, width):
+ factor = width / img.shape[0]
+ return cv2.resize(img, None, fx=factor, fy=factor), factor
+
+
+# Correlation function to quantify match
+def correlate(im1, im2):
+ f1 = im1.flatten()
+ f2 = im2.flatten()
+ cor = np.corrcoef(f1, f2)
+ return cor[0][1]
+
+
+# @brief Compute coordinates of macbeth chart vertices and square centres
+# @return (max_cor, best_map_col_norm, fit_coords, success)
+#
+# Also returns an error/success message for debugging purposes. Additionally,
+# it scores the match with a confidence value.
+#
+# Brief explanation of the macbeth chart locating algorithm:
+# - Find rectangles within image
+# - Take rectangles within percentage offset of median perimeter. The
+# assumption is that these will be the macbeth squares
+# - For each potential square, find the 24 possible macbeth centre locations
+# that would produce a square in that location
+# - Find clusters of potential macbeth chart centres to find the potential
+# macbeth centres with the most votes, i.e. the most likely ones
+# - For each potential macbeth centre, use the centres of the squares that
+# voted for it to find macbeth chart corners
+# - For each set of corners, transform the possible match into normalised
+# space and correlate with a reference chart to evaluate the match
+# - Select the highest correlation as the macbeth chart match, returning the
+# correlation as the confidence score
+#
+# \todo Clean this up
+def get_macbeth_chart(img, ref_data):
+ ref, ref_w, ref_h, ref_corns = ref_data
+
+ # The code will raise and catch a MacbethError in case of a problem, trying
+ # to give some likely reasons why the problem occurred, hence the try/except
+ try:
+ # Obtain image, convert to grayscale and normalise
+ src = img
+ src, factor = reshape(src, 200)
+ original = src.copy()
+ a = 125 / np.average(src)
+ src_norm = cv2.convertScaleAbs(src, alpha=a, beta=0)
+
+ # This code checks if there are separate colour channels. In the past the
+ # macbeth locator ran on jpgs and this makes it robust to different
+ # filetypes. Note that running it on a jpg has 4x the pixels of the
+ # average bayer channel so coordinates must be doubled.
+
+ # This is best done in img_load.py in the get_patches method. The
+ # coordinates and image width, height must be divided by two if the
+ # macbeth locator has been run on a demosaicked image.
+ if len(src_norm.shape) == 3:
+ src_bw = cv2.cvtColor(src_norm, cv2.COLOR_BGR2GRAY)
+ else:
+ src_bw = src_norm
+ original_bw = src_bw.copy()
+
+ # Obtain image edges
+ sigma = 2
+ src_bw = cv2.GaussianBlur(src_bw, (0, 0), sigma)
+ t1, t2 = 50, 100
+ edges = cv2.Canny(src_bw, t1, t2)
+
+ # Dilate edges to prevent self-intersections in contours
+ k_size = 2
+ kernel = np.ones((k_size, k_size))
+ its = 1
+ edges = cv2.dilate(edges, kernel, iterations=its)
+
+ # Find contours in image
+ conts, _ = cv2.findContours(edges, cv2.RETR_TREE,
+ cv2.CHAIN_APPROX_NONE)
+ if len(conts) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo contours found in image\n'
+ 'Possible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ )
+
+ # Find quadrilateral contours
+ epsilon = 0.07
+ conts_per = []
+ for i in range(len(conts)):
+ per = cv2.arcLength(conts[i], True)
+ poly = cv2.approxPolyDP(conts[i], epsilon * per, True)
+ if len(poly) == 4 and cv2.isContourConvex(poly):
+ conts_per.append((poly, per))
+
+ if len(conts_per) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo quadrilateral contours found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+
+ # Sort contours by perimeter and get perimeters within percent of median
+ conts_per = sorted(conts_per, key=lambda x: x[1])
+ med_per = conts_per[int(len(conts_per) / 2)][1]
+ side = med_per / 4
+ perc = 0.1
+ med_low, med_high = med_per * (1 - perc), med_per * (1 + perc)
+ squares = []
+ for i in conts_per:
+ if med_low <= i[1] <= med_high:
+ squares.append(i[0])
+
+ # Obtain coordinates of normalised macbeth and squares
+ square_verts, mac_norm = get_square_verts(0.06)
+ # For each square guess, find 24 possible macbeth chart centres
+ mac_mids = []
+ squares_raw = []
+ for i in range(len(squares)):
+ square = squares[i]
+ squares_raw.append(square)
+
+ # Convert quads to rotated rectangles. This is required as the
+ # 'squares' are usually quite irregular quadrilaterals, so
+ # performing a transform would result in exaggerated warping and
+ # inaccurate macbeth chart centre placement
+ rect = cv2.minAreaRect(square)
+ square = cv2.boxPoints(rect).astype(np.float32)
+
+ # Reorder vertices to prevent 'hourglass shape'
+ square = sorted(square, key=lambda x: x[0])
+ square_1 = sorted(square[:2], key=lambda x: x[1])
+ square_2 = sorted(square[2:], key=lambda x: -x[1])
+ square = np.array(np.concatenate((square_1, square_2)), np.float32)
+ square = np.reshape(square, (4, 2)).astype(np.float32)
+ squares[i] = square
+
+ # Find 24 possible macbeth chart centres by transforming normalised
+ # macbeth square vertices onto candidate square vertices found in image
+ for j in range(len(square_verts)):
+ verts = square_verts[j]
+ p_mat = cv2.getPerspectiveTransform(verts, square)
+ mac_guess = cv2.perspectiveTransform(mac_norm, p_mat)
+ mac_guess = np.round(mac_guess).astype(np.int32)
+
+ mac_mid = np.mean(mac_guess, axis=1)
+ mac_mids.append([mac_mid, (i, j)])
+
+ if len(mac_mids) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo possible macbeth charts found within image'
+ '\nPossible problems:\n'
+ '- Part of the macbeth chart is outside the image\n'
+ '- Quadrilaterals in image background\n'
+ )
+
+ # Reshape data
+ for i in range(len(mac_mids)):
+ mac_mids[i][0] = mac_mids[i][0][0]
+
+ # Find where midpoints cluster to identify most likely macbeth centres
+ clustering = cluster.AgglomerativeClustering(
+ n_clusters=None,
+ compute_full_tree=True,
+ distance_threshold=side * 2
+ )
+ mac_mids_list = [x[0] for x in mac_mids]
+
+ if len(mac_mids_list) == 1:
+ # Special case of only one valid centre found (probably not needed)
+ clus_list = []
+ clus_list.append([mac_mids, len(mac_mids)])
+
+ else:
+ clustering.fit(mac_mids_list)
+
+ # Create list of all clusters
+ clus_list = []
+ if clustering.n_clusters_ > 1:
+ for i in range(clustering.labels_.max() + 1):
+ indices = [j for j, x in enumerate(clustering.labels_) if x == i]
+ clus = []
+ for index in indices:
+ clus.append(mac_mids[index])
+ clus_list.append([clus, len(clus)])
+ clus_list.sort(key=lambda x: -x[1])
+
+ elif clustering.n_clusters_ == 1:
+ # Special case of only one cluster found
+ clus_list.append([mac_mids, len(mac_mids)])
+ else:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo clusters found'
+ '\nPossible problems:\n'
+ '- NA\n'
+ )
+
+ # Keep only clusters with enough votes
+ clus_len_max = clus_list[0][1]
+ clus_tol = 0.7
+ for i in range(len(clus_list)):
+ if clus_list[i][1] < clus_len_max * clus_tol:
+ clus_list = clus_list[:i]
+ break
+ cent = np.mean(clus_list[i][0], axis=0)[0]
+ clus_list[i].append(cent)
+
+ # Get centres of each normalised square
+ reference = get_square_centres(0.06)
+
+ # For each possible macbeth chart, transform image into
+ # normalised space and find correlation with reference
+ max_cor = 0
+ best_map = None
+ best_fit = None
+ best_cen_fit = None
+ best_ref_mat = None
+
+ for clus in clus_list:
+ clus = clus[0]
+ sq_cents = []
+ ref_cents = []
+ i_list = [p[1][0] for p in clus]
+ for point in clus:
+ i, j = point[1]
+
+ # Remove any square that voted for two different points within
+ # the same cluster. This causes the same point in the image to be
+ # mapped to two different reference square centres, resulting in
+ # a very distorted perspective transform since cv2.findHomography
+ # simply minimises error.
+ # This phenomenon is not particularly likely to occur due to the
+ # enforced distance threshold in the clustering fit but it is
+ # best to keep this in just in case.
+ if i_list.count(i) == 1:
+ square = squares_raw[i]
+ sq_cent = np.mean(square, axis=0)
+ ref_cent = reference[j]
+ sq_cents.append(sq_cent)
+ ref_cents.append(ref_cent)
+
+ # At least four squares need to have voted for a centre in
+ # order for a transform to be found
+ if len(sq_cents) < 4:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNot enough squares found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is too dark or bright\n'
+ )
+
+ ref_cents = np.array(ref_cents)
+ sq_cents = np.array(sq_cents)
+
+ # Find best fit transform from normalised centres to image
+ h_mat, mask = cv2.findHomography(ref_cents, sq_cents)
+ if h_mat is None:
+ raise MacbethError(
+ '\nERROR\n'
+ )
+
+ # Transform normalised corners and centres into image space
+ mac_fit = cv2.perspectiveTransform(mac_norm, h_mat)
+ mac_cen_fit = cv2.perspectiveTransform(np.array([reference]), h_mat)
+
+ # Transform located corners into reference space
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+
+ # Normalise brightness
+ a = 125 / np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+
+ # Find correlation with bw reference macbeth
+ cor = correlate(map_to_ref, ref)
+
+ # Keep only if best correlation
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit
+ best_cen_fit = mac_cen_fit
+ best_ref_mat = ref_mat
+
+ # Rotate macbeth by pi and recorrelate in case macbeth chart is
+ # upside-down
+ mac_fit_inv = np.array(
+ ([[mac_fit[0][2], mac_fit[0][3],
+ mac_fit[0][0], mac_fit[0][1]]])
+ )
+ mac_cen_fit_inv = np.flip(mac_cen_fit, axis=1)
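+        # Swapping the corner order (0, 1, 2, 3) -> (2, 3, 0, 1) pairs each
+        # detected corner with the reference corner diagonally opposite it,
+        # which is equivalent to rotating the fitted chart by 180 degrees.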
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit_inv,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+ a = 125 / np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+ cor = correlate(map_to_ref, ref)
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit_inv
+ best_cen_fit = mac_cen_fit_inv
+ best_ref_mat = ref_mat
+
+ # Check best match is above threshold
+ cor_thresh = 0.6
+ if max_cor < cor_thresh:
+ raise MacbethError(
+ '\nWARNING: Correlation too low'
+ '\nPossible problems:\n'
+ '- Bad lighting conditions\n'
+ '- Macbeth chart is occluded\n'
+ '- Background is too noisy\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+
+ # Represent coloured macbeth in reference space
+ best_map_col = cv2.warpPerspective(
+ original, best_ref_mat, (ref_w, ref_h)
+ )
+ best_map_col = cv2.resize(
+ best_map_col, None, fx=4, fy=4
+ )
+ a = 125 / np.average(best_map_col)
+ best_map_col_norm = cv2.convertScaleAbs(
+ best_map_col, alpha=a, beta=0
+ )
+
+ # Rescale coordinates to original image size
+ fit_coords = (best_fit / factor, best_cen_fit / factor)
+
+ return (max_cor, best_map_col_norm, fit_coords, True)
+
+ # Catch macbeth errors and continue with code
+ except MacbethError as error:
+ eprint(error)
+ return (0, None, None, False)
+
+
+def find_macbeth(img, mac_config):
+ small_chart = mac_config['small']
+ show = mac_config['show']
+
+ # Catch the warnings
+ warnings.simplefilter("ignore")
+ warnings.warn("runtime", RuntimeWarning)
+
+ # Reference macbeth chart is created that will be correlated with the
+ # located macbeth chart guess to produce a confidence value for the match.
+ script_dir = Path(os.path.realpath(os.path.dirname(__file__)))
+ macbeth_ref_path = script_dir.joinpath('macbeth_ref.pgm')
+ ref = cv2.imread(str(macbeth_ref_path), flags=cv2.IMREAD_GRAYSCALE)
+ ref_w = 120
+ ref_h = 80
+ rc1 = (0, 0)
+ rc2 = (0, ref_h)
+ rc3 = (ref_w, ref_h)
+ rc4 = (ref_w, 0)
+ ref_corns = np.array((rc1, rc2, rc3, rc4), np.float32)
+ ref_data = (ref, ref_w, ref_h, ref_corns)
+
+ # Locate macbeth chart
+ cor, mac, coords, ret = get_macbeth_chart(img, ref_data)
+
+    # The following bits of code try to fix common problems with simple
+    # techniques. If at any point the best correlation rises above 0.75,
+    # nothing more is tried, as this is a high enough confidence to ensure
+    # reliable macbeth square centre placement.
+
+ for brightness in [2, 4]:
+ if cor >= 0.75:
+ break
+ img_br = cv2.convertScaleAbs(img, alpha=brightness, beta=0)
+ cor_b, mac_b, coords_b, ret_b = get_macbeth_chart(img_br, ref_data)
+ if cor_b > cor:
+ cor, mac, coords, ret = cor_b, mac_b, coords_b, ret_b
+
+ # In case macbeth chart is too small, take a selection of the image and
+ # attempt to locate macbeth chart within that. The scale increment is
+ # root 2
+
+ # These variables will be used to transform the found coordinates at
+ # smaller scales back into the original. If ii is still -1 after this
+ # section that means it was not successful
+ ii = -1
+ w_best = 0
+ h_best = 0
+ d_best = 100
+
+    # d_best records the scale of the best match. Macbeth charts are only
+    # searched for at one scale increment smaller than the current best match,
+    # in order to avoid unnecessarily searching for macbeth charts at small
+    # scales. If a macbeth chart has already been found then set d_best to 0.
+ if cor != 0:
+ d_best = 0
+
+ for index, pair in enumerate([{'sel': 2 / 3, 'inc': 1 / 6},
+ {'sel': 1 / 2, 'inc': 1 / 8},
+ {'sel': 1 / 3, 'inc': 1 / 12},
+ {'sel': 1 / 4, 'inc': 1 / 16}]):
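+        # Worked example: with sel=1/2 and inc=1/8 the loop count below is
+        # ((1 - 1/2) / (1/8)) + 1 = 5, i.e. a 5x5 grid of half-size windows,
+        # each stepped by an eighth of the image along each axis.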
+ if cor >= 0.75:
+ break
+
+        # Check if we need to look for macbeth charts at even smaller scales.
+        # This slows the code down significantly and has therefore been
+        # disabled by default; however, it is not unusably slow, so it might
+        # be useful if the macbeth chart is too small to be picked up by the
+        # current subselections. Use this for macbeth charts with side
+        # lengths around 1/5 of the image dimensions (and smaller...?). It
+        # is, however, recommended that macbeth charts take up as large a
+        # proportion of the image as possible.
+ if index >= 2 and (not small_chart or d_best <= index - 1):
+ break
+
+ w, h = list(img.shape[:2])
+ # Set dimensions of the subselection and the step along each axis
+ # between selections
+ w_sel = int(w * pair['sel'])
+ h_sel = int(h * pair['sel'])
+ w_inc = int(w * pair['inc'])
+ h_inc = int(h * pair['inc'])
+
+        loop = int(((1 - pair['sel']) / pair['inc']) + 1)
+ # For each subselection, look for a macbeth chart
+ for i in range(loop):
+ for j in range(loop):
+ w_s, h_s = i * w_inc, j * h_inc
+ img_sel = img[w_s:w_s + w_sel, h_s:h_s + h_sel]
+ cor_ij, mac_ij, coords_ij, ret_ij = get_macbeth_chart(img_sel, ref_data)
+
+ # If the correlation is better than the best then record the
+ # scale and current subselection at which macbeth chart was
+ # found. Also record the coordinates, macbeth chart and message.
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, ret = mac_ij, coords_ij, ret_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = index + 1
+
+ # Transform coordinates from subselection to original image
+ if ii != -1:
+ for a in range(len(coords)):
+ for b in range(len(coords[a][0])):
+ coords[a][0][b][1] += ii * w_best
+ coords[a][0][b][0] += jj * h_best
+
+ if not ret:
+ return None
+
+ coords_fit = coords
+ if cor < 0.75:
+        # img is a plain array at this point, so there is no path to report
+        eprint(f'Warning: Low confidence {cor:.3f} for macbeth chart')
+
+ if show:
+ draw_macbeth_results(img, coords_fit)
+
+ return coords_fit
+
+
+def locate_macbeth(image: Image, config: dict):
+ # Find macbeth centres
+ av_chan = (np.mean(np.array(image.channels), axis=0) / (2**16))
+ av_val = np.mean(av_chan)
+ if av_val < image.blacklevel_16 / (2**16) + 1 / 64:
+ eprint(f'Image {image.path.name} too dark')
+ return None
+
+ macbeth = find_macbeth(av_chan, config['general']['macbeth'])
+
+ if macbeth is None:
+ eprint(f'No macbeth chart found in {image.path.name}')
+ return None
+
+ mac_cen_coords = macbeth[1]
+ if not image.get_patches(mac_cen_coords):
+ eprint(f'Macbeth patches have saturated in {image.path.name}')
+ return None
+
+ return macbeth
diff --git a/utils/tuning/libtuning/macbeth_ref.pgm b/utils/tuning/libtuning/macbeth_ref.pgm
new file mode 100644
index 00000000..37897140
--- /dev/null
+++ b/utils/tuning/libtuning/macbeth_ref.pgm
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: BSD-2-Clause
+P5
+# Reference macbeth chart
+120 80
+255
+[... 3 lines of binary PGM raster data (120x80, 8-bit greyscale reference chart) omitted ...]
diff --git a/utils/tuning/libtuning/modules/__init__.py b/utils/tuning/libtuning/modules/__init__.py
new file mode 100644
index 00000000..9ccabb0e
--- /dev/null
+++ b/utils/tuning/libtuning/modules/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
diff --git a/utils/tuning/libtuning/modules/lsc/__init__.py b/utils/tuning/libtuning/modules/lsc/__init__.py
new file mode 100644
index 00000000..0ba4411b
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/__init__.py
@@ -0,0 +1,7 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.modules.lsc.lsc import LSC
+from libtuning.modules.lsc.raspberrypi import ALSCRaspberryPi
+from libtuning.modules.lsc.rkisp1 import LSCRkISP1
diff --git a/utils/tuning/libtuning/modules/lsc/lsc.py b/utils/tuning/libtuning/modules/lsc/lsc.py
new file mode 100644
index 00000000..344a07a3
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/lsc.py
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from ..module import Module
+
+import libtuning as lt
+import libtuning.utils as utils
+
+import numpy as np
+
+
+class LSC(Module):
+ type = 'lsc'
+ hr_name = 'LSC (Base)'
+ out_name = 'GenericLSC'
+
+ def __init__(self, *,
+ debug: list,
+ sector_shape: tuple,
+ sector_x_gradient: lt.Gradient,
+ sector_y_gradient: lt.Gradient,
+ sector_average_function: lt.Average,
+ smoothing_function: lt.Smoothing):
+ super().__init__()
+
+ self.debug = debug
+
+ self.sector_shape = sector_shape
+ self.sector_x_gradient = sector_x_gradient
+ self.sector_y_gradient = sector_y_gradient
+ self.sector_average_function = sector_average_function
+
+ self.smoothing_function = smoothing_function
+
+ def _enumerate_lsc_images(self, images):
+ for image in images:
+ if image.lsc_only:
+ yield image
+
+ def _get_grid(self, channel, img_w, img_h):
+ # List of number of pixels in each sector
+ sectors_x = self.sector_x_gradient.distribute(img_w / 2, self.sector_shape[0])
+ sectors_y = self.sector_y_gradient.distribute(img_h / 2, self.sector_shape[1])
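+        # Each channel is a single CFA plane and therefore half the full
+        # image size in each dimension, hence img_w / 2 and img_h / 2. As an
+        # illustration, with sector_shape=(16, 12) and a 1280-pixel-wide
+        # image, distribute() splits the 640 plane columns into 16 sector
+        # widths of 40 pixels each (assuming an even split; the gradient
+        # decides how any remainder is distributed).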
+
+ grid = []
+
+ r = 0
+ for y in sectors_y:
+ c = 0
+ for x in sectors_x:
+ grid.append(self.sector_average_function.average(channel[r:r + y, c:c + x]))
+ c += x
+ r += y
+
+ return np.array(grid)
+
+    def _lsc_single_channel(self, channel: np.ndarray,
+                            image: lt.Image, green_grid: np.ndarray = None):
+ grid = self._get_grid(channel, image.w, image.h)
+ grid -= image.blacklevel_16
+ if green_grid is None:
+ table = np.reshape(1 / grid, self.sector_shape[::-1])
+ else:
+ table = np.reshape(green_grid / grid, self.sector_shape[::-1])
+ table = self.smoothing_function.smoothing(table)
+
+ if green_grid is None:
+ table = table / np.min(table)
+
+ return table, grid
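+    # Worked example of the ratio table above: a sector whose green average
+    # is 200 and whose red average is 100 (after black level subtraction)
+    # yields a raw red gain entry of 200 / 100 = 2.0 before smoothing. The
+    # green table itself is 1 / grid, normalised so that its smallest gain
+    # is 1.0.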
diff --git a/utils/tuning/libtuning/modules/lsc/raspberrypi.py b/utils/tuning/libtuning/modules/lsc/raspberrypi.py
new file mode 100644
index 00000000..f19c7163
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/raspberrypi.py
@@ -0,0 +1,246 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# ALSC module for tuning Raspberry Pi
+
+from .lsc import LSC
+
+import libtuning as lt
+import libtuning.utils as utils
+
+from numbers import Number
+import numpy as np
+
+
+class ALSCRaspberryPi(LSC):
+ # Override the type name so that the parser can match the entry in the
+ # config file.
+ type = 'alsc'
+ hr_name = 'ALSC (Raspberry Pi)'
+ out_name = 'rpi.alsc'
+ compatible = ['raspberrypi']
+
+ def __init__(self, *,
+ do_color: lt.Param,
+ luminance_strength: lt.Param,
+ **kwargs):
+ super().__init__(**kwargs)
+
+ self.do_color = do_color
+ self.luminance_strength = luminance_strength
+
+ self.output_range = (0, 3.999)
+
+ def validate_config(self, config: dict) -> bool:
+ if self not in config:
+ utils.eprint(f'{self.type} not in config')
+ return False
+
+ valid = True
+
+ conf = config[self]
+
+ lum_key = self.luminance_strength.name
+ color_key = self.do_color.name
+
+ if lum_key not in conf and self.luminance_strength.required:
+ utils.eprint(f'{lum_key} is not in config')
+ valid = False
+
+ if lum_key in conf and (conf[lum_key] < 0 or conf[lum_key] > 1):
+ utils.eprint(f'Warning: {lum_key} is not in range [0, 1]; defaulting to 0.5')
+
+ if color_key not in conf and self.do_color.required:
+ utils.eprint(f'{color_key} is not in config')
+ valid = False
+
+ return valid
+
+ # @return Image color temperature, flattened array of red calibration table
+ # (containing {sector size} elements), flattened array of blue
+ # calibration table, flattened array of green calibration
+ # table
+
+ def _do_single_alsc(self, image: lt.Image, do_alsc_colour: bool):
+ average_green = np.mean((image.channels[lt.Color.GR:lt.Color.GB + 1]), axis=0)
+
+ cg, g = self._lsc_single_channel(average_green, image)
+
+ if not do_alsc_colour:
+ return image.color, None, None, cg.flatten()
+
+ cr, _ = self._lsc_single_channel(image.channels[lt.Color.R], image, g)
+ cb, _ = self._lsc_single_channel(image.channels[lt.Color.B], image, g)
+
+ # \todo implement debug
+
+ return image.color, cr.flatten(), cb.flatten(), cg.flatten()
+
+ # @return Red shading table, Blue shading table, Green shading table,
+ # number of images processed
+
+    def _do_all_alsc(self, images: list, do_alsc_colour: bool, general_conf: dict) -> (list, list, list, int):
+ # List of colour temperatures
+ list_col = []
+ # Associated calibration tables
+ list_cr = []
+ list_cb = []
+ list_cg = []
+ count = 0
+ for image in self._enumerate_lsc_images(images):
+ col, cr, cb, cg = self._do_single_alsc(image, do_alsc_colour)
+ list_col.append(col)
+ list_cr.append(cr)
+ list_cb.append(cb)
+ list_cg.append(cg)
+ count += 1
+
+ # Convert to numpy array for data manipulation
+ list_col = np.array(list_col)
+ list_cr = np.array(list_cr)
+ list_cb = np.array(list_cb)
+ list_cg = np.array(list_cg)
+
+ cal_cr_list = []
+ cal_cb_list = []
+
+        # Note: the calculation of the average corners and centre of the
+        # shading tables, which ctt performed, has been removed here as it
+        # was unused
+
+ # Average all values for luminance shading and return one table for all temperatures
+ lum_lut = list(np.round(np.mean(list_cg, axis=0), 3))
+
+ if not do_alsc_colour:
+ return None, None, lum_lut, count
+
+ for ct in sorted(set(list_col)):
+ # Average tables for the same colour temperature
+ indices = np.where(list_col == ct)
+ ct = int(ct)
+ t_r = np.round(np.mean(list_cr[indices], axis=0), 3)
+ t_b = np.round(np.mean(list_cb[indices], axis=0), 3)
+
+ cr_dict = {
+ 'ct': ct,
+ 'table': list(t_r)
+ }
+ cb_dict = {
+ 'ct': ct,
+ 'table': list(t_b)
+ }
+ cal_cr_list.append(cr_dict)
+ cal_cb_list.append(cb_dict)
+
+ return cal_cr_list, cal_cb_list, lum_lut, count
+
+ # @brief Calculate sigma from two adjacent gain tables
+ def _calcSigma(self, g1, g2):
+ g1 = np.reshape(g1, self.sector_shape[::-1])
+ g2 = np.reshape(g2, self.sector_shape[::-1])
+
+ # Apply gains to gain table
+ gg = g1 / g2
+ if np.mean(gg) < 1:
+ gg = 1 / gg
+
+ # For each internal patch, compute average difference between it and
+ # its 4 neighbours, then append to list
+ diffs = []
+ for i in range(self.sector_shape[1] - 2):
+ for j in range(self.sector_shape[0] - 2):
+ # Indexing is incremented by 1 since all patches on borders are
+ # not counted
+ diff = np.abs(gg[i + 1][j + 1] - gg[i][j + 1])
+ diff += np.abs(gg[i + 1][j + 1] - gg[i + 2][j + 1])
+ diff += np.abs(gg[i + 1][j + 1] - gg[i + 1][j])
+ diff += np.abs(gg[i + 1][j + 1] - gg[i + 1][j + 2])
+ diffs.append(diff / 4)
+
+ mean_diff = np.mean(diffs)
+ return np.round(mean_diff, 5)
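+    # Worked example: if every interior patch of gg differed from each of
+    # its four neighbours by 0.01, each diff would sum to 0.04, giving
+    # diff / 4 = 0.01 and hence a sigma of 0.01. A perfectly flat gain
+    # ratio gives a sigma of 0.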
+
+ # @brief Obtains sigmas for red and blue, effectively a measure of the
+ # 'error'
+ def _get_sigma(self, cal_cr_list, cal_cb_list):
+        # Provided the colour alsc tables were generated for at least two
+        # different colour temperatures, sigma is calculated by comparing two
+        # calibration tables adjacent in colour temperature.
+
+ color_temps = [cal['ct'] for cal in cal_cr_list]
+
+ # Calculate sigmas for each adjacent color_temps and return worst one
+ sigma_rs = []
+ sigma_bs = []
+ for i in range(len(color_temps) - 1):
+ sigma_rs.append(self._calcSigma(cal_cr_list[i]['table'], cal_cr_list[i + 1]['table']))
+ sigma_bs.append(self._calcSigma(cal_cb_list[i]['table'], cal_cb_list[i + 1]['table']))
+
+ # Return maximum sigmas, not necessarily from the same colour
+ # temperature interval
+ sigma_r = max(sigma_rs) if sigma_rs else 0.005
+ sigma_b = max(sigma_bs) if sigma_bs else 0.005
+
+ return sigma_r, sigma_b
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {
+ 'omega': 1.3,
+ 'n_iter': 100,
+ 'luminance_strength': 0.7
+ }
+
+ conf = config[self]
+ general_conf = config['general']
+
+ do_alsc_colour = self.do_color.get_value(conf)
+
+ # \todo I have no idea where this input parameter is used
+ luminance_strength = self.luminance_strength.get_value(conf)
+ if luminance_strength < 0 or luminance_strength > 1:
+ luminance_strength = 0.5
+
+ output['luminance_strength'] = luminance_strength
+
+        # \todo Validate images from greyscale camera and force greyscale mode
+ # \todo Debug functionality
+
+ alsc_out = self._do_all_alsc(images, do_alsc_colour, general_conf)
+ # \todo Handle the second green lut
+ cal_cr_list, cal_cb_list, luminance_lut, count = alsc_out
+
+ if not do_alsc_colour:
+ output['luminance_lut'] = luminance_lut
+ output['n_iter'] = 0
+ return output
+
+ output['calibrations_Cr'] = cal_cr_list
+ output['calibrations_Cb'] = cal_cb_list
+ output['luminance_lut'] = luminance_lut
+
+        # The sigmas determine the strength of the adaptive algorithm, which
+        # cleans up any lens shading that has slipped through the alsc. These
+        # are determined by measuring a 'worst-case' difference between two
+        # alsc tables that are adjacent in colour space. If, however, only
+        # one colour temperature has been provided, then this difference
+        # cannot be computed as only one table is available.
+        # To determine the sigmas you would have to estimate the error of an
+        # alsc table with only the image it was taken on as a check. To avoid
+        # this circularity, default exaggerated sigmas are used instead, which
+        # can result in too much alsc and is therefore not advised.
+        # In general, just take another alsc picture at another colour
+        # temperature!
+
+ if count == 1:
+ output['sigma'] = 0.005
+ output['sigma_Cb'] = 0.005
+ utils.eprint('Warning: Only one alsc calibration found; standard sigmas used for adaptive algorithm.')
+ return output
+
+ # Obtain worst-case scenario residual sigmas
+ sigma_r, sigma_b = self._get_sigma(cal_cr_list, cal_cb_list)
+ output['sigma'] = np.round(sigma_r, 5)
+ output['sigma_Cb'] = np.round(sigma_b, 5)
+
+ return output
diff --git a/utils/tuning/libtuning/modules/lsc/rkisp1.py b/utils/tuning/libtuning/modules/lsc/rkisp1.py
new file mode 100644
index 00000000..20406e43
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lsc/rkisp1.py
@@ -0,0 +1,112 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# LSC module for tuning rkisp1
+
+from .lsc import LSC
+
+import libtuning as lt
+import libtuning.utils as utils
+
+from numbers import Number
+import numpy as np
+
+
+class LSCRkISP1(LSC):
+ hr_name = 'LSC (RkISP1)'
+ out_name = 'LensShadingCorrection'
+ # \todo Not sure if this is useful. Probably will remove later.
+ compatible = ['rkisp1']
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't actually need anything from the config file
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ # @return Image color temperature, flattened array of red calibration table
+ # (containing {sector size} elements), flattened array of blue
+ # calibration table, flattened array of (red's) green calibration
+ # table, flattened array of (blue's) green calibration table
+
+ def _do_single_lsc(self, image: lt.Image):
+ cgr, gr = self._lsc_single_channel(image.channels[lt.Color.GR], image)
+ cgb, gb = self._lsc_single_channel(image.channels[lt.Color.GB], image)
+
+        # \todo Should these be ratioed against the average of both greens,
+        # or just against each green as done here?
+ cr, _ = self._lsc_single_channel(image.channels[lt.Color.R], image, gr)
+ cb, _ = self._lsc_single_channel(image.channels[lt.Color.B], image, gb)
+
+ return image.color, cr.flatten(), cb.flatten(), cgr.flatten(), cgb.flatten()
+
+ # @return List of dictionaries of color temperature, red table, red's green
+ # table, blue's green table, and blue table
+
+ def _do_all_lsc(self, images: list) -> list:
+ output_list = []
+ output_map_func = lt.gradient.Linear().map
+
+ # List of colour temperatures
+ list_col = []
+ # Associated calibration tables
+ list_cr = []
+ list_cb = []
+ list_cgr = []
+ list_cgb = []
+ for image in self._enumerate_lsc_images(images):
+ col, cr, cb, cgr, cgb = self._do_single_lsc(image)
+ list_col.append(col)
+ list_cr.append(cr)
+ list_cb.append(cb)
+ list_cgr.append(cgr)
+ list_cgb.append(cgb)
+
+ # Convert to numpy array for data manipulation
+ list_col = np.array(list_col)
+ list_cr = np.array(list_cr)
+ list_cb = np.array(list_cb)
+ list_cgr = np.array(list_cgr)
+ list_cgb = np.array(list_cgb)
+
+ for color_temperature in sorted(set(list_col)):
+ # Average tables for the same colour temperature
+ indices = np.where(list_col == color_temperature)
+ color_temperature = int(color_temperature)
+
+ tables = []
+ for lis in [list_cr, list_cgr, list_cgb, list_cb]:
+ table = np.mean(lis[indices], axis=0)
+ table = output_map_func((1, 3.999), (1024, 4095), table)
+ table = np.round(table).astype('int32').tolist()
+ tables.append(table)
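+            # With the ranges above the map is, to a close approximation,
+            # round(g * 1024): unity gain becomes 1024 and the maximum gain
+            # of 3.999 becomes 4095, i.e. a 12-bit fixed-point value with 10
+            # fractional bits (assuming output_map_func interpolates linearly
+            # between the two ranges).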
+
+ entry = {
+ 'ct': color_temperature,
+ 'r': tables[0],
+ 'gr': tables[1],
+ 'gb': tables[2],
+ 'b': tables[3],
+ }
+
+ output_list.append(entry)
+
+ return output_list
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ # \todo This should actually come from self.sector_{x,y}_gradient
+ size_gradient = lt.gradient.Linear(lt.Remainder.Float)
+ output['x-size'] = size_gradient.distribute(0.5, 8)
+ output['y-size'] = size_gradient.distribute(0.5, 8)
+
+ output['sets'] = self._do_all_lsc(images)
+
+        # \todo Validate images from greyscale camera and force greyscale mode
+ # \todo Debug functionality
+
+ return output
diff --git a/utils/tuning/libtuning/modules/module.py b/utils/tuning/libtuning/modules/module.py
new file mode 100644
index 00000000..de624384
--- /dev/null
+++ b/utils/tuning/libtuning/modules/module.py
@@ -0,0 +1,32 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Base class for algorithm-specific tuning modules
+
+
+# @var type Type of the module. Defined in the base module.
+# @var out_name The key that will be used for the algorithm in the algorithms
+# dictionary in the tuning output file
+# @var hr_name Human-readable module name, mostly for debugging
+class Module(object):
+ type = 'base'
+ hr_name = 'Base Module'
+ out_name = 'GenericAlgorithm'
+
+ def __init__(self):
+ pass
+
+ def validate_config(self, config: dict) -> bool:
+ raise NotImplementedError
+
+ # @brief Do the module's processing
+ # @param config Full configuration from the input configuration file
+ # @param images List of images to process
+ # @param outputs The outputs of all modules that were executed before this
+ # module. Note that this is an input parameter, and the
+ # output of this module should be returned directly
+ # @return Result of the module's processing. It may be empty. None
+ # indicates failure and that the result should not be used.
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ raise NotImplementedError
diff --git a/utils/tuning/libtuning/parsers/__init__.py b/utils/tuning/libtuning/parsers/__init__.py
new file mode 100644
index 00000000..022c1e5d
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.parsers.raspberrypi_parser import RaspberryPiParser
+from libtuning.parsers.yaml_parser import YamlParser
diff --git a/utils/tuning/libtuning/parsers/parser.py b/utils/tuning/libtuning/parsers/parser.py
new file mode 100644
index 00000000..0c3944c7
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/parser.py
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Base class for a parser for a specific format of config file
+
+class Parser(object):
+ def __init__(self):
+ pass
+
+ # @brief Parse a config file into a config dict
+ # @details The config dict shall have one key 'general' with a dict value
+ # for general configuration options, and all other entries shall
+ # have the module as the key with its configuration options (as a
+ # dict) as the value. The config dict shall prune entries that are
+ # for modules that are not in @a modules.
+ # @param config (str) Path to config file
+ # @param modules (list) List of modules
+ # @return (dict, list) Configuration and list of modules to disable
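+    # An illustrative (hypothetical) return value for a tuner with a single
+    # lsc module instance:
+    #
+    #     ({'general': {'blacklevel': 64},
+    #       <lsc module instance>: {'luminance_strength': 0.5}},
+    #      [])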
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ raise NotImplementedError
diff --git a/utils/tuning/libtuning/parsers/raspberrypi_parser.py b/utils/tuning/libtuning/parsers/raspberrypi_parser.py
new file mode 100644
index 00000000..f1da4592
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/raspberrypi_parser.py
@@ -0,0 +1,93 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Parser for Raspberry Pi config file format
+
+from .parser import Parser
+
+import json
+import numbers
+
+import libtuning.utils as utils
+
+
+class RaspberryPiParser(Parser):
+ def __init__(self):
+ super().__init__()
+
+ # The string in the 'disable' and 'plot' lists are formatted as
+ # 'rpi.{module_name}'.
+    # @brief Enumerate, as modules, the entries of @a listt whose names exist
+    # in @a dictt and name a valid module in @a modules
+ def _enumerate_rpi_modules(self, listt, dictt, modules):
+ for x in listt:
+ name = x.replace('rpi.', '')
+ if name not in dictt:
+ continue
+            module = utils.get_module_by_type_name(modules, name)
+ if module is not None:
+ yield module
+
+ def _valid_macbeth_option(self, value):
+ if not isinstance(value, dict):
+ return False
+
+ if list(value.keys()) != ['small', 'show']:
+ return False
+
+ for val in value.values():
+ if not isinstance(val, numbers.Number):
+ return False
+
+ return True
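+    # For example, {'small': 0, 'show': 0} is valid, while {'small': 0}
+    # (missing a key) and {'small': 'yes', 'show': 0} (non-numeric value)
+    # are not. The keys must also appear in exactly this order, as the
+    # check compares against the ordered key list.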
+
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ with open(config_file, 'r') as config_json:
+ config = json.load(config_json)
+
+ disable = []
+ for module in self._enumerate_rpi_modules(config['disable'], config, modules):
+ disable.append(module)
+ # Remove the disabled module's config too
+ config.pop(module.name)
+ config.pop('disable')
+
+        # The raspberrypi config format has 'plot' map to a list of module
+        # names that should be plotted. libtuning instead has each module
+        # carry its own plot information, so do the conversion here.
+
+ for module in self._enumerate_rpi_modules(config['plot'], config, modules):
+ # It's fine to set the value of a potentially disabled module, as
+ # the object still exists at this point
+ module.appendValue('debug', 'plot')
+ config.pop('plot')
+
+ # Convert the keys from module name to module instance
+
+ new_config = {}
+
+ for module_name in config:
+ module = utils.get_module_by_type_name(modules, module_name)
+ if module is not None:
+ new_config[module] = config[module_name]
+
+ new_config['general'] = {}
+
+ if 'blacklevel' in config:
+ if not isinstance(config['blacklevel'], numbers.Number):
+ raise TypeError('Config "blacklevel" must be a number')
+ # Raspberry Pi's ctt config has magic blacklevel value -1 to mean
+ # "get it from the image metadata". Since we already do that in
+ # Image, don't save it to the config here.
+ if config['blacklevel'] >= 0:
+ new_config['general']['blacklevel'] = config['blacklevel']
+
+ if 'macbeth' in config:
+ if not self._valid_macbeth_option(config['macbeth']):
+ raise TypeError('Config "macbeth" must be a dict: {"small": number, "show": number}')
+ new_config['general']['macbeth'] = config['macbeth']
+ else:
+ new_config['general']['macbeth'] = {'small': 0, 'show': 0}
+
+ return new_config, disable
diff --git a/utils/tuning/libtuning/parsers/yaml_parser.py b/utils/tuning/libtuning/parsers/yaml_parser.py
new file mode 100644
index 00000000..244db24d
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/yaml_parser.py
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Parser for YAML format config file
+
+from .parser import Parser
+
+
+class YamlParser(Parser):
+ def __init__(self):
+ super().__init__()
+
+ # \todo Implement this (it's fine for now as we don't need a config for
+ # rkisp1 LSC, which is the only user of this so far)
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ return {}, []
diff --git a/utils/tuning/libtuning/smoothing.py b/utils/tuning/libtuning/smoothing.py
new file mode 100644
index 00000000..de4d920c
--- /dev/null
+++ b/utils/tuning/libtuning/smoothing.py
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Wrapper for cv2 smoothing functions to enable duck-typing
+
+import cv2
+
+
+# @brief Wrapper for cv2 smoothing functions so that they can be duck-typed
+class Smoothing(object):
+ def __init__(self):
+ pass
+
+ def smoothing(self, src):
+ raise NotImplementedError
+
+
+class MedianBlur(Smoothing):
+ def __init__(self, ksize):
+ self.ksize = ksize
+
+ def smoothing(self, src):
+ return cv2.medianBlur(src.astype('float32'), self.ksize).astype('float64')
diff --git a/utils/tuning/libtuning/utils.py b/utils/tuning/libtuning/utils.py
new file mode 100644
index 00000000..1e8128ea
--- /dev/null
+++ b/utils/tuning/libtuning/utils.py
@@ -0,0 +1,125 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Utilities for libtuning
+
+import decimal
+import math
+import numpy as np
+import os
+from pathlib import Path
+import re
+import sys
+
+import libtuning as lt
+from libtuning.image import Image
+from libtuning.macbeth import locate_macbeth
+
+# Utility functions
+
+
+def eprint(*args, **kwargs):
+ print(*args, file=sys.stderr, **kwargs)
+
+
+def get_module_by_type_name(modules, name):
+ for module in modules:
+ if module.type == name:
+ return module
+ return None
+
+
+# Private utility functions
+
+
+def _list_image_files(directory):
+ d = Path(directory)
+ files = [d.joinpath(f) for f in os.listdir(d)
+             if re.search(r'\.(jpe?g|dng)$', f)]
+ files.sort()
+ return files
+
+
+def _parse_image_filename(fn: Path):
+    result = re.search(r'^(alsc_)?(\d+)[kK]_(\d+)?[lLuU]?\.\w{3,4}$', fn.name)
+ if result is None:
+ eprint(f'The file name of {fn.name} is incorrectly formatted')
+ return None, None, None
+
+ color = int(result.group(2))
+ lsc_only = result.group(1) is not None
+ lux = None if lsc_only else int(result.group(3))
+
+ return color, lux, lsc_only
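+# For example, given the pattern above, 'alsc_3000k_0.dng' parses to
+# (color=3000, lux=None, lsc_only=True), while '2500k_800l.dng' parses to
+# (color=2500, lux=800, lsc_only=False).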
+
+
+# \todo Implement this from check_imgs() in ctt.py
+def _validate_images(images):
+ return True
+
+
+# Public utility functions
+
+
+# @brief Load images into a single list of Image instances
+# @param input_dir Directory from which to load image files
+# @param config Configuration dictionary
+# @param load_nonlsc Whether or not to load non-lsc images
+# @param load_lsc Whether or not to load lsc-only images
+# @return A list of Image instances
+def load_images(input_dir: str, config: dict, load_nonlsc: bool, load_lsc: bool) -> list:
+ files = _list_image_files(input_dir)
+ if len(files) == 0:
+ eprint(f'No images found in {input_dir}')
+ return None
+
+ images = []
+ for f in files:
+ color, lux, lsc_only = _parse_image_filename(f)
+ if color is None:
+ continue
+
+ # Skip lsc image if we don't need it
+ if lsc_only and not load_lsc:
+ eprint(f'Skipping {f.name} as this tuner has no LSC module')
+ continue
+
+ # Skip non-lsc image if we don't need it
+ if not lsc_only and not load_nonlsc:
+ eprint(f'Skipping {f.name} as this tuner only has an LSC module')
+ continue
+
+ # Load image
+ try:
+ image = Image(f)
+ except Exception as e:
+ eprint(f'Failed to load image {f.name}: {e}')
+ continue
+
+ # Populate simple fields
+ image.lsc_only = lsc_only
+ image.color = color
+ image.lux = lux
+
+        # Black level comes from the TIFF tags, but it can be overridden by
+        # the config file.
+ if 'blacklevel' in config['general']:
+ image.blacklevel_16 = config['general']['blacklevel']
+
+ if lsc_only:
+ images.append(image)
+ continue
+
+ # Handle macbeth
+        macbeth = locate_macbeth(image, config)
+ if macbeth is None:
+ continue
+
+ images.append(image)
+
+ if not _validate_images(images):
+ return None
+
+ return images
diff --git a/utils/tuning/raspberrypi/__init__.py b/utils/tuning/raspberrypi/__init__.py
new file mode 100644
index 00000000..9ccabb0e
--- /dev/null
+++ b/utils/tuning/raspberrypi/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
diff --git a/utils/tuning/raspberrypi/alsc.py b/utils/tuning/raspberrypi/alsc.py
new file mode 100644
index 00000000..ba8fc9e1
--- /dev/null
+++ b/utils/tuning/raspberrypi/alsc.py
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# ALSC module instance for Raspberry Pi tuning scripts
+
+import libtuning as lt
+from libtuning.modules.lsc import ALSCRaspberryPi
+
+ALSC = \
+ ALSCRaspberryPi(do_color=lt.Param('do_alsc_colour', lt.Param.Mode.Optional, True),
+ luminance_strength=lt.Param('luminance_strength', lt.Param.Mode.Optional, 0.5),
+ debug=[lt.Debug.Plot],
+ sector_shape=(16, 12),
+ sector_x_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_y_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_average_function=lt.average.Mean(),
+ smoothing_function=lt.smoothing.MedianBlur(3),
+ )
diff --git a/utils/tuning/raspberrypi_alsc_only.py b/utils/tuning/raspberrypi_alsc_only.py
new file mode 100755
index 00000000..777d8007
--- /dev/null
+++ b/utils/tuning/raspberrypi_alsc_only.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Tuning script for raspberrypi, ALSC only
+
+import sys
+
+import libtuning as lt
+from libtuning.parsers import RaspberryPiParser
+from libtuning.generators import RaspberryPiOutput
+
+from raspberrypi.alsc import ALSC
+
+tuner = lt.Tuner('Raspberry Pi (ALSC only)')
+tuner.add(ALSC)
+tuner.set_input_parser(RaspberryPiParser())
+tuner.set_output_formatter(RaspberryPiOutput())
+tuner.set_output_order([ALSC])
+
+if __name__ == '__main__':
+ sys.exit(tuner.run(sys.argv))
diff --git a/utils/tuning/rkisp1.py b/utils/tuning/rkisp1.py
new file mode 100755
index 00000000..517c791e
--- /dev/null
+++ b/utils/tuning/rkisp1.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+#
+# Tuning script for rkisp1
+
+import sys
+
+import libtuning as lt
+from libtuning.parsers import YamlParser
+from libtuning.generators import YamlOutput
+from libtuning.modules.lsc import LSCRkISP1
+
+tuner = lt.Tuner('RkISP1')
+tuner.add(LSCRkISP1(
+ debug=[lt.Debug.Plot],
+ # This is for the actual LSC tuning, and is part of the base LSC
+ # module. rkisp1's table sector sizes (16x16 programmed as mirrored
+ # 8x8) are separate, and is hardcoded in its specific LSC tuning
+ # module.
+ sector_shape=(17, 17),
+
+ sector_x_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_y_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+
+ # This is the function that will be used to average the pixels in
+ # each sector. This can also be a custom function.
+ sector_average_function=lt.average.Mean(),
+
+ # This is the function that will be used to smooth the color ratio
+ # values. This can also be a custom function.
+ smoothing_function=lt.smoothing.MedianBlur(3),
+ ))
+tuner.set_input_parser(YamlParser())
+tuner.set_output_formatter(YamlOutput())
+tuner.set_output_order([LSCRkISP1])
+
+if __name__ == '__main__':
+ sys.exit(tuner.run(sys.argv))
diff --git a/utils/update-kernel-headers.sh b/utils/update-kernel-headers.sh
index a006452e..590986d2 100755
--- a/utils/update-kernel-headers.sh
+++ b/utils/update-kernel-headers.sh
@@ -18,7 +18,7 @@ if [ "$line" != "# Kbuild for top-level directory of the kernel" ] ; then
exit 1
fi
-if [ ! -d "${kernel_dir}/.git" ] ; then
+if [ ! -e "${kernel_dir}/.git" ] ; then
echo "Directory ${kernel_dir} doesn't contain a git tree"
exit 1
fi
diff --git a/utils/update-mojo.sh b/utils/update-mojo.sh
index fcbc81e7..09c8ff5b 100755
--- a/utils/update-mojo.sh
+++ b/utils/update-mojo.sh
@@ -3,13 +3,23 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Update mojo copy from a chromium source tree
+set -e
+
if [ $# != 1 ] ; then
echo "Usage: $0 <chromium dir>"
exit 1
fi
ipc_dir="$(dirname "$(realpath "$0")")/ipc"
-chromium_dir="$1"
+chromium_dir="$(realpath "$1")"
+
+cd "${ipc_dir}/../../"
+
+# Reject dirty libcamera trees
+if [ -n "$(git status --porcelain -uno)" ] ; then
+ echo "libcamera tree is dirty"
+ exit 1
+fi
if [ ! -d "${chromium_dir}/mojo" ] ; then
echo "Directory ${chromium_dir} doesn't contain mojo"
@@ -24,19 +34,23 @@ fi
# Get the chromium commit id
version=$(git -C "${chromium_dir}" rev-parse --short HEAD)
-# Reject dirty trees
+# Reject dirty chromium trees
if [ -n "$(git -C "${chromium_dir}" status --porcelain)" ] ; then
echo "Chromium tree in ${chromium_dir} is dirty"
exit 1
fi
+# Remove the previously imported files.
+rm -rf utils/ipc/mojo/
+rm -rf utils/ipc/tools/
+
# Copy the diagnosis file
-cp "${chromium_dir}/tools/diagnosis/crbug_1001171.py" "${ipc_dir}/tools/diagnosis"
+mkdir -p utils/ipc/tools/diagnosis/
+cp "${chromium_dir}/tools/diagnosis/crbug_1001171.py" utils/ipc/tools/diagnosis/
# Copy the rest of mojo
-cp "${chromium_dir}/mojo/public/LICENSE" "${ipc_dir}/mojo/public"
-
-rm -rf "${ipc_dir}/mojo/public/tools/*"
+mkdir -p utils/ipc/mojo/public/
+cp "${chromium_dir}/mojo/public/LICENSE" utils/ipc/mojo/public/
(
cd "${chromium_dir}" || exit
@@ -55,12 +69,22 @@ modify them manually.
EOF
)
-echo "$readme" > "${ipc_dir}/mojo/README"
-echo "$readme" > "${ipc_dir}/tools/README"
+echo "$readme" > utils/ipc/mojo/README
+echo "$readme" > utils/ipc/tools/README
-cat <<EOF
-------------------------------------------------------------
-mojo updated. Please review and up-port local changes before
-committing.
-------------------------------------------------------------
-EOF
+# Commit the update. Use 'git commit -n' to avoid checkstyle pre-commit hook
+# failures, as mojo doesn't comply with the Python coding style enforced by
+# checkstyle.py.
+git add utils/ipc/mojo/
+git add utils/ipc/tools/
+
+echo "utils: ipc: Update mojo
+
+Update mojo from commit
+
+$(git -C "${chromium_dir}" show --pretty='%H "%s"' --no-patch)
+
+from the Chromium repository.
+
+The update-mojo.sh script was used for this update." | \
+git commit -n -s -F -