Diffstat (limited to 'utils')
-rwxr-xr-x  utils/abi-compat.sh  13
-rwxr-xr-x  utils/checkstyle.py  485
-rw-r--r--  utils/codegen/controls.py  142
-rwxr-xr-x  utils/codegen/gen-controls.py  109
-rwxr-xr-x  utils/codegen/gen-formats.py (renamed from utils/gen-formats.py)  2
-rwxr-xr-x  utils/codegen/gen-gst-controls.py  183
-rwxr-xr-x  utils/codegen/gen-header.sh (renamed from utils/gen-header.sh)  9
-rwxr-xr-x  utils/codegen/gen-ipa-pub-key.py (renamed from utils/gen-ipa-pub-key.py)  2
-rwxr-xr-x  utils/codegen/gen-tp-header.py (renamed from utils/tracepoints/gen-tp-header.py)  6
-rwxr-xr-x  utils/codegen/ipc/extract-docs.py (renamed from utils/ipc/extract-docs.py)  10
-rwxr-xr-x  utils/codegen/ipc/generate.py (renamed from utils/ipc/generate.py)  15
-rw-r--r--  utils/codegen/ipc/generators/__init__.py (renamed from utils/ipc/generators/__init__.py)  0
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl (renamed from utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl)  7
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl (renamed from utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl)  11
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl (renamed from utils/ipc/generators/libcamera_templates/definition_functions.tmpl)  0
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/meson.build (renamed from utils/ipc/generators/libcamera_templates/meson.build)  0
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl (renamed from utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl)  16
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl (renamed from utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl)  27
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl (renamed from utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl)  12
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl (renamed from utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl)  2
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl (renamed from utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl)  11
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl (renamed from utils/ipc/generators/libcamera_templates/proxy_functions.tmpl)  4
-rw-r--r--  utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl (renamed from utils/ipc/generators/libcamera_templates/serializer.tmpl)  32
-rw-r--r--  utils/codegen/ipc/generators/meson.build (renamed from utils/ipc/generators/meson.build)  0
-rw-r--r--  utils/codegen/ipc/generators/mojom_libcamera_generator.py (renamed from utils/ipc/generators/mojom_libcamera_generator.py)  18
-rw-r--r--  utils/codegen/ipc/meson.build (renamed from utils/ipc/meson.build)  3
-rw-r--r--  utils/codegen/ipc/mojo/README (renamed from utils/ipc/mojo/README)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/LICENSE (renamed from utils/ipc/mojo/public/LICENSE)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/.style.yapf (renamed from utils/ipc/mojo/public/tools/.style.yapf)  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/BUILD.gn (renamed from utils/ipc/mojo/public/tools/BUILD.gn)  8
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn (renamed from utils/ipc/mojo/public/tools/bindings/BUILD.gn)  36
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/README.md (renamed from utils/ipc/mojo/public/tools/bindings/README.md)  239
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/__init__.py)  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py  170
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py  194
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py  34
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py  62
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py  173
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py  102
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py  254
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py (renamed from utils/ipc/mojo/public/tools/bindings/concatenate-files.py)  5
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py (renamed from utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py)  10
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py (renamed from utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py)  2
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py (renamed from utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py)  4
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py  47
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni (renamed from utils/ipc/mojo/public/tools/bindings/mojom.gni)  845
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py (renamed from utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py)  62
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py (renamed from utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py)  6
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py (renamed from utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py)  5
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn  18
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/README.md (renamed from utils/ipc/mojo/public/tools/mojom/README.md)  0
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py (renamed from utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py)  69
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py)  87
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/const_unittest.py)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/enum_unittest.py)  30
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py  84
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn (renamed from utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn)  3
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py)  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/error.py)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py)  3
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py)  7
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py)  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py  26
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py)  11
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py)  9
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py)  787
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py)  151
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py)  30
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py)  464
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py  141
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py  0
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py)  145
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py)  12
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py)  21
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py)  155
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py)  8
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py)  10
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py)  108
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py)  39
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom_parser.py)  119
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py)  6
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py)  31
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py)  2
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py  44
-rw-r--r--  utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py (renamed from utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py)  73
-rwxr-xr-x  utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py (renamed from utils/ipc/mojo/public/tools/run_all_python_unittests.py)  8
-rwxr-xr-x  utils/codegen/ipc/parser.py (renamed from utils/ipc/parser.py)  5
-rw-r--r--  utils/codegen/ipc/tools/README (renamed from utils/ipc/tools/README)  2
-rw-r--r--  utils/codegen/ipc/tools/diagnosis/crbug_1001171.py (renamed from utils/ipc/tools/diagnosis/crbug_1001171.py)  2
-rw-r--r--  utils/codegen/meson.build  21
-rwxr-xr-x  utils/gen-controls.py  313
-rwxr-xr-x  utils/gen-debug-controls.py  163
-rwxr-xr-x  utils/gen-ipa-priv-key.sh  2
-rwxr-xr-x  utils/gen-version.sh  2
-rwxr-xr-x  utils/hooks/pre-push  23
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni  51
-rw-r--r--  utils/ipc/mojo/public/tools/bindings/compile_typescript.py  27
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py  36
-rwxr-xr-x  utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py  119
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py  93
-rw-r--r--  utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py  73
-rwxr-xr-x  utils/ipu3/ipu3-capture.sh  2
-rw-r--r--  utils/ipu3/ipu3-pack.c  4
-rwxr-xr-x  utils/ipu3/ipu3-process.sh  2
-rw-r--r--  utils/ipu3/ipu3-unpack.c  5
-rw-r--r--  utils/meson.build  10
-rwxr-xr-x  utils/raspberrypi/ctt/alsc_only.py  20
-rw-r--r--  utils/raspberrypi/ctt/cac_only.py  142
-rw-r--r--  utils/raspberrypi/ctt/colors.py  2
-rwxr-xr-x  utils/raspberrypi/ctt/convert_tuning.py  98
-rwxr-xr-x  utils/raspberrypi/ctt/ctt.py  260
-rw-r--r--  utils/raspberrypi/ctt/ctt_alsc.py  88
-rw-r--r--  utils/raspberrypi/ctt/ctt_awb.py  13
-rw-r--r--  utils/raspberrypi/ctt/ctt_cac.py  250
-rw-r--r--  utils/raspberrypi/ctt/ctt_ccm.py  8
-rw-r--r--  utils/raspberrypi/ctt/ctt_config_example.json  5
-rw-r--r--  utils/raspberrypi/ctt/ctt_dots_locator.py  118
-rw-r--r--  utils/raspberrypi/ctt/ctt_geq.py  2
-rw-r--r--  utils/raspberrypi/ctt/ctt_image_load.py  3
-rw-r--r--  utils/raspberrypi/ctt/ctt_lux.py  2
-rw-r--r--  utils/raspberrypi/ctt/ctt_macbeth_locator.py  71
-rw-r--r--  utils/raspberrypi/ctt/ctt_noise.py  2
-rwxr-xr-x  utils/raspberrypi/ctt/ctt_pisp.py  805
-rwxr-xr-x  utils/raspberrypi/ctt/ctt_pretty_print_json.py  22
-rw-r--r--  utils/raspberrypi/ctt/ctt_ransac.py  2
-rw-r--r--  utils/raspberrypi/ctt/ctt_tools.py  5
-rwxr-xr-x  utils/raspberrypi/ctt/ctt_vc4.py  126
-rwxr-xr-x  utils/rkisp1/gen-csc-table.py  26
-rwxr-xr-x  utils/rkisp1/rkisp1-capture.sh  3
-rwxr-xr-x  utils/tracepoints/analyze-ipa-trace.py  2
-rw-r--r--  utils/tracepoints/meson.build  5
-rw-r--r--  utils/tuning/README.rst  23
-rw-r--r--  utils/tuning/config-example.yaml  54
-rw-r--r--  utils/tuning/libtuning/average.py  2
-rw-r--r--  utils/tuning/libtuning/ctt_awb.py  378
-rw-r--r--  utils/tuning/libtuning/ctt_ccm.py  408
-rw-r--r--  utils/tuning/libtuning/ctt_colors.py  30
-rw-r--r--  utils/tuning/libtuning/ctt_ransac.py  71
-rw-r--r--  utils/tuning/libtuning/generators/generator.py  2
-rw-r--r--  utils/tuning/libtuning/generators/raspberrypi_output.py  2
-rw-r--r--  utils/tuning/libtuning/generators/yaml_output.py  10
-rw-r--r--  utils/tuning/libtuning/gradient.py  2
-rw-r--r--  utils/tuning/libtuning/image.py  14
-rw-r--r--  utils/tuning/libtuning/libtuning.py  30
-rw-r--r--  utils/tuning/libtuning/macbeth.py  67
-rw-r--r--  utils/tuning/libtuning/macbeth_ref.pgm  2
-rw-r--r--  utils/tuning/libtuning/modules/agc/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/agc/agc.py  21
-rw-r--r--  utils/tuning/libtuning/modules/agc/rkisp1.py  79
-rw-r--r--  utils/tuning/libtuning/modules/awb/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/awb/awb.py  40
-rw-r--r--  utils/tuning/libtuning/modules/awb/rkisp1.py  36
-rw-r--r--  utils/tuning/libtuning/modules/ccm/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/ccm/ccm.py  41
-rw-r--r--  utils/tuning/libtuning/modules/ccm/rkisp1.py  28
-rw-r--r--  utils/tuning/libtuning/modules/lsc/lsc.py  5
-rw-r--r--  utils/tuning/libtuning/modules/lsc/raspberrypi.py  14
-rw-r--r--  utils/tuning/libtuning/modules/lsc/rkisp1.py  22
-rw-r--r--  utils/tuning/libtuning/modules/lux/__init__.py  6
-rw-r--r--  utils/tuning/libtuning/modules/lux/lux.py  70
-rw-r--r--  utils/tuning/libtuning/modules/lux/rkisp1.py  22
-rw-r--r--  utils/tuning/libtuning/modules/module.py  2
-rw-r--r--  utils/tuning/libtuning/modules/static.py  24
-rw-r--r--  utils/tuning/libtuning/parsers/parser.py  2
-rw-r--r--  utils/tuning/libtuning/parsers/raspberrypi_parser.py  2
-rw-r--r--  utils/tuning/libtuning/parsers/yaml_parser.py  11
-rw-r--r--  utils/tuning/libtuning/smoothing.py  2
-rw-r--r--  utils/tuning/libtuning/utils.py  99
-rw-r--r--  utils/tuning/raspberrypi/alsc.py  2
-rwxr-xr-x  utils/tuning/raspberrypi_alsc_only.py  2
-rw-r--r--  utils/tuning/requirements.txt  9
-rwxr-xr-x  utils/tuning/rkisp1.py  65
-rwxr-xr-x  utils/update-kernel-headers.sh  3
-rwxr-xr-x  utils/update-mojo.sh  52
176 files changed, 8358 insertions, 2539 deletions
diff --git a/utils/abi-compat.sh b/utils/abi-compat.sh
index c936ac05..31f61e32 100755
--- a/utils/abi-compat.sh
+++ b/utils/abi-compat.sh
@@ -156,15 +156,16 @@ create_abi_dump() {
# Generate a minimal libcamera build. "lib" and "prefix" are
# defined explicitly to avoid system default ambiguities.
meson setup "$build" "$worktree" \
- -Dlibdir=lib \
- -Dprefix=/usr/local/ \
- -Ddocumentation=disabled \
-Dcam=disabled \
- -Dqcam=disabled \
+ -Ddocumentation=disabled \
-Dgstreamer=disabled \
-Dlc-compliance=disabled \
- -Dtracing=disabled \
- -Dpipelines=
+ -Dlibdir=lib \
+ -Dpipelines= \
+ -Dprefix=/usr/local/ \
+ -Dpycamera=disabled \
+ -Dqcam=disabled \
+ -Dtracing=disabled
ninja -C "$build"
DESTDIR="$install" ninja -C "$build" install
diff --git a/utils/checkstyle.py b/utils/checkstyle.py
index 836ea80f..f6229bbd 100755
--- a/utils/checkstyle.py
+++ b/utils/checkstyle.py
@@ -1,10 +1,10 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2018, Google Inc.
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# checkstyle.py - A patch style checker script based on clang-format
+# A patch style checker script based on clang-format
#
# TODO:
#
@@ -23,7 +23,6 @@ import subprocess
import sys
dependencies = {
- 'clang-format': True,
'git': True,
}
@@ -168,6 +167,12 @@ def parse_diff(diff):
hunk = DiffHunk(line)
elif hunk is not None:
+ # Work around https://github.com/python/cpython/issues/46395
+ # See https://www.gnu.org/software/diffutils/manual/html_node/Incomplete-Lines.html
+ if line[-1] != '\n':
+ hunk.append(line + '\n')
+ line = '\\ No newline at end of file\n'
+
hunk.append(line)
if hunk:
@@ -205,36 +210,66 @@ class CommitFile:
class Commit:
def __init__(self, commit):
- self.commit = commit
+ self._commit = commit
+ self._author = None
self._trailers = []
self._parse()
- def _parse_trailers(self, lines):
- for index in range(1, len(lines)):
- line = lines[index]
- if not line:
- break
+ def _parse_commit(self):
+ # Get and parse the commit message.
+ ret = subprocess.run(['git', 'show', '--format=%H%n%an <%ae>%n%s%n%b',
+ '--no-patch', self.commit],
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
+ lines = ret.splitlines()
+
+ self._commit = lines[0]
+ self._author = lines[1]
+ self._title = lines[2]
+ self._body = lines[3:]
- self._trailers.append(line)
+ # Parse the trailers. Feed git-interpret-trailers with a full commit
+ # message that includes both the title and the body, as it otherwise
+ # fails to find trailers when the body contains trailers only.
+ message = self._title + '\n\n' + '\n'.join(self._body)
+ trailers = subprocess.run(['git', 'interpret-trailers', '--parse'],
+ input=message.encode('utf-8'),
+ stdout=subprocess.PIPE).stdout.decode('utf-8')
- return index
+ self._trailers = trailers.splitlines()
def _parse(self):
- # Get the commit title and list of files.
- ret = subprocess.run(['git', 'show', '--format=%s%n%(trailers:only,unfold)', '--name-status',
+ self._parse_commit()
+
+ # Get the list of files. Use an empty format specifier to suppress the
+ # commit message completely.
+ ret = subprocess.run(['git', 'show', '--format=', '--name-status',
self.commit],
stdout=subprocess.PIPE).stdout.decode('utf-8')
- lines = ret.splitlines()
-
- self._title = lines[0]
+ self._files = [CommitFile(f) for f in ret.splitlines()]
- index = self._parse_trailers(lines)
- self._files = [CommitFile(f) for f in lines[index:] if f]
+ def __repr__(self):
+ return '\n'.join([
+ f'commit {self.commit}',
+ f'Author: {self.author}',
+ f'',
+ f' {self.title}',
+ '',
+ '\n'.join([line and f' {line}' or '' for line in self._body]),
+ 'Trailers:',
+ ] + self.trailers)
def files(self, filter='AMR'):
return [f.filename for f in self._files if f.status in filter]
@property
+ def author(self):
+ return self._author
+
+ @property
+ def commit(self):
+ return self._commit
+
+ @property
def title(self):
return self._title
@@ -272,20 +307,14 @@ class StagedChanges(Commit):
class Amendment(Commit):
def __init__(self):
- Commit.__init__(self, '')
+ Commit.__init__(self, 'HEAD')
def _parse(self):
- # Create a title using HEAD commit and parse the trailers.
- ret = subprocess.run(['git', 'show', '--format=%H %s%n%(trailers:only,unfold)',
- '--no-patch'],
- stdout=subprocess.PIPE).stdout.decode('utf-8')
- lines = ret.splitlines()
+ self._parse_commit()
- self._title = 'Amendment of ' + lines[0].strip()
+ self._title = f'Amendment of "{self.title}"'
- self._parse_trailers(lines)
-
- # Extract the list of modified files
+ # Extract the list of modified files.
ret = subprocess.run(['git', 'diff', '--staged', '--name-status', 'HEAD~'],
stdout=subprocess.PIPE).stdout.decode('utf-8')
self._files = [CommitFile(f) for f in ret.splitlines()]
@@ -304,40 +333,83 @@ class Amendment(Commit):
class ClassRegistry(type):
def __new__(cls, clsname, bases, attrs):
newclass = super().__new__(cls, clsname, bases, attrs)
- if bases:
- bases[0].subclasses.append(newclass)
- bases[0].subclasses.sort(key=lambda x: getattr(x, 'priority', 0),
- reverse=True)
+ if bases and bases[0] != CheckerBase:
+ base = bases[0]
+
+ if not hasattr(base, 'subclasses'):
+ base.subclasses = []
+ base.subclasses.append(newclass)
+ base.subclasses.sort(key=lambda x: getattr(x, 'priority', 0),
+ reverse=True)
return newclass
-# ------------------------------------------------------------------------------
-# Commit Checkers
-#
+class CheckerBase(metaclass=ClassRegistry):
+ @classmethod
+ def instances(cls, obj, names):
+ for instance in cls.subclasses:
+ if names and instance.__name__ not in names:
+ continue
+ if instance.supports(obj):
+ yield instance
+
+ @classmethod
+ def supports(cls, obj):
+ if hasattr(cls, 'commit_types'):
+ return type(obj) in cls.commit_types
-class CommitChecker(metaclass=ClassRegistry):
- subclasses = []
+ if hasattr(cls, 'patterns'):
+ for pattern in cls.patterns:
+ if fnmatch.fnmatch(os.path.basename(obj), pattern):
+ return True
- def __init__(self):
- pass
+ return False
- #
- # Class methods
- #
@classmethod
- def checkers(cls, names):
- for checker in cls.subclasses:
- if names and checker.__name__ not in names:
- continue
- yield checker
+ def all_patterns(cls):
+ patterns = set()
+ for instance in cls.subclasses:
+ if hasattr(instance, 'patterns'):
+ patterns.update(instance.patterns)
+
+ return patterns
+
+ @classmethod
+ def check_dependencies(cls):
+ if not hasattr(cls, 'dependencies'):
+ return []
+
+ issues = []
+
+ for command in cls.dependencies:
+ if command not in dependencies:
+ dependencies[command] = shutil.which(command)
+
+ if not dependencies[command]:
+ issues.append(CommitIssue(f'Missing {command} to run {cls.__name__}'))
+
+ return issues
+
+
+# ------------------------------------------------------------------------------
+# Commit Checkers
+#
+
+class CommitChecker(CheckerBase):
+ pass
class CommitIssue(object):
def __init__(self, msg):
self.msg = msg
+ def __str__(self):
+ return f'{Colours.fg(Colours.Yellow)}{self.msg}{Colours.reset()}'
+
class HeaderAddChecker(CommitChecker):
+ commit_types = (Commit, StagedChanges, Amendment)
+
@classmethod
def check(cls, commit, top_level):
issues = []
@@ -382,6 +454,8 @@ class HeaderAddChecker(CommitChecker):
class TitleChecker(CommitChecker):
+ commit_types = (Commit,)
+
prefix_regex = re.compile(r'^([a-zA-Z0-9_.-]+: )+')
release_regex = re.compile(r'libcamera v[0-9]+\.[0-9]+\.[0-9]+')
@@ -389,11 +463,6 @@ class TitleChecker(CommitChecker):
def check(cls, commit, top_level):
title = commit.title
- # Skip the check when validating staged changes (as done through a
- # pre-commit hook) as there is no title to check in that case.
- if isinstance(commit, StagedChanges):
- return []
-
# Ignore release commits, they don't need a prefix.
if TitleChecker.release_regex.fullmatch(title):
return []
@@ -449,6 +518,8 @@ class TitleChecker(CommitChecker):
class TrailersChecker(CommitChecker):
+ commit_types = (Commit,)
+
commit_regex = re.compile(r'[0-9a-f]{12}[0-9a-f]* \(".*"\)')
coverity_regex = re.compile(r'Coverity CID=.*')
@@ -471,6 +542,7 @@ class TrailersChecker(CommitChecker):
known_trailers = {
'Acked-by': email_regex,
'Bug': link_regex,
+ 'Co-developed-by': email_regex,
'Fixes': commit_regex,
'Link': link_regex,
'Reported-by': validate_reported_by,
@@ -486,6 +558,8 @@ class TrailersChecker(CommitChecker):
def check(cls, commit, top_level):
issues = []
+ sob_found = False
+
for trailer in commit.trailers:
match = TrailersChecker.trailer_regex.fullmatch(trailer)
if not match:
@@ -508,6 +582,13 @@ class TrailersChecker(CommitChecker):
issues.append(CommitIssue(f"Malformed value '{value}' for commit trailer '{key}'"))
continue
+ if key == 'Signed-off-by':
+ if value == commit.author:
+ sob_found = True
+
+ if not sob_found:
+ issues.append(CommitIssue(f"No 'Signed-off-by' trailer matching author '{commit.author}', see Documentation/contributing.rst"))
+
return issues
@@ -515,64 +596,76 @@ class TrailersChecker(CommitChecker):
# Style Checkers
#
-class StyleChecker(metaclass=ClassRegistry):
- subclasses = []
+class StyleChecker(CheckerBase):
+ pass
- def __init__(self):
- pass
- #
- # Class methods
- #
- @classmethod
- def checkers(cls, filename, names):
- for checker in cls.subclasses:
- if names and checker.__name__ not in names:
- continue
- if checker.supports(filename):
- yield checker
+class StyleIssue(object):
+ def __init__(self, line_number, position, line, msg):
+ self.line_number = line_number
+ self.position = position
+ self.line = line
+ self.msg = msg
- @classmethod
- def supports(cls, filename):
- for pattern in cls.patterns:
- if fnmatch.fnmatch(os.path.basename(filename), pattern):
- return True
- return False
+ def __str__(self):
+ s = []
+ s.append(f'{Colours.fg(Colours.Yellow)}#{self.line_number}: {self.msg}{Colours.reset()}')
+ if self.line is not None:
+ s.append(f'{Colours.fg(Colours.Yellow)}+{self.line.rstrip()}{Colours.reset()}')
+
+ if self.position is not None:
+ # Align the position marker by using the original line with
+ # all characters except for tabs replaced with spaces. This
+ # ensures proper alignment regardless of how the code is
+ # indented.
+ start = self.position[0]
+ prefix = ''.join([c if c == '\t' else ' ' for c in self.line[:start]])
+ length = self.position[1] - start - 1
+ s.append(f' {prefix}^{"~" * length}')
+
+ return '\n'.join(s)
+
+
+class HexValueChecker(StyleChecker):
+ patterns = ('*.c', '*.cpp', '*.h')
+
+ regex = re.compile(r'\b0[xX][0-9a-fA-F]+\b')
@classmethod
- def all_patterns(cls):
- patterns = set()
- for checker in cls.subclasses:
- patterns.update(checker.patterns)
+ def check(cls, content, line_numbers):
+ issues = []
- return patterns
+ for line_number in line_numbers:
+ line = content[line_number - 1]
+ match = HexValueChecker.regex.search(line)
+ if not match:
+ continue
+ value = match.group(0)
+ if value == value.lower():
+ continue
-class StyleIssue(object):
- def __init__(self, line_number, line, msg):
- self.line_number = line_number
- self.line = line
- self.msg = msg
+ issues.append(StyleIssue(line_number, match.span(0), line,
+ f'Use lowercase hex constant {value.lower()}'))
+
+ return issues
class IncludeChecker(StyleChecker):
patterns = ('*.cpp', '*.h')
- headers = ('assert', 'ctype', 'errno', 'fenv', 'float', 'inttypes',
- 'limits', 'locale', 'setjmp', 'signal', 'stdarg', 'stddef',
- 'stdint', 'stdio', 'stdlib', 'string', 'time', 'uchar', 'wchar',
- 'wctype')
- include_regex = re.compile('^#include <c([a-z]*)>')
-
- def __init__(self, content):
- super().__init__()
- self.__content = content
+ headers = ('cassert', 'cctype', 'cerrno', 'cfenv', 'cfloat', 'cinttypes',
+ 'climits', 'clocale', 'csetjmp', 'csignal', 'cstdarg', 'cstddef',
+ 'cstdint', 'cstdio', 'cstdlib', 'cstring', 'ctime', 'cuchar',
+ 'cwchar', 'cwctype', 'math.h')
+ include_regex = re.compile(r'^#include <([a-z.]*)>')
- def check(self, line_numbers):
+ @classmethod
+ def check(self, content, line_numbers):
issues = []
for line_number in line_numbers:
- line = self.__content[line_number - 1]
+ line = content[line_number - 1]
match = IncludeChecker.include_regex.match(line)
if not match:
continue
@@ -581,28 +674,34 @@ class IncludeChecker(StyleChecker):
if header not in IncludeChecker.headers:
continue
- issues.append(StyleIssue(line_number, line,
- 'C compatibility header <%s.h> is preferred' % header))
+ if header.endswith('.h'):
+ header_type = 'C++'
+ header = 'c' + header[:-2]
+ else:
+ header_type = 'C compatibility'
+ header = header[1:] + '.h'
+
+ issues.append(StyleIssue(line_number, match.span(1), line,
+ f'{header_type} header <{header}> is preferred'))
return issues
class LogCategoryChecker(StyleChecker):
- log_regex = re.compile('\\bLOG\((Debug|Info|Warning|Error|Fatal)\)')
+ log_regex = re.compile(r'\bLOG\((Debug|Info|Warning|Error|Fatal)\)')
patterns = ('*.cpp',)
- def __init__(self, content):
- super().__init__()
- self.__content = content
-
- def check(self, line_numbers):
+ @classmethod
+ def check(cls, content, line_numbers):
issues = []
for line_number in line_numbers:
- line = self.__content[line_number-1]
- if not LogCategoryChecker.log_regex.search(line):
+ line = content[line_number - 1]
+ match = LogCategoryChecker.log_regex.search(line)
+ if not match:
continue
- issues.append(StyleIssue(line_number, line, 'LOG() should use categories'))
+ issues.append(StyleIssue(line_number, match.span(1), line,
+ 'LOG() should use categories'))
return issues
@@ -610,70 +709,30 @@ class LogCategoryChecker(StyleChecker):
class MesonChecker(StyleChecker):
patterns = ('meson.build',)
- def __init__(self, content):
- super().__init__()
- self.__content = content
-
- def check(self, line_numbers):
+ @classmethod
+ def check(cls, content, line_numbers):
issues = []
for line_number in line_numbers:
- line = self.__content[line_number-1]
- if line.find('\t') != -1:
- issues.append(StyleIssue(line_number, line, 'meson.build should use spaces for indentation'))
- return issues
-
-
-class Pep8Checker(StyleChecker):
- patterns = ('*.py',)
- results_regex = re.compile('stdin:([0-9]+):([0-9]+)(.*)')
-
- def __init__(self, content):
- super().__init__()
- self.__content = content
-
- def check(self, line_numbers):
- issues = []
- data = ''.join(self.__content).encode('utf-8')
-
- try:
- ret = subprocess.run(['pycodestyle', '--ignore=E501', '-'],
- input=data, stdout=subprocess.PIPE)
- except FileNotFoundError:
- issues.append(StyleIssue(0, None, 'Please install pycodestyle to validate python additions'))
- return issues
-
- results = ret.stdout.decode('utf-8').splitlines()
- for item in results:
- search = re.search(Pep8Checker.results_regex, item)
- line_number = int(search.group(1))
- position = int(search.group(2))
- msg = search.group(3)
-
- if line_number in line_numbers:
- line = self.__content[line_number - 1]
- issues.append(StyleIssue(line_number, line, msg))
-
+ line = content[line_number - 1]
+ pos = line.find('\t')
+ if pos != -1:
+ issues.append(StyleIssue(line_number, [pos, pos], line,
+ 'meson.build should use spaces for indentation'))
return issues
class ShellChecker(StyleChecker):
+ dependencies = ('shellcheck',)
patterns = ('*.sh',)
- results_line_regex = re.compile('In - line ([0-9]+):')
-
- def __init__(self, content):
- super().__init__()
- self.__content = content
+ results_line_regex = re.compile(r'In - line ([0-9]+):')
- def check(self, line_numbers):
+ @classmethod
+ def check(cls, content, line_numbers):
issues = []
- data = ''.join(self.__content).encode('utf-8')
+ data = ''.join(content).encode('utf-8')
- try:
- ret = subprocess.run(['shellcheck', '-Cnever', '-'],
- input=data, stdout=subprocess.PIPE)
- except FileNotFoundError:
- issues.append(StyleIssue(0, None, 'Please install shellcheck to validate shell script additions'))
- return issues
+ ret = subprocess.run(['shellcheck', '-Cnever', '-'],
+ input=data, stdout=subprocess.PIPE)
results = ret.stdout.decode('utf-8').splitlines()
for nr, item in enumerate(results):
@@ -685,11 +744,8 @@ class ShellChecker(StyleChecker):
line = results[nr + 1]
msg = results[nr + 2]
- # Determined, but not yet used
- position = msg.find('^') + 1
-
if line_number in line_numbers:
- issues.append(StyleIssue(line_number, line, msg))
+ issues.append(StyleIssue(line_number, None, line, msg))
return issues
@@ -698,40 +754,12 @@ class ShellChecker(StyleChecker):
# Formatters
#
-class Formatter(metaclass=ClassRegistry):
- subclasses = []
-
- def __init__(self):
- pass
-
- #
- # Class methods
- #
- @classmethod
- def formatters(cls, filename, names):
- for formatter in cls.subclasses:
- if names and formatter.__name__ not in names:
- continue
- if formatter.supports(filename):
- yield formatter
-
- @classmethod
- def supports(cls, filename):
- for pattern in cls.patterns:
- if fnmatch.fnmatch(os.path.basename(filename), pattern):
- return True
- return False
-
- @classmethod
- def all_patterns(cls):
- patterns = set()
- for formatter in cls.subclasses:
- patterns.update(formatter.patterns)
-
- return patterns
+class Formatter(CheckerBase):
+ pass
class CLangFormatter(Formatter):
+ dependencies = ('clang-format',)
patterns = ('*.c', '*.cpp', '*.h')
priority = -1
@@ -746,7 +774,8 @@ class CLangFormatter(Formatter):
class DoxygenFormatter(Formatter):
patterns = ('*.c', '*.cpp')
- return_regex = re.compile(' +\\* +\\\\return +[a-z]')
+ oneliner_regex = re.compile(r'^ +\* +\\(brief|param|return)\b.*\.$')
+ return_regex = re.compile(r' +\* +\\return +[a-z]')
@classmethod
def format(cls, filename, data):
@@ -761,6 +790,7 @@ class DoxygenFormatter(Formatter):
lines.append(line)
continue
+ line = cls.oneliner_regex.sub(lambda m: m.group(0)[:-1], line)
line = cls.return_regex.sub(lambda m: m.group(0)[:-1] + m.group(0)[-1].upper(), line)
if line.find('*/') != -1:
@@ -806,7 +836,7 @@ class DPointerFormatter(Formatter):
class IncludeOrderFormatter(Formatter):
patterns = ('*.cpp', '*.h')
- include_regex = re.compile('^#include (["<])([^">]*)([">])')
+ include_regex = re.compile(r'^#include (["<])([^">]*)([">])')
@classmethod
def format(cls, filename, data):
@@ -858,6 +888,17 @@ class IncludeOrderFormatter(Formatter):
return '\n'.join(lines)
+class Pep8Formatter(Formatter):
+ dependencies = ('autopep8',)
+ patterns = ('*.py',)
+
+ @classmethod
+ def format(cls, filename, data):
+ ret = subprocess.run(['autopep8', '--ignore=E501', '-'],
+ input=data.encode('utf-8'), stdout=subprocess.PIPE)
+ return ret.stdout.decode('utf-8')
+
+
class StripTrailingSpaceFormatter(Formatter):
patterns = ('*.c', '*.cpp', '*.h', '*.py', 'meson.build')
@@ -873,6 +914,24 @@ class StripTrailingSpaceFormatter(Formatter):
# Style checking
#
+def check_commit(top_level, commit, checkers):
+ issues = []
+
+ # Apply the commit checkers first.
+ for checker in CommitChecker.instances(commit, checkers):
+ issues_ = checker.check_dependencies()
+ if issues_:
+ issues += issues_
+ continue
+
+ issues += checker.check(commit, top_level)
+
+ for issue in issues:
+ print(issue)
+
+ return len(issues)
+
+
def check_file(top_level, commit, filename, checkers):
# Extract the line numbers touched by the commit.
commit_diff = commit.get_diff(top_level, filename)
@@ -888,9 +947,15 @@ def check_file(top_level, commit, filename, checkers):
# Format the file after the commit with all formatters and compute the diff
# between the unformatted and formatted contents.
after = commit.get_file(filename)
+ issues = []
formatted = after
- for formatter in Formatter.formatters(filename, checkers):
+ for formatter in Formatter.instances(filename, checkers):
+ issues_ = formatter.check_dependencies()
+ if issues_:
+ issues += issues_
+ continue
+
formatted = formatter.format(filename, formatted)
after = after.splitlines(True)
@@ -903,11 +968,14 @@ def check_file(top_level, commit, filename, checkers):
formatted_diff = [hunk for hunk in formatted_diff if hunk.intersects(lines)]
# Check for code issues not related to formatting.
- issues = []
- for checker in StyleChecker.checkers(filename, checkers):
- checker = checker(after)
+ for checker in StyleChecker.instances(filename, checkers):
+ issues_ = checker.check_dependencies()
+ if issues_:
+ issues += issues_
+ continue
+
for hunk in commit_diff:
- issues += checker.check(hunk.side('to').touched)
+ issues += checker.check(after, hunk.side('to').touched)
# Print the detected issues.
if len(issues) == 0 and len(formatted_diff) == 0:
@@ -921,13 +989,9 @@ def check_file(top_level, commit, filename, checkers):
print(hunk)
if len(issues):
- issues = sorted(issues, key=lambda i: i.line_number)
+ issues = sorted(issues, key=lambda i: getattr(i, 'line_number', -1))
for issue in issues:
- print('%s#%u: %s%s' % (Colours.fg(Colours.Yellow), issue.line_number,
- issue.msg, Colours.reset()))
- if issue.line is not None:
- print('%s+%s%s' % (Colours.fg(Colours.Yellow), issue.line.rstrip(),
- Colours.reset()))
+ print(issue)
return len(formatted_diff) + len(issues)
@@ -939,13 +1003,8 @@ def check_style(top_level, commit, checkers):
print(title)
print(separator)
- issues = 0
-
# Apply the commit checkers first.
- for checker in CommitChecker.checkers(checkers):
- for issue in checker.check(commit, top_level):
- print('%s%s%s' % (Colours.fg(Colours.Yellow), issue.msg, Colours.reset()))
- issues += 1
+ issues = check_commit(top_level, commit, checkers)
# Filter out files we have no checker for.
patterns = set()
@@ -1017,7 +1076,7 @@ def main(argv):
if args.checkers:
args.checkers = args.checkers.split(',')
- # Check for required dependencies.
+ # Check for required common dependencies.
for command, mandatory in dependencies.items():
found = shutil.which(command)
if mandatory and not found:
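
A note on the registration pattern above: checkstyle.py now routes commit checkers, style checkers and formatters through a single CheckerBase, whose ClassRegistry metaclass records every concrete subclass and keeps each family sorted by priority. A minimal self-contained sketch of that mechanism, with illustrative class names rather than the script's real API:

    # Sketch of the ClassRegistry pattern from the patch above.
    # Class names are illustrative.
    class Registry(type):
        def __new__(cls, clsname, bases, attrs):
            newclass = super().__new__(cls, clsname, bases, attrs)
            # Register concrete subclasses on their direct base, skipping
            # the abstract root so each checker family keeps its own list.
            if bases and bases[0] is not PluginBase:
                base = bases[0]
                if not hasattr(base, 'subclasses'):
                    base.subclasses = []
                base.subclasses.append(newclass)
                base.subclasses.sort(key=lambda x: getattr(x, 'priority', 0),
                                     reverse=True)
            return newclass

    class PluginBase(metaclass=Registry):
        pass

    class StyleChecker(PluginBase):
        pass

    class TabChecker(StyleChecker):
        priority = 1

    assert StyleChecker.subclasses == [TabChecker]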
diff --git a/utils/codegen/controls.py b/utils/codegen/controls.py
new file mode 100644
index 00000000..e5161048
--- /dev/null
+++ b/utils/codegen/controls.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2019, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Helper classes to handle source code generation for libcamera controls
+
+
+class ControlEnum(object):
+ def __init__(self, data):
+ self.__data = data
+
+ @property
+ def description(self):
+ """The enum description"""
+ return self.__data.get('description')
+
+ @property
+ def name(self):
+ """The enum name"""
+ return self.__data.get('name')
+
+ @property
+ def value(self):
+ """The enum value"""
+ return self.__data.get('value')
+
+
+class Control(object):
+ def __init__(self, name, data, vendor, mode):
+ self.__name = name
+ self.__data = data
+ self.__enum_values = None
+ self.__size = None
+ self.__vendor = vendor
+
+ enum_values = data.get('enum')
+ if enum_values is not None:
+ self.__enum_values = [ControlEnum(enum) for enum in enum_values]
+
+ size = self.__data.get('size')
+ if size is not None:
+ if len(size) == 0:
+ raise RuntimeError(f'Control `{self.__name}` size must have at least one dimension')
+
+ # Compute the total number of elements in the array. If any of the
+ # array dimension is a string, the array is variable-sized.
+ num_elems = 1
+ for dim in size:
+ if type(dim) is str:
+ num_elems = 0
+ break
+
+ dim = int(dim)
+ if dim <= 0:
+ raise RuntimeError(f'Control `{self.__name}` size must have positive values only')
+
+ num_elems *= dim
+
+ self.__size = num_elems
+
+ if mode == 'properties':
+ self.__direction = 'out'
+ else:
+ direction = self.__data.get('direction')
+ if direction is None:
+ raise RuntimeError(f'Control `{self.__name}` missing required field `direction`')
+ if direction not in ['in', 'out', 'inout']:
+ raise RuntimeError(f'Control `{self.__name}` direction `{direction}` is invalid; must be one of `in`, `out`, or `inout`')
+ self.__direction = direction
+
+ @property
+ def description(self):
+ """The control description"""
+ return self.__data.get('description')
+
+ @property
+ def enum_values(self):
+ """The enum values, if the control is an enumeration"""
+ if self.__enum_values is None:
+ return
+ for enum in self.__enum_values:
+ yield enum
+
+ @property
+ def enum_values_count(self):
+ """The number of enum values, if the control is an enumeration"""
+ if self.__enum_values is None:
+ return 0
+ return len(self.__enum_values)
+
+ @property
+ def is_enum(self):
+ """Is the control an enumeration"""
+ return self.__enum_values is not None
+
+ @property
+ def vendor(self):
+ """The vendor string, or None"""
+ return self.__vendor
+
+ @property
+ def name(self):
+ """The control name (CamelCase)"""
+ return self.__name
+
+ @property
+ def type(self):
+ typ = self.__data.get('type')
+ size = self.__data.get('size')
+
+ if typ == 'string':
+ return 'std::string'
+
+ if self.__size is None:
+ return typ
+
+ if self.__size:
+ return f"Span<const {typ}, {self.__size}>"
+ else:
+ return f"Span<const {typ}>"
+
+ @property
+ def direction(self):
+ in_flag = 'ControlId::Direction::In'
+ out_flag = 'ControlId::Direction::Out'
+
+ if self.__direction == 'inout':
+ return f'{in_flag} | {out_flag}'
+ if self.__direction == 'in':
+ return in_flag
+ if self.__direction == 'out':
+ return out_flag
+
+ @property
+ def element_type(self):
+ return self.__data.get('type')
+
+ @property
+ def size(self):
+ return self.__size
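
A rough usage sketch for the helper above, assuming it is imported from the directory that contains controls.py; the control definition is invented for illustration. A string dimension in the size list marks the array as variable-sized, so size computes to 0 and the generated type falls back to an unsized Span:

    import yaml

    from controls import Control

    # Hypothetical control definition, not part of the real YAML files.
    doc = yaml.safe_load('''
    ExampleGains:
      type: float
      direction: inout
      description: Per-channel gains, invented for illustration.
      size: [n]
    ''')

    name, data = doc.popitem()
    ctrl = Control(name, data, vendor='libcamera', mode='controls')

    assert ctrl.type == 'Span<const float>'
    assert ctrl.direction == 'ControlId::Direction::In | ControlId::Direction::Out'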
diff --git a/utils/codegen/gen-controls.py b/utils/codegen/gen-controls.py
new file mode 100755
index 00000000..59b716c1
--- /dev/null
+++ b/utils/codegen/gen-controls.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2019, Google Inc.
+#
+# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+#
+# Generate control definitions from YAML
+
+import argparse
+import jinja2
+import os
+import sys
+import yaml
+
+from controls import Control
+
+
+def snake_case(s):
+ return ''.join([c.isupper() and ('_' + c) or c for c in s]).strip('_')
+
+
+def format_description(description):
+ description = description.strip('\n').split('\n')
+ for i in range(1, len(description)):
+ line = description[i]
+ description[i] = (line and ' * ' or ' *') + line
+ return '\n'.join(description)
+
+
+def extend_control(ctrl, id, ranges):
+ ctrl.id = ranges[ctrl.vendor] + id + 1
+
+ if ctrl.vendor != 'libcamera':
+ ctrl.namespace = f'{ctrl.vendor}::'
+ else:
+ ctrl.namespace = ''
+
+ return ctrl
+
+
+def main(argv):
+
+ # Parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mode', '-m', type=str, required=True, choices=['controls', 'properties'],
+ help='Mode of operation')
+ parser.add_argument('--output', '-o', metavar='file', type=str,
+ help='Output file name. Defaults to standard output if not specified.')
+ parser.add_argument('--ranges', '-r', type=str, required=True,
+ help='Control id range reservation file.')
+ parser.add_argument('--template', '-t', dest='template', type=str, required=True,
+ help='Template file name.')
+ parser.add_argument('input', type=str, nargs='+',
+ help='Input file name.')
+
+ args = parser.parse_args(argv[1:])
+
+ ranges = {}
+ with open(args.ranges, 'rb') as f:
+        data = f.read()
+ ranges = yaml.safe_load(data)['ranges']
+
+ controls = {}
+ for input in args.input:
+ data = yaml.safe_load(open(input, 'rb').read())
+
+ vendor = data['vendor']
+ if vendor not in ranges.keys():
+ raise RuntimeError(f'Control id range is not defined for vendor {vendor}')
+
+ ctrls = controls.setdefault(vendor, [])
+
+ for i, ctrl in enumerate(data['controls']):
+ ctrl = Control(*ctrl.popitem(), vendor, args.mode)
+ ctrls.append(extend_control(ctrl, i, ranges))
+
+ # Sort the vendors by range numerical value
+ controls = [[vendor, ctrls] for vendor, ctrls in controls.items()]
+ controls.sort(key=lambda item: ranges[item[0]])
+
+ filename = {
+ 'controls': 'control_ids',
+ 'properties': 'property_ids',
+ }[args.mode]
+
+ data = {
+ 'filename': filename,
+ 'mode': args.mode,
+ 'controls': controls,
+ }
+
+ env = jinja2.Environment()
+ env.filters['format_description'] = format_description
+ env.filters['snake_case'] = snake_case
+ template = env.from_string(open(args.template, 'r', encoding='utf-8').read())
+ string = template.render(data)
+
+ if args.output:
+ output = open(args.output, 'w', encoding='utf-8')
+ output.write(string)
+ output.close()
+ else:
+ sys.stdout.write(string)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
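
The generator's core mechanism is a Jinja2 environment extended with project-specific filters. A reduced, standalone sketch of that wiring, reusing the snake_case filter defined above with an invented one-line template rather than the real control_ids template:

    import jinja2

    def snake_case(s):
        return ''.join([c.isupper() and ('_' + c) or c for c in s]).strip('_')

    env = jinja2.Environment()
    env.filters['snake_case'] = snake_case

    template = env.from_string('#define {{ name | snake_case | upper }}')
    print(template.render(name='AwbEnable'))  # -> #define AWB_ENABLE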
diff --git a/utils/gen-formats.py b/utils/codegen/gen-formats.py
index da79a8bb..0c0932a5 100755
--- a/utils/gen-formats.py
+++ b/utils/codegen/gen-formats.py
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# gen-formats.py - Generate formats definitions from YAML
+# Generate formats definitions from YAML
import argparse
import re
diff --git a/utils/codegen/gen-gst-controls.py b/utils/codegen/gen-gst-controls.py
new file mode 100755
index 00000000..4ca76049
--- /dev/null
+++ b/utils/codegen/gen-gst-controls.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2019, Google Inc.
+# Copyright (C) 2024, Jaslo Ziska
+#
+# Authors:
+# Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+# Jaslo Ziska <jaslo@ziska.de>
+#
+# Generate gstreamer control properties from YAML
+
+import argparse
+import jinja2
+import re
+import sys
+import yaml
+
+from controls import Control
+
+
+exposed_controls = [
+ 'AeEnable', 'AeMeteringMode', 'AeConstraintMode', 'AeExposureMode',
+ 'ExposureValue', 'ExposureTime', 'ExposureTimeMode',
+ 'AnalogueGain', 'AnalogueGainMode', 'AeFlickerPeriod',
+ 'Brightness', 'Contrast', 'AwbEnable', 'AwbMode', 'ColourGains',
+ 'Saturation', 'Sharpness', 'ColourCorrectionMatrix', 'ScalerCrop',
+ 'DigitalGain', 'AfMode', 'AfRange', 'AfSpeed', 'AfMetering', 'AfWindows',
+ 'LensPosition', 'Gamma',
+]
+
+
+def find_common_prefix(strings):
+ prefix = strings[0]
+
+ for string in strings[1:]:
+ while string[:len(prefix)] != prefix and prefix:
+ prefix = prefix[:len(prefix) - 1]
+ if not prefix:
+ break
+
+ return prefix
+
+
+def format_description(description):
+ # Substitute doxygen keywords \sa (see also) and \todo
+ description = re.sub(r'\\sa((?: \w+)+)',
+ lambda match: 'See also: ' + ', '.join(
+ map(kebab_case, match.group(1).strip().split(' '))
+ ) + '.', description)
+ description = re.sub(r'\\todo', 'Todo:', description)
+
+ description = description.strip().split('\n')
+ return '\n'.join([
+ '"' + line.replace('\\', r'\\').replace('"', r'\"') + ' "' for line in description if line
+ ]).rstrip()
+
+
+# Custom filter to allow indenting by a string prior to Jinja version 3.0
+#
+# This function can be removed and the calls to indent_str() replaced by the
+# built-in indent() filter when dropping Jinja versions older than 3.0
+def indent_str(s, indention):
+ s += '\n'
+
+ lines = s.splitlines()
+ rv = lines.pop(0)
+
+ if lines:
+ rv += '\n' + '\n'.join(
+ indention + line if line else line for line in lines
+ )
+
+ return rv
+
+
+def snake_case(s):
+ return ''.join([
+ c.isupper() and ('_' + c.lower()) or c for c in s
+ ]).strip('_')
+
+
+def kebab_case(s):
+ return snake_case(s).replace('_', '-')
+
+
+def extend_control(ctrl):
+ if ctrl.vendor != 'libcamera':
+ ctrl.namespace = f'{ctrl.vendor}::'
+ ctrl.vendor_prefix = f'{ctrl.vendor}-'
+ else:
+ ctrl.namespace = ''
+ ctrl.vendor_prefix = ''
+
+ ctrl.is_array = ctrl.size is not None
+
+ if ctrl.is_enum:
+ # Remove common prefix from enum variant names
+ prefix = find_common_prefix([enum.name for enum in ctrl.enum_values])
+ for enum in ctrl.enum_values:
+ enum.gst_name = kebab_case(enum.name.removeprefix(prefix))
+
+ ctrl.gtype = 'enum'
+ ctrl.default = '0'
+ elif ctrl.element_type == 'bool':
+ ctrl.gtype = 'boolean'
+ ctrl.default = 'false'
+ elif ctrl.element_type == 'float':
+ ctrl.gtype = 'float'
+ ctrl.default = '0'
+ ctrl.min = '-G_MAXFLOAT'
+ ctrl.max = 'G_MAXFLOAT'
+ elif ctrl.element_type == 'int32_t':
+ ctrl.gtype = 'int'
+ ctrl.default = '0'
+ ctrl.min = 'G_MININT'
+ ctrl.max = 'G_MAXINT'
+ elif ctrl.element_type == 'int64_t':
+ ctrl.gtype = 'int64'
+ ctrl.default = '0'
+ ctrl.min = 'G_MININT64'
+ ctrl.max = 'G_MAXINT64'
+ elif ctrl.element_type == 'uint8_t':
+ ctrl.gtype = 'uchar'
+ ctrl.default = '0'
+ ctrl.min = '0'
+ ctrl.max = 'G_MAXUINT8'
+ elif ctrl.element_type == 'Rectangle':
+ ctrl.is_rectangle = True
+ ctrl.default = '0'
+ ctrl.min = '0'
+ ctrl.max = 'G_MAXINT'
+ else:
+ raise RuntimeError(f'The type `{ctrl.element_type}` is unknown')
+
+ return ctrl
+
+
+def main(argv):
+ # Parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output', '-o', metavar='file', type=str,
+ help='Output file name. Defaults to standard output if not specified.')
+ parser.add_argument('--template', '-t', dest='template', type=str, required=True,
+ help='Template file name.')
+ parser.add_argument('input', type=str, nargs='+',
+ help='Input file name.')
+
+ args = parser.parse_args(argv[1:])
+
+ controls = {}
+ for input in args.input:
+ data = yaml.safe_load(open(input, 'rb').read())
+
+ vendor = data['vendor']
+ ctrls = controls.setdefault(vendor, [])
+
+ for ctrl in data['controls']:
+ ctrl = Control(*ctrl.popitem(), vendor, mode='controls')
+
+ if ctrl.name in exposed_controls:
+ ctrls.append(extend_control(ctrl))
+
+ data = {'controls': list(controls.items())}
+
+ env = jinja2.Environment()
+ env.filters['format_description'] = format_description
+ env.filters['indent_str'] = indent_str
+ env.filters['snake_case'] = snake_case
+ env.filters['kebab_case'] = kebab_case
+ template = env.from_string(open(args.template, 'r', encoding='utf-8').read())
+ string = template.render(data)
+
+ if args.output:
+ with open(args.output, 'w', encoding='utf-8') as output:
+ output.write(string)
+ else:
+ sys.stdout.write(string)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
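
To see how the enum handling in extend_control() behaves, here is a small worked example, assuming find_common_prefix() and kebab_case() from above are in scope; the variant names are invented. The shared prefix is stripped from each variant and the remainder becomes the kebab-case GStreamer nick:

    names = ['AfModeManual', 'AfModeAuto', 'AfModeContinuous']
    prefix = find_common_prefix(names)                      # 'AfMode'
    nicks = [kebab_case(n.removeprefix(prefix)) for n in names]
    assert nicks == ['manual', 'auto', 'continuous']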
diff --git a/utils/gen-header.sh b/utils/codegen/gen-header.sh
index 8b66c5dd..c78f0859 100755
--- a/utils/gen-header.sh
+++ b/utils/codegen/gen-header.sh
@@ -1,7 +1,7 @@
#!/bin/sh
-src_dir="$1"
-dst_file="$2"
+dst_file="$1"
+shift
cat <<EOF > "$dst_file"
/* SPDX-License-Identifier: LGPL-2.1-or-later */
@@ -9,16 +9,15 @@ cat <<EOF > "$dst_file"
/*
* Copyright (C) 2018-2019, Google Inc.
*
- * libcamera.h - libcamera public API
+ * libcamera public API
*/
#pragma once
EOF
-headers=$(for header in "$src_dir"/*.h "$src_dir"/*.h.in ; do
+headers=$(for header in "$@" ; do
header=$(basename "$header")
- header="${header%.in}"
echo "$header"
done | sort)
diff --git a/utils/gen-ipa-pub-key.py b/utils/codegen/gen-ipa-pub-key.py
index a4a1f7b7..dc3e7d5f 100755
--- a/utils/gen-ipa-pub-key.py
+++ b/utils/codegen/gen-ipa-pub-key.py
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipa-gen-key.py - Generate the IPA module signing public key
+# Generate the IPA module signing public key
import string
import subprocess
diff --git a/utils/tracepoints/gen-tp-header.py b/utils/codegen/gen-tp-header.py
index a454615e..6769c7ce 100755
--- a/utils/tracepoints/gen-tp-header.py
+++ b/utils/codegen/gen-tp-header.py
@@ -4,9 +4,8 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# gen-tp-header.py - Generate header file to contain lttng tracepoints
+# Generate header file to contain lttng tracepoints
-import datetime
import jinja2
import pathlib
import os
@@ -20,7 +19,6 @@ def main(argv):
output = argv[2]
template = argv[3]
- year = datetime.datetime.now().year
path = pathlib.Path(output).absolute().relative_to(argv[1])
source = ''
@@ -28,7 +26,7 @@ def main(argv):
source += open(fname, 'r', encoding='utf-8').read() + '\n\n'
template = jinja2.Template(open(template, 'r', encoding='utf-8').read())
- string = template.render(year=year, path=path, source=source)
+ string = template.render(path=path, source=source)
f = open(output, 'w', encoding='utf-8').write(string)
diff --git a/utils/ipc/extract-docs.py b/utils/codegen/ipc/extract-docs.py
index 8f7fff9f..61f44cae 100755
--- a/utils/ipc/extract-docs.py
+++ b/utils/codegen/ipc/extract-docs.py
@@ -4,15 +4,15 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# extract-docs.py - Extract doxygen documentation from mojom files
+# Extract doxygen documentation from mojom files
import argparse
import re
import sys
-regex_block_start = re.compile('^\/\*\*$')
-regex_block_end = re.compile('^ \*\/$')
-regex_spdx = re.compile('^\/\* SPDX-License-Identifier: .* \*\/$')
+regex_block_start = re.compile(r'^/\*\*$')
+regex_block_end = re.compile(r'^ \*/$')
+regex_spdx = re.compile(r'^/\* SPDX-License-Identifier: .* \*/$')
def main(argv):
@@ -38,7 +38,7 @@ def main(argv):
/*
* Copyright (C) 2021, Google Inc.
*
- * {pipeline}_ipa_interface.cpp - Docs file for generated {pipeline}.mojom
+ * Docs file for generated {pipeline}.mojom
*
* This file is auto-generated. Do not edit.
*/
diff --git a/utils/ipc/generate.py b/utils/codegen/ipc/generate.py
index 8771e0a6..dfbe659b 100755
--- a/utils/ipc/generate.py
+++ b/utils/codegen/ipc/generate.py
@@ -4,18 +4,25 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# generate.py - Run mojo code generator for generating libcamera IPC files
+# Run mojo code generator for generating libcamera IPC files
import os
import sys
-# TODO set sys.pycache_prefix for >= python3.8
-sys.dont_write_bytecode = True
+sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/bindings')
import mojo.public.tools.bindings.mojom_bindings_generator as generator
def _GetModulePath(path, output_dir):
- return os.path.join(output_dir, path.relative_path())
+ return os.path.join(output_dir, path.relative_path())
+
+
+# Disable the attribute checker to support our custom attributes. Ideally we
+# should add the attributes to the list of allowed attributes in
+# utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py, but
+# we're trying hard to use the upstream mojom as-is.
+if hasattr(generator, '_BUILTIN_CHECKS'):
+ del generator._BUILTIN_CHECKS['attributes']
# Override the mojo code generator's generator list to only contain our
# libcamera generator
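
The hasattr() guard above keeps the deletion compatible with mojo revisions that predate the _BUILTIN_CHECKS registry. The same defensive pattern in isolation, with an invented stand-in for the imported module:

    from types import SimpleNamespace

    # 'fake_generator' stands in for mojom_bindings_generator; invented here.
    fake_generator = SimpleNamespace(_BUILTIN_CHECKS={'attributes': object()})

    if hasattr(fake_generator, '_BUILTIN_CHECKS'):
        del fake_generator._BUILTIN_CHECKS['attributes']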
diff --git a/utils/ipc/generators/__init__.py b/utils/codegen/ipc/generators/__init__.py
index e69de29b..e69de29b 100644
--- a/utils/ipc/generators/__init__.py
+++ b/utils/codegen/ipc/generators/__init__.py
diff --git a/utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
index c60b99b8..3942e570 100644
--- a/utils/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_interface.h.tmpl
@@ -7,7 +7,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * core_ipa_interface.h - libcamera core definitions for Image Processing Algorithms
+ * libcamera core definitions for Image Processing Algorithms
*
* This file is auto-generated. Do not edit.
*/
@@ -15,8 +15,13 @@
#pragma once
{% if has_map %}#include <map>{% endif %}
+{% if has_string %}#include <string>{% endif %}
{% if has_array %}#include <vector>{% endif %}
+#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+
#include <libcamera/ipa/ipa_interface.h>
namespace libcamera {
diff --git a/utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
index 5738a1aa..ac84963d 100644
--- a/utils/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/core_ipa_serializer.h.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * core_ipa_serializer.h - Data serializer for core libcamera definitions for IPA
+ * Data serializer for core libcamera definitions for IPA
*
* This file is auto-generated. Do not edit.
*/
@@ -31,13 +31,8 @@ template<>
class IPADataSerializer<{{struct|name}}>
{
public:
-{{- serializer.serializer(struct, "")}}
-{%- if struct|has_fd %}
-{{serializer.deserializer_fd(struct, "")}}
-{%- else %}
-{{serializer.deserializer_no_fd(struct, "")}}
-{{serializer.deserializer_fd_simple(struct, "")}}
-{%- endif %}
+{{- serializer.serializer(struct)}}
+{{- serializer.deserializer(struct)}}
};
{% endfor %}
diff --git a/utils/ipc/generators/libcamera_templates/definition_functions.tmpl b/utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl
index 8b8509f3..8b8509f3 100644
--- a/utils/ipc/generators/libcamera_templates/definition_functions.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/definition_functions.tmpl
diff --git a/utils/ipc/generators/libcamera_templates/meson.build b/utils/codegen/ipc/generators/libcamera_templates/meson.build
index 70664eab..70664eab 100644
--- a/utils/ipc/generators/libcamera_templates/meson.build
+++ b/utils/codegen/ipc/generators/libcamera_templates/meson.build
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
index 160601f7..5d70ea6a 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_interface.h.tmpl
@@ -7,19 +7,27 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_interface.h - Image Processing Algorithm interface for {{module_name}}
+ * Image Processing Algorithm interface for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
#pragma once
-#include <libcamera/ipa/core_ipa_interface.h>
-#include <libcamera/ipa/ipa_interface.h>
-
{% if has_map %}#include <map>{% endif %}
+{% if has_string %}#include <string>{% endif %}
{% if has_array %}#include <vector>{% endif %}
+#include <libcamera/base/flags.h>
+#include <libcamera/base/signal.h>
+
+#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+
+#include <libcamera/ipa/core_ipa_interface.h>
+#include <libcamera/ipa/ipa_interface.h>
+
namespace libcamera {
{%- if has_namespace %}
{% for ns in namespace %}
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
index c37c4941..9a3aadbd 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.cpp.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_proxy.cpp - Image Processing Algorithm proxy for {{module_name}}
+ * Image Processing Algorithm proxy for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
@@ -127,13 +127,13 @@ void {{proxy_name}}::recvMessage(const IPCMessage &data)
{{proxy_funcs.func_sig(proxy_name, method)}}
{
if (isolate_)
- {{"return " if method|method_return_value != "void"}}{{method.mojom_name}}IPC(
+ return {{method.mojom_name}}IPC(
{%- for param in method|method_param_names -%}
{{param}}{{- ", " if not loop.last}}
{%- endfor -%}
);
else
- {{"return " if method|method_return_value != "void"}}{{method.mojom_name}}Thread(
+ return {{method.mojom_name}}Thread(
{%- for param in method|method_param_names -%}
{{param}}{{- ", " if not loop.last}}
{%- endfor -%}
@@ -159,25 +159,23 @@ void {{proxy_name}}::recvMessage(const IPCMessage &data)
state_ = ProxyRunning;
thread_.start();
- {{ "return " if method|method_return_value != "void" -}}
- proxy_.invokeMethod(&ThreadProxy::start, ConnectionTypeBlocking
+ return proxy_.invokeMethod(&ThreadProxy::start, ConnectionTypeBlocking
{{- ", " if method|method_param_names}}
{%- for param in method|method_param_names -%}
{{param}}{{- ", " if not loop.last}}
{%- endfor -%}
);
{%- elif not method|is_async %}
- {{ "return " if method|method_return_value != "void" -}}
- ipa_->{{method.mojom_name}}(
+ return ipa_->{{method.mojom_name}}(
{%- for param in method|method_param_names -%}
{{param}}{{- ", " if not loop.last}}
{%- endfor -%}
);
{% elif method|is_async %}
ASSERT(state_ == ProxyRunning);
- proxy_.invokeMethod(&ThreadProxy::{{method.mojom_name}}, ConnectionTypeQueued,
+ proxy_.invokeMethod(&ThreadProxy::{{method.mojom_name}}, ConnectionTypeQueued
{%- for param in method|method_param_names -%}
- {{param}}{{- ", " if not loop.last}}
+ , {{param}}
{%- endfor -%}
);
{%- endif %}
@@ -206,7 +204,7 @@ void {{proxy_name}}::recvMessage(const IPCMessage &data)
);
{%- endif %}
if (_ret < 0) {
- LOG(IPAProxy, Error) << "Failed to call {{method.mojom_name}}";
+ LOG(IPAProxy, Error) << "Failed to call {{method.mojom_name}}: " << _ret;
{%- if method|method_return_value != "void" %}
return static_cast<{{method|method_return_value}}>(_ret);
{%- else %}
@@ -235,14 +233,11 @@ void {{proxy_name}}::recvMessage(const IPCMessage &data)
}
void {{proxy_name}}::{{method.mojom_name}}IPC(
- std::vector<uint8_t>::const_iterator data,
- size_t dataSize,
+ [[maybe_unused]] std::vector<uint8_t>::const_iterator data,
+ [[maybe_unused]] size_t dataSize,
[[maybe_unused]] const std::vector<SharedFD> &fds)
{
-{%- for param in method.parameters %}
- {{param|name}} {{param.mojom_name}};
-{%- endfor %}
-{{proxy_funcs.deserialize_call(method.parameters, 'data', 'fds', false, false, true, 'dataSize')}}
+{{proxy_funcs.deserialize_call(method.parameters, 'data', 'fds', false, true, true, 'dataSize')}}
{{method.mojom_name}}.emit({{method.parameters|params_comma_sep}});
}
{% endfor %}
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
index ed270f5c..a0312a7c 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy.h.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_proxy.h - Image Processing Algorithm proxy for {{module_name}}
+ * Image Processing Algorithm proxy for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
@@ -18,6 +18,7 @@
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
+#include <libcamera/base/object.h>
#include <libcamera/base/thread.h>
#include "libcamera/internal/control_serializer.h"
@@ -43,15 +44,6 @@ public:
{{proxy_funcs.func_sig(proxy_name, method, "", false, true)|indent(8, true)}};
{% endfor %}
-{%- for method in interface_event.methods %}
- Signal<
-{%- for param in method.parameters -%}
- {{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
- {{- ", " if not loop.last}}
-{%- endfor -%}
-> {{method.mojom_name}};
-{% endfor %}
-
private:
void recvMessage(const IPCMessage &data);
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
index b65dc4cf..1f990d3f 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_proxy_worker.cpp.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_proxy_worker.cpp - Image Processing Algorithm proxy worker for {{module_name}}
+ * Image Processing Algorithm proxy worker for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
diff --git a/utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
index 8b709705..65a7dd11 100644
--- a/utils/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/module_ipa_serializer.h.tmpl
@@ -8,7 +8,7 @@
/*
* Copyright (C) 2020, Google Inc.
*
- * {{module_name}}_ipa_serializer.h - Image Processing Algorithm data serializer for {{module_name}}
+ * Image Processing Algorithm data serializer for {{module_name}}
*
* This file is auto-generated. Do not edit.
*/
@@ -32,13 +32,8 @@ template<>
class IPADataSerializer<{{struct|name_full}}>
{
public:
-{{- serializer.serializer(struct, namespace_str)}}
-{%- if struct|has_fd %}
-{{serializer.deserializer_fd(struct, namespace_str)}}
-{%- else %}
-{{serializer.deserializer_no_fd(struct, namespace_str)}}
-{{serializer.deserializer_fd_simple(struct, namespace_str)}}
-{%- endif %}
+{{- serializer.serializer(struct)}}
+{{- serializer.deserializer(struct)}}
};
{% endfor %}
diff --git a/utils/ipc/generators/libcamera_templates/proxy_functions.tmpl b/utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl
index 2be65d43..25476990 100644
--- a/utils/ipc/generators/libcamera_templates/proxy_functions.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/proxy_functions.tmpl
@@ -34,7 +34,7 @@
thread_.exit();
thread_.wait();
- Thread::current()->dispatchMessages(Message::Type::InvokeMessage);
+ Thread::current()->dispatchMessages(Message::Type::InvokeMessage, this);
state_ = ProxyStopped;
{%- endmacro -%}
@@ -186,7 +186,7 @@ IPADataSerializer<{{param|name}}>::deserialize(
{% for param in params|with_fds %}
{%- if loop.first %}
const size_t {{param.mojom_name}}FdStart = 0;
-{%- elif not loop.last %}
+{%- else %}
const size_t {{param.mojom_name}}FdStart = {{loop.previtem.mojom_name}}FdStart + {{loop.previtem.mojom_name}}FdsSize;
{%- endif %}
{%- endfor %}
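The `elif not loop.last` to `else` change above fixes the file-descriptor offset chain: previously the last fd-carrying parameter never received its `FdStart` constant. A minimal sketch of the running-offset computation the template now emits for every parameter (parameter names and fd counts here are made up):

``` python
# Each parameter's FdStart is the previous start plus the previous
# parameter's fd count; the final entry used to be skipped.
params = [('buffers', 2), ('stats', 1), ('sensorControls', 3)]

start = 0
for name, fd_count in params:
    print(f'const size_t {name}FdStart = {start};')
    start += fd_count
```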
diff --git a/utils/ipc/generators/libcamera_templates/serializer.tmpl b/utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl
index 323e1293..d07836cc 100644
--- a/utils/ipc/generators/libcamera_templates/serializer.tmpl
+++ b/utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl
@@ -28,7 +28,7 @@
#
# \todo Avoid intermediate vectors
#}
-{%- macro serializer_field(field, namespace, loop) %}
+{%- macro serializer_field(field, loop) %}
{%- if field|is_pod or field|is_enum %}
std::vector<uint8_t> {{field.mojom_name}};
std::tie({{field.mojom_name}}, std::ignore) =
@@ -94,7 +94,7 @@
# Generate code to deserialize \a field into object ret.
# This code is meant to be used by the IPADataSerializer specialization.
#}
-{%- macro deserializer_field(field, namespace, loop) %}
+{%- macro deserializer_field(field, loop) %}
{% if field|is_pod or field|is_enum %}
{%- set field_size = (field|bit_width|int / 8)|int %}
{{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
@@ -182,7 +182,7 @@
# Generate code for IPADataSerializer specialization, for serializing
# \a struct.
#}
-{%- macro serializer(struct, namespace) %}
+{%- macro serializer(struct) %}
static std::tuple<std::vector<uint8_t>, std::vector<SharedFD>>
serialize(const {{struct|name_full}} &data,
{%- if struct|needs_control_serializer %}
@@ -196,7 +196,7 @@
std::vector<SharedFD> retFds;
{%- endif %}
{%- for field in struct.fields %}
-{{serializer_field(field, namespace, loop)}}
+{{serializer_field(field, loop)}}
{%- endfor %}
{% if struct|has_fd %}
return {retData, retFds};
@@ -213,7 +213,7 @@
# Generate code for IPADataSerializer specialization, for deserializing
# \a struct, in the case that \a struct has file descriptors.
#}
-{%- macro deserializer_fd(struct, namespace) %}
+{%- macro deserializer_fd(struct) %}
static {{struct|name_full}}
deserialize(std::vector<uint8_t> &data,
std::vector<SharedFD> &fds,
@@ -245,7 +245,7 @@
size_t dataSize = std::distance(dataBegin, dataEnd);
[[maybe_unused]] size_t fdsSize = std::distance(fdsBegin, fdsEnd);
{%- for field in struct.fields -%}
-{{deserializer_field(field, namespace, loop)}}
+{{deserializer_field(field, loop)}}
{%- endfor %}
return ret;
}
@@ -258,7 +258,7 @@
# \a struct, in the case that \a struct has no file descriptors but requires
# deserializers with file descriptors.
#}
-{%- macro deserializer_fd_simple(struct, namespace) %}
+{%- macro deserializer_fd_simple(struct) %}
static {{struct|name_full}}
deserialize(std::vector<uint8_t> &data,
[[maybe_unused]] std::vector<SharedFD> &fds,
@@ -285,7 +285,7 @@
# Generate code for IPADataSerializer specialization, for deserializing
# \a struct, in the case that \a struct does not have file descriptors.
#}
-{%- macro deserializer_no_fd(struct, namespace) %}
+{%- macro deserializer_no_fd(struct) %}
static {{struct|name_full}}
deserialize(std::vector<uint8_t> &data,
{%- if struct|needs_control_serializer %}
@@ -312,8 +312,22 @@
size_t dataSize = std::distance(dataBegin, dataEnd);
{%- for field in struct.fields -%}
-{{deserializer_field(field, namespace, loop)}}
+{{deserializer_field(field, loop)}}
{%- endfor %}
return ret;
}
{%- endmacro %}
+
+{#
+ # \brief Deserialize a struct
+ #
+ # Generate code for IPADataSerializer specialization, for deserializing \a struct.
+ #}
+{%- macro deserializer(struct) %}
+{%- if struct|has_fd %}
+{{deserializer_fd(struct)}}
+{%- else %}
+{{deserializer_no_fd(struct)}}
+{{deserializer_fd_simple(struct)}}
+{%- endif %}
+{%- endmacro %}
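The new `deserializer()` macro centralizes a dispatch that was previously duplicated in both serializer headers (see the `core_ipa_serializer.h.tmpl` and `module_ipa_serializer.h.tmpl` hunks above). A rough Python analogue, with stand-in functions for the Jinja macros (the real output is C++ member functions):

``` python
def deserializer_fd(struct):
    return f'// fd-aware deserialize() for {struct}'

def deserializer_no_fd(struct):
    return f'// plain deserialize() for {struct}'

def deserializer_fd_simple(struct):
    return f'// fd-accepting shim (ignores fds) for {struct}'

def deserializer(struct, has_fd):
    # Structs carrying SharedFDs get the fd-aware specialization; all
    # others get the plain one plus a shim for callers that pass fds.
    if has_fd:
        return deserializer_fd(struct)
    return '\n'.join([deserializer_no_fd(struct),
                      deserializer_fd_simple(struct)])

print(deserializer('IPACameraSensorInfo', has_fd=False))
```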
diff --git a/utils/ipc/generators/meson.build b/utils/codegen/ipc/generators/meson.build
index 504f1a46..504f1a46 100644
--- a/utils/ipc/generators/meson.build
+++ b/utils/codegen/ipc/generators/meson.build
diff --git a/utils/ipc/generators/mojom_libcamera_generator.py b/utils/codegen/ipc/generators/mojom_libcamera_generator.py
index 1a629f9d..eff29a5b 100644
--- a/utils/ipc/generators/mojom_libcamera_generator.py
+++ b/utils/codegen/ipc/generators/mojom_libcamera_generator.py
@@ -4,7 +4,7 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# mojom_libcamera_generator.py - Generates libcamera files from a mojom.Module.
+# Generates libcamera files from a mojom.Module.
import argparse
import datetime
@@ -72,7 +72,7 @@ def ParamsCommaSep(l):
def GetDefaultValue(element):
if element.default is not None:
return element.default
- if type(element.kind) == mojom.Kind:
+ if type(element.kind) == mojom.ValueKind:
return '0'
if IsFlags(element):
return ''
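A hedged note on the `mojom.Kind` to `mojom.ValueKind` switch above: it tracks an upstream mojom refactoring in which plain value types became `ValueKind` instances, so an exact-type comparison against `Kind` stopped matching them. Assuming that hierarchy (and run from the bindings tools tree so the import resolves), the difference looks like this:

``` python
import mojom.generate.module as mojom

element_kind = mojom.INT32
# The exact-type test against the old base class no longer matches PODs...
assert type(element_kind) != mojom.Kind
# ...while the ValueKind test used above does.
assert type(element_kind) == mojom.ValueKind
```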
@@ -166,7 +166,7 @@ def MethodParamOutputs(method):
return method.response_parameters[1:]
def MethodParamsHaveFd(parameters):
- return len([x for x in parameters if HasFd(x)]) > 0
+ return any(x for x in parameters if HasFd(x))
def MethodInputHasFd(method):
return MethodParamsHaveFd(method.parameters)
@@ -369,7 +369,7 @@ def ValidateNamespace(namespace):
if namespace == '':
raise Exception('Must have a namespace')
- if not re.match('^ipa\.[0-9A-Za-z_]+', namespace):
+ if not re.match(r'^ipa\.[0-9A-Za-z_]+', namespace):
raise Exception('Namespace must be of the form "ipa.{pipeline_name}"')
def ValidateInterfaces(interfaces):
@@ -465,8 +465,9 @@ class Generator(generator.Generator):
'cmd_event_enum_name': '_%sEventCmd' % self.module_name,
'consts': self.module.constants,
'enums': self.module.enums,
- 'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
- 'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
+ 'has_array': any(x for x in self.module.kinds.keys() if x[0] == 'a'),
+ 'has_map': any(x for x in self.module.kinds.keys() if x[0] == 'm'),
+ 'has_string': any(x for x in self.module.kinds.keys() if x[0] == 's'),
'has_namespace': self.module.mojom_namespace != '',
'interface_event': GetEventInterface(self.module.interfaces),
'interface_main': GetMainInterface(self.module.interfaces),
@@ -484,8 +485,9 @@ class Generator(generator.Generator):
return {
'consts': self.module.constants,
'enums_gen_header': [x for x in self.module.enums if x.attributes is None or 'skipHeader' not in x.attributes],
- 'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
- 'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
+ 'has_array': any(x for x in self.module.kinds.keys() if x[0] == 'a'),
+ 'has_map': any(x for x in self.module.kinds.keys() if x[0] == 'm'),
+ 'has_string': any(x for x in self.module.kinds.keys() if x[0] == 's'),
'structs_gen_header': [x for x in self.module.structs if x.attributes is None or 'skipHeader' not in x.attributes],
'structs_gen_serializer': [x for x in self.module.structs if x.attributes is None or 'skipSerdes' not in x.attributes],
}
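The `has_array`/`has_map`/`has_string` probes above rely on mojom kind keys being type "spec" strings whose first character encodes the category. A small illustration under that assumption (the spec strings here are examples, not taken from a real module):

``` python
# 'a...' arrays, 'm...' maps, 's' string, mirroring the x[0] tests above.
kinds = {'s': 'string', 'a:u8': 'array<uint8>', 'm[s][u32]': 'map<string, uint32>'}

has_array = any(x for x in kinds.keys() if x[0] == 'a')
has_map = any(x for x in kinds.keys() if x[0] == 'm')
has_string = any(x for x in kinds.keys() if x[0] == 's')
assert has_array and has_map and has_string
```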
diff --git a/utils/ipc/meson.build b/utils/codegen/ipc/meson.build
index 973a5417..f77bf324 100644
--- a/utils/ipc/meson.build
+++ b/utils/codegen/ipc/meson.build
@@ -13,6 +13,7 @@ mojom_docs_extractor = find_program('./extract-docs.py')
mojom_templates = custom_target('mojom_templates',
input : mojom_template_files,
output : 'libcamera_templates.zip',
- command : [mojom_generator, '-o', '@OUTDIR@', 'precompile'])
+ command : [mojom_generator, '-o', '@OUTDIR@', 'precompile'],
+ env : py_build_env)
mojom_templates_dir = meson.current_build_dir()
diff --git a/utils/ipc/mojo/README b/utils/codegen/ipc/mojo/README
index d5c24fc3..961cabd2 100644
--- a/utils/ipc/mojo/README
+++ b/utils/codegen/ipc/mojo/README
@@ -1,4 +1,4 @@
# SPDX-License-Identifier: CC0-1.0
-Files in this directory are imported from 9c138d992bfc of Chromium. Do not
+Files in this directory are imported from 9be4263648d7 of Chromium. Do not
modify them manually.
diff --git a/utils/ipc/mojo/public/LICENSE b/utils/codegen/ipc/mojo/public/LICENSE
index 972bb2ed..513e8a6a 100644
--- a/utils/ipc/mojo/public/LICENSE
+++ b/utils/codegen/ipc/mojo/public/LICENSE
@@ -1,4 +1,4 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
+// Copyright 2014 The Chromium Authors
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
diff --git a/utils/ipc/mojo/public/tools/.style.yapf b/utils/codegen/ipc/mojo/public/tools/.style.yapf
index b4ebbe24..b4ebbe24 100644
--- a/utils/ipc/mojo/public/tools/.style.yapf
+++ b/utils/codegen/ipc/mojo/public/tools/.style.yapf
diff --git a/utils/ipc/mojo/public/tools/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/BUILD.gn
index eb6391a6..5328a34a 100644
--- a/utils/ipc/mojo/public/tools/BUILD.gn
+++ b/utils/codegen/ipc/mojo/public/tools/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -10,7 +10,11 @@ group("mojo_python_unittests") {
"run_all_python_unittests.py",
"//testing/scripts/run_isolated_script_test.py",
]
- deps = [ "//mojo/public/tools/mojom/mojom:tests" ]
+ deps = [
+ "//mojo/public/tools/bindings:tests",
+ "//mojo/public/tools/mojom:tests",
+ "//mojo/public/tools/mojom/mojom:tests",
+ ]
data_deps = [
"//testing:test_scripts_shared",
"//third_party/catapult/third_party/typ/",
diff --git a/utils/ipc/mojo/public/tools/bindings/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
index 3e242532..eeca73ea 100644
--- a/utils/ipc/mojo/public/tools/bindings/BUILD.gn
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
@@ -1,24 +1,27 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import("//build/config/python.gni")
import("//mojo/public/tools/bindings/mojom.gni")
import("//third_party/jinja2/jinja2.gni")
-# TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
-python2_action("precompile_templates") {
+action("precompile_templates") {
sources = mojom_generator_sources
sources += [
+ "$mojom_generator_root/generators/cpp_templates/cpp_macros.tmpl",
"$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
"$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/feature_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/feature_definition.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/interface_feature_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
"$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
+ "$mojom_generator_root/generators/cpp_templates/module-features.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
@@ -26,7 +29,6 @@ python2_action("precompile_templates") {
"$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
- "$mojom_generator_root/generators/cpp_templates/module-test-utils.cc.tmpl",
"$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
"$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
"$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
@@ -65,9 +67,6 @@ python2_action("precompile_templates") {
"$mojom_generator_root/generators/java_templates/struct.java.tmpl",
"$mojom_generator_root/generators/java_templates/union.java.tmpl",
"$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
- "$mojom_generator_root/generators/js_templates/externs/interface_definition.tmpl",
- "$mojom_generator_root/generators/js_templates/externs/module.externs.tmpl",
- "$mojom_generator_root/generators/js_templates/externs/struct_definition.tmpl",
"$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
"$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
"$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
@@ -93,8 +92,11 @@ python2_action("precompile_templates") {
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
+ "$mojom_generator_root/generators/ts_templates/enum_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/interface_definition.tmpl",
"$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
- "$mojom_generator_root/generators/ts_templates/mojom.tmpl",
+ "$mojom_generator_root/generators/ts_templates/struct_definition.tmpl",
+ "$mojom_generator_root/generators/ts_templates/union_definition.tmpl",
]
script = mojom_generator_script
@@ -102,8 +104,8 @@ python2_action("precompile_templates") {
outputs = [
"$target_gen_dir/cpp_templates.zip",
"$target_gen_dir/java_templates.zip",
- "$target_gen_dir/mojolpm_templates.zip",
"$target_gen_dir/js_templates.zip",
+ "$target_gen_dir/mojolpm_templates.zip",
"$target_gen_dir/ts_templates.zip",
]
args = [
@@ -113,3 +115,17 @@ python2_action("precompile_templates") {
"precompile",
]
}
+
+group("tests") {
+ data = [
+ mojom_generator_script,
+ "checks/mojom_attributes_check_unittest.py",
+ "checks/mojom_interface_feature_check_unittest.py",
+ "checks/mojom_restrictions_checks_unittest.py",
+ "mojom_bindings_generator_unittest.py",
+ "//tools/diagnosis/crbug_1001171.py",
+ "//third_party/markupsafe/",
+ ]
+ data += mojom_generator_sources
+ data += jinja2_sources
+}
diff --git a/utils/ipc/mojo/public/tools/bindings/README.md b/utils/codegen/ipc/mojo/public/tools/bindings/README.md
index 43882450..b27b2d01 100644
--- a/utils/ipc/mojo/public/tools/bindings/README.md
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/README.md
@@ -96,7 +96,7 @@ for message parameters.
| `string` | UTF-8 encoded string.
| `array<T>` | Array of any Mojom type *T*; for example, `array<uint8>` or `array<array<string>>`.
| `array<T, N>` | Fixed-length array of any Mojom type *T*. The parameter *N* must be an integral constant.
-| `map<S, T>` | Associated array maping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
+| `map<S, T>` | Associative array mapping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
| `handle` | Generic Mojo handle. May be any type of handle, including a wrapped native platform handle.
| `handle<message_pipe>` | Generic message pipe handle.
| `handle<shared_buffer>` | Shared buffer handle.
@@ -188,8 +188,8 @@ struct StringPair {
};
enum AnEnum {
- YES,
- NO
+ kYes,
+ kNo
};
interface SampleInterface {
@@ -209,7 +209,7 @@ struct AllTheThings {
uint64 unsigned_64bit_value;
float float_value_32bit;
double float_value_64bit;
- AnEnum enum_value = AnEnum.YES;
+ AnEnum enum_value = AnEnum.kYes;
// Strings may be nullable.
string? maybe_a_string_maybe_not;
@@ -300,14 +300,14 @@ within a module or nested within the namespace of some struct or interface:
module business.mojom;
enum Department {
- SALES = 0,
- DEV,
+ kSales = 0,
+ kDev,
};
struct Employee {
enum Type {
- FULL_TIME,
- PART_TIME,
+ kFullTime,
+ kPartTime,
};
Type type;
@@ -315,6 +315,9 @@ struct Employee {
};
```
+C++ constant-style enum value names are preferred as specified in the
+[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html#Enumerator_Names).
+
Similar to C-style enums, individual values may be explicitly assigned within an
enum definition. By default, values are based at zero and increment by
1 sequentially.
@@ -336,8 +339,8 @@ struct Employee {
const uint64 kInvalidId = 0;
enum Type {
- FULL_TIME,
- PART_TIME,
+ kFullTime,
+ kPartTime,
};
uint64 id = kInvalidId;
@@ -348,6 +351,37 @@ struct Employee {
The effect of nested definitions on generated bindings varies depending on the
target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages).
+### Features
+
+Features can be declared with a `name` and `default_state` and can be attached
+in mojo to interfaces or methods using the `RuntimeFeature` attribute. If the
+feature is disabled at runtime, the method will crash and the interface will
+refuse to be bound / instantiated. Features cannot be serialized to be sent over
+IPC at this time.
+
+```
+module experimental.mojom;
+
+feature kUseElevators {
+ const string name = "UseElevators";
+ const bool default_state = false;
+}
+
+[RuntimeFeature=kUseElevators]
+interface Elevator {
+ // This interface cannot be bound or called if the feature is disabled.
+}
+
+interface Building {
+ // This method cannot be called if the feature is disabled.
+ [RuntimeFeature=kUseElevators]
+ CallElevator(int32 floor);
+
+ // This method can be called.
+ RingDoorbell(int32 volume);
+}
+```
+
### Interfaces
An **interface** is a logical bundle of parameterized request messages. Each
@@ -396,20 +430,33 @@ interesting attributes supported today.
extreme caution, because it can lead to deadlocks otherwise.
* **`[Default]`**:
- The `Default` attribute may be used to specify an enumerator value that
- will be used if an `Extensible` enumeration does not deserialize to a known
- value on the receiver side, i.e. the sender is using a newer version of the
- enum. This allows unknown values to be mapped to a well-defined value that can
- be appropriately handled.
+ The `Default` attribute may be used to specify an enumerator value or union
+ field that will be used if an `Extensible` enumeration or union does not
+ deserialize to a known value on the receiver side, i.e. the sender is using a
+ newer version of the enum or union. This allows unknown values to be mapped to
+ a well-defined value that can be appropriately handled.
+
+ Note: The `Default` field for a union must be of nullable or integral type.
+ When a union is defaulted to this field, the field takes on the default value
+ for its type: null for nullable types, and zero/false for integral types.
* **`[Extensible]`**:
- The `Extensible` attribute may be specified for any enum definition. This
- essentially disables builtin range validation when receiving values of the
- enum type in a message, allowing older bindings to tolerate unrecognized
- values from newer versions of the enum.
+ The `Extensible` attribute may be specified for any enum or union definition.
+ For enums, this essentially disables builtin range validation when receiving
+ values of the enum type in a message, allowing older bindings to tolerate
+ unrecognized values from newer versions of the enum.
- Note: in the future, an `Extensible` enumeration will require that a `Default`
- enumerator value also be specified.
+ If an enum value within an extensible enum definition is affixed with the
+ `Default` attribute, out-of-range values for the enum will deserialize to that
+ default value. Only one enum value may be designated as the `Default`.
+
+ Similarly, a union marked `Extensible` will deserialize to its `Default` field
+ when an unrecognized field is received. Extensible unions MUST specify exactly
+ one `Default` field, and the field must be of nullable or integral type. When
+ defaulted to this field, the value is always null/zero/false as appropriate.
+
+ An `Extensible` enumeration REQUIRES that a `Default` value be specified,
+ so all new extensible enums should specify one.
* **`[Native]`**:
The `Native` attribute may be specified for an empty struct declaration to
@@ -422,7 +469,10 @@ interesting attributes supported today.
* **`[MinVersion=N]`**:
The `MinVersion` attribute is used to specify the version at which a given
field, enum value, interface method, or method parameter was introduced.
- See [Versioning](#Versioning) for more details.
+ See [Versioning](#Versioning) for more details. `MinVersion` does not apply
+ to interfaces, structs or enums, but to the fields of those types.
+ `MinVersion` is not a module-global value, but it is ok to pretend it is by
+ skipping versions when adding fields or parameters.
* **`[Stable]`**:
The `Stable` attribute specifies that a given mojom type or interface
@@ -442,13 +492,73 @@ interesting attributes supported today.
string representation as specified by RFC 4122. New UUIDs can be generated
with common tools such as `uuidgen`.
+* **`[RuntimeFeature=feature]`**
+ The `RuntimeFeature` attribute should reference a mojo `feature`. If this
+ feature is enabled (e.g. using `--enable-features={feature.name}`) then the
+ interface behaves entirely as expected. If the feature is not enabled the
+ interface cannot be bound to a concrete receiver or remote - attempting to do
+ so will result in the receiver or remote being reset() to an unbound state.
+ Note that this is a different concept to the build-time `EnableIf` directive.
+ `RuntimeFeature` is currently only supported for C++ bindings and has no
+ effect for, say, Java or TypeScript bindings (see https://crbug.com/1278253).
+
* **`[EnableIf=value]`**:
The `EnableIf` attribute is used to conditionally enable definitions when the
mojom is parsed. If the `mojom` target in the GN file does not include the
matching `value` in the list of `enabled_features`, the definition will be
disabled. This is useful for mojom definitions that only make sense on one
platform. Note that the `EnableIf` attribute can only be set once per
- definition.
+ definition and cannot be set at the same time as `EnableIfNot`. Also be aware
+ that only one condition can be tested; `EnableIf=value,xyz` introduces a new
+ `xyz` attribute. `xyz` is not part of the `EnableIf` condition that depends
+ only on the feature `value`. Complex conditions can be introduced via
+ enabled_features in `build.gn` files.
+
+* **`[EnableIfNot=value]`**:
+ The `EnableIfNot` attribute is used to conditionally enable definitions when
+ the mojom is parsed. If the `mojom` target in the GN file includes the
+ matching `value` in the list of `enabled_features`, the definition will be
+ disabled. This is useful for mojom definitions that only make sense on all but
+ one platform. Note that the `EnableIfNot` attribute can only be set once per
+ definition and cannot be set at the same time as `EnableIf`.
+
+* **`[ServiceSandbox=value]`**:
+ The `ServiceSandbox` attribute is used in Chromium to tag which sandbox a
+ service hosting an implementation of interface will be launched in. This only
+ applies to `C++` bindings. `value` should match a constant defined in an
+ imported `sandbox.mojom.Sandbox` enum (for Chromium this is
+ `//sandbox/policy/mojom/sandbox.mojom`), such as `kService`.
+
+* **`[RequireContext=enum]`**:
+ The `RequireContext` attribute is used in Chromium to tag interfaces that
+ should be passed (as remotes or receivers) only to privileged process
+ contexts. The process context must be an enum that is imported into the
+ mojom that defines the tagged interface. `RequireContext` may be used in
+ future to DCHECK or CHECK if remotes are made available in contexts that
+ conflict with the one provided in the interface definition. Process contexts
+ are not the same as the sandbox a process is running in, but will reflect
+ the set of capabilities provided to the service.
+
+* **`[AllowedContext=enum]`**:
+ The `AllowedContext` attribute is used in Chromium to tag methods that pass
+ remotes or receivers of interfaces that are marked with a `RequireContext`
+ attribute. The enum provided on the method must be equal or better (lower
+ numerically) than the one required on the interface being passed. At present
+ failing to specify an adequate `AllowedContext` value will cause mojom
+ generation to fail at compile time. In future DCHECKs or CHECKs might be
+ added to enforce that method is only called from a process context that meets
+ the given `AllowedContext` value. The enum must be of the same type as that
+ specified in the interface's `RequireContext` attribute. Adding an
+ `AllowedContext` attribute to a method is a strong indication that you need
+ a detailed security review of your design - please reach out to the security
+ team.
+
+* **`[SupportsUrgent]`**:
+ The `SupportsUrgent` attribute is used in conjunction with
+ `mojo::UrgentMessageScope` in Chromium to tag messages as having high
+ priority. The IPC layer notifies the underlying scheduler upon both receiving
+ and processing an urgent message. At present, this attribute only affects
+ channel associated messages in the renderer process.
## Generated Code For Target Languages
@@ -495,9 +605,9 @@ values. For example if a Mojom declares the enum:
``` cpp
enum AdvancedBoolean {
- TRUE = 0,
- FALSE = 1,
- FILE_NOT_FOUND = 2,
+ kTrue = 0,
+ kFalse = 1,
+ kFileNotFound = 2,
};
```
@@ -550,10 +660,16 @@ See the documentation for
*** note
**NOTE:** You don't need to worry about versioning if you don't care about
-backwards compatibility. Specifically, all parts of Chrome are updated
-atomically today and there is not yet any possibility of any two Chrome
-processes communicating with two different versions of any given Mojom
-interface.
+backwards compatibility. Today, all parts of the Chrome browser are
+updated atomically and there is not yet any possibility of any two
+Chrome processes communicating with two different versions of any given Mojom
+interface. On Chrome OS, there are several places where versioning is required.
+For example,
+[ARC++](https://developer.android.com/chrome-os/intro)
+uses versioned mojo to send IPC to the Android container.
+Likewise, the
+[Lacros](/docs/lacros.md)
+browser uses versioned mojo to talk to the ash system UI.
***
Services extend their interfaces to support new features over time, and clients
@@ -593,8 +709,8 @@ struct Employee {
*** note
**NOTE:** Mojo object or handle types added with a `MinVersion` **MUST** be
-optional (nullable). See [Primitive Types](#Primitive-Types) for details on
-nullable values.
+optional (nullable) or primitive. See [Primitive Types](#Primitive-Types) for
+details on nullable values.
***
By default, fields belong to version 0. New fields must be appended to the
@@ -624,10 +740,10 @@ the following hard constraints:
* For any given struct or interface, if any field or method explicitly specifies
an ordinal value, all fields or methods must explicitly specify an ordinal
value.
-* For an *N*-field struct or *N*-method interface, the set of explicitly
- assigned ordinal values must be limited to the range *[0, N-1]*. Interfaces
- should include placeholder methods to fill the ordinal positions of removed
- methods (for example "Unused_Message_7@7()" or "RemovedMessage@42()", etc).
+* For an *N*-field struct, the set of explicitly assigned ordinal values must be
+ limited to the range *[0, N-1]*. Structs should include placeholder fields
+ to fill the ordinal positions of removed fields (for example "Unused_Field"
+ or "RemovedField", etc).
You may reorder fields, but you must ensure that the ordinal values of existing
fields remain unchanged. For example, the following struct remains
@@ -652,6 +768,24 @@ There are two dimensions on which an interface can be extended
that the version number is scoped to the whole interface rather than to any
individual parameter list.
+``` cpp
+// Old version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id) => (Employee? employee);
+};
+
+// New version:
+interface HumanResourceDatabase {
+ QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
+ => (Employee? employee,
+ [MinVersion=1] array<uint8>? finger_print);
+};
+```
+
+Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
+list of a request or response method to a destination using an older version of
+an interface, unrecognized fields are silently discarded.
+
Please note that adding a response to a message which did not previously
expect a response is not a backwards-compatible change.
@@ -664,17 +798,12 @@ For example:
``` cpp
// Old version:
interface HumanResourceDatabase {
- AddEmployee(Employee employee) => (bool success);
QueryEmployee(uint64 id) => (Employee? employee);
};
// New version:
interface HumanResourceDatabase {
- AddEmployee(Employee employee) => (bool success);
-
- QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
- => (Employee? employee,
- [MinVersion=1] array<uint8>? finger_print);
+ QueryEmployee(uint64 id) => (Employee? employee);
[MinVersion=1]
AttachFingerPrint(uint64 id, array<uint8> finger_print)
@@ -682,10 +811,7 @@ interface HumanResourceDatabase {
};
```
-Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
-list of a request or response method to a destination using an older version of
-an interface, unrecognized fields are silently discarded. However, if the method
-call itself is not recognized, it is considered a validation error and the
+If a method call is not recognized, it is considered a validation error and the
receiver will close its end of the interface pipe. For example, if a client on
version 1 of the above interface sends an `AttachFingerPrint` request to an
implementation of version 0, the client will be disconnected.
@@ -712,8 +838,8 @@ If you want an enum to be extensible in the future, you can apply the
``` cpp
[Extensible]
enum Department {
- SALES,
- DEV,
+ kSales,
+ kDev,
};
```
@@ -722,9 +848,9 @@ And later you can extend this enum without breaking backwards compatibility:
``` cpp
[Extensible]
enum Department {
- SALES,
- DEV,
- [MinVersion=1] RESEARCH,
+ kSales,
+ kDev,
+ [MinVersion=1] kResearch,
};
```
@@ -782,7 +908,7 @@ Statement = ModuleStatement | ImportStatement | Definition
ModuleStatement = AttributeSection "module" Identifier ";"
ImportStatement = "import" StringLiteral ";"
-Definition = Struct Union Interface Enum Const
+Definition = Struct Union Interface Enum Feature Const
AttributeSection = <empty> | "[" AttributeList "]"
AttributeList = <empty> | NonEmptyAttributeList
@@ -809,7 +935,7 @@ InterfaceBody = <empty>
| InterfaceBody Const
| InterfaceBody Enum
| InterfaceBody Method
-Method = AttributeSection Name Ordinal "(" ParamterList ")" Response ";"
+Method = AttributeSection Name Ordinal "(" ParameterList ")" Response ";"
ParameterList = <empty> | NonEmptyParameterList
NonEmptyParameterList = Parameter
| Parameter "," NonEmptyParameterList
@@ -847,6 +973,13 @@ EnumValue = AttributeSection Name
| AttributeSection Name "=" Integer
| AttributeSection Name "=" Identifier
+; Note: `feature` is a weak keyword and can appear as, say, a struct field name.
+Feature = AttributeSection "feature" Name "{" FeatureBody "}" ";"
+ | AttributeSection "feature" Name ";"
+FeatureBody = <empty>
+ | FeatureBody FeatureField
+FeatureField = AttributeSection TypeSpec Name Default ";"
+
Const = "const" TypeSpec Name "=" Constant ";"
Constant = Literal | Identifier ";"
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/__init__.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py
index e69de29b..e69de29b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/__init__.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/__init__.py
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py
new file mode 100644
index 00000000..e6e4f2c9
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py
@@ -0,0 +1,170 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate mojo attributes are allowed in Chrome before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+_COMMON_ATTRIBUTES = {
+ 'EnableIf',
+ 'EnableIfNot',
+}
+
+# For struct, union & parameter lists.
+_COMMON_FIELD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'MinVersion',
+ 'RenamedFrom',
+}
+
+# Note: `Default` goes on the default _value_, not on the enum.
+# Note: [Stable] without [Extensible] is not allowed.
+_ENUM_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Extensible',
+ 'Native',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+# TODO(crbug.com/1234883) MinVersion is not needed for EnumVal.
+_ENUMVAL_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Default',
+ 'MinVersion',
+}
+
+_INTERFACE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'RenamedFrom',
+ 'RequireContext',
+ 'RuntimeFeature',
+ 'ServiceSandbox',
+ 'Stable',
+ 'Uuid',
+}
+
+_METHOD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'AllowedContext',
+ 'MinVersion',
+ 'NoInterrupt',
+ 'RuntimeFeature',
+ 'SupportsUrgent',
+ 'Sync',
+ 'UnlimitedSize',
+}
+
+_MODULE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'JavaConstantsClassName',
+ 'JavaPackage',
+}
+
+_PARAMETER_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
+
+_STRUCT_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'CustomSerializer',
+ 'JavaClassName',
+ 'Native',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+_STRUCT_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
+
+_UNION_ATTRIBUTES = _COMMON_ATTRIBUTES | {
+ 'Extensible',
+ 'Stable',
+ 'RenamedFrom',
+ 'Uuid',
+}
+
+_UNION_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES | {
+ 'Default',
+}
+
+# TODO(https://crbug.com/1193875) empty this set and remove the allowlist.
+_STABLE_ONLY_ALLOWLISTED_ENUMS = {
+ 'crosapi.mojom.OptionalBool',
+ 'crosapi.mojom.TriState',
+}
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ def _Respell(self, allowed, attribute):
+ for a in allowed:
+ if a.lower() == attribute.lower():
+ return f" - Did you mean: {a}?"
+ return ""
+
+ def _CheckAttributes(self, context, allowed, attributes):
+ if not attributes:
+ return
+ for attribute in attributes:
+ if not attribute in allowed:
+ # Is there a close misspelling?
+ hint = self._Respell(allowed, attribute)
+ raise check.CheckException(
+ self.module,
+ f"attribute {attribute} not allowed on {context}{hint}")
+
+ def _CheckEnumAttributes(self, enum):
+ if enum.attributes:
+ self._CheckAttributes("enum", _ENUM_ATTRIBUTES, enum.attributes)
+ if 'Stable' in enum.attributes and not 'Extensible' in enum.attributes:
+ full_name = f"{self.module.mojom_namespace}.{enum.mojom_name}"
+ if full_name not in _STABLE_ONLY_ALLOWLISTED_ENUMS:
+ raise check.CheckException(
+ self.module,
+ f"[Extensible] required on [Stable] enum {full_name}")
+ for enumval in enum.fields:
+ self._CheckAttributes("enum value", _ENUMVAL_ATTRIBUTES,
+ enumval.attributes)
+
+ def _CheckInterfaceAttributes(self, interface):
+ self._CheckAttributes("interface", _INTERFACE_ATTRIBUTES,
+ interface.attributes)
+ for method in interface.methods:
+ self._CheckAttributes("method", _METHOD_ATTRIBUTES, method.attributes)
+ for param in method.parameters:
+ self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
+ param.attributes)
+ if method.response_parameters:
+ for param in method.response_parameters:
+ self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
+ param.attributes)
+ for enum in interface.enums:
+ self._CheckEnumAttributes(enum)
+
+ def _CheckModuleAttributes(self):
+ self._CheckAttributes("module", _MODULE_ATTRIBUTES, self.module.attributes)
+
+ def _CheckStructAttributes(self, struct):
+ self._CheckAttributes("struct", _STRUCT_ATTRIBUTES, struct.attributes)
+ for field in struct.fields:
+ self._CheckAttributes("struct field", _STRUCT_FIELD_ATTRIBUTES,
+ field.attributes)
+ for enum in struct.enums:
+ self._CheckEnumAttributes(enum)
+
+ def _CheckUnionAttributes(self, union):
+ self._CheckAttributes("union", _UNION_ATTRIBUTES, union.attributes)
+ for field in union.fields:
+ self._CheckAttributes("union field", _UNION_FIELD_ATTRIBUTES,
+ field.attributes)
+
+ def CheckModule(self):
+ """Note that duplicate attributes are forbidden at the parse phase.
+ We also do not need to look at the types of any parameters, as they will be
+ checked where they are defined. Consts do not have attributes so can be
+ skipped."""
+ self._CheckModuleAttributes()
+ for interface in self.module.interfaces:
+ self._CheckInterfaceAttributes(interface)
+ for enum in self.module.enums:
+ self._CheckEnumAttributes(enum)
+ for struct in self.module.structs:
+ self._CheckStructAttributes(struct)
+ for union in self.module.unions:
+ self._CheckUnionAttributes(union)
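The misspelling hint in `_Respell()` above is easy to exercise in isolation; a standalone repro of its case-insensitive matching:

``` python
def respell(allowed, attribute):
    # Same logic as Check._Respell(): case-insensitive allowlist match.
    for a in allowed:
        if a.lower() == attribute.lower():
            return f' - Did you mean: {a}?'
    return ''

allowed = {'Sync', 'MinVersion', 'NoInterrupt'}
assert respell(allowed, 'sync') == ' - Did you mean: Sync?'
assert respell(allowed, 'Async') == ''  # no near-miss, no hint
```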
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py
new file mode 100644
index 00000000..f1a50a4a
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check_unittest.py
@@ -0,0 +1,194 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'attributes'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def _testValid(self, filename, content):
+ self.WriteFile(filename, content)
+ self._ParseAndGenerate([filename])
+
+ def _testThrows(self, filename, content, regexp):
+ mojoms = []
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('attributes')
+ self.assertTrue(check_modules['attributes'])
+
+ def testNoAnnotations(self):
+ # Undecorated mojom should be fine.
+ self._testValid(
+ "a.mojom", """
+ module a;
+ struct Bar { int32 a; };
+ enum Hello { kValue };
+ union Thingy { Bar b; Hello hi; };
+ interface Foo {
+ Foo(int32 a, Hello hi, Thingy t) => (Bar b);
+ };
+ """)
+
+ def testValidAnnotations(self):
+ # Obviously this is meaningless and won't generate, but it should pass
+ # the attribute check's validation.
+ self._testValid(
+ "a.mojom", """
+ [JavaConstantsClassName="FakeClass",JavaPackage="org.chromium.Fake"]
+ module a;
+ [Stable, Extensible]
+ enum Hello { [Default] kValue, kValue2, [MinVersion=2] kValue3 };
+ [Native]
+ enum NativeEnum {};
+ [Stable,Extensible]
+ union Thingy { Bar b; [Default]int32 c; Hello hi; };
+
+ [Stable,RenamedFrom="module.other.Foo",
+ Uuid="4C178401-4B07-4C2E-9255-5401A943D0C7"]
+ struct Structure { Hello hi; };
+
+ [ServiceSandbox=Hello.kValue,RequireContext=Hello.kValue,Stable,
+ Uuid="2F17D7DD-865A-4B1C-9394-9C94E035E82F"]
+ interface Foo {
+ [AllowedContext=Hello.kValue]
+ Foo@0(int32 a) => (int32 b);
+ [MinVersion=2,Sync,UnlimitedSize,NoInterrupt]
+ Bar@1(int32 b, [MinVersion=2]Structure? s) => (bool c);
+ };
+
+ [RuntimeFeature=test.mojom.FeatureName]
+ interface FooFeatureControlled {};
+
+ interface FooMethodFeatureControlled {
+ [RuntimeFeature=test.mojom.FeatureName]
+ MethodWithFeature() => (bool c);
+ };
+ """)
+
+ def testWrongModuleStable(self):
+ contents = """
+ // err: module cannot be Stable
+ [Stable]
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute Stable not allowed on module')
+
+ def testWrongEnumDefault(self):
+ contents = """
+ module a;
+ // err: default should go on EnumValue not Enum.
+ [Default=kValue]
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute Default not allowed on enum')
+
+ def testWrongStructMinVersion(self):
+ contents = """
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ // err: struct cannot have MinVersion.
+ [MinVersion=2]
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute MinVersion not allowed on struct')
+
+ def testWrongMethodRequireContext(self):
+ contents = """
+ module a;
+ enum Hello { kValue, kValue2, kValue3 };
+ enum NativeEnum {};
+ struct Structure { Hello hi; };
+
+ interface Foo {
+ // err: RequireContext is for interfaces.
+ [RequireContext=Hello.kValue]
+ Foo(int32 a) => (int32 b);
+ Bar(int32 b, Structure? s) => (bool c);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext not allowed on method')
+
+ def testMisspelledSync(self):
+ # crbug.com/1230122
+ contents = """
+ module a;
+ interface Foo {
+ // err: sync not Sync.
+ [sync]
+ Foo(int32 a) => (int32 b);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'attribute sync not allowed.*Did you mean: Sync')
+
+ def testStableExtensibleEnum(self):
+ # crbug.com/1193875
+ contents = """
+ module a;
+ [Stable]
+ enum Foo {
+ kDefaultVal,
+ kOtherVal = 2,
+ };
+ """
+ self._testThrows('a.mojom', contents,
+ 'Extensible.*?required.*?Stable.*?enum')
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py
new file mode 100644
index 00000000..702d41c3
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_definitions_check.py
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Ensure no duplicate type definitions before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ def CheckModule(self):
+ kinds = dict()
+ for module in self.module.imports:
+ for kind in module.enums + module.structs + module.unions:
+ kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
+ if kind_name in kinds:
+ previous_module = kinds[kind_name]
+ if previous_module.path != module.path:
+ raise check.CheckException(
+ self.module, f"multiple-definition for type {kind_name}" +
+ f"(defined in both {previous_module} and {module})")
+ kinds[kind_name] = kind.module
+
+ for kind in self.module.enums + self.module.structs + self.module.unions:
+ kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
+ if kind_name in kinds:
+ previous_module = kinds[kind_name]
+ raise check.CheckException(
+ self.module, f"multiple-definition for type {kind_name}" +
+ f"(previous definition in {previous_module})")
+ return True
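The check above boils down to one namespace-to-module map fed first by the imports and then by the module itself. A toy model of the same rule (type and path names are hypothetical):

``` python
seen = {}

def record(kind_name, path):
    # A second definition of the same fully qualified name coming from
    # a different file is a multiple-definition error.
    if kind_name in seen and seen[kind_name] != path:
        raise Exception(f'multiple-definition for type {kind_name} '
                        f'(defined in both {seen[kind_name]} and {path})')
    seen[kind_name] = path

record('ipa.example.Settings', 'example.mojom')
record('ipa.example.Settings', 'example.mojom')  # same file: accepted
try:
    record('ipa.example.Settings', 'other.mojom')  # different file: rejected
except Exception as e:
    print(e)
```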
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py
new file mode 100644
index 00000000..07f51a64
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check.py
@@ -0,0 +1,62 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate mojo runtime feature guarded interfaces are nullable."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ super(Check, self).__init__(*args, **kwargs)
+
+ # `kind` is an Interface of some sort.
+ def _CheckNonNullableFeatureGuardedInterface(self, kind):
+ # Only need to validate interface if it has a RuntimeFeature
+ if not kind.kind.runtime_feature:
+ return
+ # Nullable (optional) is ok as the interface expects they might not be sent.
+ if kind.is_nullable:
+ return
+ interface = kind.kind.mojom_name
+ raise check.CheckException(
+ self.module,
+ f"interface {interface} has a RuntimeFeature but is not nullable")
+
+ # `kind` can be a lot of things so check if it is a remote/receiver.
+ # Array/Map must be recursed into.
+ def _CheckFieldOrParam(self, kind):
+ if module.IsAnyInterfaceKind(kind):
+ self._CheckNonNullableFeatureGuardedInterface(kind)
+ if module.IsArrayKind(kind):
+ self._CheckFieldOrParam(kind.kind)
+ if module.IsMapKind(kind):
+ self._CheckFieldOrParam(kind.key_kind)
+ self._CheckFieldOrParam(kind.value_kind)
+
+ def _CheckInterfaceFeatures(self, interface):
+ for method in interface.methods:
+ for param in method.parameters:
+ self._CheckFieldOrParam(param.kind)
+ if method.response_parameters:
+ for param in method.response_parameters:
+ self._CheckFieldOrParam(param.kind)
+
+ def _CheckStructFeatures(self, struct):
+ for field in struct.fields:
+ self._CheckFieldOrParam(field.kind)
+
+ def _CheckUnionFeatures(self, union):
+ for field in union.fields:
+ self._CheckFieldOrParam(field.kind)
+
+ def CheckModule(self):
+ """Validate that any runtime feature guarded interfaces that might be passed
+ over mojo are nullable."""
+ for interface in self.module.interfaces:
+ self._CheckInterfaceFeatures(interface)
+ for struct in self.module.structs:
+ self._CheckStructFeatures(struct)
+ for union in self.module.unions:
+ self._CheckUnionFeatures(union)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py
new file mode 100644
index 00000000..e96152fd
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_interface_feature_check_unittest.py
@@ -0,0 +1,173 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'features'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def assertValid(self, filename, content):
+ self.WriteFile(filename, content)
+ self._ParseAndGenerate([filename])
+
+ def assertThrows(self, filename, content, regexp):
+ mojoms = []
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('features')
+ self.assertTrue(check_modules['features'])
+
+ def testNullableOk(self):
+ self.assertValid(
+ "a.mojom", """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded {
+ };
+
+ // Unguarded interfaces should be ok everywhere.
+ interface NotGuarded { };
+
+ // Optional (nullable) interfaces should be ok everywhere:
+ struct Bar {
+ pending_remote<Guarded>? remote;
+ pending_receiver<Guarded>? receiver;
+ };
+ union Thingy {
+ pending_remote<Guarded>? remote;
+ pending_receiver<Guarded>? receiver;
+ };
+ interface Foo {
+ Foo(
+ pending_remote<Guarded>? remote,
+ pending_receiver<Guarded>? receiver,
+ pending_associated_remote<Guarded>? a_remote,
+ pending_associated_receiver<Guarded>? a_receiver,
+ // Unguarded interfaces do not have to be nullable.
+ pending_remote<NotGuarded> remote,
+ pending_receiver<NotGuarded> receiver,
+ pending_associated_remote<NotGuarded> a_remote,
+ pending_associated_receiver<NotGuarded> a_receiver
+ ) => (
+ pending_remote<Guarded>? remote,
+ pending_receiver<Guarded>? receiver
+ );
+ Bar(array<pending_remote<Guarded>?> remote)
+ => (map<string, pending_receiver<Guarded>?> a);
+ };
+ """)
+
+ def testMethodParamsMustBeNullable(self):
+ prelude = """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded { };
+ """
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_remote<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(bool foo) => (pending_receiver<Guarded> a);
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_receiver<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_associated_remote<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(pending_associated_receiver<Guarded> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(array<pending_associated_receiver<Guarded>> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ interface Trial {
+ Method(map<string, pending_associated_receiver<Guarded>> a) => ();
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+
+ def testStructUnionMembersMustBeNullable(self):
+ prelude = """
+ module a;
+ // Scaffolding.
+ feature kFeature {
+ const string name = "Hello";
+ const bool enabled_state = false;
+ };
+ [RuntimeFeature=kFeature]
+ interface Guarded { };
+ """
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ struct Trial {
+ pending_remote<Guarded> a;
+ };
+ """, 'interface Guarded has a RuntimeFeature')
+ self.assertThrows(
+ 'a.mojom', prelude + """
+ union Trial {
+ pending_remote<Guarded> a;
+ };
+ """, 'interface Guarded has a RuntimeFeature')
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py
new file mode 100644
index 00000000..d570e26c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_check.py
@@ -0,0 +1,102 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Validate RequireContext and AllowedContext annotations before generation."""
+
+import mojom.generate.check as check
+import mojom.generate.module as module
+
+
+class Check(check.Check):
+ def __init__(self, *args, **kwargs):
+ self.kind_to_interfaces = dict()
+ super(Check, self).__init__(*args, **kwargs)
+
+ def _IsPassedInterface(self, candidate):
+ if isinstance(
+ candidate.kind,
+ (module.PendingReceiver, module.PendingRemote,
+ module.PendingAssociatedReceiver, module.PendingAssociatedRemote)):
+ return True
+ return False
+
+ def _CheckInterface(self, method, param):
+ # |param| is a pending_x<Interface> so need .kind.kind to get Interface.
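+ # e.g. for pending_remote<High>, param.kind is a module.PendingRemote
+ # whose .kind is the module.Interface for High.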
+ interface = param.kind.kind
+ if interface.require_context:
+ if method.allowed_context is None:
+ raise check.CheckException(
+ self.module, "method `{}` has parameter `{}` which passes interface"
+ " `{}` that requires an AllowedContext annotation but none exists.".
+ format(
+ method.mojom_name,
+ param.mojom_name,
+ interface.mojom_name,
+ ))
+ # If a string was provided, or if an enum was not imported, this will
+ # be a string and we cannot validate that it is in range.
+ if not isinstance(method.allowed_context, module.EnumValue):
+ raise check.CheckException(
+ self.module,
+ "method `{}` has AllowedContext={} which is not a valid enum value."
+ .format(method.mojom_name, method.allowed_context))
+ # EnumValue must be from the same enum to be compared.
+ if interface.require_context.enum != method.allowed_context.enum:
+ raise check.CheckException(
+ self.module, "method `{}` has parameter `{}` which passes interface"
+ " `{}` that requires AllowedContext={} but one of kind `{}` was "
+ "provided.".format(
+ method.mojom_name,
+ param.mojom_name,
+ interface.mojom_name,
+ interface.require_context.enum,
+ method.allowed_context.enum,
+ ))
+ # RequireContext enums have the most privileged field first (lowest value).
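+ # e.g. with enum Level { kHighest, kMiddle, kLowest }, kHighest has the
+ # lowest numeric_value, so AllowedContext=kMiddle cannot satisfy
+ # RequireContext=kHighest and the check below raises.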
+ interface_value = interface.require_context.field.numeric_value
+ method_value = method.allowed_context.field.numeric_value
+ if interface_value < method_value:
+ raise check.CheckException(
+ self.module, "RequireContext={} > AllowedContext={} for method "
+ "`{}` which passes interface `{}`.".format(
+ interface.require_context.GetSpec(),
+ method.allowed_context.GetSpec(), method.mojom_name,
+ interface.mojom_name))
+ return True
+
+ def _GatherReferencedInterfaces(self, field):
+ key = field.kind.spec
+ # structs/unions can nest themselves so we need to bookkeep.
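+ # e.g. a self-referential `struct Node { Node? next; ... };` would
+ # recurse forever without the entry created below.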
+ if not key in self.kind_to_interfaces:
+ # Might reference ourselves so have to create the list first.
+ self.kind_to_interfaces[key] = set()
+ for param in field.kind.fields:
+ if self._IsPassedInterface(param):
+ self.kind_to_interfaces[key].add(param)
+ elif isinstance(param.kind, (module.Struct, module.Union)):
+ for iface in self._GatherReferencedInterfaces(param):
+ self.kind_to_interfaces[key].add(iface)
+ return self.kind_to_interfaces[key]
+
+ def _CheckParams(self, method, params):
+ # Note: we have to repeat _CheckParams for each method as each might have
+ # different AllowedContext= attributes. We cannot memoize this function,
+ # but can do so for gathering referenced interfaces as their RequireContext
+ # attributes do not change.
+ for param in params:
+ if self._IsPassedInterface(param):
+ self._CheckInterface(method, param)
+ elif isinstance(param.kind, (module.Struct, module.Union)):
+ for interface in self._GatherReferencedInterfaces(param):
+ self._CheckInterface(method, interface)
+
+ def _CheckMethod(self, method):
+ if method.parameters:
+ self._CheckParams(method, method.parameters)
+ if method.response_parameters:
+ self._CheckParams(method, method.response_parameters)
+
+ def CheckModule(self):
+ for interface in self.module.interfaces:
+ for method in interface.methods:
+ self._CheckMethod(method)
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py
new file mode 100644
index 00000000..a6cd71e2
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/checks/mojom_restrictions_checks_unittest.py
@@ -0,0 +1,254 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mojom.generate.check as check
+from mojom_bindings_generator import LoadChecks, _Generate
+from mojom_parser_test_case import MojomParserTestCase
+
+# Mojoms that we will use in multiple tests.
+basic_mojoms = {
+ 'level.mojom':
+ """
+ module level;
+ enum Level {
+ kHighest,
+ kMiddle,
+ kLowest,
+ };
+ """,
+ 'interfaces.mojom':
+ """
+ module interfaces;
+ import "level.mojom";
+ struct Foo {int32 bar;};
+ [RequireContext=level.Level.kHighest]
+ interface High {
+ DoFoo(Foo foo);
+ };
+ [RequireContext=level.Level.kMiddle]
+ interface Mid {
+ DoFoo(Foo foo);
+ };
+ [RequireContext=level.Level.kLowest]
+ interface Low {
+ DoFoo(Foo foo);
+ };
+ """
+}
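+# Note: for the restrictions check, lower enum values are more privileged,
+# so kHighest above is the most privileged level.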
+
+
+class FakeArgs:
+ """Fakes args to _Generate - intention is to do just enough to run checks"""
+
+ def __init__(self, tester, files=None):
+ """ `tester` is MojomParserTestCase for paths.
+ `files` will have tester path added."""
+ self.checks_string = 'restrictions'
+ self.depth = tester.GetPath('')
+ self.filelist = None
+ self.filename = [tester.GetPath(x) for x in files]
+ self.gen_directories = tester.GetPath('gen')
+ self.generators_string = ''
+ self.import_directories = []
+ self.output_dir = tester.GetPath('out')
+ self.scrambled_message_id_salt_paths = None
+ self.typemaps = []
+ self.variant = 'none'
+
+
+class MojoBindingsCheckTest(MojomParserTestCase):
+ def _WriteBasicMojoms(self):
+ for filename, contents in basic_mojoms.items():
+ self.WriteFile(filename, contents)
+ return list(basic_mojoms.keys())
+
+ def _ParseAndGenerate(self, mojoms):
+ self.ParseMojoms(mojoms)
+ args = FakeArgs(self, files=mojoms)
+ _Generate(args, {})
+
+ def testLoads(self):
+ """Validate that the check is registered under the expected name."""
+ check_modules = LoadChecks('restrictions')
+ self.assertTrue(check_modules['restrictions'])
+
+ def testValidAnnotations(self):
+ mojoms = self._WriteBasicMojoms()
+
+ a = 'a.mojom'
+ self.WriteFile(
+ a, """
+ module a;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ [AllowedContext=level.Level.kHighest]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ interface PassesMedium {
+ [AllowedContext=level.Level.kMiddle]
+ DoMedium(pending_receiver<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumRem(pending_remote<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumAssoc(pending_associated_receiver<interfaces.Mid> hi);
+ [AllowedContext=level.Level.kMiddle]
+ DoMediumAssocRem(pending_associated_remote<interfaces.Mid> hi);
+ };
+ interface PassesLow {
+ [AllowedContext=level.Level.kLowest]
+ DoLow(pending_receiver<interfaces.Low> hi);
+ };
+
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ interface PassesNestedHigh {
+ [AllowedContext=level.Level.kHighest]
+ DoNestedHigh(Two two);
+ };
+
+ // Allowed as PassesHigh is not itself restricted.
+ interface PassesPassesHigh {
+ DoPass(pending_receiver<PassesHigh> hiho);
+ };
+ """)
+ mojoms.append(a)
+ self._ParseAndGenerate(mojoms)
+
+ def _testThrows(self, filename, content, regexp):
+ mojoms = self._WriteBasicMojoms()
+ self.WriteFile(filename, content)
+ mojoms.append(filename)
+ with self.assertRaisesRegexp(check.CheckException, regexp):
+ self._ParseAndGenerate(mojoms)
+
+ def testMissingAnnotation(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ // err: missing annotation.
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testAllowTooLow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+
+ interface PassesHigh {
+ // err: level is worse than required.
+ [AllowedContext=level.Level.kMiddle]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
+
+ def testWrongEnumInAllow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ enum Blah {
+ kZero,
+ };
+ interface PassesHigh {
+ // err: different enums.
+ [AllowedContext=Blah.kZero]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'but one of kind')
+
+ def testNotAnEnumInAllow(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ interface PassesHigh {
+ // err: not an enum.
+ [AllowedContext=doopdedoo.mojom.kWhatever]
+ DoHigh(pending_receiver<interfaces.High> hi);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'not a valid enum value')
+
+ def testMissingAllowedForNestedStructs(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ interface PassesNestedHigh {
+ // err: missing annotation.
+ DoNestedHigh(Two two);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testMissingAllowedForNestedUnions(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ struct Two { One one; };
+ union Three {One one; Two two; };
+ interface PassesNestedHigh {
+ // err: missing annotation.
+ DoNestedHigh(Three three);
+ };
+ """
+ self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
+
+ def testMultipleInterfacesThrows(self):
+ contents = """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ interface PassesMultipleInterfaces {
+ [AllowedContext=level.Level.kMiddle]
+ DoMultiple(
+ pending_remote<interfaces.Mid> mid,
+ pending_receiver<interfaces.High> hi,
+ One one
+ );
+ };
+ """
+ self._testThrows('b.mojom', contents,
+ 'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
+
+ def testMultipleInterfacesAllowed(self):
+ """Multiple interfaces can be passed, all satisfy the level."""
+ mojoms = self._WriteBasicMojoms()
+
+ b = "b.mojom"
+ self.WriteFile(
+ b, """
+ module b;
+ import "level.mojom";
+ import "interfaces.mojom";
+ struct One { pending_receiver<interfaces.High> hi; };
+ interface PassesMultipleInterfaces {
+ [AllowedContext=level.Level.kHighest]
+ DoMultiple(
+ pending_receiver<interfaces.High> hi,
+ pending_remote<interfaces.Mid> mid,
+ One one
+ );
+ };
+ """)
+ mojoms.append(b)
+ self._ParseAndGenerate(mojoms)
diff --git a/utils/ipc/mojo/public/tools/bindings/concatenate-files.py b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
index 48bc66fd..4dd26d4a 100755
--- a/utils/ipc/mojo/public/tools/bindings/concatenate-files.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
@@ -15,6 +15,7 @@
from __future__ import print_function
import optparse
+import sys
def Concatenate(filenames):
@@ -47,7 +48,7 @@ def main():
parser.set_usage("""Concatenate several files into one.
Equivalent to: cat file1 ... > target.""")
(_options, args) = parser.parse_args()
- exit(0 if Concatenate(args) else 1)
+ sys.exit(0 if Concatenate(args) else 1)
if __name__ == "__main__":
diff --git a/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
index be8985ce..7d56c9f9 100755
--- a/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -20,6 +20,7 @@ from __future__ import print_function
import optparse
import re
+import sys
_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
@@ -31,10 +32,10 @@ def FilterLine(filename, line, output):
return
if line.startswith("goog.provide"):
- match = re.match("goog.provide\('([^']+)'\);", line)
+ match = re.match(r"goog.provide\('([^']+)'\);", line)
if not match:
print("Invalid goog.provide line in %s:\n%s" % (filename, line))
- exit(1)
+ sys.exit(1)
module_name = match.group(1)
if module_name == _MOJO_INTERNAL_MODULE_NAME:
@@ -67,7 +68,8 @@ def main():
Concatenate several files into one, stripping Closure provide and
require directives along the way.""")
(_, args) = parser.parse_args()
- exit(0 if ConcatenateAndReplaceExports(args) else 1)
+ sys.exit(0 if ConcatenateAndReplaceExports(args) else 1)
+
if __name__ == "__main__":
main()
diff --git a/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py b/utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py
index 8b78d092..c6daff03 100644
--- a/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/gen_data_files_list.py
@@ -1,4 +1,4 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a list of all files in a directory.
diff --git a/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py b/utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
index a0096649..4a53e2bf 100755
--- a/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a JSON typemap from its command-line arguments and dependencies.
@@ -82,10 +82,12 @@ def LoadCppTypemapConfig(path):
for entry in config['types']:
configs[entry['mojom']] = {
'typename': entry['cpp'],
+ 'forward_declaration': entry.get('forward_declaration', None),
'public_headers': config.get('traits_headers', []),
'traits_headers': config.get('traits_private_headers', []),
'copyable_pass_by_value': entry.get('copyable_pass_by_value',
False),
+ 'default_constructible': entry.get('default_constructible', True),
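+ # (illustrative) a JSON entry such as
+ #   {"mojom": "mojo_base.mojom.Value", "cpp": "::base::Value",
+ #    "default_constructible": false}
+ # populates the fields above and below.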
'force_serialize': entry.get('force_serialize', False),
'hashable': entry.get('hashable', False),
'move_only': entry.get('move_only', False),
diff --git a/utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py b/utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
new file mode 100755
index 00000000..cefee7a4
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This utility minifies JS files with terser.
+#
+# Instance of 'node' has no 'RunNode' member (no-member)
+# pylint: disable=no-member
+
+import argparse
+import os
+import sys
+
+_HERE_PATH = os.path.dirname(__file__)
+_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
+_CWD = os.getcwd()
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
+import node
+import node_modules
+
+
+def MinifyFile(input_file, output_file):
+ node.RunNode([
+ node_modules.PathToTerser(), input_file, '--mangle', '--compress',
+ '--comments', 'false', '--output', output_file
+ ])
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input', required=True)
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args(argv)
+
+ # Delete the output file if it already exists. It may be a symlink to the
+ # input, because in non-optimized/pre-Terser builds the input file is copied
+ # to the output location with gn copy().
+ out_path = os.path.join(_CWD, args.output)
+ if os.path.exists(out_path):
+ os.remove(out_path)
+
+ MinifyFile(os.path.join(_CWD, args.input), out_path)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom.gni b/utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
index fe2a1da3..3f6e54e0 100644
--- a/utils/ipc/mojo/public/tools/bindings/mojom.gni
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
@@ -1,25 +1,28 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import("//build/config/python.gni")
import("//third_party/closure_compiler/closure_args.gni")
import("//third_party/closure_compiler/compile_js.gni")
import("//third_party/protobuf/proto_library.gni")
+import("//ui/webui/resources/tools/generate_grd.gni")
import("//ui/webui/webui_features.gni")
+import("//build/config/cast.gni")
+
# TODO(rockot): Maybe we can factor these dependencies out of //mojo. They're
# used to conditionally enable message ID scrambling in a way which is
# consistent across toolchains and which is affected by branded vs non-branded
# Chrome builds. Ideally we could create some generic knobs here that could be
# flipped elsewhere though.
import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/features.gni")
import("//build/config/nacl/config.gni")
import("//build/toolchain/kythe.gni")
import("//components/nacl/features.gni")
import("//third_party/jinja2/jinja2.gni")
+import("//third_party/ply/ply.gni")
import("//tools/ipc_fuzzer/ipc_fuzzer.gni")
declare_args() {
# Indicates whether typemapping should be supported in this build
@@ -34,21 +37,30 @@ declare_args() {
# Controls message ID scrambling behavior. If |true|, message IDs are
# scrambled (i.e. randomized based on the contents of //chrome/VERSION) on
- # non-Chrome OS desktop platforms. Set to |false| to disable message ID
- # scrambling on all platforms.
- enable_mojom_message_id_scrambling = true
+ # non-Chrome OS desktop platforms. Enabled on official builds by default.
+ # Set to |true| to enable message ID scrambling on a specific build.
+ # See also `enable_scrambled_message_ids` below for more details.
+ enable_mojom_message_id_scrambling = is_official_build
+
+ # Enables generating javascript fuzzing-related code and the bindings for the
+ # MojoLPM fuzzer targets. Off by default.
+ enable_mojom_fuzzer = false
# Enables Closure compilation of generated JS lite bindings. In environments
# where compilation is supported, any mojom target "foo" will also have a
# corresponding "foo_js_library_for_compile" target generated.
- enable_mojom_closure_compile = enable_js_type_check && optimize_webui
-
- # Enables generating Typescript bindings and compiling them to JS bindings.
- enable_typescript_bindings = false
+ if (is_chromeos_ash) {
+ enable_mojom_closure_compile = enable_js_type_check && optimize_webui
+ }
+}
- # Enables generating javascript fuzzing-related code and the bindings for the
- # MojoLPM fuzzer targets. Off by default.
- enable_mojom_fuzzer = false
+# Closure libraries are needed for mojom_closure_compile, and when
+# js_type_check is enabled on Ash.
+if (is_chromeos_ash) {
+ generate_mojom_closure_libraries =
+ enable_mojom_closure_compile || enable_js_type_check
+} else {
+ generate_mojom_closure_libraries = false
}
# NOTE: We would like to avoid scrambling message IDs where it doesn't add
@@ -69,9 +81,8 @@ declare_args() {
# lacros-chrome switches to target_os="chromeos"
enable_scrambled_message_ids =
enable_mojom_message_id_scrambling &&
- (is_mac || is_win ||
- (is_linux && !is_chromeos_ash && !is_chromecast && !is_chromeos_lacros) ||
- ((enable_nacl || is_nacl || is_nacl_nonsfi) &&
+ (is_mac || is_win || (is_linux && !is_castos) ||
+ ((enable_nacl || is_nacl) &&
(target_os != "chromeos" && !chromeos_is_browser_only)))
_mojom_tools_root = "//mojo/public/tools"
@@ -80,7 +91,9 @@ mojom_parser_script = "$_mojom_tools_root/mojom/mojom_parser.py"
mojom_parser_sources = [
"$_mojom_library_root/__init__.py",
"$_mojom_library_root/error.py",
+ "$_mojom_library_root/fileutil.py",
"$_mojom_library_root/generate/__init__.py",
+ "$_mojom_library_root/generate/check.py",
"$_mojom_library_root/generate/generator.py",
"$_mojom_library_root/generate/module.py",
"$_mojom_library_root/generate/pack.py",
@@ -88,21 +101,32 @@ mojom_parser_sources = [
"$_mojom_library_root/generate/translate.py",
"$_mojom_library_root/parse/__init__.py",
"$_mojom_library_root/parse/ast.py",
+ "$_mojom_library_root/parse/conditional_features.py",
"$_mojom_library_root/parse/lexer.py",
"$_mojom_library_root/parse/parser.py",
+ "//tools/diagnosis/crbug_1001171.py",
]
mojom_generator_root = "$_mojom_tools_root/bindings"
mojom_generator_script = "$mojom_generator_root/mojom_bindings_generator.py"
mojom_generator_sources =
mojom_parser_sources + [
+ "$mojom_generator_root/checks/__init__.py",
+ "$mojom_generator_root/checks/mojom_attributes_check.py",
+ "$mojom_generator_root/checks/mojom_definitions_check.py",
+ "$mojom_generator_root/checks/mojom_interface_feature_check.py",
+ "$mojom_generator_root/checks/mojom_restrictions_check.py",
+ "$mojom_generator_root/generators/__init__.py",
"$mojom_generator_root/generators/cpp_util.py",
"$mojom_generator_root/generators/mojom_cpp_generator.py",
"$mojom_generator_root/generators/mojom_java_generator.py",
- "$mojom_generator_root/generators/mojom_mojolpm_generator.py",
"$mojom_generator_root/generators/mojom_js_generator.py",
+ "$mojom_generator_root/generators/mojom_mojolpm_generator.py",
"$mojom_generator_root/generators/mojom_ts_generator.py",
"$mojom_generator_script",
+ "//build/action_helpers.py",
+ "//build/gn_helpers.py",
+ "//build/zip_helpers.py",
]
if (enable_scrambled_message_ids) {
@@ -243,12 +267,16 @@ if (enable_scrambled_message_ids) {
# |cpp_only| is set to true, it overrides this to prevent generation of
# Java bindings.
#
-# enable_fuzzing (optional)
+# enable_js_fuzzing (optional)
+# Enables generation of javascript fuzzing sources for the target if the
+# global build arg |enable_mojom_fuzzer| is also set to |true|.
+# Defaults to |true|. If JS fuzzing generation is enabled for a target,
+# the target will always generate JS bindings even if |cpp_only| is set to
+# |true|. See note above.
+#
+# enable_mojolpm_fuzzing (optional)
# Enables generation of fuzzing sources for the target if the global build
-# arg |enable_mojom_fuzzer| is also set to |true|. Defaults to |true|. If
-# fuzzing generation is enabled for a target, the target will always
-# generate JS bindings even if |cpp_only| is set to |true|. See note
-# above.
+# arg |enable_mojom_fuzzer| is also set to |true|. Defaults to |true|.
#
# support_lazy_serialization (optional)
# If set to |true|, generated C++ bindings will effectively prefer to
@@ -310,8 +338,15 @@ if (enable_scrambled_message_ids) {
# correct dependency order. Note that this only has an effect if
# the |enable_mojom_closure_compile| global arg is set to |true| as well.
#
-# use_typescript_sources (optional)
-# Uses the Typescript generator to generate JavaScript bindings.
+# generate_webui_js_bindings (optional)
+# Generate WebUI bindings in JavaScript rather than TypeScript. Defaults
+# to false. ChromeOS-only parameter.
+#
+# generate_legacy_js_bindings (optional)
+# Generate js_data_deps target containing legacy JavaScript bindings files
+# for Blink tests and other non-WebUI users when generating TypeScript
+# bindings for WebUI. Ignored if generate_webui_js_bindings is set to
+# true.
#
# js_generate_struct_deserializers (optional)
# Generates JS deserialize methods for structs.
@@ -323,17 +358,23 @@ if (enable_scrambled_message_ids) {
# webui_module_path (optional)
# The path or URL at which modules generated by this target will be
# accessible to WebUI pages. This may either be an absolute path or
-# a full URL path starting with "chrome://resources/mojo".
+# a full URL path starting with "chrome://resources/mojo". If this path
+# is not specified, WebUI bindings will not be generated.
#
# If an absolute path, a WebUI page may only import these modules if
-# they are manually packaged and mapped independently by that page's
-# WebUIDataSource. The mapped path must match the path given here.
+# they are added to that page's data source (usually by adding the
+# modules to the mojo_files list for build_webui(), or by listing the
+# files as inputs to the page's ts_library() and/or generate_grd() build
+# steps).
#
# If this is instead a URL string starting with
-# "chrome://resources/mojo", the generated resources must be added to
-# content_resources.grd and registered with
-# content::SharedResourcesDataSource with a corresponding path, at which
-# point they will be made available to all WebUI pages at the given URL.
+# "chrome://resources/mojo", the resulting bindings files should
+# be added to one of the lists in ui/webui/resources/mojo/BUILD.gn,
+# at which point they will be made available to all WebUI pages at the
+# given URL.
+#
+# Note: WebUI module bindings are generated in TypeScript by default,
+# unless |generate_webui_js_bindings| is specified as true.
#
# The following parameters are used to support the component build. They are
# needed so that bindings which are linked with a component can use the same
@@ -402,16 +443,41 @@ if (enable_scrambled_message_ids) {
# should be mapped in generated bindings. This is a string like
# "::base::Value" or "std::vector<::base::Value>".
#
-# move_only (optional)
-# A boolean value (default false) which indicates whether the C++
-# type is move-only. If true, generated bindings will pass the type
-# by value and use std::move() at call sites.
-#
# copyable_pass_by_value (optional)
# A boolean value (default false) which effectively indicates
# whether the C++ type is very cheap to copy. If so, generated
# bindings will pass by value but not use std::move() at call sites.
#
+# default_constructible (optional)
+# A boolean value (default true) which indicates whether the C++
+# type is default constructible. If a C++ type is not default
+# constructible (e.g. the implementor of the type prefers not to
+# publicly expose a default constructor that creates an object in an
+# invalid state), Mojo will instead construct the C++ type with an
+# argument of the type `mojo::DefaultConstruct::Tag` (essentially a
+# passkey-like type specifically for this use case).
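+# (illustrative) such a type might declare a constructor like
+# `explicit MyType(mojo::DefaultConstruct::Tag);` for Mojo to call.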
+#
+# force_serialize (optional)
+# A boolean value (default false) which disables lazy serialization
+# of the typemapped type if lazy serialization is enabled for the
+# mojom target applying this typemap.
+#
+# forward_declaration (optional)
+# A forward declaration of the C++ type, which bindings that don't
+# need the full type definition can use to reduce the size of
+# the generated code. This is a string like
+# "namespace base { class Value; }".
+#
+# hashable (optional)
+# A boolean value (default false) indicating whether the C++ type is
+# hashable. Set to true only if needed (i.e. you need to use the
+# type as the key of a mojom map).
+#
+# move_only (optional)
+# A boolean value (default false) which indicates whether the C++
+# type is move-only. If true, generated bindings will pass the type
+# by value and use std::move() at call sites.
+#
# nullable_is_same_type (optional)
# A boolean value (default false) which indicates that the C++ type
# has some baked-in semantic notion of a "null" state. If true, the
@@ -421,16 +487,6 @@ if (enable_scrambled_message_ids) {
# type with absl::optional, and null values are simply
# absl::nullopt.
#
-# hashable (optional)
-# A boolean value (default false) indicating whether the C++ type is
-# hashable. Set to true if true AND needed (i.e. you need to use the
-# type as the key of a mojom map).
-#
-# force_serialize (optional)
-# A boolean value (default false) which disables lazy serialization
-# of the typemapped type if lazy serialization is enabled for the
-# mojom target applying this typemap.
-#
# Additional typemap scope parameters:
#
# traits_headers (optional)
@@ -621,20 +677,26 @@ template("mojom") {
build_metadata_filename = "$target_gen_dir/$target_name.build_metadata"
build_metadata = {
}
- build_metadata.sources = rebase_path(sources_list)
+ build_metadata.sources = rebase_path(sources_list, target_gen_dir)
build_metadata.deps = []
foreach(dep, all_deps) {
dep_target_gen_dir = get_label_info(dep, "target_gen_dir")
dep_name = get_label_info(dep, "name")
build_metadata.deps +=
- [ rebase_path("$dep_target_gen_dir/$dep_name.build_metadata") ]
+ [ rebase_path("$dep_target_gen_dir/$dep_name.build_metadata",
+ target_gen_dir) ]
}
write_file(build_metadata_filename, build_metadata, "json")
- generate_fuzzing =
- (!defined(invoker.enable_fuzzing) || invoker.enable_fuzzing) &&
+ generate_js_fuzzing =
+ (!defined(invoker.enable_js_fuzzing) || invoker.enable_js_fuzzing) &&
enable_mojom_fuzzer && (!defined(invoker.testonly) || !invoker.testonly)
+ generate_mojolpm_fuzzing =
+ (!defined(invoker.enable_mojolpm_fuzzing) ||
+ invoker.enable_mojolpm_fuzzing) && enable_mojom_fuzzer &&
+ (!defined(invoker.testonly) || !invoker.testonly)
+
parser_target_name = "${target_name}__parser"
parser_deps = []
foreach(dep, all_deps) {
@@ -665,30 +727,34 @@ template("mojom") {
"is_chromeos",
"is_chromeos_ash",
]
+ } else if (is_chromeos_lacros) {
+ enabled_features += [
+ "is_chromeos",
+ "is_chromeos_lacros",
+ ]
} else if (is_fuchsia) {
enabled_features += [ "is_fuchsia" ]
} else if (is_ios) {
enabled_features += [ "is_ios" ]
- } else if (is_linux || is_chromeos_lacros) {
+ } else if (is_linux) {
enabled_features += [ "is_linux" ]
- if (is_chromeos_lacros) {
- enabled_features += [
- "is_chromeos",
- "is_chromeos_lacros",
- ]
- }
} else if (is_mac) {
enabled_features += [ "is_mac" ]
} else if (is_win) {
enabled_features += [ "is_win" ]
}
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(parser_target_name) {
+ if (is_apple) {
+ enabled_features += [ "is_apple" ]
+ }
+
+ action(parser_target_name) {
+ allow_remote = true
+ custom_processor = "mojom_parser"
script = mojom_parser_script
- inputs = mojom_parser_sources + [ build_metadata_filename ]
+ inputs = mojom_parser_sources + ply_sources + [ build_metadata_filename ]
sources = sources_list
- deps = parser_deps
+ public_deps = parser_deps
outputs = []
foreach(base_path, output_file_base_paths) {
filename = get_path_info(base_path, "file")
@@ -698,31 +764,35 @@ template("mojom") {
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path(source) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
- response_file_contents = filelist
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args = [
# Resolve relative input mojom paths against both the root src dir and
# the root gen dir.
"--input-root",
- rebase_path("//."),
+ rebase_path("//.", root_build_dir),
"--input-root",
- rebase_path(root_gen_dir),
+ rebase_path(root_gen_dir, root_build_dir),
"--output-root",
- rebase_path(root_gen_dir),
+ rebase_path(root_gen_dir, root_build_dir),
- "--mojom-file-list={{response_file_name}}",
+ "--mojom-file-list=" + rebase_path(rsp_file, root_build_dir),
"--check-imports",
- rebase_path(build_metadata_filename),
+ rebase_path(build_metadata_filename, root_build_dir),
]
if (defined(invoker.input_root_override)) {
args += [
"--input-root",
- rebase_path(invoker.input_root_override),
+ rebase_path(invoker.input_root_override, root_build_dir),
]
}
@@ -738,6 +808,13 @@ template("mojom") {
"--add-module-metadata",
"webui_module_path=${invoker.webui_module_path}",
]
+ if (defined(invoker.generate_webui_js_bindings) &&
+ invoker.generate_webui_js_bindings) {
+ args += [
+ "--add-module-metadata",
+ "generate_webui_js=True",
+ ]
+ }
}
}
}
@@ -819,11 +896,12 @@ template("mojom") {
}
}
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_cpp_message_ids_target_name) {
+ action(generator_cpp_message_ids_target_name) {
+ allow_remote = true
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = sources_list
+ sources = sources_list +
+ [ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip" ]
deps = [
":$parser_target_name",
"//mojo/public/tools/bindings:precompile_templates",
@@ -835,16 +913,22 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
outputs += [ "$root_gen_dir/$base_path-shared-message-ids.h" ]
}
- response_file_contents = filelist
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
"--generate_non_variant_code",
"--generate_message_ids",
"-g",
@@ -860,12 +944,13 @@ template("mojom") {
generator_shared_target_name = "${target_name}_shared__generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_shared_target_name) {
+ action(generator_shared_target_name) {
+ allow_remote = true
visibility = [ ":*" ]
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = sources_list
+ sources = sources_list +
+ [ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip" ]
deps = [
":$parser_target_name",
"//mojo/public/tools/bindings:precompile_templates",
@@ -878,10 +963,16 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
+ # Need the mojom-module as an input to this action.
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+
outputs += [
+ "$root_gen_dir/$base_path-features.h",
"$root_gen_dir/$base_path-params-data.h",
"$root_gen_dir/$base_path-shared-internal.h",
"$root_gen_dir/$base_path-shared.cc",
@@ -889,10 +980,13 @@ template("mojom") {
]
}
- response_file_contents = filelist
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
"--generate_non_variant_code",
"-g",
"c++",
@@ -923,12 +1017,14 @@ template("mojom") {
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
deps = []
public_deps = []
if (output_file_base_paths != []) {
sources = []
foreach(base_path, output_file_base_paths) {
sources += [
+ "$root_gen_dir/$base_path-features.h",
"$root_gen_dir/$base_path-params-data.h",
"$root_gen_dir/$base_path-shared-internal.h",
"$root_gen_dir/$base_path-shared.cc",
@@ -972,7 +1068,7 @@ template("mojom") {
}
}
- if (generate_fuzzing) {
+ if (generate_mojolpm_fuzzing) {
# This block generates the proto files used for the MojoLPM fuzzer,
# and the corresponding proto targets that will be linked in the fuzzer
# targets. These are independent of the typemappings, and can be done
@@ -981,11 +1077,15 @@ template("mojom") {
generator_mojolpm_proto_target_name =
"${target_name}_mojolpm_proto_generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_mojolpm_proto_target_name) {
+ action(generator_mojolpm_proto_target_name) {
+ allow_remote = true
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = invoker.sources
+ sources =
+ invoker.sources + [
+ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip",
+ "$root_gen_dir/mojo/public/tools/bindings/mojolpm_templates.zip",
+ ]
deps = [
":$parser_target_name",
"//mojo/public/tools/bindings:precompile_templates",
@@ -994,15 +1094,37 @@ template("mojom") {
outputs = []
args = common_generator_args
filelist = []
- foreach(source, invoker.sources) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources = filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+
+ foreach(source, non_gen_sources) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+ inputs += [ "$target_gen_dir/$source-module" ]
outputs += [ "$target_gen_dir/$source.mojolpm.proto" ]
}
- response_file_contents = filelist
+ foreach(source, gen_sources) {
+ filelist += [ rebase_path(source, root_build_dir) ]
+
+ # For generated files, we assume they're in the target_gen_dir or a
+ # sub-folder of it. Rebase the path so we can get the relative location.
+ source_file = rebase_path(source, target_gen_dir)
+ inputs += [ "$target_gen_dir/$source_file-module" ]
+ outputs += [ "$target_gen_dir/$source_file.mojolpm.proto" ]
+ }
+
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path(rsp_file, root_build_dir),
"--generate_non_variant_code",
"-g",
"mojolpm",
@@ -1014,9 +1136,20 @@ template("mojom") {
proto_library(mojolpm_proto_target_name) {
testonly = true
generate_python = false
+
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources = filter_include(invoker.sources, [ gen_dir_path_wildcard ])
sources = process_file_template(
- invoker.sources,
+ non_gen_sources,
[ "{{source_gen_dir}}/{{source_file_part}}.mojolpm.proto" ])
+ sources += process_file_template(
+ gen_sources,
+ [ "{{source_dir}}/{{source_file_part}}.mojolpm.proto" ])
+
import_dirs = [ "//" ]
proto_in_dir = "${root_gen_dir}"
proto_out_dir = "."
@@ -1055,7 +1188,7 @@ template("mojom") {
component_macro_suffix = ""
}
if ((!defined(invoker.disable_variants) || !invoker.disable_variants) &&
- !is_ios) {
+ use_blink) {
blink_variant = {
variant = "blink"
component_macro_suffix = "_BLINK"
@@ -1149,39 +1282,6 @@ template("mojom") {
"${bindings_configuration.component_macro_suffix}_IMPL" ]
}
- export_args = []
- export_args_overridden = false
- if (defined(bindings_configuration.for_blink) &&
- bindings_configuration.for_blink) {
- if (defined(invoker.export_class_attribute_blink)) {
- export_args_overridden = true
- export_args += [
- "--export_attribute",
- invoker.export_class_attribute_blink,
- "--export_header",
- invoker.export_header_blink,
- ]
- }
- } else if (defined(invoker.export_class_attribute)) {
- export_args_overridden = true
- export_args += [
- "--export_attribute",
- invoker.export_class_attribute,
- "--export_header",
- invoker.export_header,
- ]
- }
-
- if (!export_args_overridden && defined(invoker.component_macro_prefix)) {
- export_args += [
- "--export_attribute",
- "COMPONENT_EXPORT(${invoker.component_macro_prefix}" +
- "${bindings_configuration.component_macro_suffix})",
- "--export_header",
- "base/component_export.h",
- ]
- }
-
generate_java = false
if (!cpp_only && defined(invoker.generate_java)) {
generate_java = invoker.generate_java
@@ -1190,6 +1290,38 @@ template("mojom") {
type_mappings_path =
"$target_gen_dir/${target_name}${variant_suffix}__type_mappings"
if (sources_list != []) {
+ export_args = []
+ export_args_overridden = false
+ if (defined(bindings_configuration.for_blink) &&
+ bindings_configuration.for_blink) {
+ if (defined(invoker.export_class_attribute_blink)) {
+ export_args_overridden = true
+ export_args += [
+ "--export_attribute",
+ invoker.export_class_attribute_blink,
+ "--export_header",
+ invoker.export_header_blink,
+ ]
+ }
+ } else if (defined(invoker.export_class_attribute)) {
+ export_args_overridden = true
+ export_args += [
+ "--export_attribute",
+ invoker.export_class_attribute,
+ "--export_header",
+ invoker.export_header,
+ ]
+ }
+ if (!export_args_overridden && defined(invoker.component_macro_prefix)) {
+ export_args += [
+ "--export_attribute",
+ "COMPONENT_EXPORT(${invoker.component_macro_prefix}" +
+ "${bindings_configuration.component_macro_suffix})",
+ "--export_header",
+ "base/component_export.h",
+ ]
+ }
+
generator_cpp_output_suffixes = []
variant_dash_suffix = ""
if (defined(variant)) {
@@ -1198,7 +1330,6 @@ template("mojom") {
generator_cpp_output_suffixes += [
"${variant_dash_suffix}-forward.h",
"${variant_dash_suffix}-import-headers.h",
- "${variant_dash_suffix}-test-utils.cc",
"${variant_dash_suffix}-test-utils.h",
"${variant_dash_suffix}.cc",
"${variant_dash_suffix}.h",
@@ -1207,16 +1338,28 @@ template("mojom") {
generator_target_name = "${target_name}${variant_suffix}__generator"
# TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_target_name) {
+ action(generator_target_name) {
+ allow_remote = true
visibility = [ ":*" ]
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
- sources = sources_list
+ sources =
+ sources_list + [
+ "$root_gen_dir/mojo/public/tools/bindings/cpp_templates.zip",
+ type_mappings_path,
+ ]
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
+ sources += [
+ "$root_gen_dir/mojo/public/tools/bindings/mojolpm_templates.zip",
+ ]
+ }
deps = [
":$parser_target_name",
":$type_mappings_target_name",
"//mojo/public/tools/bindings:precompile_templates",
]
+
if (defined(invoker.parser_deps)) {
deps += invoker.parser_deps
}
@@ -1224,18 +1367,22 @@ template("mojom") {
args = common_generator_args + export_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
+ filename = get_path_info(base_path, "file")
+ dirname = get_path_info(base_path, "dir")
+ inputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
+
outputs += [
"$root_gen_dir/${base_path}${variant_dash_suffix}-forward.h",
"$root_gen_dir/${base_path}${variant_dash_suffix}-import-headers.h",
- "$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.cc",
"$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.h",
"$root_gen_dir/${base_path}${variant_dash_suffix}.cc",
"$root_gen_dir/${base_path}${variant_dash_suffix}.h",
]
- if (generate_fuzzing && !defined(bindings_configuration.variant)) {
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
outputs += [
"$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.cc",
"$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.h",
@@ -1243,14 +1390,17 @@ template("mojom") {
}
}
- response_file_contents = filelist
-
+ # Workaround for https://github.com/ninja-build/ninja/issues/1966.
+ rsp_file = "$target_gen_dir/${target_name}.rsp"
+ write_file(rsp_file, filelist)
+ inputs += [ rsp_file ]
args += [
- "--filelist={{response_file_name}}",
+ "--filelist=" + rebase_path("$rsp_file", root_build_dir),
"-g",
]
- if (generate_fuzzing && !defined(bindings_configuration.variant)) {
+ if (generate_mojolpm_fuzzing &&
+ !defined(bindings_configuration.variant)) {
args += [ "c++,mojolpm" ]
} else {
args += [ "c++" ]
@@ -1294,6 +1444,8 @@ template("mojom") {
"--extra_cpp_template_paths",
rebase_path(extra_cpp_template, root_build_dir),
]
+ inputs += [ extra_cpp_template ]
+
assert(
get_path_info(extra_cpp_template, "extension") == "tmpl",
"--extra_cpp_template_paths only accepts template files ending in extension .tmpl")
@@ -1306,62 +1458,6 @@ template("mojom") {
}
}
- if (generate_fuzzing && !defined(variant)) {
- # This block contains the C++ targets for the MojoLPM fuzzer, we need to
- # do this here so that we can use the typemap configuration for the
- # empty-variant Mojo target.
-
- mojolpm_target_name = "${target_name}_mojolpm"
- mojolpm_generator_target_name = "${target_name}__generator"
- source_set(mojolpm_target_name) {
- # There are still a few missing header dependencies between mojo targets
- # with typemaps and the dependencies of their typemap headers. It would
- # be good to enable include checking for these in the future though.
- check_includes = false
- testonly = true
- if (defined(invoker.sources)) {
- sources = process_file_template(
- invoker.sources,
- [
- "{{source_gen_dir}}/{{source_file_part}}-mojolpm.cc",
- "{{source_gen_dir}}/{{source_file_part}}-mojolpm.h",
- ])
- deps = []
- } else {
- sources = []
- deps = []
- }
-
- public_deps = [
- ":$generator_shared_target_name",
-
- # NB: hardcoded dependency on the no-variant variant generator, since
- # mojolpm only uses the no-variant type.
- ":$mojolpm_generator_target_name",
- ":$mojolpm_proto_target_name",
- "//base",
- "//mojo/public/tools/fuzzers:mojolpm",
- ]
-
- foreach(d, all_deps) {
- # Resolve the name, so that a target //mojo/something becomes
- # //mojo/something:something and we can append variant_suffix to
- # get the cpp dependency name.
- full_name = get_label_info("$d", "label_no_toolchain")
- public_deps += [ "${full_name}_mojolpm" ]
- }
-
- foreach(config, cpp_typemap_configs) {
- if (defined(config.traits_deps)) {
- deps += config.traits_deps
- }
- if (defined(config.traits_public_deps)) {
- public_deps += config.traits_public_deps
- }
- }
- }
- }
-
# Write the typemapping configuration for this target out to a file to be
# validated by a Python script. This helps catch mistakes that can't
# be caught by logic in GN.
@@ -1389,20 +1485,20 @@ template("mojom") {
write_file(_typemap_config_filename, _rebased_typemap_configs, "json")
_mojom_target_name = target_name
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(_typemap_validator_target_name) {
+ action(_typemap_validator_target_name) {
+ allow_remote = true
script = "$mojom_generator_root/validate_typemap_config.py"
inputs = [ _typemap_config_filename ]
outputs = [ _typemap_stamp_filename ]
args = [
get_label_info(_mojom_target_name, "label_no_toolchain"),
- rebase_path(_typemap_config_filename),
- rebase_path(_typemap_stamp_filename),
+ rebase_path(_typemap_config_filename, root_build_dir),
+ rebase_path(_typemap_stamp_filename, root_build_dir),
]
}
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(type_mappings_target_name) {
+ action(type_mappings_target_name) {
+ allow_remote = true
inputs =
mojom_generator_sources + jinja2_sources + [ _typemap_stamp_filename ]
outputs = [ type_mappings_path ]
@@ -1413,6 +1509,7 @@ template("mojom") {
rebase_path(type_mappings_path, root_build_dir),
]
+ sources = []
foreach(d, all_deps) {
name = get_label_info(d, "label_no_toolchain")
toolchain = get_label_info(d, "toolchain")
@@ -1422,12 +1519,11 @@ template("mojom") {
dependency_output_dir =
get_label_info(dependency_output, "target_gen_dir")
dependency_name = get_label_info(dependency_output, "name")
- dependency_path =
- rebase_path("$dependency_output_dir/${dependency_name}",
- root_build_dir)
+ dependency_path = "$dependency_output_dir/${dependency_name}"
+ sources += [ dependency_path ]
args += [
"--dependency",
- dependency_path,
+ rebase_path(dependency_path, root_build_dir),
]
}
@@ -1485,11 +1581,15 @@ template("mojom") {
if (defined(output_name_override)) {
output_name = output_name_override
}
- visibility = output_visibility + [ ":$output_target_name" ]
+ visibility = output_visibility + [
+ ":$output_target_name",
+ ":${target_name}_mojolpm",
+ ]
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
defines = export_defines
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
configs += extra_configs
if (output_file_base_paths != []) {
sources = []
@@ -1578,13 +1678,81 @@ template("mojom") {
}
}
+ if (generate_mojolpm_fuzzing && !defined(variant)) {
+ # This block contains the C++ targets for the MojoLPM fuzzer, we need to
+ # do this here so that we can use the typemap configuration for the
+ # empty-variant Mojo target.
+
+ mojolpm_target_name = "${target_name}_mojolpm"
+ mojolpm_generator_target_name = "${target_name}__generator"
+ source_set(mojolpm_target_name) {
+ # There are still a few missing header dependencies between mojo targets
+ # with typemaps and the dependencies of their typemap headers. It would
+ # be good to enable include checking for these in the future though.
+ check_includes = false
+ testonly = true
+ if (defined(invoker.sources)) {
+ # Split the input into generated and non-generated source files. They
+ # need to be processed separately.
+ gen_dir_path_wildcard = get_path_info("//", "gen_dir") + "/*"
+ non_gen_sources =
+ filter_exclude(invoker.sources, [ gen_dir_path_wildcard ])
+ gen_sources =
+ filter_include(invoker.sources, [ gen_dir_path_wildcard ])
+ sources = process_file_template(
+ non_gen_sources,
+ [
+ "{{source_gen_dir}}/{{source_file_part}}-mojolpm.cc",
+ "{{source_gen_dir}}/{{source_file_part}}-mojolpm.h",
+ ])
+ sources += process_file_template(
+ gen_sources,
+ [
+ "{{source_dir}}/{{source_file_part}}-mojolpm.cc",
+ "{{source_dir}}/{{source_file_part}}-mojolpm.h",
+ ])
+ deps = [ ":$output_target_name" ]
+ } else {
+ sources = []
+ deps = []
+ }
+
+ public_deps = [
+ ":$generator_shared_target_name",
+
+ # NB: hardcoded dependency on the no-variant variant generator, since
+ # mojolpm only uses the no-variant type.
+ ":$mojolpm_generator_target_name",
+ ":$mojolpm_proto_target_name",
+ "//base",
+ "//mojo/public/tools/fuzzers:mojolpm",
+ ]
+
+ foreach(d, all_deps) {
+ # Resolve the name, so that a target //mojo/something becomes
+ # //mojo/something:something and we can append variant_suffix to
+ # get the cpp dependency name.
+ full_name = get_label_info("$d", "label_no_toolchain")
+ public_deps += [ "${full_name}_mojolpm" ]
+ }
+
+ foreach(config, cpp_typemap_configs) {
+ if (defined(config.traits_deps)) {
+ deps += config.traits_deps
+ }
+ if (defined(config.traits_public_deps)) {
+ public_deps += config.traits_public_deps
+ }
+ }
+ }
+ }
+
if (generate_java && is_android) {
import("//build/config/android/rules.gni")
java_generator_target_name = target_name + "_java__generator"
if (sources_list != []) {
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(java_generator_target_name) {
+ action(java_generator_target_name) {
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
sources = sources_list
@@ -1597,7 +1765,7 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
outputs += [ "$root_gen_dir/$base_path.srcjar" ]
@@ -1624,8 +1792,7 @@ template("mojom") {
java_srcjar_target_name = target_name + "_java_sources"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(java_srcjar_target_name) {
+ action(java_srcjar_target_name) {
script = "//build/android/gyp/zip.py"
inputs = []
if (output_file_base_paths != []) {
@@ -1651,7 +1818,6 @@ template("mojom") {
android_library(java_target_name) {
forward_variables_from(invoker, [ "enable_bytecode_checks" ])
deps = [
- "//base:base_java",
"//mojo/public/java:bindings_java",
"//mojo/public/java:system_java",
"//third_party/androidx:androidx_annotation_annotation_java",
@@ -1673,21 +1839,36 @@ template("mojom") {
}
}
- use_typescript_for_target =
- enable_typescript_bindings && defined(invoker.use_typescript_sources) &&
- invoker.use_typescript_sources
+ if (defined(invoker.generate_webui_js_bindings)) {
+ assert(is_chromeos_ash,
+ "generate_webui_js_bindings can only be used on ChromeOS Ash")
+ assert(invoker.generate_webui_js_bindings,
+ "generate_webui_js_bindings should be set to true or removed")
+ }
+
+ use_typescript_for_target = defined(invoker.webui_module_path) &&
+ !defined(invoker.generate_webui_js_bindings)
- if (!use_typescript_for_target && defined(invoker.use_typescript_sources)) {
- not_needed(invoker, [ "use_typescript_sources" ])
+ generate_legacy_js = !use_typescript_for_target ||
+ (defined(invoker.generate_legacy_js_bindings) &&
+ invoker.generate_legacy_js_bindings)
+
+ if (!use_typescript_for_target &&
+ defined(invoker.generate_legacy_js_bindings)) {
+ not_needed(invoker, [ "generate_legacy_js_bindings" ])
}
- if ((generate_fuzzing || !defined(invoker.cpp_only) || !invoker.cpp_only) &&
- !use_typescript_for_target) {
+ # Targets needed by both TS and JS bindings targets. These are needed
+ # unconditionally for JS bindings targets, and are needed for TS bindings
+ # targets when generate_legacy_js_bindings is true. This option is provided
+  # since the legacy bindings are needed by Blink tests and non-Chromium
+  # users, who are not expected to migrate to modules or TypeScript.
+ if (generate_legacy_js && (generate_js_fuzzing ||
+ !defined(invoker.cpp_only) || !invoker.cpp_only)) {
if (sources_list != []) {
generator_js_target_name = "${target_name}_js__generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_js_target_name) {
+ action(generator_js_target_name) {
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
sources = sources_list
@@ -1702,19 +1883,18 @@ template("mojom") {
args = common_generator_args
filelist = []
foreach(source, sources_list) {
- filelist += [ rebase_path("$source", root_build_dir) ]
+ filelist += [ rebase_path(source, root_build_dir) ]
}
foreach(base_path, output_file_base_paths) {
outputs += [
"$root_gen_dir/$base_path.js",
- "$root_gen_dir/$base_path.externs.js",
"$root_gen_dir/$base_path.m.js",
"$root_gen_dir/$base_path-lite.js",
- "$root_gen_dir/$base_path.html",
"$root_gen_dir/$base_path-lite-for-compile.js",
]
- if (defined(invoker.webui_module_path)) {
+ if (defined(invoker.webui_module_path) &&
+ !use_typescript_for_target) {
outputs += [ "$root_gen_dir/mojom-webui/$base_path-webui.js" ]
}
}
@@ -1725,7 +1905,6 @@ template("mojom") {
"--filelist={{response_file_name}}",
"-g",
"javascript",
- "--js_bindings_mode=new",
]
if (defined(invoker.js_generate_struct_deserializers) &&
@@ -1739,7 +1918,7 @@ template("mojom") {
args += message_scrambling_args
}
- if (generate_fuzzing) {
+ if (generate_js_fuzzing) {
args += [ "--generate_fuzzing" ]
}
}
@@ -1783,31 +1962,13 @@ template("mojom") {
data_deps += [ "${full_name}_js_data_deps" ]
}
}
+ }
- js_library_target_name = "${target_name}_js_library"
- if (sources_list != []) {
- js_library(js_library_target_name) {
- extra_public_deps = [ ":$generator_js_target_name" ]
- sources = []
- foreach(base_path, output_file_base_paths) {
- sources += [ "$root_gen_dir/${base_path}-lite.js" ]
- }
- externs_list = [
- "${externs_path}/mojo_core.js",
- "${externs_path}/pending.js",
- ]
-
- deps = []
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- deps += [ "${full_name}_js_library" ]
- }
- }
- } else {
- group(js_library_target_name) {
- }
- }
-
+ # js_library() closure compiler targets, primarily used on ChromeOS. Only
+ # generate these targets if the mojom target is not C++ only and is not using
+ # TypeScript.
+ if (generate_mojom_closure_libraries &&
+ (!defined(invoker.cpp_only) || !invoker.cpp_only) && generate_legacy_js) {
js_library_for_compile_target_name = "${target_name}_js_library_for_compile"
if (sources_list != []) {
js_library(js_library_for_compile_target_name) {
@@ -1834,35 +1995,9 @@ template("mojom") {
}
}
- js_modules_target_name = "${target_name}_js_modules"
- if (sources_list != []) {
- js_library(js_modules_target_name) {
- extra_public_deps = [ ":$generator_js_target_name" ]
- sources = []
- foreach(base_path, output_file_base_paths) {
- sources += [ "$root_gen_dir/${base_path}.m.js" ]
- }
- externs_list = [
- "${externs_path}/mojo_core.js",
- "${externs_path}/pending.js",
- ]
- if (defined(invoker.disallow_native_types) &&
- invoker.disallow_native_types) {
- deps = []
- } else {
- deps = [ "//mojo/public/js:bindings_uncompiled" ]
- }
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- deps += [ "${full_name}_js_modules" ]
- }
- }
- } else {
- group(js_modules_target_name) {
- }
- }
-
- if (defined(invoker.webui_module_path)) {
+  # WebUI-specific closure targets, not needed by targets that are generating
+ # TypeScript WebUI bindings or by legacy-only targets.
+ if (defined(invoker.webui_module_path) && !use_typescript_for_target) {
webui_js_target_name = "${target_name}_webui_js"
if (sources_list != []) {
js_library(webui_js_target_name) {
@@ -1890,46 +2025,38 @@ template("mojom") {
group(webui_js_target_name) {
}
}
- }
- }
- if ((generate_fuzzing || !defined(invoker.cpp_only) || !invoker.cpp_only) &&
- use_typescript_for_target) {
- generator_js_target_names = []
- source_filelist = []
- foreach(source, sources_list) {
- source_filelist += [ rebase_path("$source", root_build_dir) ]
- }
- dependency_types = [
- {
- name = "regular"
- ts_extension = ".ts"
- js_extension = ".js"
- },
- {
- name = "es_modules"
- ts_extension = ".m.ts"
- js_extension = ".m.js"
- },
- ]
+ webui_grdp_target_name = "${target_name}_webui_grdp"
+ out_grd = "$target_gen_dir/${target_name}_webui_resources.grdp"
+ grd_prefix = "${target_name}_webui"
+ generate_grd(webui_grdp_target_name) {
+ grd_prefix = grd_prefix
+ out_grd = out_grd
- foreach(dependency_type, dependency_types) {
- ts_outputs = []
- js_outputs = []
+ deps = [ ":$webui_js_target_name" ]
- foreach(base_path, output_file_base_paths) {
- ts_outputs +=
- [ "$root_gen_dir/$base_path-lite${dependency_type.ts_extension}" ]
- js_outputs +=
- [ "$root_gen_dir/$base_path-lite${dependency_type.js_extension}" ]
+ input_files = []
+ foreach(base_path, output_file_base_paths) {
+ input_files += [ "${base_path}-webui.js" ]
+ }
+
+ input_files_base_dir =
+ rebase_path("$root_gen_dir/mojom-webui", "$root_build_dir")
+ }
+ }
+ }
+ if ((generate_js_fuzzing || !defined(invoker.cpp_only) ||
+ !invoker.cpp_only) && use_typescript_for_target) {
+ if (sources_list != []) {
+ source_filelist = []
+ foreach(source, sources_list) {
+ source_filelist += [ rebase_path(source, root_build_dir) ]
}
# Generate Typescript bindings.
- generator_ts_target_name =
- "${target_name}_${dependency_type.name}__ts__generator"
+ generator_ts_target_name = "${target_name}_ts__generator"
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_ts_target_name) {
+ action(generator_ts_target_name) {
script = mojom_generator_script
inputs = mojom_generator_sources + jinja2_sources
sources = sources_list
@@ -1938,7 +2065,10 @@ template("mojom") {
"//mojo/public/tools/bindings:precompile_templates",
]
- outputs = ts_outputs
+ outputs = []
+ foreach(base_path, output_file_base_paths) {
+ outputs += [ "$root_gen_dir/$base_path-webui.ts" ]
+ }
args = common_generator_args
response_file_contents = source_filelist
@@ -1948,97 +2078,20 @@ template("mojom") {
"typescript",
]
- if (dependency_type.name == "es_modules") {
- args += [ "--ts_use_es_modules" ]
- }
-
- # TODO(crbug.com/1007587): Support scramble_message_ids.
- # TODO(crbug.com/1007591): Support generate_fuzzing.
- }
-
- # Create tsconfig.json for the generated Typescript.
- tsconfig_filename =
- "$target_gen_dir/$target_name-${dependency_type.name}-tsconfig.json"
- tsconfig = {
- }
- tsconfig.compilerOptions = {
- composite = true
- target = "es6"
- module = "es6"
- lib = [
- "es6",
- "esnext.bigint",
- ]
- strict = true
- }
- tsconfig.files = []
- foreach(base_path, output_file_base_paths) {
- tsconfig.files += [ rebase_path(
- "$root_gen_dir/$base_path-lite${dependency_type.ts_extension}",
- target_gen_dir,
- root_gen_dir) ]
- }
- tsconfig.references = []
-
- # Get tsconfigs for deps.
- foreach(d, all_deps) {
- dep_target_gen_dir = rebase_path(get_label_info(d, "target_gen_dir"))
- dep_name = get_label_info(d, "name")
- reference = {
- }
- reference.path = "$dep_target_gen_dir/$dep_name-${dependency_type.name}-tsconfig.json"
- tsconfig.references += [ reference ]
- }
- write_file(tsconfig_filename, tsconfig, "json")
-
- # Compile previously generated Typescript to Javascript.
- generator_js_target_name =
- "${target_name}_${dependency_type.name}__js__generator"
- generator_js_target_names += [ generator_js_target_name ]
-
- # TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds.
- python2_action(generator_js_target_name) {
- script = "$mojom_generator_root/compile_typescript.py"
- sources = ts_outputs
- outputs = js_outputs
- public_deps = [ ":$generator_ts_target_name" ]
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- public_deps +=
- [ "${full_name}_${dependency_type.name}__js__generator" ]
+ if (!defined(invoker.scramble_message_ids) ||
+ invoker.scramble_message_ids) {
+ inputs += message_scrambling_inputs
+ args += message_scrambling_args
}
- absolute_tsconfig_path =
- rebase_path(tsconfig_filename, "", target_gen_dir)
- args = [ "--tsconfig_path=$absolute_tsconfig_path" ]
- }
- }
-
- js_target_name = target_name + "_js"
- group(js_target_name) {
- public_deps = []
- if (sources_list != []) {
- foreach(generator_js_target_name, generator_js_target_names) {
- public_deps += [ ":$generator_js_target_name" ]
+ if (defined(invoker.js_generate_struct_deserializers) &&
+ invoker.js_generate_struct_deserializers) {
+ args += [ "--js_generate_struct_deserializers" ]
}
- }
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- public_deps += [ "${full_name}_js" ]
- }
- }
-
- group(js_data_deps_target_name) {
- data = js_outputs
- deps = []
- foreach(generator_js_target_name, generator_js_target_names) {
- deps += [ ":$generator_js_target_name" ]
- }
- data_deps = []
- foreach(d, all_deps) {
- full_name = get_label_info(d, "label_no_toolchain")
- data_deps += [ "${full_name}_js_data_deps" ]
+      # TODO(crbug.com/1007587): Support scramble_message_ids if the above is
+      # insufficient.
+ # TODO(crbug.com/1007591): Support generate_fuzzing.
}
}
}
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
index da9efc71..8c641c2a 100755
--- a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -57,10 +57,17 @@ _BUILTIN_GENERATORS = {
"typescript": "mojom_ts_generator",
}
+_BUILTIN_CHECKS = {
+ "attributes": "mojom_attributes_check",
+ "definitions": "mojom_definitions_check",
+ "features": "mojom_interface_feature_check",
+ "restrictions": "mojom_restrictions_check",
+}
+
def LoadGenerators(generators_string):
if not generators_string:
- return [] # No generators.
+ return {} # No generators.
generators = {}
for generator_name in [s.strip() for s in generators_string.split(",")]:
@@ -74,6 +81,21 @@ def LoadGenerators(generators_string):
return generators
+def LoadChecks(checks_string):
+ if not checks_string:
+ return {} # No checks.
+
+ checks = {}
+ for check_name in [s.strip() for s in checks_string.split(",")]:
+ check = check_name.lower()
+ if check not in _BUILTIN_CHECKS:
+ print("Unknown check name %s" % check_name)
+ sys.exit(1)
+ check_module = importlib.import_module("checks.%s" % _BUILTIN_CHECKS[check])
+ checks[check] = check_module
+ return checks
+
+
def MakeImportStackMessage(imported_filename_stack):
"""Make a (human-readable) message listing a chain of imports. (Returned
string begins with a newline (if nonempty) and does not end with one.)"""
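LoadChecks() mirrors LoadGenerators(): the comma-separated --checks value maps, via _BUILTIN_CHECKS, to imported modules keyed by check name, and _GenerateModule() later runs every loaded check before any generator. A short sketch of that flow, assuming `module` is an already-parsed mojom module:

# check_modules as returned by LoadChecks() above.
check_modules = LoadChecks("attributes,definitions")
for check_module in check_modules.values():
    # Check() wraps the module; CheckModule() raises on failure.
    check_module.Check(module).CheckModule()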
@@ -82,7 +104,7 @@ def MakeImportStackMessage(imported_filename_stack):
zip(imported_filename_stack[1:], imported_filename_stack)]))
-class RelativePath(object):
+class RelativePath:
"""Represents a path relative to the source tree or generated output dir."""
def __init__(self, path, source_root, output_dir):
@@ -142,7 +164,7 @@ def ReadFileContents(filename):
return f.read()
-class MojomProcessor(object):
+class MojomProcessor:
"""Takes parsed mojom modules and generates language bindings from them.
Attributes:
@@ -169,8 +191,8 @@ class MojomProcessor(object):
if 'c++' in self._typemap:
self._typemap['mojolpm'] = self._typemap['c++']
- def _GenerateModule(self, args, remaining_args, generator_modules,
- rel_filename, imported_filename_stack):
+ def _GenerateModule(self, args, remaining_args, check_modules,
+ generator_modules, rel_filename, imported_filename_stack):
# Return the already-generated module.
if rel_filename.path in self._processed_files:
return self._processed_files[rel_filename.path]
@@ -190,12 +212,16 @@ class MojomProcessor(object):
ScrambleMethodOrdinals(module.interfaces, salt)
if self._should_generate(rel_filename.path):
+ # Run checks on module first.
+ for check_module in check_modules.values():
+ checker = check_module.Check(module)
+ checker.CheckModule()
+ # Then run generation.
for language, generator_module in generator_modules.items():
generator = generator_module.Generator(
module, args.output_dir, typemap=self._typemap.get(language, {}),
variant=args.variant, bytecode_path=args.bytecode_path,
for_blink=args.for_blink,
- js_bindings_mode=args.js_bindings_mode,
js_generate_struct_deserializers=\
args.js_generate_struct_deserializers,
export_attribute=args.export_attribute,
@@ -234,6 +260,7 @@ def _Generate(args, remaining_args):
args.import_directories[idx] = RelativePath(tokens[0], args.depth,
args.output_dir)
generator_modules = LoadGenerators(args.generators_string)
+ check_modules = LoadChecks(args.checks_string)
fileutil.EnsureDirectoryExists(args.output_dir)
@@ -246,7 +273,7 @@ def _Generate(args, remaining_args):
for filename in args.filename:
processor._GenerateModule(
- args, remaining_args, generator_modules,
+ args, remaining_args, check_modules, generator_modules,
RelativePath(filename, args.depth, args.output_dir), [])
return 0
@@ -286,6 +313,12 @@ def main():
metavar="GENERATORS",
default="c++,javascript,java,mojolpm",
help="comma-separated list of generators")
+ generate_parser.add_argument("-c",
+ "--checks",
+ dest="checks_string",
+ metavar="CHECKS",
+ default=",".join(_BUILTIN_CHECKS.keys()),
+ help="comma-separated list of checks")
generate_parser.add_argument(
"--gen_dir", dest="gen_directories", action="append", metavar="directory",
default=[], help="add a directory to be searched for the syntax trees.")
@@ -309,11 +342,6 @@ def main():
help="Use WTF types as generated types for mojo "
"string/array/map.")
generate_parser.add_argument(
- "--js_bindings_mode", choices=["new", "old"], default="old",
- help="This option only affects the JavaScript bindings. The value could "
- "be \"new\" to generate new-style lite JS bindings in addition to the "
- "old, or \"old\" to only generate old bindings.")
- generate_parser.add_argument(
"--js_generate_struct_deserializers", action="store_true",
help="Generate javascript deserialize methods for structs in "
"mojom-lite.js file")
@@ -387,4 +415,10 @@ def main():
if __name__ == "__main__":
with crbug_1001171.DumpStateOnLookupError():
- sys.exit(main())
+ ret = main()
+ # Exit without running GC, which can save multiple seconds due to the large
+  # number of objects created. But flushing is necessary, as os._exit doesn't
+  # do that.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(ret)
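Unlike sys.exit(), os._exit() terminates the process immediately, skipping interpreter teardown (garbage collection, atexit handlers) and any buffered-I/O flushing, which is why the two flush calls above are explicit. The same pattern as a standalone sketch:

import os
import sys

def run_and_exit(main):
    ret = main()
    # os._exit() bypasses normal teardown and does not flush buffered
    # output, so flush explicitly before exiting.
    sys.stdout.flush()
    sys.stderr.flush()
    os._exit(ret)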
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
index bddbe3f4..761922b6 100644
--- a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,13 +8,13 @@ from mojom_bindings_generator import MakeImportStackMessage
from mojom_bindings_generator import ScrambleMethodOrdinals
-class FakeIface(object):
+class FakeIface:
def __init__(self):
self.mojom_name = None
self.methods = None
-class FakeMethod(object):
+class FakeMethod:
def __init__(self, explicit_ordinal=None):
self.explicit_ordinal = explicit_ordinal
self.ordinal = explicit_ordinal
diff --git a/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py b/utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
index f1783d59..6bb7a209 100755
--- a/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
+++ b/utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -17,7 +17,8 @@ def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
])
_SUPPORTED_TYPE_KEYS = set([
'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
- 'move_only', 'nullable_is_same_type'
+ 'move_only', 'nullable_is_same_type', 'forward_declaration',
+ 'default_constructible'
])
with open(config_filename, 'r') as f:
for config in json.load(f):
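CheckCppTypemapConfigs() rejects any per-type key outside _SUPPORTED_TYPE_KEYS, which now also admits forward_declaration and default_constructible. A rough sketch of that validation loop; the 'types' key and the error handling are assumptions for illustration, not the script's exact shape:

import json

SUPPORTED_TYPE_KEYS = {
    'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
    'move_only', 'nullable_is_same_type', 'forward_declaration',
    'default_constructible',
}

def check_type_configs(config_filename):
    with open(config_filename, 'r') as f:
        for config in json.load(f):
            # Assumed config shape: each entry carries a list under 'types'.
            for type_config in config.get('types', []):
                unknown = set(type_config) - SUPPORTED_TYPE_KEYS
                if unknown:
                    raise ValueError('unsupported typemap keys: %s' %
                                     ', '.join(sorted(unknown)))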
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
new file mode 100644
index 00000000..eafb95a1
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+group("tests") {
+ data = [
+ "check_stable_mojom_compatibility_unittest.py",
+ "check_stable_mojom_compatibility.py",
+ "const_unittest.py",
+ "enum_unittest.py",
+ "feature_unittest.py",
+ "mojom_parser_test_case.py",
+ "mojom_parser_unittest.py",
+ "mojom_parser.py",
+ "stable_attribute_unittest.py",
+ "version_compatibility_unittest.py",
+ ]
+}
diff --git a/utils/ipc/mojo/public/tools/mojom/README.md b/utils/codegen/ipc/mojo/public/tools/mojom/README.md
index e5d17ab0..e5d17ab0 100644
--- a/utils/ipc/mojo/public/tools/mojom/README.md
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/README.md
diff --git a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
index 08bd672f..35cd1cfd 100755
--- a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Verifies backward-compatibility of mojom type changes.
@@ -12,20 +12,18 @@ This can be used e.g. by a presubmit check to prevent developers from making
breaking changes to stable mojoms."""
import argparse
-import errno
import io
import json
import os
import os.path
-import shutil
-import six
import sys
-import tempfile
from mojom.generate import module
from mojom.generate import translate
from mojom.parse import parser
+# pylint: disable=raise-missing-from
+
class ParseError(Exception):
pass
@@ -41,6 +39,8 @@ def _ValidateDelta(root, delta):
transitive closure of a mojom's input dependencies all at once.
"""
+ translate.is_running_backwards_compatibility_check_hack = True
+
# First build a map of all files covered by the delta
affected_files = set()
old_files = {}
@@ -73,11 +73,35 @@ def _ValidateDelta(root, delta):
try:
ast = parser.Parse(contents, mojom)
except Exception as e:
- six.reraise(
- ParseError,
- 'encountered exception {0} while parsing {1}'.format(e, mojom),
- sys.exc_info()[2])
+ raise ParseError('encountered exception {0} while parsing {1}'.format(
+ e, mojom))
+
+ # Files which are generated at compile time can't be checked by this script
+ # (at the moment) since they may not exist in the output directory.
+ generated_files_to_skip = {
+ ('third_party/blink/public/mojom/runtime_feature_state/'
+ 'runtime_feature.mojom'),
+ ('third_party/blink/public/mojom/origin_trial_feature/'
+ 'origin_trial_feature.mojom'),
+ }
+
+ ast.import_list.items = [
+ x for x in ast.import_list.items
+ if x.import_filename not in generated_files_to_skip
+ ]
+
for imp in ast.import_list:
+ if (not file_overrides.get(imp.import_filename)
+ and not os.path.exists(os.path.join(root, imp.import_filename))):
+ # Speculatively construct a path prefix to locate the import_filename
+ mojom_path = os.path.dirname(os.path.normpath(mojom)).split(os.sep)
+ test_prefix = ''
+ for path_component in mojom_path:
+ test_prefix = os.path.join(test_prefix, path_component)
+ test_import_filename = os.path.join(test_prefix, imp.import_filename)
+ if os.path.exists(os.path.join(root, test_import_filename)):
+ imp.import_filename = test_import_filename
+ break
parseMojom(imp.import_filename, file_overrides, override_modules)
# Now that the transitive set of dependencies has been imported and parsed
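When an import cannot be found relative to the root, the new logic walks successive prefixes of the importing mojom's directory, joining each prefix with the import path until a candidate exists on disk. The same walk as a standalone sketch (root and file layout are hypothetical):

import os

def resolve_import(root, importing_mojom, import_filename):
    # Try each directory prefix of the importer as a base for the import.
    prefix = ''
    components = os.path.dirname(os.path.normpath(importing_mojom)).split(os.sep)
    for component in components:
        prefix = os.path.join(prefix, component)
        candidate = os.path.join(prefix, import_filename)
        if os.path.exists(os.path.join(root, candidate)):
            return candidate
    return import_filename  # Fall back to the path as written.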
@@ -89,10 +113,10 @@ def _ValidateDelta(root, delta):
modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)
old_modules = {}
- for mojom in old_files.keys():
+ for mojom in old_files:
parseMojom(mojom, old_files, old_modules)
new_modules = {}
- for mojom in new_files.keys():
+ for mojom in new_files:
parseMojom(mojom, new_files, new_modules)
# At this point we have a complete set of translated Modules from both the
@@ -132,12 +156,21 @@ def _ValidateDelta(root, delta):
'can be deleted by a subsequent change.' % qualified_name)
checker = module.BackwardCompatibilityChecker()
- if not checker.IsBackwardCompatible(new_types[new_name], kind):
- raise Exception('Stable type %s appears to have changed in a way which '
- 'breaks backward-compatibility. Please fix!\n\nIf you '
- 'believe this assessment to be incorrect, please file a '
- 'Chromium bug against the "Internals>Mojo>Bindings" '
- 'component.' % qualified_name)
+ try:
+ if not checker.IsBackwardCompatible(new_types[new_name], kind):
+ raise Exception(
+ 'Stable type %s appears to have changed in a way which '
+ 'breaks backward-compatibility. Please fix!\n\nIf you '
+ 'believe this assessment to be incorrect, please file a '
+ 'Chromium bug against the "Internals>Mojo>Bindings" '
+ 'component.' % qualified_name)
+ except Exception as e:
+ raise Exception(
+ 'Stable type %s appears to have changed in a way which '
+ 'breaks backward-compatibility: \n\n%s.\nPlease fix!\n\nIf you '
+ 'believe this assessment to be incorrect, please file a '
+ 'Chromium bug against the "Internals>Mojo>Bindings" '
+ 'component.' % (qualified_name, e))
def Run(command_line, delta=None):
diff --git a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
index 9f51ea77..06769c95 100755
--- a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -15,7 +15,7 @@ import check_stable_mojom_compatibility
from mojom.generate import module
-class Change(object):
+class Change:
"""Helper to clearly define a mojom file delta to be analyzed."""
def __init__(self, filename, old=None, new=None):
@@ -28,7 +28,7 @@ class Change(object):
class UnchangedFile(Change):
def __init__(self, filename, contents):
- super(UnchangedFile, self).__init__(filename, old=contents, new=contents)
+ super().__init__(filename, old=contents, new=contents)
class CheckStableMojomCompatibilityTest(unittest.TestCase):
@@ -258,3 +258,82 @@ class CheckStableMojomCompatibilityTest(unittest.TestCase):
[Stable] struct T { foo.S s; int32 x; };
""")
])
+
+ def testWithPartialImport(self):
+ """The compatibility checking tool correctly parses imports with partial
+ paths."""
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('foo/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('foo/bar.mojom',
+ old="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ self.assertNotBackwardCompatible([
+ UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
+ Change('bar/bar.mojom',
+ old="""\
+ module bar;
+ import "foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """,
+ new="""\
+ module bar;
+ import "foo/foo.mojom";
+ [Stable] struct T { foo.S s; };
+ """)
+ ])
+
+ def testNewEnumDefault(self):
+ # Should be backwards compatible since it does not affect the wire format.
+ # This specific case also checks that the backwards compatibility checker
+ # does not throw an error due to the older version of the enum not
+ # specifying [Default].
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Extensible] enum E { One };',
+ new='[Extensible] enum E { [Default] One };')
+ ])
+ self.assertBackwardCompatible([
+ Change('foo/foo.mojom',
+ old='[Extensible] enum E { [Default] One, Two, };',
+ new='[Extensible] enum E { One, [Default] Two, };')
+ ])
diff --git a/utils/ipc/mojo/public/tools/mojom/const_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
index cb42dfac..e8ed36a7 100644
--- a/utils/ipc/mojo/public/tools/mojom/const_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/enum_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
index d9005078..9269cde5 100644
--- a/utils/ipc/mojo/public/tools/mojom/enum_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -90,3 +90,31 @@ class EnumTest(MojomParserTestCase):
self.assertEqual('F', b.enums[0].mojom_name)
self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
self.assertEqual(37, b.enums[0].fields[0].numeric_value)
+
+ def testEnumAttributesAreEnums(self):
+ """Verifies that enum values in attributes are really enum types."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(a_mojom, 'module a; enum E { kFoo, kBar };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, 'module b;'
+ 'import "a.mojom";'
+ '[MooCow=a.E.kFoo]'
+ 'interface Foo { Foo(); };')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(b.interfaces[0].attributes['MooCow'].mojom_name, 'kFoo')
+
+ def testConstantAttributes(self):
+ """Verifies that constants as attributes are translated to the constant."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(
+ a_mojom, 'module a;'
+ 'enum E { kFoo, kBar };'
+ 'const E kB = E.kFoo;'
+ '[Attr=kB] interface Hello { Foo(); };')
+ self.ParseMojoms([a_mojom])
+ a = self.LoadModule(a_mojom)
+ self.assertEqual(a.interfaces[0].attributes['Attr'].mojom_name, 'kB')
+    self.assertEqual(a.interfaces[0].attributes['Attr'].value.mojom_name,
+                     'kFoo')
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py
new file mode 100644
index 00000000..5f014e1c
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/feature_unittest.py
@@ -0,0 +1,84 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class FeatureTest(MojomParserTestCase):
+ """Tests feature parsing behavior."""
+ def testFeatureOff(self):
+ """Verifies basic parsing of feature types."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ [AttributeOne=ValueOne]
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_DISABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = false;
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('false', types['kFeature'].constants[1].value)
+
+ def testFeatureOn(self):
+ """Verifies basic parsing of feature types."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_ENABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = true;
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('true', types['kFeature'].constants[1].value)
+
+ def testFeatureWeakKeyword(self):
+ """Verifies that `feature` is a weak keyword."""
+ types = self.ExtractTypes("""
+ // e.g. BASE_DECLARE_FEATURE(kFeature);
+ [AttributeOne=ValueOne]
+ feature kFeature {
+ // BASE_FEATURE(kFeature,"MyFeature",
+ // base::FEATURE_DISABLED_BY_DEFAULT);
+ const string name = "MyFeature";
+ const bool default_state = false;
+ };
+ struct MyStruct {
+ bool feature = true;
+ };
+ interface InterfaceName {
+ Method(string feature) => (int32 feature);
+ };
+ """)
+ self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
+ self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
+ self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
+ self.assertEqual('false', types['kFeature'].constants[1].value)
+
+ def testFeatureAttributesAreFeatures(self):
+ """Verifies that feature values in attributes are really feature types."""
+ a_mojom = 'a.mojom'
+ self.WriteFile(
+ a_mojom, 'module a;'
+ 'feature F { const string name = "f";'
+ 'const bool default_state = false; };')
+ b_mojom = 'b.mojom'
+ self.WriteFile(
+ b_mojom, 'module b;'
+ 'import "a.mojom";'
+ 'feature G'
+ '{const string name = "g"; const bool default_state = false;};'
+ '[Attri=a.F] interface Foo { Foo(); };'
+ '[Boink=G] interface Bar {};')
+ self.ParseMojoms([a_mojom, b_mojom])
+ b = self.LoadModule(b_mojom)
+ self.assertEqual(b.interfaces[0].attributes['Attri'].mojom_name, 'F')
+ self.assertEqual(b.interfaces[1].attributes['Boink'].mojom_name, 'G')
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
index 51facc0c..a0edf0eb 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,6 +8,7 @@ group("mojom") {
"error.py",
"fileutil.py",
"generate/__init__.py",
+ "generate/check.py",
"generate/generator.py",
"generate/module.py",
"generate/pack.py",
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py
index e69de29b..e69de29b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/__init__.py
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/error.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
index 8a1e03da..dd53b835 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/error.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
index bf626f54..124f12c1 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
@@ -1,9 +1,8 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import errno
-import imp
import os.path
import sys
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
index ff5753a2..c93d2289 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
@@ -1,20 +1,17 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
import os.path
import shutil
-import sys
import tempfile
import unittest
from mojom import fileutil
-
class FileUtilTest(unittest.TestCase):
def testEnsureDirectoryExists(self):
- """Test that EnsureDirectoryExists fuctions correctly."""
+ """Test that EnsureDirectoryExists functions correctly."""
temp_dir = tempfile.mkdtemp()
try:
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
index e69de29b..e69de29b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py
new file mode 100644
index 00000000..1efe2022
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/check.py
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Code shared by the various pre-generation mojom checkers."""
+
+
+class CheckException(Exception):
+ def __init__(self, module, message):
+ self.module = module
+ self.message = message
+ super().__init__(self.message)
+
+ def __str__(self):
+ return "Failed mojo pre-generation check for {}:\n{}".format(
+ self.module.path, self.message)
+
+
+class Check:
+ def __init__(self, module):
+ self.module = module
+
+ def CheckModule(self):
+ """ Subclass should return True if its Checks pass, and throw an
+ exception otherwise. CheckModule will be called immediately before
+ mojom.generate.Generator.GenerateFiles()"""
+ raise NotImplementedError("Subclasses must override/implement this method")
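A hypothetical subclass illustrates the contract; NameCheck and its rule are invented for illustration and are not one of the built-in checks:

from mojom.generate import check

class NameCheck(check.Check):
    def CheckModule(self):
        # Illustrative rule: interface names must start with an uppercase
        # letter; raise CheckException on the first violation.
        for interface in self.module.interfaces:
            if not interface.mojom_name[:1].isupper():
                raise check.CheckException(
                    self.module,
                    'interface %s must start uppercase' % interface.mojom_name)
        return True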
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
index 4a1c73fc..96fe3a2d 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code shared by the various language-specific code generators."""
@@ -97,7 +97,7 @@ def ToLowerSnakeCase(identifier):
return _ToSnakeCase(identifier, upper=False)
-class Stylizer(object):
+class Stylizer:
"""Stylizers specify naming rules to map mojom names to names in generated
code. For example, if you would like method_name in mojom to be mapped to
MethodName in the generated code, you need to define a subclass of Stylizer
@@ -130,6 +130,9 @@ class Stylizer(object):
def StylizeEnum(self, mojom_name):
return mojom_name
+ def StylizeFeature(self, mojom_name):
+ return mojom_name
+
def StylizeModule(self, mojom_namespace):
return mojom_namespace
@@ -233,7 +236,7 @@ def AddComputedData(module):
_AddInterfaceComputedData(interface)
-class Generator(object):
+class Generator:
# Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
# files to stdout.
def __init__(self,
@@ -243,7 +246,6 @@ class Generator(object):
variant=None,
bytecode_path=None,
for_blink=False,
- js_bindings_mode="new",
js_generate_struct_deserializers=False,
export_attribute=None,
export_header=None,
@@ -262,7 +264,6 @@ class Generator(object):
self.variant = variant
self.bytecode_path = bytecode_path
self.for_blink = for_blink
- self.js_bindings_mode = js_bindings_mode
self.js_generate_struct_deserializers = js_generate_struct_deserializers
self.export_attribute = export_attribute
self.export_header = export_header
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
index 32c884a8..7143e07c 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
@@ -1,13 +1,12 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
+import importlib.util
import os.path
import sys
import unittest
-
def _GetDirAbove(dirname):
"""Returns the directory "above" this file containing |dirname| (which must
also be "above" this file)."""
@@ -20,12 +19,11 @@ def _GetDirAbove(dirname):
try:
- imp.find_module("mojom")
+ importlib.util.find_spec("mojom")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
from mojom.generate import generator
-
class StringManipulationTest(unittest.TestCase):
"""generator contains some string utilities, this tests only those."""
@@ -69,6 +67,5 @@ class StringManipulationTest(unittest.TestCase):
self.assertEquals("SNAKE_D3D11_CASE",
generator.ToUpperSnakeCase("snakeD3d11Case"))
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
index 9bdb28e0..ca71059d 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -12,15 +12,14 @@
# method = interface.AddMethod('Tat', 0)
# method.AddParameter('baz', 0, mojom.INT32)
-import sys
-if sys.version_info.major == 2:
- import cPickle as pickle
-else:
- import pickle
+import pickle
+from collections import OrderedDict
from uuid import UUID
+# pylint: disable=raise-missing-from
-class BackwardCompatibilityChecker(object):
+
+class BackwardCompatibilityChecker:
"""Used for memoization while recursively checking two type definitions for
backward-compatibility."""
@@ -64,23 +63,20 @@ def Repr(obj, as_ref=True):
return obj.Repr(as_ref=as_ref)
# Since we cannot implement Repr for existing container types, we
# handle them here.
- elif isinstance(obj, list):
+ if isinstance(obj, list):
if not obj:
return '[]'
- else:
- return ('[\n%s\n]' % (',\n'.join(
- ' %s' % Repr(elem, as_ref).replace('\n', '\n ')
- for elem in obj)))
- elif isinstance(obj, dict):
+ return ('[\n%s\n]' %
+ (',\n'.join(' %s' % Repr(elem, as_ref).replace('\n', '\n ')
+ for elem in obj)))
+ if isinstance(obj, dict):
if not obj:
return '{}'
- else:
- return ('{\n%s\n}' % (',\n'.join(
- ' %s: %s' % (Repr(key, as_ref).replace('\n', '\n '),
- Repr(val, as_ref).replace('\n', '\n '))
- for key, val in obj.items())))
- else:
- return repr(obj)
+ return ('{\n%s\n}' % (',\n'.join(' %s: %s' %
+ (Repr(key, as_ref).replace('\n', '\n '),
+ Repr(val, as_ref).replace('\n', '\n '))
+ for key, val in obj.items())))
+ return repr(obj)
def GenericRepr(obj, names):
@@ -104,7 +100,7 @@ def GenericRepr(obj, names):
ReprIndent(name, as_ref) for (name, as_ref) in names.items()))
-class Kind(object):
+class Kind:
"""Kind represents a type (e.g. int8, string).
Attributes:
@@ -112,16 +108,43 @@ class Kind(object):
module: {Module} The defining module. Set to None for built-in types.
parent_kind: The enclosing type. For example, an enum defined
inside an interface has that interface as its parent. May be None.
+ is_nullable: True if the type is nullable.
"""
- def __init__(self, spec=None, module=None):
+ def __init__(self, spec=None, is_nullable=False, module=None):
self.spec = spec
self.module = module
self.parent_kind = None
+ self.is_nullable = is_nullable
+ self.shared_definition = {}
+
+ @classmethod
+ def AddSharedProperty(cls, name):
+ """Adds a property |name| to |cls|, which accesses the corresponding item in
+ |shared_definition|.
+
+    The reason for adding this indirection is to enable sharing a definition
+    between a reference kind and its nullable variant. For example:
+ a = Struct('test_struct_1')
+ b = a.MakeNullableKind()
+ a.name = 'test_struct_2'
+ print(b.name) # Outputs 'test_struct_2'.
+ """
+ def Get(self):
+ try:
+ return self.shared_definition[name]
+ except KeyError: # Must raise AttributeError if property doesn't exist.
+ raise AttributeError
+
+ def Set(self, value):
+ self.shared_definition[name] = value
+
+ setattr(cls, name, property(Get, Set))
def Repr(self, as_ref=True):
# pylint: disable=unused-argument
- return '<%s spec=%r>' % (self.__class__.__name__, self.spec)
+ return '<%s spec=%r is_nullable=%r>' % (self.__class__.__name__, self.spec,
+ self.is_nullable)
def __repr__(self):
# Gives us a decent __repr__ for all kinds.
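The aliasing described in the docstring can be reproduced in isolation: the generated property reads and writes a dict that a kind shares with its nullable clone, so a mutation through either object is visible through both. A self-contained sketch, independent of the mojom classes:

class SharedKind:
    def __init__(self):
        self.shared_definition = {}

    @classmethod
    def AddSharedProperty(cls, name):
        def Get(self):
            try:
                return self.shared_definition[name]
            except KeyError:
                # Must raise AttributeError if the property is unset.
                raise AttributeError(name)

        def Set(self, value):
            self.shared_definition[name] = value

        setattr(cls, name, property(Get, Set))

SharedKind.AddSharedProperty('name')
a = SharedKind()
b = SharedKind()
b.shared_definition = a.shared_definition  # What MakeNullableKind() does.
a.name = 'test_struct_2'
print(b.name)  # Prints 'test_struct_2'.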
@@ -130,7 +153,8 @@ class Kind(object):
def __eq__(self, rhs):
# pylint: disable=unidiomatic-typecheck
return (type(self) == type(rhs)
- and (self.spec, self.parent_kind) == (rhs.spec, rhs.parent_kind))
+ and (self.spec, self.parent_kind, self.is_nullable)
+ == (rhs.spec, rhs.parent_kind, rhs.is_nullable))
def __hash__(self):
# TODO(crbug.com/1060471): Remove this and other __hash__ methods on Kind
@@ -138,32 +162,113 @@ class Kind(object):
# some primitive Kinds as dict keys. The default hash (object identity)
# breaks these dicts when a pickled Module instance is unpickled and used
# during a subsequent run of the parser.
- return hash((self.spec, self.parent_kind))
+ return hash((self.spec, self.parent_kind, self.is_nullable))
# pylint: disable=unused-argument
def IsBackwardCompatible(self, rhs, checker):
return self == rhs
+class ValueKind(Kind):
+ """ValueKind represents values that aren't reference kinds.
+
+  The primary difference is that the wire representation for nullable value
+  kinds still reserves space for the value itself, even when that value is
+  logically null.
+ """
+ def __init__(self, spec=None, is_nullable=False, module=None):
+ assert spec is None or is_nullable == spec.startswith('?')
+ Kind.__init__(self, spec, is_nullable, module)
+
+ def MakeNullableKind(self):
+ assert not self.is_nullable
+
+ if self == BOOL:
+ return NULLABLE_BOOL
+ if self == INT8:
+ return NULLABLE_INT8
+ if self == INT16:
+ return NULLABLE_INT16
+ if self == INT32:
+ return NULLABLE_INT32
+ if self == INT64:
+ return NULLABLE_INT64
+ if self == UINT8:
+ return NULLABLE_UINT8
+ if self == UINT16:
+ return NULLABLE_UINT16
+ if self == UINT32:
+ return NULLABLE_UINT32
+ if self == UINT64:
+ return NULLABLE_UINT64
+ if self == FLOAT:
+ return NULLABLE_FLOAT
+ if self == DOUBLE:
+ return NULLABLE_DOUBLE
+
+ nullable_kind = type(self)()
+ nullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ nullable_kind.spec = '?' + self.spec
+ nullable_kind.is_nullable = True
+ nullable_kind.parent_kind = self.parent_kind
+ nullable_kind.module = self.module
+
+ return nullable_kind
+
+ def MakeUnnullableKind(self):
+ assert self.is_nullable
+
+ if self == NULLABLE_BOOL:
+ return BOOL
+ if self == NULLABLE_INT8:
+ return INT8
+ if self == NULLABLE_INT16:
+ return INT16
+ if self == NULLABLE_INT32:
+ return INT32
+ if self == NULLABLE_INT64:
+ return INT64
+ if self == NULLABLE_UINT8:
+ return UINT8
+ if self == NULLABLE_UINT16:
+ return UINT16
+ if self == NULLABLE_UINT32:
+ return UINT32
+ if self == NULLABLE_UINT64:
+ return UINT64
+ if self == NULLABLE_FLOAT:
+ return FLOAT
+ if self == NULLABLE_DOUBLE:
+ return DOUBLE
+
+ nullable_kind = type(self)()
+ nullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ nullable_kind.spec = self.spec[1:]
+ nullable_kind.is_nullable = False
+ nullable_kind.parent_kind = self.parent_kind
+ nullable_kind.module = self.module
+
+ return nullable_kind
+
+ def __eq__(self, rhs):
+ return (isinstance(rhs, ValueKind) and super().__eq__(rhs))
+
+ def __hash__(self): # pylint: disable=useless-super-delegation
+ return super().__hash__()
+
+
class ReferenceKind(Kind):
"""ReferenceKind represents pointer and handle types.
A type is nullable if null (for pointer types) or invalid handle (for handle
types) is a legal value for the type.
-
- Attributes:
- is_nullable: True if the type is nullable.
"""
def __init__(self, spec=None, is_nullable=False, module=None):
assert spec is None or is_nullable == spec.startswith('?')
- Kind.__init__(self, spec, module)
- self.is_nullable = is_nullable
- self.shared_definition = {}
-
- def Repr(self, as_ref=True):
- return '<%s spec=%r is_nullable=%r>' % (self.__class__.__name__, self.spec,
- self.is_nullable)
+ Kind.__init__(self, spec, is_nullable, module)
def MakeNullableKind(self):
assert not self.is_nullable
@@ -193,55 +298,65 @@ class ReferenceKind(Kind):
return nullable_kind
- @classmethod
- def AddSharedProperty(cls, name):
- """Adds a property |name| to |cls|, which accesses the corresponding item in
- |shared_definition|.
-
- The reason of adding such indirection is to enable sharing definition
- between a reference kind and its nullable variation. For example:
- a = Struct('test_struct_1')
- b = a.MakeNullableKind()
- a.name = 'test_struct_2'
- print(b.name) # Outputs 'test_struct_2'.
- """
-
- def Get(self):
- try:
- return self.shared_definition[name]
- except KeyError: # Must raise AttributeError if property doesn't exist.
- raise AttributeError
-
- def Set(self, value):
- self.shared_definition[name] = value
+ def MakeUnnullableKind(self):
+ assert self.is_nullable
+
+ if self == NULLABLE_STRING:
+ return STRING
+ if self == NULLABLE_HANDLE:
+ return HANDLE
+ if self == NULLABLE_DCPIPE:
+ return DCPIPE
+ if self == NULLABLE_DPPIPE:
+ return DPPIPE
+ if self == NULLABLE_MSGPIPE:
+ return MSGPIPE
+ if self == NULLABLE_SHAREDBUFFER:
+ return SHAREDBUFFER
+ if self == NULLABLE_PLATFORMHANDLE:
+ return PLATFORMHANDLE
+
+ unnullable_kind = type(self)()
+ unnullable_kind.shared_definition = self.shared_definition
+ if self.spec is not None:
+ assert self.spec[0] == '?'
+ unnullable_kind.spec = self.spec[1:]
+ unnullable_kind.is_nullable = False
+ unnullable_kind.parent_kind = self.parent_kind
+ unnullable_kind.module = self.module
- setattr(cls, name, property(Get, Set))
+ return unnullable_kind
def __eq__(self, rhs):
- return (isinstance(rhs, ReferenceKind)
- and super(ReferenceKind, self).__eq__(rhs)
- and self.is_nullable == rhs.is_nullable)
+ return (isinstance(rhs, ReferenceKind) and super().__eq__(rhs))
- def __hash__(self):
- return hash((super(ReferenceKind, self).__hash__(), self.is_nullable))
-
- def IsBackwardCompatible(self, rhs, checker):
- return (super(ReferenceKind, self).IsBackwardCompatible(rhs, checker)
- and self.is_nullable == rhs.is_nullable)
+ def __hash__(self): # pylint: disable=useless-super-delegation
+ return super().__hash__()
# Initialize the set of primitive types. These can be accessed by clients.
-BOOL = Kind('b')
-INT8 = Kind('i8')
-INT16 = Kind('i16')
-INT32 = Kind('i32')
-INT64 = Kind('i64')
-UINT8 = Kind('u8')
-UINT16 = Kind('u16')
-UINT32 = Kind('u32')
-UINT64 = Kind('u64')
-FLOAT = Kind('f')
-DOUBLE = Kind('d')
+BOOL = ValueKind('b')
+INT8 = ValueKind('i8')
+INT16 = ValueKind('i16')
+INT32 = ValueKind('i32')
+INT64 = ValueKind('i64')
+UINT8 = ValueKind('u8')
+UINT16 = ValueKind('u16')
+UINT32 = ValueKind('u32')
+UINT64 = ValueKind('u64')
+FLOAT = ValueKind('f')
+DOUBLE = ValueKind('d')
+NULLABLE_BOOL = ValueKind('?b', True)
+NULLABLE_INT8 = ValueKind('?i8', True)
+NULLABLE_INT16 = ValueKind('?i16', True)
+NULLABLE_INT32 = ValueKind('?i32', True)
+NULLABLE_INT64 = ValueKind('?i64', True)
+NULLABLE_UINT8 = ValueKind('?u8', True)
+NULLABLE_UINT16 = ValueKind('?u16', True)
+NULLABLE_UINT32 = ValueKind('?u32', True)
+NULLABLE_UINT64 = ValueKind('?u64', True)
+NULLABLE_FLOAT = ValueKind('?f', True)
+NULLABLE_DOUBLE = ValueKind('?d', True)
STRING = ReferenceKind('s')
HANDLE = ReferenceKind('h')
DCPIPE = ReferenceKind('h:d:c')
@@ -270,6 +385,17 @@ PRIMITIVES = (
UINT64,
FLOAT,
DOUBLE,
+ NULLABLE_BOOL,
+ NULLABLE_INT8,
+ NULLABLE_INT16,
+ NULLABLE_INT32,
+ NULLABLE_INT64,
+ NULLABLE_UINT8,
+ NULLABLE_UINT16,
+ NULLABLE_UINT32,
+ NULLABLE_UINT64,
+ NULLABLE_FLOAT,
+ NULLABLE_DOUBLE,
STRING,
HANDLE,
DCPIPE,
@@ -291,12 +417,17 @@ ATTRIBUTE_DEFAULT = 'Default'
ATTRIBUTE_EXTENSIBLE = 'Extensible'
ATTRIBUTE_NO_INTERRUPT = 'NoInterrupt'
ATTRIBUTE_STABLE = 'Stable'
+ATTRIBUTE_SUPPORTS_URGENT = 'SupportsUrgent'
ATTRIBUTE_SYNC = 'Sync'
ATTRIBUTE_UNLIMITED_SIZE = 'UnlimitedSize'
ATTRIBUTE_UUID = 'Uuid'
+ATTRIBUTE_SERVICE_SANDBOX = 'ServiceSandbox'
+ATTRIBUTE_REQUIRE_CONTEXT = 'RequireContext'
+ATTRIBUTE_ALLOWED_CONTEXT = 'AllowedContext'
+ATTRIBUTE_RUNTIME_FEATURE = 'RuntimeFeature'
-class NamedValue(object):
+class NamedValue:
def __init__(self, module, parent_kind, mojom_name):
self.module = module
self.parent_kind = parent_kind
@@ -316,7 +447,7 @@ class NamedValue(object):
return hash((self.parent_kind, self.mojom_name))
-class BuiltinValue(object):
+class BuiltinValue:
def __init__(self, value):
self.value = value
@@ -350,7 +481,7 @@ class EnumValue(NamedValue):
return self.field.name
-class Constant(object):
+class Constant:
def __init__(self, mojom_name=None, kind=None, value=None, parent_kind=None):
self.mojom_name = mojom_name
self.name = None
@@ -368,7 +499,7 @@ class Constant(object):
rhs.parent_kind))
-class Field(object):
+class Field:
def __init__(self,
mojom_name=None,
kind=None,
@@ -414,7 +545,18 @@ class StructField(Field):
class UnionField(Field):
- pass
+ def __init__(self,
+ mojom_name=None,
+ kind=None,
+ ordinal=None,
+ default=None,
+ attributes=None):
+ Field.__init__(self, mojom_name, kind, ordinal, default, attributes)
+
+ @property
+ def is_default(self):
+ return self.attributes.get(ATTRIBUTE_DEFAULT, False) \
+ if self.attributes else False
def _IsFieldBackwardCompatible(new_field, old_field, checker):
@@ -424,6 +566,38 @@ def _IsFieldBackwardCompatible(new_field, old_field, checker):
return checker.IsBackwardCompatible(new_field.kind, old_field.kind)
+class Feature(ReferenceKind):
+ """A runtime enabled feature defined from mojom.
+
+ Attributes:
+ mojom_name: {str} The name of the feature type as defined in mojom.
+ name: {str} The stylized name. (Note: not the "name" used by FeatureList.)
+ constants: {List[Constant]} The constants defined in the feature scope.
+ attributes: {dict} Additional information about the feature.
+ """
+
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
+
+ def __init__(self, mojom_name=None, module=None, attributes=None):
+ if mojom_name is not None:
+ spec = 'x:' + mojom_name
+ else:
+ spec = None
+ ReferenceKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.constants = []
+ self.attributes = attributes
+
+ def Stylize(self, stylizer):
+ self.name = stylizer.StylizeFeature(self.mojom_name)
+ for constant in self.constants:
+ constant.Stylize(stylizer)
+
+
class Struct(ReferenceKind):
"""A struct with typed fields.
@@ -441,14 +615,14 @@ class Struct(ReferenceKind):
if it's a native struct.
"""
- ReferenceKind.AddSharedProperty('mojom_name')
- ReferenceKind.AddSharedProperty('name')
- ReferenceKind.AddSharedProperty('native_only')
- ReferenceKind.AddSharedProperty('custom_serializer')
- ReferenceKind.AddSharedProperty('fields')
- ReferenceKind.AddSharedProperty('enums')
- ReferenceKind.AddSharedProperty('constants')
- ReferenceKind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('native_only')
+ Kind.AddSharedProperty('custom_serializer')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('enums')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
def __init__(self, mojom_name=None, module=None, attributes=None):
if mojom_name is not None:
@@ -470,12 +644,11 @@ class Struct(ReferenceKind):
return '<%s mojom_name=%r module=%s>' % (self.__class__.__name__,
self.mojom_name,
Repr(self.module, as_ref=True))
- else:
- return GenericRepr(self, {
- 'mojom_name': False,
- 'fields': False,
- 'module': True
- })
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'fields': False,
+ 'module': True
+ })
def AddField(self,
mojom_name,
@@ -496,13 +669,13 @@ class Struct(ReferenceKind):
for constant in self.constants:
constant.Stylize(stylizer)
- def IsBackwardCompatible(self, older_struct, checker):
- """This struct is backward-compatible with older_struct if and only if all
- of the following conditions hold:
+ def IsBackwardCompatible(self, rhs, checker):
+ """This struct is backward-compatible with rhs (older_struct) if and only if
+ all of the following conditions hold:
- Any newly added field is tagged with a [MinVersion] attribute specifying
a version number greater than all previously used [MinVersion]
attributes within the struct.
- - All fields present in older_struct remain present in the new struct,
+ - All fields present in rhs remain present in the new struct,
with the same ordinal position, same optional or non-optional status,
same (or backward-compatible) type and where applicable, the same
[MinVersion] attribute value.
@@ -521,7 +694,7 @@ class Struct(ReferenceKind):
return fields_by_ordinal
new_fields = buildOrdinalFieldMap(self)
- old_fields = buildOrdinalFieldMap(older_struct)
+ old_fields = buildOrdinalFieldMap(rhs)
if len(new_fields) < len(old_fields):
# At least one field was removed, which is not OK.
return False
@@ -574,11 +747,18 @@ class Struct(ReferenceKind):
prefix = self.module.GetNamespacePrefix()
return '%s%s' % (prefix, self.mojom_name)
+ def _tuple(self):
+ return (self.mojom_name, self.native_only, self.fields, self.constants,
+ self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Struct) and
- (self.mojom_name, self.native_only, self.fields, self.constants,
- self.attributes) == (rhs.mojom_name, rhs.native_only, rhs.fields,
- rhs.constants, rhs.attributes))
+ return isinstance(rhs, Struct) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
def __hash__(self):
return id(self)
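
The `_tuple()` helper introduced above factors a single comparison key out of `__eq__` so that `__lt__` can reuse it for deterministic ordering; the same pattern recurs below for Union, Method, Interface and Enum. A minimal, hypothetical sketch:

    class Example:
        def __init__(self, name, fields):
            self.name = name
            self.fields = fields

        def _tuple(self):
            # Single source of truth for both equality and ordering.
            return (self.name, self.fields)

        def __eq__(self, rhs):
            return isinstance(rhs, Example) and self._tuple() == rhs._tuple()

        def __lt__(self, rhs):
            # Objects of different types order by type name, matching the
            # convention used by the classes in this patch.
            if not isinstance(self, type(rhs)):
                return str(type(self)) < str(type(rhs))
            return self._tuple() < rhs._tuple()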
@@ -595,10 +775,11 @@ class Union(ReferenceKind):
which Java class name to use to represent it in the generated
bindings.
"""
- ReferenceKind.AddSharedProperty('mojom_name')
- ReferenceKind.AddSharedProperty('name')
- ReferenceKind.AddSharedProperty('fields')
- ReferenceKind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('default_field')
def __init__(self, mojom_name=None, module=None, attributes=None):
if mojom_name is not None:
@@ -610,14 +791,14 @@ class Union(ReferenceKind):
self.name = None
self.fields = []
self.attributes = attributes
+ self.default_field = None
def Repr(self, as_ref=True):
if as_ref:
return '<%s spec=%r is_nullable=%r fields=%s>' % (
self.__class__.__name__, self.spec, self.is_nullable, Repr(
self.fields))
- else:
- return GenericRepr(self, {'fields': True, 'is_nullable': False})
+ return GenericRepr(self, {'fields': True, 'is_nullable': False})
def AddField(self, mojom_name, kind, ordinal=None, attributes=None):
field = UnionField(mojom_name, kind, ordinal, None, attributes)
@@ -629,13 +810,13 @@ class Union(ReferenceKind):
for field in self.fields:
field.Stylize(stylizer)
- def IsBackwardCompatible(self, older_union, checker):
- """This union is backward-compatible with older_union if and only if all
- of the following conditions hold:
+ def IsBackwardCompatible(self, rhs, checker):
+ """This union is backward-compatible with rhs (older_union) if and only if
+ all of the following conditions hold:
- Any newly added field is tagged with a [MinVersion] attribute specifying
a version number greater than all previously used [MinVersion]
attributes within the union.
- - All fields present in older_union remain present in the new union,
+ - All fields present in rhs remain present in the new union,
with the same ordinal value, same optional or non-optional status,
same (or backward-compatible) type, and where applicable, the same
[MinVersion] attribute value.
@@ -651,7 +832,7 @@ class Union(ReferenceKind):
return fields_by_ordinal
new_fields = buildOrdinalFieldMap(self)
- old_fields = buildOrdinalFieldMap(older_union)
+ old_fields = buildOrdinalFieldMap(rhs)
if len(new_fields) < len(old_fields):
# At least one field was removed, which is not OK.
return False
@@ -678,6 +859,11 @@ class Union(ReferenceKind):
return True
@property
+ def extensible(self):
+ return self.attributes.get(ATTRIBUTE_EXTENSIBLE, False) \
+ if self.attributes else False
+
+ @property
def stable(self):
return self.attributes.get(ATTRIBUTE_STABLE, False) \
if self.attributes else False
@@ -690,10 +876,17 @@ class Union(ReferenceKind):
prefix = self.module.GetNamespacePrefix()
return '%s%s' % (prefix, self.mojom_name)
+ def _tuple(self):
+ return (self.mojom_name, self.fields, self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Union) and
- (self.mojom_name, self.fields,
- self.attributes) == (rhs.mojom_name, rhs.fields, rhs.attributes))
+ return isinstance(rhs, Union) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
def __hash__(self):
return id(self)
@@ -707,8 +900,8 @@ class Array(ReferenceKind):
length: The number of elements. None if unknown.
"""
- ReferenceKind.AddSharedProperty('kind')
- ReferenceKind.AddSharedProperty('length')
+ Kind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('length')
def __init__(self, kind=None, length=None):
if kind is not None:
@@ -728,12 +921,11 @@ class Array(ReferenceKind):
return '<%s spec=%r is_nullable=%r kind=%s length=%r>' % (
self.__class__.__name__, self.spec, self.is_nullable, Repr(
self.kind), self.length)
- else:
- return GenericRepr(self, {
- 'kind': True,
- 'length': False,
- 'is_nullable': False
- })
+ return GenericRepr(self, {
+ 'kind': True,
+ 'length': False,
+ 'is_nullable': False
+ })
def __eq__(self, rhs):
return (isinstance(rhs, Array)
@@ -754,8 +946,8 @@ class Map(ReferenceKind):
key_kind: {Kind} The type of the keys. May be None.
value_kind: {Kind} The type of the elements. May be None.
"""
- ReferenceKind.AddSharedProperty('key_kind')
- ReferenceKind.AddSharedProperty('value_kind')
+ Kind.AddSharedProperty('key_kind')
+ Kind.AddSharedProperty('value_kind')
def __init__(self, key_kind=None, value_kind=None):
if (key_kind is not None and value_kind is not None):
@@ -780,8 +972,7 @@ class Map(ReferenceKind):
return '<%s spec=%r is_nullable=%r key_kind=%s value_kind=%s>' % (
self.__class__.__name__, self.spec, self.is_nullable,
Repr(self.key_kind), Repr(self.value_kind))
- else:
- return GenericRepr(self, {'key_kind': True, 'value_kind': True})
+ return GenericRepr(self, {'key_kind': True, 'value_kind': True})
def __eq__(self, rhs):
return (isinstance(rhs, Map) and
@@ -797,7 +988,7 @@ class Map(ReferenceKind):
class PendingRemote(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -822,7 +1013,7 @@ class PendingRemote(ReferenceKind):
class PendingReceiver(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -847,7 +1038,7 @@ class PendingReceiver(ReferenceKind):
class PendingAssociatedRemote(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -873,7 +1064,7 @@ class PendingAssociatedRemote(ReferenceKind):
class PendingAssociatedReceiver(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -899,7 +1090,7 @@ class PendingAssociatedReceiver(ReferenceKind):
class InterfaceRequest(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -923,7 +1114,7 @@ class InterfaceRequest(ReferenceKind):
class AssociatedInterfaceRequest(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -949,7 +1140,7 @@ class AssociatedInterfaceRequest(ReferenceKind):
self.kind, rhs.kind)
-class Parameter(object):
+class Parameter:
def __init__(self,
mojom_name=None,
kind=None,
@@ -983,7 +1174,7 @@ class Parameter(object):
rhs.default, rhs.attributes))
-class Method(object):
+class Method:
def __init__(self, interface, mojom_name, ordinal=None, attributes=None):
self.interface = interface
self.mojom_name = mojom_name
@@ -999,12 +1190,11 @@ class Method(object):
def Repr(self, as_ref=True):
if as_ref:
return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
- else:
- return GenericRepr(self, {
- 'mojom_name': False,
- 'parameters': True,
- 'response_parameters': True
- })
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'parameters': True,
+ 'response_parameters': True
+ })
def AddParameter(self,
mojom_name,
@@ -1061,21 +1251,49 @@ class Method(object):
return self.attributes.get(ATTRIBUTE_UNLIMITED_SIZE) \
if self.attributes else False
+ @property
+ def allowed_context(self):
+ return self.attributes.get(ATTRIBUTE_ALLOWED_CONTEXT) \
+ if self.attributes else None
+
+ @property
+ def supports_urgent(self):
+ return self.attributes.get(ATTRIBUTE_SUPPORTS_URGENT) \
+ if self.attributes else None
+
+ @property
+ def runtime_feature(self):
+ if not self.attributes:
+ return None
+ runtime_feature = self.attributes.get(ATTRIBUTE_RUNTIME_FEATURE, None)
+ if runtime_feature is None:
+ return None
+ if not isinstance(runtime_feature, Feature):
+ raise Exception("RuntimeFeature attribute on %s must be a feature." %
+ self.name)
+ return runtime_feature
+
+ def _tuple(self):
+ return (self.mojom_name, self.ordinal, self.parameters,
+ self.response_parameters, self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Method) and
- (self.mojom_name, self.ordinal, self.parameters,
- self.response_parameters,
- self.attributes) == (rhs.mojom_name, rhs.ordinal, rhs.parameters,
- rhs.response_parameters, rhs.attributes))
+ return isinstance(rhs, Method) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
class Interface(ReferenceKind):
- ReferenceKind.AddSharedProperty('mojom_name')
- ReferenceKind.AddSharedProperty('name')
- ReferenceKind.AddSharedProperty('methods')
- ReferenceKind.AddSharedProperty('enums')
- ReferenceKind.AddSharedProperty('constants')
- ReferenceKind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('methods')
+ Kind.AddSharedProperty('enums')
+ Kind.AddSharedProperty('constants')
+ Kind.AddSharedProperty('attributes')
def __init__(self, mojom_name=None, module=None, attributes=None):
if mojom_name is not None:
@@ -1093,12 +1311,11 @@ class Interface(ReferenceKind):
def Repr(self, as_ref=True):
if as_ref:
return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
- else:
- return GenericRepr(self, {
- 'mojom_name': False,
- 'attributes': False,
- 'methods': False
- })
+ return GenericRepr(self, {
+ 'mojom_name': False,
+ 'attributes': False,
+ 'methods': False
+ })
def AddMethod(self, mojom_name, ordinal=None, attributes=None):
method = Method(self, mojom_name, ordinal, attributes)
@@ -1114,10 +1331,10 @@ class Interface(ReferenceKind):
for constant in self.constants:
constant.Stylize(stylizer)
- def IsBackwardCompatible(self, older_interface, checker):
- """This interface is backward-compatible with older_interface if and only
- if all of the following conditions hold:
- - All defined methods in older_interface (when identified by ordinal) have
+ def IsBackwardCompatible(self, rhs, checker):
+ """This interface is backward-compatible with rhs (older_interface) if and
+ only if all of the following conditions hold:
+ - All defined methods in rhs (when identified by ordinal) have
backward-compatible definitions in this interface. For each method this
means:
- The parameter list is backward-compatible, according to backward-
@@ -1131,7 +1348,7 @@ class Interface(ReferenceKind):
rules for structs.
- All newly introduced methods in this interface have a [MinVersion]
attribute specifying a version greater than any method in
- older_interface.
+ rhs.
"""
def buildOrdinalMethodMap(interface):
@@ -1144,7 +1361,7 @@ class Interface(ReferenceKind):
return methods_by_ordinal
new_methods = buildOrdinalMethodMap(self)
- old_methods = buildOrdinalMethodMap(older_interface)
+ old_methods = buildOrdinalMethodMap(rhs)
max_old_min_version = 0
for ordinal, old_method in old_methods.items():
new_method = new_methods.get(ordinal)
@@ -1187,6 +1404,39 @@ class Interface(ReferenceKind):
return True
@property
+ def service_sandbox(self):
+ if not self.attributes:
+ return None
+ service_sandbox = self.attributes.get(ATTRIBUTE_SERVICE_SANDBOX, None)
+ if service_sandbox is None:
+ return None
+ # Constants are only allowed to refer to an enum here, so replace.
+ if isinstance(service_sandbox, Constant):
+ service_sandbox = service_sandbox.value
+ if not isinstance(service_sandbox, EnumValue):
+ raise Exception("ServiceSandbox attribute on %s must be an enum value." %
+ self.module.name)
+ return service_sandbox
+
+ @property
+ def runtime_feature(self):
+ if not self.attributes:
+ return None
+ runtime_feature = self.attributes.get(ATTRIBUTE_RUNTIME_FEATURE, None)
+ if runtime_feature is None:
+ return None
+ if not isinstance(runtime_feature, Feature):
+ raise Exception("RuntimeFeature attribute on %s must be a feature." %
+ self.name)
+ return runtime_feature
+
+ @property
+ def require_context(self):
+ if not self.attributes:
+ return None
+ return self.attributes.get(ATTRIBUTE_REQUIRE_CONTEXT, None)
+
+ @property
def stable(self):
return self.attributes.get(ATTRIBUTE_STABLE, False) \
if self.attributes else False
@@ -1199,11 +1449,18 @@ class Interface(ReferenceKind):
prefix = self.module.GetNamespacePrefix()
return '%s%s' % (prefix, self.mojom_name)
+ def _tuple(self):
+ return (self.mojom_name, self.methods, self.enums, self.constants,
+ self.attributes)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Interface)
- and (self.mojom_name, self.methods, self.enums, self.constants,
- self.attributes) == (rhs.mojom_name, rhs.methods, rhs.enums,
- rhs.constants, rhs.attributes))
+ return isinstance(rhs, Interface) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
@property
def uuid(self):
@@ -1224,7 +1481,7 @@ class Interface(ReferenceKind):
class AssociatedInterface(ReferenceKind):
- ReferenceKind.AddSharedProperty('kind')
+ Kind.AddSharedProperty('kind')
def __init__(self, kind=None):
if kind is not None:
@@ -1249,7 +1506,7 @@ class AssociatedInterface(ReferenceKind):
self.kind, rhs.kind)
-class EnumField(object):
+class EnumField:
def __init__(self,
mojom_name=None,
value=None,
@@ -1281,16 +1538,25 @@ class EnumField(object):
rhs.attributes, rhs.numeric_value))
-class Enum(Kind):
+class Enum(ValueKind):
+ Kind.AddSharedProperty('mojom_name')
+ Kind.AddSharedProperty('name')
+ Kind.AddSharedProperty('native_only')
+ Kind.AddSharedProperty('fields')
+ Kind.AddSharedProperty('attributes')
+ Kind.AddSharedProperty('min_value')
+ Kind.AddSharedProperty('max_value')
+ Kind.AddSharedProperty('default_field')
+
def __init__(self, mojom_name=None, module=None, attributes=None):
- self.mojom_name = mojom_name
- self.name = None
- self.native_only = False
if mojom_name is not None:
spec = 'x:' + mojom_name
else:
spec = None
- Kind.__init__(self, spec, module)
+ ValueKind.__init__(self, spec, False, module)
+ self.mojom_name = mojom_name
+ self.name = None
+ self.native_only = False
self.fields = []
self.attributes = attributes
self.min_value = None
@@ -1300,8 +1566,7 @@ class Enum(Kind):
def Repr(self, as_ref=True):
if as_ref:
return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
- else:
- return GenericRepr(self, {'mojom_name': False, 'fields': False})
+ return GenericRepr(self, {'mojom_name': False, 'fields': False})
def Stylize(self, stylizer):
self.name = stylizer.StylizeEnum(self.mojom_name)
@@ -1327,14 +1592,14 @@ class Enum(Kind):
return '%s%s' % (prefix, self.mojom_name)
# pylint: disable=unused-argument
- def IsBackwardCompatible(self, older_enum, checker):
- """This enum is backward-compatible with older_enum if and only if one of
- the following conditions holds:
+ def IsBackwardCompatible(self, rhs, checker):
+ """This enum is backward-compatible with rhs (older_enum) if and only if one
+ of the following conditions holds:
- Neither enum is [Extensible] and both have the exact same set of valid
numeric values. Field names and aliases for the same numeric value do
not affect compatibility.
- - older_enum is [Extensible], and for every version defined by
- older_enum, this enum has the exact same set of valid numeric values.
+ - rhs is [Extensible], and for every version defined by
+ rhs, this enum has the exact same set of valid numeric values.
"""
def buildVersionFieldMap(enum):
@@ -1345,32 +1610,49 @@ class Enum(Kind):
fields_by_min_version[field.min_version].add(field.numeric_value)
return fields_by_min_version
- old_fields = buildVersionFieldMap(older_enum)
+ old_fields = buildVersionFieldMap(rhs)
new_fields = buildVersionFieldMap(self)
- if new_fields.keys() != old_fields.keys() and not older_enum.extensible:
- return False
+ if new_fields.keys() != old_fields.keys() and not rhs.extensible:
+ raise Exception("Non-extensible enum cannot be modified")
for min_version, valid_values in old_fields.items():
- if (min_version not in new_fields
- or new_fields[min_version] != valid_values):
- return False
+ if min_version not in new_fields:
+ raise Exception('New values added to an extensible enum '
+ 'do not specify MinVersion: %s' % new_fields)
+
+ if (new_fields[min_version] != valid_values):
+ if (len(new_fields[min_version]) < len(valid_values)):
+ raise Exception('Removing values for an existing MinVersion %s '
+ 'is not allowed' % min_version)
+ raise Exception(
+ 'New values don\'t match old values '
+ 'for an existing MinVersion %s,'
+ ' please specify MinVersion equal to "Next version" '
+ 'in the enum description'
+ ' for the following values:\n%s' %
+ (min_version, new_fields[min_version].difference(valid_values)))
return True
+ def _tuple(self):
+ return (self.mojom_name, self.native_only, self.fields, self.attributes,
+ self.min_value, self.max_value, self.default_field)
+
def __eq__(self, rhs):
- return (isinstance(rhs, Enum) and
- (self.mojom_name, self.native_only, self.fields, self.attributes,
- self.min_value, self.max_value,
- self.default_field) == (rhs.mojom_name, rhs.native_only,
- rhs.fields, rhs.attributes, rhs.min_value,
- rhs.max_value, rhs.default_field))
+ return isinstance(rhs, Enum) and self._tuple() == rhs._tuple()
+
+ def __lt__(self, rhs):
+ if not isinstance(self, type(rhs)):
+ return str(type(self)) < str(type(rhs))
+
+ return self._tuple() < rhs._tuple()
def __hash__(self):
return id(self)
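
As a worked example of the extensibility rule above, assuming version-to-value-set maps shaped like the output of buildVersionFieldMap():

    # An [Extensible] enum that only adds values under a new MinVersion
    # keeps every previously defined version's value set intact.
    old_fields = {0: {0, 1}}          # version 0 defined values 0 and 1
    new_fields = {0: {0, 1}, 1: {2}}  # value 2 added with [MinVersion=1]
    assert all(new_fields[v] == vals for v, vals in old_fields.items())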
-class Module(object):
+class Module:
def __init__(self, path=None, mojom_namespace=None, attributes=None):
self.path = path
self.mojom_namespace = mojom_namespace
@@ -1379,24 +1661,26 @@ class Module(object):
self.unions = []
self.interfaces = []
self.enums = []
+ self.features = []
self.constants = []
- self.kinds = {}
+ self.kinds = OrderedDict()
self.attributes = attributes
self.imports = []
- self.imported_kinds = {}
- self.metadata = {}
+ self.imported_kinds = OrderedDict()
+ self.metadata = OrderedDict()
def __repr__(self):
# Gives us a decent __repr__ for modules.
return self.Repr()
def __eq__(self, rhs):
- return (isinstance(rhs, Module) and
- (self.path, self.attributes, self.mojom_namespace, self.imports,
- self.constants, self.enums, self.structs, self.unions,
- self.interfaces) == (rhs.path, rhs.attributes, rhs.mojom_namespace,
- rhs.imports, rhs.constants, rhs.enums,
- rhs.structs, rhs.unions, rhs.interfaces))
+ return (isinstance(rhs, Module)
+ and (self.path, self.attributes, self.mojom_namespace, self.imports,
+ self.constants, self.enums, self.structs, self.unions,
+ self.interfaces, self.features)
+ == (rhs.path, rhs.attributes, rhs.mojom_namespace, rhs.imports,
+ rhs.constants, rhs.enums, rhs.structs, rhs.unions,
+ rhs.interfaces, rhs.features))
def __hash__(self):
return id(self)
@@ -1405,16 +1689,16 @@ class Module(object):
if as_ref:
return '<%s path=%r mojom_namespace=%r>' % (
self.__class__.__name__, self.path, self.mojom_namespace)
- else:
- return GenericRepr(
- self, {
- 'path': False,
- 'mojom_namespace': False,
- 'attributes': False,
- 'structs': False,
- 'interfaces': False,
- 'unions': False
- })
+ return GenericRepr(
+ self, {
+ 'path': False,
+ 'mojom_namespace': False,
+ 'attributes': False,
+ 'structs': False,
+ 'interfaces': False,
+ 'unions': False,
+ 'features': False,
+ })
def GetNamespacePrefix(self):
return '%s.' % self.mojom_namespace if self.mojom_namespace else ''
@@ -1434,6 +1718,11 @@ class Module(object):
self.unions.append(union)
return union
+ def AddFeature(self, mojom_name, attributes=None):
+ feature = Feature(mojom_name, self, attributes)
+ self.features.append(feature)
+ return feature
+
def Stylize(self, stylizer):
self.namespace = stylizer.StylizeModule(self.mojom_namespace)
for struct in self.structs:
@@ -1446,12 +1735,14 @@ class Module(object):
enum.Stylize(stylizer)
for constant in self.constants:
constant.Stylize(stylizer)
+ for feature in self.features:
+ feature.Stylize(stylizer)
for imported_module in self.imports:
imported_module.Stylize(stylizer)
def Dump(self, f):
- pickle.dump(self, f, 2)
+ pickle.dump(self, f)
@classmethod
def Load(cls, f):
@@ -1461,15 +1752,15 @@ class Module(object):
def IsBoolKind(kind):
- return kind.spec == BOOL.spec
+ return kind.spec == BOOL.spec or kind.spec == NULLABLE_BOOL.spec
def IsFloatKind(kind):
- return kind.spec == FLOAT.spec
+ return kind.spec == FLOAT.spec or kind.spec == NULLABLE_FLOAT.spec
def IsDoubleKind(kind):
- return kind.spec == DOUBLE.spec
+ return kind.spec == DOUBLE.spec or kind.spec == NULLABLE_DOUBLE.spec
def IsIntegralKind(kind):
@@ -1477,7 +1768,14 @@ def IsIntegralKind(kind):
or kind.spec == INT16.spec or kind.spec == INT32.spec
or kind.spec == INT64.spec or kind.spec == UINT8.spec
or kind.spec == UINT16.spec or kind.spec == UINT32.spec
- or kind.spec == UINT64.spec)
+ or kind.spec == UINT64.spec or kind.spec == NULLABLE_BOOL.spec
+ or kind.spec == NULLABLE_INT8.spec or kind.spec == NULLABLE_INT16.spec
+ or kind.spec == NULLABLE_INT32.spec
+ or kind.spec == NULLABLE_INT64.spec
+ or kind.spec == NULLABLE_UINT8.spec
+ or kind.spec == NULLABLE_UINT16.spec
+ or kind.spec == NULLABLE_UINT32.spec
+ or kind.spec == NULLABLE_UINT64.spec)
def IsStringKind(kind):
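
With the NULLABLE_* specs folded into the predicates above, nullable primitives now satisfy the same checks as their plain counterparts. A hedged sketch, assuming the NULLABLE_* singletons defined earlier in module.py:

    from mojom.generate import module as mojom

    # `int32?` maps to the NULLABLE_INT32 singleton; it is both integral
    # and nullable under the updated predicates.
    assert mojom.IsIntegralKind(mojom.INT32)
    assert mojom.IsIntegralKind(mojom.NULLABLE_INT32)
    assert mojom.IsNullableKind(mojom.NULLABLE_INT32)
    assert not mojom.IsNullableKind(mojom.INT32)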
@@ -1522,6 +1820,10 @@ def IsArrayKind(kind):
return isinstance(kind, Array)
+def IsFeatureKind(kind):
+ return isinstance(kind, Feature)
+
+
def IsInterfaceKind(kind):
return isinstance(kind, Interface)
@@ -1558,12 +1860,16 @@ def IsEnumKind(kind):
return isinstance(kind, Enum)
+def IsValueKind(kind):
+ return isinstance(kind, ValueKind)
+
+
def IsReferenceKind(kind):
return isinstance(kind, ReferenceKind)
def IsNullableKind(kind):
- return IsReferenceKind(kind) and kind.is_nullable
+ return kind.is_nullable
def IsMapKind(kind):
@@ -1664,11 +1970,8 @@ def MethodPassesInterfaces(method):
return _AnyMethodParameterRecursive(method, IsInterfaceKind)
-def HasSyncMethods(interface):
- for method in interface.methods:
- if method.sync:
- return True
- return False
+def GetSyncMethodOrdinals(interface):
+ return [method.ordinal for method in interface.methods if method.sync]
def HasUninterruptableMethods(interface):
@@ -1700,18 +2003,17 @@ def ContainsHandlesOrInterfaces(kind):
checked.add(kind.spec)
if IsStructKind(kind):
return any(Check(field.kind) for field in kind.fields)
- elif IsUnionKind(kind):
+ if IsUnionKind(kind):
return any(Check(field.kind) for field in kind.fields)
- elif IsAnyHandleKind(kind):
+ if IsAnyHandleKind(kind):
return True
- elif IsAnyInterfaceKind(kind):
+ if IsAnyInterfaceKind(kind):
return True
- elif IsArrayKind(kind):
+ if IsArrayKind(kind):
return Check(kind.kind)
- elif IsMapKind(kind):
+ if IsMapKind(kind):
return Check(kind.key_kind) or Check(kind.value_kind)
- else:
- return False
+ return False
return Check(kind)
@@ -1738,21 +2040,20 @@ def ContainsNativeTypes(kind):
checked.add(kind.spec)
if IsEnumKind(kind):
return kind.native_only
- elif IsStructKind(kind):
+ if IsStructKind(kind):
if kind.native_only:
return True
if any(enum.native_only for enum in kind.enums):
return True
return any(Check(field.kind) for field in kind.fields)
- elif IsUnionKind(kind):
+ if IsUnionKind(kind):
return any(Check(field.kind) for field in kind.fields)
- elif IsInterfaceKind(kind):
+ if IsInterfaceKind(kind):
return any(enum.native_only for enum in kind.enums)
- elif IsArrayKind(kind):
+ if IsArrayKind(kind):
return Check(kind.kind)
- elif IsMapKind(kind):
+ if IsMapKind(kind):
return Check(kind.key_kind) or Check(kind.value_kind)
- else:
- return False
+ return False
return Check(kind)
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
index e8fd4936..2a4e852c 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
index 88b77c98..61240426 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
@@ -1,7 +1,8 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import copy
from mojom.generate import module as mojom
# This module provides a mechanism for determining the packed order and offsets
@@ -15,7 +16,7 @@ from mojom.generate import module as mojom
HEADER_SIZE = 8
-class PackedField(object):
+class PackedField:
kind_to_size = {
mojom.BOOL: 1,
mojom.INT8: 1,
@@ -75,18 +76,55 @@ class PackedField(object):
return 8
return cls.GetSizeForKind(kind)
- def __init__(self, field, index, ordinal):
+ def __init__(self,
+ field,
+ index,
+ ordinal,
+ original_field=None,
+ sub_ordinal=None,
+ linked_value_packed_field=None):
"""
Args:
field: the original field.
index: the position of the original field in the struct.
ordinal: the ordinal of the field for serialization.
+ original_field: See below.
+ sub_ordinal: See below.
+ linked_value_packed_field: See below.
+
+ original_field, sub_ordinal, and linked_value_packed_field are used to
+ support nullable ValueKind fields. For legacy reasons, nullable ValueKind
+ fields actually generate two PackedFields. This allows:
+
+ - backwards compatibility prior to Mojo support for nullable ValueKinds.
+ - correct packing of fields for the aforementioned backwards compatibility.
+
+ When translating Fields to PackedFields, the original field is turned into
+ two PackedFields: the first PackedField always has type mojom.BOOL, while
+ the second PackedField has the non-nullable version of the field's kind.
+
+ When constructing these PackedFields, original_field references the field
+ as defined in the mojom; the name as defined in the mojom will be used for
+ all layers above the wire/data layer.
+
+ sub_ordinal is used to sort the two PackedFields correctly with respect to
+ each other: the first mojom.BOOL field always has sub_ordinal 0, while the
+ second field always has sub_ordinal 1.
+
+ Finally, linked_value_packed_field is used by the serialization and
+ deserialization helpers, which generally just iterate over a PackedStruct's
+ PackedFields in ordinal order. This allows the helpers to easily reference
+ any related PackedFields rather than having to look up related PackedFields
+ by index while iterating.
"""
self.field = field
self.index = index
self.ordinal = ordinal
- self.size = self.GetSizeForKind(field.kind)
- self.alignment = self.GetAlignmentForKind(field.kind)
+ self.original_field = original_field
+ self.sub_ordinal = sub_ordinal
+ self.linked_value_packed_field = linked_value_packed_field
+ self.size = self.GetSizeForKind(self.field.kind)
+ self.alignment = self.GetAlignmentForKind(self.field.kind)
self.offset = None
self.bit = None
self.min_version = None
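
To summarize the docstring, a hypothetical field `foo: int32?` at ordinal 2 yields the following pair of PackedFields (names per the expansion in PackedStruct below):

    # sub_ordinal 0: 'foo_$flag',  kind mojom.BOOL  -- presence bit; its
    #                linked_value_packed_field points at the value field.
    # sub_ordinal 1: 'foo_$value', kind mojom.INT32 -- the payload itself.
    # Both share ordinal 2 and reference the original field via
    # original_field, so layers above the wire format still see one field.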
@@ -120,7 +158,33 @@ def GetPayloadSizeUpToField(field):
return offset + pad
-class PackedStruct(object):
+def IsNullableValueKindPackedField(field):
+ """Returns true if `field` is derived from a nullable ValueKind field.
+
+ Nullable ValueKind fields often require special handling in the bindings due
+ to the way the implementation is constrained for wire compatibility.
+ """
+ assert isinstance(field, PackedField)
+ return field.sub_ordinal is not None
+
+
+def IsPrimaryNullableValueKindPackedField(field):
+ """Returns true if `field` is derived from a nullable ValueKind mojom field
+ and is the "primary" field.
+
+ The primary field is a bool PackedField that controls whether the field
+ should be considered present; it holds a reference to the PackedField that
+ carries the actual value representation when the field is present.
+
+ Bindings code that translates between the wire protocol and the higher layers
+ can use this to simplify mapping multiple PackedFields to the single field
+ that is logically exposed to bindings consumers.
+ """
+ assert isinstance(field, PackedField)
+ return field.linked_value_packed_field is not None
+
+
+class PackedStruct:
def __init__(self, struct):
self.struct = struct
# |packed_fields| contains all the fields, in increasing offset order.
@@ -139,9 +203,41 @@ class PackedStruct(object):
for index, field in enumerate(struct.fields):
if field.ordinal is not None:
ordinal = field.ordinal
- src_fields.append(PackedField(field, index, ordinal))
+ # Nullable value types are a bit weird: they generate two PackedFields
+ # despite being a single ValueKind. This is for wire compatibility to
+ # ease the transition from legacy mojom syntax where nullable value types
+ # were not supported.
+ if isinstance(field.kind, mojom.ValueKind) and field.kind.is_nullable:
+ # The suffixes intentionally use Unicode codepoints which are considered
+ # valid C++/Java/JavaScript identifiers, yet are unlikely to be used in
+ # actual user code.
+ has_value_field = copy.copy(field)
+ has_value_field.name = f'{field.mojom_name}_$flag'
+ has_value_field.kind = mojom.BOOL
+
+ value_field = copy.copy(field)
+ value_field.name = f'{field.mojom_name}_$value'
+ value_field.kind = field.kind.MakeUnnullableKind()
+
+ value_packed_field = PackedField(value_field,
+ index,
+ ordinal,
+ original_field=field,
+ sub_ordinal=1,
+ linked_value_packed_field=None)
+ has_value_packed_field = PackedField(
+ has_value_field,
+ index,
+ ordinal,
+ original_field=field,
+ sub_ordinal=0,
+ linked_value_packed_field=value_packed_field)
+ src_fields.append(has_value_packed_field)
+ src_fields.append(value_packed_field)
+ else:
+ src_fields.append(PackedField(field, index, ordinal))
ordinal += 1
- src_fields.sort(key=lambda field: field.ordinal)
+ src_fields.sort(key=lambda field: (field.ordinal, field.sub_ordinal))
# Set |min_version| for each field.
next_min_version = 0
@@ -156,10 +252,11 @@ class PackedStruct(object):
if (packed_field.min_version != 0
and mojom.IsReferenceKind(packed_field.field.kind)
and not packed_field.field.kind.is_nullable):
- raise Exception("Non-nullable fields are only allowed in version 0 of "
- "a struct. %s.%s is defined with [MinVersion=%d]." %
- (self.struct.name, packed_field.field.name,
- packed_field.min_version))
+ raise Exception(
+ "Non-nullable reference fields are only allowed in version 0 of a "
+ "struct. %s.%s is defined with [MinVersion=%d]." %
+ (self.struct.name, packed_field.field.name,
+ packed_field.min_version))
src_field = src_fields[0]
src_field.offset = 0
@@ -186,7 +283,7 @@ class PackedStruct(object):
dst_fields.append(src_field)
-class ByteInfo(object):
+class ByteInfo:
def __init__(self):
self.is_padding = False
self.packed_fields = []
@@ -214,10 +311,11 @@ def GetByteLayout(packed_struct):
return byte_info
-class VersionInfo(object):
- def __init__(self, version, num_fields, num_bytes):
+class VersionInfo:
+ def __init__(self, version, num_fields, num_packed_fields, num_bytes):
self.version = version
self.num_fields = num_fields
+ self.num_packed_fields = num_packed_fields
self.num_bytes = num_bytes
@@ -235,24 +333,35 @@ def GetVersionInfo(packed_struct):
versions = []
last_version = 0
last_num_fields = 0
+ last_num_packed_fields = 0
last_payload_size = 0
for packed_field in packed_struct.packed_fields_in_ordinal_order:
if packed_field.min_version != last_version:
versions.append(
- VersionInfo(last_version, last_num_fields,
+ VersionInfo(last_version, last_num_fields, last_num_packed_fields,
last_payload_size + HEADER_SIZE))
last_version = packed_field.min_version
- last_num_fields += 1
+ # Nullable numeric fields (e.g. `int32?`) expand to two packed fields, so to
+ # avoid double-counting, only increment if the field is:
+ # - not used for representing a nullable value kind field, or
+ # - the primary field representing the nullable value kind field.
+ last_num_fields += 1 if (
+ not IsNullableValueKindPackedField(packed_field)
+ or IsPrimaryNullableValueKindPackedField(packed_field)) else 0
+
+ last_num_packed_fields += 1
+
# The fields are iterated in ordinal order here. However, the size of a
# version is determined by the last field of that version in pack order,
# instead of ordinal order. Therefore, we need to calculate the max value.
- last_payload_size = max(
- GetPayloadSizeUpToField(packed_field), last_payload_size)
+ last_payload_size = max(GetPayloadSizeUpToField(packed_field),
+ last_payload_size)
- assert len(versions) == 0 or last_num_fields != versions[-1].num_fields
+ assert len(
+ versions) == 0 or last_num_packed_fields != versions[-1].num_packed_fields
versions.append(
- VersionInfo(last_version, last_num_fields,
+ VersionInfo(last_version, last_num_fields, last_num_packed_fields,
last_payload_size + HEADER_SIZE))
return versions
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
index 98c705ad..7d8e4e01 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -205,6 +205,34 @@ class PackTest(unittest.TestCase):
self.assertEqual(4, versions[2].num_fields)
self.assertEqual(32, versions[2].num_bytes)
+ def testGetVersionInfoPackedStruct(self):
+ """Tests that pack.GetVersionInfo() correctly sets version, num_fields,
+ and num_packed_fields for a packed struct.
+ """
+ struct = mojom.Struct('test')
+ struct.AddField('field_0', mojom.BOOL, ordinal=0)
+ struct.AddField('field_1',
+ mojom.NULLABLE_BOOL,
+ ordinal=1,
+ attributes={'MinVersion': 1})
+ struct.AddField('field_2',
+ mojom.NULLABLE_BOOL,
+ ordinal=2,
+ attributes={'MinVersion': 2})
+ ps = pack.PackedStruct(struct)
+ versions = pack.GetVersionInfo(ps)
+
+ self.assertEqual(3, len(versions))
+ self.assertEqual(0, versions[0].version)
+ self.assertEqual(1, versions[1].version)
+ self.assertEqual(2, versions[2].version)
+ self.assertEqual(1, versions[0].num_fields)
+ self.assertEqual(2, versions[1].num_fields)
+ self.assertEqual(3, versions[2].num_fields)
+ self.assertEqual(1, versions[0].num_packed_fields)
+ self.assertEqual(3, versions[1].num_packed_fields)
+ self.assertEqual(5, versions[2].num_packed_fields)
+
def testInterfaceAlignment(self):
"""Tests that interfaces are aligned on 4-byte boundaries, although the size
of an interface is 8 bytes.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
index 0da90058..807e2a4f 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
index 7580b780..83bb297f 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
@@ -1,4 +1,4 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Convert parse tree to AST.
@@ -12,17 +12,294 @@ already been parsed and converted to ASTs before.
import itertools
import os
import re
-import sys
+from collections import OrderedDict
from mojom.generate import generator
from mojom.generate import module as mojom
from mojom.parse import ast
-def _IsStrOrUnicode(x):
- if sys.version_info[0] < 3:
- return isinstance(x, (unicode, str))
- return isinstance(x, str)
+is_running_backwards_compatibility_check_hack = False
+
+### DO NOT ADD ENTRIES TO THIS LIST. ###
+_EXTENSIBLE_ENUMS_MISSING_DEFAULT = (
+ 'x:arc.keymaster.mojom.Algorithm',
+ 'x:arc.keymaster.mojom.Digest',
+ 'x:arc.keymaster.mojom.SignatureResult',
+ 'x:arc.mojom.AccessibilityActionType',
+ 'x:arc.mojom.AccessibilityBooleanProperty',
+ 'x:arc.mojom.AccessibilityEventIntListProperty',
+ 'x:arc.mojom.AccessibilityEventIntProperty',
+ 'x:arc.mojom.AccessibilityEventStringProperty',
+ 'x:arc.mojom.AccessibilityEventType',
+ 'x:arc.mojom.AccessibilityFilterType',
+ 'x:arc.mojom.AccessibilityIntListProperty',
+ 'x:arc.mojom.AccessibilityIntProperty',
+ 'x:arc.mojom.AccessibilityLiveRegionType',
+ 'x:arc.mojom.AccessibilityNotificationStateType',
+ 'x:arc.mojom.AccessibilityRangeType',
+ 'x:arc.mojom.AccessibilitySelectionMode',
+ 'x:arc.mojom.AccessibilityStringListProperty',
+ 'x:arc.mojom.AccessibilityStringProperty',
+ 'x:arc.mojom.AccessibilityWindowBooleanProperty',
+ 'x:arc.mojom.AccessibilityWindowIntListProperty',
+ 'x:arc.mojom.AccessibilityWindowIntProperty',
+ 'x:arc.mojom.AccessibilityWindowStringProperty',
+ 'x:arc.mojom.AccessibilityWindowType',
+ 'x:arc.mojom.AccountCheckStatus',
+ 'x:arc.mojom.AccountUpdateType',
+ 'x:arc.mojom.ActionType',
+ 'x:arc.mojom.Algorithm',
+ 'x:arc.mojom.AndroidIdSource',
+ 'x:arc.mojom.AnrSource',
+ 'x:arc.mojom.AnrType',
+ 'x:arc.mojom.AppDiscoveryRequestState',
+ 'x:arc.mojom.AppKillType',
+ 'x:arc.mojom.AppPermission',
+ 'x:arc.mojom.AppPermissionGroup',
+ 'x:arc.mojom.AppReinstallState',
+ 'x:arc.mojom.AppShortcutItemType',
+ 'x:arc.mojom.ArcAuthCodeStatus',
+ 'x:arc.mojom.ArcClipboardDragDropEvent',
+ 'x:arc.mojom.ArcCorePriAbiMigEvent',
+ 'x:arc.mojom.ArcDnsQuery',
+ 'x:arc.mojom.ArcImageCopyPasteCompatAction',
+ 'x:arc.mojom.ArcNetworkError',
+ 'x:arc.mojom.ArcNetworkEvent',
+ 'x:arc.mojom.ArcNotificationEvent',
+ 'x:arc.mojom.ArcNotificationExpandState',
+ 'x:arc.mojom.ArcNotificationPriority',
+ 'x:arc.mojom.ArcNotificationRemoteInputState',
+ 'x:arc.mojom.ArcNotificationShownContents',
+ 'x:arc.mojom.ArcNotificationStyle',
+ 'x:arc.mojom.ArcNotificationType',
+ 'x:arc.mojom.ArcPipEvent',
+ 'x:arc.mojom.ArcResizeLockState',
+ 'x:arc.mojom.ArcSignInSuccess',
+ 'x:arc.mojom.ArcTimerResult',
+ 'x:arc.mojom.AudioSwitch',
+ 'x:arc.mojom.BluetoothAclState',
+ 'x:arc.mojom.BluetoothAdapterState',
+ 'x:arc.mojom.BluetoothAdvertisingDataType',
+ 'x:arc.mojom.BluetoothBondState',
+ 'x:arc.mojom.BluetoothDeviceType',
+ 'x:arc.mojom.BluetoothDiscoveryState',
+ 'x:arc.mojom.BluetoothGattDBAttributeType',
+ 'x:arc.mojom.BluetoothGattStatus',
+ 'x:arc.mojom.BluetoothPropertyType',
+ 'x:arc.mojom.BluetoothScanMode',
+ 'x:arc.mojom.BluetoothSdpAttributeType',
+ 'x:arc.mojom.BluetoothSocketType',
+ 'x:arc.mojom.BluetoothStatus',
+ 'x:arc.mojom.BootType',
+ 'x:arc.mojom.CaptionTextShadowType',
+ 'x:arc.mojom.ChangeType',
+ 'x:arc.mojom.ChromeAccountType',
+ 'x:arc.mojom.ChromeApp',
+ 'x:arc.mojom.ChromePage',
+ 'x:arc.mojom.ClockId',
+ 'x:arc.mojom.CloudProvisionFlowError',
+ 'x:arc.mojom.CommandResultType',
+ 'x:arc.mojom.CompanionLibApiId',
+ 'x:arc.mojom.ConnectionStateType',
+ 'x:arc.mojom.ContentChangeType',
+ 'x:arc.mojom.CpuRestrictionState',
+ 'x:arc.mojom.CursorCoordinateSpace',
+ 'x:arc.mojom.DataRestoreStatus',
+ 'x:arc.mojom.DecoderStatus',
+ 'x:arc.mojom.DeviceType',
+ 'x:arc.mojom.Digest',
+ 'x:arc.mojom.DisplayWakeLockType',
+ 'x:arc.mojom.EapMethod',
+ 'x:arc.mojom.EapPhase2Method',
+ 'x:arc.mojom.FileSelectorEventType',
+ 'x:arc.mojom.GMSCheckInError',
+ 'x:arc.mojom.GMSSignInError',
+ 'x:arc.mojom.GeneralSignInError',
+ 'x:arc.mojom.GetNetworksRequestType',
+ 'x:arc.mojom.HalPixelFormat',
+ 'x:arc.mojom.IPAddressType',
+ 'x:arc.mojom.InstallErrorReason',
+ 'x:arc.mojom.KeyFormat',
+ 'x:arc.mojom.KeyManagement',
+ 'x:arc.mojom.KeyPurpose',
+ 'x:arc.mojom.KeymasterError',
+ 'x:arc.mojom.MainAccountHashMigrationStatus',
+ 'x:arc.mojom.MainAccountResolutionStatus',
+ 'x:arc.mojom.ManagementChangeStatus',
+ 'x:arc.mojom.ManagementState',
+ 'x:arc.mojom.MessageCenterVisibility',
+ 'x:arc.mojom.MetricsType',
+ 'x:arc.mojom.MountEvent',
+ 'x:arc.mojom.NativeBridgeType',
+ 'x:arc.mojom.NetworkResult',
+ 'x:arc.mojom.NetworkType',
+ 'x:arc.mojom.OemCryptoAlgorithm',
+ 'x:arc.mojom.OemCryptoCipherMode',
+ 'x:arc.mojom.OemCryptoHdcpCapability',
+ 'x:arc.mojom.OemCryptoLicenseType',
+ 'x:arc.mojom.OemCryptoPrivateKey',
+ 'x:arc.mojom.OemCryptoProvisioningMethod',
+ 'x:arc.mojom.OemCryptoResult',
+ 'x:arc.mojom.OemCryptoRsaPaddingScheme',
+ 'x:arc.mojom.OemCryptoUsageEntryStatus',
+ 'x:arc.mojom.Padding',
+ 'x:arc.mojom.PaiFlowState',
+ 'x:arc.mojom.PatternType',
+ 'x:arc.mojom.PressureLevel',
+ 'x:arc.mojom.PrintColorMode',
+ 'x:arc.mojom.PrintContentType',
+ 'x:arc.mojom.PrintDuplexMode',
+ 'x:arc.mojom.PrinterStatus',
+ 'x:arc.mojom.ProcessState',
+ 'x:arc.mojom.PurchaseState',
+ 'x:arc.mojom.ReauthReason',
+ 'x:arc.mojom.ScaleFactor',
+ 'x:arc.mojom.SecurityType',
+ 'x:arc.mojom.SegmentStyle',
+ 'x:arc.mojom.SelectFilesActionType',
+ 'x:arc.mojom.SetNativeChromeVoxResponse',
+ 'x:arc.mojom.ShowPackageInfoPage',
+ 'x:arc.mojom.SpanType',
+ 'x:arc.mojom.SupportedLinkChangeSource',
+ 'x:arc.mojom.TetheringClientState',
+ 'x:arc.mojom.TextInputType',
+ 'x:arc.mojom.TtsEventType',
+ 'x:arc.mojom.VideoCodecProfile',
+ 'x:arc.mojom.VideoDecodeAccelerator.Result',
+ 'x:arc.mojom.VideoEncodeAccelerator.Error',
+ 'x:arc.mojom.VideoFrameStorageType',
+ 'x:arc.mojom.VideoPixelFormat',
+ 'x:arc.mojom.WakefulnessMode',
+ 'x:arc.mojom.WebApkInstallResult',
+ 'x:ash.ime.mojom.InputFieldType',
+ 'x:ash.ime.mojom.PersonalizationMode',
+ 'x:ash.language.mojom.FeatureId',
+ 'x:blink.mojom.ScrollRestorationType',
+ 'x:chromeos.cdm.mojom.CdmKeyStatus',
+ 'x:chromeos.cdm.mojom.CdmMessageType',
+ 'x:chromeos.cdm.mojom.CdmSessionType',
+ 'x:chromeos.cdm.mojom.DecryptStatus',
+ 'x:chromeos.cdm.mojom.EmeInitDataType',
+ 'x:chromeos.cdm.mojom.EncryptionScheme',
+ 'x:chromeos.cdm.mojom.HdcpVersion',
+ 'x:chromeos.cdm.mojom.OutputProtection.LinkType',
+ 'x:chromeos.cdm.mojom.OutputProtection.ProtectionType',
+ 'x:chromeos.cdm.mojom.PromiseException',
+ 'x:chromeos.cfm.mojom.EnqueuePriority',
+ 'x:chromeos.cfm.mojom.LoggerErrorCode',
+ 'x:chromeos.cfm.mojom.LoggerState',
+ 'x:chromeos.cros_healthd.mojom.CryptoAlgorithm',
+ 'x:chromeos.cros_healthd.mojom.EncryptionState',
+ 'x:chromeos.machine_learning.mojom.AnnotationUsecase',
+ 'x:chromeos.machine_learning.mojom.BuiltinModelId',
+ 'x:chromeos.machine_learning.mojom.CreateGraphExecutorResult',
+ 'x:chromeos.machine_learning.mojom.DocumentScannerResultStatus',
+ 'x:chromeos.machine_learning.mojom.EndpointReason',
+ 'x:chromeos.machine_learning.mojom.EndpointerType',
+ 'x:chromeos.machine_learning.mojom.ExecuteResult',
+ 'x:chromeos.machine_learning.mojom.GrammarCheckerResult.Status',
+ 'x:chromeos.machine_learning.mojom.HandwritingRecognizerResult.Status',
+ 'x:chromeos.machine_learning.mojom.LoadHandwritingModelResult',
+ 'x:chromeos.machine_learning.mojom.LoadModelResult',
+ 'x:chromeos.machine_learning.mojom.Rotation',
+ 'x:chromeos.network_config.mojom.ConnectionStateType',
+ 'x:chromeos.network_config.mojom.DeviceStateType',
+ 'x:chromeos.network_config.mojom.IPConfigType',
+ 'x:chromeos.network_config.mojom.NetworkType',
+ 'x:chromeos.network_config.mojom.OncSource',
+ 'x:chromeos.network_config.mojom.PolicySource',
+ 'x:chromeos.network_config.mojom.PortalState',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdEvent',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdWebRequestHttpMethod',
+ 'x:chromeos.wilco_dtc_supportd.mojom.WilcoDtcSupportdWebRequestStatus',
+ 'x:cros.mojom.CameraClientType',
+ 'x:cros.mojom.CameraMetadataSectionStart',
+ 'x:cros.mojom.CameraMetadataTag',
+ 'x:cros.mojom.HalPixelFormat',
+ 'x:crosapi.mojom.AllowedPaths',
+ 'x:crosapi.mojom.BrowserAppInstanceType',
+ 'x:crosapi.mojom.CreationResult',
+ 'x:crosapi.mojom.DeviceAccessResultCode',
+ 'x:crosapi.mojom.DeviceMode',
+ 'x:crosapi.mojom.DlpRestrictionLevel',
+ 'x:crosapi.mojom.ExoImeSupport',
+ 'x:crosapi.mojom.FullscreenVisibility',
+ 'x:crosapi.mojom.GoogleServiceAuthError.State',
+ 'x:crosapi.mojom.IsInstallableResult',
+ 'x:crosapi.mojom.KeyTag',
+ 'x:crosapi.mojom.KeystoreSigningAlgorithmName',
+ 'x:crosapi.mojom.KeystoreType',
+ 'x:crosapi.mojom.LacrosFeedbackSource',
+ 'x:crosapi.mojom.MemoryPressureLevel',
+ 'x:crosapi.mojom.MetricsReportingManaged',
+ 'x:crosapi.mojom.NotificationType',
+ 'x:crosapi.mojom.OndeviceHandwritingSupport',
+ 'x:crosapi.mojom.OpenResult',
+ 'x:crosapi.mojom.PolicyDomain',
+ 'x:crosapi.mojom.RegistrationCodeType',
+ 'x:crosapi.mojom.ScaleFactor',
+ 'x:crosapi.mojom.SearchResult.OptionalBool',
+ 'x:crosapi.mojom.SelectFileDialogType',
+ 'x:crosapi.mojom.SelectFileResult',
+ 'x:crosapi.mojom.SharesheetResult',
+ 'x:crosapi.mojom.TouchEventType',
+ 'x:crosapi.mojom.VideoRotation',
+ 'x:crosapi.mojom.WallpaperLayout',
+ 'x:crosapi.mojom.WebAppInstallResultCode',
+ 'x:crosapi.mojom.WebAppUninstallResultCode',
+ 'x:device.mojom.HidBusType',
+ 'x:device.mojom.WakeLockReason',
+ 'x:device.mojom.WakeLockType',
+ 'x:drivefs.mojom.DialogReason.Type',
+ 'x:drivefs.mojom.DriveError.Type',
+ 'x:drivefs.mojom.DriveFsDelegate.ExtensionConnectionStatus',
+ 'x:drivefs.mojom.FileMetadata.CanPinStatus',
+ 'x:drivefs.mojom.FileMetadata.Type',
+ 'x:drivefs.mojom.ItemEventReason',
+ 'x:drivefs.mojom.MirrorPathStatus',
+ 'x:drivefs.mojom.MirrorSyncStatus',
+ 'x:drivefs.mojom.QueryParameters.SortField',
+ 'x:fuzz.mojom.FuzzEnum',
+ 'x:media.mojom.FillLightMode',
+ 'x:media.mojom.MeteringMode',
+ 'x:media.mojom.PowerLineFrequency',
+ 'x:media.mojom.RedEyeReduction',
+ 'x:media.mojom.ResolutionChangePolicy',
+ 'x:media.mojom.VideoCaptureApi',
+ 'x:media.mojom.VideoCaptureBufferType',
+ 'x:media.mojom.VideoCaptureError',
+ 'x:media.mojom.VideoCaptureFrameDropReason',
+ 'x:media.mojom.VideoCapturePixelFormat',
+ 'x:media.mojom.VideoCaptureTransportType',
+ 'x:media.mojom.VideoFacingMode',
+ 'x:media_session.mojom.AudioFocusType',
+ 'x:media_session.mojom.CameraState',
+ 'x:media_session.mojom.EnforcementMode',
+ 'x:media_session.mojom.MediaAudioVideoState',
+ 'x:media_session.mojom.MediaImageBitmapColorType',
+ 'x:media_session.mojom.MediaPictureInPictureState',
+ 'x:media_session.mojom.MediaPlaybackState',
+ 'x:media_session.mojom.MediaSession.SuspendType',
+ 'x:media_session.mojom.MediaSessionAction',
+ 'x:media_session.mojom.MediaSessionImageType',
+ 'x:media_session.mojom.MediaSessionInfo.SessionState',
+ 'x:media_session.mojom.MicrophoneState',
+ 'x:ml.model_loader.mojom.ComputeResult',
+ 'x:ml.model_loader.mojom.CreateModelLoaderResult',
+ 'x:ml.model_loader.mojom.LoadModelResult',
+ 'x:mojo.test.AnExtensibleEnum',
+ 'x:mojo.test.EnumB',
+ 'x:mojo.test.ExtensibleEmptyEnum',
+ 'x:mojo.test.enum_default_unittest.mojom.ExtensibleEnumWithoutDefault',
+ 'x:network.mojom.WebSandboxFlags',
+ 'x:payments.mojom.BillingResponseCode',
+ 'x:payments.mojom.CreateDigitalGoodsResponseCode',
+ 'x:payments.mojom.ItemType',
+ 'x:printing.mojom.PrinterType',
+ 'x:ui.mojom.KeyboardCode',
+)
+### DO NOT ADD ENTRIES TO THIS LIST. ###
def _DuplicateName(values):
@@ -98,12 +375,6 @@ def _MapKind(kind):
}
if kind.endswith('?'):
base_kind = _MapKind(kind[0:-1])
- # NOTE: This doesn't rule out enum types. Those will be detected later, when
- # cross-reference is established.
- reference_kinds = ('m', 's', 'h', 'a', 'r', 'x', 'asso', 'rmt', 'rcv',
- 'rma', 'rca')
- if re.split('[^a-z]', base_kind, 1)[0] not in reference_kinds:
- raise Exception('A type (spec "%s") cannot be made nullable' % base_kind)
return '?' + base_kind
if kind.endswith('}'):
lbracket = kind.rfind('{')
@@ -113,8 +384,6 @@ def _MapKind(kind):
lbracket = kind.rfind('[')
typename = kind[0:lbracket]
return 'a' + kind[lbracket + 1:-1] + ':' + _MapKind(typename)
- if kind.endswith('&'):
- return 'r:' + _MapKind(kind[0:-1])
if kind.startswith('asso<'):
assert kind.endswith('>')
return 'asso:' + _MapKind(kind[5:-1])
@@ -135,13 +404,45 @@ def _MapKind(kind):
return 'x:' + kind
-def _AttributeListToDict(attribute_list):
+def _MapAttributeValue(module, kind, value):
+ # True/False/None
+ if value is None:
+ return value
+ if not isinstance(value, str):
+ return value
+ # Is the attribute value the name of a feature?
+ try:
+ # Features cannot be nested in other types, so lookup in the global scope.
+ trial = _LookupKind(module.kinds, 'x:' + value,
+ _GetScopeForKind(module, kind))
+ if isinstance(trial, mojom.Feature):
+ return trial
+ except ValueError:
+ pass
+ # Is the attribute value a constant or enum value?
+ try:
+ trial = _LookupValue(module, None, None, ('IDENTIFIER', value))
+ if isinstance(trial, mojom.ConstantValue):
+ return trial.constant
+ if isinstance(trial, mojom.EnumValue):
+ return trial
+ except ValueError:
+ pass
+ # If the value is not a referenceable mojo type, return it as a string.
+ return value
+
+
+def _AttributeListToDict(module, kind, attribute_list):
if attribute_list is None:
return None
assert isinstance(attribute_list, ast.AttributeList)
- # TODO(vtl): Check for duplicate keys here.
- return dict(
- [(attribute.key, attribute.value) for attribute in attribute_list])
+ attributes = dict()
+ for attribute in attribute_list:
+ if attribute.key in attributes:
+ raise Exception("Duplicate key (%s) in attribute list" % attribute.key)
+ attributes[attribute.key] = _MapAttributeValue(module, kind,
+ attribute.value)
+ return attributes
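
A minimal sketch of the duplicate-key behaviour introduced above, as a hypothetical reduced helper:

    def _check_duplicates(pairs):
        attributes = dict()
        for key, value in pairs:
            if key in attributes:
                raise Exception('Duplicate key (%s) in attribute list' % key)
            attributes[key] = value
        return attributes

    _check_duplicates([('Stable', True), ('Stable', True)])  # raises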
builtin_values = frozenset([
@@ -257,7 +558,8 @@ def _Kind(kinds, spec, scope):
return kind
if spec.startswith('?'):
- kind = _Kind(kinds, spec[1:], scope).MakeNullableKind()
+ kind = _Kind(kinds, spec[1:], scope)
+ kind = kind.MakeNullableKind()
elif spec.startswith('a:'):
kind = mojom.Array(_Kind(kinds, spec[2:], scope))
elif spec.startswith('asso:'):
@@ -303,7 +605,8 @@ def _Kind(kinds, spec, scope):
def _Import(module, import_module):
# Copy the struct kinds from our imports into the current module.
- importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface)
+ importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface,
+ mojom.Feature)
for kind in import_module.kinds.values():
if (isinstance(kind, importable_kinds)
and kind.module.path == import_module.path):
@@ -316,6 +619,32 @@ def _Import(module, import_module):
return import_module
+def _Feature(module, parsed_feature):
+ """
+ Args:
+ module: {mojom.Module} Module currently being constructed.
+ parsed_feature: {ast.Feature} Parsed feature.
+
+ Returns:
+ {mojom.Feature} AST feature.
+ """
+ feature = mojom.Feature(module=module)
+ feature.mojom_name = parsed_feature.mojom_name
+ feature.spec = 'x:' + module.GetNamespacePrefix() + feature.mojom_name
+ module.kinds[feature.spec] = feature
+ feature.constants = []
+ _ProcessElements(
+ parsed_feature.mojom_name, parsed_feature.body, {
+ ast.Const:
+ lambda const: feature.constants.append(
+ _Constant(module, const, feature)),
+ })
+
+ feature.attributes = _AttributeListToDict(module, feature,
+ parsed_feature.attribute_list)
+ return feature
+
+
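
A hedged usage sketch, assuming a parsed `feature kFoo` in a module whose mojom_namespace is 'sample.mojom' (hypothetical names):

    feature = _Feature(module, parsed_feature)
    # Features live in the global scope, keyed by fully qualified spec.
    assert feature.spec == 'x:sample.mojom.kFoo'
    assert module.kinds[feature.spec] is feature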
def _Struct(module, parsed_struct):
"""
Args:
@@ -345,7 +674,8 @@ def _Struct(module, parsed_struct):
struct.fields_data.append,
})
- struct.attributes = _AttributeListToDict(parsed_struct.attribute_list)
+ struct.attributes = _AttributeListToDict(module, struct,
+ parsed_struct.attribute_list)
# Enforce that a [Native] attribute is set to make native-only struct
# declarations more explicit.
@@ -377,7 +707,8 @@ def _Union(module, parsed_union):
union.fields_data = []
_ProcessElements(parsed_union.mojom_name, parsed_union.body,
{ast.UnionField: union.fields_data.append})
- union.attributes = _AttributeListToDict(parsed_union.attribute_list)
+ union.attributes = _AttributeListToDict(module, union,
+ parsed_union.attribute_list)
return union
@@ -398,7 +729,8 @@ def _StructField(module, parsed_field, struct):
field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
field.default = _LookupValue(module, struct, field.kind,
parsed_field.default_value)
- field.attributes = _AttributeListToDict(parsed_field.attribute_list)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
return field
@@ -414,11 +746,22 @@ def _UnionField(module, parsed_field, union):
"""
field = mojom.UnionField()
field.mojom_name = parsed_field.mojom_name
+ # Disallow unions from being self-recursive.
+ parsed_typename = parsed_field.typename
+ if parsed_typename.endswith('?'):
+ parsed_typename = parsed_typename[:-1]
+ assert parsed_typename != union.mojom_name
field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename),
(module.mojom_namespace, union.mojom_name))
field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
field.default = None
- field.attributes = _AttributeListToDict(parsed_field.attribute_list)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
+ if field.is_default and not mojom.IsNullableKind(field.kind) and \
+ not mojom.IsIntegralKind(field.kind):
+ raise Exception(
+ '[Default] field for union %s must be nullable or integral type.' %
+ union.mojom_name)
return field
@@ -439,7 +782,8 @@ def _Parameter(module, parsed_param, interface):
parameter.ordinal = (parsed_param.ordinal.value
if parsed_param.ordinal else None)
parameter.default = None # TODO(tibell): We never have these. Remove field?
- parameter.attributes = _AttributeListToDict(parsed_param.attribute_list)
+ parameter.attributes = _AttributeListToDict(module, parameter,
+ parsed_param.attribute_list)
return parameter
@@ -464,7 +808,8 @@ def _Method(module, parsed_method, interface):
method.response_parameters = list(
map(lambda parameter: _Parameter(module, parameter, interface),
parsed_method.response_parameter_list))
- method.attributes = _AttributeListToDict(parsed_method.attribute_list)
+ method.attributes = _AttributeListToDict(module, method,
+ parsed_method.attribute_list)
# Enforce that only methods with response can have a [Sync] attribute.
if method.sync and method.response_parameters is None:
@@ -492,7 +837,8 @@ def _Interface(module, parsed_iface):
interface.mojom_name = parsed_iface.mojom_name
interface.spec = 'x:' + module.GetNamespacePrefix() + interface.mojom_name
module.kinds[interface.spec] = interface
- interface.attributes = _AttributeListToDict(parsed_iface.attribute_list)
+ interface.attributes = _AttributeListToDict(module, interface,
+ parsed_iface.attribute_list)
interface.enums = []
interface.constants = []
interface.methods_data = []
@@ -522,7 +868,8 @@ def _EnumField(module, enum, parsed_field):
field = mojom.EnumField()
field.mojom_name = parsed_field.mojom_name
field.value = _LookupValue(module, enum, None, parsed_field.value)
- field.attributes = _AttributeListToDict(parsed_field.attribute_list)
+ field.attributes = _AttributeListToDict(module, field,
+ parsed_field.attribute_list)
value = mojom.EnumValue(module, enum, field)
module.values[value.GetSpec()] = value
return field
@@ -544,7 +891,7 @@ def _ResolveNumericEnumValues(enum):
prev_value += 1
# Integral value (e.g: BEGIN = -0x1).
- elif _IsStrOrUnicode(field.value):
+ elif isinstance(field.value, str):
prev_value = int(field.value, 0)
# Reference to a previous enum value (e.g: INIT = BEGIN).
@@ -560,7 +907,10 @@ def _ResolveNumericEnumValues(enum):
else:
raise Exception('Unresolved enum value for %s' % field.value.GetSpec())
- #resolved_enum_values[field.mojom_name] = prev_value
+ if prev_value in (-128, -127):
+ raise Exception(f'{field.mojom_name} in {enum.spec} has the value '
+ f'{prev_value}, which is reserved for WTF::HashTrait\'s '
+ 'default enum specialization and may not be used.')
field.numeric_value = prev_value
if min_value is None or prev_value < min_value:
min_value = prev_value
@@ -588,7 +938,8 @@ def _Enum(module, parsed_enum, parent_kind):
mojom_name = parent_kind.mojom_name + '.' + mojom_name
enum.spec = 'x:%s.%s' % (module.mojom_namespace, mojom_name)
enum.parent_kind = parent_kind
- enum.attributes = _AttributeListToDict(parsed_enum.attribute_list)
+ enum.attributes = _AttributeListToDict(module, enum,
+ parsed_enum.attribute_list)
if not enum.native_only:
enum.fields = list(
@@ -600,11 +951,18 @@ def _Enum(module, parsed_enum, parent_kind):
for field in enum.fields:
if field.default:
if not enum.extensible:
- raise Exception('Non-extensible enums may not specify a default')
- if enum.default_field is not None:
raise Exception(
- 'Only one enumerator value may be specified as the default')
+ f'Non-extensible enum {enum.spec} may not specify a default')
+ if enum.default_field is not None:
+ raise Exception(f'Multiple [Default] enumerators in enum {enum.spec}')
enum.default_field = field
+ # While running the backwards compatibility check, ignore errors because the
+ # old version of the enum might not specify [Default].
+ if (enum.extensible and enum.default_field is None
+ and enum.spec not in _EXTENSIBLE_ENUMS_MISSING_DEFAULT
+ and not is_running_backwards_compatibility_check_hack):
+ raise Exception(
+ f'Extensible enum {enum.spec} must specify a [Default] enumerator')
module.kinds[enum.spec] = enum
@@ -696,6 +1054,11 @@ def _CollectReferencedKinds(module, all_defined_kinds):
for referenced_kind in extract_referenced_user_kinds(param.kind):
sanitized_kind = sanitize_kind(referenced_kind)
referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
+ # Consts can reference imported enums.
+ for const in module.constants:
+    if const.kind not in mojom.PRIMITIVES:
+ sanitized_kind = sanitize_kind(const.kind)
+ referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
return referenced_user_kinds
@@ -741,6 +1104,16 @@ def _AssertTypeIsStable(kind):
assertDependencyIsStable(response_param.kind)
+def _AssertStructIsValid(kind):
+ expected_ordinals = set(range(0, len(kind.fields)))
+ ordinals = set(map(lambda field: field.ordinal, kind.fields))
+ if ordinals != expected_ordinals:
+ raise Exception(
+ 'Structs must use contiguous ordinals starting from 0. ' +
+ '{} is missing the following ordinals: {}.'.format(
+ kind.mojom_name, ', '.join(map(str, expected_ordinals - ordinals))))
+
+
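The ordinal check reduces to a few lines; the following sketch uses a stand-in Field type rather than mojom.StructField.

import dataclasses

@dataclasses.dataclass
class Field:  # stand-in for mojom.StructField
  mojom_name: str
  ordinal: int

def assert_contiguous(fields):
  expected = set(range(len(fields)))
  actual = {field.ordinal for field in fields}
  if actual != expected:
    missing = ', '.join(map(str, sorted(expected - actual)))
    raise ValueError(f'missing ordinals: {missing}')

# assert_contiguous([Field('a', 0), Field('b', 2)]) raises: missing ordinal 1.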
def _Module(tree, path, imports):
"""
Args:
@@ -778,6 +1151,8 @@ def _Module(tree, path, imports):
module.structs = []
module.unions = []
module.interfaces = []
+ module.features = []
+
_ProcessElements(
filename, tree.definition_list, {
ast.Const:
@@ -791,6 +1166,8 @@ def _Module(tree, path, imports):
ast.Interface:
lambda interface: module.interfaces.append(
_Interface(module, interface)),
+ ast.Feature:
+ lambda feature: module.features.append(_Feature(module, feature)),
})
# Second pass expands fields and methods. This allows fields and parameters
@@ -806,12 +1183,24 @@ def _Module(tree, path, imports):
for enum in struct.enums:
all_defined_kinds[enum.spec] = enum
+ for feature in module.features:
+ all_defined_kinds[feature.spec] = feature
+
for union in module.unions:
union.fields = list(
map(lambda field: _UnionField(module, field, union), union.fields_data))
_AssignDefaultOrdinals(union.fields)
+ for field in union.fields:
+ if field.is_default:
+ if union.default_field is not None:
+ raise Exception('Multiple [Default] fields in union %s.' %
+ union.mojom_name)
+ union.default_field = field
del union.fields_data
all_defined_kinds[union.spec] = union
+ if union.extensible and union.default_field is None:
+ raise Exception('Extensible union %s must specify a [Default] field' %
+ union.mojom_name)
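The two [Default] constraints enforced above amount to the following sketch, with fields modeled as (name, is_default) pairs instead of mojom.UnionField objects.

def check_union_default(union_name, fields, extensible):
  default_field = None
  for name, is_default in fields:
    if is_default:
      if default_field is not None:
        raise ValueError(f'Multiple [Default] fields in union {union_name}.')
      default_field = name
  if extensible and default_field is None:
    raise ValueError(
        f'Extensible union {union_name} must specify a [Default] field')
  return default_field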
for interface in module.interfaces:
interface.methods = list(
@@ -829,8 +1218,8 @@ def _Module(tree, path, imports):
all_defined_kinds.values())
imported_kind_specs = set(all_referenced_kinds.keys()).difference(
set(all_defined_kinds.keys()))
- module.imported_kinds = dict(
- (spec, all_referenced_kinds[spec]) for spec in imported_kind_specs)
+ module.imported_kinds = OrderedDict((spec, all_referenced_kinds[spec])
+ for spec in sorted(imported_kind_specs))
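Sorting the specs before building the mapping makes serialized modules independent of set iteration order; a minimal sketch:

from collections import OrderedDict

referenced = {'x:ns.B': 'kind B', 'x:ns.A': 'kind A'}
imported = OrderedDict((spec, referenced[spec]) for spec in sorted(referenced))
assert list(imported) == ['x:ns.A', 'x:ns.B']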
generator.AddComputedData(module)
for iface in module.interfaces:
@@ -847,6 +1236,9 @@ def _Module(tree, path, imports):
if kind.stable:
_AssertTypeIsStable(kind)
+ for kind in module.structs:
+ _AssertStructIsValid(kind)
+
return module
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
new file mode 100644
index 00000000..b4fea924
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
@@ -0,0 +1,141 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from mojom.generate import module as mojom
+from mojom.generate import translate
+from mojom.parse import ast
+
+class TranslateTest(unittest.TestCase):
+  """Tests the |translate| module."""
+
+ def testSimpleArray(self):
+ """Tests a simple int32[]."""
+ # pylint: disable=W0212
+    self.assertEqual(translate._MapKind("int32[]"), "a:i32")
+
+ def testAssociativeArray(self):
+ """Tests a simple uint8{string}."""
+ # pylint: disable=W0212
+    self.assertEqual(translate._MapKind("uint8{string}"), "m[s][u8]")
+
+ def testLeftToRightAssociativeArray(self):
+    """Makes sure the value type of an associative array is parsed as the
+    full type to its left (uint8[]{string} maps string keys to uint8 arrays)."""
+ # pylint: disable=W0212
+    self.assertEqual(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")
+
+ def testTranslateSimpleUnions(self):
+ """Makes sure that a simple union is translated correctly."""
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "SomeUnion", None,
+ ast.UnionBody([
+ ast.UnionField("a", None, None, "int32"),
+ ast.UnionField("b", None, None, "string")
+ ]))
+ ])
+
+ translation = translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertEqual(1, len(translation.unions))
+
+ union = translation.unions[0]
+ self.assertTrue(isinstance(union, mojom.Union))
+ self.assertEqual("SomeUnion", union.mojom_name)
+ self.assertEqual(2, len(union.fields))
+ self.assertEqual("a", union.fields[0].mojom_name)
+ self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
+ self.assertEqual("b", union.fields[1].mojom_name)
+ self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)
+
+ def testMapKindRaisesWithDuplicate(self):
+    """Verifies that _ElemsOfType() raises when passed two definitions with
+    the same name."""
+ methods = [
+ ast.Method('dup', None, None, ast.ParameterList(), None),
+ ast.Method('dup', None, None, ast.ParameterList(), None)
+ ]
+ with self.assertRaises(Exception):
+ translate._ElemsOfType(methods, ast.Method, 'scope')
+
+ def testAssociatedKinds(self):
+ """Tests type spec translation of associated interfaces and requests."""
+ # pylint: disable=W0212
+    self.assertEqual(
+        translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
+    self.assertEqual(translate._MapKind("rca<SomeInterface>?"),
+                     "?rca:x:SomeInterface")
+
+ def testSelfRecursiveUnions(self):
+ """Verifies _UnionField() raises when a union is self-recursive."""
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union("SomeUnion", None,
+ ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion")]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "SomeUnion", None,
+ ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion?")]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ def testDuplicateAttributesException(self):
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Union(
+ "FakeUnion",
+ ast.AttributeList([
+ ast.Attribute("key1", "value"),
+ ast.Attribute("key1", "value")
+ ]),
+ ast.UnionBody([
+ ast.UnionField("a", None, None, "int32"),
+ ast.UnionField("b", None, None, "string")
+ ]))
+ ])
+ with self.assertRaises(Exception):
+ translate.OrderedModule(tree, "mojom_tree", [])
+
+ def testEnumWithReservedValues(self):
+ """Verifies that assigning reserved values to enumerators fails."""
+ # -128 is reserved for the empty representation in WTF::HashTraits.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kReserved', None, '-128'),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
+
+ # -127 is reserved for the deleted representation in WTF::HashTraits.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kReserved', None, '-127'),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
+
+ # Implicitly assigning a reserved value should also fail.
+ tree = ast.Mojom(None, ast.ImportList(), [
+ ast.Enum(
+ "MyEnum", None,
+ ast.EnumValueList([
+ ast.EnumValue('kNotReserved', None, '-129'),
+ ast.EnumValue('kImplicitlyReserved', None, None),
+ ]))
+ ])
+ with self.assertRaises(Exception) as context:
+ translate.OrderedModule(tree, "mojom_tree", [])
+ self.assertIn("reserved for WTF::HashTrait", str(context.exception))
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
index 1f0db200..aae9cdb6 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
@@ -1,4 +1,4 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Node classes for the AST for a Mojo IDL file."""
@@ -8,17 +8,14 @@
# and lineno). You may also define __repr__() to help with analyzing test
# failures, especially for more complex types.
+import os.path
-import sys
+# Instance of 'NodeListBase' has no '_list_item_type' member (no-member)
+# pylint: disable=no-member
-def _IsStrOrUnicode(x):
- if sys.version_info[0] < 3:
- return isinstance(x, (unicode, str))
- return isinstance(x, str)
-
-class NodeBase(object):
+class NodeBase:
"""Base class for nodes in the AST."""
def __init__(self, filename=None, lineno=None):
@@ -43,7 +40,7 @@ class NodeListBase(NodeBase):
classes, in a tuple) of the members of the list.)"""
def __init__(self, item_or_items=None, **kwargs):
- super(NodeListBase, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.items = []
if item_or_items is None:
pass
@@ -62,7 +59,7 @@ class NodeListBase(NodeBase):
return self.items.__iter__()
def __eq__(self, other):
- return super(NodeListBase, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.items == other.items
# Implement this so that on failure, we get slightly more sensible output.
@@ -96,7 +93,7 @@ class Definition(NodeBase):
include parameter definitions.) This class is meant to be subclassed."""
def __init__(self, mojom_name, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
NodeBase.__init__(self, **kwargs)
self.mojom_name = mojom_name
@@ -108,13 +105,13 @@ class Attribute(NodeBase):
"""Represents an attribute."""
def __init__(self, key, value, **kwargs):
- assert _IsStrOrUnicode(key)
- super(Attribute, self).__init__(**kwargs)
+ assert isinstance(key, str)
+ super().__init__(**kwargs)
self.key = key
self.value = value
def __eq__(self, other):
- return super(Attribute, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.key == other.key and \
self.value == other.value
@@ -131,17 +128,17 @@ class Const(Definition):
def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
# The typename is currently passed through as a string.
- assert _IsStrOrUnicode(typename)
+ assert isinstance(typename, str)
# The value is either a literal (currently passed through as a string) or a
# "wrapped identifier".
- assert _IsStrOrUnicode or isinstance(value, tuple)
- super(Const, self).__init__(mojom_name, **kwargs)
+ assert isinstance(value, (tuple, str))
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.typename = typename
self.value = value
def __eq__(self, other):
- return super(Const, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.typename == other.typename and \
self.value == other.value
@@ -153,12 +150,12 @@ class Enum(Definition):
def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
- super(Enum, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.enum_value_list = enum_value_list
def __eq__(self, other):
- return super(Enum, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.enum_value_list == other.enum_value_list
@@ -170,13 +167,13 @@ class EnumValue(Definition):
# The optional value is either an int (which is current a string) or a
# "wrapped identifier".
assert attribute_list is None or isinstance(attribute_list, AttributeList)
- assert value is None or _IsStrOrUnicode(value) or isinstance(value, tuple)
- super(EnumValue, self).__init__(mojom_name, **kwargs)
+ assert value is None or isinstance(value, (tuple, str))
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.value = value
def __eq__(self, other):
- return super(EnumValue, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.value == other.value
@@ -188,18 +185,47 @@ class EnumValueList(NodeListBase):
_list_item_type = EnumValue
+class Feature(Definition):
+ """Represents a runtime feature definition."""
+ def __init__(self, mojom_name, attribute_list, body, **kwargs):
+ assert attribute_list is None or isinstance(attribute_list, AttributeList)
+ assert isinstance(body, FeatureBody) or body is None
+ super().__init__(mojom_name, **kwargs)
+ self.attribute_list = attribute_list
+ self.body = body
+
+ def __eq__(self, other):
+ return super().__eq__(other) and \
+ self.attribute_list == other.attribute_list and \
+ self.body == other.body
+
+ def __repr__(self):
+ return "Feature(mojom_name = %s, attribute_list = %s, body = %s)" % (
+ self.mojom_name, self.attribute_list, self.body)
+
+
+# This needs to be declared after `Const`.
+class FeatureBody(NodeListBase):
+ """Represents the body of (i.e., list of definitions inside) a feature."""
+
+ # Features are compile time helpers so all fields are initializers/consts
+ # for the underlying platform feature type.
+  _list_item_type = Const
+
+
class Import(NodeBase):
"""Represents an import statement."""
def __init__(self, attribute_list, import_filename, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
- assert _IsStrOrUnicode(import_filename)
- super(Import, self).__init__(**kwargs)
+ assert isinstance(import_filename, str)
+ super().__init__(**kwargs)
self.attribute_list = attribute_list
- self.import_filename = import_filename
+ # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
+ self.import_filename = os.path.normpath(import_filename).replace('\\', '/')
def __eq__(self, other):
- return super(Import, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.import_filename == other.import_filename
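The normalization above keeps import paths comparable across hosts; a small sketch of the behavior:

import os.path

def normalize_import(filename):
  # On Windows, normpath() rewrites '/' to '\\', so the replace() folds
  # everything back to forward slashes.
  return os.path.normpath(filename).replace('\\', '/')

assert normalize_import('a/./b/../c.mojom') == 'a/c.mojom'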
@@ -216,12 +242,12 @@ class Interface(Definition):
def __init__(self, mojom_name, attribute_list, body, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert isinstance(body, InterfaceBody)
- super(Interface, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.body = body
def __eq__(self, other):
- return super(Interface, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.body == other.body
@@ -236,14 +262,14 @@ class Method(Definition):
assert isinstance(parameter_list, ParameterList)
assert response_parameter_list is None or \
isinstance(response_parameter_list, ParameterList)
- super(Method, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.ordinal = ordinal
self.parameter_list = parameter_list
self.response_parameter_list = response_parameter_list
def __eq__(self, other):
- return super(Method, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
self.parameter_list == other.parameter_list and \
@@ -264,12 +290,12 @@ class Module(NodeBase):
# |mojom_namespace| is either none or a "wrapped identifier".
assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
- super(Module, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.mojom_namespace = mojom_namespace
self.attribute_list = attribute_list
def __eq__(self, other):
- return super(Module, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.mojom_namespace == other.mojom_namespace and \
self.attribute_list == other.attribute_list
@@ -281,13 +307,13 @@ class Mojom(NodeBase):
assert module is None or isinstance(module, Module)
assert isinstance(import_list, ImportList)
assert isinstance(definition_list, list)
- super(Mojom, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.module = module
self.import_list = import_list
self.definition_list = definition_list
def __eq__(self, other):
- return super(Mojom, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.module == other.module and \
self.import_list == other.import_list and \
self.definition_list == other.definition_list
@@ -302,11 +328,11 @@ class Ordinal(NodeBase):
def __init__(self, value, **kwargs):
assert isinstance(value, int)
- super(Ordinal, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.value = value
def __eq__(self, other):
- return super(Ordinal, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.value == other.value
@@ -314,18 +340,18 @@ class Parameter(NodeBase):
"""Represents a method request or response parameter."""
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert ordinal is None or isinstance(ordinal, Ordinal)
- assert _IsStrOrUnicode(typename)
- super(Parameter, self).__init__(**kwargs)
+ assert isinstance(typename, str)
+ super().__init__(**kwargs)
self.mojom_name = mojom_name
self.attribute_list = attribute_list
self.ordinal = ordinal
self.typename = typename
def __eq__(self, other):
- return super(Parameter, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.mojom_name == other.mojom_name and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
@@ -344,42 +370,51 @@ class Struct(Definition):
def __init__(self, mojom_name, attribute_list, body, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert isinstance(body, StructBody) or body is None
- super(Struct, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.body = body
def __eq__(self, other):
- return super(Struct, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.body == other.body
+ def __repr__(self):
+ return "Struct(mojom_name = %s, attribute_list = %s, body = %s)" % (
+ self.mojom_name, self.attribute_list, self.body)
+
class StructField(Definition):
"""Represents a struct field definition."""
def __init__(self, mojom_name, attribute_list, ordinal, typename,
default_value, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert ordinal is None or isinstance(ordinal, Ordinal)
- assert _IsStrOrUnicode(typename)
+ assert isinstance(typename, str)
# The optional default value is currently either a value as a string or a
# "wrapped identifier".
- assert default_value is None or _IsStrOrUnicode(default_value) or \
- isinstance(default_value, tuple)
- super(StructField, self).__init__(mojom_name, **kwargs)
+ assert default_value is None or isinstance(default_value, (str, tuple))
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.ordinal = ordinal
self.typename = typename
self.default_value = default_value
def __eq__(self, other):
- return super(StructField, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
self.typename == other.typename and \
self.default_value == other.default_value
+ def __repr__(self):
+ return ("StructField(mojom_name = %s, attribute_list = %s, ordinal = %s, "
+            "typename = %s, default_value = %s)") % (
+ self.mojom_name, self.attribute_list, self.ordinal,
+ self.typename, self.default_value)
+
# This needs to be declared after |StructField|.
class StructBody(NodeListBase):
@@ -394,29 +429,29 @@ class Union(Definition):
def __init__(self, mojom_name, attribute_list, body, **kwargs):
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert isinstance(body, UnionBody)
- super(Union, self).__init__(mojom_name, **kwargs)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.body = body
def __eq__(self, other):
- return super(Union, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.body == other.body
class UnionField(Definition):
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
- assert _IsStrOrUnicode(mojom_name)
+ assert isinstance(mojom_name, str)
assert attribute_list is None or isinstance(attribute_list, AttributeList)
assert ordinal is None or isinstance(ordinal, Ordinal)
- assert _IsStrOrUnicode(typename)
- super(UnionField, self).__init__(mojom_name, **kwargs)
+ assert isinstance(typename, str)
+ super().__init__(mojom_name, **kwargs)
self.attribute_list = attribute_list
self.ordinal = ordinal
self.typename = typename
def __eq__(self, other):
- return super(UnionField, self).__eq__(other) and \
+ return super().__eq__(other) and \
self.attribute_list == other.attribute_list and \
self.ordinal == other.ordinal and \
self.typename == other.typename
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
index 62798631..b289f7b1 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
@@ -1,32 +1,26 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
-import os.path
-import sys
import unittest
from mojom.parse import ast
-
class _TestNode(ast.NodeBase):
"""Node type for tests."""
def __init__(self, value, **kwargs):
- super(_TestNode, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.value = value
def __eq__(self, other):
- return super(_TestNode, self).__eq__(other) and self.value == other.value
-
+ return super().__eq__(other) and self.value == other.value
class _TestNodeList(ast.NodeListBase):
"""Node list type for tests."""
_list_item_type = _TestNode
-
class ASTTest(unittest.TestCase):
"""Tests various AST classes."""
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
index 3cb73c5d..9687edbf 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helpers for processing conditionally enabled features in a mojom."""
@@ -17,8 +17,10 @@ class EnableIfError(Error):
def _IsEnabled(definition, enabled_features):
"""Returns true if a definition is enabled.
- A definition is enabled if it has no EnableIf attribute, or if the value of
- the EnableIf attribute is in enabled_features.
+  A definition is enabled unless it has an EnableIf attribute whose value is
+  not in enabled_features, or an EnableIfNot attribute whose value is in
+  enabled_features.
"""
if not hasattr(definition, "attribute_list"):
return True
@@ -27,17 +29,19 @@ def _IsEnabled(definition, enabled_features):
already_defined = False
for a in definition.attribute_list:
- if a.key == 'EnableIf':
+ if a.key == 'EnableIf' or a.key == 'EnableIfNot':
if already_defined:
raise EnableIfError(
definition.filename,
- "EnableIf attribute may only be defined once per field.",
+ "EnableIf/EnableIfNot attribute may only be set once per field.",
definition.lineno)
already_defined = True
for attribute in definition.attribute_list:
if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
return False
+ if attribute.key == 'EnableIfNot' and attribute.value in enabled_features:
+ return False
return True
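The retention rule reduces to the following sketch, with attributes modeled as plain (key, value) pairs:

def is_enabled(attributes, enabled_features):
  for key, value in attributes:
    if key == 'EnableIf' and value not in enabled_features:
      return False
    if key == 'EnableIfNot' and value in enabled_features:
      return False
  return True

assert is_enabled([], {'red'})
assert is_enabled([('EnableIf', 'red')], {'red'})
assert not is_enabled([('EnableIfNot', 'red')], {'red'})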
@@ -56,15 +60,12 @@ def _FilterDefinition(definition, enabled_features):
"""Filters definitions with a body."""
if isinstance(definition, ast.Enum):
_FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
- elif isinstance(definition, ast.Interface):
- _FilterDisabledFromNodeList(definition.body, enabled_features)
elif isinstance(definition, ast.Method):
_FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
_FilterDisabledFromNodeList(definition.response_parameter_list,
enabled_features)
- elif isinstance(definition, ast.Struct):
- _FilterDisabledFromNodeList(definition.body, enabled_features)
- elif isinstance(definition, ast.Union):
+ elif isinstance(definition,
+ (ast.Interface, ast.Struct, ast.Union, ast.Feature)):
_FilterDisabledFromNodeList(definition.body, enabled_features)
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
index aa609be7..cca1764b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
@@ -1,13 +1,12 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
+import importlib.util
import os
import sys
import unittest
-
def _GetDirAbove(dirname):
"""Returns the directory "above" this file containing |dirname| (which must
also be "above" this file)."""
@@ -18,9 +17,8 @@ def _GetDirAbove(dirname):
if tail == dirname:
return path
-
try:
- imp.find_module('mojom')
+ importlib.util.find_spec("mojom")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
import mojom.parse.ast as ast
@@ -29,7 +27,6 @@ import mojom.parse.parser as parser
ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
-
class ConditionalFeaturesTest(unittest.TestCase):
"""Tests |mojom.parse.conditional_features|."""
@@ -55,6 +52,48 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(const_source, expected_source)
+ def testFilterIfNotConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIfNot=blue]
+ const int kMyConst1 = 1;
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIf=blue]
+ const int kMyConst3 = 3;
+ [EnableIfNot=blue]
+ const int kMyConst4 = 4;
+ [EnableIfNot=purple]
+ const int kMyConst5 = 5;
+ """
+ expected_source = """
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIf=blue]
+ const int kMyConst3 = 3;
+ [EnableIfNot=purple]
+ const int kMyConst5 = 5;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
+ def testFilterIfNotMultipleConst(self):
+ """Test that Consts are correctly filtered."""
+ const_source = """
+ [EnableIfNot=blue]
+ const int kMyConst1 = 1;
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIfNot=orange]
+ const int kMyConst3 = 3;
+ """
+ expected_source = """
+ [EnableIfNot=orange]
+ const double kMyConst2 = 2;
+ [EnableIfNot=orange]
+ const int kMyConst3 = 3;
+ """
+ self.parseAndAssertEqual(const_source, expected_source)
+
def testFilterEnum(self):
"""Test that EnumValues are correctly filtered from an Enum."""
enum_source = """
@@ -91,6 +130,24 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(import_source, expected_source)
+ def testFilterIfNotImport(self):
+ """Test that imports are correctly filtered from a Mojom."""
+ import_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ [EnableIfNot=purple]
+ import "bar.mojom";
+ [EnableIfNot=green]
+ import "baz.mojom";
+ """
+ expected_source = """
+ [EnableIf=blue]
+ import "foo.mojom";
+ [EnableIfNot=purple]
+ import "bar.mojom";
+ """
+ self.parseAndAssertEqual(import_source, expected_source)
+
def testFilterInterface(self):
"""Test that definitions are correctly filtered from an Interface."""
interface_source = """
@@ -175,6 +232,50 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(struct_source, expected_source)
+ def testFilterIfNotStruct(self):
+ """Test that definitions are correctly filtered from a Struct."""
+ struct_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ [EnableIfNot=red]
+ VALUE2,
+ };
+ [EnableIfNot=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIfNot=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ [EnableIfNot=red]
+ double f;
+ };
+ """
+ expected_source = """
+ struct MyStruct {
+ [EnableIf=blue]
+ enum MyEnum {
+ VALUE1,
+ };
+ [EnableIfNot=yellow]
+ const double kMyConst = 1.23;
+ [EnableIf=green]
+ int32 a;
+ double b;
+ [EnableIfNot=purple]
+ int32 c;
+ [EnableIf=blue]
+ double d;
+ int32 e;
+ };
+ """
+ self.parseAndAssertEqual(struct_source, expected_source)
+
def testFilterUnion(self):
"""Test that UnionFields are correctly filtered from a Union."""
union_source = """
@@ -216,6 +317,25 @@ class ConditionalFeaturesTest(unittest.TestCase):
"""
self.parseAndAssertEqual(mojom_source, expected_source)
+ def testFeaturesWithEnableIf(self):
+ mojom_source = """
+ feature Foo {
+ const string name = "FooFeature";
+ [EnableIf=red]
+ const bool default_state = false;
+ [EnableIf=yellow]
+ const bool default_state = true;
+ };
+ """
+ expected_source = """
+ feature Foo {
+ const string name = "FooFeature";
+ [EnableIf=red]
+ const bool default_state = false;
+ };
+ """
+ self.parseAndAssertEqual(mojom_source, expected_source)
+
def testMultipleEnableIfs(self):
source = """
enum Foo {
@@ -228,6 +348,29 @@ class ConditionalFeaturesTest(unittest.TestCase):
conditional_features.RemoveDisabledDefinitions,
definition, ENABLED_FEATURES)
+  def testEnableIfWithEnableIfNot(self):
+ source = """
+ enum Foo {
+ [EnableIf=red,EnableIfNot=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
+
+  def testMultipleEnableIfNots(self):
+ source = """
+ enum Foo {
+ [EnableIfNot=red,EnableIfNot=yellow]
+ kBarValue = 5,
+ };
+ """
+ definition = parser.Parse(source, "my_file.mojom")
+ self.assertRaises(conditional_features.EnableIfError,
+ conditional_features.RemoveDisabledDefinitions,
+ definition, ENABLED_FEATURES)
if __name__ == '__main__':
unittest.main()
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
index 3e084bbf..00136a8b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
@@ -1,8 +1,7 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
import os.path
import sys
@@ -22,7 +21,7 @@ class LexError(Error):
# We have methods which look like they could be functions:
# pylint: disable=R0201
-class Lexer(object):
+class Lexer:
def __init__(self, filename):
self.filename = filename
@@ -56,6 +55,7 @@ class Lexer(object):
'PENDING_RECEIVER',
'PENDING_ASSOCIATED_REMOTE',
'PENDING_ASSOCIATED_RECEIVER',
+ 'FEATURE',
)
keyword_map = {}
@@ -81,7 +81,6 @@ class Lexer(object):
# Operators
'MINUS',
'PLUS',
- 'AMP',
'QSTN',
# Assignment
@@ -168,7 +167,6 @@ class Lexer(object):
# Operators
t_MINUS = r'-'
t_PLUS = r'\+'
- t_AMP = r'&'
t_QSTN = r'\?'
# =
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
index eadc6587..bc9f8354 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
@@ -1,13 +1,12 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
+import importlib.util
import os.path
import sys
import unittest
-
def _GetDirAbove(dirname):
"""Returns the directory "above" this file containing |dirname| (which must
also be "above" this file)."""
@@ -18,17 +17,15 @@ def _GetDirAbove(dirname):
if tail == dirname:
return path
-
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex
try:
- imp.find_module("mojom")
+ importlib.util.find_spec("mojom")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer
-
# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
@@ -146,7 +143,6 @@ class LexerTest(unittest.TestCase):
self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
self.assertEquals(
self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
- self.assertEquals(self._SingleTokenForInput("&"), _MakeLexToken("AMP", "&"))
self.assertEquals(
self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
self.assertEquals(
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
index b3b803d6..1dffd98b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
@@ -1,8 +1,11 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a syntax tree from a Mojo IDL file."""
+# Breaking parser stanzas is unhelpful so allow longer lines.
+# pylint: disable=line-too-long
+
import os.path
import sys
@@ -33,7 +36,7 @@ class ParseError(Error):
# We have methods which look like they could be functions:
# pylint: disable=R0201
-class Parser(object):
+class Parser:
def __init__(self, lexer, source, filename):
self.tokens = lexer.tokens
self.source = source
@@ -111,7 +114,8 @@ class Parser(object):
| union
| interface
| enum
- | const"""
+ | const
+ | feature"""
p[0] = p[1]
def p_attribute_section_1(self, p):
@@ -140,12 +144,19 @@ class Parser(object):
p[0].Append(p[3])
def p_attribute_1(self, p):
- """attribute : NAME EQUALS evaled_literal
- | NAME EQUALS NAME"""
- p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
+ """attribute : name_wrapped EQUALS identifier_wrapped"""
+ p[0] = ast.Attribute(p[1],
+ p[3][1],
+ filename=self.filename,
+ lineno=p.lineno(1))
def p_attribute_2(self, p):
- """attribute : NAME"""
+ """attribute : name_wrapped EQUALS evaled_literal
+ | name_wrapped EQUALS name_wrapped"""
+ p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
+
+ def p_attribute_3(self, p):
+ """attribute : name_wrapped"""
p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))
def p_evaled_literal(self, p):
@@ -161,11 +172,11 @@ class Parser(object):
p[0] = eval(p[1])
def p_struct_1(self, p):
- """struct : attribute_section STRUCT NAME LBRACE struct_body RBRACE SEMI"""
+ """struct : attribute_section STRUCT name_wrapped LBRACE struct_body RBRACE SEMI"""
p[0] = ast.Struct(p[3], p[1], p[5])
def p_struct_2(self, p):
- """struct : attribute_section STRUCT NAME SEMI"""
+ """struct : attribute_section STRUCT name_wrapped SEMI"""
p[0] = ast.Struct(p[3], p[1], None)
def p_struct_body_1(self, p):
@@ -180,11 +191,24 @@ class Parser(object):
p[0].Append(p[2])
def p_struct_field(self, p):
- """struct_field : attribute_section typename NAME ordinal default SEMI"""
+ """struct_field : attribute_section typename name_wrapped ordinal default SEMI"""
p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])
+ def p_feature(self, p):
+ """feature : attribute_section FEATURE NAME LBRACE feature_body RBRACE SEMI"""
+ p[0] = ast.Feature(p[3], p[1], p[5])
+
+ def p_feature_body_1(self, p):
+ """feature_body : """
+ p[0] = ast.FeatureBody()
+
+ def p_feature_body_2(self, p):
+ """feature_body : feature_body const"""
+ p[0] = p[1]
+ p[0].Append(p[2])
+
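Assuming the mojom package is importable, the grammar added above accepts a const-only feature block; a sketch:

from mojom.parse import parser

source = """
feature Foo {
  const string name = "FooFeature";
  const bool default_state = false;
};
"""
tree = parser.Parse(source, 'example.mojom')
# tree.definition_list holds one ast.Feature node.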
def p_union(self, p):
- """union : attribute_section UNION NAME LBRACE union_body RBRACE SEMI"""
+ """union : attribute_section UNION name_wrapped LBRACE union_body RBRACE SEMI"""
p[0] = ast.Union(p[3], p[1], p[5])
def p_union_body_1(self, p):
@@ -197,7 +221,7 @@ class Parser(object):
p[1].Append(p[2])
def p_union_field(self, p):
- """union_field : attribute_section typename NAME ordinal SEMI"""
+ """union_field : attribute_section typename name_wrapped ordinal SEMI"""
p[0] = ast.UnionField(p[3], p[1], p[4], p[2])
def p_default_1(self, p):
@@ -209,8 +233,7 @@ class Parser(object):
p[0] = p[2]
def p_interface(self, p):
- """interface : attribute_section INTERFACE NAME LBRACE interface_body \
- RBRACE SEMI"""
+ """interface : attribute_section INTERFACE name_wrapped LBRACE interface_body RBRACE SEMI"""
p[0] = ast.Interface(p[3], p[1], p[5])
def p_interface_body_1(self, p):
@@ -233,8 +256,7 @@ class Parser(object):
p[0] = p[3]
def p_method(self, p):
- """method : attribute_section NAME ordinal LPAREN parameter_list RPAREN \
- response SEMI"""
+ """method : attribute_section name_wrapped ordinal LPAREN parameter_list RPAREN response SEMI"""
p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])
def p_parameter_list_1(self, p):
@@ -255,7 +277,7 @@ class Parser(object):
p[0].Append(p[3])
def p_parameter(self, p):
- """parameter : attribute_section typename NAME ordinal"""
+ """parameter : attribute_section typename name_wrapped ordinal"""
p[0] = ast.Parameter(
p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))
@@ -271,8 +293,7 @@ class Parser(object):
"""nonnullable_typename : basictypename
| array
| fixed_array
- | associative_array
- | interfacerequest"""
+ | associative_array"""
p[0] = p[1]
def p_basictypename(self, p):
@@ -297,18 +318,16 @@ class Parser(object):
p[0] = "rcv<%s>" % p[3]
def p_associatedremotetype(self, p):
- """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier \
- RANGLE"""
+ """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier RANGLE"""
p[0] = "rma<%s>" % p[3]
def p_associatedreceivertype(self, p):
- """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier \
- RANGLE"""
+ """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier RANGLE"""
p[0] = "rca<%s>" % p[3]
def p_handletype(self, p):
"""handletype : HANDLE
- | HANDLE LANGLE NAME RANGLE"""
+ | HANDLE LANGLE name_wrapped RANGLE"""
if len(p) == 2:
p[0] = p[1]
else:
@@ -342,14 +361,6 @@ class Parser(object):
"""associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
p[0] = p[5] + "{" + p[3] + "}"
- def p_interfacerequest(self, p):
- """interfacerequest : identifier AMP
- | ASSOCIATED identifier AMP"""
- if len(p) == 3:
- p[0] = p[1] + "&"
- else:
- p[0] = "asso<" + p[2] + "&>"
-
def p_ordinal_1(self, p):
"""ordinal : """
p[0] = None
@@ -366,15 +377,14 @@ class Parser(object):
p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
def p_enum_1(self, p):
- """enum : attribute_section ENUM NAME LBRACE enum_value_list \
- RBRACE SEMI
- | attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
- COMMA RBRACE SEMI"""
+ """enum : attribute_section ENUM name_wrapped LBRACE enum_value_list RBRACE SEMI
+ | attribute_section ENUM name_wrapped LBRACE \
+ nonempty_enum_value_list COMMA RBRACE SEMI"""
p[0] = ast.Enum(
p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))
def p_enum_2(self, p):
- """enum : attribute_section ENUM NAME SEMI"""
+ """enum : attribute_section ENUM name_wrapped SEMI"""
p[0] = ast.Enum(
p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))
@@ -396,9 +406,9 @@ class Parser(object):
p[0].Append(p[3])
def p_enum_value(self, p):
- """enum_value : attribute_section NAME
- | attribute_section NAME EQUALS int
- | attribute_section NAME EQUALS identifier_wrapped"""
+ """enum_value : attribute_section name_wrapped
+ | attribute_section name_wrapped EQUALS int
+ | attribute_section name_wrapped EQUALS identifier_wrapped"""
p[0] = ast.EnumValue(
p[2],
p[1],
@@ -407,7 +417,7 @@ class Parser(object):
lineno=p.lineno(2))
def p_const(self, p):
- """const : attribute_section CONST typename NAME EQUALS constant SEMI"""
+ """const : attribute_section CONST typename name_wrapped EQUALS constant SEMI"""
p[0] = ast.Const(p[4], p[1], p[3], p[6])
def p_constant(self, p):
@@ -422,10 +432,16 @@ class Parser(object):
# TODO(vtl): Make this produce a "wrapped" identifier (probably as an
# |ast.Identifier|, to be added) and get rid of identifier_wrapped.
def p_identifier(self, p):
- """identifier : NAME
- | NAME DOT identifier"""
+ """identifier : name_wrapped
+ | name_wrapped DOT identifier"""
p[0] = ''.join(p[1:])
+  # Allow 'feature' to be a name literal, not just a keyword.
+ def p_name_wrapped(self, p):
+ """name_wrapped : NAME
+ | FEATURE"""
+ p[0] = p[1]
+
def p_literal(self, p):
"""literal : int
| float
@@ -458,6 +474,12 @@ class Parser(object):
# TODO(vtl): Can we figure out what's missing?
raise ParseError(self.filename, "Unexpected end of file")
+ if e.value == 'feature':
+ raise ParseError(self.filename,
+ "`feature` is reserved for a future mojom keyword",
+ lineno=e.lineno,
+ snippet=self._GetSnippet(e.lineno))
+
raise ParseError(
self.filename,
"Unexpected %r:" % e.value,
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
index 6d6b7153..0a26307b 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
@@ -1,17 +1,13 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
+# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import imp
-import os.path
-import sys
import unittest
from mojom.parse import ast
from mojom.parse import lexer
from mojom.parse import parser
-
class ParserTest(unittest.TestCase):
"""Tests |parser.Parse()|."""
@@ -1086,7 +1082,7 @@ class ParserTest(unittest.TestCase):
handle<data_pipe_producer>? k;
handle<message_pipe>? l;
handle<shared_buffer>? m;
- some_interface&? n;
+ pending_receiver<some_interface>? n;
handle<platform>? o;
};
"""
@@ -1110,7 +1106,7 @@ class ParserTest(unittest.TestCase):
ast.StructField('l', None, None, 'handle<message_pipe>?', None),
ast.StructField('m', None, None, 'handle<shared_buffer>?',
None),
- ast.StructField('n', None, None, 'some_interface&?', None),
+ ast.StructField('n', None, None, 'rcv<some_interface>?', None),
ast.StructField('o', None, None, 'handle<platform>?', None)
]))
])
@@ -1138,16 +1134,6 @@ class ParserTest(unittest.TestCase):
r" *handle\?<data_pipe_consumer> a;$"):
parser.Parse(source2, "my_file.mojom")
- source3 = """\
- struct MyStruct {
- some_interface?& a;
- };
- """
- with self.assertRaisesRegexp(
- parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '&':\n"
- r" *some_interface\?& a;$"):
- parser.Parse(source3, "my_file.mojom")
-
def testSimpleUnion(self):
"""Tests a simple .mojom source that just defines a union."""
source = """\
@@ -1317,9 +1303,9 @@ class ParserTest(unittest.TestCase):
source1 = """\
struct MyStruct {
associated MyInterface a;
- associated MyInterface& b;
+ pending_associated_receiver<MyInterface> b;
associated MyInterface? c;
- associated MyInterface&? d;
+ pending_associated_receiver<MyInterface>? d;
};
"""
expected1 = ast.Mojom(None, ast.ImportList(), [
@@ -1327,16 +1313,16 @@ class ParserTest(unittest.TestCase):
'MyStruct', None,
ast.StructBody([
ast.StructField('a', None, None, 'asso<MyInterface>', None),
- ast.StructField('b', None, None, 'asso<MyInterface&>', None),
+ ast.StructField('b', None, None, 'rca<MyInterface>', None),
ast.StructField('c', None, None, 'asso<MyInterface>?', None),
- ast.StructField('d', None, None, 'asso<MyInterface&>?', None)
+ ast.StructField('d', None, None, 'rca<MyInterface>?', None)
]))
])
self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
source2 = """\
interface MyInterface {
- MyMethod(associated A a) =>(associated B& b);
+ MyMethod(associated A a) =>(pending_associated_receiver<B> b);
};"""
expected2 = ast.Mojom(None, ast.ImportList(), [
ast.Interface(
@@ -1344,10 +1330,10 @@ class ParserTest(unittest.TestCase):
ast.InterfaceBody(
ast.Method(
'MyMethod', None, None,
- ast.ParameterList(
- ast.Parameter('a', None, None, 'asso<A>')),
- ast.ParameterList(
- ast.Parameter('b', None, None, 'asso<B&>')))))
+ ast.ParameterList(ast.Parameter('a', None, None,
+ 'asso<A>')),
+ ast.ParameterList(ast.Parameter('b', None, None,
+ 'rca<B>')))))
])
self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
@@ -1385,6 +1371,5 @@ class ParserTest(unittest.TestCase):
r" *associated\? MyInterface& a;$"):
parser.Parse(source3, "my_file.mojom")
-
if __name__ == "__main__":
unittest.main()
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
index eb90c825..9693090e 100755
--- a/utils/ipc/mojo/public/tools/mojom/mojom_parser.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parses mojom IDL files.
@@ -11,6 +11,7 @@ generate usable language bindings.
"""
import argparse
+import builtins
import codecs
import errno
import json
@@ -19,6 +20,7 @@ import multiprocessing
import os
import os.path
import sys
+import traceback
from collections import defaultdict
from mojom.generate import module
@@ -28,16 +30,12 @@ from mojom.parse import conditional_features
# Disable this for easier debugging.
-# In Python 2, subprocesses just hang when exceptions are thrown :(.
-_ENABLE_MULTIPROCESSING = sys.version_info[0] > 2
+_ENABLE_MULTIPROCESSING = True
-if sys.version_info < (3, 4):
- _MULTIPROCESSING_USES_FORK = sys.platform.startswith('linux')
-else:
- # https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
- if __name__ == '__main__' and sys.platform == 'darwin':
- multiprocessing.set_start_method('fork')
- _MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
+# https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
+if __name__ == '__main__' and sys.platform == 'darwin':
+ multiprocessing.set_start_method('fork')
+_MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
def _ResolveRelativeImportPath(path, roots):
@@ -63,7 +61,7 @@ def _ResolveRelativeImportPath(path, roots):
raise ValueError('"%s" does not exist in any of %s' % (path, roots))
-def _RebaseAbsolutePath(path, roots):
+def RebaseAbsolutePath(path, roots):
"""Rewrites an absolute file path as relative to an absolute directory path in
roots.
@@ -139,7 +137,7 @@ def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
# Already done.
return
- for dep_abspath, dep_path in dependencies[mojom_abspath]:
+ for dep_abspath, dep_path in sorted(dependencies[mojom_abspath]):
if dep_abspath not in loaded_modules:
_EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
loaded_modules, module_metadata)
@@ -159,11 +157,19 @@ def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
def collect(metadata_filename):
processed_deps.add(metadata_filename)
+
+ # Paths in the metadata file are relative to the metadata file's dir.
+ metadata_dir = os.path.abspath(os.path.dirname(metadata_filename))
+
+ def to_abs(s):
+ return os.path.normpath(os.path.join(metadata_dir, s))
+
with open(metadata_filename) as f:
metadata = json.load(f)
allowed_imports.update(
- map(os.path.normcase, map(os.path.normpath, metadata['sources'])))
+ [os.path.normcase(to_abs(s)) for s in metadata['sources']])
for dep_metadata in metadata['deps']:
+ dep_metadata = to_abs(dep_metadata)
if dep_metadata not in processed_deps:
collect(dep_metadata)
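Rebasing every path against the metadata file's own directory makes the walk independent of the parser's working directory; a standalone sketch of the traversal:

import json
import os

def collect_allowed_imports(metadata_filename, allowed, processed):
  processed.add(metadata_filename)
  metadata_dir = os.path.abspath(os.path.dirname(metadata_filename))

  def to_abs(s):
    return os.path.normpath(os.path.join(metadata_dir, s))

  with open(metadata_filename) as f:
    metadata = json.load(f)
  allowed.update(os.path.normcase(to_abs(s)) for s in metadata['sources'])
  for dep in map(to_abs, metadata['deps']):
    if dep not in processed:
      collect_allowed_imports(dep, allowed, processed)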
@@ -172,8 +178,7 @@ def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
# multiprocessing helper.
-def _ParseAstHelper(args):
- mojom_abspath, enabled_features = args
+def _ParseAstHelper(mojom_abspath, enabled_features):
with codecs.open(mojom_abspath, encoding='utf-8') as f:
ast = parser.Parse(f.read(), mojom_abspath)
conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
@@ -181,8 +186,7 @@ def _ParseAstHelper(args):
# multiprocessing helper.
-def _SerializeHelper(args):
- mojom_abspath, mojom_path = args
+def _SerializeHelper(mojom_abspath, mojom_path):
module_path = os.path.join(_SerializeHelper.output_root_path,
_GetModuleFilename(mojom_path))
module_dir = os.path.dirname(module_path)
@@ -199,12 +203,33 @@ def _SerializeHelper(args):
_SerializeHelper.loaded_modules[mojom_abspath].Dump(f)
-def _Shard(target_func, args, processes=None):
- args = list(args)
+class _ExceptionWrapper:
+ def __init__(self):
+ # Do not capture exception object to ensure pickling works.
+ self.formatted_trace = traceback.format_exc()
+
+
+class _FuncWrapper:
+ """Marshals exceptions and spreads args."""
+
+ def __init__(self, func):
+ self._func = func
+
+ def __call__(self, args):
+      # multiprocessing does not gracefully handle exceptions.
+ # https://crbug.com/1219044
+ try:
+ return self._func(*args)
+ except: # pylint: disable=bare-except
+ return _ExceptionWrapper()
+
+
+def _Shard(target_func, arg_list, processes=None):
+ arg_list = list(arg_list)
if processes is None:
processes = multiprocessing.cpu_count()
# Seems optimal to have each process perform at least 2 tasks.
- processes = min(processes, len(args) // 2)
+ processes = min(processes, len(arg_list) // 2)
if sys.platform == 'win32':
# TODO(crbug.com/1190269) - we can't use more than 56
@@ -213,13 +238,17 @@ def _Shard(target_func, args, processes=None):
# Don't spin up processes unless there is enough work to merit doing so.
if not _ENABLE_MULTIPROCESSING or processes < 2:
- for result in map(target_func, args):
- yield result
+ for arg_tuple in arg_list:
+ yield target_func(*arg_tuple)
return
pool = multiprocessing.Pool(processes=processes)
try:
- for result in pool.imap_unordered(target_func, args):
+ wrapped_func = _FuncWrapper(target_func)
+ for result in pool.imap_unordered(wrapped_func, arg_list):
+ if isinstance(result, _ExceptionWrapper):
+ sys.stderr.write(result.formatted_trace)
+ sys.exit(1)
yield result
finally:
pool.close()
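The wrapper pattern above is self-contained enough to demonstrate on its own; a runnable sketch:

import multiprocessing
import traceback

class ExceptionWrapper:  # mirrors _ExceptionWrapper
  def __init__(self):
    # Store only the formatted string so the object pickles cleanly.
    self.formatted_trace = traceback.format_exc()

def divide(numerator, denominator):
  return numerator / denominator

def wrapped(args):  # mirrors _FuncWrapper.__call__: spread args, trap errors
  try:
    return divide(*args)
  except Exception:
    return ExceptionWrapper()

if __name__ == '__main__':
  with multiprocessing.Pool(2) as pool:
    for result in pool.imap_unordered(wrapped, [(4, 2), (1, 0)]):
      if isinstance(result, ExceptionWrapper):
        print('worker failed:\n' + result.formatted_trace)
      else:
        print('result:', result)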
@@ -230,6 +259,7 @@ def _Shard(target_func, args, processes=None):
def _ParseMojoms(mojom_files,
input_root_paths,
output_root_path,
+ module_root_paths,
enabled_features,
module_metadata,
allowed_imports=None):
@@ -245,8 +275,10 @@ def _ParseMojoms(mojom_files,
are based on the mojom's relative path, rebased onto this path.
Additionally, the script expects this root to contain already-generated
modules for any transitive dependencies not listed in mojom_files.
+ module_root_paths: A list of absolute filesystem paths which contain
+ already-generated modules for any non-transitive dependencies.
enabled_features: A list of enabled feature names, controlling which AST
- nodes are filtered by [EnableIf] attributes.
+ nodes are filtered by [EnableIf] or [EnableIfNot] attributes.
module_metadata: A list of 2-tuples representing metadata key-value pairs to
attach to each compiled module output.
@@ -262,7 +294,7 @@ def _ParseMojoms(mojom_files,
loaded_modules = {}
input_dependencies = defaultdict(set)
mojom_files_to_parse = dict((os.path.normcase(abs_path),
- _RebaseAbsolutePath(abs_path, input_root_paths))
+ RebaseAbsolutePath(abs_path, input_root_paths))
for abs_path in mojom_files)
abs_paths = dict(
(path, abs_path) for abs_path, path in mojom_files_to_parse.items())
@@ -274,7 +306,7 @@ def _ParseMojoms(mojom_files,
loaded_mojom_asts[mojom_abspath] = ast
logging.info('Processing dependencies')
- for mojom_abspath, ast in loaded_mojom_asts.items():
+ for mojom_abspath, ast in sorted(loaded_mojom_asts.items()):
invalid_imports = []
for imp in ast.import_list:
import_abspath = _ResolveRelativeImportPath(imp.import_filename,
@@ -295,8 +327,8 @@ def _ParseMojoms(mojom_files,
# be parsed and have a module file sitting in a corresponding output
# location.
module_path = _GetModuleFilename(imp.import_filename)
- module_abspath = _ResolveRelativeImportPath(module_path,
- [output_root_path])
+ module_abspath = _ResolveRelativeImportPath(
+ module_path, module_root_paths + [output_root_path])
with open(module_abspath, 'rb') as module_file:
loaded_modules[import_abspath] = module.Module.Load(module_file)
@@ -371,6 +403,15 @@ already present in the provided output root.""")
'ROOT is also searched for existing modules of any transitive imports '
'which were not included in the set of inputs.')
arg_parser.add_argument(
+ '--module-root',
+ default=[],
+ action='append',
+ metavar='ROOT',
+ dest='module_root_paths',
+ help='Adds ROOT to the set of root paths to search for existing modules '
+ 'of non-transitive imports. Provided root paths are always searched in '
+ 'order from longest absolute path to shortest.')
+ arg_parser.add_argument(
'--mojoms',
nargs='+',
dest='mojom_files',
@@ -396,9 +437,9 @@ already present in the provided output root.""")
help='Enables a named feature when parsing the given mojoms. Features '
'are identified by arbitrary string values. Specifying this flag with a '
'given FEATURE name will cause the parser to process any syntax elements '
- 'tagged with an [EnableIf=FEATURE] attribute. If this flag is not '
- 'provided for a given FEATURE, such tagged elements are discarded by the '
- 'parser and will not be present in the compiled output.')
+ 'tagged with an [EnableIf=FEATURE] or [EnableIfNot] attribute. If this '
+ 'flag is not provided for a given FEATURE, such tagged elements are '
+ 'discarded by the parser and will not be present in the compiled output.')
arg_parser.add_argument(
'--check-imports',
dest='build_metadata_filename',
@@ -436,6 +477,7 @@ already present in the provided output root.""")
mojom_files = list(map(os.path.abspath, args.mojom_files))
input_roots = list(map(os.path.abspath, args.input_root_paths))
output_root = os.path.abspath(args.output_root_path)
+ module_roots = list(map(os.path.abspath, args.module_root_paths))
if args.build_metadata_filename:
allowed_imports = _CollectAllowedImportsFromBuildMetadata(
@@ -445,13 +487,16 @@ already present in the provided output root.""")
module_metadata = list(
map(lambda kvp: tuple(kvp.split('=')), args.module_metadata))
- _ParseMojoms(mojom_files, input_roots, output_root, args.enabled_features,
- module_metadata, allowed_imports)
+ _ParseMojoms(mojom_files, input_roots, output_root, module_roots,
+ args.enabled_features, module_metadata, allowed_imports)
logging.info('Finished')
- # Exit without running GC, which can save multiple seconds due the large
- # number of object created.
- os._exit(0)
if __name__ == '__main__':
Run(sys.argv[1:])
+ # Exit without running GC, which can save multiple seconds due to the large
+ # number of objects created. But flushing is necessary as os._exit doesn't do
+ # that.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(0)
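
The flushes added above matter because os._exit() terminates the process immediately, skipping atexit handlers and the interpreter's normal stream cleanup. A minimal standalone sketch of the pitfall (illustrative Python, not part of the patch):

    import os
    import sys

    print('done')  # lands in the stdio buffer, not necessarily on the terminal

    # Without explicit flushes, the buffered line above can be lost, because
    # os._exit() skips the cleanup that a normal exit would perform.
    sys.stdout.flush()
    sys.stderr.flush()
    os._exit(0)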
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
index e213fbfa..f0ee6966 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -20,7 +20,7 @@ class MojomParserTestCase(unittest.TestCase):
resolution, and module serialization and deserialization."""
def __init__(self, method_name):
- super(MojomParserTestCase, self).__init__(method_name)
+ super().__init__(method_name)
self._temp_dir = None
def setUp(self):
@@ -67,7 +67,7 @@ class MojomParserTestCase(unittest.TestCase):
self.ParseMojoms([filename])
m = self.LoadModule(filename)
definitions = {}
- for kinds in (m.enums, m.structs, m.unions, m.interfaces):
+ for kinds in (m.enums, m.structs, m.unions, m.interfaces, m.features):
for kind in kinds:
definitions[kind.mojom_name] = kind
return definitions
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
index a93f34ba..353a2b6e 100644
--- a/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
@@ -1,7 +1,9 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import json
+
from mojom_parser_test_case import MojomParserTestCase
@@ -119,15 +121,22 @@ class MojomParserTest(MojomParserTestCase):
c = 'c.mojom'
c_metadata = 'out/c.build_metadata'
self.WriteFile(a_metadata,
- '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
+ json.dumps({
+ "sources": [self.GetPath(a)],
+ "deps": []
+ }))
self.WriteFile(
b_metadata,
- '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(b),
- self.GetPath(a_metadata)))
+ json.dumps({
+ "sources": [self.GetPath(b)],
+ "deps": [self.GetPath(a_metadata)]
+ }))
self.WriteFile(
c_metadata,
- '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(c),
- self.GetPath(b_metadata)))
+ json.dumps({
+ "sources": [self.GetPath(c)],
+ "deps": [self.GetPath(b_metadata)]
+ }))
self.WriteFile(a, """\
module a;
struct Bar {};""")
@@ -154,9 +163,15 @@ class MojomParserTest(MojomParserTestCase):
b = 'b.mojom'
b_metadata = 'out/b.build_metadata'
self.WriteFile(a_metadata,
- '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
+ json.dumps({
+ "sources": [self.GetPath(a)],
+ "deps": []
+ }))
self.WriteFile(b_metadata,
- '{"sources": ["%s"], "deps": []}\n' % self.GetPath(b))
+ json.dumps({
+ "sources": [self.GetPath(b)],
+ "deps": []
+ }))
self.WriteFile(a, """\
module a;
struct Bar {};""")
diff --git a/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
index d45ec586..d10d69c6 100644
--- a/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
new file mode 100644
index 00000000..6b2525e5
--- /dev/null
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
@@ -0,0 +1,44 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from mojom_parser_test_case import MojomParserTestCase
+
+
+class UnionTest(MojomParserTestCase):
+ """Tests union parsing behavior."""
+
+ def testExtensibleMustHaveDefault(self):
+ """Verifies that extensible unions must have a default field."""
+ mojom = 'foo.mojom'
+ self.WriteFile(mojom, 'module foo; [Extensible] union U { bool x; };')
+ with self.assertRaisesRegexp(Exception, 'must specify a \[Default\]'):
+ self.ParseMojoms([mojom])
+
+ def testExtensibleSingleDefault(self):
+ """Verifies that extensible unions must not have multiple default fields."""
+ mojom = 'foo.mojom'
+ self.WriteFile(
+ mojom, """\
+ module foo;
+ [Extensible] union U {
+ [Default] bool x;
+ [Default] bool y;
+ };
+ """)
+ with self.assertRaisesRegexp(Exception, 'Multiple \[Default\] fields'):
+ self.ParseMojoms([mojom])
+
+ def testExtensibleDefaultTypeValid(self):
+ """Verifies that an extensible union's default field must be nullable or
+ integral type."""
+ mojom = 'foo.mojom'
+ self.WriteFile(
+ mojom, """\
+ module foo;
+ [Extensible] union U {
+ [Default] handle<message_pipe> p;
+ };
+ """)
+ with self.assertRaisesRegexp(Exception, 'must be nullable or integral'):
+ self.ParseMojoms([mojom])
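
A hypothetical companion test (a sketch only, not part of the patch) shows the accepting side of the three checks above; bool is an integral type, so a single [Default] bool field parses cleanly:

    def testExtensibleWithValidDefault(self):
        # Sketch: one integral [Default] field satisfies all three checks.
        mojom = 'foo.mojom'
        self.WriteFile(mojom, 'module foo; [Extensible] union U { [Default] bool x; };')
        self.ParseMojoms([mojom])  # expected to succeed without raising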
diff --git a/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py b/utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
index 65db4dc9..45e45ec5 100644
--- a/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
+++ b/utils/codegen/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -23,9 +23,12 @@ class VersionCompatibilityTest(MojomParserTestCase):
checker = module.BackwardCompatibilityChecker()
compatibility_map = {}
- for name in old.keys():
- compatibility_map[name] = checker.IsBackwardCompatible(
- new[name], old[name])
+ for name in old:
+ try:
+ compatibility_map[name] = checker.IsBackwardCompatible(
+ new[name], old[name])
+ except Exception:
+ compatibility_map[name] = False
return compatibility_map
def assertBackwardCompatible(self, old_mojom, new_mojom):
@@ -60,40 +63,48 @@ class VersionCompatibilityTest(MojomParserTestCase):
"""Adding a value to an existing version is not allowed, even if the old
enum was marked [Extensible]. Note that it is irrelevant whether or not the
new enum is marked [Extensible]."""
- self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
- 'enum E { kFoo, kBar, kBaz };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kFoo, kBar };',
- '[Extensible] enum E { kFoo, kBar, kBaz };')
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ 'enum E { kFoo, kBar, kBaz };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ '[Extensible] enum E { [Default] kFoo, kBar, kBaz };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kFoo, [MinVersion=1] kBar };',
+ '[Extensible] enum E { [Default] kFoo, [MinVersion=1] kBar };',
'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
def testEnumValueRemoval(self):
"""Removal of an enum value is never valid even for [Extensible] enums."""
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
'enum E { kFoo };')
- self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
- '[Extensible] enum E { kFoo };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA, [MinVersion=1] kB };',
- '[Extensible] enum E { kA, };')
+ '[Extensible] enum E { [Default] kFoo, kBar };',
+ '[Extensible] enum E { [Default] kFoo };')
+ self.assertNotBackwardCompatible(
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
+ '[Extensible] enum E { [Default] kA, };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=1] kZ };',
- '[Extensible] enum E { kA, [MinVersion=1] kB };')
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB,
+ [MinVersion=1] kZ };""",
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };')
def testNewExtensibleEnumValueWithMinVersion(self):
"""Adding a new and properly [MinVersion]'d value to an [Extensible] enum
is a backward-compatible change. Note that it is irrelevant whether or not
the new enum is marked [Extensible]."""
- self.assertBackwardCompatible('[Extensible] enum E { kA, kB };',
+ self.assertBackwardCompatible('[Extensible] enum E { [Default] kA, kB };',
'enum E { kA, kB, [MinVersion=1] kC };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA, kB };',
- '[Extensible] enum E { kA, kB, [MinVersion=1] kC };')
+ '[Extensible] enum E { [Default] kA, kB };',
+ '[Extensible] enum E { [Default] kA, kB, [MinVersion=1] kC };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA, [MinVersion=1] kB };',
- '[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=2] kC };')
+ '[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB,
+ [MinVersion=2] kC };""")
def testRenameEnumValue(self):
"""Renaming an enum value does not affect backward-compatibility. Only
@@ -161,14 +172,17 @@ class VersionCompatibilityTest(MojomParserTestCase):
'struct S {}; struct T { S s; };',
'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA }; struct S { E e; };',
- '[Extensible] enum E { kA, [MinVersion=1] kB }; struct S { E e; };')
+ '[Extensible] enum E { [Default] kA }; struct S { E e; };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB };
+ struct S { E e; };""")
self.assertNotBackwardCompatible(
'struct S {}; struct T { S s; };',
'struct S { int32 x; }; struct T { S s; };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA }; struct S { E e; };',
- '[Extensible] enum E { kA, kB }; struct S { E e; };')
+ '[Extensible] enum E { [Default] kA }; struct S { E e; };',
+ '[Extensible] enum E { [Default] kA, kB }; struct S { E e; };')
def testNewStructFieldWithInvalidMinVersion(self):
"""Adding a new field using an existing MinVersion breaks backward-
@@ -305,14 +319,17 @@ class VersionCompatibilityTest(MojomParserTestCase):
'struct S {}; union U { S s; };',
'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
self.assertBackwardCompatible(
- '[Extensible] enum E { kA }; union U { E e; };',
- '[Extensible] enum E { kA, [MinVersion=1] kB }; union U { E e; };')
+ '[Extensible] enum E { [Default] kA }; union U { E e; };',
+ """[Extensible] enum E {
+ [Default] kA,
+ [MinVersion=1] kB };
+ union U { E e; };""")
self.assertNotBackwardCompatible(
'struct S {}; union U { S s; };',
'struct S { int32 x; }; union U { S s; };')
self.assertNotBackwardCompatible(
- '[Extensible] enum E { kA }; union U { E e; };',
- '[Extensible] enum E { kA, kB }; union U { E e; };')
+ '[Extensible] enum E { [Default] kA }; union U { E e; };',
+ '[Extensible] enum E { [Default] kA, kB }; union U { E e; };')
def testNewUnionFieldWithInvalidMinVersion(self):
"""Adding a new field using an existing MinVersion breaks backward-
diff --git a/utils/ipc/mojo/public/tools/run_all_python_unittests.py b/utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
index b2010958..98bce18c 100755
--- a/utils/ipc/mojo/public/tools/run_all_python_unittests.py
+++ b/utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,11 +8,13 @@ import sys
_TOOLS_DIR = os.path.dirname(__file__)
_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
+_BINDINGS_DIR = os.path.join(_TOOLS_DIR, 'bindings')
_SRC_DIR = os.path.join(_TOOLS_DIR, os.path.pardir, os.path.pardir,
os.path.pardir)
# Ensure that the mojom library is discoverable.
sys.path.append(_MOJOM_DIR)
+sys.path.append(_BINDINGS_DIR)
# Help Python find typ in //third_party/catapult/third_party/typ/
sys.path.append(
@@ -21,7 +23,7 @@ import typ
def Main():
- return typ.main(top_level_dir=_MOJOM_DIR)
+ return typ.main(top_level_dirs=[_MOJOM_DIR, _BINDINGS_DIR])
if __name__ == '__main__':
diff --git a/utils/ipc/parser.py b/utils/codegen/ipc/parser.py
index 231a3266..8e70322d 100755
--- a/utils/ipc/parser.py
+++ b/utils/codegen/ipc/parser.py
@@ -4,14 +4,11 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# parser.py - Run mojo parser with python3
+# Run mojo parser with python3
import os
import sys
-# TODO set sys.pycache_prefix for >= python3.8
-sys.dont_write_bytecode = True
-
# Make sure that mojom_parser.py can import mojom
sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/mojom')
diff --git a/utils/ipc/tools/README b/utils/codegen/ipc/tools/README
index d5c24fc3..961cabd2 100644
--- a/utils/ipc/tools/README
+++ b/utils/codegen/ipc/tools/README
@@ -1,4 +1,4 @@
# SPDX-License-Identifier: CC0-1.0
-Files in this directory are imported from 9c138d992bfc of Chromium. Do not
+Files in this directory are imported from 9be4263648d7 of Chromium. Do not
modify them manually.
diff --git a/utils/ipc/tools/diagnosis/crbug_1001171.py b/utils/codegen/ipc/tools/diagnosis/crbug_1001171.py
index 478fb8c1..40900d10 100644
--- a/utils/ipc/tools/diagnosis/crbug_1001171.py
+++ b/utils/codegen/ipc/tools/diagnosis/crbug_1001171.py
@@ -1,4 +1,4 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/utils/codegen/meson.build b/utils/codegen/meson.build
new file mode 100644
index 00000000..8d1c6908
--- /dev/null
+++ b/utils/codegen/meson.build
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: CC0-1.0
+
+## Code generation
+
+py_build_env = environment()
+# \todo Investigate usage of PYTHONPYCACHEPREFIX for Python >= 3.8
+py_build_env.set('PYTHONDONTWRITEBYTECODE', '1')
+py_build_env.prepend('PYTHONPATH', meson.current_source_dir())
+
+py_modules += ['jinja2', 'yaml']
+
+gen_controls = files('gen-controls.py')
+gen_formats = files('gen-formats.py')
+gen_gst_controls = files('gen-gst-controls.py')
+gen_header = files('gen-header.sh')
+gen_ipa_pub_key = files('gen-ipa-pub-key.py')
+gen_tracepoints = files('gen-tp-header.py')
+
+py_mod_controls = files('controls.py')
+
+subdir('ipc')
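
The \todo above concerns where Python writes its bytecode cache. The build environment currently disables the cache outright; since Python 3.8 it could instead be redirected out of the source tree. An illustrative sketch of the alternative (the cache path is hypothetical):

    import sys

    # Today's behaviour, set via py_build_env:
    #   PYTHONDONTWRITEBYTECODE=1 -> no .pyc files are written at all.

    # Python >= 3.8 alternative: keep the cache, but relocate it
    # (equivalent to setting PYTHONPYCACHEPREFIX in the environment).
    sys.pycache_prefix = '/tmp/libcamera-pycache'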
diff --git a/utils/gen-controls.py b/utils/gen-controls.py
deleted file mode 100755
index 1075ae30..00000000
--- a/utils/gen-controls.py
+++ /dev/null
@@ -1,313 +0,0 @@
-#!/usr/bin/env python3
-# SPDX-License-Identifier: GPL-2.0-or-later
-# Copyright (C) 2019, Google Inc.
-#
-# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
-#
-# gen-controls.py - Generate control definitions from YAML
-
-import argparse
-from functools import reduce
-import operator
-import string
-import sys
-import yaml
-
-
-class ControlEnum(object):
- def __init__(self, data):
- self.__data = data
-
- @property
- def description(self):
- """The enum description"""
- return self.__data.get('description')
-
- @property
- def name(self):
- """The enum name"""
- return self.__data.get('name')
-
- @property
- def value(self):
- """The enum value"""
- return self.__data.get('value')
-
-
-class Control(object):
- def __init__(self, name, data):
- self.__name = name
- self.__data = data
- self.__enum_values = None
- self.__size = None
-
- enum_values = data.get('enum')
- if enum_values is not None:
- self.__enum_values = [ControlEnum(enum) for enum in enum_values]
-
- size = self.__data.get('size')
- if size is not None:
- if len(size) == 0:
- raise RuntimeError(f'Control `{self.__name}` size must have at least one dimension')
-
- # Compute the total number of elements in the array. If any of the
- # array dimension is a string, the array is variable-sized.
- num_elems = 1
- for dim in size:
- if type(dim) is str:
- num_elems = 0
- break
-
- dim = int(dim)
- if dim <= 0:
- raise RuntimeError(f'Control `{self.__name}` size must have positive values only')
-
- num_elems *= dim
-
- self.__size = num_elems
-
- @property
- def description(self):
- """The control description"""
- return self.__data.get('description')
-
- @property
- def enum_values(self):
- """The enum values, if the control is an enumeration"""
- if self.__enum_values is None:
- return
- for enum in self.__enum_values:
- yield enum
-
- @property
- def is_enum(self):
- """Is the control an enumeration"""
- return self.__enum_values is not None
-
- @property
- def is_draft(self):
- """Is the control a draft control"""
- return self.__data.get('draft') is not None
-
- @property
- def name(self):
- """The control name (CamelCase)"""
- return self.__name
-
- @property
- def q_name(self):
- """The control name, qualified with a namespace"""
- ns = 'draft::' if self.is_draft else ''
- return ns + self.__name
-
- @property
- def type(self):
- typ = self.__data.get('type')
- size = self.__data.get('size')
-
- if typ == 'string':
- return 'std::string'
-
- if self.__size is None:
- return typ
-
- if self.__size:
- return f"Span<const {typ}, {self.__size}>"
- else:
- return f"Span<const {typ}>"
-
-
-def snake_case(s):
- return ''.join([c.isupper() and ('_' + c) or c for c in s]).strip('_')
-
-
-def format_description(description):
- description = description.strip('\n').split('\n')
- description[0] = '\\brief ' + description[0]
- return '\n'.join([(line and ' * ' or ' *') + line for line in description])
-
-
-def generate_cpp(controls):
- enum_doc_start_template = string.Template('''/**
- * \\enum ${name}Enum
- * \\brief Supported ${name} values''')
- enum_doc_value_template = string.Template(''' * \\var ${value}
-${description}''')
- doc_template = string.Template('''/**
- * \\var ${name}
-${description}
- */''')
- def_template = string.Template('extern const Control<${type}> ${name}(${id_name}, "${name}");')
- enum_values_doc = string.Template('''/**
- * \\var ${name}Values
- * \\brief List of all $name supported values
- */''')
- enum_values_start = string.Template('''extern const std::array<const ControlValue, ${size}> ${name}Values = {''')
- enum_values_values = string.Template('''\tstatic_cast<int32_t>(${name}),''')
-
- ctrls_doc = []
- ctrls_def = []
- draft_ctrls_doc = []
- draft_ctrls_def = []
- ctrls_map = []
-
- for ctrl in controls:
- id_name = snake_case(ctrl.name).upper()
-
- info = {
- 'name': ctrl.name,
- 'type': ctrl.type,
- 'description': format_description(ctrl.description),
- 'id_name': id_name,
- }
-
- target_doc = ctrls_doc
- target_def = ctrls_def
- if ctrl.is_draft:
- target_doc = draft_ctrls_doc
- target_def = draft_ctrls_def
-
- if ctrl.is_enum:
- enum_doc = []
- enum_doc.append(enum_doc_start_template.substitute(info))
-
- num_entries = 0
- for enum in ctrl.enum_values:
- value_info = {
- 'name': ctrl.name,
- 'value': enum.name,
- 'description': format_description(enum.description),
- }
- enum_doc.append(enum_doc_value_template.substitute(value_info))
- num_entries += 1
-
- enum_doc = '\n *\n'.join(enum_doc)
- enum_doc += '\n */'
- target_doc.append(enum_doc)
-
- values_info = {
- 'name': info['name'],
- 'size': num_entries,
- }
- target_doc.append(enum_values_doc.substitute(values_info))
- target_def.append(enum_values_start.substitute(values_info))
- for enum in ctrl.enum_values:
- value_info = {
- 'name': enum.name
- }
- target_def.append(enum_values_values.substitute(value_info))
- target_def.append("};")
-
- target_doc.append(doc_template.substitute(info))
- target_def.append(def_template.substitute(info))
-
- ctrls_map.append('\t{ ' + id_name + ', &' + ctrl.q_name + ' },')
-
- return {
- 'controls_doc': '\n\n'.join(ctrls_doc),
- 'controls_def': '\n'.join(ctrls_def),
- 'draft_controls_doc': '\n\n'.join(draft_ctrls_doc),
- 'draft_controls_def': '\n\n'.join(draft_ctrls_def),
- 'controls_map': '\n'.join(ctrls_map),
- }
-
-
-def generate_h(controls):
- enum_template_start = string.Template('''enum ${name}Enum {''')
- enum_value_template = string.Template('''\t${name} = ${value},''')
- enum_values_template = string.Template('''extern const std::array<const ControlValue, ${size}> ${name}Values;''')
- template = string.Template('''extern const Control<${type}> ${name};''')
-
- ctrls = []
- draft_ctrls = []
- ids = []
- id_value = 1
-
- for ctrl in controls:
- id_name = snake_case(ctrl.name).upper()
-
- ids.append('\t' + id_name + ' = ' + str(id_value) + ',')
-
- info = {
- 'name': ctrl.name,
- 'type': ctrl.type,
- }
-
- target_ctrls = ctrls
- if ctrl.is_draft:
- target_ctrls = draft_ctrls
-
- if ctrl.is_enum:
- target_ctrls.append(enum_template_start.substitute(info))
-
- num_entries = 0
- for enum in ctrl.enum_values:
- value_info = {
- 'name': enum.name,
- 'value': enum.value,
- }
- target_ctrls.append(enum_value_template.substitute(value_info))
- num_entries += 1
- target_ctrls.append("};")
-
- values_info = {
- 'name': info['name'],
- 'size': num_entries,
- }
- target_ctrls.append(enum_values_template.substitute(values_info))
-
- target_ctrls.append(template.substitute(info))
- id_value += 1
-
- return {
- 'ids': '\n'.join(ids),
- 'controls': '\n'.join(ctrls),
- 'draft_controls': '\n'.join(draft_ctrls)
- }
-
-
-def fill_template(template, data):
-
- template = open(template, 'rb').read()
- template = template.decode('utf-8')
- template = string.Template(template)
- return template.substitute(data)
-
-
-def main(argv):
-
- # Parse command line arguments
- parser = argparse.ArgumentParser()
- parser.add_argument('-o', dest='output', metavar='file', type=str,
- help='Output file name. Defaults to standard output if not specified.')
- parser.add_argument('input', type=str,
- help='Input file name.')
- parser.add_argument('template', type=str,
- help='Template file name.')
- args = parser.parse_args(argv[1:])
-
- data = open(args.input, 'rb').read()
- controls = yaml.safe_load(data)['controls']
- controls = [Control(*ctrl.popitem()) for ctrl in controls]
-
- if args.template.endswith('.cpp.in'):
- data = generate_cpp(controls)
- elif args.template.endswith('.h.in'):
- data = generate_h(controls)
- else:
- raise RuntimeError('Unknown template type')
-
- data = fill_template(args.template, data)
-
- if args.output:
- output = open(args.output, 'wb')
- output.write(data.encode('utf-8'))
- output.close()
- else:
- sys.stdout.write(data)
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv))
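
For reference, the snake_case() helper in the file deleted above derives control ID names from the CamelCase control names; a quick illustration of its output (example control names only):

    def snake_case(s):
        return ''.join([c.isupper() and ('_' + c) or c for c in s]).strip('_')

    # Callers upper-case the result to produce the control ID names:
    print(snake_case('AeEnable').upper())     # -> AE_ENABLE
    print(snake_case('ColourGains').upper())  # -> COLOUR_GAINS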
diff --git a/utils/gen-debug-controls.py b/utils/gen-debug-controls.py
new file mode 100755
index 00000000..272597f4
--- /dev/null
+++ b/utils/gen-debug-controls.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2024, Google Inc.
+#
+# Author: Stefan Klug <stefan.klug@ideasonboard.com>
+#
+# This script looks for occurrences of the debug metadata controls in the source
+# tree and updates src/libcamera/control_ids_debug.yaml accordingly. It is meant
+# to be used during development to ease updating of the yaml file while
+# debugging.
+
+import argparse
+import logging
+import os
+import re
+import sys
+from dataclasses import dataclass
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
+
+try:
+ import ruamel.yaml as ruyaml
+except ImportError:
+ logger.error(
+ 'Failed to import ruamel.yaml. Please install the ruamel.yaml package.')
+ sys.exit(1)
+
+@dataclass
+class FoundMatch:
+ file: os.PathLike
+ whole_match: str
+ line: int
+ type: str
+ name: str
+ size: str = None
+
+
+def get_control_name(control):
+ k = list(control.keys())
+ if len(k) != 1:
+ raise Exception(f"Can't handle control entry with {len(k)} keys")
+ return k[0]
+
+
+def find_debug_controls(dir):
+ extensions = ['.cpp', '.h']
+ files = [p for p in dir.rglob('*') if p.suffix in extensions]
+
+ # The following regex was tested on
+ # set<Span<type>>( controls::debug::something , static_cast<type>(var) )
+ # set<>( controls::debug::something , static_cast<type>(var) )
+ # set( controls::debug::something , static_cast<type> (var) )
+ exp = re.compile(r'set' # set function
+ r'(?:\<((?:[^)(])*)\>)?' # followed by an optional template param
+ r'\(\s*controls::debug::(\w+)\s*,' # referencing a debug control
+ )
+ matches = []
+ for p in files:
+ with p.open('r') as f:
+ for idx, line in enumerate(f):
+ match = exp.search(line)
+ if match:
+ m = FoundMatch(file=p, line=idx, type=match.group(1),
+ name=match.group(2), whole_match=match.group(0))
+ if m.type is not None and m.type.startswith('Span'):
+ # Simple span type detection treating the last word
+ # inside <> as type.
+ r = re.match(r'Span<(?:.*\s+)(.*)>', m.type)
+ m.type = r.group(1)
+ m.size = '[n]'
+ matches.append(m)
+ return matches
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='Automatically updates control_ids_debug.yaml')
+ parser.parse_args(argv[1:])
+
+ yaml = ruyaml.YAML()
+ root_dir = Path(__file__).resolve().parent.parent
+ ctrl_file = root_dir.joinpath('src/libcamera/control_ids_debug.yaml')
+
+ matches = find_debug_controls(root_dir.joinpath('src'))
+
+ doc = yaml.load(ctrl_file)
+
+ controls = doc['controls']
+
+ # Create a map of names in the existing yaml for easier updating.
+ controls_map = {}
+ for control in controls:
+ for k, v in control.items():
+ controls_map[k] = v
+
+ obsolete_names = list(controls_map.keys())
+
+ for m in matches:
+ if not m.type:
+ p = m.file.relative_to(Path.cwd(), walk_up=True)
+ logger.warning(
+ f'{p}:{m.line + 1}: Failed to deduce type from {m.whole_match} ... skipping')
+ continue
+
+ p = m.file.relative_to(root_dir)
+ desc = {'type': m.type,
+ 'direction': 'out',
+ 'description': f'Debug control {m.name} found in {p}:{m.line}'}
+ if m.size is not None:
+ desc['size'] = m.size
+
+ if m.name in controls_map:
+ # Can't use == for modified check because of the special yaml dicts.
+ update_needed = False
+ if list(controls_map[m.name].keys()) != list(desc.keys()):
+ update_needed = True
+ else:
+ for k, v in controls_map[m.name].items():
+ if v != desc[k]:
+ update_needed = True
+ break
+
+ if update_needed:
+ logger.info(f"Update control '{m.name}'")
+ controls_map[m.name].clear()
+ controls_map[m.name].update(desc)
+
+ obsolete_names.remove(m.name)
+ else:
+ logger.info(f"Add control '{m.name}'")
+ insert_before = len(controls)
+ for idx, control in enumerate(controls):
+ if get_control_name(control).lower() > m.name.lower():
+ insert_before = idx
+ break
+ controls.insert(insert_before, {m.name: desc})
+
+ # Remove elements from controls without recreating the list (to keep
+ # comments etc.).
+ idx = 0
+ while idx < len(controls):
+ name = get_control_name(controls[idx])
+ if name in obsolete_names:
+ logger.info(f"Remove control '{name}'")
+ controls.pop(idx)
+ else:
+ idx += 1
+
+ with ctrl_file.open('w') as f:
+ # Ruyaml loses the header.
+ f.write(("# SPDX-License-Identifier: LGPL-2.1-or-later\n"
+ "#\n"
+ "# This file was generated by utils/gen-debug-controls.py\n"
+ "#\n"))
+ yaml.dump(doc, f)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
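
To make the matcher above concrete, here is a small self-contained demonstration against a hypothetical source line (the control name is made up for illustration):

    import re

    exp = re.compile(r'set'                               # set function
                     r'(?:\<((?:[^)(])*)\>)?'             # optional template param
                     r'\(\s*controls::debug::(\w+)\s*,')  # debug control reference

    line = 'metadata.set<Span<const float>>(controls::debug::DotShifts, shifts);'
    m = exp.search(line)
    print(m.group(1))  # -> Span<const float>
    print(m.group(2))  # -> DotShifts
    # The script then reduces Span<> types to their element type:
    print(re.match(r'Span<(?:.*\s+)(.*)>', m.group(1)).group(1))  # -> float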
diff --git a/utils/gen-ipa-priv-key.sh b/utils/gen-ipa-priv-key.sh
index 919751f2..2ca7b883 100755
--- a/utils/gen-ipa-priv-key.sh
+++ b/utils/gen-ipa-priv-key.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# gen-ipa-priv-key.sh - Generate an RSA private key to sign IPA modules
+# Generate an RSA private key to sign IPA modules
key="$1"
diff --git a/utils/gen-version.sh b/utils/gen-version.sh
index e1f7ca7b..1b818e9e 100755
--- a/utils/gen-version.sh
+++ b/utils/gen-version.sh
@@ -42,7 +42,7 @@ if [ -z "$build_dir" ] || (echo "$build_dir" | grep -q "$src_dir")
then
git update-index --refresh > /dev/null 2>&1
fi
-git diff-index --quiet HEAD || version="$version-dirty ($(date --iso-8601=seconds))"
+git diff-index --quiet HEAD || version="$version-dirty ($(date +%Y-%m-%dT%H:%M:%S%Z))"
# If a project version is provided, use it to replace the version number.
if [ -n "$project_version" ]
diff --git a/utils/hooks/pre-push b/utils/hooks/pre-push
index 90ffdf6f..68dcbd0c 100755
--- a/utils/hooks/pre-push
+++ b/utils/hooks/pre-push
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
# SPDX-License-Identifier: GPL-2.0-or-later
@@ -61,17 +61,17 @@ do
msg=$(git cat-file commit "$commit")
# 1. The commit message shall not contain a local changelog.
- if echo "$msg" | grep -q '^--- *$'
+ if echo -E "$msg" | grep -q '^--- *$'
then
echo >&2 "Found local changelog in commit $commit"
errors=$((errors+1))
fi
# 2. The commit message shall have Signed-off-by lines
- # corresponding the committer and the author.
+ # corresponding to the committer, author, and all co-developers.
committer=$(echo "$msg" | grep '^committer ' | head -1 | \
cut -d ' ' -f 2- | rev | cut -d ' ' -f 3- | rev)
- if ! echo "$msg" | grep -F -q "Signed-off-by: ${committer}"
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${committer}"
then
echo >&2 "Missing committer Signed-off-by in commit $commit"
errors=$((errors+1))
@@ -79,21 +79,30 @@ do
author=$(echo "$msg" | grep '^author ' | head -1 | \
cut -d ' ' -f 2- | rev | cut -d ' ' -f 3- | rev)
- if ! echo "$msg" | grep -F -q "Signed-off-by: ${author}"
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${author}"
then
echo >&2 "Missing author Signed-off-by in commit $commit"
errors=$((errors+1))
fi
+ while read -r codev
+ do
+ if ! echo -E "$msg" | grep -F -q "Signed-off-by: ${codev}"
+ then
+ echo >&2 "Missing co-developer '${codev}' Signed-off-by in commit $commit"
+ errors=$((errors+1))
+ fi
+ done < <(echo "$msg" | grep '^Co-developed-by: ' | cut -d ' ' -f 2-)
+
# 3. A Reviewed-by or Acked-by is required.
- if ! echo "$msg" | grep -q '^\(Reviewed\|Acked\)-by: '
+ if ! echo -E "$msg" | grep -q '^\(Reviewed\|Acked\)-by: '
then
echo >&2 "No Reviewed-by or Acked-by in commit $commit"
errors=$((errors+1))
fi
# 4. The commit message shall not contain a Change-Id.
- if echo "$msg" | grep -q '^Change-Id:'
+ if echo -E "$msg" | grep -q '^Change-Id:'
then
echo >&2 "Found Change-Id in commit $commit"
errors=$((errors+1))
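
The new loop above requires every Co-developed-by trailer to be matched by a Signed-off-by line, which is also why the shebang moves to bash (the 'done < <(...)' process substitution is not POSIX sh). The same rule sketched in Python for clarity (illustrative only):

    msg_lines = [
        'Co-developed-by: Jane Doe <jane@example.com>',
        'Signed-off-by: John Smith <john@example.com>',
    ]

    codevs = [l.removeprefix('Co-developed-by: ')
              for l in msg_lines if l.startswith('Co-developed-by: ')]
    signed = {l.removeprefix('Signed-off-by: ')
              for l in msg_lines if l.startswith('Signed-off-by: ')}

    for codev in codevs:
        if codev not in signed:
            print(f"Missing co-developer '{codev}' Signed-off-by")
    # -> Missing co-developer 'Jane Doe <jane@example.com>' Signed-off-by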
diff --git a/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni b/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
deleted file mode 100644
index d8a13874..00000000
--- a/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-_typemap_imports = [
- "//chrome/chrome_cleaner/mojom/typemaps/typemaps.gni",
- "//chrome/common/importer/typemaps.gni",
- "//chrome/common/media_router/mojom/typemaps.gni",
- "//chrome/typemaps.gni",
- "//chromecast/typemaps.gni",
- "//chromeos/typemaps.gni",
- "//chromeos/components/multidevice/mojom/typemaps.gni",
- "//chromeos/services/cros_healthd/public/mojom/typemaps.gni",
- "//chromeos/services/device_sync/public/mojom/typemaps.gni",
- "//chromeos/services/network_config/public/mojom/typemaps.gni",
- "//chromeos/services/secure_channel/public/mojom/typemaps.gni",
- "//components/arc/mojom/typemaps.gni",
- "//components/chromeos_camera/common/typemaps.gni",
- "//components/services/storage/public/cpp/filesystem/typemaps.gni",
- "//components/sync/mojom/typemaps.gni",
- "//components/typemaps.gni",
- "//content/browser/typemaps.gni",
- "//content/public/common/typemaps.gni",
- "//sandbox/mac/mojom/typemaps.gni",
- "//services/media_session/public/cpp/typemaps.gni",
- "//services/proxy_resolver/public/cpp/typemaps.gni",
- "//services/resource_coordinator/public/cpp/typemaps.gni",
- "//services/service_manager/public/cpp/typemaps.gni",
- "//services/tracing/public/mojom/typemaps.gni",
-]
-
-_typemaps = []
-foreach(typemap_import, _typemap_imports) {
- # Avoid reassignment error by assigning to empty scope first.
- _imported = {
- }
- _imported = read_file(typemap_import, "scope")
- _typemaps += _imported.typemaps
-}
-
-typemaps = []
-foreach(typemap, _typemaps) {
- typemaps += [
- {
- filename = typemap
- config = read_file(typemap, "scope")
- },
- ]
-}
-
-component_macro_suffix = ""
diff --git a/utils/ipc/mojo/public/tools/bindings/compile_typescript.py b/utils/ipc/mojo/public/tools/bindings/compile_typescript.py
deleted file mode 100644
index a978901b..00000000
--- a/utils/ipc/mojo/public/tools/bindings/compile_typescript.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-import argparse
-
-_HERE_PATH = os.path.dirname(__file__)
-_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
-
-sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
-import node
-import node_modules
-
-def main(argv):
- parser = argparse.ArgumentParser()
- parser.add_argument('--tsconfig_path', required=True)
- args = parser.parse_args(argv)
-
- result = node.RunNode([node_modules.PathToTypescript()] +
- ['--project', args.tsconfig_path])
- if len(result) != 0:
- raise RuntimeError('Failed to compile Typescript: \n%s' % result)
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py b/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
deleted file mode 100755
index 7ac4af5f..00000000
--- a/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import print_function
-
-import sys
-
-# This utility converts mojom dependencies into their corresponding typemap
-# paths and formats them to be consumed by generate_type_mappings.py.
-
-
-def FormatTypemap(typemap_filename):
- # A simple typemap is valid Python with a minor alteration.
- with open(typemap_filename) as f:
- typemap_content = f.read().replace('=\n', '=')
- typemap = {}
- exec typemap_content in typemap
-
- for header in typemap.get('public_headers', []):
- yield 'public_headers=%s' % header
- for header in typemap.get('traits_headers', []):
- yield 'traits_headers=%s' % header
- for header in typemap.get('type_mappings', []):
- yield 'type_mappings=%s' % header
-
-
-def main():
- typemaps = sys.argv[1:]
- print(' '.join('--start-typemap %s' % ' '.join(FormatTypemap(typemap))
- for typemap in typemaps))
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py b/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
deleted file mode 100755
index 15f0e3ba..00000000
--- a/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Downgrades *.mojom files to the old mojo types for remotes and receivers."""
-
-import argparse
-import fnmatch
-import os
-import re
-import shutil
-import sys
-import tempfile
-
-# List of patterns and replacements to match and use against the contents of a
-# mojo file. Each replacement string will be used with Python string's format()
-# function, so the '{}' substring is used to mark where the mojo type should go.
-_MOJO_REPLACEMENTS = {
- r'pending_remote': r'{}',
- r'pending_receiver': r'{}&',
- r'pending_associated_remote': r'associated {}',
- r'pending_associated_receiver': r'associated {}&',
-}
-
-# Pre-compiled regular expression that matches against any of the replacements.
-_REGEXP_PATTERN = re.compile(
- r'|'.join(
- ['{}\s*<\s*(.*?)\s*>'.format(k) for k in _MOJO_REPLACEMENTS.keys()]),
- flags=re.DOTALL)
-
-
-def ReplaceFunction(match_object):
- """Returns the right replacement for the string matched against the regexp."""
- for index, (match, repl) in enumerate(_MOJO_REPLACEMENTS.items(), 1):
- if match_object.group(0).startswith(match):
- return repl.format(match_object.group(index))
-
-
-def DowngradeFile(path, output_dir=None):
- """Downgrades the mojom file specified by |path| to the old mojo types.
-
- Optionally pass |output_dir| to place the result under a separate output
- directory, preserving the relative path to the file included in |path|.
- """
- # Use a temporary file to dump the new contents after replacing the patterns.
- with open(path) as src_mojo_file:
- with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_mojo_file:
- tmp_contents = _REGEXP_PATTERN.sub(ReplaceFunction, src_mojo_file.read())
- tmp_mojo_file.write(tmp_contents)
-
- # Files should be placed in the desired output directory
- if output_dir:
- output_filepath = os.path.join(output_dir, os.path.basename(path))
- if not os.path.exists(output_dir):
- os.makedirs(output_dir)
- else:
- output_filepath = path
-
- # Write the new contents preserving the original file's attributes.
- shutil.copystat(path, tmp_mojo_file.name)
- shutil.move(tmp_mojo_file.name, output_filepath)
-
- # Make sure to "touch" the new file so that access, modify and change times
- # are always newer than the source file's, otherwise Modify time will be kept
- # as per the call to shutil.copystat(), causing unnecessary generations of the
- # output file in subsequent builds due to ninja considering it dirty.
- os.utime(output_filepath, None)
-
-
-def DowngradeDirectory(path, output_dir=None):
- """Downgrades mojom files inside directory |path| to the old mojo types.
-
- Optionally pass |output_dir| to place the result under a separate output
- directory, preserving the relative path to the file included in |path|.
- """
- # We don't have recursive glob.glob() nor pathlib.Path.rglob() in Python 2.7
- mojom_filepaths = []
- for dir_path, _, filenames in os.walk(path):
- for filename in fnmatch.filter(filenames, "*mojom"):
- mojom_filepaths.append(os.path.join(dir_path, filename))
-
- for path in mojom_filepaths:
- absolute_dirpath = os.path.dirname(os.path.abspath(path))
- if output_dir:
- dest_dirpath = output_dir + absolute_dirpath
- else:
- dest_dirpath = absolute_dirpath
- DowngradeFile(path, dest_dirpath)
-
-
-def DowngradePath(src_path, output_dir=None):
- """Downgrades the mojom files pointed by |src_path| to the old mojo types.
-
- Optionally pass |output_dir| to place the result under a separate output
- directory, preserving the relative path to the file included in |path|.
- """
- if os.path.isdir(src_path):
- DowngradeDirectory(src_path, output_dir)
- elif os.path.isfile(src_path):
- DowngradeFile(src_path, output_dir)
- else:
- print(">>> {} not pointing to a valid file or directory".format(src_path))
- sys.exit(1)
-
-
-def main():
- parser = argparse.ArgumentParser(
- description="Downgrade *.mojom files to use the old mojo types.")
- parser.add_argument(
- "srcpath", help="path to the file or directory to apply the conversion")
- parser.add_argument(
- "--outdir", help="the directory to place the converted file(s) under")
- args = parser.parse_args()
-
- DowngradePath(args.srcpath, args.outdir)
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
deleted file mode 100644
index 0dfd996e..00000000
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Resolves the values used for constants and enums."""
-
-from itertools import ifilter
-
-from mojom.generate import module as mojom
-
-
-def ResolveConstants(module, expression_to_text):
- in_progress = set()
- computed = set()
-
- def GetResolvedValue(named_value):
- assert isinstance(named_value, (mojom.EnumValue, mojom.ConstantValue))
- if isinstance(named_value, mojom.EnumValue):
- field = next(
- ifilter(lambda field: field.name == named_value.name,
- named_value.enum.fields), None)
- if not field:
- raise RuntimeError(
- 'Unable to get computed value for field %s of enum %s' %
- (named_value.name, named_value.enum.name))
- if field not in computed:
- ResolveEnum(named_value.enum)
- return field.resolved_value
- else:
- ResolveConstant(named_value.constant)
- named_value.resolved_value = named_value.constant.resolved_value
- return named_value.resolved_value
-
- def ResolveConstant(constant):
- if constant in computed:
- return
- if constant in in_progress:
- raise RuntimeError('Circular dependency for constant: %s' % constant.name)
- in_progress.add(constant)
- if isinstance(constant.value, (mojom.EnumValue, mojom.ConstantValue)):
- resolved_value = GetResolvedValue(constant.value)
- else:
- resolved_value = expression_to_text(constant.value)
- constant.resolved_value = resolved_value
- in_progress.remove(constant)
- computed.add(constant)
-
- def ResolveEnum(enum):
- def ResolveEnumField(enum, field, default_value):
- if field in computed:
- return
- if field in in_progress:
- raise RuntimeError('Circular dependency for enum: %s' % enum.name)
- in_progress.add(field)
- if field.value:
- if isinstance(field.value, mojom.EnumValue):
- resolved_value = GetResolvedValue(field.value)
- elif isinstance(field.value, str):
- resolved_value = int(field.value, 0)
- else:
- raise RuntimeError('Unexpected value: %s' % field.value)
- else:
- resolved_value = default_value
- field.resolved_value = resolved_value
- in_progress.remove(field)
- computed.add(field)
-
- current_value = 0
- for field in enum.fields:
- ResolveEnumField(enum, field, current_value)
- current_value = field.resolved_value + 1
-
- for constant in module.constants:
- ResolveConstant(constant)
-
- for enum in module.enums:
- ResolveEnum(enum)
-
- for struct in module.structs:
- for constant in struct.constants:
- ResolveConstant(constant)
- for enum in struct.enums:
- ResolveEnum(enum)
- for field in struct.fields:
- if isinstance(field.default, (mojom.ConstantValue, mojom.EnumValue)):
- field.default.resolved_value = GetResolvedValue(field.default)
-
- for interface in module.interfaces:
- for constant in interface.constants:
- ResolveConstant(constant)
- for enum in interface.enums:
- ResolveEnum(enum)
-
- return module
diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
deleted file mode 100644
index 19905c8a..00000000
--- a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import imp
-import os.path
-import sys
-import unittest
-
-from mojom.generate import module as mojom
-from mojom.generate import translate
-from mojom.parse import ast
-
-
-class TranslateTest(unittest.TestCase):
- """Tests |parser.Parse()|."""
-
- def testSimpleArray(self):
- """Tests a simple int32[]."""
- # pylint: disable=W0212
- self.assertEquals(translate._MapKind("int32[]"), "a:i32")
-
- def testAssociativeArray(self):
- """Tests a simple uint8{string}."""
- # pylint: disable=W0212
- self.assertEquals(translate._MapKind("uint8{string}"), "m[s][u8]")
-
- def testLeftToRightAssociativeArray(self):
- """Makes sure that parsing is done from right to left on the internal kinds
- in the presence of an associative array."""
- # pylint: disable=W0212
- self.assertEquals(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")
-
- def testTranslateSimpleUnions(self):
- """Makes sure that a simple union is translated correctly."""
- tree = ast.Mojom(None, ast.ImportList(), [
- ast.Union(
- "SomeUnion", None,
- ast.UnionBody([
- ast.UnionField("a", None, None, "int32"),
- ast.UnionField("b", None, None, "string")
- ]))
- ])
-
- translation = translate.OrderedModule(tree, "mojom_tree", [])
- self.assertEqual(1, len(translation.unions))
-
- union = translation.unions[0]
- self.assertTrue(isinstance(union, mojom.Union))
- self.assertEqual("SomeUnion", union.mojom_name)
- self.assertEqual(2, len(union.fields))
- self.assertEqual("a", union.fields[0].mojom_name)
- self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
- self.assertEqual("b", union.fields[1].mojom_name)
- self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)
-
- def testMapKindRaisesWithDuplicate(self):
- """Verifies _MapTreeForType() raises when passed two values with the same
- name."""
- methods = [
- ast.Method('dup', None, None, ast.ParameterList(), None),
- ast.Method('dup', None, None, ast.ParameterList(), None)
- ]
- with self.assertRaises(Exception):
- translate._ElemsOfType(methods, ast.Method, 'scope')
-
- def testAssociatedKinds(self):
- """Tests type spec translation of associated interfaces and requests."""
- # pylint: disable=W0212
- self.assertEquals(
- translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
- self.assertEquals(
- translate._MapKind("asso<SomeInterface&>?"), "?asso:r:x:SomeInterface")
diff --git a/utils/ipu3/ipu3-capture.sh b/utils/ipu3/ipu3-capture.sh
index 9294d025..004a92b0 100755
--- a/utils/ipu3/ipu3-capture.sh
+++ b/utils/ipu3/ipu3-capture.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipu3-capture.sh - Capture raw frames from cameras based on the Intel IPU3
+# Capture raw frames from cameras based on the Intel IPU3
#
# The script makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
diff --git a/utils/ipu3/ipu3-pack.c b/utils/ipu3/ipu3-pack.c
index decbfc6c..23d2db8b 100644
--- a/utils/ipu3/ipu3-pack.c
+++ b/utils/ipu3/ipu3-pack.c
@@ -8,6 +8,7 @@
#include <errno.h>
#include <fcntl.h>
+#include <libgen.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
@@ -15,9 +16,8 @@
#include <sys/types.h>
#include <unistd.h>
-static void usage(const char *argv0)
+static void usage(char *argv0)
{
-
printf("Usage: %s input-file output-file\n", basename(argv0));
printf("Convert unpacked RAW10 Bayer data to the IPU3 packed Bayer formats\n");
printf("If the output-file '-', output data will be written to standard output\n");
diff --git a/utils/ipu3/ipu3-process.sh b/utils/ipu3/ipu3-process.sh
index bb4abbe8..25bc849f 100755
--- a/utils/ipu3/ipu3-process.sh
+++ b/utils/ipu3/ipu3-process.sh
@@ -4,7 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# ipu3-process.sh - Process raw frames with the Intel IPU3
+# Process raw frames with the Intel IPU3
#
# The script makes use of the following tools, which are expected to be
# found in $PATH:
diff --git a/utils/ipu3/ipu3-unpack.c b/utils/ipu3/ipu3-unpack.c
index 9d2c1200..6ee8c45a 100644
--- a/utils/ipu3/ipu3-unpack.c
+++ b/utils/ipu3/ipu3-unpack.c
@@ -8,6 +8,7 @@
#include <errno.h>
#include <fcntl.h>
+#include <libgen.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
@@ -15,7 +16,7 @@
#include <sys/types.h>
#include <unistd.h>
-static void usage(const char *argv0)
+static void usage(char *argv0)
{
printf("Usage: %s input-file output-file\n", basename(argv0));
printf("Unpack the IPU3 raw Bayer format to 16-bit Bayer\n");
@@ -78,7 +79,7 @@ int main(int argc, char *argv[])
}
ret = write(out_fd, out_data, 50);
- if (ret < -1) {
+ if (ret == -1) {
fprintf(stderr, "Failed to write output data: %s\n",
strerror(errno));
goto done;
diff --git a/utils/meson.build b/utils/meson.build
index 8e28ada7..95d657ac 100644
--- a/utils/meson.build
+++ b/utils/meson.build
@@ -1,15 +1,7 @@
# SPDX-License-Identifier: CC0-1.0
-subdir('ipc')
+subdir('codegen')
subdir('ipu3')
-subdir('tracepoints')
-
-## Code generation
-py_modules += ['yaml']
-gen_controls = files('gen-controls.py')
-gen_formats = files('gen-formats.py')
-gen_header = files('gen-header.sh')
## Module signing
gen_ipa_priv_key = files('gen-ipa-priv-key.sh')
-gen_ipa_pub_key = files('gen-ipa-pub-key.py')
diff --git a/utils/raspberrypi/ctt/alsc_only.py b/utils/raspberrypi/ctt/alsc_only.py
index 7cd0ac01..a521c4ad 100755
--- a/utils/raspberrypi/ctt/alsc_only.py
+++ b/utils/raspberrypi/ctt/alsc_only.py
@@ -2,12 +2,14 @@
#
# SPDX-License-Identifier: BSD-2-Clause
#
-# Copyright (C) 2022, Raspberry Pi (Trading) Limited
+# Copyright (C) 2022, Raspberry Pi Ltd
#
-# alsc_only.py - alsc tuning tool
+# alsc tuning tool
-from ctt import *
+import sys
+from ctt import *
+from ctt_tools import parse_input
if __name__ == '__main__':
"""
@@ -15,13 +17,14 @@ if __name__ == '__main__':
"""
if len(sys.argv) == 1:
print("""
- Pisp Camera Tuning Tool version 1.0
+ PiSP Lens Shading Camera Tuning Tool version 1.0
Required Arguments:
'-i' : Calibration image directory.
'-o' : Name of output json file.
Optional Arguments:
+ '-t' : Target platform - 'pisp' or 'vc4'. Default 'vc4'
'-c' : Config file for the CTT. If not passed, default parameters used.
'-l' : Name of output log file. If not passed, 'ctt_log.txt' used.
""")
@@ -30,5 +33,10 @@ if __name__ == '__main__':
"""
parse input arguments
"""
- json_output, directory, config, log_output = parse_input()
- run_ctt(json_output, directory, config, log_output, alsc_only=True)
+ json_output, directory, config, log_output, target = parse_input()
+ if target == 'pisp':
+ from ctt_pisp import json_template, grid_size
+ elif target == 'vc4':
+ from ctt_vc4 import json_template, grid_size
+
+ run_ctt(json_output, directory, config, log_output, json_template, grid_size, target, alsc_only=True)
diff --git a/utils/raspberrypi/ctt/cac_only.py b/utils/raspberrypi/ctt/cac_only.py
new file mode 100644
index 00000000..1c0a8193
--- /dev/null
+++ b/utils/raspberrypi/ctt/cac_only.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2023, Raspberry Pi (Trading) Ltd.
+#
+# cac_only.py - cac tuning tool
+
+
+# This file allows you to tune only the chromatic aberration correction.
+# Specify any number of files in the command line args, and it shall iterate through
+# and generate an averaged cac table from all the input images, which you can then
+# input into your tuning file.
+
+# Takes .dng files of the dots grid produced by the camera modules and calculates the chromatic aberration of each dot.
+# Then takes each dot, works out where it was in the image, and uses that to output a table of the shifts
+# across the whole image.
+
+from PIL import Image
+import numpy as np
+import rawpy
+import sys
+import getopt
+
+from ctt_cac import *
+
+
+def cac(filelist, output_filepath, plot_results=False):
+ np.set_printoptions(precision=3)
+ np.set_printoptions(suppress=True)
+
+ # Create arrays to hold all the dots data and their colour offsets
+ red_shift = [] # Format is: [[Dot Center X, Dot Center Y, x shift, y shift]]
+ blue_shift = []
+ # Iterate through the files
+ # Multiple files are recommended to average out the lens aberration through rotations
+ for file in filelist:
+ print("\n Processing file " + str(file))
+ # Read the raw RGB values from the .dng file
+ with rawpy.imread(file) as raw:
+ rgb = raw.postprocess()
+ sizes = (raw.sizes)
+
+ image_size = [sizes[2], sizes[3]] # Image size, X, Y
+ # Create a colour copy of the RGB values to use later in the calibration
+ imout = Image.new(mode="RGB", size=image_size)
+ rgb_image = np.array(imout)
+ # The rgb values need reshaping from a 1d array to a 3d array to be worked with easily
+ rgb.reshape((image_size[0], image_size[1], 3))
+ rgb_image = rgb
+
+ # Pass the RGB image through to the dots locating program
+ # Returns an array of the dots (colour rectangles around the dots), and an array of their locations
+ print("Finding dots")
+ dots, dots_locations = find_dots_locations(rgb_image)
+
+ # Now, analyse each dot. Work out the centroid of each colour channel, and use that to work out
+ # by how far the chromatic aberration has shifted each channel
+ print('Dots found: ' + str(len(dots)))
+
+ for dot, dot_location in zip(dots, dots_locations):
+ if len(dot) > 0:
+ if (dot_location[0] > 0) and (dot_location[1] > 0):
+ ret = analyse_dot(dot, dot_location)
+ red_shift.append(ret[0])
+ blue_shift.append(ret[1])
+
+ # Take our arrays of red shifts and locations, push them through to be interpolated into a 9x9 matrix
+ # for the CAC block to handle and then store these as a .json file to be added to the camera
+ # tuning file
+ print("\nCreating output grid")
+ rx, ry, bx, by = shifts_to_yaml(red_shift, blue_shift, image_size)
+
+ print("CAC correction complete!")
+
+ # The json format that we then paste into the tuning file (manually)
+ sample = '''
+ {
+ "rpi.cac" :
+ {
+ "strength": 1.0,
+ "lut_rx" : [
+ rx_vals
+ ],
+ "lut_ry" : [
+ ry_vals
+ ],
+ "lut_bx" : [
+ bx_vals
+ ],
+ "lut_by" : [
+ by_vals
+ ]
+ }
+ }
+ '''
+
+ # The below may look incorrect; however, the PiSP (standard) dimensions are flipped in comparison to
+ # PIL image coordinate directions, hence xr -> yr. Also, the shifts calculated are colour shifts,
+ # and the PiSP block asks for the values it should shift (hence the * -1, to convert from a colour shift to a pixel shift).
+ sample = sample.replace("rx_vals", pprint_array(ry * -1))
+ sample = sample.replace("ry_vals", pprint_array(rx * -1))
+ sample = sample.replace("bx_vals", pprint_array(by * -1))
+ sample = sample.replace("by_vals", pprint_array(bx * -1))
+ print("Successfully converted to JSON")
+ f = open(str(output_filepath), "w+")
+ f.write(sample)
+ f.close()
+ print("Successfully written to json file")
+ '''
+ If you wish to see a plot of the colour channel shifts, add the -p or --plot option.
+ It can be a quick way of validating whether the data/dots you've got are good, or if you need to
+ change some parameters/take some better images.
+ '''
+ if plot_results:
+ plot_shifts(red_shift, blue_shift)
+
+
+if __name__ == "__main__":
+ argv = sys.argv
+ # Detect the input and output file paths
+ arg_output = "output.json"
+ arg_help = "{0} -i <input> -o <output> -p <plot results>".format(argv[0])
+ opts, args = getopt.getopt(argv[1:], "hi:o:p", ["help", "input=", "output=", "plot"])
+
+ output_location = 0
+ input_location = 0
+ filelist = []
+ plot_results = False
+ for i in range(len(argv)):
+ if ("-h") in argv[i]:
+ print(arg_help) # print the help message
+ sys.exit(2)
+ if "-o" in argv[i]:
+ output_location = i
+ if ".dng" in argv[i]:
+ filelist.append(argv[i])
+ if "-p" in argv[i]:
+ plot_results = True
+
+ arg_output = argv[output_location + 1]
+ cac(filelist, arg_output, plot_results)
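
The argv scan above treats any argument containing '.dng' as an input image and only uses getopt for the flag spec. An equivalent argparse sketch (illustrative only; the script itself keeps the scan):

    import argparse

    parser = argparse.ArgumentParser(description='cac tuning tool (sketch)')
    parser.add_argument('-o', '--output', default='output.json',
                        help='output json file')
    parser.add_argument('-p', '--plot', action='store_true',
                        help='plot the colour channel shifts')
    parser.add_argument('images', nargs='+', help='input .dng files')
    args = parser.parse_args()
    # cac(args.images, args.output, args.plot)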
diff --git a/utils/raspberrypi/ctt/colors.py b/utils/raspberrypi/ctt/colors.py
index 1ab986d6..cb4d236b 100644
--- a/utils/raspberrypi/ctt/colors.py
+++ b/utils/raspberrypi/ctt/colors.py
@@ -1,4 +1,4 @@
-# colors.py - Program to convert from RGB to LAB color space
+# Program to convert from RGB to LAB color space
def RGB_to_LAB(RGB): # where RGB is a 1x3 array. e.g RGB = [100, 255, 230]
num = 0
XYZ = [0, 0, 0]
diff --git a/utils/raspberrypi/ctt/convert_tuning.py b/utils/raspberrypi/ctt/convert_tuning.py
index f4504d45..83cf69d4 100755
--- a/utils/raspberrypi/ctt/convert_tuning.py
+++ b/utils/raspberrypi/ctt/convert_tuning.py
@@ -8,30 +8,104 @@
import argparse
import json
+import numpy as np
import sys
from ctt_pretty_print_json import pretty_print
+from ctt_pisp import grid_size as grid_size_pisp
+from ctt_pisp import json_template as json_template_pisp
+from ctt_vc4 import grid_size as grid_size_vc4
+from ctt_vc4 import json_template as json_template_vc4
-def convert_v2(in_json: dict) -> str:
+def interp_2d(in_ls, src_w, src_h, dst_w, dst_h):
- if 'version' in in_json.keys() and in_json['version'] != 1.0:
- print(f'The JSON config reports version {in_json["version"]} that is incompatible with this tool.')
- sys.exit(-1)
+ out_ls = np.zeros((dst_h, dst_w))
+ for i in range(src_h):
+ out_ls[i] = np.interp(np.linspace(0, dst_w - 1, dst_w),
+ np.linspace(0, dst_w - 1, src_w),
+ in_ls[i])
+ for i in range(dst_w):
+        out_ls[:, i] = np.interp(np.linspace(0, dst_h - 1, dst_h),
+                                 np.linspace(0, dst_h - 1, src_h),
+                                 out_ls[:src_h, i])
+ return out_ls
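+
+# A minimal sketch of what interp_2d does, with assumed grid sizes (16x12 being
+# the vc4 ALSC grid, 32x32 the pisp one); this is illustration only, not part of
+# the conversion:
+#
+#     table = np.linspace(1.0, 2.0, 16 * 12).reshape(12, 16)
+#     resized = interp_2d(table, 16, 12, 32, 32)
+#     assert resized.shape == (32, 32)
+#
+# Rows are resampled to the new width first, then columns to the new height, so
+# the corner values of the source table are preserved exactly.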
- converted = {
- 'version': 2.0,
- 'target': 'bcm2835',
- 'algorithms': [{algo: config} for algo, config in in_json.items()]
- }
- return pretty_print(converted)
+def convert_target(in_json: dict, target: str):
+
+ src_w, src_h = grid_size_pisp if target == 'vc4' else grid_size_vc4
+ dst_w, dst_h = grid_size_vc4 if target == 'vc4' else grid_size_pisp
+ json_template = json_template_vc4 if target == 'vc4' else json_template_pisp
+
+ # ALSC grid sizes
+ alsc = next(algo for algo in in_json['algorithms'] if 'rpi.alsc' in algo)['rpi.alsc']
+ for colour in ['calibrations_Cr', 'calibrations_Cb']:
+ if colour not in alsc:
+ continue
+ for temperature in alsc[colour]:
+ in_ls = np.reshape(temperature['table'], (src_h, src_w))
+ out_ls = interp_2d(in_ls, src_w, src_h, dst_w, dst_h)
+ temperature['table'] = np.round(out_ls.flatten(), 3).tolist()
+
+ if 'luminance_lut' in alsc:
+ in_ls = np.reshape(alsc['luminance_lut'], (src_h, src_w))
+ out_ls = interp_2d(in_ls, src_w, src_h, dst_w, dst_h)
+ alsc['luminance_lut'] = np.round(out_ls.flatten(), 3).tolist()
+
+ # Denoise blocks
+ for i, algo in enumerate(in_json['algorithms']):
+ if list(algo.keys())[0] == 'rpi.sdn':
+ in_json['algorithms'][i] = {'rpi.denoise': json_template['rpi.sdn'] if target == 'vc4' else json_template['rpi.denoise']}
+ break
+
+ # AGC mode weights
+ agc = next(algo for algo in in_json['algorithms'] if 'rpi.agc' in algo)['rpi.agc']
+ if 'channels' in agc:
+ for i, channel in enumerate(agc['channels']):
+ target_agc_metering = json_template['rpi.agc']['channels'][i]['metering_modes']
+ for mode, v in channel['metering_modes'].items():
+ v['weights'] = target_agc_metering[mode]['weights']
+ else:
+ for mode, v in agc["metering_modes"].items():
+ target_agc_metering = json_template['rpi.agc']['channels'][0]['metering_modes']
+ v['weights'] = target_agc_metering[mode]['weights']
+
+ # HDR
+ if target == 'pisp':
+ for i, algo in enumerate(in_json['algorithms']):
+ if list(algo.keys())[0] == 'rpi.hdr':
+ in_json['algorithms'][i] = {'rpi.hdr': json_template['rpi.hdr']}
+
+ return in_json
+
+
+def convert_v2(in_json: dict, target: str) -> str:
+
+ if 'version' in in_json.keys() and in_json['version'] == 1.0:
+ converted = {
+ 'version': 2.0,
+ 'target': target,
+ 'algorithms': [{algo: config} for algo, config in in_json.items()]
+ }
+ else:
+ converted = in_json
+
+    # Convert between the vc4 <-> pisp targets. This is a best-effort conversion.
+ if converted['target'] != target:
+ converted = convert_target(converted, target)
+ converted['target'] = target
+
+ grid_size = grid_size_vc4[0] if target == 'vc4' else grid_size_pisp[0]
+ return pretty_print(converted, custom_elems={'table': grid_size, 'luminance_lut': grid_size})
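+
+# Example command line (hypothetical file names), converting an existing vc4
+# tuning file for use on a pisp platform:
+#
+#     ./convert_tuning.py -t pisp imx477.json imx477_pisp.json
+#
+# A version 1.0 input is first wrapped into the version 2.0 structure above
+# before any vc4 <-> pisp conversion takes place.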
if __name__ == "__main__":
parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=
- 'Convert the format of the Raspberry Pi camera tuning file from v1.0 to v2.0.\n')
+ 'Convert the format of the Raspberry Pi camera tuning file from v1.0 to v2.0 and/or the vc4 <-> pisp targets.\n')
parser.add_argument('input', type=str, help='Input tuning file.')
+ parser.add_argument('-t', '--target', type=str, help='Target platform.',
+ choices=['pisp', 'vc4'], default='vc4')
parser.add_argument('output', type=str, nargs='?',
help='Output converted tuning file. If not provided, the input file will be updated in-place.',
default=None)
@@ -40,7 +114,7 @@ if __name__ == "__main__":
with open(args.input, 'r') as f:
in_json = json.load(f)
- out_json = convert_v2(in_json)
+ out_json = convert_v2(in_json, args.target)
with open(args.output if args.output is not None else args.input, 'w') as f:
f.write(out_json)
diff --git a/utils/raspberrypi/ctt/ctt.py b/utils/raspberrypi/ctt/ctt.py
index cd89f177..186afda5 100755
--- a/utils/raspberrypi/ctt/ctt.py
+++ b/utils/raspberrypi/ctt/ctt.py
@@ -4,11 +4,12 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt.py - camera tuning tool
+# camera tuning tool
import os
import sys
from ctt_image_load import *
+from ctt_cac import *
from ctt_ccm import *
from ctt_awb import *
from ctt_alsc import *
@@ -22,9 +23,10 @@ import re
"""
This file houses the camera object, which is used to perform the calibrations.
-The camera object houses all the calibration images as attributes in two lists:
+The camera object houses all the calibration images as attributes in three lists:
- imgs (macbeth charts)
- imgs_alsc (alsc correction images)
+ - imgs_cac (cac correction images)
Various calibrations are methods of the camera object, and the output is stored
in a dictionary called self.json.
Once all the calibration has been completed, the Camera.json is written into a
@@ -67,139 +69,26 @@ Camera object that is the backbone of the tuning tool.
Input is the desired path of the output json.
"""
class Camera:
- def __init__(self, jfile):
+ def __init__(self, jfile, json):
self.path = os.path.dirname(os.path.expanduser(__file__)) + '/'
if self.path == '/':
self.path = ''
self.imgs = []
self.imgs_alsc = []
+ self.imgs_cac = []
self.log = 'Log created : ' + time.asctime(time.localtime(time.time()))
self.log_separator = '\n'+'-'*70+'\n'
self.jf = jfile
"""
initial json dict populated by uncalibrated values
"""
- self.json = {
- "rpi.black_level": {
- "black_level": 4096
- },
- "rpi.dpc": {
- },
- "rpi.lux": {
- "reference_shutter_speed": 10000,
- "reference_gain": 1,
- "reference_aperture": 1.0
- },
- "rpi.noise": {
- },
- "rpi.geq": {
- },
- "rpi.sdn": {
- },
- "rpi.awb": {
- "priors": [
- {"lux": 0, "prior": [2000, 1.0, 3000, 0.0, 13000, 0.0]},
- {"lux": 800, "prior": [2000, 0.0, 6000, 2.0, 13000, 2.0]},
- {"lux": 1500, "prior": [2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0]}
- ],
- "modes": {
- "auto": {"lo": 2500, "hi": 8000},
- "incandescent": {"lo": 2500, "hi": 3000},
- "tungsten": {"lo": 3000, "hi": 3500},
- "fluorescent": {"lo": 4000, "hi": 4700},
- "indoor": {"lo": 3000, "hi": 5000},
- "daylight": {"lo": 5500, "hi": 6500},
- "cloudy": {"lo": 7000, "hi": 8600}
- },
- "bayes": 1
- },
- "rpi.agc": {
- "metering_modes": {
- "centre-weighted": {
- "weights": [3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0]
- },
- "spot": {
- "weights": [2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
- },
- "matrix": {
- "weights": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
- }
- },
- "exposure_modes": {
- "normal": {
- "shutter": [100, 10000, 30000, 60000, 120000],
- "gain": [1.0, 2.0, 4.0, 6.0, 6.0]
- },
- "short": {
- "shutter": [100, 5000, 10000, 20000, 120000],
- "gain": [1.0, 2.0, 4.0, 6.0, 6.0]
- }
- },
- "constraint_modes": {
- "normal": [
- {"bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.5, 1000, 0.5]}
- ],
- "highlight": [
- {"bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.5, 1000, 0.5]},
- {"bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.8, 1000, 0.8]}
- ]
- },
- "y_target": [0, 0.16, 1000, 0.165, 10000, 0.17]
- },
- "rpi.alsc": {
- 'omega': 1.3,
- 'n_iter': 100,
- 'luminance_strength': 0.7,
- },
- "rpi.contrast": {
- "ce_enable": 1,
- "gamma_curve": [
- 0, 0,
- 1024, 5040,
- 2048, 9338,
- 3072, 12356,
- 4096, 15312,
- 5120, 18051,
- 6144, 20790,
- 7168, 23193,
- 8192, 25744,
- 9216, 27942,
- 10240, 30035,
- 11264, 32005,
- 12288, 33975,
- 13312, 35815,
- 14336, 37600,
- 15360, 39168,
- 16384, 40642,
- 18432, 43379,
- 20480, 45749,
- 22528, 47753,
- 24576, 49621,
- 26624, 51253,
- 28672, 52698,
- 30720, 53796,
- 32768, 54876,
- 36864, 57012,
- 40960, 58656,
- 45056, 59954,
- 49152, 61183,
- 53248, 62355,
- 57344, 63419,
- 61440, 64476,
- 65535, 65535
- ]
- },
- "rpi.ccm": {
- },
- "rpi.sharpen": {
- }
- }
+ self.json = json
"""
Perform colour correction calibrations by comparing macbeth patch colours
to standard macbeth chart colours.
"""
- def ccm_cal(self, do_alsc_colour):
+ def ccm_cal(self, do_alsc_colour, grid_size):
if 'rpi.ccm' in self.disable:
return 1
print('\nStarting CCM calibration')
@@ -245,7 +134,7 @@ class Camera:
Do CCM calibration
"""
try:
- ccms = ccm(self, cal_cr_list, cal_cb_list)
+ ccms = ccm(self, cal_cr_list, cal_cb_list, grid_size)
except ArithmeticError:
print('ERROR: Matrix is singular!\nTake new pictures and try again...')
self.log += '\nERROR: Singular matrix encountered during fit!'
@@ -259,11 +148,70 @@ class Camera:
print('Finished CCM calibration')
"""
+    Perform chromatic aberration correction using multiple images of dots.
+ """
+ def cac_cal(self, do_alsc_colour):
+ if 'rpi.cac' in self.disable:
+ return 1
+ print('\nStarting CAC calibration')
+ self.log_new_sec('CAC')
+ """
+ check if cac images have been taken
+ """
+ if len(self.imgs_cac) == 0:
+ print('\nError:\nNo cac calibration images found')
+ self.log += '\nERROR: No CAC calibration images found!'
+ self.log += '\nCAC calibration aborted!'
+ return 1
+ """
+ if image is greyscale then CAC makes no sense
+ """
+ if self.grey:
+ print('\nERROR: Can\'t do CAC on greyscale image!')
+ self.log += '\nERROR: Cannot perform CAC calibration '
+ self.log += 'on greyscale image!\nCAC aborted!'
+ del self.json['rpi.cac']
+ return 0
+ a = time.time()
+ """
+ Check if camera is greyscale or color. If not greyscale, then perform cac
+ """
+ if do_alsc_colour:
+ """
+ Here we have a color sensor. Perform cac
+ """
+ try:
+ cacs = cac(self)
+ except ArithmeticError:
+ print('ERROR: Matrix is singular!\nTake new pictures and try again...')
+ self.log += '\nERROR: Singular matrix encountered during fit!'
+ self.log += '\nCAC aborted!'
+ return 1
+        else:
+            """
+            case where config options suggest a greyscale camera. No point in doing CAC
+            """
+            cacs = False
+            self.log += '\nWARNING: Config suggests a greyscale camera.\nCAC calibration '
+            self.log += 'skipped...'
+
+ """
+ Write output to json
+ """
+ if cacs:
+ self.json['rpi.cac']['cac'] = cacs
+ self.log += '\nCAC calibration written to json file'
+ print('Finished CAC calibration')
+ else:
+ self.log += "\nCAC calibration failed"
+
+
+ """
Auto white balance calibration produces a colour curve for
various colour temperatures, as well as providing a maximum 'wiggle room'
distance from this curve (transverse_neg/pos).
"""
- def awb_cal(self, greyworld, do_alsc_colour):
+ def awb_cal(self, greyworld, do_alsc_colour, grid_size):
if 'rpi.awb' in self.disable:
return 1
print('\nStarting AWB calibration')
@@ -306,7 +254,7 @@ class Camera:
call calibration function
"""
plot = "rpi.awb" in self.plot
- awb_out = awb(self, cal_cr_list, cal_cb_list, plot)
+ awb_out = awb(self, cal_cr_list, cal_cb_list, plot, grid_size)
ct_curve, transverse_neg, transverse_pos = awb_out
"""
write output to json
@@ -324,7 +272,7 @@ class Camera:
colour channel seperately, and then partially corrects for vignetting.
The extent of the correction depends on the 'luminance_strength' parameter.
"""
- def alsc_cal(self, luminance_strength, do_alsc_colour):
+ def alsc_cal(self, luminance_strength, do_alsc_colour, grid_size, max_gain=8.0):
if 'rpi.alsc' in self.disable:
return 1
print('\nStarting ALSC calibration')
@@ -347,10 +295,10 @@ class Camera:
call calibration function
"""
plot = "rpi.alsc" in self.plot
- alsc_out = alsc_all(self, do_alsc_colour, plot)
+ alsc_out = alsc_all(self, do_alsc_colour, plot, grid_size, max_gain=max_gain)
cal_cr_list, cal_cb_list, luminance_lut, av_corn = alsc_out
"""
- write ouput to json and finish if not do_alsc_colour
+ write output to json and finish if not do_alsc_colour
"""
if not do_alsc_colour:
self.json['rpi.alsc']['luminance_lut'] = luminance_lut
@@ -393,7 +341,7 @@ class Camera:
"""
obtain worst-case scenario residual sigmas
"""
- sigma_r, sigma_b = get_sigma(self, cal_cr_list, cal_cb_list)
+ sigma_r, sigma_b = get_sigma(self, cal_cr_list, cal_cb_list, grid_size)
"""
write output to json
"""
@@ -509,19 +457,20 @@ class Camera:
"""
writes the json dictionary to the raw json file then make pretty
"""
- def write_json(self):
+ def write_json(self, version=2.0, target='bcm2835', grid_size=(16, 12)):
"""
Write json dictionary to file using our version 2 format
"""
out_json = {
- "version": 2.0,
- 'target': 'bcm2835',
+ "version": version,
+ 'target': target if target != 'vc4' else 'bcm2835',
"algorithms": [{name: data} for name, data in self.json.items()],
}
with open(self.jf, 'w') as f:
- f.write(pretty_print(out_json))
+ f.write(pretty_print(out_json,
+ custom_elems={'table': grid_size[0], 'luminance_lut': grid_size[0]}))
"""
add a new section to the log file
@@ -627,6 +576,16 @@ class Camera:
self.log += '\nWARNING: Error reading colour temperature'
self.log += '\nImage discarded!'
print('DISCARDED')
+ elif 'cac' in filename:
+ Img = load_image(self, address, mac=False)
+                    self.log += '\nIdentified as a CAC image'
+ Img.name = filename
+ self.log += '\nColour temperature: {} K'.format(col)
+ self.imgs_cac.append(Img)
+ if blacklevel != -1:
+ Img.blacklevel_16 = blacklevel
+ print(img_suc_msg)
+ continue
else:
self.log += '\nIdentified as macbeth chart image'
"""
@@ -672,6 +631,7 @@ class Camera:
self.log += '\n\nImages found:'
self.log += '\nMacbeth : {}'.format(len(self.imgs))
self.log += '\nALSC : {} '.format(len(self.imgs_alsc))
+        self.log += '\nCAC : {} '.format(len(self.imgs_cac))
self.log += '\n\nCamera metadata'
"""
check usable images found
@@ -680,22 +640,21 @@ class Camera:
print('\nERROR: No usable macbeth chart images found')
self.log += '\nERROR: No usable macbeth chart images found'
return 0
- elif len(self.imgs) == 0 and len(self.imgs_alsc) == 0:
+ elif len(self.imgs) == 0 and len(self.imgs_alsc) == 0 and len(self.imgs_cac) == 0:
print('\nERROR: No usable images found')
self.log += '\nERROR: No usable images found'
return 0
"""
Double check that every image has come from the same camera...
"""
- all_imgs = self.imgs + self.imgs_alsc
+ all_imgs = self.imgs + self.imgs_alsc + self.imgs_cac
camNames = list(set([Img.camName for Img in all_imgs]))
patterns = list(set([Img.pattern for Img in all_imgs]))
sigbitss = list(set([Img.sigbits for Img in all_imgs]))
blacklevels = list(set([Img.blacklevel_16 for Img in all_imgs]))
sizes = list(set([(Img.w, Img.h) for Img in all_imgs]))
- if len(camNames) == 1 and len(patterns) == 1 and len(sigbitss) == 1 and \
- len(blacklevels) == 1 and len(sizes) == 1:
+ if 1:
self.grey = (patterns[0] == 128)
self.blacklevel_16 = blacklevels[0]
self.log += '\nName: {}'.format(camNames[0])
@@ -712,7 +671,7 @@ class Camera:
return 0
-def run_ctt(json_output, directory, config, log_output, alsc_only=False):
+def run_ctt(json_output, directory, config, log_output, json_template, grid_size, target, alsc_only=False):
"""
check input files are jsons
"""
@@ -748,12 +707,14 @@ def run_ctt(json_output, directory, config, log_output, alsc_only=False):
greyworld = get_config(awb_d, "greyworld", 0, 'bool')
alsc_d = get_config(configs, "alsc", {}, 'dict')
do_alsc_colour = get_config(alsc_d, "do_alsc_colour", 1, 'bool')
- luminance_strength = get_config(alsc_d, "luminance_strength", 0.5, 'num')
+ luminance_strength = get_config(alsc_d, "luminance_strength", 0.8, 'num')
+ lsc_max_gain = get_config(alsc_d, "max_gain", 8.0, 'num')
blacklevel = get_config(configs, "blacklevel", -1, 'num')
macbeth_d = get_config(configs, "macbeth", {}, 'dict')
mac_small = get_config(macbeth_d, "small", 0, 'bool')
mac_show = get_config(macbeth_d, "show", 0, 'bool')
mac_config = (mac_small, mac_show)
+ print("Read lsc_max_gain", lsc_max_gain)
if blacklevel < -1 or blacklevel >= 2**16:
print('\nInvalid blacklevel, defaulted to 64')
@@ -772,7 +733,7 @@ def run_ctt(json_output, directory, config, log_output, alsc_only=False):
initialise tuning tool and load images
"""
try:
- Cam = Camera(json_output)
+ Cam = Camera(json_output, json=json_template)
Cam.log_user_input(json_output, directory, config, log_output)
if alsc_only:
disable = set(Cam.json.keys()).symmetric_difference({"rpi.alsc"})
@@ -794,14 +755,17 @@ def run_ctt(json_output, directory, config, log_output, alsc_only=False):
Cam.json['rpi.black_level']['black_level'] = Cam.blacklevel_16
Cam.json_remove(disable)
print('\nSTARTING CALIBRATIONS')
- Cam.alsc_cal(luminance_strength, do_alsc_colour)
+ Cam.alsc_cal(luminance_strength, do_alsc_colour, grid_size, max_gain=lsc_max_gain)
Cam.geq_cal()
Cam.lux_cal()
Cam.noise_cal()
- Cam.awb_cal(greyworld, do_alsc_colour)
- Cam.ccm_cal(do_alsc_colour)
+ if "rpi.cac" in json_template:
+ Cam.cac_cal(do_alsc_colour)
+ Cam.awb_cal(greyworld, do_alsc_colour, grid_size)
+ Cam.ccm_cal(do_alsc_colour, grid_size)
+
print('\nFINISHED CALIBRATIONS')
- Cam.write_json()
+ Cam.write_json(target=target, grid_size=grid_size)
Cam.write_log(log_output)
print('\nCalibrations written to: '+json_output)
if log_output is None:
@@ -811,20 +775,19 @@ def run_ctt(json_output, directory, config, log_output, alsc_only=False):
else:
Cam.write_log(log_output)
-
if __name__ == '__main__':
"""
initialise calibration
"""
if len(sys.argv) == 1:
print("""
- Pisp Camera Tuning Tool version 1.0
-
+ PiSP Tuning Tool version 1.0
Required Arguments:
'-i' : Calibration image directory.
'-o' : Name of output json file.
Optional Arguments:
+ '-t' : Target platform - 'pisp' or 'vc4'. Default 'vc4'
'-c' : Config file for the CTT. If not passed, default parameters used.
'-l' : Name of output log file. If not passed, 'ctt_log.txt' used.
""")
@@ -833,5 +796,10 @@ if __name__ == '__main__':
"""
parse input arguments
"""
- json_output, directory, config, log_output = parse_input()
- run_ctt(json_output, directory, config, log_output)
+ json_output, directory, config, log_output, target = parse_input()
+ if target == 'pisp':
+ from ctt_pisp import json_template, grid_size
+ elif target == 'vc4':
+ from ctt_vc4 import json_template, grid_size
+
+ run_ctt(json_output, directory, config, log_output, json_template, grid_size, target)
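+
+# Illustrative invocation (directory and file names assumed) for tuning a PiSP
+# platform rather than the default vc4:
+#
+#     ./ctt.py -i calibration_images/ -o sensor_pisp.json -t pisp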
diff --git a/utils/raspberrypi/ctt/ctt_alsc.py b/utils/raspberrypi/ctt/ctt_alsc.py
index e51d6931..5d8b2ced 100644
--- a/utils/raspberrypi/ctt/ctt_alsc.py
+++ b/utils/raspberrypi/ctt/ctt_alsc.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_alsc.py - camera tuning tool for ALSC (auto lens shading correction)
+# camera tuning tool for ALSC (auto lens shading correction)
from ctt_image_load import *
import matplotlib.pyplot as plt
@@ -13,8 +13,9 @@ from mpl_toolkits.mplot3d import Axes3D
"""
perform alsc calibration on a set of images
"""
-def alsc_all(Cam, do_alsc_colour, plot):
+def alsc_all(Cam, do_alsc_colour, plot, grid_size=(16, 12), max_gain=8.0):
imgs_alsc = Cam.imgs_alsc
+ grid_w, grid_h = grid_size
"""
create list of colour temperatures and associated calibration tables
"""
@@ -23,7 +24,7 @@ def alsc_all(Cam, do_alsc_colour, plot):
list_cb = []
list_cg = []
for Img in imgs_alsc:
- col, cr, cb, cg, size = alsc(Cam, Img, do_alsc_colour, plot)
+ col, cr, cb, cg, size = alsc(Cam, Img, do_alsc_colour, plot, grid_size=grid_size, max_gain=max_gain)
list_col.append(col)
list_cr.append(cr)
list_cb.append(cb)
@@ -68,11 +69,12 @@ def alsc_all(Cam, do_alsc_colour, plot):
t_b = np.where((100*t_b) % 1 >= 0.95, t_b-0.001, t_b)
t_r = np.round(t_r, 3)
t_b = np.round(t_b, 3)
- r_corners = (t_r[0], t_r[15], t_r[-1], t_r[-16])
- b_corners = (t_b[0], t_b[15], t_b[-1], t_b[-16])
- r_cen = t_r[5*16+7]+t_r[5*16+8]+t_r[6*16+7]+t_r[6*16+8]
+ r_corners = (t_r[0], t_r[grid_w - 1], t_r[-1], t_r[-grid_w])
+ b_corners = (t_b[0], t_b[grid_w - 1], t_b[-1], t_b[-grid_w])
+    middle_pos = (grid_h // 2 - 1) * grid_w + grid_w // 2 - 1
+ r_cen = t_r[middle_pos]+t_r[middle_pos + 1]+t_r[middle_pos + grid_w]+t_r[middle_pos + grid_w + 1]
r_cen = round(r_cen/4, 3)
- b_cen = t_b[5*16+7]+t_b[5*16+8]+t_b[6*16+7]+t_b[6*16+8]
+ b_cen = t_b[middle_pos]+t_b[middle_pos + 1]+t_b[middle_pos + grid_w]+t_b[middle_pos + grid_w + 1]
b_cen = round(b_cen/4, 3)
Cam.log += '\nRed table corners: {}'.format(r_corners)
Cam.log += '\nRed table centre: {}'.format(r_cen)
@@ -116,43 +118,48 @@ def alsc_all(Cam, do_alsc_colour, plot):
"""
calculate g/r and g/b for 32x32 points arranged in a grid for a single image
"""
-def alsc(Cam, Img, do_alsc_colour, plot=False):
+def alsc(Cam, Img, do_alsc_colour, plot=False, grid_size=(16, 12), max_gain=8.0):
Cam.log += '\nProcessing image: ' + Img.name
+ grid_w, grid_h = grid_size
"""
get channel in correct order
"""
channels = [Img.channels[i] for i in Img.order]
"""
calculate size of single rectangle.
- -(-(w-1)//32) is a ceiling division. w-1 is to deal robustly with the case
- where w is a multiple of 32.
+ The divisions here must ensure the final row/column of cells has a non-zero number of
+ pixels.
"""
w, h = Img.w/2, Img.h/2
- dx, dy = int(-(-(w-1)//16)), int(-(-(h-1)//12))
+ dx, dy = int((w - 1) // (grid_w - 1)), int((h - 1) // (grid_h - 1))
+
"""
average the green channels into one
"""
av_ch_g = np.mean((channels[1:3]), axis=0)
if do_alsc_colour:
"""
- obtain 16x12 grid of intensities for each channel and subtract black level
+ obtain grid_w x grid_h grid of intensities for each channel and subtract black level
"""
- g = get_16x12_grid(av_ch_g, dx, dy) - Img.blacklevel_16
- r = get_16x12_grid(channels[0], dx, dy) - Img.blacklevel_16
- b = get_16x12_grid(channels[3], dx, dy) - Img.blacklevel_16
+ g = get_grid(av_ch_g, dx, dy, grid_size) - Img.blacklevel_16
+ r = get_grid(channels[0], dx, dy, grid_size) - Img.blacklevel_16
+ b = get_grid(channels[3], dx, dy, grid_size) - Img.blacklevel_16
"""
calculate ratios as 32 bit in order to be supported by medianBlur function
"""
- cr = np.reshape(g/r, (12, 16)).astype('float32')
- cb = np.reshape(g/b, (12, 16)).astype('float32')
- cg = np.reshape(1/g, (12, 16)).astype('float32')
+ cr = np.reshape(g/r, (grid_h, grid_w)).astype('float32')
+ cb = np.reshape(g/b, (grid_h, grid_w)).astype('float32')
+ cg = np.reshape(1/g, (grid_h, grid_w)).astype('float32')
"""
median blur to remove peaks and save as float 64
"""
cr = cv2.medianBlur(cr, 3).astype('float64')
+ cr = cr/np.min(cr) # gain tables are easier for humans to read if the minimum is 1.0
cb = cv2.medianBlur(cb, 3).astype('float64')
+ cb = cb/np.min(cb)
cg = cv2.medianBlur(cg, 3).astype('float64')
cg = cg/np.min(cg)
+ cg = [min(v, max_gain) for v in cg.flatten()] # never exceed the max luminance gain
"""
debugging code showing 2D surface plot of vignetting. Quite useful for
@@ -164,7 +171,7 @@ def alsc(Cam, Img, do_alsc_colour, plot=False):
"""
note Y is plotted as -Y so plot has same axes as image
"""
- X, Y = np.meshgrid(range(16), range(12))
+ X, Y = np.meshgrid(range(grid_w), range(grid_h))
ha.plot_surface(X, -Y, cr, cmap=cm.coolwarm, linewidth=0)
ha.set_title('ALSC Plot\nImg: {}\n\ncr'.format(Img.str))
hb = hf.add_subplot(312, projection='3d')
@@ -176,21 +183,22 @@ def alsc(Cam, Img, do_alsc_colour, plot=False):
# print(Img.str)
plt.show()
- return Img.col, cr.flatten(), cb.flatten(), cg.flatten(), (w, h, dx, dy)
+ return Img.col, cr.flatten(), cb.flatten(), cg, (w, h, dx, dy)
else:
"""
only perform calculations for luminance shading
"""
- g = get_16x12_grid(av_ch_g, dx, dy) - Img.blacklevel_16
- cg = np.reshape(1/g, (12, 16)).astype('float32')
+ g = get_grid(av_ch_g, dx, dy, grid_size) - Img.blacklevel_16
+ cg = np.reshape(1/g, (grid_h, grid_w)).astype('float32')
cg = cv2.medianBlur(cg, 3).astype('float64')
cg = cg/np.min(cg)
+ cg = [min(v, max_gain) for v in cg.flatten()] # never exceed the max luminance gain
if plot:
hf = plt.figure(figssize=(8, 8))
ha = hf.add_subplot(1, 1, 1, projection='3d')
- X, Y = np.meashgrid(range(16), range(12))
+            X, Y = np.meshgrid(range(grid_w), range(grid_h))
ha.plot_surface(X, -Y, cg, cmap=cm.coolwarm, linewidth=0)
ha.set_title('ALSC Plot (Luminance only!)\nImg: {}\n\ncg').format(Img.str)
plt.show()
@@ -199,21 +207,22 @@ def alsc(Cam, Img, do_alsc_colour, plot=False):
"""
-Compresses channel down to a 16x12 grid
+Compresses channel down to a grid of the requested size
"""
-def get_16x12_grid(chan, dx, dy):
+def get_grid(chan, dx, dy, grid_size):
+ grid_w, grid_h = grid_size
grid = []
"""
    since the right and bottom borders will not necessarily have rectangles of
    dimension dx x dy, the final iteration has to be handled separately.
"""
- for i in range(11):
- for j in range(15):
+ for i in range(grid_h - 1):
+ for j in range(grid_w - 1):
grid.append(np.mean(chan[dy*i:dy*(1+i), dx*j:dx*(1+j)]))
- grid.append(np.mean(chan[dy*i:dy*(1+i), 15*dx:]))
- for j in range(15):
- grid.append(np.mean(chan[11*dy:, dx*j:dx*(1+j)]))
- grid.append(np.mean(chan[11*dy:, 15*dx:]))
+ grid.append(np.mean(chan[dy*i:dy*(1+i), (grid_w - 1)*dx:]))
+ for j in range(grid_w - 1):
+ grid.append(np.mean(chan[(grid_h - 1)*dy:, dx*j:dx*(1+j)]))
+ grid.append(np.mean(chan[(grid_h - 1)*dy:, (grid_w - 1)*dx:]))
"""
return as np.array, ready for further manipulation
"""
@@ -223,7 +232,7 @@ def get_16x12_grid(chan, dx, dy):
"""
obtains sigmas for red and blue, effectively a measure of the 'error'
"""
-def get_sigma(Cam, cal_cr_list, cal_cb_list):
+def get_sigma(Cam, cal_cr_list, cal_cb_list, grid_size):
Cam.log += '\nCalculating sigmas'
"""
provided colour alsc tables were generated for two different colour
@@ -241,8 +250,8 @@ def get_sigma(Cam, cal_cr_list, cal_cb_list):
sigma_rs = []
sigma_bs = []
for i in range(len(cts)-1):
- sigma_rs.append(calc_sigma(cal_cr_list[i]['table'], cal_cr_list[i+1]['table']))
- sigma_bs.append(calc_sigma(cal_cb_list[i]['table'], cal_cb_list[i+1]['table']))
+ sigma_rs.append(calc_sigma(cal_cr_list[i]['table'], cal_cr_list[i+1]['table'], grid_size))
+ sigma_bs.append(calc_sigma(cal_cb_list[i]['table'], cal_cb_list[i+1]['table'], grid_size))
Cam.log += '\nColour temperature interval {} - {} K'.format(cts[i], cts[i+1])
Cam.log += '\nSigma red: {}'.format(sigma_rs[-1])
Cam.log += '\nSigma blue: {}'.format(sigma_bs[-1])
@@ -263,12 +272,13 @@ def get_sigma(Cam, cal_cr_list, cal_cb_list):
"""
calculate sigma from two adjacent gain tables
"""
-def calc_sigma(g1, g2):
+def calc_sigma(g1, g2, grid_size):
+ grid_w, grid_h = grid_size
"""
    reshape into a grid_h x grid_w matrix
"""
- g1 = np.reshape(g1, (12, 16))
- g2 = np.reshape(g2, (12, 16))
+ g1 = np.reshape(g1, (grid_h, grid_w))
+ g2 = np.reshape(g2, (grid_h, grid_w))
"""
apply gains to gain table
"""
@@ -280,8 +290,8 @@ def calc_sigma(g1, g2):
neighbours, then append to list
"""
diffs = []
- for i in range(10):
- for j in range(14):
+ for i in range(grid_h - 2):
+ for j in range(grid_w - 2):
"""
note indexing is incremented by 1 since all patches on borders are
not counted
diff --git a/utils/raspberrypi/ctt/ctt_awb.py b/utils/raspberrypi/ctt/ctt_awb.py
index bf45e54d..4af1fe41 100644
--- a/utils/raspberrypi/ctt/ctt_awb.py
+++ b/utils/raspberrypi/ctt/ctt_awb.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_awb.py - camera tuning tool for AWB
+# camera tuning tool for AWB
from ctt_image_load import *
import matplotlib.pyplot as plt
@@ -13,7 +13,7 @@ from scipy.optimize import fmin
"""
obtain piecewise linear approximation for colour curve
"""
-def awb(Cam, cal_cr_list, cal_cb_list, plot):
+def awb(Cam, cal_cr_list, cal_cb_list, plot, grid_size):
imgs = Cam.imgs
"""
condense alsc calibration tables into one dictionary
@@ -43,7 +43,7 @@ def awb(Cam, cal_cr_list, cal_cb_list, plot):
Note: if alsc is disabled then colour_cals will be set to None and the
function will just return the greyscale patches
"""
- r_patchs, b_patchs, g_patchs = get_alsc_patches(Img, colour_cals)
+ r_patchs, b_patchs, g_patchs = get_alsc_patches(Img, colour_cals, grid_size=grid_size)
"""
calculate ratio of r, b to g
"""
@@ -293,12 +293,13 @@ def awb(Cam, cal_cr_list, cal_cb_list, plot):
"""
obtain greyscale patches and perform alsc colour correction
"""
-def get_alsc_patches(Img, colour_cals, grey=True):
+def get_alsc_patches(Img, colour_cals, grey=True, grid_size=(16, 12)):
"""
get patch centre coordinates, image colour and the actual
patches for each channel, remembering to subtract blacklevel
If grey then only greyscale patches considered
"""
+ grid_w, grid_h = grid_size
if grey:
cen_coords = Img.cen_coords[3::4]
col = Img.col
@@ -345,12 +346,12 @@ def get_alsc_patches(Img, colour_cals, grey=True):
bef_tabs = np.array(colour_cals[bef])
aft_tabs = np.array(colour_cals[aft])
col_tabs = (bef_tabs*db + aft_tabs*da)/(da+db)
- col_tabs = np.reshape(col_tabs, (2, 12, 16))
+ col_tabs = np.reshape(col_tabs, (2, grid_h, grid_w))
"""
calculate dx, dy used to calculate alsc table
"""
w, h = Img.w/2, Img.h/2
- dx, dy = int(-(-(w-1)//16)), int(-(-(h-1)//12))
+ dx, dy = int(-(-(w-1)//grid_w)), int(-(-(h-1)//grid_h))
"""
make list of pairs of gains for each patch by selecting the correct value
in alsc colour calibration table
diff --git a/utils/raspberrypi/ctt/ctt_cac.py b/utils/raspberrypi/ctt/ctt_cac.py
new file mode 100644
index 00000000..a1183989
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_cac.py
@@ -0,0 +1,250 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2023, Raspberry Pi Ltd
+#
+# ctt_cac.py - CAC (Chromatic Aberration Correction) tuning tool
+
+from PIL import Image
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib import cm
+
+from ctt_dots_locator import find_dots_locations
+
+
+# This is the wrapper file that creates a JSON entry for you to append
+# to your camera tuning file.
+# It calculates the chromatic aberration at different points throughout
+# the image and uses that to produce a matrix that can then be used
+# in the camera tuning files to correct this aberration.
+
+
+def pprint_array(array):
+    # Function to print the array in a tidier format
+    output = ""
+    for i in range(len(array)):
+        for j in range(len(array[0])):
+            output += str(round(array[i, j], 2)) + ", "
+        # Add the necessary indentation to the array
+        output += "\n        "
+    # Cut off the trailing comma and indentation (nicely formats it)
+    return output[:-22]
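+
+# pprint_array is used on the 9x9 CAC grids produced by this tool: each row of
+# the array is printed as comma-separated values (e.g. "0.13, -0.07, ..."), one
+# indented line per row, with the trailing indentation trimmed so the text can
+# be pasted straight into a JSON tuning file.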
+
+
+def plot_shifts(red_shifts, blue_shifts):
+ # If users want, they can pass a command line option to show the shifts on a graph
+ # Can be useful to check that the functions are all working, and that the sample
+ # images are doing the right thing
+ Xs = np.array(red_shifts)[:, 0]
+ Ys = np.array(red_shifts)[:, 1]
+ Zs = np.array(red_shifts)[:, 2]
+ Zs2 = np.array(red_shifts)[:, 3]
+ Zs3 = np.array(blue_shifts)[:, 2]
+ Zs4 = np.array(blue_shifts)[:, 3]
+
+ fig, axs = plt.subplots(2, 2)
+ ax = fig.add_subplot(2, 2, 1, projection='3d')
+ ax.scatter(Xs, Ys, Zs, cmap=cm.jet, linewidth=0)
+ ax.set_title('Red X Shift')
+ ax = fig.add_subplot(2, 2, 2, projection='3d')
+ ax.scatter(Xs, Ys, Zs2, cmap=cm.jet, linewidth=0)
+ ax.set_title('Red Y Shift')
+ ax = fig.add_subplot(2, 2, 3, projection='3d')
+ ax.scatter(Xs, Ys, Zs3, cmap=cm.jet, linewidth=0)
+ ax.set_title('Blue X Shift')
+ ax = fig.add_subplot(2, 2, 4, projection='3d')
+ ax.scatter(Xs, Ys, Zs4, cmap=cm.jet, linewidth=0)
+ ax.set_title('Blue Y Shift')
+ fig.tight_layout()
+ plt.show()
+
+
+def shifts_to_yaml(red_shift, blue_shift, image_dimensions, output_grid_size=9):
+ # Convert the shifts to a numpy array for easier handling and initialise other variables
+ red_shifts = np.array(red_shift)
+ blue_shifts = np.array(blue_shift)
+ # create a grid that's smaller than the output grid, which we then interpolate from to get the output values
+ xrgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ xbgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ yrgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ ybgrid = np.zeros((output_grid_size - 1, output_grid_size - 1))
+
+ xrsgrid = []
+ xbsgrid = []
+ yrsgrid = []
+ ybsgrid = []
+ xg = np.zeros((output_grid_size - 1, output_grid_size - 1))
+ yg = np.zeros((output_grid_size - 1, output_grid_size - 1))
+
+    # Format the grids - numpy doesn't work well for this, as it wants a nice
+    # uniformly spaced grid, which we can't guarantee we have yet, hence the rather manual setup
+ for x in range(output_grid_size - 1):
+ xrsgrid.append([])
+ yrsgrid.append([])
+ xbsgrid.append([])
+ ybsgrid.append([])
+ for y in range(output_grid_size - 1):
+ xrsgrid[x].append([])
+ yrsgrid[x].append([])
+ xbsgrid[x].append([])
+ ybsgrid[x].append([])
+
+ image_size = (image_dimensions[0], image_dimensions[1])
+ gridxsize = image_size[0] / (output_grid_size - 1)
+ gridysize = image_size[1] / (output_grid_size - 1)
+
+    # Iterate through each dot and its shift values, and put these into the correct grid location
+ for red_shift in red_shifts:
+ xgridloc = int(red_shift[0] / gridxsize)
+ ygridloc = int(red_shift[1] / gridysize)
+ xrsgrid[xgridloc][ygridloc].append(red_shift[2])
+ yrsgrid[xgridloc][ygridloc].append(red_shift[3])
+
+ for blue_shift in blue_shifts:
+ xgridloc = int(blue_shift[0] / gridxsize)
+ ygridloc = int(blue_shift[1] / gridysize)
+ xbsgrid[xgridloc][ygridloc].append(blue_shift[2])
+ ybsgrid[xgridloc][ygridloc].append(blue_shift[3])
+
+ # Now calculate the average pixel shift for each square in the grid
+ grid_incomplete = False
+ for x in range(output_grid_size - 1):
+ for y in range(output_grid_size - 1):
+ if xrsgrid[x][y]:
+ xrgrid[x, y] = np.mean(xrsgrid[x][y])
+ else:
+ grid_incomplete = True
+ if yrsgrid[x][y]:
+ yrgrid[x, y] = np.mean(yrsgrid[x][y])
+ else:
+ grid_incomplete = True
+ if xbsgrid[x][y]:
+ xbgrid[x, y] = np.mean(xbsgrid[x][y])
+ else:
+ grid_incomplete = True
+ if ybsgrid[x][y]:
+ ybgrid[x, y] = np.mean(ybsgrid[x][y])
+ else:
+ grid_incomplete = True
+
+ if grid_incomplete:
+ raise RuntimeError("\nERROR: CAC measurements do not span the image!"
+ "\nConsider using improved CAC images, or remove them entirely.\n")
+
+ # Next, we start to interpolate the central points of the grid that gets passed to the tuning file
+ input_grids = np.array([xrgrid, yrgrid, xbgrid, ybgrid])
+ output_grids = np.zeros((4, output_grid_size, output_grid_size))
+
+ # Interpolate the centre of the grid
+ output_grids[:, 1:-1, 1:-1] = (input_grids[:, 1:, :-1] + input_grids[:, 1:, 1:] + input_grids[:, :-1, 1:] + input_grids[:, :-1, :-1]) / 4
+
+ # Edge cases:
+ output_grids[:, 1:-1, 0] = ((input_grids[:, :-1, 0] + input_grids[:, 1:, 0]) / 2 - output_grids[:, 1:-1, 1]) * 2 + output_grids[:, 1:-1, 1]
+    output_grids[:, 1:-1, -1] = ((input_grids[:, :-1, -1] + input_grids[:, 1:, -1]) / 2 - output_grids[:, 1:-1, -2]) * 2 + output_grids[:, 1:-1, -2]
+ output_grids[:, 0, 1:-1] = ((input_grids[:, 0, :-1] + input_grids[:, 0, 1:]) / 2 - output_grids[:, 1, 1:-1]) * 2 + output_grids[:, 1, 1:-1]
+    output_grids[:, -1, 1:-1] = ((input_grids[:, -1, :-1] + input_grids[:, -1, 1:]) / 2 - output_grids[:, -2, 1:-1]) * 2 + output_grids[:, -2, 1:-1]
+
+ # Corner Cases:
+ output_grids[:, 0, 0] = (output_grids[:, 0, 1] - output_grids[:, 1, 1]) + (output_grids[:, 1, 0] - output_grids[:, 1, 1]) + output_grids[:, 1, 1]
+ output_grids[:, 0, -1] = (output_grids[:, 0, -2] - output_grids[:, 1, -2]) + (output_grids[:, 1, -1] - output_grids[:, 1, -2]) + output_grids[:, 1, -2]
+ output_grids[:, -1, 0] = (output_grids[:, -1, 1] - output_grids[:, -2, 1]) + (output_grids[:, -2, 0] - output_grids[:, -2, 1]) + output_grids[:, -2, 1]
+ output_grids[:, -1, -1] = (output_grids[:, -2, -1] - output_grids[:, -2, -2]) + (output_grids[:, -1, -2] - output_grids[:, -2, -2]) + output_grids[:, -2, -2]
+
+    # Below, we swap the x and the y coordinates, and also multiply by a factor of -1.
+    # This is due to the PiSP (standard) dimensions being flipped in comparison to
+    # PIL image coordinate directions, hence xr -> yr. Also, the shifts calculated are colour shifts,
+    # whereas the PiSP block asks for the values it should shift by (hence the * -1, to convert from a colour shift to a pixel shift)
+
+ output_grid_yr, output_grid_xr, output_grid_yb, output_grid_xb = output_grids * -1
+ return output_grid_xr, output_grid_yr, output_grid_xb, output_grid_yb
+
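+# A worked example of the grid geometry above, with assumed numbers: for a
+# 4056x3040 sensor and the default output_grid_size of 9, the dot shifts are
+# averaged into an 8x8 grid of cells, each covering 3040/8 = 380 rows and
+# 4056/8 = 507 columns (image_dimensions here is (height, width)). Those 8x8
+# cell averages are then interpolated, and extrapolated at the borders, out to
+# the 9x9 grid of values that the CAC block expects.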
+
+def analyse_dot(dot, dot_location=(0, 0)):
+ # Scan through the dot, calculate the centroid of each colour channel by doing:
+ # pixel channel brightness * distance from top left corner
+ # Sum these, and divide by the sum of each channel's brightnesses to get a centroid for each channel
+ red_channel = np.array(dot)[:, :, 0]
+ y_num_pixels = len(red_channel[0])
+ x_num_pixels = len(red_channel)
+ yred_weight = np.sum(np.dot(red_channel, np.arange(y_num_pixels)))
+ xred_weight = np.sum(np.dot(np.arange(x_num_pixels), red_channel))
+ red_sum = np.sum(red_channel)
+
+ green_channel = np.array(dot)[:, :, 1]
+ ygreen_weight = np.sum(np.dot(green_channel, np.arange(y_num_pixels)))
+ xgreen_weight = np.sum(np.dot(np.arange(x_num_pixels), green_channel))
+ green_sum = np.sum(green_channel)
+
+ blue_channel = np.array(dot)[:, :, 2]
+ yblue_weight = np.sum(np.dot(blue_channel, np.arange(y_num_pixels)))
+ xblue_weight = np.sum(np.dot(np.arange(x_num_pixels), blue_channel))
+ blue_sum = np.sum(blue_channel)
+
+ # We return this structure. It contains 2 arrays that contain:
+ # the locations of the dot center, along with the channel shifts in the x and y direction:
+ # [ [red_center_x, red_center_y, red_x_shift, red_y_shift], [blue_center_x, blue_center_y, blue_x_shift, blue_y_shift] ]
+
+    return [[int(dot_location[0]) + int(len(dot) / 2),
+             int(dot_location[1]) + int(len(dot[0]) / 2),
+             xred_weight / red_sum - xgreen_weight / green_sum,
+             yred_weight / red_sum - ygreen_weight / green_sum],
+            [int(dot_location[0]) + int(len(dot) / 2),
+             int(dot_location[1]) + int(len(dot[0]) / 2),
+             xblue_weight / blue_sum - xgreen_weight / green_sum,
+             yblue_weight / blue_sum - ygreen_weight / green_sum]]
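+
+# For instance (illustrative numbers only), a dot centred at (512, 384) whose
+# red channel sits ~0.2 px right/0.1 px down of green, and whose blue channel
+# sits ~0.15 px left/0.05 px up, would come back as:
+#
+#     [[512, 384, 0.2, 0.1], [512, 384, -0.15, -0.05]]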
+
+
+def cac(Cam):
+ filelist = Cam.imgs_cac
+
+ Cam.log += '\nCAC analysing files: {}'.format(str(filelist))
+ np.set_printoptions(precision=3)
+ np.set_printoptions(suppress=True)
+
+ # Create arrays to hold all the dots data and their colour offsets
+ red_shift = [] # Format is: [[Dot Center X, Dot Center Y, x shift, y shift]]
+ blue_shift = []
+ # Iterate through the files
+    # Multiple files are recommended to average out the lens aberration through rotations
+ for file in filelist:
+ Cam.log += '\nCAC processing file'
+ print("\n Processing file")
+ # Read the raw RGB values
+ rgb = file.rgb
+        image_size = [file.h, file.w]  # Image size as (height, width)
+        # The rgb values need reshaping to a (height, width, 3) array to be worked with easily
+        rgb_image = rgb.reshape((image_size[0], image_size[1], 3))
+
+ # Pass the RGB image through to the dots locating program
+ # Returns an array of the dots (colour rectangles around the dots), and an array of their locations
+ print("Finding dots")
+ Cam.log += '\nFinding dots'
+ dots, dots_locations = find_dots_locations(rgb_image)
+
+ # Now, analyse each dot. Work out the centroid of each colour channel, and use that to work out
+ # by how far the chromatic aberration has shifted each channel
+ Cam.log += '\nDots found: {}'.format(str(len(dots)))
+ print('Dots found: ' + str(len(dots)))
+
+ for dot, dot_location in zip(dots, dots_locations):
+ if len(dot) > 0:
+ if (dot_location[0] > 0) and (dot_location[1] > 0):
+ ret = analyse_dot(dot, dot_location)
+ red_shift.append(ret[0])
+ blue_shift.append(ret[1])
+
+    # Take our arrays of red and blue shifts and their locations, and interpolate them into
+    # the 9x9 grid that the CAC block expects, ready to be added to the camera
+    # tuning file
+ print("\nCreating output grid")
+ Cam.log += '\nCreating output grid'
+ try:
+ rx, ry, bx, by = shifts_to_yaml(red_shift, blue_shift, image_size)
+ except RuntimeError as e:
+ print(str(e))
+ Cam.log += "\nCAC correction failed! CAC will not be enabled."
+ return {}
+
+ print("CAC correction complete!")
+ Cam.log += '\nCAC correction complete!'
+
+ # Give the JSON dict back to the main ctt program
+    return {"strength": 1.0,
+            "lut_rx": list(rx.round(2).reshape(81)),
+            "lut_ry": list(ry.round(2).reshape(81)),
+            "lut_bx": list(bx.round(2).reshape(81)),
+            "lut_by": list(by.round(2).reshape(81))}
diff --git a/utils/raspberrypi/ctt/ctt_ccm.py b/utils/raspberrypi/ctt/ctt_ccm.py
index a09bfd09..07c943a8 100644
--- a/utils/raspberrypi/ctt/ctt_ccm.py
+++ b/utils/raspberrypi/ctt/ctt_ccm.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_ccm.py - camera tuning tool for CCM (colour correction matrix)
+# camera tuning tool for CCM (colour correction matrix)
from ctt_image_load import *
from ctt_awb import get_alsc_patches
Finds colour correction matrices for a list of images
"""
-def ccm(Cam, cal_cr_list, cal_cb_list):
+def ccm(Cam, cal_cr_list, cal_cb_list, grid_size):
global matrix_selection_types, typenum
imgs = Cam.imgs
"""
@@ -133,9 +133,7 @@ def ccm(Cam, cal_cr_list, cal_cb_list):
    Note: if alsc is disabled then colour_cals will be set to None and
    the function will simply return the macbeth patches
"""
- r, b, g = get_alsc_patches(Img, colour_cals, grey=False)
- # 256 values for each patch of sRGB values
-
+ r, b, g = get_alsc_patches(Img, colour_cals, grey=False, grid_size=grid_size)
"""
do awb
Note: awb is done by measuring the macbeth chart in the image, rather
diff --git a/utils/raspberrypi/ctt/ctt_config_example.json b/utils/raspberrypi/ctt/ctt_config_example.json
index c7f90761..1105862c 100644
--- a/utils/raspberrypi/ctt/ctt_config_example.json
+++ b/utils/raspberrypi/ctt/ctt_config_example.json
@@ -3,7 +3,8 @@
"plot": [],
"alsc": {
"do_alsc_colour": 1,
- "luminance_strength": 0.5
+ "luminance_strength": 0.8,
+ "max_gain": 8.0
},
"awb": {
"greyworld": 0
@@ -13,4 +14,4 @@
"small": 0,
"show": 0
}
-} \ No newline at end of file
+}
diff --git a/utils/raspberrypi/ctt/ctt_dots_locator.py b/utils/raspberrypi/ctt/ctt_dots_locator.py
new file mode 100644
index 00000000..4945c04b
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_dots_locator.py
@@ -0,0 +1,118 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2023, Raspberry Pi Ltd
+#
+# ctt_dots_locator.py - Used by the CAC algorithm to convert an image into a set of dots
+
+'''
+This file takes the black and white version of the image, along with
+the colour version. It then locates the black dots in the image by
+thresholding dark pixels.
+In a rather fun way, the algorithm bounces around the thresholded area in a random path.
+We then use the maximum and minimum of these paths to determine the dot shape and size.
+This info is then used to return coloured dots and locations back to the main file.
+'''
+
+import numpy as np
+import random
+from PIL import Image, ImageEnhance, ImageFilter
+
+
+def find_dots_locations(rgb_image, color_threshold=100, dots_edge_avoid=75, image_edge_avoid=10, search_path_length=500, grid_scan_step_size=10, logfile=None):
+ # Initialise some starting variables
+ pixels = Image.fromarray(rgb_image)
+ pixels = pixels.convert("L")
+ enhancer = ImageEnhance.Contrast(pixels)
+ im_output = enhancer.enhance(1.4)
+ # We smooth it slightly to make it easier for the dot recognition program to locate the dots
+ im_output = im_output.filter(ImageFilter.GaussianBlur(radius=2))
+ bw_image = np.array(im_output)
+
+ location = [0, 0]
+ dots = []
+ dots_location = []
+    # The program stays away from the image edges - we don't want a dot that is
+    # half a circle, as the centroids would all be wrong
+ for x in range(dots_edge_avoid, len(bw_image) - dots_edge_avoid, grid_scan_step_size):
+ for y in range(dots_edge_avoid, len(bw_image[0]) - dots_edge_avoid, grid_scan_step_size):
+ location = [x, y]
+ scrap_dot = False # A variable used to make sure that this is a valid dot
+ if (bw_image[location[0], location[1]] < color_threshold) and not (scrap_dot):
+ heading = "south" # Define a starting direction to move in
+ coords = []
+ for i in range(search_path_length): # Creates a path of length `search_path_length`. This turns out to always be enough to work out the rough shape of the dot.
+                    # Now make sure that the thresholded area doesn't come within image_edge_avoid pixels of the edge of the image, which ensures we capture all the CA
+ if ((image_edge_avoid < location[0] < len(bw_image) - image_edge_avoid) and (image_edge_avoid < location[1] < len(bw_image[0]) - image_edge_avoid)) and not (scrap_dot):
+ if heading == "south":
+ if bw_image[location[0] + 1, location[1]] < color_threshold:
+ # Here, notice it does not go south, but actually goes southeast
+ # This is crucial in ensuring that we make our way around the majority of the dot
+ location[0] = location[0] + 1
+ location[1] = location[1] + 1
+ heading = "south"
+ else:
+ # This happens when we reach a thresholded edge. We now randomly change direction and keep searching
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "west"
+ if dir == 2:
+ heading = "east"
+
+ if heading == "east":
+ if bw_image[location[0], location[1] + 1] < color_threshold:
+ location[1] = location[1] + 1
+ heading = "east"
+ else:
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "north"
+ if dir == 2:
+ heading = "south"
+
+ if heading == "west":
+ if bw_image[location[0], location[1] - 1] < color_threshold:
+ location[1] = location[1] - 1
+ heading = "west"
+ else:
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "north"
+ if dir == 2:
+ heading = "south"
+
+ if heading == "north":
+ if bw_image[location[0] - 1, location[1]] < color_threshold:
+ location[0] = location[0] - 1
+ heading = "north"
+ else:
+ dir = random.randint(1, 2)
+ if dir == 1:
+ heading = "west"
+ if dir == 2:
+ heading = "east"
+ # Log where our particle travels across the dot
+ coords.append([location[0], location[1]])
+ else:
+ scrap_dot = True # We just don't have enough space around the dot, discard this one, and move on
+ if not scrap_dot:
+                    # Get the size of the square surrounding the dot
+ x_coords = np.array(coords)[:, 0]
+ y_coords = np.array(coords)[:, 1]
+ hsquaresize = max(list(x_coords)) - min(list(x_coords))
+ vsquaresize = max(list(y_coords)) - min(list(y_coords))
+ # Create the bounding coordinates of the rectangle surrounding the dot
+                    # The program pads the dot by roughly half its size on each side to ensure we capture all the colour fringing
+ extra_space_factor = 0.45
+ top_left_x = (min(list(x_coords)) - int(hsquaresize * extra_space_factor))
+ btm_right_x = max(list(x_coords)) + int(hsquaresize * extra_space_factor)
+ top_left_y = (min(list(y_coords)) - int(vsquaresize * extra_space_factor))
+ btm_right_y = max(list(y_coords)) + int(vsquaresize * extra_space_factor)
+ # Overwrite the area of the dot to ensure we don't use it again
+ bw_image[top_left_x:btm_right_x, top_left_y:btm_right_y] = 255
+ # Add the color version of the dot to the list to send off, along with some coordinates.
+ dots.append(rgb_image[top_left_x:btm_right_x, top_left_y:btm_right_y])
+ dots_location.append([top_left_x, top_left_y])
+ else:
+ # Dot was too close to the image border to be useable
+ pass
+ return dots, dots_location
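+
+
+# A minimal usage sketch (assumed input; in the tuning tool proper the RGB array
+# comes from the postprocessed DNG):
+#
+#     rgb = np.asarray(Image.open("dots.png"))   # any H x W x 3 uint8 array
+#     dots, locations = find_dots_locations(rgb)
+#     for dot, (top, left) in zip(dots, locations):
+#         print("dot patch", dot.shape, "at row", top, "column", left)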
diff --git a/utils/raspberrypi/ctt/ctt_geq.py b/utils/raspberrypi/ctt/ctt_geq.py
index c45addcd..5a91ebb4 100644
--- a/utils/raspberrypi/ctt/ctt_geq.py
+++ b/utils/raspberrypi/ctt/ctt_geq.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_geq.py - camera tuning tool for GEQ (green equalisation)
+# camera tuning tool for GEQ (green equalisation)
from ctt_tools import *
import matplotlib.pyplot as plt
diff --git a/utils/raspberrypi/ctt/ctt_image_load.py b/utils/raspberrypi/ctt/ctt_image_load.py
index 310c5e88..531de328 100644
--- a/utils/raspberrypi/ctt/ctt_image_load.py
+++ b/utils/raspberrypi/ctt/ctt_image_load.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019-2020, Raspberry Pi Ltd
#
-# ctt_image_load.py - camera tuning tool image loading
+# camera tuning tool image loading
from ctt_tools import *
from ctt_macbeth_locator import *
@@ -350,6 +350,7 @@ def dng_load_image(Cam, im_str):
c2 = np.left_shift(raw_data[1::2, 0::2].astype(np.int64), shift)
c3 = np.left_shift(raw_data[1::2, 1::2].astype(np.int64), shift)
Img.channels = [c0, c1, c2, c3]
+ Img.rgb = raw_im.postprocess()
except Exception:
print("\nERROR: failed to load DNG file", im_str)
diff --git a/utils/raspberrypi/ctt/ctt_lux.py b/utils/raspberrypi/ctt/ctt_lux.py
index 70855e1b..46be1512 100644
--- a/utils/raspberrypi/ctt/ctt_lux.py
+++ b/utils/raspberrypi/ctt/ctt_lux.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_lux.py - camera tuning tool for lux level
+# camera tuning tool for lux level
from ctt_tools import *
diff --git a/utils/raspberrypi/ctt/ctt_macbeth_locator.py b/utils/raspberrypi/ctt/ctt_macbeth_locator.py
index 3e95df89..f22dbf31 100644
--- a/utils/raspberrypi/ctt/ctt_macbeth_locator.py
+++ b/utils/raspberrypi/ctt/ctt_macbeth_locator.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_macbeth_locator.py - camera tuning tool Macbeth chart locator
+# camera tuning tool Macbeth chart locator
from ctt_ransac import *
from ctt_tools import *
@@ -57,6 +57,10 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
"""
cor, mac, coords, msg = get_macbeth_chart(img, ref_data)
+ # Keep a list that will include this and any brightened up versions of
+ # the image for reuse.
+ all_images = [img]
+
"""
following bits of code tries to fix common problems with simple
techniques.
@@ -71,6 +75,7 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
if cor < 0.75:
a = 2
img_br = cv2.convertScaleAbs(img, alpha=a, beta=0)
+ all_images.append(img_br)
cor_b, mac_b, coords_b, msg_b = get_macbeth_chart(img_br, ref_data)
if cor_b > cor:
cor, mac, coords, msg = cor_b, mac_b, coords_b, msg_b
@@ -81,6 +86,7 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
if cor < 0.75:
a = 4
img_br = cv2.convertScaleAbs(img, alpha=a, beta=0)
+ all_images.append(img_br)
cor_b, mac_b, coords_b, msg_b = get_macbeth_chart(img_br, ref_data)
if cor_b > cor:
cor, mac, coords, msg = cor_b, mac_b, coords_b, msg_b
@@ -128,23 +134,26 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
h_inc = int(h/6)
"""
for each subselection, look for a macbeth chart
+ loop over this and any brightened up images that we made to increase the
+ likelihood of success
"""
- for i in range(3):
- for j in range(3):
- w_s, h_s = i*w_inc, j*h_inc
- img_sel = img[w_s:w_s+w_sel, h_s:h_s+h_sel]
- cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
- """
- if the correlation is better than the best then record the
- scale and current subselection at which macbeth chart was
- found. Also record the coordinates, macbeth chart and message.
- """
- if cor_ij > cor:
- cor = cor_ij
- mac, coords, msg = mac_ij, coords_ij, msg_ij
- ii, jj = i, j
- w_best, h_best = w_inc, h_inc
- d_best = 1
+ for img_br in all_images:
+ for i in range(3):
+ for j in range(3):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img_br[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ """
+ if the correlation is better than the best then record the
+ scale and current subselection at which macbeth chart was
+ found. Also record the coordinates, macbeth chart and message.
+ """
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 1
"""
scale 2
@@ -157,17 +166,19 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
h_sel = int(h/2)
w_inc = int(w/8)
h_inc = int(h/8)
- for i in range(5):
- for j in range(5):
- w_s, h_s = i*w_inc, j*h_inc
- img_sel = img[w_s:w_s+w_sel, h_s:h_s+h_sel]
- cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
- if cor_ij > cor:
- cor = cor_ij
- mac, coords, msg = mac_ij, coords_ij, msg_ij
- ii, jj = i, j
- w_best, h_best = w_inc, h_inc
- d_best = 2
+ # Again, loop over any brightened up images as well
+ for img_br in all_images:
+ for i in range(5):
+ for j in range(5):
+ w_s, h_s = i*w_inc, j*h_inc
+ img_sel = img_br[w_s:w_s+w_sel, h_s:h_s+h_sel]
+ cor_ij, mac_ij, coords_ij, msg_ij = get_macbeth_chart(img_sel, ref_data)
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, msg = mac_ij, coords_ij, msg_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = 2
"""
The following code checks for macbeth charts at even smaller scales. This
@@ -238,7 +249,7 @@ def find_macbeth(Cam, img, mac_config=(0, 0)):
print error or success message
"""
print(msg)
- Cam.log += '\n' + msg
+ Cam.log += '\n' + str(msg)
if msg == success_msg:
coords_fit = coords
Cam.log += '\nMacbeth chart vertices:\n'
@@ -606,7 +617,7 @@ def get_macbeth_chart(img, ref_data):
'\nNot enough squares found'
'\nPossible problems:\n'
'- Macbeth chart is occluded\n'
- '- Macbeth chart is too dark of bright\n'
+ '- Macbeth chart is too dark or bright\n'
)
ref_cents = np.array(ref_cents)
diff --git a/utils/raspberrypi/ctt/ctt_noise.py b/utils/raspberrypi/ctt/ctt_noise.py
index 3270bf34..0b18d83f 100644
--- a/utils/raspberrypi/ctt/ctt_noise.py
+++ b/utils/raspberrypi/ctt/ctt_noise.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_noise.py - camera tuning tool noise calibration
+# camera tuning tool noise calibration
from ctt_image_load import *
import matplotlib.pyplot as plt
diff --git a/utils/raspberrypi/ctt/ctt_pisp.py b/utils/raspberrypi/ctt/ctt_pisp.py
new file mode 100755
index 00000000..a59b053c
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_pisp.py
@@ -0,0 +1,805 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# ctt_pisp.py - camera tuning tool data for PiSP platforms
+
+
+json_template = {
+ "rpi.black_level": {
+ "black_level": 4096
+ },
+ "rpi.lux": {
+ "reference_shutter_speed": 10000,
+ "reference_gain": 1,
+ "reference_aperture": 1.0
+ },
+ "rpi.dpc": {
+ "strength": 1
+ },
+ "rpi.noise": {
+ },
+ "rpi.geq": {
+ },
+ "rpi.denoise":
+ {
+ "normal":
+ {
+ "sdn":
+ {
+ "deviation": 1.6,
+ "strength": 0.5,
+ "deviation2": 3.2,
+ "deviation_no_tdn": 3.2,
+ "strength_no_tdn": 0.75
+ },
+ "cdn":
+ {
+ "deviation": 200,
+ "strength": 0.3
+ },
+ "tdn":
+ {
+ "deviation": 0.8,
+ "threshold": 0.05
+ }
+ },
+ "hdr":
+ {
+ "sdn":
+ {
+ "deviation": 1.6,
+ "strength": 0.5,
+ "deviation2": 3.2,
+ "deviation_no_tdn": 3.2,
+ "strength_no_tdn": 0.75
+ },
+ "cdn":
+ {
+ "deviation": 200,
+ "strength": 0.3
+ },
+ "tdn":
+ {
+ "deviation": 1.3,
+ "threshold": 0.1
+ }
+ },
+ "night":
+ {
+ "sdn":
+ {
+ "deviation": 1.6,
+ "strength": 0.5,
+ "deviation2": 3.2,
+ "deviation_no_tdn": 3.2,
+ "strength_no_tdn": 0.75
+ },
+ "cdn":
+ {
+ "deviation": 200,
+ "strength": 0.3
+ },
+ "tdn":
+ {
+ "deviation": 1.3,
+ "threshold": 0.1
+ }
+ }
+ },
+ "rpi.awb": {
+ "priors": [
+ {"lux": 0, "prior": [2000, 1.0, 3000, 0.0, 13000, 0.0]},
+ {"lux": 800, "prior": [2000, 0.0, 6000, 2.0, 13000, 2.0]},
+ {"lux": 1500, "prior": [2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0]}
+ ],
+ "modes": {
+ "auto": {"lo": 2500, "hi": 7700},
+ "incandescent": {"lo": 2500, "hi": 3000},
+ "tungsten": {"lo": 3000, "hi": 3500},
+ "fluorescent": {"lo": 4000, "hi": 4700},
+ "indoor": {"lo": 3000, "hi": 5000},
+ "daylight": {"lo": 5500, "hi": 6500},
+ "cloudy": {"lo": 7000, "hi": 8000}
+ },
+ "bayes": 1
+ },
+ "rpi.agc":
+ {
+ "channels":
+ [
+ {
+ "comment": "Channel 0 is normal AGC",
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 10000, 30000, 60000, 66666 ],
+ "gain": [ 1.0, 1.5, 2.0, 4.0, 8.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 5000, 10000, 20000, 60000 ],
+ "gain": [ 1.0, 1.5, 2.0, 4.0, 8.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 10000, 30000, 60000, 90000, 120000 ],
+ "gain": [ 1.0, 1.5, 2.0, 4.0, 8.0, 12.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ }
+ ],
+ "highlight": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.8,
+ 1000, 0.8
+ ]
+ },
+ ],
+ "shadows": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.5,
+ "y_target":
+ [
+ 0, 0.17,
+ 1000, 0.17
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.165,
+ 10000, 0.17
+ ]
+ },
+ {
+ "comment": "Channel 1 is the HDR short channel",
+ "desaturate": 0,
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 20000, 60000 ],
+ "gain": [ 1.0, 1.0, 1.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 20000, 60000 ],
+ "gain": [ 1.0, 1.0, 1.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 20000, 60000 ],
+ "gain": [ 1.0, 1.0, 1.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.7,
+ 1000, 0.7
+ ]
+ },
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.2,
+ "y_target":
+ [
+ 0, 0.002,
+ 1000, 0.002
+ ]
+ }
+ ],
+ "highlight": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.7,
+ 1000, 0.7
+ ]
+ },
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.2,
+ "y_target":
+ [
+ 0, 0.002,
+ 1000, 0.002
+ ]
+ }
+ ],
+ "shadows": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.95,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.7,
+ 1000, 0.7
+ ]
+ },
+ {
+ "bound": "LOWER",
+ "q_lo": 0.0,
+ "q_hi": 0.2,
+ "y_target":
+ [
+ 0, 0.002,
+ 1000, 0.002
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.165,
+ 10000, 0.17
+ ]
+ },
+ {
+ "comment": "Channel 2 is the HDR long channel",
+ "desaturate": 0,
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 20000, 30000, 60000 ],
+ "gain": [ 1.0, 2.0, 4.0, 8.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 20000, 30000, 60000 ],
+ "gain": [ 1.0, 2.0, 4.0, 8.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 20000, 30000, 60000 ],
+ "gain": [ 1.0, 2.0, 4.0, 8.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ ],
+ "highlight": [
+ ],
+ "shadows": [
+ ]
+ },
+ "channel_constraints":
+ [
+ {
+ "bound": "UPPER",
+ "channel": 4,
+ "factor": 8
+ },
+ {
+ "bound": "LOWER",
+ "channel": 4,
+ "factor": 2
+ }
+ ],
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.165,
+ 10000, 0.17
+ ]
+ },
+ {
+ "comment": "Channel 3 is the night mode channel",
+ "base_ev": 0.33,
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 4, 4, 4, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 1,
+ 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1,
+ 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 1, 2, 3, 2, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 20000, 66666 ],
+ "gain": [ 1.0, 2.0, 4.0 ]
+ },
+ "short":
+ {
+ "shutter": [ 100, 20000, 33333 ],
+ "gain": [ 1.0, 2.0, 4.0 ]
+ },
+ "long":
+ {
+ "shutter": [ 100, 20000, 66666, 120000 ],
+ "gain": [ 1.0, 2.0, 4.0, 4.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ }
+ ],
+ "highlight": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.8,
+ 1000, 0.8
+ ]
+ }
+ ],
+ "shadows": [
+ {
+ "bound": "LOWER",
+ "q_lo": 0.98,
+ "q_hi": 1.0,
+ "y_target":
+ [
+ 0, 0.5,
+ 1000, 0.5
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16,
+ 1000, 0.16,
+ 10000, 0.17
+ ]
+ }
+ ]
+ },
+ "rpi.alsc": {
+ 'omega': 1.3,
+ 'n_iter': 100,
+ 'luminance_strength': 0.8,
+ },
+ "rpi.contrast": {
+ "ce_enable": 1,
+ "gamma_curve": [
+ 0, 0,
+ 1024, 5040,
+ 2048, 9338,
+ 3072, 12356,
+ 4096, 15312,
+ 5120, 18051,
+ 6144, 20790,
+ 7168, 23193,
+ 8192, 25744,
+ 9216, 27942,
+ 10240, 30035,
+ 11264, 32005,
+ 12288, 33975,
+ 13312, 35815,
+ 14336, 37600,
+ 15360, 39168,
+ 16384, 40642,
+ 18432, 43379,
+ 20480, 45749,
+ 22528, 47753,
+ 24576, 49621,
+ 26624, 51253,
+ 28672, 52698,
+ 30720, 53796,
+ 32768, 54876,
+ 36864, 57012,
+ 40960, 58656,
+ 45056, 59954,
+ 49152, 61183,
+ 53248, 62355,
+ 57344, 63419,
+ 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.ccm": {
+ },
+ "rpi.cac": {
+ },
+ "rpi.sharpen": {
+ "threshold": 0.25,
+ "limit": 1.0,
+ "strength": 1.0
+ },
+ "rpi.hdr":
+ {
+ "Off":
+ {
+ "cadence": [ 0 ]
+ },
+ "MultiExposureUnmerged":
+ {
+ "cadence": [ 1, 2 ],
+ "channel_map": { "short": 1, "long": 2 }
+ },
+ "SingleExposure":
+ {
+ "cadence": [1],
+ "channel_map": { "short": 1 },
+ "spatial_gain": 2.0,
+ "tonemap_enable": 1
+ },
+ "MultiExposure":
+ {
+ "cadence": [1, 2],
+ "channel_map": { "short": 1, "long": 2 },
+ "stitch_enable": 1,
+ "spatial_gain": 2.0,
+ "tonemap_enable": 1
+ },
+ "Night":
+ {
+ "cadence": [ 3 ],
+ "channel_map": { "night": 3 },
+ "tonemap_enable": 1,
+ "tonemap":
+ [
+ 0, 0,
+ 5000, 20000,
+ 10000, 30000,
+ 20000, 47000,
+ 30000, 55000,
+ 65535, 65535
+ ]
+ }
+ }
+}
+
+grid_size = (32, 32)
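
For illustration, a minimal sketch of how a caller might select the platform
data by target name, mirroring the conditional imports used later in
ctt_pretty_print_json.py (the helper name here is hypothetical, not part of
the patch):

import importlib

def load_platform_data(target):
    # Map the -t/--target value onto the module that provides
    # json_template and grid_size; only 'pisp' and 'vc4' exist.
    module_name = {'pisp': 'ctt_pisp', 'vc4': 'ctt_vc4'}[target]
    mod = importlib.import_module(module_name)
    return mod.json_template, mod.grid_size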
diff --git a/utils/raspberrypi/ctt/ctt_pretty_print_json.py b/utils/raspberrypi/ctt/ctt_pretty_print_json.py
index 3e3b8475..a4cae62d 100755
--- a/utils/raspberrypi/ctt/ctt_pretty_print_json.py
+++ b/utils/raspberrypi/ctt/ctt_pretty_print_json.py
@@ -19,13 +19,19 @@ class Encoder(json.JSONEncoder):
self.indentation_level = 0
self.hard_break = 120
self.custom_elems = {
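+ # each value is the number of array elements printed per line for that key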
+ 'weights': 15,
'table': 16,
'luminance_lut': 16,
'ct_curve': 3,
'ccm': 3,
+ 'lut_rx': 9,
+ 'lut_bx': 9,
+ 'lut_by': 9,
+ 'lut_ry': 9,
'gamma_curve': 2,
'y_target': 2,
- 'prior': 2
+ 'prior': 2,
+ 'tonemap': 2
}
def encode(self, o, node_key=None):
@@ -87,7 +93,7 @@ class Encoder(json.JSONEncoder):
return self.encode(o)
-def pretty_print(in_json: dict) -> str:
+def pretty_print(in_json: dict, custom_elems={}) -> str:
if 'version' not in in_json or \
'target' not in in_json or \
@@ -95,12 +101,15 @@ def pretty_print(in_json: dict) -> str:
in_json['version'] < 2.0:
raise RuntimeError('Incompatible JSON dictionary has been provided')
- return json.dumps(in_json, cls=Encoder, indent=4, sort_keys=False)
+ encoder = Encoder(indent=4, sort_keys=False)
+ encoder.custom_elems |= custom_elems
+ return encoder.encode(in_json)
if __name__ == "__main__":
parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=
'Prettify a version 2.0 camera tuning config JSON file.')
+ parser.add_argument('-t', '--target', type=str, help='Target platform', choices=['pisp', 'vc4'], default='vc4')
parser.add_argument('input', type=str, help='Input tuning file.')
parser.add_argument('output', type=str, nargs='?',
help='Output converted tuning file. If not provided, the input file will be updated in-place.',
@@ -110,7 +119,12 @@ if __name__ == "__main__":
with open(args.input, 'r') as f:
in_json = json.load(f)
- out_json = pretty_print(in_json)
+ if args.target == 'pisp':
+ from ctt_pisp import grid_size
+ elif args.target == 'vc4':
+ from ctt_vc4 import grid_size
+
+ out_json = pretty_print(in_json, custom_elems={'table': grid_size[0], 'luminance_lut': grid_size[0]})
with open(args.output if args.output is not None else args.input, 'w') as f:
f.write(out_json)
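
As a usage sketch, the new custom_elems argument lets a caller override how
many values are printed per line for named arrays; the tuning file name below
is hypothetical:

import json
from ctt_pretty_print_json import pretty_print
from ctt_pisp import grid_size  # (32, 32) on PiSP

with open('tuning.json') as f:
    in_json = json.load(f)

# Print ALSC tables with one grid row per line.
out_json = pretty_print(in_json, custom_elems={'table': grid_size[0],
                                               'luminance_lut': grid_size[0]})
print(out_json)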
diff --git a/utils/raspberrypi/ctt/ctt_ransac.py b/utils/raspberrypi/ctt/ctt_ransac.py
index 9ed7d93c..01bba302 100644
--- a/utils/raspberrypi/ctt/ctt_ransac.py
+++ b/utils/raspberrypi/ctt/ctt_ransac.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_ransac.py - camera tuning tool RANSAC selector for Macbeth chart locator
+# camera tuning tool RANSAC selector for Macbeth chart locator
import numpy as np
diff --git a/utils/raspberrypi/ctt/ctt_tools.py b/utils/raspberrypi/ctt/ctt_tools.py
index 79195289..50b01ecf 100644
--- a/utils/raspberrypi/ctt/ctt_tools.py
+++ b/utils/raspberrypi/ctt/ctt_tools.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# ctt_tools.py - camera tuning tool miscellaneous
+# camera tuning tool miscellaneous
import time
import re
@@ -65,11 +65,12 @@ def parse_input():
directory = get_config(args_dict, '-i', None, 'string')
config = get_config(args_dict, '-c', None, 'string')
log_path = get_config(args_dict, '-l', None, 'string')
+ target = get_config(args_dict, '-t', "vc4", 'string')
if directory is None:
raise ArgError('\n\nERROR! No input directory given.')
if json_output is None:
raise ArgError('\n\nERROR! No output json given.')
- return json_output, directory, config, log_path
+ return json_output, directory, config, log_path, target
"""
diff --git a/utils/raspberrypi/ctt/ctt_vc4.py b/utils/raspberrypi/ctt/ctt_vc4.py
new file mode 100755
index 00000000..7154e110
--- /dev/null
+++ b/utils/raspberrypi/ctt/ctt_vc4.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# ctt_vc4.py - camera tuning tool data for VC4 platforms
+
+
+json_template = {
+ "rpi.black_level": {
+ "black_level": 4096
+ },
+ "rpi.dpc": {
+ },
+ "rpi.lux": {
+ "reference_shutter_speed": 10000,
+ "reference_gain": 1,
+ "reference_aperture": 1.0
+ },
+ "rpi.noise": {
+ },
+ "rpi.geq": {
+ },
+ "rpi.sdn": {
+ },
+ "rpi.awb": {
+ "priors": [
+ {"lux": 0, "prior": [2000, 1.0, 3000, 0.0, 13000, 0.0]},
+ {"lux": 800, "prior": [2000, 0.0, 6000, 2.0, 13000, 2.0]},
+ {"lux": 1500, "prior": [2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0]}
+ ],
+ "modes": {
+ "auto": {"lo": 2500, "hi": 8000},
+ "incandescent": {"lo": 2500, "hi": 3000},
+ "tungsten": {"lo": 3000, "hi": 3500},
+ "fluorescent": {"lo": 4000, "hi": 4700},
+ "indoor": {"lo": 3000, "hi": 5000},
+ "daylight": {"lo": 5500, "hi": 6500},
+ "cloudy": {"lo": 7000, "hi": 8600}
+ },
+ "bayes": 1
+ },
+ "rpi.agc": {
+ "metering_modes": {
+ "centre-weighted": {
+ "weights": [3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0]
+ },
+ "spot": {
+ "weights": [2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
+ },
+ "matrix": {
+ "weights": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
+ }
+ },
+ "exposure_modes": {
+ "normal": {
+ "shutter": [100, 10000, 30000, 60000, 120000],
+ "gain": [1.0, 2.0, 4.0, 6.0, 6.0]
+ },
+ "short": {
+ "shutter": [100, 5000, 10000, 20000, 120000],
+ "gain": [1.0, 2.0, 4.0, 6.0, 6.0]
+ }
+ },
+ "constraint_modes": {
+ "normal": [
+ {"bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.5, 1000, 0.5]}
+ ],
+ "highlight": [
+ {"bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.5, 1000, 0.5]},
+ {"bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [0, 0.8, 1000, 0.8]}
+ ]
+ },
+ "y_target": [0, 0.16, 1000, 0.165, 10000, 0.17]
+ },
+ "rpi.alsc": {
+ 'omega': 1.3,
+ 'n_iter': 100,
+ 'luminance_strength': 0.7,
+ },
+ "rpi.contrast": {
+ "ce_enable": 1,
+ "gamma_curve": [
+ 0, 0,
+ 1024, 5040,
+ 2048, 9338,
+ 3072, 12356,
+ 4096, 15312,
+ 5120, 18051,
+ 6144, 20790,
+ 7168, 23193,
+ 8192, 25744,
+ 9216, 27942,
+ 10240, 30035,
+ 11264, 32005,
+ 12288, 33975,
+ 13312, 35815,
+ 14336, 37600,
+ 15360, 39168,
+ 16384, 40642,
+ 18432, 43379,
+ 20480, 45749,
+ 22528, 47753,
+ 24576, 49621,
+ 26624, 51253,
+ 28672, 52698,
+ 30720, 53796,
+ 32768, 54876,
+ 36864, 57012,
+ 40960, 58656,
+ 45056, 59954,
+ 49152, 61183,
+ 53248, 62355,
+ 57344, 63419,
+ 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.ccm": {
+ },
+ "rpi.sharpen": {
+ }
+}
+
+grid_size = (16, 12)
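
The two platform modules expose the same interface but different ALSC grids;
a quick sanity-check sketch, assuming the tables are stored flattened:

from ctt_pisp import grid_size as pisp_grid  # (32, 32) -> 1024 entries
from ctt_vc4 import grid_size as vc4_grid    # (16, 12) -> 192 entries

assert pisp_grid[0] * pisp_grid[1] == 1024
assert vc4_grid[0] * vc4_grid[1] == 192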
diff --git a/utils/rkisp1/gen-csc-table.py b/utils/rkisp1/gen-csc-table.py
index c47f5042..2db84feb 100755
--- a/utils/rkisp1/gen-csc-table.py
+++ b/utils/rkisp1/gen-csc-table.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2022, Ideas on Board Oy
#
@@ -147,6 +147,8 @@ def main(argv):
description='Generate color space conversion table coefficients with '
'configurable fixed-point precision.'
)
+ parser.add_argument('--format', '-f', choices=['dec', 'hex'], default='hex',
+ help='Number format')
parser.add_argument('--invert', '-i', action='store_true',
help='Invert the color space conversion (YUV -> RGB)')
parser.add_argument('--precision', '-p', default='Q1.7',
@@ -190,19 +192,29 @@ def main(argv):
else:
line = round_array(line)
- # Convert coefficients to the number of bits selected by the precision.
- # Negative values will be turned into positive integers using 2's
- # complement.
- line = [coeff & ((1 << precision.total) - 1) for coeff in line]
+ if args.format == 'hex':
+ # Convert coefficients to the number of bits selected by the precision.
+ # Negative values will be turned into positive integers using 2's
+ # complement.
+ line = [coeff & ((1 << precision.total) - 1) for coeff in line]
+
rounded_coeffs.append(line)
# Print the result as C code.
nbits = 1 << (precision.total - 1).bit_length()
nbytes = nbits // 4
- print(f'static const u{nbits} {"yuv2rgb" if args.invert else "rgb2yuv"}_{args.encoding}_{quantization.name.lower()}_coeffs[] = {{')
+
+ if args.format == 'hex':
+ coeff_fmt = '0x{0:0' + str(nbytes) + 'x}'
+ sign = 'u'
+ else:
+ coeff_fmt = '{0}'
+ sign = 's'
+
+ print(f'static const {sign}{nbits} {"yuv2rgb" if args.invert else "rgb2yuv"}_{args.encoding}_{quantization.name.lower()}_coeffs[] = {{')
for line in rounded_coeffs:
- line = [f'0x{coeff:0{nbytes}x}' for coeff in line]
+ line = [coeff_fmt.format(coeff) for coeff in line]
print(f'\t{", ".join(line)},')
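
The 'hex' path keeps the previous behaviour of masking signed coefficients
into unsigned two's-complement form; a worked example for Q1.7 precision
(total = 8 bits):

total = 8
coeff = -38                          # -0.296875 in Q1.7
masked = coeff & ((1 << total) - 1)  # 256 - 38 = 218
print(f'0x{masked:02x}')             # prints 0xda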
diff --git a/utils/rkisp1/rkisp1-capture.sh b/utils/rkisp1/rkisp1-capture.sh
index c5f859f2..d767e31d 100755
--- a/utils/rkisp1/rkisp1-capture.sh
+++ b/utils/rkisp1/rkisp1-capture.sh
@@ -4,8 +4,7 @@
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
-# rkisp-capture.sh - Capture processed frames from cameras based on the
-# Rockchip ISP1
+# Capture processed frames from cameras based on the Rockchip ISP1
#
# The scripts makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
diff --git a/utils/tracepoints/analyze-ipa-trace.py b/utils/tracepoints/analyze-ipa-trace.py
index 50fbbf42..92e8a235 100755
--- a/utils/tracepoints/analyze-ipa-trace.py
+++ b/utils/tracepoints/analyze-ipa-trace.py
@@ -4,7 +4,7 @@
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
-# analyze-ipa-trace.py - Example of how to extract information from libcamera lttng traces
+# Example of how to extract information from libcamera lttng traces
import argparse
import bt2
diff --git a/utils/tracepoints/meson.build b/utils/tracepoints/meson.build
deleted file mode 100644
index 807230fc..00000000
--- a/utils/tracepoints/meson.build
+++ /dev/null
@@ -1,5 +0,0 @@
-# SPDX-License-Identifier: CC0-1.0
-
-py_modules += ['jinja2']
-
-gen_tracepoints_header = find_program('./gen-tp-header.py')
diff --git a/utils/tuning/README.rst b/utils/tuning/README.rst
index ce533b2c..89a1d61e 100644
--- a/utils/tuning/README.rst
+++ b/utils/tuning/README.rst
@@ -1,11 +1,20 @@
.. SPDX-License-Identifier: CC-BY-SA-4.0
-.. TODO: Write an overview of libtuning
+libcamera tuning tools
+======================
-Dependencies
-------------
+.. Note:: The tuning tools are still very much a work in progress. If in doubt,
+   please ask on the mailing list.
+
+.. todo::
+ Write documentation
+
+Installation of dependencies
+----------------------------
+
+::
+ # Using a venv
+ python3 -m venv venv
+ . ./venv/bin/activate
+ pip3 install -r requirements.txt
-- cv2
-- numpy
-- pyexiv2
-- rawpy
diff --git a/utils/tuning/config-example.yaml b/utils/tuning/config-example.yaml
new file mode 100644
index 00000000..5593eaef
--- /dev/null
+++ b/utils/tuning/config-example.yaml
@@ -0,0 +1,54 @@
+general:
+ disable: []
+ plot: []
+ alsc:
+ do_alsc_colour: 1
+ luminance_strength: 0.5
+ awb:
+ # Algorithm can either be 'grey' or 'bayes'
+ algorithm: bayes
+ # Priors are only used for the bayes algorithm. They are defined in linear
+ # space. A good starting point is:
+ # - lux: 0
+ # ct: [ 2000, 3000, 13000 ]
+ # probability: [ 1.005, 1.0, 1.0 ]
+ # - lux: 800
+ # ct: [ 2000, 6000, 13000 ]
+ # probability: [ 1.0, 1.01, 1.01 ]
+ # - lux: 1500
+ # ct: [ 2000, 4000, 6000, 6500, 7000, 13000 ]
+ # probability: [ 1.0, 1.005, 1.032, 1.037, 1.01, 1.01 ]
+ priors:
+ - lux: 0
+ ct: [ 2000, 13000 ]
+ probability: [ 1.0, 1.0 ]
+ AwbMode:
+ AwbAuto:
+ lo: 2500
+ hi: 8000
+ AwbIncandescent:
+ lo: 2500
+ hi: 3000
+ AwbTungsten:
+ lo: 3000
+ hi: 3500
+ AwbFluorescent:
+ lo: 4000
+ hi: 4700
+ AwbIndoor:
+ lo: 3000
+ hi: 5000
+ AwbDaylight:
+ lo: 5500
+ hi: 6500
+ AwbCloudy:
+ lo: 6500
+ hi: 8000
+ # One custom mode can be defined if needed
+ #AwbCustom:
+ # lo: 2000
+ # hi: 13000
+ macbeth:
+ small: 1
+ show: 0
+# blacklevel: 32 \ No newline at end of file
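
A minimal sketch of loading this example configuration, assuming PyYAML is
installed and the nesting follows the file above:

import yaml

with open('config-example.yaml') as f:
    config = yaml.safe_load(f)

general = config['general']
print(general['disable'])  # []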
diff --git a/utils/tuning/libtuning/average.py b/utils/tuning/libtuning/average.py
index e28770d7..c41075a1 100644
--- a/utils/tuning/libtuning/average.py
+++ b/utils/tuning/libtuning/average.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# average.py - Wrapper for numpy averaging functions to enable duck-typing
+# Wrapper for numpy averaging functions to enable duck-typing
import numpy as np
diff --git a/utils/tuning/libtuning/ctt_awb.py b/utils/tuning/libtuning/ctt_awb.py
new file mode 100644
index 00000000..240f37e6
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_awb.py
@@ -0,0 +1,378 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for AWB
+
+import logging
+
+import matplotlib.pyplot as plt
+from bisect import bisect_left
+from scipy.optimize import fmin
+import numpy as np
+
+from .image import Image
+
+logger = logging.getLogger(__name__)
+
+"""
+obtain piecewise linear approximation for colour curve
+"""
+def awb(imgs, cal_cr_list, cal_cb_list, plot):
+ """
+ condense alsc calibration tables into one dictionary
+ """
+ if cal_cr_list is None:
+ colour_cals = None
+ else:
+ colour_cals = {}
+ for cr, cb in zip(cal_cr_list, cal_cb_list):
+ cr_tab = cr['table']
+ cb_tab = cb['table']
+ """
+ normalise tables so min value is 1
+ """
+ cr_tab = cr_tab/np.min(cr_tab)
+ cb_tab = cb_tab/np.min(cb_tab)
+ colour_cals[cr['ct']] = [cr_tab, cb_tab]
+ """
+ obtain data from greyscale macbeth patches
+ """
+ rb_raw = []
+ rbs_hat = []
+ for Img in imgs:
+ logger.info(f'Processing {Img.name}')
+ """
+ get greyscale patches with alsc applied if alsc enabled.
+ Note: if alsc is disabled then colour_cals will be set to None and the
+ function will just return the greyscale patches
+ """
+ r_patchs, b_patchs, g_patchs = get_alsc_patches(Img, colour_cals)
+ """
+ calculate ratio of r, b to g
+ """
+ r_g = np.mean(r_patchs/g_patchs)
+ b_g = np.mean(b_patchs/g_patchs)
+ logger.info(f' r : {r_g:.4f} b : {b_g:.4f}')
+ """
+ The curve tends to be better behaved in so-called hatspace.
+ R, B, G represent the individual channels. The colour curve is plotted in
+ r, b space, where:
+ r = R/G
+ b = B/G
+ This will be referred to as dehatspace... (sorry)
+ Hatspace is defined as:
+ r_hat = R/(R+B+G)
+ b_hat = B/(R+B+G)
+ To convert from dehatspace to hatspace (hat operation):
+ r_hat = r/(1+r+b)
+ b_hat = b/(1+r+b)
+ To convert from hatspace to dehatspace (dehat operation):
+ r = r_hat/(1-r_hat-b_hat)
+ b = b_hat/(1-r_hat-b_hat)
+ Proof is left as an exercise to the reader...
+ Throughout the code, r and b are sometimes referred to as r_g and b_g
+ as a reminder that they are ratios
+ """
+ r_g_hat = r_g/(1+r_g+b_g)
+ b_g_hat = b_g/(1+r_g+b_g)
+ logger.info(f' r_hat : {r_g_hat:.4f} b_hat : {b_g_hat:.4f}')
+ rbs_hat.append((r_g_hat, b_g_hat, Img.color))
+ rb_raw.append((r_g, b_g))
+
+ logger.info('Finished processing images')
+ """
+ sort all lists simultaneously by r_hat
+ """
+ rbs_zip = list(zip(rbs_hat, rb_raw))
+ rbs_zip.sort(key=lambda x: x[0][0])
+ rbs_hat, rb_raw = list(zip(*rbs_zip))
+ """
+ unzip tuples ready for processing
+ """
+ rbs_hat = list(zip(*rbs_hat))
+ rb_raw = list(zip(*rb_raw))
+ """
+ fit a quadratic to r_g_hat and b_g_hat
+ """
+ a, b, c = np.polyfit(rbs_hat[0], rbs_hat[1], 2)
+ logger.info('Fit quadratic curve in hatspace')
+ """
+ the algorithm now approximates the shortest distance from each point to the
+ curve in dehatspace. Since the fit is done in hatspace, it is easier to
+ find the actual shortest distance in hatspace and use the projection back
+ into dehatspace as an overestimate.
+ The distance will be used for two things:
+ 1) In the case that colour temperature does not strictly decrease with
+ increasing r/g, the closest point to the line will be chosen out of an
+ increasing pair of colours.
+
+ 2) To calculate transverse negative and positive, the maximum positive
+ and negative distance from the line are chosen. This benefits from the
+ overestimate as the transverse pos/neg are upper bound values.
+ """
+ """
+ define fit function
+ """
+ def f(x):
+ return a*x**2 + b*x + c
+ """
+ iterate over points (R, B are x and y coordinates of points) and calculate
+ distance to line in dehatspace
+ """
+ dists = []
+ for i, (R, B) in enumerate(zip(rbs_hat[0], rbs_hat[1])):
+ """
+ define function to minimise as square distance between datapoint and
+ point on curve. Squaring is monotonic so minimising radius squared is
+ equivalent to minimising radius
+ """
+ def f_min(x):
+ y = f(x)
+ return((x-R)**2+(y-B)**2)
+ """
+ perform optimisation with scipy.optimize.fmin
+ """
+ x_hat = fmin(f_min, R, disp=0)[0]
+ y_hat = f(x_hat)
+ """
+ dehat
+ """
+ x = x_hat/(1-x_hat-y_hat)
+ y = y_hat/(1-x_hat-y_hat)
+ rr = R/(1-R-B)
+ bb = B/(1-R-B)
+ """
+ calculate euclidean distance in dehatspace
+ """
+ dist = ((x-rr)**2+(y-bb)**2)**0.5
+ """
+ return negative if point is below the fit curve
+ """
+ if (x+y) > (rr+bb):
+ dist *= -1
+ dists.append(dist)
+ logger.info('Found closest point on fit line to each point in dehatspace')
+ """
+ calculate wiggle factors in awb. 10% added since this is an upper bound
+ """
+ transverse_neg = - np.min(dists) * 1.1
+ transverse_pos = np.max(dists) * 1.1
+ logger.info(f'Transverse pos : {transverse_pos:.5f}')
+ logger.info(f'Transverse neg : {transverse_neg:.5f}')
+ """
+ set minimum transverse wiggles to 0.01.
+ Wiggle factors dictate how far off the curve the algorithm searches. 0.01
+ is a suitable minimum that gives better results for lighting conditions not
+ within the calibration dataset. Anything less will generalise poorly.
+ """
+ if transverse_pos < 0.01:
+ transverse_pos = 0.01
+ logger.info('Forced transverse pos to 0.01')
+ if transverse_neg < 0.01:
+ transverse_neg = 0.01
+ logger.info('Forced transverse neg to 0.01')
+
+ """
+ generate new b_hat values at each r_hat according to fit
+ """
+ r_hat_fit = np.array(rbs_hat[0])
+ b_hat_fit = a*r_hat_fit**2 + b*r_hat_fit + c
+ """
+ transform from hatspace to dehatspace
+ """
+ r_fit = r_hat_fit/(1-r_hat_fit-b_hat_fit)
+ b_fit = b_hat_fit/(1-r_hat_fit-b_hat_fit)
+ c_fit = np.round(rbs_hat[2], 0)
+ """
+ round to 4dp
+ """
+ r_fit = np.where((1000*r_fit) % 1 <= 0.05, r_fit+0.0001, r_fit)
+ r_fit = np.where((1000*r_fit) % 1 >= 0.95, r_fit-0.0001, r_fit)
+ b_fit = np.where((1000*b_fit) % 1 <= 0.05, b_fit+0.0001, b_fit)
+ b_fit = np.where((1000*b_fit) % 1 >= 0.95, b_fit-0.0001, b_fit)
+ r_fit = np.round(r_fit, 4)
+ b_fit = np.round(b_fit, 4)
+ """
+ The following code ensures that colour temperature decreases with
+ increasing r/g
+ """
+ """
+ iterate backwards over list for easier indexing
+ """
+ i = len(c_fit) - 1
+ while i > 0:
+ if c_fit[i] > c_fit[i-1]:
+ logger.info('Colour temperature increase found')
+ logger.info(f'{c_fit[i - 1]} K at r = {r_fit[i - 1]} to ')
+ logger.info(f'{c_fit[i]} K at r = {r_fit[i]}')
+ """
+ if colour temperature increases then discard point furthest from
+ the transformed fit (dehatspace)
+ """
+ error_1 = abs(dists[i-1])
+ error_2 = abs(dists[i])
+ logger.info('Distances from fit:')
+ logger.info(f'{c_fit[i]} K : {error_1:.5f}')
+ logger.info(f'{c_fit[i - 1]} K : {error_2:.5f}')
+ """
+ find bad index
+ note that in python false = 0 and true = 1
+ """
+ bad = i - (error_1 < error_2)
+ logger.info(f'Point at {c_fit[bad]} K deleted as ')
+ logger.info('it is furthest from fit')
+ """
+ delete bad point
+ """
+ r_fit = np.delete(r_fit, bad)
+ b_fit = np.delete(b_fit, bad)
+ c_fit = np.delete(c_fit, bad).astype(np.uint16)
+ """
+ note that if a point has been discarded then the length has decreased
+ by one, meaning that decreasing the index by one will reassess the kept
+ point against the next point. It is therefore possible, in theory, for
+ two adjacent points to be discarded, although probably rare
+ """
+ i -= 1
+
+ """
+ return formatted ct curve, ordered by increasing colour temperature
+ """
+ ct_curve = list(np.array(list(zip(b_fit, r_fit, c_fit))).flatten())[::-1]
+ logger.info('Final CT curve:')
+ for i in range(len(ct_curve)//3):
+ j = 3*i
+ logger.info(f' ct: {ct_curve[j]} ')
+ logger.info(f' r: {ct_curve[j + 1]} ')
+ logger.info(f' b: {ct_curve[j + 2]} ')
+
+ """
+ plotting code for debug
+ """
+ if plot:
+ x = np.linspace(np.min(rbs_hat[0]), np.max(rbs_hat[0]), 100)
+ y = a*x**2 + b*x + c
+ plt.subplot(2, 1, 1)
+ plt.title('hatspace')
+ plt.plot(rbs_hat[0], rbs_hat[1], ls='--', color='blue')
+ plt.plot(x, y, color='green', ls='-')
+ plt.scatter(rbs_hat[0], rbs_hat[1], color='red')
+ for i, ct in enumerate(rbs_hat[2]):
+ plt.annotate(str(ct), (rbs_hat[0][i], rbs_hat[1][i]))
+ plt.xlabel('$\\hat{r}$')
+ plt.ylabel('$\\hat{b}$')
+ """
+ optionally set axes equal to shortest distance so the line really does
+ look perpendicular and everybody is happy
+ """
+ # ax = plt.gca()
+ # ax.set_aspect('equal')
+ plt.grid()
+ plt.subplot(2, 1, 2)
+ plt.title('dehatspace - indoors?')
+ plt.plot(r_fit, b_fit, color='blue')
+ plt.scatter(rb_raw[0], rb_raw[1], color='green')
+ plt.scatter(r_fit, b_fit, color='red')
+ for i, ct in enumerate(c_fit):
+ plt.annotate(str(ct), (r_fit[i], b_fit[i]))
+ plt.xlabel('$r$')
+ plt.ylabel('$b$')
+ """
+ optionally set axes equal to shortest distance so the line really does
+ look perpendicular and everybody is happy
+ """
+ # ax = plt.gca()
+ # ax.set_aspect('equal')
+ plt.subplots_adjust(hspace=0.5)
+ plt.grid()
+ plt.show()
+ """
+ end of plotting code
+ """
+ return(ct_curve, np.round(transverse_pos, 5), np.round(transverse_neg, 5))
+
+
+"""
+obtain greyscale patches and perform alsc colour correction
+"""
+def get_alsc_patches(Img, colour_cals, grey=True):
+ """
+ get patch centre coordinates, image colour and the actual
+ patches for each channel, remembering to subtract blacklevel
+ If grey then only greyscale patches considered
+ """
+ patches = Img.patches
+ if grey:
+ cen_coords = Img.cen_coords[3::4]
+ col = Img.color
+ r_patchs = patches[0][3::4] - Img.blacklevel_16
+ b_patchs = patches[3][3::4] - Img.blacklevel_16
+ """
+ note two green channels are averages
+ """
+ g_patchs = (patches[1][3::4]+patches[2][3::4])/2 - Img.blacklevel_16
+ else:
+ cen_coords = Img.cen_coords
+ col = Img.color
+ r_patchs = patches[0] - Img.blacklevel_16
+ b_patchs = patches[3] - Img.blacklevel_16
+ g_patchs = (patches[1]+patches[2])/2 - Img.blacklevel_16
+
+ if colour_cals is None:
+ return r_patchs, b_patchs, g_patchs
+ """
+ find where image colour fits in alsc colour calibration tables
+ """
+ cts = list(colour_cals.keys())
+ pos = bisect_left(cts, col)
+ """
+ if img colour is below minimum or above maximum alsc calibration colour, simply
+ pick extreme closest to img colour
+ """
+ if pos % len(cts) == 0:
+ """
+ this works because -0 = 0 = first and -1 = last index
+ """
+ col_tabs = np.array(colour_cals[cts[-pos//len(cts)]])
+ """
+ else, perform linear interpolation between existing alsc colour
+ calibration tables
+ """
+ else:
+ bef = cts[pos-1]
+ aft = cts[pos]
+ da = col-bef
+ db = aft-col
+ bef_tabs = np.array(colour_cals[bef])
+ aft_tabs = np.array(colour_cals[aft])
+ col_tabs = (bef_tabs*db + aft_tabs*da)/(da+db)
+ col_tabs = np.reshape(col_tabs, (2, 12, 16))
+ """
+ calculate dx, dy used to calculate alsc table
+ """
+ w, h = Img.w/2, Img.h/2
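+ # note: -(-a // b) is ceiling division, so dx and dy round up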
+ dx, dy = int(-(-(w-1)//16)), int(-(-(h-1)//12))
+ """
+ make list of pairs of gains for each patch by selecting the correct value
+ in alsc colour calibration table
+ """
+ patch_gains = []
+ for cen in cen_coords:
+ x, y = cen[0]//dx, cen[1]//dy
+ # We could probably do with some better spatial interpolation here?
+ col_gains = (col_tabs[0][y][x], col_tabs[1][y][x])
+ patch_gains.append(col_gains)
+
+ """
+ multiply the r and b channels in each patch by the respective gain, finally
+ performing the alsc colour correction
+ """
+ for i, gains in enumerate(patch_gains):
+ r_patchs[i] = r_patchs[i] * gains[0]
+ b_patchs[i] = b_patchs[i] * gains[1]
+
+ """
+ return greyscale patches, g channel and correct r, b channels
+ """
+ return r_patchs, b_patchs, g_patchs
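
A self-contained check of the hat/dehat transforms described in awb() above;
the helper names are illustrative, not part of this file:

def hat(r, b):
    return r / (1 + r + b), b / (1 + r + b)

def dehat(r_hat, b_hat):
    d = 1 - r_hat - b_hat
    return r_hat / d, b_hat / d

r, b = 0.55, 0.42
assert all(abs(u - v) < 1e-12 for u, v in zip(dehat(*hat(r, b)), (r, b)))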
diff --git a/utils/tuning/libtuning/ctt_ccm.py b/utils/tuning/libtuning/ctt_ccm.py
new file mode 100644
index 00000000..2e87a667
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_ccm.py
@@ -0,0 +1,408 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool for CCM (colour correction matrix)
+
+import logging
+
+import numpy as np
+from scipy.optimize import minimize
+
+from . import ctt_colors as colors
+from .image import Image
+from .ctt_awb import get_alsc_patches
+from .utils import visualise_macbeth_chart
+
+logger = logging.getLogger(__name__)
+
+"""
+takes 8-bit macbeth chart values, degammas and returns 16 bit
+"""
+
+'''
+This program has several options for deriving the color matrix.
+The first is average. This minimises the average delta E across all patches of
+the macbeth chart. Testing across all cameras yielded this as the most color
+accurate and vivid option; other options are available, however.
+Maximum minimises the maximum delta E of the patches. It iterates until a
+minimum maximum is found, so that no single patch deviates wildly. This yields
+generally good results, but overall the colors are less accurate.
+Have a fiddle with maximum and see what you think.
+The final option allows you to select the patches to average across. This
+means that you can bias certain patches, for instance if you want the reds to
+be more accurate.
+'''
+
+matrix_selection_types = ["average", "maximum", "patches"]
+typenum = 0 # select from array above, 0 = average, 1 = maximum, 2 = patches
+test_patches = [1, 2, 5, 8, 9, 12, 14]
+
+'''
+Enter patches to test for. Can also be entered twice if you
+would like twice as much bias on one patch.
+'''
+
+
+def degamma(x):
+ x = x / ((2 ** 8) - 1) # takes 255 and scales it down to one
+ x = np.where(x < 0.04045, x / 12.92, ((x + 0.055) / 1.055) ** 2.4)
+ x = x * ((2 ** 16) - 1) # takes one and scales up to 65535, 16 bit color
+ return x
+
+
+def gamma(x):
+ # Take 3 long array of color values and gamma them
+ return [((colour / 255) ** (1 / 2.4) * 1.055 - 0.055) * 255 for colour in x]
+
+
+"""
+Finds colour correction matrices for a list of images
+"""
+
+
+def ccm(imgs, cal_cr_list, cal_cb_list):
+ global matrix_selection_types, typenum
+ """
+ standard macbeth chart colour values
+ """
+ m_rgb = np.array([ # these are in RGB
+ [116, 81, 67], # dark skin
+ [199, 147, 129], # light skin
+ [91, 122, 156], # blue sky
+ [90, 108, 64], # foliage
+ [130, 128, 176], # blue flower
+ [92, 190, 172], # bluish green
+ [224, 124, 47], # orange
+ [68, 91, 170], # purplish blue
+ [198, 82, 97], # moderate red
+ [94, 58, 106], # purple
+ [159, 189, 63], # yellow green
+ [230, 162, 39], # orange yellow
+ [35, 63, 147], # blue
+ [67, 149, 74], # green
+ [180, 49, 57], # red
+ [238, 198, 20], # yellow
+ [193, 84, 151], # magenta
+ [0, 136, 170], # cyan (goes out of gamut)
+ [245, 245, 243], # white 9.5
+ [200, 202, 202], # neutral 8
+ [161, 163, 163], # neutral 6.5
+ [121, 121, 122], # neutral 5
+ [82, 84, 86], # neutral 3.5
+ [49, 49, 51] # black 2
+ ])
+ """
+ convert reference colours from srgb to rgb
+ """
+ m_srgb = degamma(m_rgb) # now in 16 bit color.
+
+ # Produce array of LAB values for ideal color chart
+ m_lab = [colors.RGB_to_LAB(color / 256) for color in m_srgb]
+
+ """
+ reorder reference values to match how patches are ordered
+ """
+ m_srgb = np.array([m_srgb[i::6] for i in range(6)]).reshape((24, 3))
+ m_lab = np.array([m_lab[i::6] for i in range(6)]).reshape((24, 3))
+ m_rgb = np.array([m_rgb[i::6] for i in range(6)]).reshape((24, 3))
+ """
+ reformat alsc correction tables or set colour_cals to None if alsc is
+ deactivated
+ """
+ if cal_cr_list is None:
+ colour_cals = None
+ else:
+ colour_cals = {}
+ for cr, cb in zip(cal_cr_list, cal_cb_list):
+ cr_tab = cr['table']
+ cb_tab = cb['table']
+ """
+ normalise tables so min value is 1
+ """
+ cr_tab = cr_tab / np.min(cr_tab)
+ cb_tab = cb_tab / np.min(cb_tab)
+ colour_cals[cr['ct']] = [cr_tab, cb_tab]
+
+ """
+ for each image, perform awb and alsc corrections.
+ Then calculate the colour correction matrix for that image, recording the
+ ccm and the colour temperature.
+ """
+ ccm_tab = {}
+ for Img in imgs:
+ logger.info('Processing image: ' + Img.name)
+ """
+ get macbeth patches with alsc applied if alsc enabled.
+ Note: if alsc is disabled then colour_cals will be set to None and the
+ function will simply return the macbeth patches
+ """
+ r, b, g = get_alsc_patches(Img, colour_cals, grey=False)
+ # 256 values for each patch of sRGB values
+
+ """
+ do awb
+ Note: awb is done by measuring the macbeth chart in the image, rather
+ than from the awb calibration. This is done so the awb will be perfect
+ and the ccm matrices will be more accurate.
+ """
+ r_greys, b_greys, g_greys = r[3::4], b[3::4], g[3::4]
+ r_g = np.mean(r_greys / g_greys)
+ b_g = np.mean(b_greys / g_greys)
+ r = r / r_g
+ b = b / b_g
+ """
+ normalise brightness wrt reference macbeth colours and then average
+ each channel for each patch
+ """
+ gain = np.mean(m_srgb) / np.mean((r, g, b))
+ logger.info(f'Gain with respect to standard colours: {gain:.3f}')
+ r = np.mean(gain * r, axis=1)
+ b = np.mean(gain * b, axis=1)
+ g = np.mean(gain * g, axis=1)
+ """
+ calculate ccm matrix
+ """
+ # ==== All of the below should be in sRGB ====
+ ccm = do_ccm(r, g, b, m_srgb)
+ # This is the initial guess that our optimisation code works with.
+ original_ccm = ccm
+ r1 = ccm[0]
+ r2 = ccm[1]
+ g1 = ccm[3]
+ g2 = ccm[4]
+ b1 = ccm[6]
+ b2 = ccm[7]
+ '''
+ The color matrix is applied as below:
+ [R1 R2 R3]   [Rval]   [outR]
+ [G1 G2 G3] * [Gval] = [outG]
+ [B1 B2 B3]   [Bval]   [outB]
+ We optimise six elements and derive the third in each row as r3 = 1 - r1 - r2.
+ '''
+
+ x0 = [r1, r2, g1, g2, b1, b2]
+ '''
+ We use our old CCM as the initial guess for the program to find the
+ optimised matrix
+ '''
+ result = minimize(guess, x0, args=(r, g, b, m_lab), tol=0.01)
+ '''
+ This produces a color matrix which has the lowest delta E possible,
+ based off the input data. Note it is impossible for this to reach
+ zero since the input data is imperfect
+ '''
+
+ [r1, r2, g1, g2, b1, b2] = result.x
+ # The new, optimised color correction matrix values
+ # This is the optimised Color Matrix (preserving greys by summing rows up to 1)
+ optimised_ccm = [r1, r2, (1 - r1 - r2), g1, g2, (1 - g1 - g2), b1, b2, (1 - b1 - b2)]
+
+ logger.info(f'Optimized Matrix: {np.round(optimised_ccm, 4)}')
+ logger.info(f'Old Matrix: {np.round(ccm, 4)}')
+
+ formatted_ccm = np.array(original_ccm).reshape((3, 3))
+
+ '''
+ below is a whole load of code that then applies the latest color
+ matrix, and returns LAB values for color. This can then be used
+ to calculate the final delta E
+ '''
+ optimised_ccm_rgb = [] # Original Color Corrected Matrix RGB / LAB
+ optimised_ccm_lab = []
+
+ formatted_optimised_ccm = np.array(optimised_ccm).reshape((3, 3))
+ after_gamma_rgb = []
+ after_gamma_lab = []
+
+ for RGB in zip(r, g, b):
+ ccm_applied_rgb = np.dot(formatted_ccm, (np.array(RGB) / 256))
+ optimised_ccm_rgb.append(gamma(ccm_applied_rgb))
+ optimised_ccm_lab.append(colors.RGB_to_LAB(ccm_applied_rgb))
+
+ optimised_ccm_applied_rgb = np.dot(formatted_optimised_ccm, np.array(RGB) / 256)
+ after_gamma_rgb.append(gamma(optimised_ccm_applied_rgb))
+ after_gamma_lab.append(colors.RGB_to_LAB(optimised_ccm_applied_rgb))
+ '''
+ Gamma After RGB / LAB - not used in calculations, only used for visualisation
+ We now want to spit out some data that shows
+ how the optimisation has improved the color matrices
+ '''
+ logger.info("Here are the Improvements")
+
+ # CALCULATE WORST CASE delta e
+ old_worst_delta_e = 0
+ before_average = transform_and_evaluate(formatted_ccm, r, g, b, m_lab)
+ new_worst_delta_e = 0
+ after_average = transform_and_evaluate(formatted_optimised_ccm, r, g, b, m_lab)
+ for i in range(24):
+ old_delta_e = deltae(optimised_ccm_lab[i], m_lab[i]) # Current Old Delta E
+ new_delta_e = deltae(after_gamma_lab[i], m_lab[i]) # Current New Delta E
+ if old_delta_e > old_worst_delta_e:
+ old_worst_delta_e = old_delta_e
+ if new_delta_e > new_worst_delta_e:
+ new_worst_delta_e = new_delta_e
+
+ logger.info(f'delta E optimized: average: {after_average:.2f} max:{new_worst_delta_e:.2f}')
+ logger.info(f'delta E old: average: {before_average:.2f} max:{old_worst_delta_e:.2f}')
+
+ visualise_macbeth_chart(m_rgb, optimised_ccm_rgb, after_gamma_rgb, str(Img.color) + str(matrix_selection_types[typenum]))
+ '''
+ The program will also save some visualisations of improvements.
+ Very pretty to look at. Top rectangle is ideal, Left square is
+ before optimisation, right square is after.
+ '''
+
+ """
+ if a ccm has already been calculated for that temperature then don't
+ overwrite but save both. They will then be averaged later on
+ """ # Now going to use optimised color matrix, optimised_ccm
+ if Img.color in ccm_tab.keys():
+ ccm_tab[Img.color].append(optimised_ccm)
+ else:
+ ccm_tab[Img.color] = [optimised_ccm]
+
+ logger.info('Finished processing images')
+ """
+ average any ccms that share a colour temperature
+ """
+ for k, v in ccm_tab.items():
+ tab = np.mean(v, axis=0)
+ tab = np.where((10000 * tab) % 1 <= 0.05, tab + 0.00001, tab)
+ tab = np.where((10000 * tab) % 1 >= 0.95, tab - 0.00001, tab)
+ ccm_tab[k] = list(np.round(tab, 5))
+ logger.info(f'Matrix calculated for colour temperature of {k} K')
+
+ """
+ return all ccms with respective colour temperature in the correct format,
+ sorted by their colour temperature
+ """
+ sorted_ccms = sorted(ccm_tab.items(), key=lambda kv: kv[0])
+ ccms = []
+ for i in sorted_ccms:
+ ccms.append({
+ 'ct': i[0],
+ 'ccm': i[1]
+ })
+ return ccms
+
+
+def guess(x0, r, g, b, m_lab): # provides a method of numerical feedback for the optimisation code
+ [r1, r2, g1, g2, b1, b2] = x0
+ ccm = np.array([r1, r2, (1 - r1 - r2),
+ g1, g2, (1 - g1 - g2),
+ b1, b2, (1 - b1 - b2)]).reshape((3, 3)) # format the matrix correctly
+ return transform_and_evaluate(ccm, r, g, b, m_lab)
+
+
+def transform_and_evaluate(ccm, r, g, b, m_lab): # Transforms colors to LAB and applies the correction matrix
+ # create list of matrix changed colors
+ realrgb = []
+ for RGB in zip(r, g, b):
+ rgb_post_ccm = np.dot(ccm, np.array(RGB) / 256) # This is RGB values after the color correction matrix has been applied
+ realrgb.append(colors.RGB_to_LAB(rgb_post_ccm))
+ # now compare that with m_lab and return numeric result, averaged for each patch
+ return (sumde(realrgb, m_lab) / 24) # returns an average result of delta E
+
+
+def sumde(listA, listB):
+ global typenum, test_patches
+ sumde = 0
+ maxde = 0
+ patchde = [] # Create array of the delta E values for each patch. useful for optimisation of certain patches
+ for listA_item, listB_item in zip(listA, listB):
+ if maxde < (deltae(listA_item, listB_item)):
+ maxde = deltae(listA_item, listB_item)
+ patchde.append(deltae(listA_item, listB_item))
+ sumde += deltae(listA_item, listB_item)
+ '''
+ The different options specified at the start allow for
+ the maximum to be returned, average or specific patches
+ '''
+ if typenum == 0:
+ return sumde
+ if typenum == 1:
+ return maxde
+ if typenum == 2:
+ output = sum([patchde[test_patch] for test_patch in test_patches])
+ # Selects only certain patches and returns the output for them
+ return output
+
+
+"""
+calculates the ccm for an individual image.
+ccms are calculated in rgb space, and are fit by hand. Although it is a 3x3
+matrix, each row must add up to 1 in order to conserve greyness, simplifying
+calculation.
+The initial CCM is calculated in RGB, and then optimised in LAB color space
+This simplifies the initial calculation but then gets us the accuracy of
+using LAB color space.
+"""
+
+
+def do_ccm(r, g, b, m_srgb):
+ rb = r-b
+ gb = g-b
+ rb_2s = (rb * rb)
+ rb_gbs = (rb * gb)
+ gb_2s = (gb * gb)
+
+ r_rbs = rb * (m_srgb[..., 0] - b)
+ r_gbs = gb * (m_srgb[..., 0] - b)
+ g_rbs = rb * (m_srgb[..., 1] - b)
+ g_gbs = gb * (m_srgb[..., 1] - b)
+ b_rbs = rb * (m_srgb[..., 2] - b)
+ b_gbs = gb * (m_srgb[..., 2] - b)
+
+ """
+ Obtain least squares fit
+ """
+ rb_2 = np.sum(rb_2s)
+ gb_2 = np.sum(gb_2s)
+ rb_gb = np.sum(rb_gbs)
+ r_rb = np.sum(r_rbs)
+ r_gb = np.sum(r_gbs)
+ g_rb = np.sum(g_rbs)
+ g_gb = np.sum(g_gbs)
+ b_rb = np.sum(b_rbs)
+ b_gb = np.sum(b_gbs)
+
+ det = rb_2 * gb_2 - rb_gb * rb_gb
+
+ """
+ Raise error if matrix is singular...
+ This shouldn't really happen with real data but if it does just take new
+ pictures and try again, not much else to be done unfortunately...
+ """
+ if det < 0.001:
+ raise ArithmeticError
+
+ r_a = (gb_2 * r_rb - rb_gb * r_gb) / det
+ r_b = (rb_2 * r_gb - rb_gb * r_rb) / det
+ """
+ Last row can be calculated by knowing the sum must be 1
+ """
+ r_c = 1 - r_a - r_b
+
+ g_a = (gb_2 * g_rb - rb_gb * g_gb) / det
+ g_b = (rb_2 * g_gb - rb_gb * g_rb) / det
+ g_c = 1 - g_a - g_b
+
+ b_a = (gb_2 * b_rb - rb_gb * b_gb) / det
+ b_b = (rb_2 * b_gb - rb_gb * b_rb) / det
+ b_c = 1 - b_a - b_b
+
+ """
+ format ccm
+ """
+ ccm = [r_a, r_b, r_c, g_a, g_b, g_c, b_a, b_b, b_c]
+
+ return ccm
+
+
+def deltae(colorA, colorB):
+ return ((colorA[0] - colorB[0]) ** 2 + (colorA[1] - colorB[1]) ** 2 + (colorA[2] - colorB[2]) ** 2) ** 0.5
+ # return ((colorA[1]-colorB[1]) ** 2 + (colorA[2]-colorB[2]) ** 2) ** 0.5
+ # UNCOMMENT IF YOU WANT TO NEGLECT LUMINANCE FROM CALCULATION OF DELTA E
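
A quick check of the grey-preserving constraint used by do_ccm() and guess():
each row of the CCM sums to 1, so an equal-channel (grey) input maps to the
same grey output (the matrix values below are made up for illustration):

import numpy as np

ccm = np.array([[1.6, -0.4, -0.2],
                [-0.2, 1.5, -0.3],
                [0.1, -0.6, 1.5]])
assert np.allclose(ccm.sum(axis=1), 1.0)  # rows sum to 1
grey = np.array([0.5, 0.5, 0.5])
assert np.allclose(ccm @ grey, grey)      # grey preserved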
diff --git a/utils/tuning/libtuning/ctt_colors.py b/utils/tuning/libtuning/ctt_colors.py
new file mode 100644
index 00000000..cb4d236b
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_colors.py
@@ -0,0 +1,30 @@
+# Program to convert from RGB to LAB color space
+def RGB_to_LAB(RGB): # where RGB is a 1x3 array, e.g. RGB = [100, 255, 230]
+ XYZ = [0, 0, 0]
+ # converted all the three R, G, B to X, Y, Z
+ X = RGB[0] * 0.4124 + RGB[1] * 0.3576 + RGB[2] * 0.1805
+ Y = RGB[0] * 0.2126 + RGB[1] * 0.7152 + RGB[2] * 0.0722
+ Z = RGB[0] * 0.0193 + RGB[1] * 0.1192 + RGB[2] * 0.9505
+
+ XYZ[0] = X / 255 * 100
+ XYZ[1] = Y / 255 * 100 # XYZ Must be in range 0 -> 100, so scale down from 255
+ XYZ[2] = Z / 255 * 100
+ XYZ[0] = XYZ[0] / 95.047 # ref_X = 95.047 Observer= 2°, Illuminant= D65
+ XYZ[1] = XYZ[1] / 100.0 # ref_Y = 100.000
+ XYZ[2] = XYZ[2] / 108.883 # ref_Z = 108.883
+ num = 0
+ for value in XYZ:
+ if value > 0.008856:
+ value = value ** (0.3333333333333333)
+ else:
+ value = (7.787 * value) + (16 / 116)
+ XYZ[num] = value
+ num = num + 1
+
+ # L, A, B, values calculated below
+ L = (116 * XYZ[1]) - 16
+ a = 500 * (XYZ[0] - XYZ[1])
+ b = 200 * (XYZ[1] - XYZ[2])
+
+ return [L, a, b]
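
Illustrative use of RGB_to_LAB(): pure white should map to roughly L = 100
with a and b near 0 under the D65 reference values above:

L, a, b = RGB_to_LAB([255, 255, 255])
print(round(L, 1), round(a, 1), round(b, 1))  # approximately 100.0 0.0 0.0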
diff --git a/utils/tuning/libtuning/ctt_ransac.py b/utils/tuning/libtuning/ctt_ransac.py
new file mode 100644
index 00000000..01bba302
--- /dev/null
+++ b/utils/tuning/libtuning/ctt_ransac.py
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# camera tuning tool RANSAC selector for Macbeth chart locator
+
+import numpy as np
+
+scale = 2
+
+
+"""
+constructs normalised macbeth chart corners for ransac algorithm
+"""
+def get_square_verts(c_err=0.05, scale=scale):
+ """
+ define macbeth chart corners
+ """
+ b_bord_x, b_bord_y = scale*8.5, scale*13
+ s_bord = 6*scale
+ side = 41*scale
+ x_max = side*6 + 5*s_bord + 2*b_bord_x
+ y_max = side*4 + 3*s_bord + 2*b_bord_y
+ c1 = (0, 0)
+ c2 = (0, y_max)
+ c3 = (x_max, y_max)
+ c4 = (x_max, 0)
+ mac_norm = np.array((c1, c2, c3, c4), np.float32)
+ mac_norm = np.array([mac_norm])
+
+ square_verts = []
+ square_0 = np.array(((0, 0), (0, side),
+ (side, side), (side, 0)), np.float32)
+ offset_0 = np.array((b_bord_x, b_bord_y), np.float32)
+ c_off = side * c_err
+ offset_cont = np.array(((c_off, c_off), (c_off, -c_off),
+ (-c_off, -c_off), (-c_off, c_off)), np.float32)
+ square_0 += offset_0
+ square_0 += offset_cont
+ """
+ define macbeth square corners
+ """
+ for i in range(6):
+ shift_i = np.array(((i*side, 0), (i*side, 0),
+ (i*side, 0), (i*side, 0)), np.float32)
+ shift_bord = np.array(((i*s_bord, 0), (i*s_bord, 0),
+ (i*s_bord, 0), (i*s_bord, 0)), np.float32)
+ square_i = square_0 + shift_i + shift_bord
+ for j in range(4):
+ shift_j = np.array(((0, j*side), (0, j*side),
+ (0, j*side), (0, j*side)), np.float32)
+ shift_bord = np.array(((0, j*s_bord),
+ (0, j*s_bord), (0, j*s_bord),
+ (0, j*s_bord)), np.float32)
+ square_j = square_i + shift_j + shift_bord
+ square_verts.append(square_j)
+ # print('square_verts')
+ # print(square_verts)
+ return np.array(square_verts, np.float32), mac_norm
+
+
+def get_square_centres(c_err=0.05, scale=scale):
+ """
+ define macbeth square centres
+ """
+ verts, mac_norm = get_square_verts(c_err, scale=scale)
+
+ centres = np.mean(verts, axis=1)
+ # print('centres')
+ # print(centres)
+ return np.array(centres, np.float32)
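
A short sketch exercising the chart geometry helpers above; 24 patch centres
are expected for the 6x4 Macbeth chart:

centres = get_square_centres(c_err=0.05)
assert centres.shape == (24, 2)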
diff --git a/utils/tuning/libtuning/generators/generator.py b/utils/tuning/libtuning/generators/generator.py
index 7c8c9b99..77a8ba4a 100644
--- a/utils/tuning/libtuning/generators/generator.py
+++ b/utils/tuning/libtuning/generators/generator.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# generator.py - Base class for a generator to convert dict to tuning file
+# Base class for a generator to convert dict to tuning file
from pathlib import Path
diff --git a/utils/tuning/libtuning/generators/raspberrypi_output.py b/utils/tuning/libtuning/generators/raspberrypi_output.py
index 813491cd..47b49059 100644
--- a/utils/tuning/libtuning/generators/raspberrypi_output.py
+++ b/utils/tuning/libtuning/generators/raspberrypi_output.py
@@ -2,7 +2,7 @@
#
# Copyright 2022 Raspberry Pi Ltd
#
-# raspberrypi_output.py - Generate tuning file in Raspberry Pi's json format
+# Generate tuning file in Raspberry Pi's json format
#
# (Copied from ctt_pretty_print_json.py)
diff --git a/utils/tuning/libtuning/generators/yaml_output.py b/utils/tuning/libtuning/generators/yaml_output.py
index effb4fb3..c490081d 100644
--- a/utils/tuning/libtuning/generators/yaml_output.py
+++ b/utils/tuning/libtuning/generators/yaml_output.py
@@ -2,15 +2,16 @@
#
# Copyright 2022 Paul Elder <paul.elder@ideasonboard.com>
#
-# yaml_output.py - Generate tuning file in YAML format
+# Generate tuning file in YAML format
from .generator import Generator
from numbers import Number
from pathlib import Path
-import libtuning.utils as utils
+import logging
+logger = logging.getLogger(__name__)
class YamlOutput(Generator):
def __init__(self):
@@ -106,13 +107,16 @@ class YamlOutput(Generator):
]
for module in output_order:
+ if module not in output_dict:
+ continue
+
out_lines.append(f' - {module.out_name}:')
if len(output_dict[module]) == 0:
continue
if not isinstance(output_dict[module], dict):
- utils.eprint(f'Error: Output of {module.type} is not a dictionary')
+ logger.error(f'Error: Output of {module.type} is not a dictionary')
continue
lines = self._stringify_dict(output_dict[module])
diff --git a/utils/tuning/libtuning/gradient.py b/utils/tuning/libtuning/gradient.py
index 5106f821..b643f502 100644
--- a/utils/tuning/libtuning/gradient.py
+++ b/utils/tuning/libtuning/gradient.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# gradient.py - Gradients that can be used to distribute or map numbers
+# Gradients that can be used to distribute or map numbers
import libtuning as lt
diff --git a/utils/tuning/libtuning/image.py b/utils/tuning/libtuning/image.py
index aa9d20b5..ecd334bd 100644
--- a/utils/tuning/libtuning/image.py
+++ b/utils/tuning/libtuning/image.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2019, Raspberry Pi Ltd
#
-# image.py - Container for an image and associated metadata
+# Container for an image and associated metadata
import binascii
import numpy as np
@@ -13,6 +13,9 @@ import re
import libtuning as lt
import libtuning.utils as utils
+import logging
+
+logger = logging.getLogger(__name__)
class Image:
@@ -21,17 +24,18 @@ class Image:
self.lsc_only = False
self.color = -1
self.lux = -1
+ self.macbeth = None
try:
self._load_metadata_exif()
except Exception as e:
- utils.eprint(f'Failed to load metadata from {self.path}: {e}')
+ logger.error(f'Failed to load metadata from {self.path}: {e}')
raise e
try:
self._read_image_dng()
except Exception as e:
- utils.eprint(f'Failed to load image data from {self.path}: {e}')
+ logger.error(f'Failed to load image data from {self.path}: {e}')
raise e
@property
@@ -79,7 +83,7 @@ class Image:
# is R, then G, then G, then B.
bayer_case = {
'0 1 1 2': (lt.Color.R, lt.Color.GR, lt.Color.GB, lt.Color.B),
- '1 2 0 1': (lt.Color.GB, lt.Color.R, lt.Color.B, lt.Color.GR),
+ '1 2 0 1': (lt.Color.GB, lt.Color.B, lt.Color.R, lt.Color.GR),
'2 1 1 0': (lt.Color.B, lt.Color.GB, lt.Color.GR, lt.Color.R),
'1 0 2 1': (lt.Color.GR, lt.Color.R, lt.Color.B, lt.Color.GB)
}
@@ -131,6 +135,6 @@ class Image:
all_patches.append(ch_patches)
- self.patches = all_patches
+ self.patches = np.array(all_patches)
return not saturated
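
The corrected '1 2 0 1' entry follows from the CFA pattern encoding used here: 0 is red, 1 is green, 2 is blue, listed in raster order, so '1 2 0 1' is the G B / R G layout, in which the first green shares its row with blue (GB) and the second shares its row with red (GR). A standalone sketch of that decoding, with a local Color enum standing in for lt.Color:

    from enum import IntEnum

    class Color(IntEnum):  # stands in for lt.Color
        R = 0
        GR = 1
        GB = 2
        B = 3

    def decode_cfa(pattern: str):
        values = [int(v) for v in pattern.split()]
        order = []
        for row in (values[:2], values[2:]):
            for v in row:
                if v == 0:
                    order.append(Color.R)
                elif v == 2:
                    order.append(Color.B)
                else:
                    # A green sample is GB when blue shares its row, GR otherwise.
                    order.append(Color.GB if 2 in row else Color.GR)
        return tuple(order)

    assert decode_cfa('1 2 0 1') == (Color.GB, Color.B, Color.R, Color.GR)
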
diff --git a/utils/tuning/libtuning/libtuning.py b/utils/tuning/libtuning/libtuning.py
index d84c148f..bac57323 100644
--- a/utils/tuning/libtuning/libtuning.py
+++ b/utils/tuning/libtuning/libtuning.py
@@ -2,16 +2,17 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# libtuning.py - An infrastructure for camera tuning tools
+# An infrastructure for camera tuning tools
import argparse
+import logging
import libtuning as lt
import libtuning.utils as utils
-from libtuning.utils import eprint
from enum import Enum, IntEnum
+logger = logging.getLogger(__name__)
class Color(IntEnum):
R = 0
@@ -94,7 +95,10 @@ class Tuner(object):
self.output = {}
def add(self, module):
- self.modules.append(module)
+ if isinstance(module, list):
+ self.modules.extend(module)
+ else:
+ self.modules.append(module)
def set_input_parser(self, parser):
self.parser = parser
@@ -112,10 +116,10 @@ class Tuner(object):
for module_type in output_order:
modules = [module for module in self.modules if module.type == module_type.type]
if len(modules) > 1:
- eprint(f'Multiple modules found for module type "{module_type.type}"')
+ logger.error(f'Multiple modules found for module type "{module_type.type}"')
return False
if len(modules) < 1:
- eprint(f'No module found for module type "{module_type.type}"')
+ logger.error(f'No module found for module type "{module_type.type}"')
return False
self.output_order.append(modules[0])
@@ -124,19 +128,19 @@ class Tuner(object):
# \todo Validate parser and generator at Tuner construction time?
def _validate_settings(self):
if self.parser is None:
- eprint('Missing parser')
+ logger.error('Missing parser')
return False
if self.generator is None:
- eprint('Missing generator')
+ logger.error('Missing generator')
return False
if len(self.modules) == 0:
- eprint('No modules added')
+ logger.error('No modules added')
return False
if len(self.output_order) != len(self.modules):
- eprint('Number of outputs does not match number of modules')
+ logger.error('Number of outputs does not match number of modules')
return False
return True
@@ -183,7 +187,7 @@ class Tuner(object):
for module in self.modules:
if not module.validate_config(self.config):
- eprint(f'Config is invalid for module {module.type}')
+ logger.error(f'Config is invalid for module {module.type}')
return -1
has_lsc = any(isinstance(m, lt.modules.lsc.LSC) for m in self.modules)
@@ -192,15 +196,15 @@ class Tuner(object):
images = utils.load_images(args.input, self.config, not has_only_lsc, has_lsc)
if images is None or len(images) == 0:
- eprint(f'No images were found, or able to load')
+            logger.error('No images were found, or none could be loaded')
return -1
# Do the tuning
for module in self.modules:
out = module.process(self.config, images, self.output)
if out is None:
- eprint(f'Module {module.name} failed to process, aborting')
- break
+                logger.warning(f'Module {module.hr_name} failed to process, skipping')
+ continue
self.output[module] = out
self.generator.write(args.output, self.output, self.output_order)
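
With this change Tuner.add() accepts either a single module or a list, so scripts can register a full pipeline in one call; a usage sketch with module objects constructed as in the rkisp1.py script later in this diff:

    tuner = lt.Tuner('RkISP1')
    tuner.add(agc)                # a single module is appended
    tuner.add([awb, ccm, lsc])    # a list of modules is extended in order
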
diff --git a/utils/tuning/libtuning/macbeth.py b/utils/tuning/libtuning/macbeth.py
index 5faddf66..4a2006b0 100644
--- a/utils/tuning/libtuning/macbeth.py
+++ b/utils/tuning/libtuning/macbeth.py
@@ -1,8 +1,9 @@
# SPDX-License-Identifier: BSD-2-Clause
#
# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2024, Ideas on Board Oy
#
-# macbeth.py - Locate and extract Macbeth charts from images
+# Locate and extract Macbeth charts from images
# (Copied from: ctt_macbeth_locator.py)
# \todo Add debugging
@@ -11,8 +12,18 @@ import cv2
import os
from pathlib import Path
import numpy as np
+import warnings
+import logging
+from sklearn import cluster as cluster
-from libtuning.image import Image
+from .ctt_ransac import get_square_verts, get_square_centres
+from .image import Image
+
+logger = logging.getLogger(__name__)
+
+
+class MacbethError(Exception):
+ pass
# Reshape image to fixed width without distorting returns image and scale
@@ -369,7 +380,9 @@ def get_macbeth_chart(img, ref_data):
# Catch macbeth errors and continue with code
except MacbethError as error:
- eprint(error)
+        # \todo This happens so many times in a normal run that it shadows
+        # all the relevant output.
+ # logger.warning(error)
return (0, None, None, False)
@@ -403,10 +416,15 @@ def find_macbeth(img, mac_config):
# nothing more is tried as this is a high enough confidence to ensure
# reliable macbeth square centre placement.
+ # Keep a list that will include this and any brightened up versions of
+ # the image for reuse.
+ all_images = [img]
+
for brightness in [2, 4]:
if cor >= 0.75:
break
img_br = cv2.convertScaleAbs(img, alpha=brightness, beta=0)
+ all_images.append(img_br)
cor_b, mac_b, coords_b, ret_b = get_macbeth_chart(img_br, ref_data)
if cor_b > cor:
cor, mac, coords, ret = cor_b, mac_b, coords_b, ret_b
@@ -456,23 +474,24 @@ def find_macbeth(img, mac_config):
w_inc = int(w * pair['inc'])
h_inc = int(h * pair['inc'])
- loop = ((1 - pair['sel']) / pair['inc']) + 1
+ loop = int(((1 - pair['sel']) / pair['inc']) + 1)
# For each subselection, look for a macbeth chart
- for i in range(loop):
- for j in range(loop):
- w_s, h_s = i * w_inc, j * h_inc
- img_sel = img[w_s:w_s + w_sel, h_s:h_s + h_sel]
- cor_ij, mac_ij, coords_ij, ret_ij = get_macbeth_chart(img_sel, ref_data)
-
- # If the correlation is better than the best then record the
- # scale and current subselection at which macbeth chart was
- # found. Also record the coordinates, macbeth chart and message.
- if cor_ij > cor:
- cor = cor_ij
- mac, coords, ret = mac_ij, coords_ij, ret_ij
- ii, jj = i, j
- w_best, h_best = w_inc, h_inc
- d_best = index + 1
+ for img_br in all_images:
+ for i in range(loop):
+ for j in range(loop):
+ w_s, h_s = i * w_inc, j * h_inc
+ img_sel = img_br[w_s:w_s + w_sel, h_s:h_s + h_sel]
+ cor_ij, mac_ij, coords_ij, ret_ij = get_macbeth_chart(img_sel, ref_data)
+
+ # If the correlation is better than the best then record the
+ # scale and current subselection at which macbeth chart was
+ # found. Also record the coordinates, macbeth chart and message.
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, ret = mac_ij, coords_ij, ret_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = index + 1
# Transform coordinates from subselection to original image
if ii != -1:
@@ -486,7 +505,7 @@ def find_macbeth(img, mac_config):
coords_fit = coords
if cor < 0.75:
- eprint(f'Warning: Low confidence {cor:.3f} for macbeth chart in {img.path.name}')
+ logger.warning(f'Low confidence {cor:.3f} for macbeth chart')
if show:
draw_macbeth_results(img, coords_fit)
@@ -499,18 +518,20 @@ def locate_macbeth(image: Image, config: dict):
av_chan = (np.mean(np.array(image.channels), axis=0) / (2**16))
av_val = np.mean(av_chan)
if av_val < image.blacklevel_16 / (2**16) + 1 / 64:
- eprint(f'Image {image.path.name} too dark')
+ logger.warning(f'Image {image.path.name} too dark')
return None
macbeth = find_macbeth(av_chan, config['general']['macbeth'])
if macbeth is None:
- eprint(f'No macbeth chart found in {image.path.name}')
+ logger.warning(f'No macbeth chart found in {image.path.name}')
return None
mac_cen_coords = macbeth[1]
if not image.get_patches(mac_cen_coords):
- eprint(f'Macbeth patches have saturated in {image.path.name}')
+ logger.warning(f'Macbeth patches have saturated in {image.path.name}')
return None
+ image.macbeth = macbeth
+
return macbeth
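
The restructured search now walks every image variant (the original plus the brightened copies collected in all_images) at each subselection; a condensed sketch of the scan, with locate standing in for get_macbeth_chart(img, ref_data):

    def scan(all_images, w, h, sel, inc, locate):
        # Slide a window of sel * size in steps of inc * size over each
        # image variant, keeping the best correlation seen so far.
        w_sel, h_sel = int(w * sel), int(h * sel)
        w_inc, h_inc = int(w * inc), int(h * inc)
        loop = int((1 - sel) / inc) + 1
        best_cor, best_coords = 0, None
        for img in all_images:
            for i in range(loop):
                for j in range(loop):
                    w_s, h_s = i * w_inc, j * h_inc
                    cor, coords = locate(img[w_s:w_s + w_sel, h_s:h_s + h_sel])
                    if cor > best_cor:
                        best_cor, best_coords = cor, coords
        return best_cor, best_coords
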
diff --git a/utils/tuning/libtuning/macbeth_ref.pgm b/utils/tuning/libtuning/macbeth_ref.pgm
index 37897140..089ea91f 100644
--- a/utils/tuning/libtuning/macbeth_ref.pgm
+++ b/utils/tuning/libtuning/macbeth_ref.pgm
@@ -1,5 +1,5 @@
-# SPDX-License-Identifier: BSD-2-Clause
P5
+# SPDX-License-Identifier: BSD-2-Clause
# Reference macbeth chart
120 80
255
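
The reorder is a correctness fix: the PGM format requires the 'P5' magic number to be the first token in the file, and comment lines are only valid after it, so strict readers reject the chart when the SPDX line comes first. A quick sanity check, assuming the file sits in the current directory:

    with open('macbeth_ref.pgm', 'rb') as f:
        assert f.read(2) == b'P5', 'PGM magic number must come first'
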
diff --git a/utils/tuning/libtuning/modules/agc/__init__.py b/utils/tuning/libtuning/modules/agc/__init__.py
new file mode 100644
index 00000000..4db9ca37
--- /dev/null
+++ b/utils/tuning/libtuning/modules/agc/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.modules.agc.agc import AGC
+from libtuning.modules.agc.rkisp1 import AGCRkISP1
diff --git a/utils/tuning/libtuning/modules/agc/agc.py b/utils/tuning/libtuning/modules/agc/agc.py
new file mode 100644
index 00000000..9c8899ba
--- /dev/null
+++ b/utils/tuning/libtuning/modules/agc/agc.py
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+
+from ..module import Module
+
+import libtuning as lt
+
+
+class AGC(Module):
+ type = 'agc'
+ hr_name = 'AGC (Base)'
+ out_name = 'GenericAGC'
+
+ # \todo Add sector shapes and stuff just like lsc
+ def __init__(self, *,
+ debug: list):
+ super().__init__()
+
+ self.debug = debug
diff --git a/utils/tuning/libtuning/modules/agc/rkisp1.py b/utils/tuning/libtuning/modules/agc/rkisp1.py
new file mode 100644
index 00000000..2dad3a09
--- /dev/null
+++ b/utils/tuning/libtuning/modules/agc/rkisp1.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+#
+# rkisp1.py - AGC module for tuning rkisp1
+
+from .agc import AGC
+
+import libtuning as lt
+
+
+class AGCRkISP1(AGC):
+ hr_name = 'AGC (RkISP1)'
+ out_name = 'Agc'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't actually need anything from the config file
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def _generate_metering_modes(self) -> dict:
+ centre_weighted = [
+ 0, 0, 0, 0, 0,
+ 0, 6, 8, 6, 0,
+ 0, 8, 16, 8, 0,
+ 0, 6, 8, 6, 0,
+ 0, 0, 0, 0, 0
+ ]
+
+ spot = [
+ 0, 0, 0, 0, 0,
+ 0, 2, 4, 2, 0,
+ 0, 4, 16, 4, 0,
+ 0, 2, 4, 2, 0,
+ 0, 0, 0, 0, 0
+ ]
+
+ matrix = [1 for i in range(0, 25)]
+
+ return {
+ 'MeteringCentreWeighted': centre_weighted,
+ 'MeteringSpot': spot,
+ 'MeteringMatrix': matrix
+ }
+
+ def _generate_exposure_modes(self) -> dict:
+ normal = {'exposureTime': [100, 10000, 30000, 60000, 120000],
+ 'gain': [2.0, 4.0, 6.0, 6.0, 6.0]}
+ short = {'exposureTime': [100, 5000, 10000, 20000, 120000],
+ 'gain': [2.0, 4.0, 6.0, 6.0, 6.0]}
+
+ return {'ExposureNormal': normal, 'ExposureShort': short}
+
+ def _generate_constraint_modes(self) -> dict:
+ normal = {'lower': {'qLo': 0.98, 'qHi': 1.0, 'yTarget': 0.5}}
+ highlight = {
+ 'lower': {'qLo': 0.98, 'qHi': 1.0, 'yTarget': 0.5},
+ 'upper': {'qLo': 0.98, 'qHi': 1.0, 'yTarget': 0.8}
+ }
+
+ return {'ConstraintNormal': normal, 'ConstraintHighlight': highlight}
+
+    def _generate_y_target(self) -> float:
+ return 0.5
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ output['AeMeteringMode'] = self._generate_metering_modes()
+ output['AeExposureMode'] = self._generate_exposure_modes()
+ output['AeConstraintMode'] = self._generate_constraint_modes()
+ output['relativeLuminanceTarget'] = self._generate_y_target()
+
+ # \todo Debug functionality
+
+ return output
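
Since the rkisp1 AGC data is fully static, process() can be exercised without config or images; a minimal usage sketch:

    agc = AGCRkISP1(debug=[])
    out = agc.process(config={}, images=[], outputs={})
    assert out['AeMeteringMode']['MeteringMatrix'] == [1] * 25
    assert out['relativeLuminanceTarget'] == 0.5
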
diff --git a/utils/tuning/libtuning/modules/awb/__init__.py b/utils/tuning/libtuning/modules/awb/__init__.py
new file mode 100644
index 00000000..2d67f10c
--- /dev/null
+++ b/utils/tuning/libtuning/modules/awb/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas On Board
+
+from libtuning.modules.awb.awb import AWB
+from libtuning.modules.awb.rkisp1 import AWBRkISP1
diff --git a/utils/tuning/libtuning/modules/awb/awb.py b/utils/tuning/libtuning/modules/awb/awb.py
new file mode 100644
index 00000000..0dc4f59d
--- /dev/null
+++ b/utils/tuning/libtuning/modules/awb/awb.py
@@ -0,0 +1,40 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas On Board
+
+import logging
+
+from ..module import Module
+
+from libtuning.ctt_awb import awb
+import numpy as np
+
+logger = logging.getLogger(__name__)
+
+
+class AWB(Module):
+ type = 'awb'
+ hr_name = 'AWB (Base)'
+ out_name = 'GenericAWB'
+
+ def __init__(self, *, debug: list):
+ super().__init__()
+
+ self.debug = debug
+
+ def do_calculation(self, images):
+ logger.info('Starting AWB calculation')
+
+ imgs = [img for img in images if img.macbeth is not None]
+
+ ct_curve, transverse_pos, transverse_neg = awb(imgs, None, None, False)
+ ct_curve = np.reshape(ct_curve, (-1, 3))
+ gains = [{
+ 'ct': int(v[0]),
+ 'gains': [float(1.0 / v[1]), float(1.0 / v[2])]
+ } for v in ct_curve]
+
+ return {'colourGains': gains,
+ 'transversePos': transverse_pos,
+ 'transverseNeg': transverse_neg}
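
The flat ct_curve returned by awb() is reshaped into one (ct, r, b) row per colour temperature, and the channel values are inverted so they can be applied as multiplicative gains; a sketch with made-up numbers:

    import numpy as np

    ct_curve = np.array([4000.0, 0.5, 0.25, 6500.0, 0.4, 0.5])  # illustrative
    rows = np.reshape(ct_curve, (-1, 3))
    gains = [{'ct': int(v[0]),
              'gains': [float(1.0 / v[1]), float(1.0 / v[2])]} for v in rows]
    # [{'ct': 4000, 'gains': [2.0, 4.0]}, {'ct': 6500, 'gains': [2.5, 2.0]}]
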
diff --git a/utils/tuning/libtuning/modules/awb/rkisp1.py b/utils/tuning/libtuning/modules/awb/rkisp1.py
new file mode 100644
index 00000000..d562d26e
--- /dev/null
+++ b/utils/tuning/libtuning/modules/awb/rkisp1.py
@@ -0,0 +1,36 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas On Board
+#
+# AWB module for tuning rkisp1
+
+from .awb import AWB
+
+
+class AWBRkISP1(AWB):
+ hr_name = 'AWB (RkISP1)'
+ out_name = 'Awb'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+        if 'awb' not in config['general']:
+ raise ValueError('AWB configuration missing')
+ awb_config = config['general']['awb']
+ algorithm = awb_config['algorithm']
+
+ output = {'algorithm': algorithm}
+ data = self.do_calculation(images)
+ if algorithm == 'grey':
+ output['colourGains'] = data['colourGains']
+ elif algorithm == 'bayes':
+ output['AwbMode'] = awb_config['AwbMode']
+ output['priors'] = awb_config['priors']
+ output.update(data)
+ else:
+ raise ValueError(f"Unknown AWB algorithm {output['algorithm']}")
+
+ return output
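
process() expects the parsed configuration to provide an awb section under general; a hedged sketch of the two accepted shapes (the AwbMode and priors values are illustrative placeholders, not values the module defines):

    config_grey = {'general': {'awb': {'algorithm': 'grey'}}}
    config_bayes = {'general': {'awb': {'algorithm': 'bayes',
                                        'AwbMode': 'AwbAuto',  # illustrative
                                        'priors': []}}}        # illustrative
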
diff --git a/utils/tuning/libtuning/modules/ccm/__init__.py b/utils/tuning/libtuning/modules/ccm/__init__.py
new file mode 100644
index 00000000..322602af
--- /dev/null
+++ b/utils/tuning/libtuning/modules/ccm/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+
+from libtuning.modules.ccm.ccm import CCM
+from libtuning.modules.ccm.rkisp1 import CCMRkISP1
diff --git a/utils/tuning/libtuning/modules/ccm/ccm.py b/utils/tuning/libtuning/modules/ccm/ccm.py
new file mode 100644
index 00000000..18702f8d
--- /dev/null
+++ b/utils/tuning/libtuning/modules/ccm/ccm.py
@@ -0,0 +1,41 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+# Copyright (C) 2024, Ideas on Board
+#
+# Base Ccm tuning module
+
+from ..module import Module
+
+from libtuning.ctt_ccm import ccm
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class CCM(Module):
+ type = 'ccm'
+ hr_name = 'CCM (Base)'
+ out_name = 'GenericCCM'
+
+ def __init__(self, debug: list):
+ super().__init__()
+
+ self.debug = debug
+
+ def do_calibration(self, images):
+ logger.info('Starting CCM calibration')
+
+ imgs = [img for img in images if img.macbeth is not None]
+
+        # \todo Take LSC calibration results into account.
+ cal_cr_list = None
+ cal_cb_list = None
+
+ try:
+ ccms = ccm(imgs, cal_cr_list, cal_cb_list)
+ except ArithmeticError:
+ logger.error('CCM calibration failed')
+ return None
+
+ return ccms
diff --git a/utils/tuning/libtuning/modules/ccm/rkisp1.py b/utils/tuning/libtuning/modules/ccm/rkisp1.py
new file mode 100644
index 00000000..be0252d9
--- /dev/null
+++ b/utils/tuning/libtuning/modules/ccm/rkisp1.py
@@ -0,0 +1,28 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Paul Elder <paul.elder@ideasonboard.com>
+# Copyright (C) 2024, Ideas on Board
+#
+# Ccm module for tuning rkisp1
+
+from .ccm import CCM
+
+
+class CCMRkISP1(CCM):
+ hr_name = 'Crosstalk Correction (RkISP1)'
+ out_name = 'Ccm'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't need anything from the config file.
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ output = {}
+
+ ccms = self.do_calibration(images)
+ output['ccms'] = ccms
+
+ return output
diff --git a/utils/tuning/libtuning/modules/lsc/lsc.py b/utils/tuning/libtuning/modules/lsc/lsc.py
index 344a07a3..e0ca22eb 100644
--- a/utils/tuning/libtuning/modules/lsc/lsc.py
+++ b/utils/tuning/libtuning/modules/lsc/lsc.py
@@ -59,7 +59,10 @@ class LSC(Module):
def _lsc_single_channel(self, channel: np.array,
image: lt.Image, green_grid: np.array = None):
grid = self._get_grid(channel, image.w, image.h)
- grid -= image.blacklevel_16
+ # Clamp the values to a small positive, so that the following 1/grid
+ # doesn't produce negative results.
+ grid = np.maximum(grid - image.blacklevel_16, 0.1)
+
if green_grid is None:
table = np.reshape(1 / grid, self.sector_shape[::-1])
else:
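
A worked example of why the clamp matters: a sector average at or below the black level (possible with noise) would make grid - blacklevel zero or negative, and the subsequent 1 / grid infinite or negative:

    import numpy as np

    blacklevel_16 = 4096
    grid = np.array([4095.0, 4096.0, 8192.0])
    table = 1 / np.maximum(grid - blacklevel_16, 0.1)
    # [10., 10., 0.000244]; without the clamp: [-1., inf, 0.000244]
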
diff --git a/utils/tuning/libtuning/modules/lsc/raspberrypi.py b/utils/tuning/libtuning/modules/lsc/raspberrypi.py
index 58f5000d..99bc4fe6 100644
--- a/utils/tuning/libtuning/modules/lsc/raspberrypi.py
+++ b/utils/tuning/libtuning/modules/lsc/raspberrypi.py
@@ -3,7 +3,7 @@
# Copyright (C) 2019, Raspberry Pi Ltd
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# raspberrypi.py - ALSC module for tuning Raspberry Pi
+# ALSC module for tuning Raspberry Pi
from .lsc import LSC
@@ -12,7 +12,9 @@ import libtuning.utils as utils
from numbers import Number
import numpy as np
+import logging
+logger = logging.getLogger(__name__)
class ALSCRaspberryPi(LSC):
# Override the type name so that the parser can match the entry in the
@@ -35,7 +37,7 @@ class ALSCRaspberryPi(LSC):
def validate_config(self, config: dict) -> bool:
if self not in config:
- utils.eprint(f'{self.type} not in config')
+ logger.error(f'{self.type} not in config')
return False
valid = True
@@ -46,14 +48,14 @@ class ALSCRaspberryPi(LSC):
color_key = self.do_color.name
if lum_key not in conf and self.luminance_strength.required:
- utils.eprint(f'{lum_key} is not in config')
+ logger.error(f'{lum_key} is not in config')
valid = False
if lum_key in conf and (conf[lum_key] < 0 or conf[lum_key] > 1):
- utils.eprint(f'Warning: {lum_key} is not in range [0, 1]; defaulting to 0.5')
+ logger.warning(f'{lum_key} is not in range [0, 1]; defaulting to 0.5')
if color_key not in conf and self.do_color.required:
- utils.eprint(f'{color_key} is not in config')
+ logger.error(f'{color_key} is not in config')
valid = False
return valid
@@ -235,7 +237,7 @@ class ALSCRaspberryPi(LSC):
if count == 1:
output['sigma'] = 0.005
output['sigma_Cb'] = 0.005
- utils.eprint('Warning: Only one alsc calibration found; standard sigmas used for adaptive algorithm.')
+ logger.warning('Only one alsc calibration found; standard sigmas used for adaptive algorithm.')
return output
# Obtain worst-case scenario residual sigmas
diff --git a/utils/tuning/libtuning/modules/lsc/rkisp1.py b/utils/tuning/libtuning/modules/lsc/rkisp1.py
index 5701ae0a..c02b2306 100644
--- a/utils/tuning/libtuning/modules/lsc/rkisp1.py
+++ b/utils/tuning/libtuning/modules/lsc/rkisp1.py
@@ -3,7 +3,7 @@
# Copyright (C) 2019, Raspberry Pi Ltd
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# rkisp1.py - LSC module for tuning rkisp1
+# LSC module for tuning rkisp1
from .lsc import LSC
@@ -33,13 +33,13 @@ class LSCRkISP1(LSC):
# table, flattened array of (blue's) green calibration table
def _do_single_lsc(self, image: lt.Image):
- cgr, gr = self._lsc_single_channel(image.channels[lt.Color.GR], image)
- cgb, gb = self._lsc_single_channel(image.channels[lt.Color.GB], image)
-
- # \todo Should these ratio against the average of both greens or just
- # each green like we've done here?
- cr, _ = self._lsc_single_channel(image.channels[lt.Color.R], image, gr)
- cb, _ = self._lsc_single_channel(image.channels[lt.Color.B], image, gb)
+ # Perform LSC on each colour channel independently. A future enhancement
+ # worth investigating would be splitting the luminance and chrominance
+ # LSC as done by Raspberry Pi.
+ cgr, _ = self._lsc_single_channel(image.channels[lt.Color.GR], image)
+ cgb, _ = self._lsc_single_channel(image.channels[lt.Color.GB], image)
+ cr, _ = self._lsc_single_channel(image.channels[lt.Color.R], image)
+ cb, _ = self._lsc_single_channel(image.channels[lt.Color.B], image)
return image.color, cr.flatten(), cb.flatten(), cgr.flatten(), cgb.flatten()
@@ -80,7 +80,8 @@ class LSCRkISP1(LSC):
tables = []
for lis in [list_cr, list_cgr, list_cgb, list_cb]:
table = np.mean(lis[indices], axis=0)
- table = output_map_func((1, 3.999), (1024, 4095), table)
+ table = output_map_func((1, 4), (1024, 4096), table)
+ table = np.clip(table, 1024, 4095)
table = np.round(table).astype('int32').tolist()
tables.append(table)
@@ -106,6 +107,9 @@ class LSCRkISP1(LSC):
output['sets'] = self._do_all_lsc(images)
+ if len(output['sets']) == 0:
+ return None
+
     # \todo Validate images from greyscale camera and force greyscale mode
# \todo Debug functionality
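
The new bounds make the mapping exact: gains in the (1, 4) domain map linearly onto (1024, 4096), i.e. unity gain becomes 1024 in the hardware's fixed-point format, and the clip folds the one out-of-range value, 4096, back to the largest representable entry. A sketch assuming output_map_func is a plain affine map:

    import numpy as np

    def affine_map(domain, rng, x):  # stand-in for output_map_func
        scale = (rng[1] - rng[0]) / (domain[1] - domain[0])
        return rng[0] + (x - domain[0]) * scale

    table = np.array([1.0, 2.5, 4.0])
    fixed = np.clip(affine_map((1, 4), (1024, 4096), table), 1024, 4095)
    # [1024., 2560., 4095.]
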
diff --git a/utils/tuning/libtuning/modules/lux/__init__.py b/utils/tuning/libtuning/modules/lux/__init__.py
new file mode 100644
index 00000000..af9d4e08
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lux/__init__.py
@@ -0,0 +1,6 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2025, Ideas on Board
+
+from libtuning.modules.lux.lux import Lux
+from libtuning.modules.lux.rkisp1 import LuxRkISP1
diff --git a/utils/tuning/libtuning/modules/lux/lux.py b/utils/tuning/libtuning/modules/lux/lux.py
new file mode 100644
index 00000000..4bad429a
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lux/lux.py
@@ -0,0 +1,70 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+# Copyright (C) 2025, Ideas on Board
+#
+# Base Lux tuning module
+
+from ..module import Module
+
+import logging
+import numpy as np
+
+logger = logging.getLogger(__name__)
+
+
+class Lux(Module):
+ type = 'lux'
+ hr_name = 'Lux (Base)'
+ out_name = 'GenericLux'
+
+ def __init__(self, debug: list):
+ super().__init__()
+
+ self.debug = debug
+
+ def calculate_lux_reference_values(self, images):
+        # The lux calibration is done on a single image. For best results, the
+ # image with lux level closest to 1000 is chosen.
+ imgs = [img for img in images if img.macbeth is not None]
+ lux_values = [img.lux for img in imgs]
+ index = lux_values.index(min(lux_values, key=lambda l: abs(1000 - l)))
+ img = imgs[index]
+ logger.info(f'Selected image {img.name} for lux calibration')
+
+ if img.lux < 50:
+ logger.warning(f'A Lux level of {img.lux} is very low for proper lux calibration')
+
+ ref_y = self.calculate_y(img)
+ exposure_time = img.exposure
+ gain = img.againQ8_norm
+ aperture = 1
+ logger.info(f'RefY:{ref_y} Exposure time:{exposure_time}µs Gain:{gain} Aperture:{aperture}')
+ return {'referenceY': ref_y,
+ 'referenceExposureTime': exposure_time,
+ 'referenceAnalogueGain': gain,
+ 'referenceDigitalGain': 1.0,
+ 'referenceLux': img.lux}
+
+ def calculate_y(self, img):
+ max16Bit = 0xffff
+ # Average over all grey patches.
+ ap_r = np.mean(img.patches[0][3::4]) / max16Bit
+ ap_g = (np.mean(img.patches[1][3::4]) + np.mean(img.patches[2][3::4])) / 2 / max16Bit
+ ap_b = np.mean(img.patches[3][3::4]) / max16Bit
+ logger.debug(f'Averaged grey patches: Red: {ap_r}, Green: {ap_g}, Blue: {ap_b}')
+
+ # Calculate white balance gains.
+ gr = ap_g / ap_r
+ gb = ap_g / ap_b
+ logger.debug(f'WB gains: Red: {gr} Blue: {gb}')
+
+ # Calculate the mean Y value of the whole image
+ a_r = np.mean(img.channels[0]) * gr
+ a_g = (np.mean(img.channels[1]) + np.mean(img.channels[2])) / 2
+ a_b = np.mean(img.channels[3]) * gb
+ y = 0.299 * a_r + 0.587 * a_g + 0.114 * a_b
+ y /= max16Bit
+
+ return y
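
calculate_y() is the Rec. 601 luma weighting applied to the white-balanced channel means, normalised to [0, 1]; a standalone sketch of the final step:

    # Rec. 601 luma from white-balanced channel averages.
    def rec601_luma(a_r, a_g, a_b, max16bit=0xffff):
        return (0.299 * a_r + 0.587 * a_g + 0.114 * a_b) / max16bit

    print(rec601_luma(32768, 32768, 32768))  # mid-grey input -> ~0.5
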
diff --git a/utils/tuning/libtuning/modules/lux/rkisp1.py b/utils/tuning/libtuning/modules/lux/rkisp1.py
new file mode 100644
index 00000000..62d3f94c
--- /dev/null
+++ b/utils/tuning/libtuning/modules/lux/rkisp1.py
@@ -0,0 +1,22 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas on Board
+#
+# Lux module for tuning rkisp1
+
+from .lux import Lux
+
+
+class LuxRkISP1(Lux):
+ hr_name = 'Lux (RkISP1)'
+ out_name = 'Lux'
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ # We don't need anything from the config file.
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ return self.calculate_lux_reference_values(images)
diff --git a/utils/tuning/libtuning/modules/module.py b/utils/tuning/libtuning/modules/module.py
index 12e2fc7c..de624384 100644
--- a/utils/tuning/libtuning/modules/module.py
+++ b/utils/tuning/libtuning/modules/module.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# module.py - Base class for algorithm-specific tuning modules
+# Base class for algorithm-specific tuning modules
# @var type Type of the module. Defined in the base module.
diff --git a/utils/tuning/libtuning/modules/static.py b/utils/tuning/libtuning/modules/static.py
new file mode 100644
index 00000000..4d0f7e18
--- /dev/null
+++ b/utils/tuning/libtuning/modules/static.py
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Copyright (C) 2024, Ideas on Board
+#
+# Module implementation for static data
+
+from .module import Module
+
+
+# This module can be used in cases where the tuning file should contain
+# static data.
+class StaticModule(Module):
+ def __init__(self, out_name: str, output: dict = {}):
+ super().__init__()
+ self.out_name = out_name
+ self.hr_name = f'Static {out_name}'
+ self.type = f'static_{out_name}'
+ self.output = output
+
+ def validate_config(self, config: dict) -> bool:
+ return True
+
+ def process(self, config: dict, images: list, outputs: dict) -> dict:
+ return self.output
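
StaticModule simply echoes its constructor data into the tuning file (the mutable default for output is safe here only because the dict is never modified). The rkisp1.py script later in this diff uses it as follows:

    blc = StaticModule('BlackLevelCorrection')
    gamma_out = StaticModule('GammaOutCorrection', {'gamma': 2.2})
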
diff --git a/utils/tuning/libtuning/parsers/parser.py b/utils/tuning/libtuning/parsers/parser.py
index a17d8d71..0c3944c7 100644
--- a/utils/tuning/libtuning/parsers/parser.py
+++ b/utils/tuning/libtuning/parsers/parser.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# parser.py - Base class for a parser for a specific format of config file
+# Base class for a parser for a specific format of config file
class Parser(object):
def __init__(self):
diff --git a/utils/tuning/libtuning/parsers/raspberrypi_parser.py b/utils/tuning/libtuning/parsers/raspberrypi_parser.py
index d26586ba..f1da4592 100644
--- a/utils/tuning/libtuning/parsers/raspberrypi_parser.py
+++ b/utils/tuning/libtuning/parsers/raspberrypi_parser.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# raspberrypi_parser.py - Parser for Raspberry Pi config file format
+# Parser for Raspberry Pi config file format
from .parser import Parser
diff --git a/utils/tuning/libtuning/parsers/yaml_parser.py b/utils/tuning/libtuning/parsers/yaml_parser.py
index 5c1673a5..1fa6b7a8 100644
--- a/utils/tuning/libtuning/parsers/yaml_parser.py
+++ b/utils/tuning/libtuning/parsers/yaml_parser.py
@@ -2,16 +2,19 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# yaml_parser.py - Parser for YAML format config file
+# Parser for YAML format config file
from .parser import Parser
+import yaml
class YamlParser(Parser):
def __init__(self):
super().__init__()
- # \todo Implement this (it's fine for now as we don't need a config for
- # rkisp1 LSC, which is the only user of this so far)
def parse(self, config_file: str, modules: list) -> (dict, list):
- return {}, []
+ # Dummy implementation that just reads the file
+ with open(config_file, 'r') as f:
+ config = yaml.safe_load(f)
+
+ return config, []
diff --git a/utils/tuning/libtuning/smoothing.py b/utils/tuning/libtuning/smoothing.py
index b8a5a242..de4d920c 100644
--- a/utils/tuning/libtuning/smoothing.py
+++ b/utils/tuning/libtuning/smoothing.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# smoothing.py - Wrapper for cv2 smoothing functions to enable duck-typing
+# Wrapper for cv2 smoothing functions to enable duck-typing
import cv2
diff --git a/utils/tuning/libtuning/utils.py b/utils/tuning/libtuning/utils.py
index b60f2c9b..e35cf409 100644
--- a/utils/tuning/libtuning/utils.py
+++ b/utils/tuning/libtuning/utils.py
@@ -3,8 +3,9 @@
# Copyright (C) 2019, Raspberry Pi Ltd
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# utils.py - Utilities for libtuning
+# Utilities for libtuning
+import cv2
import decimal
import math
import numpy as np
@@ -12,16 +13,15 @@ import os
from pathlib import Path
import re
import sys
+import logging
import libtuning as lt
from libtuning.image import Image
-from libtuning.macbeth import locate_macbeth
-
-# Utility functions
+from .macbeth import locate_macbeth
+logger = logging.getLogger(__name__)
-def eprint(*args, **kwargs):
- print(*args, file=sys.stderr, **kwargs)
+# Utility functions
def get_module_by_type_name(modules, name):
@@ -43,16 +43,30 @@ def _list_image_files(directory):
def _parse_image_filename(fn: Path):
- result = re.search(r'^(alsc_)?(\d+)[kK]_(\d+)?[lLuU]?.\w{3,4}$', fn.name)
- if result is None:
- eprint(f'The file name of {fn.name} is incorrectly formatted')
- return None, None, None
+ lsc_only = False
+ color_temperature = None
+ lux = None
+
+ parts = fn.stem.split('_')
+ for part in parts:
+ if part == 'alsc':
+ lsc_only = True
+ continue
+ r = re.match(r'(\d+)[kK]', part)
+ if r:
+ color_temperature = int(r.group(1))
+ continue
+ r = re.match(r'(\d+)[lLuU]', part)
+ if r:
+ lux = int(r.group(1))
+
+ if color_temperature is None:
+ logger.error(f'The file name of "{fn.name}" does not contain a color temperature')
- color = int(result.group(2))
- lsc_only = result.group(1) is not None
- lux = None if lsc_only else int(result.group(3))
+    if lux is None and not lsc_only:
+        logger.error(f'The file name of "{fn.name}" must contain either "alsc" or a lux level')
- return color, lux, lsc_only
+ return color_temperature, lux, lsc_only
# \todo Implement this from check_imgs() in ctt.py
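
The rewritten parser accepts the same naming convention as the old regex but no longer depends on the order of the parts; sketched results, assuming _parse_image_filename() is imported from libtuning.utils:

    from pathlib import Path

    _parse_image_filename(Path('3000k_1000l.dng'))     # (3000, 1000, False)
    _parse_image_filename(Path('alsc_5000k.dng'))      # (5000, None, True)
    _parse_image_filename(Path('alsc_3000k_10l.dng'))  # (3000, 10, True)
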
@@ -72,30 +86,34 @@ def _validate_images(images):
def load_images(input_dir: str, config: dict, load_nonlsc: bool, load_lsc: bool) -> list:
files = _list_image_files(input_dir)
if len(files) == 0:
- eprint(f'No images found in {input_dir}')
+ logger.error(f'No images found in {input_dir}')
return None
images = []
for f in files:
color, lux, lsc_only = _parse_image_filename(f)
+
if color is None:
+ logger.warning(f'Ignoring "{f.name}" as it has no associated color temperature')
continue
+        logger.info(f'Processing image "{f.name}" (color={color}, lux={lux}, lsc_only={lsc_only})')
+
# Skip lsc image if we don't need it
if lsc_only and not load_lsc:
- eprint(f'Skipping {f.name} as this tuner has no LSC module')
+ logger.warning(f'Skipping {f.name} as this tuner has no LSC module')
continue
# Skip non-lsc image if we don't need it
if not lsc_only and not load_nonlsc:
- eprint(f'Skipping {f.name} as this tuner only has an LSC module')
+ logger.warning(f'Skipping {f.name} as this tuner only has an LSC module')
continue
# Load image
try:
image = Image(f)
except Exception as e:
- eprint(f'Failed to load image {f.name}: {e}')
+ logger.error(f'Failed to load image {f.name}: {e}')
continue
# Populate simple fields
@@ -113,7 +131,7 @@ def load_images(input_dir: str, config: dict, load_nonlsc: bool, load_lsc: bool)
continue
# Handle macbeth
- macbeth = locate_macbeth(config)
+ macbeth = locate_macbeth(image, config)
if macbeth is None:
continue
@@ -123,3 +141,46 @@ def load_images(input_dir: str, config: dict, load_nonlsc: bool, load_lsc: bool)
return None
return images
+
+
+"""
+Some code that will save virtual macbeth charts that show the difference between optimised matrices and non optimised matrices
+
+The function creates an image that is 1550 by 1050 pixels wide, and fills it with patches which are 200x200 pixels in size
+Each patch contains the ideal color, the color from the original matrix, and the color from the final matrix
+_________________
+| |
+| Ideal Color |
+|_______________|
+| Old | new |
+| Color | Color |
+|_______|_______|
+
+Nice way of showing how the optimisation helps change the colors and the color matricies
+"""
+def visualise_macbeth_chart(macbeth_rgb, original_rgb, new_rgb, output_filename):
+ image = np.zeros((1050, 1550, 3), dtype=np.uint8)
+ colorindex = -1
+ for y in range(6):
+ for x in range(4): # Creates 6 x 4 grid of macbeth chart
+ colorindex += 1
+ xlocation = 50 + 250 * x # Means there is 50px of black gap between each square, more like the real macbeth chart.
+ ylocation = 50 + 250 * y
+ for g in range(200):
+ for i in range(100):
+ image[xlocation + i, ylocation + g] = macbeth_rgb[colorindex]
+ xlocation = 150 + 250 * x
+ ylocation = 50 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = original_rgb[colorindex] # Smaller squares below to compare the old colors with the new ones
+ xlocation = 150 + 250 * x
+ ylocation = 150 + 250 * y
+ for i in range(100):
+ for g in range(100):
+ image[xlocation + i, ylocation + g] = new_rgb[colorindex]
+
+ im_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
+ cv2.imwrite(f'{output_filename} Generated Macbeth Chart.png', im_bgr)
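
A hedged usage sketch: each of the three colour arguments is a sequence of 24 RGB triplets in chart order, and the output name gets a fixed suffix appended:

    patches = [(128, 128, 128)] * 24  # placeholder colours
    visualise_macbeth_chart(patches, patches, patches, 'sensor_3000k')
    # writes 'sensor_3000k Generated Macbeth Chart.png'
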
diff --git a/utils/tuning/raspberrypi/alsc.py b/utils/tuning/raspberrypi/alsc.py
index 024eb5a3..ba8fc9e1 100644
--- a/utils/tuning/raspberrypi/alsc.py
+++ b/utils/tuning/raspberrypi/alsc.py
@@ -2,7 +2,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# alsc.py - ALSC module instance for Raspberry Pi tuning scripts
+# ALSC module instance for Raspberry Pi tuning scripts
import libtuning as lt
from libtuning.modules.lsc import ALSCRaspberryPi
diff --git a/utils/tuning/raspberrypi_alsc_only.py b/utils/tuning/raspberrypi_alsc_only.py
index af04e6a8..777d8007 100755
--- a/utils/tuning/raspberrypi_alsc_only.py
+++ b/utils/tuning/raspberrypi_alsc_only.py
@@ -3,7 +3,7 @@
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
#
-# raspberrypi_alsc_only.py - Tuning script for raspberrypi, ALSC only
+# Tuning script for raspberrypi, ALSC only
import sys
diff --git a/utils/tuning/requirements.txt b/utils/tuning/requirements.txt
new file mode 100644
index 00000000..3705769b
--- /dev/null
+++ b/utils/tuning/requirements.txt
@@ -0,0 +1,9 @@
+coloredlogs
+matplotlib
+numpy
+opencv-python
+py3exiv2
+pyyaml
+rawpy
+scikit-learn
+scipy
diff --git a/utils/tuning/rkisp1.py b/utils/tuning/rkisp1.py
index 1cea6ddb..207b717a 100755
--- a/utils/tuning/rkisp1.py
+++ b/utils/tuning/rkisp1.py
@@ -2,39 +2,60 @@
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Copyright (C) 2022, Paul Elder <paul.elder@ideasonboard.com>
+# Copyright (C) 2024, Ideas On Board
#
-# rkisp1.py - Tuning script for rkisp1
+# Tuning script for rkisp1
+import logging
import sys
+import coloredlogs
import libtuning as lt
-from libtuning.parsers import YamlParser
from libtuning.generators import YamlOutput
+from libtuning.modules.agc import AGCRkISP1
+from libtuning.modules.awb import AWBRkISP1
+from libtuning.modules.ccm import CCMRkISP1
from libtuning.modules.lsc import LSCRkISP1
+from libtuning.modules.lux import LuxRkISP1
+from libtuning.modules.static import StaticModule
+from libtuning.parsers import YamlParser
+
+coloredlogs.install(level=logging.INFO, fmt='%(name)s %(levelname)s %(message)s')
+
+agc = AGCRkISP1(debug=[lt.Debug.Plot])
+awb = AWBRkISP1(debug=[lt.Debug.Plot])
+blc = StaticModule('BlackLevelCorrection')
+ccm = CCMRkISP1(debug=[lt.Debug.Plot])
+color_processing = StaticModule('ColorProcessing')
+filter = StaticModule('Filter')
+gamma_out = StaticModule('GammaOutCorrection', {'gamma': 2.2})
+lsc = LSCRkISP1(debug=[lt.Debug.Plot],
+ # This is for the actual LSC tuning, and is part of the base LSC
+ # module. rkisp1's table sector sizes (16x16 programmed as mirrored
+                # 8x8) are separate, and are hardcoded in its specific LSC tuning
+ # module.
+ sector_shape=(17, 17),
+
+ sector_x_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+ sector_y_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
+
+ # This is the function that will be used to average the pixels in
+ # each sector. This can also be a custom function.
+ sector_average_function=lt.average.Mean(),
+
+ # This is the function that will be used to smooth the color ratio
+ # values. This can also be a custom function.
+                smoothing_function=lt.smoothing.MedianBlur(3))
+lux = LuxRkISP1(debug=[lt.Debug.Plot])
tuner = lt.Tuner('RkISP1')
-tuner.add(LSCRkISP1(
- debug=[lt.Debug.Plot],
- # This is for the actual LSC tuning, and is part of the base LSC
- # module. rkisp1's table sector sizes (16x16 programmed as mirrored
- # 8x8) are separate, and is hardcoded in its specific LSC tuning
- # module.
- sector_shape=(17, 17),
-
- sector_x_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
- sector_y_gradient=lt.gradient.Linear(lt.Remainder.DistributeFront),
-
- # This is the function that will be used to average the pixels in
- # each sector. This can also be a custom function.
- sector_average_function=lt.average.Mean(),
-
- # This is the function that will be used to smooth the color ratio
- # values. This can also be a custom function.
- smoothing_function=lt.smoothing.MedianBlur(3),
- ))
+tuner.add([agc, awb, blc, ccm, color_processing, filter, gamma_out, lsc, lux])
tuner.set_input_parser(YamlParser())
tuner.set_output_formatter(YamlOutput())
-tuner.set_output_order([LSCRkISP1])
+
+# Bayesian AWB uses the lux value, so insert the lux algorithm before AWB.
+tuner.set_output_order([agc, lux, awb, blc, ccm, color_processing,
+ filter, gamma_out, lsc])
if __name__ == '__main__':
sys.exit(tuner.run(sys.argv))
diff --git a/utils/update-kernel-headers.sh b/utils/update-kernel-headers.sh
index 590986d2..9a64dfb5 100755
--- a/utils/update-kernel-headers.sh
+++ b/utils/update-kernel-headers.sh
@@ -9,7 +9,7 @@ if [ $# != 1 ] ; then
fi
header_dir="$(dirname "$(realpath "$0")")/../include/linux"
-kernel_dir="$1"
+kernel_dir="$(realpath "$1")"
# Bail out if the directory doesn't contain kernel sources
line=$(head -3 "${kernel_dir}/Kbuild" 2>/dev/null | tail -1)
@@ -52,6 +52,7 @@ headers="
linux/media-bus-format.h
linux/media.h
linux/rkisp1-config.h
+ linux/udmabuf.h
linux/v4l2-common.h
linux/v4l2-controls.h
linux/v4l2-mediabus.h
diff --git a/utils/update-mojo.sh b/utils/update-mojo.sh
index fcbc81e7..09c8ff5b 100755
--- a/utils/update-mojo.sh
+++ b/utils/update-mojo.sh
@@ -3,13 +3,23 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Update mojo copy from a chromium source tree
+set -e
+
if [ $# != 1 ] ; then
echo "Usage: $0 <chromium dir>"
exit 1
fi
ipc_dir="$(dirname "$(realpath "$0")")/ipc"
-chromium_dir="$1"
+chromium_dir="$(realpath "$1")"
+
+cd "${ipc_dir}/../../"
+
+# Reject dirty libcamera trees
+if [ -n "$(git status --porcelain -uno)" ] ; then
+ echo "libcamera tree is dirty"
+ exit 1
+fi
if [ ! -d "${chromium_dir}/mojo" ] ; then
echo "Directory ${chromium_dir} doesn't contain mojo"
@@ -24,19 +34,23 @@ fi
# Get the chromium commit id
version=$(git -C "${chromium_dir}" rev-parse --short HEAD)
-# Reject dirty trees
+# Reject dirty chromium trees
if [ -n "$(git -C "${chromium_dir}" status --porcelain)" ] ; then
echo "Chromium tree in ${chromium_dir} is dirty"
exit 1
fi
+# Remove the previously imported files.
+rm -rf utils/ipc/mojo/
+rm -rf utils/ipc/tools/
+
# Copy the diagnosis file
-cp "${chromium_dir}/tools/diagnosis/crbug_1001171.py" "${ipc_dir}/tools/diagnosis"
+mkdir -p utils/ipc/tools/diagnosis/
+cp "${chromium_dir}/tools/diagnosis/crbug_1001171.py" utils/ipc/tools/diagnosis/
# Copy the rest of mojo
-cp "${chromium_dir}/mojo/public/LICENSE" "${ipc_dir}/mojo/public"
-
-rm -rf "${ipc_dir}/mojo/public/tools/*"
+mkdir -p utils/ipc/mojo/public/
+cp "${chromium_dir}/mojo/public/LICENSE" utils/ipc/mojo/public/
(
cd "${chromium_dir}" || exit
@@ -55,12 +69,22 @@ modify them manually.
EOF
)
-echo "$readme" > "${ipc_dir}/mojo/README"
-echo "$readme" > "${ipc_dir}/tools/README"
+echo "$readme" > utils/ipc/mojo/README
+echo "$readme" > utils/ipc/tools/README
-cat <<EOF
-------------------------------------------------------------
-mojo updated. Please review and up-port local changes before
-committing.
-------------------------------------------------------------
-EOF
+# Commit the update. Use 'git commit -n' to avoid checkstyle pre-commit hook
+# failures, as mojo doesn't comply with the Python coding style enforced by
+# checkstyle.py.
+git add utils/ipc/mojo/
+git add utils/ipc/tools/
+
+echo "utils: ipc: Update mojo
+
+Update mojo from commit
+
+$(git -C "${chromium_dir}" show --pretty='%H "%s"' --no-patch)
+
+from the Chromium repository.
+
+The update-mojo.sh script was used for this update." | \
+git commit -n -s -F -