[libcamera-devel] [PATCH v4 01/37] utils: ipc: import mojo

Niklas Söderlund niklas.soderlund at ragnatech.se
Tue Nov 10 01:22:53 CET 2020


Hi Paul,

Thanks for your work.

On 2020-11-06 19:36:31 +0900, Paul Elder wrote:
> Import mojo from the Chromium repository, so that we can use it for
> generating code for the IPC mechanism. The commit from which this was
> taken is:
> a079161ec8c6907b883f9cb84fc8c4e7896cb1d0 "Add PPAPI constructs for
> sending focus object to PdfAccessibilityTree"
> 
> This tree has been pruned to remove directories that didn't have any
> necessary code:
> - mojo/* except for mojo/public
>   - mojo core, docs, and misc files
> - mojo/public/* except for mojo/public/{tools,LICENSE}
>   - language bindings for IPC, tests, and some mojo internals
> - mojo/public/tools/{fuzzers,chrome_ipc}
> - mojo/public/tools/bindings/generators
>   - code generation for other languages
> 
> No files were modified.
> 
> Signed-off-by: Paul Elder <paul.elder at ideasonboard.com>
> Acked-by: Laurent Pinchart <laurent.pinchart at ideasonboard.com>

This patch is _huge_ and there is no way I will review all files and
diff them against the mentioned upstream commit, so I trust you when
you say none of the files are modified.

Acked-by: Niklas Söderlund <niklas.soderlund at ragnatech.se>

> 
> ---
> No change in v4
> 
> No change in v3
> 
> Changes in v2:
> - add chromium commit id and list of pruned directories to the changelog
> ---
>  utils/ipc/mojo/public/LICENSE                 |   27 +
>  utils/ipc/mojo/public/tools/.style.yapf       |    6 +
>  utils/ipc/mojo/public/tools/BUILD.gn          |   18 +
>  utils/ipc/mojo/public/tools/bindings/BUILD.gn |  108 +
>  .../ipc/mojo/public/tools/bindings/README.md  |  816 +++++++
>  .../chromium_bindings_configuration.gni       |   51 +
>  .../tools/bindings/compile_typescript.py      |   27 +
>  .../tools/bindings/concatenate-files.py       |   54 +
>  ...concatenate_and_replace_closure_exports.py |   73 +
>  .../bindings/format_typemap_generator_args.py |   36 +
>  .../tools/bindings/gen_data_files_list.py     |   52 +
>  .../tools/bindings/generate_type_mappings.py  |  187 ++
>  .../ipc/mojo/public/tools/bindings/mojom.gni  | 1941 +++++++++++++++++
>  .../bindings/mojom_bindings_generator.py      |  390 ++++
>  .../mojom_bindings_generator_unittest.py      |   62 +
>  .../tools/bindings/mojom_types_downgrader.py  |  119 +
>  .../tools/bindings/validate_typemap_config.py |   57 +
>  utils/ipc/mojo/public/tools/mojom/README.md   |   14 +
>  .../mojom/check_stable_mojom_compatibility.py |  170 ++
>  ...eck_stable_mojom_compatibility_unittest.py |  260 +++
>  .../mojo/public/tools/mojom/const_unittest.py |   90 +
>  .../mojo/public/tools/mojom/enum_unittest.py  |   92 +
>  .../mojo/public/tools/mojom/mojom/BUILD.gn    |   43 +
>  .../mojo/public/tools/mojom/mojom/__init__.py |    0
>  .../mojo/public/tools/mojom/mojom/error.py    |   28 +
>  .../mojo/public/tools/mojom/mojom/fileutil.py |   45 +
>  .../tools/mojom/mojom/fileutil_unittest.py    |   40 +
>  .../tools/mojom/mojom/generate/__init__.py    |    0
>  .../mojom/mojom/generate/constant_resolver.py |   93 +
>  .../tools/mojom/mojom/generate/generator.py   |  325 +++
>  .../mojom/generate/generator_unittest.py      |   74 +
>  .../tools/mojom/mojom/generate/module.py      | 1635 ++++++++++++++
>  .../mojom/mojom/generate/module_unittest.py   |   31 +
>  .../public/tools/mojom/mojom/generate/pack.py |  258 +++
>  .../mojom/mojom/generate/pack_unittest.py     |  225 ++
>  .../mojom/mojom/generate/template_expander.py |   83 +
>  .../tools/mojom/mojom/generate/translate.py   |  854 ++++++++
>  .../mojom/generate/translate_unittest.py      |   73 +
>  .../tools/mojom/mojom/parse/__init__.py       |    0
>  .../public/tools/mojom/mojom/parse/ast.py     |  427 ++++
>  .../tools/mojom/mojom/parse/ast_unittest.py   |  121 +
>  .../mojom/mojom/parse/conditional_features.py |   82 +
>  .../parse/conditional_features_unittest.py    |  233 ++
>  .../public/tools/mojom/mojom/parse/lexer.py   |  251 +++
>  .../tools/mojom/mojom/parse/lexer_unittest.py |  198 ++
>  .../public/tools/mojom/mojom/parse/parser.py  |  488 +++++
>  .../mojom/mojom/parse/parser_unittest.py      | 1390 ++++++++++++
>  .../mojo/public/tools/mojom/mojom_parser.py   |  361 +++
>  .../tools/mojom/mojom_parser_test_case.py     |   73 +
>  .../tools/mojom/mojom_parser_unittest.py      |  171 ++
>  .../tools/mojom/stable_attribute_unittest.py  |  127 ++
>  .../mojom/version_compatibility_unittest.py   |  397 ++++
>  .../public/tools/run_all_python_unittests.py  |   28 +
>  utils/ipc/tools/diagnosis/crbug_1001171.py    |   51 +
>  54 files changed, 12855 insertions(+)
>  create mode 100644 utils/ipc/mojo/public/LICENSE
>  create mode 100644 utils/ipc/mojo/public/tools/.style.yapf
>  create mode 100644 utils/ipc/mojo/public/tools/BUILD.gn
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/BUILD.gn
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/README.md
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/compile_typescript.py
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/concatenate-files.py
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/mojom.gni
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
>  create mode 100644 utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
>  create mode 100755 utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/README.md
>  create mode 100755 utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
>  create mode 100755 utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/const_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/enum_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/__init__.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/error.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
>  create mode 100755 utils/ipc/mojo/public/tools/mojom/mojom_parser.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
>  create mode 100644 utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
>  create mode 100755 utils/ipc/mojo/public/tools/run_all_python_unittests.py
>  create mode 100644 utils/ipc/tools/diagnosis/crbug_1001171.py
> 
> diff --git a/utils/ipc/mojo/public/LICENSE b/utils/ipc/mojo/public/LICENSE
> new file mode 100644
> index 00000000..972bb2ed
> --- /dev/null
> +++ b/utils/ipc/mojo/public/LICENSE
> @@ -0,0 +1,27 @@
> +// Copyright 2014 The Chromium Authors. All rights reserved.
> +//
> +// Redistribution and use in source and binary forms, with or without
> +// modification, are permitted provided that the following conditions are
> +// met:
> +//
> +//    * Redistributions of source code must retain the above copyright
> +// notice, this list of conditions and the following disclaimer.
> +//    * Redistributions in binary form must reproduce the above
> +// copyright notice, this list of conditions and the following disclaimer
> +// in the documentation and/or other materials provided with the
> +// distribution.
> +//    * Neither the name of Google Inc. nor the names of its
> +// contributors may be used to endorse or promote products derived from
> +// this software without specific prior written permission.
> +//
> +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
> +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
> +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
> +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
> +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
> +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
> +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
> +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
> +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
> +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
> +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
> diff --git a/utils/ipc/mojo/public/tools/.style.yapf b/utils/ipc/mojo/public/tools/.style.yapf
> new file mode 100644
> index 00000000..b4ebbe24
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/.style.yapf
> @@ -0,0 +1,6 @@
> +[style]
> +based_on_style = pep8
> +
> +# New directories should use a .style.yapf that does not include the following:
> +column_limit = 80
> +indent_width = 2
> diff --git a/utils/ipc/mojo/public/tools/BUILD.gn b/utils/ipc/mojo/public/tools/BUILD.gn
> new file mode 100644
> index 00000000..4c68350b
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/BUILD.gn
> @@ -0,0 +1,18 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +# The main target used to aggregate all unit tests for Python-based Mojo tools.
> +# This is used to generate a complete isolate which can be pushed to bots to run
> +# the tests.
> +group("mojo_python_unittests") {
> +  data = [
> +    "run_all_python_unittests.py",
> +    "//testing/scripts/common.py",
> +    "//testing/scripts/run_isolated_script_test.py",
> +    "//testing/test_env.py",
> +    "//testing/xvfb.py",
> +  ]
> +  deps = [ "//mojo/public/tools/mojom/mojom:tests" ]
> +  data_deps = [ "//third_party/catapult/third_party/typ/" ]
> +}
> diff --git a/utils/ipc/mojo/public/tools/bindings/BUILD.gn b/utils/ipc/mojo/public/tools/bindings/BUILD.gn
> new file mode 100644
> index 00000000..8ba6e922
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/BUILD.gn
> @@ -0,0 +1,108 @@
> +# Copyright 2016 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import("//mojo/public/tools/bindings/mojom.gni")
> +import("//third_party/jinja2/jinja2.gni")
> +
> +action("precompile_templates") {
> +  sources = mojom_generator_sources
> +  sources += [
> +    "$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-shared-internal.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-test-utils.cc.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_data_view_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_data_view_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_macros.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_serialization_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_traits_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_traits_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/struct_unserialized_message_context.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_data_view_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_data_view_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_serialization_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_traits_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/union_traits_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/validation_macros.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/wrapper_class_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/wrapper_class_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/wrapper_class_template_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_declaration.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_definition.tmpl",
> +    "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_template_definition.tmpl",
> +    "$mojom_generator_root/generators/java_templates/constant_definition.tmpl",
> +    "$mojom_generator_root/generators/java_templates/constants.java.tmpl",
> +    "$mojom_generator_root/generators/java_templates/data_types_definition.tmpl",
> +    "$mojom_generator_root/generators/java_templates/enum.java.tmpl",
> +    "$mojom_generator_root/generators/java_templates/enum_definition.tmpl",
> +    "$mojom_generator_root/generators/java_templates/header.java.tmpl",
> +    "$mojom_generator_root/generators/java_templates/interface.java.tmpl",
> +    "$mojom_generator_root/generators/java_templates/interface_definition.tmpl",
> +    "$mojom_generator_root/generators/java_templates/interface_internal.java.tmpl",
> +    "$mojom_generator_root/generators/java_templates/struct.java.tmpl",
> +    "$mojom_generator_root/generators/java_templates/union.java.tmpl",
> +    "$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/externs/interface_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/externs/module.externs.tmpl",
> +    "$mojom_generator_root/generators/js_templates/externs/struct_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
> +    "$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/lite/interface_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/lite/module_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/lite/mojom-lite.js.tmpl",
> +    "$mojom_generator_root/generators/js_templates/lite/struct_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/lite/union_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/module.amd.tmpl",
> +    "$mojom_generator_root/generators/js_templates/module_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/struct_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/union_definition.tmpl",
> +    "$mojom_generator_root/generators/js_templates/validation_macros.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm.cc.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm.h.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm.proto.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_from_proto_macros.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
> +    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
> +    "$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
> +    "$mojom_generator_root/generators/ts_templates/mojom.tmpl",
> +  ]
> +  script = mojom_generator_script
> +
> +  inputs = jinja2_sources
> +  outputs = [
> +    "$target_gen_dir/cpp_templates.zip",
> +    "$target_gen_dir/java_templates.zip",
> +    "$target_gen_dir/mojolpm_templates.zip",
> +    "$target_gen_dir/js_templates.zip",
> +    "$target_gen_dir/ts_templates.zip",
> +  ]
> +  args = [
> +    "-o",
> +    rebase_path(target_gen_dir, root_build_dir),
> +    "--use_bundled_pylibs",
> +    "precompile",
> +  ]
> +}
> diff --git a/utils/ipc/mojo/public/tools/bindings/README.md b/utils/ipc/mojo/public/tools/bindings/README.md
> new file mode 100644
> index 00000000..1a3d5c58
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/README.md
> @@ -0,0 +1,816 @@
> +# Mojom Interface Definition Language (IDL)
> +This document is a subset of the [Mojo documentation](/mojo/README.md).
> +
> +[TOC]
> +
> +## Overview
> +
> +Mojom is the IDL for Mojo interfaces. Given a `.mojom` file, the
> +[bindings
> +generator](https://cs.chromium.org/chromium/src/mojo/public/tools/bindings/) can
> +output bindings for any supported language: **C++**, **JavaScript**, or
> +**Java**.
> +
> +For a trivial example consider the following hypothetical Mojom file we write to
> +`//services/widget/public/mojom/frobinator.mojom`:
> +
> +```
> +module widget.mojom;
> +
> +interface Frobinator {
> +  Frobinate();
> +};
> +```
> +
> +This defines a single [interface](#Interfaces) named `Frobinator` in a
> +[module](#Modules) named `widget.mojom` (and thus fully qualified in Mojom as
> +`widget.mojom.Frobinator`.) Note that many interfaces and/or other types of
> +definitions (structs, enums, *etc.*) may be included in a single Mojom file.
> +
> +If we add a corresponding GN target to
> +`//services/widget/public/mojom/BUILD.gn`:
> +
> +```
> +import("mojo/public/tools/bindings/mojom.gni")
> +
> +mojom("mojom") {
> +  sources = [
> +    "frobinator.mojom",
> +  ]
> +}
> +```
> +
> +and then build this target:
> +
> +```
> +ninja -C out/r services/widget/public/mojom
> +```
> +
> +we'll find several generated sources in our output directory:
> +
> +```
> +out/r/gen/services/widget/public/mojom/frobinator.mojom.cc
> +out/r/gen/services/widget/public/mojom/frobinator.mojom.h
> +out/r/gen/services/widget/public/mojom/frobinator.mojom-shared.h
> +etc...
> +```
> +
> +Each of these generated source modules includes a set of definitions
> +representing the Mojom contents in C++. You can also build or depend on suffixed
> +target names to get bindings for other languages. For example,
> +
> +```
> +ninja -C out/r services/widget/public/mojom:mojom_js
> +ninja -C out/r services/widget/public/mojom:mojom_java
> +```
> +
> +would generate JavaScript and Java bindings respectively, in the same generated
> +output directory.
> +
> +For more details regarding the generated
> +outputs please see
> +[documentation for individual target languages](#Generated-Code-For-Target-Languages).
> +
> +## Mojom Syntax
> +
> +Mojom IDL allows developers to define **structs**, **unions**, **interfaces**,
> +**constants**, and **enums**, all within the context of a **module**. These
> +definitions are used to generate code in the supported target languages at build
> +time.
> +
> +Mojom files may **import** other Mojom files in order to reference their
> +definitions.
> +
> +### Primitive Types
> +Mojom supports a few basic data types which may be composed into structs or used
> +for message parameters.
> +
> +| Type                          | Description
> +|-------------------------------|-------------------------------------------------------|
> +| `bool`                        | Boolean type (`true` or `false`.)
> +| `int8`, `uint8`               | Signed or unsigned 8-bit integer.
> +| `int16`, `uint16`             | Signed or unsigned 16-bit integer.
> +| `int32`, `uint32`             | Signed or unsigned 32-bit integer.
> +| `int64`, `uint64`             | Signed or unsigned 64-bit integer.
> +| `float`, `double`             | 32- or 64-bit floating point number.
> +| `string`                      | UTF-8 encoded string.
> +| `array<T>`                    | Array of any Mojom type *T*; for example, `array<uint8>` or `array<array<string>>`.
> +| `array<T, N>`                 | Fixed-length array of any Mojom type *T*. The parameter *N* must be an integral constant.
> +| `map<S, T>`                   | Associative array mapping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
> +| `handle`                      | Generic Mojo handle. May be any type of handle, including a wrapped native platform handle.
> +| `handle<message_pipe>`        | Generic message pipe handle.
> +| `handle<shared_buffer>`       | Shared buffer handle.
> +| `handle<data_pipe_producer>`  | Data pipe producer handle.
> +| `handle<data_pipe_consumer>`  | Data pipe consumer handle.
> +| `handle<platform>`            | A native platform/OS handle.
> +| *`pending_remote<InterfaceType>`*             | Any user-defined Mojom interface type. This is sugar for a strongly-typed message pipe handle which should eventually be used to make outgoing calls on the interface.
> +| *`pending_receiver<InterfaceType>`*            | A pending receiver for any user-defined Mojom interface type. This is sugar for a more strongly-typed message pipe handle which is expected to receive request messages and should therefore eventually be bound to an implementation of the interface.
> +| *`pending_associated_remote<InterfaceType>`*  | An associated interface handle. See [Associated Interfaces](#Associated-Interfaces)
> +| *`pending_associated_receiver<InterfaceType>`* | A pending associated receiver. See [Associated Interfaces](#Associated-Interfaces)
> +| *T*?                          | An optional (nullable) value. Primitive numeric types (integers, floats, booleans, and enums) are not nullable. All other types are nullable.
> +
> +### Modules
> +
> +Every Mojom file may optionally specify a single **module** to which it belongs.
> +
> +This is used strictly for aggregating all defined symbols therein within a
> +common Mojom namespace. The specific impact this has on generated bindings code
> +varies for each target language. For example, if the following Mojom is used to
> +generate bindings:
> +
> +```
> +module business.stuff;
> +
> +interface MoneyGenerator {
> +  GenerateMoney();
> +};
> +```
> +
> +Generated C++ bindings will define a class interface `MoneyGenerator` in the
> +`business::stuff` namespace, while Java bindings will define an interface
> +`MoneyGenerator` in the `org.chromium.business.stuff` package. JavaScript
> +bindings at this time are unaffected by module declarations.
> +
> +**NOTE:** By convention in the Chromium codebase, **all** Mojom files should
> +declare a module name with at least (and preferably exactly) one top-level name
> +as well as an inner `mojom` module suffix. *e.g.*, `chrome.mojom`,
> +`business.mojom`, *etc.*
> +
> +This convention makes it easy to tell which symbols are generated by Mojom when
> +reading non-Mojom code, and it also avoids namespace collisions in the fairly
> +common scenario where you have a real C++ or Java `Foo` along with a
> +corresponding Mojom `Foo` for its serialized representation.
> +
> +### Imports
> +
> +If your Mojom references definitions from other Mojom files, you must **import**
> +those files. Import syntax is as follows:
> +
> +```
> +import "services/widget/public/mojom/frobinator.mojom";
> +```
> +
> +Import paths are always relative to the top-level directory.
> +
> +Note that circular imports are **not** supported.
> +
> +### Structs
> +
> +Structs are defined using the **struct** keyword, and they provide a way to
> +group related fields together:
> +
> +``` cpp
> +struct StringPair {
> +  string first;
> +  string second;
> +};
> +```
> +
> +Struct fields may be any of the types listed above in the
> +[Primitive Types](#Primitive-Types) section.
> +
> +Default values may be specified as long as they are constant:
> +
> +``` cpp
> +struct Request {
> +  int32 id = -1;
> +  string details;
> +};
> +```
> +
> +What follows is a fairly
> +comprehensive example using the supported field types:
> +
> +``` cpp
> +struct StringPair {
> +  string first;
> +  string second;
> +};
> +
> +enum AnEnum {
> +  YES,
> +  NO
> +};
> +
> +interface SampleInterface {
> +  DoStuff();
> +};
> +
> +struct AllTheThings {
> +  // Note that these types can never be marked nullable!
> +  bool boolean_value;
> +  int8 signed_8bit_value = 42;
> +  uint8 unsigned_8bit_value;
> +  int16 signed_16bit_value;
> +  uint16 unsigned_16bit_value;
> +  int32 signed_32bit_value;
> +  uint32 unsigned_32bit_value;
> +  int64 signed_64bit_value;
> +  uint64 unsigned_64bit_value;
> +  float float_value_32bit;
> +  double float_value_64bit;
> +  AnEnum enum_value = AnEnum.YES;
> +
> +  // Strings may be nullable.
> +  string? maybe_a_string_maybe_not;
> +
> +  // Structs may contain other structs. These may also be nullable.
> +  StringPair some_strings;
> +  StringPair? maybe_some_more_strings;
> +
> +  // In fact structs can also be nested, though in practice you must always make
> +  // such fields nullable -- otherwise messages would need to be infinitely long
> +  // in order to pass validation!
> +  AllTheThings? more_things;
> +
> +  // Arrays may be templated over any Mojom type, and are always nullable:
> +  array<int32> numbers;
> +  array<int32>? maybe_more_numbers;
> +
> +  // Arrays of arrays of arrays... are fine.
> +  array<array<array<AnEnum>>> this_works_but_really_plz_stop;
> +
> +  // The element type may be nullable if it's a type which is allowed to be
> +  // nullable.
> +  array<AllTheThings?> more_maybe_things;
> +
> +  // Fixed-size arrays get some extra validation on the receiving end to ensure
> +  // that the correct number of elements is always received.
> +  array<uint64, 2> uuid;
> +
> +  // Maps follow many of the same rules as arrays. Key types may be any
> +  // non-handle, non-collection type, and value types may be any supported
> +  // struct field type. Maps may also be nullable.
> +  map<string, int32> one_map;
> +  map<AnEnum, string>? maybe_another_map;
> +  map<StringPair, AllTheThings?>? maybe_a_pretty_weird_but_valid_map;
> +  map<StringPair, map<int32, array<map<string, string>?>?>?> ridiculous;
> +
> +  // And finally, all handle types are valid as struct fields and may be
> +  // nullable. Note that interfaces and interface requests (the "Foo" and
> +  // "Foo&" type syntax respectively) are just strongly-typed message pipe
> +  // handles.
> +  handle generic_handle;
> +  handle<data_pipe_consumer> reader;
> +  handle<data_pipe_producer>? maybe_writer;
> +  handle<shared_buffer> dumping_ground;
> +  handle<message_pipe> raw_message_pipe;
> +  pending_remote<SampleInterface>? maybe_a_sample_interface_client_pipe;
> +  pending_receiver<SampleInterface> non_nullable_sample_pending_receiver;
> +  pending_receiver<SampleInterface>? nullable_sample_pending_receiver;
> +  pending_associated_remote<SampleInterface> associated_interface_client;
> +  pending_associated_receiver<SampleInterface> associated_pending_receiver;
> +  pending_associated_receiver<SampleInterface>? maybe_another_pending_receiver;
> +};
> +```
> +
> +For details on how all of these different types translate to usable generated
> +code, see
> +[documentation for individual target languages](#Generated-Code-For-Target-Languages).
> +
> +### Unions
> +
> +Mojom supports tagged unions using the **union** keyword. A union is a
> +collection of fields which may take the value of any single one of those fields
> +at a time. Thus they provide a way to represent a variant value type while
> +minimizing storage requirements.
> +
> +Union fields may be of any type supported by [struct](#Structs) fields. For
> +example:
> +
> +```cpp
> +union ExampleUnion {
> +  string str;
> +  StringPair pair;
> +  int64 id;
> +  array<uint64, 2> guid;
> +  SampleInterface iface;
> +};
> +```
> +
> +For details on how unions like this translate to generated bindings code, see
> +[documentation for individual target languages](#Generated-Code-For-Target-Languages).
> +
> +### Enumeration Types
> +
> +Enumeration types may be defined using the **enum** keyword either directly
> +within a module or nested within the namespace of some struct or interface:
> +
> +```
> +module business.mojom;
> +
> +enum Department {
> +  SALES = 0,
> +  DEV,
> +};
> +
> +struct Employee {
> +  enum Type {
> +    FULL_TIME,
> +    PART_TIME,
> +  };
> +
> +  Type type;
> +  // ...
> +};
> +```
> +
> +Similar to C-style enums, individual values may be explicitly assigned within an
> +enum definition. By default, values are based at zero and increment by
> +1 sequentially.
> +
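> +For instance, a hypothetical enum could assign explicit values, and those
> +values need not be consecutive:
> +
> +``` cpp
> +enum ErrorCode {
> +  NONE = 0,
> +  TIMEOUT = 1,
> +  // Explicit values may leave gaps for future additions.
> +  INTERNAL = 100,
> +};
> +```
> +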
> +The effect of nested definitions on generated bindings varies depending on the
> +target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages)
> +
> +### Constants
> +
> +Constants may be defined using the **const** keyword either directly within a
> +module or nested within the namespace of some struct or interface:
> +
> +```
> +module business.mojom;
> +
> +const string kServiceName = "business";
> +
> +struct Employee {
> +  const uint64 kInvalidId = 0;
> +
> +  enum Type {
> +    FULL_TIME,
> +    PART_TIME,
> +  };
> +
> +  uint64 id = kInvalidId;
> +  Type type;
> +};
> +```
> +
> +The effect of nested definitions on generated bindings varies depending on the
> +target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages)
> +
> +### Interfaces
> +
> +An **interface** is a logical bundle of parameterized request messages. Each
> +request message may optionally define a parameterized response message. Here's
> +an example to define an interface `Foo` with various kinds of requests:
> +
> +```
> +interface Foo {
> +  // A request which takes no arguments and expects no response.
> +  MyMessage();
> +
> +  // A request which has some arguments and expects no response.
> +  MyOtherMessage(string name, array<uint8> bytes);
> +
> +  // A request which expects a single-argument response.
> +  MyMessageWithResponse(string command) => (bool success);
> +
> +  // A request which expects a response with multiple arguments.
> +  MyMessageWithMoarResponse(string a, string b) => (int8 c, int8 d);
> +};
> +```
> +
> +Anything which is a valid struct field type (see [Structs](#Structs)) is also a
> +valid request or response argument type. The type notation is the same for both.
> +
> +### Attributes
> +
> +Mojom definitions may have their meaning altered by **attributes**, specified
> +with a syntax similar to Java or C# attributes. There are a handful of
> +interesting attributes supported today.
> +
> +**`[Sync]`**
> +:   The `Sync` attribute may be specified for any interface method which expects
> +    a response. This makes it so that callers of the method can wait
> +    synchronously for a response. See
> +    [Synchronous Calls](/mojo/public/cpp/bindings/README.md#Synchronous-Calls)
> +    in the C++ bindings documentation. Note that sync methods are only actually
> +    synchronous when called from C++.
> +
> +**`[Extensible]`**
> +:   The `Extensible` attribute may be specified for any enum definition. This
> +    essentially disables builtin range validation when receiving values of the
> +    enum type in a message, allowing older bindings to tolerate unrecognized
> +    values from newer versions of the enum.
> +
> +**`[Native]`**
> +:   The `Native` attribute may be specified for an empty struct declaration to
> +    provide a nominal bridge between Mojo IPC and legacy `IPC::ParamTraits` or
> +    `IPC_STRUCT_TRAITS*` macros.
> +    See
> +    [Repurposing Legacy IPC Traits](/docs/mojo_ipc_conversion.md#repurposing-and-invocations)
> +    for more details. Note support for this attribute is strictly limited to C++
> +    bindings generation.
> +
> +**`[MinVersion=N]`**
> +:   The `MinVersion` attribute is used to specify the version at which a given
> +    field, enum value, interface method, or method parameter was introduced.
> +    See [Versioning](#Versioning) for more details.
> +
> +**`[Stable]`**
> +:   The `Stable` attribute specifies that a given mojom type or interface
> +    definition can be considered stable over time, meaning it is safe to use for
> +    things like persistent storage or communication between independent
> +    version-skewed binaries. Stable definitions may only depend on builtin mojom
> +    types or other stable definitions, and changes to such definitions MUST
> +    preserve backward-compatibility through appropriate use of versioning.
> +    Backward-compatibility of changes is enforced in the Chromium tree using a
> +    strict presubmit check. See [Versioning](#Versioning) for more details on
> +    backward-compatibility constraints.
> +
> +**`[EnableIf=value]`**
> +:   The `EnableIf` attribute is used to conditionally enable definitions when
> +    the mojom is parsed. If the `mojom` target in the GN file does not include
> +    the matching `value` in the list of `enabled_features`, the definition
> +    will be disabled. This is useful for mojom definitions that only make
> +    sense on one platform. Note that the `EnableIf` attribute can only be set
> +    once per definition.
> +
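> +To make the attribute syntax concrete, here is a hypothetical interface (not
> +taken from any real mojom file) combining several of the attributes described
> +above:
> +
> +``` cpp
> +interface Logger {
> +  // Callers may block for the reply (C++ bindings only).
> +  [Sync]
> +  Flush() => (bool success);
> +
> +  // Added in version 1 of this interface.
> +  [MinVersion=1]
> +  SetVerbosity(int32 level);
> +
> +  // Only parsed when the mojom GN target lists "enable_tracing" in its
> +  // enabled_features.
> +  [EnableIf=enable_tracing]
> +  StartTrace();
> +};
> +```
> +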
> +## Generated Code For Target Languages
> +
> +When the bindings generator successfully processes an input Mojom file, it emits
> +corresponding code for each supported target language. For more details on how
> +Mojom concepts translate to a given target language, please refer to the
> +bindings API documentation for that language:
> +
> +* [C++ Bindings](/mojo/public/cpp/bindings/README.md)
> +* [JavaScript Bindings](/mojo/public/js/README.md)
> +* [Java Bindings](/mojo/public/java/bindings/README.md)
> +
> +## Message Validation
> +
> +Regardless of target language, all interface messages are validated during
> +deserialization before they are dispatched to a receiving implementation of the
> +interface. This helps to ensure consistent validation across interfaces without
> +leaving the burden to developers and security reviewers every time a new message
> +is added.
> +
> +If a message fails validation, it is never dispatched. Instead a **connection
> +error** is raised on the binding object (see
> +[C++ Connection Errors](/mojo/public/cpp/bindings/README.md#Connection-Errors),
> +[Java Connection Errors](/mojo/public/java/bindings/README.md#Connection-Errors),
> +or
> +[JavaScript Connection Errors](/mojo/public/js/README.md#Connection-Errors) for
> +details.)
> +
> +Some baseline level of validation is done automatically for primitive Mojom
> +types.
> +
> +### Non-Nullable Objects
> +
> +Mojom fields or parameter values (*e.g.*, structs, interfaces, arrays, *etc.*)
> +may be marked nullable in Mojom definitions (see
> +[Primitive Types](#Primitive-Types).) If a field or parameter is **not** marked
> +nullable but a message is received with a null value in its place, that message
> +will fail validation.
> +
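> +For example, given a hypothetical struct like the one below, a message carrying
> +a null `name` fails validation, while a null `nickname` is accepted:
> +
> +``` cpp
> +struct Profile {
> +  string name;        // Non-nullable: a null value fails validation.
> +  string? nickname;   // Nullable: null is allowed.
> +};
> +```
> +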
> +### Enums
> +
> +Enums declared in Mojom are automatically validated against the range of legal
> +values. For example if a Mojom declares the enum:
> +
> +``` cpp
> +enum AdvancedBoolean {
> +  TRUE = 0,
> +  FALSE = 1,
> +  FILE_NOT_FOUND = 2,
> +};
> +```
> +
> +and a message is received with the integral value 3 (or anything other than 0,
> +1, or 2) in place of some `AdvancedBoolean` field or parameter, the message will
> +fail validation.
> +
> +*** note
> +NOTE: It's possible to avoid this type of validation error by explicitly marking
> +an enum as [Extensible](#Attributes) if you anticipate your enum being exchanged
> +between two different versions of the binding interface. See
> +[Versioning](#Versioning).
> +***
> +
> +### Other failures
> +
> +There are a host of internal validation errors that may occur when a malformed
> +message is received, but developers should not be concerned with these
> +specifically; in general they can only result from internal bindings bugs,
> +compromised processes, or some remote endpoint making a dubious effort to
> +manually encode their own bindings messages.
> +
> +### Custom Validation
> +
> +It's also possible for developers to define custom validation logic for specific
> +Mojom struct types by exploiting the
> +[type mapping](/mojo/public/cpp/bindings/README.md#Type-Mapping) system for C++
> +bindings. Messages rejected by custom validation logic trigger the same
> +validation failure behavior as the built-in type validation routines.
> +
> +## Associated Interfaces
> +
> +As mentioned in the [Primitive Types](#Primitive-Types) section above,
> +`pending_remote` and `pending_receiver` fields and parameters may be marked as
> +`associated`. This
> +essentially means that they are piggy-backed on some other interface's message
> +pipe.
> +
> +Because individual interface message pipes operate independently there can be no
> +relative ordering guarantees among them. Associated interfaces are useful when
> +one interface needs to guarantee strict FIFO ordering with respect to one or
> +more other interfaces, as they allow interfaces to share a single pipe.
> +
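> +As a sketch (these interfaces are hypothetical), an associated endpoint is
> +passed over an existing connection, so both interfaces end up sharing that
> +connection's message pipe and therefore its message ordering:
> +
> +``` cpp
> +interface Widget {
> +  Activate();
> +};
> +
> +interface WidgetHost {
> +  // The Widget endpoint is associated with the WidgetHost pipe, so Widget
> +  // and WidgetHost messages are delivered in FIFO order relative to each
> +  // other.
> +  CreateWidget(pending_associated_receiver<Widget> widget);
> +};
> +```
> +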
> +Currently associated interfaces are only supported in generated C++ bindings.
> +See the documentation for
> +[C++ Associated Interfaces](/mojo/public/cpp/bindings/README.md#Associated-Interfaces).
> +
> +## Versioning
> +
> +### Overview
> +
> +*** note
> +**NOTE:** You don't need to worry about versioning if you don't care about
> +backwards compatibility. Specifically, all parts of Chrome are updated
> +atomically today and there is not yet any possibility of any two Chrome
> +processes communicating with two different versions of any given Mojom
> +interface.
> +***
> +
> +Services extend their interfaces to support new features over time, and clients
> +want to use those new features when they are available. If services and clients
> +are not updated at the same time, it's important for them to be able to
> +communicate with each other using different snapshots (versions) of their
> +interfaces.
> +
> +This document shows how to extend Mojom interfaces in a backwards-compatible
> +way. Changing interfaces in a non-backwards-compatible way is not discussed,
> +because in that case communication between different interface versions is
> +impossible anyway.
> +
> +### Versioned Structs
> +
> +You can use the `MinVersion` [attribute](#Attributes) to indicate from which
> +version a struct field is introduced. Assume you have the following struct:
> +
> +``` cpp
> +struct Employee {
> +  uint64 employee_id;
> +  string name;
> +};
> +```
> +
> +and you would like to add a birthday field. You can do:
> +
> +``` cpp
> +struct Employee {
> +  uint64 employee_id;
> +  string name;
> +  [MinVersion=1] Date? birthday;
> +};
> +```
> +
> +By default, fields belong to version 0. New fields must be appended to the
> +struct definition (*i.e.*, existing fields must not change **ordinal value**)
> +with the `MinVersion` attribute set to a number greater than any previously
> +existing version.
> +
> +*** note
> +**NOTE:** do not change existing fields in versioned structs, as this is
> +not backwards-compatible. Instead, rename the old field to make its
> +deprecation clear and add a new field with the new version number.
> +***
> +
> +**Ordinal value** refers to the relative positional layout of a struct's fields
> +(and an interface's methods) when encoded in a message. Implicitly, ordinal
> +numbers are assigned to fields according to lexical position. In the example
> +above, `employee_id` has an ordinal value of 0 and `name` has an ordinal value
> +of 1.
> +
> +Ordinal values can be specified explicitly using **`@`** notation, subject to
> +the following hard constraints:
> +
> +* For any given struct or interface, if any field or method explicitly specifies
> +    an ordinal value, all fields or methods must explicitly specify an ordinal
> +    value.
> +* For an *N*-field struct or *N*-method interface, the set of explicitly
> +    assigned ordinal values must be limited to the range *[0, N-1]*. Interfaces
> +    should include placeholder methods to fill the ordinal positions of removed
> +    methods (for example "Unused_Message_7 at 7()" or "RemovedMessage at 42()", etc).
> +
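> +For example, a hypothetical interface that removed one of its methods could
> +keep a placeholder so the explicitly assigned ordinals still cover *[0, N-1]*:
> +
> +``` cpp
> +interface Frobber {
> +  Frob@0();
> +  // Ordinal 1 belonged to a removed method; keep a placeholder for it.
> +  Unused_Message_1@1();
> +  Defrob@2();
> +};
> +```
> +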
> +You may reorder fields, but you must ensure that the ordinal values of existing
> +fields remain unchanged. For example, the following struct remains
> +backwards-compatible:
> +
> +``` cpp
> +struct Employee {
> +  uint64 employee_id@0;
> +  [MinVersion=1] Date? birthday@2;
> +  string name@1;
> +};
> +```
> +
> +*** note
> +**NOTE:** Newly added fields of Mojo object or handle types MUST be nullable.
> +See [Primitive Types](#Primitive-Types).
> +***
> +
> +### Versioned Interfaces
> +
> +There are two dimensions on which an interface can be extended:
> +
> +**Appending New Parameters To Existing Methods**
> +:   Parameter lists are treated as structs internally, so all the rules of
> +    versioned structs apply to method parameter lists. The only difference is
> +    that the version number is scoped to the whole interface rather than to any
> +    individual parameter list.
> +
> +    Please note that adding a response to a message which did not previously
> +    expect a response is not a backwards-compatible change.
> +
> +**Appending New Methods**
> +:   Similarly, you can reorder methods with explicit ordinal values as long as
> +    the ordinal values of existing methods are unchanged.
> +
> +For example:
> +
> +``` cpp
> +// Old version:
> +interface HumanResourceDatabase {
> +  AddEmployee(Employee employee) => (bool success);
> +  QueryEmployee(uint64 id) => (Employee? employee);
> +};
> +
> +// New version:
> +interface HumanResourceDatabase {
> +  AddEmployee(Employee employee) => (bool success);
> +
> +  QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
> +      => (Employee? employee,
> +          [MinVersion=1] array<uint8>? finger_print);
> +
> +  [MinVersion=1]
> +  AttachFingerPrint(uint64 id, array<uint8> finger_print)
> +      => (bool success);
> +};
> +```
> +
> +Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
> +list of a request or response method to a destination using an older version of
> +an interface, unrecognized fields are silently discarded. However, if the method
> +call itself is not recognized, it is considered a validation error and the
> +receiver will close its end of the interface pipe. For example, if a client on
> +version 1 of the above interface sends an `AttachFingerPrint` request to an
> +implementation of version 0, the client will be disconnected.
> +
> +Bindings target languages that support versioning expose means to query or
> +assert the remote version from a client handle (*e.g.*, a
> +`mojo::Remote<T>` in C++ bindings.)
> +
> +See
> +[C++ Versioning Considerations](/mojo/public/cpp/bindings/README.md#Versioning-Considerations)
> +and
> +[Java Versioning Considerations](/mojo/public/java/bindings/README.md#Versioning-Considerations)
> +
> +### Versioned Enums
> +
> +**By default, enums are non-extensible**, which means that generated message
> +validation code does not expect to see new values in the future. When an unknown
> +value is seen for a non-extensible enum field or parameter, a validation error
> +is raised.
> +
> +If you want an enum to be extensible in the future, you can apply the
> +`[Extensible]` [attribute](#Attributes):
> +
> +``` cpp
> +[Extensible]
> +enum Department {
> +  SALES,
> +  DEV,
> +};
> +```
> +
> +And later you can extend this enum without breaking backwards compatibility:
> +
> +``` cpp
> +[Extensible]
> +enum Department {
> +  SALES,
> +  DEV,
> +  [MinVersion=1] RESEARCH,
> +};
> +```
> +
> +*** note
> +**NOTE:** For versioned enum definitions, the use of a `[MinVersion]` attribute
> +is strictly for documentation purposes. It has no impact on the generated code.
> +***
> +
> +With extensible enums, bound interface implementations may receive unknown enum
> +values and will need to deal with them gracefully. See
> +[C++ Versioning Considerations](/mojo/public/cpp/bindings/README.md#Versioning-Considerations)
> +for details.
> +
> +## Grammar Reference
> +
> +Below is the (BNF-ish) context-free grammar of the Mojom language:
> +
> +```
> +MojomFile = StatementList
> +StatementList = Statement StatementList | Statement
> +Statement = ModuleStatement | ImportStatement | Definition
> +
> +ModuleStatement = AttributeSection "module" Identifier ";"
> +ImportStatement = "import" StringLiteral ";"
> +Definition = Struct | Union | Interface | Enum | Const
> +
> +AttributeSection = <empty> | "[" AttributeList "]"
> +AttributeList = <empty> | NonEmptyAttributeList
> +NonEmptyAttributeList = Attribute
> +                      | Attribute "," NonEmptyAttributeList
> +Attribute = Name
> +          | Name "=" Name
> +          | Name "=" Literal
> +
> +Struct = AttributeSection "struct" Name "{" StructBody "}" ";"
> +       | AttributeSection "struct" Name ";"
> +StructBody = <empty>
> +           | StructBody Const
> +           | StructBody Enum
> +           | StructBody StructField
> +StructField = AttributeSection TypeSpec Name Ordinal Default ";"
> +
> +Union = AttributeSection "union" Name "{" UnionBody "}" ";"
> +UnionBody = <empty> | UnionBody UnionField
> +UnionField = AttributeSection TypeSpec Name Ordinal ";"
> +
> +Interface = AttributeSection "interface" Name "{" InterfaceBody "}" ";"
> +InterfaceBody = <empty>
> +              | InterfaceBody Const
> +              | InterfaceBody Enum
> +              | InterfaceBody Method
> +Method = AttributeSection Name Ordinal "(" ParamterList ")" Response ";"
> +ParameterList = <empty> | NonEmptyParameterList
> +NonEmptyParameterList = Parameter
> +                      | Parameter "," NonEmptyParameterList
> +Parameter = AttributeSection TypeSpec Name Ordinal
> +Response = <empty> | "=>" "(" ParameterList ")"
> +
> +TypeSpec = TypeName "?" | TypeName
> +TypeName = BasicTypeName
> +         | Array
> +         | FixedArray
> +         | Map
> +         | InterfaceRequest
> +BasicTypeName = Identifier | "associated" Identifier | HandleType | NumericType
> +NumericType = "bool" | "int8" | "uint8" | "int16" | "uint16" | "int32"
> +            | "uint32" | "int64" | "uint64" | "float" | "double"
> +HandleType = "handle" | "handle" "<" SpecificHandleType ">"
> +SpecificHandleType = "message_pipe"
> +                   | "shared_buffer"
> +                   | "data_pipe_consumer"
> +                   | "data_pipe_producer"
> +                   | "platform"
> +Array = "array" "<" TypeSpec ">"
> +FixedArray = "array" "<" TypeSpec "," IntConstDec ">"
> +Map = "map" "<" Identifier "," TypeSpec ">"
> +InterfaceRequest = Identifier "&" | "associated" Identifier "&"
> +
> +Ordinal = <empty> | OrdinalValue
> +
> +Default = <empty> | "=" Constant
> +
> +Enum = AttributeSection "enum" Name "{" NonEmptyEnumValueList "}" ";"
> +     | AttributeSection "enum" Name "{" NonEmptyEnumValueList "," "}" ";"
> +NonEmptyEnumValueList = EnumValue | NonEmptyEnumValueList "," EnumValue
> +EnumValue = AttributeSection Name
> +          | AttributeSection Name "=" Integer
> +          | AttributeSection Name "=" Identifier
> +
> +Const = "const" TypeSpec Name "=" Constant ";"
> +
> +Constant = Literal | Identifier ";"
> +
> +Identifier = Name | Name "." Identifier
> +
> +Literal = Integer | Float | "true" | "false" | "default" | StringLiteral
> +
> +Integer = IntConst | "+" IntConst | "-" IntConst
> +IntConst = IntConstDec | IntConstHex
> +
> +Float = FloatConst | "+" FloatConst | "-" FloatConst
> +
> +; The rules below are for tokens matched strictly according to the given regexes
> +
> +Identifier = /[a-zA-Z_][0-9a-zA-Z_]*/
> +IntConstDec = /0|([1-9][0-9]*)/
> +IntConstHex = /0[xX][0-9a-fA-F]+/
> +OrdinalValue = /@(0|([1-9][0-9]*))/
> +FloatConst = ... # Imagine it's close enough to C-style float syntax.
> +StringLiteral = ... # Imagine it's close enough to C-style string literals, including escapes.
> +```
> +
> +## Additional Documentation
> +
> +[Mojom Message Format](https://docs.google.com/document/d/13pv9cFh5YKuBggDBQ1-AL8VReF-IYpFOFpRfvWFrwio/edit)
> +:    Describes the wire format used by Mojo bindings interfaces over message
> +     pipes.
> +
> +[Input Format of Mojom Message Validation Tests](https://docs.google.com/document/d/1-y-2IYctyX2NPaLxJjpJfzVNWCC2SR2MJAD9MpIytHQ/edit)
> +:    Describes a text format used to facilitate bindings message validation
> +     tests.
> diff --git a/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni b/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
> new file mode 100644
> index 00000000..d8a13874
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/chromium_bindings_configuration.gni
> @@ -0,0 +1,51 @@
> +# Copyright 2016 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +_typemap_imports = [
> +  "//chrome/chrome_cleaner/mojom/typemaps/typemaps.gni",
> +  "//chrome/common/importer/typemaps.gni",
> +  "//chrome/common/media_router/mojom/typemaps.gni",
> +  "//chrome/typemaps.gni",
> +  "//chromecast/typemaps.gni",
> +  "//chromeos/typemaps.gni",
> +  "//chromeos/components/multidevice/mojom/typemaps.gni",
> +  "//chromeos/services/cros_healthd/public/mojom/typemaps.gni",
> +  "//chromeos/services/device_sync/public/mojom/typemaps.gni",
> +  "//chromeos/services/network_config/public/mojom/typemaps.gni",
> +  "//chromeos/services/secure_channel/public/mojom/typemaps.gni",
> +  "//components/arc/mojom/typemaps.gni",
> +  "//components/chromeos_camera/common/typemaps.gni",
> +  "//components/services/storage/public/cpp/filesystem/typemaps.gni",
> +  "//components/sync/mojom/typemaps.gni",
> +  "//components/typemaps.gni",
> +  "//content/browser/typemaps.gni",
> +  "//content/public/common/typemaps.gni",
> +  "//sandbox/mac/mojom/typemaps.gni",
> +  "//services/media_session/public/cpp/typemaps.gni",
> +  "//services/proxy_resolver/public/cpp/typemaps.gni",
> +  "//services/resource_coordinator/public/cpp/typemaps.gni",
> +  "//services/service_manager/public/cpp/typemaps.gni",
> +  "//services/tracing/public/mojom/typemaps.gni",
> +]
> +
> +_typemaps = []
> +foreach(typemap_import, _typemap_imports) {
> +  # Avoid reassignment error by assigning to empty scope first.
> +  _imported = {
> +  }
> +  _imported = read_file(typemap_import, "scope")
> +  _typemaps += _imported.typemaps
> +}
> +
> +typemaps = []
> +foreach(typemap, _typemaps) {
> +  typemaps += [
> +    {
> +      filename = typemap
> +      config = read_file(typemap, "scope")
> +    },
> +  ]
> +}
> +
> +component_macro_suffix = ""
> diff --git a/utils/ipc/mojo/public/tools/bindings/compile_typescript.py b/utils/ipc/mojo/public/tools/bindings/compile_typescript.py
> new file mode 100644
> index 00000000..a978901b
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/compile_typescript.py
> @@ -0,0 +1,27 @@
> +# Copyright 2019 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import os
> +import sys
> +import argparse
> +
> +_HERE_PATH = os.path.dirname(__file__)
> +_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
> +
> +sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
> +import node
> +import node_modules
> +
> +def main(argv):
> +  parser = argparse.ArgumentParser()
> +  parser.add_argument('--tsconfig_path', required=True)
> +  args = parser.parse_args(argv)
> +
> +  result = node.RunNode([node_modules.PathToTypescript()] +
> +                        ['--project', args.tsconfig_path])
> +  if len(result) != 0:
> +    raise RuntimeError('Failed to compile Typescript: \n%s' % result)
> +
> +if __name__ == '__main__':
> +  main(sys.argv[1:])
> diff --git a/utils/ipc/mojo/public/tools/bindings/concatenate-files.py b/utils/ipc/mojo/public/tools/bindings/concatenate-files.py
> new file mode 100755
> index 00000000..48bc66fd
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/concatenate-files.py
> @@ -0,0 +1,54 @@
> +#!/usr/bin/env python
> +# Copyright 2019 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +#
> +# This utility concatenates several files into one. On Unix-like systems
> +# it is equivalent to:
> +#   cat file1 file2 file3 ...files... > target
> +#
> +# The reason for writing a separate utility is that 'cat' is not available
> +# on all supported build platforms, but Python is, and hence this provides
> +# us with an easy and uniform way of doing this on all platforms.
> +
> +# for py2/py3 compatibility
> +from __future__ import print_function
> +
> +import optparse
> +
> +
> +def Concatenate(filenames):
> +  """Concatenate files.
> +
> +  Args:
> +    filenames: Array of file names.
> +               The last name is the target; all earlier ones are sources.
> +
> +  Returns:
> +    True, if the operation was successful.
> +  """
> +  if len(filenames) < 2:
> +    print("An error occurred generating %s:\nNothing to do." % filenames[-1])
> +    return False
> +
> +  try:
> +    with open(filenames[-1], "wb") as target:
> +      for filename in filenames[:-1]:
> +        with open(filename, "rb") as current:
> +          target.write(current.read())
> +    return True
> +  except IOError as e:
> +    print("An error occurred when writing %s:\n%s" % (filenames[-1], e))
> +    return False
> +
> +
> +def main():
> +  parser = optparse.OptionParser()
> +  parser.set_usage("""Concatenate several files into one.
> +      Equivalent to: cat file1 ... > target.""")
> +  (_options, args) = parser.parse_args()
> +  exit(0 if Concatenate(args) else 1)
> +
> +
> +if __name__ == "__main__":
> +  main()
> diff --git a/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py b/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
> new file mode 100755
> index 00000000..be8985ce
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py
> @@ -0,0 +1,73 @@
> +#!/usr/bin/env python
> +# Copyright 2018 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +"""Simple utility which concatenates a set of files into a single output file
> +while also stripping any goog.provide or goog.require lines. This allows us to
> +provide a very primitive sort of "compilation" without any extra toolchain
> +support and without having to modify otherwise compilable sources in the tree
> +which use these directives.
> +
> +goog.provide lines are replaced with an equivalent invocation of
> +mojo.internal.exportModule, which accomplishes essentially the same thing in an
> +uncompiled context. A singular exception is made for the 'mojo.internal' export,
> +which is instead replaced with an inlined assignment to initialize the
> +namespace.
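> +
> +For example (illustrative), an input line of
> +
> +    goog.provide('foo.mojom');
> +
> +is rewritten to
> +
> +    mojo.internal.exportModule('foo.mojom');
> +
> +while goog.require lines are dropped entirely.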
> +"""
> +
> +from __future__ import print_function
> +
> +import optparse
> +import re
> +
> +
> +_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
> +_MOJO_EXPORT_MODULE_SYMBOL = "mojo.internal.exportModule"
> +
> +
> +def FilterLine(filename, line, output):
> +  if line.startswith("goog.require"):
> +    return
> +
> +  if line.startswith("goog.provide"):
> +    match = re.match("goog.provide\('([^']+)'\);", line)
> +    if not match:
> +      print("Invalid goog.provide line in %s:\n%s" % (filename, line))
> +      exit(1)
> +
> +    module_name = match.group(1)
> +    if module_name == _MOJO_INTERNAL_MODULE_NAME:
> +      output.write("self.mojo = { internal: {} };")
> +    else:
> +      output.write("%s('%s');\n" % (_MOJO_EXPORT_MODULE_SYMBOL, module_name))
> +    return
> +
> +  output.write(line)
> +
> +def ConcatenateAndReplaceExports(filenames):
> +  if (len(filenames) < 2):
> +    print("At least two filenames (one input and the output) are required.")
> +    return False
> +
> +  try:
> +    with open(filenames[-1], "w") as target:
> +      for filename in filenames[:-1]:
> +        with open(filename, "r") as current:
> +          for line in current.readlines():
> +            FilterLine(filename, line, target)
> +    return True
> +  except IOError as e:
> +    print("Error generating %s\n: %s" % (filenames[-1], e))
> +    return False
> +
> +def main():
> +  parser = optparse.OptionParser()
> +  parser.set_usage("""file1 [file2...] outfile
> +    Concatenate several files into one, stripping Closure provide and
> +    require directives along the way.""")
> +  (_, args) = parser.parse_args()
> +  exit(0 if ConcatenateAndReplaceExports(args) else 1)
> +
> +if __name__ == "__main__":
> +  main()
> diff --git a/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py b/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
> new file mode 100755
> index 00000000..7ac4af5f
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
> @@ -0,0 +1,36 @@
> +#!/usr/bin/env python
> +# Copyright 2016 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from __future__ import print_function
> +
> +import sys
> +
> +# This utility converts mojom dependencies into their corresponding typemap
> +# paths and formats them to be consumed by generate_type_mappings.py.
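> +#
> +# For example (illustrative), a .typemap file containing
> +#   public_headers = [ "foo.h" ]
> +#   traits_headers = [ "foo_traits.h" ]
> +#   type_mappings = [ "mojom.Foo=FooImpl" ]
> +# is formatted as:
> +#   --start-typemap public_headers=foo.h traits_headers=foo_traits.h type_mappings=mojom.Foo=FooImpl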
> +
> +
> +def FormatTypemap(typemap_filename):
> +  # A simple typemap is valid Python with a minor alteration.
> +  with open(typemap_filename) as f:
> +    typemap_content = f.read().replace('=\n', '=')
> +  typemap = {}
> +  exec(typemap_content, typemap)
> +
> +  for header in typemap.get('public_headers', []):
> +    yield 'public_headers=%s' % header
> +  for header in typemap.get('traits_headers', []):
> +    yield 'traits_headers=%s' % header
> +  for header in typemap.get('type_mappings', []):
> +    yield 'type_mappings=%s' % header
> +
> +
> +def main():
> +  typemaps = sys.argv[1:]
> +  print(' '.join('--start-typemap %s' % ' '.join(FormatTypemap(typemap))
> +                 for typemap in typemaps))
> +
> +
> +if __name__ == '__main__':
> +  sys.exit(main())
> diff --git a/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py b/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
> new file mode 100644
> index 00000000..79c9e50e
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
> @@ -0,0 +1,52 @@
> +# Copyright 2017 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Generates a list of all files in a directory.
> +
> +This script takes in a directory and an output file name as input.
> +It then reads the directory and creates a list of all file names
> +in that directory.  The list is written to the output file.
> +There is also an option to pass in '-p' or '--pattern'
> +which will check each file name against a regular expression
> +pattern that is passed in.  Only files which match the regex
> +will be written to the list.
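> +
> +Example invocation (illustrative):
> +  python gen_data_files_list.py -d some/dir -o files.list -p '.*\.mojom$'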
> +"""
> +
> +from __future__ import print_function
> +
> +import os
> +import re
> +import sys
> +
> +try:
> +  from cStringIO import StringIO
> +except ImportError:  # Python 3
> +  from io import StringIO
> +from optparse import OptionParser
> +
> +sys.path.insert(
> +    0,
> +    os.path.join(
> +        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
> +
> +from mojom.generate.generator import WriteFile
> +
> +
> +def main():
> +  parser = OptionParser()
> +  parser.add_option('-d', '--directory', help='Read files from DIRECTORY')
> +  parser.add_option('-o', '--output', help='Write list to FILE')
> +  parser.add_option('-p',
> +                    '--pattern',
> +                    help='Only read files whose names match PATTERN',
> +                    default=".")
> +  (options, _) = parser.parse_args()
> +  pattern = re.compile(options.pattern)
> +  files = [f for f in os.listdir(options.directory) if pattern.match(f)]
> +
> +  stream = StringIO()
> +  for f in files:
> +    print(f, file=stream)
> +
> +  WriteFile(stream.getvalue(), options.output)
> +  stream.close()
> +
> +if __name__ == '__main__':
> +  sys.exit(main())
> diff --git a/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py b/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
> new file mode 100755
> index 00000000..64ca048f
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
> @@ -0,0 +1,187 @@
> +#!/usr/bin/env python
> +# Copyright 2016 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Generates a JSON typemap from its command-line arguments and dependencies.
> +
> +Each typemap should be specified in a command-line argument of the form
> +key=value, with an argument of "--start-typemap" preceding each typemap.
> +
> +For example,
> +generate_type_mappings.py --output=foo.typemap --start-typemap \\
> +    public_headers=foo.h traits_headers=foo_traits.h \\
> +    type_mappings=mojom.Foo=FooImpl
> +
> +generates a foo.typemap containing
> +{
> +  "c++": {
> +    "mojom.Foo": {
> +      "typename": "FooImpl",
> +      "traits_headers": [
> +        "foo_traits.h"
> +      ],
> +      "public_headers": [
> +        "foo.h"
> +      ]
> +    }
> +  }
> +}
> +
> +Then,
> +generate_type_mappings.py --dependency foo.typemap --output=bar.typemap \\
> +    --start-typemap public_headers=bar.h traits_headers=bar_traits.h \\
> +    type_mappings=mojom.Bar=BarImpl
> +
> +generates a bar.typemap containing
> +{
> +  "c++": {
> +    "mojom.Bar": {
> +      "typename": "BarImpl",
> +      "traits_headers": [
> +        "bar_traits.h"
> +      ],
> +      "public_headers": [
> +        "bar.h"
> +      ]
> +    },
> +    "mojom.Foo": {
> +      "typename": "FooImpl",
> +      "traits_headers": [
> +        "foo_traits.h"
> +      ],
> +      "public_headers": [
> +        "foo.h"
> +      ]
> +    }
> +  }
> +}
> +"""
> +
> +import argparse
> +import json
> +import os
> +import re
> +import sys
> +
> +sys.path.insert(
> +    0,
> +    os.path.join(
> +        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
> +
> +from mojom.generate.generator import WriteFile
> +
> +def ReadTypemap(path):
> +  with open(path) as f:
> +    return json.load(f)['c++']
> +
> +
> +def ParseTypemapArgs(args):
> +  typemaps = [s for s in '\n'.join(args).split('--start-typemap\n') if s]
> +  result = {}
> +  for typemap in typemaps:
> +    result.update(ParseTypemap(typemap))
> +  return result
> +
> +
> +def LoadCppTypemapConfig(path):
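> +  # The config file is expected (illustratively) to hold a JSON list of
> +  # configs, e.g.:
> +  #   [{"types": [{"mojom": "mojom.Foo", "cpp": "::Foo"}],
> +  #     "traits_headers": ["foo_traits.h"]}]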
> +  configs = {}
> +  with open(path) as f:
> +    for config in json.load(f):
> +      for entry in config['types']:
> +        configs[entry['mojom']] = {
> +            'typename': entry['cpp'],
> +            'public_headers': config.get('traits_headers', []),
> +            'traits_headers': config.get('traits_private_headers', []),
> +            'copyable_pass_by_value': entry.get('copyable_pass_by_value',
> +                                                False),
> +            'force_serialize': entry.get('force_serialize', False),
> +            'hashable': entry.get('hashable', False),
> +            'move_only': entry.get('move_only', False),
> +            'nullable_is_same_type': entry.get('nullable_is_same_type', False),
> +            'non_copyable_non_movable': False,
> +        }
> +  return configs
> +
> +
> +def ParseTypemap(typemap):
> +  values = {'type_mappings': [], 'public_headers': [], 'traits_headers': []}
> +  for line in typemap.split('\n'):
> +    if not line:
> +      continue
> +    key, _, value = line.partition('=')
> +    values[key].append(value.lstrip('/'))
> +  result = {}
> +  mapping_pattern = \
> +      re.compile(r"""^([^=]+)           # mojom type
> +                     =
> +                     ([^[]+)            # native type
> +                     (?:\[([^]]+)\])?$  # optional attribute in square brackets
> +                 """, re.X)
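> +  # For example (illustrative), "mojom.Foo=FooImpl[move_only,hashable]" parses
> +  # into mojom type "mojom.Foo", native type "FooImpl" and attributes
> +  # ["move_only", "hashable"].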
> +  for typename in values['type_mappings']:
> +    match_result = mapping_pattern.match(typename)
> +    assert match_result, (
> +        "Cannot parse entry in the \"type_mappings\" section: %s" % typename)
> +
> +    mojom_type = match_result.group(1)
> +    native_type = match_result.group(2)
> +    attributes = []
> +    if match_result.group(3):
> +      attributes = match_result.group(3).split(',')
> +
> +    assert mojom_type not in result, (
> +        "Cannot map multiple native types (%s, %s) to the same mojom type: %s" %
> +        (result[mojom_type]['typename'], native_type, mojom_type))
> +
> +    result[mojom_type] = {
> +        'public_headers': values['public_headers'],
> +        'traits_headers': values['traits_headers'],
> +        'typename': native_type,
> +
> +        # Attributes supported for individual mappings.
> +        'copyable_pass_by_value': 'copyable_pass_by_value' in attributes,
> +        'force_serialize': 'force_serialize' in attributes,
> +        'hashable': 'hashable' in attributes,
> +        'move_only': 'move_only' in attributes,
> +        'non_copyable_non_movable': 'non_copyable_non_movable' in attributes,
> +        'nullable_is_same_type': 'nullable_is_same_type' in attributes,
> +    }
> +  return result
> +
> +
> +def main():
> +  parser = argparse.ArgumentParser(
> +      description=__doc__,
> +      formatter_class=argparse.RawDescriptionHelpFormatter)
> +  parser.add_argument(
> +      '--dependency',
> +      type=str,
> +      action='append',
> +      default=[],
> +      help=('A path to another JSON typemap to merge into the output. '
> +            'This may be repeated to merge multiple typemaps.'))
> +  parser.add_argument(
> +      '--cpp-typemap-config',
> +      type=str,
> +      action='store',
> +      dest='cpp_config_path',
> +      help=('A path to a single JSON-formatted typemap config as emitted by '
> +            'GN when processing a mojom_cpp_typemap build rule.'))
> +  parser.add_argument('--output',
> +                      type=str,
> +                      required=True,
> +                      help='The path to which to write the generated JSON.')
> +  params, typemap_params = parser.parse_known_args()
> +  typemaps = ParseTypemapArgs(typemap_params)
> +  if params.cpp_config_path:
> +    typemaps.update(LoadCppTypemapConfig(params.cpp_config_path))
> +  missing = [path for path in params.dependency if not os.path.exists(path)]
> +  if missing:
> +    raise IOError('Missing dependencies: %s' % ', '.join(missing))
> +  for path in params.dependency:
> +    typemaps.update(ReadTypemap(path))
> +
> +  WriteFile(json.dumps({'c++': typemaps}, indent=2), params.output)
> +
> +
> +if __name__ == '__main__':
> +  main()
> diff --git a/utils/ipc/mojo/public/tools/bindings/mojom.gni b/utils/ipc/mojo/public/tools/bindings/mojom.gni
> new file mode 100644
> index 00000000..a739fa6e
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/mojom.gni
> @@ -0,0 +1,1941 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import("//build/config/jumbo.gni")
> +import("//third_party/closure_compiler/closure_args.gni")
> +import("//third_party/closure_compiler/compile_js.gni")
> +import("//third_party/protobuf/proto_library.gni")
> +import("//ui/webui/webui_features.gni")
> +
> +# TODO(rockot): Maybe we can factor these dependencies out of //mojo. They're
> +# used to conditionally enable message ID scrambling in a way which is
> +# consistent across toolchains and which is affected by branded vs non-branded
> +# Chrome builds. Ideally we could create some generic knobs here that could be
> +# flipped elsewhere though.
> +import("//build/config/chrome_build.gni")
> +import("//build/config/chromecast_build.gni")
> +import("//build/config/chromeos/ui_mode.gni")
> +import("//build/config/nacl/config.gni")
> +import("//build/toolchain/kythe.gni")
> +import("//components/nacl/features.gni")
> +import("//third_party/jinja2/jinja2.gni")
> +import("//tools/ipc_fuzzer/ipc_fuzzer.gni")
> +declare_args() {
> +  # Indicates whether typemapping should be supported in this build
> +  # configuration. This may be disabled when building external projects which
> +  # depend on //mojo but which do not need/want all of the Chromium tree
> +  # dependencies that come with typemapping.
> +  #
> +  # Note that (perhaps obviously) a huge amount of Chromium code will not build
> +  # with typemapping disabled, so it is never valid to set this to |false| in
> +  # any Chromium build configuration.
> +  enable_mojom_typemapping = true
> +
> +  # Controls message ID scrambling behavior. If |true|, message IDs are
> +  # scrambled (i.e. randomized based on the contents of //chrome/VERSION) on
> +  # non-Chrome OS desktop platforms. Set to |false| to disable message ID
> +  # scrambling on all platforms.
> +  enable_mojom_message_id_scrambling = true
> +
> +  # Enables Closure compilation of generated JS lite bindings. In environments
> +  # where compilation is supported, any mojom target "foo" will also have a
> +  # corresponding "foo_js_library_for_compile" target generated.
> +  enable_mojom_closure_compile = enable_js_type_check && optimize_webui
> +
> +  # Enables generating Typescript bindings and compiling them to JS bindings.
> +  enable_typescript_bindings = false
> +
> +  # Enables generating javascript fuzzing-related code and the bindings for the
> +  # MojoLPM fuzzer targets. Off by default.
> +  enable_mojom_fuzzer = false
> +}
> +
> +# NOTE: We would like to avoid scrambling message IDs where it doesn't add
> +# value, so we limit the behavior to desktop builds for now. There is some
> +# redundancy in the conditions here, but it is tolerated for clarity:
> +# We're explicit about Mac, Windows, and Linux desktop support, but it's
> +# also necessary to ensure that bindings in alternate toolchains (e.g.
> +# NaCl IRT) are always consistent with the default toolchain; for that
> +# reason we always enable scrambling within NaCl toolchains when possible,
> +# as well as within the default toolchain when NaCl is enabled.
> +#
> +# Finally, because we *cannot* enable scrambling on Chrome OS (it would break
> +# ARC) we have to explicitly opt out there even when NaCl is enabled (and
> +# consequently also when building for NaCl toolchains.) For this reason we
> +# check |target_os| explicitly, as it's consistent across all toolchains.
> +enable_scrambled_message_ids =
> +    enable_mojom_message_id_scrambling &&
> +    (is_mac || is_win || (is_linux && !is_chromeos && !is_chromecast &&
> +                          !chromeos_is_browser_only) ||
> +     ((enable_nacl || is_nacl || is_nacl_nonsfi) &&
> +      (target_os != "chromeos" && !chromeos_is_browser_only)))
> +
> +_mojom_tools_root = "//mojo/public/tools"
> +_mojom_library_root = "$_mojom_tools_root/mojom/mojom"
> +mojom_parser_script = "$_mojom_tools_root/mojom/mojom_parser.py"
> +mojom_parser_sources = [
> +  "$_mojom_library_root/__init__.py",
> +  "$_mojom_library_root/error.py",
> +  "$_mojom_library_root/generate/__init__.py",
> +  "$_mojom_library_root/generate/constant_resolver.py",
> +  "$_mojom_library_root/generate/generator.py",
> +  "$_mojom_library_root/generate/module.py",
> +  "$_mojom_library_root/generate/pack.py",
> +  "$_mojom_library_root/generate/template_expander.py",
> +  "$_mojom_library_root/generate/translate.py",
> +  "$_mojom_library_root/parse/__init__.py",
> +  "$_mojom_library_root/parse/ast.py",
> +  "$_mojom_library_root/parse/lexer.py",
> +  "$_mojom_library_root/parse/parser.py",
> +]
> +
> +mojom_generator_root = "$_mojom_tools_root/bindings"
> +mojom_generator_script = "$mojom_generator_root/mojom_bindings_generator.py"
> +mojom_generator_sources =
> +    mojom_parser_sources + [
> +      "$mojom_generator_root/generators/mojom_cpp_generator.py",
> +      "$mojom_generator_root/generators/mojom_java_generator.py",
> +      "$mojom_generator_root/generators/mojom_mojolpm_generator.py",
> +      "$mojom_generator_root/generators/mojom_js_generator.py",
> +      "$mojom_generator_root/generators/mojom_ts_generator.py",
> +      "$mojom_generator_script",
> +    ]
> +
> +if (enable_scrambled_message_ids) {
> +  declare_args() {
> +    # The path to a file whose contents can be used as the basis for a message
> +    # ID scrambling salt.
> +    mojom_message_id_salt_path = "//chrome/VERSION"
> +
> +    # The path to a file whose contents will be concatenated to the contents of
> +    # the file at |mojom_message_id_salt_path| to form a complete salt for
> +    # message ID scrambling. May be the empty string, in which case the contents
> +    # of the above file alone are used as the complete salt.
> +    if (is_chrome_branded) {
> +      mojom_message_id_salt_suffix_path =
> +          "//mojo/internal/chrome-message-id-salt-suffix"
> +    } else {
> +      mojom_message_id_salt_suffix_path = ""
> +    }
> +  }
> +
> +  assert(mojom_message_id_salt_path != "")
> +  message_scrambling_args = [
> +    "--scrambled_message_id_salt_path",
> +    rebase_path(mojom_message_id_salt_path, root_build_dir),
> +  ]
> +  message_scrambling_inputs = [ mojom_message_id_salt_path ]
> +
> +  if (mojom_message_id_salt_suffix_path != "") {
> +    message_scrambling_args += [
> +      "--scrambled_message_id_salt_path",
> +      rebase_path(mojom_message_id_salt_suffix_path, root_build_dir),
> +    ]
> +    message_scrambling_inputs += [ mojom_message_id_salt_suffix_path ]
> +  }
> +} else {
> +  message_scrambling_args = []
> +  message_scrambling_inputs = []
> +}
> +
> +if (enable_mojom_typemapping) {
> +  _bindings_configuration_files =
> +      [ "//mojo/public/tools/bindings/chromium_bindings_configuration.gni" ]
> +  _bindings_configurations = []
> +  foreach(config_file, _bindings_configuration_files) {
> +    _bindings_configurations += [ read_file(config_file, "scope") ]
> +  }
> +  foreach(configuration, _bindings_configurations) {
> +    # Check that the mojom field of each typemap refers to a mojom that exists.
> +    foreach(typemap, configuration.typemaps) {
> +      _typemap_config = {
> +      }
> +      _typemap_config = typemap.config
> +      read_file(_typemap_config.mojom, "")
> +    }
> +  }
> +} else {
> +  _bindings_configuration_files = []
> +  _bindings_configurations = [
> +    {
> +      typemaps = []
> +      component_macro_suffix = ""
> +    },
> +  ]
> +}
> +
> +if (!is_ios) {
> +  _bindings_configurations += [
> +    {
> +      variant = "blink"
> +      component_macro_suffix = "_BLINK"
> +      for_blink = true
> +      typemaps = []
> +    },
> +  ]
> +}
> +
> +# Generates targets for building C++, JavaScript and Java bindings from mojom
> +# files. The output files will go under the generated file directory tree with
> +# the same path as each input file.
> +#
> +# Other targets should depend on one of these generated targets (where "foo"
> +# is the target name):
> +#
> +#   foo
> +#       C++ bindings.
> +#
> +#   foo_blink
> +#       C++ bindings using Blink standard types.
> +#
> +#   foo_java
> +#       Java bindings.
> +#
> +#   foo_js
> +#       JavaScript bindings; used as compile-time dependency.
> +#
> +#   foo_js_data_deps
> +#       JavaScript bindings; used as run-time dependency.
> +#
> +# Parameters:
> +#
> +#   sources (optional if one of the deps sets listed below is present)
> +#       List of source .mojom files to compile.
> +#
> +#   deps (optional)
> +#       Note: this can contain only other mojom targets.
> +#
> +#       DEPRECATED: This is synonymous with public_deps because all mojom
> +#       dependencies must be public by design. Please use public_deps.
> +#
> +#   public_deps (optional)
> +#       Note: this can contain only other mojom targets.
> +#
> +#   parser_deps (optional)
> +#       List of non-mojom targets required for the mojom sources to be parsed.
> +#
> +#   import_dirs (optional)
> +#       List of import directories that will get added when processing sources.
> +#
> +#   testonly (optional)
> +#
> +#   visibility (optional)
> +#
> +#   visibility_blink (optional)
> +#       The value to use for visibility for the blink variant. If unset,
> +#       |visibility| is used.
> +#
> +#   cpp_only (optional)
> +#       If set to true, only the C++ bindings targets will be generated.
> +#
> +#       NOTE: If the global |enable_mojom_fuzzer| build arg is true, JS bindings
> +#       will still be generated even when |cpp_only| is set to |true|, unless
> +#       you also set |enable_fuzzing| to |false| in your mojom target.
> +#
> +#   cpp_typemaps (optional)
> +#       A list of typemaps to be applied to the generated C++ bindings for this
> +#       mojom target. Note that this only applies to the non-Blink variant of
> +#       generated C++ bindings.
> +#
> +#       Every typemap is a GN scope describing how one or more mojom types map
> +#       to a non-mojom C++ type, including necessary deps and headers required
> +#       for the mapping to work. See the Typemaps section below.
> +#
> +#   blink_cpp_typemaps (optional)
> +#       Same as above, but for the Blink variant of generated C++ bindings.
> +#
> +#   generate_java (optional)
> +#       If set to true, Java bindings are generated for Android builds. If
> +#       |cpp_only| is set to true, it overrides this to prevent generation of
> +#       Java bindings.
> +#
> +#   enable_fuzzing (optional)
> +#       Enables generation of fuzzing sources for the target if the global build
> +#       arg |enable_mojom_fuzzer| is also set to |true|. Defaults to |true|. If
> +#       fuzzing generation is enabled for a target, the target will always
> +#       generate JS bindings even if |cpp_only| is set to |true|. See note
> +#       above.
> +#
> +#   support_lazy_serialization (optional)
> +#       If set to |true|, generated C++ bindings will effectively prefer to
> +#       transmit messages in an unserialized form when going between endpoints
> +#       in the same process. This avoids the runtime cost of serialization,
> +#       deserialization, and validation logic at the expense of increased
> +#       code size. Defaults to |false|.
> +#
> +#   disable_variants (optional)
> +#       If |true|, no variant sources will be generated for the target. Defaults
> +#       to |false|.
> +#
> +#   disallow_native_types (optional)
> +#       If set to |true|, mojoms in this target may not apply the [Native]
> +#       attribute to struct or enum declarations. This avoids emitting code
> +#       which depends on legacy IPC serialization. Default is |false|, meaning
> +#       [Native] types are allowed.
> +#
> +#   disallow_interfaces (optional)
> +#       If set to |true|, mojoms in this target may not define interfaces.
> +#       Generates bindings with a smaller set of dependencies. Defaults to
> +#       |false|.
> +#
> +#   scramble_message_ids (optional)
> +#       If set to |true| (the default), generated mojom interfaces will use
> +#       scrambled ordinal identifiers in encoded messages.
> +#
> +#   component_output_prefix (optional)
> +#       The prefix to use for the output_name of any component library emitted
> +#       for generated C++ bindings. If this is omitted, C++ bindings targets are
> +#       emitted as source_sets instead. Because this controls the name of the
> +#       output shared library binary in the root output directory, it must be
> +#       unique across the entire build configuration.
> +#
> +#       This is required if |component_macro_prefix| is specified.
> +#
> +#   component_macro_prefix (optional)
> +#       This specifies a macro prefix to use for component export macros and
> +#       should therefore be globally unique in the project. For example if this
> +#       is "FOO_BAR", then the generated C++ sources will be built with
> +#       IS_FOO_BAR_{suffix}_IMPL defined, and the generated public headers will
> +#       annotate public symbol definitions with
> +#       COMPONENT_EXPORT(FOO_BAR_{suffix}). "suffix" in this case depends on
> +#       which internal subtarget is generating the code (e.g. "SHARED", or a
> +#       variant name like "BLINK").
> +#
> +#   enabled_features (optional)
> +#       Definitions in a mojom file can be guarded by an EnableIf attribute. If
> +#       the value specified by the attribute does not match any items in the
> +#       list of enabled_features, the definition will be disabled, with no code
> +#       emitted for it.
> +#
> +#   generate_closure_exports (optional)
> +#       If set to |true|, the generated JS lite bindings will use goog.provide
> +#       and goog.require annotations to export their symbols and to import
> +#       core Mojo bindings support and other mojom dependency modules. Use this
> +#       if you plan to compile your bindings into a larger JS binary. Defaults
> +#       to |false|, in which case the generated JS lite bindings assume they
> +#       will be manually loaded in the correct dependency order. Note that this
> +#       only has an effect if the |enable_mojom_closure_compile| global arg is
> +#       also set to |true|.
> +#
> +#   use_typescript_sources (optional)
> +#       Uses the Typescript generator to generate JavaScript bindings.
> +#
> +#   js_generate_struct_deserializers (optional)
> +#       Generates JS deserialize methods for structs.
> +#
> +#   extra_cpp_template_paths (optional)
> +#       List of extra C++ templates that are used to generate additional source
> +#       and/or header files. The templates should end with extension ".tmpl".
> +#
> +# The following parameters are used to support the component build. They are
> +# needed so that bindings which are linked with a component can use the same
> +# export settings for classes. The first three are for the chromium variant, and
> +# the last three are for the blink variant. These parameters can also override
> +# |component_macro_prefix| for a specific variant, allowing e.g. one variant
> +# to be linked into a larger non-mojom component target, while all other
> +# variants get their own unique component target.
> +#   export_class_attribute (optional)
> +#       The attribute to add to the class declaration. e.g. "CONTENT_EXPORT"
> +#   export_define (optional)
> +#       A define to be added to the source_set which is needed by the export
> +#       header. e.g. "CONTENT_IMPLEMENTATION=1"
> +#   export_header (optional)
> +#       A header to be added to the generated bindings to support the component
> +#       build. e.g. "content/common/content_export.h"
> +#   export_class_attribute_blink (optional)
> +#   export_define_blink (optional)
> +#   export_header_blink (optional)
> +#       These three parameters are the blink variants of the previous 3.
> +#
> +# The following parameters are used to correct component build dependencies.
> +# They are needed so mojom-mojom dependencies follow the rule that dependencies
> +# on a source set in another component are replaced by a dependency on the
> +# containing component. The first two are for the chromium variant; the other
> +# two are for the blink variant.
> +#   overridden_deps (optional)
> +#       The list of mojom deps to be overridden.
> +#   component_deps (optional)
> +#       The list of component deps to add to replace overridden_deps.
> +#   overridden_deps_blink (optional)
> +#   component_deps_blink (optional)
> +#       These two parameters are the blink variants of the previous two.
> +#
> +#   check_includes_blink (optional)
> +#       Overrides the check_includes variable for the blink variant.
> +#       If check_includes_blink is not defined, the check_includes variable
> +#       retains its original value.
> +#
> +# Typemaps
> +# ========
> +# The cpp_typemaps and blink_cpp_typemaps each specify an optional list of
> +# typemapping configurations. Each configuration is a GN scope with metadata
> +# describing what and how to map.
> +#
> +# Typemap scope parameters:
> +#   types
> +#       A list of type specifications for this typemap. Each type specification
> +#       is a nested GN scope which can be expressed with the following syntax:
> +#
> +#           {
> +#             mojom = "foo.mojom.Bar"
> +#             cpp = "::foo::LegitBar"
> +#             move_only = true
> +#             # etc...
> +#           }
> +#
> +#       Each type specification supports the following values:
> +#
> +#         mojom (required)
> +#             The fully qualified name of a mojom type to be mapped. This is a
> +#             string like "foo.mojom.Bar".
> +#
> +#         cpp (required)
> +#             The fully qualified name of the C++ type to which the mojom type
> +#             should be mapped in generated bindings. This is a string like
> +#             "::base::Value" or "std::vector<::base::Value>".
> +#
> +#         move_only (optional)
> +#             A boolean value (default false) which indicates whether the C++
> +#             type is move-only. If true, generated bindings will pass the type
> +#             by value and use std::move() at call sites.
> +#
> +#         copyable_pass_by_value (optional)
> +#             A boolean value (default false) which effectively indicates
> +#             whether the C++ type is very cheap to copy. If so, generated
> +#             bindings will pass by value but not use std::move() at call sites.
> +#
> +#         nullable_is_same_type (optional)
> +#             A boolean value (default false) which indicates that the C++ type
> +#             has some baked-in semantic notion of a "null" state. If true, the
> +#             traits for the type must define IsNull and SetToNull methods.
> +#
> +#             When false, nullable fields are represented by wrapping the C++
> +#             type with base::Optional, and null values are simply
> +#             base::nullopt.
> +#
> +#         hashable (optional)
> +#             A boolean value (default false) indicating whether the C++ type is
> +#             hashable. Set this to true only if it is actually needed (i.e.
> +#             the type is used as the key of a mojom map).
> +#
> +#         force_serialize (optional)
> +#             A boolean value (default false) which disables lazy serialization
> +#             of the typemapped type if lazy serialization is enabled for the
> +#             mojom target applying this typemap.
> +#
> +# Additional typemap scope parameters:
> +#
> +#   traits_headers (optional)
> +#       Headers which must be included in the generated mojom in order for
> +#       serialization to be possible. This generally means including at least
> +#       the header for the corresponding mojom traits definitions.
> +#
> +#   traits_private_headers (optional)
> +#       Headers which must be included in generated C++ serialization code for
> +#       a mojom using the typemap. This should be used only when including a
> +#       header in |traits_headers| is problematic for compilation, as is
> +#       sometimes the case with legacy IPC message headers.
> +#
> +#   traits_sources (optional)
> +#       The references to the source files (typically a single .cc and .h file)
> +#       defining an appropriate set of EnumTraits or StructTraits, etc. for
> +#       the type-mapping. Using this will cause the listed sources to be
> +#       integrated directly into the dependent mojom's generated type-mapping
> +#       targets.
> +#
> +#       Prefer using |traits_public_deps| over inlined |traits_sources|, as this
> +#       will generally lead to easier build maintenance over time.
> +#
> +#       NOTE: If a typemap is shared by Blink and non-Blink bindings, you cannot
> +#       use this and MUST use |traits_public_deps| to reference traits built
> +#       within a separate target.
> +#
> +#   traits_deps / traits_public_deps (optional)
> +#       Any dependencies of sources in |traits_headers| or |traits_sources| must
> +#       be listed here.
> +#
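> +# Example (illustrative only; target and type names are made up):
> +#
> +#   mojom("interfaces") {
> +#     sources = [ "foo.mojom" ]
> +#
> +#     cpp_typemaps = [
> +#       {
> +#         types = [
> +#           {
> +#             mojom = "foo.mojom.Bar"
> +#             cpp = "::foo::Bar"
> +#             move_only = true
> +#           },
> +#         ]
> +#         traits_headers = [ "//foo/bar_mojom_traits.h" ]
> +#         traits_public_deps = [ "//foo:bar_traits" ]
> +#       },
> +#     ]
> +#   }
> +#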
> +template("mojom") {
> +  assert(
> +      defined(invoker.sources) || defined(invoker.deps) ||
> +          defined(invoker.public_deps),
> +      "\"sources\" or \"deps\" must be defined for the $target_name template.")
> +
> +  if (defined(invoker.export_class_attribute) ||
> +      defined(invoker.export_define) || defined(invoker.export_header)) {
> +    assert(defined(invoker.export_class_attribute))
> +    assert(defined(invoker.export_define))
> +    assert(defined(invoker.export_header))
> +  }
> +  if (defined(invoker.export_class_attribute_blink) ||
> +      defined(invoker.export_define_blink) ||
> +      defined(invoker.export_header_blink)) {
> +    assert(defined(invoker.export_class_attribute_blink))
> +    assert(defined(invoker.export_define_blink))
> +    assert(defined(invoker.export_header_blink))
> +
> +    # Not all platforms use the Blink variant, so make sure GN doesn't complain
> +    # about these values being inconsequential.
> +    not_needed(invoker,
> +               [
> +                 "export_class_attribute_blink",
> +                 "export_define_blink",
> +                 "export_header_blink",
> +               ])
> +  }
> +  if (defined(invoker.overridden_deps) || defined(invoker.component_deps)) {
> +    assert(defined(invoker.overridden_deps))
> +    assert(defined(invoker.component_deps))
> +  }
> +
> +  if (defined(invoker.overridden_deps_blink) ||
> +      defined(invoker.component_deps_blink)) {
> +    assert(defined(invoker.overridden_deps_blink))
> +    assert(defined(invoker.component_deps_blink))
> +  }
> +
> +  # Type-mapping may be disabled or we may not generate C++ bindings.
> +  not_needed(invoker,
> +             [
> +               "cpp_typemaps",
> +               "blink_cpp_typemaps",
> +             ])
> +
> +  require_full_cpp_deps =
> +      !defined(invoker.disallow_native_types) ||
> +      !invoker.disallow_native_types || !defined(invoker.disallow_interfaces) ||
> +      !invoker.disallow_interfaces
> +
> +  all_deps = []
> +  if (defined(invoker.deps)) {
> +    all_deps += invoker.deps
> +  }
> +  if (defined(invoker.public_deps)) {
> +    all_deps += invoker.public_deps
> +  }
> +
> +  if (defined(invoker.component_macro_prefix)) {
> +    assert(defined(invoker.component_output_prefix))
> +  }
> +
> +  group("${target_name}__is_mojom") {
> +  }
> +
> +  # Explicitly ensure that all dependencies (invoker.deps and
> +  # invoker.public_deps) are mojom targets.
> +  group("${target_name}__check_deps_are_all_mojom") {
> +    deps = []
> +    foreach(d, all_deps) {
> +      name = get_label_info(d, "label_no_toolchain")
> +      toolchain = get_label_info(d, "toolchain")
> +      deps += [ "${name}__is_mojom(${toolchain})" ]
> +    }
> +  }
> +
> +  sources_list = []
> +  if (defined(invoker.sources)) {
> +    sources_list = invoker.sources
> +  }
> +
> +  # Reset sources_assignment_filter for the BUILD.gn file to prevent
> +  # regression during the migration of Chromium away from the feature.
> +  # See docs/no_sources_assignment_filter.md for more information.
> +  # TODO(crbug.com/1018739): remove this when migration is done.
> +  set_sources_assignment_filter([])
> +
> +  # Listed sources may be relative to the current target dir, or they may be
> +  # absolute paths, including paths to generated mojom files. While those are
> +  # fine as-is for input references, deriving output paths can be more subtle.
> +  #
> +  # Here we rewrite all source paths to be relative to the root build dir and
> +  # strip any root_gen_dir prefixes.
> +  #
> +  # So for a target in //foo/bar with:
> +  #
> +  #     sources = [
> +  #       "a.mojom",
> +  #       "b/c.mojom",
> +  #       "//baz/d.mojom",
> +  #       "$target_gen_dir/e.mojom",
> +  #     ]
> +  #
> +  # output_file_base_paths will be:
> +  #
> +  #     [
> +  #       "foo/bar/a.mojom",
> +  #       "foo/bar/b/c.mojom",
> +  #       "baz/d.mojom",
> +  #       "foo/bar/e.mojom",
> +  #     ]
> +  #
> +  # This result is essentially a list of base filename paths which are suitable
> +  # for the naming of any generated output files derived from their
> +  # corresponding input mojoms. These paths are always considered to be relative
> +  # to root_gen_dir.
> +  source_abspaths = rebase_path(sources_list, "//")
> +  output_file_base_paths = []
> +  foreach(path, source_abspaths) {
> +    output_file_base_paths +=
> +        [ string_replace(path, rebase_path(root_gen_dir, "//") + "/", "") ]
> +  }
> +
> +  # Sanity check that either all input files have a .mojom extension, or
> +  # all input files have a .test-mojom extension AND |testonly| is |true|.
> +  sources_list_filenames =
> +      process_file_template(sources_list, "{{source_file_part}}")
> +  sources_list_filenames_with_mojom_extension =
> +      process_file_template(sources_list, "{{source_name_part}}.mojom")
> +  if (sources_list_filenames != sources_list_filenames_with_mojom_extension) {
> +    sources_list_filenames_with_test_mojom_extension =
> +        process_file_template(sources_list, "{{source_name_part}}.test-mojom")
> +    if (sources_list_filenames ==
> +        sources_list_filenames_with_test_mojom_extension) {
> +      assert(
> +          defined(invoker.testonly) && invoker.testonly,
> +          "mojom targets for .test-mojom files must set |testonly| to |true|")
> +    } else {
> +      assert(
> +          false,
> +          "One or more mojom files has an invalid extension. The only " +
> +              "allowed extensions are .mojom and .test-mojom, and any given " +
> +              "mojom target must use one or the other exclusively.")
> +    }
> +  }
> +
> +  build_metadata_filename = "$target_gen_dir/$target_name.build_metadata"
> +  build_metadata = {
> +  }
> +  build_metadata.sources = rebase_path(sources_list)
> +  build_metadata.deps = []
> +  foreach(dep, all_deps) {
> +    dep_target_gen_dir = get_label_info(dep, "target_gen_dir")
> +    dep_name = get_label_info(dep, "name")
> +    build_metadata.deps +=
> +        [ rebase_path("$dep_target_gen_dir/$dep_name.build_metadata") ]
> +  }
> +  write_file(build_metadata_filename, build_metadata, "json")
> +
> +  generate_fuzzing =
> +      (!defined(invoker.enable_fuzzing) || invoker.enable_fuzzing) &&
> +      enable_mojom_fuzzer && (!defined(invoker.testonly) || !invoker.testonly)
> +
> +  parser_target_name = "${target_name}__parser"
> +  parser_deps = []
> +  foreach(dep, all_deps) {
> +    _label = get_label_info(dep, "label_no_toolchain")
> +    parser_deps += [ "${_label}__parser" ]
> +  }
> +  if (defined(invoker.parser_deps)) {
> +    parser_deps += invoker.parser_deps
> +  }
> +  if (sources_list == []) {
> +    # Even without sources we generate a parser target to at least forward
> +    # other parser dependencies.
> +    group(parser_target_name) {
> +      public_deps = parser_deps
> +    }
> +  } else {
> +    enabled_features = []
> +    if (defined(invoker.enabled_features)) {
> +      enabled_features += invoker.enabled_features
> +    }
> +    if (is_posix) {
> +      enabled_features += [ "is_posix" ]
> +    }
> +    if (is_android) {
> +      enabled_features += [ "is_android" ]
> +    } else if (is_chromeos) {
> +      enabled_features += [ "is_chromeos" ]
> +    } else if (is_fuchsia) {
> +      enabled_features += [ "is_fuchsia" ]
> +    } else if (is_ios) {
> +      enabled_features += [ "is_ios" ]
> +    } else if (is_linux) {
> +      enabled_features += [ "is_linux" ]
> +    } else if (is_mac) {
> +      enabled_features += [ "is_mac" ]
> +    } else if (is_win) {
> +      enabled_features += [ "is_win" ]
> +    }
> +
> +    action(parser_target_name) {
> +      script = mojom_parser_script
> +      inputs = mojom_parser_sources + [ build_metadata_filename ]
> +      sources = sources_list
> +      deps = parser_deps
> +      outputs = []
> +      foreach(base_path, output_file_base_paths) {
> +        filename = get_path_info(base_path, "file")
> +        dirname = get_path_info(base_path, "dir")
> +        outputs += [ "$root_gen_dir/$dirname/${filename}-module" ]
> +      }
> +
> +      filelist = []
> +      foreach(source, sources_list) {
> +        filelist += [ rebase_path(source) ]
> +      }
> +      response_file_contents = filelist
> +
> +      args = [
> +        # Resolve relative input mojom paths against both the root src dir and
> +        # the root gen dir.
> +        "--input-root",
> +        rebase_path("//"),
> +        "--input-root",
> +        rebase_path(root_gen_dir),
> +
> +        "--output-root",
> +        rebase_path(root_gen_dir),
> +
> +        "--mojom-file-list={{response_file_name}}",
> +
> +        "--check-imports",
> +        rebase_path(build_metadata_filename),
> +      ]
> +
> +      foreach(enabled_feature, enabled_features) {
> +        args += [
> +          "--enable-feature",
> +          enabled_feature,
> +        ]
> +      }
> +    }
> +  }
> +
> +  generator_cpp_message_ids_target_name = "${target_name}__generate_message_ids"
> +
> +  # Generate code that is shared by different variants.
> +  if (sources_list != []) {
> +    common_generator_args = [
> +      "--use_bundled_pylibs",
> +      "-o",
> +      rebase_path(root_gen_dir, root_build_dir),
> +      "generate",
> +      "-d",
> +      rebase_path("//", root_build_dir),
> +      "-I",
> +      rebase_path("//", root_build_dir),
> +      "--bytecode_path",
> +      rebase_path("$root_gen_dir/mojo/public/tools/bindings", root_build_dir),
> +    ]
> +
> +    if (defined(invoker.disallow_native_types) &&
> +        invoker.disallow_native_types) {
> +      common_generator_args += [ "--disallow_native_types" ]
> +    }
> +
> +    if (defined(invoker.disallow_interfaces) && invoker.disallow_interfaces) {
> +      common_generator_args += [ "--disallow_interfaces" ]
> +    }
> +
> +    if (defined(invoker.import_dirs)) {
> +      foreach(import_dir, invoker.import_dirs) {
> +        common_generator_args += [
> +          "-I",
> +          rebase_path(import_dir, root_build_dir),
> +        ]
> +      }
> +    }
> +
> +    if (defined(invoker.component_macro_prefix)) {
> +      shared_component_export_macro =
> +          "COMPONENT_EXPORT(${invoker.component_macro_prefix}_SHARED)"
> +      shared_component_impl_macro =
> +          "IS_${invoker.component_macro_prefix}_SHARED_IMPL"
> +      shared_component_output_name = "${invoker.component_output_prefix}_shared"
> +    } else if (defined(invoker.export_class_attribute_shared) ||
> +               defined(invoker.export_class_attribute)) {
> +      if (defined(invoker.export_class_attribute_shared)) {
> +        assert(defined(invoker.export_header_shared))
> +        shared_component_export_macro = invoker.export_class_attribute_shared
> +        shared_component_impl_macro = invoker.export_define_shared
> +      } else {
> +        assert(!defined(invoker.export_header_shared))
> +
> +        # If no explicit shared attribute/define was provided by the invoker,
> +        # we derive some reasonable settings from the default variant.
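> +        # For example (illustrative), export_class_attribute = "FOO_EXPORT"
> +        # yields "COMPONENT_EXPORT(MOJOM_SHARED_FOO_EXPORT)" and
> +        # "IS_MOJOM_SHARED_FOO_EXPORT_IMPL".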
> +        shared_component_export_macro = "COMPONENT_EXPORT(MOJOM_SHARED_" +
> +                                        invoker.export_class_attribute + ")"
> +        shared_component_impl_macro =
> +            "IS_MOJOM_SHARED_" + invoker.export_class_attribute + "_IMPL"
> +      }
> +
> +      if (defined(invoker.component_output_prefix)) {
> +        shared_component_output_name =
> +            "${invoker.component_output_prefix}_shared"
> +      } else {
> +        shared_component_output_name = "${target_name}_shared"
> +      }
> +    }
> +
> +    action(generator_cpp_message_ids_target_name) {
> +      script = mojom_generator_script
> +      inputs = mojom_generator_sources + jinja2_sources
> +      sources = sources_list
> +      deps = [
> +        ":$parser_target_name",
> +        "//mojo/public/tools/bindings:precompile_templates",
> +      ]
> +      if (defined(invoker.parser_deps)) {
> +        deps += invoker.parser_deps
> +      }
> +      outputs = []
> +      args = common_generator_args
> +      filelist = []
> +      foreach(source, sources_list) {
> +        filelist += [ rebase_path("$source", root_build_dir) ]
> +      }
> +      foreach(base_path, output_file_base_paths) {
> +        outputs += [ "$root_gen_dir/$base_path-shared-message-ids.h" ]
> +      }
> +
> +      response_file_contents = filelist
> +
> +      args += [
> +        "--filelist={{response_file_name}}",
> +        "--generate_non_variant_code",
> +        "--generate_message_ids",
> +        "-g",
> +        "c++",
> +      ]
> +
> +      if (!defined(invoker.scramble_message_ids) ||
> +          invoker.scramble_message_ids) {
> +        inputs += message_scrambling_inputs
> +        args += message_scrambling_args
> +      }
> +    }
> +
> +    generator_shared_target_name = "${target_name}_shared__generator"
> +    action(generator_shared_target_name) {
> +      visibility = [ ":*" ]
> +      script = mojom_generator_script
> +      inputs = mojom_generator_sources + jinja2_sources
> +      sources = sources_list
> +      deps = [
> +        ":$parser_target_name",
> +        "//mojo/public/tools/bindings:precompile_templates",
> +      ]
> +      if (defined(invoker.parser_deps)) {
> +        deps += invoker.parser_deps
> +      }
> +
> +      outputs = []
> +      args = common_generator_args
> +      filelist = []
> +      foreach(source, sources_list) {
> +        filelist += [ rebase_path("$source", root_build_dir) ]
> +      }
> +      foreach(base_path, output_file_base_paths) {
> +        outputs += [
> +          "$root_gen_dir/$base_path-params-data.h",
> +          "$root_gen_dir/$base_path-shared-internal.h",
> +          "$root_gen_dir/$base_path-shared.cc",
> +          "$root_gen_dir/$base_path-shared.h",
> +        ]
> +      }
> +
> +      response_file_contents = filelist
> +
> +      args += [
> +        "--filelist={{response_file_name}}",
> +        "--generate_non_variant_code",
> +        "-g",
> +        "c++",
> +      ]
> +
> +      if (defined(shared_component_export_macro)) {
> +        args += [
> +          "--export_attribute",
> +          shared_component_export_macro,
> +          "--export_header",
> +          "base/component_export.h",
> +        ]
> +      }
> +
> +      # Enable adding annotations to generated C++ headers that are used for
> +      # cross-references in CodeSearch.
> +      if (enable_kythe_annotations) {
> +        args += [ "--enable_kythe_annotations" ]
> +      }
> +    }
> +  } else {
> +    group(generator_cpp_message_ids_target_name) {
> +    }
> +  }
> +
> +  shared_cpp_sources_target_name = "${target_name}_shared_cpp_sources"
> +  jumbo_source_set(shared_cpp_sources_target_name) {
> +    if (defined(invoker.testonly)) {
> +      testonly = invoker.testonly
> +    }
> +    deps = []
> +    public_deps = []
> +    if (output_file_base_paths != []) {
> +      sources = []
> +      foreach(base_path, output_file_base_paths) {
> +        sources += [
> +          "$root_gen_dir/$base_path-params-data.h",
> +          "$root_gen_dir/$base_path-shared-internal.h",
> +          "$root_gen_dir/$base_path-shared.cc",
> +          "$root_gen_dir/$base_path-shared.h",
> +        ]
> +      }
> +      public_deps += [ ":$generator_shared_target_name" ]
> +    }
> +    if (require_full_cpp_deps) {
> +      public_deps += [ "//mojo/public/cpp/bindings" ]
> +    } else {
> +      public_deps += [ "//mojo/public/cpp/bindings:bindings_base" ]
> +    }
> +    foreach(d, all_deps) {
> +      # Resolve the name, so that a target //mojo/something becomes
> +      # //mojo/something:something and we can append shared_cpp_sources_suffix
> +      # to get the cpp dependency name.
> +      full_name = get_label_info("$d", "label_no_toolchain")
> +      public_deps += [ "${full_name}_shared" ]
> +    }
> +    if (defined(shared_component_impl_macro)) {
> +      defines = [ shared_component_impl_macro ]
> +    }
> +  }
> +
> +  shared_cpp_library_target_name = "${target_name}_shared"
> +  if (defined(shared_component_output_name)) {
> +    component(shared_cpp_library_target_name) {
> +      if (defined(invoker.testonly)) {
> +        testonly = invoker.testonly
> +      }
> +      output_name = "$shared_component_output_name"
> +      public_deps = [ ":$shared_cpp_sources_target_name" ]
> +    }
> +  } else {
> +    group(shared_cpp_library_target_name) {
> +      if (defined(invoker.testonly)) {
> +        testonly = invoker.testonly
> +      }
> +      public_deps = [ ":$shared_cpp_sources_target_name" ]
> +    }
> +  }
> +
> +  if (generate_fuzzing) {
> +    # This block generates the proto files used for the MojoLPM fuzzer,
> +    # and the corresponding proto targets that will be linked in the fuzzer
> +    # targets. These are independent of the typemappings, and can be done
> +    # separately here.
> +
> +    generator_mojolpm_proto_target_name =
> +        "${target_name}_mojolpm_proto_generator"
> +    action(generator_mojolpm_proto_target_name) {
> +      script = mojom_generator_script
> +      inputs = mojom_generator_sources + jinja2_sources
> +      sources = invoker.sources
> +      deps = [
> +        ":$parser_target_name",
> +        "//mojo/public/tools/bindings:precompile_templates",
> +      ]
> +
> +      outputs = []
> +      args = common_generator_args
> +      filelist = []
> +      foreach(source, invoker.sources) {
> +        filelist += [ rebase_path("$source", root_build_dir) ]
> +        outputs += [ "$target_gen_dir/$source.mojolpm.proto" ]
> +      }
> +
> +      response_file_contents = filelist
> +
> +      args += [
> +        "--filelist={{response_file_name}}",
> +        "--generate_non_variant_code",
> +        "-g",
> +        "mojolpm",
> +      ]
> +    }
> +
> +    mojolpm_proto_target_name = "${target_name}_mojolpm_proto"
> +    if (defined(invoker.sources)) {
> +      proto_library(mojolpm_proto_target_name) {
> +        testonly = true
> +        generate_python = false
> +        sources = process_file_template(
> +                invoker.sources,
> +                [ "{{source_gen_dir}}/{{source_file_part}}.mojolpm.proto" ])
> +        import_dirs = [ "${root_gen_dir}" ]
> +        proto_in_dir = "${root_gen_dir}"
> +        proto_out_dir = "."
> +        proto_deps = [ "//mojo/public/tools/fuzzers:mojolpm_proto_copy" ]
> +        proto_deps += [ ":$generator_mojolpm_proto_target_name" ]
> +        link_deps = [ "//mojo/public/tools/fuzzers:mojolpm_proto" ]
> +
> +        foreach(d, all_deps) {
> +          # Resolve the name, so that a target //mojo/something becomes
> +          # //mojo/something:something and we can append mojolpm_proto_suffix
> +          # to get the proto dependency name.
> +          full_name = get_label_info("$d", "label_no_toolchain")
> +          proto_deps += [ "${full_name}_mojolpm_proto" ]
> +          link_deps += [ "${full_name}_mojolpm_proto" ]
> +        }
> +      }
> +    } else {
> +      group(mojolpm_proto_target_name) {
> +        testonly = true
> +        public_deps = [ "//mojo/public/tools/fuzzers:mojolpm_proto" ]
> +        if (defined(generator_shared_target_name)) {
> +          public_deps += [ ":$generator_shared_target_name" ]
> +        }
> +        foreach(d, all_deps) {
> +          # Resolve the name, so that a target //mojo/something becomes
> +          # //mojo/something:something and we can append mojolpm_proto_suffix
> +          # to get the proto dependency name.
> +          full_name = get_label_info("$d", "label_no_toolchain")
> +          public_deps += [ "${full_name}_mojolpm_proto" ]
> +        }
> +      }
> +    }
> +  }
> +
> +  # Generate code for variants.
> +  if (!defined(invoker.disable_variants) || !invoker.disable_variants) {
> +    enabled_configurations = _bindings_configurations
> +  } else {
> +    first_config = _bindings_configurations[0]
> +    assert(!defined(first_config.variant))
> +    enabled_configurations = [ first_config ]
> +  }
> +  foreach(bindings_configuration, enabled_configurations) {
> +    cpp_only = false
> +    if (defined(invoker.cpp_only)) {
> +      cpp_only = invoker.cpp_only
> +    }
> +    variant_suffix = ""
> +    if (defined(bindings_configuration.variant)) {
> +      variant = bindings_configuration.variant
> +      variant_suffix = "_${variant}"
> +      cpp_only = true
> +    }
> +
> +    cpp_typemap_configs = []
> +    export_defines = []
> +    export_defines_overridden = false
> +    force_source_set = false
> +    if (defined(bindings_configuration.for_blink) &&
> +        bindings_configuration.for_blink) {
> +      if (defined(invoker.blink_cpp_typemaps)) {
> +        cpp_typemap_configs = invoker.blink_cpp_typemaps
> +      }
> +      if (defined(invoker.export_define_blink)) {
> +        export_defines_overridden = true
> +        export_defines = [ invoker.export_define_blink ]
> +        force_source_set = true
> +      }
> +    } else {
> +      if (defined(invoker.cpp_typemaps)) {
> +        cpp_typemap_configs = invoker.cpp_typemaps
> +      }
> +
> +      if (defined(invoker.export_define)) {
> +        export_defines_overridden = true
> +        export_defines = [ invoker.export_define ]
> +        force_source_set = true
> +      }
> +    }
> +    not_needed([ "cpp_typemap_configs" ])
> +
> +    if (!export_defines_overridden && defined(invoker.component_macro_prefix)) {
> +      output_name_override =
> +          "${invoker.component_output_prefix}${variant_suffix}"
> +      export_defines =
> +          [ "IS_${invoker.component_macro_prefix}" +
> +            "${bindings_configuration.component_macro_suffix}_IMPL" ]
> +    }
> +
> +    export_args = []
> +    export_args_overridden = false
> +    if (defined(bindings_configuration.for_blink) &&
> +        bindings_configuration.for_blink) {
> +      if (defined(invoker.export_class_attribute_blink)) {
> +        export_args_overridden = true
> +        export_args += [
> +          "--export_attribute",
> +          invoker.export_class_attribute_blink,
> +          "--export_header",
> +          invoker.export_header_blink,
> +        ]
> +      }
> +    } else if (defined(invoker.export_class_attribute)) {
> +      export_args_overridden = true
> +      export_args += [
> +        "--export_attribute",
> +        invoker.export_class_attribute,
> +        "--export_header",
> +        invoker.export_header,
> +      ]
> +    }
> +
> +    if (!export_args_overridden && defined(invoker.component_macro_prefix)) {
> +      export_args += [
> +        "--export_attribute",
> +        "COMPONENT_EXPORT(${invoker.component_macro_prefix}" +
> +            "${bindings_configuration.component_macro_suffix})",
> +        "--export_header",
> +        "base/component_export.h",
> +      ]
> +    }
> +
> +    generate_java = false
> +    if (!cpp_only && defined(invoker.generate_java)) {
> +      generate_java = invoker.generate_java
> +    }
> +    type_mappings_target_name = "${target_name}${variant_suffix}__type_mappings"
> +    type_mappings_path =
> +        "$target_gen_dir/${target_name}${variant_suffix}__type_mappings"
> +    active_typemaps = []
> +    if (sources_list != []) {
> +      generator_cpp_output_suffixes = []
> +      variant_dash_suffix = ""
> +      if (defined(variant)) {
> +        variant_dash_suffix = "-${variant}"
> +      }
> +      generator_cpp_output_suffixes += [
> +        "${variant_dash_suffix}-forward.h",
> +        "${variant_dash_suffix}-import-headers.h",
> +        "${variant_dash_suffix}-test-utils.cc",
> +        "${variant_dash_suffix}-test-utils.h",
> +        "${variant_dash_suffix}.cc",
> +        "${variant_dash_suffix}.h",
> +      ]
> +      foreach(source, sources_list) {
> +        # TODO(sammc): Use a map instead of a linear scan when GN supports maps.
> +        foreach(typemap, bindings_configuration.typemaps) {
> +          _typemap_config = {
> +          }
> +          _typemap_config = typemap.config
> +          if (get_path_info(source, "abspath") == _typemap_config.mojom) {
> +            active_typemaps += [ typemap ]
> +          }
> +        }
> +      }
> +
> +      generator_target_name = "${target_name}${variant_suffix}__generator"
> +      action(generator_target_name) {
> +        visibility = [ ":*" ]
> +        script = mojom_generator_script
> +        inputs = mojom_generator_sources + jinja2_sources
> +        sources = sources_list
> +        deps = [
> +          ":$parser_target_name",
> +          ":$type_mappings_target_name",
> +          "//mojo/public/tools/bindings:precompile_templates",
> +        ]
> +        if (defined(invoker.parser_deps)) {
> +          deps += invoker.parser_deps
> +        }
> +        outputs = []
> +        args = common_generator_args + export_args
> +        filelist = []
> +        foreach(source, sources_list) {
> +          filelist += [ rebase_path("$source", root_build_dir) ]
> +        }
> +        foreach(base_path, output_file_base_paths) {
> +          outputs += [
> +            "$root_gen_dir/${base_path}${variant_dash_suffix}-forward.h",
> +            "$root_gen_dir/${base_path}${variant_dash_suffix}-import-headers.h",
> +            "$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.cc",
> +            "$root_gen_dir/${base_path}${variant_dash_suffix}-test-utils.h",
> +            "$root_gen_dir/${base_path}${variant_dash_suffix}.cc",
> +            "$root_gen_dir/${base_path}${variant_dash_suffix}.h",
> +          ]
> +          if (generate_fuzzing && !defined(bindings_configuration.variant)) {
> +            outputs += [
> +              "$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.cc",
> +              "$root_gen_dir/${base_path}${variant_dash_suffix}-mojolpm.h",
> +            ]
> +          }
> +        }
> +
> +        response_file_contents = filelist
> +
> +        args += [
> +          "--filelist={{response_file_name}}",
> +          "-g",
> +        ]
> +
> +        if (generate_fuzzing && !defined(bindings_configuration.variant)) {
> +          args += [ "c++,mojolpm" ]
> +        } else {
> +          args += [ "c++" ]
> +        }
> +
> +        if (defined(bindings_configuration.variant)) {
> +          args += [
> +            "--variant",
> +            bindings_configuration.variant,
> +          ]
> +        }
> +
> +        args += [
> +          "--typemap",
> +          rebase_path(type_mappings_path, root_build_dir),
> +        ]
> +
> +        if (defined(bindings_configuration.for_blink) &&
> +            bindings_configuration.for_blink) {
> +          args += [ "--for_blink" ]
> +        }
> +
> +        if (defined(invoker.support_lazy_serialization) &&
> +            invoker.support_lazy_serialization) {
> +          args += [ "--support_lazy_serialization" ]
> +        }
> +
> +        if (enable_kythe_annotations) {
> +          args += [ "--enable_kythe_annotations" ]
> +        }
> +
> +        if (!defined(invoker.scramble_message_ids) ||
> +            invoker.scramble_message_ids) {
> +          inputs += message_scrambling_inputs
> +          args += message_scrambling_args
> +        }
> +
> +        if (defined(invoker.extra_cpp_template_paths)) {
> +          foreach(extra_cpp_template, invoker.extra_cpp_template_paths) {
> +            args += [
> +              "--extra_cpp_template_paths",
> +              rebase_path(extra_cpp_template, root_build_dir),
> +            ]
> +            assert(
> +                get_path_info(extra_cpp_template, "extension") == "tmpl",
> +                "--extra_cpp_template_paths only accepts template files ending in extension .tmpl")
> +            foreach(base_path, output_file_base_paths) {
> +              template_file_name = get_path_info("$extra_cpp_template", "name")
> +              outputs += [ "$root_gen_dir/${base_path}${variant_dash_suffix}-${template_file_name}" ]
> +            }
> +          }
> +        }
> +      }
> +    }
> +
> +    if (generate_fuzzing && !defined(variant)) {
> +      # This block contains the C++ targets for the MojoLPM fuzzer, we need to
> +      # do this here so that we can use the typemap configuration for the
> +      # empty-variant Mojo target.
> +
> +      mojolpm_target_name = "${target_name}_mojolpm"
> +      mojolpm_generator_target_name = "${target_name}__generator"
> +      source_set(mojolpm_target_name) {
> +        # There are still a few missing header dependencies between mojo targets
> +        # with typemaps and the dependencies of their typemap headers. It would
> +        # be good to enable include checking for these in the future though.
> +        check_includes = false
> +        testonly = true
> +        if (defined(invoker.sources)) {
> +          sources = process_file_template(
> +                  invoker.sources,
> +                  [
> +                    "{{source_gen_dir}}/{{source_file_part}}-mojolpm.cc",
> +                    "{{source_gen_dir}}/{{source_file_part}}-mojolpm.h",
> +                  ])
> +          deps = []
> +        } else {
> +          sources = []
> +          deps = []
> +        }
> +
> +        public_deps = [
> +          ":$generator_shared_target_name",
> +
> +          # NB: hardcoded dependency on the no-variant variant generator, since
> +          # mojolpm only uses the no-variant type.
> +          ":$mojolpm_generator_target_name",
> +          ":$mojolpm_proto_target_name",
> +          "//mojo/public/tools/fuzzers:mojolpm",
> +        ]
> +
> +        foreach(d, all_deps) {
> +          # Resolve the name, so that a target //mojo/something becomes
> +          # //mojo/something:something and we can append variant_suffix to
> +          # get the cpp dependency name.
> +          full_name = get_label_info("$d", "label_no_toolchain")
> +          public_deps += [ "${full_name}_mojolpm" ]
> +        }
> +
> +        foreach(typemap, active_typemaps) {
> +          _typemap_config = {
> +          }
> +          _typemap_config = typemap.config
> +
> +          if (defined(_typemap_config.deps)) {
> +            deps += _typemap_config.deps
> +          }
> +          if (defined(_typemap_config.public_deps)) {
> +            public_deps += _typemap_config.public_deps
> +          }
> +        }
> +        foreach(config, cpp_typemap_configs) {
> +          if (defined(config.traits_deps)) {
> +            deps += config.traits_deps
> +          }
> +          if (defined(config.traits_public_deps)) {
> +            public_deps += config.traits_public_deps
> +          }
> +        }
> +      }
> +    }
> +
> +    # Write the typemapping configuration for this target out to a file to be
> +    # validated by a Python script. This helps catch mistakes that can't
> +    # be caught by logic in GN.
> +    _typemap_config_filename =
> +        "$target_gen_dir/${target_name}${variant_suffix}.typemap_config"
> +    _typemap_stamp_filename = "${_typemap_config_filename}.validated"
> +    _typemap_validator_target_name = "${type_mappings_target_name}__validator"
> +    _rebased_typemap_configs = []
> +    foreach(config, cpp_typemap_configs) {
> +      _rebased_config = {
> +      }
> +      _rebased_config = config
> +      if (defined(config.traits_headers)) {
> +        _rebased_config.traits_headers = []
> +        _rebased_config.traits_headers =
> +            rebase_path(config.traits_headers, "//")
> +      }
> +      if (defined(config.traits_private_headers)) {
> +        _rebased_config.traits_private_headers = []
> +        _rebased_config.traits_private_headers =
> +            rebase_path(config.traits_private_headers, "//")
> +      }
> +      _rebased_typemap_configs += [ _rebased_config ]
> +    }
> +    write_file(_typemap_config_filename, _rebased_typemap_configs, "json")
> +    _mojom_target_name = target_name
> +    action(_typemap_validator_target_name) {
> +      script = "$mojom_generator_root/validate_typemap_config.py"
> +      inputs = [ _typemap_config_filename ]
> +      outputs = [ _typemap_stamp_filename ]
> +      args = [
> +        get_label_info(_mojom_target_name, "label_no_toolchain"),
> +        rebase_path(_typemap_config_filename),
> +        rebase_path(_typemap_stamp_filename),
> +      ]
> +    }
> +
> +    action(type_mappings_target_name) {
> +      inputs = _bindings_configuration_files + mojom_generator_sources +
> +               jinja2_sources + [ _typemap_stamp_filename ]
> +      outputs = [ type_mappings_path ]
> +      script = "$mojom_generator_root/generate_type_mappings.py"
> +      deps = [ ":$_typemap_validator_target_name" ]
> +      args = [
> +        "--output",
> +        rebase_path(type_mappings_path, root_build_dir),
> +      ]
> +
> +      foreach(d, all_deps) {
> +        name = get_label_info(d, "label_no_toolchain")
> +        toolchain = get_label_info(d, "toolchain")
> +        dependency_output = "${name}${variant_suffix}__type_mappings"
> +        dependency_target = "${dependency_output}(${toolchain})"
> +        deps += [ dependency_target ]
> +        dependency_output_dir =
> +            get_label_info(dependency_output, "target_gen_dir")
> +        dependency_name = get_label_info(dependency_output, "name")
> +        dependency_path =
> +            rebase_path("$dependency_output_dir/${dependency_name}",
> +                        root_build_dir)
> +        args += [
> +          "--dependency",
> +          dependency_path,
> +        ]
> +      }
> +
> +      if (sources_list != []) {
> +        # TODO(sammc): Pass the typemap description in a file to avoid command
> +        # line length limitations.
> +        typemap_description = []
> +        foreach(typemap, active_typemaps) {
> +          _typemap_config = {
> +          }
> +          _typemap_config = typemap.config
> +
> +          typemap_description += [ "--start-typemap" ]
> +          if (defined(_typemap_config.public_headers)) {
> +            foreach(value, _typemap_config.public_headers) {
> +              typemap_description += [ "public_headers=$value" ]
> +            }
> +          }
> +          if (defined(_typemap_config.traits_headers)) {
> +            foreach(value, _typemap_config.traits_headers) {
> +              typemap_description += [ "traits_headers=$value" ]
> +            }
> +          }
> +          foreach(value, _typemap_config.type_mappings) {
> +            typemap_description += [ "type_mappings=$value" ]
> +          }
> +
> +          # The typemap configuration files are not actually used as inputs here
> +          # but this establishes a necessary build dependency to ensure that
> +          # typemap changes force a rebuild of affected targets.
> +          if (defined(typemap.filename)) {
> +            inputs += [ typemap.filename ]
> +          }
> +        }
> +        args += typemap_description
> +
> +        # Newer GN-based typemaps are aggregated into a single config.
> +        inputs += [ _typemap_config_filename ]
> +        args += [
> +          "--cpp-typemap-config",
> +          rebase_path(_typemap_config_filename, root_build_dir),
> +        ]
> +      }
> +    }
> +
> +    group("${target_name}${variant_suffix}_headers") {
> +      public_deps = []
> +      if (sources_list != []) {
> +        public_deps += [
> +          ":$generator_cpp_message_ids_target_name",
> +          ":$generator_shared_target_name",
> +          ":$generator_target_name",
> +        ]
> +      }
> +      foreach(d, all_deps) {
> +        full_name = get_label_info("$d", "label_no_toolchain")
> +        public_deps += [ "${full_name}${variant_suffix}_headers" ]
> +      }
> +    }
> +
> +    if (!force_source_set && defined(invoker.component_macro_prefix)) {
> +      output_target_type = "component"
> +    } else {
> +      output_target_type = "source_set"
> +    }
> +
> +    js_data_deps_target_name = target_name + "_js_data_deps"
> +    not_needed([ "js_data_deps_target_name" ])
> +
> +    target("jumbo_" + output_target_type, "${target_name}${variant_suffix}") {
> +      if (defined(output_name_override)) {
> +        output_name = output_name_override
> +      }
> +      if (defined(bindings_configuration.for_blink) &&
> +          bindings_configuration.for_blink &&
> +          defined(invoker.visibility_blink)) {
> +        visibility = invoker.visibility_blink
> +      } else if (defined(invoker.visibility)) {
> +        visibility = invoker.visibility
> +      }
> +      if (defined(invoker.testonly)) {
> +        testonly = invoker.testonly
> +      }
> +      defines = export_defines
> +      if (output_file_base_paths != []) {
> +        sources = []
> +        foreach(base_path, output_file_base_paths) {
> +          foreach(suffix, generator_cpp_output_suffixes) {
> +            sources += [ "$root_gen_dir/${base_path}$suffix" ]
> +          }
> +        }
> +      }
> +      deps = [
> +        ":$generator_cpp_message_ids_target_name",
> +        "//mojo/public/cpp/bindings:struct_traits",
> +        "//mojo/public/interfaces/bindings:bindings_headers",
> +      ]
> +      public_deps = [
> +        ":$shared_cpp_library_target_name",
> +        "//base",
> +      ]
> +      if (require_full_cpp_deps) {
> +        public_deps += [ "//mojo/public/cpp/bindings" ]
> +      } else {
> +        public_deps += [ "//mojo/public/cpp/bindings:bindings_base" ]
> +      }
> +
> +      if (sources_list != []) {
> +        public_deps += [ ":$generator_target_name" ]
> +      }
> +      foreach(d, all_deps) {
> +        # Resolve the name, so that a target //mojo/something becomes
> +        # //mojo/something:something and we can append variant_suffix to
> +        # get the cpp dependency name.
> +        full_name = get_label_info("$d", "label_no_toolchain")
> +        public_deps += [ "${full_name}${variant_suffix}" ]
> +      }
> +      if (defined(bindings_configuration.for_blink) &&
> +          bindings_configuration.for_blink) {
> +        if (defined(invoker.overridden_deps_blink)) {
> +          foreach(d, invoker.overridden_deps_blink) {
> +            # Resolve the name, so that a target //mojo/something becomes
> +            # //mojo/something:something and we can append variant_suffix
> +            # to get the cpp dependency name.
> +            full_name = get_label_info("$d", "label_no_toolchain")
> +            public_deps -= [ "${full_name}${variant_suffix}" ]
> +          }
> +          public_deps += invoker.component_deps_blink
> +        }
> +        if (defined(invoker.check_includes_blink)) {
> +          check_includes = invoker.check_includes_blink
> +        }
> +      } else {
> +        if (defined(invoker.check_includes_blink)) {
> +          not_needed(invoker, [ "check_includes_blink" ])
> +        }
> +        if (defined(invoker.overridden_deps)) {
> +          foreach(d, invoker.overridden_deps) {
> +            # Resolve the name, so that a target //mojo/something becomes
> +            # //mojo/something:something and we can append variant_suffix
> +            # to get the cpp dependency name.
> +            full_name = get_label_info("$d", "label_no_toolchain")
> +            public_deps -= [ "${full_name}${variant_suffix}" ]
> +          }
> +          public_deps += invoker.component_deps
> +        }
> +      }
> +      foreach(typemap, active_typemaps) {
> +        _typemap_config = {
> +        }
> +        _typemap_config = typemap.config
> +        if (defined(_typemap_config.sources)) {
> +          sources += _typemap_config.sources
> +        }
> +        if (defined(_typemap_config.public_deps)) {
> +          public_deps += _typemap_config.public_deps
> +        }
> +        if (defined(_typemap_config.deps)) {
> +          deps += _typemap_config.deps
> +        }
> +      }
> +      foreach(config, cpp_typemap_configs) {
> +        if (defined(config.traits_sources)) {
> +          sources += config.traits_sources
> +        }
> +        if (defined(config.traits_deps)) {
> +          deps += config.traits_deps
> +        }
> +        if (defined(config.traits_public_deps)) {
> +          public_deps += config.traits_public_deps
> +        }
> +      }
> +      if (defined(invoker.export_header)) {
> +        sources += [ "//" + invoker.export_header ]
> +      }
> +      if (defined(bindings_configuration.for_blink) &&
> +          bindings_configuration.for_blink) {
> +        public_deps += [ "//mojo/public/cpp/bindings:wtf_support" ]
> +      }
> +
> +      if (generate_fuzzing) {
> +        # Generate JS bindings by default if IPC fuzzer is enabled.
> +        public_deps += [ ":$js_data_deps_target_name" ]
> +      }
> +    }
> +
> +    if (generate_java && is_android) {
> +      import("//build/config/android/rules.gni")
> +
> +      java_generator_target_name = target_name + "_java__generator"
> +      if (sources_list != []) {
> +        action(java_generator_target_name) {
> +          script = mojom_generator_script
> +          inputs = mojom_generator_sources + jinja2_sources
> +          sources = sources_list
> +          deps = [
> +            ":$parser_target_name",
> +            ":$type_mappings_target_name",
> +            "//mojo/public/tools/bindings:precompile_templates",
> +          ]
> +          outputs = []
> +          args = common_generator_args
> +          filelist = []
> +          foreach(source, sources_list) {
> +            filelist += [ rebase_path("$source", root_build_dir) ]
> +          }
> +          foreach(base_path, output_file_base_paths) {
> +            outputs += [ "$root_gen_dir/$base_path.srcjar" ]
> +          }
> +
> +          response_file_contents = filelist
> +
> +          args += [
> +            "--filelist={{response_file_name}}",
> +            "-g",
> +            "java",
> +          ]
> +
> +          if (!defined(invoker.scramble_message_ids) ||
> +              invoker.scramble_message_ids) {
> +            inputs += message_scrambling_inputs
> +            args += message_scrambling_args
> +          }
> +        }
> +      } else {
> +        group(java_generator_target_name) {
> +        }
> +      }
> +
> +      java_srcjar_target_name = target_name + "_java_sources"
> +      action(java_srcjar_target_name) {
> +        script = "//build/android/gyp/zip.py"
> +        inputs = []
> +        if (output_file_base_paths != []) {
> +          foreach(base_path, output_file_base_paths) {
> +            inputs += [ "$root_gen_dir/${base_path}.srcjar" ]
> +          }
> +        }
> +        output = "$target_gen_dir/$target_name.srcjar"
> +        outputs = [ output ]
> +        rebase_inputs = rebase_path(inputs, root_build_dir)
> +        rebase_output = rebase_path(output, root_build_dir)
> +        args = [
> +          "--input-zips=$rebase_inputs",
> +          "--output=$rebase_output",
> +        ]
> +        deps = []
> +        if (sources_list != []) {
> +          deps = [ ":$java_generator_target_name" ]
> +        }
> +      }
> +
> +      java_target_name = target_name + "_java"
> +      android_library(java_target_name) {
> +        forward_variables_from(invoker, [ "enable_bytecode_checks" ])
> +        deps = [
> +          "//base:base_java",
> +          "//mojo/public/java:bindings_java",
> +          "//mojo/public/java:system_java",
> +        ]
> +
> +        # Disable warnings/checks on these generated files.
> +        chromium_code = false
> +
> +        foreach(d, all_deps) {
> +          # Resolve the name, so that a target //mojo/something becomes
> +          # //mojo/something:something and we can append "_java" to get the java
> +          # dependency name.
> +          full_name = get_label_info(d, "label_no_toolchain")
> +          deps += [ "${full_name}_java" ]
> +        }
> +
> +        srcjar_deps = [ ":$java_srcjar_target_name" ]
> +      }
> +    }
> +  }
> +
> +  use_typescript_for_target =
> +      enable_typescript_bindings && defined(invoker.use_typescript_sources) &&
> +      invoker.use_typescript_sources
> +
> +  if (!use_typescript_for_target && defined(invoker.use_typescript_sources)) {
> +    not_needed(invoker, [ "use_typescript_sources" ])
> +  }
> +
> +  if ((generate_fuzzing || !defined(invoker.cpp_only) || !invoker.cpp_only) &&
> +      !use_typescript_for_target) {
> +    if (sources_list != []) {
> +      generator_js_target_name = "${target_name}_js__generator"
> +      action(generator_js_target_name) {
> +        script = mojom_generator_script
> +        inputs = mojom_generator_sources + jinja2_sources
> +        sources = sources_list
> +        deps = [
> +          ":$parser_target_name",
> +          "//mojo/public/tools/bindings:precompile_templates",
> +        ]
> +        if (defined(invoker.parser_deps)) {
> +          deps += invoker.parser_deps
> +        }
> +        outputs = []
> +        args = common_generator_args
> +        filelist = []
> +        foreach(source, sources_list) {
> +          filelist += [ rebase_path("$source", root_build_dir) ]
> +        }
> +        foreach(base_path, output_file_base_paths) {
> +          outputs += [
> +            "$root_gen_dir/$base_path.js",
> +            "$root_gen_dir/$base_path.externs.js",
> +            "$root_gen_dir/$base_path-lite.js",
> +            "$root_gen_dir/$base_path.html",
> +            "$root_gen_dir/$base_path-lite-for-compile.js",
> +          ]
> +        }
> +
> +        response_file_contents = filelist
> +
> +        args += [
> +          "--filelist={{response_file_name}}",
> +          "-g",
> +          "javascript",
> +          "--js_bindings_mode=new",
> +        ]
> +
> +        if (defined(invoker.js_generate_struct_deserializers) &&
> +            invoker.js_generate_struct_deserializers) {
> +          args += [ "--js_generate_struct_deserializers" ]
> +        }
> +
> +        if (!defined(invoker.scramble_message_ids) ||
> +            invoker.scramble_message_ids) {
> +          inputs += message_scrambling_inputs
> +          args += message_scrambling_args
> +        }
> +
> +        if (generate_fuzzing) {
> +          args += [ "--generate_fuzzing" ]
> +        }
> +      }
> +    }
> +
> +    js_target_name = target_name + "_js"
> +    group(js_target_name) {
> +      public_deps = []
> +      if (sources_list != []) {
> +        public_deps += [ ":$generator_js_target_name" ]
> +      }
> +
> +      foreach(d, all_deps) {
> +        full_name = get_label_info(d, "label_no_toolchain")
> +        public_deps += [ "${full_name}_js" ]
> +      }
> +    }
> +
> +    group(js_data_deps_target_name) {
> +      deps = []
> +      if (sources_list != []) {
> +        data = []
> +        foreach(base_path, output_file_base_paths) {
> +          data += [
> +            "$root_gen_dir/${base_path}.js",
> +            "$root_gen_dir/${base_path}-lite.js",
> +          ]
> +        }
> +        deps += [ ":$generator_js_target_name" ]
> +      }
> +
> +      data_deps = []
> +      foreach(d, all_deps) {
> +        full_name = get_label_info(d, "label_no_toolchain")
> +        data_deps += [ "${full_name}_js_data_deps" ]
> +      }
> +    }
> +
> +    js_library_target_name = "${target_name}_js_library"
> +    if (sources_list != []) {
> +      js_library(js_library_target_name) {
> +        extra_public_deps = [ ":$generator_js_target_name" ]
> +        sources = []
> +        foreach(base_path, output_file_base_paths) {
> +          sources += [ "$root_gen_dir/${base_path}-lite.js" ]
> +        }
> +        externs_list = [
> +          "${externs_path}/mojo_core.js",
> +          "${externs_path}/pending.js",
> +        ]
> +
> +        deps = []
> +        foreach(d, all_deps) {
> +          full_name = get_label_info(d, "label_no_toolchain")
> +          deps += [ "${full_name}_js_library" ]
> +        }
> +      }
> +    } else {
> +      group(js_library_target_name) {
> +      }
> +    }
> +
> +    js_library_for_compile_target_name = "${target_name}_js_library_for_compile"
> +    if (sources_list != []) {
> +      js_library(js_library_for_compile_target_name) {
> +        extra_public_deps = [ ":$generator_js_target_name" ]
> +        sources = []
> +        foreach(base_path, output_file_base_paths) {
> +          sources += [ "$root_gen_dir/${base_path}-lite-for-compile.js" ]
> +        }
> +        externs_list = [
> +          "${externs_path}/mojo_core.js",
> +          "${externs_path}/pending.js",
> +        ]
> +        deps = []
> +        if (!defined(invoker.disallow_native_types)) {
> +          deps += [ "//mojo/public/js:bindings_lite_sources" ]
> +        }
> +        foreach(d, all_deps) {
> +          full_name = get_label_info(d, "label_no_toolchain")
> +          deps += [ "${full_name}_js_library_for_compile" ]
> +        }
> +      }
> +    } else {
> +      group(js_library_for_compile_target_name) {
> +      }
> +    }
> +  }
> +  if ((generate_fuzzing || !defined(invoker.cpp_only) || !invoker.cpp_only) &&
> +      use_typescript_for_target) {
> +    generator_js_target_names = []
> +    source_filelist = []
> +    foreach(source, sources_list) {
> +      source_filelist += [ rebase_path("$source", root_build_dir) ]
> +    }
> +
> +    dependency_types = [
> +      {
> +        name = "regular"
> +        ts_extension = ".ts"
> +        js_extension = ".js"
> +      },
> +      {
> +        name = "es_modules"
> +        ts_extension = ".m.ts"
> +        js_extension = ".m.js"
> +      },
> +    ]
> +
> +    foreach(dependency_type, dependency_types) {
> +      ts_outputs = []
> +      js_outputs = []
> +
> +      foreach(base_path, output_file_base_paths) {
> +        ts_outputs +=
> +            [ "$root_gen_dir/$base_path-lite${dependency_type.ts_extension}" ]
> +        js_outputs +=
> +            [ "$root_gen_dir/$base_path-lite${dependency_type.js_extension}" ]
> +      }
> +
> +      # Generate Typescript bindings.
> +      generator_ts_target_name =
> +          "${target_name}_${dependency_type.name}__ts__generator"
> +      action(generator_ts_target_name) {
> +        script = mojom_generator_script
> +        inputs = mojom_generator_sources + jinja2_sources
> +        sources = sources_list
> +        deps = [
> +          ":$parser_target_name",
> +          "//mojo/public/tools/bindings:precompile_templates",
> +        ]
> +
> +        outputs = ts_outputs
> +        args = common_generator_args
> +        response_file_contents = source_filelist
> +
> +        args += [
> +          "--filelist={{response_file_name}}",
> +          "-g",
> +          "typescript",
> +        ]
> +
> +        if (dependency_type.name == "es_modules") {
> +          args += [ "--ts_use_es_modules" ]
> +        }
> +
> +        # TODO(crbug.com/1007587): Support scramble_message_ids.
> +        # TODO(crbug.com/1007591): Support generate_fuzzing.
> +      }
> +
> +      # Create tsconfig.json for the generated Typescript.
> +      tsconfig_filename =
> +          "$target_gen_dir/$target_name-${dependency_type.name}-tsconfig.json"
> +      tsconfig = {
> +      }
> +      tsconfig.compilerOptions = {
> +        composite = true
> +        target = "es6"
> +        module = "es6"
> +        lib = [
> +          "es6",
> +          "esnext.bigint",
> +        ]
> +        strict = true
> +      }
> +      tsconfig.files = []
> +      foreach(base_path, output_file_base_paths) {
> +        tsconfig.files += [ rebase_path(
> +                "$root_gen_dir/$base_path-lite${dependency_type.ts_extension}",
> +                target_gen_dir,
> +                root_gen_dir) ]
> +      }
> +      tsconfig.references = []
> +
> +      # Get tsconfigs for deps.
> +      foreach(d, all_deps) {
> +        dep_target_gen_dir = rebase_path(get_label_info(d, "target_gen_dir"))
> +        dep_name = get_label_info(d, "name")
> +        reference = {
> +        }
> +        reference.path = "$dep_target_gen_dir/$dep_name-${dependency_type.name}-tsconfig.json"
> +        tsconfig.references += [ reference ]
> +      }
> +      write_file(tsconfig_filename, tsconfig, "json")
> +
> +      # Compile previously generated Typescript to Javascript.
> +      generator_js_target_name =
> +          "${target_name}_${dependency_type.name}__js__generator"
> +      generator_js_target_names += [ generator_js_target_name ]
> +
> +      action(generator_js_target_name) {
> +        script = "$mojom_generator_root/compile_typescript.py"
> +        sources = ts_outputs
> +        outputs = js_outputs
> +        public_deps = [ ":$generator_ts_target_name" ]
> +        foreach(d, all_deps) {
> +          full_name = get_label_info(d, "label_no_toolchain")
> +          public_deps +=
> +              [ "${full_name}_${dependency_type.name}__js__generator" ]
> +        }
> +
> +        absolute_tsconfig_path =
> +            rebase_path(tsconfig_filename, "", target_gen_dir)
> +        args = [ "--tsconfig_path=$absolute_tsconfig_path" ]
> +      }
> +    }
> +
> +    js_target_name = target_name + "_js"
> +    group(js_target_name) {
> +      public_deps = []
> +      if (sources_list != []) {
> +        foreach(generator_js_target_name, generator_js_target_names) {
> +          public_deps += [ ":$generator_js_target_name" ]
> +        }
> +      }
> +
> +      foreach(d, all_deps) {
> +        full_name = get_label_info(d, "label_no_toolchain")
> +        public_deps += [ "${full_name}_js" ]
> +      }
> +    }
> +
> +    group(js_data_deps_target_name) {
> +      data = js_outputs
> +      deps = []
> +      foreach(generator_js_target_name, generator_js_target_names) {
> +        deps += [ ":$generator_js_target_name" ]
> +      }
> +      data_deps = []
> +      foreach(d, all_deps) {
> +        full_name = get_label_info(d, "label_no_toolchain")
> +        data_deps += [ "${full_name}_js_data_deps" ]
> +      }
> +    }
> +  }
> +}
> +
> +# A helper for the mojom() template above when component libraries are desired
> +# for generated C++ bindings units. Supports all the same arguments as mojom()
> +# except for the optional |component_output_prefix| and |component_macro_prefix|
> +# arguments. These are instead shortened to |output_prefix| and |macro_prefix|
> +# and are *required*.
> +template("mojom_component") {
> +  assert(defined(invoker.output_prefix) && defined(invoker.macro_prefix))
> +
> +  mojom(target_name) {
> +    forward_variables_from(invoker,
> +                           "*",
> +                           [
> +                             "output_prefix",
> +                             "macro_prefix",
> +                           ])
> +    component_output_prefix = invoker.output_prefix
> +    component_macro_prefix = invoker.macro_prefix
> +  }
> +}
> diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
> new file mode 100755
> index 00000000..da9efc71
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
> @@ -0,0 +1,390 @@
> +#!/usr/bin/env python
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +"""The frontend for the Mojo bindings system."""
> +
> +from __future__ import print_function
> +
> +import argparse
> +
> +import hashlib
> +import importlib
> +import json
> +import os
> +import pprint
> +import re
> +import struct
> +import sys
> +
> +# Disable lint check for finding modules:
> +# pylint: disable=F0401
> +
> +def _GetDirAbove(dirname):
> +  """Returns the directory "above" this file containing |dirname| (which must
> +  also be "above" this file)."""
> +  path = os.path.abspath(__file__)
> +  while True:
> +    path, tail = os.path.split(path)
> +    assert tail
> +    if tail == dirname:
> +      return path
> +
> +
> +sys.path.insert(
> +    0,
> +    os.path.join(
> +        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
> +
> +from mojom.error import Error
> +import mojom.fileutil as fileutil
> +from mojom.generate.module import Module
> +from mojom.generate import template_expander
> +from mojom.generate import translate
> +from mojom.generate.generator import WriteFile
> +
> +sys.path.append(
> +    os.path.join(_GetDirAbove("mojo"), "tools", "diagnosis"))
> +import crbug_1001171
> +
> +
> +_BUILTIN_GENERATORS = {
> +    "c++": "mojom_cpp_generator",
> +    "javascript": "mojom_js_generator",
> +    "java": "mojom_java_generator",
> +    "mojolpm": "mojom_mojolpm_generator",
> +    "typescript": "mojom_ts_generator",
> +}
> +
> +
> +def LoadGenerators(generators_string):
> +  if not generators_string:
> +    return []  # No generators.
> +
> +  generators = {}
> +  for generator_name in [s.strip() for s in generators_string.split(",")]:
> +    language = generator_name.lower()
> +    if language not in _BUILTIN_GENERATORS:
> +      print("Unknown generator name %s" % generator_name)
> +      sys.exit(1)
> +    generator_module = importlib.import_module(
> +        "generators.%s" % _BUILTIN_GENERATORS[language])
> +    generators[language] = generator_module
> +  return generators
> +
> +
> +def MakeImportStackMessage(imported_filename_stack):
> +  """Make a (human-readable) message listing a chain of imports. (Returned
> +  string begins with a newline (if nonempty) and does not end with one.)"""
> +  return ''.join(
> +      reversed(["\n  %s was imported by %s" % (a, b) for (a, b) in \
> +                    zip(imported_filename_stack[1:], imported_filename_stack)]))
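
For readers skimming this file, the return format is easiest to see by
example (this matches the expectations in mojom_bindings_generator_unittest.py
later in this patch):

    MakeImportStackMessage(["x"])
    # -> ''
    MakeImportStackMessage(["x", "y", "z"])
    # -> '\n  z was imported by y\n  y was imported by x'
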
> +
> +
> +class RelativePath(object):
> +  """Represents a path relative to the source tree or generated output dir."""
> +
> +  def __init__(self, path, source_root, output_dir):
> +    self.path = path
> +    if path.startswith(source_root):
> +      self.root = source_root
> +    elif path.startswith(output_dir):
> +      self.root = output_dir
> +    else:
> +      raise Exception("Invalid input path %s" % path)
> +
> +  def relative_path(self):
> +    return os.path.relpath(
> +        os.path.abspath(self.path), os.path.abspath(self.root))
> +
> +
> +def _GetModulePath(path, output_dir):
> +  return os.path.join(output_dir, path.relative_path() + '-module')
> +
> +
> +def ScrambleMethodOrdinals(interfaces, salt):
> +  already_generated = set()
> +  for interface in interfaces:
> +    i = 0
> +    already_generated.clear()
> +    for method in interface.methods:
> +      if method.explicit_ordinal is not None:
> +        continue
> +      while True:
> +        i = i + 1
> +        if i == 1000000:
> +          raise Exception("Could not generate %d method ordinals for %s" %
> +              (len(interface.methods), interface.mojom_name))
> +        # Generate a scrambled method.ordinal value. The algorithm doesn't have
> +        # to be very strong, cryptographically. It just needs to be non-trivial
> +        # to guess the results without the secret salt, in order to make it
> +        # harder for a compromised process to send fake Mojo messages.
> +        sha256 = hashlib.sha256(salt)
> +        sha256.update(interface.mojom_name.encode('utf-8'))
> +        sha256.update(str(i).encode('utf-8'))
> +        # Take the first 4 bytes as a little-endian uint32.
> +        ordinal = struct.unpack('<L', sha256.digest()[:4])[0]
> +        # Trim to 31 bits, so it always fits into a Java (signed) int.
> +        ordinal = ordinal & 0x7fffffff
> +        if ordinal in already_generated:
> +          continue
> +        already_generated.add(ordinal)
> +        method.ordinal = ordinal
> +        method.ordinal_comment = (
> +            'The %s value is based on sha256(salt + "%s%d").' %
> +            (ordinal, interface.mojom_name, i))
> +        break
> +
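
The scrambling scheme described in the comment above reduces, per attempt, to
something like this sketch (the helper name is mine; the salt and interface
name are whatever the build supplies):

    import hashlib
    import struct

    def candidate_ordinal(salt, mojom_name, i):
      # sha256 over salt + interface name + attempt counter; take the first
      # 4 bytes as a little-endian uint32 and trim to 31 bits so the result
      # always fits a signed Java int.
      digest = hashlib.sha256(
          salt + mojom_name.encode('utf-8') + str(i).encode('utf-8')).digest()
      return struct.unpack('<L', digest[:4])[0] & 0x7fffffff

ScrambleMethodOrdinals() then just retries with the next i until the value is
unique within the interface, and it skips methods that declare an explicit
ordinal.
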
> +
> +def ReadFileContents(filename):
> +  with open(filename, 'rb') as f:
> +    return f.read()
> +
> +
> +class MojomProcessor(object):
> +  """Takes parsed mojom modules and generates language bindings from them.
> +
> +  Attributes:
> +    _processed_files: {Dict[str, mojom.generate.module.Module]} Mapping from
> +        relative mojom filename paths to the module AST for that mojom file.
> +  """
> +  def __init__(self, should_generate):
> +    self._should_generate = should_generate
> +    self._processed_files = {}
> +    self._typemap = {}
> +
> +  def LoadTypemaps(self, typemaps):
> +    # Support some very simple single-line comments in typemap JSON.
> +    comment_expr = r"^\s*//.*$"
> +    def no_comments(line):
> +      return not re.match(comment_expr, line)
> +    for filename in typemaps:
> +      with open(filename) as f:
> +        typemaps = json.loads("".join(filter(no_comments, f.readlines())))
> +        for language, typemap in typemaps.items():
> +          language_map = self._typemap.get(language, {})
> +          language_map.update(typemap)
> +          self._typemap[language] = language_map
> +    if 'c++' in self._typemap:
> +      self._typemap['mojolpm'] = self._typemap['c++']
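
One easy-to-miss detail here: the "comments" support is strictly line-based
(whole lines starting with //), and the per-language dictionaries are merged
across all --typemap files. Roughly (the typemap contents below are
hypothetical, only the shape matters):

    import json
    import re

    # Hypothetical contents of one typemap file passed via --typemap.
    lines = [
        '// Produced by generate_type_mappings.py\n',
        '{\n',
        '  "c++": { "gfx.mojom.Rect": {} }\n',
        '}\n',
    ]
    no_comments = lambda line: not re.match(r'^\s*//.*$', line)
    typemaps = json.loads(''.join(filter(no_comments, lines)))
    # typemaps == {'c++': {'gfx.mojom.Rect': {}}}
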
> +
> +  def _GenerateModule(self, args, remaining_args, generator_modules,
> +                      rel_filename, imported_filename_stack):
> +    # Return the already-generated module.
> +    if rel_filename.path in self._processed_files:
> +      return self._processed_files[rel_filename.path]
> +
> +    if rel_filename.path in imported_filename_stack:
> +      print("%s: Error: Circular dependency" % rel_filename.path + \
> +          MakeImportStackMessage(imported_filename_stack + [rel_filename.path]))
> +      sys.exit(1)
> +
> +    module_path = _GetModulePath(rel_filename, args.output_dir)
> +    with open(module_path, 'rb') as f:
> +      module = Module.Load(f)
> +
> +    if args.scrambled_message_id_salt_paths:
> +      salt = b''.join(
> +          map(ReadFileContents, args.scrambled_message_id_salt_paths))
> +      ScrambleMethodOrdinals(module.interfaces, salt)
> +
> +    if self._should_generate(rel_filename.path):
> +      for language, generator_module in generator_modules.items():
> +        generator = generator_module.Generator(
> +            module, args.output_dir, typemap=self._typemap.get(language, {}),
> +            variant=args.variant, bytecode_path=args.bytecode_path,
> +            for_blink=args.for_blink,
> +            js_bindings_mode=args.js_bindings_mode,
> +            js_generate_struct_deserializers=\
> +                    args.js_generate_struct_deserializers,
> +            export_attribute=args.export_attribute,
> +            export_header=args.export_header,
> +            generate_non_variant_code=args.generate_non_variant_code,
> +            support_lazy_serialization=args.support_lazy_serialization,
> +            disallow_native_types=args.disallow_native_types,
> +            disallow_interfaces=args.disallow_interfaces,
> +            generate_message_ids=args.generate_message_ids,
> +            generate_fuzzing=args.generate_fuzzing,
> +            enable_kythe_annotations=args.enable_kythe_annotations,
> +            extra_cpp_template_paths=args.extra_cpp_template_paths,
> +            generate_extra_cpp_only=args.generate_extra_cpp_only)
> +        filtered_args = []
> +        if hasattr(generator_module, 'GENERATOR_PREFIX'):
> +          prefix = '--' + generator_module.GENERATOR_PREFIX + '_'
> +          filtered_args = [arg for arg in remaining_args
> +                           if arg.startswith(prefix)]
> +        generator.GenerateFiles(filtered_args)
> +
> +    # Save result.
> +    self._processed_files[rel_filename.path] = module
> +    return module
> +
> +
> +def _Generate(args, remaining_args):
> +  if args.variant == "none":
> +    args.variant = None
> +
> +  for idx, import_dir in enumerate(args.import_directories):
> +    tokens = import_dir.split(":")
> +    if len(tokens) >= 2:
> +      args.import_directories[idx] = RelativePath(tokens[0], tokens[1],
> +                                                  args.output_dir)
> +    else:
> +      args.import_directories[idx] = RelativePath(tokens[0], args.depth,
> +                                                  args.output_dir)
> +  generator_modules = LoadGenerators(args.generators_string)
> +
> +  fileutil.EnsureDirectoryExists(args.output_dir)
> +
> +  processor = MojomProcessor(lambda filename: filename in args.filename)
> +  processor.LoadTypemaps(set(args.typemaps))
> +
> +  if args.filelist:
> +    with open(args.filelist) as f:
> +      args.filename.extend(f.read().split())
> +
> +  for filename in args.filename:
> +    processor._GenerateModule(
> +        args, remaining_args, generator_modules,
> +        RelativePath(filename, args.depth, args.output_dir), [])
> +
> +  return 0
> +
> +
> +def _Precompile(args, _):
> +  generator_modules = LoadGenerators(",".join(_BUILTIN_GENERATORS.keys()))
> +
> +  template_expander.PrecompileTemplates(generator_modules, args.output_dir)
> +  return 0
> +
> +
> +def main():
> +  parser = argparse.ArgumentParser(
> +      description="Generate bindings from mojom files.")
> +  parser.add_argument("--use_bundled_pylibs", action="store_true",
> +                      help="use Python modules bundled in the SDK")
> +  parser.add_argument(
> +      "-o",
> +      "--output_dir",
> +      dest="output_dir",
> +      default=".",
> +      help="output directory for generated files")
> +
> +  subparsers = parser.add_subparsers()
> +
> +  generate_parser = subparsers.add_parser(
> +      "generate", description="Generate bindings from mojom files.")
> +  generate_parser.add_argument("filename", nargs="*",
> +                               help="mojom input file")
> +  generate_parser.add_argument("--filelist", help="mojom input file list")
> +  generate_parser.add_argument("-d", "--depth", dest="depth", default=".",
> +                               help="depth from source root")
> +  generate_parser.add_argument("-g",
> +                               "--generators",
> +                               dest="generators_string",
> +                               metavar="GENERATORS",
> +                               default="c++,javascript,java,mojolpm",
> +                               help="comma-separated list of generators")
> +  generate_parser.add_argument(
> +      "--gen_dir", dest="gen_directories", action="append", metavar="directory",
> +      default=[], help="add a directory to be searched for the syntax trees.")
> +  generate_parser.add_argument(
> +      "-I", dest="import_directories", action="append", metavar="directory",
> +      default=[],
> +      help="add a directory to be searched for import files. The depth from "
> +           "source root can be specified for each import by appending it after "
> +           "a colon")
> +  generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP",
> +                               default=[], dest="typemaps",
> +                               help="apply TYPEMAP to generated output")
> +  generate_parser.add_argument("--variant", dest="variant", default=None,
> +                               help="output a named variant of the bindings")
> +  generate_parser.add_argument(
> +      "--bytecode_path", required=True, help=(
> +          "the path from which to load template bytecode; to generate template "
> +          "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename(
> +              sys.argv[0])))
> +  generate_parser.add_argument("--for_blink", action="store_true",
> +                               help="Use WTF types as generated types for mojo "
> +                               "string/array/map.")
> +  generate_parser.add_argument(
> +      "--js_bindings_mode", choices=["new", "old"], default="old",
> +      help="This option only affects the JavaScript bindings. The value could "
> +      "be \"new\" to generate new-style lite JS bindings in addition to the "
> +      "old, or \"old\" to only generate old bindings.")
> +  generate_parser.add_argument(
> +      "--js_generate_struct_deserializers", action="store_true",
> +      help="Generate javascript deserialize methods for structs in "
> +      "mojom-lite.js file")
> +  generate_parser.add_argument(
> +      "--export_attribute", default="",
> +      help="Optional attribute to specify on class declaration to export it "
> +      "for the component build.")
> +  generate_parser.add_argument(
> +      "--export_header", default="",
> +      help="Optional header to include in the generated headers to support the "
> +      "component build.")
> +  generate_parser.add_argument(
> +      "--generate_non_variant_code", action="store_true",
> +      help="Generate code that is shared by different variants.")
> +  generate_parser.add_argument(
> +      "--scrambled_message_id_salt_path",
> +      dest="scrambled_message_id_salt_paths",
> +      help="If non-empty, the path to a file whose contents should be used as"
> +      "a salt for generating scrambled message IDs. If this switch is specified"
> +      "more than once, the contents of all salt files are concatenated to form"
> +      "the salt value.", default=[], action="append")
> +  generate_parser.add_argument(
> +      "--support_lazy_serialization",
> +      help="If set, generated bindings will serialize lazily when possible.",
> +      action="store_true")
> +  generate_parser.add_argument(
> +      "--extra_cpp_template_paths",
> +      dest="extra_cpp_template_paths",
> +      action="append",
> +      metavar="path_to_template",
> +      default=[],
> +      help="Provide a path to a new template (.tmpl) that is used to generate "
> +      "additional C++ source/header files ")
> +  generate_parser.add_argument(
> +      "--generate_extra_cpp_only",
> +      help="If set and extra_cpp_template_paths provided, will only generate"
> +      "extra_cpp_template related C++ bindings",
> +      action="store_true")
> +  generate_parser.add_argument(
> +      "--disallow_native_types",
> +      help="Disallows the [Native] attribute to be specified on structs or "
> +      "enums within the mojom file.", action="store_true")
> +  generate_parser.add_argument(
> +      "--disallow_interfaces",
> +      help="Disallows interface definitions within the mojom file. It is an "
> +      "error to specify this flag when processing a mojom file which defines "
> +      "any interface.", action="store_true")
> +  generate_parser.add_argument(
> +      "--generate_message_ids",
> +      help="Generates only the message IDs header for C++ bindings. Note that "
> +      "this flag only matters if --generate_non_variant_code is also "
> +      "specified.", action="store_true")
> +  generate_parser.add_argument(
> +      "--generate_fuzzing",
> +      action="store_true",
> +      help="Generates additional bindings for fuzzing in JS.")
> +  generate_parser.add_argument(
> +      "--enable_kythe_annotations",
> +      action="store_true",
> +      help="Adds annotations for kythe metadata generation.")
> +
> +  generate_parser.set_defaults(func=_Generate)
> +
> +  precompile_parser = subparsers.add_parser("precompile",
> +      description="Precompile templates for the mojom bindings generator.")
> +  precompile_parser.set_defaults(func=_Precompile)
> +
> +  args, remaining_args = parser.parse_known_args()
> +  return args.func(args, remaining_args)
> +
> +
> +if __name__ == "__main__":
> +  with crbug_1001171.DumpStateOnLookupError():
> +    sys.exit(main())
> diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
> new file mode 100644
> index 00000000..bddbe3f4
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
> @@ -0,0 +1,62 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import unittest
> +
> +from mojom_bindings_generator import MakeImportStackMessage
> +from mojom_bindings_generator import ScrambleMethodOrdinals
> +
> +
> +class FakeIface(object):
> +  def __init__(self):
> +    self.mojom_name = None
> +    self.methods = None
> +
> +
> +class FakeMethod(object):
> +  def __init__(self, explicit_ordinal=None):
> +    self.explicit_ordinal = explicit_ordinal
> +    self.ordinal = explicit_ordinal
> +    self.ordinal_comment = None
> +
> +
> +class MojoBindingsGeneratorTest(unittest.TestCase):
> +  """Tests mojo_bindings_generator."""
> +
> +  def testMakeImportStackMessage(self):
> +    """Tests MakeImportStackMessage()."""
> +    self.assertEqual(MakeImportStackMessage(["x"]), "")
> +    self.assertEqual(MakeImportStackMessage(["x", "y"]),
> +                     "\n  y was imported by x")
> +    self.assertEqual(MakeImportStackMessage(["x", "y", "z"]),
> +                     "\n  z was imported by y\n  y was imported by x")
> +
> +  def testScrambleMethodOrdinals(self):
> +    """Tests ScrambleMethodOrdinals()."""
> +    interface = FakeIface()
> +    interface.mojom_name = 'RendererConfiguration'
> +    interface.methods = [
> +        FakeMethod(),
> +        FakeMethod(),
> +        FakeMethod(),
> +        FakeMethod(explicit_ordinal=42)
> +    ]
> +    ScrambleMethodOrdinals([interface], "foo".encode('utf-8'))
> +    # These next three values are hard-coded. If the generation algorithm
> +    # changes from being based on sha256(seed + interface.name + str(i)) then
> +    # these numbers will obviously need to change too.
> +    #
> +    # Note that hashlib.sha256('fooRendererConfiguration1').digest()[:4] is
> +    # '\xa5\xbc\xf9\xca' and that hex(1257880741) = '0x4af9bca5'. The
> +    # difference in 0x4a vs 0xca is because we only take 31 bits.
> +    self.assertEqual(interface.methods[0].ordinal, 1257880741)
> +    self.assertEqual(interface.methods[1].ordinal, 631133653)
> +    self.assertEqual(interface.methods[2].ordinal, 549336076)
> +
> +    # Explicit method ordinals should not be scrambled.
> +    self.assertEqual(interface.methods[3].ordinal, 42)
> +
> +
> +if __name__ == "__main__":
> +  unittest.main()
> diff --git a/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py b/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
> new file mode 100755
> index 00000000..15f0e3ba
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
> @@ -0,0 +1,119 @@
> +#!/usr/bin/env python
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Downgrades *.mojom files to the old mojo types for remotes and receivers."""
> +
> +import argparse
> +import fnmatch
> +import os
> +import re
> +import shutil
> +import sys
> +import tempfile
> +
> +# List of patterns and replacements to match and use against the contents of a
> +# mojo file. Each replacement string will be used with Python string's format()
> +# function, so the '{}' substring is used to mark where the mojo type should go.
> +_MOJO_REPLACEMENTS = {
> +    r'pending_remote': r'{}',
> +    r'pending_receiver': r'{}&',
> +    r'pending_associated_remote': r'associated {}',
> +    r'pending_associated_receiver': r'associated {}&',
> +}
> +
> +# Pre-compiled regular expression that matches against any of the replacements.
> +_REGEXP_PATTERN = re.compile(
> +    r'|'.join(
> +        ['{}\s*<\s*(.*?)\s*>'.format(k) for k in _MOJO_REPLACEMENTS.keys()]),
> +    flags=re.DOTALL)
> +
> +
> +def ReplaceFunction(match_object):
> +  """Returns the right replacement for the string matched against the regexp."""
> +  for index, (match, repl) in enumerate(_MOJO_REPLACEMENTS.items(), 1):
> +    if match_object.group(0).startswith(match):
> +      return repl.format(match_object.group(index))
> +
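
To make the replacement map above concrete, the substitution ends up rewriting
declarations like these (the interface name is made up):

    s = _REGEXP_PATTERN.sub(ReplaceFunction, 'pending_remote<foo.Frobinator> frob;')
    # s == 'foo.Frobinator frob;'
    s = _REGEXP_PATTERN.sub(ReplaceFunction, 'pending_associated_receiver<foo.Frobinator> frob;')
    # s == 'associated foo.Frobinator& frob;'
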
> +
> +def DowngradeFile(path, output_dir=None):
> +  """Downgrades the mojom file specified by |path| to the old mojo types.
> +
> +  Optionally pass |output_dir| to place the result under a separate output
> +  directory, preserving the relative path to the file included in |path|.
> +  """
> +  # Use a temporary file to dump the new contents after replacing the patterns.
> +  with open(path) as src_mojo_file:
> +    with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_mojo_file:
> +      tmp_contents = _REGEXP_PATTERN.sub(ReplaceFunction, src_mojo_file.read())
> +      tmp_mojo_file.write(tmp_contents)
> +
> +  # Files should be placed in the desired output directory
> +  if output_dir:
> +    output_filepath = os.path.join(output_dir, os.path.basename(path))
> +    if not os.path.exists(output_dir):
> +      os.makedirs(output_dir)
> +  else:
> +    output_filepath = path
> +
> +  # Write the new contents preserving the original file's attributes.
> +  shutil.copystat(path, tmp_mojo_file.name)
> +  shutil.move(tmp_mojo_file.name, output_filepath)
> +
> +  # Make sure to "touch" the new file so that access, modify and change times
> +  # are always newer than the source file's, otherwise Modify time will be kept
> +  # as per the call to shutil.copystat(), causing unnecessary generations of the
> +  # output file in subsequent builds due to ninja considering it dirty.
> +  os.utime(output_filepath, None)
> +
> +
> +def DowngradeDirectory(path, output_dir=None):
> +  """Downgrades mojom files inside directory |path| to the old mojo types.
> +
> +  Optionally pass |output_dir| to place the result under a separate output
> +  directory, preserving the relative path to the file included in |path|.
> +  """
> +  # We don't have recursive glob.glob() nor pathlib.Path.rglob() in Python 2.7
> +  mojom_filepaths = []
> +  for dir_path, _, filenames in os.walk(path):
> +    for filename in fnmatch.filter(filenames, "*mojom"):
> +      mojom_filepaths.append(os.path.join(dir_path, filename))
> +
> +  for path in mojom_filepaths:
> +    absolute_dirpath = os.path.dirname(os.path.abspath(path))
> +    if output_dir:
> +      dest_dirpath = output_dir + absolute_dirpath
> +    else:
> +      dest_dirpath = absolute_dirpath
> +    DowngradeFile(path, dest_dirpath)
> +
> +
> +def DowngradePath(src_path, output_dir=None):
> +  """Downgrades the mojom files pointed by |src_path| to the old mojo types.
> +
> +  Optionally pass |output_dir| to place the result under a separate output
> +  directory, preserving the relative path to the file included in |src_path|.
> +  """
> +  if os.path.isdir(src_path):
> +    DowngradeDirectory(src_path, output_dir)
> +  elif os.path.isfile(src_path):
> +    DowngradeFile(src_path, output_dir)
> +  else:
> +    print(">>> {} not pointing to a valid file or directory".format(src_path))
> +    sys.exit(1)
> +
> +
> +def main():
> +  parser = argparse.ArgumentParser(
> +      description="Downgrade *.mojom files to use the old mojo types.")
> +  parser.add_argument(
> +      "srcpath", help="path to the file or directory to apply the conversion")
> +  parser.add_argument(
> +      "--outdir", help="the directory to place the converted file(s) under")
> +  args = parser.parse_args()
> +
> +  DowngradePath(args.srcpath, args.outdir)
> +
> +
> +if __name__ == "__main__":
> +  sys.exit(main())
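
Per the argparse setup above, the whole script boils down to
"mojom_types_downgrader.py SRCPATH [--outdir OUTDIR]", or from Python
(my sketch, made-up paths):

    DowngradePath('some/dir/of/mojoms', output_dir='out/downgraded')
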
> diff --git a/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py b/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
> new file mode 100755
> index 00000000..f1783d59
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
> @@ -0,0 +1,57 @@
> +#!/usr/bin/env python
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import argparse
> +import json
> +import os
> +import re
> +import sys
> +
> +
> +def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
> +  _SUPPORTED_CONFIG_KEYS = set([
> +      'types', 'traits_headers', 'traits_private_headers', 'traits_sources',
> +      'traits_deps', 'traits_public_deps'
> +  ])
> +  _SUPPORTED_TYPE_KEYS = set([
> +      'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
> +      'move_only', 'nullable_is_same_type'
> +  ])
> +  with open(config_filename, 'r') as f:
> +    for config in json.load(f):
> +      for key in config.keys():
> +        if key not in _SUPPORTED_CONFIG_KEYS:
> +          raise ValueError('Invalid typemap property "%s" when processing %s' %
> +                           (key, target_name))
> +
> +      types = config.get('types')
> +      if not types:
> +        raise ValueError('Typemap for %s must specify at least one type to map'
> +                         % target_name)
> +
> +      for entry in types:
> +        for key in entry.keys():
> +          if key not in _SUPPORTED_TYPE_KEYS:
> +            raise IOError(
> +                'Invalid type property "%s" in typemap for "%s" on target %s' %
> +                (key, entry.get('mojom', '(unknown)'), target_name))
> +
> +  with open(out_filename, 'w') as f:
> +    f.truncate(0)
> +
> +
> +def main():
> +  parser = argparse.ArgumentParser()
> +  _, args = parser.parse_known_args()
> +  if len(args) != 3:
> +    print('Usage: validate_typemap_config.py target_name config_filename '
> +          'stamp_filename')
> +    sys.exit(1)
> +
> +  CheckCppTypemapConfigs(args[0], args[1], args[2])
> +
> +
> +if __name__ == '__main__':
> +  main()
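
For reference, here is the shape of a config that should get past these
checks, written from Python for illustration (my own example, all names made
up; the real thing is a JSON file passed as the second positional argument):

    import json

    config = [{
        'types': [
            {'mojom': 'foo.mojom.Rect', 'cpp': '::foo::Rect', 'move_only': False},
        ],
        'traits_headers': ['foo/rect_mojom_traits.h'],
    }]
    with open('config.json', 'w') as f:
        json.dump(config, f)
    CheckCppTypemapConfigs('some_target', 'config.json', 'stamp_file')
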
> diff --git a/utils/ipc/mojo/public/tools/mojom/README.md b/utils/ipc/mojo/public/tools/mojom/README.md
> new file mode 100644
> index 00000000..6a4ff78a
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/README.md
> @@ -0,0 +1,14 @@
> +# The Mojom Parser
> +
> +The Mojom format is an interface definition language (IDL) for describing
> +interprocess communication (IPC) messages and data types for use with the
> +low-level cross-platform
> +[Mojo IPC library](https://chromium.googlesource.com/chromium/src/+/master/mojo/public/c/system/README.md).
> +
> +This directory consists of a `mojom` Python module, its tests, and supporting
> +command-line tools. The Python module implements the parser used by the
> +command-line tools and exposes an API to help external bindings generators emit
> +useful code from the parser's outputs.
> +
> +TODO(https://crbug.com/1060464): Fill out this documentation once the library
> +and tools have stabilized.
> diff --git a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
> new file mode 100755
> index 00000000..7e746112
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
> @@ -0,0 +1,170 @@
> +#!/usr/bin/env python
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Verifies backward-compatibility of mojom type changes.
> +
> +Given a set of pre- and post-diff mojom file contents, and a root directory
> +for a project, this tool verifies that any changes to [Stable] mojom types are
> +backward-compatible with the previous version.
> +
> +This can be used e.g. by a presubmit check to prevent developers from making
> +breaking changes to stable mojoms."""
> +
> +import argparse
> +import errno
> +import io
> +import json
> +import os
> +import os.path
> +import shutil
> +import six
> +import sys
> +import tempfile
> +
> +from mojom.generate import module
> +from mojom.generate import translate
> +from mojom.parse import parser
> +
> +
> +class ParseError(Exception):
> +  pass
> +
> +
> +def _ValidateDelta(root, delta):
> +  """Parses all modified mojoms (including all transitive mojom dependencies,
> +  even if unmodified) to perform backward-compatibility checks on any types
> +  marked with the [Stable] attribute.
> +
> +  Note that unlike the normal build-time parser in mojom_parser.py, this does
> +  not produce or rely on cached module translations, but instead parses the full
> +  transitive closure of a mojom's input dependencies all at once.
> +  """
> +
> +  # First build a map of all files covered by the delta
> +  affected_files = set()
> +  old_files = {}
> +  new_files = {}
> +  for change in delta:
> +    # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
> +    filename = change['filename'].replace('\\', '/')
> +    affected_files.add(filename)
> +    if change['old']:
> +      old_files[filename] = change['old']
> +    if change['new']:
> +      new_files[filename] = change['new']
> +
> +  # Parse and translate all mojoms relevant to the delta, including transitive
> +  # imports that weren't modified.
> +  unmodified_modules = {}
> +
> +  def parseMojom(mojom, file_overrides, override_modules):
> +    if mojom in unmodified_modules or mojom in override_modules:
> +      return
> +
> +    contents = file_overrides.get(mojom)
> +    if contents:
> +      modules = override_modules
> +    else:
> +      modules = unmodified_modules
> +      with io.open(os.path.join(root, mojom), encoding='utf-8') as f:
> +        contents = f.read()
> +
> +    try:
> +      ast = parser.Parse(contents, mojom)
> +    except Exception as e:
> +      six.reraise(
> +          ParseError,
> +          'encountered exception {0} while parsing {1}'.format(e, mojom),
> +          sys.exc_info()[2])
> +    for imp in ast.import_list:
> +      parseMojom(imp.import_filename, file_overrides, override_modules)
> +
> +    # Now that the transitive set of dependencies has been imported and parsed
> +    # above, translate each mojom AST into a Module so that all types are fully
> +    # defined and can be inspected.
> +    all_modules = {}
> +    all_modules.update(unmodified_modules)
> +    all_modules.update(override_modules)
> +    modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)
> +
> +  old_modules = {}
> +  for mojom in old_files.keys():
> +    parseMojom(mojom, old_files, old_modules)
> +  new_modules = {}
> +  for mojom in new_files.keys():
> +    parseMojom(mojom, new_files, new_modules)
> +
> +  # At this point we have a complete set of translated Modules from both the
> +  # pre- and post-diff mojom contents. Now we can analyze backward-compatibility
> +  # of the deltas.
> +  #
> +  # Note that for backward-compatibility checks we only care about types which
> +  # were marked [Stable] before the diff. Types newly marked as [Stable] are not
> +  # checked.
> +  def collectTypes(modules):
> +    types = {}
> +    for m in modules.values():
> +      for kinds in (m.enums, m.structs, m.unions, m.interfaces):
> +        for kind in kinds:
> +          types[kind.qualified_name] = kind
> +    return types
> +
> +  old_types = collectTypes(old_modules)
> +  new_types = collectTypes(new_modules)
> +
> +  # Collect any renamed types so they can be compared accordingly.
> +  renamed_types = {}
> +  for name, kind in new_types.items():
> +    old_name = kind.attributes and kind.attributes.get('RenamedFrom')
> +    if old_name:
> +      renamed_types[old_name] = name
> +
> +  for qualified_name, kind in old_types.items():
> +    if not kind.stable:
> +      continue
> +
> +    new_name = renamed_types.get(qualified_name, qualified_name)
> +    if new_name not in new_types:
> +      raise Exception(
> +          'Stable type %s appears to be deleted by this change. If it was '
> +          'renamed, please add a [RenamedFrom] attribute to the new type. This '
> +          'can be deleted by a subsequent change.' % qualified_name)
> +
> +    if not new_types[new_name].IsBackwardCompatible(kind):
> +      raise Exception('Stable type %s appears to have changed in a way which '
> +                      'breaks backward-compatibility. Please fix!\n\nIf you '
> +                      'believe this assessment to be incorrect, please file a '
> +                      'Chromium bug against the "Internals>Mojo>Bindings" '
> +                      'component.' % qualified_name)
> +
> +
> +def Run(command_line, delta=None):
> +  """Runs the tool with the given command_line. Normally this will read the
> +  change description from stdin as a JSON-encoded list, but tests may pass a
> +  delta directly for convenience."""
> +  arg_parser = argparse.ArgumentParser(
> +      description='Verifies backward-compatibility of mojom type changes.',
> +      epilog="""
> +This tool reads a change description from stdin and verifies that all modified
> +[Stable] mojom types will retain backward-compatibility. The change description
> +must be a JSON-encoded list of objects, each with a "filename" key (path to a
> +changed mojom file, relative to ROOT); an "old" key whose value is a string of
> +the full file contents before the change, or null if the file is being added;
> +and a "new" key whose value is a string of the full file contents after the
> +change, or null if the file is being deleted.""")
> +  arg_parser.add_argument(
> +      '--src-root',
> +      required=True,
> +      action='store',
> +      metavar='ROOT',
> +      help='The root of the source tree in which the checked mojoms live.')
> +
> +  args, _ = arg_parser.parse_known_args(command_line)
> +  if not delta:
> +    delta = json.load(sys.stdin)
> +  _ValidateDelta(args.src_root, delta)
> +
> +
> +if __name__ == '__main__':
> +  Run(sys.argv[1:])
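
If I read the epilog right, the stdin payload is just a JSON list of
{filename, old, new} objects. Something like this (my example, borrowed
from the style of the unit tests below, which drive the same code through
the delta= parameter instead of stdin):

    delta = [{
        'filename': 'foo/foo.mojom',
        'old': '[Stable] struct S {};',
        'new': '[Stable] struct S { [MinVersion=1] int32 x; };',
    }]
    # Fed as JSON on stdin to:
    #   check_stable_mojom_compatibility.py --src-root <ROOT>
    # or called directly: Run(['--src-root', '/path/to/src'], delta=delta)
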
> diff --git a/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
> new file mode 100755
> index 00000000..9f51ea77
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
> @@ -0,0 +1,260 @@
> +#!/usr/bin/env python
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import json
> +import os
> +import os.path
> +import shutil
> +import tempfile
> +import unittest
> +
> +import check_stable_mojom_compatibility
> +
> +from mojom.generate import module
> +
> +
> +class Change(object):
> +  """Helper to clearly define a mojom file delta to be analyzed."""
> +
> +  def __init__(self, filename, old=None, new=None):
> +    """If old is None, this is a file addition. If new is None, this is a file
> +    deletion. Otherwise it's a file change."""
> +    self.filename = filename
> +    self.old = old
> +    self.new = new
> +
> +
> +class UnchangedFile(Change):
> +  def __init__(self, filename, contents):
> +    super(UnchangedFile, self).__init__(filename, old=contents, new=contents)
> +
> +
> +class CheckStableMojomCompatibilityTest(unittest.TestCase):
> +  """Tests covering the behavior of the compatibility checking tool. Note that
> +  details of different compatibility checks and relevant failure modes are NOT
> +  covered by these tests. Those are instead covered by unittests in
> +  version_compatibility_unittest.py. Additionally, the tests which ensure a
> +  given set of [Stable] mojom definitions are indeed plausibly stable (i.e. they
> +  have no unstable dependencies) are covered by stable_attribute_unittest.py.
> +
> +  These tests cover higher-level concerns of the compatibility checking tool,
> +  like file or symbol renames, changes spread over multiple files, etc."""
> +
> +  def verifyBackwardCompatibility(self, changes):
> +    """Helper for implementing assertBackwardCompatible and
> +    assertNotBackwardCompatible"""
> +
> +    temp_dir = tempfile.mkdtemp()
> +    for change in changes:
> +      if change.old:
> +        # Populate the old file on disk in our temporary fake source root
> +        file_path = os.path.join(temp_dir, change.filename)
> +        dir_path = os.path.dirname(file_path)
> +        if not os.path.exists(dir_path):
> +          os.makedirs(dir_path)
> +        with open(file_path, 'w') as f:
> +          f.write(change.old)
> +
> +    delta = []
> +    for change in changes:
> +      if change.old != change.new:
> +        delta.append({
> +            'filename': change.filename,
> +            'old': change.old,
> +            'new': change.new
> +        })
> +
> +    try:
> +      check_stable_mojom_compatibility.Run(['--src-root', temp_dir],
> +                                           delta=delta)
> +    finally:
> +      shutil.rmtree(temp_dir)
> +
> +  def assertBackwardCompatible(self, changes):
> +    self.verifyBackwardCompatibility(changes)
> +
> +  def assertNotBackwardCompatible(self, changes):
> +    try:
> +      self.verifyBackwardCompatibility(changes)
> +    except Exception:
> +      return
> +
> +    raise Exception('Change unexpectedly passed a backward-compatibility check')
> +
> +  def testBasicCompatibility(self):
> +    """Minimal smoke test to verify acceptance of a simple valid change."""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='[Stable] struct S {};',
> +               new='[Stable] struct S { [MinVersion=1] int32 x; };')
> +    ])
> +
> +  def testBasicIncompatibility(self):
> +    """Minimal smoke test to verify rejection of a simple invalid change."""
> +    self.assertNotBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='[Stable] struct S {};',
> +               new='[Stable] struct S { int32 x; };')
> +    ])
> +
> +  def testIgnoreIfNotStable(self):
> +    """We don't care about types not marked [Stable]"""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='struct S {};',
> +               new='struct S { int32 x; };')
> +    ])
> +
> +  def testRename(self):
> +    """We can do checks for renamed types."""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='[Stable] struct S {};',
> +               new='[Stable, RenamedFrom="S"] struct T {};')
> +    ])
> +    self.assertNotBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='[Stable] struct S {};',
> +               new='[Stable, RenamedFrom="S"] struct T { int32 x; };')
> +    ])
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='[Stable] struct S {};',
> +               new="""\
> +               [Stable, RenamedFrom="S"]
> +               struct T { [MinVersion=1] int32 x; };
> +               """)
> +    ])
> +
> +  def testNewlyStable(self):
> +    """We don't care about types newly marked as [Stable]."""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old='struct S {};',
> +               new='[Stable] struct S { int32 x; };')
> +    ])
> +
> +  def testFileRename(self):
> +    """Make sure we can still do compatibility checks after a file rename."""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
> +        Change('bar/bar.mojom',
> +               old=None,
> +               new='[Stable] struct S { [MinVersion=1] int32 x; };')
> +    ])
> +    self.assertNotBackwardCompatible([
> +        Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
> +        Change('bar/bar.mojom', old=None, new='[Stable] struct S { int32 x; };')
> +    ])
> +
> +  def testWithImport(self):
> +    """Ensure that cross-module dependencies do not break the compatibility
> +    checking tool."""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old="""\
> +               module foo;
> +               [Stable] struct S {};
> +               """,
> +               new="""\
> +               module foo;
> +               [Stable] struct S { [MinVersion=2] int32 x; };
> +               """),
> +        Change('bar/bar.mojom',
> +               old="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; };
> +               """,
> +               new="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; [MinVersion=1] int32 y; };
> +               """)
> +    ])
> +
> +  def testWithMovedDefinition(self):
> +    """If a definition moves from one file to another, we should still be able
> +    to check compatibility accurately."""
> +    self.assertBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old="""\
> +               module foo;
> +               [Stable] struct S {};
> +               """,
> +               new="""\
> +               module foo;
> +               """),
> +        Change('bar/bar.mojom',
> +               old="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; };
> +               """,
> +               new="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable, RenamedFrom="foo.S"] struct S {
> +                 [MinVersion=2] int32 x;
> +               };
> +               [Stable] struct T { S s; [MinVersion=1] int32 y; };
> +               """)
> +    ])
> +
> +    self.assertNotBackwardCompatible([
> +        Change('foo/foo.mojom',
> +               old="""\
> +               module foo;
> +               [Stable] struct S {};
> +               """,
> +               new="""\
> +               module foo;
> +               """),
> +        Change('bar/bar.mojom',
> +               old="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; };
> +               """,
> +               new="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable, RenamedFrom="foo.S"] struct S { int32 x; };
> +               [Stable] struct T { S s; [MinVersion=1] int32 y; };
> +               """)
> +    ])
> +
> +  def testWithUnmodifiedImport(self):
> +    """Unchanged files in the filesystem are still parsed by the compatibility
> +    checking tool if they're imported by a changed file."""
> +    self.assertBackwardCompatible([
> +        UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
> +        Change('bar/bar.mojom',
> +               old="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; };
> +               """,
> +               new="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; [MinVersion=1] int32 x; };
> +               """)
> +    ])
> +
> +    self.assertNotBackwardCompatible([
> +        UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
> +        Change('bar/bar.mojom',
> +               old="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; };
> +               """,
> +               new="""\
> +               module bar;
> +               import "foo/foo.mojom";
> +               [Stable] struct T { foo.S s; int32 x; };
> +               """)
> +    ])
> diff --git a/utils/ipc/mojo/public/tools/mojom/const_unittest.py b/utils/ipc/mojo/public/tools/mojom/const_unittest.py
> new file mode 100644
> index 00000000..cb42dfac
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/const_unittest.py
> @@ -0,0 +1,90 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from mojom_parser_test_case import MojomParserTestCase
> +from mojom.generate import module as mojom
> +
> +
> +class ConstTest(MojomParserTestCase):
> +  """Tests constant parsing behavior."""
> +
> +  def testLiteralInt(self):
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'const int32 k = 42;')
> +    self.ParseMojoms([a_mojom])
> +    a = self.LoadModule(a_mojom)
> +    self.assertEqual(1, len(a.constants))
> +    self.assertEqual('k', a.constants[0].mojom_name)
> +    self.assertEqual('42', a.constants[0].value)
> +
> +  def testLiteralFloat(self):
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'const float k = 42.5;')
> +    self.ParseMojoms([a_mojom])
> +    a = self.LoadModule(a_mojom)
> +    self.assertEqual(1, len(a.constants))
> +    self.assertEqual('k', a.constants[0].mojom_name)
> +    self.assertEqual('42.5', a.constants[0].value)
> +
> +  def testLiteralString(self):
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'const string k = "woot";')
> +    self.ParseMojoms([a_mojom])
> +    a = self.LoadModule(a_mojom)
> +    self.assertEqual(1, len(a.constants))
> +    self.assertEqual('k', a.constants[0].mojom_name)
> +    self.assertEqual('"woot"', a.constants[0].value)
> +
> +  def testEnumConstant(self):
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'module a; enum E { kA = 41, kB };')
> +    b_mojom = 'b.mojom'
> +    self.WriteFile(
> +        b_mojom, """\
> +      import "a.mojom";
> +      const a.E kE1 = a.E.kB;
> +
> +      // We also allow value names to be unqualified, implying scope from the
> +      // constant's type.
> +      const a.E kE2 = kB;
> +      """)
> +    self.ParseMojoms([a_mojom, b_mojom])
> +    a = self.LoadModule(a_mojom)
> +    b = self.LoadModule(b_mojom)
> +    self.assertEqual(1, len(a.enums))
> +    self.assertEqual('E', a.enums[0].mojom_name)
> +    self.assertEqual(2, len(b.constants))
> +    self.assertEqual('kE1', b.constants[0].mojom_name)
> +    self.assertEqual(a.enums[0], b.constants[0].kind)
> +    self.assertEqual(a.enums[0].fields[1], b.constants[0].value.field)
> +    self.assertEqual(42, b.constants[0].value.field.numeric_value)
> +    self.assertEqual('kE2', b.constants[1].mojom_name)
> +    self.assertEqual(a.enums[0].fields[1], b.constants[1].value.field)
> +    self.assertEqual(42, b.constants[1].value.field.numeric_value)
> +
> +  def testConstantReference(self):
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'const int32 kA = 42; const int32 kB = kA;')
> +    self.ParseMojoms([a_mojom])
> +    a = self.LoadModule(a_mojom)
> +    self.assertEqual(2, len(a.constants))
> +    self.assertEqual('kA', a.constants[0].mojom_name)
> +    self.assertEqual('42', a.constants[0].value)
> +    self.assertEqual('kB', a.constants[1].mojom_name)
> +    self.assertEqual('42', a.constants[1].value)
> +
> +  def testImportedConstantReference(self):
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'const int32 kA = 42;')
> +    b_mojom = 'b.mojom'
> +    self.WriteFile(b_mojom, 'import "a.mojom"; const int32 kB = kA;')
> +    self.ParseMojoms([a_mojom, b_mojom])
> +    a = self.LoadModule(a_mojom)
> +    b = self.LoadModule(b_mojom)
> +    self.assertEqual(1, len(a.constants))
> +    self.assertEqual(1, len(b.constants))
> +    self.assertEqual('kA', a.constants[0].mojom_name)
> +    self.assertEqual('42', a.constants[0].value)
> +    self.assertEqual('kB', b.constants[0].mojom_name)
> +    self.assertEqual('42', b.constants[0].value)
> diff --git a/utils/ipc/mojo/public/tools/mojom/enum_unittest.py b/utils/ipc/mojo/public/tools/mojom/enum_unittest.py
> new file mode 100644
> index 00000000..d9005078
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/enum_unittest.py
> @@ -0,0 +1,92 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from mojom_parser_test_case import MojomParserTestCase
> +
> +
> +class EnumTest(MojomParserTestCase):
> +  """Tests enum parsing behavior."""
> +
> +  def testExplicitValues(self):
> +    """Verifies basic parsing of assigned integral values."""
> +    types = self.ExtractTypes('enum E { kFoo=0, kBar=2, kBaz };')
> +    self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
> +    self.assertEqual(0, types['E'].fields[0].numeric_value)
> +    self.assertEqual('kBar', types['E'].fields[1].mojom_name)
> +    self.assertEqual(2, types['E'].fields[1].numeric_value)
> +    self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
> +    self.assertEqual(3, types['E'].fields[2].numeric_value)
> +
> +  def testImplicitValues(self):
> +    """Verifies basic automatic assignment of integral values at parse time."""
> +    types = self.ExtractTypes('enum E { kFoo, kBar, kBaz };')
> +    self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
> +    self.assertEqual(0, types['E'].fields[0].numeric_value)
> +    self.assertEqual('kBar', types['E'].fields[1].mojom_name)
> +    self.assertEqual(1, types['E'].fields[1].numeric_value)
> +    self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
> +    self.assertEqual(2, types['E'].fields[2].numeric_value)
> +
> +  def testSameEnumReference(self):
> +    """Verifies that an enum value can be assigned from the value of another
> +    field within the same enum."""
> +    types = self.ExtractTypes('enum E { kA, kB, kFirst=kA };')
> +    self.assertEqual('kA', types['E'].fields[0].mojom_name)
> +    self.assertEqual(0, types['E'].fields[0].numeric_value)
> +    self.assertEqual('kB', types['E'].fields[1].mojom_name)
> +    self.assertEqual(1, types['E'].fields[1].numeric_value)
> +    self.assertEqual('kFirst', types['E'].fields[2].mojom_name)
> +    self.assertEqual(0, types['E'].fields[2].numeric_value)
> +
> +  def testSameModuleOtherEnumReference(self):
> +    """Verifies that an enum value can be assigned from the value of a field
> +    in another enum within the same module."""
> +    types = self.ExtractTypes('enum E { kA, kB }; enum F { kA = E.kB };')
> +    self.assertEqual(1, types['F'].fields[0].numeric_value)
> +
> +  def testImportedEnumReference(self):
> +    """Verifies that an enum value can be assigned from the value of a field
> +    in another enum within a different module."""
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'module a; enum E { kFoo=42, kBar };')
> +    b_mojom = 'b.mojom'
> +    self.WriteFile(b_mojom,
> +                   'module b; import "a.mojom"; enum F { kFoo = a.E.kBar };')
> +    self.ParseMojoms([a_mojom, b_mojom])
> +    b = self.LoadModule(b_mojom)
> +
> +    self.assertEqual('F', b.enums[0].mojom_name)
> +    self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
> +    self.assertEqual(43, b.enums[0].fields[0].numeric_value)
> +
> +  def testConstantReference(self):
> +    """Verifies that an enum value can be assigned from the value of an
> +    integral constant within the same module."""
> +    types = self.ExtractTypes('const int32 kFoo = 42; enum E { kA = kFoo };')
> +    self.assertEqual(42, types['E'].fields[0].numeric_value)
> +
> +  def testInvalidConstantReference(self):
> +    """Verifies that enum values cannot be assigned from the value of
> +    non-integral constants."""
> +    with self.assertRaisesRegexp(ValueError, 'not an integer'):
> +      self.ExtractTypes('const float kFoo = 1.0; enum E { kA = kFoo };')
> +    with self.assertRaisesRegexp(ValueError, 'not an integer'):
> +      self.ExtractTypes('const double kFoo = 1.0; enum E { kA = kFoo };')
> +    with self.assertRaisesRegexp(ValueError, 'not an integer'):
> +      self.ExtractTypes('const string kFoo = "lol"; enum E { kA = kFoo };')
> +
> +  def testImportedConstantReference(self):
> +    """Verifies that an enum value can be assigned from the value of an integral
> +    constant within an imported module."""
> +    a_mojom = 'a.mojom'
> +    self.WriteFile(a_mojom, 'module a; const int32 kFoo = 37;')
> +    b_mojom = 'b.mojom'
> +    self.WriteFile(b_mojom,
> +                   'module b; import "a.mojom"; enum F { kFoo = a.kFoo };')
> +    self.ParseMojoms([a_mojom, b_mojom])
> +    b = self.LoadModule(b_mojom)
> +
> +    self.assertEqual('F', b.enums[0].mojom_name)
> +    self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
> +    self.assertEqual(37, b.enums[0].fields[0].numeric_value)
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn b/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
> new file mode 100644
> index 00000000..7416ef19
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
> @@ -0,0 +1,43 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +group("mojom") {
> +  data = [
> +    "__init__.py",
> +    "error.py",
> +    "fileutil.py",
> +    "generate/__init__.py",
> +    "generate/constant_resolver.py",
> +    "generate/generator.py",
> +    "generate/module.py",
> +    "generate/pack.py",
> +    "generate/template_expander.py",
> +    "generate/translate.py",
> +    "parse/__init__.py",
> +    "parse/ast.py",
> +    "parse/conditional_features.py",
> +    "parse/lexer.py",
> +    "parse/parser.py",
> +
> +    # Third-party module dependencies
> +    "//third_party/jinja2/",
> +    "//third_party/ply/",
> +  ]
> +}
> +
> +group("tests") {
> +  data = [
> +    "fileutil_unittest.py",
> +    "generate/generator_unittest.py",
> +    "generate/module_unittest.py",
> +    "generate/pack_unittest.py",
> +    "generate/translate_unittest.py",
> +    "parse/ast_unittest.py",
> +    "parse/conditional_features_unittest.py",
> +    "parse/lexer_unittest.py",
> +    "parse/parser_unittest.py",
> +  ]
> +
> +  public_deps = [ ":mojom" ]
> +}
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/__init__.py b/utils/ipc/mojo/public/tools/mojom/mojom/__init__.py
> new file mode 100644
> index 00000000..e69de29b
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/error.py b/utils/ipc/mojo/public/tools/mojom/mojom/error.py
> new file mode 100644
> index 00000000..8a1e03da
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/error.py
> @@ -0,0 +1,28 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +
> +class Error(Exception):
> +  """Base class for Mojo IDL bindings parser/generator errors."""
> +
> +  def __init__(self, filename, message, lineno=None, addenda=None, **kwargs):
> +    """|filename| is the (primary) file which caused the error, |message| is the
> +    error message, |lineno| is the 1-based line number (or |None| if not
> +    applicable/available), and |addenda| is a list of additional lines to append
> +    to the final error message."""
> +    Exception.__init__(self, **kwargs)
> +    self.filename = filename
> +    self.message = message
> +    self.lineno = lineno
> +    self.addenda = addenda
> +
> +  def __str__(self):
> +    if self.lineno:
> +      s = "%s:%d: Error: %s" % (self.filename, self.lineno, self.message)
> +    else:
> +      s = "%s: Error: %s" % (self.filename, self.message)
> +    return "\n".join([s] + self.addenda) if self.addenda else s
> +
> +  def __repr__(self):
> +    return str(self)
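
A tiny illustration of the formatting above (mine, made-up values):

    str(Error('foo.mojom', 'something went wrong', lineno=12,
              addenda=['  while processing struct S']))
    # -> 'foo.mojom:12: Error: something went wrong\n  while processing struct S'
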
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
> new file mode 100644
> index 00000000..bf626f54
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
> @@ -0,0 +1,45 @@
> +# Copyright 2015 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import errno
> +import imp
> +import os.path
> +import sys
> +
> +
> +def _GetDirAbove(dirname):
> +  """Returns the directory "above" this file containing |dirname| (which must
> +  also be "above" this file)."""
> +  path = os.path.abspath(__file__)
> +  while True:
> +    path, tail = os.path.split(path)
> +    if not tail:
> +      return None
> +    if tail == dirname:
> +      return path
> +
> +
> +def EnsureDirectoryExists(path, always_try_to_create=False):
> +  """A wrapper for os.makedirs that does not error if the directory already
> +  exists. A different process could be racing to create this directory."""
> +
> +  if not os.path.exists(path) or always_try_to_create:
> +    try:
> +      os.makedirs(path)
> +    except OSError as e:
> +      # There may have been a race to create this directory.
> +      if e.errno != errno.EEXIST:
> +        raise
> +
> +
> +def AddLocalRepoThirdPartyDirToModulePath():
> +  """Helper function to find the top-level directory of this script's repository
> +  assuming the script falls somewhere within a 'mojo' directory, and insert the
> +  top-level 'third_party' directory early in the module search path. Used to
> +  ensure that third-party dependencies provided within the repository itself
> +  (e.g. Chromium sources include snapshots of jinja2 and ply) are preferred over
> +  locally installed system library packages."""
> +  toplevel_dir = _GetDirAbove('mojo')
> +  if toplevel_dir:
> +    sys.path.insert(1, os.path.join(toplevel_dir, 'third_party'))
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
> new file mode 100644
> index 00000000..ff5753a2
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
> @@ -0,0 +1,40 @@
> +# Copyright 2015 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import shutil
> +import sys
> +import tempfile
> +import unittest
> +
> +from mojom import fileutil
> +
> +
> +class FileUtilTest(unittest.TestCase):
> +  def testEnsureDirectoryExists(self):
> +    """Test that EnsureDirectoryExists fuctions correctly."""
> +
> +    temp_dir = tempfile.mkdtemp()
> +    try:
> +      self.assertTrue(os.path.exists(temp_dir))
> +
> +      # Directory does not exist, yet.
> +      full = os.path.join(temp_dir, "foo", "bar")
> +      self.assertFalse(os.path.exists(full))
> +
> +      # Create the directory.
> +      fileutil.EnsureDirectoryExists(full)
> +      self.assertTrue(os.path.exists(full))
> +
> +      # Trying to create it again does not cause an error.
> +      fileutil.EnsureDirectoryExists(full)
> +      self.assertTrue(os.path.exists(full))
> +
> +      # Bypass check for directory existence to tickle error handling that
> +      # occurs in response to a race.
> +      fileutil.EnsureDirectoryExists(full, always_try_to_create=True)
> +      self.assertTrue(os.path.exists(full))
> +    finally:
> +      shutil.rmtree(temp_dir)
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/__init__.py
> new file mode 100644
> index 00000000..e69de29b
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
> new file mode 100644
> index 00000000..0dfd996e
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/constant_resolver.py
> @@ -0,0 +1,93 @@
> +# Copyright 2015 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Resolves the values used for constants and enums."""
> +
> +from itertools import ifilter
> +
> +from mojom.generate import module as mojom
> +
> +
> +def ResolveConstants(module, expression_to_text):
> +  in_progress = set()
> +  computed = set()
> +
> +  def GetResolvedValue(named_value):
> +    assert isinstance(named_value, (mojom.EnumValue, mojom.ConstantValue))
> +    if isinstance(named_value, mojom.EnumValue):
> +      field = next(
> +          ifilter(lambda field: field.name == named_value.name,
> +                  named_value.enum.fields), None)
> +      if not field:
> +        raise RuntimeError(
> +            'Unable to get computed value for field %s of enum %s' %
> +            (named_value.name, named_value.enum.name))
> +      if field not in computed:
> +        ResolveEnum(named_value.enum)
> +      return field.resolved_value
> +    else:
> +      ResolveConstant(named_value.constant)
> +      named_value.resolved_value = named_value.constant.resolved_value
> +      return named_value.resolved_value
> +
> +  def ResolveConstant(constant):
> +    if constant in computed:
> +      return
> +    if constant in in_progress:
> +      raise RuntimeError('Circular dependency for constant: %s' % constant.name)
> +    in_progress.add(constant)
> +    if isinstance(constant.value, (mojom.EnumValue, mojom.ConstantValue)):
> +      resolved_value = GetResolvedValue(constant.value)
> +    else:
> +      resolved_value = expression_to_text(constant.value)
> +    constant.resolved_value = resolved_value
> +    in_progress.remove(constant)
> +    computed.add(constant)
> +
> +  def ResolveEnum(enum):
> +    def ResolveEnumField(enum, field, default_value):
> +      if field in computed:
> +        return
> +      if field in in_progress:
> +        raise RuntimeError('Circular dependency for enum: %s' % enum.name)
> +      in_progress.add(field)
> +      if field.value:
> +        if isinstance(field.value, mojom.EnumValue):
> +          resolved_value = GetResolvedValue(field.value)
> +        elif isinstance(field.value, str):
> +          resolved_value = int(field.value, 0)
> +        else:
> +          raise RuntimeError('Unexpected value: %s' % field.value)
> +      else:
> +        resolved_value = default_value
> +      field.resolved_value = resolved_value
> +      in_progress.remove(field)
> +      computed.add(field)
> +
> +    current_value = 0
> +    for field in enum.fields:
> +      ResolveEnumField(enum, field, current_value)
> +      current_value = field.resolved_value + 1
> +
> +  for constant in module.constants:
> +    ResolveConstant(constant)
> +
> +  for enum in module.enums:
> +    ResolveEnum(enum)
> +
> +  for struct in module.structs:
> +    for constant in struct.constants:
> +      ResolveConstant(constant)
> +    for enum in struct.enums:
> +      ResolveEnum(enum)
> +    for field in struct.fields:
> +      if isinstance(field.default, (mojom.ConstantValue, mojom.EnumValue)):
> +        field.default.resolved_value = GetResolvedValue(field.default)
> +
> +  for interface in module.interfaces:
> +    for constant in interface.constants:
> +      ResolveConstant(constant)
> +    for enum in interface.enums:
> +      ResolveEnum(enum)
> +
> +  return module
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
> new file mode 100644
> index 00000000..de62260a
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
> @@ -0,0 +1,325 @@
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Code shared by the various language-specific code generators."""
> +
> +from __future__ import print_function
> +
> +from functools import partial
> +import os.path
> +import re
> +
> +from mojom import fileutil
> +from mojom.generate import module as mojom
> +from mojom.generate import pack
> +
> +
> +def ExpectedArraySize(kind):
> +  if mojom.IsArrayKind(kind):
> +    return kind.length
> +  return None
> +
> +
> +def SplitCamelCase(identifier):
> +  """Splits a camel-cased |identifier| and returns a list of lower-cased
> +  strings.
> +  """
> +  # Add underscores after uppercase letters when appropriate. An uppercase
> +  # letter is considered the end of a word if it is followed by an upper and a
> +  # lower. E.g. URLLoaderFactory -> URL_LoaderFactory
> +  identifier = re.sub('([A-Z][0-9]*)(?=[A-Z][0-9]*[a-z])', r'\1_', identifier)
> +  # Add underscores after lowercase letters when appropriate. A lowercase letter
> +  # is considered the end of a word if it is followed by an upper.
> +  # E.g. URLLoaderFactory -> URLLoader_Factory
> +  identifier = re.sub('([a-z][0-9]*)(?=[A-Z])', r'\1_', identifier)
> +  return [x.lower() for x in identifier.split('_')]
> +
> +
> +def ToCamel(identifier, lower_initial=False, digits_split=False, delimiter='_'):
> +  """Splits |identifier| using |delimiter|, makes the first character of each
> +  word uppercased (but makes the first character of the first word lowercased
> +  if |lower_initial| is set to True), and joins the words. Please note that for
> +  each word, all the characters except the first one are untouched.
> +  """
> +  result = ''
> +  capitalize_next = True
> +  for i in range(len(identifier)):
> +    if identifier[i] == delimiter:
> +      capitalize_next = True
> +    elif digits_split and identifier[i].isdigit():
> +      capitalize_next = True
> +      result += identifier[i]
> +    elif capitalize_next:
> +      capitalize_next = False
> +      result += identifier[i].upper()
> +    else:
> +      result += identifier[i]
> +
> +  if lower_initial and result:
> +    result = result[0].lower() + result[1:]
> +
> +  return result
> +
> +
> +def _ToSnakeCase(identifier, upper=False):
> +  """Splits camel-cased |identifier| into lower case words, removes the first
> +  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
> +  "URL_LOADER_FACTORY" if upper, otherwise "url_loader_factory".
> +  """
> +  words = SplitCamelCase(identifier)
> +  if words[0] == 'k' and len(words) > 1:
> +    words = words[1:]
> +
> +  # Variables cannot start with a digit
> +  if (words[0][0].isdigit()):
> +    words[0] = '_' + words[0]
> +
> +
> +  if upper:
> +    words = map(lambda x: x.upper(), words)
> +
> +  return '_'.join(words)
> +
> +
> +def ToUpperSnakeCase(identifier):
> +  """Splits camel-cased |identifier| into lower case words, removes the first
> +  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
> +  "URL_LOADER_FACTORY".
> +  """
> +  return _ToSnakeCase(identifier, upper=True)
> +
> +
> +def ToLowerSnakeCase(identifier):
> +  """Splits camel-cased |identifier| into lower case words, removes the first
> +  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
> +  "url_loader_factory".
> +  """
> +  return _ToSnakeCase(identifier, upper=False)
> +
> +
> +class Stylizer(object):
> +  """Stylizers specify naming rules to map mojom names to names in generated
> +  code. For example, if you would like method_name in mojom to be mapped to
> +  MethodName in the generated code, you need to define a subclass of Stylizer
> +  and override StylizeMethod to do the conversion."""
> +
> +  def StylizeConstant(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeField(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeStruct(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeUnion(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeParameter(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeMethod(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeInterface(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeEnumField(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeEnum(self, mojom_name):
> +    return mojom_name
> +
> +  def StylizeModule(self, mojom_namespace):
> +    return mojom_namespace
> +
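
A minimal subclass of the kind the comment above describes would look
something like this (my sketch, not from the patch):

    class CamelMethodStylizer(Stylizer):
        def StylizeMethod(self, mojom_name):
            # "method_name" -> "MethodName", reusing ToCamel() from this file.
            return ToCamel(mojom_name)
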
> +
> +def WriteFile(contents, full_path):
> +  # If |contents| is the same as the file content, we skip updating.
> +  if os.path.isfile(full_path):
> +    with open(full_path, 'rb') as destination_file:
> +      if destination_file.read() == contents:
> +        return
> +
> +  # Make sure the containing directory exists.
> +  full_dir = os.path.dirname(full_path)
> +  fileutil.EnsureDirectoryExists(full_dir)
> +
> +  # Dump the data to disk.
> +  with open(full_path, "wb") as f:
> +    if not isinstance(contents, bytes):
> +      f.write(contents.encode('utf-8'))
> +    else:
> +      f.write(contents)
> +
> +
> +def AddComputedData(module):
> +  """Adds computed data to the given module. The data is computed once and
> +  used repeatedly in the generation process."""
> +
> +  def _AddStructComputedData(exported, struct):
> +    struct.packed = pack.PackedStruct(struct)
> +    struct.bytes = pack.GetByteLayout(struct.packed)
> +    struct.versions = pack.GetVersionInfo(struct.packed)
> +    struct.exported = exported
> +
> +  def _AddInterfaceComputedData(interface):
> +    interface.version = 0
> +    for method in interface.methods:
> +      # this field is never scrambled
> +      method.sequential_ordinal = method.ordinal
> +
> +      if method.min_version is not None:
> +        interface.version = max(interface.version, method.min_version)
> +
> +      method.param_struct = _GetStructFromMethod(method)
> +      if interface.stable:
> +        method.param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
> +        if method.explicit_ordinal is None:
> +          raise Exception(
> +              'Stable interfaces must declare explicit method ordinals. The '
> +              'method %s on stable interface %s does not declare an explicit '
> +              'ordinal.' % (method.mojom_name, interface.qualified_name))
> +      interface.version = max(interface.version,
> +                              method.param_struct.versions[-1].version)
> +
> +      if method.response_parameters is not None:
> +        method.response_param_struct = _GetResponseStructFromMethod(method)
> +        if interface.stable:
> +          method.response_param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
> +        interface.version = max(
> +            interface.version,
> +            method.response_param_struct.versions[-1].version)
> +      else:
> +        method.response_param_struct = None
> +
> +  def _GetStructFromMethod(method):
> +    """Converts a method's parameters into the fields of a struct."""
> +    params_class = "%s_%s_Params" % (method.interface.mojom_name,
> +                                     method.mojom_name)
> +    struct = mojom.Struct(params_class,
> +                          module=method.interface.module,
> +                          attributes={})
> +    for param in method.parameters:
> +      struct.AddField(
> +          param.mojom_name,
> +          param.kind,
> +          param.ordinal,
> +          attributes=param.attributes)
> +    _AddStructComputedData(False, struct)
> +    return struct
> +
> +  def _GetResponseStructFromMethod(method):
> +    """Converts a method's response_parameters into the fields of a struct."""
> +    params_class = "%s_%s_ResponseParams" % (method.interface.mojom_name,
> +                                             method.mojom_name)
> +    struct = mojom.Struct(params_class,
> +                          module=method.interface.module,
> +                          attributes={})
> +    for param in method.response_parameters:
> +      struct.AddField(
> +          param.mojom_name,
> +          param.kind,
> +          param.ordinal,
> +          attributes=param.attributes)
> +    _AddStructComputedData(False, struct)
> +    return struct
> +
> +  for struct in module.structs:
> +    _AddStructComputedData(True, struct)
> +  for interface in module.interfaces:
> +    _AddInterfaceComputedData(interface)
> +
> +
> +class Generator(object):
> +  # Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
> +  # files to stdout.
> +  def __init__(self,
> +               module,
> +               output_dir=None,
> +               typemap=None,
> +               variant=None,
> +               bytecode_path=None,
> +               for_blink=False,
> +               js_bindings_mode="new",
> +               js_generate_struct_deserializers=False,
> +               export_attribute=None,
> +               export_header=None,
> +               generate_non_variant_code=False,
> +               support_lazy_serialization=False,
> +               disallow_native_types=False,
> +               disallow_interfaces=False,
> +               generate_message_ids=False,
> +               generate_fuzzing=False,
> +               enable_kythe_annotations=False,
> +               extra_cpp_template_paths=None,
> +               generate_extra_cpp_only=False):
> +    self.module = module
> +    self.output_dir = output_dir
> +    self.typemap = typemap or {}
> +    self.variant = variant
> +    self.bytecode_path = bytecode_path
> +    self.for_blink = for_blink
> +    self.js_bindings_mode = js_bindings_mode
> +    self.js_generate_struct_deserializers = js_generate_struct_deserializers
> +    self.export_attribute = export_attribute
> +    self.export_header = export_header
> +    self.generate_non_variant_code = generate_non_variant_code
> +    self.support_lazy_serialization = support_lazy_serialization
> +    self.disallow_native_types = disallow_native_types
> +    self.disallow_interfaces = disallow_interfaces
> +    self.generate_message_ids = generate_message_ids
> +    self.generate_fuzzing = generate_fuzzing
> +    self.enable_kythe_annotations = enable_kythe_annotations
> +    self.extra_cpp_template_paths = extra_cpp_template_paths
> +    self.generate_extra_cpp_only = generate_extra_cpp_only
> +
> +  def Write(self, contents, filename):
> +    if self.output_dir is None:
> +      print(contents)
> +      return
> +    full_path = os.path.join(self.output_dir, filename)
> +    WriteFile(contents, full_path)
> +
> +  def OptimizeEmpty(self, contents):
> +    # Look for .cc files that contain no actual code. There are many of these
> +    # and they collectively take a while to compile.
> +    lines = contents.splitlines()
> +
> +    for line in lines:
> +      if line.startswith('#') or line.startswith('//'):
> +        continue
> +      if re.match(r'namespace .* {', line) or re.match(r'}.*//.*namespace',
> +                                                       line):
> +        continue
> +      if line.strip():
> +        # There is some actual code - return the unmodified contents.
> +        return contents
> +
> +    # If we reach here then we have a .cc file with no actual code. The
> +    # includes are therefore unneeded and can be removed.
> +    new_lines = [line for line in lines if not line.startswith('#include')]
> +    if len(new_lines) < len(lines):
> +      new_lines.append('')
> +      new_lines.append('// Includes removed due to no code being generated.')
> +    return '\n'.join(new_lines)
> +
> +  def WriteWithComment(self, contents, filename):
> +    generator_name = "mojom_bindings_generator.py"
> +    comment = r"// %s is auto generated by %s, do not edit" % (filename,
> +                                                               generator_name)
> +    contents = comment + '\n' + '\n' + contents;
> +    if filename.endswith('.cc'):
> +      contents = self.OptimizeEmpty(contents)
> +    self.Write(contents, filename)
> +
> +  def GenerateFiles(self, args):
> +    raise NotImplementedError("Subclasses must override/implement this method")
> +
> +  def GetJinjaParameters(self):
> +    """Returns default constructor parameters for the jinja environment."""
> +    return {}
> +
> +  def GetGlobals(self):
> +    """Returns global mappings for the template generation."""
> +    return {}
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
> new file mode 100644
> index 00000000..32c884a8
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/generator_unittest.py
> @@ -0,0 +1,74 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import sys
> +import unittest
> +
> +
> +def _GetDirAbove(dirname):
> +  """Returns the directory "above" this file containing |dirname| (which must
> +  also be "above" this file)."""
> +  path = os.path.abspath(__file__)
> +  while True:
> +    path, tail = os.path.split(path)
> +    assert tail
> +    if tail == dirname:
> +      return path
> +
> +
> +try:
> +  imp.find_module("mojom")
> +except ImportError:
> +  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
> +from mojom.generate import generator
> +
> +
> +class StringManipulationTest(unittest.TestCase):
> +  """generator contains some string utilities, this tests only those."""
> +
> +  def testSplitCamelCase(self):
> +    self.assertEquals(["camel", "case"], generator.SplitCamelCase("CamelCase"))
> +    self.assertEquals(["url", "loader", "factory"],
> +                      generator.SplitCamelCase('URLLoaderFactory'))
> +    self.assertEquals(["get99", "entries"],
> +                      generator.SplitCamelCase('Get99Entries'))
> +    self.assertEquals(["get99entries"],
> +                      generator.SplitCamelCase('Get99entries'))
> +
> +  def testToCamel(self):
> +    self.assertEquals("CamelCase", generator.ToCamel("camel_case"))
> +    self.assertEquals("CAMELCASE", generator.ToCamel("CAMEL_CASE"))
> +    self.assertEquals("camelCase",
> +                      generator.ToCamel("camel_case", lower_initial=True))
> +    self.assertEquals("CamelCase", generator.ToCamel(
> +        "camel case", delimiter=' '))
> +    self.assertEquals("CaMelCaSe", generator.ToCamel("caMel_caSe"))
> +    self.assertEquals("L2Tp", generator.ToCamel("l2tp", digits_split=True))
> +    self.assertEquals("l2tp", generator.ToCamel("l2tp", lower_initial=True))
> +
> +  def testToSnakeCase(self):
> +    self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCase"))
> +    self.assertEquals("snake_case", generator.ToLowerSnakeCase("snakeCase"))
> +    self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCASE"))
> +    self.assertEquals("snake_d3d11_case",
> +                      generator.ToLowerSnakeCase("SnakeD3D11Case"))
> +    self.assertEquals("snake_d3d11_case",
> +                      generator.ToLowerSnakeCase("SnakeD3d11Case"))
> +    self.assertEquals("snake_d3d11_case",
> +                      generator.ToLowerSnakeCase("snakeD3d11Case"))
> +    self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase"))
> +    self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase"))
> +    self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE"))
> +    self.assertEquals("SNAKE_D3D11_CASE",
> +                      generator.ToUpperSnakeCase("SnakeD3D11Case"))
> +    self.assertEquals("SNAKE_D3D11_CASE",
> +                      generator.ToUpperSnakeCase("SnakeD3d11Case"))
> +    self.assertEquals("SNAKE_D3D11_CASE",
> +                      generator.ToUpperSnakeCase("snakeD3d11Case"))
> +
> +
> +if __name__ == "__main__":
> +  unittest.main()
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
> new file mode 100644
> index 00000000..8547ff64
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
> @@ -0,0 +1,1635 @@
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +# This module's classes provide an interface to mojo modules. Modules are
> +# collections of interfaces and structs to be used by mojo ipc clients and
> +# servers.
> +#
> +# A simple interface would be created this way:
> +# module = mojom.generate.module.Module('Foo')
> +# interface = module.AddInterface('Bar')
> +# method = interface.AddMethod('Tat', 0)
> +# method.AddParameter('baz', mojom.INT32, 0)
> +
> +import pickle
> +
> +# We use our own version of __repr__ when displaying the AST, as the
> +# AST currently doesn't capture which nodes are reference (e.g. to
> +# types) and which nodes are definitions. This allows us to e.g. print
> +# the definition of a struct when it's defined inside a module, but
> +# only print its name when it's referenced in e.g. a method parameter.
> +def Repr(obj, as_ref=True):
> +  """A version of __repr__ that can distinguish references.
> +
> +  Sometimes we like to print an object's full representation
> +  (e.g. with its fields) and sometimes we just want to reference an
> +  object that was printed in full elsewhere. This function allows us
> +  to make that distinction.
> +
> +  Args:
> +    obj: The object whose string representation we compute.
> +    as_ref: If True, use the short reference representation.
> +
> +  Returns:
> +    A str representation of |obj|.
> +  """
> +  if hasattr(obj, 'Repr'):
> +    return obj.Repr(as_ref=as_ref)
> +  # Since we cannot implement Repr for existing container types, we
> +  # handle them here.
> +  elif isinstance(obj, list):
> +    if not obj:
> +      return '[]'
> +    else:
> +      return ('[\n%s\n]' % (',\n'.join(
> +          '    %s' % Repr(elem, as_ref).replace('\n', '\n    ')
> +          for elem in obj)))
> +  elif isinstance(obj, dict):
> +    if not obj:
> +      return '{}'
> +    else:
> +      return ('{\n%s\n}' % (',\n'.join(
> +          '    %s: %s' % (Repr(key, as_ref).replace('\n', '\n    '),
> +                          Repr(val, as_ref).replace('\n', '\n    '))
> +          for key, val in obj.items())))
> +  else:
> +    return repr(obj)
> +
> +
> +def GenericRepr(obj, names):
> +  """Compute generic Repr for |obj| based on the attributes in |names|.
> +
> +  Args:
> +    obj: The object to compute a Repr for.
> +    names: A dict from attribute names to include, to booleans
> +        specifying whether those attributes should be shown as
> +        references or not.
> +
> +  Returns:
> +    A str representation of |obj|.
> +  """
> +
> +  def ReprIndent(name, as_ref):
> +    return '    %s=%s' % (name, Repr(getattr(obj, name), as_ref).replace(
> +        '\n', '\n    '))
> +
> +  return '%s(\n%s\n)' % (obj.__class__.__name__, ',\n'.join(
> +      ReprIndent(name, as_ref) for (name, as_ref) in names.items()))
> +
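> +# For illustration (the class below is hypothetical, not used elsewhere in
> +# this file), a type that participates in this scheme implements
> +# Repr(as_ref=...) and delegates its full form to GenericRepr():
> +#
> +#   class Example(object):
> +#     def __init__(self, mojom_name, fields):
> +#       self.mojom_name = mojom_name
> +#       self.fields = fields
> +#
> +#     def Repr(self, as_ref=True):
> +#       if as_ref:
> +#         return '<Example mojom_name=%r>' % self.mojom_name
> +#       return GenericRepr(self, {'mojom_name': False, 'fields': True})
> +#
> +# Repr(Example('Foo', [BOOL]), as_ref=False) prints mojom_name in full and
> +# each element of fields only as a short reference.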
> +
> +class Kind(object):
> +  """Kind represents a type (e.g. int8, string).
> +
> +  Attributes:
> +    spec: A string uniquely identifying the type. May be None.
> +    module: {Module} The defining module. Set to None for built-in types.
> +    parent_kind: The enclosing type. For example, an enum defined
> +        inside an interface has that interface as its parent. May be None.
> +  """
> +
> +  def __init__(self, spec=None, module=None):
> +    self.spec = spec
> +    self.module = module
> +    self.parent_kind = None
> +
> +  def Repr(self, as_ref=True):
> +    # pylint: disable=unused-argument
> +    return '<%s spec=%r>' % (self.__class__.__name__, self.spec)
> +
> +  def __repr__(self):
> +    # Gives us a decent __repr__ for all kinds.
> +    return self.Repr()
> +
> +  def __eq__(self, rhs):
> +    # pylint: disable=unidiomatic-typecheck
> +    return (type(self) == type(rhs)
> +            and (self.spec, self.parent_kind) == (rhs.spec, rhs.parent_kind))
> +
> +  def __hash__(self):
> +    # TODO(crbug.com/1060471): Remove this and other __hash__ methods on Kind
> +    # and its subclasses. This is to support existing generator code which uses
> +    # some primitive Kinds as dict keys. The default hash (object identity)
> +    # breaks these dicts when a pickled Module instance is unpickled and used
> +    # during a subsequent run of the parser.
> +    return hash((self.spec, self.parent_kind))
> +
> +
> +class ReferenceKind(Kind):
> +  """ReferenceKind represents pointer and handle types.
> +
> +  A type is nullable if null (for pointer types) or invalid handle (for handle
> +  types) is a legal value for the type.
> +
> +  Attributes:
> +    is_nullable: True if the type is nullable.
> +  """
> +
> +  def __init__(self, spec=None, is_nullable=False, module=None):
> +    assert spec is None or is_nullable == spec.startswith('?')
> +    Kind.__init__(self, spec, module)
> +    self.is_nullable = is_nullable
> +    self.shared_definition = {}
> +
> +  def Repr(self, as_ref=True):
> +    return '<%s spec=%r is_nullable=%r>' % (self.__class__.__name__, self.spec,
> +                                            self.is_nullable)
> +
> +  def MakeNullableKind(self):
> +    assert not self.is_nullable
> +
> +    if self == STRING:
> +      return NULLABLE_STRING
> +    if self == HANDLE:
> +      return NULLABLE_HANDLE
> +    if self == DCPIPE:
> +      return NULLABLE_DCPIPE
> +    if self == DPPIPE:
> +      return NULLABLE_DPPIPE
> +    if self == MSGPIPE:
> +      return NULLABLE_MSGPIPE
> +    if self == SHAREDBUFFER:
> +      return NULLABLE_SHAREDBUFFER
> +    if self == PLATFORMHANDLE:
> +      return NULLABLE_PLATFORMHANDLE
> +
> +    nullable_kind = type(self)()
> +    nullable_kind.shared_definition = self.shared_definition
> +    if self.spec is not None:
> +      nullable_kind.spec = '?' + self.spec
> +    nullable_kind.is_nullable = True
> +    nullable_kind.parent_kind = self.parent_kind
> +    nullable_kind.module = self.module
> +
> +    return nullable_kind
> +
> +  @classmethod
> +  def AddSharedProperty(cls, name):
> +    """Adds a property |name| to |cls|, which accesses the corresponding item in
> +       |shared_definition|.
> +
> +       The reason for adding such indirection is to enable sharing the
> +       definition between a reference kind and its nullable variation. For
> +       example:
> +         a = Struct('test_struct_1')
> +         b = a.MakeNullableKind()
> +         a.name = 'test_struct_2'
> +         print(b.name)  # Outputs 'test_struct_2'.
> +    """
> +
> +    def Get(self):
> +      try:
> +        return self.shared_definition[name]
> +      except KeyError:  # Must raise AttributeError if property doesn't exist.
> +        raise AttributeError
> +
> +    def Set(self, value):
> +      self.shared_definition[name] = value
> +
> +    setattr(cls, name, property(Get, Set))
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, ReferenceKind)
> +            and super(ReferenceKind, self).__eq__(rhs)
> +            and self.is_nullable == rhs.is_nullable)
> +
> +  def __hash__(self):
> +    return hash((super(ReferenceKind, self).__hash__(), self.is_nullable))
> +
> +
> +# Initialize the set of primitive types. These can be accessed by clients.
> +BOOL = Kind('b')
> +INT8 = Kind('i8')
> +INT16 = Kind('i16')
> +INT32 = Kind('i32')
> +INT64 = Kind('i64')
> +UINT8 = Kind('u8')
> +UINT16 = Kind('u16')
> +UINT32 = Kind('u32')
> +UINT64 = Kind('u64')
> +FLOAT = Kind('f')
> +DOUBLE = Kind('d')
> +STRING = ReferenceKind('s')
> +HANDLE = ReferenceKind('h')
> +DCPIPE = ReferenceKind('h:d:c')
> +DPPIPE = ReferenceKind('h:d:p')
> +MSGPIPE = ReferenceKind('h:m')
> +SHAREDBUFFER = ReferenceKind('h:s')
> +PLATFORMHANDLE = ReferenceKind('h:p')
> +NULLABLE_STRING = ReferenceKind('?s', True)
> +NULLABLE_HANDLE = ReferenceKind('?h', True)
> +NULLABLE_DCPIPE = ReferenceKind('?h:d:c', True)
> +NULLABLE_DPPIPE = ReferenceKind('?h:d:p', True)
> +NULLABLE_MSGPIPE = ReferenceKind('?h:m', True)
> +NULLABLE_SHAREDBUFFER = ReferenceKind('?h:s', True)
> +NULLABLE_PLATFORMHANDLE = ReferenceKind('?h:p', True)
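> +
> +# For illustration: MakeNullableKind() maps each non-nullable built-in above
> +# to its nullable counterpart, and derives a '?'-prefixed spec for user
> +# defined reference kinds (the struct name below is hypothetical):
> +#
> +#   STRING.MakeNullableKind() is NULLABLE_STRING   # True
> +#   Struct('Foo').MakeNullableKind().spec          # '?x:Foo'
> +#   Struct('Foo').MakeNullableKind().is_nullable   # True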
> +
> +# Collection of all Primitive types
> +PRIMITIVES = (
> +    BOOL,
> +    INT8,
> +    INT16,
> +    INT32,
> +    INT64,
> +    UINT8,
> +    UINT16,
> +    UINT32,
> +    UINT64,
> +    FLOAT,
> +    DOUBLE,
> +    STRING,
> +    HANDLE,
> +    DCPIPE,
> +    DPPIPE,
> +    MSGPIPE,
> +    SHAREDBUFFER,
> +    PLATFORMHANDLE,
> +    NULLABLE_STRING,
> +    NULLABLE_HANDLE,
> +    NULLABLE_DCPIPE,
> +    NULLABLE_DPPIPE,
> +    NULLABLE_MSGPIPE,
> +    NULLABLE_SHAREDBUFFER,
> +    NULLABLE_PLATFORMHANDLE,
> +)
> +
> +ATTRIBUTE_MIN_VERSION = 'MinVersion'
> +ATTRIBUTE_EXTENSIBLE = 'Extensible'
> +ATTRIBUTE_STABLE = 'Stable'
> +ATTRIBUTE_SYNC = 'Sync'
> +
> +
> +class NamedValue(object):
> +  def __init__(self, module, parent_kind, mojom_name):
> +    self.module = module
> +    self.parent_kind = parent_kind
> +    self.mojom_name = mojom_name
> +
> +  def GetSpec(self):
> +    return (self.module.GetNamespacePrefix() +
> +            (self.parent_kind and
> +             (self.parent_kind.mojom_name + '.') or "") + self.mojom_name)
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, NamedValue)
> +            and (self.parent_kind, self.mojom_name) == (rhs.parent_kind,
> +                                                        rhs.mojom_name))
> +
> +
> +class BuiltinValue(object):
> +  def __init__(self, value):
> +    self.value = value
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, BuiltinValue) and self.value == rhs.value
> +
> +
> +class ConstantValue(NamedValue):
> +  def __init__(self, module, parent_kind, constant):
> +    NamedValue.__init__(self, module, parent_kind, constant.mojom_name)
> +    self.constant = constant
> +
> +  @property
> +  def name(self):
> +    return self.constant.name
> +
> +
> +class EnumValue(NamedValue):
> +  def __init__(self, module, enum, field):
> +    NamedValue.__init__(self, module, enum.parent_kind, field.mojom_name)
> +    self.field = field
> +    self.enum = enum
> +
> +  def GetSpec(self):
> +    return (self.module.GetNamespacePrefix() +
> +            (self.parent_kind and (self.parent_kind.mojom_name + '.') or "") +
> +            self.enum.mojom_name + '.' + self.mojom_name)
> +
> +  @property
> +  def name(self):
> +    return self.field.name
> +
> +
> +class Constant(object):
> +  def __init__(self, mojom_name=None, kind=None, value=None, parent_kind=None):
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.kind = kind
> +    self.value = value
> +    self.parent_kind = parent_kind
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeConstant(self.mojom_name)
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Constant)
> +            and (self.mojom_name, self.kind, self.value,
> +                 self.parent_kind) == (rhs.mojom_name, rhs.kind, rhs.value,
> +                                       rhs.parent_kind))
> +
> +
> +class Field(object):
> +  def __init__(self,
> +               mojom_name=None,
> +               kind=None,
> +               ordinal=None,
> +               default=None,
> +               attributes=None):
> +    if self.__class__.__name__ == 'Field':
> +      raise Exception()
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.kind = kind
> +    self.ordinal = ordinal
> +    self.default = default
> +    self.attributes = attributes
> +
> +  def Repr(self, as_ref=True):
> +    # pylint: disable=unused-argument
> +    # Fields are only referenced by objects which define them and thus
> +    # they are always displayed as non-references.
> +    return GenericRepr(self, {'mojom_name': False, 'kind': True})
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeField(self.mojom_name)
> +
> +  @property
> +  def min_version(self):
> +    return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
> +        if self.attributes else None
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Field)
> +            and (self.mojom_name, self.kind, self.ordinal, self.default,
> +                 self.attributes) == (rhs.mojom_name, rhs.kind, rhs.ordinal,
> +                                      rhs.default, rhs.attributes))
> +
> +  def __hash__(self):
> +    return hash((self.mojom_name, self.kind, self.ordinal, self.default))
> +
> +
> +class StructField(Field):
> +  pass
> +
> +
> +class UnionField(Field):
> +  pass
> +
> +
> +def _IsFieldBackwardCompatible(new_field, old_field):
> +  if (new_field.min_version or 0) != (old_field.min_version or 0):
> +    return False
> +
> +  if isinstance(new_field.kind, (Enum, Struct, Union)):
> +    return new_field.kind.IsBackwardCompatible(old_field.kind)
> +
> +  return new_field.kind == old_field.kind
> +
> +
> +class Struct(ReferenceKind):
> +  """A struct with typed fields.
> +
> +  Attributes:
> +    mojom_name: {str} The name of the struct type as defined in mojom.
> +    name: {str} The stylized name.
> +    native_only: {bool} Does the struct have a body (i.e. any fields) or is it
> +        purely a native struct.
> +    custom_serializer: {bool} Should we generate a serializer for the struct or
> +        will one be provided by non-generated code.
> +    fields: {List[StructField]} The members of the struct.
> +    enums: {List[Enum]} The enums defined in the struct scope.
> +    constants: {List[Constant]} The constants defined in the struct scope.
> +    attributes: {dict} Additional information about the struct, such as
> +        if it's a native struct.
> +  """
> +
> +  ReferenceKind.AddSharedProperty('mojom_name')
> +  ReferenceKind.AddSharedProperty('name')
> +  ReferenceKind.AddSharedProperty('native_only')
> +  ReferenceKind.AddSharedProperty('custom_serializer')
> +  ReferenceKind.AddSharedProperty('fields')
> +  ReferenceKind.AddSharedProperty('enums')
> +  ReferenceKind.AddSharedProperty('constants')
> +  ReferenceKind.AddSharedProperty('attributes')
> +
> +  def __init__(self, mojom_name=None, module=None, attributes=None):
> +    if mojom_name is not None:
> +      spec = 'x:' + mojom_name
> +    else:
> +      spec = None
> +    ReferenceKind.__init__(self, spec, False, module)
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.native_only = False
> +    self.custom_serializer = False
> +    self.fields = []
> +    self.enums = []
> +    self.constants = []
> +    self.attributes = attributes
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s mojom_name=%r module=%s>' % (self.__class__.__name__,
> +                                               self.mojom_name,
> +                                               Repr(self.module, as_ref=True))
> +    else:
> +      return GenericRepr(self, {
> +          'mojom_name': False,
> +          'fields': False,
> +          'module': True
> +      })
> +
> +  def AddField(self,
> +               mojom_name,
> +               kind,
> +               ordinal=None,
> +               default=None,
> +               attributes=None):
> +    field = StructField(mojom_name, kind, ordinal, default, attributes)
> +    self.fields.append(field)
> +    return field
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeStruct(self.mojom_name)
> +    for field in self.fields:
> +      field.Stylize(stylizer)
> +    for enum in self.enums:
> +      enum.Stylize(stylizer)
> +    for constant in self.constants:
> +      constant.Stylize(stylizer)
> +
> +  def IsBackwardCompatible(self, older_struct):
> +    """This struct is backward-compatible with older_struct if and only if all
> +    of the following conditions hold:
> +      - Any newly added field is tagged with a [MinVersion] attribute specifying
> +        a version number greater than all previously used [MinVersion]
> +        attributes within the struct.
> +      - All fields present in older_struct remain present in the new struct,
> +        with the same ordinal position, same optional or non-optional status,
> +        same (or backward-compatible) type and where applicable, the same
> +        [MinVersion] attribute value.
> +      - All [MinVersion] attributes must be non-decreasing in ordinal order.
> +      - All reference-typed (string, array, map, struct, or union) fields tagged
> +        with a [MinVersion] greater than zero must be optional.
> +    """
> +
> +    def buildOrdinalFieldMap(struct):
> +      fields_by_ordinal = {}
> +      for field in struct.fields:
> +        if field.ordinal in fields_by_ordinal:
> +          raise Exception('Multiple fields with ordinal %s in struct %s.' %
> +                          (field.ordinal, struct.mojom_name))
> +        fields_by_ordinal[field.ordinal] = field
> +      return fields_by_ordinal
> +
> +    new_fields = buildOrdinalFieldMap(self)
> +    old_fields = buildOrdinalFieldMap(older_struct)
> +    if len(new_fields) < len(old_fields):
> +      # At least one field was removed, which is not OK.
> +      return False
> +
> +    # If there are N fields, existing ordinal values must exactly cover the
> +    # range from 0 to N-1.
> +    num_old_ordinals = len(old_fields)
> +    max_old_min_version = 0
> +    for ordinal in range(num_old_ordinals):
> +      new_field = new_fields[ordinal]
> +      old_field = old_fields[ordinal]
> +      if (old_field.min_version or 0) > max_old_min_version:
> +        max_old_min_version = old_field.min_version
> +      if not _IsFieldBackwardCompatible(new_field, old_field):
> +        # Type or min-version mismatch between old and new versions of the same
> +        # ordinal field.
> +        return False
> +
> +    # At this point we know all old fields are intact in the new struct
> +    # definition. Now verify that all new fields have a high enough min version
> +    # and are appropriately optional where required.
> +    num_new_ordinals = len(new_fields)
> +    last_min_version = max_old_min_version
> +    for ordinal in range(num_old_ordinals, num_new_ordinals):
> +      new_field = new_fields[ordinal]
> +      min_version = new_field.min_version or 0
> +      if min_version <= max_old_min_version:
> +        # A new field is being added to an existing version, which is not OK.
> +        return False
> +      if min_version < last_min_version:
> +        # The [MinVersion] of a field cannot be lower than the [MinVersion] of
> +        # a field with lower ordinal value.
> +        return False
> +      if IsReferenceKind(new_field.kind) and not IsNullableKind(new_field.kind):
> +        # New fields whose type can be nullable MUST be nullable.
> +        return False
> +
> +    return True
> +
> +  @property
> +  def stable(self):
> +    return self.attributes.get(ATTRIBUTE_STABLE, False) \
> +        if self.attributes else False
> +
> +  @property
> +  def qualified_name(self):
> +    if self.parent_kind:
> +      prefix = self.parent_kind.qualified_name + '.'
> +    else:
> +      prefix = self.module.GetNamespacePrefix()
> +    return '%s%s' % (prefix, self.mojom_name)
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Struct) and
> +            (self.mojom_name, self.native_only, self.fields, self.constants,
> +             self.attributes) == (rhs.mojom_name, rhs.native_only, rhs.fields,
> +                                  rhs.constants, rhs.attributes))
> +
> +  def __hash__(self):
> +    return id(self)
> +
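> +# For illustration of Struct.IsBackwardCompatible() (struct and field names
> +# below are hypothetical): adding a field at a new ordinal with a strictly
> +# larger [MinVersion] is compatible; removing a field is not.
> +#
> +#   old = Struct('S')
> +#   old.AddField('a', INT32, ordinal=0)
> +#   new = Struct('S')
> +#   new.AddField('a', INT32, ordinal=0)
> +#   new.AddField('b', INT32, ordinal=1,
> +#                attributes={ATTRIBUTE_MIN_VERSION: 1})
> +#   new.IsBackwardCompatible(old)   # True
> +#   old.IsBackwardCompatible(new)   # False: 'b' would be missing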
> +
> +class Union(ReferenceKind):
> +  """A union of several kinds.
> +
> +  Attributes:
> +    mojom_name: {str} The name of the union type as defined in mojom.
> +    name: {str} The stylized name.
> +    fields: {List[UnionField]} The members of the union.
> +    attributes: {dict} Additional information about the union, such as
> +        which Java class name to use to represent it in the generated
> +        bindings.
> +  """
> +  ReferenceKind.AddSharedProperty('mojom_name')
> +  ReferenceKind.AddSharedProperty('name')
> +  ReferenceKind.AddSharedProperty('fields')
> +  ReferenceKind.AddSharedProperty('attributes')
> +
> +  def __init__(self, mojom_name=None, module=None, attributes=None):
> +    if mojom_name is not None:
> +      spec = 'x:' + mojom_name
> +    else:
> +      spec = None
> +    ReferenceKind.__init__(self, spec, False, module)
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.fields = []
> +    self.attributes = attributes
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s spec=%r is_nullable=%r fields=%s>' % (
> +          self.__class__.__name__, self.spec, self.is_nullable, Repr(
> +              self.fields))
> +    else:
> +      return GenericRepr(self, {'fields': True, 'is_nullable': False})
> +
> +  def AddField(self, mojom_name, kind, ordinal=None, attributes=None):
> +    field = UnionField(mojom_name, kind, ordinal, None, attributes)
> +    self.fields.append(field)
> +    return field
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeUnion(self.mojom_name)
> +    for field in self.fields:
> +      field.Stylize(stylizer)
> +
> +  def IsBackwardCompatible(self, older_union):
> +    """This union is backward-compatible with older_union if and only if all
> +    of the following conditions hold:
> +      - Any newly added field is tagged with a [MinVersion] attribute specifying
> +        a version number greater than all previously used [MinVersion]
> +        attributes within the union.
> +      - All fields present in older_union remain present in the new union,
> +        with the same ordinal value, same optional or non-optional status,
> +        same (or backward-compatible) type, and where applicable, the same
> +        [MinVersion] attribute value.
> +    """
> +
> +    def buildOrdinalFieldMap(union):
> +      fields_by_ordinal = {}
> +      for field in union.fields:
> +        if field.ordinal in fields_by_ordinal:
> +          raise Exception('Multiple fields with ordinal %s in union %s.' %
> +                          (field.ordinal, union.mojom_name))
> +        fields_by_ordinal[field.ordinal] = field
> +      return fields_by_ordinal
> +
> +    new_fields = buildOrdinalFieldMap(self)
> +    old_fields = buildOrdinalFieldMap(older_union)
> +    if len(new_fields) < len(old_fields):
> +      # At least one field was removed, which is not OK.
> +      return False
> +
> +    max_old_min_version = 0
> +    for ordinal, old_field in old_fields.items():
> +      new_field = new_fields.get(ordinal)
> +      if not new_field:
> +        # A field was removed, which is not OK.
> +        return False
> +      if not _IsFieldBackwardCompatible(new_field, old_field):
> +        # A field changed its type or MinVersion, which is not OK.
> +        return False
> +      old_min_version = old_field.min_version or 0
> +      if old_min_version > max_old_min_version:
> +        max_old_min_version = old_min_version
> +
> +    new_ordinals = set(new_fields.keys()) - set(old_fields.keys())
> +    for ordinal in new_ordinals:
> +      if (new_fields[ordinal].min_version or 0) <= max_old_min_version:
> +        # New fields must use a MinVersion greater than any old fields.
> +        return False
> +
> +    return True
> +
> +  @property
> +  def stable(self):
> +    return self.attributes.get(ATTRIBUTE_STABLE, False) \
> +        if self.attributes else False
> +
> +  @property
> +  def qualified_name(self):
> +    if self.parent_kind:
> +      prefix = self.parent_kind.qualified_name + '.'
> +    else:
> +      prefix = self.module.GetNamespacePrefix()
> +    return '%s%s' % (prefix, self.mojom_name)
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Union) and
> +            (self.mojom_name, self.fields,
> +             self.attributes) == (rhs.mojom_name, rhs.fields, rhs.attributes))
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class Array(ReferenceKind):
> +  """An array.
> +
> +  Attributes:
> +    kind: {Kind} The type of the elements. May be None.
> +    length: The number of elements. None if unknown.
> +  """
> +
> +  ReferenceKind.AddSharedProperty('kind')
> +  ReferenceKind.AddSharedProperty('length')
> +
> +  def __init__(self, kind=None, length=None):
> +    if kind is not None:
> +      if length is not None:
> +        spec = 'a%d:%s' % (length, kind.spec)
> +      else:
> +        spec = 'a:%s' % kind.spec
> +
> +      ReferenceKind.__init__(self, spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +    self.length = length
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s spec=%r is_nullable=%r kind=%s length=%r>' % (
> +          self.__class__.__name__, self.spec, self.is_nullable, Repr(
> +              self.kind), self.length)
> +    else:
> +      return GenericRepr(self, {
> +          'kind': True,
> +          'length': False,
> +          'is_nullable': False
> +      })
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Array)
> +            and (self.kind, self.length) == (rhs.kind, rhs.length))
> +
> +  def __hash__(self):
> +    return id(self)
> +
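> +# For illustration: an array's spec encodes the element kind and, for
> +# fixed-size arrays, the length.
> +#
> +#   Array(INT32).spec      # 'a:i32'
> +#   Array(INT32, 4).spec   # 'a4:i32'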
> +
> +class Map(ReferenceKind):
> +  """A map.
> +
> +  Attributes:
> +    key_kind: {Kind} The type of the keys. May be None.
> +    value_kind: {Kind} The type of the elements. May be None.
> +  """
> +  ReferenceKind.AddSharedProperty('key_kind')
> +  ReferenceKind.AddSharedProperty('value_kind')
> +
> +  def __init__(self, key_kind=None, value_kind=None):
> +    if (key_kind is not None and value_kind is not None):
> +      ReferenceKind.__init__(
> +          self, 'm[' + key_kind.spec + '][' + value_kind.spec + ']')
> +      if IsNullableKind(key_kind):
> +        raise Exception("Nullable kinds cannot be keys in maps.")
> +      if IsAnyHandleKind(key_kind):
> +        raise Exception("Handles cannot be keys in maps.")
> +      if IsAnyInterfaceKind(key_kind):
> +        raise Exception("Interfaces cannot be keys in maps.")
> +      if IsArrayKind(key_kind):
> +        raise Exception("Arrays cannot be keys in maps.")
> +    else:
> +      ReferenceKind.__init__(self)
> +
> +    self.key_kind = key_kind
> +    self.value_kind = value_kind
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s spec=%r is_nullable=%r key_kind=%s value_kind=%s>' % (
> +          self.__class__.__name__, self.spec, self.is_nullable,
> +          Repr(self.key_kind), Repr(self.value_kind))
> +    else:
> +      return GenericRepr(self, {'key_kind': True, 'value_kind': True})
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Map) and
> +            (self.key_kind, self.value_kind) == (rhs.key_kind, rhs.value_kind))
> +
> +  def __hash__(self):
> +    return id(self)
> +
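> +# For illustration: a map's spec combines the key and value specs, and key
> +# kinds are validated at construction time.
> +#
> +#   Map(STRING, INT32).spec       # 'm[s][i32]'
> +#   Map(NULLABLE_STRING, INT32)   # raises: nullable kinds cannot be map keys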
> +
> +class PendingRemote(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, Interface):
> +        raise Exception(
> +            'pending_remote<T> requires T to be an interface type. Got %r' %
> +            kind.spec)
> +      ReferenceKind.__init__(self, 'rmt:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, PendingRemote) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class PendingReceiver(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, Interface):
> +        raise Exception(
> +            'pending_receiver<T> requires T to be an interface type. Got %r' %
> +            kind.spec)
> +      ReferenceKind.__init__(self, 'rcv:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, PendingReceiver) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class PendingAssociatedRemote(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, Interface):
> +        raise Exception(
> +            'pending_associated_remote<T> requires T to be an interface ' +
> +            'type. Got %r' % kind.spec)
> +      ReferenceKind.__init__(self, 'rma:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, PendingAssociatedRemote) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class PendingAssociatedReceiver(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, Interface):
> +        raise Exception(
> +            'pending_associated_receiver<T> requires T to be an interface ' +
> +            'type. Got %r' % kind.spec)
> +      ReferenceKind.__init__(self, 'rca:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, PendingAssociatedReceiver) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class InterfaceRequest(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, Interface):
> +        raise Exception(
> +            "Interface request requires %r to be an interface." % kind.spec)
> +      ReferenceKind.__init__(self, 'r:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, InterfaceRequest) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class AssociatedInterfaceRequest(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, InterfaceRequest):
> +        raise Exception(
> +            "Associated interface request requires %r to be an interface "
> +            "request." % kind.spec)
> +      assert not kind.is_nullable
> +      ReferenceKind.__init__(self, 'asso:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind.kind if kind is not None else None
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, AssociatedInterfaceRequest) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class Parameter(object):
> +  def __init__(self,
> +               mojom_name=None,
> +               kind=None,
> +               ordinal=None,
> +               default=None,
> +               attributes=None):
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.ordinal = ordinal
> +    self.kind = kind
> +    self.default = default
> +    self.attributes = attributes
> +
> +  def Repr(self, as_ref=True):
> +    # pylint: disable=unused-argument
> +    return '<%s mojom_name=%r kind=%s>' % (
> +        self.__class__.__name__, self.mojom_name, self.kind.Repr(as_ref=True))
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeParameter(self.mojom_name)
> +
> +  @property
> +  def min_version(self):
> +    return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
> +        if self.attributes else None
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Parameter)
> +            and (self.mojom_name, self.ordinal, self.kind, self.default,
> +                 self.attributes) == (rhs.mojom_name, rhs.ordinal, rhs.kind,
> +                                      rhs.default, rhs.attributes))
> +
> +
> +class Method(object):
> +  def __init__(self, interface, mojom_name, ordinal=None, attributes=None):
> +    self.interface = interface
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.explicit_ordinal = ordinal
> +    self.ordinal = ordinal
> +    self.parameters = []
> +    self.param_struct = None
> +    self.response_parameters = None
> +    self.response_param_struct = None
> +    self.attributes = attributes
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
> +    else:
> +      return GenericRepr(self, {
> +          'mojom_name': False,
> +          'parameters': True,
> +          'response_parameters': True
> +      })
> +
> +  def AddParameter(self,
> +                   mojom_name,
> +                   kind,
> +                   ordinal=None,
> +                   default=None,
> +                   attributes=None):
> +    parameter = Parameter(mojom_name, kind, ordinal, default, attributes)
> +    self.parameters.append(parameter)
> +    return parameter
> +
> +  def AddResponseParameter(self,
> +                           mojom_name,
> +                           kind,
> +                           ordinal=None,
> +                           default=None,
> +                           attributes=None):
> +    if self.response_parameters is None:
> +      self.response_parameters = []
> +    parameter = Parameter(mojom_name, kind, ordinal, default, attributes)
> +    self.response_parameters.append(parameter)
> +    return parameter
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeMethod(self.mojom_name)
> +    for param in self.parameters:
> +      param.Stylize(stylizer)
> +    if self.response_parameters is not None:
> +      for param in self.response_parameters:
> +        param.Stylize(stylizer)
> +
> +    if self.param_struct:
> +      self.param_struct.Stylize(stylizer)
> +    if self.response_param_struct:
> +      self.response_param_struct.Stylize(stylizer)
> +
> +  @property
> +  def min_version(self):
> +    return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
> +        if self.attributes else None
> +
> +  @property
> +  def sync(self):
> +    return self.attributes.get(ATTRIBUTE_SYNC) \
> +        if self.attributes else None
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Method) and
> +            (self.mojom_name, self.ordinal, self.parameters,
> +             self.response_parameters,
> +             self.attributes) == (rhs.mojom_name, rhs.ordinal, rhs.parameters,
> +                                  rhs.response_parameters, rhs.attributes))
> +
> +
> +class Interface(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('mojom_name')
> +  ReferenceKind.AddSharedProperty('name')
> +  ReferenceKind.AddSharedProperty('methods')
> +  ReferenceKind.AddSharedProperty('enums')
> +  ReferenceKind.AddSharedProperty('constants')
> +  ReferenceKind.AddSharedProperty('attributes')
> +
> +  def __init__(self, mojom_name=None, module=None, attributes=None):
> +    if mojom_name is not None:
> +      spec = 'x:' + mojom_name
> +    else:
> +      spec = None
> +    ReferenceKind.__init__(self, spec, False, module)
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.methods = []
> +    self.enums = []
> +    self.constants = []
> +    self.attributes = attributes
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
> +    else:
> +      return GenericRepr(self, {
> +          'mojom_name': False,
> +          'attributes': False,
> +          'methods': False
> +      })
> +
> +  def AddMethod(self, mojom_name, ordinal=None, attributes=None):
> +    method = Method(self, mojom_name, ordinal, attributes)
> +    self.methods.append(method)
> +    return method
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeInterface(self.mojom_name)
> +    for method in self.methods:
> +      method.Stylize(stylizer)
> +    for enum in self.enums:
> +      enum.Stylize(stylizer)
> +    for constant in self.constants:
> +      constant.Stylize(stylizer)
> +
> +  def IsBackwardCompatible(self, older_interface):
> +    """This interface is backward-compatible with older_interface if and only
> +    if all of the following conditions hold:
> +      - All defined methods in older_interface (when identified by ordinal) have
> +        backward-compatible definitions in this interface. For each method this
> +        means:
> +          - The parameter list is backward-compatible, according to backward-
> +            compatibility rules for structs, where each parameter is essentially
> +            a struct field.
> +          - If the old method definition does not specify a reply message, the
> +            new method definition must not specify a reply message.
> +          - If the old method definition specifies a reply message, the new
> +            method definition must also specify a reply message with a parameter
> +            list that is backward-compatible according to backward-compatibility
> +            rules for structs.
> +      - All newly introduced methods in this interface have a [MinVersion]
> +        attribute specifying a version greater than any method in
> +        older_interface.
> +    """
> +
> +    def buildOrdinalMethodMap(interface):
> +      methods_by_ordinal = {}
> +      for method in interface.methods:
> +        if method.ordinal in methods_by_ordinal:
> +          raise Exception('Multiple methods with ordinal %s in interface %s.' %
> +                          (method.ordinal, interface.mojom_name))
> +        methods_by_ordinal[method.ordinal] = method
> +      return methods_by_ordinal
> +
> +    new_methods = buildOrdinalMethodMap(self)
> +    old_methods = buildOrdinalMethodMap(older_interface)
> +    max_old_min_version = 0
> +    for ordinal, old_method in old_methods.items():
> +      new_method = new_methods.get(ordinal)
> +      if not new_method:
> +        # A method was removed, which is not OK.
> +        return False
> +
> +      if not new_method.param_struct.IsBackwardCompatible(
> +          old_method.param_struct):
> +        # The parameter list is not backward-compatible, which is not OK.
> +        return False
> +
> +      if old_method.response_param_struct is None:
> +        if new_method.response_param_struct is not None:
> +          # A reply was added to a message which didn't have one before, and
> +          # this is not OK.
> +          return False
> +      else:
> +        if new_method.response_param_struct is None:
> +          # A reply was removed from a message, which is not OK.
> +          return False
> +        if not new_method.response_param_struct.IsBackwardCompatible(
> +            old_method.response_param_struct):
> +          # The new message's reply is not backward-compatible with the old
> +          # message's reply, which is not OK.
> +          return False
> +
> +      if (old_method.min_version or 0) > max_old_min_version:
> +        max_old_min_version = old_method.min_version
> +
> +    # All the old methods are compatible with their new counterparts. Now verify
> +    # that newly added methods are properly versioned.
> +    new_ordinals = set(new_methods.keys()) - set(old_methods.keys())
> +    for ordinal in new_ordinals:
> +      new_method = new_methods[ordinal]
> +      if (new_method.min_version or 0) <= max_old_min_version:
> +        # A method was added to an existing version, which is not OK.
> +        return False
> +
> +    return True
> +
> +  @property
> +  def stable(self):
> +    return self.attributes.get(ATTRIBUTE_STABLE, False) \
> +        if self.attributes else False
> +
> +  @property
> +  def qualified_name(self):
> +    if self.parent_kind:
> +      prefix = self.parent_kind.qualified_name + '.'
> +    else:
> +      prefix = self.module.GetNamespacePrefix()
> +    return '%s%s' % (prefix, self.mojom_name)
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Interface)
> +            and (self.mojom_name, self.methods, self.enums, self.constants,
> +                 self.attributes) == (rhs.mojom_name, rhs.methods, rhs.enums,
> +                                      rhs.constants, rhs.attributes))
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class AssociatedInterface(ReferenceKind):
> +  ReferenceKind.AddSharedProperty('kind')
> +
> +  def __init__(self, kind=None):
> +    if kind is not None:
> +      if not isinstance(kind, Interface):
> +        raise Exception(
> +            "Associated interface requires %r to be an interface." % kind.spec)
> +      assert not kind.is_nullable
> +      ReferenceKind.__init__(self, 'asso:' + kind.spec)
> +    else:
> +      ReferenceKind.__init__(self)
> +    self.kind = kind
> +
> +  def __eq__(self, rhs):
> +    return isinstance(rhs, AssociatedInterface) and self.kind == rhs.kind
> +
> +  def __hash__(self):
> +    return id(self)
> +
> +
> +class EnumField(object):
> +  def __init__(self,
> +               mojom_name=None,
> +               value=None,
> +               attributes=None,
> +               numeric_value=None):
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.value = value
> +    self.attributes = attributes
> +    self.numeric_value = numeric_value
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeEnumField(self.mojom_name)
> +
> +  @property
> +  def min_version(self):
> +    return self.attributes.get(ATTRIBUTE_MIN_VERSION) \
> +        if self.attributes else None
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, EnumField)
> +            and (self.mojom_name, self.value, self.attributes,
> +                 self.numeric_value) == (rhs.mojom_name, rhs.value,
> +                                         rhs.attributes, rhs.numeric_value))
> +
> +
> +class Enum(Kind):
> +  def __init__(self, mojom_name=None, module=None, attributes=None):
> +    self.mojom_name = mojom_name
> +    self.name = None
> +    self.native_only = False
> +    if mojom_name is not None:
> +      spec = 'x:' + mojom_name
> +    else:
> +      spec = None
> +    Kind.__init__(self, spec, module)
> +    self.fields = []
> +    self.attributes = attributes
> +    self.min_value = None
> +    self.max_value = None
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s mojom_name=%r>' % (self.__class__.__name__, self.mojom_name)
> +    else:
> +      return GenericRepr(self, {'mojom_name': False, 'fields': False})
> +
> +  def Stylize(self, stylizer):
> +    self.name = stylizer.StylizeEnum(self.mojom_name)
> +    for field in self.fields:
> +      field.Stylize(stylizer)
> +
> +  @property
> +  def extensible(self):
> +    return self.attributes.get(ATTRIBUTE_EXTENSIBLE, False) \
> +        if self.attributes else False
> +
> +  @property
> +  def stable(self):
> +    return self.attributes.get(ATTRIBUTE_STABLE, False) \
> +        if self.attributes else False
> +
> +  @property
> +  def qualified_name(self):
> +    if self.parent_kind:
> +      prefix = self.parent_kind.qualified_name + '.'
> +    else:
> +      prefix = self.module.GetNamespacePrefix()
> +    return '%s%s' % (prefix, self.mojom_name)
> +
> +  def IsBackwardCompatible(self, older_enum):
> +    """This enum is backward-compatible with older_enum if and only if one of
> +    the following conditions holds:
> +        - Neither enum is [Extensible] and both have the exact same set of valid
> +          numeric values. Field names and aliases for the same numeric value do
> +          not affect compatibility.
> +        - older_enum is [Extensible], and for every version defined by
> +          older_enum, this enum has the exact same set of valid numeric values.
> +    """
> +
> +    def buildVersionFieldMap(enum):
> +      fields_by_min_version = {}
> +      for field in enum.fields:
> +        if field.min_version not in fields_by_min_version:
> +          fields_by_min_version[field.min_version] = set()
> +        fields_by_min_version[field.min_version].add(field.numeric_value)
> +      return fields_by_min_version
> +
> +    old_fields = buildVersionFieldMap(older_enum)
> +    new_fields = buildVersionFieldMap(self)
> +
> +    if new_fields.keys() != old_fields.keys() and not older_enum.extensible:
> +      return False
> +
> +    for min_version, valid_values in old_fields.items():
> +      if (min_version not in new_fields
> +          or new_fields[min_version] != valid_values):
> +        return False
> +
> +    return True
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Enum) and
> +            (self.mojom_name, self.native_only, self.fields, self.attributes,
> +             self.min_value,
> +             self.max_value) == (rhs.mojom_name, rhs.native_only, rhs.fields,
> +                                 rhs.attributes, rhs.min_value, rhs.max_value))
> +
> +  def __hash__(self):
> +    return id(self)
> +
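> +# For illustration of Enum.IsBackwardCompatible() (enum and field names below
> +# are hypothetical): an [Extensible] enum may gain values in later versions
> +# as long as every existing version keeps exactly the same numeric values.
> +#
> +#   old = Enum('E', attributes={ATTRIBUTE_EXTENSIBLE: True})
> +#   old.fields = [EnumField('kFoo', numeric_value=0)]
> +#   new = Enum('E')
> +#   new.fields = [EnumField('kFoo', numeric_value=0),
> +#                 EnumField('kBar', numeric_value=1,
> +#                           attributes={ATTRIBUTE_MIN_VERSION: 1})]
> +#   new.IsBackwardCompatible(old)   # True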
> +
> +class Module(object):
> +  def __init__(self, path=None, mojom_namespace=None, attributes=None):
> +    self.path = path
> +    self.mojom_namespace = mojom_namespace
> +    self.namespace = None
> +    self.structs = []
> +    self.unions = []
> +    self.interfaces = []
> +    self.enums = []
> +    self.constants = []
> +    self.kinds = {}
> +    self.attributes = attributes
> +    self.imports = []
> +    self.imported_kinds = {}
> +
> +  def __repr__(self):
> +    # Gives us a decent __repr__ for modules.
> +    return self.Repr()
> +
> +  def __eq__(self, rhs):
> +    return (isinstance(rhs, Module) and
> +            (self.path, self.attributes, self.mojom_namespace, self.imports,
> +             self.constants, self.enums, self.structs, self.unions,
> +             self.interfaces) == (rhs.path, rhs.attributes, rhs.mojom_namespace,
> +                                  rhs.imports, rhs.constants, rhs.enums,
> +                                  rhs.structs, rhs.unions, rhs.interfaces))
> +
> +  def Repr(self, as_ref=True):
> +    if as_ref:
> +      return '<%s path=%r mojom_namespace=%r>' % (
> +          self.__class__.__name__, self.path, self.mojom_namespace)
> +    else:
> +      return GenericRepr(
> +          self, {
> +              'path': False,
> +              'mojom_namespace': False,
> +              'attributes': False,
> +              'structs': False,
> +              'interfaces': False,
> +              'unions': False
> +          })
> +
> +  def GetNamespacePrefix(self):
> +    return '%s.' % self.mojom_namespace if self.mojom_namespace else ''
> +
> +  def AddInterface(self, mojom_name, attributes=None):
> +    interface = Interface(mojom_name, self, attributes)
> +    self.interfaces.append(interface)
> +    return interface
> +
> +  def AddStruct(self, mojom_name, attributes=None):
> +    struct = Struct(mojom_name, self, attributes)
> +    self.structs.append(struct)
> +    return struct
> +
> +  def AddUnion(self, mojom_name, attributes=None):
> +    union = Union(mojom_name, self, attributes)
> +    self.unions.append(union)
> +    return union
> +
> +  def Stylize(self, stylizer):
> +    self.namespace = stylizer.StylizeModule(self.mojom_namespace)
> +    for struct in self.structs:
> +      struct.Stylize(stylizer)
> +    for union in self.unions:
> +      union.Stylize(stylizer)
> +    for interface in self.interfaces:
> +      interface.Stylize(stylizer)
> +    for enum in self.enums:
> +      enum.Stylize(stylizer)
> +    for constant in self.constants:
> +      constant.Stylize(stylizer)
> +
> +    for imported_module in self.imports:
> +      imported_module.Stylize(stylizer)
> +
> +  def Dump(self, f):
> +    pickle.dump(self, f, 2)
> +
> +  @classmethod
> +  def Load(cls, f):
> +    result = pickle.load(f)
> +    assert isinstance(result, Module)
> +    return result
> +
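> +# For illustration (names and the path below are hypothetical): a Module
> +# assembled with the Add* helpers can be pickled with Dump() and restored
> +# with Load() in a later run of the parser.
> +#
> +#   module = Module('foo.mojom', 'foo')
> +#   module.AddInterface('Bar').AddMethod('Frobinate', 0)
> +#   with open('/tmp/foo.p', 'wb') as f:
> +#     module.Dump(f)
> +#   with open('/tmp/foo.p', 'rb') as f:
> +#     module = Module.Load(f)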
> +
> +def IsBoolKind(kind):
> +  return kind.spec == BOOL.spec
> +
> +
> +def IsFloatKind(kind):
> +  return kind.spec == FLOAT.spec
> +
> +
> +def IsDoubleKind(kind):
> +  return kind.spec == DOUBLE.spec
> +
> +
> +def IsIntegralKind(kind):
> +  return (kind.spec == BOOL.spec or kind.spec == INT8.spec
> +          or kind.spec == INT16.spec or kind.spec == INT32.spec
> +          or kind.spec == INT64.spec or kind.spec == UINT8.spec
> +          or kind.spec == UINT16.spec or kind.spec == UINT32.spec
> +          or kind.spec == UINT64.spec)
> +
> +
> +def IsStringKind(kind):
> +  return kind.spec == STRING.spec or kind.spec == NULLABLE_STRING.spec
> +
> +
> +def IsGenericHandleKind(kind):
> +  return kind.spec == HANDLE.spec or kind.spec == NULLABLE_HANDLE.spec
> +
> +
> +def IsDataPipeConsumerKind(kind):
> +  return kind.spec == DCPIPE.spec or kind.spec == NULLABLE_DCPIPE.spec
> +
> +
> +def IsDataPipeProducerKind(kind):
> +  return kind.spec == DPPIPE.spec or kind.spec == NULLABLE_DPPIPE.spec
> +
> +
> +def IsMessagePipeKind(kind):
> +  return kind.spec == MSGPIPE.spec or kind.spec == NULLABLE_MSGPIPE.spec
> +
> +
> +def IsSharedBufferKind(kind):
> +  return (kind.spec == SHAREDBUFFER.spec
> +          or kind.spec == NULLABLE_SHAREDBUFFER.spec)
> +
> +
> +def IsPlatformHandleKind(kind):
> +  return (kind.spec == PLATFORMHANDLE.spec
> +          or kind.spec == NULLABLE_PLATFORMHANDLE.spec)
> +
> +
> +def IsStructKind(kind):
> +  return isinstance(kind, Struct)
> +
> +
> +def IsUnionKind(kind):
> +  return isinstance(kind, Union)
> +
> +
> +def IsArrayKind(kind):
> +  return isinstance(kind, Array)
> +
> +
> +def IsInterfaceKind(kind):
> +  return isinstance(kind, Interface)
> +
> +
> +def IsAssociatedInterfaceKind(kind):
> +  return isinstance(kind, AssociatedInterface)
> +
> +
> +def IsInterfaceRequestKind(kind):
> +  return isinstance(kind, InterfaceRequest)
> +
> +
> +def IsAssociatedInterfaceRequestKind(kind):
> +  return isinstance(kind, AssociatedInterfaceRequest)
> +
> +
> +def IsPendingRemoteKind(kind):
> +  return isinstance(kind, PendingRemote)
> +
> +
> +def IsPendingReceiverKind(kind):
> +  return isinstance(kind, PendingReceiver)
> +
> +
> +def IsPendingAssociatedRemoteKind(kind):
> +  return isinstance(kind, PendingAssociatedRemote)
> +
> +
> +def IsPendingAssociatedReceiverKind(kind):
> +  return isinstance(kind, PendingAssociatedReceiver)
> +
> +
> +def IsEnumKind(kind):
> +  return isinstance(kind, Enum)
> +
> +
> +def IsReferenceKind(kind):
> +  return isinstance(kind, ReferenceKind)
> +
> +
> +def IsNullableKind(kind):
> +  return IsReferenceKind(kind) and kind.is_nullable
> +
> +
> +def IsMapKind(kind):
> +  return isinstance(kind, Map)
> +
> +
> +def IsObjectKind(kind):
> +  return IsPointerKind(kind) or IsUnionKind(kind)
> +
> +
> +def IsPointerKind(kind):
> +  return (IsStructKind(kind) or IsArrayKind(kind) or IsStringKind(kind)
> +          or IsMapKind(kind))
> +
> +
> +# Note that this does not include any interface kinds.
> +def IsAnyHandleKind(kind):
> +  return (IsGenericHandleKind(kind) or IsDataPipeConsumerKind(kind)
> +          or IsDataPipeProducerKind(kind) or IsMessagePipeKind(kind)
> +          or IsSharedBufferKind(kind) or IsPlatformHandleKind(kind))
> +
> +
> +def IsAnyInterfaceKind(kind):
> +  return (IsInterfaceKind(kind) or IsInterfaceRequestKind(kind)
> +          or IsAssociatedKind(kind) or IsPendingRemoteKind(kind)
> +          or IsPendingReceiverKind(kind))
> +
> +
> +def IsAnyHandleOrInterfaceKind(kind):
> +  return IsAnyHandleKind(kind) or IsAnyInterfaceKind(kind)
> +
> +
> +def IsAssociatedKind(kind):
> +  return (IsAssociatedInterfaceKind(kind)
> +          or IsAssociatedInterfaceRequestKind(kind)
> +          or IsPendingAssociatedRemoteKind(kind)
> +          or IsPendingAssociatedReceiverKind(kind))
> +
> +
> +def HasCallbacks(interface):
> +  for method in interface.methods:
> +    if method.response_parameters is not None:
> +      return True
> +  return False
> +
> +
> +# Finds out whether an interface passes associated interfaces and associated
> +# interface requests.
> +def PassesAssociatedKinds(interface):
> +  visited_kinds = set()
> +  for method in interface.methods:
> +    if MethodPassesAssociatedKinds(method, visited_kinds):
> +      return True
> +  return False
> +
> +
> +def _AnyMethodParameterRecursive(method, predicate, visited_kinds=None):
> +  def _HasProperty(kind):
> +    if kind in visited_kinds:
> +      # No need to examine the kind again.
> +      return False
> +    visited_kinds.add(kind)
> +    if predicate(kind):
> +      return True
> +    if IsArrayKind(kind):
> +      return _HasProperty(kind.kind)
> +    if IsStructKind(kind) or IsUnionKind(kind):
> +      for field in kind.fields:
> +        if _HasProperty(field.kind):
> +          return True
> +    if IsMapKind(kind):
> +      if _HasProperty(kind.key_kind) or _HasProperty(kind.value_kind):
> +        return True
> +    return False
> +
> +  if visited_kinds is None:
> +    visited_kinds = set()
> +
> +  for param in method.parameters:
> +    if _HasProperty(param.kind):
> +      return True
> +  if method.response_parameters is not None:
> +    for param in method.response_parameters:
> +      if _HasProperty(param.kind):
> +        return True
> +  return False
> +
> +
> +# Finds out whether a method passes associated interfaces and associated
> +# interface requests.
> +def MethodPassesAssociatedKinds(method, visited_kinds=None):
> +  return _AnyMethodParameterRecursive(
> +      method, IsAssociatedKind, visited_kinds=visited_kinds)
> +
> +
> +# Determines whether a method passes interfaces.
> +def MethodPassesInterfaces(method):
> +  return _AnyMethodParameterRecursive(method, IsInterfaceKind)
> +
> +
> +def HasSyncMethods(interface):
> +  for method in interface.methods:
> +    if method.sync:
> +      return True
> +  return False
> +
> +
> +def ContainsHandlesOrInterfaces(kind):
> +  """Check if the kind contains any handles.
> +
> +  This check is recursive so it checks all struct fields, containers elements,
> +  etc.
> +
> +  Args:
> +    struct: {Kind} The kind to check.
> +
> +  Returns:
> +    {bool}: True if the kind contains handles.
> +  """
> +  # We remember the types we already checked to avoid infinite recursion when
> +  # checking recursive (or mutually recursive) types:
> +  checked = set()
> +
> +  def Check(kind):
> +    if kind.spec in checked:
> +      return False
> +    checked.add(kind.spec)
> +    if IsStructKind(kind):
> +      return any(Check(field.kind) for field in kind.fields)
> +    elif IsUnionKind(kind):
> +      return any(Check(field.kind) for field in kind.fields)
> +    elif IsAnyHandleKind(kind):
> +      return True
> +    elif IsAnyInterfaceKind(kind):
> +      return True
> +    elif IsArrayKind(kind):
> +      return Check(kind.kind)
> +    elif IsMapKind(kind):
> +      return Check(kind.key_kind) or Check(kind.value_kind)
> +    else:
> +      return False
> +
> +  return Check(kind)
> +
> +
> +def ContainsNativeTypes(kind):
> +  """Check if the kind contains any native type (struct or enum).
> +
> +  This check is recursive so it checks all struct fields, scoped interface
> +  enums, etc.
> +
> +  Args:
> +    kind: {Kind} The kind to check.
> +
> +  Returns:
> +    {bool}: True if the kind contains native types.
> +  """
> +  # We remember the types we already checked to avoid infinite recursion when
> +  # checking recursive (or mutually recursive) types:
> +  checked = set()
> +
> +  def Check(kind):
> +    if kind.spec in checked:
> +      return False
> +    checked.add(kind.spec)
> +    if IsEnumKind(kind):
> +      return kind.native_only
> +    elif IsStructKind(kind):
> +      if kind.native_only:
> +        return True
> +      if any(enum.native_only for enum in kind.enums):
> +        return True
> +      return any(Check(field.kind) for field in kind.fields)
> +    elif IsUnionKind(kind):
> +      return any(Check(field.kind) for field in kind.fields)
> +    elif IsInterfaceKind(kind):
> +      return any(enum.native_only for enum in kind.enums)
> +    elif IsArrayKind(kind):
> +      return Check(kind.kind)
> +    elif IsMapKind(kind):
> +      return Check(kind.key_kind) or Check(kind.value_kind)
> +    else:
> +      return False
> +
> +  return Check(kind)
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
> new file mode 100644
> index 00000000..e8fd4936
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/module_unittest.py
> @@ -0,0 +1,31 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import sys
> +import unittest
> +
> +from mojom.generate import module as mojom
> +
> +
> +class ModuleTest(unittest.TestCase):
> +  def testNonInterfaceAsInterfaceRequest(self):
> +    """Tests that a non-interface cannot be used for interface requests."""
> +    module = mojom.Module('test_module', 'test_namespace')
> +    struct = mojom.Struct('TestStruct', module=module)
> +    with self.assertRaises(Exception) as e:
> +      mojom.InterfaceRequest(struct)
> +    self.assertEquals(
> +        e.exception.__str__(),
> +        'Interface request requires \'x:TestStruct\' to be an interface.')
> +
> +  def testNonInterfaceAsAssociatedInterface(self):
> +    """Tests that a non-interface type cannot be used for associated interfaces.
> +    """
> +    module = mojom.Module('test_module', 'test_namespace')
> +    struct = mojom.Struct('TestStruct', module=module)
> +    with self.assertRaises(Exception) as e:
> +      mojom.AssociatedInterface(struct)
> +    self.assertEquals(
> +        e.exception.__str__(),
> +        'Associated interface requires \'x:TestStruct\' to be an interface.')
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
> new file mode 100644
> index 00000000..88b77c98
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
> @@ -0,0 +1,258 @@
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from mojom.generate import module as mojom
> +
> +# This module provides a mechanism for determining the packed order and offsets
> +# of a mojom.Struct.
> +#
> +# ps = pack.PackedStruct(struct)
> +# ps.packed_fields will access a list of PackedField objects, each of which
> +# will have an offset, a size and a bit (for mojom.BOOLs).
> +
> +# Size of struct header in bytes: num_bytes [4B] + version [4B].
> +HEADER_SIZE = 8
> +
> +
> +class PackedField(object):
> +  kind_to_size = {
> +      mojom.BOOL: 1,
> +      mojom.INT8: 1,
> +      mojom.UINT8: 1,
> +      mojom.INT16: 2,
> +      mojom.UINT16: 2,
> +      mojom.INT32: 4,
> +      mojom.UINT32: 4,
> +      mojom.FLOAT: 4,
> +      mojom.HANDLE: 4,
> +      mojom.MSGPIPE: 4,
> +      mojom.SHAREDBUFFER: 4,
> +      mojom.PLATFORMHANDLE: 4,
> +      mojom.DCPIPE: 4,
> +      mojom.DPPIPE: 4,
> +      mojom.NULLABLE_HANDLE: 4,
> +      mojom.NULLABLE_MSGPIPE: 4,
> +      mojom.NULLABLE_SHAREDBUFFER: 4,
> +      mojom.NULLABLE_PLATFORMHANDLE: 4,
> +      mojom.NULLABLE_DCPIPE: 4,
> +      mojom.NULLABLE_DPPIPE: 4,
> +      mojom.INT64: 8,
> +      mojom.UINT64: 8,
> +      mojom.DOUBLE: 8,
> +      mojom.STRING: 8,
> +      mojom.NULLABLE_STRING: 8
> +  }
> +
> +  @classmethod
> +  def GetSizeForKind(cls, kind):
> +    if isinstance(kind, (mojom.Array, mojom.Map, mojom.Struct, mojom.Interface,
> +                         mojom.AssociatedInterface, mojom.PendingRemote,
> +                         mojom.PendingAssociatedRemote)):
> +      return 8
> +    if isinstance(kind, mojom.Union):
> +      return 16
> +    if isinstance(kind, (mojom.InterfaceRequest, mojom.PendingReceiver)):
> +      kind = mojom.MSGPIPE
> +    if isinstance(
> +        kind,
> +        (mojom.AssociatedInterfaceRequest, mojom.PendingAssociatedReceiver)):
> +      return 4
> +    if isinstance(kind, mojom.Enum):
> +      # TODO(mpcomplete): what about big enums?
> +      return cls.kind_to_size[mojom.INT32]
> +    if not kind in cls.kind_to_size:
> +      raise Exception("Undefined type: %s. Did you forget to import the file "
> +                      "containing the definition?" % kind.spec)
> +    return cls.kind_to_size[kind]
> +
> +  @classmethod
> +  def GetAlignmentForKind(cls, kind):
> +    if isinstance(kind, (mojom.Interface, mojom.AssociatedInterface,
> +                         mojom.PendingRemote, mojom.PendingAssociatedRemote)):
> +      return 4
> +    if isinstance(kind, mojom.Union):
> +      return 8
> +    return cls.GetSizeForKind(kind)
> +
> +  def __init__(self, field, index, ordinal):
> +    """
> +    Args:
> +      field: the original field.
> +      index: the position of the original field in the struct.
> +      ordinal: the ordinal of the field for serialization.
> +    """
> +    self.field = field
> +    self.index = index
> +    self.ordinal = ordinal
> +    self.size = self.GetSizeForKind(field.kind)
> +    self.alignment = self.GetAlignmentForKind(field.kind)
> +    self.offset = None
> +    self.bit = None
> +    self.min_version = None
> +
> +
> +def GetPad(offset, alignment):
> +  """Returns the pad necessary to reserve space so that |offset + pad| equals to
> +  some multiple of |alignment|."""
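> +  # Illustrative: GetPad(1, 4) == 3 and GetPad(8, 8) == 0, so |offset + pad|
> +  # always lands on a multiple of |alignment|.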
> +  return (alignment - (offset % alignment)) % alignment
> +
> +
> +def GetFieldOffset(field, last_field):
> +  """Returns a 2-tuple of the field offset and bit (for BOOLs)."""
> +  if (field.field.kind == mojom.BOOL and last_field.field.kind == mojom.BOOL
> +      and last_field.bit < 7):
> +    return (last_field.offset, last_field.bit + 1)
> +
> +  offset = last_field.offset + last_field.size
> +  pad = GetPad(offset, field.alignment)
> +  return (offset + pad, 0)
> +
> +
> +def GetPayloadSizeUpToField(field):
> +  """Returns the payload size (not including struct header) if |field| is the
> +  last field.
> +  """
> +  if not field:
> +    return 0
> +  offset = field.offset + field.size
> +  pad = GetPad(offset, 8)
> +  return offset + pad
> +
> +
> +class PackedStruct(object):
> +  def __init__(self, struct):
> +    self.struct = struct
> +    # |packed_fields| contains all the fields, in increasing offset order.
> +    self.packed_fields = []
> +    # |packed_fields_in_ordinal_order| refers to the same fields as
> +    # |packed_fields|, but in ordinal order.
> +    self.packed_fields_in_ordinal_order = []
> +
> +    # No fields.
> +    if (len(struct.fields) == 0):
> +      return
> +
> +    # Start by sorting by ordinal.
> +    src_fields = self.packed_fields_in_ordinal_order
> +    ordinal = 0
> +    for index, field in enumerate(struct.fields):
> +      if field.ordinal is not None:
> +        ordinal = field.ordinal
> +      src_fields.append(PackedField(field, index, ordinal))
> +      ordinal += 1
> +    src_fields.sort(key=lambda field: field.ordinal)
> +
> +    # Set |min_version| for each field.
> +    next_min_version = 0
> +    for packed_field in src_fields:
> +      if packed_field.field.min_version is None:
> +        assert next_min_version == 0
> +      else:
> +        assert packed_field.field.min_version >= next_min_version
> +        next_min_version = packed_field.field.min_version
> +      packed_field.min_version = next_min_version
> +
> +      if (packed_field.min_version != 0
> +          and mojom.IsReferenceKind(packed_field.field.kind)
> +          and not packed_field.field.kind.is_nullable):
> +        raise Exception("Non-nullable fields are only allowed in version 0 of "
> +                        "a struct. %s.%s is defined with [MinVersion=%d]." %
> +                        (self.struct.name, packed_field.field.name,
> +                         packed_field.min_version))
> +
> +    src_field = src_fields[0]
> +    src_field.offset = 0
> +    src_field.bit = 0
> +    dst_fields = self.packed_fields
> +    dst_fields.append(src_field)
> +
> +    # Then find the first slot into which each field will fit.
> +    for src_field in src_fields[1:]:
> +      last_field = dst_fields[0]
> +      for i in range(1, len(dst_fields)):
> +        next_field = dst_fields[i]
> +        offset, bit = GetFieldOffset(src_field, last_field)
> +        if offset + src_field.size <= next_field.offset:
> +          # Found hole.
> +          src_field.offset = offset
> +          src_field.bit = bit
> +          dst_fields.insert(i, src_field)
> +          break
> +        last_field = next_field
> +      if src_field.offset is None:
> +        # Add to end
> +        src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
> +        dst_fields.append(src_field)
> +
> +
> +class ByteInfo(object):
> +  def __init__(self):
> +    self.is_padding = False
> +    self.packed_fields = []
> +
> +
> +def GetByteLayout(packed_struct):
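> +  # Illustrative: for a packed struct holding an INT8 at offset 0 and an INT32
> +  # at offset 4, bytes 1-3 are marked as padding and byte 4 records the INT32.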
> +  total_payload_size = GetPayloadSizeUpToField(
> +      packed_struct.packed_fields[-1] if packed_struct.packed_fields else None)
> +  byte_info = [ByteInfo() for i in range(total_payload_size)]
> +
> +  limit_of_previous_field = 0
> +  for packed_field in packed_struct.packed_fields:
> +    for i in range(limit_of_previous_field, packed_field.offset):
> +      byte_info[i].is_padding = True
> +    byte_info[packed_field.offset].packed_fields.append(packed_field)
> +    limit_of_previous_field = packed_field.offset + packed_field.size
> +
> +  for i in range(limit_of_previous_field, len(byte_info)):
> +    byte_info[i].is_padding = True
> +
> +  for byte in byte_info:
> +    # A given byte cannot both be padding and have fields packed into it.
> +    assert not (byte.is_padding and byte.packed_fields)
> +
> +  return byte_info
> +
> +
> +class VersionInfo(object):
> +  def __init__(self, version, num_fields, num_bytes):
> +    self.version = version
> +    self.num_fields = num_fields
> +    self.num_bytes = num_bytes
> +
> +
> +def GetVersionInfo(packed_struct):
> +  """Get version information for a struct.
> +
> +  Args:
> +    packed_struct: A PackedStruct instance.
> +
> +  Returns:
> +    A non-empty list of VersionInfo instances, sorted by version in increasing
> +    order.
> +    Note: The version numbers may not be consecutive.
> +  """
> +  versions = []
> +  last_version = 0
> +  last_num_fields = 0
> +  last_payload_size = 0
> +
> +  for packed_field in packed_struct.packed_fields_in_ordinal_order:
> +    if packed_field.min_version != last_version:
> +      versions.append(
> +          VersionInfo(last_version, last_num_fields,
> +                      last_payload_size + HEADER_SIZE))
> +      last_version = packed_field.min_version
> +
> +    last_num_fields += 1
> +    # The fields are iterated in ordinal order here. However, the size of a
> +    # version is determined by the last field of that version in pack order,
> +    # instead of ordinal order. Therefore, we need to calculate the max value.
> +    last_payload_size = max(
> +        GetPayloadSizeUpToField(packed_field), last_payload_size)
> +
> +  assert len(versions) == 0 or last_num_fields != versions[-1].num_fields
> +  versions.append(
> +      VersionInfo(last_version, last_num_fields,
> +                  last_payload_size + HEADER_SIZE))
> +  return versions
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
> new file mode 100644
> index 00000000..98c705ad
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/pack_unittest.py
> @@ -0,0 +1,225 @@
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import sys
> +import unittest
> +
> +from mojom.generate import module as mojom
> +from mojom.generate import pack
> +
> +
> +class PackTest(unittest.TestCase):
> +  def testOrdinalOrder(self):
> +    struct = mojom.Struct('test')
> +    struct.AddField('testfield1', mojom.INT32, 2)
> +    struct.AddField('testfield2', mojom.INT32, 1)
> +    ps = pack.PackedStruct(struct)
> +
> +    self.assertEqual(2, len(ps.packed_fields))
> +    self.assertEqual('testfield2', ps.packed_fields[0].field.mojom_name)
> +    self.assertEqual('testfield1', ps.packed_fields[1].field.mojom_name)
> +
> +  def testZeroFields(self):
> +    struct = mojom.Struct('test')
> +    ps = pack.PackedStruct(struct)
> +    self.assertEqual(0, len(ps.packed_fields))
> +
> +  def testOneField(self):
> +    struct = mojom.Struct('test')
> +    struct.AddField('testfield1', mojom.INT8)
> +    ps = pack.PackedStruct(struct)
> +    self.assertEqual(1, len(ps.packed_fields))
> +
> +  def _CheckPackSequence(self, kinds, fields, offsets):
> +    """Checks the pack order and offsets of a sequence of mojom.Kinds.
> +
> +    Args:
> +      kinds: A sequence of mojom.Kinds that specify the fields that are to be
> +          created, named '1', '2', ... in declaration order.
> +      fields: The expected pack order of the resulting fields, given by their
> +          1-based declaration indices.
> +      offsets: The expected offset of each field, in the same pack order.
> +    """
> +    struct = mojom.Struct('test')
> +    index = 1
> +    for kind in kinds:
> +      struct.AddField('%d' % index, kind)
> +      index += 1
> +    ps = pack.PackedStruct(struct)
> +    num_fields = len(ps.packed_fields)
> +    self.assertEqual(len(kinds), num_fields)
> +    for i in range(num_fields):
> +      self.assertEqual('%d' % fields[i], ps.packed_fields[i].field.mojom_name)
> +      self.assertEqual(offsets[i], ps.packed_fields[i].offset)
> +
> +  def testPaddingPackedInOrder(self):
> +    return self._CheckPackSequence((mojom.INT8, mojom.UINT8, mojom.INT32),
> +                                   (1, 2, 3), (0, 1, 4))
> +
> +  def testPaddingPackedOutOfOrder(self):
> +    return self._CheckPackSequence((mojom.INT8, mojom.INT32, mojom.UINT8),
> +                                   (1, 3, 2), (0, 1, 4))
> +
> +  def testPaddingPackedOverflow(self):
> +    kinds = (mojom.INT8, mojom.INT32, mojom.INT16, mojom.INT8, mojom.INT8)
> +    # Two of the 1-byte fields should be packed together first, followed by
> +    # the short, then by the int.
> +    fields = (1, 4, 3, 2, 5)
> +    offsets = (0, 1, 2, 4, 8)
> +    return self._CheckPackSequence(kinds, fields, offsets)
> +
> +  def testNullableTypes(self):
> +    kinds = (mojom.STRING.MakeNullableKind(), mojom.HANDLE.MakeNullableKind(),
> +             mojom.Struct('test_struct').MakeNullableKind(),
> +             mojom.DCPIPE.MakeNullableKind(), mojom.Array().MakeNullableKind(),
> +             mojom.DPPIPE.MakeNullableKind(),
> +             mojom.Array(length=5).MakeNullableKind(),
> +             mojom.MSGPIPE.MakeNullableKind(),
> +             mojom.Interface('test_interface').MakeNullableKind(),
> +             mojom.SHAREDBUFFER.MakeNullableKind(),
> +             mojom.InterfaceRequest().MakeNullableKind())
> +    fields = (1, 2, 4, 3, 5, 6, 8, 7, 9, 10, 11)
> +    offsets = (0, 8, 12, 16, 24, 32, 36, 40, 48, 56, 60)
> +    return self._CheckPackSequence(kinds, fields, offsets)
> +
> +  def testAllTypes(self):
> +    return self._CheckPackSequence(
> +        (mojom.BOOL, mojom.INT8, mojom.STRING, mojom.UINT8, mojom.INT16,
> +         mojom.DOUBLE, mojom.UINT16, mojom.INT32, mojom.UINT32, mojom.INT64,
> +         mojom.FLOAT, mojom.STRING, mojom.HANDLE, mojom.UINT64,
> +         mojom.Struct('test'), mojom.Array(), mojom.STRING.MakeNullableKind()),
> +        (1, 2, 4, 5, 7, 3, 6, 8, 9, 10, 11, 13, 12, 14, 15, 16, 17, 18),
> +        (0, 1, 2, 4, 6, 8, 16, 24, 28, 32, 40, 44, 48, 56, 64, 72, 80, 88))
> +
> +  def testPaddingPackedOutOfOrderByOrdinal(self):
> +    struct = mojom.Struct('test')
> +    struct.AddField('testfield1', mojom.INT8)
> +    struct.AddField('testfield3', mojom.UINT8, 3)
> +    struct.AddField('testfield2', mojom.INT32, 2)
> +    ps = pack.PackedStruct(struct)
> +    self.assertEqual(3, len(ps.packed_fields))
> +
> +    # Second byte should be packed in behind first, altering order.
> +    self.assertEqual('testfield1', ps.packed_fields[0].field.mojom_name)
> +    self.assertEqual('testfield3', ps.packed_fields[1].field.mojom_name)
> +    self.assertEqual('testfield2', ps.packed_fields[2].field.mojom_name)
> +
> +    # Second byte should be packed with first.
> +    self.assertEqual(0, ps.packed_fields[0].offset)
> +    self.assertEqual(1, ps.packed_fields[1].offset)
> +    self.assertEqual(4, ps.packed_fields[2].offset)
> +
> +  def testBools(self):
> +    struct = mojom.Struct('test')
> +    struct.AddField('bit0', mojom.BOOL)
> +    struct.AddField('bit1', mojom.BOOL)
> +    struct.AddField('int', mojom.INT32)
> +    struct.AddField('bit2', mojom.BOOL)
> +    struct.AddField('bit3', mojom.BOOL)
> +    struct.AddField('bit4', mojom.BOOL)
> +    struct.AddField('bit5', mojom.BOOL)
> +    struct.AddField('bit6', mojom.BOOL)
> +    struct.AddField('bit7', mojom.BOOL)
> +    struct.AddField('bit8', mojom.BOOL)
> +    ps = pack.PackedStruct(struct)
> +    self.assertEqual(10, len(ps.packed_fields))
> +
> +    # First 8 bits packed together.
> +    for i in range(8):
> +      pf = ps.packed_fields[i]
> +      self.assertEqual(0, pf.offset)
> +      self.assertEqual("bit%d" % i, pf.field.mojom_name)
> +      self.assertEqual(i, pf.bit)
> +
> +    # Ninth bit goes into second byte.
> +    self.assertEqual("bit8", ps.packed_fields[8].field.mojom_name)
> +    self.assertEqual(1, ps.packed_fields[8].offset)
> +    self.assertEqual(0, ps.packed_fields[8].bit)
> +
> +    # int comes last.
> +    self.assertEqual("int", ps.packed_fields[9].field.mojom_name)
> +    self.assertEqual(4, ps.packed_fields[9].offset)
> +
> +  def testMinVersion(self):
> +    """Tests that |min_version| is properly set for packed fields."""
> +    struct = mojom.Struct('test')
> +    struct.AddField('field_2', mojom.BOOL, 2)
> +    struct.AddField('field_0', mojom.INT32, 0)
> +    struct.AddField('field_1', mojom.INT64, 1)
> +    ps = pack.PackedStruct(struct)
> +
> +    self.assertEqual('field_0', ps.packed_fields[0].field.mojom_name)
> +    self.assertEqual('field_2', ps.packed_fields[1].field.mojom_name)
> +    self.assertEqual('field_1', ps.packed_fields[2].field.mojom_name)
> +
> +    self.assertEqual(0, ps.packed_fields[0].min_version)
> +    self.assertEqual(0, ps.packed_fields[1].min_version)
> +    self.assertEqual(0, ps.packed_fields[2].min_version)
> +
> +    struct.fields[0].attributes = {'MinVersion': 1}
> +    ps = pack.PackedStruct(struct)
> +
> +    self.assertEqual(0, ps.packed_fields[0].min_version)
> +    self.assertEqual(1, ps.packed_fields[1].min_version)
> +    self.assertEqual(0, ps.packed_fields[2].min_version)
> +
> +  def testGetVersionInfoEmptyStruct(self):
> +    """Tests that pack.GetVersionInfo() never returns an empty list, even for
> +    empty structs.
> +    """
> +    struct = mojom.Struct('test')
> +    ps = pack.PackedStruct(struct)
> +
> +    versions = pack.GetVersionInfo(ps)
> +    self.assertEqual(1, len(versions))
> +    self.assertEqual(0, versions[0].version)
> +    self.assertEqual(0, versions[0].num_fields)
> +    self.assertEqual(8, versions[0].num_bytes)
> +
> +  def testGetVersionInfoComplexOrder(self):
> +    """Tests pack.GetVersionInfo() using a struct whose definition order,
> +    ordinal order and pack order for fields are all different.
> +    """
> +    struct = mojom.Struct('test')
> +    struct.AddField(
> +        'field_3', mojom.BOOL, ordinal=3, attributes={'MinVersion': 3})
> +    struct.AddField('field_0', mojom.INT32, ordinal=0)
> +    struct.AddField(
> +        'field_1', mojom.INT64, ordinal=1, attributes={'MinVersion': 2})
> +    struct.AddField(
> +        'field_2', mojom.INT64, ordinal=2, attributes={'MinVersion': 3})
> +    ps = pack.PackedStruct(struct)
> +
> +    versions = pack.GetVersionInfo(ps)
> +    self.assertEqual(3, len(versions))
> +
> +    self.assertEqual(0, versions[0].version)
> +    self.assertEqual(1, versions[0].num_fields)
> +    self.assertEqual(16, versions[0].num_bytes)
> +
> +    self.assertEqual(2, versions[1].version)
> +    self.assertEqual(2, versions[1].num_fields)
> +    self.assertEqual(24, versions[1].num_bytes)
> +
> +    self.assertEqual(3, versions[2].version)
> +    self.assertEqual(4, versions[2].num_fields)
> +    self.assertEqual(32, versions[2].num_bytes)
> +
> +  def testInterfaceAlignment(self):
> +    """Tests that interfaces are aligned on 4-byte boundaries, although the size
> +    of an interface is 8 bytes.
> +    """
> +    kinds = (mojom.INT32, mojom.Interface('test_interface'))
> +    fields = (1, 2)
> +    offsets = (0, 4)
> +    self._CheckPackSequence(kinds, fields, offsets)
> +
> +  def testAssociatedInterfaceAlignment(self):
> +    """Tests that associated interfaces are aligned on 4-byte boundaries,
> +    although the size of an associated interface is 8 bytes.
> +    """
> +    kinds = (mojom.INT32,
> +             mojom.AssociatedInterface(mojom.Interface('test_interface')))
> +    fields = (1, 2)
> +    offsets = (0, 4)
> +    self._CheckPackSequence(kinds, fields, offsets)
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
> new file mode 100644
> index 00000000..7a300560
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/template_expander.py
> @@ -0,0 +1,83 @@
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +# Based on third_party/WebKit/Source/build/scripts/template_expander.py.
> +
> +import os.path
> +import sys
> +
> +from mojom import fileutil
> +
> +fileutil.AddLocalRepoThirdPartyDirToModulePath()
> +import jinja2
> +
> +
> +def ApplyTemplate(mojo_generator, path_to_template, params, **kwargs):
> +  loader = jinja2.ModuleLoader(
> +      os.path.join(mojo_generator.bytecode_path,
> +                   "%s.zip" % mojo_generator.GetTemplatePrefix()))
> +  final_kwargs = dict(mojo_generator.GetJinjaParameters())
> +  final_kwargs.update(kwargs)
> +
> +  jinja_env = jinja2.Environment(
> +      loader=loader, keep_trailing_newline=True, **final_kwargs)
> +  jinja_env.globals.update(mojo_generator.GetGlobals())
> +  jinja_env.filters.update(mojo_generator.GetFilters())
> +  template = jinja_env.get_template(path_to_template)
> +  return template.render(params)
> +
> +
> +def UseJinja(path_to_template, **kwargs):
> +  def RealDecorator(generator):
> +    def GeneratorInternal(*args, **kwargs2):
> +      parameters = generator(*args, **kwargs2)
> +      return ApplyTemplate(args[0], path_to_template, parameters, **kwargs)
> +
> +    GeneratorInternal.__name__ = generator.__name__
> +    return GeneratorInternal
> +
> +  return RealDecorator
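> +
> +# Illustrative usage (method and template names below are hypothetical):
> +#
> +#   @UseJinja('module.h.tmpl')
> +#   def GenerateModuleHeader(self):
> +#     return {'module': self.module}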
> +
> +
> +def ApplyImportedTemplate(mojo_generator, path_to_template, filename, params,
> +                          **kwargs):
> +  loader = jinja2.FileSystemLoader(searchpath=path_to_template)
> +  final_kwargs = dict(mojo_generator.GetJinjaParameters())
> +  final_kwargs.update(kwargs)
> +
> +  jinja_env = jinja2.Environment(
> +      loader=loader, keep_trailing_newline=True, **final_kwargs)
> +  jinja_env.globals.update(mojo_generator.GetGlobals())
> +  jinja_env.filters.update(mojo_generator.GetFilters())
> +  template = jinja_env.get_template(filename)
> +  return template.render(params)
> +
> +
> +def UseJinjaForImportedTemplate(func):
> +  def wrapper(*args, **kwargs):
> +    parameters = func(*args, **kwargs)
> +    path_to_template = args[1]
> +    filename = args[2]
> +    return ApplyImportedTemplate(args[0], path_to_template, filename,
> +                                 parameters)
> +
> +  wrapper.__name__ = func.__name__
> +  return wrapper
> +
> +
> +def PrecompileTemplates(generator_modules, output_dir):
> +  for module in generator_modules.values():
> +    generator = module.Generator(None)
> +    jinja_env = jinja2.Environment(
> +        loader=jinja2.FileSystemLoader([
> +            os.path.join(
> +                os.path.dirname(module.__file__), generator.GetTemplatePrefix())
> +        ]))
> +    jinja_env.filters.update(generator.GetFilters())
> +    jinja_env.compile_templates(
> +        os.path.join(output_dir, "%s.zip" % generator.GetTemplatePrefix()),
> +        extensions=["tmpl"],
> +        zip="stored",
> +        py_compile=True,
> +        ignore_errors=False)
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
> new file mode 100644
> index 00000000..d6df3ca6
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
> @@ -0,0 +1,854 @@
> +# Copyright 2013 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Convert parse tree to AST.
> +
> +This module converts the parse tree to the AST we use for code generation. The
> +main entry point is OrderedModule, which gets passed the parser
> +representation of a mojom file. By the time it is called, all imports are
> +assumed to have already been parsed and converted to ASTs.
> +"""
> +
> +import itertools
> +import os
> +import re
> +import sys
> +
> +from mojom.generate import generator
> +from mojom.generate import module as mojom
> +from mojom.parse import ast
> +
> +
> +def _IsStrOrUnicode(x):
> +  if sys.version_info[0] < 3:
> +    return isinstance(x, (unicode, str))
> +  return isinstance(x, str)
> +
> +
> +def _DuplicateName(values):
> +  """Returns the 'mojom_name' of the first entry in |values| whose 'mojom_name'
> +  has already been encountered. If there are no duplicates, returns None."""
> +  names = set()
> +  for value in values:
> +    if value.mojom_name in names:
> +      return value.mojom_name
> +    names.add(value.mojom_name)
> +  return None
> +
> +
> +def _ElemsOfType(elems, elem_type, scope):
> +  """Find all elements of the given type.
> +
> +  Args:
> +    elems: {Sequence[Any]} Sequence of elems.
> +    elem_type: {Type[C]} Extract all elems of this type.
> +    scope: {str} The name of the surrounding scope (e.g. struct
> +        definition). Used in error messages.
> +
> +  Returns:
> +    {List[C]} All elems of matching type.
> +  """
> +  assert isinstance(elem_type, type)
> +  result = [elem for elem in elems if isinstance(elem, elem_type)]
> +  duplicate_name = _DuplicateName(result)
> +  if duplicate_name:
> +    raise Exception('Names in mojom must be unique within a scope. The name '
> +                    '"%s" is used more than once within the scope "%s".' %
> +                    (duplicate_name, scope))
> +  return result
> +
> +
> +def _ProcessElements(scope, elements, operations_by_type):
> +  """Iterates over the given elements, running a function from
> +  operations_by_type for any element that matches a key in that dict. The scope
> +  is the name of the surrounding scope, such as a filename or struct name, used
> +  only in error messages."""
> +  names_in_this_scope = set()
> +  for element in elements:
> +    # pylint: disable=unidiomatic-typecheck
> +    element_type = type(element)
> +    if element_type in operations_by_type:
> +      if element.mojom_name in names_in_this_scope:
> +        raise Exception('Names must be unique within a scope. The name "%s" is '
> +                        'used more than once within the scope "%s".' %
> +                        (element.mojom_name, scope))
> +      names_in_this_scope.add(element.mojom_name)
> +      operations_by_type[element_type](element)
> +
> +
> +def _MapKind(kind):
> +  map_to_kind = {
> +      'bool': 'b',
> +      'int8': 'i8',
> +      'int16': 'i16',
> +      'int32': 'i32',
> +      'int64': 'i64',
> +      'uint8': 'u8',
> +      'uint16': 'u16',
> +      'uint32': 'u32',
> +      'uint64': 'u64',
> +      'float': 'f',
> +      'double': 'd',
> +      'string': 's',
> +      'handle': 'h',
> +      'handle<data_pipe_consumer>': 'h:d:c',
> +      'handle<data_pipe_producer>': 'h:d:p',
> +      'handle<message_pipe>': 'h:m',
> +      'handle<shared_buffer>': 'h:s',
> +      'handle<platform>': 'h:p'
> +  }
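> +  # Illustrative results (see translate_unittest.py for these cases):
> +  #   _MapKind('int32[]') -> 'a:i32'
> +  #   _MapKind('uint8{string}') -> 'm[s][u8]'
> +  #   _MapKind('asso<SomeInterface>?') -> '?asso:x:SomeInterface'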
> +  if kind.endswith('?'):
> +    base_kind = _MapKind(kind[0:-1])
> +    # NOTE: This doesn't rule out enum types. Those will be detected later, when
> +    # cross-references are resolved.
> +    reference_kinds = ('m', 's', 'h', 'a', 'r', 'x', 'asso', 'rmt', 'rcv',
> +                       'rma', 'rca')
> +    if re.split('[^a-z]', base_kind, 1)[0] not in reference_kinds:
> +      raise Exception('A type (spec "%s") cannot be made nullable' % base_kind)
> +    return '?' + base_kind
> +  if kind.endswith('}'):
> +    lbracket = kind.rfind('{')
> +    value = kind[0:lbracket]
> +    return 'm[' + _MapKind(kind[lbracket + 1:-1]) + '][' + _MapKind(value) + ']'
> +  if kind.endswith(']'):
> +    lbracket = kind.rfind('[')
> +    typename = kind[0:lbracket]
> +    return 'a' + kind[lbracket + 1:-1] + ':' + _MapKind(typename)
> +  if kind.endswith('&'):
> +    return 'r:' + _MapKind(kind[0:-1])
> +  if kind.startswith('asso<'):
> +    assert kind.endswith('>')
> +    return 'asso:' + _MapKind(kind[5:-1])
> +  if kind.startswith('rmt<'):
> +    assert kind.endswith('>')
> +    return 'rmt:' + _MapKind(kind[4:-1])
> +  if kind.startswith('rcv<'):
> +    assert kind.endswith('>')
> +    return 'rcv:' + _MapKind(kind[4:-1])
> +  if kind.startswith('rma<'):
> +    assert kind.endswith('>')
> +    return 'rma:' + _MapKind(kind[4:-1])
> +  if kind.startswith('rca<'):
> +    assert kind.endswith('>')
> +    return 'rca:' + _MapKind(kind[4:-1])
> +  if kind in map_to_kind:
> +    return map_to_kind[kind]
> +  return 'x:' + kind
> +
> +
> +def _AttributeListToDict(attribute_list):
> +  if attribute_list is None:
> +    return None
> +  assert isinstance(attribute_list, ast.AttributeList)
> +  # TODO(vtl): Check for duplicate keys here.
> +  return dict(
> +      [(attribute.key, attribute.value) for attribute in attribute_list])
> +
> +
> +builtin_values = frozenset([
> +    "double.INFINITY", "double.NEGATIVE_INFINITY", "double.NAN",
> +    "float.INFINITY", "float.NEGATIVE_INFINITY", "float.NAN"
> +])
> +
> +
> +def _IsBuiltinValue(value):
> +  return value in builtin_values
> +
> +
> +def _LookupKind(kinds, spec, scope):
> +  """Tries to find which Kind a spec refers to, given the scope in which its
> +  referenced. Starts checking from the narrowest scope to most general. For
> +  example, given a struct field like
> +    Foo.Bar x;
> +  Foo.Bar could refer to the type 'Bar' in the 'Foo' namespace, or an inner
> +  type 'Bar' in the struct 'Foo' in the current namespace.
> +
> +  |scope| is a tuple that looks like (namespace, struct/interface), referring
> +  to the location where the type is referenced."""
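> +  # Illustrative: with scope ('foo', 'Foo'), the spec 'x:Bar' is looked up as
> +  # 'x:foo.Foo.Bar', then 'x:foo.Bar', then 'x:Bar'.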
> +  if spec.startswith('x:'):
> +    mojom_name = spec[2:]
> +    for i in range(len(scope), -1, -1):
> +      test_spec = 'x:'
> +      if i > 0:
> +        test_spec += '.'.join(scope[:i]) + '.'
> +      test_spec += mojom_name
> +      kind = kinds.get(test_spec)
> +      if kind:
> +        return kind
> +
> +  return kinds.get(spec)
> +
> +
> +def _GetScopeForKind(module, kind):
> +  """For a given kind, returns a tuple of progressively more specific names
> +  used to qualify the kind. For example if kind is an enum named Bar nested in a
> +  struct Foo within module 'foo', this would return ('foo', 'Foo', 'Bar')"""
> +  if isinstance(kind, mojom.Enum) and kind.parent_kind:
> +    # Enums may be nested in other kinds.
> +    return _GetScopeForKind(module, kind.parent_kind) + (kind.mojom_name, )
> +
> +  module_fragment = (module.mojom_namespace, ) if module.mojom_namespace else ()
> +  kind_fragment = (kind.mojom_name, ) if kind else ()
> +  return module_fragment + kind_fragment
> +
> +
> +def _LookupValueInScope(module, kind, identifier):
> +  """Given a kind and an identifier, this attempts to resolve the given
> +  identifier to a concrete NamedValue within the scope of the given kind."""
> +  scope = _GetScopeForKind(module, kind)
> +  for i in reversed(range(len(scope) + 1)):
> +    qualified_name = '.'.join(scope[:i] + (identifier, ))
> +    value = module.values.get(qualified_name)
> +    if value:
> +      return value
> +  return None
> +
> +
> +def _LookupValue(module, parent_kind, implied_kind, ast_leaf_node):
> +  """Resolves a leaf node in the form ('IDENTIFIER', 'x') to a constant value
> +  identified by 'x' in some mojom definition. parent_kind is used as context
> +  when resolving the identifier. If the given leaf node is not an IDENTIFIER
> +  (e.g. already a constant value), it is returned as-is.
> +
> +  If implied_kind is provided, the parsed identifier may also be resolved within
> +  its scope as fallback. This can be useful for more concise value references
> +  when assigning enum-typed constants or field values."""
> +  if not isinstance(ast_leaf_node, tuple) or ast_leaf_node[0] != 'IDENTIFIER':
> +    return ast_leaf_node
> +
> +  # First look for a known user-defined identifier to resolve this within the
> +  # enclosing scope.
> +  identifier = ast_leaf_node[1]
> +
> +  value = _LookupValueInScope(module, parent_kind, identifier)
> +  if value:
> +    return value
> +
> +  # Next look in the scope of implied_kind, if provided.
> +  value = (implied_kind and implied_kind.module and _LookupValueInScope(
> +      implied_kind.module, implied_kind, identifier))
> +  if value:
> +    return value
> +
> +  # Fall back on defined builtin symbols
> +  if _IsBuiltinValue(identifier):
> +    return mojom.BuiltinValue(identifier)
> +
> +  raise ValueError('Unknown identifier %s' % identifier)
> +
> +
> +def _Kind(kinds, spec, scope):
> +  """Convert a type name into a mojom.Kind object.
> +
> +  As a side-effect this function adds the result to 'kinds'.
> +
> +  Args:
> +    kinds: {Dict[str, mojom.Kind]} All known kinds up to this point, indexed by
> +        their names.
> +    spec: {str} A name uniquely identifying a type.
> +    scope: {Tuple[str, str]} A tuple that looks like (namespace,
> +        struct/interface), referring to the location where the type is
> +        referenced.
> +
> +  Returns:
> +    {mojom.Kind} The type corresponding to 'spec'.
> +  """
> +  kind = _LookupKind(kinds, spec, scope)
> +  if kind:
> +    return kind
> +
> +  if spec.startswith('?'):
> +    kind = _Kind(kinds, spec[1:], scope).MakeNullableKind()
> +  elif spec.startswith('a:'):
> +    kind = mojom.Array(_Kind(kinds, spec[2:], scope))
> +  elif spec.startswith('asso:'):
> +    inner_kind = _Kind(kinds, spec[5:], scope)
> +    if isinstance(inner_kind, mojom.InterfaceRequest):
> +      kind = mojom.AssociatedInterfaceRequest(inner_kind)
> +    else:
> +      kind = mojom.AssociatedInterface(inner_kind)
> +  elif spec.startswith('a'):
> +    colon = spec.find(':')
> +    length = int(spec[1:colon])
> +    kind = mojom.Array(_Kind(kinds, spec[colon + 1:], scope), length)
> +  elif spec.startswith('r:'):
> +    kind = mojom.InterfaceRequest(_Kind(kinds, spec[2:], scope))
> +  elif spec.startswith('rmt:'):
> +    kind = mojom.PendingRemote(_Kind(kinds, spec[4:], scope))
> +  elif spec.startswith('rcv:'):
> +    kind = mojom.PendingReceiver(_Kind(kinds, spec[4:], scope))
> +  elif spec.startswith('rma:'):
> +    kind = mojom.PendingAssociatedRemote(_Kind(kinds, spec[4:], scope))
> +  elif spec.startswith('rca:'):
> +    kind = mojom.PendingAssociatedReceiver(_Kind(kinds, spec[4:], scope))
> +  elif spec.startswith('m['):
> +    # Isolate the two types from their brackets.
> +
> +    # It is not allowed to use a map as a key, so there shouldn't be nested
> +    # ']'s inside the key type spec.
> +    key_end = spec.find(']')
> +    assert key_end != -1 and key_end < len(spec) - 1
> +    assert spec[key_end + 1] == '[' and spec[-1] == ']'
> +
> +    first_kind = spec[2:key_end]
> +    second_kind = spec[key_end + 2:-1]
> +
> +    kind = mojom.Map(
> +        _Kind(kinds, first_kind, scope), _Kind(kinds, second_kind, scope))
> +  else:
> +    kind = mojom.Kind(spec)
> +
> +  kinds[spec] = kind
> +  return kind
> +
> +
> +def _Import(module, import_module):
> +  # Copy the struct kinds from our imports into the current module.
> +  importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface)
> +  for kind in import_module.kinds.values():
> +    if (isinstance(kind, importable_kinds)
> +        and kind.module.path == import_module.path):
> +      module.kinds[kind.spec] = kind
> +  # Ditto for values.
> +  for value in import_module.values.values():
> +    if value.module.path == import_module.path:
> +      module.values[value.GetSpec()] = value
> +
> +  return import_module
> +
> +
> +def _Struct(module, parsed_struct):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_struct: {ast.Struct} Parsed struct.
> +
> +  Returns:
> +    {mojom.Struct} AST struct.
> +  """
> +  struct = mojom.Struct(module=module)
> +  struct.mojom_name = parsed_struct.mojom_name
> +  struct.native_only = parsed_struct.body is None
> +  struct.spec = 'x:' + module.GetNamespacePrefix() + struct.mojom_name
> +  module.kinds[struct.spec] = struct
> +  struct.enums = []
> +  struct.constants = []
> +  struct.fields_data = []
> +  if not struct.native_only:
> +    _ProcessElements(
> +        parsed_struct.mojom_name, parsed_struct.body, {
> +            ast.Enum:
> +            lambda enum: struct.enums.append(_Enum(module, enum, struct)),
> +            ast.Const:
> +            lambda const: struct.constants.append(
> +                _Constant(module, const, struct)),
> +            ast.StructField:
> +            struct.fields_data.append,
> +        })
> +
> +  struct.attributes = _AttributeListToDict(parsed_struct.attribute_list)
> +
> +  # Enforce that a [Native] attribute is set to make native-only struct
> +  # declarations more explicit.
> +  if struct.native_only:
> +    if not struct.attributes or not struct.attributes.get('Native', False):
> +      raise Exception("Native-only struct declarations must include a " +
> +                      "Native attribute.")
> +
> +  if struct.attributes and struct.attributes.get('CustomSerializer', False):
> +    struct.custom_serializer = True
> +
> +  return struct
> +
> +
> +def _Union(module, parsed_union):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_union: {ast.Union} Parsed union.
> +
> +  Returns:
> +    {mojom.Union} AST union.
> +  """
> +  union = mojom.Union(module=module)
> +  union.mojom_name = parsed_union.mojom_name
> +  union.spec = 'x:' + module.GetNamespacePrefix() + union.mojom_name
> +  module.kinds[union.spec] = union
> +  # Stash the fields of parsed_union here temporarily.
> +  union.fields_data = []
> +  _ProcessElements(parsed_union.mojom_name, parsed_union.body,
> +                   {ast.UnionField: union.fields_data.append})
> +  union.attributes = _AttributeListToDict(parsed_union.attribute_list)
> +  return union
> +
> +
> +def _StructField(module, parsed_field, struct):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_field: {ast.StructField} Parsed struct field.
> +    struct: {mojom.Struct} Struct this field belongs to.
> +
> +  Returns:
> +    {mojom.StructField} AST struct field.
> +  """
> +  field = mojom.StructField()
> +  field.mojom_name = parsed_field.mojom_name
> +  field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename),
> +                     (module.mojom_namespace, struct.mojom_name))
> +  field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
> +  field.default = _LookupValue(module, struct, field.kind,
> +                               parsed_field.default_value)
> +  field.attributes = _AttributeListToDict(parsed_field.attribute_list)
> +  return field
> +
> +
> +def _UnionField(module, parsed_field, union):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_field: {ast.UnionField} Parsed union field.
> +    union: {mojom.Union} Union this field belongs to.
> +
> +  Returns:
> +    {mojom.UnionField} AST union field.
> +  """
> +  field = mojom.UnionField()
> +  field.mojom_name = parsed_field.mojom_name
> +  field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename),
> +                     (module.mojom_namespace, union.mojom_name))
> +  field.ordinal = parsed_field.ordinal.value if parsed_field.ordinal else None
> +  field.default = None
> +  field.attributes = _AttributeListToDict(parsed_field.attribute_list)
> +  return field
> +
> +
> +def _Parameter(module, parsed_param, interface):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_param: {ast.Parameter} Parsed parameter.
> +    interface: {mojom.Interface} Interface this parameter belongs to.
> +
> +  Returns:
> +    {mojom.Parameter} AST parameter.
> +  """
> +  parameter = mojom.Parameter()
> +  parameter.mojom_name = parsed_param.mojom_name
> +  parameter.kind = _Kind(module.kinds, _MapKind(parsed_param.typename),
> +                         (module.mojom_namespace, interface.mojom_name))
> +  parameter.ordinal = (parsed_param.ordinal.value
> +                       if parsed_param.ordinal else None)
> +  parameter.default = None  # TODO(tibell): We never have these. Remove field?
> +  parameter.attributes = _AttributeListToDict(parsed_param.attribute_list)
> +  return parameter
> +
> +
> +def _Method(module, parsed_method, interface):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_method: {ast.Method} Parsed method.
> +    interface: {mojom.Interface} Interface this method belongs to.
> +
> +  Returns:
> +    {mojom.Method} AST method.
> +  """
> +  method = mojom.Method(
> +      interface,
> +      parsed_method.mojom_name,
> +      ordinal=parsed_method.ordinal.value if parsed_method.ordinal else None)
> +  method.parameters = list(
> +      map(lambda parameter: _Parameter(module, parameter, interface),
> +          parsed_method.parameter_list))
> +  if parsed_method.response_parameter_list is not None:
> +    method.response_parameters = list(
> +        map(lambda parameter: _Parameter(module, parameter, interface),
> +            parsed_method.response_parameter_list))
> +  method.attributes = _AttributeListToDict(parsed_method.attribute_list)
> +
> +  # Enforce that only methods with response can have a [Sync] attribute.
> +  if method.sync and method.response_parameters is None:
> +    raise Exception("Only methods with response can include a [Sync] "
> +                    "attribute. If no response parameters are needed, you "
> +                    "could use an empty response parameter list, i.e., "
> +                    "\"=> ()\".")
> +
> +  return method
> +
> +
> +def _Interface(module, parsed_iface):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_iface: {ast.Interface} Parsed interface.
> +
> +  Returns:
> +    {mojom.Interface} AST interface.
> +  """
> +  interface = mojom.Interface(module=module)
> +  interface.mojom_name = parsed_iface.mojom_name
> +  interface.spec = 'x:' + module.GetNamespacePrefix() + interface.mojom_name
> +  module.kinds[interface.spec] = interface
> +  interface.attributes = _AttributeListToDict(parsed_iface.attribute_list)
> +  interface.enums = []
> +  interface.constants = []
> +  interface.methods_data = []
> +  _ProcessElements(
> +      parsed_iface.mojom_name, parsed_iface.body, {
> +          ast.Enum:
> +          lambda enum: interface.enums.append(_Enum(module, enum, interface)),
> +          ast.Const:
> +          lambda const: interface.constants.append(
> +              _Constant(module, const, interface)),
> +          ast.Method:
> +          interface.methods_data.append,
> +      })
> +  return interface
> +
> +
> +def _EnumField(module, enum, parsed_field):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    enum: {mojom.Enum} Enum this field belongs to.
> +    parsed_field: {ast.EnumValue} Parsed enum value.
> +
> +  Returns:
> +    {mojom.EnumField} AST enum field.
> +  """
> +  field = mojom.EnumField()
> +  field.mojom_name = parsed_field.mojom_name
> +  field.value = _LookupValue(module, enum, None, parsed_field.value)
> +  field.attributes = _AttributeListToDict(parsed_field.attribute_list)
> +  value = mojom.EnumValue(module, enum, field)
> +  module.values[value.GetSpec()] = value
> +  return field
> +
> +
> +def _ResolveNumericEnumValues(enum):
> +  """
> +  Given a reference to a mojom.Enum, resolves and assigns the numeric value of
> +  each field, and also computes the min_value and max_value of the enum.
> +  """
> +
> +  # map of <mojom_name> -> integral value
> +  prev_value = -1
> +  min_value = None
> +  max_value = None
> +  for field in enum.fields:
> +    # This enum value is +1 the previous enum value (e.g: BEGIN).
> +    if field.value is None:
> +      prev_value += 1
> +
> +    # Integral value (e.g: BEGIN = -0x1).
> +    elif _IsStrOrUnicode(field.value):
> +      prev_value = int(field.value, 0)
> +
> +    # Reference to a previous enum value (e.g: INIT = BEGIN).
> +    elif isinstance(field.value, mojom.EnumValue):
> +      prev_value = field.value.field.numeric_value
> +    elif isinstance(field.value, mojom.ConstantValue):
> +      constant = field.value.constant
> +      kind = constant.kind
> +      if not mojom.IsIntegralKind(kind) or mojom.IsBoolKind(kind):
> +        raise ValueError('Enum values must be integers. %s is not an integer.' %
> +                         constant.mojom_name)
> +      prev_value = int(constant.value, 0)
> +    else:
> +      raise Exception('Unresolved enum value for %s' % field.value.GetSpec())
> +
> +    #resolved_enum_values[field.mojom_name] = prev_value
> +    field.numeric_value = prev_value
> +    if min_value is None or prev_value < min_value:
> +      min_value = prev_value
> +    if max_value is None or prev_value > max_value:
> +      max_value = prev_value
> +
> +  enum.min_value = min_value
> +  enum.max_value = max_value
> +
> +
> +def _Enum(module, parsed_enum, parent_kind):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_enum: {ast.Enum} Parsed enum.
> +
> +  Returns:
> +    {mojom.Enum} AST enum.
> +  """
> +  enum = mojom.Enum(module=module)
> +  enum.mojom_name = parsed_enum.mojom_name
> +  enum.native_only = parsed_enum.enum_value_list is None
> +  mojom_name = enum.mojom_name
> +  if parent_kind:
> +    mojom_name = parent_kind.mojom_name + '.' + mojom_name
> +  enum.spec = 'x:%s.%s' % (module.mojom_namespace, mojom_name)
> +  enum.parent_kind = parent_kind
> +  enum.attributes = _AttributeListToDict(parsed_enum.attribute_list)
> +
> +  if not enum.native_only:
> +    enum.fields = list(
> +        map(lambda field: _EnumField(module, enum, field),
> +            parsed_enum.enum_value_list))
> +    _ResolveNumericEnumValues(enum)
> +
> +  module.kinds[enum.spec] = enum
> +
> +  # Enforce that a [Native] attribute is set to make native-only enum
> +  # declarations more explicit.
> +  if enum.native_only:
> +    if not enum.attributes or not enum.attributes.get('Native', False):
> +      raise Exception("Native-only enum declarations must include a " +
> +                      "Native attribute.")
> +
> +  return enum
> +
> +
> +def _Constant(module, parsed_const, parent_kind):
> +  """
> +  Args:
> +    module: {mojom.Module} Module currently being constructed.
> +    parsed_const: {ast.Const} Parsed constant.
> +
> +  Returns:
> +    {mojom.Constant} AST constant.
> +  """
> +  constant = mojom.Constant()
> +  constant.mojom_name = parsed_const.mojom_name
> +  if parent_kind:
> +    scope = (module.mojom_namespace, parent_kind.mojom_name)
> +  else:
> +    scope = (module.mojom_namespace, )
> +  # TODO(mpcomplete): maybe we should only support POD kinds.
> +  constant.kind = _Kind(module.kinds, _MapKind(parsed_const.typename), scope)
> +  constant.parent_kind = parent_kind
> +  constant.value = _LookupValue(module, parent_kind, constant.kind,
> +                                parsed_const.value)
> +
> +  # Iteratively resolve this constant reference to a concrete value
> +  while isinstance(constant.value, mojom.ConstantValue):
> +    constant.value = constant.value.constant.value
> +
> +  value = mojom.ConstantValue(module, parent_kind, constant)
> +  module.values[value.GetSpec()] = value
> +  return constant
> +
> +
> +def _CollectReferencedKinds(module, all_defined_kinds):
> +  """
> +  Takes a {mojom.Module} object and a list of all defined kinds within that
> +  module, and enumerates the complete dict of user-defined mojom types
> +  (as {mojom.Kind} objects) referenced by the module's own defined kinds (i.e.
> +  as types of struct or union fields, or of interface method parameters). The
> +  returned dict is keyed by kind spec.
> +  """
> +
> +  def extract_referenced_user_kinds(kind):
> +    if mojom.IsArrayKind(kind):
> +      return extract_referenced_user_kinds(kind.kind)
> +    if mojom.IsMapKind(kind):
> +      return (extract_referenced_user_kinds(kind.key_kind) +
> +              extract_referenced_user_kinds(kind.value_kind))
> +    if mojom.IsInterfaceRequestKind(kind) or mojom.IsAssociatedKind(kind):
> +      return [kind.kind]
> +    if mojom.IsStructKind(kind):
> +      return [kind]
> +    if (mojom.IsInterfaceKind(kind) or mojom.IsEnumKind(kind)
> +        or mojom.IsUnionKind(kind)):
> +      return [kind]
> +    return []
> +
> +  def sanitize_kind(kind):
> +    """Removes nullability from a kind"""
> +    if kind.spec.startswith('?'):
> +      return _Kind(module.kinds, kind.spec[1:], (module.mojom_namespace, ''))
> +    return kind
> +
> +  referenced_user_kinds = {}
> +  for defined_kind in all_defined_kinds:
> +    if mojom.IsStructKind(defined_kind) or mojom.IsUnionKind(defined_kind):
> +      for field in defined_kind.fields:
> +        for referenced_kind in extract_referenced_user_kinds(field.kind):
> +          sanitized_kind = sanitize_kind(referenced_kind)
> +          referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
> +
> +  # Also scan for references in parameter lists
> +  for interface in module.interfaces:
> +    for method in interface.methods:
> +      for param in itertools.chain(method.parameters or [],
> +                                   method.response_parameters or []):
> +        if (mojom.IsStructKind(param.kind) or mojom.IsUnionKind(param.kind)
> +            or mojom.IsEnumKind(param.kind)
> +            or mojom.IsAnyInterfaceKind(param.kind)):
> +          for referenced_kind in extract_referenced_user_kinds(param.kind):
> +            sanitized_kind = sanitize_kind(referenced_kind)
> +            referenced_user_kinds[sanitized_kind.spec] = sanitized_kind
> +
> +  return referenced_user_kinds
> +
> +
> +def _AssignDefaultOrdinals(items):
> +  """Assigns default ordinal values to a sequence of items if necessary."""
> +  next_ordinal = 0
> +  for item in items:
> +    if item.ordinal is not None:
> +      next_ordinal = item.ordinal + 1
> +    else:
> +      item.ordinal = next_ordinal
> +      next_ordinal += 1
> +
> +
> +def _AssertTypeIsStable(kind):
> +  """Raises an error if a type is not stable, meaning it is composed of at least
> +  one type that is not marked [Stable]."""
> +
> +  def assertDependencyIsStable(dependency):
> +    if (mojom.IsEnumKind(dependency) or mojom.IsStructKind(dependency)
> +        or mojom.IsUnionKind(dependency) or mojom.IsInterfaceKind(dependency)):
> +      if not dependency.stable:
> +        raise Exception(
> +            '%s is marked [Stable] but cannot be stable because it depends on '
> +            '%s, which is not marked [Stable].' %
> +            (kind.mojom_name, dependency.mojom_name))
> +    elif mojom.IsArrayKind(dependency) or mojom.IsAnyInterfaceKind(dependency):
> +      assertDependencyIsStable(dependency.kind)
> +    elif mojom.IsMapKind(dependency):
> +      assertDependencyIsStable(dependency.key_kind)
> +      assertDependencyIsStable(dependency.value_kind)
> +
> +  if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
> +    for field in kind.fields:
> +      assertDependencyIsStable(field.kind)
> +  elif mojom.IsInterfaceKind(kind):
> +    for method in kind.methods:
> +      for param in method.param_struct.fields:
> +        assertDependencyIsStable(param.kind)
> +      if method.response_param_struct:
> +        for response_param in method.response_param_struct.fields:
> +          assertDependencyIsStable(response_param.kind)
> +
> +
> +def _Module(tree, path, imports):
> +  """
> +  Args:
> +    tree: {ast.Mojom} The parse tree.
> +    path: {str} The path to the mojom file.
> +    imports: {Dict[str, mojom.Module]} Mapping from filenames, as they appear in
> +        the import list, to already processed modules. Used to process imports.
> +
> +  Returns:
> +    {mojom.Module} An AST for the mojom.
> +  """
> +  module = mojom.Module(path=path)
> +  module.kinds = {}
> +  for kind in mojom.PRIMITIVES:
> +    module.kinds[kind.spec] = kind
> +
> +  module.values = {}
> +
> +  module.mojom_namespace = tree.module.mojom_namespace[1] if tree.module else ''
> +  # Imports must come first, because they add to module.kinds, which is used
> +  # by the others.
> +  module.imports = [
> +      _Import(module, imports[imp.import_filename]) for imp in tree.import_list
> +  ]
> +  if tree.module and tree.module.attribute_list:
> +    assert isinstance(tree.module.attribute_list, ast.AttributeList)
> +    # TODO(vtl): Check for duplicate keys here.
> +    module.attributes = dict((attribute.key, attribute.value)
> +                             for attribute in tree.module.attribute_list)
> +
> +  filename = os.path.basename(path)
> +  # First pass collects kinds.
> +  module.constants = []
> +  module.enums = []
> +  module.structs = []
> +  module.unions = []
> +  module.interfaces = []
> +  _ProcessElements(
> +      filename, tree.definition_list, {
> +          ast.Const:
> +          lambda const: module.constants.append(_Constant(module, const, None)),
> +          ast.Enum:
> +          lambda enum: module.enums.append(_Enum(module, enum, None)),
> +          ast.Struct:
> +          lambda struct: module.structs.append(_Struct(module, struct)),
> +          ast.Union:
> +          lambda union: module.unions.append(_Union(module, union)),
> +          ast.Interface:
> +          lambda interface: module.interfaces.append(
> +              _Interface(module, interface)),
> +      })
> +
> +  # Second pass expands fields and methods. This allows fields and parameters
> +  # to refer to kinds defined anywhere in the mojom.
> +  all_defined_kinds = {}
> +  for struct in module.structs:
> +    struct.fields = list(
> +        map(lambda field: _StructField(module, field, struct),
> +            struct.fields_data))
> +    _AssignDefaultOrdinals(struct.fields)
> +    del struct.fields_data
> +    all_defined_kinds[struct.spec] = struct
> +    for enum in struct.enums:
> +      all_defined_kinds[enum.spec] = enum
> +
> +  for union in module.unions:
> +    union.fields = list(
> +        map(lambda field: _UnionField(module, field, union), union.fields_data))
> +    _AssignDefaultOrdinals(union.fields)
> +    del union.fields_data
> +    all_defined_kinds[union.spec] = union
> +
> +  for interface in module.interfaces:
> +    interface.methods = list(
> +        map(lambda method: _Method(module, method, interface),
> +            interface.methods_data))
> +    _AssignDefaultOrdinals(interface.methods)
> +    del interface.methods_data
> +    all_defined_kinds[interface.spec] = interface
> +    for enum in interface.enums:
> +      all_defined_kinds[enum.spec] = enum
> +  for enum in module.enums:
> +    all_defined_kinds[enum.spec] = enum
> +
> +  all_referenced_kinds = _CollectReferencedKinds(module,
> +                                                 all_defined_kinds.values())
> +  imported_kind_specs = set(all_referenced_kinds.keys()).difference(
> +      set(all_defined_kinds.keys()))
> +  module.imported_kinds = dict(
> +      (spec, all_referenced_kinds[spec]) for spec in imported_kind_specs)
> +
> +  generator.AddComputedData(module)
> +  for iface in module.interfaces:
> +    for method in iface.methods:
> +      if method.param_struct:
> +        _AssignDefaultOrdinals(method.param_struct.fields)
> +      if method.response_param_struct:
> +        _AssignDefaultOrdinals(method.response_param_struct.fields)
> +
> +  # Ensure that all types marked [Stable] are actually stable. Enums are
> +  # automatically OK since they don't depend on other definitions.
> +  for kinds in (module.structs, module.unions, module.interfaces):
> +    for kind in kinds:
> +      if kind.stable:
> +        _AssertTypeIsStable(kind)
> +
> +  return module
> +
> +
> +def OrderedModule(tree, path, imports):
> +  """Convert parse tree to AST module.
> +
> +  Args:
> +    tree: {ast.Mojom} The parse tree.
> +    path: {str} The path to the mojom file.
> +    imports: {Dict[str, mojom.Module]} Mapping from filenames, as they appear in
> +        the import list, to already processed modules. Used to process imports.
> +
> +  Returns:
> +    {mojom.Module} An AST for the mojom.
> +  """
> +  module = _Module(tree, path, imports)
> +  return module
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
> new file mode 100644
> index 00000000..19905c8a
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/generate/translate_unittest.py
> @@ -0,0 +1,73 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import sys
> +import unittest
> +
> +from mojom.generate import module as mojom
> +from mojom.generate import translate
> +from mojom.parse import ast
> +
> +
> +class TranslateTest(unittest.TestCase):
> +  """Tests |parser.Parse()|."""
> +
> +  def testSimpleArray(self):
> +    """Tests a simple int32[]."""
> +    # pylint: disable=W0212
> +    self.assertEquals(translate._MapKind("int32[]"), "a:i32")
> +
> +  def testAssociativeArray(self):
> +    """Tests a simple uint8{string}."""
> +    # pylint: disable=W0212
> +    self.assertEquals(translate._MapKind("uint8{string}"), "m[s][u8]")
> +
> +  def testLeftToRightAssociativeArray(self):
> +    """Makes sure that parsing is done from right to left on the internal kinds
> +       in the presence of an associative array."""
> +    # pylint: disable=W0212
> +    self.assertEquals(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")
> +
> +  def testTranslateSimpleUnions(self):
> +    """Makes sure that a simple union is translated correctly."""
> +    tree = ast.Mojom(None, ast.ImportList(), [
> +        ast.Union(
> +            "SomeUnion", None,
> +            ast.UnionBody([
> +                ast.UnionField("a", None, None, "int32"),
> +                ast.UnionField("b", None, None, "string")
> +            ]))
> +    ])
> +
> +    translation = translate.OrderedModule(tree, "mojom_tree", [])
> +    self.assertEqual(1, len(translation.unions))
> +
> +    union = translation.unions[0]
> +    self.assertTrue(isinstance(union, mojom.Union))
> +    self.assertEqual("SomeUnion", union.mojom_name)
> +    self.assertEqual(2, len(union.fields))
> +    self.assertEqual("a", union.fields[0].mojom_name)
> +    self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
> +    self.assertEqual("b", union.fields[1].mojom_name)
> +    self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)
> +
> +  def testMapKindRaisesWithDuplicate(self):
> +    """Verifies _MapTreeForType() raises when passed two values with the same
> +       name."""
> +    methods = [
> +        ast.Method('dup', None, None, ast.ParameterList(), None),
> +        ast.Method('dup', None, None, ast.ParameterList(), None)
> +    ]
> +    with self.assertRaises(Exception):
> +      translate._ElemsOfType(methods, ast.Method, 'scope')
> +
> +  def testAssociatedKinds(self):
> +    """Tests type spec translation of associated interfaces and requests."""
> +    # pylint: disable=W0212
> +    self.assertEquals(
> +        translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
> +    self.assertEquals(
> +        translate._MapKind("asso<SomeInterface&>?"), "?asso:r:x:SomeInterface")
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/__init__.py
> new file mode 100644
> index 00000000..e69de29b
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
> new file mode 100644
> index 00000000..1f0db200
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
> @@ -0,0 +1,427 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Node classes for the AST for a Mojo IDL file."""
> +
> +# Note: For convenience of testing, you probably want to define __eq__() methods
> +# for all node types; it's okay to be slightly lax (e.g., not compare filename
> +# and lineno). You may also define __repr__() to help with analyzing test
> +# failures, especially for more complex types.
> +
> +
> +import sys
> +
> +
> +def _IsStrOrUnicode(x):
> +  if sys.version_info[0] < 3:
> +    return isinstance(x, (unicode, str))
> +  return isinstance(x, str)
> +
> +
> +class NodeBase(object):
> +  """Base class for nodes in the AST."""
> +
> +  def __init__(self, filename=None, lineno=None):
> +    self.filename = filename
> +    self.lineno = lineno
> +
> +  def __eq__(self, other):
> +    # We want strict comparison of the two object's types. Disable pylint's
> +    # insistence upon recommending isinstance().
> +    # pylint: disable=unidiomatic-typecheck
> +    return type(self) == type(other)
> +
> +  # Make != the inverse of ==. (Subclasses shouldn't have to override this.)
> +  def __ne__(self, other):
> +    return not self == other
> +
> +
> +# TODO(vtl): Some of this is complicated enough that it should be tested.
> +class NodeListBase(NodeBase):
> +  """Represents a list of other nodes, all having the same type. (This is meant
> +  to be subclassed, with subclasses defining _list_item_type to be the class (or
> +  classes, in a tuple) of the members of the list.)"""
> +
> +  def __init__(self, item_or_items=None, **kwargs):
> +    super(NodeListBase, self).__init__(**kwargs)
> +    self.items = []
> +    if item_or_items is None:
> +      pass
> +    elif isinstance(item_or_items, list):
> +      for item in item_or_items:
> +        assert isinstance(item, self._list_item_type)
> +        self.Append(item)
> +    else:
> +      assert isinstance(item_or_items, self._list_item_type)
> +      self.Append(item_or_items)
> +
> +  # Support iteration. For everything else, users should just access |items|
> +  # directly. (We intentionally do NOT supply |__len__()| or |__nonzero__()|, so
> +  # |bool(NodeListBase())| is true.)
> +  def __iter__(self):
> +    return self.items.__iter__()
> +
> +  def __eq__(self, other):
> +    return super(NodeListBase, self).__eq__(other) and \
> +           self.items == other.items
> +
> +  # Implement this so that on failure, we get slightly more sensible output.
> +  def __repr__(self):
> +    return self.__class__.__name__ + "([" + \
> +           ", ".join([repr(elem) for elem in self.items]) + "])"
> +
> +  def Insert(self, item):
> +    """Inserts item at the front of the list."""
> +
> +    assert isinstance(item, self._list_item_type)
> +    self.items.insert(0, item)
> +    self._UpdateFilenameAndLineno()
> +
> +  def Append(self, item):
> +    """Appends item to the end of the list."""
> +
> +    assert isinstance(item, self._list_item_type)
> +    self.items.append(item)
> +    self._UpdateFilenameAndLineno()
> +
> +  def _UpdateFilenameAndLineno(self):
> +    if self.items:
> +      self.filename = self.items[0].filename
> +      self.lineno = self.items[0].lineno
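
One consequence of the comment above __iter__() that is easy to miss:
since neither __len__() nor __nonzero__() is defined, even an empty node
list is truthy, so emptiness has to be checked through |items|. A small
illustration using one of the list subclasses defined further down in
this file:

    from mojom.parse import ast

    params = ast.ParameterList()
    assert bool(params)          # truthy even though it holds nothing
    assert params.items == []    # emptiness is checked via |items|
    assert list(params) == []    # iteration is supported directly
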
> +
> +
> +class Definition(NodeBase):
> +  """Represents a definition of anything that has a global name (e.g., enums,
> +  enum values, consts, structs, struct fields, interfaces). (This does not
> +  include parameter definitions.) This class is meant to be subclassed."""
> +
> +  def __init__(self, mojom_name, **kwargs):
> +    assert _IsStrOrUnicode(mojom_name)
> +    NodeBase.__init__(self, **kwargs)
> +    self.mojom_name = mojom_name
> +
> +
> +################################################################################
> +
> +
> +class Attribute(NodeBase):
> +  """Represents an attribute."""
> +
> +  def __init__(self, key, value, **kwargs):
> +    assert _IsStrOrUnicode(key)
> +    super(Attribute, self).__init__(**kwargs)
> +    self.key = key
> +    self.value = value
> +
> +  def __eq__(self, other):
> +    return super(Attribute, self).__eq__(other) and \
> +           self.key == other.key and \
> +           self.value == other.value
> +
> +
> +class AttributeList(NodeListBase):
> +  """Represents a list attributes."""
> +
> +  _list_item_type = Attribute
> +
> +
> +class Const(Definition):
> +  """Represents a const definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    # The typename is currently passed through as a string.
> +    assert _IsStrOrUnicode(typename)
> +    # The value is either a literal (currently passed through as a string) or a
> +    # "wrapped identifier".
> +    assert _IsStrOrUnicode(value) or isinstance(value, tuple)
> +    super(Const, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.typename = typename
> +    self.value = value
> +
> +  def __eq__(self, other):
> +    return super(Const, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.typename == other.typename and \
> +           self.value == other.value
> +
> +
> +class Enum(Definition):
> +  """Represents an enum definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
> +    super(Enum, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.enum_value_list = enum_value_list
> +
> +  def __eq__(self, other):
> +    return super(Enum, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.enum_value_list == other.enum_value_list
> +
> +
> +class EnumValue(Definition):
> +  """Represents a definition of an enum value."""
> +
> +  def __init__(self, mojom_name, attribute_list, value, **kwargs):
> +    # The optional value is either an int (currently passed as a string) or a
> +    # "wrapped identifier".
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert value is None or _IsStrOrUnicode(value) or isinstance(value, tuple)
> +    super(EnumValue, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.value = value
> +
> +  def __eq__(self, other):
> +    return super(EnumValue, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.value == other.value
> +
> +
> +class EnumValueList(NodeListBase):
> +  """Represents a list of enum value definitions (i.e., the "body" of an enum
> +  definition)."""
> +
> +  _list_item_type = EnumValue
> +
> +
> +class Import(NodeBase):
> +  """Represents an import statement."""
> +
> +  def __init__(self, attribute_list, import_filename, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert _IsStrOrUnicode(import_filename)
> +    super(Import, self).__init__(**kwargs)
> +    self.attribute_list = attribute_list
> +    self.import_filename = import_filename
> +
> +  def __eq__(self, other):
> +    return super(Import, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.import_filename == other.import_filename
> +
> +
> +class ImportList(NodeListBase):
> +  """Represents a list (i.e., sequence) of import statements."""
> +
> +  _list_item_type = Import
> +
> +
> +class Interface(Definition):
> +  """Represents an interface definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, body, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert isinstance(body, InterfaceBody)
> +    super(Interface, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.body = body
> +
> +  def __eq__(self, other):
> +    return super(Interface, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.body == other.body
> +
> +
> +class Method(Definition):
> +  """Represents a method definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, ordinal, parameter_list,
> +               response_parameter_list, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert ordinal is None or isinstance(ordinal, Ordinal)
> +    assert isinstance(parameter_list, ParameterList)
> +    assert response_parameter_list is None or \
> +           isinstance(response_parameter_list, ParameterList)
> +    super(Method, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.ordinal = ordinal
> +    self.parameter_list = parameter_list
> +    self.response_parameter_list = response_parameter_list
> +
> +  def __eq__(self, other):
> +    return super(Method, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.ordinal == other.ordinal and \
> +           self.parameter_list == other.parameter_list and \
> +           self.response_parameter_list == other.response_parameter_list
> +
> +
> +# This needs to be declared after |Method|.
> +class InterfaceBody(NodeListBase):
> +  """Represents the body of (i.e., list of definitions inside) an interface."""
> +
> +  _list_item_type = (Const, Enum, Method)
> +
> +
> +class Module(NodeBase):
> +  """Represents a module statement."""
> +
> +  def __init__(self, mojom_namespace, attribute_list, **kwargs):
> +    # |mojom_namespace| is either None or a "wrapped identifier".
> +    assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    super(Module, self).__init__(**kwargs)
> +    self.mojom_namespace = mojom_namespace
> +    self.attribute_list = attribute_list
> +
> +  def __eq__(self, other):
> +    return super(Module, self).__eq__(other) and \
> +           self.mojom_namespace == other.mojom_namespace and \
> +           self.attribute_list == other.attribute_list
> +
> +
> +class Mojom(NodeBase):
> +  """Represents an entire .mojom file. (This is the root node.)"""
> +
> +  def __init__(self, module, import_list, definition_list, **kwargs):
> +    assert module is None or isinstance(module, Module)
> +    assert isinstance(import_list, ImportList)
> +    assert isinstance(definition_list, list)
> +    super(Mojom, self).__init__(**kwargs)
> +    self.module = module
> +    self.import_list = import_list
> +    self.definition_list = definition_list
> +
> +  def __eq__(self, other):
> +    return super(Mojom, self).__eq__(other) and \
> +           self.module == other.module and \
> +           self.import_list == other.import_list and \
> +           self.definition_list == other.definition_list
> +
> +  def __repr__(self):
> +    return "%s(%r, %r, %r)" % (self.__class__.__name__, self.module,
> +                               self.import_list, self.definition_list)
> +
> +
> +class Ordinal(NodeBase):
> +  """Represents an ordinal value labeling, e.g., a struct field."""
> +
> +  def __init__(self, value, **kwargs):
> +    assert isinstance(value, int)
> +    super(Ordinal, self).__init__(**kwargs)
> +    self.value = value
> +
> +  def __eq__(self, other):
> +    return super(Ordinal, self).__eq__(other) and \
> +           self.value == other.value
> +
> +
> +class Parameter(NodeBase):
> +  """Represents a method request or response parameter."""
> +
> +  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
> +    assert _IsStrOrUnicode(mojom_name)
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert ordinal is None or isinstance(ordinal, Ordinal)
> +    assert _IsStrOrUnicode(typename)
> +    super(Parameter, self).__init__(**kwargs)
> +    self.mojom_name = mojom_name
> +    self.attribute_list = attribute_list
> +    self.ordinal = ordinal
> +    self.typename = typename
> +
> +  def __eq__(self, other):
> +    return super(Parameter, self).__eq__(other) and \
> +           self.mojom_name == other.mojom_name and \
> +           self.attribute_list == other.attribute_list and \
> +           self.ordinal == other.ordinal and \
> +           self.typename == other.typename
> +
> +
> +class ParameterList(NodeListBase):
> +  """Represents a list of (method request or response) parameters."""
> +
> +  _list_item_type = Parameter
> +
> +
> +class Struct(Definition):
> +  """Represents a struct definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, body, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert isinstance(body, StructBody) or body is None
> +    super(Struct, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.body = body
> +
> +  def __eq__(self, other):
> +    return super(Struct, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.body == other.body
> +
> +
> +class StructField(Definition):
> +  """Represents a struct field definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, ordinal, typename,
> +               default_value, **kwargs):
> +    assert _IsStrOrUnicode(mojom_name)
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert ordinal is None or isinstance(ordinal, Ordinal)
> +    assert _IsStrOrUnicode(typename)
> +    # The optional default value is currently either a value as a string or a
> +    # "wrapped identifier".
> +    assert default_value is None or _IsStrOrUnicode(default_value) or \
> +        isinstance(default_value, tuple)
> +    super(StructField, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.ordinal = ordinal
> +    self.typename = typename
> +    self.default_value = default_value
> +
> +  def __eq__(self, other):
> +    return super(StructField, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.ordinal == other.ordinal and \
> +           self.typename == other.typename and \
> +           self.default_value == other.default_value
> +
> +
> +# This needs to be declared after |StructField|.
> +class StructBody(NodeListBase):
> +  """Represents the body of (i.e., list of definitions inside) a struct."""
> +
> +  _list_item_type = (Const, Enum, StructField)
> +
> +
> +class Union(Definition):
> +  """Represents a union definition."""
> +
> +  def __init__(self, mojom_name, attribute_list, body, **kwargs):
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert isinstance(body, UnionBody)
> +    super(Union, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.body = body
> +
> +  def __eq__(self, other):
> +    return super(Union, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.body == other.body
> +
> +
> +class UnionField(Definition):
> +  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
> +    assert _IsStrOrUnicode(mojom_name)
> +    assert attribute_list is None or isinstance(attribute_list, AttributeList)
> +    assert ordinal is None or isinstance(ordinal, Ordinal)
> +    assert _IsStrOrUnicode(typename)
> +    super(UnionField, self).__init__(mojom_name, **kwargs)
> +    self.attribute_list = attribute_list
> +    self.ordinal = ordinal
> +    self.typename = typename
> +
> +  def __eq__(self, other):
> +    return super(UnionField, self).__eq__(other) and \
> +           self.attribute_list == other.attribute_list and \
> +           self.ordinal == other.ordinal and \
> +           self.typename == other.typename
> +
> +
> +class UnionBody(NodeListBase):
> +
> +  _list_item_type = UnionField
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
> new file mode 100644
> index 00000000..62798631
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
> @@ -0,0 +1,121 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import sys
> +import unittest
> +
> +from mojom.parse import ast
> +
> +
> +class _TestNode(ast.NodeBase):
> +  """Node type for tests."""
> +
> +  def __init__(self, value, **kwargs):
> +    super(_TestNode, self).__init__(**kwargs)
> +    self.value = value
> +
> +  def __eq__(self, other):
> +    return super(_TestNode, self).__eq__(other) and self.value == other.value
> +
> +
> +class _TestNodeList(ast.NodeListBase):
> +  """Node list type for tests."""
> +
> +  _list_item_type = _TestNode
> +
> +
> +class ASTTest(unittest.TestCase):
> +  """Tests various AST classes."""
> +
> +  def testNodeBase(self):
> +    # Test |__eq__()|; this is only used for testing, where we want to do
> +    # comparison by value and ignore filenames/line numbers (for convenience).
> +    node1 = ast.NodeBase(filename="hello.mojom", lineno=123)
> +    node2 = ast.NodeBase()
> +    self.assertEquals(node1, node2)
> +    self.assertEquals(node2, node1)
> +
> +    # Check that |__ne__()| just defers to |__eq__()| properly.
> +    self.assertFalse(node1 != node2)
> +    self.assertFalse(node2 != node1)
> +
> +    # Check that |filename| and |lineno| are set properly (and are None by
> +    # default).
> +    self.assertEquals(node1.filename, "hello.mojom")
> +    self.assertEquals(node1.lineno, 123)
> +    self.assertIsNone(node2.filename)
> +    self.assertIsNone(node2.lineno)
> +
> +    # |NodeBase|'s |__eq__()| should compare types (and a subclass's |__eq__()|
> +    # should first defer to its superclass's).
> +    node3 = _TestNode(123)
> +    self.assertNotEqual(node1, node3)
> +    self.assertNotEqual(node3, node1)
> +    # Also test |__eq__()| directly.
> +    self.assertFalse(node1 == node3)
> +    self.assertFalse(node3 == node1)
> +
> +    node4 = _TestNode(123, filename="world.mojom", lineno=123)
> +    self.assertEquals(node4, node3)
> +    node5 = _TestNode(456)
> +    self.assertNotEquals(node5, node4)
> +
> +  def testNodeListBase(self):
> +    node1 = _TestNode(1, filename="foo.mojom", lineno=1)
> +    # Equal to, but not the same as, |node1|:
> +    node1b = _TestNode(1, filename="foo.mojom", lineno=1)
> +    node2 = _TestNode(2, filename="foo.mojom", lineno=2)
> +
> +    nodelist1 = _TestNodeList()  # Contains: (empty).
> +    self.assertEquals(nodelist1, nodelist1)
> +    self.assertEquals(nodelist1.items, [])
> +    self.assertIsNone(nodelist1.filename)
> +    self.assertIsNone(nodelist1.lineno)
> +
> +    nodelist2 = _TestNodeList(node1)  # Contains: 1.
> +    self.assertEquals(nodelist2, nodelist2)
> +    self.assertEquals(nodelist2.items, [node1])
> +    self.assertNotEqual(nodelist2, nodelist1)
> +    self.assertEquals(nodelist2.filename, "foo.mojom")
> +    self.assertEquals(nodelist2.lineno, 1)
> +
> +    nodelist3 = _TestNodeList([node2])  # Contains: 2.
> +    self.assertEquals(nodelist3.items, [node2])
> +    self.assertNotEqual(nodelist3, nodelist1)
> +    self.assertNotEqual(nodelist3, nodelist2)
> +    self.assertEquals(nodelist3.filename, "foo.mojom")
> +    self.assertEquals(nodelist3.lineno, 2)
> +
> +    nodelist1.Append(node1b)  # Contains: 1.
> +    self.assertEquals(nodelist1.items, [node1])
> +    self.assertEquals(nodelist1, nodelist2)
> +    self.assertNotEqual(nodelist1, nodelist3)
> +    self.assertEquals(nodelist1.filename, "foo.mojom")
> +    self.assertEquals(nodelist1.lineno, 1)
> +
> +    nodelist1.Append(node2)  # Contains: 1, 2.
> +    self.assertEquals(nodelist1.items, [node1, node2])
> +    self.assertNotEqual(nodelist1, nodelist2)
> +    self.assertNotEqual(nodelist1, nodelist3)
> +    self.assertEquals(nodelist1.lineno, 1)
> +
> +    nodelist2.Append(node2)  # Contains: 1, 2.
> +    self.assertEquals(nodelist2.items, [node1, node2])
> +    self.assertEquals(nodelist2, nodelist1)
> +    self.assertNotEqual(nodelist2, nodelist3)
> +    self.assertEquals(nodelist2.lineno, 1)
> +
> +    nodelist3.Insert(node1)  # Contains: 1, 2.
> +    self.assertEquals(nodelist3.items, [node1, node2])
> +    self.assertEquals(nodelist3, nodelist1)
> +    self.assertEquals(nodelist3, nodelist2)
> +    self.assertEquals(nodelist3.lineno, 1)
> +
> +    # Test iteration:
> +    i = 1
> +    for item in nodelist1:
> +      self.assertEquals(item.value, i)
> +      i += 1
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
> new file mode 100644
> index 00000000..3cb73c5d
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features.py
> @@ -0,0 +1,82 @@
> +# Copyright 2018 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Helpers for processing conditionally enabled features in a mojom."""
> +
> +from mojom.error import Error
> +from mojom.parse import ast
> +
> +
> +class EnableIfError(Error):
> +  """ Class for errors from ."""
> +
> +  def __init__(self, filename, message, lineno=None):
> +    Error.__init__(self, filename, message, lineno=lineno, addenda=None)
> +
> +
> +def _IsEnabled(definition, enabled_features):
> +  """Returns true if a definition is enabled.
> +
> +  A definition is enabled if it has no EnableIf attribute, or if the value of
> +  the EnableIf attribute is in enabled_features.
> +  """
> +  if not hasattr(definition, "attribute_list"):
> +    return True
> +  if not definition.attribute_list:
> +    return True
> +
> +  already_defined = False
> +  for a in definition.attribute_list:
> +    if a.key == 'EnableIf':
> +      if already_defined:
> +        raise EnableIfError(
> +            definition.filename,
> +            "EnableIf attribute may only be defined once per field.",
> +            definition.lineno)
> +      already_defined = True
> +
> +  for attribute in definition.attribute_list:
> +    if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
> +      return False
> +  return True
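
At the AST level an annotation like [EnableIf=blue] is just an
ast.Attribute with key 'EnableIf' and value 'blue', so the check above
reduces to a membership test; a short sketch (the field name is made
up):

    from mojom.parse import ast
    from mojom.parse.conditional_features import _IsEnabled

    field = ast.StructField(
        'width', ast.AttributeList(ast.Attribute('EnableIf', 'blue')),
        None, 'int32', None)

    assert _IsEnabled(field, {'blue', 'red'})  # 'blue' is enabled
    assert not _IsEnabled(field, {'red'})      # 'blue' missing, dropped
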
> +
> +
> +def _FilterDisabledFromNodeList(node_list, enabled_features):
> +  if not node_list:
> +    return
> +  assert isinstance(node_list, ast.NodeListBase)
> +  node_list.items = [
> +      item for item in node_list.items if _IsEnabled(item, enabled_features)
> +  ]
> +  for item in node_list.items:
> +    _FilterDefinition(item, enabled_features)
> +
> +
> +def _FilterDefinition(definition, enabled_features):
> +  """Filters definitions with a body."""
> +  if isinstance(definition, ast.Enum):
> +    _FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
> +  elif isinstance(definition, ast.Interface):
> +    _FilterDisabledFromNodeList(definition.body, enabled_features)
> +  elif isinstance(definition, ast.Method):
> +    _FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
> +    _FilterDisabledFromNodeList(definition.response_parameter_list,
> +                                enabled_features)
> +  elif isinstance(definition, ast.Struct):
> +    _FilterDisabledFromNodeList(definition.body, enabled_features)
> +  elif isinstance(definition, ast.Union):
> +    _FilterDisabledFromNodeList(definition.body, enabled_features)
> +
> +
> +def RemoveDisabledDefinitions(mojom, enabled_features):
> +  """Removes conditionally disabled definitions from a Mojom node."""
> +  mojom.import_list = ast.ImportList([
> +      imported_file for imported_file in mojom.import_list
> +      if _IsEnabled(imported_file, enabled_features)
> +  ])
> +  mojom.definition_list = [
> +      definition for definition in mojom.definition_list
> +      if _IsEnabled(definition, enabled_features)
> +  ]
> +  for definition in mojom.definition_list:
> +    _FilterDefinition(definition, enabled_features)
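
Putting this file together with the parser and translate.py, the
intended flow seems to be parse, prune by enabled features, then
translate; a sketch with made-up feature names:

    from mojom.generate import translate
    from mojom.parse import conditional_features, parser

    source = """
      module sample.mojom;
      struct Config {
        int32 width;
        [EnableIf=debug] int32 trace_level;
      };
    """
    tree = parser.Parse(source, "sample.mojom")
    conditional_features.RemoveDisabledDefinitions(tree, {"release"})
    module = translate.OrderedModule(tree, "sample.mojom", {})
    # Only |width| survives; |trace_level| was gated on the disabled
    # 'debug' feature.
    print([f.mojom_name for f in module.structs[0].fields])
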
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
> new file mode 100644
> index 00000000..aa609be7
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py
> @@ -0,0 +1,233 @@
> +# Copyright 2018 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os
> +import sys
> +import unittest
> +
> +
> +def _GetDirAbove(dirname):
> +  """Returns the directory "above" this file containing |dirname| (which must
> +  also be "above" this file)."""
> +  path = os.path.abspath(__file__)
> +  while True:
> +    path, tail = os.path.split(path)
> +    assert tail
> +    if tail == dirname:
> +      return path
> +
> +
> +try:
> +  imp.find_module('mojom')
> +except ImportError:
> +  sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
> +import mojom.parse.ast as ast
> +import mojom.parse.conditional_features as conditional_features
> +import mojom.parse.parser as parser
> +
> +ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
> +
> +
> +class ConditionalFeaturesTest(unittest.TestCase):
> +  """Tests |mojom.parse.conditional_features|."""
> +
> +  def parseAndAssertEqual(self, source, expected_source):
> +    definition = parser.Parse(source, "my_file.mojom")
> +    conditional_features.RemoveDisabledDefinitions(definition, ENABLED_FEATURES)
> +    expected = parser.Parse(expected_source, "my_file.mojom")
> +    self.assertEquals(definition, expected)
> +
> +  def testFilterConst(self):
> +    """Test that Consts are correctly filtered."""
> +    const_source = """
> +      [EnableIf=blue]
> +      const int kMyConst1 = 1;
> +      [EnableIf=orange]
> +      const double kMyConst2 = 2;
> +      const int kMyConst3 = 3;
> +    """
> +    expected_source = """
> +      [EnableIf=blue]
> +      const int kMyConst1 = 1;
> +      const int kMyConst3 = 3;
> +    """
> +    self.parseAndAssertEqual(const_source, expected_source)
> +
> +  def testFilterEnum(self):
> +    """Test that EnumValues are correctly filtered from an Enum."""
> +    enum_source = """
> +      enum MyEnum {
> +        [EnableIf=purple]
> +        VALUE1,
> +        [EnableIf=blue]
> +        VALUE2,
> +        VALUE3,
> +      };
> +    """
> +    expected_source = """
> +      enum MyEnum {
> +        [EnableIf=blue]
> +        VALUE2,
> +        VALUE3
> +      };
> +    """
> +    self.parseAndAssertEqual(enum_source, expected_source)
> +
> +  def testFilterImport(self):
> +    """Test that imports are correctly filtered from a Mojom."""
> +    import_source = """
> +      [EnableIf=blue]
> +      import "foo.mojom";
> +      import "bar.mojom";
> +      [EnableIf=purple]
> +      import "baz.mojom";
> +    """
> +    expected_source = """
> +      [EnableIf=blue]
> +      import "foo.mojom";
> +      import "bar.mojom";
> +    """
> +    self.parseAndAssertEqual(import_source, expected_source)
> +
> +  def testFilterInterface(self):
> +    """Test that definitions are correctly filtered from an Interface."""
> +    interface_source = """
> +      interface MyInterface {
> +        [EnableIf=blue]
> +        enum MyEnum {
> +          [EnableIf=purple]
> +          VALUE1,
> +          VALUE2,
> +        };
> +        [EnableIf=blue]
> +        const int32 kMyConst = 123;
> +        [EnableIf=purple]
> +        MyMethod();
> +      };
> +    """
> +    expected_source = """
> +      interface MyInterface {
> +        [EnableIf=blue]
> +        enum MyEnum {
> +          VALUE2,
> +        };
> +        [EnableIf=blue]
> +        const int32 kMyConst = 123;
> +      };
> +    """
> +    self.parseAndAssertEqual(interface_source, expected_source)
> +
> +  def testFilterMethod(self):
> +    """Test that Parameters are correctly filtered from a Method."""
> +    method_source = """
> +      interface MyInterface {
> +        [EnableIf=blue]
> +        MyMethod([EnableIf=purple] int32 a) => ([EnableIf=red] int32 b);
> +      };
> +    """
> +    expected_source = """
> +      interface MyInterface {
> +        [EnableIf=blue]
> +        MyMethod() => ([EnableIf=red] int32 b);
> +      };
> +    """
> +    self.parseAndAssertEqual(method_source, expected_source)
> +
> +  def testFilterStruct(self):
> +    """Test that definitions are correctly filtered from a Struct."""
> +    struct_source = """
> +      struct MyStruct {
> +        [EnableIf=blue]
> +        enum MyEnum {
> +          VALUE1,
> +          [EnableIf=purple]
> +          VALUE2,
> +        };
> +        [EnableIf=yellow]
> +        const double kMyConst = 1.23;
> +        [EnableIf=green]
> +        int32 a;
> +        double b;
> +        [EnableIf=purple]
> +        int32 c;
> +        [EnableIf=blue]
> +        double d;
> +        int32 e;
> +        [EnableIf=orange]
> +        double f;
> +      };
> +    """
> +    expected_source = """
> +      struct MyStruct {
> +        [EnableIf=blue]
> +        enum MyEnum {
> +          VALUE1,
> +        };
> +        [EnableIf=green]
> +        int32 a;
> +        double b;
> +        [EnableIf=blue]
> +        double d;
> +        int32 e;
> +      };
> +    """
> +    self.parseAndAssertEqual(struct_source, expected_source)
> +
> +  def testFilterUnion(self):
> +    """Test that UnionFields are correctly filtered from a Union."""
> +    union_source = """
> +      union MyUnion {
> +        [EnableIf=yellow]
> +        int32 a;
> +        [EnableIf=red]
> +        bool b;
> +      };
> +    """
> +    expected_source = """
> +      union MyUnion {
> +        [EnableIf=red]
> +        bool b;
> +      };
> +    """
> +    self.parseAndAssertEqual(union_source, expected_source)
> +
> +  def testSameNameFields(self):
> +    mojom_source = """
> +      enum Foo {
> +        [EnableIf=red]
> +        VALUE1 = 5,
> +        [EnableIf=yellow]
> +        VALUE1 = 6,
> +      };
> +      [EnableIf=red]
> +      const double kMyConst = 1.23;
> +      [EnableIf=yellow]
> +      const double kMyConst = 4.56;
> +    """
> +    expected_source = """
> +      enum Foo {
> +        [EnableIf=red]
> +        VALUE1 = 5,
> +      };
> +      [EnableIf=red]
> +      const double kMyConst = 1.23;
> +    """
> +    self.parseAndAssertEqual(mojom_source, expected_source)
> +
> +  def testMultipleEnableIfs(self):
> +    source = """
> +      enum Foo {
> +        [EnableIf=red,EnableIf=yellow]
> +        kBarValue = 5,
> +      };
> +    """
> +    definition = parser.Parse(source, "my_file.mojom")
> +    self.assertRaises(conditional_features.EnableIfError,
> +                      conditional_features.RemoveDisabledDefinitions,
> +                      definition, ENABLED_FEATURES)
> +
> +
> +if __name__ == '__main__':
> +  unittest.main()
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
> new file mode 100644
> index 00000000..3e084bbf
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
> @@ -0,0 +1,251 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import sys
> +
> +from mojom import fileutil
> +from mojom.error import Error
> +
> +fileutil.AddLocalRepoThirdPartyDirToModulePath()
> +from ply.lex import TOKEN
> +
> +
> +class LexError(Error):
> +  """Class for errors from the lexer."""
> +
> +  def __init__(self, filename, message, lineno):
> +    Error.__init__(self, filename, message, lineno=lineno)
> +
> +
> +# We have methods which look like they could be functions:
> +# pylint: disable=R0201
> +class Lexer(object):
> +  def __init__(self, filename):
> +    self.filename = filename
> +
> +  ######################--   PRIVATE   --######################
> +
> +  ##
> +  ## Internal auxiliary methods
> +  ##
> +  def _error(self, msg, token):
> +    raise LexError(self.filename, msg, token.lineno)
> +
> +  ##
> +  ## Reserved keywords
> +  ##
> +  keywords = (
> +      'HANDLE',
> +      'IMPORT',
> +      'MODULE',
> +      'STRUCT',
> +      'UNION',
> +      'INTERFACE',
> +      'ENUM',
> +      'CONST',
> +      'TRUE',
> +      'FALSE',
> +      'DEFAULT',
> +      'ARRAY',
> +      'MAP',
> +      'ASSOCIATED',
> +      'PENDING_REMOTE',
> +      'PENDING_RECEIVER',
> +      'PENDING_ASSOCIATED_REMOTE',
> +      'PENDING_ASSOCIATED_RECEIVER',
> +  )
> +
> +  keyword_map = {}
> +  for keyword in keywords:
> +    keyword_map[keyword.lower()] = keyword
> +
> +  ##
> +  ## All the tokens recognized by the lexer
> +  ##
> +  tokens = keywords + (
> +      # Identifiers
> +      'NAME',
> +
> +      # Constants
> +      'ORDINAL',
> +      'INT_CONST_DEC',
> +      'INT_CONST_HEX',
> +      'FLOAT_CONST',
> +
> +      # String literals
> +      'STRING_LITERAL',
> +
> +      # Operators
> +      'MINUS',
> +      'PLUS',
> +      'AMP',
> +      'QSTN',
> +
> +      # Assignment
> +      'EQUALS',
> +
> +      # Request / response
> +      'RESPONSE',
> +
> +      # Delimiters
> +      'LPAREN',
> +      'RPAREN',  # ( )
> +      'LBRACKET',
> +      'RBRACKET',  # [ ]
> +      'LBRACE',
> +      'RBRACE',  # { }
> +      'LANGLE',
> +      'RANGLE',  # < >
> +      'SEMI',  # ;
> +      'COMMA',
> +      'DOT'  # , .
> +  )
> +
> +  ##
> +  ## Regexes for use in tokens
> +  ##
> +
> +  # valid C identifiers (K&R2: A.2.3)
> +  identifier = r'[a-zA-Z_][0-9a-zA-Z_]*'
> +
> +  hex_prefix = '0[xX]'
> +  hex_digits = '[0-9a-fA-F]+'
> +
> +  # integer constants (K&R2: A.2.5.1)
> +  decimal_constant = '0|([1-9][0-9]*)'
> +  hex_constant = hex_prefix + hex_digits
> +  # Don't allow octal constants (even invalid octal).
> +  octal_constant_disallowed = '0[0-9]+'
> +
> +  # character constants (K&R2: A.2.5.2)
> +  # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
> +  # directives with Windows paths as filenames (..\..\dir\file)
> +  # For the same reason, decimal_escape allows all digit sequences. We want to
> +  # parse all correct code, even if it means to sometimes parse incorrect
> +  # code.
> +  #
> +  simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
> +  decimal_escape = r"""(\d+)"""
> +  hex_escape = r"""(x[0-9a-fA-F]+)"""
> +  bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
> +
> +  escape_sequence = \
> +      r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
> +
> +  # string literals (K&R2: A.2.6)
> +  string_char = r"""([^"\\\n]|""" + escape_sequence + ')'
> +  string_literal = '"' + string_char + '*"'
> +  bad_string_literal = '"' + string_char + '*' + bad_escape + string_char + '*"'
> +
> +  # floating constants (K&R2: A.2.5.3)
> +  exponent_part = r"""([eE][-+]?[0-9]+)"""
> +  fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
> +  floating_constant = \
> +      '(((('+fractional_constant+')'+ \
> +      exponent_part+'?)|([0-9]+'+exponent_part+')))'
> +
> +  # Ordinals
> +  ordinal = r'@[0-9]+'
> +  missing_ordinal_value = r'@'
> +  # Don't allow ordinal values in octal (even invalid octal, like 09) or
> +  # hexadecimal.
> +  octal_or_hex_ordinal_disallowed = (
> +      r'@((0[0-9]+)|(' + hex_prefix + hex_digits + '))')
> +
> +  ##
> +  ## Rules for the normal state
> +  ##
> +  t_ignore = ' \t\r'
> +
> +  # Newlines
> +  def t_NEWLINE(self, t):
> +    r'\n+'
> +    t.lexer.lineno += len(t.value)
> +
> +  # Operators
> +  t_MINUS = r'-'
> +  t_PLUS = r'\+'
> +  t_AMP = r'&'
> +  t_QSTN = r'\?'
> +
> +  # =
> +  t_EQUALS = r'='
> +
> +  # =>
> +  t_RESPONSE = r'=>'
> +
> +  # Delimiters
> +  t_LPAREN = r'\('
> +  t_RPAREN = r'\)'
> +  t_LBRACKET = r'\['
> +  t_RBRACKET = r'\]'
> +  t_LBRACE = r'\{'
> +  t_RBRACE = r'\}'
> +  t_LANGLE = r'<'
> +  t_RANGLE = r'>'
> +  t_COMMA = r','
> +  t_DOT = r'\.'
> +  t_SEMI = r';'
> +
> +  t_STRING_LITERAL = string_literal
> +
> +  # The following floating and integer constants are defined as
> +  # functions to impose a strict order (otherwise, decimal
> +  # is placed before the others because its regex is longer,
> +  # and this is bad)
> +  #
> +  @TOKEN(floating_constant)
> +  def t_FLOAT_CONST(self, t):
> +    return t
> +
> +  @TOKEN(hex_constant)
> +  def t_INT_CONST_HEX(self, t):
> +    return t
> +
> +  @TOKEN(octal_constant_disallowed)
> +  def t_OCTAL_CONSTANT_DISALLOWED(self, t):
> +    msg = "Octal values not allowed"
> +    self._error(msg, t)
> +
> +  @TOKEN(decimal_constant)
> +  def t_INT_CONST_DEC(self, t):
> +    return t
> +
> +  # unmatched string literals are caught by the preprocessor
> +
> +  @TOKEN(bad_string_literal)
> +  def t_BAD_STRING_LITERAL(self, t):
> +    msg = "String contains invalid escape code"
> +    self._error(msg, t)
> +
> +  # Handle ordinal-related tokens in the right order:
> +  @TOKEN(octal_or_hex_ordinal_disallowed)
> +  def t_OCTAL_OR_HEX_ORDINAL_DISALLOWED(self, t):
> +    msg = "Octal and hexadecimal ordinal values not allowed"
> +    self._error(msg, t)
> +
> +  @TOKEN(ordinal)
> +  def t_ORDINAL(self, t):
> +    return t
> +
> +  @TOKEN(missing_ordinal_value)
> +  def t_BAD_ORDINAL(self, t):
> +    msg = "Missing ordinal value"
> +    self._error(msg, t)
> +
> +  @TOKEN(identifier)
> +  def t_NAME(self, t):
> +    t.type = self.keyword_map.get(t.value, "NAME")
> +    return t
> +
> +  # Ignore C and C++ style comments
> +  def t_COMMENT(self, t):
> +    r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
> +    t.lexer.lineno += t.value.count("\n")
> +
> +  def t_error(self, t):
> +    msg = "Illegal character %s" % repr(t.value[0])
> +    self._error(msg, t)
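
Lexer itself only holds the token rules; ply turns it into an actual
tokenizer, as the unit test below also does. A minimal sketch of
tokenizing a single declaration (the mojom line is arbitrary; importing
the lexer first also runs the third-party path setup above, in case ply
is bundled):

    from mojom.parse.lexer import Lexer
    from ply import lex

    lexer = lex.lex(Lexer("sample.mojom"))
    lexer.input("const int32 kAnswer = 42;")
    print([(tok.type, tok.value) for tok in iter(lexer.token, None)])
    # [('CONST', 'const'), ('NAME', 'int32'), ('NAME', 'kAnswer'),
    #  ('EQUALS', '='), ('INT_CONST_DEC', '42'), ('SEMI', ';')]
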
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
> new file mode 100644
> index 00000000..eadc6587
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
> @@ -0,0 +1,198 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import sys
> +import unittest
> +
> +
> +def _GetDirAbove(dirname):
> +  """Returns the directory "above" this file containing |dirname| (which must
> +  also be "above" this file)."""
> +  path = os.path.abspath(__file__)
> +  while True:
> +    path, tail = os.path.split(path)
> +    assert tail
> +    if tail == dirname:
> +      return path
> +
> +
> +sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
> +from ply import lex
> +
> +try:
> +  imp.find_module("mojom")
> +except ImportError:
> +  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
> +import mojom.parse.lexer
> +
> +
> +# This (monkey-patching LexToken to make comparison value-based) is evil, but
> +# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
> +# for object identity.)
> +def _LexTokenEq(self, other):
> +  return self.type == other.type and self.value == other.value and \
> +         self.lineno == other.lineno and self.lexpos == other.lexpos
> +
> +
> +setattr(lex.LexToken, '__eq__', _LexTokenEq)
> +
> +
> +def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
> +  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
> +  but lexpos is 0-based.)"""
> +  rv = lex.LexToken()
> +  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
> +  return rv
> +
> +
> +def _MakeLexTokenForKeyword(keyword, **kwargs):
> +  """Makes a LexToken for the given keyword."""
> +  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
> +
> +
> +class LexerTest(unittest.TestCase):
> +  """Tests |mojom.parse.lexer.Lexer|."""
> +
> +  def __init__(self, *args, **kwargs):
> +    unittest.TestCase.__init__(self, *args, **kwargs)
> +    # Clone all lexer instances from this one, since making a lexer is slow.
> +    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
> +
> +  def testValidKeywords(self):
> +    """Tests valid keywords."""
> +    self.assertEquals(
> +        self._SingleTokenForInput("handle"), _MakeLexTokenForKeyword("handle"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("import"), _MakeLexTokenForKeyword("import"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("module"), _MakeLexTokenForKeyword("module"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("struct"), _MakeLexTokenForKeyword("struct"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("union"), _MakeLexTokenForKeyword("union"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("interface"),
> +        _MakeLexTokenForKeyword("interface"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("enum"), _MakeLexTokenForKeyword("enum"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("const"), _MakeLexTokenForKeyword("const"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("true"), _MakeLexTokenForKeyword("true"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("false"), _MakeLexTokenForKeyword("false"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("default"),
> +        _MakeLexTokenForKeyword("default"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("array"), _MakeLexTokenForKeyword("array"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("map"), _MakeLexTokenForKeyword("map"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("associated"),
> +        _MakeLexTokenForKeyword("associated"))
> +
> +  def testValidIdentifiers(self):
> +    """Tests identifiers."""
> +    self.assertEquals(
> +        self._SingleTokenForInput("abcd"), _MakeLexToken("NAME", "abcd"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("AbC_d012_"),
> +        _MakeLexToken("NAME", "AbC_d012_"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("_0123"), _MakeLexToken("NAME", "_0123"))
> +
> +  def testInvalidIdentifiers(self):
> +    with self.assertRaisesRegexp(
> +        mojom.parse.lexer.LexError,
> +        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
> +      self._TokensForInput("$abc")
> +    with self.assertRaisesRegexp(
> +        mojom.parse.lexer.LexError,
> +        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
> +      self._TokensForInput("a$bc")
> +
> +  def testDecimalIntegerConstants(self):
> +    self.assertEquals(
> +        self._SingleTokenForInput("0"), _MakeLexToken("INT_CONST_DEC", "0"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("1"), _MakeLexToken("INT_CONST_DEC", "1"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("123"), _MakeLexToken("INT_CONST_DEC", "123"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("10"), _MakeLexToken("INT_CONST_DEC", "10"))
> +
> +  def testValidTokens(self):
> +    """Tests valid tokens (which aren't tested elsewhere)."""
> +    # Keywords tested in |testValidKeywords|.
> +    # NAME tested in |testValidIdentifiers|.
> +    self.assertEquals(
> +        self._SingleTokenForInput("@123"), _MakeLexToken("ORDINAL", "@123"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("456"), _MakeLexToken("INT_CONST_DEC", "456"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("0x01aB2eF3"),
> +        _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("123.456"),
> +        _MakeLexToken("FLOAT_CONST", "123.456"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("\"hello\""),
> +        _MakeLexToken("STRING_LITERAL", "\"hello\""))
> +    self.assertEquals(
> +        self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
> +    self.assertEquals(self._SingleTokenForInput("&"), _MakeLexToken("AMP", "&"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("="), _MakeLexToken("EQUALS", "="))
> +    self.assertEquals(
> +        self._SingleTokenForInput("=>"), _MakeLexToken("RESPONSE", "=>"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("("), _MakeLexToken("LPAREN", "("))
> +    self.assertEquals(
> +        self._SingleTokenForInput(")"), _MakeLexToken("RPAREN", ")"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("["), _MakeLexToken("LBRACKET", "["))
> +    self.assertEquals(
> +        self._SingleTokenForInput("]"), _MakeLexToken("RBRACKET", "]"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("{"), _MakeLexToken("LBRACE", "{"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("}"), _MakeLexToken("RBRACE", "}"))
> +    self.assertEquals(
> +        self._SingleTokenForInput("<"), _MakeLexToken("LANGLE", "<"))
> +    self.assertEquals(
> +        self._SingleTokenForInput(">"), _MakeLexToken("RANGLE", ">"))
> +    self.assertEquals(
> +        self._SingleTokenForInput(";"), _MakeLexToken("SEMI", ";"))
> +    self.assertEquals(
> +        self._SingleTokenForInput(","), _MakeLexToken("COMMA", ","))
> +    self.assertEquals(self._SingleTokenForInput("."), _MakeLexToken("DOT", "."))
> +
> +  def _TokensForInput(self, input_string):
> +    """Gets a list of tokens for the given input string."""
> +    lexer = self._zygote_lexer.clone()
> +    lexer.input(input_string)
> +    rv = []
> +    while True:
> +      tok = lexer.token()
> +      if not tok:
> +        return rv
> +      rv.append(tok)
> +
> +  def _SingleTokenForInput(self, input_string):
> +    """Gets the single token for the given input string. (Raises an exception if
> +    the input string does not result in exactly one token.)"""
> +    toks = self._TokensForInput(input_string)
> +    assert len(toks) == 1
> +    return toks[0]
> +
> +
> +if __name__ == "__main__":
> +  unittest.main()
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
> new file mode 100644
> index 00000000..b3b803d6
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
> @@ -0,0 +1,488 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Generates a syntax tree from a Mojo IDL file."""
> +
> +import os.path
> +import sys
> +
> +from mojom import fileutil
> +from mojom.error import Error
> +from mojom.parse import ast
> +from mojom.parse.lexer import Lexer
> +
> +fileutil.AddLocalRepoThirdPartyDirToModulePath()
> +from ply import lex
> +from ply import yacc
> +
> +_MAX_ORDINAL_VALUE = 0xffffffff
> +_MAX_ARRAY_SIZE = 0xffffffff
> +
> +
> +class ParseError(Error):
> +  """Class for errors from the parser."""
> +
> +  def __init__(self, filename, message, lineno=None, snippet=None):
> +    Error.__init__(
> +        self,
> +        filename,
> +        message,
> +        lineno=lineno,
> +        addenda=([snippet] if snippet else None))
> +
> +
> +# We have methods which look like they could be functions:
> +# pylint: disable=R0201
> +class Parser(object):
> +  def __init__(self, lexer, source, filename):
> +    self.tokens = lexer.tokens
> +    self.source = source
> +    self.filename = filename
> +
> +  # Names of functions
> +  #
> +  # In general, we name functions after the left-hand-side of the rule(s) that
> +  # they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
> +  #
> +  # There may be multiple functions handling rules for the same left-hand-side;
> +  # then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
> +  # where N is a number (numbered starting from 1). Note that using multiple
> +  # functions is actually more efficient than having single functions handle
> +  # multiple rules (and, e.g., distinguishing them by examining |len(p)|).
> +  #
> +  # It's also possible to have a function handling multiple rules with different
> +  # left-hand-sides. We do not do this.
> +  #
> +  # See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.
> +
> +  # TODO(vtl): Get rid of the braces in the module "statement". (Consider
> +  # renaming "module" -> "package".) Then we'll be able to have a single rule
> +  # for root (by making module "optional").
> +  def p_root_1(self, p):
> +    """root : """
> +    p[0] = ast.Mojom(None, ast.ImportList(), [])
> +
> +  def p_root_2(self, p):
> +    """root : root module"""
> +    if p[1].module is not None:
> +      raise ParseError(
> +          self.filename,
> +          "Multiple \"module\" statements not allowed:",
> +          p[2].lineno,
> +          snippet=self._GetSnippet(p[2].lineno))
> +    if p[1].import_list.items or p[1].definition_list:
> +      raise ParseError(
> +          self.filename,
> +          "\"module\" statements must precede imports and definitions:",
> +          p[2].lineno,
> +          snippet=self._GetSnippet(p[2].lineno))
> +    p[0] = p[1]
> +    p[0].module = p[2]
> +
> +  def p_root_3(self, p):
> +    """root : root import"""
> +    if p[1].definition_list:
> +      raise ParseError(
> +          self.filename,
> +          "\"import\" statements must precede definitions:",
> +          p[2].lineno,
> +          snippet=self._GetSnippet(p[2].lineno))
> +    p[0] = p[1]
> +    p[0].import_list.Append(p[2])
> +
> +  def p_root_4(self, p):
> +    """root : root definition"""
> +    p[0] = p[1]
> +    p[0].definition_list.append(p[2])
> +
> +  def p_import(self, p):
> +    """import : attribute_section IMPORT STRING_LITERAL SEMI"""
> +    # 'eval' the literal to strip the quotes.
> +    # TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
> +    p[0] = ast.Import(
> +        p[1], eval(p[3]), filename=self.filename, lineno=p.lineno(2))
> +
> +  def p_module(self, p):
> +    """module : attribute_section MODULE identifier_wrapped SEMI"""
> +    p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))
> +
> +  def p_definition(self, p):
> +    """definition : struct
> +                  | union
> +                  | interface
> +                  | enum
> +                  | const"""
> +    p[0] = p[1]
> +
> +  def p_attribute_section_1(self, p):
> +    """attribute_section : """
> +    p[0] = None
> +
> +  def p_attribute_section_2(self, p):
> +    """attribute_section : LBRACKET attribute_list RBRACKET"""
> +    p[0] = p[2]
> +
> +  def p_attribute_list_1(self, p):
> +    """attribute_list : """
> +    p[0] = ast.AttributeList()
> +
> +  def p_attribute_list_2(self, p):
> +    """attribute_list : nonempty_attribute_list"""
> +    p[0] = p[1]
> +
> +  def p_nonempty_attribute_list_1(self, p):
> +    """nonempty_attribute_list : attribute"""
> +    p[0] = ast.AttributeList(p[1])
> +
> +  def p_nonempty_attribute_list_2(self, p):
> +    """nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
> +    p[0] = p[1]
> +    p[0].Append(p[3])
> +
> +  def p_attribute_1(self, p):
> +    """attribute : NAME EQUALS evaled_literal
> +                 | NAME EQUALS NAME"""
> +    p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
> +
> +  def p_attribute_2(self, p):
> +    """attribute : NAME"""
> +    p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))
> +
> +  def p_evaled_literal(self, p):
> +    """evaled_literal : literal"""
> +    # 'eval' the literal to strip the quotes. Handle keywords "true" and "false"
> +    # specially since they cannot directly be evaluated to python boolean
> +    # values.
> +    if p[1] == "true":
> +      p[0] = True
> +    elif p[1] == "false":
> +      p[0] = False
> +    else:
> +      p[0] = eval(p[1])
> +
> +  def p_struct_1(self, p):
> +    """struct : attribute_section STRUCT NAME LBRACE struct_body RBRACE SEMI"""
> +    p[0] = ast.Struct(p[3], p[1], p[5])
> +
> +  def p_struct_2(self, p):
> +    """struct : attribute_section STRUCT NAME SEMI"""
> +    p[0] = ast.Struct(p[3], p[1], None)
> +
> +  def p_struct_body_1(self, p):
> +    """struct_body : """
> +    p[0] = ast.StructBody()
> +
> +  def p_struct_body_2(self, p):
> +    """struct_body : struct_body const
> +                   | struct_body enum
> +                   | struct_body struct_field"""
> +    p[0] = p[1]
> +    p[0].Append(p[2])
> +
> +  def p_struct_field(self, p):
> +    """struct_field : attribute_section typename NAME ordinal default SEMI"""
> +    p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])
> +
> +  def p_union(self, p):
> +    """union : attribute_section UNION NAME LBRACE union_body RBRACE SEMI"""
> +    p[0] = ast.Union(p[3], p[1], p[5])
> +
> +  def p_union_body_1(self, p):
> +    """union_body : """
> +    p[0] = ast.UnionBody()
> +
> +  def p_union_body_2(self, p):
> +    """union_body : union_body union_field"""
> +    p[0] = p[1]
> +    p[1].Append(p[2])
> +
> +  def p_union_field(self, p):
> +    """union_field : attribute_section typename NAME ordinal SEMI"""
> +    p[0] = ast.UnionField(p[3], p[1], p[4], p[2])
> +
> +  def p_default_1(self, p):
> +    """default : """
> +    p[0] = None
> +
> +  def p_default_2(self, p):
> +    """default : EQUALS constant"""
> +    p[0] = p[2]
> +
> +  def p_interface(self, p):
> +    """interface : attribute_section INTERFACE NAME LBRACE interface_body \
> +                       RBRACE SEMI"""
> +    p[0] = ast.Interface(p[3], p[1], p[5])
> +
> +  def p_interface_body_1(self, p):
> +    """interface_body : """
> +    p[0] = ast.InterfaceBody()
> +
> +  def p_interface_body_2(self, p):
> +    """interface_body : interface_body const
> +                      | interface_body enum
> +                      | interface_body method"""
> +    p[0] = p[1]
> +    p[0].Append(p[2])
> +
> +  def p_response_1(self, p):
> +    """response : """
> +    p[0] = None
> +
> +  def p_response_2(self, p):
> +    """response : RESPONSE LPAREN parameter_list RPAREN"""
> +    p[0] = p[3]
> +
> +  def p_method(self, p):
> +    """method : attribute_section NAME ordinal LPAREN parameter_list RPAREN \
> +                    response SEMI"""
> +    p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])
> +
> +  def p_parameter_list_1(self, p):
> +    """parameter_list : """
> +    p[0] = ast.ParameterList()
> +
> +  def p_parameter_list_2(self, p):
> +    """parameter_list : nonempty_parameter_list"""
> +    p[0] = p[1]
> +
> +  def p_nonempty_parameter_list_1(self, p):
> +    """nonempty_parameter_list : parameter"""
> +    p[0] = ast.ParameterList(p[1])
> +
> +  def p_nonempty_parameter_list_2(self, p):
> +    """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
> +    p[0] = p[1]
> +    p[0].Append(p[3])
> +
> +  def p_parameter(self, p):
> +    """parameter : attribute_section typename NAME ordinal"""
> +    p[0] = ast.Parameter(
> +        p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))
> +
> +  def p_typename(self, p):
> +    """typename : nonnullable_typename QSTN
> +                | nonnullable_typename"""
> +    if len(p) == 2:
> +      p[0] = p[1]
> +    else:
> +      p[0] = p[1] + "?"
> +
> +  def p_nonnullable_typename(self, p):
> +    """nonnullable_typename : basictypename
> +                            | array
> +                            | fixed_array
> +                            | associative_array
> +                            | interfacerequest"""
> +    p[0] = p[1]
> +
> +  def p_basictypename(self, p):
> +    """basictypename : remotetype
> +                     | receivertype
> +                     | associatedremotetype
> +                     | associatedreceivertype
> +                     | identifier
> +                     | ASSOCIATED identifier
> +                     | handletype"""
> +    if len(p) == 2:
> +      p[0] = p[1]
> +    else:
> +      p[0] = "asso<" + p[2] + ">"
> +
> +  def p_remotetype(self, p):
> +    """remotetype : PENDING_REMOTE LANGLE identifier RANGLE"""
> +    p[0] = "rmt<%s>" % p[3]
> +
> +  def p_receivertype(self, p):
> +    """receivertype : PENDING_RECEIVER LANGLE identifier RANGLE"""
> +    p[0] = "rcv<%s>" % p[3]
> +
> +  def p_associatedremotetype(self, p):
> +    """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier \
> +                                  RANGLE"""
> +    p[0] = "rma<%s>" % p[3]
> +
> +  def p_associatedreceivertype(self, p):
> +    """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier \
> +                                    RANGLE"""
> +    p[0] = "rca<%s>" % p[3]
> +
> +  def p_handletype(self, p):
> +    """handletype : HANDLE
> +                  | HANDLE LANGLE NAME RANGLE"""
> +    if len(p) == 2:
> +      p[0] = p[1]
> +    else:
> +      if p[3] not in ('data_pipe_consumer', 'data_pipe_producer',
> +                      'message_pipe', 'shared_buffer', 'platform'):
> +        # Note: We don't enable tracking of line numbers for everything, so we
> +        # can't use |p.lineno(3)|.
> +        raise ParseError(
> +            self.filename,
> +            "Invalid handle type %r:" % p[3],
> +            lineno=p.lineno(1),
> +            snippet=self._GetSnippet(p.lineno(1)))
> +      p[0] = "handle<" + p[3] + ">"
> +
> +  def p_array(self, p):
> +    """array : ARRAY LANGLE typename RANGLE"""
> +    p[0] = p[3] + "[]"
> +
> +  def p_fixed_array(self, p):
> +    """fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
> +    value = int(p[5])
> +    if value == 0 or value > _MAX_ARRAY_SIZE:
> +      raise ParseError(
> +          self.filename,
> +          "Fixed array size %d invalid:" % value,
> +          lineno=p.lineno(5),
> +          snippet=self._GetSnippet(p.lineno(5)))
> +    p[0] = p[3] + "[" + p[5] + "]"
> +
> +  def p_associative_array(self, p):
> +    """associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
> +    p[0] = p[5] + "{" + p[3] + "}"
> +
> +  def p_interfacerequest(self, p):
> +    """interfacerequest : identifier AMP
> +                        | ASSOCIATED identifier AMP"""
> +    if len(p) == 3:
> +      p[0] = p[1] + "&"
> +    else:
> +      p[0] = "asso<" + p[2] + "&>"
> +
> +  def p_ordinal_1(self, p):
> +    """ordinal : """
> +    p[0] = None
> +
> +  def p_ordinal_2(self, p):
> +    """ordinal : ORDINAL"""
> +    value = int(p[1][1:])
> +    if value > _MAX_ORDINAL_VALUE:
> +      raise ParseError(
> +          self.filename,
> +          "Ordinal value %d too large:" % value,
> +          lineno=p.lineno(1),
> +          snippet=self._GetSnippet(p.lineno(1)))
> +    p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
> +
> +  def p_enum_1(self, p):
> +    """enum : attribute_section ENUM NAME LBRACE enum_value_list \
> +                  RBRACE SEMI
> +            | attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
> +                  COMMA RBRACE SEMI"""
> +    p[0] = ast.Enum(
> +        p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))
> +
> +  def p_enum_2(self, p):
> +    """enum : attribute_section ENUM NAME SEMI"""
> +    p[0] = ast.Enum(
> +        p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))
> +
> +  def p_enum_value_list_1(self, p):
> +    """enum_value_list : """
> +    p[0] = ast.EnumValueList()
> +
> +  def p_enum_value_list_2(self, p):
> +    """enum_value_list : nonempty_enum_value_list"""
> +    p[0] = p[1]
> +
> +  def p_nonempty_enum_value_list_1(self, p):
> +    """nonempty_enum_value_list : enum_value"""
> +    p[0] = ast.EnumValueList(p[1])
> +
> +  def p_nonempty_enum_value_list_2(self, p):
> +    """nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
> +    p[0] = p[1]
> +    p[0].Append(p[3])
> +
> +  def p_enum_value(self, p):
> +    """enum_value : attribute_section NAME
> +                  | attribute_section NAME EQUALS int
> +                  | attribute_section NAME EQUALS identifier_wrapped"""
> +    p[0] = ast.EnumValue(
> +        p[2],
> +        p[1],
> +        p[4] if len(p) == 5 else None,
> +        filename=self.filename,
> +        lineno=p.lineno(2))
> +
> +  def p_const(self, p):
> +    """const : attribute_section CONST typename NAME EQUALS constant SEMI"""
> +    p[0] = ast.Const(p[4], p[1], p[3], p[6])
> +
> +  def p_constant(self, p):
> +    """constant : literal
> +                | identifier_wrapped"""
> +    p[0] = p[1]
> +
> +  def p_identifier_wrapped(self, p):
> +    """identifier_wrapped : identifier"""
> +    p[0] = ('IDENTIFIER', p[1])
> +
> +  # TODO(vtl): Make this produce a "wrapped" identifier (probably as an
> +  # |ast.Identifier|, to be added) and get rid of identifier_wrapped.
> +  def p_identifier(self, p):
> +    """identifier : NAME
> +                  | NAME DOT identifier"""
> +    p[0] = ''.join(p[1:])
> +
> +  def p_literal(self, p):
> +    """literal : int
> +               | float
> +               | TRUE
> +               | FALSE
> +               | DEFAULT
> +               | STRING_LITERAL"""
> +    p[0] = p[1]
> +
> +  def p_int(self, p):
> +    """int : int_const
> +           | PLUS int_const
> +           | MINUS int_const"""
> +    p[0] = ''.join(p[1:])
> +
> +  def p_int_const(self, p):
> +    """int_const : INT_CONST_DEC
> +                 | INT_CONST_HEX"""
> +    p[0] = p[1]
> +
> +  def p_float(self, p):
> +    """float : FLOAT_CONST
> +             | PLUS FLOAT_CONST
> +             | MINUS FLOAT_CONST"""
> +    p[0] = ''.join(p[1:])
> +
> +  def p_error(self, e):
> +    if e is None:
> +      # Unexpected EOF.
> +      # TODO(vtl): Can we figure out what's missing?
> +      raise ParseError(self.filename, "Unexpected end of file")
> +
> +    raise ParseError(
> +        self.filename,
> +        "Unexpected %r:" % e.value,
> +        lineno=e.lineno,
> +        snippet=self._GetSnippet(e.lineno))
> +
> +  def _GetSnippet(self, lineno):
> +    return self.source.split('\n')[lineno - 1]
> +
> +
> +def Parse(source, filename):
> +  """Parse source file to AST.
> +
> +  Args:
> +    source: The source text as a str (Python 2 or 3) or unicode (Python 2).
> +    filename: The filename that |source| originates from.
> +
> +  Returns:
> +    The AST as a mojom.parse.ast.Mojom object.
> +  """
> +  lexer = Lexer(filename)
> +  parser = Parser(lexer, source, filename)
> +
> +  lex.lex(object=lexer)
> +  yacc.yacc(module=parser, debug=0, write_tables=0)
> +
> +  tree = yacc.parse(source)
> +  return tree
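
As a reference for anyone else wading through this file: below is a minimal
sketch of how this entry point can be exercised on its own. It assumes the
utils/ipc/mojo/public/tools/mojom directory is on PYTHONPATH and that PLY
provides the lex/yacc modules used by Parse(); "example.mojom" and the sample
source are placeholders for illustration, not files in this patch.

    # Hypothetical standalone use of mojom.parse.parser.Parse().
    from mojom.parse import parser

    source = '''\
        module example.pipeline;

        struct Params {
          int32 width;
          int32 height;
        };
        '''

    # Parse() wires up the PLY lexer/parser and returns an ast.Mojom node;
    # definition_list and import_list are the fields the p_root_*
    # productions above append to.
    tree = parser.Parse(source, "example.mojom")
    print(len(tree.definition_list))  # 1 definition: the Params struct
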
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
> new file mode 100644
> index 00000000..6d6b7153
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
> @@ -0,0 +1,1390 @@
> +# Copyright 2014 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import imp
> +import os.path
> +import sys
> +import unittest
> +
> +from mojom.parse import ast
> +from mojom.parse import lexer
> +from mojom.parse import parser
> +
> +
> +class ParserTest(unittest.TestCase):
> +  """Tests |parser.Parse()|."""
> +
> +  def testTrivialValidSource(self):
> +    """Tests a trivial, but valid, .mojom source."""
> +
> +    source = """\
> +        // This is a comment.
> +
> +        module my_module;
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testSourceWithCrLfs(self):
> +    """Tests a .mojom source with CR-LFs instead of LFs."""
> +
> +    source = "// This is a comment.\r\n\r\nmodule my_module;\r\n"
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testUnexpectedEOF(self):
> +    """Tests a "truncated" .mojom source."""
> +
> +    source = """\
> +        // This is a comment.
> +
> +        module my_module
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom: Error: Unexpected end of file$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testCommentLineNumbers(self):
> +    """Tests that line numbers are correctly tracked when comments are
> +    present."""
> +
> +    source1 = """\
> +        // Isolated C++-style comments.
> +
> +        // Foo.
> +        asdf1
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:4: Error: Unexpected 'asdf1':\n *asdf1$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        // Consecutive C++-style comments.
> +        // Foo.
> +        // Bar.
> +
> +        struct Yada {  // Baz.
> +                       // Quux.
> +          int32 x;
> +        };
> +
> +        asdf2
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:10: Error: Unexpected 'asdf2':\n *asdf2$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        /* Single-line C-style comments. */
> +        /* Foobar. */
> +
> +        /* Baz. */
> +        asdf3
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:5: Error: Unexpected 'asdf3':\n *asdf3$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +    source4 = """\
> +        /* Multi-line C-style comments.
> +        */
> +        /*
> +        Foo.
> +        Bar.
> +        */
> +
> +        /* Baz
> +           Quux. */
> +        asdf4
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:10: Error: Unexpected 'asdf4':\n *asdf4$"):
> +      parser.Parse(source4, "my_file.mojom")
> +
> +  def testSimpleStruct(self):
> +    """Tests a simple .mojom source that just defines a struct."""
> +
> +    source = """\
> +        module my_module;
> +
> +        struct MyStruct {
> +          int32 a;
> +          double b;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Struct(
> +                'MyStruct', None,
> +                ast.StructBody([
> +                    ast.StructField('a', None, None, 'int32', None),
> +                    ast.StructField('b', None, None, 'double', None)
> +                ]))
> +        ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testSimpleStructWithoutModule(self):
> +    """Tests a simple struct without an explict module statement."""
> +
> +    source = """\
> +        struct MyStruct {
> +          int32 a;
> +          double b;
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.StructField('a', None, None, 'int32', None),
> +                ast.StructField('b', None, None, 'double', None)
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testValidStructDefinitions(self):
> +    """Tests all types of definitions that can occur in a struct."""
> +
> +    source = """\
> +        struct MyStruct {
> +          enum MyEnum { VALUE };
> +          const double kMyConst = 1.23;
> +          int32 a;
> +          SomeOtherStruct b;  // Invalidity detected at another stage.
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.Enum('MyEnum', None,
> +                         ast.EnumValueList(ast.EnumValue('VALUE', None, None))),
> +                ast.Const('kMyConst', None, 'double', '1.23'),
> +                ast.StructField('a', None, None, 'int32', None),
> +                ast.StructField('b', None, None, 'SomeOtherStruct', None)
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidStructDefinitions(self):
> +    """Tests that definitions that aren't allowed in a struct are correctly
> +    detected."""
> +
> +    source1 = """\
> +        struct MyStruct {
> +          MyMethod(int32 a);
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\(':\n"
> +        r" *MyMethod\(int32 a\);$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        struct MyStruct {
> +          struct MyInnerStruct {
> +            int32 a;
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
> +        r" *struct MyInnerStruct {$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        struct MyStruct {
> +          interface MyInterface {
> +            MyMethod(int32 a);
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Unexpected 'interface':\n"
> +        r" *interface MyInterface {$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +  def testMissingModuleName(self):
> +    """Tests an (invalid) .mojom with a missing module name."""
> +
> +    source1 = """\
> +        // Missing module name.
> +        module ;
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Unexpected ';':\n *module ;$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    # Another similar case, but make sure that line-number tracking/reporting
> +    # is correct.
> +    source2 = """\
> +        module
> +        // This line intentionally left unblank.
> +
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'struct':\n"
> +        r" *struct MyStruct {$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +  def testMultipleModuleStatements(self):
> +    """Tests an (invalid) .mojom with multiple module statements."""
> +
> +    source = """\
> +        module foo;
> +        module bar;
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Multiple \"module\" statements not "
> +        r"allowed:\n *module bar;$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testModuleStatementAfterImport(self):
> +    """Tests an (invalid) .mojom with a module statement after an import."""
> +
> +    source = """\
> +        import "foo.mojom";
> +        module foo;
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: \"module\" statements must precede imports "
> +        r"and definitions:\n *module foo;$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testModuleStatementAfterDefinition(self):
> +    """Tests an (invalid) .mojom with a module statement after a definition."""
> +
> +    source = """\
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        module foo;
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:4: Error: \"module\" statements must precede imports "
> +        r"and definitions:\n *module foo;$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testImportStatementAfterDefinition(self):
> +    """Tests an (invalid) .mojom with an import statement after a definition."""
> +
> +    source = """\
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        import "foo.mojom";
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:4: Error: \"import\" statements must precede "
> +        r"definitions:\n *import \"foo.mojom\";$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testEnums(self):
> +    """Tests that enum statements are correctly parsed."""
> +
> +    source = """\
> +        module my_module;
> +        enum MyEnum1 { VALUE1, VALUE2 };  // No trailing comma.
> +        enum MyEnum2 {
> +          VALUE1 = -1,
> +          VALUE2 = 0,
> +          VALUE3 = + 987,  // Check that space is allowed.
> +          VALUE4 = 0xAF12,
> +          VALUE5 = -0x09bcd,
> +          VALUE6 = VALUE5,
> +          VALUE7,  // Leave trailing comma.
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Enum(
> +                'MyEnum1', None,
> +                ast.EnumValueList([
> +                    ast.EnumValue('VALUE1', None, None),
> +                    ast.EnumValue('VALUE2', None, None)
> +                ])),
> +            ast.Enum(
> +                'MyEnum2', None,
> +                ast.EnumValueList([
> +                    ast.EnumValue('VALUE1', None, '-1'),
> +                    ast.EnumValue('VALUE2', None, '0'),
> +                    ast.EnumValue('VALUE3', None, '+987'),
> +                    ast.EnumValue('VALUE4', None, '0xAF12'),
> +                    ast.EnumValue('VALUE5', None, '-0x09bcd'),
> +                    ast.EnumValue('VALUE6', None, ('IDENTIFIER', 'VALUE5')),
> +                    ast.EnumValue('VALUE7', None, None)
> +                ]))
> +        ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidEnumInitializers(self):
> +    """Tests that invalid enum initializers are correctly detected."""
> +
> +    # Floating point value.
> +    source2 = "enum MyEnum { VALUE = 0.123 };"
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '0\.123':\n"
> +        r"enum MyEnum { VALUE = 0\.123 };$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    # Boolean value.
> +    source2 = "enum MyEnum { VALUE = true };"
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected 'true':\n"
> +        r"enum MyEnum { VALUE = true };$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +  def testConsts(self):
> +    """Tests some constants and struct members initialized with them."""
> +
> +    source = """\
> +        module my_module;
> +
> +        struct MyStruct {
> +          const int8 kNumber = -1;
> +          int8 number@0 = kNumber;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Struct(
> +                'MyStruct', None,
> +                ast.StructBody([
> +                    ast.Const('kNumber', None, 'int8', '-1'),
> +                    ast.StructField('number', None, ast.Ordinal(0), 'int8',
> +                                    ('IDENTIFIER', 'kNumber'))
> +                ]))
> +        ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testNoConditionals(self):
> +    """Tests that ?: is not allowed."""
> +
> +    source = """\
> +        module my_module;
> +
> +        enum MyEnum {
> +          MY_ENUM_1 = 1 ? 2 : 3
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected '\?':\n"
> +        r" *MY_ENUM_1 = 1 \? 2 : 3$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testSimpleOrdinals(self):
> +    """Tests that (valid) ordinal values are scanned correctly."""
> +
> +    source = """\
> +        module my_module;
> +
> +        // This isn't actually valid .mojom, but the problem (missing ordinals)
> +        // should be handled at a different level.
> +        struct MyStruct {
> +          int32 a0@0;
> +          int32 a1@1;
> +          int32 a2@2;
> +          int32 a9@9;
> +          int32 a10 @10;
> +          int32 a11 @11;
> +          int32 a29 @29;
> +          int32 a1234567890 @1234567890;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Struct(
> +                'MyStruct', None,
> +                ast.StructBody([
> +                    ast.StructField('a0', None, ast.Ordinal(0), 'int32', None),
> +                    ast.StructField('a1', None, ast.Ordinal(1), 'int32', None),
> +                    ast.StructField('a2', None, ast.Ordinal(2), 'int32', None),
> +                    ast.StructField('a9', None, ast.Ordinal(9), 'int32', None),
> +                    ast.StructField('a10', None, ast.Ordinal(10), 'int32',
> +                                    None),
> +                    ast.StructField('a11', None, ast.Ordinal(11), 'int32',
> +                                    None),
> +                    ast.StructField('a29', None, ast.Ordinal(29), 'int32',
> +                                    None),
> +                    ast.StructField('a1234567890', None,
> +                                    ast.Ordinal(1234567890), 'int32', None)
> +                ]))
> +        ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidOrdinals(self):
> +    """Tests that (lexically) invalid ordinals are correctly detected."""
> +
> +    source1 = """\
> +        module my_module;
> +
> +        struct MyStruct {
> +          int32 a_missing@;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        lexer.LexError, r"^my_file\.mojom:4: Error: Missing ordinal value$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        module my_module;
> +
> +        struct MyStruct {
> +          int32 a_octal@01;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        lexer.LexError, r"^my_file\.mojom:4: Error: "
> +        r"Octal and hexadecimal ordinal values not allowed$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        module my_module; struct MyStruct { int32 a_invalid_octal@08; };
> +        """
> +    with self.assertRaisesRegexp(
> +        lexer.LexError, r"^my_file\.mojom:1: Error: "
> +        r"Octal and hexadecimal ordinal values not allowed$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +    source4 = "module my_module; struct MyStruct { int32 a_hex at 0x1aB9; };"
> +    with self.assertRaisesRegexp(
> +        lexer.LexError, r"^my_file\.mojom:1: Error: "
> +        r"Octal and hexadecimal ordinal values not allowed$"):
> +      parser.Parse(source4, "my_file.mojom")
> +
> +    source5 = "module my_module; struct MyStruct { int32 a_hex at 0X0; };"
> +    with self.assertRaisesRegexp(
> +        lexer.LexError, r"^my_file\.mojom:1: Error: "
> +        r"Octal and hexadecimal ordinal values not allowed$"):
> +      parser.Parse(source5, "my_file.mojom")
> +
> +    source6 = """\
> +        struct MyStruct {
> +          int32 a_too_big@999999999999;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: "
> +        r"Ordinal value 999999999999 too large:\n"
> +        r" *int32 a_too_big at 999999999999;$"):
> +      parser.Parse(source6, "my_file.mojom")
> +
> +  def testNestedNamespace(self):
> +    """Tests that "nested" namespaces work."""
> +
> +    source = """\
> +        module my.mod;
> +
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my.mod'), None), ast.ImportList(), [
> +            ast.Struct(
> +                'MyStruct', None,
> +                ast.StructBody(ast.StructField('a', None, None, 'int32', None)))
> +        ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testValidHandleTypes(self):
> +    """Tests (valid) handle types."""
> +
> +    source = """\
> +        struct MyStruct {
> +          handle a;
> +          handle<data_pipe_consumer> b;
> +          handle <data_pipe_producer> c;
> +          handle < message_pipe > d;
> +          handle
> +            < shared_buffer
> +            > e;
> +          handle
> +            <platform
> +
> +            > f;
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.StructField('a', None, None, 'handle', None),
> +                ast.StructField('b', None, None, 'handle<data_pipe_consumer>',
> +                                None),
> +                ast.StructField('c', None, None, 'handle<data_pipe_producer>',
> +                                None),
> +                ast.StructField('d', None, None, 'handle<message_pipe>', None),
> +                ast.StructField('e', None, None, 'handle<shared_buffer>', None),
> +                ast.StructField('f', None, None, 'handle<platform>', None)
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidHandleType(self):
> +    """Tests an invalid (unknown) handle type."""
> +
> +    source = """\
> +        struct MyStruct {
> +          handle<wtf_is_this> foo;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: "
> +        r"Invalid handle type 'wtf_is_this':\n"
> +        r" *handle<wtf_is_this> foo;$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testValidDefaultValues(self):
> +    """Tests default values that are valid (to the parser)."""
> +
> +    source = """\
> +        struct MyStruct {
> +          int16 a0 = 0;
> +          uint16 a1 = 0x0;
> +          uint16 a2 = 0x00;
> +          uint16 a3 = 0x01;
> +          uint16 a4 = 0xcd;
> +          int32 a5 = 12345;
> +          int64 a6 = -12345;
> +          int64 a7 = +12345;
> +          uint32 a8 = 0x12cd3;
> +          uint32 a9 = -0x12cD3;
> +          uint32 a10 = +0x12CD3;
> +          bool a11 = true;
> +          bool a12 = false;
> +          float a13 = 1.2345;
> +          float a14 = -1.2345;
> +          float a15 = +1.2345;
> +          float a16 = 123.;
> +          float a17 = .123;
> +          double a18 = 1.23E10;
> +          double a19 = 1.E-10;
> +          double a20 = .5E+10;
> +          double a21 = -1.23E10;
> +          double a22 = +.123E10;
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.StructField('a0', None, None, 'int16', '0'),
> +                ast.StructField('a1', None, None, 'uint16', '0x0'),
> +                ast.StructField('a2', None, None, 'uint16', '0x00'),
> +                ast.StructField('a3', None, None, 'uint16', '0x01'),
> +                ast.StructField('a4', None, None, 'uint16', '0xcd'),
> +                ast.StructField('a5', None, None, 'int32', '12345'),
> +                ast.StructField('a6', None, None, 'int64', '-12345'),
> +                ast.StructField('a7', None, None, 'int64', '+12345'),
> +                ast.StructField('a8', None, None, 'uint32', '0x12cd3'),
> +                ast.StructField('a9', None, None, 'uint32', '-0x12cD3'),
> +                ast.StructField('a10', None, None, 'uint32', '+0x12CD3'),
> +                ast.StructField('a11', None, None, 'bool', 'true'),
> +                ast.StructField('a12', None, None, 'bool', 'false'),
> +                ast.StructField('a13', None, None, 'float', '1.2345'),
> +                ast.StructField('a14', None, None, 'float', '-1.2345'),
> +                ast.StructField('a15', None, None, 'float', '+1.2345'),
> +                ast.StructField('a16', None, None, 'float', '123.'),
> +                ast.StructField('a17', None, None, 'float', '.123'),
> +                ast.StructField('a18', None, None, 'double', '1.23E10'),
> +                ast.StructField('a19', None, None, 'double', '1.E-10'),
> +                ast.StructField('a20', None, None, 'double', '.5E+10'),
> +                ast.StructField('a21', None, None, 'double', '-1.23E10'),
> +                ast.StructField('a22', None, None, 'double', '+.123E10')
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testValidFixedSizeArray(self):
> +    """Tests parsing a fixed size array."""
> +
> +    source = """\
> +        struct MyStruct {
> +          array<int32> normal_array;
> +          array<int32, 1> fixed_size_array_one_entry;
> +          array<int32, 10> fixed_size_array_ten_entries;
> +          array<array<array<int32, 1>>, 2> nested_arrays;
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.StructField('normal_array', None, None, 'int32[]', None),
> +                ast.StructField('fixed_size_array_one_entry', None, None,
> +                                'int32[1]', None),
> +                ast.StructField('fixed_size_array_ten_entries', None, None,
> +                                'int32[10]', None),
> +                ast.StructField('nested_arrays', None, None, 'int32[1][][2]',
> +                                None)
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testValidNestedArray(self):
> +    """Tests parsing a nested array."""
> +
> +    source = "struct MyStruct { array<array<int32>> nested_array; };"
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody(
> +                ast.StructField('nested_array', None, None, 'int32[][]', None)))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidFixedArraySize(self):
> +    """Tests that invalid fixed array bounds are correctly detected."""
> +
> +    source1 = """\
> +        struct MyStruct {
> +          array<int32, 0> zero_size_array;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Fixed array size 0 invalid:\n"
> +        r" *array<int32, 0> zero_size_array;$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        struct MyStruct {
> +          array<int32, 999999999999> too_big_array;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Fixed array size 999999999999 invalid:\n"
> +        r" *array<int32, 999999999999> too_big_array;$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        struct MyStruct {
> +          array<int32, abcdefg> not_a_number;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'abcdefg':\n"
> +        r" *array<int32, abcdefg> not_a_number;"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +  def testValidAssociativeArrays(self):
> +    """Tests that we can parse valid associative array structures."""
> +
> +    source1 = "struct MyStruct { map<string, uint8> data; };"
> +    expected1 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody(
> +                [ast.StructField('data', None, None, 'uint8{string}', None)]))
> +    ])
> +    self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
> +
> +    source2 = "interface MyInterface { MyMethod(map<string, uint8> a); };"
> +    expected2 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Interface(
> +            'MyInterface', None,
> +            ast.InterfaceBody(
> +                ast.Method(
> +                    'MyMethod', None, None,
> +                    ast.ParameterList(
> +                        ast.Parameter('a', None, None, 'uint8{string}')),
> +                    None)))
> +    ])
> +    self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
> +
> +    source3 = "struct MyStruct { map<string, array<uint8>> data; };"
> +    expected3 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody(
> +                [ast.StructField('data', None, None, 'uint8[]{string}', None)]))
> +    ])
> +    self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
> +
> +  def testValidMethod(self):
> +    """Tests parsing method declarations."""
> +
> +    source1 = "interface MyInterface { MyMethod(int32 a); };"
> +    expected1 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Interface(
> +            'MyInterface', None,
> +            ast.InterfaceBody(
> +                ast.Method(
> +                    'MyMethod', None, None,
> +                    ast.ParameterList(ast.Parameter('a', None, None, 'int32')),
> +                    None)))
> +    ])
> +    self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
> +
> +    source2 = """\
> +        interface MyInterface {
> +          MyMethod1@0(int32 a@0, int64 b@1);
> +          MyMethod2@1() => ();
> +        };
> +        """
> +    expected2 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Interface(
> +            'MyInterface', None,
> +            ast.InterfaceBody([
> +                ast.Method(
> +                    'MyMethod1', None, ast.Ordinal(0),
> +                    ast.ParameterList([
> +                        ast.Parameter('a', None, ast.Ordinal(0), 'int32'),
> +                        ast.Parameter('b', None, ast.Ordinal(1), 'int64')
> +                    ]), None),
> +                ast.Method('MyMethod2', None, ast.Ordinal(1),
> +                           ast.ParameterList(), ast.ParameterList())
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
> +
> +    source3 = """\
> +        interface MyInterface {
> +          MyMethod(string a) => (int32 a, bool b);
> +        };
> +        """
> +    expected3 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Interface(
> +            'MyInterface', None,
> +            ast.InterfaceBody(
> +                ast.Method(
> +                    'MyMethod', None, None,
> +                    ast.ParameterList(ast.Parameter('a', None, None, 'string')),
> +                    ast.ParameterList([
> +                        ast.Parameter('a', None, None, 'int32'),
> +                        ast.Parameter('b', None, None, 'bool')
> +                    ]))))
> +    ])
> +    self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
> +
> +  def testInvalidMethods(self):
> +    """Tests that invalid method declarations are correctly detected."""
> +
> +    # No trailing commas.
> +    source1 = """\
> +        interface MyInterface {
> +          MyMethod(string a,);
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\)':\n"
> +        r" *MyMethod\(string a,\);$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    # No leading commas.
> +    source2 = """\
> +        interface MyInterface {
> +          MyMethod(, string a);
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected ',':\n"
> +        r" *MyMethod\(, string a\);$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +  def testValidInterfaceDefinitions(self):
> +    """Tests all types of definitions that can occur in an interface."""
> +
> +    source = """\
> +        interface MyInterface {
> +          enum MyEnum { VALUE };
> +          const int32 kMyConst = 123;
> +          MyMethod(int32 x) => (MyEnum y);
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Interface(
> +            'MyInterface', None,
> +            ast.InterfaceBody([
> +                ast.Enum('MyEnum', None,
> +                         ast.EnumValueList(ast.EnumValue('VALUE', None, None))),
> +                ast.Const('kMyConst', None, 'int32', '123'),
> +                ast.Method(
> +                    'MyMethod', None, None,
> +                    ast.ParameterList(ast.Parameter('x', None, None, 'int32')),
> +                    ast.ParameterList(ast.Parameter('y', None, None, 'MyEnum')))
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidInterfaceDefinitions(self):
> +    """Tests that definitions that aren't allowed in an interface are correctly
> +    detected."""
> +
> +    source1 = """\
> +        interface MyInterface {
> +          struct MyStruct {
> +            int32 a;
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
> +        r" *struct MyStruct {$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        interface MyInterface {
> +          interface MyInnerInterface {
> +            MyMethod(int32 x);
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Unexpected 'interface':\n"
> +        r" *interface MyInnerInterface {$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        interface MyInterface {
> +          int32 my_field;
> +        };
> +        """
> +    # The parser thinks that "int32" is a plausible name for a method, so it's
> +    # "my_field" that gives it away.
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'my_field':\n"
> +        r" *int32 my_field;$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +  def testValidAttributes(self):
> +    """Tests parsing attributes (and attribute lists)."""
> +
> +    # Note: We use structs because they have (optional) attribute lists.
> +
> +    # Empty attribute list.
> +    source1 = "[] struct MyStruct {};"
> +    expected1 = ast.Mojom(
> +        None, ast.ImportList(),
> +        [ast.Struct('MyStruct', ast.AttributeList(), ast.StructBody())])
> +    self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
> +
> +    # One-element attribute list, with name value.
> +    source2 = "[MyAttribute=MyName] struct MyStruct {};"
> +    expected2 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct('MyStruct',
> +                   ast.AttributeList(ast.Attribute("MyAttribute", "MyName")),
> +                   ast.StructBody())
> +    ])
> +    self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
> +
> +    # Two-element attribute list, with one string value and one integer value.
> +    source3 = "[MyAttribute1 = \"hello\", MyAttribute2 = 5] struct MyStruct {};"
> +    expected3 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct',
> +            ast.AttributeList([
> +                ast.Attribute("MyAttribute1", "hello"),
> +                ast.Attribute("MyAttribute2", 5)
> +            ]), ast.StructBody())
> +    ])
> +    self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
> +
> +    # Various places that attribute list is allowed.
> +    source4 = """\
> +        [Attr0=0] module my_module;
> +
> +        [Attr1=1] import "my_import";
> +
> +        [Attr2=2] struct MyStruct {
> +          [Attr3=3] int32 a;
> +        };
> +        [Attr4=4] union MyUnion {
> +          [Attr5=5] int32 a;
> +        };
> +        [Attr6=6] enum MyEnum {
> +          [Attr7=7] a
> +        };
> +        [Attr8=8] interface MyInterface {
> +          [Attr9=9] MyMethod([Attr10=10] int32 a) => ([Attr11=11] bool b);
> +        };
> +        [Attr12=12] const double kMyConst = 1.23;
> +        """
> +    expected4 = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'),
> +                   ast.AttributeList([ast.Attribute("Attr0", 0)])),
> +        ast.ImportList(
> +            ast.Import(
> +                ast.AttributeList([ast.Attribute("Attr1", 1)]), "my_import")),
> +        [
> +            ast.Struct(
> +                'MyStruct', ast.AttributeList(ast.Attribute("Attr2", 2)),
> +                ast.StructBody(
> +                    ast.StructField(
> +                        'a', ast.AttributeList([ast.Attribute("Attr3", 3)]),
> +                        None, 'int32', None))),
> +            ast.Union(
> +                'MyUnion', ast.AttributeList(ast.Attribute("Attr4", 4)),
> +                ast.UnionBody(
> +                    ast.UnionField(
> +                        'a', ast.AttributeList([ast.Attribute("Attr5", 5)]),
> +                        None, 'int32'))),
> +            ast.Enum(
> +                'MyEnum', ast.AttributeList(ast.Attribute("Attr6", 6)),
> +                ast.EnumValueList(
> +                    ast.EnumValue(
> +                        'VALUE', ast.AttributeList([ast.Attribute("Attr7", 7)]),
> +                        None))),
> +            ast.Interface(
> +                'MyInterface', ast.AttributeList(ast.Attribute("Attr8", 8)),
> +                ast.InterfaceBody(
> +                    ast.Method(
> +                        'MyMethod', ast.AttributeList(
> +                            ast.Attribute("Attr9", 9)), None,
> +                        ast.ParameterList(
> +                            ast.Parameter(
> +                                'a',
> +                                ast.AttributeList([ast.Attribute("Attr10", 10)
> +                                                   ]), None, 'int32')),
> +                        ast.ParameterList(
> +                            ast.Parameter(
> +                                'b',
> +                                ast.AttributeList([ast.Attribute("Attr11", 11)
> +                                                   ]), None, 'bool'))))),
> +            ast.Const('kMyConst', ast.AttributeList(
> +                ast.Attribute("Attr12", 12)), 'double', '1.23')
> +        ])
> +    self.assertEquals(parser.Parse(source4, "my_file.mojom"), expected4)
> +
> +    # TODO(vtl): Boolean attributes don't work yet. (In fact, we just |eval()|
> +    # literal (non-name) values, which is extremely dubious.)
> +
> +  def testInvalidAttributes(self):
> +    """Tests that invalid attributes and attribute lists are correctly
> +    detected."""
> +
> +    # Trailing commas not allowed.
> +    source1 = "[MyAttribute=MyName,] struct MyStruct {};"
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '\]':\n"
> +        r"\[MyAttribute=MyName,\] struct MyStruct {};$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    # Missing value.
> +    source2 = "[MyAttribute=] struct MyStruct {};"
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '\]':\n"
> +        r"\[MyAttribute=\] struct MyStruct {};$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    # Missing key.
> +    source3 = "[=MyName] struct MyStruct {};"
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '=':\n"
> +        r"\[=MyName\] struct MyStruct {};$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +  def testValidImports(self):
> +    """Tests parsing import statements."""
> +
> +    # One import (no module statement).
> +    source1 = "import \"somedir/my.mojom\";"
> +    expected1 = ast.Mojom(None,
> +                          ast.ImportList(ast.Import(None, "somedir/my.mojom")),
> +                          [])
> +    self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
> +
> +    # Two imports (no module statement).
> +    source2 = """\
> +        import "somedir/my1.mojom";
> +        import "somedir/my2.mojom";
> +        """
> +    expected2 = ast.Mojom(
> +        None,
> +        ast.ImportList([
> +            ast.Import(None, "somedir/my1.mojom"),
> +            ast.Import(None, "somedir/my2.mojom")
> +        ]), [])
> +    self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
> +
> +    # Imports with module statement.
> +    source3 = """\
> +        module my_module;
> +        import "somedir/my1.mojom";
> +        import "somedir/my2.mojom";
> +        """
> +    expected3 = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None),
> +        ast.ImportList([
> +            ast.Import(None, "somedir/my1.mojom"),
> +            ast.Import(None, "somedir/my2.mojom")
> +        ]), [])
> +    self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3)
> +
> +  def testInvalidImports(self):
> +    """Tests that invalid import statements are correctly detected."""
> +
> +    source1 = """\
> +        // Make the error occur on line 2.
> +        import invalid
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'invalid':\n"
> +        r" *import invalid$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        import  // Missing string.
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
> +        r" *struct MyStruct {$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        import "foo.mojom"  // Missing semicolon.
> +        struct MyStruct {
> +          int32 a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n"
> +        r" *struct MyStruct {$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +  def testValidNullableTypes(self):
> +    """Tests parsing nullable types."""
> +
> +    source = """\
> +        struct MyStruct {
> +          int32? a;  // This is actually invalid, but handled at a different
> +                     // level.
> +          string? b;
> +          array<int32> ? c;
> +          array<string ? > ? d;
> +          array<array<int32>?>? e;
> +          array<int32, 1>? f;
> +          array<string?, 1>? g;
> +          some_struct? h;
> +          handle? i;
> +          handle<data_pipe_consumer>? j;
> +          handle<data_pipe_producer>? k;
> +          handle<message_pipe>? l;
> +          handle<shared_buffer>? m;
> +          some_interface&? n;
> +          handle<platform>? o;
> +        };
> +        """
> +    expected = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.StructField('a', None, None, 'int32?', None),
> +                ast.StructField('b', None, None, 'string?', None),
> +                ast.StructField('c', None, None, 'int32[]?', None),
> +                ast.StructField('d', None, None, 'string?[]?', None),
> +                ast.StructField('e', None, None, 'int32[]?[]?', None),
> +                ast.StructField('f', None, None, 'int32[1]?', None),
> +                ast.StructField('g', None, None, 'string?[1]?', None),
> +                ast.StructField('h', None, None, 'some_struct?', None),
> +                ast.StructField('i', None, None, 'handle?', None),
> +                ast.StructField('j', None, None, 'handle<data_pipe_consumer>?',
> +                                None),
> +                ast.StructField('k', None, None, 'handle<data_pipe_producer>?',
> +                                None),
> +                ast.StructField('l', None, None, 'handle<message_pipe>?', None),
> +                ast.StructField('m', None, None, 'handle<shared_buffer>?',
> +                                None),
> +                ast.StructField('n', None, None, 'some_interface&?', None),
> +                ast.StructField('o', None, None, 'handle<platform>?', None)
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source, "my_file.mojom"), expected)
> +
> +  def testInvalidNullableTypes(self):
> +    """Tests that invalid nullable types are correctly detected."""
> +    source1 = """\
> +        struct MyStruct {
> +          string?? a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\?':\n"
> +        r" *string\?\? a;$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        struct MyStruct {
> +          handle?<data_pipe_consumer> a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '<':\n"
> +        r" *handle\?<data_pipe_consumer> a;$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        struct MyStruct {
> +          some_interface?& a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '&':\n"
> +        r" *some_interface\?& a;$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +  def testSimpleUnion(self):
> +    """Tests a simple .mojom source that just defines a union."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          int32 a;
> +          double b;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Union(
> +                'MyUnion', None,
> +                ast.UnionBody([
> +                    ast.UnionField('a', None, None, 'int32'),
> +                    ast.UnionField('b', None, None, 'double')
> +                ]))
> +        ])
> +    actual = parser.Parse(source, "my_file.mojom")
> +    self.assertEquals(actual, expected)
> +
> +  def testUnionWithOrdinals(self):
> +    """Test that ordinals are assigned to fields."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          int32 a @10;
> +          double b @30;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Union(
> +                'MyUnion', None,
> +                ast.UnionBody([
> +                    ast.UnionField('a', None, ast.Ordinal(10), 'int32'),
> +                    ast.UnionField('b', None, ast.Ordinal(30), 'double')
> +                ]))
> +        ])
> +    actual = parser.Parse(source, "my_file.mojom")
> +    self.assertEquals(actual, expected)
> +
> +  def testUnionWithStructMembers(self):
> +    """Test that struct members are accepted."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          SomeStruct s;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Union(
> +                'MyUnion', None,
> +                ast.UnionBody([ast.UnionField('s', None, None, 'SomeStruct')]))
> +        ])
> +    actual = parser.Parse(source, "my_file.mojom")
> +    self.assertEquals(actual, expected)
> +
> +  def testUnionWithArrayMember(self):
> +    """Test that array members are accepted."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          array<int32> a;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Union(
> +                'MyUnion', None,
> +                ast.UnionBody([ast.UnionField('a', None, None, 'int32[]')]))
> +        ])
> +    actual = parser.Parse(source, "my_file.mojom")
> +    self.assertEquals(actual, expected)
> +
> +  def testUnionWithMapMember(self):
> +    """Test that map members are accepted."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          map<int32, string> m;
> +        };
> +        """
> +    expected = ast.Mojom(
> +        ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), [
> +            ast.Union(
> +                'MyUnion', None,
> +                ast.UnionBody(
> +                    [ast.UnionField('m', None, None, 'string{int32}')]))
> +        ])
> +    actual = parser.Parse(source, "my_file.mojom")
> +    self.assertEquals(actual, expected)
> +
> +  def testUnionDisallowNestedStruct(self):
> +    """Tests that structs cannot be nested in unions."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          struct MyStruct {
> +            int32 a;
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'struct':\n"
> +        r" *struct MyStruct {$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testUnionDisallowNestedInterfaces(self):
> +    """Tests that interfaces cannot be nested in unions."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          interface MyInterface {
> +            MyMethod(int32 a);
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:4: Error: Unexpected 'interface':\n"
> +        r" *interface MyInterface {$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testUnionDisallowNestedUnion(self):
> +    """Tests that unions cannot be nested in unions."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          union MyOtherUnion {
> +            int32 a;
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'union':\n"
> +        r" *union MyOtherUnion {$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testUnionDisallowNestedEnum(self):
> +    """Tests that enums cannot be nested in unions."""
> +    source = """\
> +        module my_module;
> +
> +        union MyUnion {
> +          enum MyEnum {
> +            A,
> +          };
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'enum':\n"
> +        r" *enum MyEnum {$"):
> +      parser.Parse(source, "my_file.mojom")
> +
> +  def testValidAssociatedKinds(self):
> +    """Tests parsing associated interfaces and requests."""
> +    source1 = """\
> +        struct MyStruct {
> +          associated MyInterface a;
> +          associated MyInterface& b;
> +          associated MyInterface? c;
> +          associated MyInterface&? d;
> +        };
> +        """
> +    expected1 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Struct(
> +            'MyStruct', None,
> +            ast.StructBody([
> +                ast.StructField('a', None, None, 'asso<MyInterface>', None),
> +                ast.StructField('b', None, None, 'asso<MyInterface&>', None),
> +                ast.StructField('c', None, None, 'asso<MyInterface>?', None),
> +                ast.StructField('d', None, None, 'asso<MyInterface&>?', None)
> +            ]))
> +    ])
> +    self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1)
> +
> +    source2 = """\
> +        interface MyInterface {
> +          MyMethod(associated A a) =>(associated B& b);
> +        };"""
> +    expected2 = ast.Mojom(None, ast.ImportList(), [
> +        ast.Interface(
> +            'MyInterface', None,
> +            ast.InterfaceBody(
> +                ast.Method(
> +                    'MyMethod', None, None,
> +                    ast.ParameterList(
> +                        ast.Parameter('a', None, None, 'asso<A>')),
> +                    ast.ParameterList(
> +                        ast.Parameter('b', None, None, 'asso<B&>')))))
> +    ])
> +    self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2)
> +
> +  def testInvalidAssociatedKinds(self):
> +    """Tests that invalid associated interfaces and requests are correctly
> +    detected."""
> +    source1 = """\
> +        struct MyStruct {
> +          associated associated SomeInterface a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError,
> +        r"^my_file\.mojom:2: Error: Unexpected 'associated':\n"
> +        r" *associated associated SomeInterface a;$"):
> +      parser.Parse(source1, "my_file.mojom")
> +
> +    source2 = """\
> +        struct MyStruct {
> +          associated handle a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'handle':\n"
> +        r" *associated handle a;$"):
> +      parser.Parse(source2, "my_file.mojom")
> +
> +    source3 = """\
> +        struct MyStruct {
> +          associated? MyInterface& a;
> +        };
> +        """
> +    with self.assertRaisesRegexp(
> +        parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\?':\n"
> +        r" *associated\? MyInterface& a;$"):
> +      parser.Parse(source3, "my_file.mojom")
> +
> +
> +if __name__ == "__main__":
> +  unittest.main()
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser.py b/utils/ipc/mojo/public/tools/mojom/mojom_parser.py
> new file mode 100755
> index 00000000..12adbfb9
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom_parser.py
> @@ -0,0 +1,361 @@
> +#!/usr/bin/env python
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +"""Parses mojom IDL files.
> +
> +This script parses one or more input mojom files and produces corresponding
> +module files fully describing the definitions contained within each mojom. The
> +module data is pickled and can be easily consumed by other tools to, e.g.,
> +generate usable language bindings.
> +"""
> +
> +import argparse
> +import codecs
> +import errno
> +import json
> +import os
> +import os.path
> +import sys
> +from collections import defaultdict
> +
> +from mojom.generate import module
> +from mojom.generate import translate
> +from mojom.parse import parser
> +from mojom.parse import conditional_features
> +
> +
> +def _ResolveRelativeImportPath(path, roots):
> +  """Attempts to resolve a relative import path against a set of possible roots.
> +
> +  Args:
> +    path: The relative import path to resolve.
> +    roots: A list of absolute paths which will be checked in descending length
> +        order for a match against path.
> +
> +  Returns:
> +    A normalized absolute path combining one of the roots with the input path if
> +    and only if such a file exists.
> +
> +  Raises:
> +    ValueError: The path could not be resolved against any of the given roots.
> +  """
> +  for root in reversed(sorted(roots, key=len)):
> +    abs_path = os.path.join(root, path)
> +    if os.path.isfile(abs_path):
> +      return os.path.normcase(os.path.normpath(abs_path))
> +
> +  raise ValueError('"%s" does not exist in any of %s' % (path, roots))
> +
> +
> +def _RebaseAbsolutePath(path, roots):
> +  """Rewrites an absolute file path as relative to an absolute directory path in
> +  roots.
> +
> +  Args:
> +    path: The absolute path of an existing file.
> +    roots: A list of absolute directory paths. The given path argument must fall
> +        within one of these directories.
> +
> +  Returns:
> +    A path equivalent to the input path, but relative to one of the provided
> +    roots. If the input path falls within multiple roots, the longest root is
> +    chosen (and thus the shortest relative path is returned).
> +
> +    Paths returned by this method always use forward slashes as a separator to
> +    mirror mojom import syntax.
> +
> +  Raises:
> +    ValueError if the given path does not fall within any of the listed roots.
> +  """
> +  assert os.path.isabs(path)
> +  assert os.path.isfile(path)
> +  assert all(map(os.path.isabs, roots))
> +
> +  sorted_roots = list(reversed(sorted(roots, key=len)))
> +
> +  def try_rebase_path(path, root):
> +    head, rebased_path = os.path.split(path)
> +    while head != root:
> +      head, tail = os.path.split(head)
> +      if not tail:
> +        return None
> +      rebased_path = os.path.join(tail, rebased_path)
> +    return rebased_path
> +
> +  for root in sorted_roots:
> +    relative_path = try_rebase_path(path, root)
> +    if relative_path:
> +      # TODO(crbug.com/953884): Use pathlib for this kind of thing once we're
> +      # fully migrated to Python 3.
> +      return relative_path.replace('\\', '/')
> +
> +  raise ValueError('%s does not fall within any of %s' % (path, sorted_roots))
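
A quick check I did while reading, kept here in case it is useful: the
"longest root wins" behaviour documented above can be exercised like this
(illustrative only; assumes it is run from the mojom tools directory so
that mojom_parser is importable):

  import os
  import tempfile

  import mojom_parser

  root = tempfile.mkdtemp()
  nested = os.path.join(root, 'foo')
  os.makedirs(nested)
  open(os.path.join(nested, 'bar.mojom'), 'w').close()

  # The longer root ('.../foo') is preferred, so the shortest relative
  # path comes back: 'bar.mojom' rather than 'foo/bar.mojom'.
  print(mojom_parser._RebaseAbsolutePath(
      os.path.join(nested, 'bar.mojom'), [root, nested]))
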
> +
> +
> +def _GetModuleFilename(mojom_filename):
> +  return mojom_filename + '-module'
> +
> +
> +def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
> +                       dependencies, loaded_modules):
> +  """Recursively ensures that a module and its dependencies are loaded.
> +
> +  Args:
> +    mojom_abspath: An absolute file path pointing to a mojom file to load.
> +    module_path: The relative path used to identify mojom_abspath.
> +    abs_paths: A mapping from module paths to absolute file paths for all
> +        inputs given to this execution of the script.
> +    asts: A map from each input mojom's absolute path to its parsed AST.
> +    dependencies: A mapping of which input mojoms depend on each other, indexed
> +        by absolute file path.
> +    loaded_modules: A mapping of all modules loaded so far, including non-input
> +        modules that were pulled in as transitive dependencies of the inputs.
> +
> +  Returns:
> +    None
> +
> +    On return, loaded_modules will be populated with the loaded input mojom's
> +    Module as well as the Modules of all of its transitive dependencies."""
> +
> +  if mojom_abspath in loaded_modules:
> +    # Already done.
> +    return
> +
> +  for dep_abspath, dep_path in dependencies[mojom_abspath]:
> +    if dep_abspath not in loaded_modules:
> +      _EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
> +                         loaded_modules)
> +
> +  imports = {}
> +  for imp in asts[mojom_abspath].import_list:
> +    path = imp.import_filename
> +    imports[path] = loaded_modules[abs_paths[path]]
> +  loaded_modules[mojom_abspath] = translate.OrderedModule(
> +      asts[mojom_abspath], module_path, imports)
> +
> +
> +def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
> +  allowed_imports = set()
> +  processed_deps = set()
> +
> +  def collect(metadata_filename):
> +    processed_deps.add(metadata_filename)
> +    with open(metadata_filename) as f:
> +      metadata = json.load(f)
> +      allowed_imports.update(
> +          map(os.path.normcase, map(os.path.normpath, metadata['sources'])))
> +      for dep_metadata in metadata['deps']:
> +        if dep_metadata not in processed_deps:
> +          collect(dep_metadata)
> +
> +  collect(build_metadata_filename)
> +  return allowed_imports
> +
> +
> +def _ParseMojoms(mojom_files,
> +                 input_root_paths,
> +                 output_root_path,
> +                 enabled_features,
> +                 allowed_imports=None):
> +  """Parses a set of mojom files and produces serialized module outputs.
> +
> +  Args:
> +    mojom_files: A list of mojom files to process. Paths must be absolute paths
> +        which fall within one of the input or output root paths.
> +    input_root_paths: A list of absolute filesystem paths which may be used to
> +        resolve relative mojom file paths.
> +    output_root_path: An absolute filesystem path which will serve as the root
> +        for all emitted artifacts. Artifacts produced from a given mojom file
> +        are based on the mojom's relative path, rebased onto this path.
> +        Additionally, the script expects this root to contain already-generated
> +        modules for any transitive dependencies not listed in mojom_files.
> +    enabled_features: A list of enabled feature names, controlling which AST
> +        nodes are filtered by [EnableIf] attributes.
> +
> +  Returns:
> +    None.
> +
> +    Upon completion, a mojom-module file will be saved for each input mojom.
> +  """
> +  assert input_root_paths
> +  assert output_root_path
> +
> +  loaded_mojom_asts = {}
> +  loaded_modules = {}
> +  input_dependencies = defaultdict(set)
> +  mojom_files_to_parse = dict((os.path.normcase(abs_path),
> +                               _RebaseAbsolutePath(abs_path, input_root_paths))
> +                              for abs_path in mojom_files)
> +  abs_paths = dict(
> +      (path, abs_path) for abs_path, path in mojom_files_to_parse.items())
> +  for mojom_abspath, _ in mojom_files_to_parse.items():
> +    with codecs.open(mojom_abspath, encoding='utf-8') as f:
> +      ast = parser.Parse(''.join(f.readlines()), mojom_abspath)
> +      conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
> +      loaded_mojom_asts[mojom_abspath] = ast
> +      invalid_imports = []
> +      for imp in ast.import_list:
> +        import_abspath = _ResolveRelativeImportPath(imp.import_filename,
> +                                                    input_root_paths)
> +        if allowed_imports and import_abspath not in allowed_imports:
> +          invalid_imports.append(imp.import_filename)
> +
> +        abs_paths[imp.import_filename] = import_abspath
> +        if import_abspath in mojom_files_to_parse:
> +          # This import is in the input list, so we're going to translate it
> +          # into a module below; however it's also a dependency of another input
> +          # module. We retain a record of dependencies to help with input
> +          # processing later.
> +          input_dependencies[mojom_abspath].add((import_abspath,
> +                                                 imp.import_filename))
> +        else:
> +          # We have an import that isn't being parsed right now. It must already
> +          # be parsed and have a module file sitting in a corresponding output
> +          # location.
> +          module_path = _GetModuleFilename(imp.import_filename)
> +          module_abspath = _ResolveRelativeImportPath(module_path,
> +                                                      [output_root_path])
> +          with open(module_abspath, 'rb') as module_file:
> +            loaded_modules[import_abspath] = module.Module.Load(module_file)
> +
> +      if invalid_imports:
> +        raise ValueError(
> +            '\nThe file %s imports the following files not allowed by build '
> +            'dependencies:\n\n%s\n' % (mojom_abspath,
> +                                       '\n'.join(invalid_imports)))
> +
> +
> +  # At this point all transitive imports not listed as inputs have been loaded
> +  # and we have a complete dependency tree of the unprocessed inputs. Now we can
> +  # load all the inputs, resolving dependencies among them recursively as we go.
> +  num_existing_modules_loaded = len(loaded_modules)
> +  for mojom_abspath, mojom_path in mojom_files_to_parse.items():
> +    _EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts,
> +                       input_dependencies, loaded_modules)
> +  assert (num_existing_modules_loaded +
> +          len(mojom_files_to_parse) == len(loaded_modules))
> +
> +  # Now we have fully translated modules for every input and every transitive
> +  # dependency. We can dump the modules to disk for other tools to use.
> +  for mojom_abspath, mojom_path in mojom_files_to_parse.items():
> +    module_path = os.path.join(output_root_path, _GetModuleFilename(mojom_path))
> +    module_dir = os.path.dirname(module_path)
> +    if not os.path.exists(module_dir):
> +      try:
> +        # Python 2 doesn't support exist_ok on makedirs(), so we just ignore
> +        # that failure if it happens. It's possible during build due to races
> +        # among build steps with module outputs in the same directory.
> +        os.makedirs(module_dir)
> +      except OSError as e:
> +        if e.errno != errno.EEXIST:
> +          raise
> +    with open(module_path, 'wb') as f:
> +      loaded_modules[mojom_abspath].Dump(f)
> +
> +
> +def Run(command_line):
> +  arg_parser = argparse.ArgumentParser(
> +      description="""
> +Parses one or more mojom files and produces corresponding module outputs fully
> +describing the definitions therein. The output is exhaustive, stable, and
> +sufficient for another tool to consume and emit e.g. usable language
> +bindings based on the original mojoms.""",
> +      epilog="""
> +Note that each transitive import dependency reachable from the input mojoms must
> +either also be listed as an input or must have its corresponding compiled module
> +already present in the provided output root.""")
> +
> +  arg_parser.add_argument(
> +      '--input-root',
> +      default=[],
> +      action='append',
> +      metavar='ROOT',
> +      dest='input_root_paths',
> +      help='Adds ROOT to the set of root paths against which relative input '
> +      'paths should be resolved. Provided root paths are always searched '
> +      'in order from longest absolute path to shortest.')
> +  arg_parser.add_argument(
> +      '--output-root',
> +      action='store',
> +      required=True,
> +      dest='output_root_path',
> +      metavar='ROOT',
> +      help='Use ROOT as the root path in which the parser should emit compiled '
> +      'modules for each processed input mojom. The path of an emitted module '
> +      'is based on the relative input path, rebased onto this root. Note that '
> +      'ROOT is also searched for existing modules of any transitive imports '
> +      'which were not included in the set of inputs.')
> +  arg_parser.add_argument(
> +      '--mojoms',
> +      nargs='+',
> +      dest='mojom_files',
> +      default=[],
> +      metavar='MOJOM_FILE',
> +      help='Input mojom filename(s). Each filename must be either an absolute '
> +      'path which falls within one of the given input or output roots, or a '
> +      'relative path the parser will attempt to resolve using each of those '
> +      'roots in unspecified order.')
> +  arg_parser.add_argument(
> +      '--mojom-file-list',
> +      action='store',
> +      metavar='LIST_FILENAME',
> +      help='Input file whose contents are a list of mojoms to process. This '
> +      'may be provided in lieu of --mojoms to avoid hitting command line '
> +      'length limitations.')
> +  arg_parser.add_argument(
> +      '--enable-feature',
> +      dest='enabled_features',
> +      default=[],
> +      action='append',
> +      metavar='FEATURE',
> +      help='Enables a named feature when parsing the given mojoms. Features '
> +      'are identified by arbitrary string values. Specifying this flag with a '
> +      'given FEATURE name will cause the parser to process any syntax elements '
> +      'tagged with an [EnableIf=FEATURE] attribute. If this flag is not '
> +      'provided for a given FEATURE, such tagged elements are discarded by the '
> +      'parser and will not be present in the compiled output.')
> +  arg_parser.add_argument(
> +      '--check-imports',
> +      dest='build_metadata_filename',
> +      action='store',
> +      metavar='METADATA_FILENAME',
> +      help='Instructs the parser to check imports against a set of allowed '
> +      'imports. Allowed imports are based on build metadata within '
> +      'METADATA_FILENAME. This is a JSON file with a `sources` key listing '
> +      'paths to the set of input mojom files being processed by this parser '
> +      'run, and a `deps` key listing paths to metadata files for any '
> +      'dependencies of these inputs. This feature can be used to implement '
> +      'build-time dependency checking for mojom imports, where each build '
> +      'metadata file corresponds to a build target in the dependency graph of '
> +      'a typical build system.')
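
As an aside for readers, the METADATA_FILENAME format described here is the
same one the unit tests below construct by hand. A rough sketch of producing
one; the paths and the dependency on a.build_metadata are invented for
illustration:

  import json

  metadata = {
      # Mojom sources belonging to this build target.
      'sources': ['/abs/path/to/b.mojom'],
      # Metadata files of the targets this one depends on.
      'deps': ['/abs/path/to/out/a.build_metadata'],
  }
  with open('out/b.build_metadata', 'w') as f:
    json.dump(metadata, f)
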
> +
> +  args, _ = arg_parser.parse_known_args(command_line)
> +  if args.mojom_file_list:
> +    with open(args.mojom_file_list) as f:
> +      args.mojom_files.extend(f.read().split())
> +
> +  if not args.mojom_files:
> +    raise ValueError(
> +        'Must list at least one mojom file via --mojoms or --mojom-file-list')
> +
> +  mojom_files = list(map(os.path.abspath, args.mojom_files))
> +  input_roots = list(map(os.path.abspath, args.input_root_paths))
> +  output_root = os.path.abspath(args.output_root_path)
> +
> +  if args.build_metadata_filename:
> +    allowed_imports = _CollectAllowedImportsFromBuildMetadata(
> +        args.build_metadata_filename)
> +  else:
> +    allowed_imports = None
> +
> +  _ParseMojoms(mojom_files, input_roots, output_root, args.enabled_features,
> +               allowed_imports)
> +
> +
> +if __name__ == '__main__':
> +  Run(sys.argv[1:])
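
For completeness, this is roughly how the test case added below drives the
script in-process; the flag values are illustrative:

  import mojom_parser

  # Equivalent to invoking mojom_parser.py on the command line.
  mojom_parser.Run([
      '--input-root', '/src',
      '--output-root', '/src/out',
      '--mojoms', '/src/foo/bar.mojom',
  ])
  # A pickled module ends up at /src/out/foo/bar.mojom-module.
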
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py b/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
> new file mode 100644
> index 00000000..e213fbfa
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
> @@ -0,0 +1,73 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import json
> +import os
> +import os.path
> +import shutil
> +import tempfile
> +import unittest
> +
> +import mojom_parser
> +
> +from mojom.generate import module
> +
> +
> +class MojomParserTestCase(unittest.TestCase):
> +  """Tests covering the behavior defined by the main mojom_parser.py script.
> +  This includes behavior around input and output path manipulation, dependency
> +  resolution, and module serialization and deserialization."""
> +
> +  def __init__(self, method_name):
> +    super(MojomParserTestCase, self).__init__(method_name)
> +    self._temp_dir = None
> +
> +  def setUp(self):
> +    self._temp_dir = tempfile.mkdtemp()
> +
> +  def tearDown(self):
> +    shutil.rmtree(self._temp_dir)
> +    self._temp_dir = None
> +
> +  def GetPath(self, path):
> +    assert not os.path.isabs(path)
> +    return os.path.join(self._temp_dir, path)
> +
> +  def GetModulePath(self, path):
> +    assert not os.path.isabs(path)
> +    return os.path.join(self.GetPath('out'), path) + '-module'
> +
> +  def WriteFile(self, path, contents):
> +    full_path = self.GetPath(path)
> +    dirname = os.path.dirname(full_path)
> +    if not os.path.exists(dirname):
> +      os.makedirs(dirname)
> +    with open(full_path, 'w') as f:
> +      f.write(contents)
> +
> +  def LoadModule(self, mojom_path):
> +    with open(self.GetModulePath(mojom_path), 'rb') as f:
> +      return module.Module.Load(f)
> +
> +  def ParseMojoms(self, mojoms, metadata=None):
> +    """Parse all input mojoms relative the temp dir."""
> +    out_dir = self.GetPath('out')
> +    args = [
> +        '--input-root', self._temp_dir, '--input-root', out_dir,
> +        '--output-root', out_dir, '--mojoms'
> +    ] + list(map(lambda mojom: os.path.join(self._temp_dir, mojom), mojoms))
> +    if metadata:
> +      args.extend(['--check-imports', self.GetPath(metadata)])
> +    mojom_parser.Run(args)
> +
> +  def ExtractTypes(self, mojom):
> +    filename = 'test.mojom'
> +    self.WriteFile(filename, mojom)
> +    self.ParseMojoms([filename])
> +    m = self.LoadModule(filename)
> +    definitions = {}
> +    for kinds in (m.enums, m.structs, m.unions, m.interfaces):
> +      for kind in kinds:
> +        definitions[kind.mojom_name] = kind
> +    return definitions
> diff --git a/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py b/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
> new file mode 100644
> index 00000000..a93f34ba
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
> @@ -0,0 +1,171 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from mojom_parser_test_case import MojomParserTestCase
> +
> +
> +class MojomParserTest(MojomParserTestCase):
> +  """Tests covering the behavior defined by the main mojom_parser.py script.
> +  This includes behavior around input and output path manipulation, dependency
> +  resolution, and module serialization and deserialization."""
> +
> +  def testBasicParse(self):
> +    """Basic test to verify that we can parse a mojom file and get a module."""
> +    mojom = 'foo/bar.mojom'
> +    self.WriteFile(
> +        mojom, """\
> +        module test;
> +        enum TestEnum { kFoo };
> +        """)
> +    self.ParseMojoms([mojom])
> +
> +    m = self.LoadModule(mojom)
> +    self.assertEqual('foo/bar.mojom', m.path)
> +    self.assertEqual('test', m.mojom_namespace)
> +    self.assertEqual(1, len(m.enums))
> +
> +  def testBasicParseWithAbsolutePaths(self):
> +    """Verifies that we can parse a mojom file given an absolute path input."""
> +    mojom = 'foo/bar.mojom'
> +    self.WriteFile(
> +        mojom, """\
> +        module test;
> +        enum TestEnum { kFoo };
> +        """)
> +    self.ParseMojoms([self.GetPath(mojom)])
> +
> +    m = self.LoadModule(mojom)
> +    self.assertEqual('foo/bar.mojom', m.path)
> +    self.assertEqual('test', m.mojom_namespace)
> +    self.assertEqual(1, len(m.enums))
> +
> +  def testImport(self):
> +    """Verify imports within the same set of mojom inputs."""
> +    a = 'a.mojom'
> +    b = 'b.mojom'
> +    self.WriteFile(
> +        a, """\
> +        module a;
> +        import "b.mojom";
> +        struct Foo { b.Bar bar; };""")
> +    self.WriteFile(b, """\
> +        module b;
> +        struct Bar {};""")
> +    self.ParseMojoms([a, b])
> +
> +    ma = self.LoadModule(a)
> +    mb = self.LoadModule(b)
> +    self.assertEqual('a.mojom', ma.path)
> +    self.assertEqual('b.mojom', mb.path)
> +    self.assertEqual(1, len(ma.imports))
> +    self.assertEqual(mb, ma.imports[0])
> +
> +  def testPreProcessedImport(self):
> +    """Verify imports processed by a previous parser execution can be loaded
> +    properly when parsing a dependent mojom."""
> +    a = 'a.mojom'
> +    self.WriteFile(a, """\
> +        module a;
> +        struct Bar {};""")
> +    self.ParseMojoms([a])
> +
> +    b = 'b.mojom'
> +    self.WriteFile(
> +        b, """\
> +        module b;
> +        import "a.mojom";
> +        struct Foo { a.Bar bar; };""")
> +    self.ParseMojoms([b])
> +
> +  def testMissingImport(self):
> +    """Verify that an import fails if the imported mojom does not exist."""
> +    a = 'a.mojom'
> +    self.WriteFile(
> +        a, """\
> +        module a;
> +        import "non-existent.mojom";
> +        struct Bar {};""")
> +    with self.assertRaisesRegexp(ValueError, "does not exist"):
> +      self.ParseMojoms([a])
> +
> +  def testUnparsedImport(self):
> +    """Verify that an import fails if the imported mojom is not in the set of
> +    mojoms provided to the parser on this execution AND there is no pre-existing
> +    parsed output module already on disk for it."""
> +    a = 'a.mojom'
> +    b = 'b.mojom'
> +    self.WriteFile(a, """\
> +        module a;
> +        struct Bar {};""")
> +    self.WriteFile(
> +        b, """\
> +        module b;
> +        import "a.mojom";
> +        struct Foo { a.Bar bar; };""")
> +
> +    # a.mojom has not been parsed yet, so its import will fail when processing
> +    # b.mojom here.
> +    with self.assertRaisesRegexp(ValueError, "does not exist"):
> +      self.ParseMojoms([b])
> +
> +  def testCheckImportsBasic(self):
> +    """Verify that the parser can handle --check-imports with a valid set of
> +    inputs, including support for transitive dependency resolution."""
> +    a = 'a.mojom'
> +    a_metadata = 'out/a.build_metadata'
> +    b = 'b.mojom'
> +    b_metadata = 'out/b.build_metadata'
> +    c = 'c.mojom'
> +    c_metadata = 'out/c.build_metadata'
> +    self.WriteFile(a_metadata,
> +                   '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
> +    self.WriteFile(
> +        b_metadata,
> +        '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(b),
> +                                                   self.GetPath(a_metadata)))
> +    self.WriteFile(
> +        c_metadata,
> +        '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(c),
> +                                                   self.GetPath(b_metadata)))
> +    self.WriteFile(a, """\
> +        module a;
> +        struct Bar {};""")
> +    self.WriteFile(
> +        b, """\
> +        module b;
> +        import "a.mojom";
> +        struct Foo { a.Bar bar; };""")
> +    self.WriteFile(
> +        c, """\
> +        module c;
> +        import "a.mojom";
> +        import "b.mojom";
> +        struct Baz { b.Foo foo; };""")
> +    self.ParseMojoms([a], metadata=a_metadata)
> +    self.ParseMojoms([b], metadata=b_metadata)
> +    self.ParseMojoms([c], metadata=c_metadata)
> +
> +  def testCheckImportsMissing(self):
> +    """Verify that the parser rejects valid input mojoms when imports don't
> +    agree with build metadata given via --check-imports."""
> +    a = 'a.mojom'
> +    a_metadata = 'out/a.build_metadata'
> +    b = 'b.mojom'
> +    b_metadata = 'out/b.build_metadata'
> +    self.WriteFile(a_metadata,
> +                   '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
> +    self.WriteFile(b_metadata,
> +                   '{"sources": ["%s"], "deps": []}\n' % self.GetPath(b))
> +    self.WriteFile(a, """\
> +        module a;
> +        struct Bar {};""")
> +    self.WriteFile(
> +        b, """\
> +        module b;
> +        import "a.mojom";
> +        struct Foo { a.Bar bar; };""")
> +
> +    self.ParseMojoms([a], metadata=a_metadata)
> +    with self.assertRaisesRegexp(ValueError, "not allowed by build"):
> +      self.ParseMojoms([b], metadata=b_metadata)
> diff --git a/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py b/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
> new file mode 100644
> index 00000000..d45ec586
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
> @@ -0,0 +1,127 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from mojom_parser_test_case import MojomParserTestCase
> +
> +from mojom.generate import module
> +
> +
> +class StableAttributeTest(MojomParserTestCase):
> +  """Tests covering usage of the [Stable] attribute."""
> +
> +  def testStableAttributeTagging(self):
> +    """Verify that we recognize the [Stable] attribute on relevant definitions
> +    and the resulting parser outputs are tagged accordingly."""
> +    mojom = 'test.mojom'
> +    self.WriteFile(
> +        mojom, """\
> +        [Stable] enum TestEnum { kFoo };
> +        enum UnstableEnum { kBar };
> +        [Stable] struct TestStruct { TestEnum a; };
> +        struct UnstableStruct { UnstableEnum a; };
> +        [Stable] union TestUnion { TestEnum a; TestStruct b; };
> +        union UnstableUnion { UnstableEnum a; UnstableStruct b; };
> +        [Stable] interface TestInterface { Foo@0(TestUnion x) => (); };
> +        interface UnstableInterface { Foo(UnstableUnion x) => (); };
> +        """)
> +    self.ParseMojoms([mojom])
> +
> +    m = self.LoadModule(mojom)
> +    self.assertEqual(2, len(m.enums))
> +    self.assertTrue(m.enums[0].stable)
> +    self.assertFalse(m.enums[1].stable)
> +    self.assertEqual(2, len(m.structs))
> +    self.assertTrue(m.structs[0].stable)
> +    self.assertFalse(m.structs[1].stable)
> +    self.assertEqual(2, len(m.unions))
> +    self.assertTrue(m.unions[0].stable)
> +    self.assertFalse(m.unions[1].stable)
> +    self.assertEqual(2, len(m.interfaces))
> +    self.assertTrue(m.interfaces[0].stable)
> +    self.assertFalse(m.interfaces[1].stable)
> +
> +  def testStableStruct(self):
> +    """A [Stable] struct is valid if all its fields are also stable."""
> +    self.ExtractTypes('[Stable] struct S {};')
> +    self.ExtractTypes('[Stable] struct S { int32 x; bool b; };')
> +    self.ExtractTypes('[Stable] enum E { A }; [Stable] struct S { E e; };')
> +    self.ExtractTypes('[Stable] struct S {}; [Stable] struct T { S s; };')
> +    self.ExtractTypes(
> +        '[Stable] struct S {}; [Stable] struct T { array<S> ss; };')
> +    self.ExtractTypes(
> +        '[Stable] interface F {}; [Stable] struct T { pending_remote<F> f; };')
> +
> +    with self.assertRaisesRegexp(Exception, 'because it depends on E'):
> +      self.ExtractTypes('enum E { A }; [Stable] struct S { E e; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on X'):
> +      self.ExtractTypes('struct X {}; [Stable] struct S { X x; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on T'):
> +      self.ExtractTypes('struct T {}; [Stable] struct S { array<T> xs; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on T'):
> +      self.ExtractTypes('struct T {}; [Stable] struct S { map<int32, T> xs; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on T'):
> +      self.ExtractTypes('struct T {}; [Stable] struct S { map<T, int32> xs; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on F'):
> +      self.ExtractTypes(
> +          'interface F {}; [Stable] struct S { pending_remote<F> f; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on F'):
> +      self.ExtractTypes(
> +          'interface F {}; [Stable] struct S { pending_receiver<F> f; };')
> +
> +  def testStableUnion(self):
> +    """A [Stable] union is valid if all its fields' types are also stable."""
> +    self.ExtractTypes('[Stable] union U {};')
> +    self.ExtractTypes('[Stable] union U { int32 x; bool b; };')
> +    self.ExtractTypes('[Stable] enum E { A }; [Stable] union U { E e; };')
> +    self.ExtractTypes('[Stable] struct S {}; [Stable] union U { S s; };')
> +    self.ExtractTypes(
> +        '[Stable] struct S {}; [Stable] union U { array<S> ss; };')
> +    self.ExtractTypes(
> +        '[Stable] interface F {}; [Stable] union U { pending_remote<F> f; };')
> +
> +    with self.assertRaisesRegexp(Exception, 'because it depends on E'):
> +      self.ExtractTypes('enum E { A }; [Stable] union U { E e; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on X'):
> +      self.ExtractTypes('struct X {}; [Stable] union U { X x; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on T'):
> +      self.ExtractTypes('struct T {}; [Stable] union U { array<T> xs; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on T'):
> +      self.ExtractTypes('struct T {}; [Stable] union U { map<int32, T> xs; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on T'):
> +      self.ExtractTypes('struct T {}; [Stable] union U { map<T, int32> xs; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on F'):
> +      self.ExtractTypes(
> +          'interface F {}; [Stable] union U { pending_remote<F> f; };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on F'):
> +      self.ExtractTypes(
> +          'interface F {}; [Stable] union U { pending_receiver<F> f; };')
> +
> +  def testStableInterface(self):
> +    """A [Stable] interface is valid if all its methods' parameter types are
> +    stable, including response parameters where applicable."""
> +    self.ExtractTypes('[Stable] interface F {};')
> +    self.ExtractTypes('[Stable] interface F { A@0(int32 x); };')
> +    self.ExtractTypes('[Stable] interface F { A@0(int32 x) => (bool b); };')
> +    self.ExtractTypes("""\
> +        [Stable] enum E { A, B, C };
> +        [Stable] struct S {};
> +        [Stable] interface F { A@0(E e, S s) => (bool b, array<S> s); };
> +        """)
> +
> +    with self.assertRaisesRegexp(Exception, 'because it depends on E'):
> +      self.ExtractTypes(
> +          'enum E { A, B, C }; [Stable] interface F { A@0(E e); };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on E'):
> +      self.ExtractTypes(
> +          'enum E { A, B, C }; [Stable] interface F { A@0(int32 x) => (E e); };'
> +      )
> +    with self.assertRaisesRegexp(Exception, 'because it depends on S'):
> +      self.ExtractTypes(
> +          'struct S {}; [Stable] interface F { A@0(int32 x) => (S s); };')
> +    with self.assertRaisesRegexp(Exception, 'because it depends on S'):
> +      self.ExtractTypes(
> +          'struct S {}; [Stable] interface F { A@0(S s) => (bool b); };')
> +
> +    with self.assertRaisesRegexp(Exception, 'explicit method ordinals'):
> +      self.ExtractTypes('[Stable] interface F { A() => (); };')
> diff --git a/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py b/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
> new file mode 100644
> index 00000000..a0ee150e
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/mojom/version_compatibility_unittest.py
> @@ -0,0 +1,397 @@
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +from mojom_parser_test_case import MojomParserTestCase
> +
> +
> +class VersionCompatibilityTest(MojomParserTestCase):
> +  """Tests covering compatibility between two versions of the same mojom type
> +  definition. This coverage ensures that we can reliably detect unsafe changes
> +  to definitions that are expected to tolerate version skew in production
> +  environments."""
> +
> +  def _GetTypeCompatibilityMap(self, old_mojom, new_mojom):
> +    """Helper to support the implementation of assertBackwardCompatible and
> +    assertNotBackwardCompatible."""
> +
> +    old = self.ExtractTypes(old_mojom)
> +    new = self.ExtractTypes(new_mojom)
> +    self.assertEqual(set(old.keys()), set(new.keys()),
> +                     'Old and new test mojoms should use the same type names.')
> +
> +    compatibility_map = {}
> +    for name in old.keys():
> +      compatibility_map[name] = new[name].IsBackwardCompatible(old[name])
> +    return compatibility_map
> +
> +  def assertBackwardCompatible(self, old_mojom, new_mojom):
> +    compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
> +    for name, compatible in compatibility_map.items():
> +      if not compatible:
> +        raise AssertionError(
> +            'Given the old mojom:\n\n    %s\n\nand the new mojom:\n\n    %s\n\n'
> +            'The new definition of %s should pass a backward-compatibility '
> +            'check, but it does not.' % (old_mojom, new_mojom, name))
> +
> +  def assertNotBackwardCompatible(self, old_mojom, new_mojom):
> +    compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
> +    if all(compatibility_map.values()):
> +      raise AssertionError(
> +          'Given the old mojom:\n\n    %s\n\nand the new mojom:\n\n    %s\n\n'
> +          'The new mojom should fail a backward-compatibility check, but it '
> +          'does not.' % (old_mojom, new_mojom))
> +
> +  def testNewNonExtensibleEnumValue(self):
> +    """Adding a value to a non-extensible enum breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
> +                                     'enum E { kFoo, kBar, kBaz };')
> +
> +  def testNewNonExtensibleEnumValueWithMinVersion(self):
> +    """Adding a value to a non-extensible enum breaks backward-compatibility,
> +    even with a new [MinVersion] specified for the value."""
> +    self.assertNotBackwardCompatible(
> +        'enum E { kFoo, kBar };', 'enum E { kFoo, kBar, [MinVersion=1] kBaz };')
> +
> +  def testNewValueInExistingVersion(self):
> +    """Adding a value to an existing version is not allowed, even if the old
> +    enum was marked [Extensible]. Note that it is irrelevant whether or not the
> +    new enum is marked [Extensible]."""
> +    self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
> +                                     'enum E { kFoo, kBar, kBaz };')
> +    self.assertNotBackwardCompatible(
> +        '[Extensible] enum E { kFoo, kBar };',
> +        '[Extensible] enum E { kFoo, kBar, kBaz };')
> +    self.assertNotBackwardCompatible(
> +        '[Extensible] enum E { kFoo, [MinVersion=1] kBar };',
> +        'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
> +
> +  def testEnumValueRemoval(self):
> +    """Removal of an enum value is never valid even for [Extensible] enums."""
> +    self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
> +                                     'enum E { kFoo };')
> +    self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
> +                                     '[Extensible] enum E { kFoo };')
> +    self.assertNotBackwardCompatible(
> +        '[Extensible] enum E { kA, [MinVersion=1] kB };',
> +        '[Extensible] enum E { kA, };')
> +    self.assertNotBackwardCompatible(
> +        '[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=1] kZ };',
> +        '[Extensible] enum E { kA, [MinVersion=1] kB };')
> +
> +  def testNewExtensibleEnumValueWithMinVersion(self):
> +    """Adding a new and properly [MinVersion]'d value to an [Extensible] enum
> +    is a backward-compatible change. Note that it is irrelevant whether or not
> +    the new enum is marked [Extensible]."""
> +    self.assertBackwardCompatible('[Extensible] enum E { kA, kB };',
> +                                  'enum E { kA, kB, [MinVersion=1] kC };')
> +    self.assertBackwardCompatible(
> +        '[Extensible] enum E { kA, kB };',
> +        '[Extensible] enum E { kA, kB, [MinVersion=1] kC };')
> +    self.assertBackwardCompatible(
> +        '[Extensible] enum E { kA, [MinVersion=1] kB };',
> +        '[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=2] kC };')
> +
> +  def testRenameEnumValue(self):
> +    """Renaming an enum value does not affect backward-compatibility. Only
> +    the numeric value is relevant."""
> +    self.assertBackwardCompatible('enum E { kA, kB };', 'enum E { kX, kY };')
> +
> +  def testAddEnumValueAlias(self):
> +    """Adding new enum fields does not affect backward-compatibility if it does
> +    not introduce any new numeric values."""
> +    self.assertBackwardCompatible(
> +        'enum E { kA, kB };', 'enum E { kA, kB, kC = kA, kD = 1, kE = kD };')
> +
> +  def testEnumIdentity(self):
> +    """An unchanged enum is obviously backward-compatible."""
> +    self.assertBackwardCompatible('enum E { kA, kB, kC };',
> +                                  'enum E { kA, kB, kC };')
> +
> +  def testNewStructFieldUnversioned(self):
> +    """Adding a new field to a struct without a new (i.e. higher than any
> +    existing version) [MinVersion] tag breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('struct S { string a; };',
> +                                     'struct S { string a; string b; };')
> +
> +  def testStructFieldRemoval(self):
> +    """Removing a field from a struct breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('struct S { string a; string b; };',
> +                                     'struct S { string a; };')
> +
> +  def testStructFieldTypeChange(self):
> +    """Changing the type of an existing field always breaks
> +    backward-compatibility."""
> +    self.assertNotBackwardCompatible('struct S { string a; };',
> +                                     'struct S { array<int32> a; };')
> +
> +  def testStructFieldBecomingOptional(self):
> +    """Changing a field from non-optional to optional breaks
> +    backward-compatibility."""
> +    self.assertNotBackwardCompatible('struct S { string a; };',
> +                                     'struct S { string? a; };')
> +
> +  def testStructFieldBecomingNonOptional(self):
> +    """Changing a field from optional to non-optional breaks
> +    backward-compatibility."""
> +    self.assertNotBackwardCompatible('struct S { string? a; };',
> +                                     'struct S { string a; };')
> +
> +  def testStructFieldOrderChange(self):
> +    """Changing the order of fields breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('struct S { string a; bool b; };',
> +                                     'struct S { bool b; string a; };')
> +    self.assertNotBackwardCompatible('struct S { string a@0; bool b@1; };',
> +                                     'struct S { string a@1; bool b@0; };')
> +
> +  def testStructFieldMinVersionChange(self):
> +    """Changing the MinVersion of a field breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible(
> +        'struct S { string a; [MinVersion=1] string? b; };',
> +        'struct S { string a; [MinVersion=2] string? b; };')
> +
> +  def testStructFieldTypeDefinitionChange(self):
> +    """If a struct field's own type definition changes, the containing struct
> +    is backward-compatible if and only if the field type's change is
> +    backward-compatible."""
> +    self.assertBackwardCompatible(
> +        'struct S {}; struct T { S s; };',
> +        'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
> +    self.assertBackwardCompatible(
> +        '[Extensible] enum E { kA }; struct S { E e; };',
> +        '[Extensible] enum E { kA, [MinVersion=1] kB }; struct S { E e; };')
> +    self.assertNotBackwardCompatible(
> +        'struct S {}; struct T { S s; };',
> +        'struct S { int32 x; }; struct T { S s; };')
> +    self.assertNotBackwardCompatible(
> +        '[Extensible] enum E { kA }; struct S { E e; };',
> +        '[Extensible] enum E { kA, kB }; struct S { E e; };')
> +
> +  def testNewStructFieldWithInvalidMinVersion(self):
> +    """Adding a new field using an existing MinVersion breaks backward-
> +    compatibility."""
> +    self.assertNotBackwardCompatible(
> +        """\
> +        struct S {
> +          string a;
> +          [MinVersion=1] string? b;
> +        };
> +        """, """\
> +        struct S {
> +          string a;
> +          [MinVersion=1] string? b;
> +          [MinVersion=1] string? c;
> +        };""")
> +
> +  def testNewStructFieldWithValidMinVersion(self):
> +    """Adding a new field is safe if tagged with a MinVersion greater than any
> +    previously used MinVersion in the struct."""
> +    self.assertBackwardCompatible(
> +        'struct S { int32 a; };',
> +        'struct S { int32 a; [MinVersion=1] int32 b; };')
> +    self.assertBackwardCompatible(
> +        'struct S { int32 a; [MinVersion=1] int32 b; };',
> +        'struct S { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
> +
> +  def testNewStructFieldNullableReference(self):
> +    """Adding a new nullable reference-typed field is fine if versioned
> +    properly."""
> +    self.assertBackwardCompatible(
> +        'struct S { int32 a; };',
> +        'struct S { int32 a; [MinVersion=1] string? b; };')
> +
> +  def testStructFieldRename(self):
> +    """Renaming a field has no effect on backward-compatibility."""
> +    self.assertBackwardCompatible('struct S { int32 x; bool b; };',
> +                                  'struct S { int32 a; bool b; };')
> +
> +  def testStructFieldReorderWithExplicitOrdinals(self):
> +    """Reordering fields has no effect on backward-compatibility when field
> +    ordinals are explicitly labeled and remain unchanged."""
> +    self.assertBackwardCompatible('struct S { bool b@1; int32 a@0; };',
> +                                  'struct S { int32 a@0; bool b@1; };')
> +
> +  def testNewUnionFieldUnversioned(self):
> +    """Adding a new field to a union without a new (i.e. higher than any
> +    existing version) [MinVersion] tag breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('union U { string a; };',
> +                                     'union U { string a; string b; };')
> +
> +  def testUnionFieldRemoval(self):
> +    """Removing a field from a union breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('union U { string a; string b; };',
> +                                     'union U { string a; };')
> +
> +  def testUnionFieldTypeChange(self):
> +    """Changing the type of an existing field always breaks
> +    backward-compatibility."""
> +    self.assertNotBackwardCompatible('union U { string a; };',
> +                                     'union U { array<int32> a; };')
> +
> +  def testUnionFieldBecomingOptional(self):
> +    """Changing a field from non-optional to optional breaks
> +    backward-compatibility."""
> +    self.assertNotBackwardCompatible('union U { string a; };',
> +                                     'union U { string? a; };')
> +
> +  def testUnionFieldBecomingNonOptional(self):
> +    """Changing a field from optional to non-optional breaks
> +    backward-compatibility."""
> +    self.assertNotBackwardCompatible('union U { string? a; };',
> +                                     'union U { string a; };')
> +
> +  def testUnionFieldOrderChange(self):
> +    """Changing the order of fields breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('union U { string a; bool b; };',
> +                                     'union U { bool b; string a; };')
> +    self.assertNotBackwardCompatible('union U { string a@0; bool b@1; };',
> +                                     'union U { string a@1; bool b@0; };')
> +
> +  def testUnionFieldMinVersionChange(self):
> +    """Changing the MinVersion of a field breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible(
> +        'union U { string a; [MinVersion=1] string b; };',
> +        'union U { string a; [MinVersion=2] string b; };')
> +
> +  def testUnionFieldTypeDefinitionChange(self):
> +    """If a union field's own type definition changes, the containing union
> +    is backward-compatible if and only if the field type's change is
> +    backward-compatible."""
> +    self.assertBackwardCompatible(
> +        'struct S {}; union U { S s; };',
> +        'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
> +    self.assertBackwardCompatible(
> +        '[Extensible] enum E { kA }; union U { E e; };',
> +        '[Extensible] enum E { kA, [MinVersion=1] kB }; union U { E e; };')
> +    self.assertNotBackwardCompatible(
> +        'struct S {}; union U { S s; };',
> +        'struct S { int32 x; }; union U { S s; };')
> +    self.assertNotBackwardCompatible(
> +        '[Extensible] enum E { kA }; union U { E e; };',
> +        '[Extensible] enum E { kA, kB }; union U { E e; };')
> +
> +  def testNewUnionFieldWithInvalidMinVersion(self):
> +    """Adding a new field using an existing MinVersion breaks backward-
> +    compatibility."""
> +    self.assertNotBackwardCompatible(
> +        """\
> +        union U {
> +          string a;
> +          [MinVersion=1] string b;
> +        };
> +        """, """\
> +        union U {
> +          string a;
> +          [MinVersion=1] string b;
> +          [MinVersion=1] string c;
> +        };""")
> +
> +  def testNewUnionFieldWithValidMinVersion(self):
> +    """Adding a new field is safe if tagged with a MinVersion greater than any
> +    previously used MinVersion in the union."""
> +    self.assertBackwardCompatible(
> +        'union U { int32 a; };',
> +        'union U { int32 a; [MinVersion=1] int32 b; };')
> +    self.assertBackwardCompatible(
> +        'union U { int32 a; [MinVersion=1] int32 b; };',
> +        'union U { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
> +
> +  def testUnionFieldRename(self):
> +    """Renaming a field has no effect on backward-compatibility."""
> +    self.assertBackwardCompatible('union U { int32 x; bool b; };',
> +                                  'union U { int32 a; bool b; };')
> +
> +  def testUnionFieldReorderWithExplicitOrdinals(self):
> +    """Reordering fields has no effect on backward-compatibility when field
> +    ordinals are explicitly labeled and remain unchanged."""
> +    self.assertBackwardCompatible('union U { bool b@1; int32 a@0; };',
> +                                  'union U { int32 a@0; bool b@1; };')
> +
> +  def testNewInterfaceMethodUnversioned(self):
> +    """Adding a new method to an interface without a new (i.e. higher than any
> +    existing version) [MinVersion] tag breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('interface F { A(); };',
> +                                     'interface F { A(); B(); };')
> +
> +  def testInterfaceMethodRemoval(self):
> +    """Removing a method from an interface breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('interface F { A(); B(); };',
> +                                     'interface F { A(); };')
> +
> +  def testInterfaceMethodParamsChanged(self):
> +    """Changes to the parameter list are only backward-compatible if they meet
> +    backward-compatibility requirements of an equivalent struct definition."""
> +    self.assertNotBackwardCompatible('interface F { A(); };',
> +                                     'interface F { A(int32 x); };')
> +    self.assertNotBackwardCompatible('interface F { A(int32 x); };',
> +                                     'interface F { A(bool x); };')
> +    self.assertNotBackwardCompatible(
> +        'interface F { A(int32 x, [MinVersion=1] string? s); };', """\
> +        interface F {
> +          A(int32 x, [MinVersion=1] string? s, [MinVersion=1] int32 y);
> +        };""")
> +
> +    self.assertBackwardCompatible('interface F { A(int32 x); };',
> +                                  'interface F { A(int32 a); };')
> +    self.assertBackwardCompatible(
> +        'interface F { A(int32 x); };',
> +        'interface F { A(int32 x, [MinVersion=1] string? s); };')
> +
> +    self.assertBackwardCompatible(
> +        'struct S {}; interface F { A(S s); };',
> +        'struct S { [MinVersion=1] int32 x; }; interface F { A(S s); };')
> +    self.assertBackwardCompatible(
> +        'struct S {}; struct T {}; interface F { A(S s); };',
> +        'struct S {}; struct T {}; interface F { A(T s); };')
> +    self.assertNotBackwardCompatible(
> +        'struct S {}; struct T { int32 x; }; interface F { A(S s); };',
> +        'struct S {}; struct T { int32 x; }; interface F { A(T t); };')
> +
> +  def testInterfaceMethodReplyAdded(self):
> +    """Adding a reply to a message breaks backward-compatibilty."""
> +    self.assertNotBackwardCompatible('interface F { A(); };',
> +                                     'interface F { A() => (); };')
> +
> +  def testInterfaceMethodReplyRemoved(self):
> +    """Removing a reply from a message breaks backward-compatibility."""
> +    self.assertNotBackwardCompatible('interface F { A() => (); };',
> +                                     'interface F { A(); };')
> +
> +  def testInterfaceMethodReplyParamsChanged(self):
> +    """Similar to request parameters, a change to reply parameters is considered
> +    backward-compatible if it meets the same backward-compatibility
> +    requirements imposed on equivalent struct changes."""
> +    self.assertNotBackwardCompatible('interface F { A() => (); };',
> +                                     'interface F { A() => (int32 x); };')
> +    self.assertNotBackwardCompatible('interface F { A() => (int32 x); };',
> +                                     'interface F { A() => (); };')
> +    self.assertNotBackwardCompatible('interface F { A() => (bool x); };',
> +                                     'interface F { A() => (int32 x); };')
> +
> +    self.assertBackwardCompatible('interface F { A() => (int32 a); };',
> +                                  'interface F { A() => (int32 x); };')
> +    self.assertBackwardCompatible(
> +        'interface F { A() => (int32 x); };',
> +        'interface F { A() => (int32 x, [MinVersion] string? s); };')
> +
> +  def testNewInterfaceMethodWithInvalidMinVersion(self):
> +    """Adding a new method to an existing version is not backward-compatible."""
> +    self.assertNotBackwardCompatible(
> +        """\
> +        interface F {
> +          A();
> +          [MinVersion=1] B();
> +        };
> +        """, """\
> +        interface F {
> +          A();
> +          [MinVersion=1] B();
> +          [MinVersion=1] C();
> +        };
> +        """)
> +
> +  def testNewInterfaceMethodWithValidMinVersion(self):
> +    """Adding a new method is fine as long as its MinVersion exceeds that of any
> +    method on the old interface definition."""
> +    self.assertBackwardCompatible('interface F { A(); };',
> +                                  'interface F { A(); [MinVersion=1] B(); };')
> diff --git a/utils/ipc/mojo/public/tools/run_all_python_unittests.py b/utils/ipc/mojo/public/tools/run_all_python_unittests.py
> new file mode 100755
> index 00000000..b2010958
> --- /dev/null
> +++ b/utils/ipc/mojo/public/tools/run_all_python_unittests.py
> @@ -0,0 +1,28 @@
> +#!/usr/bin/env python
> +# Copyright 2020 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +import os.path
> +import sys
> +
> +_TOOLS_DIR = os.path.dirname(__file__)
> +_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
> +_SRC_DIR = os.path.join(_TOOLS_DIR, os.path.pardir, os.path.pardir,
> +                        os.path.pardir)
> +
> +# Ensure that the mojom library is discoverable.
> +sys.path.append(_MOJOM_DIR)
> +
> +# Help Python find typ in //third_party/catapult/third_party/typ/
> +sys.path.append(
> +    os.path.join(_SRC_DIR, 'third_party', 'catapult', 'third_party', 'typ'))
> +import typ
> +
> +
> +def Main():
> +  return typ.main(top_level_dir=_MOJOM_DIR)
> +
> +
> +if __name__ == '__main__':
> +  sys.exit(Main())
> diff --git a/utils/ipc/tools/diagnosis/crbug_1001171.py b/utils/ipc/tools/diagnosis/crbug_1001171.py
> new file mode 100644
> index 00000000..478fb8c1
> --- /dev/null
> +++ b/utils/ipc/tools/diagnosis/crbug_1001171.py
> @@ -0,0 +1,51 @@
> +# Copyright 2019 The Chromium Authors. All rights reserved.
> +# Use of this source code is governed by a BSD-style license that can be
> +# found in the LICENSE file.
> +
> +"""Helper context wrapper for diagnosing crbug.com/1001171.
> +
> +This module and all uses thereof can and should be removed once
> +crbug.com/1001171 has been resolved.
> +"""
> +
> +from __future__ import print_function
> +
> +import contextlib
> +import os
> +import sys
> +
> +
> +@contextlib.contextmanager
> +def DumpStateOnLookupError():
> +  """Prints potentially useful state info in the event of a LookupError."""
> +  try:
> +    yield
> +  except LookupError:
> +    print('LookupError diagnosis for crbug.com/1001171:')
> +    for path_index, path_entry in enumerate(sys.path):
> +      desc = 'unknown'
> +      if not os.path.exists(path_entry):
> +        desc = 'missing'
> +      elif os.path.islink(path_entry):
> +        desc = 'link -> %s' % os.path.realpath(path_entry)
> +      elif os.path.isfile(path_entry):
> +        desc = 'file'
> +      elif os.path.isdir(path_entry):
> +        desc = 'dir'
> +      print('  sys.path[%d]: %s (%s)' % (path_index, path_entry, desc))
> +
> +      real_path_entry = os.path.realpath(path_entry)
> +      if (path_entry.endswith(os.path.join('lib', 'python2.7'))
> +          and os.path.isdir(real_path_entry)):
> +        encodings_dir = os.path.realpath(
> +            os.path.join(real_path_entry, 'encodings'))
> +        if os.path.exists(encodings_dir):
> +          if os.path.isdir(encodings_dir):
> +            print('    %s contents: %s' % (encodings_dir,
> +                                           str(os.listdir(encodings_dir))))
> +          else:
> +            print('    %s exists but is not a directory' % encodings_dir)
> +        else:
> +          print('    %s missing' % encodings_dir)
> +
> +    raise
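
One last aside: this helper is meant to wrap any block that might raise
LookupError, e.g. (assuming tools/diagnosis is on sys.path; the encode call
is just one example of a codec lookup that can fail):

  from crbug_1001171 import DumpStateOnLookupError

  with DumpStateOnLookupError():
    # The sys.path diagnostics are printed and the LookupError is
    # re-raised if the codec machinery cannot be found.
    'probe'.encode('utf-8')
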
> -- 
> 2.27.0
> 

-- 
Regards,
Niklas Söderlund

