# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This module contains code for managing WebIDL files and bindings for
# the build system.

import errno
import hashlib
import io
import json
import logging
import os
import sys
from multiprocessing import Pool

import mozpack.path as mozpath
from mach.mixin.logging import LoggingMixin
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from mozbuild.util import FileAvoidWrite, cpu_count

# There are various imports in this file in functions to avoid adding
# dependencies to config.status. See bug 949875.

# Limit the count on Windows, because of bug 1889842 and also the
# inefficiency of fork on Windows.
DEFAULT_PROCESS_COUNT = 4 if sys.platform == "win32" else cpu_count()


class WebIDLPool:
    """
    Distribute generation load across several processes, avoiding redundant
    state copies.
    """

    GeneratorState = None

    def __init__(self, GeneratorState, *, processes=None):
        if processes is None:
            processes = DEFAULT_PROCESS_COUNT

        # As a special case, don't spawn an extra process if processes=1.
        if processes == 1:
            WebIDLPool._init(GeneratorState)

            class SeqPool:
                def map(self, *args):
                    return list(map(*args))

            self.pool = SeqPool()
        else:
            self.pool = Pool(
                initializer=WebIDLPool._init,
                initargs=(GeneratorState,),
                processes=processes,
            )

    def run(self, filenames):
        return self.pool.map(WebIDLPool._run, filenames)

    @staticmethod
    def _init(GeneratorState):
        WebIDLPool.GeneratorState = GeneratorState

    @staticmethod
    def _run(filename):
        return WebIDLPool.GeneratorState._generate_build_files_for_webidl(filename)


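# Hypothetical usage sketch, not part of this module's API: shows how per-file
# generation fans out to the pool. `manager` is assumed to be a
# WebIDLCodegenManager (defined below).
def _example_pool_usage(manager, webidl_paths, processes=2):
    # Each worker receives the manager once via the initializer rather than
    # once per task; only filenames are shipped per task.
    pool = WebIDLPool(manager, processes=processes)
    return pool.run(sorted(webidl_paths))

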
class BuildResult(object):
    """Represents the result of processing WebIDL files.

    This holds a summary of output file generation during code generation.
    """

    def __init__(self):
        # The .webidl files that had their outputs regenerated.
        self.inputs = set()

        # The output files that were created.
        self.created = set()

        # The output files that changed.
        self.updated = set()

        # The output files that didn't change.
        self.unchanged = set()


class WebIDLCodegenManagerState(dict):
    """Holds state for the WebIDL code generation manager.

    State is currently just an extended dict. The internal implementation of
    state should be considered a black box to everyone except
    WebIDLCodegenManager. But we'll still document it.

    Any set stored in this dict should be copied and sorted in the `dump()`
    method.

    Fields:

    version
        The integer version of the format. This is to detect incompatible
        changes between state. It should be bumped whenever the format
        changes or semantics change.

    webidls
        A dictionary holding information about every known WebIDL input.
        Keys are the basenames of input WebIDL files. Values are dicts of
        metadata. Keys in those dicts are:

        * filename - The full path to the input filename.
        * inputs - A set of full paths to other webidl files this webidl
          depends on.
        * outputs - Set of full output paths that are created/derived from
          this file.
        * sha1 - The hexadecimal SHA-1 of the input filename from the last
          processing time.

    global_depends
        A dictionary defining files that influence all processing. Keys
        are full filenames. Values are hexadecimal SHA-1 from the last
        processing time.

    dictionaries_convertible_to_js
        A set of names of dictionaries that are convertible to JS.

    dictionaries_convertible_from_js
        A set of names of dictionaries that are convertible from JS.
    """

    VERSION = 3

    def __init__(self, fh=None):
        self["version"] = self.VERSION
        self["webidls"] = {}
        self["global_depends"] = {}

        if not fh:
            return

        state = json.load(fh)
        if state["version"] != self.VERSION:
            raise Exception("Unknown state version: %s" % state["version"])

        self["version"] = state["version"]
        self["global_depends"] = state["global_depends"]

        for k, v in state["webidls"].items():
            self["webidls"][k] = v

            # Sets are converted to lists for serialization because JSON
            # doesn't support sets.
            self["webidls"][k]["inputs"] = set(v["inputs"])
            self["webidls"][k]["outputs"] = set(v["outputs"])

        self["dictionaries_convertible_to_js"] = set(
            state["dictionaries_convertible_to_js"]
        )

        self["dictionaries_convertible_from_js"] = set(
            state["dictionaries_convertible_from_js"]
        )

    def dump(self, fh):
        """Dump serialized state to a file handle."""
        normalized = self.copy()

        webidls = normalized["webidls"] = self["webidls"].copy()
        for k, v in self["webidls"].items():
            webidls_k = webidls[k] = v.copy()

            # Convert sets to lists because JSON doesn't support sets.
            webidls_k["outputs"] = sorted(v["outputs"])
            webidls_k["inputs"] = sorted(v["inputs"])

        normalized["dictionaries_convertible_to_js"] = sorted(
            self["dictionaries_convertible_to_js"]
        )

        normalized["dictionaries_convertible_from_js"] = sorted(
            self["dictionaries_convertible_from_js"]
        )

        json.dump(normalized, fh, sort_keys=True)


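# Hypothetical round-trip sketch, not used by the build: the state file
# written by dump() is plain JSON, so it can be reloaded and re-serialized.
def _example_state_roundtrip(path):
    # Loading converts the serialized lists back into sets; dumping sorts
    # them again for stable output.
    with io.open(path, "r") as fh:
        state = WebIDLCodegenManagerState(fh=fh)
    with io.open(path, "w", newline="\n") as fh:
        state.dump(fh)
    return state

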
class WebIDLCodegenManager(LoggingMixin):
    """Manages all code generation around WebIDL.

    To facilitate testing, this object is meant to be generic and reusable.
    Paths, etc. should be parameters and not hardcoded.
    """

    # Global parser derived declaration files.
    GLOBAL_DECLARE_FILES = {
        "BindingNames.h",
        "GeneratedAtomList.h",
        "GeneratedEventList.h",
        "PrototypeList.h",
        "RegisterBindings.h",
        "RegisterShadowRealmBindings.h",
        "RegisterWorkerBindings.h",
        "RegisterWorkerDebuggerBindings.h",
        "RegisterWorkletBindings.h",
        "UnionTypes.h",
        "WebIDLPrefs.h",
        "WebIDLSerializable.h",
    }

    # Global parser derived definition files.
    GLOBAL_DEFINE_FILES = {
        "BindingNames.cpp",
        "RegisterBindings.cpp",
        "RegisterShadowRealmBindings.cpp",
        "RegisterWorkerBindings.cpp",
        "RegisterWorkerDebuggerBindings.cpp",
        "RegisterWorkletBindings.cpp",
        "UnionTypes.cpp",
        "PrototypeList.cpp",
        "WebIDLPrefs.cpp",
        "WebIDLSerializable.cpp",
    }

    def __init__(
        self,
        config_path,
        webidl_root,
        inputs,
        exported_header_dir,
        codegen_dir,
        state_path,
        cache_dir=None,
        make_deps_path=None,
        make_deps_target=None,
    ):
        """Create an instance that manages WebIDLs in the build system.

        config_path refers to a WebIDL config file (e.g. Bindings.conf).
        inputs is a 4-tuple describing the input .webidl files and how to
        process them. Members are:
            (set(.webidl files), set(basenames of exported files),
             set(basenames of generated events files),
             set(example interface names))

        exported_header_dir and codegen_dir are directories where generated
        files will be written to.
        state_path is the path to a file that will receive JSON state from our
        actions.
        make_deps_path is the path to a make dependency file that we can
        optionally write.
        make_deps_target is the target that receives the make dependencies. It
        must be defined if using make_deps_path.
        """
        self.populate_logger()

        input_paths, exported_stems, generated_events_stems, example_interfaces = inputs

        self._config_path = config_path
        self._webidl_root = webidl_root
        self._input_paths = set(input_paths)
        self._exported_stems = set(exported_stems)
        self._generated_events_stems = set(generated_events_stems)
        self._generated_events_stems_as_array = generated_events_stems
        self._example_interfaces = set(example_interfaces)
        self._exported_header_dir = exported_header_dir
        self._codegen_dir = codegen_dir
        self._state_path = state_path
        self._cache_dir = cache_dir
        self._make_deps_path = make_deps_path
        self._make_deps_target = make_deps_target

        if (make_deps_path and not make_deps_target) or (
            not make_deps_path and make_deps_target
        ):
            raise Exception(
                "Must define both make_deps_path and make_deps_target "
                "if one is defined."
            )

        self._parser_results = None
        self._config = None
        self._state = WebIDLCodegenManagerState()

        if os.path.exists(state_path):
            with io.open(state_path, "r") as fh:
                try:
                    self._state = WebIDLCodegenManagerState(fh=fh)
                except Exception as e:
                    self.log(
                        logging.WARN,
                        "webidl_bad_state",
                        {"msg": str(e)},
                        "Bad WebIDL state: {msg}",
                    )

    @property
    def config(self):
        if not self._config:
            self._parse_webidl()

        return self._config

    def generate_build_files(self, *, processes=None):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs to be done are roughly
        as follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If a non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes in those could globally impact every output
           file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.

        If `processes` is set to None, run in parallel using the
        multiprocessing.Pool default. If set to 1, don't use extra processes.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        self._state["global_depends"] = global_hashes
        self._state["dictionaries_convertible_to_js"] = set(
            d.identifier.name for d in self._config.getDictionariesConvertibleToJS()
        )
        self._state["dictionaries_convertible_from_js"] = set(
            d.identifier.name for d in self._config.getDictionariesConvertibleFromJS()
        )

        # Distribute the generation load across several processes. This requires
        # a) that `self' is serializable and b) that `self' is unchanged by
        # _generate_build_files_for_webidl(...)
        ordered_changed_inputs = sorted(changed_inputs)
        pool = WebIDLPool(self, processes=processes)
        generation_results = pool.run(ordered_changed_inputs)

        # Generate bindings from .webidl files.
        for filename, generation_result in zip(
            ordered_changed_inputs, generation_results
        ):
            basename = mozpath.basename(filename)
            result.inputs.add(filename)
            written, deps = generation_result
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            self._state["webidls"][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self._example_interfaces:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result

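    # Illustrative invocation only (`manager` is hypothetical here; the real
    # entry point is create_build_system_manager() at the bottom of this
    # file):
    #
    #   result = manager.generate_build_files(processes=1)
    #   print(len(result.created | result.updated), "outputs written")
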
    def generate_example_files(self, interface):
        """Generates example files for a given interface."""
        from Codegen import CGExampleRoot

        root = CGExampleRoot(self.config, interface)

        example_paths = self._example_paths(interface)
        for path in example_paths:
            self.log(
                logging.INFO,
                "webidl_generate_example_files",
                {"filename": path},
                "Generating WebIDL example files derived from {filename}",
            )

        return self._maybe_write_codegen(root, *example_paths)

    def _parse_webidl(self):
        import WebIDL
        from Configuration import Configuration

        self.log(
            logging.INFO,
            "webidl_parse",
            {"count": len(self._input_paths)},
            "Parsing {count} WebIDL files.",
        )

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir, lexer=None)

        for path in sorted(self._input_paths):
            with io.open(path, "r", encoding="utf-8") as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(data.encode()).hexdigest()
                parser.parse(data, path)

        # Only these directories may contain WebIDL files with interfaces
        # which are exposed to the web. WebIDL files in these roots may not
        # be changed without DOM peer review.
        #
        # Other directories may contain WebIDL files as long as they only
        # contain ChromeOnly interfaces. These are not subject to mandatory
        # DOM peer review.
        web_roots = (
            # The main WebIDL root.
            self._webidl_root,
            # The binding config root, which contains some test-only
            # interfaces.
            os.path.dirname(self._config_path),
            # The objdir sub-directory which contains generated WebIDL files.
            self._codegen_dir,
        )

        self._parser_results = parser.finish()
        self._config = Configuration(
            self._config_path,
            web_roots,
            self._parser_results,
            self._generated_events_stems_as_array,
        )
        self._input_hashes = hashes

    def _write_global_derived(self):
        from Codegen import GlobalGenRoots

        things = [("declare", f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(("define", f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == "declare":
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == "define":
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception("Unknown global gen type: %s" % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result

    def _compute_changed_inputs(self):
        """Compute the set of input files that need to be regenerated."""
        changed_inputs = set()
        expected_outputs = self.expected_build_output_files()

        # Look for missing output files.
        if any(not os.path.exists(f) for f in expected_outputs):
            # FUTURE Bug 940469 Only regenerate minimum set.
            changed_inputs |= self._input_paths

        # That's it for examining output files. We /could/ examine SHA-1s of
        # output files from a previous run to detect modifications. But that's
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
        old_hashes = {v["filename"]: v["sha1"] for v in self._state["webidls"].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths

        # If an old file has disappeared or a new file has arrived, mark
        # it.
        changed_inputs |= old_filenames ^ new_filenames

        # For the files in common between runs, compare content. If the file
        # has changed, mark it. We don't need to perform mtime comparisons
        # because content is a stronger validator.
        for filename in old_filenames & new_filenames:
            if old_hashes[filename] != self._input_hashes[filename]:
                changed_inputs.add(filename)

        # We've now populated the base set of inputs that have changed.

        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
        for v in self._state["webidls"].values():
            if any(dep for dep in v["inputs"] if dep in changed_inputs):
                changed_inputs.add(v["filename"])

        # Now check for changes to the set of dictionaries that are convertible
        # to JS.
        oldDictionariesConvertibleToJS = self._state["dictionaries_convertible_to_js"]
        newDictionariesConvertibleToJS = self._config.getDictionariesConvertibleToJS()
        newNames = set(d.identifier.name for d in newDictionariesConvertibleToJS)
        changedDictionaryNames = oldDictionariesConvertibleToJS ^ newNames

        # Now check for changes to the set of dictionaries that are convertible
        # from JS.
        oldDictionariesConvertibleFromJS = self._state[
            "dictionaries_convertible_from_js"
        ]
        newDictionariesConvertibleFromJS = (
            self._config.getDictionariesConvertibleFromJS()
        )
        newNames = set(d.identifier.name for d in newDictionariesConvertibleFromJS)
        changedDictionaryNames |= oldDictionariesConvertibleFromJS ^ newNames

        for name in changedDictionaryNames:
            d = self._config.getDictionaryIfExists(name)
            if d:
                changed_inputs.add(d.filename)

        # Only use paths that are known to our current state.
        # This filters out files that were deleted or changed type (e.g. from
        # static to preprocessed).
        return changed_inputs & self._input_paths

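    # Worked example (hypothetical file names): if A.webidl's hash changed
    # and B.webidl lists A.webidl in its stored "inputs", both end up in
    # changed_inputs; a C.webidl deleted since the last run is dropped by
    # the final intersection with self._input_paths.
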
    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, header_dir, output_files)

        output_files is itself a tuple. The first two items are the binding
        header and C++ paths, respectively. The last two are the event header
        and C++ paths, or None if this isn't an event binding.
        """
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = "%sBinding" % stem

        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, "%s.h" % binding_stem),
            mozpath.join(self._codegen_dir, "%s.cpp" % binding_stem),
            mozpath.join(header_dir, "%s.h" % stem) if is_event else None,
            mozpath.join(self._codegen_dir, "%s.cpp" % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files

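    # For a hypothetical exported event interface Foo (Foo.webidl), this
    # yields FooBinding.h in the exported header dir, FooBinding.cpp in the
    # codegen dir, and Foo.h/Foo.cpp for the generated event class.
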
    def _example_paths(self, interface):
        return (
            mozpath.join(self._codegen_dir, "%s-example.h" % interface),
            mozpath.join(self._codegen_dir, "%s-example.cpp" % interface),
        )

    def expected_build_output_files(self):
        """Obtain the set of files generate_build_files() should write."""
        paths = set()

        # Account for global generation.
        for p in self.GLOBAL_DECLARE_FILES:
            paths.add(mozpath.join(self._exported_header_dir, p))
        for p in self.GLOBAL_DEFINE_FILES:
            paths.add(mozpath.join(self._codegen_dir, p))

        for p in self._input_paths:
            stem, binding_stem, is_event, header_dir, files = self._binding_info(p)
            paths |= {f for f in files if f}

        for interface in self._example_interfaces:
            for p in self._example_paths(interface):
                paths.add(p)

        return paths

    # Parallelization of the generation step relies on this method not changing
    # the internal state of the object.
    def _generate_build_files_for_webidl(self, filename):
        from Codegen import CGBindingRoot, CGEventRoot

        self.log(
            logging.INFO,
            "webidl_generate_build_for_input",
            {"filename": filename},
            "Generating WebIDL files derived from {filename}",
        )

        stem, binding_stem, is_event, header_dir, files = self._binding_info(filename)
        root = CGBindingRoot(self._config, binding_stem, filename)

        result = self._maybe_write_codegen(root, files[0], files[1])

        if is_event:
            generated_event = CGEventRoot(self._config, stem)
            result = self._maybe_write_codegen(
                generated_event, files[2], files[3], result
            )

        return result, root.deps()

    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed."""
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with io.open(f, "rb") as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state["global_depends"].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state["global_depends"][f]:
                return True, current_hashes

        return False, current_hashes

    def _save_state(self):
        with io.open(self._state_path, "w", newline="\n") as fh:
            self._state.dump(fh)

    def _maybe_write_codegen(self, obj, declare_path, define_path, result=None):
        assert declare_path and define_path
        if not result:
            result = (set(), set(), set())

        self._maybe_write_file(declare_path, obj.declare(), result)
        self._maybe_write_file(define_path, obj.define(), result)

        return result

    def _maybe_write_file(self, path, content, result):
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            result[0].add(path)
        elif updated:
            result[1].add(path)
        else:
            result[2].add(path)


def create_build_system_manager(topsrcdir=None, topobjdir=None, dist_dir=None):
    """Create a WebIDLCodegenManager for use by the build system."""
    if topsrcdir is None:
        assert topobjdir is None and dist_dir is None
        import buildconfig

        topsrcdir = buildconfig.topsrcdir
        topobjdir = buildconfig.topobjdir
        dist_dir = buildconfig.substs["DIST"]

    src_dir = os.path.join(topsrcdir, "dom", "bindings")
    obj_dir = os.path.join(topobjdir, "dom", "bindings")
    webidl_root = os.path.join(topsrcdir, "dom", "webidl")

    with io.open(os.path.join(obj_dir, "file-lists.json"), "r") as fh:
        files = json.load(fh)

    inputs = (
        files["webidls"],
        files["exported_stems"],
        files["generated_events_stems"],
        files["example_interfaces"],
    )

    cache_dir = os.path.join(obj_dir, "_cache")
    try:
        os.makedirs(cache_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    return WebIDLCodegenManager(
        os.path.join(src_dir, "Bindings.conf"),
        webidl_root,
        inputs,
        os.path.join(dist_dir, "include", "mozilla", "dom"),
        obj_dir,
        os.path.join(obj_dir, "codegen.json"),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(obj_dir, "codegen.pp"),
        make_deps_target="webidl.stub",
    )
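

# Hypothetical end-to-end sketch, not part of the build: construct the manager
# from the ambient build configuration and regenerate bindings.
def _example_regenerate_bindings(processes=1):
    # With no arguments, create_build_system_manager() reads paths from
    # buildconfig, so this only works from within an existing objdir.
    manager = create_build_system_manager()
    result = manager.generate_build_files(processes=processes)
    return result.created | result.updated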