Mirror of https://github.com/mozilla/gecko-dev.git (synced 2025-01-19 09:30:44 +00:00)
Bug 928195 - Part 4: Rewrite WebIDL build system integration; r=bz, r=glandium
WebIDL build system integration has been rewritten from the ground up. Changes:

* GlobalGen.py, BindingGen.py, and ExampleGen.py have been removed in favor of mozwebidl.py.
* Static .webidl files are now processed directly in their original location and aren't copied to the object directory.
* Generated events <stem>.cpp files are now compiled into the unified sources. Previously, only the <stem>Binding.cpp files were compiled into unified sources.
* Exported .h files are now generated directly into their final location. Previously, they were generated into the local directory then installed in their final location.
* The list of globalgen-generated files now lives in Python and isn't duplicated in 3 places.
* The make dependencies are much simpler as a result of using a single command to perform all code generation. The auto-generated .pp file from code generation sets up all dependencies necessary to reinvoke code generation, and Python takes care of dependency management.

--HG--
extra : rebase_source : e4918878274b22a412329c7cb18cc7138daf5dc6
This commit is contained in:
parent 0dd4e7e164, commit b47eeb6ab3
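For orientation, the heart of the new scheme is that a single build action performs all WebIDL code generation and emits a make fragment (codegen.pp) describing what it depends on, so make only has to include that fragment. Below is a minimal sketch of that pattern using mozbuild.makeutil.Makefile, the same helper the patched sources use; the output and dependency file names here are purely illustrative, not the real manager's output.

from mozbuild.makeutil import Makefile


def write_make_deps(pp_path, outputs, inputs):
    """Emit a .pp fragment telling make which inputs the outputs depend on.

    A code generation step can call this after it runs; make then only needs
    an `include codegen.pp` to learn when the whole action must be re-run.
    """
    mk = Makefile()
    rule = mk.create_rule(sorted(outputs))
    rule.add_dependencies(sorted(inputs))
    with open(pp_path, 'w') as fh:
        mk.dump(fh, removal_guard=False)


if __name__ == '__main__':
    # Hypothetical file names, purely for illustration.
    write_make_deps(
        'codegen.pp',
        outputs=['PrototypeList.h', 'ExampleBinding.cpp'],
        inputs=['Example.webidl', 'Bindings.conf', 'Codegen.py'],
    )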
CLOBBER (7 lines changed)
@ -18,4 +18,9 @@
# Modifying this file will now automatically clobber the buildbot machines \o/
#

Bug 887836 - webidl changes require a Windows clobber.

# Are you updating CLOBBER because you think it's needed for your WebIDL
# changes to stick? As of bug 928195, this shouldn't be necessary! Please
# don't change CLOBBER for WebIDL changes any more.
Bug 928195 rewrote WebIDL build system integration from the ground up. This
will hopefully be the last required clobber due to WebIDLs poorly interacting
with the build system.
dom/bindings/BindingGen.py (deleted)
@ -1,98 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import cPickle
from Configuration import Configuration
from Codegen import CGBindingRoot, replaceFileIfChanged, CGEventRoot
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from buildconfig import topsrcdir


def generate_binding_files(config, outputprefix, srcprefix, webidlfile,
                           generatedEventsWebIDLFiles):
    """
    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    if webidlfile in generatedEventsWebIDLFiles:
        eventName = webidlfile[:-len(".webidl")]
        generatedEvent = CGEventRoot(config, eventName)
        replaceFileIfChanged(eventName + ".h", generatedEvent.declare())
        replaceFileIfChanged(eventName + ".cpp", generatedEvent.define())

    mk = Makefile()
    # NOTE: it's VERY important that we output dependencies for the FooBinding
    # file here, not for the header or generated cpp file. These dependencies
    # are used later to properly determine changedDeps and prevent rebuilding
    # too much. See the comment explaining $(binding_dependency_trackers) in
    # Makefile.in.
    rule = mk.create_rule([outputprefix])
    rule.add_dependencies(os.path.join(srcprefix, x) for x in sorted(root.deps()))
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)

def main():
    # Parse arguments.
    from optparse import OptionParser
    usagestring = "usage: %prog [header|cpp] configFile outputPrefix srcPrefix webIDLFile"
    o = OptionParser(usage=usagestring)
    o.add_option("--verbose-errors", action='store_true', default=False,
                 help="When an error happens, display the Python traceback.")
    (options, args) = o.parse_args()

    configFile = os.path.normpath(args[0])
    srcPrefix = os.path.normpath(args[1])

    # Load the configuration
    f = open('ParserResults.pkl', 'rb')
    config = cPickle.load(f)
    f.close()

    def readFile(f):
        file = open(f, 'rb')
        try:
            contents = file.read()
        finally:
            file.close()
        return contents
    allWebIDLFiles = readFile(args[2]).split()
    generatedEventsWebIDLFiles = readFile(args[3]).split()
    changedDeps = readFile(args[4]).split()

    if all(f.endswith("Binding") or f == "ParserResults.pkl" for f in changedDeps):
        toRegenerate = filter(lambda f: f.endswith("Binding"), changedDeps)
        if len(toRegenerate) == 0 and len(changedDeps) == 1:
            # Work around build system bug 874923: if we get here that means
            # that changedDeps contained only one entry and it was
            # "ParserResults.pkl". That should never happen: if the
            # ParserResults.pkl changes then either one of the globalgen files
            # changed (in which case we wouldn't be in this "only
            # ParserResults.pkl and *Binding changed" code) or some .webidl
            # files changed (and then the corresponding *Binding files should
            # show up in changedDeps). Since clearly the build system is
            # confused, just regenerate everything to be safe.
            toRegenerate = allWebIDLFiles
        else:
            toRegenerate = map(lambda f: f[:-len("Binding")] + ".webidl",
                               toRegenerate)
    else:
        toRegenerate = allWebIDLFiles

    for webIDLFile in toRegenerate:
        assert webIDLFile.endswith(".webidl")
        outputPrefix = webIDLFile[:-len(".webidl")] + "Binding"
        generate_binding_files(config, outputPrefix, srcPrefix, webIDLFile,
                               generatedEventsWebIDLFiles);

if __name__ == '__main__':
    main()
dom/bindings/Codegen.py
@ -29,27 +29,6 @@ INSTANCE_RESERVED_SLOTS = 3
def memberReservedSlot(member):
    return "(DOM_INSTANCE_RESERVED_SLOTS + %d)" % member.slotIndex

def replaceFileIfChanged(filename, newContents):
    """
    Read a copy of the old file, so that we don't touch it if it hasn't changed.
    Returns True if the file was updated, false otherwise.
    """
    oldFileContents = ""
    try:
        oldFile = open(filename, 'rb')
        oldFileContents = ''.join(oldFile.readlines())
        oldFile.close()
    except:
        pass

    if newContents == oldFileContents:
        return False

    f = open(filename, 'wb')
    f.write(newContents)
    f.close()
    return True

def toStringBool(arg):
    return str(not not arg).lower()
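replaceFileIfChanged() exists so that codegen output files are only rewritten when their contents actually change, which keeps mtimes stable and avoids needless rebuilds; the new mozwebidlcodegen code imports FileAvoidWrite from mozbuild.util for the same purpose. A standalone sketch of the same write-if-changed idea (not the mozbuild implementation) looks like this:

def replace_file_if_changed(filename, new_contents):
    """Write `new_contents` to `filename` only if it would actually change.

    Leaving unchanged outputs untouched keeps their mtimes stable, so make
    does not rebuild everything that depends on them.
    """
    try:
        with open(filename, 'rb') as fh:
            if fh.read() == new_contents:
                return False
    except IOError:
        pass  # Missing file: fall through and write it.

    with open(filename, 'wb') as fh:
        fh.write(new_contents)
    return True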
dom/bindings/ExampleGen.py (deleted)
@ -1,46 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import cPickle
from Configuration import Configuration
from Codegen import CGExampleRoot, replaceFileIfChanged

def generate_interface_example(config, interfaceName):
    """
    |config| Is the configuration object.
    |interfaceName| is the name of the interface we're generating an example for.
    """

    root = CGExampleRoot(config, interfaceName)
    exampleHeader = interfaceName + "-example.h"
    exampleImpl = interfaceName + "-example.cpp"
    replaceFileIfChanged(exampleHeader, root.declare())
    replaceFileIfChanged(exampleImpl, root.define())

def main():

    # Parse arguments.
    from optparse import OptionParser
    usagestring = "usage: %prog configFile interfaceName"
    o = OptionParser(usage=usagestring)
    o.add_option("--verbose-errors", action='store_true', default=False,
                 help="When an error happens, display the Python traceback.")
    (options, args) = o.parse_args()

    if len(args) != 2:
        o.error(usagestring)
    configFile = os.path.normpath(args[0])
    interfaceName = args[1]

    # Load the configuration
    f = open('ParserResults.pkl', 'rb')
    config = cPickle.load(f)
    f.close()

    # Generate the example class.
    generate_interface_example(config, interfaceName)

if __name__ == '__main__':
    main()
dom/bindings/GlobalGen.py (deleted)
@ -1,81 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

# We do one global pass over all the WebIDL to generate our prototype enum
# and generate information for subsequent phases.

import os
import WebIDL
import cPickle
from Configuration import Configuration
from Codegen import GlobalGenRoots, replaceFileIfChanged

def generate_file(config, name, action):

    root = getattr(GlobalGenRoots, name)(config)
    if action is 'declare':
        filename = name + '.h'
        code = root.declare()
    else:
        assert action is 'define'
        filename = name + '.cpp'
        code = root.define()

    if replaceFileIfChanged(filename, code):
        print "Generating %s" % (filename)
    else:
        print "%s hasn't changed - not touching it" % (filename)

def main():
    # Parse arguments.
    from optparse import OptionParser
    usageString = "usage: %prog [options] webidldir [files]"
    o = OptionParser(usage=usageString)
    o.add_option("--cachedir", dest='cachedir', default=None,
                 help="Directory in which to cache lex/parse tables.")
    o.add_option("--verbose-errors", action='store_true', default=False,
                 help="When an error happens, display the Python traceback.")
    (options, args) = o.parse_args()

    if len(args) < 2:
        o.error(usageString)

    configFile = args[0]
    baseDir = args[1]
    fileList = args[2:]

    # Parse the WebIDL.
    parser = WebIDL.Parser(options.cachedir)
    for filename in fileList:
        fullPath = os.path.normpath(os.path.join(baseDir, filename))
        f = open(fullPath, 'rb')
        lines = f.readlines()
        f.close()
        parser.parse(''.join(lines), fullPath)
    parserResults = parser.finish()

    # Load the configuration.
    config = Configuration(configFile, parserResults)

    # Write the configuration out to a pickle.
    resultsFile = open('ParserResults.pkl', 'wb')
    cPickle.dump(config, resultsFile, -1)
    resultsFile.close()

    # Generate the atom list.
    generate_file(config, 'GeneratedAtomList', 'declare')

    # Generate the prototype list.
    generate_file(config, 'PrototypeList', 'declare')

    # Generate the common code.
    generate_file(config, 'RegisterBindings', 'declare')
    generate_file(config, 'RegisterBindings', 'define')

    generate_file(config, 'UnionTypes', 'declare')
    generate_file(config, 'UnionTypes', 'define')
    generate_file(config, 'UnionConversions', 'declare')

if __name__ == '__main__':
    main()
dom/bindings/Makefile.in
@ -1,245 +1,85 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

abs_dist := $(abspath $(DIST))
webidl_base := $(topsrcdir)/dom/webidl

webidl_base = $(topsrcdir)/dom/webidl
# Generated by moz.build
include webidlsrcs.mk

binding_include_path := mozilla/dom
webidl_files += $(generated_events_webidl_files)
all_webidl_files = $(webidl_files) $(generated_webidl_files) $(preprocessed_webidl_files)

# Set exported_binding_headers before adding the test IDL to the mix
exported_binding_headers := $(subst .webidl,Binding.h,$(all_webidl_files))
exported_generated_events_headers := $(subst .webidl,.h,$(generated_events_webidl_files))

# Set linked_binding_cpp_files before adding the test IDL to the mix
linked_binding_cpp_files := $(subst .webidl,Binding.cpp,$(all_webidl_files))
linked_generated_events_cpp_files := $(subst .webidl,.cpp,$(generated_events_webidl_files))

all_webidl_files += $(test_webidl_files) $(preprocessed_test_webidl_files)

generated_header_files := $(subst .webidl,Binding.h,$(all_webidl_files)) $(exported_generated_events_headers)
generated_cpp_files := $(subst .webidl,Binding.cpp,$(all_webidl_files)) $(linked_generated_events_cpp_files)

# We want to be able to only regenerate the .cpp and .h files that really need
# to change when a .webidl file changes. We do this by making the
# binding_dependency_trackers targets have dependencies on the right .webidl
# files via generated .pp files, having a .BindingGen target that depends on the
# binding_dependency_trackers and which has all the generated binding .h/.cpp
# depending on it, and then in the make commands for that target being able to
# check which exact binding_dependency_trackers changed.
binding_dependency_trackers := $(subst .webidl,Binding,$(all_webidl_files))

globalgen_targets := \
  GeneratedAtomList.h \
  PrototypeList.h \
  RegisterBindings.h \
  RegisterBindings.cpp \
  UnionTypes.h \
  UnionTypes.cpp \
  UnionConversions.h \
  $(NULL)

# Nasty hack: when the test/Makefile.in invokes us to do codegen, it
# uses a target of
# "export TestExampleInterface-example TestExampleProxyInterface-example".
# We don't actually need to load our .o.pp files in that case, so just
# pretend like we have no CPPSRCS if that's the target. It makes the
# test cycle much faster, which is why we're doing it.
#
# XXXbz We could try to cheat even more and only include our CPPSRCS
# when $(MAKECMDGOALS) contains libs, so that we can skip loading all
# those .o.pp when trying to make a single .cpp file too, but that
# would break |make FooBinding.o(bj)|. Ah, well.
ifneq (export TestExampleInterface-example TestExampleProxyInterface-example,$(MAKECMDGOALS))
CPPSRCS = \
  $(unified_binding_cpp_files) \
  $(linked_generated_events_cpp_files) \
  $(filter %.cpp, $(globalgen_targets)) \
  $(NULL)
ifdef GNU_CC
OS_CXXFLAGS += -Wno-uninitialized
endif

ABS_DIST := $(abspath $(DIST))
# These come from webidlsrcs.mk.
# TODO Write directly into backend.mk.
CPPSRCS += $(globalgen_sources) $(unified_binding_cpp_files)

EXTRA_EXPORT_MDDEPEND_FILES := $(addsuffix .pp,$(binding_dependency_trackers))

EXPORTS_GENERATED_FILES := $(exported_binding_headers) $(exported_generated_events_headers)
EXPORTS_GENERATED_DEST := $(ABS_DIST)/include/$(binding_include_path)
EXPORTS_GENERATED_TARGET := export
INSTALL_TARGETS += EXPORTS_GENERATED

# Install auto-generated GlobalGen files. The rules for the install must
# be in the same target/subtier as GlobalGen.py, otherwise the files will not
# get installed into the appropriate location as they are generated.
globalgen_headers_FILES := \
  GeneratedAtomList.h \
  PrototypeList.h \
  RegisterBindings.h \
  UnionConversions.h \
  UnionTypes.h \
  $(NULL)
globalgen_headers_DEST = $(ABS_DIST)/include/mozilla/dom
globalgen_headers_TARGET := export
INSTALL_TARGETS += globalgen_headers
# Generated bindings reference *Binding.h, not mozilla/dom/*Binding.h. And,
# since we generate exported bindings directly to $(DIST)/include, we need
# to add that path to the search list.
#
# Ideally, binding generation uses the prefixed header file names.
# Bug 932092 tracks.
LOCAL_INCLUDES += -I$(DIST)/include/mozilla/dom

PYTHON_UNIT_TESTS += $(srcdir)/mozwebidlcodegen/test/test_mozwebidlcodegen.py

include $(topsrcdir)/config/rules.mk

ifdef GNU_CC
CXXFLAGS += -Wno-uninitialized
endif

# If you change bindinggen_dependencies here, change it in
# dom/bindings/test/Makefile.in too.
bindinggen_dependencies := \
  BindingGen.py \
  Bindings.conf \
  Configuration.py \
  Codegen.py \
  ParserResults.pkl \
  parser/WebIDL.py \
# TODO This list should be emitted to a .pp file via
# GenerateCSS2PropertiesWebIDL.py.
css2properties_dependencies = \
  $(topsrcdir)/layout/style/nsCSSPropList.h \
  $(topsrcdir)/layout/style/nsCSSPropAliasList.h \
  $(webidl_base)/CSS2Properties.webidl.in \
  $(webidl_base)/CSS2PropertiesProps.h \
  $(srcdir)/GenerateCSS2PropertiesWebIDL.py \
  $(GLOBAL_DEPS) \
  $(NULL)

CSS2Properties.webidl: $(topsrcdir)/layout/style/nsCSSPropList.h \
  $(topsrcdir)/layout/style/nsCSSPropAliasList.h \
  $(webidl_base)/CSS2Properties.webidl.in \
  $(webidl_base)/CSS2PropertiesProps.h \
  $(srcdir)/GenerateCSS2PropertiesWebIDL.py \
  $(GLOBAL_DEPS)
        $(CPP) $(DEFINES) $(ACDEFINES) -I$(topsrcdir)/layout/style $(webidl_base)/CSS2PropertiesProps.h | \
          PYTHONDONTWRITEBYTECODE=1 $(PYTHON) \
          $(srcdir)/GenerateCSS2PropertiesWebIDL.py $(webidl_base)/CSS2Properties.webidl.in > CSS2Properties.webidl
CSS2Properties.webidl: $(css2properties_dependencies)
        $(CPP) $(DEFINES) $(ACDEFINES) -I$(topsrcdir)/layout/style \
          $(webidl_base)/CSS2PropertiesProps.h | \
          PYTHONDONTWRITEBYTECODE=1 $(PYTHON) \
          $(srcdir)/GenerateCSS2PropertiesWebIDL.py \
          $(webidl_base)/CSS2Properties.webidl.in > $@

$(webidl_files): %: $(webidl_base)/%
        $(INSTALL) $(IFLAGS1) $(webidl_base)/$* .

$(test_webidl_files): %: $(srcdir)/test/%
        $(INSTALL) $(IFLAGS1) $(srcdir)/test/$* .

# We can't easily use PP_TARGETS here because it insists on outputting targets
# that look like "$(CURDIR)/foo" whereas we want our target to just be "foo".
# Make sure to include $(GLOBAL_DEPS) so we pick up changes to what symbols are
# defined. Also make sure to remove $@ before writing to it, because otherwise
# if a file goes from non-preprocessed to preprocessed we can end up writing to
# a symlink, which will clobber files in the srcdir, which is bad.
$(preprocessed_webidl_files): %: $(webidl_base)/% $(GLOBAL_DEPS)
        $(RM) $@
        $(call py_action,preprocessor, \
          $(DEFINES) $(ACDEFINES) $(XULPPFLAGS) $(webidl_base)/$* -o $@)

# See the comment about PP_TARGETS for $(preprocessed_webidl_files)
$(preprocessed_test_webidl_files): %: $(srcdir)/test/% $(GLOBAL_DEPS)
        $(RM) $@
        $(call py_action,preprocessor, \
          $(DEFINES) $(ACDEFINES) $(XULPPFLAGS) $(srcdir)/test/$* -o $@)

# Make is dumb and can get confused between "foo" and "$(CURDIR)/foo". Make
# sure that the latter depends on the former, since the latter gets used in .pp
# files.
all_webidl_files_absolute = $(addprefix $(CURDIR)/,$(all_webidl_files))
$(all_webidl_files_absolute): $(CURDIR)/%: %

$(generated_header_files): .BindingGen

$(generated_cpp_files): .BindingGen

# $(binding_dependency_trackers) pick up additional dependencies via .pp files
# The rule: just brings the tracker up to date, if it's out of date, so that
# we'll know that we have to redo binding generation and flag this prerequisite
# there as being newer than the bindinggen target.
$(binding_dependency_trackers):
        @$(TOUCH) $@

$(globalgen_targets): ParserResults.pkl

%-example: .BindingGen
        PYTHONDONTWRITEBYTECODE=1 $(PYTHON) $(topsrcdir)/config/pythonpath.py \
          $(PLY_INCLUDE) -I$(srcdir)/parser \
          $(srcdir)/ExampleGen.py \
          $(srcdir)/Bindings.conf $*

CACHE_DIR = _cache

globalgen_dependencies := \
  GlobalGen.py \
  Bindings.conf \
  Configuration.py \
  Codegen.py \
  parser/WebIDL.py \
  webidlsrcs.mk \
  $(all_webidl_files) \
  $(CACHE_DIR)/.done \
# Most of the logic for dependencies lives inside Python so it can be
# used by multiple build backends. We simply have rules to generate
# and include the .pp file.
#
# The generated .pp file contains all the important dependencies such as
# changes to .webidl or .py files should result in code generation being
# performed.
codegen_dependencies := \
  $(nonstatic_webidl_files) \
  $(GLOBAL_DEPS) \
  $(NULL)

$(CACHE_DIR)/.done:
        $(MKDIR) -p $(CACHE_DIR)
$(call include_deps,codegen.pp)

codegen.pp: $(codegen_dependencies)
        $(call py_action,webidl,$(srcdir))
        @$(TOUCH) $@

# Running GlobalGen.py updates ParserResults.pkl as a side-effect
ParserResults.pkl: $(globalgen_dependencies)
        $(info Generating global WebIDL files)
        PYTHONDONTWRITEBYTECODE=1 $(PYTHON) $(topsrcdir)/config/pythonpath.py \
          $(PLY_INCLUDE) -I$(srcdir)/parser \
          $(srcdir)/GlobalGen.py $(srcdir)/Bindings.conf . \
          --cachedir=$(CACHE_DIR) \
          $(all_webidl_files)

$(globalgen_headers_FILES): ParserResults.pkl

# Make sure .deps actually exists, since we'll try to write to it from
# BindingGen.py but we're typically running in the export phase, which is
# before anyone has bothered creating .deps.
# Then, pass our long lists through files to try to avoid blowing out the
# command line.
# Next, BindingGen.py will examine the changed dependency list to figure out
# what it really needs to regenerate.
# Finally, touch the .BindingGen file so that we don't have to keep redoing
# all that until something else actually changes.
.BindingGen: $(bindinggen_dependencies) $(binding_dependency_trackers)
        $(info Generating WebIDL bindings)
        $(MKDIR) -p .deps
        echo $(all_webidl_files) > .all-webidl-file-list
        echo $(generated_events_webidl_files) > .generated-events-webidl-files
        echo $? > .changed-dependency-list
        PYTHONDONTWRITEBYTECODE=1 $(PYTHON) $(topsrcdir)/config/pythonpath.py \
          $(PLY_INCLUDE) -I$(srcdir)/parser \
          $(srcdir)/BindingGen.py \
          $(srcdir)/Bindings.conf \
          $(CURDIR) \
          .all-webidl-file-list \
          .generated-events-webidl-files \
          .changed-dependency-list
        @$(TOUCH) $@
.PHONY: compiletests
compiletests:
        $(call SUBMAKE,libs,test)

GARBAGE += \
  webidlyacc.py \
  codegen.pp \
  codegen.json \
  parser.out \
  $(wildcard *-example.h) \
  $(wildcard *-example.cpp) \
  .BindingGen \
  .all-webidl-file-list \
  .generated-events-webidl-files \
  .changed-dependency-list \
  $(binding_dependency_trackers) \
  WebIDLGrammar.pkl \
  $(wildcard *.h) \
  $(wildcard *Binding.cpp) \
  $(wildcard *Event.cpp) \
  $(wildcard *-event.cpp) \
  $(wildcard *.webidl) \
  $(NULL)

# Make sure all binding header files are created during the export stage, so we
# don't have issues with .cpp files being compiled before we've generated the
# headers they depend on. This is really only needed for the test files, since
# the non-test headers are all exported above anyway. Note that this means that
# we do all of our codegen during export.
export:: $(generated_header_files)

distclean::
        -$(RM) \
          $(generated_header_files) \
          $(generated_cpp_files) \
          $(all_webidl_files) \
          $(globalgen_targets) \
          ParserResults.pkl \
          $(NULL)
DIST_GARBAGE += \
  file-lists.json \
  $(NULL)
dom/bindings/moz.build
@ -4,6 +4,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

TEST_DIRS += ['test']

EXPORTS.mozilla += [
    'ErrorResult.h',
]
dom/bindings/mozwebidlcodegen/__init__.py
@ -17,6 +17,7 @@ from copy import deepcopy

from mach.mixin.logging import LoggingMixin

from mozbuild.base import MozbuildObject
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from mozbuild.util import FileAvoidWrite
@ -521,3 +522,44 @@ class WebIDLCodegenManager(LoggingMixin):
                result[1].add(path)
            else:
                result[2].add(path)


def create_build_system_manager(topsrcdir, topobjdir, dist_dir):
    """Create a WebIDLCodegenManager for use by the build system."""
    src_dir = os.path.join(topsrcdir, 'dom', 'bindings')
    obj_dir = os.path.join(topobjdir, 'dom', 'bindings')

    with open(os.path.join(obj_dir, 'file-lists.json'), 'rb') as fh:
        files = json.load(fh)

    inputs = (files['webidls'], files['exported_stems'],
              files['generated_events_stems'])

    cache_dir = os.path.join(obj_dir, '_cache')
    try:
        os.makedirs(cache_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    return WebIDLCodegenManager(
        os.path.join(src_dir, 'Bindings.conf'),
        inputs,
        os.path.join(dist_dir, 'include', 'mozilla', 'dom'),
        obj_dir,
        os.path.join(obj_dir, 'codegen.json'),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(obj_dir, 'codegen.pp'),
        make_deps_target='codegen.pp',
    )


class BuildSystemWebIDL(MozbuildObject):
    @property
    def manager(self):
        if not hasattr(self, '_webidl_manager'):
            self._webidl_manager = create_build_system_manager(
                self.topsrcdir, self.topobjdir, self.distdir)

        return self._webidl_manager
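The same manager can also be driven outside of make, which is how the new action script below uses it. A minimal usage sketch, assuming an object directory that already contains dom/bindings/file-lists.json; the paths here are illustrative:

import os

from mozwebidlcodegen import create_build_system_manager

# Illustrative paths; in the build they come from the environment
# (see BuildSystemWebIDL.from_environment() used by action/webidl.py).
topsrcdir = os.path.expanduser('~/src/gecko')
topobjdir = os.path.join(topsrcdir, 'obj-debug')
dist_dir = os.path.join(topobjdir, 'dist')

# Reads <topobjdir>/dom/bindings/file-lists.json and returns a configured
# WebIDLCodegenManager.
manager = create_build_system_manager(topsrcdir, topobjdir, dist_dir)

# Regenerates whatever is out of date; the manager was configured above to
# record make dependencies in codegen.pp for later invocations.
manager.generate_build_files()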
dom/bindings/test/Makefile.in
@ -2,89 +2,23 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

# Do NOT export this library. We don't actually want our test code
# being added to libxul or anything.

# pymake can't handle descending into dom/bindings several times simultaneously
ifdef .PYMAKE
.NOTPARALLEL:
endif

# Need this for $(test_webidl_files)
include ../webidlsrcs.mk

# But the webidl actually lives in our parent dir
test_webidl_files := $(addprefix ../,$(test_webidl_files))
# Store the actual locations of our source preprocessed files, so we
# can depend on them sanely.
source_preprocessed_test_webidl_files := $(addprefix $(srcdir)/,$(preprocessed_test_webidl_files))
preprocessed_test_webidl_files := $(addprefix ../,$(preprocessed_test_webidl_files))

CPPSRCS += \
  $(subst .webidl,Binding.cpp,$(test_webidl_files)) \
  $(subst .webidl,Binding.cpp,$(preprocessed_test_webidl_files)) \
  $(NULL)

# If you change bindinggen_dependencies here, change it in
# dom/bindings/Makefile.in too. But note that we include ../Makefile
# here manually, since $(GLOBAL_DEPS) won't cover it.
bindinggen_dependencies := \
  ../BindingGen.py \
  ../Bindings.conf \
  ../Configuration.py \
  ../Codegen.py \
  ../ParserResults.pkl \
  ../parser/WebIDL.py \
  ../Makefile \
  $(GLOBAL_DEPS) \
  $(NULL)
# $(test_sources) comes from webidlsrcs.mk.
# TODO Update this variable in backend.mk.
CPPSRCS += $(addprefix ../,$(test_sources))

ifdef GNU_CC
CXXFLAGS += -Wno-uninitialized
OS_CXXFLAGS += -Wno-uninitialized
endif

# Bug 932082 tracks having bindings use namespaced includes.
LOCAL_INCLUDES += -I$(DIST)/include/mozilla/dom -I..

# Include rules.mk before any of our targets so our first target is coming from
# rules.mk and running make with no target in this dir does the right thing.
include $(topsrcdir)/config/rules.mk

$(CPPSRCS): .BindingGen

.BindingGen: $(bindinggen_dependencies) \
  $(test_webidl_files) \
  $(source_preprocessed_test_webidl_files) \
  $(NULL)
        # The export phase in dom/bindings is what actually looks at
        # dependencies and regenerates things as needed, so just go ahead and
        # make that phase here. Also make our example interface files. If the
        # target used here ever changes, change the conditional around
        # $(CPPSRCS) in dom/bindings/Makefile.in.
        $(MAKE) -C .. export TestExampleInterface-example TestExampleProxyInterface-example
        @$(TOUCH) $@

check::
        PYTHONDONTWRITEBYTECODE=1 $(PYTHON) $(topsrcdir)/config/pythonpath.py \
          $(PLY_INCLUDE) $(srcdir)/../parser/runtests.py

# Since we define MOCHITEST_FILES, config/makefiles/mochitest.mk goes ahead and
# sets up a rule with libs:: in it, which makes our .DEFAULT_TARGET be "libs".
# Then rules.mk does |.DEFAULT_TARGET ?= default| which leaves it as "libs". So
# if we make without an explicit target in this directory, we try to make
# "libs", but with a $(MAKECMDGOALS) of empty string. And then rules.mk
# helpfully does not include our *.o.pp files, since it includes them only if
# filtering some stuff out from $(MAKECMDGOALS) leaves it nonempty. The upshot
# is that if some headers change and we run make in this dir without an explicit
# target things don't get rebuilt.
#
# On the other hand, if we set .DEFAULT_TARGET to "default" explicitly here,
# then rules.mk will reinvoke make with "export" and "libs" but this time they
# will be passed as explicit targets, show up in $(MAKECMDGOALS), and things
# will work. Do this at the end of our Makefile so the rest of the build system
# does not get a chance to muck with it after we set it.
.DEFAULT_GOAL := default

# Make sure to add .BindingGen to GARBAGE so we'll rebuild our example
# files if someone goes through and deletes GARBAGE all over, which
# will delete example files from our parent dir.
GARBAGE += \
  .BindingGen \
  $(NULL)
dom/bindings/test/moz.build
@ -14,9 +14,20 @@ MOCHITEST_MANIFESTS += ['mochitest.ini']

MOCHITEST_CHROME_MANIFESTS += ['chrome.ini']

TEST_WEBIDL_FILES += [
    'TestDictionary.webidl',
    'TestJSImplInheritanceGen.webidl',
    'TestTypedef.webidl',
]

PREPROCESSED_TEST_WEBIDL_FILES += [
    'TestCodeGen.webidl',
    'TestExampleGen.webidl',
    'TestJSImplGen.webidl',
]

LOCAL_INCLUDES += [
    '/dom/bindings',
    '/js/xpconnect/src',
    '/js/xpconnect/wrappers',
]
dom/moz.build
@ -107,12 +107,9 @@ if CONFIG['MOZ_NFC']:
if CONFIG['MOZ_B2G']:
    PARALLEL_DIRS += ['downloads']

# bindings/test is here, because it needs to build after bindings/, and
# we build subdirectories before ourselves.
TEST_DIRS += [
    'tests',
    'imptests',
    'bindings/test',
]

if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('gtk2', 'cocoa', 'windows', 'android', 'qt', 'os2'):
dom/webidl/moz.build
@ -553,18 +553,6 @@ if CONFIG['MOZ_B2G_FM']:
        'FMRadio.webidl',
    ]

if CONFIG['ENABLE_TESTS']:
    TEST_WEBIDL_FILES += [
        'TestDictionary.webidl',
        'TestJSImplInheritanceGen.webidl',
        'TestTypedef.webidl',
    ]
    PREPROCESSED_TEST_WEBIDL_FILES += [
        'TestCodeGen.webidl',
        'TestExampleGen.webidl',
        'TestJSImplGen.webidl',
    ]

GENERATED_EVENTS_WEBIDL_FILES = [
    'BlobEvent.webidl',
    'CallGroupErrorEvent.webidl',
python/mozbuild/mozbuild/action/webidl.py (new file, 17 lines)
@ -0,0 +1,17 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys

from mozwebidlcodegen import BuildSystemWebIDL


def main(argv):
    """Perform WebIDL code generation required by the build system."""
    manager = BuildSystemWebIDL.from_environment().manager
    manager.generate_build_files()


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
python/mozbuild/mozbuild/backend/common.py
@ -10,15 +10,19 @@ import re

import mozpack.path as mozpath

from mozbuild.preprocessor import Preprocessor

from .base import BuildBackend

from ..frontend.data import (
    ConfigFileSubstitution,
    HeaderFileSubstitution,
    GeneratedEventWebIDLFile,
    GeneratedWebIDLFile,
    PreprocessedTestWebIDLFile,
    PreprocessedWebIDLFile,
    TestManifest,
    TestWebIDLFile,
    XPIDLFile,
    WebIDLFile,
)

from ..util import DefaultOnReadDict
@ -56,6 +60,79 @@ class XPIDLManager(object):
        self.modules.setdefault(entry['module'], set()).add(entry['root'])


class WebIDLCollection(object):
    """Collects WebIDL info referenced during the build."""

    def __init__(self):
        self.sources = set()
        self.generated_sources = set()
        self.generated_events_sources = set()
        self.preprocessed_sources = set()
        self.test_sources = set()
        self.preprocessed_test_sources = set()

    def all_regular_sources(self):
        return self.sources | self.generated_sources | \
            self.generated_events_sources | self.preprocessed_sources

    def all_regular_basenames(self):
        return [os.path.basename(source) for source in self.all_regular_sources()]

    def all_regular_stems(self):
        return [os.path.splitext(b)[0] for b in self.all_regular_basenames()]

    def all_regular_bindinggen_stems(self):
        for stem in self.all_regular_stems():
            yield '%sBinding' % stem

        for source in self.generated_events_sources:
            yield os.path.splitext(os.path.basename(source))[0]

    def all_regular_cpp_basenames(self):
        for stem in self.all_regular_bindinggen_stems():
            yield '%s.cpp' % stem

    def all_test_sources(self):
        return self.test_sources | self.preprocessed_test_sources

    def all_test_basenames(self):
        return [os.path.basename(source) for source in self.all_test_sources()]

    def all_test_stems(self):
        return [os.path.splitext(b)[0] for b in self.all_test_basenames()]

    def all_test_cpp_basenames(self):
        return ['%sBinding.cpp' % s for s in self.all_test_stems()]

    def all_static_sources(self):
        return self.sources | self.generated_events_sources | \
            self.test_sources

    def all_non_static_sources(self):
        return self.generated_sources | self.all_preprocessed_sources()

    def all_non_static_basenames(self):
        return [os.path.basename(s) for s in self.all_non_static_sources()]

    def all_preprocessed_sources(self):
        return self.preprocessed_sources | self.preprocessed_test_sources

    def all_sources(self):
        return set(self.all_regular_sources()) | set(self.all_test_sources())

    def all_basenames(self):
        return [os.path.basename(source) for source in self.all_sources()]

    def all_stems(self):
        return [os.path.splitext(b)[0] for b in self.all_basenames()]

    def generated_events_basenames(self):
        return [os.path.basename(s) for s in self.generated_events_sources]

    def generated_events_stems(self):
        return [os.path.splitext(b)[0] for b in self.generated_events_basenames()]


class TestManager(object):
    """Helps hold state related to tests."""
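The naming rules that previously lived in the Makefile's $(subst .webidl,...) calls are now methods on WebIDLCollection. A small usage sketch follows, with hypothetical paths and assuming the class is importable from mozbuild.backend.common, showing how binding and event sources map to generated .cpp basenames:

from mozbuild.backend.common import WebIDLCollection

collection = WebIDLCollection()
collection.sources.add('/src/dom/webidl/Node.webidl')
collection.generated_events_sources.add('/src/dom/webidl/BlobEvent.webidl')
collection.test_sources.add('/src/dom/bindings/test/TestCodeGen.webidl')

# Regular bindings become <stem>Binding.cpp; generated events additionally
# produce a plain <stem>.cpp, so BlobEvent contributes two entries here.
print(sorted(collection.all_regular_cpp_basenames()))
# -> ['BlobEvent.cpp', 'BlobEventBinding.cpp', 'NodeBinding.cpp']

# Test bindings are tracked separately so they are never shipped.
print(collection.all_test_cpp_basenames())
# -> ['TestCodeGenBinding.cpp']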
@ -90,6 +167,7 @@ class CommonBackend(BuildBackend):
    def _init(self):
        self._idl_manager = XPIDLManager(self.environment)
        self._test_manager = TestManager(self.environment)
        self._webidls = WebIDLCollection()

    def consume_object(self, obj):
        if isinstance(obj, TestManifest):
@ -113,6 +191,30 @@ class CommonBackend(BuildBackend):
            self._create_config_header(obj)
            self.backend_input_files.add(obj.input_path)

        # We should consider aggregating WebIDL types in emitter.py.
        elif isinstance(obj, WebIDLFile):
            self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))

        elif isinstance(obj, GeneratedEventWebIDLFile):
            self._webidls.generated_events_sources.add(mozpath.join(
                obj.srcdir, obj.basename))

        elif isinstance(obj, TestWebIDLFile):
            self._webidls.test_sources.add(mozpath.join(obj.srcdir,
                obj.basename))

        elif isinstance(obj, PreprocessedTestWebIDLFile):
            self._webidls.preprocessed_test_sources.add(mozpath.join(
                obj.srcdir, obj.basename))

        elif isinstance(obj, GeneratedWebIDLFile):
            self._webidls.generated_sources.add(mozpath.join(obj.srcdir,
                obj.basename))

        elif isinstance(obj, PreprocessedWebIDLFile):
            self._webidls.preprocessed_sources.add(mozpath.join(
                obj.srcdir, obj.basename))

        else:
            return
@ -122,6 +224,8 @@ class CommonBackend(BuildBackend):
        if len(self._idl_manager.idls):
            self._handle_idl_manager(self._idl_manager)

        self._handle_webidl_collection(self._webidls)

        # Write out a machine-readable file describing every test.
        path = mozpath.join(self.environment.topobjdir, 'all-tests.json')
        with self._write_file(path) as fh:
python/mozbuild/mozbuild/backend/recursivemake.py
@ -5,13 +5,15 @@
from __future__ import unicode_literals

import itertools
import json
import logging
import os
import re
import types

from collections import namedtuple

import mozwebidlcodegen

import mozbuild.makeutil as mozmakeutil
from mozpack.copier import FilePurger
from mozpack.manifests import (
@ -25,9 +27,7 @@ from ..frontend.data import (
    Defines,
    DirectoryTraversal,
    Exports,
    GeneratedEventWebIDLFile,
    GeneratedInclude,
    GeneratedWebIDLFile,
    HostProgram,
    HostSimpleProgram,
    InstallationTarget,
@ -35,17 +35,13 @@ from ..frontend.data import (
    JavaJarData,
    LibraryDefinition,
    LocalInclude,
    PreprocessedTestWebIDLFile,
    PreprocessedWebIDLFile,
    Program,
    SandboxDerived,
    SandboxWrapped,
    SimpleProgram,
    TestWebIDLFile,
    TestManifest,
    VariablePassthru,
    XPIDLFile,
    TestManifest,
    WebIDLFile,
)
from ..util import (
    ensureParentDir,
@ -270,12 +266,6 @@ class RecursiveMakeBackend(CommonBackend):

        self._backend_files = {}
        self._ipdl_sources = set()
        self._webidl_sources = set()
        self._generated_events_webidl_sources = set()
        self._test_webidl_sources = set()
        self._preprocessed_test_webidl_sources = set()
        self._preprocessed_webidl_sources = set()
        self._generated_webidl_sources = set()

        def detailed(summary):
            s = '{:d} total backend files. {:d} created; {:d} updated; {:d} unchanged'.format(
@ -410,33 +400,6 @@ class RecursiveMakeBackend(CommonBackend):
        elif isinstance(obj, IPDLFile):
            self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))

        elif isinstance(obj, WebIDLFile):
            self._webidl_sources.add(mozpath.join(obj.srcdir, obj.basename))
            self._process_webidl_basename(obj.basename)

        elif isinstance(obj, GeneratedEventWebIDLFile):
            self._generated_events_webidl_sources.add(mozpath.join(obj.srcdir, obj.basename))

        elif isinstance(obj, TestWebIDLFile):
            self._test_webidl_sources.add(mozpath.join(obj.srcdir,
                obj.basename))
            # Test WebIDL files are not exported.

        elif isinstance(obj, PreprocessedTestWebIDLFile):
            self._preprocessed_test_webidl_sources.add(mozpath.join(obj.srcdir,
                obj.basename))
            # Test WebIDL files are not exported.

        elif isinstance(obj, GeneratedWebIDLFile):
            self._generated_webidl_sources.add(mozpath.join(obj.srcdir,
                obj.basename))
            self._process_webidl_basename(obj.basename)

        elif isinstance(obj, PreprocessedWebIDLFile):
            self._preprocessed_webidl_sources.add(mozpath.join(obj.srcdir,
                obj.basename))
            self._process_webidl_basename(obj.basename)

        elif isinstance(obj, Program):
            self._process_program(obj.program, backend_file)
@ -607,6 +570,9 @@ class RecursiveMakeBackend(CommonBackend):
                                  poison_windows_h=False,
                                  files_per_unified_file=16):

        # In case it's a generator.
        files = sorted(files)

        explanation = "\n" \
            "# We build files in 'unified' mode by including several files\n" \
            "# together into a single source file. This cuts down on\n" \
@ -632,7 +598,7 @@ class RecursiveMakeBackend(CommonBackend):
            return itertools.izip_longest(fillvalue=dummy_fill_value, *args)

        for i, unified_group in enumerate(grouper(files_per_unified_file,
                                                  sorted(files))):
                                                  files)):
            just_the_filenames = list(filter_out_dummy(unified_group))
            yield '%s%d.%s' % (unified_prefix, i, unified_suffix), just_the_filenames
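The grouper() above chunks the sorted list of generated .cpp files into fixed-size groups, one group per unified source file. A standalone Python 3 sketch of the same chunking, with zip_longest in place of izip_longest and illustrative file names:

from itertools import zip_longest

FILES_PER_UNIFIED_FILE = 16


def unified_groups(filenames, prefix='UnifiedBindings', suffix='cpp',
                   group_size=FILES_PER_UNIFIED_FILE):
    """Yield (unified_name, member_files) chunks, mirroring the backend logic."""
    files = sorted(filenames)
    args = [iter(files)] * group_size
    for i, group in enumerate(zip_longest(*args, fillvalue=None)):
        members = [f for f in group if f is not None]
        yield '%s%d.%s' % (prefix, i, suffix), members


# Example: 20 bindings land in UnifiedBindings0.cpp (16 files) and
# UnifiedBindings1.cpp (4 files).
names = ['Iface%02dBinding.cpp' % n for n in range(20)]
for unified, members in unified_groups(names):
    print(unified, len(members))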
@ -750,42 +716,12 @@ class RecursiveMakeBackend(CommonBackend):
        with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
            mk.dump(ipdls, removal_guard=False)

        self._may_skip['compile'] -= set(['ipc/ipdl'])

        # Write out master lists of WebIDL source files.
        bindings_dir = mozpath.join(self.environment.topobjdir, 'dom', 'bindings')

        mk = mozmakeutil.Makefile()

        def write_var(variable, sources):
            files = [mozpath.basename(f) for f in sorted(sources)]
            mk.add_statement('%s += %s' % (variable, ' '.join(files)))
        write_var('webidl_files', self._webidl_sources)
        write_var('generated_events_webidl_files', self._generated_events_webidl_sources)
        write_var('test_webidl_files', self._test_webidl_sources)
        write_var('preprocessed_test_webidl_files', self._preprocessed_test_webidl_sources)
        write_var('generated_webidl_files', self._generated_webidl_sources)
        write_var('preprocessed_webidl_files', self._preprocessed_webidl_sources)

        all_webidl_files = itertools.chain(iter(self._webidl_sources),
                                           iter(self._generated_events_webidl_sources),
                                           iter(self._generated_webidl_sources),
                                           iter(self._preprocessed_webidl_sources))
        all_webidl_files = [mozpath.basename(x) for x in all_webidl_files]
        all_webidl_sources = [re.sub(r'\.webidl$', 'Binding.cpp', x) for x in all_webidl_files]

        self._add_unified_build_rules(mk, all_webidl_sources,
                                      bindings_dir,
                                      unified_prefix='UnifiedBindings',
                                      unified_files_makefile_variable='unified_binding_cpp_files',
                                      poison_windows_h=True)

        # Assume that Somebody Else has responsibility for correctly
        # specifying removal dependencies for |all_webidl_sources|.
        with self._write_file(mozpath.join(bindings_dir, 'webidlsrcs.mk')) as webidls:
            mk.dump(webidls, removal_guard=False)

        self._may_skip['compile'] -= set(['dom/bindings', 'dom/bindings/test'])
        # These contain autogenerated sources that the build config doesn't
        # yet know about.
        # TODO Emit GENERATED_SOURCES so these special cases are dealt with
        # the proper way.
        self._may_skip['compile'] -= {'ipc/ipdl'}
        self._may_skip['compile'] -= {'dom/bindings', 'dom/bindings/test'}

        self._fill_root_mk()
@ -1026,10 +962,6 @@ class RecursiveMakeBackend(CommonBackend):
    def _process_host_simple_program(self, program, backend_file):
        backend_file.write('HOST_SIMPLE_PROGRAMS += %s\n' % program)

    def _process_webidl_basename(self, basename):
        header = 'mozilla/dom/%sBinding.h' % mozpath.splitext(basename)[0]
        self._install_manifests['dist_include'].add_optional_exists(header)

    def _process_test_manifest(self, obj, backend_file):
        # Much of the logic in this function could be moved to CommonBackend.
        self.backend_input_files.add(mozpath.join(obj.topsrcdir,
@ -1171,3 +1103,86 @@ class RecursiveMakeBackend(CommonBackend):
            # Makefile.in files, the old file will get replaced with
            # the autogenerated one automatically.
            self.backend_input_files.add(obj.input_path)

    def _handle_webidl_collection(self, webidls):
        if not webidls.all_stems():
            return

        bindings_dir = mozpath.join(self.environment.topobjdir, 'dom',
            'bindings')

        all_inputs = set(webidls.all_static_sources())
        for s in webidls.all_non_static_basenames():
            all_inputs.add(mozpath.join(bindings_dir, s))

        generated_events_stems = webidls.generated_events_stems()
        exported_stems = webidls.all_regular_stems()

        # The WebIDL manager reads configuration from a JSON file. So, we
        # need to write this file early.
        o = dict(
            webidls=sorted(all_inputs),
            generated_events_stems=sorted(generated_events_stems),
            exported_stems=sorted(exported_stems),
        )

        file_lists = mozpath.join(bindings_dir, 'file-lists.json')
        with self._write_file(file_lists) as fh:
            json.dump(o, fh, sort_keys=True, indent=2)

        manager = mozwebidlcodegen.create_build_system_manager(
            self.environment.topsrcdir,
            self.environment.topobjdir,
            mozpath.join(self.environment.topobjdir, 'dist')
        )

        # The manager is the source of truth on what files are generated.
        # Consult it for install manifests.
        include_dir = mozpath.join(self.environment.topobjdir, 'dist',
            'include')
        for f in manager.expected_build_output_files():
            if f.startswith(include_dir):
                self._install_manifests['dist_include'].add_optional_exists(
                    mozpath.relpath(f, include_dir))

        # We pass WebIDL info to make via a completely generated make file.
        mk = Makefile()
        mk.add_statement('nonstatic_webidl_files := %s' % ' '.join(
            sorted(webidls.all_non_static_basenames())))
        mk.add_statement('globalgen_sources := %s' % ' '.join(
            sorted(manager.GLOBAL_DEFINE_FILES)))
        mk.add_statement('test_sources := %s' % ' '.join(
            sorted('%sBinding.cpp' % s for s in webidls.all_test_stems())))

        # Add rules to preprocess bindings.
        # This should ideally be using PP_TARGETS. However, since the input
        # filenames match the output filenames, the existing PP_TARGETS rules
        # result in circular dependencies and other make weirdness. One
        # solution is to rename the input or output files respectively. See
        # bug 928195 comment 129.
        for source in sorted(webidls.all_preprocessed_sources()):
            basename = os.path.basename(source)
            rule = mk.create_rule([basename])
            rule.add_dependencies([source, '$(GLOBAL_DEPS)'])
            rule.add_commands([
                # Remove the file before writing so bindings that go from
                # static to preprocessed don't end up writing to a symlink,
                # which would modify content in the source directory.
                '$(RM) $@',
                '$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
                    '$(XULPPFLAGS) $< -o $@)'
            ])

        # Bindings are compiled in unified mode to speed up compilation and
        # to reduce linker memory size. Note that test bindings are separated
        # from regular ones so test bindings aren't shipped.
        self._add_unified_build_rules(mk,
            webidls.all_regular_cpp_basenames(),
            bindings_dir,
            unified_prefix='UnifiedBindings',
            unified_files_makefile_variable='unified_binding_cpp_files',
            poison_windows_h=True)

        webidls_mk = mozpath.join(bindings_dir, 'webidlsrcs.mk')
        with self._write_file(webidls_mk) as fh:
            mk.dump(fh, removal_guard=False)
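The backend and the Python codegen manager communicate through dom/bindings/file-lists.json: _handle_webidl_collection() writes it (above) and create_build_system_manager() reads it back. A minimal sketch of that round trip, with illustrative content and a temporary directory standing in for the object directory:

import json
import os
import tempfile

# Illustrative content; in the real build the lists come from moz.build data
# and the directory is <topobjdir>/dom/bindings.
bindings_dir = tempfile.mkdtemp()
file_lists = {
    'webidls': ['/src/dom/webidl/Node.webidl', '/src/dom/webidl/BlobEvent.webidl'],
    'exported_stems': ['Node', 'BlobEvent'],
    'generated_events_stems': ['BlobEvent'],
}

path = os.path.join(bindings_dir, 'file-lists.json')
with open(path, 'w') as fh:
    json.dump(file_lists, fh, sort_keys=True, indent=2)

# Mirrors what create_build_system_manager() does before constructing the
# WebIDLCodegenManager: read the lists back and build its inputs tuple.
with open(path) as fh:
    files = json.load(fh)
inputs = (files['webidls'], files['exported_stems'], files['generated_events_stems'])
print(inputs)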
python/mozbuild/mozbuild/base.py
@ -273,6 +273,10 @@ class MozbuildObject(ProcessExecutionMixin):
    def bindir(self):
        return os.path.join(self.topobjdir, 'dist', 'bin')

    @property
    def includedir(self):
        return os.path.join(self.topobjdir, 'dist', 'include')

    @property
    def statedir(self):
        return os.path.join(self.topobjdir, '.mozbuild')