Bug 1542963 - Run './mach lint ... --fix' on mozbuild/mozbuild, undoing some Black changes. r=#build

Lint python/mozbuild/{mozbuild,mozpack}.

Ran './mach lint -l py2 -l flake8 -l shellcheck -l codespell -l yaml python/mozbuild/{mozbuild,mozpack}/ --fix' to undo
some Black changes and get closer to making this folder validate cleanly on every lint run.
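For illustration, the kind of mechanical rewrite this autopep8-style pass applies looks roughly like the sketch below (a hypothetical example in the spirit of the hunks that follow, not an actual hunk from this commit; the function name is invented):

    # Before: E225 (missing whitespace around operator) and E501 (line too long).
    import subprocess

    def archive(path):
        use_upx=True
        subprocess.check_call(['7z', 'a', '-r', '-t7z', path, '-mx', '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19'])

    # After: operators spaced, the long argument list wrapped to align with the
    # opening parenthesis, and two blank lines before the top-level def (E302).
    import subprocess


    def archive(path):
        use_upx = True
        subprocess.check_call(['7z', 'a', '-r', '-t7z', path, '-mx',
                               '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19'])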

Differential Revision: https://phabricator.services.mozilla.com/D26640

--HG--
extra : rebase_source : 6b69b6ebcac73835b752607f7b5b4429de7f95cf
extra : intermediate-source : 114355f83d36188fac592c8c6497242c71b27ad6
extra : source : 51fbdf63bbce8f75fac407de305f826dc775fcb5
Justin Wood 2019-04-24 22:12:09 -04:00
parent 8eba83f0f6
commit c078e63bbc
125 changed files with 1988 additions and 1581 deletions

View File

@ -17,6 +17,7 @@ from mozbuild.util import (
lock_file,
)
def addEntriesToListFile(listFile, entries):
"""Given a file |listFile| containing one entry per line,
add each entry in |entries| to the file, unless it is already

View File

@ -287,6 +287,7 @@ def check_networking(binary):
print('TEST-PASS | check_networking | {}'.format(basename))
return retcode
def checks(target, binary):
# The clang-plugin is built as target but is really a host binary.
# Cheat and pretend we were passed the right argument.

View File

@ -11,6 +11,7 @@ import shutil
import sys
import os
def dump_symbols(target, tracking_file, count_ctors=False):
# Our tracking file, if present, will contain path(s) to the previously generated
# symbols. Remove them in this case so we don't simply accumulate old symbols
@ -73,6 +74,7 @@ def dump_symbols(target, tracking_file, count_ctors=False):
fh.write(out_files)
fh.flush()
def main(argv):
parser = argparse.ArgumentParser(
usage="Usage: dumpsymbols.py <library or program> <tracking file>")

View File

@ -13,6 +13,7 @@ import mozpack.path as mozpath
import buildconfig
from mozbuild.base import BuildEnvironmentNotFoundException
def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
tmpdir = tempfile.mkdtemp(prefix='tmp')
try:
@ -30,7 +31,8 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
except BuildEnvironmentNotFoundException:
# configure hasn't been run, just use the default
sevenz = '7z'
subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx', '-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
subprocess.check_call([sevenz, 'a', '-r', '-t7z', mozpath.join(tmpdir, 'app.7z'), '-mx',
'-m0=BCJ2', '-m1=LZMA:d25', '-m2=LZMA:d19', '-m3=LZMA:d19', '-mb0:1', '-mb0s1:2', '-mb0s2:3'])
with open(package, 'wb') as o:
for i in [final_sfx, tagfile, mozpath.join(tmpdir, 'app.7z')]:
@ -41,6 +43,7 @@ def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
shutil.move('core', pkg_dir)
shutil.rmtree(tmpdir)
def main(args):
if len(args) != 4:
print('Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>',
@ -50,5 +53,6 @@ def main(args):
archive_exe(args[0], args[1], args[2], args[3], args[4])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -8,10 +8,12 @@ import shutil
import sys
import subprocess
def extract_exe(package, target):
subprocess.check_call(['7z', 'x', package, 'core'])
shutil.move('core', target)
def main(args):
if len(args) != 2:
print('Usage: exe_7z_extract.py <package> <target>',
@ -21,5 +23,6 @@ def main(args):
extract_exe(args[0], args[1])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -116,5 +116,6 @@ def main(argv):
return 1
return ret
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -23,13 +23,15 @@ else:
localeSearchInfo = {}
localeSearchInfo["default"] = searchinfo["default"]
def validateDefault(key):
if (not key in searchinfo["default"]):
print >>sys.stderr, "Error: Missing default %s in list.json" % (key)
sys.exit(1)
validateDefault("searchDefault");
validateDefault("visibleDefaultEngines");
validateDefault("searchDefault")
validateDefault("visibleDefaultEngines")
# If the selected locale doesn't have a searchDefault,
# use the global one.
@ -57,11 +59,12 @@ if "regionOverrides" in searchinfo:
if set(visibleDefaultEngines) & enginesToOverride:
if region not in localeSearchInfo:
localeSearchInfo[region] = {}
localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(visibleDefaultEngines)
localeSearchInfo[region]["visibleDefaultEngines"] = copy.deepcopy(
visibleDefaultEngines)
for i, engine in enumerate(localeSearchInfo[region]["visibleDefaultEngines"]):
if engine in regionOverrides[region]:
localeSearchInfo[region]["visibleDefaultEngines"][i] = regionOverrides[region][engine]
output.write(json.dumps(localeSearchInfo, ensure_ascii=False).encode('utf8'))
output.close();
output.close()

View File

@ -102,7 +102,8 @@ def main(output, *args, **kwargs):
def add_names(names, defaults={}):
for name in names:
site = copy.deepcopy(defaults)
site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
site.update(properties.get_dict('browser.suggestedsites.{name}'.format(
name=name), required_keys=('title', 'url', 'bgcolor')))
site['imageurl'] = image_url_template.format(name=name)
sites.append(site)
@ -129,12 +130,14 @@ def main(output, *args, **kwargs):
('browser.suggestedsites.restricted.list', {'restricted': True}),
]
if opts.verbose:
print('Reading {len} suggested site lists: {lists}'.format(len=len(lists), lists=[list_name for list_name, _ in lists]))
print('Reading {len} suggested site lists: {lists}'.format(
len=len(lists), lists=[list_name for list_name, _ in lists]))
for (list_name, list_item_defaults) in lists:
names = properties.get_list(list_name)
if opts.verbose:
print('Reading {len} suggested sites from {list}: {names}'.format(len=len(names), list=list_name, names=names))
print('Reading {len} suggested sites from {list}: {names}'.format(
len=len(names), list=list_name, names=names))
add_names(names, list_item_defaults)
# We must define at least one site -- that's what the fallback is for.

View File

@ -7,9 +7,11 @@ from __future__ import print_function
import sys
import subprocess
def make_unzip(package):
subprocess.check_call(['unzip', package])
def main(args):
if len(args) != 1:
print('Usage: make_unzip.py <package>',
@ -19,5 +21,6 @@ def main(args):
make_unzip(args[0])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -7,9 +7,11 @@ from __future__ import print_function
import sys
import subprocess
def make_zip(source, package):
subprocess.check_call(['zip', '-r9D', package, source, '-x', '\*/.mkdir.done'])
def main(args):
if len(args) != 2:
print('Usage: make_zip.py <source> <package>',
@ -19,5 +21,6 @@ def main(args):
make_zip(args[0], args[1])
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -70,7 +70,7 @@ def package_fennec_apk(inputs=[], omni_ja=None,
if verbose:
print('Packaging %s from %s' % (path, file.path))
if not os.path.exists(abspath):
raise ValueError('File %s not found (looked for %s)' % \
raise ValueError('File %s not found (looked for %s)' %
(file.path, abspath))
if jarrer.contains(path):
jarrer.remove(path)

View File

@ -23,6 +23,7 @@ def main(argv):
args = parser.parse_args(argv)
objdir_abspath = mozpath.abspath(buildconfig.topobjdir)
def is_valid_entry(entry):
if isinstance(entry[1], BaseFile):
entry_abspath = mozpath.abspath(entry[1].path)

View File

@ -15,6 +15,7 @@ def generate(output, *args):
pp.handleCommandLine(list(args), True)
return set(pp.includes)
def main(args):
pp = Preprocessor()
pp.handleCommandLine(args, True)

View File

@ -55,6 +55,7 @@ def process_define_file(output, input):
# via the command line, which raises a mass of macro
# redefinition warnings. Just handle those macros
# specially here.
def define_for_name(name, val):
define = "#define {name} {val}".format(name=name, val=val)
if name in ('WINVER', '_WIN32_WINNT'):

View File

@ -44,15 +44,15 @@ def process_manifest(destdir, paths, track,
for p, f in finder.find(dest):
remove_unaccounted.add(p, dummy_file)
remove_empty_directories=True
remove_all_directory_symlinks=True
remove_empty_directories = True
remove_all_directory_symlinks = True
else:
# If tracking is enabled and there is no file, we don't want to
# be removing anything.
remove_unaccounted = False
remove_empty_directories=False
remove_all_directory_symlinks=False
remove_empty_directories = False
remove_all_directory_symlinks = False
manifest = InstallManifest()
for path in paths:
@ -109,5 +109,6 @@ def main(argv):
rm_files=result.removed_files_count,
rm_dirs=result.removed_directories_count))
if __name__ == '__main__':
main(sys.argv[1:])

View File

@ -12,6 +12,7 @@ from mozpack.files import FileFinder
from mozpack.mozjar import JarWriter
import mozpack.path as mozpath
def make_archive(archive_name, base, exclude, include):
compress = ['**/*.sym']
finder = FileFinder(base, ignore=exclude)
@ -27,11 +28,13 @@ def make_archive(archive_name, base, exclude, include):
writer.add(p.encode('utf-8'), f, mode=f.mode,
compress=should_compress, skip_duplicates=True)
def main(argv):
parser = argparse.ArgumentParser(description='Produce a symbols archive')
parser.add_argument('archive', help='Which archive to generate')
parser.add_argument('base', help='Base directory to package')
parser.add_argument('--full-archive', action='store_true', help='Generate a full symbol archive')
parser.add_argument('--full-archive', action='store_true',
help='Generate a full symbol archive')
args = parser.parse_args(argv)
@ -47,5 +50,6 @@ def main(argv):
make_archive(args.archive, args.base, excludes, includes)
if __name__ == '__main__':
main(sys.argv[1:])

View File

@ -1300,5 +1300,6 @@ def main(argv, _skip_logging=False):
return 0 if process_command(options, args) else 1
if __name__ == "__main__": # pragma: no cover
sys.exit(main(sys.argv))

View File

@ -9,6 +9,7 @@ import subprocess
import sys
import os
def parse_outputs(crate_output, dep_outputs, pass_l_flag):
env = {}
args = []
@ -59,6 +60,7 @@ def parse_outputs(crate_output, dep_outputs, pass_l_flag):
return env, args
def wrap_rustc(args):
parser = argparse.ArgumentParser()
parser.add_argument('--crate-out', nargs='?')
@ -73,5 +75,6 @@ def wrap_rustc(args):
os.environ.update(new_env)
return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait()
if __name__ == '__main__':
sys.exit(wrap_rustc(sys.argv[1:]))

View File

@ -15,11 +15,13 @@ import os
from glob import glob
import manifestparser
def getIniTests(testdir):
mp = manifestparser.ManifestParser(strict=False)
mp.read(os.path.join(testdir, 'xpcshell.ini'))
return mp.tests
def verifyDirectory(initests, directory):
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for f in files:
@ -40,9 +42,11 @@ def verifyDirectory(initests, directory):
break
if not found:
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (name, os.path.join(directory, 'xpcshell.ini'))
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (
name, os.path.join(directory, 'xpcshell.ini'))
sys.exit(1)
def verifyIniFile(initests, directory):
files = glob(os.path.join(os.path.abspath(directory), "test_*"))
for test in initests:
@ -60,9 +64,11 @@ def verifyIniFile(initests, directory):
break
if not found:
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (name, directory)
print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (
name, directory)
sys.exit(1)
def main(argv):
if len(argv) < 2:
print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
@ -79,5 +85,6 @@ def main(argv):
verifyDirectory(initests, d)
verifyIniFile(initests, d)
if __name__ == '__main__':
main(sys.argv[1:])

View File

@ -114,5 +114,6 @@ def main(argv):
args.headerdir, args.xpcrsdir, args.xptdir, args.depsdir, args.module,
args.idls)
if __name__ == '__main__':
main(sys.argv[1:])

View File

@ -5,6 +5,7 @@
import os
import sqlite3 as lite
class Node(object):
def __init__(self, graph, node_id):
@ -44,6 +45,7 @@ class Node(object):
else:
return None
class Graph(object):
def __init__(self, path=None, connect=None):
@ -67,7 +69,7 @@ class Graph(object):
self.connect.close()
def query_arg(self, q, arg):
assert isinstance(arg, tuple) #execute() requires tuple argument
assert isinstance(arg, tuple) # execute() requires tuple argument
cursor = self.connect.cursor()
cursor.execute(q, arg)
return cursor
@ -127,5 +129,4 @@ class Graph(object):
def get_cost_dict(self):
if self.results is None:
self.populate()
return {k:v for k,v in self.results if v > 0}
return {k: v for k, v in self.results if v > 0}

View File

@ -17,24 +17,29 @@ PUSHLOG_CHUNK_SIZE = 500
URL = 'https://hg.mozilla.org/mozilla-central/json-pushes?'
def unix_epoch(date):
return (date - datetime(1970,1,1)).total_seconds()
return (date - datetime(1970, 1, 1)).total_seconds()
def unix_from_date(n, today):
return unix_epoch(today - timedelta(days=n))
def get_lastpid(session):
return session.get(URL+'&version=2').json()['lastpushid']
def get_pushlog_chunk(session, start, end):
# returns pushes sorted by date
res = session.get(URL+'version=1&startID={0}&\
endID={1}&full=1'.format(start, end)).json()
return sorted(res.items(), key = lambda x: x[1]['date'])
return sorted(res.items(), key=lambda x: x[1]['date'])
def collect_data(session, date):
if date < 1206031764: #first push
raise Exception ("No pushes exist before March 20, 2008.")
if date < 1206031764: # first push
raise Exception("No pushes exist before March 20, 2008.")
lastpushid = get_lastpid(session)
data = []
start_id = lastpushid - PUSHLOG_CHUNK_SIZE
@ -52,10 +57,12 @@ def collect_data(session, date):
end_id = start_id + 1
start_id = start_id - PUSHLOG_CHUNK_SIZE
def get_data(epoch):
session = requests.Session()
data = collect_data(session, epoch)
return {k:v for sublist in data for (k,v) in sublist}
return {k: v for sublist in data for (k, v) in sublist}
class Pushlog(object):
@ -78,6 +85,7 @@ class Pushlog(object):
keys.sort()
return keys
class Push(object):
def __init__(self, pid, p_dict):
@ -85,6 +93,7 @@ class Push(object):
self.date = p_dict['date']
self.files = [f for x in p_dict['changesets'] for f in x['files']]
class Report(object):
def __init__(self, days, path=None, cost_dict=None):
@ -112,7 +121,7 @@ class Report(object):
cost = costs.get(f)
count = counts.get(f)
if cost is not None:
res.append((f, cost, count, round(cost*count,3)))
res.append((f, cost, count, round(cost*count, 3)))
return res
def get_sorted_report(self, format):
@ -143,7 +152,8 @@ class Report(object):
res = self.get_sorted_report(format)
if limit is not None:
res = self.cut(limit, res)
for x in res: data.append(x)
for x in res:
data.append(x)
if format == 'pretty':
print (data)
else:
@ -160,4 +170,3 @@ class Report(object):
with open(file_path, 'wb') as f:
f.write(content)
print ("Created report: %s" % file_path)

View File

@ -13,6 +13,7 @@ import time
# build ID use the v1 version scheme.
V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
base = int(str(buildid)[:10])
# None is interpreted as arm.
@ -30,6 +31,7 @@ def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
raise ValueError("Don't know how to compute android:versionCode "
"for CPU arch %s" % cpu_arch)
def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
'''Generate a v1 android:versionCode.
@ -134,6 +136,7 @@ def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
return version
def android_version_code(buildid, *args, **kwargs):
base = int(str(buildid))
if base < V1_CUTOFF:

View File

@ -77,7 +77,8 @@ from mozpack.mozjar import (
from mozpack.packager.unpack import UnpackFinder
import mozpack.path as mozpath
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
# Number of candidate pushheads to cache per parent changeset.
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50
# Number of parent changesets to consider as possible pushheads.
# There isn't really such a thing as a reasonable default here, because we don't
@ -323,7 +324,8 @@ class AndroidArtifactJob(ArtifactJob):
writer.add(basename.encode('utf-8'), f.open())
def process_symbols_archive(self, filename, processed_filename):
ArtifactJob.process_symbols_archive(self, filename, processed_filename, skip_compressed=True)
ArtifactJob.process_symbols_archive(
self, filename, processed_filename, skip_compressed=True)
if self._symbols_archive_suffix != 'crashreporter-symbols-full.zip':
return
@ -693,7 +695,8 @@ class CacheManager(object):
return
ensureParentDir(self._cache_filename)
pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)
pickle.dump(list(reversed(list(self._cache.items()))),
open(self._cache_filename, 'wb'), -1)
def clear_cache(self):
if self._skip_cache:
@ -712,11 +715,13 @@ class CacheManager(object):
def __exit__(self, type, value, traceback):
self.dump_cache()
class PushheadCache(CacheManager):
'''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
CacheManager.__init__(self, cache_dir, 'pushhead_cache',
MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
@cachedmethod(operator.attrgetter('_cache'))
def parent_pushhead_id(self, tree, revision):
@ -743,11 +748,13 @@ class PushheadCache(CacheManager):
p['changesets'][-1] for p in result['pushes'].values()
]
class TaskCache(CacheManager):
'''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
CacheManager.__init__(self, cache_dir, 'artifact_url',
MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
@cachedmethod(operator.attrgetter('_cache'))
def artifacts(self, tree, job, artifact_job_class, rev):
@ -782,7 +789,8 @@ class TaskCache(CacheManager):
except KeyError:
# Not all revisions correspond to pushes that produce the job we
# care about; and even those that do may not have completed yet.
raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
raise ValueError(
'Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
return taskId, list_artifacts(taskId)
@ -825,8 +833,10 @@ class Artifacts(object):
raise KeyError("Unknown job")
self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(
self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._pushhead_cache = PushheadCache(
self._cache_dir, log=self._log, skip_cache=self._skip_cache)
def log(self, *args, **kwargs):
if self._log:
@ -1002,7 +1012,8 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
try:
taskId, artifacts = task_cache.artifacts(tree, job, self._artifact_job.__class__, pushhead)
taskId, artifacts = task_cache.artifacts(
tree, job, self._artifact_job.__class__, pushhead)
except ValueError:
return None
@ -1165,7 +1176,8 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
url = get_artifact_url(taskId, artifact_name)
urls.append(url)
if not urls:
raise ValueError('Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
raise ValueError(
'Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
for url in urls:
if self.install_from_url(url, distdir):
return 1
@ -1194,7 +1206,6 @@ see https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code
return self.install_from_recent(distdir)
def clear_cache(self):
self.log(logging.INFO, 'artifact',
{},

View File

@ -57,6 +57,7 @@ from mozbuild.util import (
mkdir,
)
class XPIDLManager(object):
"""Helps manage XPCOM IDLs in the context of the build system."""
@ -102,6 +103,7 @@ class XPIDLManager(object):
"""
return itertools.chain(*[m.stems() for m in self.modules.itervalues()])
class BinariesCollection(object):
"""Tracks state of binaries produced by the build."""
@ -109,6 +111,7 @@ class BinariesCollection(object):
self.shared_libraries = []
self.programs = []
class CommonBackend(BuildBackend):
"""Holds logic common to all build backends."""
@ -182,7 +185,8 @@ class CommonBackend(BuildBackend):
return False
elif isinstance(obj, Exports):
objdir_files = [f.full_path for path, files in obj.files.walk() for f in files if isinstance(f, ObjDirPath)]
objdir_files = [f.full_path for path, files in obj.files.walk()
for f in files if isinstance(f, ObjDirPath)]
if objdir_files:
self._handle_generated_sources(objdir_files)
return False
@ -204,7 +208,6 @@ class CommonBackend(BuildBackend):
self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % stem)
for stem in self._idl_manager.idl_stems())
for config in self._configs:
self.backend_input_files.add(config.source)
@ -294,7 +297,7 @@ class CommonBackend(BuildBackend):
seen_libs.add(lib)
os_libs.append(lib)
return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs, \
return (objs, sorted(seen_pgo_gen_only_objs), no_pgo_objs,
shared_libs, os_libs, static_libs)
def _make_list_file(self, kind, objdir, objs, name):
@ -332,7 +335,8 @@ class CommonBackend(BuildBackend):
return ref
def _handle_generated_sources(self, files):
self._generated_sources.update(mozpath.relpath(f, self.environment.topobjdir) for f in files)
self._generated_sources.update(mozpath.relpath(
f, self.environment.topobjdir) for f in files)
def _handle_webidl_collection(self, webidls):
@ -411,7 +415,7 @@ class CommonBackend(BuildBackend):
'so it cannot be built in unified mode."\n'
'#undef INITGUID\n'
'#endif')
f.write('\n'.join(includeTemplate % { "cppfile": s } for
f.write('\n'.join(includeTemplate % {"cppfile": s} for
s in source_filenames))
def _write_unified_files(self, unified_source_mapping, output_directory,

View File

@ -152,6 +152,7 @@ class ConfigEnvironment(object):
self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
shell_quote(self.defines[name]).replace('$', '$$'))
for name in sorted(global_defines)])
def serialize(name, obj):
if isinstance(obj, StringTypes):
return obj
@ -224,6 +225,7 @@ class PartialConfigDict(object):
similar for substs), where the value of FOO is delay-loaded until it is
needed.
"""
def __init__(self, config_statusd, typ, environ_override=False):
self._dict = {}
self._datadir = mozpath.join(config_statusd, typ)
@ -338,6 +340,7 @@ class PartialConfigEnvironment(object):
intended to be used instead of the defines structure from config.status so
that scripts can depend directly on its value.
"""
def __init__(self, topobjdir):
config_statusd = mozpath.join(topobjdir, 'config.statusd')
self.substs = PartialConfigDict(config_statusd, 'substs', environ_override=True)

View File

@ -26,6 +26,7 @@ from mozbuild.base import ExecutionSummary
# Open eclipse:
# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
class CppEclipseBackend(CommonBackend):
"""Backend that generates Cpp Eclipse project files.
"""
@ -81,7 +82,8 @@ class CppEclipseBackend(CommonBackend):
# Note that unlike VS, Eclipse' indexer seem to crawl the headers and
# isn't picky about the local includes.
if isinstance(obj, ComputedFlags):
args = self._args_for_dirs.setdefault('tree/' + reldir, {'includes': [], 'defines': []})
args = self._args_for_dirs.setdefault(
'tree/' + reldir, {'includes': [], 'defines': []})
# use the same args for any objdirs we include:
if reldir == 'dom/bindings':
self._args_for_dirs.setdefault('generated-webidl', args)
@ -105,7 +107,8 @@ class CppEclipseBackend(CommonBackend):
def consume_finished(self):
settings_dir = os.path.join(self._project_dir, '.settings')
launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
workspace_settings_dir = os.path.join(
self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, self._workspace_lang_dir]:
try:
@ -129,22 +132,25 @@ class CppEclipseBackend(CommonBackend):
workspace_language_path = os.path.join(self._workspace_lang_dir, 'language.settings.xml')
with open(workspace_language_path, 'wb') as fh:
workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
workspace_lang_settings = workspace_lang_settings.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
fh.write(workspace_lang_settings)
self._write_launch_files(launch_dir)
core_resources_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.resources.prefs')
core_resources_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.resources.prefs')
with open(core_resources_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RESOURCES_PREFS);
fh.write(STATIC_CORE_RESOURCES_PREFS)
core_runtime_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
core_runtime_prefs_path = os.path.join(
workspace_settings_dir, 'org.eclipse.core.runtime.prefs')
with open(core_runtime_prefs_path, 'wb') as fh:
fh.write(STATIC_CORE_RUNTIME_PREFS);
fh.write(STATIC_CORE_RUNTIME_PREFS)
ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.ui.prefs')
with open(ui_prefs_path, 'wb') as fh:
fh.write(STATIC_UI_PREFS);
fh.write(STATIC_UI_PREFS)
cdt_ui_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.ui.prefs')
cdt_ui_prefs = STATIC_CDT_UI_PREFS
@ -155,10 +161,11 @@ class CppEclipseBackend(CommonBackend):
XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
for line in FORMATTER_SETTINGS.splitlines():
[pref, val] = line.split("=")
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@",
pref).replace("@PREF_VAL@", val)
cdt_ui_prefs += "</profile>\\n</profiles>\\n"
with open(cdt_ui_prefs_path, 'wb') as fh:
fh.write(cdt_ui_prefs);
fh.write(cdt_ui_prefs)
cdt_core_prefs_path = os.path.join(workspace_settings_dir, 'org.eclipse.cdt.core.prefs')
with open(cdt_core_prefs_path, 'wb') as fh:
@ -168,11 +175,11 @@ class CppEclipseBackend(CommonBackend):
# as the active formatter all its prefs are set in this prefs file,
# so we need add those now:
cdt_core_prefs += FORMATTER_SETTINGS
fh.write(cdt_core_prefs);
fh.write(cdt_core_prefs)
editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs");
editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
with open(editor_prefs_path, 'wb') as fh:
fh.write(EDITOR_SETTINGS);
fh.write(EDITOR_SETTINGS)
# Now import the project into the workspace
self._import_project()
@ -208,7 +215,7 @@ class CppEclipseBackend(CommonBackend):
def _write_noindex(self):
noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
with open(noindex_path, 'wb') as fh:
fh.write(NOINDEX_TEMPLATE);
fh.write(NOINDEX_TEMPLATE)
def _remove_noindex(self):
# Below we remove the config file that temporarily disabled the indexer
@ -257,7 +264,8 @@ class CppEclipseBackend(CommonBackend):
dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER
# Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template = dirsettings_template.replace('@PREINCLUDE_FILE_PATH@', os.path.join(
self.environment.topobjdir, 'dist/include/mozilla-config.h'))
dirsettings_template += add_define('MOZILLA_CLIENT', '1')
# Add EXTRA_INCLUDES args:
@ -314,7 +322,8 @@ class CppEclipseBackend(CommonBackend):
dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
fh.write(dirsettings)
fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
fh.write(LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
"@COMPILER_FLAGS@", self._cxx + " " + self._cppflags))
def _write_launch_files(self, launch_dir):
bin_dir = os.path.join(self.environment.topobjdir, 'dist')
@ -334,21 +343,25 @@ class CppEclipseBackend(CommonBackend):
launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
fh.write(launch)
#TODO Add more launch configs (and delegate calls to mach)
# TODO Add more launch configs (and delegate calls to mach)
def _write_project(self, fh):
project = PROJECT_TEMPLATE;
project = PROJECT_TEMPLATE
project = project.replace('@PROJECT_NAME@', self._project_name)
project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(self.environment.topobjdir, "dom", "bindings"))
project = project.replace('@GENERATED_IPDL_FILES@', os.path.join(
self.environment.topobjdir, "ipc", "ipdl"))
project = project.replace('@GENERATED_WEBIDL_FILES@', os.path.join(
self.environment.topobjdir, "dom", "bindings"))
fh.write(project)
def _write_cproject(self, fh):
cproject_header = CPROJECT_TEMPLATE_HEADER
cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
cproject_header = cproject_header.replace(
'@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
cproject_header = cproject_header.replace(
'@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
fh.write(cproject_header)
fh.write(CPROJECT_TEMPLATE_FOOTER)
@ -615,21 +628,21 @@ undoHistorySize=200
"""
STATIC_CORE_RESOURCES_PREFS="""eclipse.preferences.version=1
STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1
refresh.enabled=true
"""
STATIC_CORE_RUNTIME_PREFS="""eclipse.preferences.version=1
STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1
content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm
content-types/org.eclipse.core.runtime.xml/file-extensions=xul
content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm
"""
STATIC_UI_PREFS="""eclipse.preferences.version=1
STATIC_UI_PREFS = """eclipse.preferences.version=1
showIntro=false
"""
STATIC_CDT_CORE_PREFS="""eclipse.preferences.version=1
STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1
indexer.updatePolicy=0
"""
@ -797,7 +810,7 @@ org.eclipse.cdt.core.formatter.tabulation.size=2
org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
"""
STATIC_CDT_UI_PREFS="""eclipse.preferences.version=1
STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1
buildConsoleLines=10000
Console.limitConsoleOutput=false
ensureNewlineAtEOF=false

View File

@ -134,10 +134,12 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
elif isinstance(obj, GeneratedFile):
if obj.outputs:
first_output = mozpath.relpath(mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir)
first_output = mozpath.relpath(mozpath.join(
obj.objdir, obj.outputs[0]), self.environment.topobjdir)
for o in obj.outputs[1:]:
fullpath = mozpath.join(obj.objdir, o)
self._generated_files_map[mozpath.relpath(fullpath, self.environment.topobjdir)] = first_output
self._generated_files_map[mozpath.relpath(
fullpath, self.environment.topobjdir)] = first_output
# We don't actually handle GeneratedFiles, we just need to know if
# we can build multiple of them from a single make invocation in the
# faster backend.
@ -194,7 +196,6 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
mk.create_rule([target]).add_dependencies(
'$(TOPOBJDIR)/%s' % d for d in deps)
# This is not great, but it's better to have some dependencies on these Python files.
python_deps = [
'$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py',
@ -208,7 +209,8 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
for (merge, ref_file, l10n_file) in deps:
rule = mk.create_rule([merge]).add_dependencies(
[ref_file, l10n_file] + python_deps)
rule.add_commands(['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
rule.add_commands(
['$(PYTHON) -m mozbuild.action.l10n_merge --output {} --ref-file {} --l10n-file {}'.format(merge, ref_file, l10n_file)])
# Add a dummy rule for the l10n file since it might not exist.
mk.create_rule([l10n_file])

View File

@ -20,6 +20,7 @@ from mach.decorators import (
Command,
)
@CommandProvider
class MachCommands(MachCommandBase):
@Command('ide', category='devenv',

View File

@ -126,7 +126,7 @@ MOZBUILD_VARIABLES = [
b'TEST_DIRS',
b'TOOL_DIRS',
# XXX config/Makefile.in specifies this in a make invocation
#'USE_EXTENSION_MANIFEST',
# 'USE_EXTENSION_MANIFEST',
b'XPCSHELL_TESTS',
b'XPIDL_MODULE',
]
@ -248,6 +248,7 @@ class RecursiveMakeTraversal(object):
"""
SubDirectoryCategories = ['dirs', 'tests']
SubDirectoriesTuple = namedtuple('SubDirectories', SubDirectoryCategories)
class SubDirectories(SubDirectoriesTuple):
def __new__(self):
return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], [])
@ -784,7 +785,7 @@ class RecursiveMakeBackend(CommonBackend):
main, all_deps = \
self._traversal.compute_dependencies(filter)
for dir, deps in all_deps.items():
if deps is not None or (dir in self._idl_dirs \
if deps is not None or (dir in self._idl_dirs
and tier == 'export'):
rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
if deps:
@ -795,7 +796,7 @@ class RecursiveMakeBackend(CommonBackend):
if main:
rule.add_dependencies('%s/%s' % (d, tier) for d in main)
all_compile_deps = reduce(lambda x,y: x|y,
all_compile_deps = reduce(lambda x, y: x | y,
self._compile_graph.values()) if self._compile_graph else set()
# Include the following as dependencies of the top recursion target for
# compilation:
@ -1094,7 +1095,8 @@ class RecursiveMakeBackend(CommonBackend):
if obj.target and not obj.is_custom():
backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
else:
backend_file.write('FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
backend_file.write(
'FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
if not obj.enabled:
backend_file.write('NO_DIST_INSTALL := 1\n')
@ -1295,7 +1297,8 @@ class RecursiveMakeBackend(CommonBackend):
def _process_per_source_flag(self, per_source_flag, backend_file):
for flag in per_source_flag.flags:
backend_file.write('%s_FLAGS += %s\n' % (mozpath.basename(per_source_flag.file_name), flag))
backend_file.write('%s_FLAGS += %s\n' %
(mozpath.basename(per_source_flag.file_name), flag))
def _process_computed_flags(self, computed_flags, backend_file):
for var, flags in computed_flags.get_flags():
@ -1688,7 +1691,7 @@ class RecursiveMakeBackend(CommonBackend):
pp.context.update(extra)
if not pp.context.get('autoconfmk', ''):
pp.context['autoconfmk'] = 'autoconf.mk'
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n')
pp.handleLine(b'DEPTH := @DEPTH@\n')
pp.handleLine(b'topobjdir := @topobjdir@\n')
pp.handleLine(b'topsrcdir := @top_srcdir@\n')

View File

@ -475,7 +475,6 @@ class TupBackend(CommonBackend):
# accurate once we start building libraries in their final locations.
inputs = objs + static_libs + shared_libs + [self._shlibs]
rust_linked = [l for l in prog.linked_libraries
if isinstance(l, RustLibrary)]
@ -513,12 +512,10 @@ class TupBackend(CommonBackend):
display='LINK %o'
)
def _gen_host_programs(self, backend_file):
for p in backend_file.host_programs:
self._gen_host_program(backend_file, p)
def _gen_host_program(self, backend_file, prog):
_, _, _, _, extra_libs, _ = self._expand_libs(prog)
objs = prog.objs
@ -559,7 +556,6 @@ class TupBackend(CommonBackend):
display='LINK %o'
)
def _gen_static_library(self, backend_file):
ar = [
backend_file.environment.substs['AR'],
@ -584,7 +580,6 @@ class TupBackend(CommonBackend):
display='AR %o'
)
def consume_object(self, obj):
"""Write out build files necessary to build with tup."""
@ -734,7 +729,8 @@ class TupBackend(CommonBackend):
"should contain the fewest files possible that are not "
"necessary for this build." % tup_base_dir)
tup = self.environment.substs.get('TUP', 'tup')
self._cmd.run_process(cwd=tup_base_dir, log_name='tup', args=[tup, 'init', '--no-sync'])
self._cmd.run_process(cwd=tup_base_dir, log_name='tup',
args=[tup, 'init', '--no-sync'])
def _get_cargo_flags(self, obj):
@ -984,16 +980,15 @@ class TupBackend(CommonBackend):
obj.name),
output_group)
for val in enumerate(invocations):
_process(*val)
def _gen_rust_rules(self, obj, backend_file):
cargo_flags = self._get_cargo_flags(obj)
cargo_env = self._get_cargo_env(obj, backend_file)
output_lines = []
def accumulate_output(line):
output_lines.append(line)
@ -1014,7 +1009,6 @@ class TupBackend(CommonBackend):
self._gen_cargo_rules(obj, cargo_plan, cargo_env, output_group)
self.backend_input_files |= set(cargo_plan['inputs'])
def _process_generated_file(self, backend_file, obj):
if obj.script and obj.method:
backend_file.export_shell()
@ -1163,7 +1157,8 @@ class TupBackend(CommonBackend):
output=mozpath.join(output_dir, output),
output_group=output_group)
else:
backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group)
backend_file.symlink_rule(
f.full_path, output=f.target_basename, output_group=output_group)
else:
if (self.environment.is_artifact_build and
any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
@ -1175,18 +1170,19 @@ class TupBackend(CommonBackend):
f.target_basename)
gen_backend_file = self._get_backend_file(f.context.relobjdir)
if gen_backend_file.requires_delay([f]):
gen_backend_file.delayed_installed_files.append((f.full_path, output, output_group))
gen_backend_file.delayed_installed_files.append(
(f.full_path, output, output_group))
else:
gen_backend_file.symlink_rule(f.full_path, output=output,
output_group=output_group)
def _process_final_target_pp_files(self, obj, backend_file):
for i, (path, files) in enumerate(obj.files.walk()):
self._add_features(obj.install_target, path)
for f in files:
self._preprocess(backend_file, f.full_path,
destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path),
destdir=mozpath.join(self.environment.topobjdir,
obj.install_target, path),
target=f.target_basename)
def _process_computed_flags(self, obj, backend_file):
@ -1315,7 +1311,8 @@ class TupBackend(CommonBackend):
cmd.extend(['-I%s' % d for d in ipdldirs])
cmd.extend(sorted_ipdl_sources)
outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
outputs = ['IPCMessageTypeName.cpp', mozpath.join(
outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py']
for filename in sorted_ipdl_sources:
filepath, ext = os.path.splitext(filename)
@ -1379,4 +1376,5 @@ class TupBackend(CommonBackend):
backend_file.sources['.cpp'].extend(sorted(global_define_files))
test_backend_file = self._get_backend_file('dom/bindings/test')
test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
test_backend_file.sources['.cpp'].extend(
sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))

View File

@ -34,21 +34,25 @@ from mozbuild.base import ExecutionSummary
MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
def get_id(name):
return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
def visual_studio_product_to_solution_version(version):
if version == '2017':
return '12.00', '15'
else:
raise Exception('Unknown version seen: %s' % version)
def visual_studio_product_to_platform_toolset_version(version):
if version == '2017':
return 'v141'
else:
raise Exception('Unknown version seen: %s' % version)
class VisualStudioBackend(CommonBackend):
"""Generate Visual Studio project files.
@ -100,7 +104,7 @@ class VisualStudioBackend(CommonBackend):
elif isinstance(obj, UnifiedSources):
# XXX we should be letting CommonBackend.consume_object call this
# for us instead.
self._process_unified_sources(obj);
self._process_unified_sources(obj)
elif isinstance(obj, Library):
self._libs_to_paths[obj.basename] = reldir
@ -223,7 +227,7 @@ class VisualStudioBackend(CommonBackend):
else:
defines.append('%s=%s' % (k, v))
debugger=None
debugger = None
if prefix == 'binary':
if item.startswith(self.environment.substs['MOZ_APP_NAME']):
app_args = '-no-remote -profile $(TopObjDir)\\tmp\\profile-default'
@ -237,7 +241,8 @@ class VisualStudioBackend(CommonBackend):
project_id = self._write_vs_project(out_dir, basename, item,
includes=includes,
forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
forced_includes=[
'$(TopObjDir)\\dist\\include\\mozilla-config.h'],
defines=defines,
headers=headers,
sources=sources,
@ -510,7 +515,8 @@ class VisualStudioBackend(CommonBackend):
rn.appendChild(doc.createTextNode('mozilla'))
pts = pg.appendChild(doc.createElement('PlatformToolset'))
pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
pts.appendChild(doc.createTextNode(
visual_studio_product_to_platform_toolset_version(version)))
i = project.appendChild(doc.createElement('Import'))
i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')

View File

@ -53,12 +53,14 @@ def ancestors(path):
break
path = newpath
def samepath(path1, path2):
if hasattr(os.path, 'samefile'):
return os.path.samefile(path1, path2)
return os.path.normcase(os.path.realpath(path1)) == \
os.path.normcase(os.path.realpath(path2))
class BadEnvironmentException(Exception):
"""Base class for errors raised when the build environment is not sane."""
@ -69,6 +71,7 @@ class BuildEnvironmentNotFoundException(BadEnvironmentException):
class ObjdirMismatchException(BadEnvironmentException):
"""Raised when the current dir is an objdir and doesn't match the mozconfig."""
def __init__(self, objdir1, objdir2):
self.objdir1 = objdir1
self.objdir2 = objdir2
@ -85,6 +88,7 @@ class MozbuildObject(ProcessExecutionMixin):
running processes, etc. This classes provides that functionality. Other
modules can inherit from this class to obtain this functionality easily.
"""
def __init__(self, topsrcdir, settings, log_manager, topobjdir=None,
mozconfig=MozconfigLoader.AUTODETECT):
"""Create a new Mozbuild object instance.
@ -253,7 +257,8 @@ class MozbuildObject(ProcessExecutionMixin):
def virtualenv_manager(self):
if self._virtualenv_manager is None:
self._virtualenv_manager = VirtualenvManager(self.topsrcdir,
self.topobjdir, os.path.join(self.topobjdir, '_virtualenvs', 'init'),
self.topobjdir, os.path.join(
self.topobjdir, '_virtualenvs', 'init'),
sys.stdout, os.path.join(self.topsrcdir, 'build',
'virtualenv_packages.txt'))
@ -490,7 +495,6 @@ class MozbuildObject(ProcessExecutionMixin):
return BuildReader(config, finder=finder)
@memoized_property
def python3(self):
"""Obtain info about a Python 3 executable.
@ -542,7 +546,7 @@ class MozbuildObject(ProcessExecutionMixin):
if substs['OS_ARCH'] == 'Darwin':
if substs['MOZ_BUILD_APP'] == 'xulrunner':
stem = os.path.join(stem, 'XUL.framework');
stem = os.path.join(stem, 'XUL.framework')
else:
stem = os.path.join(stem, substs['MOZ_MACBUNDLE_NAME'], 'Contents',
'MacOS')
@ -585,6 +589,7 @@ class MozbuildObject(ProcessExecutionMixin):
elif sys.platform.startswith('win'):
from ctypes import Structure, windll, POINTER, sizeof
from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT
class FLASHWINDOW(Structure):
_fields_ = [("cbSize", UINT),
("hwnd", HANDLE),
@ -826,7 +831,6 @@ class MozbuildObject(ProcessExecutionMixin):
self.virtualenv_manager.ensure()
self.virtualenv_manager.activate()
def _set_log_level(self, verbose):
self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
@ -835,7 +839,8 @@ class MozbuildObject(ProcessExecutionMixin):
pipenv = os.path.join(self.virtualenv_manager.bin_path, 'pipenv')
if not os.path.exists(pipenv):
for package in ['certifi', 'pipenv', 'six', 'virtualenv', 'virtualenv-clone']:
path = os.path.normpath(os.path.join(self.topsrcdir, 'third_party/python', package))
path = os.path.normpath(os.path.join(
self.topsrcdir, 'third_party/python', package))
self.virtualenv_manager.install_pip_package(path, vendored=True)
return pipenv

View File

@ -54,4 +54,3 @@ def chunkify(things, this_chunk, chunks):
return things[start:end]
except TypeError:
return islice(things, start, end)

View File

@ -26,6 +26,8 @@ from manifest_handler import ChromeManifestHandler
_line_comment_re = re.compile('^//@line (\d+) "(.+)"$')
def generate_pp_info(path, topsrcdir):
with open(path) as fh:
# (start, end) -> (included_source, start)
@ -57,6 +59,8 @@ def generate_pp_info(path, topsrcdir):
# This build backend is assuming the build to have happened already, as it is parsing
# built preprocessed files to generate data to map them to the original sources.
class ChromeMapBackend(CommonBackend):
def _init(self):
CommonBackend._init(self)

View File

@ -17,6 +17,7 @@ from mozpack.chrome.manifest import parse_manifest
import mozpack.path as mozpath
from manifest_handler import ChromeManifestHandler
class LcovRecord(object):
__slots__ = ("test_name",
"source_file",
@ -30,6 +31,7 @@ class LcovRecord(object):
"lines",
"line_count",
"covered_line_count")
def __init__(self):
self.functions = {}
self.function_exec_counts = {}
@ -72,6 +74,7 @@ class LcovRecord(object):
self.branch_count = len(self.branches)
self.covered_branch_count = len([c for c in self.branches.values() if c])
class RecordRewriter(object):
# Helper class for rewriting/spliting individual lcov records according
# to what the preprocessor did.
@ -164,7 +167,8 @@ class RecordRewriter(object):
def rewrite_record(self, record, pp_info):
# Rewrite the lines in the given record according to preprocessor info
# and split to additional records when pp_info has included file info.
self._current_pp_info = dict([(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._current_pp_info = dict(
[(tuple([int(l) for l in k.split(',')]), v) for k, v in pp_info.items()])
self._ranges = sorted(self._current_pp_info.keys())
self._additions = {}
self._rewrite_lines(record)
@ -178,6 +182,7 @@ class RecordRewriter(object):
r.resummarize()
return generated_records
class LcovFile(object):
# Simple parser/pretty-printer for lcov format.
# lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php
@ -404,6 +409,7 @@ class LcovFile(object):
class UrlFinderError(Exception):
pass
class UrlFinder(object):
# Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend
# and install manifests to find a path to the source file and the corresponding
@ -580,7 +586,8 @@ class UrlFinder(object):
return url_obj.path, None
dir_parts = parts[0].rsplit(app_name + '/', 1)
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
'bin', dir_parts[1].lstrip('/'), parts[1].lstrip('/')))
elif '.xpi!' in url:
# This matching mechanism is quite brittle and based on examples seen in the wild.
# There's no rule to match the XPI name to the path in dist/xpi-stage.
@ -590,7 +597,8 @@ class UrlFinder(object):
addon_name = addon_name[:-len('-test@mozilla.org')]
elif addon_name.endswith('@mozilla.org'):
addon_name = addon_name[:-len('@mozilla.org')]
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist', 'xpi-stage', addon_name, parts[1].lstrip('/')))
url = mozpath.normpath(mozpath.join(self.topobjdir, 'dist',
'xpi-stage', addon_name, parts[1].lstrip('/')))
elif url_obj.scheme == 'file' and os.path.isabs(url_obj.path):
path = url_obj.path
if not os.path.isfile(path):
@ -607,6 +615,7 @@ class UrlFinder(object):
self._final_mapping[url] = result
return result
class LcovFileRewriter(object):
# Class for partial parses of LCOV format and rewriting to resolve urls
# and preprocessed file lines.
@ -694,5 +703,6 @@ def main():
rewriter.rewrite_files(files, args.output_file, args.output_suffix)
if __name__ == '__main__':
main()

View File

@ -17,6 +17,7 @@ from mozpack.manifests import (
)
import mozpack.path as mozpath
def describe_install_manifest(manifest, dest_dir):
try:
manifest = InstallManifest(manifest)
@ -75,5 +76,6 @@ def cli(args=sys.argv[1:]):
return package_coverage_data(args.root, args.output_file)
if __name__ == '__main__':
sys.exit(cli())

View File

@ -5,6 +5,7 @@
import os
from mozbuild import shellutil
def check_top_objdir(topobjdir):
top_make = os.path.join(topobjdir, 'Makefile')
if not os.path.exists(top_make):
@ -13,6 +14,7 @@ def check_top_objdir(topobjdir):
return False
return True
def get_build_vars(directory, cmd):
build_vars = {}
@ -34,6 +36,7 @@ def get_build_vars(directory, cmd):
return build_vars
def sanitize_cflags(flags):
# We filter out -Xclang arguments as clang based tools typically choke on
# passing these flags down to the clang driver. -Xclang tells the clang

View File

@ -87,22 +87,22 @@ class CompilerWarning(dict):
return func(self._cmpkey(), other._cmpkey())
def __eq__(self, other):
return self._compare(other, lambda s,o: s == o)
return self._compare(other, lambda s, o: s == o)
def __neq__(self, other):
return self._compare(other, lambda s,o: s != o)
return self._compare(other, lambda s, o: s != o)
def __lt__(self, other):
return self._compare(other, lambda s,o: s < o)
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s,o: s <= o)
return self._compare(other, lambda s, o: s <= o)
def __gt__(self, other):
return self._compare(other, lambda s,o: s > o)
return self._compare(other, lambda s, o: s > o)
def __ge__(self, other):
return self._compare(other, lambda s,o: s >= o)
return self._compare(other, lambda s, o: s >= o)
def __hash__(self):
"""Define so this can exist inside a set, etc."""
@ -132,6 +132,7 @@ class WarningsDatabase(object):
Callers should periodically prune old, invalid warnings from the database
by calling prune(). A good time to do this is at the end of a build.
"""
def __init__(self):
"""Create an empty database."""
self._files = {}
@ -304,6 +305,7 @@ class WarningsCollector(object):
output from the compiler. Therefore, it can maintain state to parse
multi-line warning messages.
"""
def __init__(self, cb, objdir=None):
"""Initialize a new collector.

View File

@ -49,6 +49,7 @@ class ConfigureError(Exception):
class SandboxDependsFunction(object):
'''Sandbox-visible representation of @depends functions.'''
def __init__(self, unsandboxed):
self._or = unsandboxed.__or__
self._and = unsandboxed.__and__
@ -233,6 +234,7 @@ class CombinedDependsFunction(DependsFunction):
def __ne__(self, other):
return not self == other
class SandboxedGlobal(dict):
'''Identifiable dict type for use as function global'''
@ -357,10 +359,12 @@ class ConfigureSandbox(dict):
# that can't be converted to ascii. Make our log methods robust to this
# by detecting the encoding that a producer is likely to have used.
encoding = getpreferredencoding()
def wrapped_log_method(logger, key):
method = getattr(logger, key)
if not encoding:
return method
def wrapped(*args, **kwargs):
out_args = [
arg.decode(encoding) if isinstance(arg, str) else arg

View File

@ -12,6 +12,7 @@ import subprocess
import sys
import re
def get_range_for(compilation_unit, debug_info):
'''Returns the range offset for a given compilation unit
in a given debug_info.'''
@ -32,6 +33,7 @@ def get_range_for(compilation_unit, debug_info):
ranges = nfo.rsplit(None, 1)[1]
return None
def get_range_length(range, debug_ranges):
'''Returns the number of items in the range starting at the
given offset.'''
@ -42,8 +44,9 @@ def get_range_length(range, debug_ranges):
length += 1
return length
def main(bin, compilation_unit):
p = subprocess.Popen(['objdump', '-W', bin], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
sections = re.split('\n(Contents of the|The section) ', out)
debug_info = [s for s in sections if s.startswith('.debug_info')]

View File

@ -21,12 +21,14 @@ import re
re_for_ld = re.compile('.*\((.*)\).*')
def parse_readelf_line(x):
"""Return the version from a readelf line that looks like:
0x00ec: Rev: 1 Flags: none Index: 8 Cnt: 2 Name: GLIBCXX_3.4.6
"""
return x.split(':')[-1].split('_')[-1].strip()
def parse_ld_line(x):
"""Parse a line from the output of ld -t. The output of gold is just
the full path, gnu ld prints "-lstdc++ (path)".
@ -36,11 +38,13 @@ def parse_ld_line(x):
return t.groups()[0].strip()
return x.strip()
def split_ver(v):
"""Covert the string '1.2.3' into the list [1,2,3]
"""
return [int(x) for x in v.split('.')]
def cmp_ver(a, b):
"""Compare versions in the form 'a.b.c'
"""
@ -49,12 +53,14 @@ def cmp_ver(a, b):
return i - j
return 0
def encode_ver(v):
"""Encode the version as a single number.
"""
t = split_ver(v)
return t[0] << 16 | t[1] << 8 | t[2]
def find_version(args):
"""Given a base command line for a compiler, find the version of the
libstdc++ it uses.
@ -77,9 +83,10 @@ candidates:
p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)
versions = [parse_readelf_line(x)
for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]
last_version = sorted(versions, cmp = cmp_ver)[-1]
last_version = sorted(versions, cmp=cmp_ver)[-1]
return (last_version, encode_ver(last_version))
if __name__ == '__main__':
"""Given the value of environment variable CXX or HOST_CXX, find the
version of the libstdc++ it uses.

View File

@ -237,7 +237,6 @@ class LintSandbox(ConfigureSandbox):
name, default))
self._raise_from(e, frame.f_back if frame else None)
def _check_help_for_option_with_func_default(self, option, *args, **kwargs):
default = kwargs['default']
@ -285,6 +284,7 @@ class LintSandbox(ConfigureSandbox):
def imports_impl(self, _import, _from=None, _as=None):
wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as)
def decorator(func):
self._has_imports.add(func)
return wrapper(func)

View File

@ -46,7 +46,7 @@ def disassemble_as_iter(co):
c = code[i]
op = ord(c)
opname = dis.opname[op]
i += 1;
i += 1
if op >= dis.HAVE_ARGUMENT:
arg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
extended_arg = 0

View File

@ -106,6 +106,7 @@ class PositiveOptionValue(OptionValue):
in the form of a tuple for when values are given to the option (in the form
--option=value[,value2...].
'''
def __nonzero__(self):
return True
@ -424,6 +425,7 @@ class CommandLineHelper(object):
Extra options can be added afterwards through API calls. For those,
conflicting values will raise an exception.
'''
def __init__(self, environ=os.environ, argv=sys.argv):
self._environ = dict(environ)
self._args = OrderedDict()

View File

@ -14,6 +14,7 @@ from collections import deque
from contextlib import contextmanager
from distutils.version import LooseVersion
def getpreferredencoding():
# locale._parse_localename makes locale.getpreferredencoding
# return None when LC_ALL is C, instead of e.g. 'US-ASCII' or
@ -29,6 +30,7 @@ def getpreferredencoding():
encoding = 'utf-8'
return encoding
class Version(LooseVersion):
'''A simple subclass of distutils.version.LooseVersion.
Adds attributes for `major`, `minor`, `patch` for the first three
@ -40,13 +42,14 @@ class Version(LooseVersion):
v.minor == 2
v.patch == 0
'''
def __init__(self, version):
# Can't use super, LooseVersion's base class is not a new-style class.
LooseVersion.__init__(self, version)
# Take the first three integer components, stopping at the first
# non-integer and padding the rest with zeroes.
(self.major, self.minor, self.patch) = list(itertools.chain(
itertools.takewhile(lambda x:isinstance(x, int), self.version),
itertools.takewhile(lambda x: isinstance(x, int), self.version),
(0, 0, 0)))[:3]
def __cmp__(self, other):
@ -71,6 +74,7 @@ class ConfigureOutputHandler(logging.Handler):
printed out. This feature is only enabled under the `queue_debug` context
manager.
'''
def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
super(ConfigureOutputHandler, self).__init__()
@ -193,6 +197,7 @@ class LineIO(object):
'''File-like class that sends each line of the written data to a callback
(without carriage returns).
'''
def __init__(self, callback, errors='strict'):
self._callback = callback
self._buf = ''

View File

@ -79,7 +79,7 @@ Preferences.
INSTALL_TESTS_CLOBBER = ''.join([TextWrapper().fill(line) + '\n' for line in
'''
'''
The build system was unable to install tests because the CLOBBER file has \
been updated. This means if you edited any test files, your changes may not \
be picked up until a full/clobber build is performed.
@ -478,7 +478,6 @@ class BuildMonitor(MozbuildObject):
o['resources'].append(entry)
# If the imports for this file ran before the in-tree virtualenv
# was bootstrapped (for instance, for a clobber build in automation),
# psutil might not be available.
@ -548,6 +547,7 @@ class TerminalLoggingHandler(logging.Handler):
This class should probably live elsewhere, like the mach core. Consider
this a proving ground for its usefulness.
"""
def __init__(self):
logging.Handler.__init__(self)
@ -683,7 +683,6 @@ class BuildOutputManager(OutputManager):
# collection child process hasn't been told to stop.
self.monitor.stop_resource_recording()
def on_line(self, line):
warning, state_changed, message = self.monitor.on_line(line)
@ -1009,7 +1008,7 @@ class BuildDriver(MozbuildObject):
return 1
if directory is not None:
disable_extra_make_dependencies=True
disable_extra_make_dependencies = True
directory = mozpath.normsep(directory)
if directory.startswith('/'):
directory = directory[1:]
@ -1148,7 +1147,8 @@ class BuildDriver(MozbuildObject):
status = self._run_make(directory=make_dir, target=make_target,
line_handler=output.on_line, log=False, print_directory=False,
ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
append_env={b'NO_BUILDSTATUS_MESSAGES': no_build_status},
append_env={
b'NO_BUILDSTATUS_MESSAGES': no_build_status},
keep_going=keep_going)
if status != 0:
@ -1284,7 +1284,8 @@ class BuildDriver(MozbuildObject):
long_build = monitor.elapsed > 600
if long_build:
output.on_line('We know it took a while, but your build finally finished successfully!')
output.on_line(
'We know it took a while, but your build finally finished successfully!')
else:
output.on_line('Your build was successful!')
@ -1332,7 +1333,7 @@ class BuildDriver(MozbuildObject):
if not status:
print('Configure complete!')
print('Be sure to run |mach build| to pick up any changes');
print('Be sure to run |mach build| to pick up any changes')
return status

View File

@ -16,7 +16,7 @@ from textwrap import TextWrapper
CLOBBER_MESSAGE = ''.join([TextWrapper().fill(line) + '\n' for line in
'''
'''
The CLOBBER file has been updated, indicating that an incremental build since \
your last build will probably not work. A full/clobber build is required.
@ -39,6 +39,7 @@ Well, are ya? -- you can ignore this clobber requirement by running:
$ touch {clobber_file}
'''.splitlines()])
class Clobberer(object):
def __init__(self, topsrcdir, topobjdir):
"""Create a new object to manage clobbering the tree.

View File

@ -34,6 +34,7 @@ hour. Backup programs that rely on this feature may be affected.
https://technet.microsoft.com/en-us/library/cc785435.aspx
'''
class Doctor(object):
def __init__(self, srcdir, objdir, fix):
self.srcdir = mozpath.normpath(srcdir)

View File

@ -16,6 +16,7 @@ if sys.version_info[0] == 3:
else:
str_type = basestring
class DotProperties:
r'''A thin representation of a key=value .properties file.'''

View File

@ -89,6 +89,7 @@ class Context(KeyedDefaultDict):
config is the ConfigEnvironment for this context.
"""
def __init__(self, allowed_variables={}, config=None, finder=None):
self._allowed_variables = allowed_variables
self.main_path = None
@ -269,6 +270,7 @@ class SubContext(Context, ContextDerivedValue):
Sub-contexts inherit paths and other relevant state from the parent
context.
"""
def __init__(self, parent):
assert isinstance(parent, Context)
@ -570,6 +572,7 @@ class PathMeta(type):
cls = SourcePath
return super(PathMeta, cls).__call__(context, value)
class Path(ContextDerivedValue, unicode):
"""Stores and resolves a source path relative to a given context
@ -636,6 +639,7 @@ class Path(ContextDerivedValue, unicode):
class SourcePath(Path):
"""Like Path, but limited to paths in the source directory."""
def __init__(self, context, value):
if value.startswith('!'):
raise ValueError('Object directory paths are not allowed')
@ -676,6 +680,7 @@ class RenamedSourcePath(SourcePath):
This class is not meant to be exposed to moz.build sandboxes as of now,
and is not supported by the RecursiveMake backend.
"""
def __init__(self, context, value):
assert isinstance(value, tuple)
source, self._target_basename = value
@ -688,13 +693,14 @@ class RenamedSourcePath(SourcePath):
class ObjDirPath(Path):
"""Like Path, but limited to paths in the object directory."""
def __init__(self, context, value=None):
if not value.startswith('!'):
raise ValueError('Object directory paths must start with ! prefix')
super(ObjDirPath, self).__init__(context, value)
if value.startswith('!/'):
path = mozpath.join(context.config.topobjdir,value[2:])
path = mozpath.join(context.config.topobjdir, value[2:])
else:
path = mozpath.join(context.objdir, value[1:])
self.full_path = mozpath.normpath(path)
@ -702,6 +708,7 @@ class ObjDirPath(Path):
class AbsolutePath(Path):
"""Like Path, but allows arbitrary paths outside the source and object directories."""
def __init__(self, context, value=None):
if not value.startswith('%'):
raise ValueError('Absolute paths must start with % prefix')
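Taken together, these Path subclasses encode a small prefix grammar: `!` marks object-directory paths, `%` absolute paths, and everything else is source-relative. A hedged sketch of the objdir rule (directory names are hypothetical; the real code uses mozpack.path):

    import posixpath

    TOPOBJDIR = '/obj'           # hypothetical
    CURRENT_OBJDIR = '/obj/foo'  # hypothetical

    def resolve_objdir_path(value):
        # Mirrors ObjDirPath.__init__ above: '!/x' resolves against the
        # topobjdir, '!x' against the current objdir.
        assert value.startswith('!')
        if value.startswith('!/'):
            return posixpath.normpath(posixpath.join(TOPOBJDIR, value[2:]))
        return posixpath.normpath(posixpath.join(CURRENT_OBJDIR, value[1:]))

    assert resolve_objdir_path('!/dist/bin') == '/obj/dist/bin'
    assert resolve_objdir_path('!gen.h') == '/obj/foo/gen.h'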
@ -717,6 +724,7 @@ def ContextDerivedTypedList(klass, base_class=List):
"""Specialized TypedList for use with ContextDerivedValue types.
"""
assert issubclass(klass, ContextDerivedValue)
class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
def __init__(self, context, iterable=[], **kwargs):
self.context = context
@ -729,6 +737,7 @@ def ContextDerivedTypedList(klass, base_class=List):
return _TypedList
@memoize
def ContextDerivedTypedListWithItems(type, base_class=List):
"""Specialized TypedList for use with ContextDerivedValue types.
@ -862,6 +871,7 @@ def ContextDerivedTypedHierarchicalStringList(type):
return _TypedListWithItems
def OrderedPathListWithAction(action):
"""Returns a class which behaves as a StrictOrderingOnAppendList, but
invokes the given callable with each input and a context as it is
@ -879,6 +889,7 @@ def OrderedPathListWithAction(action):
return _OrderedListWithAction
def TypedListWithAction(typ, action):
"""Returns a class which behaves as a TypedList with the provided type, but
invokes the given callable with each input and a context as it is
@ -894,6 +905,7 @@ def TypedListWithAction(typ, action):
super(_TypedListWithAction, self).__init__(action=_action, *args)
return _TypedListWithAction
ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)
@ -1706,7 +1718,7 @@ VARIABLES = {
``GENERATED_FILES``.
"""),
'PROGRAM' : (unicode, unicode,
'PROGRAM': (unicode, unicode,
"""Compiled executable name.
If the configuration token ``BIN_SUFFIX`` is set, its value will be
@ -1714,7 +1726,7 @@ VARIABLES = {
``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged.
"""),
'HOST_PROGRAM' : (unicode, unicode,
'HOST_PROGRAM': (unicode, unicode,
"""Compiled host executable name.
If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be

View File

@ -192,6 +192,7 @@ class ComputedFlags(ContextDerived):
flags[dest_var].extend(value)
return flags.items()
class XPIDLModule(ContextDerived):
"""Describes an XPIDL module to be compiled."""
@ -207,6 +208,7 @@ class XPIDLModule(ContextDerived):
self.name = name
self.idl_files = idl_files
class BaseDefines(ContextDerived):
"""Context derived container object for DEFINES/HOST_DEFINES,
which are OrderedDicts.
@ -232,12 +234,15 @@ class BaseDefines(ContextDerived):
else:
self.defines.update(more_defines)
class Defines(BaseDefines):
pass
class HostDefines(BaseDefines):
pass
class WebIDLCollection(ContextDerived):
"""Collects WebIDL info referenced during the build."""
@ -1036,7 +1041,7 @@ class UnifiedSources(BaseSources):
unified_prefix = unified_prefix.replace('/', '_')
suffix = self.canonical_suffix[1:]
unified_prefix='Unified_%s_%s' % (suffix, unified_prefix)
unified_prefix = 'Unified_%s_%s' % (suffix, unified_prefix)
self.unified_source_mapping = list(group_unified_files(source_files,
unified_prefix=unified_prefix,
unified_suffix=suffix,
@ -1100,6 +1105,7 @@ class FinalTargetPreprocessedFiles(ContextDerived):
ContextDerived.__init__(self, sandbox)
self.files = files
class LocalizedFiles(FinalTargetFiles):
"""Sandbox container object for LOCALIZED_FILES, which is a
HierarchicalStringList.
@ -1188,11 +1194,13 @@ class GeneratedFile(ContextDerived):
'.py',
'.rs',
'node.stub', # To avoid VPATH issues with installing node files: https://bugzilla.mozilla.org/show_bug.cgi?id=1461714#c55
'android_apks', # We need to compile Java to generate JNI wrappers for native code compilation to consume.
# We need to compile Java to generate JNI wrappers for native code compilation to consume.
'android_apks',
'.profdata',
'.webidl'
)
self.required_for_compile = [f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
self.required_for_compile = [
f for f in self.outputs if f.endswith(suffixes) or 'stl_wrappers/' in f]
class ChromeManifestEntry(ContextDerived):

View File

@ -185,7 +185,8 @@ class TreeMetadataEmitter(LoggingMixin):
objs = list(emitfn(out))
self._emitter_time += time.time() - start
for o in emit_objs(objs): yield o
for o in emit_objs(objs):
yield o
else:
raise Exception('Unhandled output type: %s' % type(out))
@ -196,7 +197,8 @@ class TreeMetadataEmitter(LoggingMixin):
objs = list(self._emit_libs_derived(contexts))
self._emitter_time += time.time() - start
for o in emit_objs(objs): yield o
for o in emit_objs(objs):
yield o
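The split `for o in ...: yield o` loops are the lint-friendly Python 2 spelling; a minimal sketch of the pattern (on Python 3 the body could collapse to `yield from`):

    def emit_objs(objs):
        for o in objs:
            yield o

    def emit(objs):
        # Python 2 spelling used throughout the emitter; Python 3 would
        # allow `yield from emit_objs(objs)` here.
        for o in emit_objs(objs):
            yield o

    assert list(emit([1, 2, 3])) == [1, 2, 3]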
def _emit_libs_derived(self, contexts):
@ -234,11 +236,11 @@ class TreeMetadataEmitter(LoggingMixin):
if isinstance(collection, WebIDLCollection):
# Test webidl sources are added here as a somewhat special
# case.
idl_sources[mozpath.join(root, 'test')] = [s for s in collection.all_test_cpp_basenames()]
idl_sources[mozpath.join(root, 'test')] = [
s for s in collection.all_test_cpp_basenames()]
yield collection
# Next do FINAL_LIBRARY linkage.
for lib in (l for libs in self._libs.values() for l in libs):
if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
@ -305,7 +307,6 @@ class TreeMetadataEmitter(LoggingMixin):
propagate_defines(lib, lib.lib_defines)
yield lib
for lib in (l for libs in self._libs.values() for l in libs):
lib_defines = list(lib.lib_defines.get_defines())
if lib_defines:
@ -325,7 +326,6 @@ class TreeMetadataEmitter(LoggingMixin):
for obj in self._binaries.values():
yield obj
LIBRARY_NAME_VAR = {
'host': 'HOST_LIBRARY_NAME',
'target': 'LIBRARY_NAME',
@ -485,25 +485,29 @@ class TreeMetadataEmitter(LoggingMixin):
# A simple version number.
if isinstance(values, (str, unicode)):
raise SandboxValidationError(
'%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s does not list a path' % (
description, dep_crate_name, crate_name),
context)
dep_path = values.get('path', None)
if not dep_path:
raise SandboxValidationError(
'%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s does not list a path' % (
description, dep_crate_name, crate_name),
context)
# Try to catch the case where somebody listed a
# local path for development.
if os.path.isabs(dep_path):
raise SandboxValidationError(
'%s %s of crate %s has a non-relative path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s has a non-relative path' % (
description, dep_crate_name, crate_name),
context)
if not os.path.exists(mozpath.join(context.config.topsrcdir, crate_dir, dep_path)):
raise SandboxValidationError(
'%s %s of crate %s refers to a non-existent path' % (description, dep_crate_name, crate_name),
'%s %s of crate %s refers to a non-existent path' % (
description, dep_crate_name, crate_name),
context)
def _rust_library(self, context, libname, static_args, cls=RustLibrary):
@ -549,7 +553,6 @@ class TreeMetadataEmitter(LoggingMixin):
return cls(context, libname, cargo_file, crate_type, dependencies,
features, cargo_target_dir, **static_args)
def _handle_gn_dirs(self, context):
for target_dir in context.get('GN_DIRS', []):
context['DIRS'] += [target_dir]
@ -569,10 +572,10 @@ class TreeMetadataEmitter(LoggingMixin):
yield GnProjectData(context, target_dir, gn_dir, non_unified_sources)
def _handle_linkables(self, context, passthru, generated_files):
linkables = []
host_linkables = []
def add_program(prog, var):
if var.startswith('HOST_'):
host_linkables.append(prog)
@ -605,7 +608,7 @@ class TreeMetadataEmitter(LoggingMixin):
# Verify Rust program definitions.
if all_rust_programs:
config, cargo_file = self._parse_cargo_file(context);
config, cargo_file = self._parse_cargo_file(context)
bin_section = config.get('bin', None)
if not bin_section:
raise SandboxValidationError(
@ -1003,7 +1006,6 @@ class TreeMetadataEmitter(LoggingMixin):
l.cxx_link = True
break
def emit_from_context(self, context):
"""Convert a Context to tree metadata objects.
@ -1022,7 +1024,8 @@ class TreeMetadataEmitter(LoggingMixin):
# We always emit a directory traversal descriptor. This is needed by
# the recursive make backend.
for o in self._emit_directory_traversal_from_context(context): yield o
for o in self._emit_directory_traversal_from_context(context):
yield o
for obj in self._process_xpidl(context):
yield obj
@ -1207,7 +1210,8 @@ class TreeMetadataEmitter(LoggingMixin):
for obj in self._handle_linkables(context, passthru, generated_files):
yield obj
generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in self._binaries.keys()])
generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', ''))
for k in self._binaries.keys()])
components = []
for var, cls in (
@ -1376,7 +1380,6 @@ class TreeMetadataEmitter(LoggingMixin):
if context.objdir in self._host_compile_dirs:
yield computed_host_flags
def _create_substitution(self, cls, context, path):
sub = cls(context)
sub.input_path = '%s.in' % path.full_path
@ -1630,6 +1633,6 @@ class TreeMetadataEmitter(LoggingMixin):
# Some paths have a subconfigure, yet also have a moz.build. Those
# shouldn't end up in self._external_paths.
if o.objdir:
self._external_paths -= { o.relobjdir }
self._external_paths -= {o.relobjdir}
yield o

View File

@ -74,6 +74,7 @@ class GypContext(TemplateContext):
relobjdir is the object directory that will be used for this context,
relative to the topobjdir defined in the ConfigEnvironment.
"""
def __init__(self, config, relobjdir):
self._relobjdir = relobjdir
TemplateContext.__init__(self, template='Gyp',
@ -88,10 +89,12 @@ def handle_actions(actions, context, action_overrides):
raise RuntimeError('GYP action %s not listed in action_overrides' % name)
outputs = action['outputs']
if len(outputs) > 1:
raise NotImplementedError('GYP actions with more than one output not supported: %s' % name)
raise NotImplementedError(
'GYP actions with more than one output not supported: %s' % name)
output = outputs[0]
if not output.startswith(idir):
raise NotImplementedError('GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
raise NotImplementedError(
'GYP actions outputting to somewhere other than <(INTERMEDIATE_DIR) not supported: %s' % output)
output = output[len(idir):]
context['GENERATED_FILES'] += [output]
g = context['GENERATED_FILES'][output]
@ -104,7 +107,8 @@ def handle_copies(copies, context):
for copy in copies:
dest = copy['destination']
if not dest.startswith(dist):
raise NotImplementedError('GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
raise NotImplementedError(
'GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
dest_paths = dest[len(dist):].split('/')
exports = context['EXPORTS']
while dest_paths:
@ -161,6 +165,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
use_libs = []
libs = []
def add_deps(s):
for t in s.get('dependencies', []) + s.get('dependencies_original', []):
ty = targets[t]['type']
@ -171,7 +176,7 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
if ty in ('static_library', 'none'):
add_deps(targets[t])
libs.extend(spec.get('libraries', []))
#XXX: this sucks, but webrtc breaks with this right now because
# XXX: this sucks, but webrtc breaks with this right now because
# it builds a library called 'gtest' and we just get lucky
# that it isn't in USE_LIBS by that name anywhere.
if no_chromium:
@ -282,7 +287,8 @@ def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
if include.startswith('/'):
resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
elif not include.startswith(('!', '%')):
resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
resolved = mozpath.abspath(mozpath.join(
mozpath.dirname(build_file), include))
if not include.startswith(('!', '%')) and not os.path.exists(resolved):
continue
context['LOCAL_INCLUDES'] += [include]
@ -368,6 +374,7 @@ class GypProcessor(object):
gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
from moz.build.
"""
def __init__(self, config, gyp_dir_attrs, path, output, executor,
action_overrides, non_unified_sources):
self._path = path

View File

@ -21,6 +21,7 @@ import mozpack.path as mozpath
TOPSRCDIR = os.path.abspath(os.path.join(__file__, '../../../../../'))
class InvalidPathException(Exception):
"""Represents an error due to an invalid path."""
@ -284,7 +285,6 @@ class MozbuildFileCommands(MachCommandBase):
print(e.message)
return 1
def _get_files_info(self, paths, rev=None):
reader = self.mozbuild_reader(config_mode='empty', vcs_revision=rev)
@ -328,7 +328,6 @@ class MozbuildFileCommands(MachCommandBase):
return reader.files_info(allpaths)
@SubCommand('file-info', 'schedules',
'Show the combined SCHEDULES for the files listed.')
@CommandArgument('paths', nargs='+',

View File

@ -80,7 +80,6 @@ from mozbuild.base import ExecutionSummary
from concurrent.futures.process import ProcessPoolExecutor
if sys.version_info.major == 2:
text_type = unicode
type_type = types.TypeType
@ -106,6 +105,7 @@ class EmptyConfig(object):
This variation is needed because CONFIG uses .get() to access members.
Without it, None (instead of our EmptyValue types) would be returned.
"""
def get(self, key, default=None):
return self[key]
@ -182,6 +182,7 @@ class MozbuildSandbox(Sandbox):
metadata is a dict of metadata that can be used during the sandbox
evaluation.
"""
def __init__(self, context, metadata={}, finder=default_finder):
assert isinstance(context, Context)
@ -320,6 +321,7 @@ class MozbuildSandbox(Sandbox):
The wrapper function does type coercion on the function arguments
"""
func, args_def, doc = function_def
def function(*args):
def coerce(arg, type):
if not isinstance(arg, type):
@ -463,6 +465,7 @@ class TemplateFunction(object):
"""AST Node Transformer to rewrite variable accesses to go through
a dict.
"""
def __init__(self, sandbox, global_name):
self._sandbox = sandbox
self._global_name = global_name
@ -491,6 +494,7 @@ class TemplateFunction(object):
class SandboxValidationError(Exception):
"""Represents an error encountered when validating sandbox results."""
def __init__(self, message, context):
Exception.__init__(self, message)
self.context = context
@ -532,6 +536,7 @@ class BuildReaderError(Exception):
MozbuildSandbox has over Sandbox (e.g. the concept of included files -
which affect error messages, of course).
"""
def __init__(self, file_stack, trace, sandbox_exec_error=None,
sandbox_load_error=None, validation_error=None, other_error=None,
sandbox_called_error=None):
@ -802,7 +807,7 @@ class BuildReaderError(Exception):
s.write(' %s\n' % inner.args[4].__name__)
else:
for t in inner.args[4]:
s.write( ' %s\n' % t.__name__)
s.write(' %s\n' % t.__name__)
s.write('\n')
s.write('Change the file to write a value of the appropriate type ')
s.write('and try again.\n')
@ -1283,6 +1288,7 @@ class BuildReader(object):
# Exporting doesn't work reliably in tree traversal mode. Override
# the function to no-op.
functions = dict(FUNCTIONS)
def export(sandbox):
return lambda varname: None
functions['export'] = tuple([export] + list(FUNCTIONS['export'][1:]))
@ -1337,6 +1343,7 @@ class BuildReader(object):
# times (once for every path in a directory that doesn't have any
# test metadata). So, we cache the function call.
defaults_cache = {}
def test_defaults_for_path(ctxs):
key = tuple(ctx.current_path or ctx.main_path for ctx in ctxs)
@ -1394,7 +1401,8 @@ class BuildReader(object):
test_manifest_contexts = set(
['%s_MANIFESTS' % key for key in TEST_MANIFESTS] +
['%s_MANIFESTS' % flavor.upper() for flavor in REFTEST_FLAVORS] +
['%s_MANIFESTS' % flavor.upper().replace('-', '_') for flavor in WEB_PLATFORM_TESTS_FLAVORS]
['%s_MANIFESTS' % flavor.upper().replace('-', '_')
for flavor in WEB_PLATFORM_TESTS_FLAVORS]
)
result_context = Files(Context())

View File

@ -53,6 +53,7 @@ class SandboxExecutionError(SandboxError):
This is a simple container exception. Its purpose is to capture state
so something else can report on it.
"""
def __init__(self, file_stack, exc_type, exc_value, trace):
SandboxError.__init__(self, file_stack)
@ -69,6 +70,7 @@ class SandboxLoadError(SandboxError):
a file. If so, the file_stack will be non-empty and the file that caused
the load will be on top of the stack.
"""
def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
SandboxError.__init__(self, file_stack)

View File

@ -78,7 +78,8 @@ class MozbuildWriter(object):
self.write('\n')
self.write(self.indent + key)
self.write(' += [\n ' + self.indent)
self.write((',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
self.write(
(',\n ' + self.indent).join(alphabetical_sorted(self.mb_serialize(v) for v in value)))
self.write('\n')
self.write_ln(']')
@ -112,7 +113,6 @@ class MozbuildWriter(object):
if not wrote_ln:
self.write_ln("%s[%s] = %s" % subst_vals)
def write_condition(self, values):
def mk_condition(k, v):
if not v:

View File

@ -90,7 +90,8 @@ class JarInfo(object):
self.entries = []
class DeprecatedJarManifest(Exception): pass
class DeprecatedJarManifest(Exception):
pass
class JarManifestParser(object):
@ -238,13 +239,12 @@ class JarMaker(object):
p.add_option('-s', type='string', action='append', default=[],
help='source directory')
p.add_option('-t', type='string', help='top source directory')
p.add_option('-c', '--l10n-src', type='string', action='append'
, help='localization directory')
p.add_option('-c', '--l10n-src', type='string',
action='append', help='localization directory')
p.add_option('--l10n-base', type='string', action='store',
help='base directory to be used for localization (requires relativesrcdir)'
)
p.add_option('--locale-mergedir', type='string', action='store'
,
p.add_option('--locale-mergedir', type='string', action='store',
help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
)
p.add_option('--relativesrcdir', type='string',
@ -314,7 +314,7 @@ class JarMaker(object):
'''
# making paths absolute, guess srcdir if file and add to sourcedirs
_normpath = lambda p: os.path.normpath(os.path.abspath(p))
def _normpath(p): return os.path.normpath(os.path.abspath(p))
self.topsourcedir = _normpath(self.topsourcedir)
self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
if self.localedirs:

View File

@ -335,6 +335,7 @@ class CargoProvider(MachCommandBase):
return 0
@CommandProvider
class Doctor(MachCommandBase):
"""Provide commands for diagnosing common build environment problems"""
@ -348,6 +349,7 @@ class Doctor(MachCommandBase):
doctor = Doctor(self.topsrcdir, self.topobjdir, fix)
return doctor.check_all()
@CommandProvider
class Clobber(MachCommandBase):
NO_AUTO_LOG = True
@ -392,7 +394,7 @@ class Clobber(MachCommandBase):
Clobberer(self.topsrcdir, self.topobjdir).remove_objdir(full)
except OSError as e:
if sys.platform.startswith('win'):
if isinstance(e, WindowsError) and e.winerror in (5,32):
if isinstance(e, WindowsError) and e.winerror in (5, 32):
self.log(logging.ERROR, 'file_access_error', {'error': e},
"Could not clobber because a file was in use. If the "
"application is running, try closing it. {error}")
@ -421,6 +423,7 @@ class Clobber(MachCommandBase):
except BuildEnvironmentNotFoundException:
return {}
@CommandProvider
class Logs(MachCommandBase):
"""Provide commands to read mach logs."""
@ -583,6 +586,7 @@ class Warnings(MachCommandBase):
print('Specified directory not found.')
return None
@CommandProvider
class GTestCommands(MachCommandBase):
@Command('gtest', category='testing',
@ -596,7 +600,6 @@ class GTestCommands(MachCommandBase):
help='Output test results in a format that can be parsed by TBPL.')
@CommandArgument('--shuffle', '-s', action='store_true',
help='Randomize the execution order of tests.')
@CommandArgument('--package',
default='org.mozilla.geckoview.test',
help='(Android only) Package name of test app.')
@ -615,7 +618,6 @@ class GTestCommands(MachCommandBase):
@CommandArgument('--libxul',
dest='libxul_path',
help='(Android only) Path to gtest libxul.so.')
@CommandArgumentGroup('debugging')
@CommandArgument('--debug', action='store_true', group='debugging',
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
@ -624,7 +626,6 @@ class GTestCommands(MachCommandBase):
@CommandArgument('--debugger-args', default=None, metavar='params', type=str,
group='debugging',
help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
def gtest(self, shuffle, jobs, gtest_filter, tbpl_parser,
package, adb_path, device_serial, remote_test_root, libxul_path,
debug, debugger, debugger_args):
@ -669,7 +670,7 @@ class GTestCommands(MachCommandBase):
print("One or more Android-only options will be ignored")
app_path = self.get_binary_path('app')
args = [app_path, '-unittest', '--gtest_death_test_style=threadsafe'];
args = [app_path, '-unittest', '--gtest_death_test_style=threadsafe']
if sys.platform.startswith('win') and \
'MOZ_LAUNCHER_PROCESS' in self.defines:
@ -710,6 +711,7 @@ class GTestCommands(MachCommandBase):
from mozprocess import ProcessHandlerMixin
import functools
def handle_line(job_id, line):
# Prepend the jobId
line = '[%d] %s' % (job_id + 1, line.strip())
@ -722,7 +724,8 @@ class GTestCommands(MachCommandBase):
processes[i] = ProcessHandlerMixin([app_path, "-unittest"],
cwd=cwd,
env=gtest_env,
processOutputLine=[functools.partial(handle_line, i)],
processOutputLine=[
functools.partial(handle_line, i)],
universal_newlines=True)
processes[i].run()
@ -808,6 +811,7 @@ class GTestCommands(MachCommandBase):
args = [debuggerInfo.path] + debuggerInfo.args + args
return args
@CommandProvider
class ClangCommands(MachCommandBase):
@Command('clang-complete', category='devenv',
@ -876,6 +880,7 @@ class Package(MachCommandBase):
self.notify('Packaging complete')
return ret
@CommandProvider
class Install(MachCommandBase):
"""Install a package."""
@ -893,6 +898,7 @@ class Install(MachCommandBase):
self.notify('Install complete')
return ret
@SettingsProvider
class RunSettings():
config_settings = [
@ -903,6 +909,7 @@ single quoted to force them to be strings.
""".strip()),
]
@CommandProvider
class RunProgram(MachCommandBase):
"""Run the compiled program."""
@ -930,7 +937,6 @@ class RunProgram(MachCommandBase):
help='Run the program using a new temporary profile created inside the objdir.')
@CommandArgument('--macos-open', action='store_true', group=prog_group,
help="On macOS, run the program using the open(1) command. Per open(1), the browser is launched \"just as if you had double-clicked the file's icon\". The browser can not be launched under a debugger with this option.")
@CommandArgumentGroup('debugging')
@CommandArgument('--debug', action='store_true', group='debugging',
help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
@ -942,7 +948,6 @@ class RunProgram(MachCommandBase):
@CommandArgument('--debugparams', action=StoreDebugParamsAndWarnAction,
default=None, type=str, dest='debugger_args', group='debugging',
help=argparse.SUPPRESS)
@CommandArgumentGroup('DMD')
@CommandArgument('--dmd', action='store_true', group='DMD',
help='Enable DMD. The following arguments have no effect without this.')
@ -1112,6 +1117,7 @@ class RunProgram(MachCommandBase):
return self.run_process(args=args, ensure_exit_code=False,
pass_thru=True, append_env=extra_env)
@CommandProvider
class Buildsymbols(MachCommandBase):
"""Produce a package of debug symbols suitable for use with Breakpad."""
@ -1121,6 +1127,7 @@ class Buildsymbols(MachCommandBase):
def buildsymbols(self):
return self._run_make(directory=".", target='buildsymbols', ensure_exit_code=False)
@CommandProvider
class Makefiles(MachCommandBase):
@Command('empty-makefiles', category='build-dev',
@ -1177,6 +1184,7 @@ class Makefiles(MachCommandBase):
if f == 'Makefile.in':
yield os.path.join(root, f)
@CommandProvider
class MachDebug(MachCommandBase):
@Command('environment', category='build-dev',
@ -1252,6 +1260,7 @@ class MachDebug(MachCommandBase):
def _environment_json(self, out, verbose):
import json
class EnvironmentEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, MozbuildObject):
@ -1667,6 +1676,7 @@ class PackageFrontend(MachCommandBase):
return 0
class StaticAnalysisSubCommand(SubCommand):
def __call__(self, func):
after = SubCommand.__call__(self, func)
@ -1862,11 +1872,13 @@ class StaticAnalysis(MachCommandBase):
args = self._get_clang_tidy_command(
checks=checks, header_filter=header_filter, sources=source, jobs=jobs, fix=fix)
monitor = StaticAnalysisMonitor(self.topsrcdir, self.topobjdir, self._clang_tidy_config, total)
monitor = StaticAnalysisMonitor(
self.topsrcdir, self.topobjdir, self._clang_tidy_config, total)
footer = StaticAnalysisFooter(self.log_manager.terminal, monitor)
with StaticAnalysisOutputManager(self.log_manager, monitor, footer) as output_manager:
rc = self.run_process(args=args, ensure_exit_code=False, line_handler=output_manager.on_line, cwd=cwd)
rc = self.run_process(args=args, ensure_exit_code=False,
line_handler=output_manager.on_line, cwd=cwd)
self.log(logging.WARNING, 'warning_summary',
{'count': len(monitor.warnings_db)},
@ -1904,7 +1916,8 @@ class StaticAnalysis(MachCommandBase):
self.log_manager.enable_all_structured_loggers()
if 'MOZ_AUTOMATION' not in os.environ:
self.log(logging.INFO, 'static-analysis', {}, 'Coverity based static-analysis cannot be run outside automation.')
self.log(logging.INFO, 'static-analysis', {},
'Coverity based static-analysis cannot be run outside automation.')
return
# Use outgoing files instead of source files
@ -1914,7 +1927,8 @@ class StaticAnalysis(MachCommandBase):
source = map(os.path.abspath, files)
if len(source) == 0:
self.log(logging.ERROR, 'static-analysis', {}, 'There are no files that coverity can use to scan.')
self.log(logging.ERROR, 'static-analysis', {},
'There are no files that coverity can use to scan.')
return 0
rc = self._build_compile_db(verbose=verbose)
@ -1925,7 +1939,8 @@ class StaticAnalysis(MachCommandBase):
commands_list = self.get_files_with_commands(source)
if len(commands_list) == 0:
self.log(logging.INFO, 'static-analysis', {}, 'There are no files that need to be analyzed.')
self.log(logging.INFO, 'static-analysis', {},
'There are no files that need to be analyzed.')
return 0
# Load the configuration file for coverity static-analysis
@ -1939,31 +1954,37 @@ class StaticAnalysis(MachCommandBase):
# First run cov-run-desktop --setup in order to set up the analysis env
cmd = [self.cov_run_desktop, '--setup']
self.log(logging.INFO, 'static-analysis', {}, 'Running {} --setup'.format(self.cov_run_desktop))
self.log(logging.INFO, 'static-analysis', {},
'Running {} --setup'.format(self.cov_run_desktop))
rc = self.run_process(args=cmd, cwd=self.cov_path, pass_thru=True)
if rc != 0:
self.log(logging.ERROR, 'static-analysis', {}, 'Running {} --setup failed!'.format(self.cov_run_desktop))
self.log(logging.ERROR, 'static-analysis', {},
'Running {} --setup failed!'.format(self.cov_run_desktop))
return rc
# Run cov-configure for clang
cmd = [self.cov_configure, '--clang']
self.log(logging.INFO, 'static-analysis', {}, 'Running {} --clang'.format(self.cov_configure))
self.log(logging.INFO, 'static-analysis', {},
'Running {} --clang'.format(self.cov_configure))
rc = self.run_process(args=cmd, cwd=self.cov_path, pass_thru=True)
if rc != 0:
self.log(logging.ERROR, 'static-analysis', {}, 'Running {} --clang failed!'.format(self.cov_configure))
self.log(logging.ERROR, 'static-analysis', {},
'Running {} --clang failed!'.format(self.cov_configure))
return rc
# For each element in commands_list run `cov-translate`
for element in commands_list:
cmd = [self.cov_translate, '--dir', self.cov_idir_path] + element['command'].split(' ')
self.log(logging.INFO, 'static-analysis', {}, 'Running Coverity Translate for {}'.format(cmd))
self.log(logging.INFO, 'static-analysis', {},
'Running Coverity Translate for {}'.format(cmd))
rc = self.run_process(args=cmd, cwd=element['directory'], pass_thru=True)
if rc != 0:
self.log(logging.ERROR, 'static-analysis', {}, 'Running Coverity Translate failed for {}'.format(cmd))
self.log(logging.ERROR, 'static-analysis', {},
'Running Coverity Translate failed for {}'.format(cmd))
return cmd
if coverity_output_path is None:
@ -1973,7 +1994,8 @@ class StaticAnalysis(MachCommandBase):
# Once the capture is performed we need to do the actual Coverity Desktop analysis
cmd = [self.cov_run_desktop, '--json-output-v6', cov_result, '--analyze-captured-source']
self.log(logging.INFO, 'static-analysis', {}, 'Running Coverity Analysis for {}'.format(cmd))
self.log(logging.INFO, 'static-analysis', {},
'Running Coverity Analysis for {}'.format(cmd))
rc = self.run_process(cmd, cwd=self.cov_state_path, pass_thru=True)
if rc != 0:
self.log(logging.ERROR, 'static-analysis', {}, 'Coverity Analysis failed!')
@ -2016,7 +2038,8 @@ class StaticAnalysis(MachCommandBase):
def build_element(issue):
# We look only for main event
event_path = next((event for event in issue['events'] if event['main'] is True), None)
event_path = next(
(event for event in issue['events'] if event['main'] is True), None)
dict_issue = {
'line': issue['mainEventLineNumber'],
@ -2060,7 +2083,8 @@ class StaticAnalysis(MachCommandBase):
secret_name = 'project/relman/coverity'
secrets_url = '{}/secrets/v1/secret/{}'.format(get_root_url(True), secret_name)
self.log(logging.INFO, 'static-analysis', {}, 'Using symbol upload token from the secrets service: "{}"'.format(secrets_url))
self.log(logging.INFO, 'static-analysis', {},
'Using symbol upload token from the secrets service: "{}"'.format(secrets_url))
import requests
res = requests.get(secrets_url)
@ -2069,7 +2093,8 @@ class StaticAnalysis(MachCommandBase):
cov_config = secret['secret'] if 'secret' in secret else None
if cov_config is None:
self.log(logging.ERROR, 'static-analysis', {}, 'Ill-formatted secret for Coverity. Aborting analysis.')
self.log(logging.ERROR, 'static-analysis', {},
'Ill-formatted secret for Coverity. Aborting analysis.')
return 1
self.cov_analysis_url = cov_config.get('package_url')
@ -2156,7 +2181,8 @@ class StaticAnalysis(MachCommandBase):
self.cov_idir_path = mozpath.join(self.cov_work_path, self.cov_package_ver, 'idir')
if not os.path.exists(self.cov_path):
self.log(logging.ERROR, 'static-analysis', {}, 'Missing Coverity in {}'.format(self.cov_path))
self.log(logging.ERROR, 'static-analysis', {},
'Missing Coverity in {}'.format(self.cov_path))
return 1
return 0
@ -2376,7 +2402,8 @@ class StaticAnalysis(MachCommandBase):
if 'package_version' in self._clang_tidy_config:
version = self._clang_tidy_config['package_version']
else:
self.log(logging.ERROR, 'static-analysis', {}, "Unable to find 'package_version' in the config.yml")
self.log(logging.ERROR, 'static-analysis', {},
"Unable to find 'package_version' in the config.yml")
return False
# Because the fact that we ship together clang-tidy and clang-format
@ -2571,7 +2598,8 @@ class StaticAnalysis(MachCommandBase):
ignored_checker = item['name'] in ['mozilla-*', '-*']
# 4. List checker_names is passed and the current checker is not part of the
# list or 'publish' is False
checker_not_in_list = checker_names and (item['name'] not in checker_names or not_published)
checker_not_in_list = checker_names and (
item['name'] not in checker_names or not_published)
if not_published or \
ignored_platform or \
ignored_checker or \
@ -2592,7 +2620,8 @@ class StaticAnalysis(MachCommandBase):
if error_code != self.TOOLS_SUCCESS:
self.log(logging.INFO, 'static-analysis', {}, "FAIL: the following clang-tidy check(s) failed:")
self.log(logging.INFO, 'static-analysis', {},
"FAIL: the following clang-tidy check(s) failed:")
for failure in checkers_results:
checker_error = failure['checker-error']
checker_name = failure['checker-name']
@ -2605,12 +2634,14 @@ class StaticAnalysis(MachCommandBase):
message_to_log = "\tChecker {} not present in this clang-tidy version.".format(
checker_name)
elif checker_error == self.TOOLS_CHECKER_NO_TEST_FILE:
message_to_log = "\tChecker {0} does not have a test file - {0}.cpp".format(checker_name)
message_to_log = "\tChecker {0} does not have a test file - {0}.cpp".format(
checker_name)
elif checker_error == self.TOOLS_CHECKER_RETURNED_NO_ISSUES:
message_to_log = "\tChecker {0} did not find any issues in its test file, clang-tidy output for the run is:\n{1}".format(
checker_name, info1)
elif checker_error == self.TOOLS_CHECKER_RESULT_FILE_NOT_FOUND:
message_to_log = "\tChecker {0} does not have a result file - {0}.json".format(checker_name)
message_to_log = "\tChecker {0} does not have a result file - {0}.json".format(
checker_name)
elif checker_error == self.TOOLS_CHECKER_DIFF_FAILED:
message_to_log = "\tChecker {0}\nExpected: {1}\nGot: {2}\nclang-tidy output for the run is:\n{3}".format(
checker_name, info1, info2, info3)
@ -2647,9 +2678,11 @@ class StaticAnalysis(MachCommandBase):
return self._parse_issues(clang_output), clang_output
def _run_analysis_batch(self, items):
self.log(logging.INFO, 'static-analysis', {},"RUNNING: clang-tidy checker batch analysis.")
self.log(logging.INFO, 'static-analysis', {},
"RUNNING: clang-tidy checker batch analysis.")
if not len(items):
self.log(logging.ERROR, 'static-analysis', {}, "ERROR: clang-tidy checker list is empty!")
self.log(logging.ERROR, 'static-analysis', {},
"ERROR: clang-tidy checker list is empty!")
return self.TOOLS_CHECKER_LIST_EMPTY
issues, clang_output = self._run_analysis(
@ -2662,7 +2695,8 @@ class StaticAnalysis(MachCommandBase):
failed_checks = []
failed_checks_baseline = []
for checker in items:
test_file_path_json = mozpath.join(self._clang_tidy_base_path, "test", checker) + '.json'
test_file_path_json = mozpath.join(
self._clang_tidy_base_path, "test", checker) + '.json'
# Read the pre-determined issues
baseline_issues = self._get_autotest_stored_issues(test_file_path_json)
@ -2676,10 +2710,12 @@ class StaticAnalysis(MachCommandBase):
failed_checks_baseline.append(baseline_issues)
if len(failed_checks) > 0:
self.log(logging.ERROR, 'static-analysis', {}, 'The following check(s) failed for bulk analysis: ' + ' '.join(failed_checks))
self.log(logging.ERROR, 'static-analysis', {},
'The following check(s) failed for bulk analysis: ' + ' '.join(failed_checks))
for failed_check, baseline_issue in zip(failed_checks, failed_checks_baseline):
print('\tChecker {0} expects the following results: \n\t\t{1}'.format(failed_check, baseline_issue))
print('\tChecker {0} expects the following results: \n\t\t{1}'.format(
failed_check, baseline_issue))
print('This is the output generated by clang-tidy for the bulk build:\n{}'.format(clang_output))
return self.TOOLS_CHECKER_DIFF_FAILED
@ -2922,7 +2958,7 @@ class StaticAnalysis(MachCommandBase):
if path:
# Create the full path list
path_maker = lambda f_name: os.path.join(self.topsrcdir, f_name)
def path_maker(f_name): return os.path.join(self.topsrcdir, f_name)
path = map(path_maker, path)
os.chdir(self.topsrcdir)
@ -3668,6 +3704,7 @@ class Vendor(MachCommandBase):
from mozbuild.vendor_manifest import verify_manifests
verify_manifests(files)
@CommandProvider
class WebRTCGTestCommands(GTestCommands):
@Command('webrtc-gtest', category='testing',
@ -3714,6 +3751,7 @@ class WebRTCGTestCommands(GTestCommands):
ensure_exit_code=False,
pass_thru=True)
@CommandProvider
class Repackage(MachCommandBase):
'''Repackages artifacts into different formats.
@ -3824,6 +3862,7 @@ class Repackage(MachCommandBase):
from mozbuild.repackaging.mar import repackage_mar
repackage_mar(self.topsrcdir, input, mar, output, format, arch=arch)
@CommandProvider
class Analyze(MachCommandBase):
""" Get information about a file in the build graph """
@ -3969,7 +4008,8 @@ class L10NCommands(MachCommandBase):
self.log(logging.INFO, 'package-multi-locale', {},
'Invoking `mach android archive-geckoview`')
self.run_process(
[mozpath.join(self.topsrcdir, 'mach'), 'android', 'archive-geckoview'.format(locale)],
[mozpath.join(self.topsrcdir, 'mach'), 'android',
'archive-geckoview'.format(locale)],
append_env=append_env,
pass_thru=True,
ensure_exit_code=True,

View File

@ -62,6 +62,7 @@ class _SimpleOrderedSet(object):
It doesn't expose a complete API, and normalizes path separators
at insertion.
'''
def __init__(self):
self._list = []
self._set = set()
@ -95,6 +96,7 @@ class Rule(object):
command2
...
'''
def __init__(self, targets=[]):
self._targets = _SimpleOrderedSet()
self._dependencies = _SimpleOrderedSet()
@ -175,6 +177,7 @@ def read_dep_makefile(fh):
if rule:
raise Exception('Makefile finishes with a backslash. Expected more input.')
def write_dep_makefile(fh, target, deps):
'''
Write a Makefile containing only target's dependencies to the file handle

View File

@ -306,6 +306,7 @@ def _schema_1_additional(filename, manifest, require_license_file=True):
class License(object):
"""Voluptuous validator which verifies the license(s) are valid as per our
whitelist."""
def __call__(self, values):
if isinstance(values, str):
values = [values]

View File

@ -306,7 +306,7 @@ class MozconfigLoader(object):
# Environment variables also appear as shell variables, but that's
# uninteresting duplication of information. Filter them out.
filt = lambda x, y: {k: v for k, v in x.items() if k not in y}
def filt(x, y): return {k: v for k, v in x.items() if k not in y}
result['vars'] = diff_vars(
filt(parsed['vars_before'], parsed['env_before']),
filt(parsed['vars_after'], parsed['env_after'])

View File

@ -58,7 +58,7 @@ def build_dict(config, env=os.environ):
# processor
p = substs["TARGET_CPU"]
# do some slight massaging for some values
#TODO: retain specific values in case someone wants them?
# TODO: retain specific values in case someone wants them?
if p.startswith("arm"):
p = "arm"
elif re.match("i[3-9]86", p):

View File

@ -198,6 +198,7 @@ class Expression:
rv = not rv
return rv
# Helper function to evaluate __get_logical_and and __get_logical_or results
def eval_logical_op(tok):
left = opmap[tok[0].type](tok[0])
right = opmap[tok[2].type](tok[2])
@ -217,12 +218,13 @@ class Expression:
'defined': lambda tok: tok.value in context,
'int': lambda tok: tok.value}
return opmap[self.e.type](self.e);
return opmap[self.e.type](self.e)
class __AST(list):
"""
Internal class implementing Abstract Syntax Tree nodes
"""
def __init__(self, type):
self.type = type
super(self.__class__, self).__init__(self)
@ -231,11 +233,14 @@ class Expression:
"""
Internal class implementing Abstract Syntax Tree leafs
"""
def __init__(self, type, value):
self.value = value
self.type = type
def __str__(self):
return self.value.__str__()
def __repr__(self):
return self.value.__repr__()
@ -245,13 +250,16 @@ class Expression:
It has two members, offset and content, which give the offset of the
error and the offending content.
"""
def __init__(self, expression):
self.offset = expression.offset
self.content = expression.content[:3]
def __str__(self):
return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
self.content)
class Context(dict):
"""
This class holds variable values by subclassing dict, and while it
@ -266,6 +274,7 @@ class Context(dict):
to reflect the ambiguity between string literals and preprocessor
variables.
"""
def __getitem__(self, key):
if key in self:
return super(self.__class__, self).__getitem__(key)
@ -285,7 +294,7 @@ class Preprocessor:
def __init__(self, defines=None, marker='#'):
self.context = Context()
for k,v in {'FILE': '',
for k, v in {'FILE': '',
'LINE': 0,
'DIRECTORY': os.path.abspath('.')}.iteritems():
self.context[k] = v
@ -341,7 +350,8 @@ class Preprocessor:
elif self.actionLevel == 1:
msg = 'no useful preprocessor directives found'
if msg:
class Fake(object): pass
class Fake(object):
pass
fake = Fake()
fake.context = {
'FILE': file,
@ -454,7 +464,7 @@ class Preprocessor:
self.actionLevel = 2
self.out.write(filteredLine)
def handleCommandLine(self, args, defaultToStdin = False):
def handleCommandLine(self, args, defaultToStdin=False):
"""
Parse a commandline into this parser.
Uses OptionParser internally, no args mean sys.argv[1:].
@ -505,9 +515,10 @@ class Preprocessor:
if options.output:
out.close()
def getCommandLineParser(self, unescapeDefines = False):
def getCommandLineParser(self, unescapeDefines=False):
escapedValue = re.compile('".*"$')
numberValue = re.compile('\d+$')
def handleD(option, opt, value, parser):
vals = value.split('=', 1)
if len(vals) == 1:
@ -518,12 +529,16 @@ class Preprocessor:
elif numberValue.match(vals[1]):
vals[1] = int(vals[1])
self.context[vals[0]] = vals[1]
def handleU(option, opt, value, parser):
del self.context[value]
def handleF(option, opt, value, parser):
self.do_filter(value)
def handleMarker(option, opt, value, parser):
self.setMarker(value)
def handleSilenceDirectiveWarnings(option, opt, value, parse):
self.setSilenceDirectiveWarnings(True)
p = OptionParser()
@ -534,7 +549,7 @@ class Preprocessor:
p.add_option('-F', action='callback', callback=handleF, type="string",
metavar="FILTER", help='Enable the specified filter')
p.add_option('-o', '--output', type="string", default=None,
metavar="FILENAME", help='Output to the specified file '+
metavar="FILENAME", help='Output to the specified file ' +
'instead of stdout')
p.add_option('--depend', type="string", default=None, metavar="FILENAME",
help='Generate dependencies in the given file')
@ -586,6 +601,7 @@ class Preprocessor:
except:
pass
self.context[m.group('name')] = val
def do_undef(self, args):
m = re.match('(?P<name>\w+)$', args, re.U)
if not m:
@ -593,9 +609,11 @@ class Preprocessor:
if args in self.context:
del self.context[args]
# Logic
def ensure_not_else(self):
if len(self.ifStates) == 0 or self.ifStates[-1] == 2:
sys.stderr.write('WARNING: bad nesting of #else in %s\n' % self.context['FILE'])
def do_if(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1
@ -619,6 +637,7 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_ifdef(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1
@ -634,6 +653,7 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_ifndef(self, args, replace=False):
if self.disableLevel and not replace:
self.disableLevel += 1
@ -649,7 +669,8 @@ class Preprocessor:
else:
self.ifStates.append(self.disableLevel)
pass
def do_else(self, args, ifState = 2):
def do_else(self, args, ifState=2):
self.ensure_not_else()
hadTrue = self.ifStates[-1] == 0
self.ifStates[-1] = ifState # in-else
@ -657,33 +678,39 @@ class Preprocessor:
self.disableLevel = 1
return
self.disableLevel = 0
def do_elif(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_if(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])
def do_elifdef(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_ifdef(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])
def do_elifndef(self, args):
if self.disableLevel == 1:
if self.ifStates[-1] == 1:
self.do_ifndef(args, replace=True)
else:
self.do_else(None, self.ifStates[-1])
def do_endif(self, args):
if self.disableLevel > 0:
self.disableLevel -= 1
if self.disableLevel == 0:
self.ifStates.pop()
# output processing
def do_expand(self, args):
lst = re.split('__(\w+)__', args, re.U)
do_replace = False
def vsubst(v):
if v in self.context:
return str(self.context[v])
@ -692,8 +719,10 @@ class Preprocessor:
lst[i] = vsubst(lst[i])
lst.append('\n') # add back the newline
self.write(reduce(lambda x, y: x+y, lst, ''))
def do_literal(self, args):
self.write(args + '\n')
def do_filter(self, args):
filters = [f for f in args.split(' ') if hasattr(self, 'filter_' + f)]
if len(filters) == 0:
@ -705,6 +734,7 @@ class Preprocessor:
filterNames.sort()
self.filters = [(fn, current[fn]) for fn in filterNames]
return
def do_unfilter(self, args):
filters = args.split(' ')
current = dict(self.filters)
@ -719,12 +749,14 @@ class Preprocessor:
#
# emptyLines
# Strips blank lines from the output.
def filter_emptyLines(self, aLine):
if aLine == '\n':
return ''
return aLine
# slashslash
# Strips everything after //
def filter_slashslash(self, aLine):
if (aLine.find('//') == -1):
return aLine
@ -734,10 +766,12 @@ class Preprocessor:
return aLine
# spaces
# Collapses sequences of spaces into a single space
def filter_spaces(self, aLine):
return re.sub(' +', ' ', aLine).strip(' ')
# substitution
# helper to be used by both substitution and attemptSubstitution
def filter_substitution(self, aLine, fatal=True):
def repl(matchobj):
varname = matchobj.group('VAR')
@ -747,9 +781,11 @@ class Preprocessor:
raise Preprocessor.Error(self, 'UNDEFINED_VAR', varname)
return matchobj.group(0)
return self.varsubst.sub(repl, aLine)
def filter_attemptSubstitution(self, aLine):
return self.filter_substitution(aLine, fatal=False)
# File ops
def do_include(self, args, filters=True):
"""
Preprocess a given file.
@ -806,15 +842,17 @@ class Preprocessor:
self.context['LINE'] = oldLine
self.context['DIRECTORY'] = oldDir
self.curdir = oldCurdir
def do_includesubst(self, args):
args = self.filter_substitution(args)
self.do_include(args)
def do_error(self, args):
raise Preprocessor.Error(self, 'Error: ', str(args))
def preprocess(includes=[sys.stdin], defines={},
output = sys.stdout,
output=sys.stdout,
marker='#'):
pp = Preprocessor(defines=defines,
marker=marker)
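A minimal usage sketch of this module-level helper, assuming file-like objects are acceptable in `includes` (as the `sys.stdin` default suggests; StringIO matches the Python 2 tree):

    from StringIO import StringIO

    src = StringIO('#ifdef MOZ_FOO\nfoo is enabled\n#endif\n')
    out = StringIO()
    # MOZ_FOO is defined, so the guarded line should survive; the
    # directives themselves are consumed.
    preprocess(includes=[src], defines={'MOZ_FOO': 1}, output=out)
    assert out.getvalue() == 'foo is enabled\n'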

View File

@ -11,6 +11,7 @@ import mozpack.path as mozpath
from mozpack.dmg import create_dmg
from application_ini import get_application_ini_value
def repackage_dmg(infile, output):
if not tarfile.is_tarfile(infile):

View File

@ -16,13 +16,14 @@ _MSI_ARCH = {
'x86_64': 'x64',
}
def update_wsx(wfile, pvalues):
parsed = minidom.parse(wfile)
# construct a dictionary for the pre-processing options
# iterate over that list and add them to the wsx xml doc
for k,v in pvalues.items():
for k, v in pvalues.items():
entry = parsed.createProcessingInstruction('define', k + ' = "' + v + '"')
root = parsed.firstChild
parsed.insertBefore(entry, root)
@ -100,7 +101,7 @@ def repackage_msi(topsrcdir, wsx, version, locale, arch, setupexe, candle, light
'-out', wix_installer, wix_object_file]
subprocess.check_call(light_cmd, env=env)
os.remove(wix_object_file)
#mv file to output dir
# mv file to output dir
shutil.move(wix_installer, output)
finally:
os.chdir(old_cwd)

View File

@ -22,6 +22,7 @@ def _tokens2re(**tokens):
# backslash, captured in the "escape" match group.
return re.compile('(?:%s|%s)' % (nonescaped, r'(?P<escape>\\\\)'))
UNQUOTED_TOKENS_RE = _tokens2re(
whitespace=r'[\t\r\n ]+',
quote=r'[\'"]',
@ -54,6 +55,7 @@ class _ClineSplitter(object):
Parses a given command line string and creates a list of command
and arguments, with wildcard expansion.
'''
def __init__(self, cline):
self.arg = None
self.cline = cline

View File

@ -238,6 +238,7 @@ def get_build_attrs(attrs):
res['cpu_percent'] = int(round(usage['cpu_percent']))
return res
def filter_args(command, argv, paths):
'''
Given the full list of command-line arguments, remove anything up to and including `command`,

View File

@ -4,7 +4,10 @@
import unittest
import os, sys, os.path, time
import os
import sys
import os.path
import time
from tempfile import mkdtemp
from shutil import rmtree
import mozunit
@ -16,6 +19,7 @@ class TestBuildList(unittest.TestCase):
"""
Unit tests for buildlist.py
"""
def setUp(self):
self.tmpdir = mkdtemp()
@ -62,7 +66,7 @@ class TestBuildList(unittest.TestCase):
l = ["a", "b", "c"]
addEntriesToListFile(testfile, l)
self.assertFileContains(testfile, l)
l2 = ["x","y","z"]
l2 = ["x", "y", "z"]
addEntriesToListFile(testfile, l2)
l.extend(l2)
self.assertFileContains(testfile, l)
@ -80,10 +84,11 @@ class TestBuildList(unittest.TestCase):
"""Test that attempting to add the same entry multiple times results in
only one entry being added."""
testfile = os.path.join(self.tmpdir, "test.list")
addEntriesToListFile(testfile, ["a","b","a","a","b"])
self.assertFileContains(testfile, ["a","b"])
addEntriesToListFile(testfile, ["c","a","c","b","c"])
self.assertFileContains(testfile, ["a","b","c"])
addEntriesToListFile(testfile, ["a", "b", "a", "a", "b"])
self.assertFileContains(testfile, ["a", "b"])
addEntriesToListFile(testfile, ["c", "a", "c", "b", "c"])
self.assertFileContains(testfile, ["a", "b", "c"])
if __name__ == '__main__':
mozunit.main()
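The de-duplication these tests assert can be summarized in a short sketch; the real addEntriesToListFile also locks the file, which is omitted here:

    import os

    def add_entries_to_list_file(list_file, entries):
        # Behavior exercised above: append each entry once, preserving
        # existing order and skipping duplicates.
        existing = []
        if os.path.exists(list_file):
            with open(list_file) as f:
                existing = [line.strip() for line in f if line.strip()]
        for entry in entries:
            if entry not in existing:
                existing.append(entry)
        with open(list_file, 'w') as f:
            f.write('\n'.join(existing) + '\n')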

View File

@ -21,6 +21,7 @@ test_data_path = mozpath.join(test_data_path, 'data', 'node')
def data(name):
return os.path.join(test_data_path, name)
TEST_SCRIPT = data("node-test-script.js")
NONEXISTENT_TEST_SCRIPT = data("non-existent-test-script.js")

View File

@ -70,5 +70,6 @@ class TestGenerateManifest(TestWithTmpDir):
self.assertFalse(os.path.exists(self.tmppath('dest/foo/file2')))
self.assertFalse(os.path.exists(self.tmppath('dest/foo/file3')))
if __name__ == '__main__':
mozunit.main()

View File

@ -20,7 +20,7 @@ CREATE_NORMAL_LINK = """CREATE TABLE normal_link
(from_id integer,
to_id integer, unique(from_id, to_id));"""
NODE_DATA = [(1, 0 ,2, -1, '.'),
NODE_DATA = [(1, 0, 2, -1, '.'),
(2, 100, 0, 1, 'Base64.cpp'),
(3, 200, 0, 1, 'nsArray.cpp'),
(4, 100, 0, 1, 'nsWildCard.h'),
@ -63,6 +63,7 @@ NORMAL_LINK_DATA = [(2, 5), (3, 6), (4, 7), (4, 8), (4, 9), (5, 10), (6, 11),
PATH_TO_TEST_DB = ':memory:'
class TestGraph(unittest.TestCase):
@classmethod
def setUpClass(cls):
@ -93,10 +94,10 @@ class TestGraph(unittest.TestCase):
self.assertEqual(len(g.get_node(21).cmds), 0)
self.assertEqual(len(g.get_node(28).cmds), 0)
# one immediate command child
self.assertItemsEqual(g.get_node(2).get_cmd_ids(),[5] + libxul)
self.assertItemsEqual(g.get_node(3).get_cmd_ids(),[6] + libxul)
self.assertItemsEqual(g.get_node(2).get_cmd_ids(), [5] + libxul)
self.assertItemsEqual(g.get_node(3).get_cmd_ids(), [6] + libxul)
# multiple immediate command children
self.assertItemsEqual(g.get_node(4).get_cmd_ids(),[7, 8, 9] + libxul)
self.assertItemsEqual(g.get_node(4).get_cmd_ids(), [7, 8, 9] + libxul)
# node is not a file or command
self.assertItemsEqual(g.get_node(16).get_cmd_ids(), libxul[1:])
self.assertItemsEqual(g.get_node(11).get_cmd_ids(), libxul)
@ -131,5 +132,6 @@ class TestGraph(unittest.TestCase):
self.assertEqual(g.get_node(4).path, 'xpcom/io/nsWildCard.h')
self.assertEqual(g.get_node(28).path, 'dummy node')
if __name__ == '__main__':
mozunit.main()

View File

@ -233,5 +233,6 @@ class TestBuild(unittest.TestCase):
'bin/app/modules/foo.jsm': 'foo.jsm\n',
})
if __name__ == '__main__':
main()

View File

@ -2,7 +2,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os, posixpath
import os
import posixpath
from StringIO import StringIO
import unittest
from mozunit import main, MockedOpen
@ -39,15 +40,16 @@ class TestEnvironment(unittest.TestCase):
and ALLEMPTYSUBSTS.
'''
env = ConfigEnvironment('.', '.',
defines = { 'foo': 'bar', 'baz': 'qux 42',
'abc': "d'e'f", 'extra': 'foobar' },
non_global_defines = ['extra', 'ignore'],
substs = { 'FOO': 'bar', 'FOOBAR': '', 'ABC': 'def',
defines={'foo': 'bar', 'baz': 'qux 42',
'abc': "d'e'f", 'extra': 'foobar'},
non_global_defines=['extra', 'ignore'],
substs={'FOO': 'bar', 'FOOBAR': '', 'ABC': 'def',
'bar': 'baz qux', 'zzz': '"abc def"',
'qux': '' })
'qux': ''})
# non_global_defines should be filtered out in ACDEFINES.
# Original order of the defines needs to be respected in ACDEFINES
self.assertEqual(env.substs['ACDEFINES'], """-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""")
self.assertEqual(env.substs['ACDEFINES'],
"""-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""")
# Likewise for ALLSUBSTS, which also must contain ACDEFINES
self.assertEqual(env.substs['ALLSUBSTS'], '''ABC = def
ACDEFINES = -Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar
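
The escaped -Dabc value above uses the standard POSIX trick for embedding a single quote inside a single-quoted word: close the quote, emit a backslash-escaped quote, then reopen. A minimal sketch of that transform (hypothetical helper; the real code presumably quotes only values that need it, which is why -Dfoo=bar stays bare):

def shell_quote(s):
    # 'd'\''e'\''f' <- close quote, literal \', reopen quote.
    return "'" + s.replace("'", "'\\''") + "'"

# shell_quote("d'e'f") == "'d'\\''e'\\''f'", matching the -Dabc value above.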

View File

@ -30,6 +30,7 @@ from mozbuild.frontend.data import (
UnifiedSources,
)
class TestGnMozbuildWriter(BackendTester):
def setUp(self):

View File

@ -82,7 +82,8 @@ class TestPartial(unittest.TestCase):
self.assertTrue(os.path.exists(path))
def _assert_deps(self, env, deps):
deps = sorted(['$(wildcard %s)' % (mozpath.join(env.topobjdir, 'config.statusd', d)) for d in deps])
deps = sorted(['$(wildcard %s)' %
(mozpath.join(env.topobjdir, 'config.statusd', d)) for d in deps])
self.assertEqual(sorted(env.get_dependencies()), deps)
def test_dependencies(self):
@ -107,7 +108,8 @@ class TestPartial(unittest.TestCase):
with self.assertRaises(KeyError):
x = env.substs['NON_EXISTENT']
self._assert_deps(env, ['defines/MOZ_FOO', 'defines/MOZ_BAR', 'substs/MOZ_SUBST_1', 'substs/NON_EXISTENT'])
self._assert_deps(env, ['defines/MOZ_FOO', 'defines/MOZ_BAR',
'substs/MOZ_SUBST_1', 'substs/NON_EXISTENT'])
self.assertEqual(env.substs.get('NON_EXISTENT'), None)
def test_set_subst(self):
@ -159,5 +161,6 @@ class TestPartial(unittest.TestCase):
self.assertEqual(mydefines['DEBUG'], '1')
self.assertEqual(mydefines['MOZ_FOO'], '1')
if __name__ == "__main__":
main()
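
Note that the NON_EXISTENT assertions in this file expect 'substs/NON_EXISTENT' in the dependency list even though the lookup raised KeyError: reads are recorded as dependencies whether or not the key exists, so the build re-runs if a previously missing value appears. A sketch of that idea, assuming a recording wrapper (not the actual class):

class RecordingDict(dict):
    # Hypothetical: remember every key that was looked up, including
    # misses, so they can be reported as dependencies later.
    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self.accessed = set()

    def __getitem__(self, key):
        self.accessed.add(key)  # record before possibly raising KeyError
        return dict.__getitem__(self, key)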

View File

@ -59,6 +59,7 @@ class TestRecursiveMakeTraversal(unittest.TestCase):
traversal.add('X')
parallels = set(('G', 'H', 'I', 'J', 'O', 'P', 'Q', 'R', 'U'))
def filter(current, subdirs):
return (current, [d for d in subdirs.dirs if d in parallels],
[d for d in subdirs.dirs if d not in parallels])
@ -191,6 +192,7 @@ class TestRecursiveMakeTraversal(unittest.TestCase):
'J': ('',),
})
class TestRecursiveMakeBackend(BackendTester):
def test_basic(self):
"""Ensure the RecursiveMakeBackend works without error."""
@ -772,7 +774,8 @@ class TestRecursiveMakeBackend(BackendTester):
topsrcdir = env.topsrcdir.replace(os.sep, '/')
expected = [
"ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([topsrcdir] * 4),
"ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([
topsrcdir] * 4),
"CPPSRCS := UnifiedProtocols0.cpp",
"IPDLDIRS := %s %s/bar %s/foo" % (env.topobjdir, topsrcdir, topsrcdir),
]

View File

@ -108,6 +108,7 @@ LH:2
end_of_record
"""
class TempFile():
def __init__(self, content):
self.file = NamedTemporaryFile(delete=False)
@ -142,6 +143,7 @@ class TestLcovParser(unittest.TestCase):
output = self.parser_roundtrip(fn_with_multiple_commas)
self.assertEqual(fn_with_multiple_commas, output)
multiple_included_files = """//@line 1 "/src/dir/foo.js"
bazfoobar
//@line 2 "/src/dir/path/bar.js"
@ -158,6 +160,7 @@ baz
fin
"""
class TestLineRemapping(unittest.TestCase):
def setUp(self):
chrome_map_file = os.path.join(buildconfig.topobjdir, 'chrome-map.json')
@ -237,6 +240,7 @@ class TestLineRemapping(unittest.TestCase):
# Rewrite preprocessed entries.
lcov_file = lcov_rewriter.LcovFile([fpath])
r_num = []
def rewrite_source(s):
r_num.append(1)
return s, pp_remap
@ -262,6 +266,7 @@ class TestLineRemapping(unittest.TestCase):
self.assertEqual(original_covered_function_count,
sum(r.covered_function_count for r in records))
class TestUrlFinder(unittest.TestCase):
def setUp(self):
chrome_map_file = os.path.join(buildconfig.topobjdir, 'chrome-map.json')
@ -329,8 +334,10 @@ class TestUrlFinder(unittest.TestCase):
omnijar_name = buildconfig.substs.get('OMNIJAR_NAME')
paths = [
('jar:file:///home/worker/workspace/build/application/' + app_name + '/' + omnijar_name + '!/components/MainProcessSingleton.js', 'path1'),
('jar:file:///home/worker/workspace/build/application/' + app_name + '/browser/features/firefox@getpocket.com.xpi!/bootstrap.js', 'path4'),
('jar:file:///home/worker/workspace/build/application/' + app_name +
'/' + omnijar_name + '!/components/MainProcessSingleton.js', 'path1'),
('jar:file:///home/worker/workspace/build/application/' + app_name +
'/browser/features/firefox@getpocket.com.xpi!/bootstrap.js', 'path4'),
]
url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, '', '', [])
@ -356,17 +363,21 @@ class TestUrlFinder(unittest.TestCase):
def test_chrome_resource_paths(self):
paths = [
# Path with default url prefix
('resource://gre/modules/osfile/osfile_async_worker.js', ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
('resource://gre/modules/osfile/osfile_async_worker.js',
('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
# Path with url prefix that is in chrome map
('resource://activity-stream/lib/PrefsFeed.jsm', ('browser/components/newtab/lib/PrefsFeed.jsm', None)),
('resource://activity-stream/lib/PrefsFeed.jsm',
('browser/components/newtab/lib/PrefsFeed.jsm', None)),
# Path which is in url overrides
('chrome://global/content/netError.xhtml', ('browser/base/content/aboutNetError.xhtml', None)),
('chrome://global/content/netError.xhtml',
('browser/base/content/aboutNetError.xhtml', None)),
# Path which ends with > eval
('resource://gre/modules/osfile/osfile_async_worker.js line 3 > eval', None),
# Path which ends with > Function
('resource://gre/modules/osfile/osfile_async_worker.js line 3 > Function', None),
# Path which contains "->"
('resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js', ('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
('resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js',
('toolkit/components/osfile/modules/osfile_async_worker.js', None)),
# Path with pp_info
('resource://gre/modules/AppConstants.jsm', ('toolkit/modules/AppConstants.jsm', {
'101,102': [
@ -375,12 +386,14 @@ class TestUrlFinder(unittest.TestCase):
],
})),
# Path with query
('resource://activity-stream/lib/PrefsFeed.jsm?q=0.9098419174803978', ('browser/components/newtab/lib/PrefsFeed.jsm', None)),
('resource://activity-stream/lib/PrefsFeed.jsm?q=0.9098419174803978',
('browser/components/newtab/lib/PrefsFeed.jsm', None)),
]
url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, '', 'dist/bin/', [])
for path, expected in paths:
self.assertEqual(url_finder.rewrite_url(path), expected)
if __name__ == '__main__':
mozunit.main()
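
The eval, Function, and "->" cases above all reduce to the same normalization step: strip the decorations the JS engine appends to source URLs before mapping them to files. A sketch of that step, under the assumption that the rewriter treats dynamically evaluated code as unmappable:

def normalize_js_url(url):
    # Hypothetical pre-processing mirroring the expectations above.
    if url.endswith('> eval') or url.endswith('> Function'):
        return None                    # dynamically generated: no source file
    if ' -> ' in url:
        url = url.split(' -> ')[-1]    # keep the innermost resource
    return url.split('?')[0]           # drop any cache-busting query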

View File

@ -32,6 +32,7 @@ MSVC_TESTS = [
CURRENT_LINE = 1
def get_warning():
global CURRENT_LINE
@ -45,6 +46,7 @@ def get_warning():
return w
class TestCompilerWarning(unittest.TestCase):
def test_equivalence(self):
w1 = CompilerWarning()
@ -122,6 +124,7 @@ class TestCompilerWarning(unittest.TestCase):
self.assertGreaterEqual(w2, w1)
self.assertGreaterEqual(w1, w2)
class TestWarningsParsing(unittest.TestCase):
def test_clang_parsing(self):
for source, filename, line, column, message, flag in CLANG_TESTS:
@ -148,6 +151,7 @@ class TestWarningsParsing(unittest.TestCase):
self.assertEqual(warning['flag'], flag)
self.assertEqual(warning['message'], message)
class TestWarningsDatabase(unittest.TestCase):
def test_basic(self):
db = WarningsDatabase()

View File

@ -31,6 +31,7 @@ def fake_short_path(path):
for p in mozpath.split(path))
return path
def ensure_exe_extension(path):
if sys.platform.startswith('win'):
return path + '.exe'
@ -73,6 +74,7 @@ class ConfigureTestSandbox(ConfigureSandbox):
This class is only meant to implement the minimal things to make
moz.configure testing possible. As such, it takes shortcuts.
'''
def __init__(self, paths, config, environ, *args, **kwargs):
self._search_path = environ.get('PATH', '').split(os.pathsep)
@ -153,7 +155,6 @@ class ConfigureTestSandbox(ConfigureSandbox):
def __call__(self, *args, **kwargs):
return self._func(*args, **kwargs)
return ReadOnlyNamespace(
create_unicode_buffer=self.create_unicode_buffer,
windll=ReadOnlyNamespace(

View File

@ -233,7 +233,6 @@ class TestChecksConfigure(unittest.TestCase):
self.assertEqual(config, {'FOO': self.KNOWN_A})
self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
def test_check_prog_with_args(self):
config, out, status = self.get_result(
'check_prog("FOO", ("unknown", "known-b", "known c"))',
@ -431,7 +430,8 @@ class TestChecksConfigure(unittest.TestCase):
'single element, or a string')
def test_check_prog_with_path(self):
config, out, status = self.get_result('check_prog("A", ("known-a",), paths=["/some/path"])')
config, out, status = self.get_result(
'check_prog("A", ("known-a",), paths=["/some/path"])')
self.assertEqual(status, 1)
self.assertEqual(config, {})
self.assertEqual(out, textwrap.dedent('''\
@ -675,7 +675,6 @@ class TestChecksConfigure(unittest.TestCase):
*** to the full path to pkg-config.
'''))
config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')",
extra_paths=extra_paths)
self.assertEqual(status, 0)
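
The check_prog tests in this file exercise an ordered search: each candidate name is tried against each directory until an executable is found, and status 1 with an empty config means nothing matched. A minimal sketch of such a lookup (assumed shape, not the configure implementation):

import os

def find_program(candidates, paths):
    # Try candidate names in order across the given directories.
    for name in candidates:
        for directory in paths:
            full = os.path.join(directory, name)
            if os.path.isfile(full) and os.access(full, os.X_OK):
                return full
    return None  # caller reports the failure and exits with status 1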

View File

@ -21,6 +21,7 @@ from test_toolchain_helpers import FakeCompiler
class BaseCompileChecks(unittest.TestCase):
def get_mock_compiler(self, expected_test_content=None, expected_flags=None):
expected_flags = expected_flags or []
def mock_compiler(stdin, args):
args, test_file = args[:-1], args[-1]
self.assertIn('-c', args)
@ -220,7 +221,7 @@ class TestHeaderChecks(BaseCompileChecks):
config, out, status = self.do_compile_test(cmd)
self.assertEqual(status, 0)
self.assertEqual(out, '')
self.assertEqual(config, {'DEFINES':{}})
self.assertEqual(config, {'DEFINES': {}})
def test_check_header_include(self):
expected_test_content = textwrap.dedent('''\

View File

@ -587,7 +587,7 @@ class TestConfigure(unittest.TestCase):
config = get_config(['--enable-foo=a,b'])
self.assertIn('BAR', config)
self.assertEquals(config['BAR'], PositiveOptionValue(('a','b')))
self.assertEquals(config['BAR'], PositiveOptionValue(('a', 'b')))
with self.assertRaises(InvalidOptionError) as e:
get_config(['--enable-foo=a,b', '--disable-bar'])

View File

@ -250,7 +250,7 @@ class TestOption(unittest.TestCase):
self.assertEquals(PositiveOptionValue(('c',)), value)
value = option.get_value('--with-option=-b,+d')
self.assertEquals(PositiveOptionValue(('c','d')), value)
self.assertEquals(PositiveOptionValue(('c', 'd')), value)
# Adding something that is in the default is fine
value = option.get_value('--with-option=+b')

View File

@ -42,6 +42,7 @@ class CompilerPreprocessor(Preprocessor):
# different handling than what our Preprocessor does out of the box.
# Hack around it enough that the configure tests work properly.
context = self.context
def normalize_numbers(value):
if isinstance(value, types.StringTypes):
if value[-1:] == 'L' and value[:-1].isdigit():
@ -49,6 +50,7 @@ class CompilerPreprocessor(Preprocessor):
return value
# Our Preprocessor doesn't handle macros with parameters, so we hack
# around that for __has_feature()-like things.
def normalize_has_feature_or_builtin(expr):
return self.HAS_FEATURE_OR_BUILTIN.sub(r'\1\2', expr)
self.context = self.Context(
@ -162,6 +164,7 @@ class FakeCompiler(dict):
For convenience, FakeCompiler instances can be added (+) to one another.
'''
def __init__(self, *definitions):
for definition in definitions:
if all(not isinstance(d, dict) for d in definition.itervalues()):

View File

@ -474,6 +474,7 @@ class TestVersion(unittest.TestCase):
self.assertEqual(v.minor, 0)
self.assertEqual(v.patch, 0)
class TestCheckCmdOutput(unittest.TestCase):
def get_result(self, command='', paths=None):

View File

@ -291,5 +291,6 @@ class TestCcacheStats(unittest.TestCase):
stat9 = CCacheStats(self.STAT9)
self.assertTrue(stat9)
if __name__ == '__main__':
main()

View File

@ -181,7 +181,6 @@ class TestClobberer(unittest.TestCase):
self.assertFalse(performed)
self.assertIn('Cannot clobber while the shell is inside', reason)
def test_mozconfig_opt_in(self):
"""Auto clobber iff AUTOCLOBBER is in the environment."""

View File

@ -40,20 +40,20 @@ class TestContext(unittest.TestCase):
self.assertEqual(test['foo'], 0)
self.assertEqual(set(test.keys()), { 'foo' })
self.assertEqual(set(test.keys()), {'foo'})
self.assertEqual(test['bar'], False)
self.assertEqual(set(test.keys()), { 'foo', 'bar' })
self.assertEqual(set(test.keys()), {'foo', 'bar'})
self.assertEqual(test['baz'], {})
self.assertEqual(set(test.keys()), { 'foo', 'bar', 'baz' })
self.assertEqual(set(test.keys()), {'foo', 'bar', 'baz'})
with self.assertRaises(KeyError):
test['qux']
self.assertEqual(set(test.keys()), { 'foo', 'bar', 'baz' })
self.assertEqual(set(test.keys()), {'foo', 'bar', 'baz'})
def test_type_check(self):
test = Context({
@ -75,7 +75,7 @@ class TestContext(unittest.TestCase):
test['baz'] = [('a', 1), ('b', 2)]
self.assertEqual(test['baz'], { 'a': 1, 'b': 2 })
self.assertEqual(test['baz'], {'a': 1, 'b': 2})
def test_update(self):
test = Context({
@ -93,7 +93,7 @@ class TestContext(unittest.TestCase):
test.update(bar=True, foo=1)
self.assertEqual(set(test.keys()), { 'foo', 'bar' })
self.assertEqual(set(test.keys()), {'foo', 'bar'})
self.assertEqual(test['foo'], 1)
self.assertEqual(test['bar'], True)
@ -101,13 +101,13 @@ class TestContext(unittest.TestCase):
self.assertEqual(test['foo'], 2)
self.assertEqual(test['bar'], False)
test.update([('foo', 0), ('baz', { 'a': 1, 'b': 2 })])
test.update([('foo', 0), ('baz', {'a': 1, 'b': 2})])
self.assertEqual(test['foo'], 0)
self.assertEqual(test['baz'], { 'a': 1, 'b': 2 })
self.assertEqual(test['baz'], {'a': 1, 'b': 2})
test.update([('foo', 42), ('baz', [('c', 3), ('d', 4)])])
self.assertEqual(test['foo'], 42)
self.assertEqual(test['baz'], { 'c': 3, 'd': 4 })
self.assertEqual(test['baz'], {'c': 3, 'd': 4})
def test_context_paths(self):
test = Context()
@ -214,7 +214,8 @@ class TestContext(unittest.TestCase):
self.assertEqual(test.source_stack, [foo, bar, bar, foo])
def test_context_dirs(self):
class Config(object): pass
class Config(object):
pass
config = Config()
config.topsrcdir = mozpath.abspath(os.curdir)
config.topobjdir = mozpath.abspath('obj')
@ -277,7 +278,8 @@ class TestSymbols(unittest.TestCase):
class TestPaths(unittest.TestCase):
@classmethod
def setUpClass(cls):
class Config(object): pass
class Config(object):
pass
cls.config = config = Config()
config.topsrcdir = mozpath.abspath(os.curdir)
config.topobjdir = mozpath.abspath('obj')
@ -723,7 +725,7 @@ class TestFiles(unittest.TestCase):
f2 = Files(c, 'b/**', 'a/bar')
f2['BUG_COMPONENT'] = (u'Product2', u'Component2')
files = {'a/foo': f1, 'a/bar' : f2, 'b/foo' : f2 }
files = {'a/foo': f1, 'a/bar': f2, 'b/foo': f2}
self.assertEqual(Files.aggregate(files), {
'bug_component_counts': [
((u'Product2', u'Component2'), 2),

View File

@ -537,7 +537,6 @@ class TestEmitterBasic(unittest.TestCase):
'Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:'):
objs = self.read_topsrcdir(reader)
def test_localized_generated_files_final_target_files(self):
"""Test that using LOCALIZED_GENERATED_FILES and then putting the output in
FINAL_TARGET_FILES as an objdir path produces an error.
@ -736,7 +735,6 @@ class TestEmitterBasic(unittest.TestCase):
with self.assertRaisesRegexp(SandboxValidationError, 'Empty test manifest'):
self.read_topsrcdir(reader)
def test_test_manifest_just_support_files(self):
"""A test manifest with no tests but support-files is not supported."""
reader = self.reader('test-manifest-just-support')
@ -1028,7 +1026,6 @@ class TestEmitterBasic(unittest.TestCase):
])
self.assertEqual(generated_sources, expected)
def test_local_includes(self):
"""Test that LOCAL_INCLUDES is emitted correctly."""
reader = self.reader('local_includes')
@ -1159,7 +1156,7 @@ class TestEmitterBasic(unittest.TestCase):
reader = self.reader('library-defines')
objs = self.read_topsrcdir(reader)
libraries = [o for o in objs if isinstance(o,StaticLibrary)]
libraries = [o for o in objs if isinstance(o, StaticLibrary)]
library_flags = [o for o in objs if isinstance(o, ComputedFlags)
and 'LIBRARY_DEFINES' in o.flags]
expected = {
@ -1334,7 +1331,6 @@ class TestEmitterBasic(unittest.TestCase):
reader.config.substs['OBJ_SUFFIX'])),
linkable.objs)
def test_unified_sources(self):
"""Test that UNIFIED_SOURCES works properly."""
reader = self.reader('unified-sources')
@ -1369,7 +1365,6 @@ class TestEmitterBasic(unittest.TestCase):
reader.config.substs['OBJ_SUFFIX'])),
linkable.objs)
def test_unified_sources_non_unified(self):
"""Test that UNIFIED_SOURCES with FILES_PER_UNIFIED_FILE=1 works properly."""
reader = self.reader('unified-sources-non-unified')

View File

@ -60,6 +60,7 @@ VARIABLES = {
})), list, None),
}
class TestContext(unittest.TestCase):
def test_key_rejection(self):
# Lowercase keys should be rejected during normal operation.
@ -203,5 +204,6 @@ class TestContext(unittest.TestCase):
with self.assertRaises(UnsortedError):
ns['HOGEHOGE'] += ['f', 'e', 'd']
if __name__ == '__main__':
main()

View File

@ -389,7 +389,8 @@ class TestBuildReader(unittest.TestCase):
self.assertEqual(js_flags['BUG_COMPONENT'], BugzillaComponent('Firefox', 'JS'))
self.assertEqual(cpp_flags['BUG_COMPONENT'], BugzillaComponent('Firefox', 'C++'))
self.assertEqual(misc_flags['BUG_COMPONENT'], BugzillaComponent('default_product', 'default_component'))
self.assertEqual(misc_flags['BUG_COMPONENT'], BugzillaComponent(
'default_product', 'default_component'))
def test_files_bug_component_final(self):
reader = self.reader('files-info')
@ -506,7 +507,8 @@ class TestBuildReader(unittest.TestCase):
self.assertEqual(info['subd/aa.py']['SCHEDULES'].exclusive, schedules.EXCLUSIVE_COMPONENTS)
# Files('yaml.py') in subd/moz.build combines with Files('subdir/**.py')
self.assertEqual(info['subd/yaml.py']['SCHEDULES'].inclusive, ['py-lint', 'yaml-lint'])
self.assertEqual(info['subd/yaml.py']['SCHEDULES'].exclusive, schedules.EXCLUSIVE_COMPONENTS)
self.assertEqual(info['subd/yaml.py']['SCHEDULES'].exclusive,
schedules.EXCLUSIVE_COMPONENTS)
# .. but exclusive does not override inclusive
self.assertEqual(info['subd/win.js']['SCHEDULES'].inclusive, ['js-lint'])
self.assertEqual(info['subd/win.js']['SCHEDULES'].exclusive, ['windows'])
@ -519,5 +521,6 @@ class TestBuildReader(unittest.TestCase):
self.assertEqual(set(info['win.and.osx']['SCHEDULES'].exclusive),
set(['macosx', 'windows']))
if __name__ == '__main__':
main()

View File

@ -129,6 +129,7 @@ class TestedSandbox(MozbuildSandbox):
It automatically normalizes paths given to exec_file and exec_source. This
helps simplify the test code.
'''
def normalize_path(self, path):
return mozpath.normpath(
mozpath.join(self._context.config.topsrcdir, path))
@ -493,7 +494,8 @@ def Template():
sandbox.normalize_path('templates.mozbuild'))
def test_function_args(self):
class Foo(int): pass
class Foo(int):
pass
def foo(a, b):
return type(a), type(b)

View File

@ -10,6 +10,7 @@ from mozbuild.android_version_code import (
android_version_code_v1,
)
class TestAndroidVersionCode(unittest.TestCase):
def test_android_version_code_v0(self):
# From https://treeherder.mozilla.org/#/jobs?repo=mozilla-central&revision=e25de9972a77.
@ -17,24 +18,31 @@ class TestAndroidVersionCode(unittest.TestCase):
arm_api9 = 2015070819
arm_api11 = 2015070821
x86_api9 = 2015070822
self.assertEqual(android_version_code_v0(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None), arm_api9)
self.assertEqual(android_version_code_v0(buildid, cpu_arch='armeabi-v7a', min_sdk=11, max_sdk=None), arm_api11)
self.assertEqual(android_version_code_v0(buildid, cpu_arch='x86', min_sdk=9, max_sdk=None), x86_api9)
self.assertEqual(android_version_code_v0(
buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None), arm_api9)
self.assertEqual(android_version_code_v0(
buildid, cpu_arch='armeabi-v7a', min_sdk=11, max_sdk=None), arm_api11)
self.assertEqual(android_version_code_v0(
buildid, cpu_arch='x86', min_sdk=9, max_sdk=None), x86_api9)
def test_android_version_code_v1(self):
buildid = '20150825141628'
arm_api16 = 0b01111000001000000001001001110001
arm64_api21 = 0b01111000001000000001001001110100
x86_api9 = 0b01111000001000000001001001110100
self.assertEqual(android_version_code_v1(buildid, cpu_arch='armeabi-v7a', min_sdk=16, max_sdk=None), arm_api16)
self.assertEqual(android_version_code_v1(buildid, cpu_arch='arm64-v8a', min_sdk=21, max_sdk=None), arm64_api21)
self.assertEqual(android_version_code_v1(buildid, cpu_arch='x86', min_sdk=9, max_sdk=None), x86_api9)
self.assertEqual(android_version_code_v1(
buildid, cpu_arch='armeabi-v7a', min_sdk=16, max_sdk=None), arm_api16)
self.assertEqual(android_version_code_v1(
buildid, cpu_arch='arm64-v8a', min_sdk=21, max_sdk=None), arm64_api21)
self.assertEqual(android_version_code_v1(
buildid, cpu_arch='x86', min_sdk=9, max_sdk=None), x86_api9)
def test_android_version_code_v1_underflow(self):
'''Verify that it is an error to ask for v1 codes predating the cutoff.'''
buildid = '201508010000' # Earliest possible.
arm_api9 = 0b01111000001000000000000000000000
self.assertEqual(android_version_code_v1(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None), arm_api9)
self.assertEqual(android_version_code_v1(
buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None), arm_api9)
with self.assertRaises(ValueError) as cm:
underflow = '201507310000' # Latest possible (valid) underflowing date.
android_version_code_v1(underflow, cpu_arch='armeabi', min_sdk=9, max_sdk=None)
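
The underflow test pins down one property of the v1 scheme: build IDs earlier than 2015-08-01 cannot be encoded. A sketch of just that guard (illustrative only; the actual bit packing lives in android_version_code_v1):

import datetime

V1_CUTOFF = datetime.datetime(2015, 8, 1)

def check_v1_buildid(buildid):
    # Hypothetical guard mirroring the ValueError the test expects.
    when = datetime.datetime.strptime(buildid[:12], '%Y%m%d%H%M')
    if when < V1_CUTOFF:
        raise ValueError('build ID %s predates the v1 cutoff' % buildid)
    return when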

View File

@ -24,6 +24,7 @@ CONTENTS = {
'http://server/larger': b'larger' * 3000,
}
class FakeResponse(object):
def __init__(self, content):
self._content = content

View File

@ -214,7 +214,7 @@ class TestMozbuildObject(unittest.TestCase):
context.topdir = topsrcdir
context.settings = None
context.log_manager = None
context.detect_virtualenv_mozinfo=False
context.detect_virtualenv_mozinfo = False
o = MachCommandBase(context)
@ -280,7 +280,7 @@ class TestMozbuildObject(unittest.TestCase):
context.topdir = topsrcdir
context.settings = None
context.log_manager = None
context.detect_virtualenv_mozinfo=False
context.detect_virtualenv_mozinfo = False
stdout = sys.stdout
sys.stdout = StringIO()
@ -366,6 +366,7 @@ class TestMozbuildObject(unittest.TestCase):
else:
self.assertTrue(p.endswith('foobar'))
class TestPathArgument(unittest.TestCase):
def test_path_argument(self):
# Absolute path
@ -404,5 +405,6 @@ class TestPathArgument(unittest.TestCase):
self.assertEqual(p.srcdir_path(), "/src/foo/bar")
self.assertEqual(p.objdir_path(), "/src/obj/foo/bar")
if __name__ == '__main__':
main()

View File

@ -142,6 +142,7 @@ class TestList(unittest.TestCase):
with self.assertRaises(ValueError):
test = test + False
class TestOrderedDefaultDict(unittest.TestCase):
def test_simple(self):
original = OrderedDict(foo=1, bar=2)
@ -152,21 +153,21 @@ class TestOrderedDefaultDict(unittest.TestCase):
self.assertEqual(test['foo'], 1)
self.assertEqual(test.keys(), ['foo', 'bar' ])
self.assertEqual(test.keys(), ['foo', 'bar'])
def test_defaults(self):
test = OrderedDefaultDict(bool, {'foo': 1 })
test = OrderedDefaultDict(bool, {'foo': 1})
self.assertEqual(test['foo'], 1)
self.assertEqual(test['qux'], False)
self.assertEqual(test.keys(), ['foo', 'qux' ])
self.assertEqual(test.keys(), ['foo', 'qux'])
class TestKeyedDefaultDict(unittest.TestCase):
def test_simple(self):
original = {'foo': 1, 'bar': 2 }
original = {'foo': 1, 'bar': 2}
test = KeyedDefaultDict(lambda x: x, original)
@ -175,7 +176,7 @@ class TestKeyedDefaultDict(unittest.TestCase):
self.assertEqual(test['foo'], 1)
def test_defaults(self):
test = KeyedDefaultDict(lambda x: x, {'foo': 1 })
test = KeyedDefaultDict(lambda x: x, {'foo': 1})
self.assertEqual(test['foo'], 1)
@ -196,7 +197,7 @@ class TestKeyedDefaultDict(unittest.TestCase):
class TestReadOnlyKeyedDefaultDict(unittest.TestCase):
def test_defaults(self):
test = ReadOnlyKeyedDefaultDict(lambda x: x, {'foo': 1 })
test = ReadOnlyKeyedDefaultDict(lambda x: x, {'foo': 1})
self.assertEqual(test['foo'], 1)
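
The ordering assertions above ('qux' lands after 'foo' even when created by the default factory) fall out of the usual __missing__ hook on an ordered mapping. A minimal sketch consistent with these tests (assumed implementation, not mozbuild.util's):

from collections import OrderedDict

class OrderedDefaultDict(OrderedDict):
    def __init__(self, default_factory, *args, **kwargs):
        OrderedDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # Insert the default on first access, preserving insertion order.
        value = self[key] = self._default_factory()
        return value

Judging by the lambda x: x tests, KeyedDefaultDict differs only in passing the missing key to the factory instead of calling it with no arguments.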

View File

@ -31,7 +31,6 @@ key=value
self.assertEqual(p.get('missing', 'default'), 'default')
self.assertEqual(p.get('key'), 'value')
def test_update(self):
contents = StringIO('''
old=old value
@ -48,7 +47,6 @@ key=new value
self.assertEqual(p.get('old'), 'old value')
self.assertEqual(p.get('key'), 'new value')
def test_get_list(self):
contents = StringIO('''
list.0=A
@ -64,7 +62,6 @@ order.2=C
self.assertEqual(p.get_list('list'), ['A', 'B', 'C'])
self.assertEqual(p.get_list('order'), ['A', 'B', 'C'])
def test_get_list_with_shared_prefix(self):
contents = StringIO('''
list.0=A
@ -85,7 +82,6 @@ list.other.0=H
self.assertEqual(p.get_list('list.sublist.second'), ['G'])
self.assertEqual(p.get_list('list.other'), ['H'])
def test_get_dict(self):
contents = StringIO('''
A.title=title A
@ -108,7 +104,6 @@ C=value
with self.assertRaises(ValueError):
p.get_dict('C', required_keys=['missing_key'])
def test_get_dict_with_shared_prefix(self):
contents = StringIO('''
A.title=title A
@ -143,7 +138,6 @@ A.default.C.ignored=C ignored
self.assertEqual(p.get('A.default.B'), 'B')
self.assertEqual(p.get('A.default.C'), 'C')
def test_unicode(self):
contents = StringIO('''
# Danish.
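
The get_list tests encode a numeric-suffix convention: list.0, list.1, ... are collected in numeric order, while longer dotted prefixes such as list.other.0 stay separate. A sketch of that collection, assuming the parsed properties live in a plain dict:

import re

def get_list(properties, prefix):
    # Hypothetical helper: gather prefix.<n> keys in numeric order,
    # ignoring deeper dotted prefixes such as 'list.other.0'.
    pattern = re.compile(re.escape(prefix) + r'\.(\d+)$')
    found = []
    for key, value in properties.items():
        match = pattern.match(key)
        if match:
            found.append((int(match.group(1)), value))
    return [value for _, value in sorted(found)]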

View File

@ -6,6 +6,7 @@ import mozunit
from mozbuild.preprocessor import Expression, Context
class TestContext(unittest.TestCase):
"""
Unit tests for the Context class
@ -28,6 +29,7 @@ class TestContext(unittest.TestCase):
self.assert_('FAIL' in self.c)
self.assert_('PASS' not in self.c)
class TestExpression(unittest.TestCase):
"""
Unit tests for the Expression class
@ -70,7 +72,8 @@ class TestExpression(unittest.TestCase):
def test_logical_ops(self):
""" Test for the && and || operators precedence"""
# Would evaluate to false if precedence was wrong
self.assertTrue(Expression('PASS == PASS || PASS != NOTPASS && PASS == NOTPASS').evaluate(self.c))
self.assertTrue(Expression(
'PASS == PASS || PASS != NOTPASS && PASS == NOTPASS').evaluate(self.c))
def test_defined(self):
""" Test for the defined() value"""

Some files were not shown because too many files have changed in this diff.