Bug 1109302 - make moz.build *SOURCES variables emit proper objects; r=gps

Having SOURCES and its close relatives go through VariablePassthru
objects clutters the handling of VariablePassthru in build backends and
makes it less obvious how to handle things that actually get compiled.

Therefore, this patch introduces four new moz.build objects
corresponding to the major variants of SOURCES.  It looks like a large
patch, but it includes a substantial number of new tests, which account
for about half of the changes.
This commit is contained in:
Nathan Froyd 2014-12-09 16:45:59 -05:00
parent 559c57d455
commit fbabe54826
54 changed files with 441 additions and 133 deletions

View File

@ -38,9 +38,11 @@ from ..frontend.data import (
ExternalLibrary,
FinalTargetFiles,
GeneratedInclude,
GeneratedSources,
HostLibrary,
HostProgram,
HostSimpleProgram,
HostSources,
InstallationTarget,
IPDLFile,
JARManifest,
@ -53,9 +55,11 @@ from ..frontend.data import (
Resources,
SharedLibrary,
SimpleProgram,
Sources,
StaticLibrary,
TestHarnessFiles,
TestManifest,
UnifiedSources,
VariablePassthru,
XPIDLFile,
)
@ -359,46 +363,70 @@ class RecursiveMakeBackend(CommonBackend):
# CommonBackend.
assert os.path.basename(obj.output_path) == 'Makefile'
self._create_makefile(obj)
elif isinstance(obj, VariablePassthru):
unified_suffixes = dict(
UNIFIED_CSRCS='c',
UNIFIED_CMMSRCS='mm',
UNIFIED_CPPSRCS='cpp',
)
elif isinstance(obj, (Sources, GeneratedSources)):
suffix_map = {
'.s': 'ASFILES',
'.c': 'CSRCS',
'.m': 'CMSRCS',
'.mm': 'CMMSRCS',
'.cpp': 'CPPSRCS',
'.S': 'SSRCS',
}
var = suffix_map[obj.canonical_suffix]
for f in sorted(obj.files):
backend_file.write('%s += %s\n' % (var, f))
elif isinstance(obj, HostSources):
suffix_map = {
'.c': 'HOST_CSRCS',
'.mm': 'HOST_CMMSRCS',
'.cpp': 'HOST_CPPSRCS',
}
var = suffix_map[obj.canonical_suffix]
for f in sorted(obj.files):
backend_file.write('%s += %s\n' % (var, f))
elif isinstance(obj, UnifiedSources):
suffix_map = {
'.c': 'UNIFIED_CSRCS',
'.mm': 'UNIFIED_CMMSRCS',
'.cpp': 'UNIFIED_CPPSRCS',
}
files_per_unification = 16
if 'FILES_PER_UNIFIED_FILE' in obj.variables.keys():
files_per_unification = obj.variables['FILES_PER_UNIFIED_FILE']
var = suffix_map[obj.canonical_suffix]
non_unified_var = var[len('UNIFIED_'):]
files_per_unification = obj.files_per_unified_file
do_unify = not self.environment.substs.get(
'MOZ_DISABLE_UNIFIED_COMPILATION') and files_per_unification > 1
# Sorted so output is consistent and we don't bump mtimes.
source_files = list(sorted(obj.files))
if do_unify:
# On Windows, path names have a maximum length of 255 characters,
# so avoid creating extremely long path names.
unified_prefix = mozpath.relpath(backend_file.objdir,
backend_file.environment.topobjdir)
if len(unified_prefix) > 20:
unified_prefix = unified_prefix[-20:].split('/', 1)[-1]
unified_prefix = unified_prefix.replace('/', '_')
suffix = obj.canonical_suffix[1:]
self._add_unified_build_rules(backend_file, source_files,
backend_file.objdir,
unified_prefix='Unified_%s_%s' % (
suffix,
unified_prefix),
unified_suffix=suffix,
unified_files_makefile_variable=var,
include_curdir_build_rules=False,
files_per_unified_file=files_per_unification)
backend_file.write('%s += $(%s)\n' % (non_unified_var, var))
else:
backend_file.write('%s += %s\n' % (
non_unified_var, ' '.join(source_files)))
elif isinstance(obj, VariablePassthru):
# Sorted so output is consistent and we don't bump mtimes.
for k, v in sorted(obj.variables.items()):
if k in unified_suffixes:
if do_unify:
# On Windows, path names have a maximum length of 255 characters,
# so avoid creating extremely long path names.
unified_prefix = mozpath.relpath(backend_file.objdir,
backend_file.environment.topobjdir)
if len(unified_prefix) > 20:
unified_prefix = unified_prefix[-20:].split('/', 1)[-1]
unified_prefix = unified_prefix.replace('/', '_')
self._add_unified_build_rules(backend_file, v,
backend_file.objdir,
unified_prefix='Unified_%s_%s' % (
unified_suffixes[k],
unified_prefix),
unified_suffix=unified_suffixes[k],
unified_files_makefile_variable=k,
include_curdir_build_rules=False,
files_per_unified_file=files_per_unification)
backend_file.write('%s += $(%s)\n' % (k[len('UNIFIED_'):], k))
else:
backend_file.write('%s += %s\n' % (
k[len('UNIFIED_'):], ' '.join(sorted(v))))
elif isinstance(v, list):
if isinstance(v, list):
for item in v:
backend_file.write('%s += %s\n' % (k, item))
elif isinstance(v, bool):

View File

@ -20,9 +20,12 @@ from mozpack.files import FileFinder
from .common import CommonBackend
from ..frontend.data import (
Defines,
GeneratedSources,
HostSources,
Library,
LocalInclude,
VariablePassthru,
Sources,
UnifiedSources,
)
@ -97,11 +100,17 @@ class VisualStudioBackend(CommonBackend):
if hasattr(obj, 'config') and reldir not in self._paths_to_configs:
self._paths_to_configs[reldir] = obj.config
if isinstance(obj, VariablePassthru):
    # Legacy passthru variables: anything named *SRCS contributes
    # source files for this directory.
    for k, v in obj.variables.items():
        if k.endswith('SRCS'):
            s = self._paths_to_sources.setdefault(reldir, set())
            s.update(v)
elif isinstance(obj, (Sources, HostSources, GeneratedSources,
                      UnifiedSources)):
    # All source-like objects are recorded identically, so collapse the
    # four branches into one.  _add_sources is a bound method: do not
    # pass self explicitly (the old code did, raising TypeError), and
    # the UnifiedSources branch previously called a misspelled
    # _addr_sources.
    self._add_sources(reldir, obj)
elif isinstance(obj, Library):
    self._libs_to_paths[obj.basename] = reldir
@ -118,6 +127,10 @@ class VisualStudioBackend(CommonBackend):
else:
includes.append(os.path.join('$(TopSrcDir)', reldir, p))
def _add_sources(self, reldir, obj):
s = self._paths_to_sources.setdefault(reldir, set())
s.update(obj.files)
def consume_finished(self):
out_dir = self._out_dir
try:

View File

@ -726,6 +726,55 @@ class JavaJarData(object):
self.javac_flags = list(javac_flags)
class BaseSources(ContextDerived):
    """Base class for files to be compiled during the build."""

    __slots__ = (
        'files',        # list of file names sharing one canonical suffix
        'canonical_suffix',  # e.g. '.cpp' for .cc/.cxx/.cpp files
    )

    def __init__(self, context, files, canonical_suffix):
        ContextDerived.__init__(self, context)
        self.files = files
        self.canonical_suffix = canonical_suffix
class Sources(BaseSources):
    """Represents files to be compiled during the build.

    The inherited BaseSources constructor takes (context, files,
    canonical_suffix); the previous override merely forwarded to it, so
    it has been removed.
    """
class GeneratedSources(BaseSources):
    """Represents generated files to be compiled during the build.

    The inherited BaseSources constructor takes (context, files,
    canonical_suffix); the previous override merely forwarded to it, so
    it has been removed.
    """
class HostSources(BaseSources):
    """Represents files to be compiled for the host during the build.

    The inherited BaseSources constructor takes (context, files,
    canonical_suffix); the previous override merely forwarded to it, so
    it has been removed.
    """
class UnifiedSources(BaseSources):
    """Represents files to be compiled in a unified fashion during the build."""

    __slots__ = (
        'files_per_unified_file',  # chunk size for unified compilation
    )

    def __init__(self, context, files, canonical_suffix, files_per_unified_file=16):
        BaseSources.__init__(self, context, files, canonical_suffix)
        # Default of 16 matches the historical FILES_PER_UNIFIED_FILE value.
        self.files_per_unified_file = files_per_unified_file
class InstallationTarget(ContextDerived):
"""Describes the rules that affect where files get installed to."""

View File

@ -33,6 +33,7 @@ from .data import (
FinalTargetFiles,
GeneratedEventWebIDLFile,
GeneratedInclude,
GeneratedSources,
GeneratedWebIDLFile,
ExampleWebIDLInterface,
ExternalStaticLibrary,
@ -41,6 +42,7 @@ from .data import (
HostLibrary,
HostProgram,
HostSimpleProgram,
HostSources,
InstallationTarget,
IPDLFile,
JARManifest,
@ -57,10 +59,12 @@ from .data import (
Resources,
SharedLibrary,
SimpleProgram,
Sources,
StaticLibrary,
TestHarnessFiles,
TestWebIDLFile,
TestManifest,
UnifiedSources,
VariablePassthru,
WebIDLFile,
XPIDLFile,
@ -396,7 +400,6 @@ class TreeMetadataEmitter(LoggingMixin):
'EXTRA_DSO_LDOPTS',
'EXTRA_PP_COMPONENTS',
'FAIL_ON_WARNINGS',
'FILES_PER_UNIFIED_FILE',
'USE_STATIC_LIBS',
'GENERATED_FILES',
'IS_GYP_DIR',
@ -428,47 +431,6 @@ class TreeMetadataEmitter(LoggingMixin):
if context['NO_VISIBILITY_FLAGS']:
passthru.variables['VISIBILITY_FLAGS'] = ''
varmap = dict(
SOURCES={
'.s': 'ASFILES',
'.asm': 'ASFILES',
'.c': 'CSRCS',
'.m': 'CMSRCS',
'.mm': 'CMMSRCS',
'.cc': 'CPPSRCS',
'.cpp': 'CPPSRCS',
'.cxx': 'CPPSRCS',
'.S': 'SSRCS',
},
HOST_SOURCES={
'.c': 'HOST_CSRCS',
'.mm': 'HOST_CMMSRCS',
'.cc': 'HOST_CPPSRCS',
'.cpp': 'HOST_CPPSRCS',
'.cxx': 'HOST_CPPSRCS',
},
UNIFIED_SOURCES={
'.c': 'UNIFIED_CSRCS',
'.mm': 'UNIFIED_CMMSRCS',
'.cc': 'UNIFIED_CPPSRCS',
'.cpp': 'UNIFIED_CPPSRCS',
'.cxx': 'UNIFIED_CPPSRCS',
}
)
varmap.update(dict(('GENERATED_%s' % k, v) for k, v in varmap.items()
if k in ('SOURCES', 'UNIFIED_SOURCES')))
for variable, mapping in varmap.items():
for f in context[variable]:
ext = mozpath.splitext(f)[1]
if ext not in mapping:
raise SandboxValidationError(
'%s has an unknown file type.' % f, context)
l = passthru.variables.setdefault(mapping[ext], [])
l.append(f)
if variable.startswith('GENERATED_'):
l = passthru.variables.setdefault('GARBAGE', [])
l.append(f)
no_pgo = context.get('NO_PGO')
sources = context.get('SOURCES', [])
no_pgo_sources = [f for f in sources if sources[f].no_pgo]
@ -480,6 +442,63 @@ class TreeMetadataEmitter(LoggingMixin):
if no_pgo_sources:
passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources
# A map from "canonical suffixes" for a particular source file
# language to the range of suffixes associated with that language.
#
# We deliberately don't list the canonical suffix in the suffix list
# in the definition; we'll add it in programmatically after defining
# things.
suffix_map = {
'.s': set(['.asm']),
'.c': set(),
'.m': set(),
'.mm': set(),
'.cpp': set(['.cc', '.cxx']),
'.S': set(),
}
# The inverse of the above, mapping suffixes to their canonical suffix.
canonicalized_suffix_map = {}
for suffix, alternatives in suffix_map.iteritems():
alternatives.add(suffix)
for a in alternatives:
canonicalized_suffix_map[a] = suffix
def canonical_suffix_for_file(f):
return canonicalized_suffix_map[mozpath.splitext(f)[1]]
# A map from moz.build variables to the canonical suffixes of file
# kinds that can be listed therein.
all_suffixes = list(suffix_map.keys())
varmap = dict(
SOURCES=(Sources, all_suffixes),
HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']),
UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']),
GENERATED_SOURCES=(GeneratedSources, all_suffixes),
)
for variable, (klass, suffixes) in varmap.items():
allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])
# First ensure that we haven't been given filetypes that we don't
# recognize.
for f in context[variable]:
ext = mozpath.splitext(f)[1]
if ext not in allowed_suffixes:
raise SandboxValidationError(
'%s has an unknown file type.' % f, context)
if variable.startswith('GENERATED_'):
l = passthru.variables.setdefault('GARBAGE', [])
l.append(f)
# Now sort the files to let groupby work.
sorted_files = sorted(context[variable], key=canonical_suffix_for_file)
for canonical_suffix, files in itertools.groupby(sorted_files, canonical_suffix_for_file):
arglist = [context, list(files), canonical_suffix]
if variable.startswith('UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context:
arglist.append(context['FILES_PER_UNIFIED_FILE'])
yield klass(*arglist)
sources_with_flags = [f for f in sources if sources[f].flags]
for f in sources_with_flags:
ext = mozpath.splitext(f)[1]

View File

@ -0,0 +1,14 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
SOURCES += ['bar.s', 'foo.asm']
HOST_SOURCES += ['bar.cpp', 'foo.cpp']
HOST_SOURCES += ['bar.c', 'foo.c']
SOURCES += ['bar.c', 'foo.c']
SOURCES += ['bar.mm', 'foo.mm']
SOURCES += ['baz.S', 'foo.S']

View File

@ -2,20 +2,9 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
SOURCES += ['bar.s', 'foo.asm']
EXTRA_COMPONENTS = ['bar.js', 'foo.js']
EXTRA_PP_COMPONENTS = ['bar.pp.js', 'foo.pp.js']
HOST_SOURCES += ['bar.cpp', 'foo.cpp']
HOST_SOURCES += ['bar.c', 'foo.c']
SOURCES += ['bar.c', 'foo.c']
SOURCES += ['bar.mm', 'foo.mm']
SOURCES += ['baz.S', 'foo.S']
FAIL_ON_WARNINGS = True
MSVC_ENABLE_PGO = True
NO_VISIBILITY_FLAGS = True

View File

@ -260,18 +260,6 @@ class TestRecursiveMakeBackend(BackendTester):
lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
expected = {
'ASFILES': [
'ASFILES += bar.s',
'ASFILES += foo.asm',
],
'CMMSRCS': [
'CMMSRCS += bar.mm',
'CMMSRCS += foo.mm',
],
'CSRCS': [
'CSRCS += bar.c',
'CSRCS += foo.c',
],
'DISABLE_STL_WRAPPING': [
'DISABLE_STL_WRAPPING := 1',
],
@ -286,21 +274,9 @@ class TestRecursiveMakeBackend(BackendTester):
'FAIL_ON_WARNINGS': [
'FAIL_ON_WARNINGS := 1',
],
'HOST_CPPSRCS': [
'HOST_CPPSRCS += bar.cpp',
'HOST_CPPSRCS += foo.cpp',
],
'HOST_CSRCS': [
'HOST_CSRCS += bar.c',
'HOST_CSRCS += foo.c',
],
'MSVC_ENABLE_PGO': [
'MSVC_ENABLE_PGO := 1',
],
'SSRCS': [
'SSRCS += baz.S',
'SSRCS += foo.S',
],
'VISIBILITY_FLAGS': [
'VISIBILITY_FLAGS :=',
],
@ -343,6 +319,44 @@ class TestRecursiveMakeBackend(BackendTester):
found = [str for str in lines if str.startswith(var)]
self.assertEqual(found, val)
def test_sources(self):
    """Ensure SOURCES and HOST_SOURCES are handled properly."""
    env = self._consume('sources', RecursiveMakeBackend)

    backend_path = mozpath.join(env.topobjdir, 'backend.mk')
    lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]

    expected = {
        'ASFILES': [
            'ASFILES += bar.s',
            'ASFILES += foo.asm',
        ],
        'CMMSRCS': [
            'CMMSRCS += bar.mm',
            'CMMSRCS += foo.mm',
        ],
        'CSRCS': [
            'CSRCS += bar.c',
            'CSRCS += foo.c',
        ],
        'HOST_CPPSRCS': [
            'HOST_CPPSRCS += bar.cpp',
            'HOST_CPPSRCS += foo.cpp',
        ],
        'HOST_CSRCS': [
            'HOST_CSRCS += bar.c',
            'HOST_CSRCS += foo.c',
        ],
        'SSRCS': [
            'SSRCS += baz.S',
            'SSRCS += foo.S',
        ],
    }

    for var, val in expected.items():
        # Use a descriptive loop variable; the old code shadowed the
        # builtin "str".
        found = [line for line in lines if line.startswith(var)]
        self.assertEqual(found, val)
def test_exports(self):
"""Ensure EXPORTS is handled properly."""
env = self._consume('exports', RecursiveMakeBackend)

View File

@ -0,0 +1,30 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
GENERATED_SOURCES += [
'a.cpp',
'b.cc',
'c.cxx',
]
GENERATED_SOURCES += [
'd.c',
]
GENERATED_SOURCES += [
'e.m',
]
GENERATED_SOURCES += [
'f.mm',
]
GENERATED_SOURCES += [
'g.S',
]
GENERATED_SOURCES += [
'h.s',
'i.asm',
]

View File

@ -0,0 +1,18 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
HOST_SOURCES += [
'a.cpp',
'b.cc',
'c.cxx',
]
HOST_SOURCES += [
'd.c',
]
HOST_SOURCES += [
'e.mm',
'f.mm',
]

View File

@ -0,0 +1,30 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
SOURCES += [
'a.cpp',
'b.cc',
'c.cxx',
]
SOURCES += [
'd.c',
]
SOURCES += [
'e.m',
]
SOURCES += [
'f.mm',
]
SOURCES += [
'g.S',
]
SOURCES += [
'h.s',
'i.asm',
]

View File

@ -0,0 +1,21 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
UNIFIED_SOURCES += [
'bar.cxx',
'foo.cpp',
'quux.cc',
]
UNIFIED_SOURCES += [
'objc1.mm',
'objc2.mm',
]
UNIFIED_SOURCES += [
'c1.c',
'c2.c',
]
FILES_PER_UNIFIED_FILE = 32

View File

@ -2,20 +2,9 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
SOURCES += ['fans.asm', 'tans.s']
EXTRA_COMPONENTS=['fans.js', 'tans.js']
EXTRA_PP_COMPONENTS=['fans.pp.js', 'tans.pp.js']
HOST_SOURCES += ['fans.cpp', 'tans.cpp']
HOST_SOURCES += ['fans.c', 'tans.c']
SOURCES += ['fans.c', 'tans.c']
SOURCES += ['fans.mm', 'tans.mm']
SOURCES += ['bans.S', 'fans.S']
FAIL_ON_WARNINGS = True
MSVC_ENABLE_PGO = True

View File

@ -15,6 +15,8 @@ from mozbuild.frontend.data import (
DirectoryTraversal,
Exports,
GeneratedInclude,
GeneratedSources,
HostSources,
IPDLFile,
JARManifest,
LocalInclude,
@ -22,9 +24,11 @@ from mozbuild.frontend.data import (
ReaderSummary,
Resources,
SimpleProgram,
Sources,
StaticLibrary,
TestHarnessFiles,
TestManifest,
UnifiedSources,
VariablePassthru,
)
from mozbuild.frontend.emitter import TreeMetadataEmitter
@ -152,18 +156,12 @@ class TestEmitterBasic(unittest.TestCase):
self.assertIsInstance(objs[0], VariablePassthru)
wanted = {
'ASFILES': ['fans.asm', 'tans.s'],
'CMMSRCS': ['fans.mm', 'tans.mm'],
'CSRCS': ['fans.c', 'tans.c'],
'DISABLE_STL_WRAPPING': True,
'EXTRA_COMPONENTS': ['fans.js', 'tans.js'],
'EXTRA_PP_COMPONENTS': ['fans.pp.js', 'tans.pp.js'],
'FAIL_ON_WARNINGS': True,
'HOST_CPPSRCS': ['fans.cpp', 'tans.cpp'],
'HOST_CSRCS': ['fans.c', 'tans.c'],
'MSVC_ENABLE_PGO': True,
'NO_DIST_INSTALL': True,
'SSRCS': ['bans.S', 'fans.S'],
'VISIBILITY_FLAGS': '',
'RCFILE': 'foo.rc',
'RESFILE': 'bar.res',
@ -617,5 +615,101 @@ class TestEmitterBasic(unittest.TestCase):
defines[lib.basename] = ' '.join(lib.defines.get_defines())
self.assertEqual(expected, defines)
def test_sources(self):
    """Test that SOURCES works properly."""
    reader = self.reader('sources')
    objs = self.read_topsrcdir(reader)

    # One Sources object is emitted per canonical suffix.
    self.assertEqual(len(objs), 6)
    for emitted in objs:
        self.assertIsInstance(emitted, Sources)

    by_suffix = {emitted.canonical_suffix: emitted for emitted in objs}
    self.assertEqual(len(by_suffix), 6)

    expected = {
        '.cpp': ['a.cpp', 'b.cc', 'c.cxx'],
        '.c': ['d.c'],
        '.m': ['e.m'],
        '.mm': ['f.mm'],
        '.S': ['g.S'],
        '.s': ['h.s', 'i.asm'],
    }
    for suffix, files in expected.items():
        self.assertEqual(by_suffix[suffix].files, files)
def test_generated_sources(self):
    """Test that GENERATED_SOURCES works properly."""
    # Renamed from test_sources: the duplicate method name shadowed the
    # earlier SOURCES test, so that test never ran.
    reader = self.reader('generated-sources')
    objs = self.read_topsrcdir(reader)

    # 6 GeneratedSources objects plus one VariablePassthru for GARBAGE.
    self.assertEqual(len(objs), 7)

    # GENERATED_SOURCES automatically generate GARBAGE definitions.
    garbage = [o for o in objs if isinstance(o, VariablePassthru)]
    self.assertEqual(len(garbage), 1)

    generated_sources = [o for o in objs if isinstance(o, GeneratedSources)]
    self.assertEqual(len(generated_sources), 6)

    suffix_map = {obj.canonical_suffix: obj for obj in generated_sources}
    self.assertEqual(len(suffix_map), 6)

    expected = {
        '.cpp': ['a.cpp', 'b.cc', 'c.cxx'],
        '.c': ['d.c'],
        '.m': ['e.m'],
        '.mm': ['f.mm'],
        '.S': ['g.S'],
        '.s': ['h.s', 'i.asm'],
    }
    for suffix, files in expected.items():
        sources = suffix_map[suffix]
        self.assertEqual(sources.files, files)
def test_host_sources(self):
    """Test that HOST_SOURCES works properly."""
    reader = self.reader('host-sources')
    objs = self.read_topsrcdir(reader)

    # One HostSources object is emitted per canonical suffix.
    self.assertEqual(len(objs), 3)
    for emitted in objs:
        self.assertIsInstance(emitted, HostSources)

    by_suffix = {emitted.canonical_suffix: emitted for emitted in objs}
    self.assertEqual(len(by_suffix), 3)

    expected = {
        '.cpp': ['a.cpp', 'b.cc', 'c.cxx'],
        '.c': ['d.c'],
        '.mm': ['e.mm', 'f.mm'],
    }
    for suffix, files in expected.items():
        self.assertEqual(by_suffix[suffix].files, files)
def test_unified_sources(self):
    """Test that UNIFIED_SOURCES works properly."""
    reader = self.reader('unified-sources')
    objs = self.read_topsrcdir(reader)

    # One UnifiedSources object is emitted per canonical suffix.
    self.assertEqual(len(objs), 3)
    for emitted in objs:
        self.assertIsInstance(emitted, UnifiedSources)

    by_suffix = {emitted.canonical_suffix: emitted for emitted in objs}
    self.assertEqual(len(by_suffix), 3)

    expected = {
        '.cpp': ['bar.cxx', 'foo.cpp', 'quux.cc'],
        '.mm': ['objc1.mm', 'objc2.mm'],
        '.c': ['c1.c', 'c2.c'],
    }
    for suffix, files in expected.items():
        sources = by_suffix[suffix]
        self.assertEqual(sources.files, files)
        # FILES_PER_UNIFIED_FILE = 32 in the fixture moz.build.
        self.assertEqual(sources.files_per_unified_file, 32)
if __name__ == '__main__':
main()