Bug 1530587 - Don't optimize jars without preloading/reordering data. r=chmanchester

Optimizing jars without preloading/reordering data only moves the
jar central directory to the beginning of the file, which, without
preloading information, is not very useful. Let's just stop doing it if
there's not going to be preloading/reordering information at all.

Differential Revision: https://phabricator.services.mozilla.com/D21170

--HG--
extra : moz-landing-system : lando
Mike Hommey 2019-02-27 01:26:46 +00:00
parent 8b285d0a8f
commit c7022eb656
17 changed files with 38 additions and 65 deletions
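
The bulk of the diff below is mechanical: call sites stop passing optimize=False, and JarWriter/Jarrer drop the keyword entirely. A minimal sketch of the constructors after this patch (the archive and entry names are made up for illustration):

```python
from mozpack.copier import Jarrer
from mozpack.mozjar import JarWriter

# Callers no longer pass optimize=False; a writer with no preload data simply
# produces a regular zip layout.
with JarWriter(file='archive.zip', compress_level=5) as jar:
    jar.add('foo', 'foo')

# Likewise, Jarrer(optimize=False) becomes plain Jarrer().
jarrer = Jarrer()
```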

View File

@ -235,7 +235,7 @@ def write_zip(zip_path, prefix=None):
if isinstance(prefix, unicode): # noqa Special case for Python 2
prefix = prefix.encode('utf-8')
-with JarWriter(file=zip_path, optimize=False, compress_level=5) as zip:
+with JarWriter(file=zip_path, compress_level=5) as zip:
manifest = {}
for p, data, mode in resolve_files_and_hash(manifest):
print(p)

View File

@ -30,7 +30,7 @@ def package_fennec_apk(inputs=[], omni_ja=None,
features_dirs=[],
root_files=[],
verbose=False):
-jarrer = Jarrer(optimize=False)
+jarrer = Jarrer()
# First, take input files. The contents of the later files overwrites the
# content of earlier files. Multidexing requires special care: we want a

View File

@ -19,7 +19,7 @@ def make_archive(archive_name, base, exclude, include):
include = ['*']
archive_basename = os.path.basename(archive_name)
with open(archive_name, 'wb') as fh:
-with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
+with JarWriter(fileobj=fh, compress_level=5) as writer:
for pat in include:
for p, f in finder.find(pat):
print(' Adding to "%s":\n\t"%s"' % (archive_basename, p))

View File

@ -775,7 +775,7 @@ def main(argv):
create_tar_gz_from_files(fh, files, compresslevel=5)
file_count = len(files)
elif out_file.endswith('.zip'):
-with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
+with JarWriter(fileobj=fh, compress_level=5) as writer:
for p, f in res:
writer.add(p.encode('utf-8'), f.read(), mode=f.mode,
skip_duplicates=True)

View File

@ -31,7 +31,7 @@ def main(args):
help="Path to files to add to zip")
args = parser.parse_args(args)
-jarrer = Jarrer(optimize=False)
+jarrer = Jarrer()
with errors.accumulate():
finder = FileFinder(args.C, find_executables=args.strip)

View File

@ -214,7 +214,7 @@ class ArtifactJob(object):
from mozbuild.action.test_archive import OBJDIR_TEST_FILES
added_entry = False
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
reader = JarReader(filename)
for filename, entry in reader.entries.iteritems():
for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
@ -250,7 +250,7 @@ class ArtifactJob(object):
from mozbuild.action.test_archive import OBJDIR_TEST_FILES
added_entry = False
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
with tarfile.open(filename) as reader:
for filename, entry in TarFinder(filename, reader):
for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
@ -284,7 +284,7 @@ class ArtifactJob(object):
patterns=LinuxArtifactJob.test_artifact_patterns))
def process_symbols_archive(self, filename, processed_filename):
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
reader = JarReader(filename)
for filename in reader.entries:
destpath = mozpath.join('crashreporter-symbols', filename)
@ -294,7 +294,7 @@ class ArtifactJob(object):
writer.add(destpath.encode('utf-8'), reader[filename])
def process_host_bin(self, filename, processed_filename):
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
# Turn 'HASH-mar.exe' into 'mar.exe'. `filename` is a path on disk
# without any of the path parts of the artifact, so we must inject
# the desired `host/bin` prefix here.
@ -315,7 +315,7 @@ class AndroidArtifactJob(ArtifactJob):
def process_package_artifact(self, filename, processed_filename):
# Extract all .so files into the root, which will get copied into dist/bin.
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
continue
@ -353,7 +353,7 @@ class LinuxArtifactJob(ArtifactJob):
def process_package_artifact(self, filename, processed_filename):
added_entry = False
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
with tarfile.open(filename) as reader:
for p, f in UnpackFinder(TarFinder(filename, reader)):
if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
@ -449,7 +449,7 @@ class MacArtifactJob(ArtifactJob):
]),
]
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
root, paths = paths_no_keep_path
finder = UnpackFinder(mozpath.join(source, root))
for path in paths:
@ -514,7 +514,7 @@ class WinArtifactJob(ArtifactJob):
def process_package_artifact(self, filename, processed_filename):
added_entry = False
-with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+with JarWriter(file=processed_filename, compress_level=5) as writer:
for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
continue

View File

@ -43,7 +43,7 @@ def package_coverage_data(root, output_file):
root = root.encode('utf-8')
finder = FileFinder(root)
-jarrer = Jarrer(optimize=False)
+jarrer = Jarrer()
for p, f in finder.find("**/*.gcno"):
jarrer.add(p, f)

View File

@ -507,13 +507,12 @@ class Jarrer(FileRegistry, BaseFile):
FileRegistry with the ability to copy and pack the registered files as a
jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
'''
-def __init__(self, compress=True, optimize=True):
+def __init__(self, compress=True):
'''
Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
-for details on the compress and optimize arguments.
+for details on the compress argument.
'''
self.compress = compress
-self.optimize = optimize
self._preload = []
self._compress_options = {} # Map path to compress boolean option.
FileRegistry.__init__(self)
@ -574,8 +573,7 @@ class Jarrer(FileRegistry, BaseFile):
old_contents = dict([(f.filename, f) for f in old_jar])
-with JarWriter(fileobj=dest, compress=self.compress,
-optimize=self.optimize) as jar:
+with JarWriter(fileobj=dest, compress=self.compress) as jar:
for path, file in self:
compress = self._compress_options.get(path, self.compress)
# Temporary: Because l10n repacks can't handle brotli just yet,
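
Note that Jarrer keeps its _preload list, so at this level too the optimized layout now depends only on whether anything was preloaded before copy(). A small sketch, assuming the post-patch API (the paths and contents are placeholders):

```python
from mozpack.copier import Jarrer
from mozpack.files import GeneratedFile

jarrer = Jarrer(compress=True)
jarrer.add('chrome/content/browser.xul', GeneratedFile('<window/>'))
jarrer.add('components/nsExample.js', GeneratedFile('// example'))

# Without this call the copy below writes a plain jar; with it, the jar gets
# the reordered layout with the preload size stored up front.
jarrer.preload(['components/nsExample.js'])
jarrer.copy('omni.ja')
```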

View File

@ -474,8 +474,7 @@ class JarWriter(object):
archives as well as jar archives optimized for Gecko. See the documentation
for the close() member function for a description of both layouts.
'''
-def __init__(self, file=None, fileobj=None, compress=True, optimize=True,
-compress_level=9):
+def __init__(self, file=None, fileobj=None, compress=True, compress_level=9):
'''
Initialize a Jar archive in the given file. Use the given file-like
object if one is given instead of opening the given file name.
@ -495,7 +494,6 @@ class JarWriter(object):
self._compress_level = compress_level
self._contents = OrderedDict()
self._last_preloaded = None
-self._optimize = optimize
def __enter__(self):
'''
@ -565,11 +563,10 @@ class JarWriter(object):
self._contents.values(), 0)
# On optimized archives, store the preloaded size and the central
# directory entries, followed by the first end of central directory.
-if self._optimize:
+if preload_size:
end['cdir_offset'] = 4
offset = end['cdir_size'] + end['cdir_offset'] + end.size
-if preload_size:
-preload_size += offset
+preload_size += offset
self._data.write(struct.pack('<I', preload_size))
for entry, _ in self._contents.itervalues():
entry['offset'] += offset
@ -580,7 +577,7 @@ class JarWriter(object):
self._data.write(headers[entry].serialize())
self._data.write(content)
# On non optimized archives, store the central directory entries.
-if not self._optimize:
+if not preload_size:
end['cdir_offset'] = offset
for entry, _ in self._contents.itervalues():
self._data.write(entry.serialize())
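
With self._optimize gone, the layout decision above keys off preload_size alone: the 4-byte preload header and the up-front central directory are written only when at least one entry was preloaded. A hedged sketch of how this surfaces through the public API (file and entry names are illustrative; is_optimized and last_preloaded are the JarReader properties the unpacker relies on, see the UnpackFinder hunk further down):

```python
from mozpack.mozjar import JarReader, JarWriter

# Writing: calling preload() is now what triggers the optimized, reordered layout.
with JarWriter(file='omni.ja', compress_level=5) as jar:
    jar.add('chrome.manifest', 'manifest chrome/chrome.manifest')
    jar.add('greprefs.js', 'pref("example", true);')
    jar.preload(['greprefs.js'])  # listed entries are moved to the front

# Reading back: the reader reports whether the preload header is present and
# which entry was the last preloaded one.
reader = JarReader('omni.ja')
print(reader.is_optimized)    # True, because preload data was written
print(reader.last_preloaded)  # 'greprefs.js'
```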

View File

@ -190,20 +190,19 @@ class JarFormatter(PiecemealFormatter):
manifest entries for resources are registered after chrome manifest
entries.
'''
-def __init__(self, copier, compress=True, optimize=True):
+def __init__(self, copier, compress=True):
PiecemealFormatter.__init__(self, copier)
self._compress=compress
-self._optimize=optimize
def _add_base(self, base, addon=False):
if addon is True:
-jarrer = Jarrer(self._compress, self._optimize)
+jarrer = Jarrer(self._compress)
self.copier.add(base + '.xpi', jarrer)
self._sub_formatter[base] = FlatSubFormatter(jarrer)
else:
self._sub_formatter[base] = JarSubFormatter(
FileRegistrySubtree(base, self.copier),
-self._compress, self._optimize)
+self._compress)
class JarSubFormatter(PiecemealFormatter):
@ -213,11 +212,10 @@ class JarSubFormatter(PiecemealFormatter):
dispatches the chrome data to, and a FlatSubFormatter for the non-chrome
files.
'''
-def __init__(self, copier, compress=True, optimize=True):
+def __init__(self, copier, compress=True):
PiecemealFormatter.__init__(self, copier)
self._frozen_chrome = False
self._compress = compress
-self._optimize = optimize
self._sub_formatter[''] = FlatSubFormatter(copier)
def _jarize(self, entry, relpath):
@ -239,7 +237,7 @@ class JarSubFormatter(PiecemealFormatter):
chromepath, entry = self._jarize(entry, entry.relpath)
assert not self._frozen_chrome
if chromepath not in self._sub_formatter:
-jarrer = Jarrer(self._compress, self._optimize)
+jarrer = Jarrer(self._compress)
self.copier.add(chromepath + '.jar', jarrer)
self._sub_formatter[chromepath] = FlatSubFormatter(jarrer)
elif isinstance(entry, ManifestResource) and \
@ -254,9 +252,8 @@ class OmniJarFormatter(JarFormatter):
'''
Formatter for the omnijar package format.
'''
-def __init__(self, copier, omnijar_name, compress=True, optimize=True,
-non_resources=()):
-JarFormatter.__init__(self, copier, compress, optimize)
+def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
+JarFormatter.__init__(self, copier, compress)
self._omnijar_name = omnijar_name
self._non_resources = non_resources
@ -271,7 +268,7 @@ class OmniJarFormatter(JarFormatter):
self.copier.add(path, ManifestFile(''))
self._sub_formatter[base] = OmniJarSubFormatter(
FileRegistrySubtree(base, self.copier), self._omnijar_name,
-self._compress, self._optimize, self._non_resources)
+self._compress, self._non_resources)
class OmniJarSubFormatter(PiecemealFormatter):
@ -280,15 +277,13 @@ class OmniJarSubFormatter(PiecemealFormatter):
that dispatches between a FlatSubFormatter for the resources data and
another FlatSubFormatter for the other files.
'''
-def __init__(self, copier, omnijar_name, compress=True, optimize=True,
-non_resources=()):
+def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
PiecemealFormatter.__init__(self, copier)
self._omnijar_name = omnijar_name
self._compress = compress
-self._optimize = optimize
self._non_resources = non_resources
self._sub_formatter[''] = FlatSubFormatter(copier)
-jarrer = Jarrer(self._compress, self._optimize)
+jarrer = Jarrer(self._compress)
self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer)
def _get_base(self, path):
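
At the formatter level the change is equally mechanical: the formatters simply stop threading optimize through to the Jarrer instances they create. A sketch of constructing them after this patch (the copier and the omnijar name are placeholders):

```python
from mozpack.copier import FileCopier
from mozpack.packager.formats import JarFormatter, OmniJarFormatter

copier = FileCopier()

# Jar format: one Jarrer per chrome jar, with compression as the only knob.
jar_formatter = JarFormatter(copier, compress=True)

# Omni-jar format: resources end up in the named omnijar, again without an
# optimize flag.
omni_formatter = OmniJarFormatter(copier, 'omni.ja', compress=True,
                                  non_resources=())
```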

View File

@ -298,11 +298,9 @@ def repack(source, l10n, extra_l10n={}, non_resources=[], non_chrome=set()):
formatter = FlatFormatter(copier)
elif app_finder.kind == 'jar':
formatter = JarFormatter(copier,
-optimize=app_finder.optimizedjars,
compress=compress)
elif app_finder.kind == 'omni':
formatter = OmniJarFormatter(copier, app_finder.omnijar,
-optimize=app_finder.optimizedjars,
compress=compress,
non_resources=non_resources)

View File

@ -50,7 +50,6 @@ class UnpackFinder(BaseFinder):
self.kind = 'flat'
self.omnijar = None
self.jarlogs = {}
-self.optimizedjars = False
self.compressed = False
jars = set()
@ -141,8 +140,6 @@ class UnpackFinder(BaseFinder):
the preloaded entries it has.
'''
jar = JarReader(fileobj=file.open())
-if jar.is_optimized:
-self.optimizedjars = True
self.compressed = max(self.compressed, jar.compression)
if jar.last_preloaded:
jarlog = jar.entries.keys()

View File

@ -139,11 +139,9 @@ class TestDeflaterMemoryView(TestDeflater):
class TestJar(unittest.TestCase):
-optimize = False
def test_jar(self):
s = MockDest()
-with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+with JarWriter(fileobj=s) as jar:
jar.add('foo', 'foo')
self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
@ -172,8 +170,7 @@ class TestJar(unittest.TestCase):
'backslashes in filenames on POSIX platform are untouched')
s = MockDest()
-with JarWriter(fileobj=s, compress=False,
-optimize=self.optimize) as jar:
+with JarWriter(fileobj=s, compress=False) as jar:
jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
jar.add('foo', 'foo')
jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)
@ -225,13 +222,13 @@ class TestJar(unittest.TestCase):
def test_rejar(self):
s = MockDest()
-with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+with JarWriter(fileobj=s) as jar:
jar.add('foo', 'foo')
jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
new = MockDest()
-with JarWriter(fileobj=new, optimize=self.optimize) as jar:
+with JarWriter(fileobj=new) as jar:
for j in JarReader(fileobj=s):
jar.add(j.filename, j)
@ -252,7 +249,7 @@ class TestJar(unittest.TestCase):
def test_add_from_finder(self):
s = MockDest()
-with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+with JarWriter(fileobj=s) as jar:
finder = FileFinder(test_data_path)
for p, f in finder.find('test_data'):
jar.add('test_data', f)
@ -265,10 +262,6 @@ class TestJar(unittest.TestCase):
self.assertEqual(files[0].read(), 'test_data')
-class TestOptimizeJar(TestJar):
-optimize = True
class TestPreload(unittest.TestCase):
def test_preload(self):
s = MockDest()
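
The TestOptimizeJar subclass disappears because optimize is no longer a writer flag. A hedged sketch of what a replacement check could look like (not part of this patch; it reuses the module's MockDest helper and assumes JarWriter.preload and JarReader.is_optimized behave as described above):

```python
import unittest

from mozpack.mozjar import JarReader, JarWriter
from mozpack.test.test_files import MockDest


class TestJarOptimization(unittest.TestCase):
    def test_optimized_only_with_preload(self):
        # No preload data: the archive keeps the plain zip layout.
        s = MockDest()
        with JarWriter(fileobj=s) as jar:
            jar.add('foo', 'foo')
        self.assertFalse(JarReader(fileobj=s).is_optimized)

        # Preload data present: the optimized layout is written.
        s = MockDest()
        with JarWriter(fileobj=s) as jar:
            jar.add('foo', 'foo')
            jar.preload(['foo'])
        self.assertTrue(JarReader(fileobj=s).is_optimized)
```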

View File

@ -27,7 +27,7 @@ class MachCommands(MachCommandBase):
if os.path.isfile(dest):
os.unlink(dest)
-jarrer = Jarrer(optimize=False)
+jarrer = Jarrer()
for p, f in FileFinder(src).find('*'):
jarrer.add(p, f)
jarrer.copy(dest)

View File

@ -34,7 +34,6 @@ stage-package: multilocale.txt locale-manifest.in $(MOZ_PKG_MANIFEST) $(MOZ_PKG_
$(addprefix --js-binary ,$(JS_BINARY)) \
) \
$(addprefix --jarlog ,$(wildcard $(JARLOG_FILE_AB_CD))) \
-$(if $(OPTIMIZEJARS),--optimizejars) \
$(addprefix --compress ,$(JAR_COMPRESSION)) \
$(MOZ_PKG_MANIFEST) '$(DIST)' '$(DIST)'/$(MOZ_PKG_DIR)$(if $(MOZ_PKG_MANIFEST),,$(_BINPATH)) \
$(if $(filter omni,$(MOZ_PACKAGER_FORMAT)),$(if $(NON_OMNIJAR_FILES),--non-resource $(NON_OMNIJAR_FILES)))

View File

@ -210,8 +210,6 @@ def main():
'minification verification will not be performed.')
parser.add_argument('--jarlog', default='', help='File containing jar ' +
'access logs')
-parser.add_argument('--optimizejars', action='store_true', default=False,
-help='Enable jar optimizations')
parser.add_argument('--compress', choices=('none', 'deflate', 'brotli'),
default='deflate',
help='Use given jar compression (default: deflate)')
@ -242,12 +240,11 @@ def main():
if args.format == 'flat':
formatter = FlatFormatter(copier)
elif args.format == 'jar':
-formatter = JarFormatter(copier, compress=compress, optimize=args.optimizejars)
+formatter = JarFormatter(copier, compress=compress)
elif args.format == 'omni':
formatter = OmniJarFormatter(copier,
buildconfig.substs['OMNIJAR_NAME'],
compress=compress,
-optimize=args.optimizejars,
non_resources=args.non_resource)
else:
errors.fatal('Unknown format: %s' % args.format)

View File

@ -317,7 +317,6 @@ ifndef MOZ_PACKAGER_FORMAT
endif
ifneq (android,$(MOZ_WIDGET_TOOLKIT))
-OPTIMIZEJARS = 1
JAR_COMPRESSION ?= none
endif