Bug 1356142 - Use mozbuild.util.mkdir instead of os.makedirs for the artifact cache directory. r=gps

At the same time, make the artifact cache directory not indexed, and
move the directory creation to the base classes that actually use it.

--HG--
extra : rebase_source : 62994499afceb5166c0041148dcc702f87166fdc
Mike Hommey 2017-04-13 16:35:20 +09:00
parent 7b4adcc548
commit a8cb87ea48
3 changed files with 17 additions and 20 deletions
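For context, this patch leans on mozbuild.util.mkdir to do two things: create the cache directory if it does not already exist, and mark it so the OS search indexer leaves it alone. The sketch below is illustrative only, not the real helper; the '.metadata_never_index' marker file is the mechanism the register_file() and test hunks below have to account for, and the real helper presumably uses other platform-specific mechanisms (e.g. file attributes on Windows) as well.

import errno
import os


def mkdir(path, not_indexed=False):
    # Idempotent directory creation: an already existing directory is fine.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    if not_indexed:
        # On macOS, Spotlight skips directories containing this marker file.
        marker = os.path.join(path, '.metadata_never_index')
        open(marker, 'a').close()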


@@ -71,6 +71,7 @@ from taskgraph.util.taskcluster import (
 from mozbuild.util import (
     ensureParentDir,
     FileAvoidWrite,
+    mkdir,
 )
 import mozinstall
 from mozpack.files import (
@@ -533,6 +534,7 @@ class CacheManager(object):
         self._cache = pylru.lrucache(cache_size, callback=cache_callback)
         self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
         self._log = log
+        mkdir(cache_dir, not_indexed=True)
 
     def log(self, *args, **kwargs):
         if self._log:
@@ -703,7 +705,8 @@ class ArtifactPersistLimit(PersistLimit):
             self._log(*args, **kwargs)
 
     def register_file(self, path):
-        if path.endswith('.pickle'):
+        if path.endswith('.pickle') or \
+                os.path.basename(path) == '.metadata_never_index':
             return
         if not self._registering_dir:
             # Touch the file so that subsequent calls to a mach artifact
@@ -752,6 +755,7 @@ class ArtifactCache(object):
     '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''
 
     def __init__(self, cache_dir, log=None, skip_cache=False):
+        mkdir(cache_dir, not_indexed=True)
         self._cache_dir = cache_dir
         self._log = log
         self._skip_cache = skip_cache
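With the mkdir() call moved into the constructors above, callers no longer need to prepare the cache directory before instantiating these classes. A hypothetical usage sketch (the path and URL are made up; the module path is assumed):

import os
import tempfile

from mozbuild.artifacts import ArtifactCache  # assumed module path

cache_dir = os.path.join(tempfile.gettempdir(), 'example-artifact-cache')
cache = ArtifactCache(cache_dir)  # creates cache_dir and marks it not indexed
# cache.fetch('http://server/foo') would then download into cache_dir; the
# persist limit skips the '.metadata_never_index' marker, so the marker is
# never counted against, or purged from, the cache.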


@@ -1483,12 +1483,6 @@ class PackageFrontend(MachCommandBase):
         state_dir = self._mach_context.state_dir
         cache_dir = os.path.join(state_dir, 'package-frontend')
 
-        try:
-            os.makedirs(cache_dir)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-
         import which
 
         here = os.path.abspath(os.path.dirname(__file__))
@@ -1590,11 +1584,6 @@
             self.log_manager.structured_filter)
         if not cache_dir:
             cache_dir = os.path.join(self._mach_context.state_dir, 'toolchains')
-        try:
-            os.makedirs(cache_dir)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
 
         tooltool_url = (tooltool_url or
                         'https://api.pub.build.mozilla.org/tooltool').rstrip('/')
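The deleted lines in both hunks are the classic "create the directory unless it already exists" idiom. They become redundant because, per the hunks in the first file, CacheManager.__init__ and ArtifactCache.__init__ now create cache_dir themselves. For reference, a sketch of the idiom and its alternatives (illustrative; the path is made up):

import errno
import os

cache_dir = '/tmp/example-cache'  # hypothetical path

# The removed idiom: tolerate EEXIST, re-raise any other error.
try:
    os.makedirs(cache_dir)
except OSError as e:
    if e.errno != errno.EEXIST:
        raise

# On Python 3 the same thing is a one-liner:
#     os.makedirs(cache_dir, exist_ok=True)
# and what this patch uses instead, which also disables indexing:
#     from mozbuild.util import mkdir
#     mkdir(cache_dir, not_indexed=True)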


@@ -79,27 +79,31 @@ class TestArtifactCache(unittest.TestCase):
         self.timestamp += 2
         self._real_utime(path, times)
 
+    def listtmpdir(self):
+        return [p for p in os.listdir(self.tmpdir)
+                if p != '.metadata_never_index']
+
     def test_artifact_cache_persistence(self):
         cache = ArtifactCache(self.tmpdir)
         cache._download_manager.session = FakeSession()
 
         path = cache.fetch('http://server/foo')
         expected = [os.path.basename(path)]
-        self.assertEqual(os.listdir(self.tmpdir), expected)
+        self.assertEqual(self.listtmpdir(), expected)
 
         path = cache.fetch('http://server/bar')
         expected.append(os.path.basename(path))
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
         # We're downloading more than the cache allows us, but since it's all
         # in the same session, no purge happens.
         path = cache.fetch('http://server/qux')
         expected.append(os.path.basename(path))
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
         path = cache.fetch('http://server/fuga')
         expected.append(os.path.basename(path))
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
         cache = ArtifactCache(self.tmpdir)
         cache._download_manager.session = FakeSession()
@@ -109,14 +113,14 @@ class TestArtifactCache(unittest.TestCase):
         path = cache.fetch('http://server/hoge')
         expected.append(os.path.basename(path))
         expected = expected[2:]
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
         # Downloading a file already in the cache leaves the cache untouched
         cache = ArtifactCache(self.tmpdir)
         cache._download_manager.session = FakeSession()
 
         path = cache.fetch('http://server/qux')
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
         # bar was purged earlier, re-downloading it should purge the oldest
         # downloaded file, which at this point would be qux, but we also
@@ -128,7 +132,7 @@ class TestArtifactCache(unittest.TestCase):
         path = cache.fetch('http://server/bar')
         expected.append(os.path.basename(path))
         expected = [p for p in expected if 'fuga' not in p]
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
         # Downloading one file larger than the cache size should still leave
         # MIN_CACHED_ARTIFACTS files.
@@ -138,7 +142,7 @@ class TestArtifactCache(unittest.TestCase):
         path = cache.fetch('http://server/larger')
         expected.append(os.path.basename(path))
         expected = expected[-2:]
-        self.assertEqual(sorted(os.listdir(self.tmpdir)), sorted(expected))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
 
 
 if __name__ == '__main__':