Bug 1660615 - ride along - reformat moztreedocs with black r=firefox-source-docs-reviewers,championshuttler

Differential Revision: https://phabricator.services.mozilla.com/D87950
Sylvestre Ledru 2020-08-23 08:49:52 +00:00
parent 09e50d2ff3
commit dcc2514c21
5 changed files with 230 additions and 158 deletions
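The diff below is almost entirely mechanical: black normalizes string quotes to double quotes, rewraps long calls and decorators at its default 88-column limit, and reflows multi-line literals. A minimal sketch of that effect, not part of the commit and assuming only that the black package is installed:

    import black

    # One of the lines touched in tools/moztreedocs, before reformatting:
    src = "MAIN_DOC_PATH = os.path.normpath(os.path.join(build.topsrcdir, 'docs'))\n"

    # black rewrites the single-quoted literal to double quotes; the line is
    # already under 88 columns, so no rewrapping happens in this example.
    print(black.format_str(src, mode=black.FileMode()))
    # MAIN_DOC_PATH = os.path.normpath(os.path.join(build.topsrcdir, "docs"))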


@@ -12,6 +12,7 @@ black:
- taskcluster/test
- testing/condprofile/condprof
- tools/crashreporter/system-symbols
- tools/moztreedocs/
- tools/lint/
- tools/tryselect/selectors/scriptworker.py
exclude:
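With tools/moztreedocs/ added to the include list, the black linter covers the directory on future pushes. A hypothetical local check, not part of the commit (the mach lint flags shown are the commonly used ones and may differ between revisions):

    # Run from the root of a mozilla-central checkout.
    import subprocess

    subprocess.run(
        ["./mach", "lint", "--linter", "black", "tools/moztreedocs/"],
        check=True,  # a non-zero exit (formatting issues) raises CalledProcessError
    )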


@@ -23,7 +23,7 @@ import sphinx.ext.apidoc
here = os.path.abspath(os.path.dirname(__file__))
build = MozbuildObject.from_environment(cwd=here)
MAIN_DOC_PATH = os.path.normpath(os.path.join(build.topsrcdir, 'docs'))
MAIN_DOC_PATH = os.path.normpath(os.path.join(build.topsrcdir, "docs"))
logger = sphinx.util.logging.getLogger(__name__)
@@ -45,29 +45,31 @@ def read_build_config(docdir):
class fakeconfig(object):
topsrcdir = build.topsrcdir
variables = ('SPHINX_TREES', 'SPHINX_PYTHON_PACKAGE_DIRS')
variables = ("SPHINX_TREES", "SPHINX_PYTHON_PACKAGE_DIRS")
reader = BuildReader(fakeconfig())
result = reader.find_variables_from_ast(variables, path=relevant_mozbuild_path)
for path, name, key, value in result:
reldir = os.path.dirname(path)
if name == 'SPHINX_TREES':
if name == "SPHINX_TREES":
# If we're building a subtree, only process that specific subtree.
absdir = os.path.join(build.topsrcdir, reldir, value)
if not is_main and absdir not in (docdir, MAIN_DOC_PATH):
continue
assert key
if key.startswith('/'):
if key.startswith("/"):
key = key[1:]
else:
key = os.path.normpath(os.path.join(reldir, key))
if key in trees:
raise Exception('%s has already been registered as a destination.' % key)
raise Exception(
"%s has already been registered as a destination." % key
)
trees[key] = os.path.join(reldir, value)
if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
if name == "SPHINX_PYTHON_PACKAGE_DIRS":
python_package_dirs.add(os.path.join(reldir, value))
return trees, python_package_dirs
@@ -78,8 +80,8 @@ class _SphinxManager(object):
def __init__(self, topsrcdir, main_path):
self.topsrcdir = topsrcdir
self.conf_py_path = os.path.join(main_path, 'conf.py')
self.index_path = os.path.join(main_path, 'index.rst')
self.conf_py_path = os.path.join(main_path, "conf.py")
self.index_path = os.path.join(main_path, "index.rst")
# Instance variables that get set in self.generate_docs()
self.staging_dir = None
@@ -88,27 +90,27 @@ class _SphinxManager(object):
def generate_docs(self, app):
"""Generate/stage documentation."""
self.staging_dir = os.path.join(app.outdir, '_staging')
self.staging_dir = os.path.join(app.outdir, "_staging")
logger.info('Reading Sphinx metadata from build configuration')
logger.info("Reading Sphinx metadata from build configuration")
self.trees, self.python_package_dirs = read_build_config(app.srcdir)
logger.info('Staging static documentation')
logger.info("Staging static documentation")
self._synchronize_docs(app)
logger.info('Generating Python API documentation')
logger.info("Generating Python API documentation")
self._generate_python_api_docs()
def _generate_python_api_docs(self):
"""Generate Python API doc files."""
out_dir = os.path.join(self.staging_dir, 'python')
base_args = ['--no-toc', '-o', out_dir]
out_dir = os.path.join(self.staging_dir, "python")
base_args = ["--no-toc", "-o", out_dir]
for p in sorted(self.python_package_dirs):
full = os.path.join(self.topsrcdir, p)
finder = FileFinder(full)
dirs = {os.path.dirname(f[0]) for f in finder.find('**')}
dirs = {os.path.dirname(f[0]) for f in finder.find("**")}
test_dirs = {"test", "tests"}
excludes = {d for d in dirs if set(PurePath(d).parts) & test_dirs}
@@ -134,7 +136,7 @@ class _SphinxManager(object):
# Use the frontmatter title
fh.write(post["title"] + "\n")
# Add the md syntax for the title
fh.write('=' * len(post["title"]) + "\n")
fh.write("=" * len(post["title"]) + "\n")
# If there is a summary, add it
if "summary" in post:
fh.write(post["summary"] + "\n")
@@ -151,20 +153,22 @@ class _SphinxManager(object):
def _synchronize_docs(self, app):
m = InstallManifest()
with open(os.path.join(MAIN_DOC_PATH, 'config.yml'), 'r') as fh:
tree_config = yaml.safe_load(fh)['categories']
with open(os.path.join(MAIN_DOC_PATH, "config.yml"), "r") as fh:
tree_config = yaml.safe_load(fh)["categories"]
m.add_link(self.conf_py_path, 'conf.py')
m.add_link(self.conf_py_path, "conf.py")
for dest, source in sorted(self.trees.items()):
source_dir = os.path.join(self.topsrcdir, source)
for root, _, files in os.walk(source_dir):
for f in files:
source_path = os.path.normpath(os.path.join(root, f))
rel_source = source_path[len(source_dir) + 1:]
rel_source = source_path[len(source_dir) + 1 :]
target = os.path.normpath(os.path.join(dest, rel_source))
if source_path.endswith(".md"):
self._process_markdown(m, source_path, os.path.join(".", target))
self._process_markdown(
m, source_path, os.path.join(".", target)
)
else:
m.add_link(source_path, target)
@@ -172,7 +176,7 @@ class _SphinxManager(object):
m.populate_registry(copier)
copier.copy(self.staging_dir, remove_empty_directories=False)
with open(self.index_path, 'r') as fh:
with open(self.index_path, "r") as fh:
data = fh.read()
def is_toplevel(key):
@@ -187,8 +191,8 @@ class _SphinxManager(object):
return True
def format_paths(paths):
source_doc = ['%s/index' % p for p in paths]
return '\n '.join(source_doc)
source_doc = ["%s/index" % p for p in paths]
return "\n ".join(source_doc)
toplevel_trees = {k: v for k, v in self.trees.items() if is_toplevel(k)}
@@ -201,21 +205,27 @@ class _SphinxManager(object):
# tree (Bug 1557020). The page is no longer referenced within the index
# tree, thus we shall check categorisation only if complete tree is being rebuilt.
if app.srcdir == self.topsrcdir:
indexes = set([os.path.normpath(os.path.join(p, 'index'))
for p in toplevel_trees.keys()])
indexes = set(
[
os.path.normpath(os.path.join(p, "index"))
for p in toplevel_trees.keys()
]
)
# Format categories like indexes
cats = '\n'.join(CATEGORIES.values()).split("\n")
cats = "\n".join(CATEGORIES.values()).split("\n")
# Remove heading spaces
cats = [os.path.normpath(x.strip()) for x in cats]
indexes = tuple(set(indexes) - set(cats))
if indexes:
# In case a new doc isn't categorized
print(indexes)
raise Exception("Uncategorized documentation. Please add it in docs/config.yml")
raise Exception(
"Uncategorized documentation. Please add it in docs/config.yml"
)
data = data.format(**CATEGORIES)
with open(os.path.join(self.staging_dir, 'index.rst'), 'w') as fh:
with open(os.path.join(self.staging_dir, "index.rst"), "w") as fh:
fh.write(data)


@@ -27,7 +27,7 @@ from mach.decorators import (
here = os.path.abspath(os.path.dirname(__file__))
topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))
DOC_ROOT = os.path.join(topsrcdir, 'docs')
DOC_ROOT = os.path.join(topsrcdir, "docs")
JSDOC_NOT_FOUND = """\
JSDoc==3.5.5 is required to build the docs but was not found on your system.
Please install it globally by running:
@@ -50,45 +50,92 @@ class Documentation(MachCommandBase):
self._project = None
self._version = None
@Command('doc', category='devenv', virtualenv_name="docs",
description='Generate and serve documentation from the tree.')
@CommandArgument('path', default=None, metavar='DIRECTORY', nargs='?',
help='Path to documentation to build and display.')
@CommandArgument('--format', default='html', dest='fmt',
help='Documentation format to write.')
@CommandArgument('--outdir', default=None, metavar='DESTINATION',
help='Where to write output.')
@CommandArgument('--archive', action='store_true',
help='Write a gzipped tarball of generated docs.')
@CommandArgument('--no-open', dest='auto_open', default=True,
action='store_false',
help="Don't automatically open HTML docs in a browser.")
@CommandArgument('--no-serve', dest='serve', default=True, action='store_false',
help="Don't serve the generated docs after building.")
@CommandArgument('--http', default='localhost:5500', metavar='ADDRESS',
help='Serve documentation on the specified host and port, '
'default "localhost:5500".')
@CommandArgument('--upload', action='store_true',
help='Upload generated files to S3.')
@CommandArgument('-j', '--jobs', default=str(multiprocessing.cpu_count()), dest='jobs',
help='Distribute the build over N processes in parallel.')
@CommandArgument('--write-url', default=None,
help='Write S3 Upload URL to text file')
def build_docs(self, path=None, fmt='html', outdir=None, auto_open=True,
serve=True, http=None, archive=False, upload=False, jobs=None,
write_url=None):
@Command(
"doc",
category="devenv",
virtualenv_name="docs",
description="Generate and serve documentation from the tree.",
)
@CommandArgument(
"path",
default=None,
metavar="DIRECTORY",
nargs="?",
help="Path to documentation to build and display.",
)
@CommandArgument(
"--format", default="html", dest="fmt", help="Documentation format to write."
)
@CommandArgument(
"--outdir", default=None, metavar="DESTINATION", help="Where to write output."
)
@CommandArgument(
"--archive",
action="store_true",
help="Write a gzipped tarball of generated docs.",
)
@CommandArgument(
"--no-open",
dest="auto_open",
default=True,
action="store_false",
help="Don't automatically open HTML docs in a browser.",
)
@CommandArgument(
"--no-serve",
dest="serve",
default=True,
action="store_false",
help="Don't serve the generated docs after building.",
)
@CommandArgument(
"--http",
default="localhost:5500",
metavar="ADDRESS",
help="Serve documentation on the specified host and port, "
'default "localhost:5500".',
)
@CommandArgument(
"--upload", action="store_true", help="Upload generated files to S3."
)
@CommandArgument(
"-j",
"--jobs",
default=str(multiprocessing.cpu_count()),
dest="jobs",
help="Distribute the build over N processes in parallel.",
)
@CommandArgument(
"--write-url", default=None, help="Write S3 Upload URL to text file"
)
def build_docs(
self,
path=None,
fmt="html",
outdir=None,
auto_open=True,
serve=True,
http=None,
archive=False,
upload=False,
jobs=None,
write_url=None,
):
if self.check_jsdoc():
return die(JSDOC_NOT_FOUND)
self.activate_virtualenv()
self.virtualenv_manager.install_pip_requirements(os.path.join(here, 'requirements.txt'))
self.virtualenv_manager.install_pip_requirements(
os.path.join(here, "requirements.txt")
)
import webbrowser
from livereload import Server
from moztreedocs.package import create_tarball
unique_id = str(uuid.uuid1())
outdir = outdir or os.path.join(self.topobjdir, 'docs')
outdir = outdir or os.path.join(self.topobjdir, "docs")
savedir = os.path.join(outdir, fmt)
path = path or self.topsrcdir
@@ -97,39 +144,45 @@ class Documentation(MachCommandBase):
docdir = self._find_doc_dir(path)
if not docdir:
print(self._dump_sphinx_backtrace())
return die('failed to generate documentation:\n'
'%s: could not find docs at this location' % path)
return die(
"failed to generate documentation:\n"
"%s: could not find docs at this location" % path
)
result = self._run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs)
if result != 0:
print(self._dump_sphinx_backtrace())
return die('failed to generate documentation:\n'
'%s: sphinx return code %d' % (path, result))
return die(
"failed to generate documentation:\n"
"%s: sphinx return code %d" % (path, result)
)
else:
print('\nGenerated documentation:\n%s' % savedir)
print("\nGenerated documentation:\n%s" % savedir)
print('Post processing HTML files')
print("Post processing HTML files")
self._post_process_html(savedir)
# Upload the artifact containing the link to S3
# This would be used by code-review to post the link to Phabricator
if write_url is not None:
base_link = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
base_link = (
"http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
)
unique_link = base_link + unique_id + "/index.html"
with open(write_url, 'w') as fp:
with open(write_url, "w") as fp:
fp.write(unique_link)
fp.flush()
if archive:
archive_path = os.path.join(outdir, '%s.tar.gz' % self.project)
archive_path = os.path.join(outdir, "%s.tar.gz" % self.project)
create_tarball(archive_path, savedir)
print('Archived to %s' % archive_path)
print("Archived to %s" % archive_path)
if upload:
self._s3_upload(savedir, self.project, unique_id, self.version)
if not serve:
index_path = os.path.join(savedir, 'index.html')
index_path = os.path.join(savedir, "index.html")
if auto_open and os.path.isfile(index_path):
webbrowser.open(index_path)
return
@@ -137,10 +190,10 @@ class Documentation(MachCommandBase):
# Create livereload server. Any files modified in the specified docdir
# will cause a re-build and refresh of the browser (if open).
try:
host, port = http.split(':', 1)
host, port = http.split(":", 1)
port = int(port)
except ValueError:
return die('invalid address: %s' % http)
return die("invalid address: %s" % http)
server = Server()
@@ -148,8 +201,12 @@ class Documentation(MachCommandBase):
for _, src in sphinx_trees.items():
run_sphinx = partial(self._run_sphinx, src, savedir, fmt=fmt, jobs=jobs)
server.watch(src, run_sphinx)
server.serve(host=host, port=port, root=savedir,
open_url_delay=0.1 if auto_open else None)
server.serve(
host=host,
port=port,
root=savedir,
open_url_delay=0.1 if auto_open else None,
)
def _dump_sphinx_backtrace(self):
"""
@@ -170,22 +227,26 @@ class Documentation(MachCommandBase):
pathFile = os.path.join(tmpdir, name)
stat = os.stat(pathFile)
output += "Name: {0} / Creation date: {1}\n".format(
pathFile, time.ctime(stat.st_mtime))
pathFile, time.ctime(stat.st_mtime)
)
with open(pathFile) as f:
output += f.read()
return output
def _run_sphinx(self, docdir, savedir, config=None, fmt='html', jobs=None):
def _run_sphinx(self, docdir, savedir, config=None, fmt="html", jobs=None):
import sphinx.cmd.build
config = config or self.manager.conf_py_path
args = [
'-b', fmt,
'-c', os.path.dirname(config),
"-b",
fmt,
"-c",
os.path.dirname(config),
docdir,
savedir,
]
if jobs:
args.extend(['-j', jobs])
args.extend(["-j", jobs])
return sphinx.cmd.build.build_main(args)
def _post_process_html(self, savedir):
@@ -198,7 +259,7 @@ class Documentation(MachCommandBase):
if file.endswith(".html"):
p = os.path.join(root, file)
with open(p, 'r') as file:
with open(p, "r") as file:
filedata = file.read()
# Workaround https://bugzilla.mozilla.org/show_bug.cgi?id=1607143
@@ -207,35 +268,40 @@ class Documentation(MachCommandBase):
# https://github.com/mgaitan/sphinxcontrib-mermaid/pull/37 is merged
# As sphinx-mermaid currently uses an old version, also force
# a more recent version
filedata = re.sub(r'https://unpkg.com/mermaid@.*/dist',
r'https://cdnjs.cloudflare.com/ajax/libs/mermaid/{}'
.format(MERMAID_VERSION), filedata)
filedata = re.sub(
r"https://unpkg.com/mermaid@.*/dist",
r"https://cdnjs.cloudflare.com/ajax/libs/mermaid/{}".format(
MERMAID_VERSION
),
filedata,
)
with open(p, 'w') as file:
with open(p, "w") as file:
file.write(filedata)
@property
def manager(self):
if not self._manager:
from moztreedocs import manager
self._manager = manager
return self._manager
def _read_project_properties(self):
import imp
path = os.path.normpath(self.manager.conf_py_path)
with open(path, 'r') as fh:
conf = imp.load_module('doc_conf', fh, path,
('.py', 'r', imp.PY_SOURCE))
with open(path, "r") as fh:
conf = imp.load_module("doc_conf", fh, path, (".py", "r", imp.PY_SOURCE))
# Prefer the Mozilla project name, falling back to Sphinx's
# default variable if it isn't defined.
project = getattr(conf, 'moz_project_name', None)
project = getattr(conf, "moz_project_name", None)
if not project:
project = conf.project.replace(' ', '_')
project = conf.project.replace(" ", "_")
self._project = project
self._version = getattr(conf, 'version', None)
self._version = getattr(conf, "version", None)
@property
def project(self):
@@ -253,7 +319,7 @@ class Documentation(MachCommandBase):
if os.path.isfile(path):
return
valid_doc_dirs = ('doc', 'docs')
valid_doc_dirs = ("doc", "docs")
if os.path.basename(path) in valid_doc_dirs:
return path
@@ -277,17 +343,17 @@ class Documentation(MachCommandBase):
files = list(distribution_files(root))
key_prefixes = []
if version:
key_prefixes.append('%s/%s' % (project, version))
key_prefixes.append("%s/%s" % (project, version))
# Until we redirect / to main/latest, upload the main docs
# to the root.
if project == 'main':
key_prefixes.append('')
if project == "main":
key_prefixes.append("")
key_prefixes.append(unique_id)
with open(os.path.join(DOC_ROOT, 'config.yml'), 'r') as fh:
redirects = yaml.safe_load(fh)['redirects']
with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
redirects = yaml.safe_load(fh)["redirects"]
redirects = {k.strip("/"): v.strip("/") for k, v in redirects.items()}
@@ -303,7 +369,7 @@ class Documentation(MachCommandBase):
continue
if prefix:
prefix += '/'
prefix += "/"
all_redirects.update({prefix + k: prefix + v for k, v in redirects.items()})
print("Redirects currently staged")
@@ -314,19 +380,20 @@ class Documentation(MachCommandBase):
def check_jsdoc(self):
try:
from mozfile import which
exe_name = which('jsdoc')
exe_name = which("jsdoc")
if not exe_name:
return 1
out = subprocess.check_output([exe_name, '--version'])
out = subprocess.check_output([exe_name, "--version"])
version = out.split()[1]
except subprocess.CalledProcessError:
version = None
if not version or not version.startswith(b'3.5'):
if not version or not version.startswith(b"3.5"):
return 1
def die(msg, exit_code=1):
msg = '%s: %s' % (sys.argv[0], msg)
msg = "%s: %s" % (sys.argv[0], msg)
print(msg, file=sys.stderr)
return exit_code


@@ -17,14 +17,15 @@ def distribution_files(root):
of (path, BaseFile) for files that should be archived, uploaded, etc.
Paths are relative to given root directory.
"""
finder = FileFinder(root, ignore=('_staging', '_venv'))
return finder.find('**')
finder = FileFinder(root, ignore=("_staging", "_venv"))
return finder.find("**")
def create_tarball(filename, root):
"""Create a tar.gz archive of docs in a directory."""
files = dict(distribution_files(root))
with open(filename, 'wb') as fh:
create_tar_gz_from_files(fh, files, filename=os.path.basename(filename),
compresslevel=6)
with open(filename, "wb") as fh:
create_tar_gz_from_files(
fh, files, filename=os.path.basename(filename), compresslevel=6
)


@@ -21,54 +21,53 @@ from mozbuild.util import memoize
@memoize
def create_aws_session():
'''
"""
This function creates an aws session that is
shared between upload and delete both.
'''
region = 'us-west-2'
level = os.environ.get('MOZ_SCM_LEVEL', '1')
"""
region = "us-west-2"
level = os.environ.get("MOZ_SCM_LEVEL", "1")
bucket = {
'1': 'gecko-docs.mozilla.org-l1',
'2': 'gecko-docs.mozilla.org-l2',
'3': 'gecko-docs.mozilla.org',
"1": "gecko-docs.mozilla.org-l1",
"2": "gecko-docs.mozilla.org-l2",
"3": "gecko-docs.mozilla.org",
}[level]
secrets_url = 'http://taskcluster/secrets/v1/secret/'
secrets_url += 'project/releng/gecko/build/level-{}/gecko-docs-upload'.format(
level)
secrets_url = "http://taskcluster/secrets/v1/secret/"
secrets_url += "project/releng/gecko/build/level-{}/gecko-docs-upload".format(level)
# Get the credentials from the TC secrets service. Note that these
# differ per SCM level
if 'TASK_ID' in os.environ:
if "TASK_ID" in os.environ:
print("Using AWS credentials from the secrets service")
session = requests.Session()
res = session.get(secrets_url)
res.raise_for_status()
secret = res.json()['secret']
secret = res.json()["secret"]
session = boto3.session.Session(
aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],
region_name=region)
aws_access_key_id=secret["AWS_ACCESS_KEY_ID"],
aws_secret_access_key=secret["AWS_SECRET_ACCESS_KEY"],
region_name=region,
)
else:
print("Trying to use your AWS credentials..")
session = boto3.session.Session(region_name=region)
s3 = session.client('s3',
config=botocore.client.Config(max_pool_connections=20))
s3 = session.client("s3", config=botocore.client.Config(max_pool_connections=20))
return s3, bucket
@memoize
def get_s3_keys(s3, bucket):
kwargs = {'Bucket': bucket}
kwargs = {"Bucket": bucket}
all_keys = []
while True:
response = s3.list_objects_v2(**kwargs)
for obj in response['Contents']:
all_keys.append(obj['Key'])
for obj in response["Contents"]:
all_keys.append(obj["Key"])
try:
kwargs['ContinuationToken'] = response['NextContinuationToken']
kwargs["ContinuationToken"] = response["NextContinuationToken"]
except KeyError:
break
@@ -79,28 +78,20 @@ def s3_set_redirects(redirects):
s3, bucket = create_aws_session()
configuration = {
'IndexDocument': {"Suffix": "index.html"},
'RoutingRules': []
}
configuration = {"IndexDocument": {"Suffix": "index.html"}, "RoutingRules": []}
for path, redirect in redirects.items():
rule = {
'Condition': {
"KeyPrefixEquals": path
},
'Redirect': {
"ReplaceKeyPrefixWith": redirect
},
"Condition": {"KeyPrefixEquals": path},
"Redirect": {"ReplaceKeyPrefixWith": redirect},
}
if os.environ.get('MOZ_SCM_LEVEL') == '3':
rule['Redirect']['HostName'] = 'firefox-source-docs.mozilla.org'
if os.environ.get("MOZ_SCM_LEVEL") == "3":
rule["Redirect"]["HostName"] = "firefox-source-docs.mozilla.org"
configuration['RoutingRules'].append(rule)
configuration["RoutingRules"].append(rule)
s3.put_bucket_website(
Bucket=bucket,
WebsiteConfiguration=configuration,
Bucket=bucket, WebsiteConfiguration=configuration,
)
@@ -109,20 +100,21 @@ def s3_delete_missing(files, key_prefix=None):
s3, bucket = create_aws_session()
files_on_server = get_s3_keys(s3, bucket)
if key_prefix:
files_on_server = [path for path in files_on_server if path.startswith(key_prefix)]
files_on_server = [
path for path in files_on_server if path.startswith(key_prefix)
]
else:
files_on_server = [path for path in files_on_server if not path.startswith("main/")]
files = [key_prefix + '/' + path if key_prefix else path for path, f in files]
files_on_server = [
path for path in files_on_server if not path.startswith("main/")
]
files = [key_prefix + "/" + path if key_prefix else path for path, f in files]
files_to_delete = [path for path in files_on_server if path not in files]
query_size = 1000
while files_to_delete:
keys_to_remove = [{'Key': key} for key in files_to_delete[:query_size]]
keys_to_remove = [{"Key": key} for key in files_to_delete[:query_size]]
response = s3.delete_objects(
Bucket=bucket,
Delete={
'Objects': keys_to_remove,
},
Bucket=bucket, Delete={"Objects": keys_to_remove,}, # NOQA
)
pprint(response, indent=2)
files_to_delete = files_to_delete[query_size:]
@@ -141,7 +133,7 @@ def s3_upload(files, key_prefix=None):
def upload(f, path, bucket, key, extra_args):
# Need to flush to avoid buffering/interleaving from multiple threads.
sys.stdout.write('uploading %s to %s\n' % (path, key))
sys.stdout.write("uploading %s to %s\n" % (path, key))
sys.stdout.flush()
"""
When running on try, we need to set an
@@ -150,10 +142,10 @@ def s3_upload(files, key_prefix=None):
for m-c, we do not want to set any expiration
"""
if os.environ.get('MOZ_SCM_LEVEL') == '1':
if os.environ.get("MOZ_SCM_LEVEL") == "1":
now = datetime.datetime.now()
expires = now + datetime.timedelta(days=7)
extra_args['Expires'] = expires
extra_args["Expires"] = expires
s3.upload_fileobj(f, bucket, key, ExtraArgs=extra_args)
@@ -163,19 +155,20 @@ def s3_upload(files, key_prefix=None):
content_type, content_encoding = mimetypes.guess_type(path)
extra_args = {}
if content_type:
extra_args['ContentType'] = content_type
extra_args["ContentType"] = content_type
if content_encoding:
extra_args['ContentEncoding'] = content_encoding
extra_args["ContentEncoding"] = content_encoding
if key_prefix:
key = '%s/%s' % (key_prefix, path)
key = "%s/%s" % (key_prefix, path)
else:
key = path
# The file types returned by mozpack behave like file objects. But
# they don't accept an argument to read(). So we wrap in a BytesIO.
fs.append(e.submit(upload, io.BytesIO(f.read()), path, bucket, key,
extra_args))
fs.append(
e.submit(upload, io.BytesIO(f.read()), path, bucket, key, extra_args)
)
s3_delete_missing(files, key_prefix)
# Need to do this to catch any exceptions.