Mirror of https://github.com/mozilla/gecko-dev.git (synced 2025-02-17 14:25:49 +00:00)
Bug 1696251 - Replace self with command_context where possible in existing mach commands. r=mhentges,webdriver-reviewers,perftest-reviewers,whimboo
This step removes mach commands' dependency on having a MachCommandBase instance as `self`, by using the `command_context` argument instead. It also removes any remaining statefulness from the classes that implement mach commands, ultimately making it easier to move existing commands out of classes in a follow-up.

Differential Revision: https://phabricator.services.mozilla.com/D118058
parent 53dcf83296
commit 190e03aaab
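The pattern applied throughout the diff below is mechanical: anywhere a command body read build state or helpers off `self` (`self.topsrcdir`, `self.log`, `self.substs`, `self._spawn`, `self.run_process`, ...), it now reads them off the `command_context` argument that every command already receives, and helper methods that still need that state gain an explicit `command_context` parameter (see `_run_reftest`, `script_dir`, and `ensure_shell` below). A minimal sketch of the before/after shape, using a hypothetical `example-srcdir` command that is not part of this patch:

```python
# Illustrative sketch only: "example-srcdir" is a hypothetical command, not in this patch.
from mach.decorators import Command, CommandProvider
from mozbuild.base import MachCommandBase


@CommandProvider
class ExampleCommands(MachCommandBase):
    @Command("example-srcdir", category="misc")
    def example_srcdir(self, command_context):
        # Before this patch the body reached through `self`, which only worked
        # because the command class was a stateful MachCommandBase:
        #     print(self.topsrcdir)
        # After this patch the same data comes from `command_context`,
        # leaving `self` (and therefore the class) stateless:
        print(command_context.topsrcdir)
        return 0
```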
@@ -59,7 +59,7 @@ class MachCommands(MachCommandBase):
from six import string_types
from valgrind.output_handler import OutputHandler

build_dir = os.path.join(self.topsrcdir, "build")
build_dir = os.path.join(command_context.topsrcdir, "build")

# XXX: currently we just use the PGO inputs for Valgrind runs. This may
# change in the future.
@@ -68,7 +68,9 @@ class MachCommands(MachCommandBase):

with TemporaryDirectory() as profilePath:
# TODO: refactor this into mozprofile
profile_data_dir = os.path.join(self.topsrcdir, "testing", "profiles")
profile_data_dir = os.path.join(
command_context.topsrcdir, "testing", "profiles"
)
with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh:
base_profiles = json.load(fh)["valgrind"]

@@ -89,7 +91,7 @@ class MachCommands(MachCommandBase):
prefs[k] = Preferences.cast(v)

quitter = os.path.join(
self.topsrcdir, "tools", "quitter", "quitter@mozilla.org.xpi"
command_context.topsrcdir, "tools", "quitter", "quitter@mozilla.org.xpi"
)

locations = ServerLocations()
@@ -113,7 +115,7 @@ class MachCommands(MachCommandBase):
env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1"
env["XPCOM_DEBUG_BREAK"] = "warn"

outputHandler = OutputHandler(self.log)
outputHandler = OutputHandler(command_context.log)
kp_kwargs = {
"processOutputLine": [outputHandler],
"universal_newlines": True,
@@ -171,7 +173,7 @@ class MachCommands(MachCommandBase):
try:
runner = FirefoxRunner(
profile=profile,
binary=self.get_binary_path(),
binary=command_context.get_binary_path(),
cmdargs=firefox_args,
env=env,
process_args=kp_kwargs,
@@ -185,7 +187,7 @@ class MachCommands(MachCommandBase):
supps = outputHandler.suppression_count
if errs != supps:
status = 1 # turns the TBPL job orange
self.log(
command_context.log(
logging.ERROR,
"valgrind-fail-parsing",
{"errs": errs, "supps": supps},
@@ -195,7 +197,7 @@ class MachCommands(MachCommandBase):

elif errs == 0:
status = 0
self.log(
command_context.log(
logging.INFO,
"valgrind-pass",
{},
@@ -207,13 +209,13 @@ class MachCommands(MachCommandBase):

if binary_not_found_exception:
status = 2 # turns the TBPL job red
self.log(
command_context.log(
logging.ERROR,
"valgrind-fail-errors",
{"error": str(binary_not_found_exception)},
"TEST-UNEXPECTED-FAIL | valgrind-test | {error}",
)
self.log(
command_context.log(
logging.INFO,
"valgrind-fail-errors",
{"help": binary_not_found_exception.help()},
@@ -221,7 +223,7 @@ class MachCommands(MachCommandBase):
)
elif exitcode is None:
status = 2 # turns the TBPL job red
self.log(
command_context.log(
logging.ERROR,
"valgrind-fail-timeout",
{"timeout": timeout},
@@ -230,7 +232,7 @@ class MachCommands(MachCommandBase):
)
elif exitcode != 0:
status = 2 # turns the TBPL job red
self.log(
command_context.log(
logging.ERROR,
"valgrind-fail-errors",
{"exitcode": exitcode},

@@ -49,35 +49,39 @@ class MachCommands(MachCommandBase):
"""Generate the static css properties database for devtools and write it to file."""

print("Re-generating the css properties database...")
db = self.get_properties_db_from_xpcshell()
db = self.get_properties_db_from_xpcshell(command_context)
if not db:
return 1

self.output_template(
command_context,
{
"preferences": stringify(db["preferences"]),
"cssProperties": stringify(db["cssProperties"]),
"pseudoElements": stringify(db["pseudoElements"]),
}
},
)

def get_properties_db_from_xpcshell(self):
def get_properties_db_from_xpcshell(self, command_context):
"""Generate the static css properties db for devtools from an xpcshell script."""
build = MozbuildObject.from_environment()

# Get the paths
script_path = resolve_path(
self.topsrcdir, "devtools/shared/css/generated/generate-properties-db.js"
command_context.topsrcdir,
"devtools/shared/css/generated/generate-properties-db.js",
)
gre_path = resolve_path(self.topobjdir, "dist/bin")
browser_path = resolve_path(self.topobjdir, "dist/bin/browser")
gre_path = resolve_path(command_context.topobjdir, "dist/bin")
browser_path = resolve_path(command_context.topobjdir, "dist/bin/browser")
try:
xpcshell_path = build.get_binary_path(what="xpcshell")
except BinaryNotFoundException as e:
self.log(
command_context.log(
logging.ERROR, "devtools-css-db", {"error": str(e)}, "ERROR: {error}"
)
self.log(logging.INFO, "devtools-css-db", {"help": e.help()}, "{help}")
command_context.log(
logging.INFO, "devtools-css-db", {"help": e.help()}, "{help}"
)
return None

print(browser_path)
@@ -98,13 +102,14 @@ class MachCommands(MachCommandBase):

return json.loads(contents)

def output_template(self, substitutions):
def output_template(self, command_context, substitutions):
"""Output a the properties-db.js from a template."""
js_template_path = resolve_path(
self.topsrcdir, "devtools/shared/css/generated/properties-db.js.in"
command_context.topsrcdir,
"devtools/shared/css/generated/properties-db.js.in",
)
destination_path = resolve_path(
self.topsrcdir, "devtools/shared/css/generated/properties-db.js"
command_context.topsrcdir, "devtools/shared/css/generated/properties-db.js"
)

with open(js_template_path, "rb") as handle:

@@ -36,7 +36,7 @@ class WebIDLProvider(MachCommandBase):
def webidl_example(self, command_context, interface):
from mozwebidlcodegen import BuildSystemWebIDL

manager = self._spawn(BuildSystemWebIDL).manager
manager = command_context._spawn(BuildSystemWebIDL).manager
for i in interface:
manager.generate_example_files(i)

@@ -47,15 +47,17 @@ class WebIDLProvider(MachCommandBase):
description="Run WebIDL tests (Interface Browser parser).",
)
def webidl_test(self, command_context, **kwargs):
sys.path.insert(0, os.path.join(self.topsrcdir, "other-licenses", "ply"))
sys.path.insert(
0, os.path.join(command_context.topsrcdir, "other-licenses", "ply")
)

# Ensure the topobjdir exists. On a Taskcluster test run there won't be
# an objdir yet.
mkdir(self.topobjdir)
mkdir(command_context.topobjdir)

# Make sure we drop our cached grammar bits in the objdir, not
# wherever we happen to be running from.
os.chdir(self.topobjdir)
os.chdir(command_context.topobjdir)

if kwargs["verbose"] is None:
kwargs["verbose"] = False
@@ -64,7 +66,7 @@ class WebIDLProvider(MachCommandBase):
# objdir. But we're going to try loading it as a python
# module, so we need to make sure the objdir is in our search
# path.
sys.path.insert(0, self.topobjdir)
sys.path.insert(0, command_context.topobjdir)

import runtests

@@ -79,13 +79,13 @@ class MachCommands(MachCommandBase):
def gcc_dir(self):
return os.path.join(self.tools_dir(), "gcc")

def script_dir(self):
return os.path.join(self.topsrcdir, "js/src/devtools/rootAnalysis")
def script_dir(self, command_context):
return os.path.join(command_context.topsrcdir, "js/src/devtools/rootAnalysis")

def work_dir(self, application, given):
def work_dir(self, command_context, application, given):
if given is not None:
return given
return os.path.join(self.topsrcdir, "haz-" + application)
return os.path.join(command_context.topsrcdir, "haz-" + application)

def ensure_dir_exists(self, dir):
os.makedirs(dir, exist_ok=True)
@@ -122,8 +122,11 @@ class MachCommands(MachCommandBase):
os.chdir(self.ensure_dir_exists(self.tools_dir()))
try:
kwargs["from_build"] = ("linux64-gcc-sixgill", "linux64-gcc-9")
self._mach_context.commands.dispatch(
"artifact", self._mach_context, subcommand="toolchain", **kwargs
command_context._mach_context.commands.dispatch(
"artifact",
command_context._mach_context,
subcommand="toolchain",
**kwargs
)
finally:
os.chdir(orig_dir)
@@ -152,8 +155,8 @@ class MachCommands(MachCommandBase):
or os.environ.get("MOZCONFIG")
or default_mozconfig
)
mozconfig_path = os.path.join(self.topsrcdir, mozconfig_path)
loader = MozconfigLoader(self.topsrcdir)
mozconfig_path = os.path.join(command_context.topsrcdir, mozconfig_path)
loader = MozconfigLoader(command_context.topsrcdir)
mozconfig = loader.read_mozconfig(mozconfig_path)

# Validate the mozconfig settings in case the user overrode the default.
@@ -170,20 +173,20 @@ class MachCommands(MachCommandBase):

# Set a default objdir for the shell, for developer builds.
os.environ.setdefault(
"MOZ_OBJDIR", os.path.join(self.topsrcdir, "obj-haz-shell")
"MOZ_OBJDIR", os.path.join(command_context.topsrcdir, "obj-haz-shell")
)

return self._mach_context.commands.dispatch(
"build", self._mach_context, **kwargs
return command_context._mach_context.commands.dispatch(
"build", command_context._mach_context, **kwargs
)

def read_json_file(self, filename):
with open(filename) as fh:
return json.load(fh)

def ensure_shell(self, objdir):
def ensure_shell(self, command_context, objdir):
if objdir is None:
objdir = os.path.join(self.topsrcdir, "obj-haz-shell")
objdir = os.path.join(command_context.topsrcdir, "obj-haz-shell")

try:
binaries = self.read_json_file(os.path.join(objdir, "binaries.json"))
@@ -218,9 +221,11 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
if objdir is None:
objdir = os.environ.get("HAZ_OBJDIR")
if objdir is None:
objdir = os.path.join(self.topsrcdir, "obj-analyzed-" + application)
objdir = os.path.join(
command_context.topsrcdir, "obj-analyzed-" + application
)

work_dir = self.work_dir(application, kwargs["work_dir"])
work_dir = self.work_dir(command_context, application, kwargs["work_dir"])
self.ensure_dir_exists(work_dir)
with open(os.path.join(work_dir, "defaults.py"), "wt") as fh:
data = textwrap.dedent(
@@ -233,9 +238,9 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
gcc_bin = "{gcc_dir}/bin"
"""
).format(
script_dir=self.script_dir(),
script_dir=self.script_dir(command_context),
objdir=objdir,
srcdir=self.topsrcdir,
srcdir=command_context.topsrcdir,
sixgill_dir=self.sixgill_dir(),
gcc_dir=self.gcc_dir(),
)
@@ -243,14 +248,14 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`

buildscript = " ".join(
[
self.topsrcdir + "/mach hazards compile",
command_context.topsrcdir + "/mach hazards compile",
"--application=" + application,
"--haz-objdir=" + objdir,
]
)
args = [
sys.executable,
os.path.join(self.script_dir(), "analyze.py"),
os.path.join(self.script_dir(command_context), "analyze.py"),
"dbs",
"--upto",
"dbs",
@@ -258,7 +263,7 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
"--buildcommand=" + buildscript,
]

return self.run_process(args=args, cwd=work_dir, pass_thru=True)
return command_context.run_process(args=args, cwd=work_dir, pass_thru=True)

@inherit_command_args("build")
@SubCommand("hazards", "compile", description=argparse.SUPPRESS)
@@ -295,13 +300,13 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
mozconfig_path = (
kwargs.pop("mozconfig", None) or env.get("MOZCONFIG") or default_mozconfig
)
mozconfig_path = os.path.join(self.topsrcdir, mozconfig_path)
mozconfig_path = os.path.join(command_context.topsrcdir, mozconfig_path)

# Validate the mozconfig.

# Require an explicit --enable-application=APP (even if you just
# want to build the default browser application.)
loader = MozconfigLoader(self.topsrcdir)
loader = MozconfigLoader(command_context.topsrcdir)
mozconfig = loader.read_mozconfig(mozconfig_path)
configure_args = mozconfig["configure_args"]
if "--enable-application=%s" % app not in configure_args:
@@ -310,7 +315,7 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
raise Exception("mozconfig must wrap compiles")

# Communicate mozconfig to build subprocesses.
env["MOZCONFIG"] = os.path.join(self.topsrcdir, mozconfig_path)
env["MOZCONFIG"] = os.path.join(command_context.topsrcdir, mozconfig_path)

# hazard mozconfigs need to find binaries in .mozbuild
env["MOZBUILD_STATE_PATH"] = self.state_dir()
@@ -323,8 +328,8 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
if "haz_objdir" in kwargs:
env["MOZ_OBJDIR"] = kwargs.pop("haz_objdir")

return self._mach_context.commands.dispatch(
"build", self._mach_context, **kwargs
return command_context._mach_context.commands.dispatch(
"build", command_context._mach_context, **kwargs
)

@SubCommand(
@@ -346,9 +351,9 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
def analyze(self, command_context, application, shell_objdir, work_dir):
"""Analyzed gathered data for rooting hazards"""

shell = self.ensure_shell(shell_objdir)
shell = self.ensure_shell(command_context, shell_objdir)
args = [
os.path.join(self.script_dir(), "analyze.py"),
os.path.join(self.script_dir(command_context), "analyze.py"),
"--js",
shell,
"gcTypes",
@@ -358,8 +363,8 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
self.setup_env_for_tools(os.environ)
os.environ["LD_LIBRARY_PATH"] += ":" + os.path.dirname(shell)

work_dir = self.work_dir(application, work_dir)
return self.run_process(args=args, cwd=work_dir, pass_thru=True)
work_dir = self.work_dir(command_context, application, work_dir)
return command_context.run_process(args=args, cwd=work_dir, pass_thru=True)

@SubCommand(
"hazards",
@@ -373,9 +378,9 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`
)
def self_test(self, command_context, shell_objdir):
"""Analyzed gathered data for rooting hazards"""
shell = self.ensure_shell(shell_objdir)
shell = self.ensure_shell(command_context, shell_objdir)
args = [
os.path.join(self.script_dir(), "run-test.py"),
os.path.join(self.script_dir(command_context), "run-test.py"),
"-v",
"--js",
shell,
@@ -387,4 +392,4 @@ no shell found in %s -- must build the JS shell with `mach hazards build-shell`

self.setup_env_for_tools(os.environ)
os.environ["LD_LIBRARY_PATH"] += ":" + os.path.dirname(shell)
return self.run_process(args=args, pass_thru=True)
return command_context.run_process(args=args, pass_thru=True)

@@ -238,7 +238,7 @@ class MachCommands(MachCommandBase):
)
def run_reftest(self, command_context, **kwargs):
kwargs["suite"] = "reftest"
return self._run_reftest(**kwargs)
return self._run_reftest(command_context, **kwargs)

@Command(
"jstestbrowser",
@@ -247,15 +247,15 @@ class MachCommands(MachCommandBase):
parser=get_parser,
)
def run_jstestbrowser(self, command_context, **kwargs):
if "--enable-js-shell" not in self.mozconfig["configure_args"]:
if "--enable-js-shell" not in command_context.mozconfig["configure_args"]:
raise Exception(
"jstestbrowser requires --enable-js-shell be specified in mozconfig."
)
self._mach_context.commands.dispatch(
"build", self._mach_context, what=["stage-jstests"]
command_context._mach_context.commands.dispatch(
"build", command_context._mach_context, what=["stage-jstests"]
)
kwargs["suite"] = "jstestbrowser"
return self._run_reftest(**kwargs)
return self._run_reftest(command_context, **kwargs)

@Command(
"crashtest",
@@ -265,16 +265,16 @@ class MachCommands(MachCommandBase):
)
def run_crashtest(self, command_context, **kwargs):
kwargs["suite"] = "crashtest"
return self._run_reftest(**kwargs)
return self._run_reftest(command_context, **kwargs)

def _run_reftest(self, **kwargs):
kwargs["topsrcdir"] = self.topsrcdir
def _run_reftest(self, command_context, **kwargs):
kwargs["topsrcdir"] = command_context.topsrcdir
process_test_objects(kwargs)
reftest = self._spawn(ReftestRunner)
reftest = command_context._spawn(ReftestRunner)
# Unstructured logging must be enabled prior to calling
# adb which uses an unstructured logger in its constructor.
reftest.log_manager.enable_unstructured()
if conditions.is_android(self):
if conditions.is_android(command_context):
from mozrunner.devices.android_device import (
verify_android_device,
InstallIntent,
@@ -292,7 +292,7 @@ class MachCommands(MachCommandBase):
):
verbose = True
verify_android_device(
self,
command_context,
install=install,
xre=True,
network=True,

@@ -116,6 +116,6 @@ class ReftestCommands(MachCommandBase):
parser=setup_argument_parser,
)
def reftest(self, command_context, **kwargs):
self._mach_context.activate_mozharness_venv()
command_context._mach_context.activate_mozharness_venv()
kwargs["suite"] = "reftest"
return run_reftest(self._mach_context, **kwargs)
return run_reftest(command_context._mach_context, **kwargs)

@@ -70,7 +70,7 @@ class MachCommands(MachCommandBase):
def android_assemble_app(self, command_context, args):
ret = self.gradle(
command_context,
self.substs["GRADLE_ANDROID_APP_TASKS"] + ["-x", "lint"] + args,
command_context.substs["GRADLE_ANDROID_APP_TASKS"] + ["-x", "lint"] + args,
verbose=True,
)

@@ -103,7 +103,7 @@ class MachCommands(MachCommandBase):

ret = self.gradle(
command_context,
self.substs["GRADLE_ANDROID_GENERATE_SDK_BINDINGS_TASKS"]
command_context.substs["GRADLE_ANDROID_GENERATE_SDK_BINDINGS_TASKS"]
+ [bindings_args]
+ args,
verbose=True,
@@ -120,7 +120,10 @@ class MachCommands(MachCommandBase):
def android_generate_generated_jni_wrappers(self, command_context, args):
ret = self.gradle(
command_context,
self.substs["GRADLE_ANDROID_GENERATE_GENERATED_JNI_WRAPPERS_TASKS"] + args,
command_context.substs[
"GRADLE_ANDROID_GENERATE_GENERATED_JNI_WRAPPERS_TASKS"
]
+ args,
verbose=True,
)

@@ -179,7 +182,9 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
# can change the outputs for those processes.
self.gradle(
command_context,
self.substs["GRADLE_ANDROID_DEPENDENCIES_TASKS"] + ["--continue"] + args,
command_context.substs["GRADLE_ANDROID_DEPENDENCIES_TASKS"]
+ ["--continue"]
+ args,
verbose=True,
)

@@ -195,7 +200,7 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
def android_archive_geckoview(self, command_context, args):
ret = self.gradle(
command_context,
self.substs["GRADLE_ANDROID_ARCHIVE_GECKOVIEW_TASKS"] + args,
command_context.substs["GRADLE_ANDROID_ARCHIVE_GECKOVIEW_TASKS"] + args,
verbose=True,
)

@@ -206,7 +211,8 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
def android_build_geckoview_example(self, command_context, args):
self.gradle(
command_context,
self.substs["GRADLE_ANDROID_BUILD_GECKOVIEW_EXAMPLE_TASKS"] + args,
command_context.substs["GRADLE_ANDROID_BUILD_GECKOVIEW_EXAMPLE_TASKS"]
+ args,
verbose=True,
)

@@ -224,7 +230,8 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
def android_install_geckoview_example(self, command_context, args):
self.gradle(
command_context,
self.substs["GRADLE_ANDROID_INSTALL_GECKOVIEW_EXAMPLE_TASKS"] + args,
command_context.substs["GRADLE_ANDROID_INSTALL_GECKOVIEW_EXAMPLE_TASKS"]
+ args,
verbose=True,
)

@@ -278,9 +285,9 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
):

tasks = (
self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS"]
command_context.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS"]
if archive or upload
else self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS"]
else command_context.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS"]
)

ret = self.gradle(command_context, tasks, verbose=True)
@@ -321,7 +328,7 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
branch = upload_branch.format(**fmt)
repo_url = "git@github.com:%s.git" % upload
repo_path = mozpath.abspath("gv-docs-repo")
self.run_process(
command_context.run_process(
[
"git",
"clone",
@@ -345,7 +352,7 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",

# Extract new javadoc to specified directory inside repo.
src_tar = mozpath.join(
self.topobjdir,
command_context.topobjdir,
"gradle",
"build",
"mobile",
@@ -359,9 +366,11 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
mozfile.extract_zip(src_tar, dst_path)

# Commit and push.
self.run_process(["git", "add", "--all"], append_env=env, pass_thru=True)
command_context.run_process(
["git", "add", "--all"], append_env=env, pass_thru=True
)
if (
self.run_process(
command_context.run_process(
["git", "diff", "--cached", "--quiet"],
append_env=env,
pass_thru=True,
@@ -370,12 +379,12 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
!= 0
):
# We have something to commit.
self.run_process(
command_context.run_process(
["git", "commit", "--message", upload_message.format(**fmt)],
append_env=env,
pass_thru=True,
)
self.run_process(
command_context.run_process(
["git", "push", "origin", branch], append_env=env, pass_thru=True
)

@@ -400,14 +409,14 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
def gradle(self, command_context, args, verbose=False):
if not verbose:
# Avoid logging the command
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
command_context.log_manager.terminal_handler.setLevel(logging.CRITICAL)

# In automation, JAVA_HOME is set via mozconfig, which needs
# to be specially handled in each mach command. This turns
# $JAVA_HOME/bin/java into $JAVA_HOME.
java_home = os.path.dirname(os.path.dirname(self.substs["JAVA"]))
java_home = os.path.dirname(os.path.dirname(command_context.substs["JAVA"]))

gradle_flags = self.substs.get("GRADLE_FLAGS", "") or os.environ.get(
gradle_flags = command_context.substs.get("GRADLE_FLAGS", "") or os.environ.get(
"GRADLE_FLAGS", ""
)
gradle_flags = shell_split(gradle_flags)
@@ -430,7 +439,7 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
# https://discuss.gradle.org/t/unmappable-character-for-encoding-ascii-when-building-a-utf-8-project/10692/11 # NOQA: E501
# and especially https://stackoverflow.com/a/21755671.

if self.substs.get("MOZ_AUTOMATION"):
if command_context.substs.get("MOZ_AUTOMATION"):
gradle_flags += ["--console=plain"]

env = os.environ.copy()
@@ -443,16 +452,16 @@ REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""",
)
# Set ANDROID_SDK_ROOT if --with-android-sdk was set.
# See https://bugzilla.mozilla.org/show_bug.cgi?id=1576471
android_sdk_root = self.substs.get("ANDROID_SDK_ROOT", "")
android_sdk_root = command_context.substs.get("ANDROID_SDK_ROOT", "")
if android_sdk_root:
env["ANDROID_SDK_ROOT"] = android_sdk_root

return self.run_process(
[self.substs["GRADLE"]] + gradle_flags + args,
return command_context.run_process(
[command_context.substs["GRADLE"]] + gradle_flags + args,
explicit_env=env,
pass_thru=True, # Allow user to run gradle interactively.
ensure_exit_code=False, # Don't throw on non-zero exit code.
cwd=mozpath.join(self.topsrcdir),
cwd=mozpath.join(command_context.topsrcdir),
)

@Command("gradle-install", category="devenv", conditions=[REMOVED])
@@ -511,7 +520,10 @@ class AndroidEmulatorCommands(MachCommandBase):
from mozrunner.devices.android_device import AndroidEmulator

emulator = AndroidEmulator(
version, verbose, substs=self.substs, device_serial="emulator-5554"
version,
verbose,
substs=command_context.substs,
device_serial="emulator-5554",
)
if emulator.is_running():
# It is possible to run multiple emulators simultaneously, but:
@@ -520,7 +532,7 @@ class AndroidEmulatorCommands(MachCommandBase):
# - additional parameters must be specified when running tests,
# to select a specific device.
# To avoid these complications, allow just one emulator at a time.
self.log(
command_context.log(
logging.ERROR,
"emulator",
{},
@@ -530,7 +542,7 @@ class AndroidEmulatorCommands(MachCommandBase):
return 1

if not emulator.is_available():
self.log(
command_context.log(
logging.WARN,
"emulator",
{},
@@ -540,7 +552,7 @@ class AndroidEmulatorCommands(MachCommandBase):
return 2

if not emulator.check_avd(force_update):
self.log(
command_context.log(
logging.INFO,
"emulator",
{},
@@ -548,7 +560,7 @@ class AndroidEmulatorCommands(MachCommandBase):
)
emulator.update_avd(force_update)

self.log(
command_context.log(
logging.INFO,
"emulator",
{},
@@ -556,25 +568,27 @@ class AndroidEmulatorCommands(MachCommandBase):
)
emulator.start(gpu)
if emulator.wait_for_start():
self.log(logging.INFO, "emulator", {}, "Android emulator is running.")
command_context.log(
logging.INFO, "emulator", {}, "Android emulator is running."
)
else:
# This is unusual but the emulator may still function.
self.log(
command_context.log(
logging.WARN,
"emulator",
{},
"Unable to verify that emulator is running.",
)

if conditions.is_android(self):
self.log(
if conditions.is_android(command_context):
command_context.log(
logging.INFO,
"emulator",
{},
"Use 'mach install' to install or update Firefox on your emulator.",
)
else:
self.log(
command_context.log(
logging.WARN,
"emulator",
{},
@@ -584,19 +598,19 @@ class AndroidEmulatorCommands(MachCommandBase):
)

if wait:
self.log(
command_context.log(
logging.INFO, "emulator", {}, "Waiting for Android emulator to close..."
)
rc = emulator.wait()
if rc is not None:
self.log(
command_context.log(
logging.INFO,
"emulator",
{},
"Android emulator completed with return code %d." % rc,
)
else:
self.log(
command_context.log(
logging.WARN,
"emulator",
{},

@@ -13,14 +13,9 @@ from itertools import chain

import attr

from mach.decorators import (
CommandProvider,
Command,
CommandArgument,
SubCommand,
)
from mach.decorators import CommandProvider, Command, CommandArgument, SubCommand
from mozbuild.base import MachCommandBase
from mozbuild.util import memoize, memoized_property
from mozbuild.util import memoize

here = os.path.abspath(os.path.dirname(__file__))
COMPLETION_TEMPLATES_DIR = os.path.join(here, "completion_templates")
@@ -44,15 +39,15 @@ def render_template(shell, context):

@CommandProvider
class BuiltinCommands(MachCommandBase):
@memoized_property
def command_handlers(self):
@memoize
def command_handlers(self, command_context):
"""A dictionary of command handlers keyed by command name."""
return self._mach_context.commands.command_handlers
return command_context._mach_context.commands.command_handlers

@memoized_property
def commands(self):
@memoize
def commands(self, command_context):
"""A sorted list of all command names."""
return sorted(self.command_handlers)
return sorted(self.command_handlers(command_context))

def _get_parser_options(self, parser):
options = {}
@@ -68,13 +63,13 @@ class BuiltinCommands(MachCommandBase):
options[tuple(action.option_strings)] = action.help or ""
return options

@memoized_property
def global_options(self):
@memoize
def global_options(self, command_context):
"""Return a dict of global options.

Of the form `{("-o", "--option"): "description"}`.
"""
for group in self._mach_context.global_parser._action_groups:
for group in command_context._mach_context.global_parser._action_groups:
if group.title == "Global Arguments":
return self._get_parser_options(group)

@@ -117,19 +112,21 @@ class BuiltinCommands(MachCommandBase):
subcommand=handler.subcommand,
)

@memoized_property
def commands_info(self):
@memoize
def commands_info(self, command_context):
"""Return a list of CommandInfo objects for each command."""
commands_info = []
# Loop over self.commands rather than self.command_handlers.items() for
# Loop over self.commands() rather than self.command_handlers().items() for
# alphabetical order.
for c in self.commands:
commands_info.append(self._get_handler_info(self.command_handlers[c]))
for c in self.commands(command_context):
commands_info.append(
self._get_handler_info(self.command_handlers(command_context)[c])
)
return commands_info

@Command("mach-commands", category="misc", description="List all mach commands.")
def run_commands(self, command_context):
print("\n".join(self.commands))
print("\n".join(self.commands(command_context)))

@Command(
"mach-debug-commands",
@@ -146,7 +143,7 @@ class BuiltinCommands(MachCommandBase):
def run_debug_commands(self, command_context, match=None):
import inspect

for command, handler in self.command_handlers.items():
for command, handler in self.command_handlers(command_context).items():
if match and match not in command:
continue

@@ -171,23 +168,23 @@ class BuiltinCommands(MachCommandBase):
)
def run_completion(self, command_context, args):
if not args:
print("\n".join(self.commands))
print("\n".join(self.commands(command_context)))
return

is_help = "help" in args
command = None
for i, arg in enumerate(args):
if arg in self.commands:
if arg in self.commands(command_context):
command = arg
args = args[i + 1 :]
break

# If no command is typed yet, just offer the commands.
if not command:
print("\n".join(self.commands))
print("\n".join(self.commands(command_context)))
return

handler = self.command_handlers[command]
handler = self.command_handlers(command_context)[command]
# If a subcommand was typed, update the handler.
for arg in args:
if arg in handler.subcommand_handlers:
@@ -235,7 +232,7 @@ class BuiltinCommands(MachCommandBase):
commands_subcommands = []
case_options = []
case_subcommands = []
for i, cmd in enumerate(self.commands_info):
for i, cmd in enumerate(self.commands_info(command_context)):
# Build case statement for options.
options = []
for opt_strs, description in cmd.options.items():
@@ -301,11 +298,13 @@ class BuiltinCommands(MachCommandBase):
)
)

globalopts = [opt for opt_strs in self.global_options for opt in opt_strs]
globalopts = [
opt for opt_strs in self.global_options(command_context) for opt in opt_strs
]
context = {
"case_options": "\n".join(case_options),
"case_subcommands": "\n".join(case_subcommands),
"commands": " ".join(self.commands),
"commands": " ".join(self.commands(command_context)),
"commands_subcommands": " ".join(sorted(commands_subcommands)),
"globalopts": " ".join(sorted(globalopts)),
}
@@ -330,7 +329,7 @@ class BuiltinCommands(MachCommandBase):
commands_subcommands = []
case_options = []
case_subcommands = []
for i, cmd in enumerate(self.commands_info):
for i, cmd in enumerate(self.commands_info(command_context)):
commands_descriptions.append(self._zsh_describe(cmd.name, cmd.description))

# Build case statement for options.
@@ -393,7 +392,7 @@ class BuiltinCommands(MachCommandBase):
)

globalopts = []
for opt_strings, description in self.global_options.items():
for opt_strings, description in self.global_options(command_context).items():
for opt in opt_strings:
globalopts.append(self._zsh_describe(opt, description))

@@ -430,7 +429,7 @@ class BuiltinCommands(MachCommandBase):
return comp

globalopts = []
for opt_strs, description in self.global_options.items():
for opt_strs, description in self.global_options(command_context).items():
comp = (
"complete -c mach -n '__fish_mach_complete_no_command' "
"-d '{}'".format(description.replace("'", "\\'"))
@@ -440,13 +439,10 @@ class BuiltinCommands(MachCommandBase):

cmds = []
cmds_opts = []
for i, cmd in enumerate(self.commands_info):
for i, cmd in enumerate(self.commands_info(command_context)):
cmds.append(
"complete -c mach -f -n '__fish_mach_complete_no_command' "
"-a {} -d '{}'".format(
cmd.name,
cmd.description.replace("'", "\\'"),
)
"-a {} -d '{}'".format(cmd.name, cmd.description.replace("'", "\\'"))
)

cmds_opts += ["# {}".format(cmd.name)]
@@ -484,11 +480,11 @@ class BuiltinCommands(MachCommandBase):
)
cmds_opts.append(comp)

if i < len(self.commands) - 1:
if i < len(self.commands(command_context)) - 1:
cmds_opts.append("")

context = {
"commands": " ".join(self.commands),
"commands": " ".join(self.commands(command_context)),
"command_completions": "\n".join(cmds),
"command_option_completions": "\n".join(cmds_opts),
"global_option_completions": "\n".join(globalopts),

@@ -7,11 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from textwrap import TextWrapper

from mach.config import TYPE_CLASSES
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from mach.decorators import CommandArgument, CommandProvider, Command
from mozbuild.base import MachCommandBase


@@ -38,12 +34,14 @@ class Settings(MachCommandBase):
"""List available settings."""
types = {v: k for k, v in TYPE_CLASSES.items()}
wrapper = TextWrapper(initial_indent="# ", subsequent_indent="# ")
for i, section in enumerate(sorted(self._mach_context.settings)):
for i, section in enumerate(sorted(command_context._mach_context.settings)):
if not short:
print("%s[%s]" % ("" if i == 0 else "\n", section))

for option in sorted(self._mach_context.settings[section]._settings):
meta = self._mach_context.settings[section].get_meta(option)
for option in sorted(
command_context._mach_context.settings[section]._settings
):
meta = command_context._mach_context.settings[section].get_meta(option)
desc = meta["description"]

if short:

@@ -57,12 +57,10 @@ def test_register_command_with_metrics_path(registrar):
class CommandFoo(MachCommandBase):
@Command("cmd_foo", category="testing", metrics_path=metrics_path)
def run_foo(self, command_context):
assert self.metrics == metrics_mock
assert command_context.metrics == metrics_mock

@SubCommand("cmd_foo", "sub_foo", metrics_path=metrics_path + "2")
def run_subfoo(self, command_context):
assert self.metrics == metrics_mock
assert command_context.metrics == metrics_mock

registrar.dispatch("cmd_foo", context)
@@ -86,9 +84,6 @@ def test_register_command_sets_up_class_at_runtime(registrar):
class CommandFoo(MachCommandBase):
@Command("cmd_foo", category="testing", virtualenv_name="env_foo")
def run_foo(self, command_context):
assert (
os.path.basename(self.virtualenv_manager.virtualenv_root) == "env_foo"
)
assert (
os.path.basename(command_context.virtualenv_manager.virtualenv_root)
== "env_foo"
@@ -97,9 +92,6 @@ def test_register_command_sets_up_class_at_runtime(registrar):

@Command("cmd_bar", category="testing", virtualenv_name="env_bar")
def run_bar(self, command_context):
assert (
os.path.basename(self.virtualenv_manager.virtualenv_root) == "env_bar"
)
assert (
os.path.basename(command_context.virtualenv_manager.virtualenv_root)
== "env_bar"

@@ -63,7 +63,7 @@ class MachCommands(MachCommandBase):
args,
):
# Avoid logging the command
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
command_context.log_manager.terminal_handler.setLevel(logging.CRITICAL)

# Note: subprocess requires native strings in os.environ on Windows.
append_env = {"PYTHONDONTWRITEBYTECODE": str("1")}
@@ -75,14 +75,16 @@ class MachCommands(MachCommandBase):
from mach_bootstrap import mach_sys_path

python_path = sys.executable
append_env["PYTHONPATH"] = os.pathsep.join(mach_sys_path(self.topsrcdir))
append_env["PYTHONPATH"] = os.pathsep.join(
mach_sys_path(command_context.topsrcdir)
)
else:
self.virtualenv_manager.ensure()
command_context.virtualenv_manager.ensure()
if not no_activate:
self.virtualenv_manager.activate()
python_path = self.virtualenv_manager.python_path
command_context.virtualenv_manager.activate()
python_path = command_context.virtualenv_manager.python_path
if requirements:
self.virtualenv_manager.install_pip_requirements(
command_context.virtualenv_manager.install_pip_requirements(
requirements, require_hashes=False
)

@@ -97,14 +99,14 @@ class MachCommands(MachCommandBase):
if not no_virtualenv:
# Use `_run_pip` directly rather than `install_pip_package` to bypass
# `req.check_if_exists()` which may detect a system installed ipython.
self.virtualenv_manager._run_pip(["install", "ipython"])
command_context.virtualenv_manager._run_pip(["install", "ipython"])
python_path = which("ipython", path=bindir)

if not python_path:
print("error: could not detect or install ipython")
return 1

return self.run_process(
return command_context.run_process(
[python_path] + args,
pass_thru=True, # Allow user to run Python interactively.
ensure_exit_code=False, # Don't throw on non-zero exit code.
@@ -169,7 +171,7 @@ class MachCommands(MachCommandBase):
os.environ[b"PYTHON_TEST_TMP"] = tempdir
else:
os.environ["PYTHON_TEST_TMP"] = tempdir
return self.run_python_tests(*args, **kwargs)
return self.run_python_tests(command_context, *args, **kwargs)
finally:
import mozfile

@@ -177,6 +179,7 @@ class MachCommands(MachCommandBase):

def run_python_tests(
self,
command_context,
tests=None,
test_objects=None,
subsuite=None,
@@ -187,11 +190,11 @@ class MachCommands(MachCommandBase):
**kwargs
):

self.activate_virtualenv()
command_context.activate_virtualenv()
if test_objects is None:
from moztest.resolve import TestResolver

resolver = self._spawn(TestResolver)
resolver = command_context._spawn(TestResolver)
# If we were given test paths, try to find tests matching them.
test_objects = resolver.resolve_tests(paths=tests, flavor="python")
else:
@@ -212,7 +215,7 @@ class MachCommands(MachCommandBase):
tests = mp.active_tests(
filters=filters,
disabled=False,
python=self.virtualenv_manager.version_info()[0],
python=command_context.virtualenv_manager.version_info()[0],
**mozinfo.info
)

@@ -222,7 +225,7 @@ class MachCommands(MachCommandBase):
"TEST-UNEXPECTED-FAIL | No tests collected "
+ "{}(Not in PYTHON_UNITTEST_MANIFESTS?)".format(submsg)
)
self.log(logging.WARN, "python-test", {}, message)
command_context.log(logging.WARN, "python-test", {}, message)
return 1

parallel = []
@@ -238,7 +241,7 @@ class MachCommands(MachCommandBase):
test.get("requirements")
and test["requirements"] not in installed_requirements
):
self.virtualenv_manager.install_pip_requirements(
command_context.virtualenv_manager.install_pip_requirements(
test["requirements"], quiet=True
)
installed_requirements.add(test["requirements"])
@@ -253,9 +256,7 @@ class MachCommands(MachCommandBase):
else:
parallel.append(test)

self.jobs = jobs or cpu_count()
self.terminate = False
self.verbose = verbose
jobs = jobs or cpu_count()

return_code = 0

@@ -263,10 +264,12 @@ class MachCommands(MachCommandBase):
output, ret, test_path = result

for line in output:
self.log(logging.INFO, "python-test", {"line": line.rstrip()}, "{line}")
command_context.log(
logging.INFO, "python-test", {"line": line.rstrip()}, "{line}"
)

if ret and not return_code:
self.log(
command_context.log(
logging.ERROR,
"python-test",
{"test_path": test_path, "ret": ret},
@@ -274,9 +277,12 @@ class MachCommands(MachCommandBase):
)
return return_code or ret

with ThreadPoolExecutor(max_workers=self.jobs) as executor:
with ThreadPoolExecutor(max_workers=jobs) as executor:
futures = [
executor.submit(self._run_python_test, test) for test in parallel
executor.submit(
self._run_python_test, command_context, test, jobs, verbose
)
for test in parallel
]

try:
@@ -290,11 +296,13 @@ class MachCommands(MachCommandBase):
raise

for test in sequential:
return_code = on_test_finished(self._run_python_test(test))
return_code = on_test_finished(
self._run_python_test(command_context, test, jobs, verbose)
)
if return_code and exitfirst:
break

self.log(
command_context.log(
logging.INFO,
"python-test",
{"return_code": return_code},
@@ -302,17 +310,19 @@ class MachCommands(MachCommandBase):
)
return return_code

def _run_python_test(self, test):
def _run_python_test(self, command_context, test, jobs, verbose):
from mozprocess import ProcessHandler

output = []

def _log(line):
# Buffer messages if more than one worker to avoid interleaving
if self.jobs > 1:
if jobs > 1:
output.append(line)
else:
self.log(logging.INFO, "python-test", {"line": line.rstrip()}, "{line}")
command_context.log(
logging.INFO, "python-test", {"line": line.rstrip()}, "{line}"
)

file_displayed_test = [] # used as boolean

@@ -332,7 +342,7 @@ class MachCommands(MachCommandBase):
_log(line)

_log(test["path"])
python = self.virtualenv_manager.python_path
python = command_context.virtualenv_manager.python_path
cmd = [python, test["path"]]
env = os.environ.copy()
if six.PY2:
@@ -353,7 +363,7 @@ class MachCommands(MachCommandBase):
"call?): {}".format(test["path"])
)

if self.verbose:
if verbose:
if return_code != 0:
_log("Test failed: {}".format(test["path"]))
else:

@@ -7,11 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import errno
import sys

from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from mach.decorators import CommandArgument, CommandProvider, Command
from mozbuild.base import MachCommandBase
from mozboot.bootstrap import APPLICATIONS

@@ -39,20 +35,17 @@ class Bootstrap(MachCommandBase):
help="Only execute actions that leave the system " "configuration alone.",
)
def bootstrap(
self,
command_context,
application_choice=None,
no_system_changes=False,
self, command_context, application_choice=None, no_system_changes=False
):
from mozboot.bootstrap import Bootstrapper

bootstrapper = Bootstrapper(
choice=application_choice,
no_interactive=not self._mach_context.is_interactive,
no_interactive=not command_context._mach_context.is_interactive,
no_system_changes=no_system_changes,
mach_context=self._mach_context,
mach_context=command_context._mach_context,
)
bootstrapper.bootstrap(self.settings)
bootstrapper.bootstrap(command_context.settings)


@CommandProvider
@@ -87,7 +80,9 @@ class VersionControlCommands(MachCommandBase):
import mozversioncontrol
from mozfile import which

repo = mozversioncontrol.get_repository_object(self._mach_context.topdir)
repo = mozversioncontrol.get_repository_object(
command_context._mach_context.topdir
)
tool = "hg"
if repo.name == "git":
tool = "git"
@@ -105,17 +100,21 @@ class VersionControlCommands(MachCommandBase):
if update_only:
if repo.name == "git":
bootstrap.update_git_tools(
vcs, self._mach_context.state_dir, self._mach_context.topdir
vcs,
command_context._mach_context.state_dir,
command_context._mach_context.topdir,
)
else:
bootstrap.update_vct(vcs, self._mach_context.state_dir)
bootstrap.update_vct(vcs, command_context._mach_context.state_dir)
else:
if repo.name == "git":
bootstrap.configure_git(
vcs,
which("git-cinnabar"),
self._mach_context.state_dir,
self._mach_context.topdir,
command_context._mach_context.state_dir,
command_context._mach_context.topdir,
)
else:
bootstrap.configure_mercurial(vcs, self._mach_context.state_dir)
bootstrap.configure_mercurial(
vcs, command_context._mach_context.state_dir
)

@@ -13,17 +13,9 @@ import six

from collections import OrderedDict

from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
SubCommand,
)
from mach.decorators import CommandArgument, CommandProvider, Command, SubCommand
from mozbuild.artifact_builds import JOB_CHOICES
from mozbuild.base import (
MachCommandBase,
MachCommandConditions as conditions,
)
from mozbuild.base import MachCommandBase, MachCommandConditions as conditions
from mozbuild.util import ensureParentDir
import mozversioncontrol

@@ -88,6 +80,7 @@ class PackageFrontend(MachCommandBase):

def _make_artifacts(
self,
command_context,
tree=None,
job=None,
skip_cache=False,
@@ -97,19 +90,21 @@ class PackageFrontend(MachCommandBase):
download_maven_zip=False,
no_process=False,
):
state_dir = self._mach_context.state_dir
state_dir = command_context._mach_context.state_dir
cache_dir = os.path.join(state_dir, "package-frontend")

hg = None
if conditions.is_hg(self):
hg = self.substs["HG"]
if conditions.is_hg(command_context):
hg = command_context.substs["HG"]

git = None
if conditions.is_git(self):
git = self.substs["GIT"]
if conditions.is_git(command_context):
git = command_context.substs["GIT"]

# If we're building Thunderbird, we should be checking for comm-central artifacts.
topsrcdir = self.substs.get("commtopsrcdir", self.topsrcdir)
topsrcdir = command_context.substs.get(
"commtopsrcdir", command_context.topsrcdir
)

if download_maven_zip:
if download_tests:
@@ -125,10 +120,10 @@ class PackageFrontend(MachCommandBase):

artifacts = Artifacts(
tree,
self.substs,
self.defines,
command_context.substs,
command_context.defines,
job,
log=self.log,
log=command_context.log,
cache_dir=cache_dir,
skip_cache=skip_cache,
hg=hg,
@@ -139,7 +134,7 @@ class PackageFrontend(MachCommandBase):
download_host_bins=download_host_bins,
download_maven_zip=download_maven_zip,
no_process=no_process,
mozbuild=self,
mozbuild=command_context,
)
return artifacts

@@ -189,8 +184,9 @@ class PackageFrontend(MachCommandBase):
no_process=False,
maven_zip=False,
):
self._set_log_level(verbose)
command_context._set_log_level(verbose)
artifacts = self._make_artifacts(
command_context,
tree=tree,
job=job,
skip_cache=skip_cache,
@@ -201,7 +197,7 @@ class PackageFrontend(MachCommandBase):
no_process=no_process,
)

return artifacts.install_from(source, distdir or self.distdir)
return artifacts.install_from(source, distdir or command_context.distdir)

@ArtifactSubCommand(
"artifact",
@@ -209,8 +205,8 @@ class PackageFrontend(MachCommandBase):
"Delete local artifacts and reset local artifact cache.",
)
def artifact_clear_cache(self, command_context, tree=None, job=None, verbose=False):
self._set_log_level(verbose)
artifacts = self._make_artifacts(tree=tree, job=job)
command_context._set_log_level(verbose)
artifacts = self._make_artifacts(command_context, tree=tree, job=job)
artifacts.clear_cache()
return 0

@@ -273,11 +269,7 @@ class PackageFrontend(MachCommandBase):
):
"""Download, cache and install pre-built toolchains."""
from mozbuild.artifacts import ArtifactCache
from mozbuild.action.tooltool import (
FileRecord,
open_manifest,
unpack_file,
)
from mozbuild.action.tooltool import FileRecord, open_manifest, unpack_file
import redo
import requests
import time
@@ -285,22 +277,26 @@ class PackageFrontend(MachCommandBase):
from taskgraph.util.taskcluster import get_artifact_url

start = time.time()
self._set_log_level(verbose)
# Normally, we'd use self.log_manager.enable_unstructured(),
command_context._set_log_level(verbose)
# Normally, we'd use command_context.log_manager.enable_unstructured(),
# but that enables all logging, while we only really want tooltool's
# and it also makes structured log output twice.
# So we manually do what it does, and limit that to the tooltool
# logger.
if self.log_manager.terminal_handler:
if command_context.log_manager.terminal_handler:
logging.getLogger("mozbuild.action.tooltool").addHandler(
self.log_manager.terminal_handler
command_context.log_manager.terminal_handler
)
logging.getLogger("redo").addHandler(self.log_manager.terminal_handler)
self.log_manager.terminal_handler.addFilter(
self.log_manager.structured_filter
logging.getLogger("redo").addHandler(
command_context.log_manager.terminal_handler
)
command_context.log_manager.terminal_handler.addFilter(
command_context.log_manager.structured_filter
)
if not cache_dir:
cache_dir = os.path.join(self._mach_context.state_dir, "toolchains")
cache_dir = os.path.join(
command_context._mach_context.state_dir, "toolchains"
)

tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net")
taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
@@ -309,7 +305,9 @@ class PackageFrontend(MachCommandBase):
else:
tooltool_url = "https://{}".format(tooltool_host)

cache = ArtifactCache(cache_dir=cache_dir, log=self.log, skip_cache=skip_cache)
cache = ArtifactCache(
cache_dir=cache_dir, log=command_context.log, skip_cache=skip_cache
)

class DownloadRecord(FileRecord):
def __init__(self, url, *args, **kwargs):
@@ -376,7 +374,7 @@ class PackageFrontend(MachCommandBase):

if from_build:
if "MOZ_AUTOMATION" in os.environ:
self.log(
command_context.log(
logging.ERROR,
"artifact",
{},
@@ -397,7 +395,7 @@ class PackageFrontend(MachCommandBase):

task = tasks.get(b)
if not task:
self.log(
command_context.log(
logging.ERROR,
"artifact",
{"build": user_value},
@@ -410,7 +408,7 @@ class PackageFrontend(MachCommandBase):
# are built on trunk projects, so the task will be available to
# install here.
if bootstrap and not task.attributes.get("local-toolchain"):
self.log(
command_context.log(
logging.ERROR,
"artifact",
{"build": user_value},
@@ -419,7 +417,7 @@ class PackageFrontend(MachCommandBase):
return 1

artifact_name = task.attributes.get("toolchain-artifact")
self.log(
command_context.log(
logging.DEBUG,
"artifact",
{
@@ -433,19 +431,21 @@ class PackageFrontend(MachCommandBase):
task, {}, deadline, task.optimization.get("index-search", [])
)
if task_id in (True, False) or not artifact_name:
self.log(
command_context.log(
logging.ERROR,
"artifact",
{"build": user_value},
_COULD_NOT_FIND_ARTIFACTS_TEMPLATE,
)
# Get and print some helpful info for diagnosis.
repo = mozversioncontrol.get_repository_object(self.topsrcdir)
repo = mozversioncontrol.get_repository_object(
command_context.topsrcdir
)
changed_files = set(repo.get_outgoing_files()) | set(
repo.get_changed_files()
)
if changed_files:
self.log(
command_context.log(
logging.ERROR,
"artifact",
{},
@@ -453,7 +453,7 @@ class PackageFrontend(MachCommandBase):
"to the following files: %s" % sorted(changed_files),
)
if "TASKCLUSTER_ROOT_URL" in os.environ:
self.log(
command_context.log(
logging.ERROR,
"artifact",
{"build": user_value},
@@ -466,7 +466,7 @@ class PackageFrontend(MachCommandBase):
)
return 1

self.log(
command_context.log(
logging.DEBUG,
"artifact",
{"name": artifact_name, "task_id": task_id},
@@ -477,7 +477,7 @@ class PackageFrontend(MachCommandBase):
records[record.filename] = record

for record in six.itervalues(records):
self.log(
command_context.log(
logging.INFO,
"artifact",
{"name": record.basename},
@@ -507,11 +507,11 @@ class PackageFrontend(MachCommandBase):
level = logging.WARN
else:
level = logging.ERROR
self.log(level, "artifact", {}, str(e))
command_context.log(level, "artifact", {}, str(e))
if not should_retry:
break
if attempt < retry:
self.log(
command_context.log(
logging.INFO, "artifact", {}, "Will retry in a moment..."
)
continue
@@ -522,7 +522,7 @@ class PackageFrontend(MachCommandBase):
if not valid:
os.unlink(record.filename)
|
||||
if attempt < retry:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"artifact",
|
||||
{},
|
||||
@ -534,7 +534,7 @@ class PackageFrontend(MachCommandBase):
|
||||
break
|
||||
|
||||
if not valid:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"artifact",
|
||||
{"name": record.basename},
|
||||
@ -566,15 +566,13 @@ class PackageFrontend(MachCommandBase):
|
||||
if not data:
|
||||
break
|
||||
h.update(data)
|
||||
artifacts[record.url] = {
|
||||
"sha256": h.hexdigest(),
|
||||
}
|
||||
artifacts[record.url] = {"sha256": h.hexdigest()}
|
||||
if record.unpack and not no_unpack:
|
||||
unpack_file(local)
|
||||
os.unlink(local)
|
||||
|
||||
if not downloaded:
|
||||
self.log(logging.ERROR, "artifact", {}, "Nothing to download")
|
||||
command_context.log(logging.ERROR, "artifact", {}, "Nothing to download")
|
||||
|
||||
if artifacts:
|
||||
ensureParentDir(artifact_manifest)
|
||||
@ -596,7 +594,7 @@ class PackageFrontend(MachCommandBase):
|
||||
}
|
||||
],
|
||||
}
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"perfherder",
|
||||
{"data": json.dumps(perfherder_data)},
|
||||
|
@@ -34,13 +34,13 @@ class MachCommands(MachCommandBase):
backend = "Clangd"

if ide == "eclipse" and not which("eclipse"):
self.log(
command_context.log(
logging.ERROR,
"ide",
{},
"Eclipse CDT 8.4 or later must be installed in your PATH.",
)
self.log(
command_context.log(
logging.ERROR,
"ide",
{},
@@ -50,12 +50,15 @@ class MachCommands(MachCommandBase):

if ide == "vscode":
# Verify if platform has VSCode installed
if not self.found_vscode_path():
self.log(logging.ERROR, "ide", {}, "VSCode cannot be found, abording!")
vscode_cmd = self.found_vscode_path(command_context)
if vscode_cmd is None:
command_context.log(
logging.ERROR, "ide", {}, "VSCode cannot be found, aborting!"
)
return 1

# Create the Build environment to configure the tree
builder = Build(self._mach_context, None)
builder = Build(command_context._mach_context, None)

rc = builder.configure(command_context)
if rc != 0:
@ -63,7 +66,9 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
# First install what we can through install manifests.
|
||||
rc = builder._run_make(
|
||||
directory=self.topobjdir, target="pre-export", line_handler=None
|
||||
directory=command_context.topobjdir,
|
||||
target="pre-export",
|
||||
line_handler=None,
|
||||
)
|
||||
if rc != 0:
|
||||
return rc
|
||||
@ -72,7 +77,9 @@ class MachCommands(MachCommandBase):
|
||||
# export target, because we can't do anything better.
|
||||
for target in ("export", "pre-compile"):
|
||||
rc = builder._run_make(
|
||||
directory=self.topobjdir, target=target, line_handler=None
|
||||
directory=command_context.topobjdir,
|
||||
target=target,
|
||||
line_handler=None,
|
||||
)
|
||||
if rc != 0:
|
||||
return rc
|
||||
@ -80,47 +87,53 @@ class MachCommands(MachCommandBase):
|
||||
# Here we refresh the whole build. 'build export' is sufficient here and is
|
||||
# probably more correct but it's also nice having a single target to get a fully
|
||||
# built and indexed project (gives a easy target to use before go out to lunch).
|
||||
res = self._mach_context.commands.dispatch("build", self._mach_context)
|
||||
res = command_context._mach_context.commands.dispatch(
|
||||
"build", command_context._mach_context
|
||||
)
|
||||
if res != 0:
|
||||
return 1
|
||||
|
||||
# Generate or refresh the IDE backend.
|
||||
python = self.virtualenv_manager.python_path
|
||||
config_status = os.path.join(self.topobjdir, "config.status")
|
||||
python = command_context.virtualenv_manager.python_path
|
||||
config_status = os.path.join(command_context.topobjdir, "config.status")
|
||||
args = [python, config_status, "--backend=%s" % backend]
|
||||
res = self._run_command_in_objdir(
|
||||
res = command_context._run_command_in_objdir(
|
||||
args=args, pass_thru=True, ensure_exit_code=False
|
||||
)
|
||||
if res != 0:
|
||||
return 1
|
||||
|
||||
if ide == "eclipse":
|
||||
eclipse_workspace_dir = self.get_eclipse_workspace_path()
|
||||
eclipse_workspace_dir = self.get_eclipse_workspace_path(command_context)
|
||||
subprocess.check_call(["eclipse", "-data", eclipse_workspace_dir])
|
||||
elif ide == "visualstudio":
|
||||
visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
|
||||
visual_studio_workspace_dir = self.get_visualstudio_workspace_path(
|
||||
command_context
|
||||
)
|
||||
subprocess.check_call(["explorer.exe", visual_studio_workspace_dir])
|
||||
elif ide == "vscode":
|
||||
return self.setup_vscode()
|
||||
return self.setup_vscode(command_context, vscode_cmd)
|
||||
|
||||
def get_eclipse_workspace_path(self):
|
||||
def get_eclipse_workspace_path(self, command_context):
|
||||
from mozbuild.backend.cpp_eclipse import CppEclipseBackend
|
||||
|
||||
return CppEclipseBackend.get_workspace_path(self.topsrcdir, self.topobjdir)
|
||||
return CppEclipseBackend.get_workspace_path(
|
||||
command_context.topsrcdir, command_context.topobjdir
|
||||
)
|
||||
|
||||
def get_visualstudio_workspace_path(self):
|
||||
return os.path.join(self.topobjdir, "msvc", "mozilla.sln")
|
||||
def get_visualstudio_workspace_path(self, command_context):
|
||||
return os.path.join(command_context.topobjdir, "msvc", "mozilla.sln")
|
||||
|
||||
def found_vscode_path(self):
|
||||
def found_vscode_path(self, command_context):
|
||||
|
||||
if "linux" in self.platform[0]:
|
||||
if "linux" in command_context.platform[0]:
|
||||
cmd_and_path = [
|
||||
{"path": "/usr/local/bin/code", "cmd": ["/usr/local/bin/code"]},
|
||||
{"path": "/snap/bin/code", "cmd": ["/snap/bin/code"]},
|
||||
{"path": "/usr/bin/code", "cmd": ["/usr/bin/code"]},
|
||||
{"path": "/usr/bin/code-insiders", "cmd": ["/usr/bin/code-insiders"]},
|
||||
]
|
||||
elif "macos" in self.platform[0]:
|
||||
elif "macos" in command_context.platform[0]:
|
||||
cmd_and_path = [
|
||||
{"path": "/usr/local/bin/code", "cmd": ["/usr/local/bin/code"]},
|
||||
{
|
||||
@ -136,7 +149,7 @@ class MachCommands(MachCommandBase):
|
||||
],
|
||||
},
|
||||
]
|
||||
elif "win64" in self.platform[0]:
|
||||
elif "win64" in command_context.platform[0]:
|
||||
from pathlib import Path
|
||||
|
||||
vscode_path = mozpath.join(
|
||||
@ -163,42 +176,44 @@ class MachCommands(MachCommandBase):
|
||||
# Did we guess the path?
|
||||
for element in cmd_and_path:
|
||||
if os.path.exists(element["path"]):
|
||||
self.vscode_cmd = element["cmd"]
|
||||
return True
|
||||
return element["cmd"]
|
||||
|
||||
for _ in range(5):
|
||||
vscode_path = input(
|
||||
"Could not find the VSCode binary. Please provide the full path to it:\n"
|
||||
)
|
||||
if os.path.exists(vscode_path):
|
||||
self.vscode_cmd = [vscode_path]
|
||||
return True
|
||||
return [vscode_path]
|
||||
|
||||
# Path cannot be found
|
||||
return False
|
||||
return None
|
||||
|
||||
def setup_vscode(self):
|
||||
vscode_settings = mozpath.join(self.topsrcdir, ".vscode", "settings.json")
|
||||
def setup_vscode(self, command_context, vscode_cmd):
|
||||
vscode_settings = mozpath.join(
|
||||
command_context.topsrcdir, ".vscode", "settings.json"
|
||||
)
|
||||
|
||||
clangd_cc_path = mozpath.join(self.topobjdir, "clangd")
|
||||
clangd_cc_path = mozpath.join(command_context.topobjdir, "clangd")
|
||||
|
||||
# Verify if the required files are present
|
||||
clang_tools_path = mozpath.join(self._mach_context.state_dir, "clang-tools")
|
||||
clang_tools_path = mozpath.join(
|
||||
command_context._mach_context.state_dir, "clang-tools"
|
||||
)
|
||||
clang_tidy_bin = mozpath.join(clang_tools_path, "clang-tidy", "bin")
|
||||
|
||||
clangd_path = mozpath.join(
|
||||
clang_tidy_bin,
|
||||
"clangd" + self.config_environment.substs.get("BIN_SUFFIX", ""),
|
||||
"clangd" + command_context.config_environment.substs.get("BIN_SUFFIX", ""),
|
||||
)
|
||||
|
||||
if not os.path.exists(clangd_path):
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"ide",
|
||||
{},
|
||||
"Unable to locate clangd in {}.".format(clang_tidy_bin),
|
||||
)
|
||||
rc = self._get_clang_tools(clang_tools_path)
|
||||
rc = self._get_clang_tools(command_context, clang_tools_path)
|
||||
|
||||
if rc != 0:
|
||||
return rc
|
||||
@ -207,7 +222,7 @@ class MachCommands(MachCommandBase):
|
||||
import json
|
||||
from mozbuild.code_analysis.utils import ClangTidyConfig
|
||||
|
||||
clang_tidy_cfg = ClangTidyConfig(self.topsrcdir)
|
||||
clang_tidy_cfg = ClangTidyConfig(command_context.topsrcdir)
|
||||
|
||||
clangd_json = json.loads(
|
||||
"""
|
||||
@ -285,21 +300,21 @@ class MachCommands(MachCommandBase):
|
||||
fh.write(json.dumps(settings, indent=4))
|
||||
|
||||
# Open vscode with new configuration
|
||||
rc = subprocess.call(self.vscode_cmd + [self.topsrcdir])
|
||||
rc = subprocess.call(vscode_cmd + [command_context.topsrcdir])
|
||||
|
||||
if rc != 0:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"ide",
|
||||
{},
|
||||
"Unable to open VS Code. Please open VS Code manually and load "
|
||||
"directory: {}".format(self.topsrcdir),
|
||||
"directory: {}".format(command_context.topsrcdir),
|
||||
)
|
||||
return rc
|
||||
|
||||
return 0
|
||||
|
||||
def _get_clang_tools(self, clang_tools_path):
|
||||
def _get_clang_tools(self, command_context, clang_tools_path):
|
||||
|
||||
import shutil
|
||||
|
||||
@ -311,12 +326,12 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
from mozbuild.artifact_commands import PackageFrontend
|
||||
|
||||
self._artifact_manager = PackageFrontend(self._mach_context)
|
||||
_artifact_manager = PackageFrontend(command_context._mach_context)
|
||||
|
||||
job, _ = self.platform
|
||||
job, _ = command_context.platform
|
||||
|
||||
if job is None:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"ide",
|
||||
{},
|
||||
@ -331,8 +346,8 @@ class MachCommands(MachCommandBase):
|
||||
# We want to unpack data in the clang-tidy mozbuild folder
|
||||
currentWorkingDir = os.getcwd()
|
||||
os.chdir(clang_tools_path)
|
||||
rc = self._artifact_manager.artifact_toolchain(
|
||||
self, verbose=False, from_build=[job], no_unpack=False, retry=0
|
||||
rc = _artifact_manager.artifact_toolchain(
|
||||
command_context, verbose=False, from_build=[job], no_unpack=False, retry=0
|
||||
)
|
||||
# Change back the cwd
|
||||
os.chdir(currentWorkingDir)
|
||||
|
@@ -91,9 +91,9 @@ class Build(MachCommandBase):
|
||||
"""
|
||||
from mozbuild.controller.building import BuildDriver
|
||||
|
||||
self.log_manager.enable_all_structured_loggers()
|
||||
command_context.log_manager.enable_all_structured_loggers()
|
||||
|
||||
loader = MozconfigLoader(self.topsrcdir)
|
||||
loader = MozconfigLoader(command_context.topsrcdir)
|
||||
mozconfig = loader.read_mozconfig(loader.AUTODETECT)
|
||||
configure_args = mozconfig["configure_args"]
|
||||
doing_pgo = configure_args and "MOZ_PGO=1" in configure_args
|
||||
@ -104,19 +104,19 @@ class Build(MachCommandBase):
|
||||
raise Exception(
|
||||
"Cannot specify targets (%s) in MOZ_PGO=1 builds" % what
|
||||
)
|
||||
instr = self._spawn(BuildDriver)
|
||||
instr = command_context._spawn(BuildDriver)
|
||||
orig_topobjdir = instr._topobjdir
|
||||
instr._topobjdir = mozpath.join(instr._topobjdir, "instrumented")
|
||||
|
||||
append_env = {"MOZ_PROFILE_GENERATE": "1"}
|
||||
status = instr.build(
|
||||
self.metrics,
|
||||
command_context.metrics,
|
||||
what=what,
|
||||
jobs=jobs,
|
||||
directory=directory,
|
||||
verbose=verbose,
|
||||
keep_going=keep_going,
|
||||
mach_context=self._mach_context,
|
||||
mach_context=command_context._mach_context,
|
||||
append_env=append_env,
|
||||
)
|
||||
if status != 0:
|
||||
@ -142,22 +142,22 @@ class Build(MachCommandBase):
|
||||
pgo_env["JARLOG_FILE"] = mozpath.join(orig_topobjdir, "jarlog/en-US.log")
|
||||
pgo_cmd = [
|
||||
instr.virtualenv_manager.python_path,
|
||||
mozpath.join(self.topsrcdir, "build/pgo/profileserver.py"),
|
||||
mozpath.join(command_context.topsrcdir, "build/pgo/profileserver.py"),
|
||||
]
|
||||
subprocess.check_call(pgo_cmd, cwd=instr.topobjdir, env=pgo_env)
|
||||
|
||||
# Set the default build to MOZ_PROFILE_USE
|
||||
append_env = {"MOZ_PROFILE_USE": "1"}
|
||||
|
||||
driver = self._spawn(BuildDriver)
|
||||
driver = command_context._spawn(BuildDriver)
|
||||
return driver.build(
|
||||
self.metrics,
|
||||
command_context.metrics,
|
||||
what=what,
|
||||
jobs=jobs,
|
||||
directory=directory,
|
||||
verbose=verbose,
|
||||
keep_going=keep_going,
|
||||
mach_context=self._mach_context,
|
||||
mach_context=command_context._mach_context,
|
||||
append_env=append_env,
|
||||
)
|
||||
|
||||
@ -179,11 +179,11 @@ class Build(MachCommandBase):
|
||||
):
|
||||
from mozbuild.controller.building import BuildDriver
|
||||
|
||||
self.log_manager.enable_all_structured_loggers()
|
||||
driver = self._spawn(BuildDriver)
|
||||
command_context.log_manager.enable_all_structured_loggers()
|
||||
driver = command_context._spawn(BuildDriver)
|
||||
|
||||
return driver.configure(
|
||||
self.metrics,
|
||||
command_context.metrics,
|
||||
options=options,
|
||||
buildstatus_messages=buildstatus_messages,
|
||||
line_handler=line_handler,
|
||||
@ -222,7 +222,7 @@ class Build(MachCommandBase):
|
||||
if url:
|
||||
server.add_resource_json_url("url", url)
|
||||
else:
|
||||
last = self._get_state_filename("build_resources.json")
|
||||
last = command_context._get_state_filename("build_resources.json")
|
||||
if not os.path.exists(last):
|
||||
print(
|
||||
"Build resources not available. If you have performed a "
|
||||
@ -271,8 +271,8 @@ class Build(MachCommandBase):
|
||||
def build_backend(
|
||||
self, command_context, backend, diff=False, verbose=False, dry_run=False
|
||||
):
|
||||
python = self.virtualenv_manager.python_path
|
||||
config_status = os.path.join(self.topobjdir, "config.status")
|
||||
python = command_context.virtualenv_manager.python_path
|
||||
config_status = os.path.join(command_context.topobjdir, "config.status")
|
||||
|
||||
if not os.path.exists(config_status):
|
||||
print(
|
||||
@ -292,6 +292,6 @@ class Build(MachCommandBase):
|
||||
if dry_run:
|
||||
args.append("--dry-run")
|
||||
|
||||
return self._run_command_in_objdir(
|
||||
return command_context._run_command_in_objdir(
|
||||
args=args, pass_thru=True, ensure_exit_code=False
|
||||
)
|
||||
|
File diff suppressed because it is too large
@@ -6,17 +6,10 @@

from __future__ import absolute_import, print_function

from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from mach.decorators import CommandArgument, CommandProvider, Command

from mozbuild.base import MachCommandBase
from mozbuild.shellutil import (
split as shell_split,
quote as shell_quote,
)
from mozbuild.shellutil import split as shell_split, quote as shell_quote


@CommandProvider
@@ -35,13 +28,13 @@ class Introspection(MachCommandBase):
from mozbuild.util import resolve_target_to_make
from mozbuild.compilation import util

if not util.check_top_objdir(self.topobjdir):
if not util.check_top_objdir(command_context.topobjdir):
return 1

path_arg = self._wrap_path_argument(what)
path_arg = command_context._wrap_path_argument(what)

make_dir, make_target = resolve_target_to_make(
self.topobjdir, path_arg.relpath()
command_context.topobjdir, path_arg.relpath()
)

if make_dir is None and make_target is None:

@ -44,8 +44,8 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
def reference(self, command_context, symbol, name_only=False):
|
||||
# mozbuild.sphinx imports some Sphinx modules, so we need to be sure
|
||||
# the optional Sphinx package is installed.
|
||||
self.activate_virtualenv()
|
||||
self.virtualenv_manager.install_pip_package("Sphinx==1.1.3")
|
||||
command_context.activate_virtualenv()
|
||||
command_context.virtualenv_manager.install_pip_package("Sphinx==1.1.3")
|
||||
|
||||
from mozbuild.sphinx import (
|
||||
format_module,
|
||||
@ -127,7 +127,7 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
"""
|
||||
components = defaultdict(set)
|
||||
try:
|
||||
for p, m in self._get_files_info(paths, rev=rev).items():
|
||||
for p, m in self._get_files_info(command_context, paths, rev=rev).items():
|
||||
components[m.get("BUG_COMPONENT")].add(p)
|
||||
except InvalidPathException as e:
|
||||
print(e)
|
||||
@ -179,7 +179,7 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
missing = set()
|
||||
|
||||
try:
|
||||
for p, m in self._get_files_info(paths, rev=rev).items():
|
||||
for p, m in self._get_files_info(command_context, paths, rev=rev).items():
|
||||
if "BUG_COMPONENT" not in m:
|
||||
missing.add(p)
|
||||
except InvalidPathException as e:
|
||||
@ -218,7 +218,7 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
# TODO operate in VCS space. This requires teaching the VCS reader
|
||||
# to understand wildcards and/or for the relative path issue in the
|
||||
# VCS finder to be worked out.
|
||||
for p, m in sorted(self._get_files_info(["**"]).items()):
|
||||
for p, m in sorted(self._get_files_info(command_context, ["**"]).items()):
|
||||
if "BUG_COMPONENT" not in m:
|
||||
missing_component.add(p)
|
||||
print(
|
||||
@ -284,17 +284,17 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
if missing_component:
|
||||
return 1
|
||||
|
||||
def _get_files_info(self, paths, rev=None):
|
||||
reader = self.mozbuild_reader(config_mode="empty", vcs_revision=rev)
|
||||
def _get_files_info(self, command_context, paths, rev=None):
|
||||
reader = command_context.mozbuild_reader(config_mode="empty", vcs_revision=rev)
|
||||
|
||||
# Normalize to relative from topsrcdir.
|
||||
relpaths = []
|
||||
for p in paths:
|
||||
a = mozpath.abspath(p)
|
||||
if not mozpath.basedir(a, [self.topsrcdir]):
|
||||
if not mozpath.basedir(a, [command_context.topsrcdir]):
|
||||
raise InvalidPathException("path is outside topsrcdir: %s" % p)
|
||||
|
||||
relpaths.append(mozpath.relpath(a, self.topsrcdir))
|
||||
relpaths.append(mozpath.relpath(a, command_context.topsrcdir))
|
||||
|
||||
# Expand wildcards.
|
||||
# One variable is for ordering. The other for membership tests.
|
||||
@ -304,7 +304,7 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
for p in relpaths:
|
||||
if "*" not in p:
|
||||
if p not in all_paths_set:
|
||||
if not os.path.exists(mozpath.join(self.topsrcdir, p)):
|
||||
if not os.path.exists(mozpath.join(command_context.topsrcdir, p)):
|
||||
print("(%s does not exist; ignoring)" % p, file=sys.stderr)
|
||||
continue
|
||||
|
||||
@ -319,9 +319,9 @@ class MozbuildFileCommands(MachCommandBase):
|
||||
|
||||
# finder is rooted at / for now.
|
||||
# TODO bug 1171069 tracks changing to relative.
|
||||
search = mozpath.join(self.topsrcdir, p)[1:]
|
||||
search = mozpath.join(command_context.topsrcdir, p)[1:]
|
||||
for path, f in reader.finder.find(search):
|
||||
path = path[len(self.topsrcdir) :]
|
||||
path = path[len(command_context.topsrcdir) :]
|
||||
if path not in all_paths_set:
|
||||
all_paths_set.add(path)
|
||||
allpaths.append(path)
|
||||
|
@ -85,21 +85,21 @@ class Watch(MachCommandBase):
|
||||
)
|
||||
def watch(self, command_context, verbose=False):
|
||||
"""Watch and re-build (parts of) the source tree."""
|
||||
if not conditions.is_artifact_build(self):
|
||||
if not conditions.is_artifact_build(command_context):
|
||||
print(
|
||||
"WARNING: mach watch only rebuilds the `mach build faster` parts of the tree!"
|
||||
)
|
||||
|
||||
if not self.substs.get("WATCHMAN", None):
|
||||
if not command_context.substs.get("WATCHMAN", None):
|
||||
print(
|
||||
"mach watch requires watchman to be installed and found at configure time. See "
|
||||
"https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching" # noqa
|
||||
)
|
||||
return 1
|
||||
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
try:
|
||||
self.virtualenv_manager.install_pip_package("pywatchman==1.4.1")
|
||||
command_context.virtualenv_manager.install_pip_package("pywatchman==1.4.1")
|
||||
except Exception:
|
||||
print(
|
||||
"Could not install pywatchman from pip. See "
|
||||
@ -109,7 +109,7 @@ class Watch(MachCommandBase):
|
||||
|
||||
from mozbuild.faster_daemon import Daemon
|
||||
|
||||
daemon = Daemon(self.config_environment)
|
||||
daemon = Daemon(command_context.config_environment)
|
||||
|
||||
try:
|
||||
return daemon.watch()
|
||||
@ -183,7 +183,7 @@ class CargoProvider(MachCommandBase):
|
||||
"force-cargo-host-program-check",
|
||||
]
|
||||
|
||||
ret = self._run_make(
|
||||
ret = command_context._run_make(
|
||||
srcdir=False,
|
||||
directory=root,
|
||||
ensure_exit_code=0,
|
||||
@@ -220,11 +220,14 @@ class Doctor(MachCommandBase):
help="Print verbose information found by checks.",
)
def doctor(self, command_context, fix=False, verbose=False):
self.activate_virtualenv()
command_context.activate_virtualenv()
from mozbuild.doctor import run_doctor

return run_doctor(
topsrcdir=self.topsrcdir, topobjdir=self.topobjdir, fix=fix, verbose=verbose
topsrcdir=command_context.topsrcdir,
topobjdir=command_context.topobjdir,
fix=fix,
verbose=verbose,
)


@ -282,16 +285,18 @@ class Clobber(MachCommandBase):
|
||||
from mozbuild.controller.clobber import Clobberer
|
||||
|
||||
try:
|
||||
substs = self.substs
|
||||
substs = command_context.substs
|
||||
except BuildEnvironmentNotFoundException:
|
||||
substs = {}
|
||||
|
||||
try:
|
||||
Clobberer(self.topsrcdir, self.topobjdir, substs).remove_objdir(full)
|
||||
Clobberer(
|
||||
command_context.topsrcdir, command_context.topobjdir, substs
|
||||
).remove_objdir(full)
|
||||
except OSError as e:
|
||||
if sys.platform.startswith("win"):
|
||||
if isinstance(e, WindowsError) and e.winerror in (5, 32):
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"file_access_error",
|
||||
{"error": e},
|
||||
@ -302,7 +307,7 @@ class Clobber(MachCommandBase):
|
||||
raise
|
||||
|
||||
if "python" in what:
|
||||
if conditions.is_hg(self):
|
||||
if conditions.is_hg(command_context):
|
||||
cmd = [
|
||||
"hg",
|
||||
"--config",
|
||||
@ -314,11 +319,11 @@ class Clobber(MachCommandBase):
|
||||
"-I",
|
||||
"glob:**/__pycache__",
|
||||
]
|
||||
elif conditions.is_git(self):
|
||||
elif conditions.is_git(command_context):
|
||||
cmd = ["git", "clean", "-d", "-f", "-x", "*.py[cdo]", "*/__pycache__/*"]
|
||||
else:
|
||||
cmd = ["find", ".", "-type", "f", "-name", "*.py[cdo]", "-delete"]
|
||||
subprocess.call(cmd, cwd=self.topsrcdir)
|
||||
subprocess.call(cmd, cwd=command_context.topsrcdir)
|
||||
cmd = [
|
||||
"find",
|
||||
".",
|
||||
@ -329,13 +334,16 @@ class Clobber(MachCommandBase):
|
||||
"-empty",
|
||||
"-delete",
|
||||
]
|
||||
ret = subprocess.call(cmd, cwd=self.topsrcdir)
|
||||
ret = subprocess.call(cmd, cwd=command_context.topsrcdir)
|
||||
shutil.rmtree(
|
||||
mozpath.join(self.topobjdir, "_virtualenvs"), ignore_errors=True
|
||||
mozpath.join(command_context.topobjdir, "_virtualenvs"),
|
||||
ignore_errors=True,
|
||||
)
|
||||
|
||||
if "gradle" in what:
|
||||
shutil.rmtree(mozpath.join(self.topobjdir, "gradle"), ignore_errors=True)
|
||||
shutil.rmtree(
|
||||
mozpath.join(command_context.topobjdir, "gradle"), ignore_errors=True
|
||||
)
|
||||
|
||||
return ret
|
||||
|
||||
@ -356,7 +364,7 @@ class Logs(MachCommandBase):
|
||||
)
|
||||
def show_log(self, command_context, log_file=None):
|
||||
if not log_file:
|
||||
path = self._get_state_filename("last_log.json")
|
||||
path = command_context._get_state_filename("last_log.json")
|
||||
log_file = open(path, "rb")
|
||||
|
||||
if os.isatty(sys.stdout.fileno()):
|
||||
@ -378,21 +386,23 @@ class Logs(MachCommandBase):
|
||||
created, action, params = json.loads(line)
|
||||
if not startTime:
|
||||
startTime = created
|
||||
self.log_manager.terminal_handler.formatter.start_time = created
|
||||
command_context.log_manager.terminal_handler.formatter.start_time = (
|
||||
created
|
||||
)
|
||||
if "line" in params:
|
||||
record = logging.makeLogRecord(
|
||||
{
|
||||
"created": created,
|
||||
"name": self._logger.name,
|
||||
"name": command_context._logger.name,
|
||||
"levelno": logging.INFO,
|
||||
"msg": "{line}",
|
||||
"params": params,
|
||||
"action": action,
|
||||
}
|
||||
)
|
||||
self._logger.handle(record)
|
||||
command_context._logger.handle(record)
|
||||
|
||||
if self.log_manager.terminal:
|
||||
if command_context.log_manager.terminal:
|
||||
# Close less's input so that it knows that we're done sending data.
|
||||
less.stdin.close()
|
||||
# Since the less's input file descriptor is now also the stdout
|
||||
@@ -408,13 +418,13 @@ class Logs(MachCommandBase):
class Warnings(MachCommandBase):
"""Provide commands for inspecting warnings."""

def database_path(self):
return self._get_state_filename("warnings.json")
def database_path(self, command_context):
return command_context._get_state_filename("warnings.json")

def database(self):
def database(self, command_context):
from mozbuild.compilation.warnings import WarningsDatabase

path = self.database_path()
path = self.database_path(command_context)

database = WarningsDatabase()

@ -442,10 +452,10 @@ class Warnings(MachCommandBase):
|
||||
"recent report.",
|
||||
)
|
||||
def summary(self, command_context, directory=None, report=None):
|
||||
database = self.database()
|
||||
database = self.database(command_context)
|
||||
|
||||
if directory:
|
||||
dirpath = self.join_ensure_dir(self.topsrcdir, directory)
|
||||
dirpath = self.join_ensure_dir(command_context.topsrcdir, directory)
|
||||
if not dirpath:
|
||||
return 1
|
||||
else:
|
||||
@ -483,11 +493,11 @@ class Warnings(MachCommandBase):
|
||||
"recent report.",
|
||||
)
|
||||
def list(self, command_context, directory=None, flags=None, report=None):
|
||||
database = self.database()
|
||||
database = self.database(command_context)
|
||||
|
||||
by_name = sorted(database.warnings)
|
||||
|
||||
topsrcdir = mozpath.normpath(self.topsrcdir)
|
||||
topsrcdir = mozpath.normpath(command_context.topsrcdir)
|
||||
|
||||
if directory:
|
||||
directory = mozpath.normsep(directory)
|
||||
@ -664,29 +674,29 @@ class GTestCommands(MachCommandBase):
|
||||
|
||||
# We lazy build gtest because it's slow to link
|
||||
try:
|
||||
self.config_environment
|
||||
command_context.config_environment
|
||||
except Exception:
|
||||
print("Please run |./mach build| before |./mach gtest|.")
|
||||
return 1
|
||||
|
||||
res = self._mach_context.commands.dispatch(
|
||||
"build", self._mach_context, what=["recurse_gtest"]
|
||||
res = command_context._mach_context.commands.dispatch(
|
||||
"build", command_context._mach_context, what=["recurse_gtest"]
|
||||
)
|
||||
if res:
|
||||
print("Could not build xul-gtest")
|
||||
return res
|
||||
|
||||
if self.substs.get("MOZ_WIDGET_TOOLKIT") == "cocoa":
|
||||
self._run_make(
|
||||
if command_context.substs.get("MOZ_WIDGET_TOOLKIT") == "cocoa":
|
||||
command_context._run_make(
|
||||
directory="browser/app", target="repackage", ensure_exit_code=True
|
||||
)
|
||||
|
||||
cwd = os.path.join(self.topobjdir, "_tests", "gtest")
|
||||
cwd = os.path.join(command_context.topobjdir, "_tests", "gtest")
|
||||
|
||||
if not os.path.isdir(cwd):
|
||||
os.makedirs(cwd)
|
||||
|
||||
if conditions.is_android(self):
|
||||
if conditions.is_android(command_context):
|
||||
if jobs != 1:
|
||||
print("--jobs is not supported on Android and will be ignored")
|
||||
if debug or debugger or debugger_args:
|
||||
@ -696,6 +706,7 @@ class GTestCommands(MachCommandBase):
|
||||
from mozrunner.devices.android_device import InstallIntent
|
||||
|
||||
return self.android_gtest(
|
||||
command_context,
|
||||
cwd,
|
||||
shuffle,
|
||||
gtest_filter,
|
||||
@ -718,10 +729,13 @@ class GTestCommands(MachCommandBase):
|
||||
):
|
||||
print("One or more Android-only options will be ignored")
|
||||
|
||||
app_path = self.get_binary_path("app")
|
||||
app_path = command_context.get_binary_path("app")
|
||||
args = [app_path, "-unittest", "--gtest_death_test_style=threadsafe"]
|
||||
|
||||
if sys.platform.startswith("win") and "MOZ_LAUNCHER_PROCESS" in self.defines:
|
||||
if (
|
||||
sys.platform.startswith("win")
|
||||
and "MOZ_LAUNCHER_PROCESS" in command_context.defines
|
||||
):
|
||||
args.append("--wait-for-browser")
|
||||
|
||||
if list_tests:
|
||||
@ -740,7 +754,9 @@ class GTestCommands(MachCommandBase):
|
||||
# Note: we must normalize the path here so that gtest on Windows sees
|
||||
# a MOZ_GMP_PATH which has only Windows dir seperators, because
|
||||
# nsIFile cannot open the paths with non-Windows dir seperators.
|
||||
xre_path = os.path.join(os.path.normpath(self.topobjdir), "dist", "bin")
|
||||
xre_path = os.path.join(
|
||||
os.path.normpath(command_context.topobjdir), "dist", "bin"
|
||||
)
|
||||
gtest_env["MOZ_XRE_DIR"] = xre_path
|
||||
gtest_env["MOZ_GMP_PATH"] = os.pathsep.join(
|
||||
os.path.join(xre_path, p, "1.0") for p in ("gmp-fake", "gmp-fakeopenh264")
|
||||
@ -761,7 +777,7 @@ class GTestCommands(MachCommandBase):
|
||||
gtest_env["MOZ_WEBRENDER"] = "0"
|
||||
|
||||
if jobs == 1:
|
||||
return self.run_process(
|
||||
return command_context.run_process(
|
||||
args=args,
|
||||
append_env=gtest_env,
|
||||
cwd=cwd,
|
||||
@ -775,7 +791,7 @@ class GTestCommands(MachCommandBase):
|
||||
def handle_line(job_id, line):
|
||||
# Prepend the jobId
|
||||
line = "[%d] %s" % (job_id + 1, line.strip())
|
||||
self.log(logging.INFO, "GTest", {"line": line}, "{line}")
|
||||
command_context.log(logging.INFO, "GTest", {"line": line}, "{line}")
|
||||
|
||||
gtest_env["GTEST_TOTAL_SHARDS"] = str(jobs)
|
||||
processes = {}
|
||||
@ -805,6 +821,7 @@ class GTestCommands(MachCommandBase):
|
||||
|
||||
def android_gtest(
|
||||
self,
|
||||
command_context,
|
||||
test_dir,
|
||||
shuffle,
|
||||
gtest_filter,
|
||||
@ -819,22 +836,22 @@ class GTestCommands(MachCommandBase):
|
||||
# setup logging for mozrunner
|
||||
from mozlog.commandline import setup_logging
|
||||
|
||||
format_args = {"level": self._mach_context.settings["test"]["level"]}
|
||||
default_format = self._mach_context.settings["test"]["format"]
|
||||
format_args = {"level": command_context._mach_context.settings["test"]["level"]}
|
||||
default_format = command_context._mach_context.settings["test"]["format"]
|
||||
setup_logging("mach-gtest", {}, {default_format: sys.stdout}, format_args)
|
||||
|
||||
# ensure that a device is available and test app is installed
|
||||
from mozrunner.devices.android_device import verify_android_device, get_adb_path
|
||||
|
||||
verify_android_device(
|
||||
self, install=install, app=package, device_serial=device_serial
|
||||
command_context, install=install, app=package, device_serial=device_serial
|
||||
)
|
||||
|
||||
if not adb_path:
|
||||
adb_path = get_adb_path(self)
|
||||
adb_path = get_adb_path(command_context)
|
||||
if not libxul_path:
|
||||
libxul_path = os.path.join(
|
||||
self.topobjdir, "dist", "bin", "gtest", "libxul.so"
|
||||
command_context.topobjdir, "dist", "bin", "gtest", "libxul.so"
|
||||
)
|
||||
|
||||
# run gtest via remotegtests.py
|
||||
@ -881,11 +898,11 @@ class Package(MachCommandBase):
|
||||
help="Verbose output for what commands the packaging process is running.",
|
||||
)
|
||||
def package(self, command_context, verbose=False):
|
||||
ret = self._run_make(
|
||||
ret = command_context._run_make(
|
||||
directory=".", target="package", silent=not verbose, ensure_exit_code=False
|
||||
)
|
||||
if ret == 0:
|
||||
self.notify("Packaging complete")
|
||||
command_context.notify("Packaging complete")
|
||||
return ret
|
||||
|
||||
|
||||
@ -924,20 +941,25 @@ class Install(MachCommandBase):
|
||||
description="Install the package on the machine (or device in the case of Android).",
|
||||
)
|
||||
def install(self, command_context, **kwargs):
|
||||
if conditions.is_android(self):
|
||||
if conditions.is_android(command_context):
|
||||
from mozrunner.devices.android_device import (
|
||||
verify_android_device,
|
||||
InstallIntent,
|
||||
)
|
||||
|
||||
ret = verify_android_device(self, install=InstallIntent.YES, **kwargs) == 0
|
||||
ret = (
|
||||
verify_android_device(
|
||||
command_context, install=InstallIntent.YES, **kwargs
|
||||
)
|
||||
== 0
|
||||
)
|
||||
else:
|
||||
ret = self._run_make(
|
||||
ret = command_context._run_make(
|
||||
directory=".", target="install", ensure_exit_code=False
|
||||
)
|
||||
|
||||
if ret == 0:
|
||||
self.notify("Install complete")
|
||||
command_context.notify("Install complete")
|
||||
return ret
|
||||
|
||||
|
||||
@ -1233,14 +1255,15 @@ class RunProgram(MachCommandBase):
|
||||
description="Run the compiled program, possibly under a debugger or DMD.",
|
||||
)
|
||||
def run(self, command_context, **kwargs):
|
||||
if conditions.is_android(self):
|
||||
return self._run_android(**kwargs)
|
||||
if conditions.is_jsshell(self):
|
||||
return self._run_jsshell(**kwargs)
|
||||
return self._run_desktop(**kwargs)
|
||||
if conditions.is_android(command_context):
|
||||
return self._run_android(command_context, **kwargs)
|
||||
if conditions.is_jsshell(command_context):
|
||||
return self._run_jsshell(command_context, **kwargs)
|
||||
return self._run_desktop(command_context, **kwargs)
|
||||
|
||||
def _run_android(
|
||||
self,
|
||||
command_context,
|
||||
app="org.mozilla.geckoview_example",
|
||||
intent=None,
|
||||
env=[],
|
||||
@ -1280,7 +1303,7 @@ class RunProgram(MachCommandBase):
|
||||
|
||||
# `verify_android_device` respects `DEVICE_SERIAL` if it is set and sets it otherwise.
|
||||
verify_android_device(
|
||||
self,
|
||||
command_context,
|
||||
app=app,
|
||||
debugger=debug,
|
||||
install=InstallIntent.NO if no_install else InstallIntent.YES,
|
||||
@ -1290,13 +1313,13 @@ class RunProgram(MachCommandBase):
|
||||
print("No ADB devices connected.")
|
||||
return 1
|
||||
|
||||
device = _get_device(self.substs, device_serial=device_serial)
|
||||
device = _get_device(command_context.substs, device_serial=device_serial)
|
||||
|
||||
if debug:
|
||||
# This will terminate any existing processes, so we skip it when we
|
||||
# want to attach to an existing one.
|
||||
if not use_existing_process:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"app": app},
|
||||
@ -1318,7 +1341,7 @@ class RunProgram(MachCommandBase):
|
||||
target_profile = "/data/local/tmp/{}-profile".format(app)
|
||||
device.rm(target_profile, recursive=True, force=True)
|
||||
device.push(host_profile, target_profile)
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{
|
||||
@ -1330,7 +1353,7 @@ class RunProgram(MachCommandBase):
|
||||
)
|
||||
else:
|
||||
target_profile = profile
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"target_profile": target_profile},
|
||||
@ -1353,7 +1376,7 @@ class RunProgram(MachCommandBase):
|
||||
|
||||
if restart:
|
||||
fail_if_running = False
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"app": app},
|
||||
@ -1363,7 +1386,7 @@ class RunProgram(MachCommandBase):
|
||||
|
||||
# We'd prefer to log the actual `am start ...` command, but it's not trivial
|
||||
# to wire the device's logger to mach's logger.
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"app": app, "activity_name": activity_name},
|
||||
@ -1385,9 +1408,9 @@ class RunProgram(MachCommandBase):
|
||||
|
||||
from mozrunner.devices.android_device import run_lldb_server
|
||||
|
||||
socket_file = run_lldb_server(app, self.substs, device_serial)
|
||||
socket_file = run_lldb_server(app, command_context.substs, device_serial)
|
||||
if not socket_file:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"run",
|
||||
{"msg": "Failed to obtain a socket file!"},
|
||||
@ -1396,7 +1419,7 @@ class RunProgram(MachCommandBase):
|
||||
return 1
|
||||
|
||||
# Give lldb-server a chance to start
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"msg": "Pausing to ensure lldb-server has started..."},
|
||||
@ -1429,7 +1452,7 @@ class RunProgram(MachCommandBase):
|
||||
]
|
||||
|
||||
if not proc_list:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"run",
|
||||
{"app": app},
|
||||
@ -1451,14 +1474,16 @@ class RunProgram(MachCommandBase):
|
||||
response = int(input(prompt).strip())
|
||||
if response in valid_range:
|
||||
break
|
||||
self.log(logging.ERROR, "run", {"msg": "Invalid response"}, "{msg}")
|
||||
command_context.log(
|
||||
logging.ERROR, "run", {"msg": "Invalid response"}, "{msg}"
|
||||
)
|
||||
pid = proc_list[response - 1][0]
|
||||
else:
|
||||
# We're not using an existing process, so there should only be our
|
||||
# parent process at this time.
|
||||
pids = device.pidof(app_name=app)
|
||||
if len(pids) != 1:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"run",
|
||||
{"msg": "Not sure which pid to attach to!"},
|
||||
@ -1467,19 +1492,21 @@ class RunProgram(MachCommandBase):
|
||||
return 1
|
||||
pid = pids[0]
|
||||
|
||||
self.log(logging.INFO, "run", {"pid": str(pid)}, "Debuggee pid set to {pid}...")
|
||||
command_context.log(
|
||||
logging.INFO, "run", {"pid": str(pid)}, "Debuggee pid set to {pid}..."
|
||||
)
|
||||
|
||||
lldb_connect_url = "unix-abstract-connect://" + socket_file
|
||||
local_jdb_port = device.forward("tcp:0", "jdwp:%d" % pid)
|
||||
|
||||
if no_attach:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"pid": str(pid), "url": lldb_connect_url},
|
||||
"To debug native code, connect lldb to {url} and attach to pid {pid}",
|
||||
)
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{"port": str(local_jdb_port)},
|
||||
@ -1490,7 +1517,9 @@ class RunProgram(MachCommandBase):
|
||||
# Beyond this point we want to be able to automatically clean up after ourselves,
|
||||
# so we enter the following try block.
|
||||
try:
|
||||
self.log(logging.INFO, "run", {"msg": "Starting debugger..."}, "{msg}")
|
||||
command_context.log(
|
||||
logging.INFO, "run", {"msg": "Starting debugger..."}, "{msg}"
|
||||
)
|
||||
|
||||
if not use_existing_process:
|
||||
# The app is waiting for jdb to attach and will not continue running
|
||||
@ -1527,9 +1556,11 @@ platform connect {connect_url}
|
||||
process attach {continue_flag}-p {pid!s}
|
||||
""".lstrip()
|
||||
|
||||
obj_xul = os.path.join(self.topobjdir, "toolkit", "library", "build")
|
||||
obj_mozglue = os.path.join(self.topobjdir, "mozglue", "build")
|
||||
obj_nss = os.path.join(self.topobjdir, "security")
|
||||
obj_xul = os.path.join(
|
||||
command_context.topobjdir, "toolkit", "library", "build"
|
||||
)
|
||||
obj_mozglue = os.path.join(command_context.topobjdir, "mozglue", "build")
|
||||
obj_nss = os.path.join(command_context.topobjdir, "security")
|
||||
|
||||
if use_existing_process:
|
||||
continue_flag = ""
|
||||
@ -1568,7 +1599,7 @@ process attach {continue_flag}-p {pid!s}
|
||||
if not args:
|
||||
return 1
|
||||
|
||||
return self.run_process(
|
||||
return command_context.run_process(
|
||||
args=args, ensure_exit_code=False, pass_thru=True
|
||||
)
|
||||
finally:
|
||||
@ -1579,12 +1610,14 @@ process attach {continue_flag}-p {pid!s}
|
||||
if not use_existing_process:
|
||||
device.shell("am clear-debug-app")
|
||||
|
||||
def _run_jsshell(self, params, debug, debugger, debugger_args):
def _run_jsshell(self, command_context, params, debug, debugger, debugger_args):
try:
binpath = self.get_binary_path("app")
binpath = command_context.get_binary_path("app")
except BinaryNotFoundException as e:
self.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}")
self.log(logging.INFO, "run", {"help": e.help()}, "{help}")
command_context.log(
logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}"
)
command_context.log(logging.INFO, "run", {"help": e.help()}, "{help}")
return 1

args = [binpath]
@@ -1596,7 +1629,7 @@ process attach {continue_flag}-p {pid!s}

if debug or debugger or debugger_args:
if "INSIDE_EMACS" in os.environ:
self.log_manager.terminal_handler.setLevel(logging.WARNING)
command_context.log_manager.terminal_handler.setLevel(logging.WARNING)

import mozdebug

@@ -1608,21 +1641,22 @@ process attach {continue_flag}-p {pid!s}
)

if debugger:
self.debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)

if not debugger or not self.debuggerInfo:
if not debugger or not debuggerInfo:
print("Could not find a suitable debugger in your PATH.")
return 1

# Prepend the debugger args.
args = [self.debuggerInfo.path] + self.debuggerInfo.args + args
args = [debuggerInfo.path] + debuggerInfo.args + args

return self.run_process(
return command_context.run_process(
args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env
)

def _run_desktop(
|
||||
self,
|
||||
command_context,
|
||||
params,
|
||||
packaged,
|
||||
app,
|
||||
@ -1647,13 +1681,15 @@ process attach {continue_flag}-p {pid!s}
|
||||
|
||||
try:
|
||||
if packaged:
|
||||
binpath = self.get_binary_path(where="staged-package")
|
||||
binpath = command_context.get_binary_path(where="staged-package")
|
||||
else:
|
||||
binpath = app or self.get_binary_path("app")
|
||||
binpath = app or command_context.get_binary_path("app")
|
||||
except BinaryNotFoundException as e:
|
||||
self.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}")
|
||||
command_context.log(
|
||||
logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}"
|
||||
)
|
||||
if packaged:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"run",
|
||||
{
|
||||
@ -1663,7 +1699,7 @@ process attach {continue_flag}-p {pid!s}
|
||||
"{help}",
|
||||
)
|
||||
else:
|
||||
self.log(logging.INFO, "run", {"help": e.help()}, "{help}")
|
||||
command_context.log(logging.INFO, "run", {"help": e.help()}, "{help}")
|
||||
return 1
|
||||
|
||||
args = []
|
||||
@ -1697,7 +1733,10 @@ process attach {continue_flag}-p {pid!s}
|
||||
if not background and sys.platform == "darwin":
|
||||
args.append("-foreground")
|
||||
|
||||
if sys.platform.startswith("win") and "MOZ_LAUNCHER_PROCESS" in self.defines:
|
||||
if (
|
||||
sys.platform.startswith("win")
|
||||
and "MOZ_LAUNCHER_PROCESS" in command_context.defines
|
||||
):
|
||||
args.append("-wait-for-browser")
|
||||
|
||||
no_profile_option_given = all(
|
||||
@ -1709,12 +1748,12 @@ process attach {continue_flag}-p {pid!s}
|
||||
"browser.shell.checkDefaultBrowser": False,
|
||||
"general.warnOnAboutConfig": False,
|
||||
}
|
||||
prefs.update(self._mach_context.settings.runprefs)
|
||||
prefs.update(command_context._mach_context.settings.runprefs)
|
||||
prefs.update([p.split("=", 1) for p in setpref])
|
||||
for pref in prefs:
|
||||
prefs[pref] = Preferences.cast(prefs[pref])
|
||||
|
||||
tmpdir = os.path.join(self.topobjdir, "tmp")
|
||||
tmpdir = os.path.join(command_context.topobjdir, "tmp")
|
||||
if not os.path.exists(tmpdir):
|
||||
os.makedirs(tmpdir)
|
||||
|
||||
@ -1754,8 +1793,8 @@ process attach {continue_flag}-p {pid!s}
|
||||
args.append("-attach-console")
|
||||
|
||||
extra_env = {
|
||||
"MOZ_DEVELOPER_REPO_DIR": self.topsrcdir,
|
||||
"MOZ_DEVELOPER_OBJ_DIR": self.topobjdir,
|
||||
"MOZ_DEVELOPER_REPO_DIR": command_context.topsrcdir,
|
||||
"MOZ_DEVELOPER_OBJ_DIR": command_context.topobjdir,
|
||||
"RUST_BACKTRACE": "full",
|
||||
}
|
||||
|
||||
@ -1766,7 +1805,7 @@ process attach {continue_flag}-p {pid!s}
|
||||
|
||||
if disable_e10s:
|
||||
version_file = os.path.join(
|
||||
self.topsrcdir, "browser", "config", "version.txt"
|
||||
command_context.topsrcdir, "browser", "config", "version.txt"
|
||||
)
|
||||
f = open(version_file, "r")
|
||||
extra_env["MOZ_FORCE_DISABLE_E10S"] = f.read().strip()
|
||||
@ -1776,7 +1815,7 @@ process attach {continue_flag}-p {pid!s}
|
||||
|
||||
if some_debugging_option:
|
||||
if "INSIDE_EMACS" in os.environ:
|
||||
self.log_manager.terminal_handler.setLevel(logging.WARNING)
|
||||
command_context.log_manager.terminal_handler.setLevel(logging.WARNING)
|
||||
|
||||
import mozdebug
|
||||
|
||||
@ -1788,9 +1827,9 @@ process attach {continue_flag}-p {pid!s}
|
||||
)
|
||||
|
||||
if debugger:
|
||||
self.debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
|
||||
debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
|
||||
|
||||
if not debugger or not self.debuggerInfo:
|
||||
if not debugger or not debuggerInfo:
|
||||
print("Could not find a suitable debugger in your PATH.")
|
||||
return 1
|
||||
|
||||
@ -1809,7 +1848,7 @@ process attach {continue_flag}-p {pid!s}
|
||||
return 1
|
||||
|
||||
# Prepend the debugger args.
|
||||
args = [self.debuggerInfo.path] + self.debuggerInfo.args + args
|
||||
args = [debuggerInfo.path] + debuggerInfo.args + args
|
||||
|
||||
if dmd:
|
||||
dmd_params = []
|
||||
@ -1826,7 +1865,7 @@ process attach {continue_flag}-p {pid!s}
|
||||
else:
|
||||
extra_env["DMD"] = "1"
|
||||
|
||||
return self.run_process(
|
||||
return command_context.run_process(
|
||||
args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env
|
||||
)
|
||||
|
||||
@ -1841,7 +1880,7 @@ class Buildsymbols(MachCommandBase):
|
||||
description="Produce a package of Breakpad-format symbols.",
|
||||
)
|
||||
def buildsymbols(self, command_context):
|
||||
return self._run_make(
|
||||
return command_context._run_make(
|
||||
directory=".", target="buildsymbols", ensure_exit_code=False
|
||||
)
|
||||
|
||||
@ -1872,43 +1911,43 @@ class MachDebug(MachCommandBase):
|
||||
from mozbuild.util import FileAvoidWrite
|
||||
|
||||
with FileAvoidWrite(output) as out:
|
||||
return func(out, verbose)
|
||||
return func(sys.stdout, verbose)
|
||||
return func(command_context, out, verbose)
|
||||
return func(command_context, sys.stdout, verbose)
|
||||
|
||||
def _environment_pretty(self, out, verbose):
|
||||
state_dir = self._mach_context.state_dir
|
||||
def _environment_pretty(self, command_context, out, verbose):
|
||||
state_dir = command_context._mach_context.state_dir
|
||||
|
||||
print("platform:\n\t%s" % platform.platform(), file=out)
|
||||
print("python version:\n\t%s" % sys.version, file=out)
|
||||
print("python prefix:\n\t%s" % sys.prefix, file=out)
|
||||
print("mach cwd:\n\t%s" % self._mach_context.cwd, file=out)
|
||||
print("mach cwd:\n\t%s" % command_context._mach_context.cwd, file=out)
|
||||
print("os cwd:\n\t%s" % os.getcwd(), file=out)
|
||||
print("mach directory:\n\t%s" % self._mach_context.topdir, file=out)
|
||||
print("mach directory:\n\t%s" % command_context._mach_context.topdir, file=out)
|
||||
print("state directory:\n\t%s" % state_dir, file=out)
|
||||
|
||||
print("object directory:\n\t%s" % self.topobjdir, file=out)
|
||||
print("object directory:\n\t%s" % command_context.topobjdir, file=out)
|
||||
|
||||
if self.mozconfig["path"]:
|
||||
print("mozconfig path:\n\t%s" % self.mozconfig["path"], file=out)
|
||||
if self.mozconfig["configure_args"]:
|
||||
if command_context.mozconfig["path"]:
|
||||
print("mozconfig path:\n\t%s" % command_context.mozconfig["path"], file=out)
|
||||
if command_context.mozconfig["configure_args"]:
|
||||
print("mozconfig configure args:", file=out)
|
||||
for arg in self.mozconfig["configure_args"]:
|
||||
for arg in command_context.mozconfig["configure_args"]:
|
||||
print("\t%s" % arg, file=out)
|
||||
|
||||
if self.mozconfig["make_extra"]:
|
||||
if command_context.mozconfig["make_extra"]:
|
||||
print("mozconfig extra make args:", file=out)
|
||||
for arg in self.mozconfig["make_extra"]:
|
||||
for arg in command_context.mozconfig["make_extra"]:
|
||||
print("\t%s" % arg, file=out)
|
||||
|
||||
if self.mozconfig["make_flags"]:
|
||||
if command_context.mozconfig["make_flags"]:
|
||||
print("mozconfig make flags:", file=out)
|
||||
for arg in self.mozconfig["make_flags"]:
|
||||
for arg in command_context.mozconfig["make_flags"]:
|
||||
print("\t%s" % arg, file=out)
|
||||
|
||||
config = None
|
||||
|
||||
try:
|
||||
config = self.config_environment
|
||||
config = command_context.config_environment
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
@ -1926,7 +1965,7 @@ class MachDebug(MachCommandBase):
|
||||
for k in sorted(config.defines):
|
||||
print("\t%s" % k, file=out)
|
||||
|
||||
def _environment_json(self, out, verbose):
|
||||
def _environment_json(self, command_context, out, verbose):
|
||||
import json
|
||||
|
||||
class EnvironmentEncoder(json.JSONEncoder):
|
||||
@ -1945,7 +1984,7 @@ class MachDebug(MachCommandBase):
|
||||
return list(obj)
|
||||
return json.JSONEncoder.default(self, obj)
|
||||
|
||||
json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out)
|
||||
json.dump(command_context, cls=EnvironmentEncoder, sort_keys=True, fp=out)
|
||||
|
||||
|
||||
@CommandProvider
|
||||
@ -1975,7 +2014,7 @@ class Repackage(MachCommandBase):
|
||||
print("Input file does not exist: %s" % input)
|
||||
return 1
|
||||
|
||||
if not os.path.exists(os.path.join(self.topobjdir, "config.status")):
|
||||
if not os.path.exists(os.path.join(command_context.topobjdir, "config.status")):
|
||||
print(
|
||||
"config.status not found. Please run |mach configure| "
|
||||
"prior to |mach repackage|."
|
||||
@ -2037,7 +2076,7 @@ class Repackage(MachCommandBase):
|
||||
from mozbuild.repackaging.installer import repackage_installer
|
||||
|
||||
repackage_installer(
|
||||
topsrcdir=self.topsrcdir,
|
||||
topsrcdir=command_context.topsrcdir,
|
||||
tag=tag,
|
||||
setupexe=setupexe,
|
||||
package=package,
|
||||
@ -2089,7 +2128,7 @@ class Repackage(MachCommandBase):
|
||||
from mozbuild.repackaging.msi import repackage_msi
|
||||
|
||||
repackage_msi(
|
||||
topsrcdir=self.topsrcdir,
|
||||
topsrcdir=command_context.topsrcdir,
|
||||
wsx=wsx,
|
||||
version=version,
|
||||
locale=locale,
|
||||
@ -2112,7 +2151,12 @@ class Repackage(MachCommandBase):
|
||||
from mozbuild.repackaging.mar import repackage_mar
|
||||
|
||||
repackage_mar(
|
||||
self.topsrcdir, input, mar, output, arch=arch, mar_channel_id=mar_channel_id
|
||||
command_context.topsrcdir,
|
||||
input,
|
||||
mar,
|
||||
output,
|
||||
arch=arch,
|
||||
mar_channel_id=mar_channel_id,
|
||||
)
|
||||
|
||||
|
||||
@ -2135,7 +2179,7 @@ class L10NCommands(MachCommandBase):
|
||||
"--verbose", action="store_true", help="Log informative status messages."
|
||||
)
|
||||
def package_l10n(self, command_context, verbose=False, locales=[]):
|
||||
if "RecursiveMake" not in self.substs["BUILD_BACKENDS"]:
|
||||
if "RecursiveMake" not in command_context.substs["BUILD_BACKENDS"]:
|
||||
print(
|
||||
"Artifact builds do not support localization. "
|
||||
"If you know what you are doing, you can use:\n"
|
||||
@ -2146,7 +2190,7 @@ class L10NCommands(MachCommandBase):
|
||||
return 1
|
||||
|
||||
if "en-US" not in locales:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.WARN,
|
||||
"package-multi-locale",
|
||||
{"locales": locales},
|
||||
@ -2165,7 +2209,7 @@ class L10NCommands(MachCommandBase):
|
||||
|
||||
for locale in locales:
|
||||
if locale == "en-US":
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"package-multi-locale",
|
||||
{"locale": locale},
|
||||
@ -2173,80 +2217,90 @@ class L10NCommands(MachCommandBase):
|
||||
)
|
||||
continue
|
||||
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"package-multi-locale",
|
||||
{"locale": locale},
|
||||
"Processing chrome Gecko resources for locale {locale}",
|
||||
)
|
||||
self.run_process(
|
||||
command_context.run_process(
|
||||
[
|
||||
mozpath.join(self.topsrcdir, "mach"),
|
||||
mozpath.join(command_context.topsrcdir, "mach"),
|
||||
"build",
|
||||
"chrome-{}".format(locale),
|
||||
],
|
||||
append_env=append_env,
|
||||
pass_thru=True,
|
||||
ensure_exit_code=True,
|
||||
cwd=mozpath.join(self.topsrcdir),
|
||||
cwd=mozpath.join(command_context.topsrcdir),
|
||||
)
|
||||
|
||||
if self.substs["MOZ_BUILD_APP"] == "mobile/android":
|
||||
self.log(
|
||||
if command_context.substs["MOZ_BUILD_APP"] == "mobile/android":
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"package-multi-locale",
|
||||
{},
|
||||
"Invoking `mach android assemble-app`",
|
||||
)
|
||||
self.run_process(
|
||||
[mozpath.join(self.topsrcdir, "mach"), "android", "assemble-app"],
|
||||
command_context.run_process(
|
||||
[
|
||||
mozpath.join(command_context.topsrcdir, "mach"),
|
||||
"android",
|
||||
"assemble-app",
|
||||
],
|
||||
append_env=append_env,
|
||||
pass_thru=True,
|
||||
ensure_exit_code=True,
|
||||
cwd=mozpath.join(self.topsrcdir),
|
||||
cwd=mozpath.join(command_context.topsrcdir),
|
||||
)
|
||||
|
||||
if self.substs["MOZ_BUILD_APP"] == "browser":
|
||||
self.log(logging.INFO, "package-multi-locale", {}, "Repackaging browser")
|
||||
self._run_make(
|
||||
directory=mozpath.join(self.topobjdir, "browser", "app"),
|
||||
if command_context.substs["MOZ_BUILD_APP"] == "browser":
|
||||
command_context.log(
|
||||
logging.INFO, "package-multi-locale", {}, "Repackaging browser"
|
||||
)
|
||||
command_context._run_make(
|
||||
directory=mozpath.join(command_context.topobjdir, "browser", "app"),
|
||||
target=["tools"],
|
||||
append_env=append_env,
|
||||
pass_thru=True,
|
||||
ensure_exit_code=True,
|
||||
)
|
||||
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"package-multi-locale",
|
||||
{},
|
||||
"Invoking multi-locale `mach package`",
|
||||
)
|
||||
target = ["package"]
|
||||
if self.substs["MOZ_BUILD_APP"] == "mobile/android":
|
||||
if command_context.substs["MOZ_BUILD_APP"] == "mobile/android":
|
||||
target.append("AB_CD=multi")
|
||||
|
||||
self._run_make(
|
||||
directory=self.topobjdir,
|
||||
command_context._run_make(
|
||||
directory=command_context.topobjdir,
|
||||
target=target,
|
||||
append_env=append_env,
|
||||
pass_thru=True,
|
||||
ensure_exit_code=True,
|
||||
)
|
||||
|
||||
if self.substs["MOZ_BUILD_APP"] == "mobile/android":
|
||||
self.log(
|
||||
if command_context.substs["MOZ_BUILD_APP"] == "mobile/android":
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"package-multi-locale",
|
||||
{},
|
||||
"Invoking `mach android archive-geckoview`",
|
||||
)
|
||||
self.run_process(
|
||||
[mozpath.join(self.topsrcdir, "mach"), "android", "archive-geckoview"],
|
||||
command_context.run_process(
|
||||
[
|
||||
mozpath.join(command_context.topsrcdir, "mach"),
|
||||
"android",
|
||||
"archive-geckoview",
|
||||
],
|
||||
append_env=append_env,
|
||||
pass_thru=True,
|
||||
ensure_exit_code=True,
|
||||
cwd=mozpath.join(self.topsrcdir),
|
||||
cwd=mozpath.join(command_context.topsrcdir),
|
||||
)
|
||||
|
||||
return 0
|
||||
@ -2303,10 +2357,12 @@ class CreateMachEnvironment(MachCommandBase):
|
||||
return 1
|
||||
|
||||
manager = VirtualenvManager(
|
||||
self.topsrcdir,
|
||||
command_context.topsrcdir,
|
||||
virtualenv_path,
|
||||
sys.stdout,
|
||||
os.path.join(self.topsrcdir, "build", "mach_virtualenv_packages.txt"),
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "build", "mach_virtualenv_packages.txt"
|
||||
),
|
||||
populate_local_paths=False,
|
||||
)
|
||||
|
||||
@ -2319,7 +2375,9 @@ class CreateMachEnvironment(MachCommandBase):
|
||||
# `mach` can handle it perfectly fine if `psutil` is missing, so
|
||||
# there's no reason to freak out in this case.
|
||||
manager.install_pip_requirements(
|
||||
os.path.join(self.topsrcdir, "build", "psutil_requirements.txt")
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "build", "psutil_requirements.txt"
|
||||
)
|
||||
)
|
||||
except subprocess.CalledProcessError:
|
||||
print(
|
||||
@ -2328,14 +2386,18 @@ class CreateMachEnvironment(MachCommandBase):
|
||||
)
|
||||
|
||||
manager.install_pip_requirements(
|
||||
os.path.join(self.topsrcdir, "build", "zstandard_requirements.txt")
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "build", "zstandard_requirements.txt"
|
||||
)
|
||||
)
|
||||
|
||||
# This can fail on some platforms. See
|
||||
# https://bugzilla.mozilla.org/show_bug.cgi?id=1660120
|
||||
try:
|
||||
manager.install_pip_requirements(
|
||||
os.path.join(self.topsrcdir, "build", "glean_requirements.txt")
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "build", "glean_requirements.txt"
|
||||
)
|
||||
)
|
||||
except subprocess.CalledProcessError:
|
||||
print(
|
||||
|
@ -39,25 +39,31 @@ class TestStaticAnalysis(unittest.TestCase):
|
||||
context.cwd = config.topsrcdir
|
||||
|
||||
cmd = StaticAnalysis(context)
|
||||
cmd.topsrcdir = os.path.join("/root", "dir")
|
||||
command_context = mock.MagicMock()
|
||||
command_context.topsrcdir = os.path.join("/root", "dir")
|
||||
path = os.path.join("/root", "dir", "path1")
|
||||
|
||||
ignored_dirs_re = r"path1|path2/here|path3\there"
|
||||
self.assertTrue(cmd._is_ignored_path(ignored_dirs_re, path) is not None)
|
||||
self.assertTrue(
|
||||
cmd._is_ignored_path(command_context, ignored_dirs_re, path) is not None
|
||||
)
|
||||
|
||||
# simulating a win32 env
|
||||
win32_path = "\\root\\dir\\path1"
|
||||
cmd.topsrcdir = "\\root\\dir"
|
||||
command_context.topsrcdir = "\\root\\dir"
|
||||
old_sep = os.sep
|
||||
os.sep = "\\"
|
||||
try:
|
||||
self.assertTrue(
|
||||
cmd._is_ignored_path(ignored_dirs_re, win32_path) is not None
|
||||
cmd._is_ignored_path(command_context, ignored_dirs_re, win32_path)
|
||||
is not None
|
||||
)
|
||||
finally:
|
||||
os.sep = old_sep
|
||||
|
||||
self.assertTrue(cmd._is_ignored_path(ignored_dirs_re, "path2") is None)
|
||||
self.assertTrue(
|
||||
cmd._is_ignored_path(command_context, ignored_dirs_re, "path2") is None
|
||||
)
|
||||
|
||||
def test_get_files(self):
|
||||
from mozbuild.code_analysis.mach_commands import StaticAnalysis
|
||||
@ -67,14 +73,17 @@ class TestStaticAnalysis(unittest.TestCase):
|
||||
context.cwd = config.topsrcdir
|
||||
|
||||
cmd = StaticAnalysis(context)
|
||||
cmd.topsrcdir = mozpath.join("/root", "dir")
|
||||
source = cmd.get_abspath_files(["file1", mozpath.join("directory", "file2")])
|
||||
command_context = mock.MagicMock()
|
||||
command_context.topsrcdir = mozpath.join("/root", "dir")
|
||||
source = cmd.get_abspath_files(
|
||||
command_context, ["file1", mozpath.join("directory", "file2")]
|
||||
)
|
||||
|
||||
self.assertTrue(
|
||||
source
|
||||
== [
|
||||
mozpath.join(cmd.topsrcdir, "file1"),
|
||||
mozpath.join(cmd.topsrcdir, "directory", "file2"),
|
||||
mozpath.join(command_context.topsrcdir, "file1"),
|
||||
mozpath.join(command_context.topsrcdir, "directory", "file2"),
|
||||
]
|
||||
)
|
||||
|
||||
|
python/mozbuild/mozbuild/vendor/mach_commands.py (vendored, 18 lines changed)
@ -62,8 +62,8 @@ class Vendor(MachCommandBase):
|
||||
library = library[0]
|
||||
assert library not in ["rust", "python"]
|
||||
|
||||
self.populate_logger()
|
||||
self.log_manager.enable_unstructured()
|
||||
command_context.populate_logger()
|
||||
command_context.log_manager.enable_unstructured()
|
||||
if check_for_update:
|
||||
logging.disable()
|
||||
|
||||
@ -77,26 +77,26 @@ class Vendor(MachCommandBase):
|
||||
sys.exit(1)
|
||||
|
||||
if not ignore_modified and not check_for_update:
|
||||
self.check_modified_files()
|
||||
self.check_modified_files(command_context)
|
||||
if not revision:
|
||||
revision = "HEAD"
|
||||
|
||||
from mozbuild.vendor.vendor_manifest import VendorManifest
|
||||
|
||||
vendor_command = self._spawn(VendorManifest)
|
||||
vendor_command = command_context._spawn(VendorManifest)
|
||||
vendor_command.vendor(library, manifest, revision, check_for_update)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
def check_modified_files(self):
|
||||
def check_modified_files(self, command_context):
|
||||
"""
|
||||
Ensure that there aren't any uncommitted changes to files
|
||||
in the working copy, since we're going to change some state
|
||||
on the user.
|
||||
"""
|
||||
modified = self.repository.get_changed_files("M")
|
||||
modified = command_context.repository.get_changed_files("M")
|
||||
if modified:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"modified_files",
|
||||
{},
|
||||
@ -137,7 +137,7 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
|
||||
def vendor_rust(self, command_context, **kwargs):
|
||||
from mozbuild.vendor.vendor_rust import VendorRust
|
||||
|
||||
vendor_command = self._spawn(VendorRust)
|
||||
vendor_command = command_context._spawn(VendorRust)
|
||||
vendor_command.vendor(**kwargs)
|
||||
|
||||
# =====================================================================
|
||||
@ -168,5 +168,5 @@ Please commit or stash these changes before vendoring, or re-run with `--ignore-
|
||||
)
|
||||
return 1
|
||||
|
||||
vendor_command = self._spawn(VendorPython)
|
||||
vendor_command = command_context._spawn(VendorPython)
|
||||
vendor_command.vendor(**kwargs)
|
||||
|
@ -56,7 +56,7 @@ class Perftest(MachCommandBase):
|
||||
from moztest.resolve import TestResolver
|
||||
from mozperftest.fzf.fzf import select
|
||||
|
||||
resolver = self._spawn(TestResolver)
|
||||
resolver = command_context._spawn(TestResolver)
|
||||
test_objects = list(resolver.resolve_tests(paths=None, flavor="perftest"))
|
||||
selected = select(test_objects)
|
||||
|
||||
@ -64,7 +64,7 @@ class Perftest(MachCommandBase):
|
||||
__, script_name, __, location = selection.split(" ")
|
||||
return str(
|
||||
Path(
|
||||
self.topsrcdir.rstrip(os.sep),
|
||||
command_context.topsrcdir.rstrip(os.sep),
|
||||
location.strip(os.sep),
|
||||
script_name,
|
||||
)
|
||||
@ -102,7 +102,7 @@ class Perftest(MachCommandBase):
|
||||
|
||||
push_to_try = kwargs.pop("push_to_try", False)
|
||||
if push_to_try:
|
||||
sys.path.append(str(Path(self.topsrcdir, "tools", "tryselect")))
|
||||
sys.path.append(str(Path(command_context.topsrcdir, "tools", "tryselect")))
|
||||
|
||||
from tryselect.push import push_to_try
|
||||
|
||||
@ -126,8 +126,8 @@ class Perftest(MachCommandBase):
|
||||
)
|
||||
|
||||
def relative(path):
|
||||
if path.startswith(self.topsrcdir):
|
||||
return path[len(self.topsrcdir) :].lstrip(os.sep)
|
||||
if path.startswith(command_context.topsrcdir):
|
||||
return path[len(command_context.topsrcdir) :].lstrip(os.sep)
|
||||
return path
|
||||
|
||||
for name, value in args.items():
|
||||
@ -156,18 +156,14 @@ class Perftest(MachCommandBase):
|
||||
|
||||
from mozperftest.runner import run_tests
|
||||
|
||||
run_tests(self, kwargs, original_parser.get_user_args(kwargs))
|
||||
run_tests(command_context, kwargs, original_parser.get_user_args(kwargs))
|
||||
|
||||
print("\nFirefox. Fast For Good.\n")
|
||||
|
||||
|
||||
@CommandProvider
|
||||
class PerftestTests(MachCommandBase):
|
||||
@Command(
|
||||
"perftest-test",
|
||||
category="testing",
|
||||
description="Run perftest tests",
|
||||
)
|
||||
@Command("perftest-test", category="testing", description="Run perftest tests")
|
||||
@CommandArgument(
|
||||
"tests", default=None, nargs="*", help="Tests to run. By default will run all"
|
||||
)
|
||||
@ -179,14 +175,10 @@ class PerftestTests(MachCommandBase):
|
||||
help="Skip flake8 and black",
|
||||
)
|
||||
@CommandArgument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Verbose mode",
|
||||
"-v", "--verbose", action="store_true", default=False, help="Verbose mode"
|
||||
)
|
||||
def run_tests(self, command_context, **kwargs):
|
||||
MachCommandBase.activate_virtualenv(self)
|
||||
command_context.activate_virtualenv()
|
||||
|
||||
from pathlib import Path
|
||||
from mozperftest.utils import temporary_env
|
||||
@ -194,9 +186,9 @@ class PerftestTests(MachCommandBase):
|
||||
with temporary_env(
|
||||
COVERAGE_RCFILE=str(Path(HERE, ".coveragerc")), RUNNING_TESTS="YES"
|
||||
):
|
||||
self._run_tests(**kwargs)
|
||||
self._run_tests(command_context, **kwargs)
|
||||
|
||||
def _run_tests(self, **kwargs):
|
||||
def _run_tests(self, command_context, **kwargs):
|
||||
from pathlib import Path
|
||||
from mozperftest.runner import _setup_path
|
||||
from mozperftest.utils import (
|
||||
@ -206,7 +198,7 @@ class PerftestTests(MachCommandBase):
|
||||
checkout_python_script,
|
||||
)
|
||||
|
||||
venv = self.virtualenv_manager
|
||||
venv = command_context.virtualenv_manager
|
||||
skip_linters = kwargs.get("skip_linters", False)
|
||||
verbose = kwargs.get("verbose", False)
|
||||
|
||||
@ -215,14 +207,16 @@ class PerftestTests(MachCommandBase):
|
||||
try:
|
||||
import coverage # noqa
|
||||
except ImportError:
|
||||
pydeps = Path(self.topsrcdir, "third_party", "python")
|
||||
pydeps = Path(command_context.topsrcdir, "third_party", "python")
|
||||
vendors = ["coverage"]
|
||||
if not ON_TRY:
|
||||
vendors.append("attrs")
|
||||
|
||||
# pip-installing dependencies that require compilation or special setup
|
||||
for dep in vendors:
|
||||
install_package(self.virtualenv_manager, str(Path(pydeps, dep)))
|
||||
install_package(
|
||||
command_context.virtualenv_manager, str(Path(pydeps, dep))
|
||||
)
|
||||
|
||||
if not ON_TRY and not skip_linters:
|
||||
cmd = "./mach lint "
|
||||
@ -270,14 +264,7 @@ class PerftestTests(MachCommandBase):
|
||||
assert checkout_python_script(
|
||||
venv, "coverage", ["erase"], label="remove old coverage data"
|
||||
)
|
||||
args = [
|
||||
"run",
|
||||
pytest.__file__,
|
||||
options,
|
||||
"--duration",
|
||||
"10",
|
||||
tests,
|
||||
]
|
||||
args = ["run", pytest.__file__, options, "--duration", "10", tests]
|
||||
assert checkout_python_script(
|
||||
venv, "coverage", args, label="running tests", verbose=verbose
|
||||
)
|
||||
|
@ -10,12 +10,7 @@ from __future__ import absolute_import, print_function, unicode_literals
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from mach.decorators import (
|
||||
CommandArgument,
|
||||
CommandProvider,
|
||||
Command,
|
||||
SubCommand,
|
||||
)
|
||||
from mach.decorators import CommandArgument, CommandProvider, Command, SubCommand
|
||||
|
||||
from mozbuild.base import MachCommandBase
|
||||
from mozilla_version.gecko import GeckoVersion
|
||||
@ -48,7 +43,7 @@ class MachCommands(MachCommandBase):
|
||||
@CommandArgument("--repo", help="The repo being built.")
|
||||
@CommandArgument("--revision", required=True, help="The revision being built.")
|
||||
def buglist(self, command_context, version, product, revision, repo):
|
||||
self.setup_logging()
|
||||
self.setup_logging(command_context)
|
||||
from mozrelease.buglist_creator import create_bugs_url
|
||||
|
||||
print(
|
||||
@ -85,7 +80,7 @@ class MachCommands(MachCommandBase):
|
||||
@CommandArgument("--build-number", required=True, help="The build number")
|
||||
@CommandArgument("--task-group-id", help="The task group of the build.")
|
||||
def buglist_email(self, command_context, **options):
|
||||
self.setup_logging()
|
||||
self.setup_logging(command_context)
|
||||
from mozrelease.buglist_creator import email_release_drivers
|
||||
|
||||
email_release_drivers(**options)
|
||||
@ -118,7 +113,7 @@ class MachCommands(MachCommandBase):
|
||||
def push_scriptworker_canary(
|
||||
self, command_context, scriptworkers, addresses, ssh_key_secret
|
||||
):
|
||||
self.setup_logging()
|
||||
self.setup_logging(command_context)
|
||||
from mozrelease.scriptworker_canary import push_canary
|
||||
|
||||
push_canary(
|
||||
@ -127,19 +122,19 @@ class MachCommands(MachCommandBase):
|
||||
ssh_key_secret=ssh_key_secret,
|
||||
)
|
||||
|
||||
def setup_logging(self, quiet=False, verbose=True):
|
||||
def setup_logging(self, command_context, quiet=False, verbose=True):
|
||||
"""
|
||||
Set up Python logging for all loggers, sending results to stderr (so
|
||||
that command output can be redirected easily) and adding the typical
|
||||
mach timestamp.
|
||||
"""
|
||||
# remove the old terminal handler
|
||||
old = self.log_manager.replace_terminal_handler(None)
|
||||
old = command_context.log_manager.replace_terminal_handler(None)
|
||||
|
||||
# re-add it, with level and fh set appropriately
|
||||
if not quiet:
|
||||
level = logging.DEBUG if verbose else logging.INFO
|
||||
self.log_manager.add_terminal_logging(
|
||||
command_context.log_manager.add_terminal_logging(
|
||||
fh=sys.stderr,
|
||||
level=level,
|
||||
write_interval=old.formatter.write_interval,
|
||||
@ -147,4 +142,4 @@ class MachCommands(MachCommandBase):
|
||||
)
|
||||
|
||||
# all of the taskgraph logging is unstructured logging
|
||||
self.log_manager.enable_unstructured()
|
||||
command_context.log_manager.enable_unstructured()
|
||||
|
@ -54,15 +54,15 @@ def setup():
|
||||
|
||||
@CommandProvider
|
||||
class RemoteCommands(MachCommandBase):
|
||||
def remotedir(self):
|
||||
return os.path.join(self.topsrcdir, "remote")
|
||||
def remotedir(self, command_context):
|
||||
return os.path.join(command_context.topsrcdir, "remote")
|
||||
|
||||
@Command(
|
||||
"remote", category="misc", description="Remote protocol related operations."
|
||||
)
|
||||
def remote(self, command_context):
|
||||
"""The remote subcommands all relate to the remote protocol."""
|
||||
self._sub_mach(["help", "remote"])
|
||||
command_context._sub_mach(["help", "remote"])
|
||||
return 1
|
||||
|
||||
@SubCommand(
|
||||
@ -88,11 +88,14 @@ class RemoteCommands(MachCommandBase):
|
||||
help="Do not install the just-pulled Puppeteer package,",
|
||||
)
|
||||
def vendor_puppeteer(self, command_context, repository, commitish, install):
|
||||
puppeteer_dir = os.path.join(self.remotedir(), "test", "puppeteer")
|
||||
puppeteer_dir = os.path.join(
|
||||
self.remotedir(command_context), "test", "puppeteer"
|
||||
)
|
||||
|
||||
# Preserve our custom mocha reporter
|
||||
shutil.move(
|
||||
os.path.join(puppeteer_dir, "json-mocha-reporter.js"), self.remotedir()
|
||||
os.path.join(puppeteer_dir, "json-mocha-reporter.js"),
|
||||
self.remotedir(command_context),
|
||||
)
|
||||
shutil.rmtree(puppeteer_dir, ignore_errors=True)
|
||||
os.makedirs(puppeteer_dir)
|
||||
@ -123,7 +126,8 @@ class RemoteCommands(MachCommandBase):
|
||||
shutil.rmtree(dir_path)
|
||||
|
||||
shutil.move(
|
||||
os.path.join(self.remotedir(), "json-mocha-reporter.js"), puppeteer_dir
|
||||
os.path.join(self.remotedir(command_context), "json-mocha-reporter.js"),
|
||||
puppeteer_dir,
|
||||
)
|
||||
|
||||
import yaml
|
||||
@ -153,7 +157,11 @@ class RemoteCommands(MachCommandBase):
|
||||
|
||||
if install:
|
||||
env = {"PUPPETEER_SKIP_DOWNLOAD": "1"}
|
||||
npm("install", cwd=os.path.join(self.topsrcdir, puppeteer_dir), env=env)
|
||||
npm(
|
||||
"install",
|
||||
cwd=os.path.join(command_context.topsrcdir, puppeteer_dir),
|
||||
env=env,
|
||||
)
|
||||
|
||||
|
||||
def git(*args, **kwargs):
|
||||
@ -602,8 +610,6 @@ class PuppeteerTest(MachCommandBase):
|
||||
**kwargs
|
||||
):
|
||||
|
||||
self.ci = ci
|
||||
|
||||
logger = mozlog.commandline.setup_logging(
|
||||
"puppeteer-test", kwargs, {"mach": sys.stdout}
|
||||
)
|
||||
@ -648,7 +654,7 @@ class PuppeteerTest(MachCommandBase):
|
||||
if verbosity > 2:
|
||||
prefs["remote.log.truncate"] = False
|
||||
|
||||
self.install_puppeteer(product)
|
||||
self.install_puppeteer(command_context, product, ci)
|
||||
|
||||
params = {
|
||||
"binary": binary,
|
||||
@ -660,7 +666,7 @@ class PuppeteerTest(MachCommandBase):
|
||||
"write_results": write_results,
|
||||
"subset": subset,
|
||||
}
|
||||
puppeteer = self._spawn(PuppeteerRunner)
|
||||
puppeteer = command_context._spawn(PuppeteerRunner)
|
||||
try:
|
||||
return puppeteer.run_test(logger, *tests, **params)
|
||||
except BinaryNotFoundException as e:
|
||||
@ -670,12 +676,12 @@ class PuppeteerTest(MachCommandBase):
|
||||
except Exception as e:
|
||||
exit(EX_SOFTWARE, e)
|
||||
|
||||
def install_puppeteer(self, product):
|
||||
def install_puppeteer(self, command_context, product, ci):
|
||||
setup()
|
||||
env = {}
|
||||
from mozversioncontrol import get_repository_object
|
||||
|
||||
repo = get_repository_object(self.topsrcdir)
|
||||
repo = get_repository_object(command_context.topsrcdir)
|
||||
puppeteer_dir = os.path.join("remote", "test", "puppeteer")
|
||||
changed_files = False
|
||||
for f in repo.get_changed_files():
|
||||
@ -685,13 +691,15 @@ class PuppeteerTest(MachCommandBase):
|
||||
|
||||
if product != "chrome":
|
||||
env["PUPPETEER_SKIP_DOWNLOAD"] = "1"
|
||||
lib_dir = os.path.join(self.topsrcdir, puppeteer_dir, "lib")
|
||||
lib_dir = os.path.join(command_context.topsrcdir, puppeteer_dir, "lib")
|
||||
if changed_files and os.path.isdir(lib_dir):
|
||||
# clobber lib to force `tsc compile` step
|
||||
shutil.rmtree(lib_dir)
|
||||
|
||||
command = "ci" if self.ci else "install"
|
||||
npm(command, cwd=os.path.join(self.topsrcdir, puppeteer_dir), env=env)
|
||||
command = "ci" if ci else "install"
|
||||
npm(
|
||||
command, cwd=os.path.join(command_context.topsrcdir, puppeteer_dir), env=env
|
||||
)
|
||||
|
||||
|
||||
def exit(code, error=None):
|
||||
|
@ -79,12 +79,12 @@ class MachCommands(MachCommandBase):
def generate_test_certs(self, command_context, specifications):
"""Generate test certificates and keys from specifications."""

self.activate_virtualenv()
command_context.activate_virtualenv()
import pycert
import pykey

if not specifications:
specifications = self.find_all_specifications()
specifications = self.find_all_specifications(command_context)

for specification in specifications:
if is_certspec_file(specification):
@ -98,7 +98,7 @@ class MachCommands(MachCommandBase):
run_module_main_on(module, os.path.abspath(specification))
return 0

def find_all_specifications(self):
def find_all_specifications(self, command_context):
"""Searches the source tree for all specification files
and returns them as a list."""
specifications = []
@ -109,11 +109,11 @@ class MachCommands(MachCommandBase):
"testing/xpcshell/moz-http2",
]
exclusions = ["security/manager/ssl/tests/unit/test_signed_apps"]
finder = FileFinder(self.topsrcdir)
finder = FileFinder(command_context.topsrcdir)
for inclusion_path in inclusions:
for f, _ in finder.find(inclusion_path):
if basedir(f, exclusions):
continue
if is_specification_file(f):
specifications.append(os.path.join(self.topsrcdir, f))
specifications.append(os.path.join(command_context.topsrcdir, f))
return specifications

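For reference, the search loop above leans on mozpack's FileFinder; a rough standalone sketch of the same traversal, with the basedir() exclusion test simplified to a prefix check and the FileFinder import assumed, since it is not visible in this hunk:

    import os

    from mozpack.files import FileFinder


    def find_specifications(topsrcdir, inclusions, exclusions, is_specification_file):
        specifications = []
        finder = FileFinder(topsrcdir)
        for inclusion_path in inclusions:
            # find() yields (relative path, file object) pairs for the pattern.
            for relpath, _ in finder.find(inclusion_path):
                if any(relpath.startswith(prefix) for prefix in exclusions):
                    continue
                if is_specification_file(relpath):
                    specifications.append(os.path.join(topsrcdir, relpath))
        return specifications
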
@ -173,35 +173,35 @@ class MachCommands(MachCommandBase):
|
||||
"taskgraph", "tasks", description="Show all tasks in the taskgraph"
|
||||
)
|
||||
def taskgraph_tasks(self, command_context, **options):
|
||||
return self.show_taskgraph("full_task_set", options)
|
||||
return self.show_taskgraph(command_context, "full_task_set", options)
|
||||
|
||||
@ShowTaskGraphSubCommand("taskgraph", "full", description="Show the full taskgraph")
|
||||
def taskgraph_full(self, command_context, **options):
|
||||
return self.show_taskgraph("full_task_graph", options)
|
||||
return self.show_taskgraph(command_context, "full_task_graph", options)
|
||||
|
||||
@ShowTaskGraphSubCommand(
|
||||
"taskgraph", "target", description="Show the target task set"
|
||||
)
|
||||
def taskgraph_target(self, command_context, **options):
|
||||
return self.show_taskgraph("target_task_set", options)
|
||||
return self.show_taskgraph(command_context, "target_task_set", options)
|
||||
|
||||
@ShowTaskGraphSubCommand(
|
||||
"taskgraph", "target-graph", description="Show the target taskgraph"
|
||||
)
|
||||
def taskgraph_target_taskgraph(self, command_context, **options):
|
||||
return self.show_taskgraph("target_task_graph", options)
|
||||
return self.show_taskgraph(command_context, "target_task_graph", options)
|
||||
|
||||
@ShowTaskGraphSubCommand(
|
||||
"taskgraph", "optimized", description="Show the optimized taskgraph"
|
||||
)
|
||||
def taskgraph_optimized(self, command_context, **options):
|
||||
return self.show_taskgraph("optimized_task_graph", options)
|
||||
return self.show_taskgraph(command_context, "optimized_task_graph", options)
|
||||
|
||||
@ShowTaskGraphSubCommand(
|
||||
"taskgraph", "morphed", description="Show the morphed taskgraph"
|
||||
)
|
||||
def taskgraph_morphed(self, command_context, **options):
|
||||
return self.show_taskgraph("morphed_task_graph", options)
|
||||
return self.show_taskgraph(command_context, "morphed_task_graph", options)
|
||||
|
||||
@SubCommand("taskgraph", "actions", description="Write actions.json to stdout")
|
||||
@CommandArgument(
|
||||
@ -224,7 +224,7 @@ class MachCommands(MachCommandBase):
|
||||
"`taskcluster/docs/parameters.rst`)`",
|
||||
)
|
||||
def taskgraph_actions(self, command_context, **options):
|
||||
return self.show_actions(options)
|
||||
return self.show_actions(command_context, options)
|
||||
|
||||
@SubCommand("taskgraph", "decision", description="Run the decision task")
|
||||
@CommandArgument(
|
||||
@ -349,7 +349,7 @@ class MachCommands(MachCommandBase):
|
||||
import taskgraph.decision
|
||||
|
||||
try:
|
||||
self.setup_logging()
|
||||
self.setup_logging(command_context)
|
||||
start = time.monotonic()
|
||||
ret = taskgraph.decision.taskgraph_decision(options)
|
||||
end = time.monotonic()
|
||||
@ -403,7 +403,7 @@ class MachCommands(MachCommandBase):
|
||||
from taskgraph.actions.util import get_parameters
|
||||
|
||||
try:
|
||||
self.setup_logging()
|
||||
self.setup_logging(command_context)
|
||||
|
||||
# the target task for this action (or null if it's a group action)
|
||||
task_id = json.loads(os.environ.get("ACTION_TASK_ID", "null"))
|
||||
@ -471,7 +471,7 @@ class MachCommands(MachCommandBase):
|
||||
raise Exception("unknown filename {}".format(filename))
|
||||
|
||||
try:
|
||||
self.setup_logging()
|
||||
self.setup_logging(command_context)
|
||||
task_id = options["task_id"]
|
||||
|
||||
if options["input"]:
|
||||
@ -502,19 +502,19 @@ class MachCommands(MachCommandBase):
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
def setup_logging(self, quiet=False, verbose=True):
|
||||
def setup_logging(self, command_context, quiet=False, verbose=True):
|
||||
"""
|
||||
Set up Python logging for all loggers, sending results to stderr (so
|
||||
that command output can be redirected easily) and adding the typical
|
||||
mach timestamp.
|
||||
"""
|
||||
# remove the old terminal handler
|
||||
old = self.log_manager.replace_terminal_handler(None)
|
||||
old = command_context.log_manager.replace_terminal_handler(None)
|
||||
|
||||
# re-add it, with level and fh set appropriately
|
||||
if not quiet:
|
||||
level = logging.DEBUG if verbose else logging.INFO
|
||||
self.log_manager.add_terminal_logging(
|
||||
command_context.log_manager.add_terminal_logging(
|
||||
fh=sys.stderr,
|
||||
level=level,
|
||||
write_interval=old.formatter.write_interval,
|
||||
@ -522,10 +522,12 @@ class MachCommands(MachCommandBase):
|
||||
)
|
||||
|
||||
# all of the taskgraph logging is unstructured logging
|
||||
self.log_manager.enable_unstructured()
|
||||
command_context.log_manager.enable_unstructured()
|
||||
|
||||
def show_taskgraph(self, graph_attr, options):
|
||||
self.setup_logging(quiet=options["quiet"], verbose=options["verbose"])
|
||||
def show_taskgraph(self, command_context, graph_attr, options):
|
||||
self.setup_logging(
|
||||
command_context, quiet=options["quiet"], verbose=options["verbose"]
|
||||
)
|
||||
vcs = None
|
||||
base_out = ""
|
||||
base_ref = None
|
||||
@ -534,7 +536,7 @@ class MachCommands(MachCommandBase):
|
||||
if options["diff"]:
|
||||
from mozversioncontrol import get_repository_object
|
||||
|
||||
vcs = get_repository_object(self.topsrcdir)
|
||||
vcs = get_repository_object(command_context.topsrcdir)
|
||||
with vcs:
|
||||
if not vcs.working_directory_clean():
|
||||
print("abort: can't diff taskgraph with dirty working directory")
|
||||
@ -571,7 +573,7 @@ class MachCommands(MachCommandBase):
|
||||
finally:
|
||||
vcs.update(cur_ref)
|
||||
|
||||
diffcmd = self._mach_context.settings["taskgraph"]["diffcmd"]
|
||||
diffcmd = command_context._mach_context.settings["taskgraph"]["diffcmd"]
|
||||
diffcmd = diffcmd.format(attr=graph_attr, base=base_ref, cur=cur_ref)
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode="w") as base:
|
||||
@ -677,14 +679,16 @@ class MachCommands(MachCommandBase):
|
||||
)
|
||||
return filtered_taskgraph
|
||||
|
||||
def show_actions(self, options):
|
||||
def show_actions(self, command_context, options):
|
||||
import taskgraph
|
||||
import taskgraph.actions
|
||||
import taskgraph.generator
|
||||
import taskgraph.parameters
|
||||
|
||||
try:
|
||||
self.setup_logging(quiet=options["quiet"], verbose=options["verbose"])
|
||||
self.setup_logging(
|
||||
command_context, quiet=options["quiet"], verbose=options["verbose"]
|
||||
)
|
||||
parameters = taskgraph.parameters.parameters_loader(options["parameters"])
|
||||
|
||||
tgg = taskgraph.generator.TaskGraphGenerator(
|
||||
|
@ -44,7 +44,7 @@ class MachCommands(MachCommandBase):
|
||||
sys.path.append(AWSY_PATH)
|
||||
from awsy import ITERATIONS, PER_TAB_PAUSE, SETTLE_WAIT_TIME, MAX_TABS
|
||||
|
||||
def run_awsy(self, tests, binary=None, **kwargs):
|
||||
def run_awsy(self, command_context, tests, binary=None, **kwargs):
|
||||
import json
|
||||
from mozlog.structured import commandline
|
||||
|
||||
@ -52,7 +52,7 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
parser = setup_awsy_argument_parser()
|
||||
|
||||
awsy_source_dir = os.path.join(self.topsrcdir, "testing", "awsy")
|
||||
awsy_source_dir = os.path.join(command_context.topsrcdir, "testing", "awsy")
|
||||
if not tests:
|
||||
tests = [os.path.join(awsy_source_dir, "awsy", "test_memory_usage.py")]
|
||||
|
||||
@ -88,7 +88,7 @@ class MachCommands(MachCommandBase):
|
||||
runtime_testvars[arg] = kwargs[arg]
|
||||
|
||||
if "webRootDir" not in runtime_testvars:
|
||||
awsy_tests_dir = os.path.join(self.topobjdir, "_tests", "awsy")
|
||||
awsy_tests_dir = os.path.join(command_context.topobjdir, "_tests", "awsy")
|
||||
web_root_dir = os.path.join(awsy_tests_dir, "html")
|
||||
runtime_testvars["webRootDir"] = web_root_dir
|
||||
else:
|
||||
@ -122,15 +122,16 @@ class MachCommands(MachCommandBase):
|
||||
tooltool_args = {
|
||||
"args": [
|
||||
sys.executable,
|
||||
os.path.join(self.topsrcdir, "mach"),
|
||||
os.path.join(command_context.topsrcdir, "mach"),
|
||||
"artifact",
|
||||
"toolchain",
|
||||
"-v",
|
||||
"--tooltool-manifest=%s" % manifest_file,
|
||||
"--cache-dir=%s" % os.path.join(self.topsrcdir, "tooltool-cache"),
|
||||
"--cache-dir=%s"
|
||||
% os.path.join(command_context.topsrcdir, "tooltool-cache"),
|
||||
]
|
||||
}
|
||||
self.run_process(cwd=page_load_test_dir, **tooltool_args)
|
||||
command_context.run_process(cwd=page_load_test_dir, **tooltool_args)
|
||||
tp5nzip = os.path.join(page_load_test_dir, "tp5n.zip")
|
||||
tp5nmanifest = os.path.join(page_load_test_dir, "tp5n", "tp5n.manifest")
|
||||
if not os.path.exists(tp5nmanifest):
|
||||
@ -138,7 +139,7 @@ class MachCommands(MachCommandBase):
|
||||
"args": ["unzip", "-q", "-o", tp5nzip, "-d", page_load_test_dir]
|
||||
}
|
||||
try:
|
||||
self.run_process(**unzip_args)
|
||||
command_context.run_process(**unzip_args)
|
||||
except Exception as exc:
|
||||
troubleshoot = ""
|
||||
if mozinfo.os == "win":
|
||||
@ -147,7 +148,7 @@ class MachCommands(MachCommandBase):
|
||||
"directory closer to the drive root."
|
||||
)
|
||||
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"awsy",
|
||||
{"directory": page_load_test_dir, "exception": exc},
|
||||
@ -172,7 +173,7 @@ class MachCommands(MachCommandBase):
|
||||
# Work around a startup crash with DMD on windows
|
||||
if mozinfo.os == "win":
|
||||
kwargs["pref"] = "security.sandbox.content.level:0"
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.WARNING,
|
||||
"awsy",
|
||||
{},
|
||||
@ -344,11 +345,13 @@ class MachCommands(MachCommandBase):
|
||||
tests.append(obj["file_relpath"])
|
||||
del kwargs["test_objects"]
|
||||
|
||||
if not kwargs.get("binary") and conditions.is_firefox(self):
|
||||
if not kwargs.get("binary") and conditions.is_firefox(command_context):
|
||||
try:
|
||||
kwargs["binary"] = self.get_binary_path("app")
|
||||
kwargs["binary"] = command_context.get_binary_path("app")
|
||||
except BinaryNotFoundException as e:
|
||||
self.log(logging.ERROR, "awsy", {"error": str(e)}, "ERROR: {error}")
|
||||
self.log(logging.INFO, "awsy", {"help": e.help()}, "{help}")
|
||||
command_context.log(
|
||||
logging.ERROR, "awsy", {"error": str(e)}, "ERROR: {error}"
|
||||
)
|
||||
command_context.log(logging.INFO, "awsy", {"help": e.help()}, "{help}")
|
||||
return 1
|
||||
return self.run_awsy(tests, **kwargs)
|
||||
return self.run_awsy(command_context, tests, **kwargs)
|
||||
|
@ -15,9 +15,9 @@ requirements = os.path.join(os.path.dirname(__file__), "requirements", "base.txt

@CommandProvider
class CondprofileCommandProvider(MachCommandBase):
def _init(self):
self.activate_virtualenv()
self.virtualenv_manager.install_pip_requirements(
def _init(self, command_context):
command_context.activate_virtualenv()
command_context.virtualenv_manager.install_pip_requirements(
requirements, require_hashes=False
)

@ -45,7 +45,7 @@ class CondprofileCommandProvider(MachCommandBase):
download_cache,
repo,
):
self._init()
self._init(command_context)
from condprof.client import get_profile
from condprof.util import get_current_platform

@ -91,19 +91,21 @@ class CondprofileCommandProvider(MachCommandBase):
@CommandArgument("--device-name", help="Name of the device", type=str, default=None)
def run(self, command_context, **kw):
os.environ["MANUAL_MACH_RUN"] = "1"
self._init()
self._init(command_context)

if kw["firefox"] is None:
try:
kw["firefox"] = self.get_binary_path()
kw["firefox"] = command_context.get_binary_path()
except BinaryNotFoundException as e:
self.log(
command_context.log(
logging.ERROR,
"run-condprofile",
{"error": str(e)},
"ERROR: {error}",
)
self.log(logging.INFO, "run-condprofile", {"help": e.help()}, "{help}")
command_context.log(
logging.INFO, "run-condprofile", {"help": e.help()}, "{help}"
)
return 1

from condprof.runner import run

@ -100,19 +100,21 @@ class MachCommands(MachCommandBase):
)
def run_firefox_ui_functional(self, command_context, **kwargs):
try:
kwargs["binary"] = kwargs["binary"] or self.get_binary_path("app")
kwargs["binary"] = kwargs["binary"] or command_context.get_binary_path(
"app"
)
except BinaryNotFoundException as e:
self.log(
command_context.log(
logging.ERROR,
"firefox-ui-functional",
{"error": str(e)},
"ERROR: {error}",
)
self.log(
command_context.log(
logging.INFO, "firefox-ui-functional", {"help": e.help()}, "{help}"
)
return 1

return run_firefox_ui_test(
testtype="functional", topsrcdir=self.topsrcdir, **kwargs
testtype="functional", topsrcdir=command_context.topsrcdir, **kwargs
)

@ -57,10 +57,12 @@ class GeckoDriver(MachCommandBase):
)
def run(self, command_context, binary, params, debug, debugger, debugger_args):
try:
binpath = self.get_binary_path("geckodriver")
binpath = command_context.get_binary_path("geckodriver")
except BinaryNotFoundException as e:
self.log(logging.ERROR, "geckodriver", {"error": str(e)}, "ERROR: {error}")
self.log(
command_context.log(
logging.ERROR, "geckodriver", {"error": str(e)}, "ERROR: {error}"
)
command_context.log(
logging.INFO,
"geckodriver",
{},
@ -78,19 +80,21 @@ class GeckoDriver(MachCommandBase):

if binary is None:
try:
binary = self.get_binary_path("app")
binary = command_context.get_binary_path("app")
except BinaryNotFoundException as e:
self.log(
command_context.log(
logging.ERROR, "geckodriver", {"error": str(e)}, "ERROR: {error}"
)
self.log(logging.INFO, "geckodriver", {"help": e.help()}, "{help}")
command_context.log(
logging.INFO, "geckodriver", {"help": e.help()}, "{help}"
)
return 1

args.extend(["--binary", binary])

if debug or debugger or debugger_args:
if "INSIDE_EMACS" in os.environ:
self.log_manager.terminal_handler.setLevel(logging.WARNING)
command_context.log_manager.terminal_handler.setLevel(logging.WARNING)

import mozdebug

@ -102,8 +106,8 @@ class GeckoDriver(MachCommandBase):
)

if debugger:
self.debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
if not self.debuggerInfo:
debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
if not debuggerInfo:
print("Could not find a suitable debugger in your PATH.")
return 1

@ -122,6 +126,8 @@ class GeckoDriver(MachCommandBase):
return 1

# Prepend the debugger args.
args = [self.debuggerInfo.path] + self.debuggerInfo.args + args
args = [debuggerInfo.path] + debuggerInfo.args + args

return self.run_process(args=args, ensure_exit_code=False, pass_thru=True)
return command_context.run_process(
args=args, ensure_exit_code=False, pass_thru=True
)

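Besides the command_context change, the geckodriver hunks above drop the stored self.debuggerInfo attribute in favour of a local variable, the last piece of per-command state in that class. A hedged sketch of that lookup in isolation; only the mozdebug.get_debugger_info call is taken from the hunk, and the wrap_with_debugger helper is hypothetical:

    import mozdebug


    def wrap_with_debugger(args, debugger, debugger_args=None):
        # Keep the debugger description in a local rather than on self.
        debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
        if not debuggerInfo:
            print("Could not find a suitable debugger in your PATH.")
            return None
        # Prepend the debugger command and its arguments, as the hunk does.
        return [debuggerInfo.path] + debuggerInfo.args + args
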
@ -128,6 +128,6 @@ class GtestCommands(MachCommandBase):
parser=setup_argument_parser,
)
def gtest(self, command_context, **kwargs):
self._mach_context.activate_mozharness_venv()
result = run_gtest(self._mach_context, **kwargs)
command_context._mach_context.activate_mozharness_venv()
result = run_gtest(command_context._mach_context, **kwargs)
return 0 if result else 1

@ -232,7 +232,7 @@ class AddTest(MachCommandBase):
|
||||
print("Sorry, `addtest` doesn't currently know how to add {}".format(suite))
|
||||
return 1
|
||||
|
||||
creator = creator_cls(self.topsrcdir, test, suite, doc, **kwargs)
|
||||
creator = creator_cls(command_context.topsrcdir, test, suite, doc, **kwargs)
|
||||
|
||||
creator.check_args()
|
||||
|
||||
@ -391,7 +391,7 @@ class Test(MachCommandBase):
|
||||
from mozlog.handlers import StreamHandler
|
||||
from moztest.resolve import get_suite_definition, TestResolver, TEST_SUITES
|
||||
|
||||
resolver = self._spawn(TestResolver)
|
||||
resolver = command_context._spawn(TestResolver)
|
||||
run_suites, run_tests = resolver.resolve_metadata(what)
|
||||
|
||||
if not run_suites and not run_tests:
|
||||
@ -412,12 +412,12 @@ class Test(MachCommandBase):
|
||||
extra_args = [extra_args_debugger_notation]
|
||||
|
||||
# Create shared logger
|
||||
format_args = {"level": self._mach_context.settings["test"]["level"]}
|
||||
format_args = {"level": command_context._mach_context.settings["test"]["level"]}
|
||||
if not run_suites and len(run_tests) == 1:
|
||||
format_args["verbose"] = True
|
||||
format_args["compact"] = False
|
||||
|
||||
default_format = self._mach_context.settings["test"]["format"]
|
||||
default_format = command_context._mach_context.settings["test"]["format"]
|
||||
log = setup_logging(
|
||||
"mach-test", log_args, {default_format: sys.stdout}, format_args
|
||||
)
|
||||
@ -433,8 +433,11 @@ class Test(MachCommandBase):
|
||||
kwargs.setdefault("subsuite", None)
|
||||
|
||||
if "mach_command" in suite:
|
||||
res = self._mach_context.commands.dispatch(
|
||||
suite["mach_command"], self._mach_context, argv=extra_args, **kwargs
|
||||
res = command_context._mach_context.commands.dispatch(
|
||||
suite["mach_command"],
|
||||
command_context._mach_context,
|
||||
argv=extra_args,
|
||||
**kwargs
|
||||
)
|
||||
if res:
|
||||
status = res
|
||||
@ -456,9 +459,9 @@ class Test(MachCommandBase):
|
||||
kwargs["log"] = log
|
||||
kwargs.setdefault("subsuite", None)
|
||||
|
||||
res = self._mach_context.commands.dispatch(
|
||||
res = command_context._mach_context.commands.dispatch(
|
||||
m["mach_command"],
|
||||
self._mach_context,
|
||||
command_context._mach_context,
|
||||
argv=extra_args,
|
||||
test_objects=tests,
|
||||
**kwargs
|
||||
@ -498,34 +501,38 @@ class MachCommands(MachCommandBase):
|
||||
log = commandline.setup_logging("cppunittest", {}, {"tbpl": sys.stdout})
|
||||
|
||||
# See if we have crash symbols
|
||||
symbols_path = os.path.join(self.distdir, "crashreporter-symbols")
|
||||
symbols_path = os.path.join(command_context.distdir, "crashreporter-symbols")
|
||||
if not os.path.isdir(symbols_path):
|
||||
symbols_path = None
|
||||
|
||||
# If no tests specified, run all tests in main manifest
|
||||
tests = params["test_files"]
|
||||
if not tests:
|
||||
tests = [os.path.join(self.distdir, "cppunittests")]
|
||||
manifest_path = os.path.join(self.topsrcdir, "testing", "cppunittest.ini")
|
||||
tests = [os.path.join(command_context.distdir, "cppunittests")]
|
||||
manifest_path = os.path.join(
|
||||
command_context.topsrcdir, "testing", "cppunittest.ini"
|
||||
)
|
||||
else:
|
||||
manifest_path = None
|
||||
|
||||
utility_path = self.bindir
|
||||
utility_path = command_context.bindir
|
||||
|
||||
if conditions.is_android(self):
|
||||
if conditions.is_android(command_context):
|
||||
from mozrunner.devices.android_device import (
|
||||
verify_android_device,
|
||||
InstallIntent,
|
||||
)
|
||||
|
||||
verify_android_device(self, install=InstallIntent.NO)
|
||||
verify_android_device(command_context, install=InstallIntent.NO)
|
||||
return self.run_android_test(tests, symbols_path, manifest_path, log)
|
||||
|
||||
return self.run_desktop_test(
|
||||
tests, symbols_path, manifest_path, utility_path, log
|
||||
)
|
||||
|
||||
def run_desktop_test(self, tests, symbols_path, manifest_path, utility_path, log):
|
||||
def run_desktop_test(
|
||||
self, command_context, tests, symbols_path, manifest_path, utility_path, log
|
||||
):
|
||||
import runcppunittests as cppunittests
|
||||
from mozlog import commandline
|
||||
|
||||
@ -536,7 +543,7 @@ class MachCommands(MachCommandBase):
|
||||
options.symbols_path = symbols_path
|
||||
options.manifest_path = manifest_path
|
||||
options.utility_path = utility_path
|
||||
options.xre_path = self.bindir
|
||||
options.xre_path = command_context.bindir
|
||||
|
||||
try:
|
||||
result = cppunittests.run_test_harness(options, tests)
|
||||
@ -547,7 +554,9 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
return 0 if result else 1
|
||||
|
||||
def run_android_test(self, tests, symbols_path, manifest_path, log):
|
||||
def run_android_test(
|
||||
self, command_context, tests, symbols_path, manifest_path, log
|
||||
):
|
||||
import remotecppunittests as remotecppunittests
|
||||
from mozlog import commandline
|
||||
|
||||
@ -558,14 +567,16 @@ class MachCommands(MachCommandBase):
|
||||
if not options.adb_path:
|
||||
from mozrunner.devices.android_device import get_adb_path
|
||||
|
||||
options.adb_path = get_adb_path(self)
|
||||
options.adb_path = get_adb_path(command_context)
|
||||
options.symbols_path = symbols_path
|
||||
options.manifest_path = manifest_path
|
||||
options.xre_path = self.bindir
|
||||
options.local_lib = self.bindir.replace("bin", "fennec")
|
||||
for file in os.listdir(os.path.join(self.topobjdir, "dist")):
|
||||
options.xre_path = command_context.bindir
|
||||
options.local_lib = command_context.bindir.replace("bin", "fennec")
|
||||
for file in os.listdir(os.path.join(command_context.topobjdir, "dist")):
|
||||
if file.endswith(".apk") and file.startswith("fennec"):
|
||||
options.local_apk = os.path.join(self.topobjdir, "dist", file)
|
||||
options.local_apk = os.path.join(
|
||||
command_context.topobjdir, "dist", file
|
||||
)
|
||||
log.info("using APK: " + options.local_apk)
|
||||
break
|
||||
|
||||
@ -599,13 +610,13 @@ class SpiderMonkeyTests(MachCommandBase):
|
||||
def run_jstests(self, command_context, shell, params):
|
||||
import subprocess
|
||||
|
||||
self.virtualenv_manager.ensure()
|
||||
python = self.virtualenv_manager.python_path
|
||||
command_context.virtualenv_manager.ensure()
|
||||
python = command_context.virtualenv_manager.python_path
|
||||
|
||||
js = shell or os.path.join(self.bindir, executable_name("js"))
|
||||
js = shell or os.path.join(command_context.bindir, executable_name("js"))
|
||||
jstest_cmd = [
|
||||
python,
|
||||
os.path.join(self.topsrcdir, "js", "src", "tests", "jstests.py"),
|
||||
os.path.join(command_context.topsrcdir, "js", "src", "tests", "jstests.py"),
|
||||
js,
|
||||
] + params
|
||||
|
||||
@ -632,13 +643,15 @@ class SpiderMonkeyTests(MachCommandBase):
|
||||
def run_jittests(self, command_context, shell, cgc, params):
|
||||
import subprocess
|
||||
|
||||
self.virtualenv_manager.ensure()
|
||||
python = self.virtualenv_manager.python_path
|
||||
command_context.virtualenv_manager.ensure()
|
||||
python = command_context.virtualenv_manager.python_path
|
||||
|
||||
js = shell or os.path.join(self.bindir, executable_name("js"))
|
||||
js = shell or os.path.join(command_context.bindir, executable_name("js"))
|
||||
jittest_cmd = [
|
||||
python,
|
||||
os.path.join(self.topsrcdir, "js", "src", "jit-test", "jit_test.py"),
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "js", "src", "jit-test", "jit_test.py"
|
||||
),
|
||||
js,
|
||||
] + params
|
||||
|
||||
@ -661,24 +674,28 @@ class SpiderMonkeyTests(MachCommandBase):
|
||||
def run_jsapitests(self, command_context, test_name=None):
|
||||
import subprocess
|
||||
|
||||
jsapi_tests_cmd = [os.path.join(self.bindir, executable_name("jsapi-tests"))]
|
||||
jsapi_tests_cmd = [
|
||||
os.path.join(command_context.bindir, executable_name("jsapi-tests"))
|
||||
]
|
||||
if test_name:
|
||||
jsapi_tests_cmd.append(test_name)
|
||||
|
||||
test_env = os.environ.copy()
|
||||
test_env["TOPSRCDIR"] = self.topsrcdir
|
||||
test_env["TOPSRCDIR"] = command_context.topsrcdir
|
||||
|
||||
return subprocess.call(jsapi_tests_cmd, env=test_env)
|
||||
|
||||
def run_check_js_msg(self):
|
||||
def run_check_js_msg(self, command_context):
|
||||
import subprocess
|
||||
|
||||
self.virtualenv_manager.ensure()
|
||||
python = self.virtualenv_manager.python_path
|
||||
command_context.virtualenv_manager.ensure()
|
||||
python = command_context.virtualenv_manager.python_path
|
||||
|
||||
check_cmd = [
|
||||
python,
|
||||
os.path.join(self.topsrcdir, "config", "check_js_msg_encoding.py"),
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "config", "check_js_msg_encoding.py"
|
||||
),
|
||||
]
|
||||
|
||||
return subprocess.call(check_cmd)
|
||||
@ -699,7 +716,7 @@ class JsShellTests(MachCommandBase):
|
||||
description="Run benchmarks in the SpiderMonkey JS shell.",
|
||||
)
|
||||
def run_jsshelltests(self, command_context, **kwargs):
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
from jsshell import benchmark
|
||||
|
||||
return benchmark.run(**kwargs)
|
||||
@ -728,14 +745,14 @@ class CramTest(MachCommandBase):
|
||||
def cramtest(
|
||||
self, command_context, cram_args=None, test_paths=None, test_objects=None
|
||||
):
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
import mozinfo
|
||||
from manifestparser import TestManifest
|
||||
|
||||
if test_objects is None:
|
||||
from moztest.resolve import TestResolver
|
||||
|
||||
resolver = self._spawn(TestResolver)
|
||||
resolver = command_context._spawn(TestResolver)
|
||||
if test_paths:
|
||||
# If we were given test paths, try to find tests matching them.
|
||||
test_objects = resolver.resolve_tests(paths=test_paths, flavor="cram")
|
||||
@ -745,16 +762,16 @@ class CramTest(MachCommandBase):
|
||||
|
||||
if not test_objects:
|
||||
message = "No tests were collected, check spelling of the test paths."
|
||||
self.log(logging.WARN, "cramtest", {}, message)
|
||||
command_context.log(logging.WARN, "cramtest", {}, message)
|
||||
return 1
|
||||
|
||||
mp = TestManifest()
|
||||
mp.tests.extend(test_objects)
|
||||
tests = mp.active_tests(disabled=False, **mozinfo.info)
|
||||
|
||||
python = self.virtualenv_manager.python_path
|
||||
python = command_context.virtualenv_manager.python_path
|
||||
cmd = [python, "-m", "cram"] + cram_args + [t["relpath"] for t in tests]
|
||||
return subprocess.call(cmd, cwd=self.topsrcdir)
|
||||
return subprocess.call(cmd, cwd=command_context.topsrcdir)
|
||||
|
||||
|
||||
@CommandProvider
|
||||
@ -898,10 +915,10 @@ class TestInfoCommand(MachCommandBase):
|
||||
from mozbuild.build_commands import Build
|
||||
|
||||
try:
|
||||
self.config_environment
|
||||
command_context.config_environment
|
||||
except BuildEnvironmentNotFoundException:
|
||||
print("Looks like configure has not run yet, running it now...")
|
||||
builder = Build(self._mach_context, None)
|
||||
builder = Build(command_context._mach_context, None)
|
||||
builder.configure(command_context)
|
||||
|
||||
ti = testinfo.TestInfoReport(verbose)
|
||||
@ -955,9 +972,9 @@ class RustTests(MachCommandBase):
|
||||
description="Run rust unit tests (via cargo test).",
|
||||
)
|
||||
def run_rusttests(self, command_context, **kwargs):
|
||||
return self._mach_context.commands.dispatch(
|
||||
return command_context._mach_context.commands.dispatch(
|
||||
"build",
|
||||
self._mach_context,
|
||||
command_context._mach_context,
|
||||
what=["pre-export", "export", "recurse_rusttests"],
|
||||
)
|
||||
|
||||
@ -973,7 +990,7 @@ class TestFluentMigration(MachCommandBase):
|
||||
def run_migration_tests(self, command_context, test_paths=None, **kwargs):
|
||||
if not test_paths:
|
||||
test_paths = []
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
from test_fluent_migrations import fmt
|
||||
|
||||
rv = 0
|
||||
@ -982,7 +999,7 @@ class TestFluentMigration(MachCommandBase):
|
||||
try:
|
||||
context = fmt.inspect_migration(to_test)
|
||||
for issue in context["issues"]:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"fluent-migration-test",
|
||||
{
|
||||
@ -1000,14 +1017,14 @@ class TestFluentMigration(MachCommandBase):
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"fluent-migration-test",
|
||||
{"error": str(e), "file": to_test},
|
||||
"ERROR in {file}: {error}",
|
||||
)
|
||||
rv |= 1
|
||||
obj_dir = fmt.prepare_object_dir(self)
|
||||
obj_dir = fmt.prepare_object_dir(command_context)
|
||||
for context in with_context:
|
||||
rv |= fmt.test_migration(self, obj_dir, **context)
|
||||
rv |= fmt.test_migration(command_context, obj_dir, **context)
|
||||
return rv
|
||||
|
@ -80,33 +80,37 @@ class MarionetteTest(MachCommandBase):
|
||||
del kwargs["test_objects"]
|
||||
|
||||
if not tests:
|
||||
if conditions.is_thunderbird(self):
|
||||
if conditions.is_thunderbird(command_context):
|
||||
tests = [
|
||||
os.path.join(
|
||||
self.topsrcdir, "comm/testing/marionette/unit-tests.ini"
|
||||
command_context.topsrcdir,
|
||||
"comm/testing/marionette/unit-tests.ini",
|
||||
)
|
||||
]
|
||||
else:
|
||||
tests = [
|
||||
os.path.join(
|
||||
self.topsrcdir,
|
||||
command_context.topsrcdir,
|
||||
"testing/marionette/harness/marionette_harness/tests/unit-tests.ini",
|
||||
)
|
||||
]
|
||||
|
||||
if not kwargs.get("binary") and (
|
||||
conditions.is_firefox(self) or conditions.is_thunderbird(self)
|
||||
conditions.is_firefox(command_context)
|
||||
or conditions.is_thunderbird(command_context)
|
||||
):
|
||||
try:
|
||||
kwargs["binary"] = self.get_binary_path("app")
|
||||
kwargs["binary"] = command_context.get_binary_path("app")
|
||||
except BinaryNotFoundException as e:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"marionette-test",
|
||||
{"error": str(e)},
|
||||
"ERROR: {error}",
|
||||
)
|
||||
self.log(logging.INFO, "marionette-test", {"help": e.help()}, "{help}")
|
||||
command_context.log(
|
||||
logging.INFO, "marionette-test", {"help": e.help()}, "{help}"
|
||||
)
|
||||
return 1
|
||||
|
||||
return run_marionette(tests, topsrcdir=self.topsrcdir, **kwargs)
|
||||
return run_marionette(tests, topsrcdir=command_context.topsrcdir, **kwargs)
|
||||
|
@ -71,5 +71,5 @@ class MachCommands(MachCommandBase):
parser=setup_marionette_argument_parser,
)
def run_marionette_test(self, command_context, **kwargs):
self.context.activate_mozharness_venv()
return run_marionette(self.context, **kwargs)
command_context.context.activate_mozharness_venv()
return run_marionette(command_context.context, **kwargs)

@ -165,7 +165,7 @@ class MochitestRunner(MozbuildObject):
|
||||
from mozrunner.devices.android_device import get_adb_path
|
||||
|
||||
if not kwargs["adbPath"]:
|
||||
kwargs["adbPath"] = get_adb_path(self)
|
||||
kwargs["adbPath"] = get_adb_path(command_context)
|
||||
|
||||
options = Namespace(**kwargs)
|
||||
|
||||
@ -321,11 +321,11 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
# TODO: This is only strictly necessary while mochitest is using Python
|
||||
# 2 and can be removed once the command is migrated to Python 3.
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
|
||||
buildapp = None
|
||||
for app in SUPPORTED_APPS:
|
||||
if conditions.is_buildapp_in(self, apps=[app]):
|
||||
if conditions.is_buildapp_in(command_context, apps=[app]):
|
||||
buildapp = app
|
||||
break
|
||||
|
||||
@ -346,7 +346,7 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
from mozbuild.controller.building import BuildDriver
|
||||
|
||||
self._ensure_state_subdir_exists(".")
|
||||
command_context._ensure_state_subdir_exists(".")
|
||||
|
||||
test_paths = kwargs["test_paths"]
|
||||
kwargs["test_paths"] = []
|
||||
@ -357,21 +357,23 @@ class MachCommands(MachCommandBase):
|
||||
if not mozdebug.get_debugger_info(kwargs.get("debugger")):
|
||||
sys.exit(1)
|
||||
|
||||
mochitest = self._spawn(MochitestRunner)
|
||||
mochitest = command_context._spawn(MochitestRunner)
|
||||
tests = []
|
||||
if resolve_tests:
|
||||
tests = mochitest.resolve_tests(
|
||||
test_paths, test_objects, cwd=self._mach_context.cwd
|
||||
test_paths, test_objects, cwd=command_context._mach_context.cwd
|
||||
)
|
||||
|
||||
if not kwargs.get("log"):
|
||||
# Create shared logger
|
||||
format_args = {"level": self._mach_context.settings["test"]["level"]}
|
||||
format_args = {
|
||||
"level": command_context._mach_context.settings["test"]["level"]
|
||||
}
|
||||
if len(tests) == 1:
|
||||
format_args["verbose"] = True
|
||||
format_args["compact"] = False
|
||||
|
||||
default_format = self._mach_context.settings["test"]["format"]
|
||||
default_format = command_context._mach_context.settings["test"]["format"]
|
||||
kwargs["log"] = setup_logging(
|
||||
"mach-mochitest", kwargs, {default_format: sys.stdout}, format_args
|
||||
)
|
||||
@ -379,7 +381,7 @@ class MachCommands(MachCommandBase):
|
||||
if isinstance(handler, StreamHandler):
|
||||
handler.formatter.inner.summary_on_shutdown = True
|
||||
|
||||
driver = self._spawn(BuildDriver)
|
||||
driver = command_context._spawn(BuildDriver)
|
||||
driver.install_tests()
|
||||
|
||||
subsuite = kwargs.get("subsuite")
|
||||
@ -423,12 +425,14 @@ class MachCommands(MachCommandBase):
|
||||
"websocketprocessbridge",
|
||||
"websocketprocessbridge_requirements_3.txt",
|
||||
)
|
||||
self.virtualenv_manager.activate()
|
||||
self.virtualenv_manager.install_pip_requirements(req, require_hashes=False)
|
||||
command_context.virtualenv_manager.activate()
|
||||
command_context.virtualenv_manager.install_pip_requirements(
|
||||
req, require_hashes=False
|
||||
)
|
||||
|
||||
# sys.executable is used to start the websocketprocessbridge, though for some
|
||||
# reason it doesn't get set when calling `activate_this.py` in the virtualenv.
|
||||
sys.executable = self.virtualenv_manager.python_path
|
||||
sys.executable = command_context.virtualenv_manager.python_path
|
||||
|
||||
# This is a hack to introduce an option in mach to not send
|
||||
# filtered tests to the mochitest harness. Mochitest harness will read
|
||||
@ -480,7 +484,7 @@ class MachCommands(MachCommandBase):
|
||||
|
||||
# verify installation
|
||||
verify_android_device(
|
||||
self,
|
||||
command_context,
|
||||
install=install,
|
||||
xre=False,
|
||||
network=True,
|
||||
@ -504,7 +508,9 @@ class MachCommands(MachCommandBase):
|
||||
# specific mochitest suite has to be loaded. See Bug 1637463.
|
||||
harness_args.update({"suite_name": suite_name})
|
||||
|
||||
result = run_mochitest(self._mach_context, tests=tests, **harness_args)
|
||||
result = run_mochitest(
|
||||
command_context._mach_context, tests=tests, **harness_args
|
||||
)
|
||||
|
||||
if result:
|
||||
overall = result
|
||||
@ -537,7 +543,7 @@ class GeckoviewJunitCommands(MachCommandBase):
|
||||
default=False,
|
||||
)
|
||||
def run_junit(self, command_context, no_install, **kwargs):
|
||||
self._ensure_state_subdir_exists(".")
|
||||
command_context._ensure_state_subdir_exists(".")
|
||||
|
||||
from mozrunner.devices.android_device import (
|
||||
get_adb_path,
|
||||
@ -549,7 +555,7 @@ class GeckoviewJunitCommands(MachCommandBase):
|
||||
app = kwargs.get("app")
|
||||
device_serial = kwargs.get("deviceSerial")
|
||||
verify_android_device(
|
||||
self,
|
||||
command_context,
|
||||
install=InstallIntent.NO if no_install else InstallIntent.YES,
|
||||
xre=False,
|
||||
app=app,
|
||||
@ -557,16 +563,20 @@ class GeckoviewJunitCommands(MachCommandBase):
|
||||
)
|
||||
|
||||
if not kwargs.get("adbPath"):
|
||||
kwargs["adbPath"] = get_adb_path(self)
|
||||
kwargs["adbPath"] = get_adb_path(command_context)
|
||||
|
||||
if not kwargs.get("log"):
|
||||
from mozlog.commandline import setup_logging
|
||||
|
||||
format_args = {"level": self._mach_context.settings["test"]["level"]}
|
||||
default_format = self._mach_context.settings["test"]["format"]
|
||||
format_args = {
|
||||
"level": command_context._mach_context.settings["test"]["level"]
|
||||
}
|
||||
default_format = command_context._mach_context.settings["test"]["format"]
|
||||
kwargs["log"] = setup_logging(
|
||||
"mach-mochitest", kwargs, {default_format: sys.stdout}, format_args
|
||||
)
|
||||
|
||||
mochitest = self._spawn(MochitestRunner)
|
||||
return mochitest.run_geckoview_junit_test(self._mach_context, **kwargs)
|
||||
mochitest = command_context._spawn(MochitestRunner)
|
||||
return mochitest.run_geckoview_junit_test(
|
||||
command_context._mach_context, **kwargs
|
||||
)
|
||||
|
@ -202,8 +202,8 @@ class MochitestCommands(MachCommandBase):
|
||||
parser=setup_mochitest_argument_parser,
|
||||
)
|
||||
def mochitest(self, command_context, **kwargs):
|
||||
self._mach_context.activate_mozharness_venv()
|
||||
return run_test(self._mach_context, False, **kwargs)
|
||||
command_context._mach_context.activate_mozharness_venv()
|
||||
return run_test(command_context._mach_context, False, **kwargs)
|
||||
|
||||
@Command(
|
||||
"geckoview-junit",
|
||||
@ -212,5 +212,5 @@ class MochitestCommands(MachCommandBase):
|
||||
parser=setup_junit_argument_parser,
|
||||
)
|
||||
def geckoview_junit(self, command_context, **kwargs):
|
||||
self._mach_context.activate_mozharness_venv()
|
||||
return run_test(self._mach_context, True, **kwargs)
|
||||
command_context._mach_context.activate_mozharness_venv()
|
||||
return run_test(command_context._mach_context, True, **kwargs)
|
||||
|
@ -217,5 +217,5 @@ class MozharnessCommands(MachCommandBase):
|
||||
parser=get_parser,
|
||||
)
|
||||
def mozharness(self, command_context, **kwargs):
|
||||
runner = self._spawn(MozharnessRunner)
|
||||
runner = command_context._spawn(MozharnessRunner)
|
||||
return runner.run_suite(kwargs.pop("suite_name")[0], **kwargs)
|
||||
|
@ -323,7 +323,7 @@ class MachRaptor(MachCommandBase):
|
||||
# stop |mach bootstrap| from running
|
||||
from raptor.power import enable_charging, disable_charging
|
||||
|
||||
build_obj = self
|
||||
build_obj = command_context
|
||||
|
||||
is_android = (
|
||||
Conditions.is_android(build_obj) or kwargs["app"] in ANDROID_BROWSERS
|
||||
@ -369,7 +369,7 @@ class MachRaptor(MachCommandBase):
|
||||
if arg.startswith("raptor"):
|
||||
in_mach = False
|
||||
|
||||
raptor = self._spawn(RaptorRunner)
|
||||
raptor = command_context._spawn(RaptorRunner)
|
||||
device = None
|
||||
|
||||
try:
|
||||
@ -378,8 +378,10 @@ class MachRaptor(MachCommandBase):
|
||||
disable_charging(device)
|
||||
return raptor.run_test(argv, kwargs)
|
||||
except BinaryNotFoundException as e:
|
||||
self.log(logging.ERROR, "raptor", {"error": str(e)}, "ERROR: {error}")
|
||||
self.log(logging.INFO, "raptor", {"help": e.help()}, "{help}")
|
||||
command_context.log(
|
||||
logging.ERROR, "raptor", {"error": str(e)}, "ERROR: {error}"
|
||||
)
|
||||
command_context.log(logging.INFO, "raptor", {"help": e.help()}, "{help}")
|
||||
return 1
|
||||
except Exception as e:
|
||||
print(repr(e))
|
||||
|
@ -132,7 +132,7 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser,
|
||||
)
|
||||
def run_talos_test(self, command_context, **kwargs):
|
||||
talos = self._spawn(TalosRunner)
|
||||
talos = command_context._spawn(TalosRunner)
|
||||
|
||||
try:
|
||||
return talos.run_test(sys.argv[2:])
|
||||
|
@ -19,10 +19,11 @@ class MachCommands(MachCommandBase):
|
||||
@CommandArgument("--dest", default=None, help="Where to write add-on.")
|
||||
def build(self, command_context, dest):
|
||||
src = os.path.join(
|
||||
self.topsrcdir, "services", "sync", "tps", "extensions", "tps"
|
||||
command_context.topsrcdir, "services", "sync", "tps", "extensions", "tps"
|
||||
)
|
||||
dest = os.path.join(
|
||||
dest or os.path.join(self.topobjdir, "services", "sync"), "tps.xpi"
|
||||
dest or os.path.join(command_context.topobjdir, "services", "sync"),
|
||||
"tps.xpi",
|
||||
)
|
||||
|
||||
if not os.path.exists(os.path.dirname(dest)):
|
||||
|
@ -470,8 +470,9 @@ def create_parser_testpaths():
|
||||
|
||||
@CommandProvider
|
||||
class MachCommands(MachCommandBase):
|
||||
def setup(self):
|
||||
self.activate_virtualenv()
|
||||
@staticmethod
|
||||
def setup(command_context):
|
||||
command_context.activate_virtualenv()
|
||||
|
||||
@Command(
|
||||
"web-platform-tests",
|
||||
@ -481,9 +482,9 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_wpt,
|
||||
)
|
||||
def run_web_platform_tests(self, command_context, **params):
|
||||
self.setup()
|
||||
self.setup(command_context)
|
||||
if params["product"] is None:
|
||||
if conditions.is_android(self):
|
||||
if conditions.is_android(command_context):
|
||||
params["product"] = "firefox_android"
|
||||
else:
|
||||
params["product"] = "firefox"
|
||||
@ -503,13 +504,16 @@ class MachCommands(MachCommandBase):
|
||||
if not mozdebug.get_debugger_info(params.get("debugger")):
|
||||
sys.exit(1)
|
||||
|
||||
wpt_setup = self._spawn(WebPlatformTestsRunnerSetup)
|
||||
wpt_setup._mach_context = self._mach_context
|
||||
wpt_setup = command_context._spawn(WebPlatformTestsRunnerSetup)
|
||||
wpt_setup._mach_context = command_context._mach_context
|
||||
wpt_runner = WebPlatformTestsRunner(wpt_setup)
|
||||
|
||||
logger = wpt_runner.setup_logging(**params)
|
||||
|
||||
if conditions.is_android(self) and params["product"] != "firefox_android":
|
||||
if (
|
||||
conditions.is_android(command_context)
|
||||
and params["product"] != "firefox_android"
|
||||
):
|
||||
logger.warning(
|
||||
"Must specify --product=firefox_android in Android environment."
|
||||
)
|
||||
@ -533,12 +537,12 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_update,
|
||||
)
|
||||
def update_web_platform_tests(self, command_context, **params):
|
||||
self.setup()
|
||||
self.virtualenv_manager.install_pip_package("html5lib==1.0.1")
|
||||
self.virtualenv_manager.install_pip_package("ujson")
|
||||
self.virtualenv_manager.install_pip_package("requests")
|
||||
self.setup(command_context)
|
||||
command_context.virtualenv_manager.install_pip_package("html5lib==1.0.1")
|
||||
command_context.virtualenv_manager.install_pip_package("ujson")
|
||||
command_context.virtualenv_manager.install_pip_package("requests")
|
||||
|
||||
wpt_updater = self._spawn(WebPlatformTestsUpdater)
|
||||
wpt_updater = command_context._spawn(WebPlatformTestsUpdater)
|
||||
logger = wpt_updater.setup_logging(**params)
|
||||
return wpt_updater.run_update(logger, **params)
|
||||
|
||||
@ -558,8 +562,8 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_manifest_update,
|
||||
)
|
||||
def wpt_manifest_update(self, command_context, **params):
|
||||
self.setup()
|
||||
wpt_setup = self._spawn(WebPlatformTestsRunnerSetup)
|
||||
self.setup(command_context)
|
||||
wpt_setup = command_context._spawn(WebPlatformTestsRunnerSetup)
|
||||
wpt_runner = WebPlatformTestsRunner(wpt_setup)
|
||||
logger = wpt_runner.setup_logging(**params)
|
||||
logger.warning(
|
||||
@ -575,12 +579,12 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_serve,
|
||||
)
|
||||
def wpt_serve(self, command_context, **params):
|
||||
self.setup()
|
||||
self.setup(command_context)
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger("web-platform-tests")
|
||||
logger.addHandler(logging.StreamHandler(sys.stdout))
|
||||
wpt_serve = self._spawn(WebPlatformTestsServeRunner)
|
||||
wpt_serve = command_context._spawn(WebPlatformTestsServeRunner)
|
||||
return wpt_serve.run(**params)
|
||||
|
||||
@Command(
|
||||
@ -592,7 +596,7 @@ class MachCommands(MachCommandBase):
|
||||
def wpt_summary(self, command_context, **params):
|
||||
import metasummary
|
||||
|
||||
wpt_setup = self._spawn(WebPlatformTestsRunnerSetup)
|
||||
wpt_setup = command_context._spawn(WebPlatformTestsRunnerSetup)
|
||||
return metasummary.run(wpt_setup.topsrcdir, wpt_setup.topobjdir, **params)
|
||||
|
||||
@Command(
|
||||
@ -612,9 +616,9 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_unittest,
|
||||
)
|
||||
def wpt_unittest(self, command_context, **params):
|
||||
self.setup()
|
||||
self.virtualenv_manager.install_pip_package("tox")
|
||||
runner = self._spawn(WebPlatformTestsUnittestRunner)
|
||||
self.setup(command_context)
|
||||
command_context.virtualenv_manager.install_pip_package("tox")
|
||||
runner = command_context._spawn(WebPlatformTestsUnittestRunner)
|
||||
return 0 if runner.run(**params) else 1
|
||||
|
||||
@Command(
|
||||
@ -624,7 +628,7 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_testpaths,
|
||||
)
|
||||
def wpt_test_paths(self, command_context, **params):
|
||||
runner = self._spawn(WebPlatformTestsTestPathsRunner)
|
||||
runner = command_context._spawn(WebPlatformTestsTestPathsRunner)
|
||||
runner.run(**params)
|
||||
return 0
|
||||
|
||||
@ -635,6 +639,6 @@ class MachCommands(MachCommandBase):
|
||||
parser=create_parser_fission_regressions,
|
||||
)
|
||||
def wpt_fission_regressions(self, command_context, **params):
|
||||
runner = self._spawn(WebPlatformTestsFissionRegressionsRunner)
|
||||
runner = command_context._spawn(WebPlatformTestsFissionRegressionsRunner)
|
||||
runner.run(**params)
|
||||
return 0
|
||||
|
@ -76,11 +76,11 @@ class WebPlatformTestsRunnerSetup(object):
|
||||
class MachCommands(MachCommandBase):
|
||||
@Command("web-platform-tests", category="testing", parser=create_parser_wpt)
|
||||
def run_web_platform_tests(self, command_context, **kwargs):
|
||||
self._mach_context.activate_mozharness_venv()
|
||||
command_context._mach_context.activate_mozharness_venv()
|
||||
return WebPlatformTestsRunner(
|
||||
WebPlatformTestsRunnerSetup(self._mach_context)
|
||||
WebPlatformTestsRunnerSetup(command_context._mach_context)
|
||||
).run(**kwargs)
|
||||
|
||||
@Command("wpt", category="testing", parser=create_parser_wpt)
|
||||
def run_wpt(self, command_context, **params):
|
||||
return self.run_web_platform_tests(**params)
|
||||
return command_context.run_web_platform_tests(**params)
|
||||
|
@ -238,18 +238,20 @@ class MachCommands(MachCommandBase):
|
||||
m.tests.extend(test_objects)
|
||||
params["manifest"] = m
|
||||
|
||||
driver = self._spawn(BuildDriver)
|
||||
driver = command_context._spawn(BuildDriver)
|
||||
driver.install_tests()
|
||||
|
||||
# We should probably have a utility function to ensure the tree is
|
||||
# ready to run tests. Until then, we just create the state dir (in
|
||||
# case the tree wasn't built with mach).
|
||||
self._ensure_state_subdir_exists(".")
|
||||
command_context._ensure_state_subdir_exists(".")
|
||||
|
||||
if not params.get("log"):
|
||||
log_defaults = {self._mach_context.settings["test"]["format"]: sys.stdout}
|
||||
log_defaults = {
|
||||
command_context._mach_context.settings["test"]["format"]: sys.stdout
|
||||
}
|
||||
fmt_defaults = {
|
||||
"level": self._mach_context.settings["test"]["level"],
|
||||
"level": command_context._mach_context.settings["test"]["level"],
|
||||
"verbose": True,
|
||||
}
|
||||
params["log"] = structured.commandline.setup_logging(
|
||||
@ -260,7 +262,10 @@ class MachCommands(MachCommandBase):
|
||||
# pylint --py3k W1619
|
||||
params["threadCount"] = int((cpu_count() * 3) / 2)
|
||||
|
||||
if conditions.is_android(self) or self.substs.get("MOZ_BUILD_APP") == "b2g":
|
||||
if (
|
||||
conditions.is_android(command_context)
|
||||
or command_context.substs.get("MOZ_BUILD_APP") == "b2g"
|
||||
):
|
||||
from mozrunner.devices.android_device import (
|
||||
verify_android_device,
|
||||
get_adb_path,
|
||||
@ -270,14 +275,17 @@ class MachCommands(MachCommandBase):
|
||||
install = InstallIntent.YES if params["setup"] else InstallIntent.NO
|
||||
device_serial = params.get("deviceSerial")
|
||||
verify_android_device(
|
||||
self, network=True, install=install, device_serial=device_serial
|
||||
command_context,
|
||||
network=True,
|
||||
install=install,
|
||||
device_serial=device_serial,
|
||||
)
|
||||
if not params["adbPath"]:
|
||||
params["adbPath"] = get_adb_path(self)
|
||||
xpcshell = self._spawn(AndroidXPCShellRunner)
|
||||
params["adbPath"] = get_adb_path(command_context)
|
||||
xpcshell = command_context._spawn(AndroidXPCShellRunner)
|
||||
else:
|
||||
xpcshell = self._spawn(XPCShellRunner)
|
||||
xpcshell.cwd = self._mach_context.cwd
|
||||
xpcshell = command_context._spawn(XPCShellRunner)
|
||||
xpcshell.cwd = command_context._mach_context.cwd
|
||||
|
||||
try:
|
||||
return xpcshell.run_test(**params)
|
||||
|
@ -61,5 +61,5 @@ class MochitestCommands(MachCommandBase):
|
||||
parser=parser_desktop,
|
||||
)
|
||||
def xpcshell(self, command_context, **kwargs):
|
||||
self._mach_context.activate_mozharness_venv()
|
||||
return run_xpcshell(self._mach_context, **kwargs)
|
||||
command_context._mach_context.activate_mozharness_venv()
|
||||
return run_xpcshell(command_context._mach_context, **kwargs)
|
||||
|
@ -80,17 +80,17 @@ class TelemetryTest(MachCommandBase):
|
||||
for obj in kwargs["test_objects"]:
|
||||
tests.append(obj["file_relpath"])
|
||||
del kwargs["test_objects"]
|
||||
if not kwargs.get("binary") and conditions.is_firefox(self):
|
||||
if not kwargs.get("binary") and conditions.is_firefox(command_context):
|
||||
try:
|
||||
kwargs["binary"] = self.get_binary_path("app")
|
||||
kwargs["binary"] = command_context.get_binary_path("app")
|
||||
except BinaryNotFoundException as e:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"telemetry-tests-client",
|
||||
{"error": str(e)},
|
||||
"ERROR: {error}",
|
||||
)
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO, "telemetry-tests-client", {"help": e.help()}, "{help}"
|
||||
)
|
||||
return 1
|
||||
@ -98,4 +98,4 @@ class TelemetryTest(MachCommandBase):
|
||||
kwargs[
|
||||
"server_root"
|
||||
] = "toolkit/components/telemetry/tests/marionette/harness/www"
|
||||
return run_telemetry(tests, topsrcdir=self.topsrcdir, **kwargs)
|
||||
return run_telemetry(tests, topsrcdir=command_context.topsrcdir, **kwargs)
|
||||
|
@ -155,17 +155,19 @@ host_fetches = {
|
||||
|
||||
@CommandProvider
|
||||
class MachBrowsertime(MachCommandBase):
|
||||
def artifact_cache_path(self):
|
||||
def artifact_cache_path(self, command_context):
|
||||
r"""Downloaded artifacts will be kept here."""
|
||||
# The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
|
||||
return mozpath.join(self._mach_context.state_dir, "cache", "browsertime")
|
||||
return mozpath.join(
|
||||
command_context._mach_context.state_dir, "cache", "browsertime"
|
||||
)
|
||||
|
||||
def state_path(self):
|
||||
def state_path(self, command_context):
|
||||
r"""Unpacked artifacts will be kept here."""
|
||||
# The convention is $MOZBUILD_STATE_PATH/$FEATURE.
|
||||
return mozpath.join(self._mach_context.state_dir, "browsertime")
|
||||
return mozpath.join(command_context._mach_context.state_dir, "browsertime")
|
||||
|
||||
def setup_prerequisites(self):
|
||||
def setup_prerequisites(self, command_context):
|
||||
r"""Install browsertime and visualmetrics.py prerequisites."""
|
||||
|
||||
from mozbuild.action.tooltool import unpack_file
|
||||
@ -191,7 +193,9 @@ class MachBrowsertime(MachCommandBase):
|
||||
|
||||
# Download the visualmetrics.py requirements.
|
||||
artifact_cache = ArtifactCache(
|
||||
self.artifact_cache_path(), log=self.log, skip_cache=False
|
||||
self.artifact_cache_path(command_context),
|
||||
log=command_context.log,
|
||||
skip_cache=False,
|
||||
)
|
||||
|
||||
fetches = host_fetches[host_platform()]
|
||||
@ -202,9 +206,9 @@ class MachBrowsertime(MachCommandBase):
|
||||
if fetch.get("unpack", True):
|
||||
cwd = os.getcwd()
|
||||
try:
|
||||
mkdir(self.state_path())
|
||||
os.chdir(self.state_path())
|
||||
self.log(
|
||||
mkdir(self.state_path(command_context))
|
||||
os.chdir(self.state_path(command_context))
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"browsertime",
|
||||
{"path": archive},
|
||||
@ -215,14 +219,20 @@ class MachBrowsertime(MachCommandBase):
|
||||
# Windows archive does not contain a subfolder
|
||||
# so we make one for it here
|
||||
mkdir(fetch.get("path"))
|
||||
os.chdir(os.path.join(self.state_path(), fetch.get("path")))
|
||||
os.chdir(
|
||||
os.path.join(
|
||||
self.state_path(command_context), fetch.get("path")
|
||||
)
|
||||
)
|
||||
unpack_file(archive)
|
||||
os.chdir(self.state_path())
|
||||
os.chdir(self.state_path(command_context))
|
||||
else:
|
||||
unpack_file(archive)
|
||||
|
||||
# Make sure the expected path exists after extraction
|
||||
path = os.path.join(self.state_path(), fetch.get("path"))
|
||||
path = os.path.join(
|
||||
self.state_path(command_context), fetch.get("path")
|
||||
)
|
||||
if not os.path.exists(path):
|
||||
raise Exception("Cannot find an extracted directory: %s" % path)
|
||||
|
||||
@ -246,19 +256,21 @@ class MachBrowsertime(MachCommandBase):
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
def setup(self, should_clobber=False, new_upstream_url=""):
|
||||
def setup(self, command_context, should_clobber=False, new_upstream_url=""):
|
||||
r"""Install browsertime and visualmetrics.py prerequisites and the Node.js package."""
|
||||
|
||||
sys.path.append(mozpath.join(self.topsrcdir, "tools", "lint", "eslint"))
|
||||
sys.path.append(
|
||||
mozpath.join(command_context.topsrcdir, "tools", "lint", "eslint")
|
||||
)
|
||||
import setup_helper
|
||||
|
||||
if not new_upstream_url:
|
||||
self.setup_prerequisites()
|
||||
self.setup_prerequisites(command_context)
|
||||
|
||||
if new_upstream_url:
|
||||
package_json_path = os.path.join(BROWSERTIME_ROOT, "package.json")
|
||||
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"browsertime",
|
||||
{
|
||||
@ -299,7 +311,7 @@ class MachBrowsertime(MachCommandBase):
|
||||
os.environ["CHROMEDRIVER_SKIP_DOWNLOAD"] = "true"
|
||||
os.environ["GECKODRIVER_SKIP_DOWNLOAD"] = "true"
|
||||
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"browsertime",
|
||||
{"package_json": mozpath.join(BROWSERTIME_ROOT, "package.json")},
|
||||
@ -319,19 +331,19 @@ class MachBrowsertime(MachCommandBase):
|
||||
if new_upstream_url or AUTOMATION:
|
||||
return 0
|
||||
|
||||
return self.check()
|
||||
return self.check(command_context)
|
||||
|
||||
def node(self, args):
|
||||
def node(self, command_context, args):
|
||||
r"""Invoke node (interactively) with the given arguments."""
|
||||
return self.run_process(
|
||||
return command_context.run_process(
|
||||
[node_path()] + args,
|
||||
append_env=self.append_env(),
|
||||
append_env=self.append_env(command_context),
|
||||
pass_thru=True, # Allow user to run Node interactively.
|
||||
ensure_exit_code=False, # Don't throw on non-zero exit code.
|
||||
cwd=mozpath.join(self.topsrcdir),
|
||||
cwd=mozpath.join(command_context.topsrcdir),
|
||||
)
|
||||
|
||||
def append_env(self, append_path=True):
|
||||
def append_env(self, command_context, append_path=True):
|
||||
fetches = host_fetches[host_platform()]
|
||||
|
||||
# Ensure that bare `ffmpeg` and ImageMagick commands
|
||||
@ -339,12 +351,14 @@ class MachBrowsertime(MachCommandBase):
|
||||
# script doesn't take these as configuration, so we do this (for now).
|
||||
# We should update the script itself to accept this configuration.
|
||||
path = os.environ.get("PATH", "").split(os.pathsep) if append_path else []
|
||||
path_to_ffmpeg = mozpath.join(self.state_path(), fetches["ffmpeg"]["path"])
|
||||
path_to_ffmpeg = mozpath.join(
|
||||
self.state_path(command_context), fetches["ffmpeg"]["path"]
|
||||
)
|
||||
|
||||
path_to_imagemagick = None
|
||||
if "ImageMagick" in fetches:
|
||||
path_to_imagemagick = mozpath.join(
|
||||
self.state_path(), fetches["ImageMagick"]["path"]
|
||||
self.state_path(command_context), fetches["ImageMagick"]["path"]
|
||||
)
|
||||
|
||||
if path_to_imagemagick:
|
||||
@ -352,7 +366,7 @@ class MachBrowsertime(MachCommandBase):
|
||||
# want to ensure that our ffmpeg goes first, just in case.
|
||||
path.insert(
|
||||
0,
|
||||
self.state_path()
|
||||
self.state_path(command_context)
|
||||
if host_platform().startswith("win")
|
||||
else mozpath.join(path_to_imagemagick, "bin"),
|
||||
) # noqa
|
||||
@ -403,7 +417,7 @@ class MachBrowsertime(MachCommandBase):
|
||||
#
|
||||
# Our fork of browsertime supports a `PYTHON` environment variable
|
||||
# that points to the exact python executable to use.
|
||||
"PYTHON": self.virtualenv_manager.python_path,
|
||||
"PYTHON": command_context.virtualenv_manager.python_path,
|
||||
}
|
||||
|
||||
if path_to_imagemagick:
|
||||
@ -419,7 +433,7 @@ class MachBrowsertime(MachCommandBase):
|
||||
|
||||
return append_env
|
||||
|
||||
def _need_install(self, package):
|
||||
def _need_install(self, command_context, package):
|
||||
from pip._internal.req.constructors import install_req_from_line
|
||||
|
||||
req = install_req_from_line(package)
|
||||
@ -427,38 +441,41 @@ class MachBrowsertime(MachCommandBase):
|
||||
if req.satisfied_by is None:
|
||||
return True
|
||||
venv_site_lib = os.path.abspath(
|
||||
os.path.join(self.virtualenv_manager.bin_path, "..", "lib")
|
||||
os.path.join(command_context.virtualenv_manager.bin_path, "..", "lib")
|
||||
)
|
||||
site_packages = os.path.abspath(req.satisfied_by.location)
|
||||
return not site_packages.startswith(venv_site_lib)
|
||||
|
||||
def activate_virtualenv(self, *args, **kwargs):
|
||||
def activate_browsertime_virtualenv(self, command_context, *args, **kwargs):
|
||||
r"""Activates virtualenv.
|
||||
|
||||
This function will also install Pillow and pyssim if needed.
|
||||
It will raise an error in case the install failed.
|
||||
"""
|
||||
MachCommandBase.activate_virtualenv(self, *args, **kwargs)
|
||||
MachCommandBase.activate_virtualenv(command_context, *args, **kwargs)
|
||||
|
||||
# installing Python deps on the fly
|
||||
for dep in ("Pillow==%s" % PILLOW_VERSION, "pyssim==%s" % PYSSIM_VERSION):
|
||||
if self._need_install(dep):
|
||||
self.virtualenv_manager._run_pip(["install", dep])
|
||||
if self._need_install(command_context, dep):
|
||||
command_context.virtualenv_manager._run_pip(["install", dep])
|
||||
|
||||
def check(self):
|
||||
def check(self, command_context):
|
||||
r"""Run `visualmetrics.py --check`."""
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
|
||||
args = ["--check"]
|
||||
status = self.run_process(
|
||||
[self.virtualenv_manager.python_path, visualmetrics_path()] + args,
|
||||
status = command_context.run_process(
|
||||
[command_context.virtualenv_manager.python_path, visualmetrics_path()]
|
||||
+ args,
|
||||
# For --check, don't allow user's path to interfere with
|
||||
# path testing except on Linux, where ImageMagick needs to
|
||||
# be installed manually.
|
||||
append_env=self.append_env(append_path=host_platform().startswith("linux")),
|
||||
append_env=self.append_env(
|
||||
command_context, append_path=host_platform().startswith("linux")
|
||||
),
|
||||
pass_thru=True,
|
||||
ensure_exit_code=False, # Don't throw on non-zero exit code.
|
||||
cwd=mozpath.join(self.topsrcdir),
|
||||
cwd=mozpath.join(command_context.topsrcdir),
|
||||
)
|
||||
|
||||
sys.stdout.flush()
|
||||
@ -468,15 +485,15 @@ class MachBrowsertime(MachCommandBase):
|
||||
return status
|
||||
|
||||
# Avoid logging the command (and, on Windows, the environment).
|
||||
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
command_context.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
print("browsertime version:", end=" ")
|
||||
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
return self.node([browsertime_path()] + ["--version"])
|
||||
return self.node(command_context, [browsertime_path()] + ["--version"])
|
||||
|
||||
def extra_default_args(self, args=[]):
|
||||
def extra_default_args(self, command_context, args=[]):
|
||||
# Add Mozilla-specific default arguments. This is tricky because browsertime is quite
|
||||
# loose about arguments; repeat arguments are generally accepted but then produce
|
||||
# difficult to interpret type errors.
|
||||
@ -527,15 +544,17 @@ class MachBrowsertime(MachCommandBase):
|
||||
|
||||
if not specifies_binaryPath:
|
||||
try:
|
||||
extra_args.extend(("--firefox.binaryPath", self.get_binary_path()))
|
||||
extra_args.extend(
|
||||
("--firefox.binaryPath", command_context.get_binary_path())
|
||||
)
|
||||
except BinaryNotFoundException as e:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"browsertime",
|
||||
{"error": str(e)},
|
||||
"ERROR: {error}",
|
||||
)
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"browsertime",
|
||||
{},
|
||||
@ -545,7 +564,7 @@ class MachBrowsertime(MachCommandBase):
|
||||
return 1
|
||||
|
||||
if extra_args:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.DEBUG,
|
||||
"browsertime",
|
||||
{"extra_args": extra_args},
|
||||
@ -554,9 +573,11 @@ class MachBrowsertime(MachCommandBase):
|
||||
|
||||
return extra_args
|
||||
|
||||
def _verify_node_install(self):
|
||||
def _verify_node_install(self, command_context):
|
||||
# check if Node is installed
|
||||
sys.path.append(mozpath.join(self.topsrcdir, "tools", "lint", "eslint"))
|
||||
sys.path.append(
|
||||
mozpath.join(command_context.topsrcdir, "tools", "lint", "eslint")
|
||||
)
|
||||
import setup_helper
|
||||
|
||||
with silence():
|
||||
@ -614,13 +635,13 @@ class MachBrowsertime(MachCommandBase):
|
||||
check=False,
|
||||
browsertime_help=False,
|
||||
):
|
||||
self._set_log_level(verbose)
|
||||
command_context._set_log_level(verbose)
|
||||
|
||||
# Output a message before going further to make sure the
|
||||
# user knows that this tool is unsupported by the perftest
|
||||
# team and point them to our supported tools. Pause a bit to
|
||||
# make sure the user sees this message.
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"browsertime",
|
||||
{},
|
||||
@ -637,21 +658,21 @@ class MachBrowsertime(MachCommandBase):
|
||||
time.sleep(5)
|
||||
|
||||
if update_upstream_url:
|
||||
return self.setup(new_upstream_url=update_upstream_url)
|
||||
return self.setup(command_context, new_upstream_url=update_upstream_url)
|
||||
elif setup:
|
||||
return self.setup(should_clobber=clobber)
|
||||
return self.setup(command_context, should_clobber=clobber)
|
||||
else:
|
||||
if not self._verify_node_install():
|
||||
if not self._verify_node_install(command_context):
|
||||
return 1
|
||||
|
||||
if check:
|
||||
return self.check()
|
||||
return self.check(command_context)
|
||||
|
||||
if browsertime_help:
|
||||
args.append("--help")
|
||||
|
||||
self.activate_virtualenv()
|
||||
default_args = self.extra_default_args(args)
|
||||
self.activate_browsertime_virtualenv(command_context)
|
||||
default_args = self.extra_default_args(command_context, args)
|
||||
if default_args == 1:
|
||||
return 1
|
||||
return self.node([browsertime_path()] + default_args + args)
|
||||
return self.node(command_context, [browsertime_path()] + default_args + args)
|
||||
|
@ -71,25 +71,27 @@ class MachCommands(MachCommandBase):
|
||||
)
|
||||
def lint(self, command_context, *runargs, **lintargs):
|
||||
"""Run linters."""
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
from mozlint import cli, parser
|
||||
|
||||
try:
|
||||
buildargs = {}
|
||||
buildargs["substs"] = copy.deepcopy(dict(self.substs))
|
||||
buildargs["defines"] = copy.deepcopy(dict(self.defines))
|
||||
buildargs["topobjdir"] = self.topobjdir
|
||||
buildargs["substs"] = copy.deepcopy(dict(command_context.substs))
|
||||
buildargs["defines"] = copy.deepcopy(dict(command_context.defines))
|
||||
buildargs["topobjdir"] = command_context.topobjdir
|
||||
lintargs.update(buildargs)
|
||||
except BuildEnvironmentNotFoundException:
|
||||
pass
|
||||
|
||||
lintargs.setdefault("root", self.topsrcdir)
|
||||
lintargs.setdefault("root", command_context.topsrcdir)
|
||||
lintargs["exclude"] = get_global_excludes(lintargs["root"])
|
||||
lintargs["config_paths"].insert(0, here)
|
||||
lintargs["virtualenv_bin_path"] = self.virtualenv_manager.bin_path
|
||||
lintargs["virtualenv_manager"] = self.virtualenv_manager
|
||||
lintargs["virtualenv_bin_path"] = command_context.virtualenv_manager.bin_path
|
||||
lintargs["virtualenv_manager"] = command_context.virtualenv_manager
|
||||
for path in EXCLUSION_FILES:
|
||||
parser.GLOBAL_SUPPORT_FILES.append(os.path.join(self.topsrcdir, path))
|
||||
parser.GLOBAL_SUPPORT_FILES.append(
|
||||
os.path.join(command_context.topsrcdir, path)
|
||||
)
|
||||
return cli.run(*runargs, **lintargs)
|
||||
|
||||
@Command(
|
||||
@ -125,9 +127,9 @@ class MachCommands(MachCommandBase):
|
||||
help="Extra args that will be forwarded to eslint.",
|
||||
)
|
||||
def eslint(self, command_context, paths, extra_args=[], **kwargs):
|
||||
self._mach_context.commands.dispatch(
|
||||
command_context._mach_context.commands.dispatch(
|
||||
"lint",
|
||||
self._mach_context,
|
||||
command_context._mach_context,
|
||||
linters=["eslint"],
|
||||
paths=paths,
|
||||
argv=extra_args,
|
||||
@ -158,6 +160,10 @@ class MachCommands(MachCommandBase):
|
||||
kwargs["linters"] = list(linters)
|
||||
|
||||
kwargs["fix"] = True
|
||||
self._mach_context.commands.dispatch(
|
||||
"lint", self._mach_context, paths=paths, argv=extra_args, **kwargs
|
||||
command_context._mach_context.commands.dispatch(
|
||||
"lint",
|
||||
command_context._mach_context,
|
||||
paths=paths,
|
||||
argv=extra_args,
|
||||
**kwargs
|
||||
)
|
||||
|
@ -77,7 +77,7 @@ class BustedProvider(MachCommandBase):
|
||||
|
||||
if (
|
||||
against != "general"
|
||||
and against not in self._mach_context.commands.command_handlers
|
||||
and against not in command_context._mach_context.commands.command_handlers
|
||||
):
|
||||
print(
|
||||
"%s is not a valid value for `against`. `against` must be "
|
||||
@ -95,10 +95,12 @@ class BustedProvider(MachCommandBase):
|
||||
|
||||
# Look up the file implementing that command, then cross-refernce
|
||||
# moz.build files to get the product/component.
|
||||
handler = self._mach_context.commands.command_handlers[against]
|
||||
handler = command_context._mach_context.commands.command_handlers[against]
|
||||
method = getattr(handler.cls, handler.method)
|
||||
sourcefile = mozpath.relpath(inspect.getsourcefile(method), self.topsrcdir)
|
||||
reader = self.mozbuild_reader(config_mode="empty")
|
||||
sourcefile = mozpath.relpath(
|
||||
inspect.getsourcefile(method), command_context.topsrcdir
|
||||
)
|
||||
reader = command_context.mozbuild_reader(config_mode="empty")
|
||||
try:
|
||||
res = reader.files_info([sourcefile])[sourcefile]["BUG_COMPONENT"]
|
||||
product, component = res.product, res.component
|
||||
@ -439,7 +441,7 @@ class MozregressionCommand(MachCommandBase):
|
||||
parser=mozregression_create_parser,
|
||||
)
|
||||
def run(self, command_context, **options):
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
mozregression = PypiBasedTool("mozregression")
|
||||
mozregression.run(**options)
|
||||
|
||||
@ -456,11 +458,11 @@ class NodeCommands(MachCommandBase):
|
||||
from mozbuild.nodeutil import find_node_executable
|
||||
|
||||
# Avoid logging the command
|
||||
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
command_context.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
|
||||
node_path, _ = find_node_executable()
|
||||
|
||||
return self.run_process(
|
||||
return command_context.run_process(
|
||||
[node_path] + args,
|
||||
pass_thru=True, # Allow user to run Node interactively.
|
||||
ensure_exit_code=False, # Don't throw on non-zero exit code.
|
||||
@ -476,7 +478,7 @@ class NodeCommands(MachCommandBase):
|
||||
from mozbuild.nodeutil import find_npm_executable
|
||||
|
||||
# Avoid logging the command
|
||||
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
command_context.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
|
||||
import os
|
||||
|
||||
@ -492,7 +494,7 @@ class NodeCommands(MachCommandBase):
|
||||
path = os.path.abspath(os.path.dirname(npm_path))
|
||||
os.environ["PATH"] = "{}:{}".format(path, os.environ["PATH"])
|
||||
|
||||
return self.run_process(
|
||||
return command_context.run_process(
|
||||
[npm_path, "--scripts-prepend-node-path=auto"] + args,
|
||||
pass_thru=True, # Avoid eating npm output/error messages
|
||||
ensure_exit_code=False, # Don't throw on non-zero exit code.
|
||||
@ -522,18 +524,18 @@ class LogspamCommand(MachCommandBase):
|
||||
|
||||
@SubCommand("logspam", "report", parser=partial(logspam_create_parser, "report"))
|
||||
def report(self, command_context, **options):
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
logspam = PypiBasedTool("logspam")
|
||||
logspam.run(command="report", **options)
|
||||
|
||||
@SubCommand("logspam", "bisect", parser=partial(logspam_create_parser, "bisect"))
|
||||
def bisect(self, command_context, **options):
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
logspam = PypiBasedTool("logspam")
|
||||
logspam.run(command="bisect", **options)
|
||||
|
||||
@SubCommand("logspam", "file", parser=partial(logspam_create_parser, "file"))
|
||||
def create(self, command_context, **options):
|
||||
self.activate_virtualenv()
|
||||
command_context.activate_virtualenv()
|
||||
logspam = PypiBasedTool("logspam")
|
||||
logspam.run(command="file", **options)
|
||||
|
@ -116,10 +116,12 @@ class Documentation(MachCommandBase):
|
||||
):
|
||||
|
||||
# TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
|
||||
sys.path.append(mozpath.join(self.topsrcdir, "tools", "lint", "eslint"))
|
||||
sys.path.append(
|
||||
mozpath.join(command_context.topsrcdir, "tools", "lint", "eslint")
|
||||
)
|
||||
import setup_helper
|
||||
|
||||
setup_helper.set_project_root(self.topsrcdir)
|
||||
setup_helper.set_project_root(command_context.topsrcdir)
|
||||
|
||||
if not setup_helper.check_node_executables_valid():
|
||||
return 1
|
||||
@ -129,15 +131,15 @@ class Documentation(MachCommandBase):
|
||||
# Set the path so that Sphinx can find jsdoc, unfortunately there isn't
|
||||
# a way to pass this to Sphinx itself at the moment.
|
||||
os.environ["PATH"] = (
|
||||
mozpath.join(self.topsrcdir, "node_modules", ".bin")
|
||||
mozpath.join(command_context.topsrcdir, "node_modules", ".bin")
|
||||
+ os.pathsep
|
||||
+ self._node_path()
|
||||
+ os.pathsep
|
||||
+ os.environ["PATH"]
|
||||
)
|
||||
|
||||
self.activate_virtualenv()
|
||||
self.virtualenv_manager.install_pip_requirements(
|
||||
command_context.activate_virtualenv()
|
||||
command_context.virtualenv_manager.install_pip_requirements(
|
||||
os.path.join(here, "requirements.txt")
|
||||
)
|
||||
|
||||
@ -147,10 +149,10 @@ class Documentation(MachCommandBase):
|
||||
|
||||
unique_id = "%s/%s" % (self.project(), str(uuid.uuid1()))
|
||||
|
||||
outdir = outdir or os.path.join(self.topobjdir, "docs")
|
||||
outdir = outdir or os.path.join(command_context.topobjdir, "docs")
|
||||
savedir = os.path.join(outdir, fmt)
|
||||
|
||||
path = path or self.topsrcdir
|
||||
path = path or command_context.topsrcdir
|
||||
path = os.path.normpath(os.path.abspath(path))
|
||||
|
||||
docdir = self._find_doc_dir(path)
|
||||
@ -393,7 +395,12 @@ class Documentation(MachCommandBase):
|
||||
for handler in Registrar.command_handlers.values()
|
||||
if handler.metrics_path is not None
|
||||
]
|
||||
args.extend([os.path.join(self.topsrcdir, path) for path in set(metrics_paths)])
|
||||
args.extend(
|
||||
[
|
||||
os.path.join(command_context.topsrcdir, path)
|
||||
for path in set(metrics_paths)
|
||||
]
|
||||
)
|
||||
subprocess.check_call(args)
|
||||
|
||||
|
||||
|
@ -31,7 +31,7 @@ class PhabricatorCommandProvider(MachCommandBase):
|
||||
|
||||
existing = mozfile.which("moz-phab")
|
||||
if existing and not force:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"already_installed",
|
||||
{},
|
||||
@ -44,7 +44,7 @@ class PhabricatorCommandProvider(MachCommandBase):
|
||||
# if pip3 is missing.
|
||||
pip3 = mozfile.which("pip3")
|
||||
if not pip3:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"pip3_not_installed",
|
||||
{},
|
||||
@ -74,7 +74,7 @@ class PhabricatorCommandProvider(MachCommandBase):
|
||||
|
||||
else:
|
||||
# Unsupported, default to --user.
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.WARNING,
|
||||
"unsupported_platform",
|
||||
{},
|
||||
@ -88,7 +88,7 @@ class PhabricatorCommandProvider(MachCommandBase):
|
||||
# installation if we're not within one.
|
||||
command.append("--user")
|
||||
|
||||
self.log(logging.INFO, "run", {}, "Installing moz-phab")
|
||||
command_context.log(logging.INFO, "run", {}, "Installing moz-phab")
|
||||
subprocess.run(command)
|
||||
|
||||
# There isn't an elegant way of determining the CLI location of a pip-installed package.
|
||||
@ -110,7 +110,7 @@ class PhabricatorCommandProvider(MachCommandBase):
|
||||
).findall(info)
|
||||
|
||||
if len(potential_cli_paths) != 1:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.WARNING,
|
||||
"no_mozphab_console_script",
|
||||
{},
|
||||
|
@ -47,7 +47,7 @@ class MachCommands(MachCommandBase):
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
rapl = os.path.join(self.topobjdir, "dist", "bin", "rapl")
|
||||
rapl = os.path.join(command_context.topobjdir, "dist", "bin", "rapl")
|
||||
|
||||
interval = str(interval)
|
||||
|
||||
|
@ -70,13 +70,13 @@ class TryConfig:
|
||||
|
||||
@CommandProvider
|
||||
class TrySelect(MachCommandBase):
|
||||
def init(self):
|
||||
def init(self, command_context):
|
||||
from tryselect import push
|
||||
|
||||
push.MAX_HISTORY = self._mach_context.settings["try"]["maxhistory"]
|
||||
push.MAX_HISTORY = command_context._mach_context.settings["try"]["maxhistory"]
|
||||
|
||||
@memoize
|
||||
def presets(self):
|
||||
def presets(self, command_context):
|
||||
from tryselect.preset import MergedHandler
|
||||
|
||||
# Create our handler using both local and in-tree presets. The first
|
||||
@ -88,12 +88,16 @@ class TrySelect(MachCommandBase):
|
||||
else:
|
||||
preset_paths = [
|
||||
os.path.join(get_state_dir(), "try_presets.yml"),
|
||||
os.path.join(self.topsrcdir, "tools", "tryselect", "try_presets.yml"),
|
||||
os.path.join(
|
||||
command_context.topsrcdir, "tools", "tryselect", "try_presets.yml"
|
||||
),
|
||||
]
|
||||
|
||||
return MergedHandler(*preset_paths)
|
||||
|
||||
def handle_presets(self, preset_action=None, save=None, preset=None, **kwargs):
|
||||
def handle_presets(
|
||||
self, command_context, preset_action=None, save=None, preset=None, **kwargs
|
||||
):
|
||||
"""Handle preset related arguments.
|
||||
|
||||
This logic lives here so that the underlying selectors don't need
|
||||
@ -102,23 +106,25 @@ class TrySelect(MachCommandBase):
|
||||
"""
|
||||
from tryselect.util.dicttools import merge
|
||||
|
||||
user_presets = self.presets().handlers[0]
|
||||
user_presets = self.presets(command_context).handlers[0]
|
||||
if preset_action == "list":
|
||||
self.presets().list()
|
||||
self.presets(command_context).list()
|
||||
sys.exit()
|
||||
|
||||
if preset_action == "edit":
|
||||
user_presets.edit()
|
||||
sys.exit()
|
||||
|
||||
parser = self._mach_context.handler.parser
|
||||
subcommand = self._mach_context.handler.subcommand
|
||||
parser = command_context._mach_context.handler.parser
|
||||
subcommand = command_context._mach_context.handler.subcommand
|
||||
if "preset" not in parser.common_groups:
|
||||
return kwargs
|
||||
|
||||
default = parser.get_default
|
||||
if save:
|
||||
selector = subcommand or self._mach_context.settings["try"]["default"]
|
||||
selector = (
|
||||
subcommand or command_context._mach_context.settings["try"]["default"]
|
||||
)
|
||||
|
||||
# Only save non-default values for simplicity.
|
||||
kwargs = {k: v for k, v in kwargs.items() if v != default(k)}
|
||||
@ -127,13 +133,13 @@ class TrySelect(MachCommandBase):
|
||||
sys.exit()
|
||||
|
||||
if preset:
|
||||
if preset not in self.presets():
|
||||
self._mach_context.parser.error(
|
||||
if preset not in self.presets(command_context):
|
||||
command_context._mach_context.parser.error(
|
||||
"preset '{}' does not exist".format(preset)
|
||||
)
|
||||
|
||||
name = preset
|
||||
preset = self.presets()[name]
|
||||
preset = self.presets(command_context)[name]
|
||||
selector = preset.pop("selector")
|
||||
preset.pop("description", None) # description isn't used by any selectors
|
||||
|
||||
@ -162,12 +168,12 @@ class TrySelect(MachCommandBase):
|
||||
|
||||
return kwargs
|
||||
|
||||
def handle_try_config(self, **kwargs):
|
||||
def handle_try_config(self, command_context, **kwargs):
|
||||
from tryselect.util.dicttools import merge
|
||||
|
||||
to_validate = []
|
||||
kwargs.setdefault("try_config", {})
|
||||
for cls in self._mach_context.handler.parser.task_configs.values():
|
||||
for cls in command_context._mach_context.handler.parser.task_configs.values():
|
||||
try_config = cls.try_config(**kwargs)
|
||||
if try_config is not None:
|
||||
to_validate.append(cls)
|
||||
@ -182,14 +188,16 @@ class TrySelect(MachCommandBase):
|
||||
cls.validate(**kwargs)
|
||||
return kwargs
|
||||
|
||||
def run(self, **kwargs):
|
||||
kwargs = self.handle_presets(**kwargs)
|
||||
def run(self, command_context, **kwargs):
|
||||
kwargs = self.handle_presets(command_context, **kwargs)
|
||||
|
||||
if self._mach_context.handler.parser.task_configs:
|
||||
kwargs = self.handle_try_config(**kwargs)
|
||||
if command_context._mach_context.handler.parser.task_configs:
|
||||
kwargs = self.handle_try_config(command_context, **kwargs)
|
||||
|
||||
mod = importlib.import_module(
|
||||
"tryselect.selectors.{}".format(self._mach_context.handler.subcommand)
|
||||
"tryselect.selectors.{}".format(
|
||||
command_context._mach_context.handler.subcommand
|
||||
)
|
||||
)
|
||||
return mod.run(**kwargs)
|
||||
|
||||
@ -211,22 +219,22 @@ class TrySelect(MachCommandBase):
|
||||
default. Run |mach try auto --help| for more information on
|
||||
scheduling with the `auto` selector.
|
||||
"""
|
||||
self.init()
|
||||
subcommand = self._mach_context.handler.subcommand
|
||||
self.init(command_context)
|
||||
subcommand = command_context._mach_context.handler.subcommand
|
||||
# We do special handling of presets here so that `./mach try --preset foo`
|
||||
# works no matter what subcommand 'foo' was saved with.
|
||||
preset = kwargs["preset"]
|
||||
if preset:
|
||||
if preset not in self.presets():
|
||||
self._mach_context.handler.parser.error(
|
||||
if preset not in self.presets(command_context):
|
||||
command_context._mach_context.handler.parser.error(
|
||||
"preset '{}' does not exist".format(preset)
|
||||
)
|
||||
|
||||
subcommand = self.presets()[preset]["selector"]
|
||||
subcommand = self.presets(command_context)[preset]["selector"]
|
||||
|
||||
sub = subcommand or self._mach_context.settings["try"]["default"]
|
||||
return self._mach_context.commands.dispatch(
|
||||
"try", self._mach_context, subcommand=sub, argv=argv, **kwargs
|
||||
sub = subcommand or command_context._mach_context.settings["try"]["default"]
|
||||
return command_context._mach_context.commands.dispatch(
|
||||
"try", command_context._mach_context, subcommand=sub, argv=argv, **kwargs
|
||||
)
|
||||
|
||||
@SubCommand(
|
||||
@ -310,7 +318,7 @@ class TrySelect(MachCommandBase):
|
||||
For more detailed documentation, please see:
|
||||
https://firefox-source-docs.mozilla.org/tools/try/selectors/fuzzy.html
|
||||
"""
|
||||
self.init()
|
||||
self.init(command_context)
|
||||
if kwargs.pop("interactive"):
|
||||
kwargs["query"].append("INTERACTIVE")
|
||||
|
||||
@ -324,14 +332,14 @@ class TrySelect(MachCommandBase):
|
||||
kwargs_copy = kwargs.copy()
|
||||
kwargs_copy["push"] = False
|
||||
kwargs_copy["save"] = None
|
||||
kwargs["query"] = self.run(save_query=True, **kwargs_copy)
|
||||
kwargs["query"] = self.run(command_context, save_query=True, **kwargs_copy)
|
||||
if not kwargs["query"]:
|
||||
return
|
||||
|
||||
if kwargs.get("paths"):
|
||||
kwargs["test_paths"] = kwargs["paths"]
|
||||
|
||||
return self.run(**kwargs)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -347,14 +355,14 @@ class TrySelect(MachCommandBase):
|
||||
has been made, pressing the 'Push' button will automatically push the
|
||||
selection to try.
|
||||
"""
|
||||
self.init()
|
||||
self.activate_virtualenv()
|
||||
self.init(command_context)
|
||||
command_context.activate_virtualenv()
|
||||
path = os.path.join(
|
||||
"tools", "tryselect", "selectors", "chooser", "requirements.txt"
|
||||
)
|
||||
self.virtualenv_manager.install_pip_requirements(path, quiet=True)
|
||||
command_context.virtualenv_manager.install_pip_requirements(path, quiet=True)
|
||||
|
||||
return self.run(**kwargs)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -365,8 +373,8 @@ class TrySelect(MachCommandBase):
|
||||
parser=get_parser("auto"),
|
||||
)
|
||||
def try_auto(self, command_context, **kwargs):
|
||||
self.init()
|
||||
return self.run(**kwargs)
|
||||
self.init(command_context)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -375,8 +383,8 @@ class TrySelect(MachCommandBase):
|
||||
parser=get_parser("again"),
|
||||
)
|
||||
def try_again(self, command_context, **kwargs):
|
||||
self.init()
|
||||
return self.run(**kwargs)
|
||||
self.init(command_context)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -393,8 +401,8 @@ class TrySelect(MachCommandBase):
|
||||
via Treeherder's Add New Jobs feature, located in the per-push
|
||||
menu.
|
||||
"""
|
||||
self.init()
|
||||
return self.run(**kwargs)
|
||||
self.init(command_context)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -440,9 +448,9 @@ class TrySelect(MachCommandBase):
|
||||
(installable from mach vcs-setup).
|
||||
|
||||
"""
|
||||
self.init()
|
||||
self.init(command_context)
|
||||
try:
|
||||
if self.substs.get("MOZ_ARTIFACT_BUILDS"):
|
||||
if command_context.substs.get("MOZ_ARTIFACT_BUILDS"):
|
||||
kwargs["local_artifact_build"] = True
|
||||
except BuildEnvironmentNotFoundException:
|
||||
# If we don't have a build locally, we can't tell whether
|
||||
@ -450,12 +458,12 @@ class TrySelect(MachCommandBase):
|
||||
# command to succeed, if possible.
|
||||
pass
|
||||
|
||||
config_status = os.path.join(self.topobjdir, "config.status")
|
||||
config_status = os.path.join(command_context.topobjdir, "config.status")
|
||||
if (kwargs["paths"] or kwargs["tags"]) and not config_status:
|
||||
print(CONFIG_ENVIRONMENT_NOT_FOUND)
|
||||
sys.exit(1)
|
||||
|
||||
return self.run(**kwargs)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -465,8 +473,8 @@ class TrySelect(MachCommandBase):
|
||||
)
|
||||
def try_coverage(self, command_context, **kwargs):
|
||||
"""Select which tasks to use using coverage data."""
|
||||
self.init()
|
||||
return self.run(**kwargs)
|
||||
self.init(command_context)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -476,8 +484,8 @@ class TrySelect(MachCommandBase):
|
||||
)
|
||||
def try_release(self, command_context, **kwargs):
|
||||
"""Push the current tree to try, configured for a staging release."""
|
||||
self.init()
|
||||
return self.run(**kwargs)
|
||||
self.init(command_context)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
||||
@SubCommand(
|
||||
"try",
|
||||
@ -490,5 +498,5 @@ class TrySelect(MachCommandBase):
|
||||
|
||||
Requires VPN and shipit access.
|
||||
"""
|
||||
self.init()
|
||||
return self.run(**kwargs)
|
||||
self.init(command_context)
|
||||
return self.run(command_context, **kwargs)
|
||||
|
@ -91,7 +91,7 @@ class PullRequestImporter(MachCommandBase):
|
||||
break
|
||||
|
||||
if repository is None:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"unrecognized_repo",
|
||||
{},
|
||||
@ -100,7 +100,7 @@ class PullRequestImporter(MachCommandBase):
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"import_pr",
|
||||
{"pr_url": pull_request},
|
||||
@ -108,22 +108,24 @@ class PullRequestImporter(MachCommandBase):
|
||||
)
|
||||
dirty = [
|
||||
f
|
||||
for f in self.repository.get_changed_files(mode="all")
|
||||
for f in command_context.repository.get_changed_files(mode="all")
|
||||
if f.startswith(repository["path"])
|
||||
]
|
||||
if dirty:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"dirty_tree",
|
||||
repository,
|
||||
"Local {path} tree is dirty; aborting!",
|
||||
)
|
||||
sys.exit(1)
|
||||
target_dir = mozpath.join(self.topsrcdir, os.path.normpath(repository["path"]))
|
||||
target_dir = mozpath.join(
|
||||
command_context.topsrcdir, os.path.normpath(repository["path"])
|
||||
)
|
||||
|
||||
if bug_number is None:
|
||||
if bugzilla_token is None:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.WARNING,
|
||||
"no_token",
|
||||
{},
|
||||
@ -131,9 +133,11 @@ class PullRequestImporter(MachCommandBase):
|
||||
"be added to commit messages.",
|
||||
)
|
||||
else:
|
||||
bug_number = self._file_bug(bugzilla_token, repository, pr_number)
|
||||
bug_number = self._file_bug(
|
||||
command_context, bugzilla_token, repository, pr_number
|
||||
)
|
||||
elif bugzilla_token is not None:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.WARNING,
|
||||
"too_much_bug",
|
||||
{},
|
||||
@ -146,7 +150,7 @@ class PullRequestImporter(MachCommandBase):
|
||||
for patch in self._split_patches(
|
||||
pr_patch.content, bug_number, pull_request, reviewer
|
||||
):
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO,
|
||||
"commit_msg",
|
||||
patch,
|
||||
@ -159,19 +163,21 @@ class PullRequestImporter(MachCommandBase):
|
||||
patch_cmd.stdin.close()
|
||||
patch_cmd.wait()
|
||||
if patch_cmd.returncode != 0:
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.ERROR,
|
||||
"commit_fail",
|
||||
{},
|
||||
'Error applying diff from commit via "patch -p1 -s". Aborting...',
|
||||
)
|
||||
sys.exit(patch_cmd.returncode)
|
||||
self.repository.commit(
|
||||
command_context.repository.commit(
|
||||
patch["commit_msg"], patch["author"], patch["date"], [target_dir]
|
||||
)
|
||||
self.log(logging.INFO, "commit_pass", {}, "Committed successfully.")
|
||||
command_context.log(
|
||||
logging.INFO, "commit_pass", {}, "Committed successfully."
|
||||
)
|
||||
|
||||
def _file_bug(self, token, repo, pr_number):
|
||||
def _file_bug(self, command_context, token, repo, pr_number):
|
||||
import requests
|
||||
|
||||
bug = requests.post(
|
||||
@ -185,9 +191,9 @@ class PullRequestImporter(MachCommandBase):
|
||||
},
|
||||
)
|
||||
bug.raise_for_status()
|
||||
self.log(logging.DEBUG, "new_bug", {}, bug.content)
|
||||
command_context.log(logging.DEBUG, "new_bug", {}, bug.content)
|
||||
bugnumber = json.loads(bug.content)["id"]
|
||||
self.log(
|
||||
command_context.log(
|
||||
logging.INFO, "new_bug", {"bugnumber": bugnumber}, "Filed bug {bugnumber}"
|
||||
)
|
||||
return bugnumber
|
||||
|
Loading…
x
Reference in New Issue
Block a user