Bug 1638986 - Run 'mach talos-test' with python 3; r=sparky,perftest-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D94770
Geoff Brown 2020-10-29 00:23:51 +00:00
parent 8ee9f46d79
commit e83b64c0b7
21 changed files with 83 additions and 70 deletions

mach

@ -27,7 +27,6 @@ py2commands="
raptor
raptor-test
reftest
talos-test
telemetry-tests-client
test
web-platform-tests


@ -9,9 +9,9 @@ installation script for talos. This script:
from __future__ import absolute_import
import os
import six
import subprocess
import sys
import urllib2
try:
from subprocess import check_call as call
@ -47,7 +47,9 @@ def main(args=sys.argv[1:]):
process = subprocess.Popen(
[sys.executable, "-", "--system-site-packages", here], stdin=subprocess.PIPE
)
stdout, stderr = process.communicate(input=urllib2.urlopen(VIRTUALENV).read())
stdout, stderr = process.communicate(
input=six.moves.urllib.request.urlopen(VIRTUALENV).read()
)
# find the virtualenv's python
for i in ("bin", "Scripts"):


@ -8,6 +8,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
import six
import sys
import json
import socket
@ -98,7 +99,7 @@ class TalosRunner(MozbuildObject):
def write_config(self):
try:
config_file = open(self.config_file_path, "wb")
config_file.write(json.dumps(self.config))
config_file.write(six.ensure_binary(json.dumps(self.config)))
except IOError as e:
err_str = "Error writing to Talos Mozharness config file {0}:{1}"
print(err_str.format(self.config_file_path, str(e)))
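
Because the Mozharness config file is opened in binary mode ("wb"), Python 3 insists on bytes, while json.dumps returns str there; six.ensure_binary encodes the str to UTF-8 bytes on Python 3 and passes Python 2 str through unchanged. A small sketch of the same idea, using throwaway data and a hypothetical path:

import json
import six

config = {"suite": "talos", "cycles": 5}  # illustrative values, not the real Talos config
with open("example_config.json", "wb") as fh:  # hypothetical file name
    # str -> UTF-8 bytes on Python 3, no-op for Python 2 str, so the binary write works on both.
    fh.write(six.ensure_binary(json.dumps(config)))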


@ -10,4 +10,4 @@ mozrunner>=7.1.0
psutil>=3.1.1
simplejson>=2.1.1
requests>=2.9.1
wptserve>=2.0.0
wptserve>=3.0


@ -3,6 +3,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import six
import struct
from ctypes import byref, create_string_buffer, memmove, Union, c_double, c_longlong
from ctypes import windll
@ -69,7 +70,7 @@ def _getExpandedCounterPaths(processName, counterName):
paths = []
i = 0
path = ""
for j in range(0, pcchPathListLength.value):
for j in six.moves.range(0, pcchPathListLength.value):
c = struct.unpack_from("c", buffer, offset=j)[0]
if c == "\0":
if j == i:
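
Python 3 drops xrange, and six.moves.range papers over the difference: it is xrange on Python 2 and the built-in range on Python 3, so the loop never materializes a full list on either interpreter. A trivial illustration:

import six

total = 0
for j in six.moves.range(5):  # xrange on Python 2, range on Python 3
    total += j
print(total)  # 10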


@ -7,6 +7,7 @@ import argparse
import os
from mozlog.commandline import add_logging_group
import six
class _StopAction(argparse.Action):
@ -34,14 +35,14 @@ class _ListTests(_StopAction):
print("================\n")
test_class_names = [
(test_class.name(), test_class.description())
for test_class in test.test_dict().itervalues()
for test_class in six.itervalues(test.test_dict())
]
test_class_names.sort()
for name, description in test_class_names:
print(name)
print("-" * len(name))
print(description)
print # Appends a single blank line to the end
print() # Appends a single blank line to the end
parser.exit()
@ -55,7 +56,7 @@ class _ListSuite(_StopAction):
pattern = " %%-%ds (%%s)" % max_suite_name
for name in conf:
print(pattern % (name, ":".join(conf[name]["tests"])))
print
print()
parser.exit()
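
Two Python 3 changes meet in this command-line hunk: dict.itervalues() is gone, so six.itervalues(d) calls itervalues() on Python 2 and values() on Python 3, and print is now a function, so a bare "print" no longer emits a blank line. A short sketch with invented test names:

import six

tests = {"ts_paint": "startup test", "tp5n": "pageload test"}  # sample data only
for description in six.itervalues(tests):
    print(description)
print()  # prints the blank line that a bare "print" statement gave on Python 2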


@ -367,7 +367,7 @@ def get_config(argv=None):
for validate in CONF_VALIDATORS:
validate(config)
# remove None Values
for k, v in config.items():
for k, v in config.copy().items():
if v is None:
del config[k]
return config
@ -382,5 +382,5 @@ def get_configs(argv=None):
if __name__ == "__main__":
cfgs = get_configs()
print(cfgs[0])
print
print()
print(cfgs[1])
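
The config cleanup switches to config.copy().items() because, on Python 3, items() is a live view and deleting keys while iterating over it raises "RuntimeError: dictionary changed size during iteration"; iterating over a shallow copy keeps the deletions safe. A minimal reproduction of the pattern with made-up keys:

config = {"title": "talos", "develop": None, "cycles": None}  # example values

# Iterate over a copy so deleting from the original dict is legal on Python 3.
for k, v in config.copy().items():
    if v is None:
        del config[k]

print(config)  # {'title': 'talos'}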


@ -21,6 +21,7 @@ from talos import utils
from talos.gecko_profile import GeckoProfile
from talos.utils import TalosError, run_in_debug_mode
from talos import heavy
import six
here = os.path.abspath(os.path.dirname(__file__))
@ -71,7 +72,7 @@ class FFSetup(object):
def _init_env(self):
self.env = dict(os.environ)
for k, v in self.browser_config["env"].iteritems():
for k, v in six.iteritems(self.browser_config["env"]):
self.env[k] = str(v)
self.env["MOZ_CRASHREPORTER_NO_REPORT"] = "1"
if self.browser_config["symbols_path"]:
@ -157,14 +158,14 @@ class FFSetup(object):
# installing webextensions
webextensions_to_install = []
webextensions_folder = self.test_config.get("webextensions_folder", None)
if isinstance(webextensions_folder, basestring):
if isinstance(webextensions_folder, six.string_types):
folder = utils.interpolate(webextensions_folder)
for file in os.listdir(folder):
if file.endswith(".xpi"):
webextensions_to_install.append(os.path.join(folder, file))
webextensions = self.test_config.get("webextensions", None)
if isinstance(webextensions, basestring):
if isinstance(webextensions, six.string_types):
webextensions_to_install.append(webextensions)
if webextensions_to_install is not None:
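
basestring exists only on Python 2; six.string_types is (basestring,) there and (str,) on Python 3, and six.iteritems bridges the iteritems()/items() split in the same way. A compact sketch with a placeholder environment entry and a hypothetical folder name:

import os
import six

browser_env = {"MOZ_CRASHREPORTER_NO_REPORT": 1}  # sample entry, not the real browser_config
env = dict(os.environ)
for k, v in six.iteritems(browser_env):
    env[k] = str(v)  # environment values must be strings on both versions

folder = "webextensions"  # hypothetical value
if isinstance(folder, six.string_types):  # replaces isinstance(folder, basestring)
    print("would scan", folder)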


@ -1,6 +1,7 @@
from __future__ import absolute_import
import math
import six
"""
data filters:
@ -114,10 +115,10 @@ def median(series):
series = sorted(series)
if len(series) % 2:
# odd
return series[len(series) / 2]
return series[int(len(series) / 2)]
else:
# even
middle = len(series) / 2 # the higher of the middle 2, actually
middle = int(len(series) / 2) # the higher of the middle 2, actually
return 0.5 * (series[middle - 1] + series[middle])
@ -162,7 +163,7 @@ def dromaeo(series):
@register_filter
@define_filter
def dromaeo_chunks(series, size):
for i in range(0, len(series), size):
for i in six.moves.range(0, len(series), size):
yield series[i : i + size]
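
The median fix matters because / performs true division on Python 3, and series[len(series) / 2] then fails with "TypeError: list indices must be integers". The patch wraps the index in int(); floor division (//) is an equivalent spelling for a non-negative length. A quick check of the fixed behaviour:

def median(series):
    series = sorted(series)
    middle = len(series) // 2  # same value as int(len(series) / 2) here
    if len(series) % 2:
        return series[middle]                            # odd-length series
    return 0.5 * (series[middle - 1] + series[middle])   # even-length series

print(median([3, 1, 2]))     # 2
print(median([4, 1, 3, 2]))  # 2.5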


@ -11,6 +11,7 @@ from talos import utils
from talos import whitelist
from collections import OrderedDict
import six
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(__file__)))
@ -120,7 +121,7 @@ def write_output(outfilename, data):
try:
with open(outfilename, "w") as outfile:
outfile.write("[\n")
for idx, (key, value) in utils.indexed_items(data.iteritems()):
for idx, (key, value) in utils.indexed_items(six.iteritems(data)):
output = ' ["%s", "%s", "%s", "%s", %d, %d, %f]' % (
key[0],
key[1],


@ -14,6 +14,7 @@ import csv
import json
import os
import re
import six
from talos import output, utils, filter
@ -473,7 +474,7 @@ class BrowserLogResults(object):
# data is counters
header = row
continue
values = dict(zip(header, row))
values = dict(six.moves.zip(header, row))
# Format for talos
thread = values["thread"]
@ -505,7 +506,7 @@ class BrowserLogResults(object):
# other data is counters
header = row
continue
values = dict(zip(header, row))
values = dict(six.moves.zip(header, row))
for i, mainthread_counter in enumerate(mainthread_counters):
if int(values[mainthread_counter_keys[i]]) > 0:
counter_results.setdefault(mainthread_counter, []).append(


@ -7,10 +7,10 @@ from __future__ import absolute_import, print_function
import copy
import os
import six
import sys
import time
import traceback
import urllib
import mozinfo
import mozversion
@ -43,7 +43,7 @@ def useBaseTestDefaults(base, tests):
def set_tp_preferences(test, browser_config):
# sanity check pageloader values
# mandatory options: tpmanifest, tpcycles
if test["tpcycles"] not in range(1, 1000):
if test["tpcycles"] not in six.moves.range(1, 1000):
raise TalosError("pageloader cycles must be int 1 to 1,000")
if "tpmanifest" not in test:
raise TalosError("tpmanifest not found in test: %s" % test)
@ -129,7 +129,8 @@ def run_tests(config, browser_config):
test[path] = utils.interpolate(test[path])
if test.get("tpmanifest"):
test["tpmanifest"] = os.path.normpath(
"file:/%s" % (urllib.quote(test["tpmanifest"], "/\\t:\\"))
"file:/%s"
% (six.moves.urllib.parse.quote(test["tpmanifest"], "/\\t:\\"))
)
test["preferences"]["talos.tpmanifest"] = test["tpmanifest"]


@ -4,6 +4,7 @@ import argparse
import collections
import csv
import os
import six
import sys
from calendar import day_name
from datetime import datetime
@ -80,7 +81,7 @@ def generate_report(tuple_list, filepath, mode="variance"):
week_avgs.append(average)
outliers = is_normal(week_avgs)
for j in range(7):
for j in six.moves.range(7):
if j in outliers:
line[j + 1] = "**" + str(line[j + 1]) + "**"
@ -102,7 +103,7 @@ def is_normal(y):
outliers = []
# find a baseline for the week
if (min(y[0:4]) * limit) <= max(y[0:4]):
for i in range(1, 5):
for i in six.moves.range(1, 5):
if y[i] > (y[i - 1] * limit) or y[i] > (y[i + 1] * limit):
outliers.append(i)
continue
@ -112,7 +113,7 @@ def is_normal(y):
# look at weekends now
avg = sum(clean_week) / len(clean_week)
for i in range(5, 7):
for i in six.moves.range(5, 7):
# look for something outside of the 20% window
if (y[i] * 1.2) < avg or y[i] > (avg * 1.2):
outliers.append(i)
@ -128,11 +129,11 @@ def main():
tuple_list = get_all_test_tuples()
f = "report"
if args.platform:
tuple_list = filter(lambda x: x[4] == args.platform, tuple_list)
tuple_list = [x for x in tuple_list if x[4] == args.platform]
f += "-%s" % args.platform
if args.test:
tuple_list = filter(lambda x: x[3] == args.test, tuple_list)
tuple_list = [x for x in tuple_list if x[3] == args.test]
f += "-%s" % args.test
f += "-%s" % args.mode


@ -5,6 +5,7 @@ from __future__ import absolute_import
import pprint
import signal
import six
import sys
import time
import traceback
@ -82,6 +83,7 @@ class Reader(object):
self.proc = None
def __call__(self, line):
line = six.ensure_str(line)
if line.find("__endTimestamp") != -1:
self.got_end_timestamp = True
self.event.set()
@ -170,7 +172,7 @@ def run_browser(
kill_and_get_minidump(context, minidump_dir)
raise TalosError("timeout")
if reader.got_end_timestamp:
for i in range(1, wait_for_quit_timeout):
for i in six.moves.range(1, wait_for_quit_timeout):
if proc.wait(1) is not None:
break
if proc.poll() is None:
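
Under Python 3 the line handed to the output Reader can be bytes, in which case line.find("__endTimestamp") raises TypeError; six.ensure_str decodes bytes to str on Python 3 and leaves Python 2 str alone. A tiny sketch with a fabricated log line:

import six

line = b"__endTimestamp1603929831__"      # what a byte-oriented reader might deliver
line = six.ensure_str(line)               # bytes -> str on Python 3, no-op for Python 2 str
print(line.find("__endTimestamp") != -1)  # True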


@ -17,6 +17,7 @@ import json
import os
import platform
import shutil
import six
import subprocess
import sys
import time
@ -139,7 +140,7 @@ class TTest(object):
test_config, global_counters, browser_config.get("framework")
)
for i in range(test_config["cycles"]):
for i in six.moves.range(test_config["cycles"]):
time.sleep(0.25)
LOG.info(
"Running cycle %d/%d for %s test..."


@ -19,6 +19,7 @@ from talos.config import (
DEFAULTS,
)
from talos.test import PageloaderTest
import six
ORIGINAL_DEFAULTS = copy.deepcopy(DEFAULTS)
@ -308,7 +309,7 @@ class Test_get_config(object):
assert bool(config) is True
# no null values
null_keys = [key for key, val in config.iteritems() if val is None]
null_keys = [key for key, val in six.iteritems(config) if val is None]
assert len(null_keys) == 0
# expected keys are there


@ -8,10 +8,9 @@ from __future__ import absolute_import
import os
import platform
import re
import six
import string
import time
import urllib
import urlparse
from mozlog import get_proxy_logger
@ -96,13 +95,13 @@ def tokenize(string, start, end):
end,
len(_end),
)
for i in range(len(_start)):
for i in six.moves.range(len(_start)):
assert _end[i] > _start[i], "End token '%s' occurs before start token '%s'" % (
end,
start,
)
parts = []
for i in range(len(_start)):
for i in six.moves.range(len(_start)):
parts.append(string[_start[i] + len(start) : _end[i]])
return parts, _end[-1]
@ -119,7 +118,7 @@ def urlsplit(url, default_scheme="file"):
return ["file", "", url[len("file://") :], "", ""]
# split the URL and return a list
return [i for i in urlparse.urlsplit(url)]
return [i for i in six.moves.urllib.parse.urlsplit(url)]
def parse_pref(value):
@ -154,9 +153,9 @@ def GenerateBrowserCommandLine(
if url is not None:
# for non-pageloader/non-manifest tests the profiling info is added to the test url
if url.find("?") != -1:
url += "&" + urllib.urlencode(profiling_info)
url += "&" + six.moves.urllib.parse.urlencode(profiling_info)
else:
url += "?" + urllib.urlencode(profiling_info)
url += "?" + six.moves.urllib.parse.urlencode(profiling_info)
command_args.extend(url.split(" "))
# if there's a url i.e. startup test / non-manifest test, add it to the cmd line args
@ -171,7 +170,7 @@ def indexed_items(itr):
Generator that allows us to figure out which item is the last one so
that we can serialize this data properly
"""
prev_i, prev_val = 0, itr.next()
prev_i, prev_val = 0, next(itr)
for i, val in enumerate(itr, start=1):
yield prev_i, prev_val
prev_i, prev_val = i, val
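
Python 3 renames the iterator method from next() to __next__(), so itr.next() stops working; the built-in next(itr), used here and in the CSV readers changed later in this patch, is valid on both versions. A trivial illustration:

itr = iter(["first", "second", "third"])
first = next(itr)    # replaces itr.next(), which exists only on Python 2
print(first)         # first
print(list(itr))     # ['second', 'third']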


@ -9,6 +9,7 @@ import json
import os
import re
from talos import utils
import six
KEY_XRE = "{xre}"
DEFAULT_DURATION = 100.0
@ -57,7 +58,7 @@ class Whitelist:
filename = filename.lower()
filename.replace(" (x86)", "")
for path, subst in self.path_substitutions.iteritems():
for path, subst in six.iteritems(self.path_substitutions):
parts = filename.split(path)
if len(parts) >= 2:
if self.PRE_PROFILE == "" and subst == "{profile}":
@ -80,7 +81,7 @@ class Whitelist:
filename = "%s%s" % (subst, path.join(parts[1:]))
for old_name, new_name in self.name_substitutions.iteritems():
for old_name, new_name in six.iteritems(self.name_substitutions):
if isinstance(old_name, re._pattern_type):
filename = re.sub(old_name, new_name, filename)
else:
@ -92,7 +93,7 @@ class Whitelist:
def check(self, test, file_name_index, event_source_index=None):
errors = {}
for row_key in test.iterkeys():
for row_key in six.iterkeys(test):
filename = self.sanitize_filename(row_key[file_name_index])
if filename in self.listmap:
@ -116,7 +117,7 @@ class Whitelist:
def checkDuration(self, test, file_name_index, file_duration_index):
errors = {}
for idx, (row_key, row_value) in utils.indexed_items(test.iteritems()):
for idx, (row_key, row_value) in utils.indexed_items(six.iteritems(test)):
if row_value[file_duration_index] > DEFAULT_DURATION:
filename = self.sanitize_filename(row_key[file_name_index])
if (
@ -156,7 +157,7 @@ class Whitelist:
@staticmethod
def get_error_strings(errors):
error_strs = []
for filename, data in errors.iteritems():
for filename, data in six.iteritems(errors):
for datum in data:
error_strs.append(
"File '%s' was accessed and we were not"


@ -14,6 +14,7 @@ import subprocess
import sys
from talos.xtalos import xtalos
import six
EVENTNAME_INDEX = 0
PROCESS_INDEX = 2
@ -72,7 +73,7 @@ def filterOutHeader(data):
done = False
while not done:
try:
row = data.next()
row = next(data)
except StopIteration:
done = True
break
@ -380,7 +381,7 @@ def etlparser(
uploadFile(csvname)
output = "thread, stage, counter, value\n"
for cntr in sorted(io.iterkeys()):
for cntr in sorted(six.iterkeys(io)):
output += "%s, %s\n" % (", ".join(cntr), str(io[cntr]))
if outputFile:
fname = "%s_thread_stats%s" % os.path.splitext(outputFile)
@ -399,24 +400,26 @@ def etlparser(
# Filter out stages, threads, and whitelisted files that we're not
# interested in
filekeys = filter(
lambda x: (all_stages or x[2] == stages[0])
filekeys = [
x
for x in six.iterkeys(files)
if (all_stages or x[2] == stages[0])
and (all_threads or x[1].endswith("(main)"))
and (all_stages and x[2] != stages[0] or not checkWhitelist(x[0], whitelist)),
files.iterkeys(),
)
and (all_stages and x[2] != stages[0] or not checkWhitelist(x[0], whitelist))
]
if debug:
# in debug, we want stages = [startup+normal] and all threads, not just (main)
# we will use this data to upload fileIO info to blobber only for debug mode
outputData = filter(
lambda x: (all_stages or x[2] in [stages[0], stages[1]])
outputData = [
x
for x in six.iterkeys(files)
if (all_stages or x[2] in [stages[0], stages[1]])
and (
all_stages
and x[2] not in [stages[0], stages[1]]
or not checkWhitelist(x[0], whitelist)
),
files.iterkeys(),
)
)
]
else:
outputData = filekeys


@ -12,6 +12,7 @@ import os
import re
import subprocess
from uuid import UUID
import six
# This constant must match the event declared in
# toolkit/components/startup/mozprofilerprobe.mof
@ -58,7 +59,7 @@ class XPerfSession(object):
e.do_match(row)
class XPerfAttribute(object):
class XPerfAttribute(six.with_metaclass(ABCMeta, object)):
"""Base class for all attributes. Each attribute has one or more events
that are associated with it. When those events fire, the attribute
accumulates statistics for those events.
@ -68,8 +69,6 @@ class XPerfAttribute(object):
that persistent attributes are an exception to this (see __init__).
"""
__metaclass__ = ABCMeta
# Keys for the dict returned by get_results:
# Key whose value should be a dict containing any statistics that were
@ -302,7 +301,7 @@ class XPerfCounter(XPerfAttribute):
def accumulate(self, evt):
data = evt.get_whiteboard()
for (key, comp) in self.filters.iteritems():
for (key, comp) in six.iteritems(self.filters):
try:
testdata = data[key]
except KeyError:
@ -331,7 +330,7 @@ class XPerfCounter(XPerfAttribute):
)
if self.values:
msg += " with accumulated"
for (k, v) in self.values.iteritems():
for (k, v) in six.iteritems(self.values):
msg += " [[{!s}] == {!s}]".format((k), (v))
return msg
@ -416,7 +415,7 @@ class XPerfEvent(object):
return self.timestamp
class EventExpression(object):
class EventExpression(six.with_metaclass(ABCMeta, object)):
"""EventExpression is an optional layer that sits between attributes and
events, and allow the user to compose multiple events into a more complex
event. To achieve this, EventExpression implementations must implement both
@ -425,8 +424,6 @@ class EventExpression(object):
they present themselves as attributes to the events that run above them.
"""
__metaclass__ = ABCMeta
def __init__(self, events):
# Event expressions implement the attribute interface, so for each
# event, we set ourselves as the underlying attribute
@ -1034,7 +1031,7 @@ class XPerfFile(object):
while True:
try:
row = csvdata.next()
row = next(csvdata)
except StopIteration:
break
except csv.Error:
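
Setting __metaclass__ inside a class body is ignored by Python 3, so these abstract base classes would silently stop enforcing their abstract methods; six.with_metaclass(ABCMeta, object) builds a base that applies the metaclass under either syntax. A self-contained sketch with invented class names:

from abc import ABCMeta, abstractmethod
import six

class Attribute(six.with_metaclass(ABCMeta, object)):  # hypothetical stand-in
    @abstractmethod
    def accumulate(self, value):
        pass

class Counter(Attribute):
    def accumulate(self, value):
        return value + 1

print(Counter().accumulate(1))  # 2
# Attribute() would raise TypeError: can't instantiate an abstract class.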


@ -14,9 +14,8 @@ from __future__ import absolute_import, print_function
import json
import os
import re
import six
import sys
import urllib2
import urlparse
from optparse import OptionParser
@ -100,7 +99,7 @@ def get_filename_from_url(url):
"""
This returns the filename of the file we're trying to download
"""
parsed = urlparse.urlsplit(url.rstrip("/"))
parsed = six.moves.urllib.parse.urlsplit(url.rstrip("/"))
if parsed.path != "":
return parsed.path.rsplit("/", 1)[-1]
else:
@ -115,8 +114,8 @@ def download_file(url, path="", saveAs=None):
"""
It downloads a file from URL to the indicated path
"""
req = urllib2.Request(url)
f = urllib2.urlopen(req)
req = six.moves.urllib.request.Request(url)
f = six.moves.urllib.request.urlopen(req)
if path != "" and not os.path.isdir(path):
try:
os.makedirs(path)