Bug 1653326 - [perfdocs] Add documentation about individual mozperftest tests to perfdocs r=sparky,perftest-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D86488
This commit is contained in:
Myeongjun Go 2020-09-09 17:58:39 +00:00
parent 787c7f4ce9
commit b53336d5a4
8 changed files with 452 additions and 31 deletions

View File

@ -95,7 +95,7 @@ class ScriptInfo(defaultdict):
def __str__(self):
"""Used to generate docs."""
d = {}
d = defaultdict(lambda: "N/A")
for field, value in self.items():
if field == "filename":
d[field] = self.script.name
@ -108,7 +108,7 @@ class ScriptInfo(defaultdict):
value = ", ".join(value)
d[field] = value
d["filename_underline"] = "-" * len(d["filename"])
d["filename_underline"] = "=" * len(d["filename"])
return _INFO % d
def __missing__(self, key):

View File

@ -4,5 +4,31 @@
---
name: mozperftest
manifest: None
static-only: True
suites: {}
static-only: False
suites:
netwerk/test/perf:
description: "Performance tests from the 'netwerk/test/perf' folder."
tests:
youtube-scroll: ""
facebook-scroll: ""
cloudflare: ""
g-search: ""
g-image: ""
lq-fetch: ""
youtube-noscroll: ""
testing/performance:
description: "Performance tests from the 'testing/performance' folder."
tests:
Politico Link: ""
BBC Link: ""
JSConf (cold): ""
VIEW: ""
main: ""
Facebook: ""
YouTube Link: ""
pageload: ""
JSConf (warm): ""
browser/base/content/test:
description: "Performance tests from the 'browser/base/content/test' folder."
tests:
Dom-size: ""

View File

@ -10,3 +10,9 @@ mozperftest
running
writing
developing
The following documents all testing we have for mozperftest.
If the owner does not specify the Usage and Description, they are marked as N/A.
{documentation}
If you have any questions, please see this `wiki page <https://wiki.mozilla.org/TestEngineering/Performance#Where_to_find_us>`_.

View File

@ -10,3 +10,348 @@ mozperftest
running
writing
developing
The following documents all testing we have for mozperftest.
If the owner does not specify the Usage and Description, they are marked as N/A.
browser/base/content/test
-------------------------
Performance tests from the 'browser/base/content/test' folder.
perftest_browser_xhtml_dom.js
=============================
Measures the size of the DOM
| Owner: Browser Front-end team
| Test Name: Dom-size
| Usage:
::
N/A
| Description:
N/A
netwerk/test/perf
-----------------
Performance tests from the 'netwerk/test/perf' folder.
perftest_http3_cloudflareblog.js
================================
User-journey live site test for cloudflare blog.
| Owner: Network Team
| Test Name: cloudflare
| Usage:
::
N/A
| Description:
N/A
perftest_http3_facebook_scroll.js
=================================
Measures the number of requests per second after a scroll.
| Owner: Network Team
| Test Name: facebook-scroll
| Usage:
::
N/A
| Description:
N/A
perftest_http3_google_image.js
==============================
Measures the number of images per second after a scroll.
| Owner: Network Team
| Test Name: g-image
| Usage:
::
N/A
| Description:
N/A
perftest_http3_google_search.js
===============================
User-journey live site test for google search
| Owner: Network Team
| Test Name: g-search
| Usage:
::
N/A
| Description:
N/A
perftest_http3_lucasquicfetch.js
================================
Measures the amount of time it takes to load a set of images.
| Owner: Network Team
| Test Name: lq-fetch
| Usage:
::
N/A
| Description:
N/A
perftest_http3_youtube_watch.js
===============================
Measures quality of the video being played.
| Owner: Network Team
| Test Name: youtube-noscroll
| Usage:
::
N/A
| Description:
N/A
perftest_http3_youtube_watch_scroll.js
======================================
Measures quality of the video being played.
| Owner: Network Team
| Test Name: youtube-scroll
| Usage:
::
N/A
| Description:
N/A
testing/performance
-------------------
Performance tests from the 'testing/performance' folder.
perftest_bbc_link.js
====================
Measures time to load BBC homepage
| Owner: Performance Team
| Test Name: BBC Link
| Usage:
::
N/A
| Description:
N/A
perftest_facebook.js
====================
Measures time to log in to Facebook
| Owner: Performance Team
| Test Name: Facebook
| Usage:
::
N/A
| Description:
N/A
perftest_jsconf_cold.js
=======================
Measures time to load JSConf page (cold)
| Owner: Performance Team
| Test Name: JSConf (cold)
| Usage:
::
N/A
| Description:
N/A
perftest_jsconf_warm.js
=======================
Measures time to load JSConf page (warm)
| Owner: Performance Team
| Test Name: JSConf (warm)
| Usage:
::
N/A
| Description:
N/A
perftest_politico_link.js
=========================
Measures time to load Politico homepage
| Owner: Performance Team
| Test Name: Politico Link
| Usage:
::
N/A
| Description:
N/A
perftest_android_view.js
========================
Measures cold process view time
| Owner: Performance Team
| Test Name: VIEW
| Usage:
::
./mach perftest testing/performance/perftest_android_view.js
--android-install-apk ~/fenix.v2.fennec-nightly.2020.04.22-arm32.apk
--hooks testing/performance/hooks_android_view.py --android-app-name
org.mozilla.fenix --perfherder-metrics processLaunchToNavStart
| Description:
This test launches the appropriate android app, simulating opening a
link through the VIEW intent workflow. The application is launched with
the intent action android.intent.action.VIEW loading a trivially
simple website. The reported metric is the time from process start to
navigationStart, reported as processLaunchToNavStart
perftest_youtube_link.js
========================
Measures time to load YouTube video
| Owner: Performance Team
| Test Name: YouTube Link
| Usage:
::
N/A
| Description:
N/A
perftest_android_main.js
========================
Measures the time from process start until the Fenix main activity
(HomeActivity) reports Fully Drawn
| Owner: Performance Team
| Test Name: main
| Usage:
::
./mach perftest testing/performance/perftest_android_main.js --android
--flavor mobile-browser --hooks
testing/performance/hooks_home_activity.py --perfherder
--android-app-name org.mozilla.fenix --android-activity .App
--android-install-apk ~/Downloads/fenix.apk --android-clear-logcat
--android-capture-logcat logcat --androidlog-first-timestamp ".*Start
proc.*org.mozilla.fenix.*.App.*" --androidlog-second-timestamp
".*Fully drawn.*org.mozilla.fenix.*" --androidlog-subtest-name "MAIN"
--androidlog
| Description:
This test launches Fenix to its main activity (HomeActivity). The
application logs "Fully Drawn" when the activity is drawn. Using the
android log transformer we measure the time from process start to this
event.
perftest_pageload.js
====================
Measures time to load mozilla page
| Owner: Performance Team
| Test Name: pageload
| Usage:
::
N/A
| Description:
N/A
If you have any questions, please see this `wiki page <https://wiki.mozilla.org/TestEngineering/Performance#Where_to_find_us>`_.

View File

@ -19,4 +19,7 @@ module.exports = { // eslint-disable-line
setUp,
tearDown,
test,
owner: "Performance Team",
name: "pageload",
description: "Measures time to load mozilla page",
};

View File

@ -5,10 +5,12 @@ from __future__ import absolute_import
import collections
import os
import pathlib
import re
from perfdocs.utils import read_yaml
from manifestparser import TestManifest
from mozperftest.test.browsertime.script import ScriptInfo
"""
This file is for framework specific gatherers since manifests
@ -33,6 +35,7 @@ class FrameworkGatherer(object):
self._urls = {}
self._manifest_path = ""
self._manifest = None
self.script_infos = {}
def get_manifest_path(self):
"""
@ -71,11 +74,10 @@ class FrameworkGatherer(object):
and paragraph as content mentioned.
:param title: title of the section
:param content: content of section paragraph
:param documentation: documentation object to add section to
:param type: type of the title heading
:param header_type: type of the title heading
"""
heading_map = {"H4": "-", "H5": "^"}
return [title, heading_map.get(type, "^") * len(title), content, ""]
heading_map = {"H3": "=", "H4": "-", "H5": "^"}
return [title, heading_map.get(header_type, "^") * len(title), content, ""]
class RaptorGatherer(FrameworkGatherer):
@ -147,10 +149,10 @@ class RaptorGatherer(FrameworkGatherer):
Returns a dictionary containing the tests in every suite ini file.
:return dict: A dictionary with the following structure: {
"suite_name": [
"suite_name": {
'raptor_test1',
'raptor_test2'
]
},
}
"""
if self._test_list:
@ -193,10 +195,62 @@ class RaptorGatherer(FrameworkGatherer):
+ ">`__"
]
def build_suite_section(self, title, content):
    """Build the RST lines for a Raptor suite heading.

    :param title: suite name; capitalized before being used as the title.
    :param content: the suite's description paragraph.
    :return list: section lines produced by _build_section_with_header
        at the H4 heading level.
    """
    heading = title.capitalize()
    return self._build_section_with_header(heading, content, header_type="H4")
class MozperftestGatherer(FrameworkGatherer):
"""
Gatherer for the Mozperftest framework.
"""
pass
def get_test_list(self):
    """
    Returns a dictionary containing the tests that start with perftest_*.

    Recursively scans ``self.workspace_dir`` for files matching
    ``perftest_*``, skipping VCS metadata and mozperftest's own test
    data, and groups each discovered script under the directory that
    contains it (relative to the workspace root). A ScriptInfo object
    is cached per test so build_test_description can render it later.

    :return dict: A dictionary with the following structure: {
        "suite_name": {
            'perftest_test1': '',
            'perftest_test2': '',
        },
    }
    """
    exclude_dir = [".hg", "mozperftest/tests/data"]
    for path in pathlib.Path(self.workspace_dir).rglob("perftest_*"):
        # Plain substring test: these exclusions are literal path
        # fragments, not regexes. re.search() previously treated "."
        # as "any character", so ".hg" also matched e.g. "xhg".
        if any(d in str(path) for d in exclude_dir):
            continue
        # Strip the workspace prefix literally. re.sub() treated
        # workspace_dir as a regex pattern, which breaks on paths
        # containing metacharacters (e.g. backslashes on Windows).
        suite_name = os.path.dirname(path).replace(self.workspace_dir, "", 1)

        # Cache the script's metadata keyed by its test name; the
        # test list maps each name to an empty description string.
        si = ScriptInfo(path)
        self.script_infos[si["name"]] = si
        self._test_list.setdefault(suite_name, {}).update({si["name"]: ""})
    return self._test_list
def build_test_description(self, title, test_description="", suite_name=""):
    """Render one test's ScriptInfo metadata as an RST fragment.

    :param title: test name; must be a key of ``self.script_infos``
        (populated by ``get_test_list``).
    :param test_description: unused here; kept for interface parity
        with the other gatherers' build_test_description methods.
    :param suite_name: unused here; kept for interface parity as well.
    :return list: a single-element list holding the formatted RST text.
    """
    result, tab_flag = "", False
    # str(ScriptInfo) produces the multi-line metadata text
    # (see ScriptInfo.__str__, which fills missing fields with N/A).
    desc = str(self.script_infos[title])
    # Metadata field labels; lines starting with one of these become
    # RST line-block lines ("| ..."). NOTE: the first two entries end
    # with a space on purpose so only "label: value" lines match.
    category = ("Owner: ", "Test Name: ", "Usage:", "Description:")
    for s in desc.split("\n"):
        if s.startswith(category):
            result += "| " + s + "\n"
            # A bare "Usage:"/"Description:" header terminates any
            # open literal block, so stop indenting and add a blank
            # separator line.
            if s in category[2:]:
                result += "\n"
                tab_flag = False
        else:
            if tab_flag and s:
                # Inside the "Usage:" literal block: indent content
                # so RST keeps it inside the block.
                result += " " + s + "\n"
            else:
                result += s + "\n"
        if s == category[2]:
            # Right after "Usage:", open an RST literal block ("::")
            # and start indenting the following lines.
            result += "::\n\n"
            tab_flag = True
    return [result]
def build_suite_section(self, title, content):
    """Build the RST lines for a mozperftest suite heading.

    The title is a directory path (e.g. ``netwerk/test/perf``) and is
    used verbatim as the section title, without capitalization.

    :param title: suite name (folder path) used as the section title.
    :param content: the suite's description paragraph.
    :return list: section lines produced by _build_section_with_header
        at the H4 heading level.
    """
    section_lines = self._build_section_with_header(
        title, content, header_type="H4"
    )
    return section_lines

View File

@ -58,19 +58,6 @@ class Generator(object):
}
"""
def _append_rst_section(title, content, documentation, type=None):
"""
Adds a section to the documentation with the title as the type mentioned
and paragraph as content mentioned.
:param title: title of the section
:param content: content of section paragraph
:param documentation: documentation object to add section to
:param type: type of the title heading
"""
heading_map = {"H4": "-", "H5": "^"}
heading_symbol = heading_map.get(type, "-")
documentation.extend([title, heading_symbol * len(title), content, ""])
# Using the verified `perfdocs_tree`, build up the documentation.
frameworks_info = {}
for framework in self._perfdocs_tree:
@ -86,13 +73,13 @@ class Generator(object):
for suite_name in sorted(suites.keys()):
suite_info = suites[suite_name]
# Add the suite with an H4 heading
_append_rst_section(
suite_name.capitalize(),
suite_info["description"],
documentation,
type="H4",
# Add the suite section
documentation.extend(
self._verifier._gatherer.framework_gatherers[
yaml_content["name"]
].build_suite_section(suite_name, suite_info["description"])
)
tests = suite_info.get("tests", {})
for test_name in sorted(tests.keys()):
documentation.extend(

View File

@ -210,7 +210,7 @@ class Verifier(object):
Recompute the description in case it's a file.
"""
desc_path = os.path.join(self.workspace_dir, desc)
if os.path.exists(desc_path):
if os.path.exists(desc_path) and os.path.isfile(desc_path):
with open(desc_path, "r") as f:
desc = f.readlines()
return desc