diff --git a/browser/base/content/test/general/browser_bookmark_titles.js b/browser/base/content/test/general/browser_bookmark_titles.js
index 6fbd4763c79d..1f7082396979 100644
--- a/browser/base/content/test/general/browser_bookmark_titles.js
+++ b/browser/base/content/test/general/browser_bookmark_titles.js
@@ -29,7 +29,7 @@ add_task(function* () {
let [uri, title] = tests[i];
let promiseLoaded = promisePageLoaded(browser);
- content.location = uri;
+ BrowserTestUtils.loadURI(browser, uri);
yield promiseLoaded;
yield checkBookmark(uri, title);
}
@@ -53,12 +53,14 @@ add_task(function* () {
let [uri, title] = tests[0];
let promiseLoaded = promisePageLoaded(browser);
- content.location = uri;
+ BrowserTestUtils.loadURI(browser, uri);
yield promiseLoaded;
// The offline mode test is only good if the page failed to load.
- is(content.document.documentURI.substring(0, 14), 'about:neterror',
- "Offline mode successfully simulated network outage.");
+ yield ContentTask.spawn(browser, null, function() {
+ is(content.document.documentURI.substring(0, 14), 'about:neterror',
+ "Offline mode successfully simulated network outage.");
+ });
yield checkBookmark(uri, title);
gBrowser.removeCurrentTab();
diff --git a/browser/components/search/content/search.xml b/browser/components/search/content/search.xml
index 9208f87761b9..ca93bf46a44e 100644
--- a/browser/components/search/content/search.xml
+++ b/browser/components/search/content/search.xml
@@ -1066,6 +1066,7 @@
@@ -1377,7 +1378,8 @@
let button = document.createElementNS(kXULNS, "button");
let label = this.bundle.formatStringFromName("cmd_addFoundEngine",
[engine.title], 1);
- button.id = "searchbar-add-engine-" + engine.title.replace(/ /g, '-');
+ button.id = this.telemetryOrigin + "-add-engine-" +
+ engine.title.replace(/ /g, '-');
button.setAttribute("class", "addengine-item");
button.setAttribute("label", label);
button.setAttribute("pack", "start");
@@ -1442,9 +1444,12 @@
let height = rowCount * 33; // 32px per row, 1px border.
list.setAttribute("height", height + "px");
- // Ensure we can refer to the settings button by ID:
+ // Ensure we can refer to the settings buttons by ID:
let settingsEl = document.getAnonymousElementByAttribute(this, "anonid", "search-settings");
- settingsEl.id = this.id + "-anon-search-settings";
+ settingsEl.id = this.telemetryOrigin + "-anon-search-settings";
+ let compactSettingsEl = document.getAnonymousElementByAttribute(this, "anonid", "search-settings-compact");
+ compactSettingsEl.id = this.telemetryOrigin +
+ "-anon-search-settings-compact";
let dummyItems = enginesPerRow - (oneOffCount % enginesPerRow || enginesPerRow);
for (let i = 0; i < engines.length; ++i) {
@@ -1505,7 +1510,7 @@
@@ -1542,11 +1547,15 @@
else {
header.selectedIndex = this.query ? 1 : 0;
}
- this.setAttribute("aria-activedescendant", val.id);
+ if (this.textbox) {
+ this.textbox.setAttribute("aria-activedescendant", val.id);
+ }
} else {
val = null;
header.selectedIndex = this.query ? 1 : 0;
- this.removeAttribute("aria-activedescendant");
+ if (this.textbox) {
+ this.textbox.removeAttribute("aria-activedescendant");
+ }
}
if (aUpdateLogicallySelectedButton) {
@@ -1772,10 +1781,12 @@
if (!allowEmptySelection) {
// Wrap around the selection to the last one-off.
this.selectedButton = null;
- stopEvent = this.advanceSelection(false, true, true);
- if (stopEvent) {
- this.popup.selectedIndex = -1;
- }
+ this.popup.selectedIndex = -1;
+ // Call advanceSelection after setting selectedIndex so that
+ // screen readers see the newly selected one-off. Both trigger
+ // accessibility events.
+ this.advanceSelection(false, true, true);
+ stopEvent = true;
}
} else {
let firstButtonSelected =
@@ -1799,16 +1810,17 @@
// The autocomplete controller should handle this case.
} else if (this.popup.selectedIndex == numListItems - 1) {
this.selectedButton = null;
- stopEvent = this.advanceSelection(true, true, true);
- if (stopEvent) {
- stopEvent = !allowEmptySelection;
- if (this.textbox && typeof(textboxUserValue) == "string") {
- this.textbox.value = textboxUserValue;
- }
- if (!allowEmptySelection) {
- this.popup.selectedIndex = -1;
- }
+ if (!allowEmptySelection) {
+ this.popup.selectedIndex = -1;
+ stopEvent = true;
}
+ if (this.textbox && typeof(textboxUserValue) == "string") {
+ this.textbox.value = textboxUserValue;
+ }
+ // Call advanceSelection after setting selectedIndex so that
+ // screen readers see the newly selected one-off. Both trigger
+ // accessibility events.
+ this.advanceSelection(true, true, true);
} else {
let buttons = this.getSelectableButtons(true);
let lastButtonSelected =
diff --git a/browser/extensions/e10srollout/bootstrap.js b/browser/extensions/e10srollout/bootstrap.js
index 6720cd6098cb..1af261f5a48c 100644
--- a/browser/extensions/e10srollout/bootstrap.js
+++ b/browser/extensions/e10srollout/bootstrap.js
@@ -17,7 +17,7 @@ const TEST_THRESHOLD = {
};
const ADDON_ROLLOUT_POLICY = {
- "beta" : "49a", // 10 tested add-ons + any WebExtension
+ "beta" : "50allmpc", // Any WebExtension or addon with mpc = true
"release" : "49a", // 10 tested add-ons + any WebExtension
};
@@ -28,6 +28,7 @@ const PREF_E10S_FORCE_ENABLED = "browser.tabs.remote.force-enable";
const PREF_E10S_FORCE_DISABLED = "browser.tabs.remote.force-disable";
const PREF_TOGGLE_E10S = "browser.tabs.remote.autostart.2";
const PREF_E10S_ADDON_POLICY = "extensions.e10s.rollout.policy";
+const PREF_E10S_ADDON_BLOCKLIST = "extensions.e10s.rollout.blocklist";
const PREF_E10S_HAS_NONEXEMPT_ADDON = "extensions.e10s.rollout.hasAddon";
function startup() {
@@ -64,9 +65,13 @@ function defineCohort() {
let addonPolicy = "unknown";
if (updateChannel in ADDON_ROLLOUT_POLICY) {
addonPolicy = ADDON_ROLLOUT_POLICY[updateChannel];
- Preferences.set(PREF_E10S_ADDON_POLICY, ADDON_ROLLOUT_POLICY[updateChannel]);
+ Preferences.set(PREF_E10S_ADDON_POLICY, addonPolicy);
// This is also the proper place to set the blocklist pref
// in case it is necessary.
+
+ // Tab Mix Plus exception tracked at bug 1185672.
+ Preferences.set(PREF_E10S_ADDON_BLOCKLIST,
+ "{dc572301-7619-498c-a57d-39143191b318}");
} else {
Preferences.reset(PREF_E10S_ADDON_POLICY);
}
diff --git a/browser/extensions/pocket/skin/windows/pocket.css b/browser/extensions/pocket/skin/windows/pocket.css
index 3608e007a5cb..011b821a91dd 100644
--- a/browser/extensions/pocket/skin/windows/pocket.css
+++ b/browser/extensions/pocket/skin/windows/pocket.css
@@ -11,6 +11,6 @@
@media (-moz-windows-theme: luna-silver) and (max-resolution: 1dppx) {
#pocket-button {
- list-style-image: url(chrome://pocket/skin/toolbar-lunaSilver.png)
+ list-style-image: url(chrome://pocket/skin/toolbar-lunaSilver.png);
}
}
diff --git a/browser/modules/ContentCrashHandlers.jsm b/browser/modules/ContentCrashHandlers.jsm
index 285f5277640b..ef7a996e12e8 100644
--- a/browser/modules/ContentCrashHandlers.jsm
+++ b/browser/modules/ContentCrashHandlers.jsm
@@ -376,9 +376,9 @@ this.UnsubmittedCrashHandler = {
* bar to prompt the user to submit them.
*
* @returns Promise
- * Resolves after it tries to append a notification on
- * the most recent browser window. If a notification
- * cannot be shown, will resolve anyways.
+ * Resolves with the <xul:notification> after it tries to
+ * show a notification on the most recent browser window.
+ * If a notification cannot be shown, will resolve with null.
*/
checkForUnsubmittedCrashReports: Task.async(function*() {
let dateLimit = new Date();
@@ -389,16 +389,17 @@ this.UnsubmittedCrashHandler = {
reportIDs = yield CrashSubmit.pendingIDsAsync(dateLimit);
} catch (e) {
Cu.reportError(e);
- return;
+ return null;
}
if (reportIDs.length) {
if (CrashNotificationBar.autoSubmit) {
CrashNotificationBar.submitReports(reportIDs);
} else {
- this.showPendingSubmissionsNotification(reportIDs);
+ return this.showPendingSubmissionsNotification(reportIDs);
}
}
+ return null;
}),
/**
@@ -407,11 +408,12 @@ this.UnsubmittedCrashHandler = {
*
* @param reportIDs (Array)
* The Array of report IDs to offer the user to send.
+ * @returns The <xul:notification> if one is shown. null otherwise.
*/
showPendingSubmissionsNotification(reportIDs) {
let count = reportIDs.length;
if (!count) {
- return;
+ return null;
}
let messageTemplate =
@@ -419,7 +421,7 @@ this.UnsubmittedCrashHandler = {
let message = PluralForm.get(count, messageTemplate).replace("#1", count);
- CrashNotificationBar.show({
+ return CrashNotificationBar.show({
notificationID: "pending-crash-reports",
message,
reportIDs,
@@ -450,6 +452,7 @@ this.CrashNotificationBar = {
*
* reportIDs (Array)
* The array of report IDs to offer to the user.
+ * @returns The <xul:notification> if one is shown. null otherwise.
*/
show({ notificationID, message, reportIDs }) {
let chromeWin = RecentWindow.getMostRecentBrowserWindow();
@@ -457,13 +460,13 @@ this.CrashNotificationBar = {
// Can't show a notification in this case. We'll hopefully
// get another opportunity to have the user submit their
// crash reports later.
- return;
+ return null;
}
let nb = chromeWin.document.getElementById("global-notificationbox");
let notification = nb.getNotificationWithValue(notificationID);
if (notification) {
- return;
+ return null;
}
let buttons = [{
@@ -499,10 +502,10 @@ this.CrashNotificationBar = {
}
};
- nb.appendNotification(message, notificationID,
- "chrome://browser/skin/tab-crashed.svg",
- nb.PRIORITY_INFO_HIGH, buttons,
- eventCallback);
+ return nb.appendNotification(message, notificationID,
+ "chrome://browser/skin/tab-crashed.svg",
+ nb.PRIORITY_INFO_HIGH, buttons,
+ eventCallback);
},
get autoSubmit() {
diff --git a/browser/modules/test/browser.ini b/browser/modules/test/browser.ini
index 13cb18d719eb..fabca4b1cf73 100644
--- a/browser/modules/test/browser.ini
+++ b/browser/modules/test/browser.ini
@@ -23,6 +23,8 @@ support-files =
../../components/uitour/UITour-lib.js
[browser_taskbar_preview.js]
skip-if = os != "win"
+[browser_UnsubmittedCrashHandler.js]
+run-if = crashreporter
[browser_UsageTelemetry.js]
[browser_UsageTelemetry_private_and_restore.js]
[browser_urlBar_zoom.js]
diff --git a/browser/modules/test/browser_UnsubmittedCrashHandler.js b/browser/modules/test/browser_UnsubmittedCrashHandler.js
new file mode 100644
index 000000000000..60aafe9d467c
--- /dev/null
+++ b/browser/modules/test/browser_UnsubmittedCrashHandler.js
@@ -0,0 +1,419 @@
+"use strict";
+
+/**
+ * This suite tests the "unsubmitted crash report" notification
+ * that is seen when we detect pending crash reports on startup.
+ */
+
+const { UnsubmittedCrashHandler } =
+ Cu.import("resource:///modules/ContentCrashHandlers.jsm", this);
+const { FileUtils } =
+ Cu.import("resource://gre/modules/FileUtils.jsm", this);
+const { makeFakeAppDir } =
+ Cu.import("resource://testing-common/AppData.jsm", this);
+const { OS } =
+ Cu.import("resource://gre/modules/osfile.jsm", this);
+
+const DAY = 24 * 60 * 60 * 1000; // milliseconds
+const SERVER_URL = "http://example.com/browser/toolkit/crashreporter/test/browser/crashreport.sjs";
+
+/**
+ * Returns the directory where the browser is storing the
+ * pending crash reports.
+ *
+ * @returns nsIFile
+ */
+function getPendingCrashReportDir() {
+ // The fake UAppData directory that makeFakeAppDir provides
+ // is just UAppData under the profile directory.
+ return FileUtils.getDir("ProfD", [
+ "UAppData",
+ "Crash Reports",
+ "pending",
+ ], false);
+}
+
+/**
+ * Synchronously deletes all entries inside the pending
+ * crash report directory.
+ */
+function clearPendingCrashReports() {
+ let dir = getPendingCrashReportDir();
+ let entries = dir.directoryEntries;
+
+ while (entries.hasMoreElements()) {
+ let entry = entries.getNext().QueryInterface(Ci.nsIFile);
+ if (entry.isFile()) {
+ entry.remove(false);
+ }
+ }
+}
+
+/**
+ * Randomly generates howMany crash report .dmp and .extra files
+ * to put into the pending crash report directory. We're not
+ * actually creating real crash reports here, just stubbing
+ * out enough of the files to satisfy our notification and
+ * submission code.
+ *
+ * @param howMany (int)
+ * How many pending crash reports to put in the pending
+ * crash report directory.
+ * @param accessDate (Date, optional)
+ * What date to set as the last accessed time on the created
+ * crash reports. This defaults to the current date and time.
+ * @returns Promise
+ */
+function* createPendingCrashReports(howMany, accessDate) {
+ let dir = getPendingCrashReportDir();
+ if (!accessDate) {
+ accessDate = new Date();
+ }
+
+ /**
+ * Helper function for creating a file in the pending crash report
+ * directory.
+ *
+ * @param fileName (string)
+ * The filename for the crash report, not including the
+ * extension. This is usually a UUID.
+ * @param extension (string)
+ * The file extension for the created file.
+ * @param accessDate (Date)
+ * The date to set lastAccessed to.
+ * @param contents (string, optional)
+ * Set this to whatever the file needs to contain, if anything.
+ * @returns Promise
+ */
+ let createFile = (fileName, extension, accessDate, contents) => {
+ let file = dir.clone();
+ file.append(fileName + "." + extension);
+ file.create(Ci.nsILocalFile.NORMAL_FILE_TYPE, FileUtils.PERMS_FILE);
+ let promises = [OS.File.setDates(file.path, accessDate)];
+
+ if (contents) {
+ let encoder = new TextEncoder();
+ let array = encoder.encode(contents);
+ promises.push(OS.File.writeAtomic(file.path, array, {
+ tmpPath: file.path + ".tmp",
+ }));
+ }
+ return Promise.all(promises);
+ }
+
+ let uuidGenerator = Cc["@mozilla.org/uuid-generator;1"]
+ .getService(Ci.nsIUUIDGenerator);
+ // CrashSubmit expects there to be a ServerURL key-value
+ // pair in the .extra file, so we'll satisfy it.
+ let extraFileContents = "ServerURL=" + SERVER_URL;
+
+ return Task.spawn(function*() {
+ let uuids = [];
+ for (let i = 0; i < howMany; ++i) {
+ let uuid = uuidGenerator.generateUUID().toString();
+ // Strip the {}...
+ uuid = uuid.substring(1, uuid.length - 1);
+ yield createFile(uuid, "dmp", accessDate);
+ yield createFile(uuid, "extra", accessDate, extraFileContents);
+ uuids.push(uuid);
+ }
+ return uuids;
+ });
+}
+
+/**
+ * Returns a Promise that resolves once CrashSubmit starts sending
+ * success notifications for crash submission matching the reportIDs
+ * being passed in.
+ *
+ * @param reportIDs (Array)
+ * The IDs for the reports that we expect CrashSubmit to have sent.
+ * @returns Promise
+ */
+function waitForSubmittedReports(reportIDs) {
+ let promises = [];
+ for (let reportID of reportIDs) {
+ let promise = TestUtils.topicObserved("crash-report-status", (subject, data) => {
+ if (data == "success") {
+ let propBag = subject.QueryInterface(Ci.nsIPropertyBag2);
+ let dumpID = propBag.getPropertyAsAString("minidumpID");
+ if (dumpID == reportID) {
+ return true;
+ }
+ }
+ return false;
+ });
+ promises.push(promise);
+ }
+ return Promise.all(promises);
+}
+
+/**
+ * Returns a Promise that resolves once a .dmp.ignore file is created for
+ * the crashes in the pending directory matching the reportIDs being
+ * passed in.
+ *
+ * @param reportIDs (Array)
+ * The IDs for the reports that we expect CrashSubmit to have been
+ * marked for ignoring.
+ * @returns Promise
+ */
+function waitForIgnoredReports(reportIDs) {
+ let dir = getPendingCrashReportDir();
+ let promises = [];
+ for (let reportID of reportIDs) {
+ let file = dir.clone();
+ file.append(reportID + ".dmp.ignore");
+ promises.push(OS.File.exists(file.path));
+ }
+ return Promise.all(promises);
+}
+
+let gNotificationBox;
+
+add_task(function* setup() {
+ // Pending crash reports are stored in the UAppData folder,
+ // which exists outside of the profile folder. In order to
+ // not overwrite / clear pending crash reports for the poor
+ // soul who runs this test, we use AppData.jsm to point to
+ // a special made-up directory inside the profile
+ // directory.
+ yield makeFakeAppDir();
+ // We'll assume that the notifications will be shown in the current
+ // browser window's global notification box.
+ gNotificationBox = document.getElementById("global-notificationbox");
+
+ // If we happen to already be seeing the unsent crash report
+ // notification, it's because the developer running this test
+ // happened to have some unsent reports in their UAppDir.
+ // We'll remove the notification without touching those reports.
+ let notification =
+ gNotificationBox.getNotificationWithValue("pending-crash-reports");
+ if (notification) {
+ notification.close();
+ }
+
+ let env = Cc["@mozilla.org/process/environment;1"]
+ .getService(Components.interfaces.nsIEnvironment);
+ let oldServerURL = env.get("MOZ_CRASHREPORTER_URL");
+ env.set("MOZ_CRASHREPORTER_URL", SERVER_URL);
+
+ registerCleanupFunction(function() {
+ gNotificationBox = null;
+ clearPendingCrashReports();
+ env.set("MOZ_CRASHREPORTER_URL", oldServerURL);
+ });
+});
+
+/**
+ * Tests that if there are no pending crash reports, then the
+ * notification will not show up.
+ */
+add_task(function* test_no_pending_no_notification() {
+ // Make absolutely sure there are no pending crash reports first...
+ clearPendingCrashReports();
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.equal(notification, null,
+ "There should not be a notification if there are no " +
+ "pending crash reports");
+});
+
+/**
+ * Tests that there is a notification if there is one pending
+ * crash report.
+ */
+add_task(function* test_one_pending() {
+ yield createPendingCrashReports(1);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ gNotificationBox.removeNotification(notification, true);
+ clearPendingCrashReports();
+});
+
+/**
+ * Tests that there is a notification if there is more than one
+ * pending crash report.
+ */
+add_task(function* test_several_pending() {
+ yield createPendingCrashReports(3);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ gNotificationBox.removeNotification(notification, true);
+ clearPendingCrashReports();
+});
+
+/**
+ * Tests that there is no notification if the only pending crash
+ * reports are over 28 days old. Also checks that if we put a newer
+ * crash with that older set, that we can still get a notification.
+ */
+add_task(function* test_several_pending() {
+ // Let's create some crash reports from 30 days ago.
+ let oldDate = new Date(Date.now() - (30 * DAY));
+ yield createPendingCrashReports(3, oldDate);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.equal(notification, null,
+ "There should not be a notification if there are only " +
+ "old pending crash reports");
+ // Now let's create a new one and check again
+ yield createPendingCrashReports(1);
+ notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ gNotificationBox.removeNotification(notification, true);
+ clearPendingCrashReports();
+});
+
+/**
+ * Tests that the notification can submit a report.
+ */
+add_task(function* test_can_submit() {
+ let reportIDs = yield createPendingCrashReports(1);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ // Attempt to submit the notification by clicking on the submit
+ // button
+ let buttons = notification.querySelectorAll(".notification-button");
+ // ...which should be the first button.
+ let submit = buttons[0];
+
+ let promiseReports = waitForSubmittedReports(reportIDs);
+ info("Sending crash report");
+ submit.click();
+ info("Sent!");
+ // We'll not wait for the notification to finish its transition -
+ // we'll just remove it right away.
+ gNotificationBox.removeNotification(notification, true);
+
+ info("Waiting on reports to be received.");
+ yield promiseReports;
+ info("Received!");
+ clearPendingCrashReports();
+});
+
+/**
+ * Tests that the notification can submit multiple reports.
+ */
+add_task(function* test_can_submit_several() {
+ let reportIDs = yield createPendingCrashReports(3);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ // Attempt to submit the notification by clicking on the submit
+ // button
+ let buttons = notification.querySelectorAll(".notification-button");
+ // ...which should be the first button.
+ let submit = buttons[0];
+
+ let promiseReports = waitForSubmittedReports(reportIDs);
+ info("Sending crash reports");
+ submit.click();
+ info("Sent!");
+ // We'll not wait for the notification to finish its transition -
+ // we'll just remove it right away.
+ gNotificationBox.removeNotification(notification, true);
+
+ info("Waiting on reports to be received.");
+ yield promiseReports;
+ info("Received!");
+ clearPendingCrashReports();
+});
+
+/**
+ * Tests that choosing "Send Always" flips the autoSubmit pref
+ * and sends the pending crash reports.
+ */
+add_task(function* test_can_submit_always() {
+ let pref = "browser.crashReports.unsubmittedCheck.autoSubmit";
+ Assert.equal(Services.prefs.getBoolPref(pref), false,
+ "We should not be auto-submitting by default");
+
+ let reportIDs = yield createPendingCrashReports(1);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ // Attempt to submit the notification by clicking on the send all
+ // button
+ let buttons = notification.querySelectorAll(".notification-button");
+ // ...which should be the second button.
+ let sendAll = buttons[1];
+
+ let promiseReports = waitForSubmittedReports(reportIDs);
+ info("Sending crash reports");
+ sendAll.click();
+ info("Sent!");
+ // We'll not wait for the notification to finish its transition -
+ // we'll just remove it right away.
+ gNotificationBox.removeNotification(notification, true);
+
+ info("Waiting on reports to be received.");
+ yield promiseReports;
+ info("Received!");
+
+ // Make sure the pref was set
+ Assert.equal(Services.prefs.getBoolPref(pref), true,
+ "The autoSubmit pref should have been set");
+
+ // And revert back to default now.
+ Services.prefs.clearUserPref(pref);
+
+ clearPendingCrashReports();
+});
+
+/**
+ * Tests that if the user has chosen to automatically send
+ * crash reports that no notification is displayed to the
+ * user.
+ */
+add_task(function* test_can_auto_submit() {
+ yield SpecialPowers.pushPrefEnv({ set: [
+ ["browser.crashReports.unsubmittedCheck.autoSubmit", true],
+ ]});
+
+ let reportIDs = yield createPendingCrashReports(3);
+ let promiseReports = waitForSubmittedReports(reportIDs);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.equal(notification, null, "There should be no notification");
+ info("Waiting on reports to be received.");
+ yield promiseReports;
+ info("Received!");
+
+ clearPendingCrashReports();
+ yield SpecialPowers.popPrefEnv();
+});
+
+/**
+ * Tests that if the user chooses to dismiss the notification,
+ * then the current pending requests won't cause the notification
+ * to appear again in the future.
+ */
+add_task(function* test_can_ignore() {
+ let reportIDs = yield createPendingCrashReports(3);
+ let notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.ok(notification, "There should be a notification");
+
+ // Dismiss the notification by clicking on the "X" button.
+ let anonyNodes = document.getAnonymousNodes(notification)[0];
+ let closeButton = anonyNodes.querySelector(".close-icon");
+ closeButton.click();
+ yield waitForIgnoredReports(reportIDs);
+
+ notification =
+ yield UnsubmittedCrashHandler.checkForUnsubmittedCrashReports();
+ Assert.equal(notification, null, "There should be no notification");
+
+ clearPendingCrashReports();
+});
diff --git a/browser/themes/linux/browser.css b/browser/themes/linux/browser.css
index 5d2a63de8fe8..32e263e7a9ba 100644
--- a/browser/themes/linux/browser.css
+++ b/browser/themes/linux/browser.css
@@ -578,10 +578,6 @@ menuitem:not([type]):not(.menuitem-tooltip):not(.menuitem-iconic-tooltip) {
list-style-image: url("moz-icon://stock/gtk-about?size=menu");
}
-#javascriptConsole {
- list-style-image: url("chrome://global/skin/console/console.png");
-}
-
/* Primary toolbar buttons */
:-moz-any(toolbar, .widget-overflow-list) .toolbarbutton-1 > .toolbarbutton-icon,
diff --git a/browser/themes/shared/plugin-doorhanger.inc.css b/browser/themes/shared/plugin-doorhanger.inc.css
index 7a05200e2050..09b1683c5bca 100644
--- a/browser/themes/shared/plugin-doorhanger.inc.css
+++ b/browser/themes/shared/plugin-doorhanger.inc.css
@@ -18,7 +18,7 @@
}
.center-item-warning-icon {
- background-image: url("chrome://mozapps/skin/extensions/alerticon-info-negative.png");
+ background-image: url("chrome://mozapps/skin/extensions/alerticon-info-negative.svg");
background-repeat: no-repeat;
width: 16px;
height: 15px;
diff --git a/media/libstagefright/binding/mp4parse-cargo.patch b/media/libstagefright/binding/mp4parse-cargo.patch
index 058a8d4b2d03..b3c5138c4a6e 100644
--- a/media/libstagefright/binding/mp4parse-cargo.patch
+++ b/media/libstagefright/binding/mp4parse-cargo.patch
@@ -9,7 +9,7 @@ index 5092cd7..ecbc8c0 100644
-build = "build.rs"
-
[dependencies]
- "mp4parse" = {version = "0.5.0", path = "../mp4parse"}
+ "mp4parse" = {version = "0.5.1", path = "../mp4parse"}
-[build-dependencies]
-rusty-cheddar = "0.3.2"
diff --git a/media/libstagefright/binding/mp4parse/Cargo.toml b/media/libstagefright/binding/mp4parse/Cargo.toml
index 67fa8a4465a9..0c3c5f019272 100644
--- a/media/libstagefright/binding/mp4parse/Cargo.toml
+++ b/media/libstagefright/binding/mp4parse/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "mp4parse"
-version = "0.5.0"
+version = "0.5.1"
authors = [
"Ralph Giles <giles@mozilla.com>",
"Matthew Gregan <kinetik@flim.org>",
diff --git a/media/libstagefright/binding/mp4parse/src/lib.rs b/media/libstagefright/binding/mp4parse/src/lib.rs
index 4e8db561d074..314b5010576d 100644
--- a/media/libstagefright/binding/mp4parse/src/lib.rs
+++ b/media/libstagefright/binding/mp4parse/src/lib.rs
@@ -291,19 +291,21 @@ impl Default for TrackType {
fn default() -> Self { TrackType::Unknown }
}
-/// The media's global (mvhd) timescale.
+/// The media's global (mvhd) timescale in units per second.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct MediaTimeScale(pub u64);
-/// A time scaled by the media's global (mvhd) timescale.
+/// A time to be scaled by the media's global (mvhd) timescale.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct MediaScaledTime(pub u64);
/// The track's local (mdhd) timescale.
+/// Members are timescale units per second and the track id.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TrackTimeScale(pub u64, pub usize);
-/// A time scaled by the track's local (mdhd) timescale.
+/// A time to be scaled by the track's local (mdhd) timescale.
+/// Members are time in scale units and the track id.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TrackScaledTime(pub u64, pub usize);
diff --git a/media/libstagefright/binding/mp4parse_capi/Cargo.toml b/media/libstagefright/binding/mp4parse_capi/Cargo.toml
index ecbc8c065a3f..be19307a0162 100644
--- a/media/libstagefright/binding/mp4parse_capi/Cargo.toml
+++ b/media/libstagefright/binding/mp4parse_capi/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "mp4parse_capi"
-version = "0.5.0"
+version = "0.5.1"
authors = [
"Ralph Giles <giles@mozilla.com>",
"Matthew Gregan <kinetik@flim.org>",
@@ -18,7 +18,7 @@ exclude = [
]
[dependencies]
-"mp4parse" = {version = "0.5.0", path = "../mp4parse"}
+"mp4parse" = {version = "0.5.1", path = "../mp4parse"}
[features]
fuzz = ["mp4parse/fuzz"]
diff --git a/media/libstagefright/binding/mp4parse_capi/src/lib.rs b/media/libstagefright/binding/mp4parse_capi/src/lib.rs
index 93205675a095..dcdaec787434 100644
--- a/media/libstagefright/binding/mp4parse_capi/src/lib.rs
+++ b/media/libstagefright/binding/mp4parse_capi/src/lib.rs
@@ -278,15 +278,36 @@ pub unsafe extern fn mp4parse_get_track_count(parser: *const mp4parse_parser, co
MP4PARSE_OK
}
-fn media_time_to_ms(time: MediaScaledTime, scale: MediaTimeScale) -> u64 {
- assert!(scale.0 != 0);
- time.0 * 1000000 / scale.0
+/// Calculate numerator * scale / denominator, if possible.
+///
+/// Applying the associativity of integer arithmetic, we divide first
+/// and add the remainder after multiplying each term separately
+/// to preserve precision while leaving more headroom. That is,
+/// (n * s) / d is split into floor(n / d) * s + (n % d) * s / d.
+///
+/// Return None on overflow or if the denominator is zero.
+fn rational_scale(numerator: u64, denominator: u64, scale: u64) -> Option<u64> {
+ if denominator == 0 {
+ return None;
+ }
+ let integer = numerator / denominator;
+ let remainder = numerator % denominator;
+ match integer.checked_mul(scale) {
+ Some(integer) => remainder.checked_mul(scale)
+ .and_then(|remainder| (remainder/denominator).checked_add(integer)),
+ None => None,
+ }
}
-fn track_time_to_ms(time: TrackScaledTime, scale: TrackTimeScale) -> u64 {
+fn media_time_to_us(time: MediaScaledTime, scale: MediaTimeScale) -> Option<u64> {
+ let microseconds_per_second = 1000000;
+ rational_scale(time.0, scale.0, microseconds_per_second)
+}
+
+fn track_time_to_us(time: TrackScaledTime, scale: TrackTimeScale) -> Option<u64> {
assert!(time.1 == scale.1);
- assert!(scale.0 != 0);
- time.0 * 1000000 / scale.0
+ let microseconds_per_second = 1000000;
+ rational_scale(time.0, scale.0, microseconds_per_second)
}
/// Fill the supplied `mp4parse_track_info` with metadata for `track`.
@@ -333,13 +354,24 @@ pub unsafe extern fn mp4parse_get_track_info(parser: *mut mp4parse_parser, track
Some(track_duration)) = (track.timescale,
context.timescale,
track.duration) {
- info.media_time = track.media_time.map_or(0, |media_time| {
- track_time_to_ms(media_time, track_timescale) as i64
- }) - track.empty_duration.map_or(0, |empty_duration| {
- media_time_to_ms(empty_duration, context_timescale) as i64
- });
+ let media_time =
+ match track.media_time.map_or(Some(0), |media_time| {
+ track_time_to_us(media_time, track_timescale) }) {
+ Some(time) => time as i64,
+ None => return MP4PARSE_ERROR_INVALID,
+ };
+ let empty_duration =
+ match track.empty_duration.map_or(Some(0), |empty_duration| {
+ media_time_to_us(empty_duration, context_timescale) }) {
+ Some(time) => time as i64,
+ None => return MP4PARSE_ERROR_INVALID,
+ };
+ info.media_time = media_time - empty_duration;
- info.duration = track_time_to_ms(track_duration, track_timescale);
+ match track_time_to_us(track_duration, track_timescale) {
+ Some(duration) => info.duration = duration,
+ None => return MP4PARSE_ERROR_INVALID,
+ }
} else {
return MP4PARSE_ERROR_INVALID
}
@@ -747,3 +779,29 @@ fn arg_validation_with_data() {
mp4parse_free(parser);
}
}
+
+#[test]
+fn rational_scale_overflow() {
+ assert_eq!(rational_scale(17, 3, 1000), Some(5666));
+ let large = 0x4000_0000_0000_0000;
+ assert_eq!(rational_scale(large, 2, 2), Some(large));
+ assert_eq!(rational_scale(large, 4, 4), Some(large));
+ assert_eq!(rational_scale(large, 2, 8), None);
+ assert_eq!(rational_scale(large, 8, 4), Some(large/2));
+ assert_eq!(rational_scale(large + 1, 4, 4), Some(large+1));
+ assert_eq!(rational_scale(large, 40, 1000), None);
+}
+
+#[test]
+fn media_time_overflow() {
+ let scale = MediaTimeScale(90000);
+ let duration = MediaScaledTime(9007199254710000);
+ assert_eq!(media_time_to_us(duration, scale), Some(100079991719000000));
+}
+
+#[test]
+fn track_time_overflow() {
+ let scale = TrackTimeScale(44100, 0);
+ let duration = TrackScaledTime(4413527634807900, 0);
+ assert_eq!(track_time_to_us(duration, scale), Some(100079991719000000));
+}
diff --git a/media/libstagefright/binding/update-rust.sh b/media/libstagefright/binding/update-rust.sh
index a3585c175b39..16d9978ea13d 100755
--- a/media/libstagefright/binding/update-rust.sh
+++ b/media/libstagefright/binding/update-rust.sh
@@ -2,7 +2,7 @@
# Script to update mp4parse-rust sources to latest upstream
# Default version.
-VER=v0.5.0
+VER=v0.5.1
# Accept version or commit from the command line.
if test -n "$1"; then
diff --git a/netwerk/base/nsStandardURL.cpp b/netwerk/base/nsStandardURL.cpp
index 33d3f310d125..12f2dc9ee1ee 100644
--- a/netwerk/base/nsStandardURL.cpp
+++ b/netwerk/base/nsStandardURL.cpp
@@ -1290,6 +1290,8 @@ nsStandardURL::GetHost(nsACString &result)
NS_IMETHODIMP
nsStandardURL::GetPort(int32_t *result)
{
+ // should never be more than 16 bit
+ MOZ_ASSERT(mPort <= std::numeric_limits<uint16_t>::max());
*result = mPort;
return NS_OK;
}
@@ -1967,8 +1969,9 @@ nsStandardURL::SetPort(int32_t port)
if ((port == mPort) || (mPort == -1 && port == mDefaultPort))
return NS_OK;
- // ports must be >= 0
- if (port < -1) // -1 == use default
+ // ports must be >= 0 and 16 bit
+ // -1 == use default
+ if (port < -1 || port > std::numeric_limits<uint16_t>::max())
return NS_ERROR_MALFORMED_URI;
if (mURLType == URLTYPE_NO_AUTHORITY) {
@@ -3123,7 +3126,8 @@ nsStandardURL::Init(uint32_t urlType,
{
ENSURE_MUTABLE();
- if (spec.Length() > (uint32_t) net_GetURLMaxLength()) {
+ if (spec.Length() > (uint32_t) net_GetURLMaxLength() ||
+ defaultPort > std::numeric_limits<uint16_t>::max()) {
return NS_ERROR_MALFORMED_URI;
}
@@ -3174,6 +3178,11 @@ nsStandardURL::SetDefaultPort(int32_t aNewDefaultPort)
InvalidateCache();
+ // should never be more than 16 bit
+ if (aNewDefaultPort >= std::numeric_limits<uint16_t>::max()) {
+ return NS_ERROR_MALFORMED_URI;
+ }
+
// If we're already using the new default-port as a custom port, then clear
// it off of our mSpec & set mPort to -1, to indicate that we'll be using
// the default from now on (which happens to match what we already had).
diff --git a/netwerk/base/nsURLParsers.cpp b/netwerk/base/nsURLParsers.cpp
index 015c0c02555f..b75ee0c4d4db 100644
--- a/netwerk/base/nsURLParsers.cpp
+++ b/netwerk/base/nsURLParsers.cpp
@@ -606,7 +606,7 @@ nsAuthURLParser::ParseServerInfo(const char *serverinfo, int32_t serverinfoLen,
nsresult err;
*port = buf.ToInteger(&err);
- if (NS_FAILED(err) || *port < 0)
+ if (NS_FAILED(err) || *port < 0 || *port > std::numeric_limits<uint16_t>::max())
return NS_ERROR_MALFORMED_URI;
}
}
diff --git a/netwerk/test/unit/test_bug652761.js b/netwerk/test/unit/test_bug652761.js
index fc5708a8b341..e2b781da8721 100644
--- a/netwerk/test/unit/test_bug652761.js
+++ b/netwerk/test/unit/test_bug652761.js
@@ -2,19 +2,16 @@
Cu.import("resource://gre/modules/NetUtil.jsm");
-function completeTest(request, data, ctx)
+function run_test()
{
+ // Bug 1301621 makes invalid ports throw
+ Assert.throws(() => {
+ var chan = NetUtil.newChannel({
+ uri: "http://localhost:80000/",
+ loadUsingSystemPrincipal: true
+ });
+ }, "invalid port");
+
do_test_finished();
}
-function run_test()
-{
- var chan = NetUtil.newChannel({
- uri: "http://localhost:80000/",
- loadUsingSystemPrincipal: true
- });
- var httpChan = chan.QueryInterface(Components.interfaces.nsIHttpChannel);
- httpChan.asyncOpen2(new ChannelListener(completeTest,httpChan, CL_EXPECT_FAILURE));
- do_test_pending();
-}
-
diff --git a/netwerk/test/unit/test_invalidport.js b/netwerk/test/unit/test_invalidport.js
deleted file mode 100644
index 70d401c84c54..000000000000
--- a/netwerk/test/unit/test_invalidport.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// This is essentially a crashtest for accessing an out of range port
-// Perform the async open several times in order to induce exponential
-// scheduling behavior bugs.
-
-Cu.import("resource://gre/modules/NetUtil.jsm");
-
-var CC = Components.Constructor;
-
-var counter = 0;
-const iterations = 10;
-
-var listener = {
- onStartRequest: function test_onStartR(request, ctx) {
- },
-
- onDataAvailable: function test_ODA() {
- do_throw("Should not get any data!");
- },
-
- onStopRequest: function test_onStopR(request, ctx, status) {
- if (counter++ == iterations)
- do_test_finished();
- else
- execute_test();
- },
-};
-
-function run_test() {
- execute_test();
- do_test_pending();
-}
-
-function execute_test() {
- var chan = NetUtil.newChannel({uri: "http://localhost:75000", loadUsingSystemPrincipal: true});
- chan.QueryInterface(Ci.nsIHttpChannel);
- chan.asyncOpen2(listener);
-}
-
diff --git a/netwerk/test/unit/test_large_port.js b/netwerk/test/unit/test_large_port.js
new file mode 100644
index 000000000000..d2480582f2d0
--- /dev/null
+++ b/netwerk/test/unit/test_large_port.js
@@ -0,0 +1,36 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// Ensure that URIs with ports outside the 16-bit range are rejected
+
+"use strict";
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+
+const StandardURL = Components.Constructor("@mozilla.org/network/standard-url;1",
+ "nsIStandardURL",
+ "init");
+function run_test()
+{
+ // Bug 1301621 makes invalid ports throw
+ Assert.throws(() => {
+ new StandardURL(Ci.nsIStandardURL.URLTYPE_AUTHORITY, 65536,
+ "http://localhost", "UTF-8", null)
+ }, "invalid port during creation");
+ let url = new StandardURL(Ci.nsIStandardURL.URLTYPE_AUTHORITY, 65535,
+ "http://localhost", "UTF-8", null)
+ .QueryInterface(Ci.nsIStandardURL)
+
+ Assert.throws(() => {
+ url.setDefaultPort(65536);
+ }, "invalid port in setDefaultPort");
+ Assert.throws(() => {
+ url.port = 65536;
+ }, "invalid port in port setter");
+
+ do_check_eq(url.QueryInterface(Ci.nsIURI).port, -1);
+ do_test_finished();
+}
+
diff --git a/netwerk/test/unit/xpcshell.ini b/netwerk/test/unit/xpcshell.ini
index 5d7b81003bbd..a53cc0cc5396 100644
--- a/netwerk/test/unit/xpcshell.ini
+++ b/netwerk/test/unit/xpcshell.ini
@@ -232,8 +232,8 @@ skip-if = os == "android"
[test_immutable.js]
skip-if = !hasNode
run-sequentially = node server exceptions dont replay well
-[test_invalidport.js]
[test_localstreams.js]
+[test_large_port.js]
[test_mismatch_last-modified.js]
[test_MIME_params.js]
[test_mozTXTToHTMLConv.js]
diff --git a/python/mozlint/test/conftest.py b/python/mozlint/test/conftest.py
new file mode 100644
index 000000000000..e171798b01a2
--- /dev/null
+++ b/python/mozlint/test/conftest.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import pytest
+
+from mozlint import LintRoller
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+@pytest.fixture
+def lint(request):
+ lintargs = getattr(request.module, 'lintargs', {})
+ return LintRoller(root=here, **lintargs)
+
+
+@pytest.fixture(scope='session')
+def filedir():
+ return os.path.join(here, 'files')
+
+
+@pytest.fixture(scope='module')
+def files(filedir, request):
+ suffix_filter = getattr(request.module, 'files', [''])
+ return [os.path.join(filedir, p) for p in os.listdir(filedir)
+ if any(p.endswith(suffix) for suffix in suffix_filter)]
+
+
+@pytest.fixture(scope='session')
+def lintdir():
+ return os.path.join(here, 'linters')
+
+
+@pytest.fixture(scope='module')
+def linters(lintdir, request):
+ suffix_filter = getattr(request.module, 'linters', ['.lint'])
+ return [os.path.join(lintdir, p) for p in os.listdir(lintdir)
+ if any(p.endswith(suffix) for suffix in suffix_filter)]
diff --git a/python/mozlint/test/test_formatters.py b/python/mozlint/test/test_formatters.py
index 22f658ee6f2d..b9e6512b24e3 100644
--- a/python/mozlint/test/test_formatters.py
+++ b/python/mozlint/test/test_formatters.py
@@ -5,56 +5,50 @@
from __future__ import unicode_literals
import json
-import os
+import sys
from collections import defaultdict
-from unittest import TestCase
-from mozunit import main
+import pytest
from mozlint import ResultContainer
from mozlint import formatters
-here = os.path.abspath(os.path.dirname(__file__))
+@pytest.fixture
+def results(scope='module'):
+ containers = (
+ ResultContainer(
+ linter='foo',
+ path='a/b/c.txt',
+ message="oh no foo",
+ lineno=1,
+ ),
+ ResultContainer(
+ linter='bar',
+ path='d/e/f.txt',
+ message="oh no bar",
+ hint="try baz instead",
+ level='warning',
+ lineno=4,
+ column=2,
+ rule="bar-not-allowed",
+ ),
+ ResultContainer(
+ linter='baz',
+ path='a/b/c.txt',
+ message="oh no baz",
+ lineno=4,
+ source="if baz:",
+ ),
+ )
+ results = defaultdict(list)
+ for c in containers:
+ results[c.path].append(c)
+ return results
-class TestFormatters(TestCase):
-
- def __init__(self, *args, **kwargs):
- TestCase.__init__(self, *args, **kwargs)
-
- containers = (
- ResultContainer(
- linter='foo',
- path='a/b/c.txt',
- message="oh no foo",
- lineno=1,
- ),
- ResultContainer(
- linter='bar',
- path='d/e/f.txt',
- message="oh no bar",
- hint="try baz instead",
- level='warning',
- lineno=4,
- column=2,
- rule="bar-not-allowed",
- ),
- ResultContainer(
- linter='baz',
- path='a/b/c.txt',
- message="oh no baz",
- lineno=4,
- source="if baz:",
- ),
- )
-
- self.results = defaultdict(list)
- for c in containers:
- self.results[c.path].append(c)
-
- def test_stylish_formatter(self):
- expected = """
+def test_stylish_formatter(results):
+ expected = """
a/b/c.txt
1 error oh no foo (foo)
4 error oh no baz (baz)
@@ -65,30 +59,32 @@ d/e/f.txt
\u2716 3 problems (2 errors, 1 warning)
""".strip()
- fmt = formatters.get('stylish', disable_colors=True)
- self.assertEqual(expected, fmt(self.results))
+ fmt = formatters.get('stylish', disable_colors=True)
+ assert expected == fmt(results)
- def test_treeherder_formatter(self):
- expected = """
+
+def test_treeherder_formatter(results):
+ expected = """
TEST-UNEXPECTED-ERROR | a/b/c.txt:1 | oh no foo (foo)
TEST-UNEXPECTED-ERROR | a/b/c.txt:4 | oh no baz (baz)
TEST-UNEXPECTED-WARNING | d/e/f.txt:4:2 | oh no bar (bar-not-allowed)
""".strip()
- fmt = formatters.get('treeherder')
- self.assertEqual(expected, fmt(self.results))
+ fmt = formatters.get('treeherder')
+ assert expected == fmt(results)
- def test_json_formatter(self):
- fmt = formatters.get('json')
- formatted = json.loads(fmt(self.results))
- self.assertEqual(set(formatted.keys()), set(self.results.keys()))
+def test_json_formatter(results):
+ fmt = formatters.get('json')
+ formatted = json.loads(fmt(results))
- slots = ResultContainer.__slots__
- for errors in formatted.values():
- for err in errors:
- self.assertTrue(all(s in err for s in slots))
+ assert set(formatted.keys()) == set(results.keys())
+
+ slots = ResultContainer.__slots__
+ for errors in formatted.values():
+ for err in errors:
+ assert all(s in err for s in slots)
if __name__ == '__main__':
- main()
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozlint/test/test_parser.py b/python/mozlint/test/test_parser.py
index 33a1ac94f953..e18e7a5a92fa 100644
--- a/python/mozlint/test/test_parser.py
+++ b/python/mozlint/test/test_parser.py
@@ -3,9 +3,9 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
-from unittest import TestCase
+import sys
-from mozunit import main
+import pytest
from mozlint.parser import Parser
from mozlint.errors import (
@@ -14,55 +14,42 @@ from mozlint.errors import (
)
-here = os.path.abspath(os.path.dirname(__file__))
+@pytest.fixture(scope='module')
+def parse(lintdir):
+ parser = Parser()
+
+ def _parse(name):
+ path = os.path.join(lintdir, name)
+ return parser(path)
+ return _parse
-class TestParser(TestCase):
+def test_parse_valid_linter(parse):
+ lintobj = parse('string.lint')
+ assert isinstance(lintobj, dict)
+ assert 'name' in lintobj
+ assert 'description' in lintobj
+ assert 'type' in lintobj
+ assert 'payload' in lintobj
- def __init__(self, *args, **kwargs):
- TestCase.__init__(self, *args, **kwargs)
- self._lintdir = os.path.join(here, 'linters')
- self._parse = Parser()
+@pytest.mark.parametrize('linter', [
+ 'invalid_type.lint',
+ 'invalid_extension.lnt',
+ 'invalid_include.lint',
+ 'invalid_exclude.lint',
+ 'missing_attrs.lint',
+ 'missing_definition.lint',
+])
+def test_parse_invalid_linter(parse, linter):
+ with pytest.raises(LinterParseError):
+ parse(linter)
- def parse(self, name):
- return self._parse(os.path.join(self._lintdir, name))
- def test_parse_valid_linter(self):
- linter = self.parse('string.lint')
- self.assertIsInstance(linter, dict)
- self.assertIn('name', linter)
- self.assertIn('description', linter)
- self.assertIn('type', linter)
- self.assertIn('payload', linter)
-
- def test_parse_invalid_type(self):
- with self.assertRaises(LinterParseError):
- self.parse('invalid_type.lint')
-
- def test_parse_invalid_extension(self):
- with self.assertRaises(LinterParseError):
- self.parse('invalid_extension.lnt')
-
- def test_parse_invalid_include_exclude(self):
- with self.assertRaises(LinterParseError):
- self.parse('invalid_include.lint')
-
- with self.assertRaises(LinterParseError):
- self.parse('invalid_exclude.lint')
-
- def test_parse_missing_attributes(self):
- with self.assertRaises(LinterParseError):
- self.parse('missing_attrs.lint')
-
- def test_parse_missing_definition(self):
- with self.assertRaises(LinterParseError):
- self.parse('missing_definition.lint')
-
- def test_parse_non_existent_linter(self):
- with self.assertRaises(LinterNotFound):
- self.parse('missing_file.lint')
+def test_parse_non_existent_linter(parse):
+ with pytest.raises(LinterNotFound):
+ parse('missing_file.lint')
if __name__ == '__main__':
- main()
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozlint/test/test_roller.py b/python/mozlint/test/test_roller.py
index abfd619bd119..b347ee6eb155 100644
--- a/python/mozlint/test/test_roller.py
+++ b/python/mozlint/test/test_roller.py
@@ -4,77 +4,67 @@
import os
import sys
-from unittest import TestCase
-from mozunit import main
+import pytest
-from mozlint import LintRoller, ResultContainer
+from mozlint import ResultContainer
from mozlint.errors import LintersNotConfigured, LintException
here = os.path.abspath(os.path.dirname(__file__))
-class TestLintRoller(TestCase):
+linters = ('string.lint', 'regex.lint', 'external.lint')
- def __init__(self, *args, **kwargs):
- TestCase.__init__(self, *args, **kwargs)
- self.filedir = os.path.join(here, 'files')
- self.files = [os.path.join(self.filedir, f) for f in os.listdir(self.filedir)]
- self.lintdir = os.path.join(here, 'linters')
+def test_roll_no_linters_configured(lint, files):
+ with pytest.raises(LintersNotConfigured):
+ lint.roll(files)
- names = ('string.lint', 'regex.lint', 'external.lint')
- self.linters = [os.path.join(self.lintdir, n) for n in names]
- def setUp(self):
- TestCase.setUp(self)
- self.lint = LintRoller(root=here)
+def test_roll_successful(lint, linters, files):
+ lint.read(linters)
- def test_roll_no_linters_configured(self):
- with self.assertRaises(LintersNotConfigured):
- self.lint.roll(self.files)
+ result = lint.roll(files)
+ assert len(result) == 1
- def test_roll_successful(self):
- self.lint.read(self.linters)
+ path = result.keys()[0]
+ assert os.path.basename(path) == 'foobar.js'
- result = self.lint.roll(self.files)
- self.assertEqual(len(result), 1)
+ errors = result[path]
+ assert isinstance(errors, list)
+ assert len(errors) == 6
- path = result.keys()[0]
- self.assertEqual(os.path.basename(path), 'foobar.js')
+ container = errors[0]
+ assert isinstance(container, ResultContainer)
+ assert container.rule == 'no-foobar'
- errors = result[path]
- self.assertIsInstance(errors, list)
- self.assertEqual(len(errors), 6)
- container = errors[0]
- self.assertIsInstance(container, ResultContainer)
- self.assertEqual(container.rule, 'no-foobar')
+def test_roll_catch_exception(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, 'raises.lint'))
- def test_roll_catch_exception(self):
- self.lint.read(os.path.join(self.lintdir, 'raises.lint'))
+ # suppress printed traceback from test output
+ old_stderr = sys.stderr
+ sys.stderr = open(os.devnull, 'w')
+ with pytest.raises(LintException):
+ lint.roll(files)
+ sys.stderr = old_stderr
- # suppress printed traceback from test output
- old_stderr = sys.stderr
- sys.stderr = open(os.devnull, 'w')
- with self.assertRaises(LintException):
- self.lint.roll(self.files)
- sys.stderr = old_stderr
- def test_roll_with_excluded_path(self):
- self.lint.lintargs.update({'exclude': ['**/foobar.js']})
+def test_roll_with_excluded_path(lint, linters, files):
+ lint.lintargs.update({'exclude': ['**/foobar.js']})
- self.lint.read(self.linters)
- result = self.lint.roll(self.files)
+ lint.read(linters)
+ result = lint.roll(files)
- self.assertEqual(len(result), 0)
+ assert len(result) == 0
- def test_roll_with_invalid_extension(self):
- self.lint.read(os.path.join(self.lintdir, 'external.lint'))
- result = self.lint.roll(os.path.join(self.filedir, 'foobar.py'))
- self.assertEqual(len(result), 0)
+
+def test_roll_with_invalid_extension(lint, lintdir, filedir):
+ lint.read(os.path.join(lintdir, 'external.lint'))
+ result = lint.roll(os.path.join(filedir, 'foobar.py'))
+ assert len(result) == 0
if __name__ == '__main__':
- main()
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozlint/test/test_types.py b/python/mozlint/test/test_types.py
index 23285654799a..9df668c766e1 100644
--- a/python/mozlint/test/test_types.py
+++ b/python/mozlint/test/test_types.py
@@ -3,76 +3,48 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
-from unittest import TestCase
+import sys
-from mozunit import main
+import pytest
-from mozlint import LintRoller
from mozlint.result import ResultContainer
-here = os.path.abspath(os.path.dirname(__file__))
+@pytest.fixture
+def path(filedir):
+ def _path(name):
+ return os.path.join(filedir, name)
+ return _path
-class TestLinterTypes(TestCase):
+@pytest.fixture(params=['string.lint', 'regex.lint', 'external.lint'])
+def linter(lintdir, request):
+ return os.path.join(lintdir, request.param)
- def __init__(self, *args, **kwargs):
- TestCase.__init__(self, *args, **kwargs)
- self.lintdir = os.path.join(here, 'linters')
- self.filedir = os.path.join(here, 'files')
- self.files = [os.path.join(self.filedir, f) for f in os.listdir(self.filedir)]
+def test_linter_types(lint, linter, files, path):
+ lint.read(linter)
+ result = lint.roll(files)
+ assert isinstance(result, dict)
+ assert path('foobar.js') in result
+ assert path('no_foobar.js') not in result
- def setUp(self):
- TestCase.setUp(self)
- self.lint = LintRoller(root=here)
+ result = result[path('foobar.js')][0]
+ assert isinstance(result, ResultContainer)
- def path(self, name):
- return os.path.join(self.filedir, name)
+ name = os.path.basename(linter).split('.')[0]
+ assert result.linter.lower().startswith(name)
- def test_string_linter(self):
- self.lint.read(os.path.join(self.lintdir, 'string.lint'))
- result = self.lint.roll(self.files)
- self.assertIsInstance(result, dict)
- self.assertIn(self.path('foobar.js'), result.keys())
- self.assertNotIn(self.path('no_foobar.js'), result.keys())
+def test_no_filter(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, 'explicit_path.lint'))
+ result = lint.roll(files)
+ assert len(result) == 0
- result = result[self.path('foobar.js')][0]
- self.assertIsInstance(result, ResultContainer)
- self.assertEqual(result.linter, 'StringLinter')
-
- def test_regex_linter(self):
- self.lint.read(os.path.join(self.lintdir, 'regex.lint'))
- result = self.lint.roll(self.files)
- self.assertIsInstance(result, dict)
- self.assertIn(self.path('foobar.js'), result.keys())
- self.assertNotIn(self.path('no_foobar.js'), result.keys())
-
- result = result[self.path('foobar.js')][0]
- self.assertIsInstance(result, ResultContainer)
- self.assertEqual(result.linter, 'RegexLinter')
-
- def test_external_linter(self):
- self.lint.read(os.path.join(self.lintdir, 'external.lint'))
- result = self.lint.roll(self.files)
- self.assertIsInstance(result, dict)
- self.assertIn(self.path('foobar.js'), result.keys())
- self.assertNotIn(self.path('no_foobar.js'), result.keys())
-
- result = result[self.path('foobar.js')][0]
- self.assertIsInstance(result, ResultContainer)
- self.assertEqual(result.linter, 'ExternalLinter')
-
- def test_no_filter(self):
- self.lint.read(os.path.join(self.lintdir, 'explicit_path.lint'))
- result = self.lint.roll(self.files)
- self.assertEqual(len(result), 0)
-
- self.lint.lintargs['use_filters'] = False
- result = self.lint.roll(self.files)
- self.assertEqual(len(result), 2)
+ lint.lintargs['use_filters'] = False
+ result = lint.roll(files)
+ assert len(result) == 2
if __name__ == '__main__':
- main()
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/taskcluster/ci/desktop-test/tests.yml b/taskcluster/ci/desktop-test/tests.yml
index 8364218a2adb..fb7693110a01 100644
--- a/taskcluster/ci/desktop-test/tests.yml
+++ b/taskcluster/ci/desktop-test/tests.yml
@@ -394,6 +394,7 @@ web-platform-tests:
max-run-time: 7200
instance-size: xlarge
docker-image: {"in-tree": "desktop1604-test"}
+ checkout: true
mozharness:
script: mozharness/scripts/web_platform_tests.py
no-read-buildbot-config: true
@@ -410,6 +411,7 @@ web-platform-tests-reftests:
max-run-time: 5400
instance-size: xlarge
docker-image: {"in-tree": "desktop1604-test"}
+ checkout: true
mozharness:
script: mozharness/scripts/web_platform_tests.py
no-read-buildbot-config: true
diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py
index 018e5a5944a0..a7caf55ceecf 100644
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -57,6 +57,38 @@ def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
})
+def docker_worker_support_vcs_checkout(config, job, taskdesc):
+ """Update a job/task with parameters to enable a VCS checkout.
+
+ The configuration is intended for tasks using "run-task" and its
+ VCS checkout behavior.
+ """
+ level = config.params['level']
+
+ taskdesc['worker'].setdefault('caches', []).extend([
+ {
+ 'type': 'persistent',
+ 'name': 'level-%s-hg-shared' % level,
+ 'mount-point': '/home/worker/hg-shared',
+ }, {
+ 'type': 'persistent',
+ 'name': 'level-%s-checkouts' % level,
+ 'mount-point': '/home/worker/checkouts',
+ }
+ ])
+
+ taskdesc['worker'].setdefault('env', {}).update({
+ 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ })
+
+ # Give task access to hgfingerprint secret so it can pin the certificate
+ # for hg.mozilla.org.
+ taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
+ taskdesc['worker']['taskcluster-proxy'] = True
+
+
def docker_worker_setup_secrets(config, job, taskdesc):
"""Set up access to secrets via taskcluster-proxy. The value of
run['secrets'] should be a boolean or a list of secret names that
diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py
index 98abf1769764..296fe43eee27 100644
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -10,6 +10,9 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_support_vcs_checkout,
+)
from voluptuous import Schema, Required, Any
run_task_schema = Schema({
@@ -32,20 +35,11 @@ run_task_schema = Schema({
@run_job_using("docker-worker", "run-task", schema=run_task_schema)
def docker_worker_run_task(config, job, taskdesc):
run = job['run']
- checkout = run['checkout']
worker = taskdesc['worker'] = copy.deepcopy(job['worker'])
- if checkout:
- worker['caches'] = [{
- 'type': 'persistent',
- 'name': 'level-{}-hg-shared'.format(config.params['level']),
- 'mount-point': "/home/worker/hg-shared",
- }, {
- 'type': 'persistent',
- 'name': 'level-{}-checkouts'.format(config.params['level']),
- 'mount-point': "/home/worker/checkouts",
- }]
+ if run['checkout']:
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
if run.get('cache-dotcache') and int(config.params['level']) > 1:
worker['caches'].append({
@@ -54,23 +48,11 @@ def docker_worker_run_task(config, job, taskdesc):
'mount-point': '/home/worker/.cache',
})
- env = worker['env'] = {}
- env.update({
- 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
- 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
- 'GECKO_HEAD_REV': config.params['head_rev'],
- })
-
- # give the task access to the hgfingerprint secret
- if checkout:
- taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
- worker['taskcluster-proxy'] = True
-
run_command = run['command']
if isinstance(run_command, basestring):
run_command = ['bash', '-cx', run_command]
command = ['/home/worker/bin/run-task']
- if checkout:
+ if run['checkout']:
command.append('--vcs-checkout=/home/worker/checkouts/gecko')
command.append('--')
command.extend(run_command)
diff --git a/taskcluster/taskgraph/transforms/tests/make_task_description.py b/taskcluster/taskgraph/transforms/tests/make_task_description.py
index 7cf6f68e59c0..c86f53b0dcbb 100644
--- a/taskcluster/taskgraph/transforms/tests/make_task_description.py
+++ b/taskcluster/taskgraph/transforms/tests/make_task_description.py
@@ -20,6 +20,9 @@ for example - use `all_tests.py` instead.
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.job.common import (
+ docker_worker_support_vcs_checkout,
+)
import logging
@@ -157,8 +160,6 @@ def docker_worker_setup(config, test, taskdesc):
}]
env = worker['env'] = {
- 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
- 'GECKO_HEAD_REV': config.params['head_rev'],
'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
'MOZHARNESS_SCRIPT': mozharness['script'],
'MOZHARNESS_URL': {'task-reference': mozharness_url},
@@ -192,9 +193,16 @@ def docker_worker_setup(config, test, taskdesc):
'/home/worker/bin/run-task',
# The workspace cache/volume is default owned by root:root.
'--chown', '/home/worker/workspace',
+ ]
+
+ if test['checkout']:
+ docker_worker_support_vcs_checkout(config, test, taskdesc)
+ command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
+
+ command.extend([
'--',
'/home/worker/bin/test-linux.sh',
- ]
+ ])
if mozharness.get('no-read-buildbot-config'):
command.append("--no-read-buildbot-config")
diff --git a/taskcluster/taskgraph/transforms/tests/test_description.py b/taskcluster/taskgraph/transforms/tests/test_description.py
index f122a37144c8..6919568b11b3 100644
--- a/taskcluster/taskgraph/transforms/tests/test_description.py
+++ b/taskcluster/taskgraph/transforms/tests/test_description.py
@@ -119,6 +119,9 @@ test_description_schema = Schema({
{'by-test-platform': {basestring: int}},
),
+ # Whether to perform a gecko checkout.
+ Required('checkout', default=False): bool,
+
# What to run
Required('mozharness'): Any({
# the mozharness script used to run this task
diff --git a/testing/docker/desktop-test/Dockerfile b/testing/docker/desktop-test/Dockerfile
index 060145910fd7..caa697a39d1b 100644
--- a/testing/docker/desktop-test/Dockerfile
+++ b/testing/docker/desktop-test/Dockerfile
@@ -27,6 +27,8 @@ ADD topsrcdir/taskcluster/scripts/tester/test-ubuntu1204.sh /home/worker/bin/tes
# This will create a host mounted filesystem when the cache is stripped
# on Try. This cancels out some of the performance losses of aufs. See
# bug 1291940.
+VOLUME /home/worker/hg-shared
+VOLUME /home/worker/checkouts
VOLUME /home/worker/workspace
# Set variable normally configured at login, by the shells parent process, these
diff --git a/testing/docker/desktop1604-test/Dockerfile b/testing/docker/desktop1604-test/Dockerfile
index ccfea93f248d..7ebc88f73cb3 100644
--- a/testing/docker/desktop1604-test/Dockerfile
+++ b/testing/docker/desktop1604-test/Dockerfile
@@ -27,6 +27,8 @@ ADD topsrcdir/taskcluster/scripts/tester/test-ubuntu1604.sh /home/worker/bin/tes
# This will create a host mounted filesystem when the cache is stripped
# on Try. This cancels out some of the performance losses of aufs. See
# bug 1291940.
+VOLUME /home/worker/hg-shared
+VOLUME /home/worker/checkouts
VOLUME /home/worker/workspace
# Set variable normally configured at login, by the shells parent process, these
diff --git a/testing/marionette/harness/marionette/tests/unit/test_navigation.py b/testing/marionette/harness/marionette/tests/unit/test_navigation.py
index 319137824521..f6ca0f00706a 100644
--- a/testing/marionette/harness/marionette/tests/unit/test_navigation.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_navigation.py
@@ -3,11 +3,12 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
+import unittest
import urllib
from marionette import MarionetteTestCase
from marionette_driver.errors import MarionetteException, TimeoutException
-from marionette_driver.by import By
+from marionette_driver import By, Wait
def inline(doc):
@@ -17,19 +18,18 @@ def inline(doc):
class TestNavigate(MarionetteTestCase):
def setUp(self):
MarionetteTestCase.setUp(self)
- self.marionette.execute_script("window.location.href = 'about:blank'")
- self.assertEqual("about:blank", self.location_href)
+ self.marionette.navigate("about:")
self.test_doc = self.marionette.absolute_url("test.html")
self.iframe_doc = self.marionette.absolute_url("test_iframe.html")
def test_set_location_through_execute_script(self):
self.marionette.execute_script("window.location.href = '%s'" % self.test_doc)
- self.assertEqual(self.test_doc, self.location_href)
+ Wait(self.marionette).until(lambda _: self.test_doc == self.location_href)
self.assertEqual("Marionette Test", self.marionette.title)
def test_navigate(self):
self.marionette.navigate(self.test_doc)
- self.assertNotEqual("about:blank", self.location_href)
+ self.assertNotEqual("about:", self.location_href)
self.assertEqual("Marionette Test", self.marionette.title)
def test_navigate_chrome_error(self):
@@ -123,6 +123,7 @@ class TestNavigate(MarionetteTestCase):
self.assertEqual("complete", state)
self.assertTrue(self.marionette.find_element(By.ID, "mozLink"))
+ @unittest.skip("Bug 1302707 - No timeout exception raised.")
def test_should_throw_a_timeoutexception_when_loading_page(self):
try:
self.marionette.timeouts("page load", 0)
diff --git a/testing/marionette/harness/marionette/tests/unit/test_window_management.py b/testing/marionette/harness/marionette/tests/unit/test_window_management.py
index 614450a22553..e67d55759a12 100644
--- a/testing/marionette/harness/marionette/tests/unit/test_window_management.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_window_management.py
@@ -4,7 +4,7 @@
import time
from marionette import MarionetteTestCase
-from marionette_driver.by import By
+from marionette_driver import By, Wait
class TestSwitchWindow(MarionetteTestCase):
@@ -41,7 +41,9 @@ if (win != null)
self.assertEqual(self.marionette.current_window_handle, orig_win)
now_available = self.marionette.window_handles
#assert we can find the new window
- self.assertEqual(len(now_available), len(orig_available) + 1)
+ Wait(self.marionette).until(
+ lambda _: len(now_available) == len(orig_available) + 1,
+ message="The new window has not been opened.")
#assert that our window is there
self.assertTrue(orig_win in now_available)
new_win = None
diff --git a/testing/web-platform/meta/url/url-constructor.html.ini b/testing/web-platform/meta/url/url-constructor.html.ini
index dcfe308c5acd..7f99f601db16 100644
--- a/testing/web-platform/meta/url/url-constructor.html.ini
+++ b/testing/web-platform/meta/url/url-constructor.html.ini
@@ -237,9 +237,6 @@
[Parsing: against ]
expected: FAIL
- [Parsing: against ]
- expected: FAIL
-
[Parsing: against ]
expected: FAIL
diff --git a/testing/web-platform/meta/url/url-setters.html.ini b/testing/web-platform/meta/url/url-setters.html.ini
index 0f574ea33bdc..b0f131bd5bcc 100644
--- a/testing/web-platform/meta/url/url-setters.html.ini
+++ b/testing/web-platform/meta/url/url-setters.html.ini
@@ -75,9 +75,6 @@
[Setting .host = 'example.com:8080stuff2' Anything other than ASCII digit stops the port parser in a setter but is not an error]
expected: FAIL
- [Setting .host = 'example.com:65536' Port numbers are 16 bit integers, overflowing is an error. Hostname is still set, though.]
- expected: FAIL
-
[Setting .hostname = '' The empty host is OK for non-special schemes]
expected: FAIL
@@ -105,9 +102,6 @@
[Setting .port = '8080stuff2' Anything other than ASCII digit stops the port parser in a setter but is not an error]
expected: FAIL
- [Setting .port = '65536' Port numbers are 16 bit integers, overflowing is an error]
- expected: FAIL
-
[Setting .pathname = '/var/log/../run/bar.socket']
expected: FAIL
diff --git a/toolkit/components/search/tests/xpcshell/test_location_error.js b/toolkit/components/search/tests/xpcshell/test_location_error.js
index 5ff66c20b0a6..049189351a42 100644
--- a/toolkit/components/search/tests/xpcshell/test_location_error.js
+++ b/toolkit/components/search/tests/xpcshell/test_location_error.js
@@ -4,8 +4,8 @@
function run_test() {
installTestEngine();
- // using a port > 2^32 causes an error to be reported.
- let url = "http://localhost:111111111";
+ // We use an invalid port that parses but won't open
+ let url = "http://localhost:0";
Services.prefs.setCharPref("browser.search.geoip.url", url);
Services.search.init(() => {
diff --git a/toolkit/library/rust/Cargo.lock b/toolkit/library/rust/Cargo.lock
index e845facd99cf..8d2c1a219700 100644
--- a/toolkit/library/rust/Cargo.lock
+++ b/toolkit/library/rust/Cargo.lock
@@ -2,7 +2,7 @@
name = "gkrust"
version = "0.1.0"
dependencies = [
- "mp4parse_capi 0.5.0",
+ "mp4parse_capi 0.5.1",
]
[[package]]
@@ -12,16 +12,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "mp4parse"
-version = "0.5.0"
+version = "0.5.1"
dependencies = [
"byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "mp4parse_capi"
-version = "0.5.0"
+version = "0.5.1"
dependencies = [
- "mp4parse 0.5.0",
+ "mp4parse 0.5.1",
]
[metadata]
diff --git a/toolkit/mozapps/extensions/internal/E10SAddonsRollout.jsm b/toolkit/mozapps/extensions/internal/E10SAddonsRollout.jsm
index 5e9f4f6f0c2d..f8e3ba051db4 100644
--- a/toolkit/mozapps/extensions/internal/E10SAddonsRollout.jsm
+++ b/toolkit/mozapps/extensions/internal/E10SAddonsRollout.jsm
@@ -118,6 +118,9 @@ const RolloutPolicy = {
"49limiteda": { addons: set49PaneOnly, webextensions: true },
"49limitedb": { addons: set49PaneOnly, webextensions: false },
+ // Beta testing on 50
+ "50allmpc": { addons: [], webextensions: true, mpc: true },
+
"xpcshell-test": { addons: [ADDONS.test1, ADDONS.test2], webextensions: false },
};
@@ -143,6 +146,10 @@ Object.defineProperty(this, "isAddonPartOfE10SRollout", {
return true;
}
+ if (policy.mpc && aAddon.multiprocessCompatible) {
+ return true;
+ }
+
for (let rolloutAddon of policy.addons) {
if (aAddon.id == rolloutAddon.id &&
Services.vc.compare(aAddon.version, rolloutAddon.minVersion) >= 0) {
diff --git a/toolkit/xre/nsAppRunner.cpp b/toolkit/xre/nsAppRunner.cpp
index bb5c4cd51831..15ebf9efd28c 100644
--- a/toolkit/xre/nsAppRunner.cpp
+++ b/toolkit/xre/nsAppRunner.cpp
@@ -12,12 +12,13 @@
#include "mozilla/ChaosMode.h"
#include "mozilla/IOInterposer.h"
#include "mozilla/Likely.h"
+#include "mozilla/MemoryChecking.h"
#include "mozilla/Poison.h"
#include "mozilla/Preferences.h"
+#include "mozilla/ScopeExit.h"
#include "mozilla/Services.h"
#include "mozilla/ServoBindings.h"
#include "mozilla/Telemetry.h"
-#include "mozilla/MemoryChecking.h"
#include "nsAppRunner.h"
#include "mozilla/AppData.h"
@@ -378,6 +379,25 @@ strimatch(const char* lowerstr, const char* mixedstr)
return true;
}
+static bool gIsExpectedExit = false;
+
+void MozExpectedExit() {
+ gIsExpectedExit = true;
+}
+
+/**
+ * Runs atexit() to catch unexpected exit from 3rd party libraries like the
+ * Intel graphics driver calling exit in an error condition. When they
+ * call exit() to report an error we won't shutdown correctly and wont catch
+ * the issue with our crash reporter.
+ */
+static void UnexpectedExit() {
+ if (!gIsExpectedExit) {
+ gIsExpectedExit = true; // Don't risk re-entrancy issues when crashing.
+ MOZ_CRASH("Exit called by third party code.");
+ }
+}
+
/**
* Output a string to the user. This method is really only meant to be used to
* output last-ditch error messages designed for developers NOT END USERS.
@@ -3018,6 +3038,11 @@ XREMain::XRE_mainInit(bool* aExitFlag)
return 1;
*aExitFlag = false;
+ atexit(UnexpectedExit);
+ auto expectedShutdown = mozilla::MakeScopeExit([&] {
+ MozExpectedExit();
+ });
+
StartupTimeline::Record(StartupTimeline::MAIN);
if (PR_GetEnv("MOZ_CHAOSMODE")) {
diff --git a/toolkit/xre/nsAppRunner.h b/toolkit/xre/nsAppRunner.h
index 4f201766e9fb..b8d955319042 100644
--- a/toolkit/xre/nsAppRunner.h
+++ b/toolkit/xre/nsAppRunner.h
@@ -97,6 +97,13 @@ WriteConsoleLog();
void
OverrideDefaultLocaleIfNeeded();
+/**
+ * Allow exit() calls to complete. This should be done from a proper Gecko
+ * shutdown path. Otherwise we aim to catch improper shutdowns.
+ */
+void
+MozExpectedExit();
+
#ifdef XP_WIN
void
UseParentConsole();
diff --git a/toolkit/xre/nsNativeAppSupportUnix.cpp b/toolkit/xre/nsNativeAppSupportUnix.cpp
index 75652e38f5db..a04a79953cb0 100644
--- a/toolkit/xre/nsNativeAppSupportUnix.cpp
+++ b/toolkit/xre/nsNativeAppSupportUnix.cpp
@@ -472,6 +472,7 @@ nsNativeAppSupportUnix::Start(bool *aRetVal)
MIN_GTK_MINOR_VERSION);
gtk_dialog_run(GTK_DIALOG(versionErrDialog));
gtk_widget_destroy(versionErrDialog);
+ MozExpectedExit();
exit(0);
}
#endif
diff --git a/widget/android/nsAppShell.cpp b/widget/android/nsAppShell.cpp
index 5feb6963317a..dbbed314c013 100644
--- a/widget/android/nsAppShell.cpp
+++ b/widget/android/nsAppShell.cpp
@@ -104,7 +104,9 @@ StaticRefPtr sWakeLockListener;
class GeckoThreadSupport final
: public java::GeckoThread::Natives
{
- static uint32_t sPauseCount;
+ // When this number goes above 0, the app is paused. When less than or
+ // equal to zero, the app is resumed.
+ static int32_t sPauseCount;
public:
static void SpeculativeConnect(jni::String::Param aUriStr)
@@ -142,8 +144,10 @@ public:
{
MOZ_ASSERT(NS_IsMainThread());
- if ((++sPauseCount) > 1) {
- // Already paused.
+ sPauseCount++;
+ // If sPauseCount is now 1, we just crossed the threshold from "resumed"
+ // "paused". so we should notify observers and so on.
+ if (sPauseCount != 1) {
return;
}
@@ -174,8 +178,10 @@ public:
{
MOZ_ASSERT(NS_IsMainThread());
- if (!sPauseCount || (--sPauseCount) > 0) {
- // Still paused.
+ sPauseCount--;
+ // If sPauseCount is now 0, we just crossed the threshold from "paused"
+ // to "resumed", so we should notify observers and so on.
+ if (sPauseCount != 0) {
return;
}
@@ -216,7 +222,7 @@ public:
}
};
-uint32_t GeckoThreadSupport::sPauseCount;
+int32_t GeckoThreadSupport::sPauseCount;
class GeckoAppShellSupport final