Merge inbound to mozilla-central. a=merge

commit 9bf1554768 by Bogdan Tara, 2018-04-19 01:36:28 +03:00
88 changed files with 1410 additions and 2346 deletions

View File

@ -4,9 +4,17 @@ var EXPORTED_SYMBOLS = [
"SiteDataTestUtils",
];
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
ChromeUtils.import("resource://gre/modules/Services.jsm");
ChromeUtils.import("resource://testing-common/ContentTask.jsm");
ChromeUtils.import("resource://testing-common/BrowserTestUtils.jsm");
const {Sanitizer} = ChromeUtils.import("resource:///modules/Sanitizer.jsm", {});
XPCOMUtils.defineLazyServiceGetter(this, "swm",
"@mozilla.org/serviceworkers/manager;1",
"nsIServiceWorkerManager");
/**
* This module assists with tasks around testing functionality that shows
* or clears site data.
@ -20,10 +28,12 @@ var SiteDataTestUtils = {
* Adds a new entry to a dummy indexedDB database for the specified origin.
*
* @param {String} origin - the origin of the site to add test data for
* @param {String} key [optional] - the entry key
* @param {String} value [optional] - the entry value
*
* @returns a Promise that resolves when the data was added successfully.
*/
addToIndexedDB(origin) {
addToIndexedDB(origin, key = "foo", value = "bar") {
return new Promise(resolve => {
let uri = Services.io.newURI(origin);
let principal = Services.scriptSecurityManager.createCodebasePrincipal(uri, {});
@ -37,7 +47,7 @@ var SiteDataTestUtils = {
let tx = db.transaction("TestStore", "readwrite");
let store = tx.objectStore("TestStore");
tx.oncomplete = resolve;
store.put({ id: performance.now().toString(), description: "IndexedDB Test"});
store.put({ id: key, description: value});
};
});
},
@ -46,15 +56,60 @@ var SiteDataTestUtils = {
* Adds a new cookie for the specified origin, with the specified contents.
* The cookie will be valid for one day.
*
* @param {String} name - the cookie name
* @param {String} value - the cookie value
* @param {String} origin - the origin of the site to add test data for
* @param {String} name [optional] - the cookie name
* @param {String} value [optional] - the cookie value
*/
addToCookies(origin, name, value) {
addToCookies(origin, name = "foo", value = "bar") {
let uri = Services.io.newURI(origin);
Services.cookies.add(uri.host, uri.pathQueryRef, name, value,
false, false, false, Date.now() + 24000 * 60 * 60);
},
/**
* Adds a new serviceworker with the specified path. Note that this
* method will open a new tab at the domain of the SW path to that effect.
*
* @param {String} path - the path to the service worker to add.
*
* @returns a Promise that resolves when the service worker was registered
*/
addServiceWorker(path) {
let uri = Services.io.newURI(path);
// Register a dummy ServiceWorker.
return BrowserTestUtils.withNewTab(uri.prePath, async function(browser) {
return ContentTask.spawn(browser, {path}, async ({path: p}) => {
let r = await content.navigator.serviceWorker.register(p);
return new Promise(resolve => {
let worker = r.installing;
worker.addEventListener("statechange", () => {
if (worker.state === "installed") {
resolve();
}
});
});
});
});
},
/**
* Checks whether the specified origin has registered ServiceWorkers.
*
* @param {String} origin - the origin of the site to check
*
* @returns {Boolean} whether or not the site has ServiceWorkers.
*/
hasServiceWorkers(origin) {
let serviceWorkers = swm.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
if (sw.principal.origin == origin) {
return true;
}
}
return false;
},
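Taken together, these helpers give browser-chrome tests a single place to seed and inspect site data. A minimal usage sketch, assuming the usual mochitest globals (add_task, ok, getRootDirectory, gTestPath); the test body and origin are hypothetical, while the helper signatures are the ones defined above:

    const {SiteDataTestUtils} =
      ChromeUtils.import("resource://testing-common/SiteDataTestUtils.jsm", {});

    add_task(async function example_seedSiteData() {
      const origin = "https://example.com";

      // Seed storage; key/value and name/value default to "foo"/"bar" when omitted.
      await SiteDataTestUtils.addToIndexedDB(origin, "id1", "hello");
      SiteDataTestUtils.addToCookies(origin, "session", "abc123");

      // Register a dummy service worker; this briefly opens a tab at the origin.
      let swURL = getRootDirectory(gTestPath)
        .replace("chrome://mochitests/content", origin) + "dummy.js";
      await SiteDataTestUtils.addServiceWorker(swURL);

      ok(SiteDataTestUtils.hasServiceWorkers(origin),
         "the origin now has a registered service worker");
    });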
/**
* Gets the current quota usage for the specified origin.
*

View File

@ -1,10 +1,12 @@
[DEFAULT]
support-files=
head.js
dummy.js
dummy_page.html
[browser_purgehistory_clears_sh.js]
[browser_sanitize-formhistory.js]
[browser_sanitize-offlineData.js]
[browser_sanitize-passwordDisabledHosts.js]
[browser_sanitize-sitepermissions.js]
[browser_sanitize-timespans.js]

View File

@ -0,0 +1,165 @@
// Bug 380852 - Delete permission manager entries in Clear Recent History
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
const {Sanitizer} = ChromeUtils.import("resource:///modules/Sanitizer.jsm", {});
const {SiteDataTestUtils} = ChromeUtils.import("resource://testing-common/SiteDataTestUtils.jsm", {});
const {PromiseTestUtils} = ChromeUtils.import("resource://testing-common/PromiseTestUtils.jsm", {});
XPCOMUtils.defineLazyServiceGetter(this, "sas",
"@mozilla.org/storage/activity-service;1",
"nsIStorageActivityService");
XPCOMUtils.defineLazyServiceGetter(this, "swm",
"@mozilla.org/serviceworkers/manager;1",
"nsIServiceWorkerManager");
const oneHour = 3600000000;
const fiveHours = oneHour * 5;
const itemsToClear = [ "cookies", "offlineApps" ];
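A note on units, since these constants look unusual: the storage activity service (and the sanitizer range) work in microseconds since the epoch (PRTime), not milliseconds, which is why the test later converts Date.now() with * 1000. A quick sanity check:

    // 1 hour = 60 min * 60 s * 1,000,000 µs
    const ONE_HOUR_USEC = 60 * 60 * 1000 * 1000; // 3600000000, matches oneHour above
    const NOW_USEC = Date.now() * 1000;          // milliseconds -> microseconds
    // "origins active in the last hour" is then roughly:
    //   sas.getActiveOrigins(NOW_USEC - ONE_HOUR_USEC, NOW_USEC);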
function hasIndexedDB(origin) {
return new Promise(resolve => {
let hasData = true;
let uri = Services.io.newURI(origin);
let principal = Services.scriptSecurityManager.createCodebasePrincipal(uri, {});
let request = indexedDB.openForPrincipal(principal, "TestDatabase", 1);
request.onupgradeneeded = function(e) {
hasData = false;
};
request.onsuccess = function(e) {
resolve(hasData);
};
});
}
function waitForUnregister(host) {
return new Promise(resolve => {
let listener = {
onUnregister: registration => {
if (registration.principal.URI.host != host) {
return;
}
let swm = Cc["@mozilla.org/serviceworkers/manager;1"]
.getService(Ci.nsIServiceWorkerManager);
swm.removeListener(listener);
resolve(registration);
}
};
swm.addListener(listener);
});
}
async function createData(host) {
let origin = "https://" + host;
let dummySWURL = getRootDirectory(gTestPath).replace("chrome://mochitests/content", origin) + "dummy.js";
await SiteDataTestUtils.addToIndexedDB(origin);
await SiteDataTestUtils.addServiceWorker(dummySWURL);
}
function moveOriginInTime(principals, endDate, host) {
for (let i = 0; i < principals.length; ++i) {
let principal = principals.queryElementAt(i, Ci.nsIPrincipal);
if (principal.URI.host == host) {
sas.moveOriginInTime(principal, endDate - fiveHours);
return true;
}
}
return false;
}
add_task(async function testWithRange() {
// We have intermittent occurrences of NS_ERROR_ABORT being
// thrown at closing database instances when using Sanitizer.sanitize().
// This does not seem to impact cleanup, since our tests run fine anyway.
PromiseTestUtils.whitelistRejectionsGlobally(/NS_ERROR_ABORT/);
await SpecialPowers.pushPrefEnv({"set": [
["dom.serviceWorkers.enabled", true],
["dom.serviceWorkers.exemptFromPerDomainMax", true],
["dom.serviceWorkers.testing.enabled", true]
]});
// The service may have picked up activity from prior tests in this run.
// Clear it.
sas.testOnlyReset();
let endDate = Date.now() * 1000;
let principals = sas.getActiveOrigins(endDate - oneHour, endDate);
is(principals.length, 0, "starting from clear activity state");
info("sanitize: " + itemsToClear.join(", "));
await Sanitizer.sanitize(itemsToClear, {ignoreTimespan: false});
await createData("example.org");
await createData("example.com");
endDate = Date.now() * 1000;
principals = sas.getActiveOrigins(endDate - oneHour, endDate);
ok(!!principals, "We have an active origin.");
ok(principals.length >= 2, "We have an active origin.");
let found = 0;
for (let i = 0; i < principals.length; ++i) {
let principal = principals.queryElementAt(i, Ci.nsIPrincipal);
if (principal.URI.host == "example.org" ||
principal.URI.host == "example.com") {
found++;
}
}
is(found, 2, "Our origins are active.");
ok(await hasIndexedDB("https://example.org"),
"We have indexedDB data for example.org");
ok(SiteDataTestUtils.hasServiceWorkers("https://example.org"),
"We have serviceWorker data for example.org");
ok(await hasIndexedDB("https://example.com"),
"We have indexedDB data for example.com");
ok(SiteDataTestUtils.hasServiceWorkers("https://example.com"),
"We have serviceWorker data for example.com");
// Let's move example.com in the past.
ok(moveOriginInTime(principals, endDate, "example.com"), "Operation completed!");
let p = waitForUnregister("example.org");
// Clear it
info("sanitize: " + itemsToClear.join(", "));
await Sanitizer.sanitize(itemsToClear, {ignoreTimespan: false});
await p;
ok(!(await hasIndexedDB("https://example.org")),
"We don't have indexedDB data for example.org");
ok(!SiteDataTestUtils.hasServiceWorkers("https://example.org"),
"We don't have serviceWorker data for example.org");
ok(await hasIndexedDB("https://example.com"),
"We still have indexedDB data for example.com");
ok(SiteDataTestUtils.hasServiceWorkers("https://example.com"),
"We still have serviceWorker data for example.com");
// We have to move example.com in the past because how we check IDB triggers
// a storage activity.
ok(moveOriginInTime(principals, endDate, "example.com"), "Operation completed!");
// Let's call the clean up again.
info("sanitize again to ensure clearing doesn't expand the activity scope");
await Sanitizer.sanitize(itemsToClear, {ignoreTimespan: false});
ok(await hasIndexedDB("https://example.com"),
"We still have indexedDB data for example.com");
ok(SiteDataTestUtils.hasServiceWorkers("https://example.com"),
"We still have serviceWorker data for example.com");
ok(!(await hasIndexedDB("https://example.org")),
"We don't have indexedDB data for example.org");
ok(!SiteDataTestUtils.hasServiceWorkers("https://example.org"),
"We don't have serviceWorker data for example.org");
sas.testOnlyReset();
// Clean up.
await Sanitizer.sanitize(itemsToClear);
});
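The range logic above comes down to timestamp arithmetic: moveOriginInTime pushes an origin's recorded activity five hours into the past, so a sanitize restricted to recent activity (ignoreTimespan: false uses the configured sanitize timespan, which defaults to the last hour) no longer considers it. Illustrative state just before the second sanitize, all values in microseconds:

    // endDate                     = Date.now() * 1000
    // clear range                ~= [endDate - oneHour, endDate]
    // example.org last activity ~= endDate              -> inside the range  -> cleared
    // example.com last activity  = endDate - fiveHours  -> outside the range -> kept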

View File

@ -812,7 +812,7 @@ if (Services.prefs.getBoolPref("privacy.panicButton.enabled")) {
let group = doc.getElementById("PanelUI-panic-timeSpan");
BrowserUITelemetry.countPanicEvent(group.selectedItem.id);
let itemsToClear = [
"cookies", "history", "openWindows", "formdata", "sessions", "cache", "downloads"
"cookies", "history", "openWindows", "formdata", "sessions", "cache", "downloads", "offlineApps"
];
let newWindowPrivateState = PrivateBrowsingUtils.isWindowPrivate(doc.defaultView) ?
"private" : "non-private";

View File

@ -13,6 +13,8 @@ ChromeUtils.defineModuleGetter(this, "Services",
"resource://gre/modules/Services.jsm");
ChromeUtils.defineModuleGetter(this, "setTimeout",
"resource://gre/modules/Timer.jsm");
ChromeUtils.defineModuleGetter(this, "ServiceWorkerCleanUp",
"resource://gre/modules/ServiceWorkerCleanUp.jsm");
XPCOMUtils.defineLazyServiceGetter(this, "serviceWorkerManager",
"@mozilla.org/serviceworkers/manager;1",
@ -145,22 +147,6 @@ const clearPluginData = options => {
return Sanitizer.items.pluginData.clear(makeRange(options));
};
const clearServiceWorkers = async function() {
// Clearing service workers does not support timestamps.
let yieldCounter = 0;
// Iterate through the service workers and remove them.
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
let host = sw.principal.URI.host;
serviceWorkerManager.removeAndPropagate(host);
if (++yieldCounter % YIELD_PERIOD == 0) {
await new Promise(resolve => setTimeout(resolve, 0)); // Don't block the main thread too long.
}
}
};
const doRemoval = (options, dataToRemove, extension) => {
if (options.originTypes &&
(options.originTypes.protectedWeb || options.originTypes.extension)) {
@ -201,7 +187,7 @@ const doRemoval = (options, dataToRemove, extension) => {
removalPromises.push(clearPluginData(options));
break;
case "serviceWorkers":
removalPromises.push(clearServiceWorkers());
removalPromises.push(ServiceWorkerCleanUp.removeAll());
break;
default:
invalidDataTypes.push(dataType);
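This browsingData cleanup code (and the Sanitizer and SiteDataManager changes further down) now delegates service worker removal to the shared ServiceWorkerCleanUp.jsm module instead of each consumer iterating registrations itself. The module's implementation is not part of this diff; the following is only a rough sketch of the shape implied by its call sites (removeAll, removeFromHost, removeFromPrincipal), modeled on the clearServiceWorkers loop removed above:

    // Hypothetical sketch, not the actual resource://gre/modules/ServiceWorkerCleanUp.jsm.
    ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
    XPCOMUtils.defineLazyServiceGetter(this, "swm",
                                       "@mozilla.org/serviceworkers/manager;1",
                                       "nsIServiceWorkerManager");

    var ServiceWorkerCleanUp = {
      removeFromHost(host) {
        return this._remove(sw => sw.principal.URI.host == host);
      },
      removeFromPrincipal(principal) {
        return this._remove(sw => sw.principal.equals(principal));
      },
      removeAll() {
        return this._remove(() => true);
      },
      _remove(shouldRemove) {
        // Unregister every matching registration and propagate the removal
        // to content processes, ignoring individual failures.
        let promises = [];
        let regs = swm.getAllRegistrations();
        for (let i = 0; i < regs.length; i++) {
          let sw = regs.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
          if (shouldRemove(sw)) {
            promises.push(new Promise(resolve => {
              swm.propagateUnregister(sw.principal, {
                unregisterSucceeded: resolve,
                unregisterFailed: resolve, // cleanup does not care about failures
                QueryInterface: XPCOMUtils.generateQI([Ci.nsIServiceWorkerUnregisterCallback]),
              }, sw.scope);
            }));
          }
        }
        return Promise.all(promises);
      },
    };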

View File

@ -17,12 +17,6 @@ DIRS += [
'webcompat-reporter'
]
# Only include the following system add-ons if building Aurora or Nightly
if not CONFIG['RELEASE_OR_BETA']:
DIRS += [
'presentation',
]
# Only include mortar system add-ons if we locally enable it
if CONFIG['MOZ_MORTAR']:
DIRS += [

View File

@ -1,86 +0,0 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
const Cm = Components.manager;
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
const PRESENTATION_DEVICE_PROMPT_PATH =
"chrome://presentation/content/PresentationDevicePrompt.jsm";
function log(aMsg) {
// dump("@ Presentation: " + aMsg + "\n");
}
function install(aData, aReason) {
}
function uninstall(aData, aReason) {
}
function startup(aData, aReason) {
log("startup");
Presentation.init();
}
function shutdown(aData, aReason) {
log("shutdown");
Presentation.uninit();
}
// Register/unregister a constructor as a factory.
function Factory() {}
Factory.prototype = {
register(targetConstructor) {
let proto = targetConstructor.prototype;
this._classID = proto.classID;
let factory = XPCOMUtils._getFactory(targetConstructor);
this._factory = factory;
let registrar = Cm.QueryInterface(Ci.nsIComponentRegistrar);
registrar.registerFactory(proto.classID, proto.classDescription,
proto.contractID, factory);
},
unregister() {
let registrar = Cm.QueryInterface(Ci.nsIComponentRegistrar);
registrar.unregisterFactory(this._classID, this._factory);
this._factory = null;
this._classID = null;
},
};
var Presentation = {
// PUBLIC APIs
init() {
log("init");
// Register PresentationDevicePrompt into a XPCOM component.
let {PresentationDevicePrompt} = ChromeUtils.import(PRESENTATION_DEVICE_PROMPT_PATH, {});
this.PresentationDevicePrompt = PresentationDevicePrompt;
this._register();
},
uninit() {
log("uninit");
// Unregister PresentationDevicePrompt XPCOM component.
this._unregister();
delete this.PresentationDevicePrompt;
Cu.unload(PRESENTATION_DEVICE_PROMPT_PATH);
},
// PRIVATE APIs
_register() {
log("_register");
this._devicePromptFactory = new Factory();
this._devicePromptFactory.register(this.PresentationDevicePrompt);
},
_unregister() {
log("_unregister");
this._devicePromptFactory.unregister();
delete this._devicePromptFactory;
},
};

View File

@ -1,251 +0,0 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* This is the implementation of nsIPresentationDevicePrompt XPCOM.
* It will be registered into a XPCOM component by Presentation.jsm.
*
* This component will prompt a device selection UI for users to choose which
* devices they want to connect, when PresentationRequest is started.
*/
"use strict";
var EXPORTED_SYMBOLS = ["PresentationDevicePrompt"];
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
ChromeUtils.import("resource://gre/modules/Services.jsm");
// An string bundle for localization.
XPCOMUtils.defineLazyGetter(this, "Strings", function() {
return Services.strings.createBundle("chrome://presentation/locale/presentation.properties");
});
// To generate a device selection prompt.
ChromeUtils.defineModuleGetter(this, "PermissionUI",
"resource:///modules/PermissionUI.jsm");
/*
* Utils
*/
function log(aMsg) {
// Prefix is useful to grep log.
// dump("@ PresentationDevicePrompt: " + aMsg + "\n");
}
function GetString(aName) {
return Strings.GetStringFromName(aName);
}
/*
* Device Selection UI
*/
const kNotificationId = "presentation-device-selection";
const kNotificationPopupIcon = "chrome://presentation-shared/skin/link.svg";
// There is no dependancy between kNotificationId and kNotificationAnchorId,
// so it's NOT necessary to name them by same prefix
// (e.g., presentation-device-selection-notification-icon).
const kNotificationAnchorId = "presentation-device-notification-icon";
const kNotificationAnchorIcon = "chrome://presentation-shared/skin/link.svg";
// This will insert our own popupnotification content with the device list
// into the displayed popupnotification element.
// PopupNotifications.jsm will automatically generate a popupnotification
// element whose id is <notification id> + "-notification" and show it,
// so kPopupNotificationId must be kNotificationId + "-notification".
// Read more detail in PopupNotifications._refreshPanel.
const kPopupNotificationId = kNotificationId + "-notification";
function PresentationPermissionPrompt(aRequest, aDevices) {
this.request = aRequest;
this._isResponded = false;
this._devices = aDevices;
}
PresentationPermissionPrompt.prototype = {
__proto__: PermissionUI.PermissionPromptForRequestPrototype,
// PUBLIC APIs
get browser() {
return this.request.chromeEventHandler;
},
get principal() {
return this.request.principal;
},
get popupOptions() {
return {
removeOnDismissal: true,
popupIconURL: kNotificationPopupIcon, // Icon shown on prompt content
eventCallback: (aTopic, aNewBrowser) => {
log("eventCallback: " + aTopic);
let handler = {
// dismissed: () => { // Won't be fired if removeOnDismissal is true.
// log("Dismissed by user. Cancel the request.");
// },
removed: () => {
log("Prompt is removed.");
if (!this._isResponded) {
log("Dismissed by user. Cancel the request.");
this.request.cancel(Cr.NS_ERROR_NOT_AVAILABLE);
}
},
showing: () => {
log("Prompt is showing.");
// We cannot insert the device list at "showing" phase because
// the popupnotification content whose id is kPopupNotificationId
// is not generated yet.
},
shown: () => {
log("Prompt is shown.");
// Insert device selection list into popupnotification element.
this._createPopupContent();
},
};
// Call the handler for Notification events.
handler[aTopic]();
},
};
},
get notificationID() {
return kNotificationId;
},
get anchorID() {
let chromeDoc = this.browser.ownerDocument;
let anchor = chromeDoc.getElementById(kNotificationAnchorId);
if (!anchor) {
let notificationPopupBox =
chromeDoc.getElementById("notification-popup-box");
// Icon shown on URL bar
let notificationIcon = chromeDoc.createElement("image");
notificationIcon.id = kNotificationAnchorId;
notificationIcon.setAttribute("src", kNotificationAnchorIcon);
notificationIcon.classList.add("notification-anchor-icon");
notificationIcon.setAttribute("role", "button");
notificationIcon.setAttribute("tooltiptext",
GetString("presentation.urlbar.tooltiptext"));
notificationIcon.style.setProperty("-moz-context-properties", "fill");
notificationIcon.style.fill = "currentcolor";
notificationIcon.style.opacity = "0.4";
notificationPopupBox.appendChild(notificationIcon);
}
return kNotificationAnchorId;
},
get message() {
return GetString("presentation.message", this._domainName);
},
get promptActions() {
return [{
label: GetString("presentation.deviceprompt.select.label"),
accessKey: GetString("presentation.deviceprompt.select.accessKey"),
callback: () => {
log("Select");
this._isResponded = true;
if (!this._listbox || !this._devices.length) {
log("No device can be selected!");
this.request.cancel(Cr.NS_ERROR_NOT_AVAILABLE);
return;
}
let device = this._devices[this._listbox.selectedIndex];
this.request.select(device);
log("device: " + device.name + "(" + device.id + ") is selected!");
},
}, {
label: GetString("presentation.deviceprompt.cancel.label"),
accessKey: GetString("presentation.deviceprompt.cancel.accessKey"),
callback: () => {
log("Cancel selection.");
this._isResponded = true;
this.request.cancel(Cr.NS_ERROR_NOT_AVAILABLE);
},
dismiss: true,
}];
},
// PRIVATE APIs
get _domainName() {
if (this.principal.URI instanceof Ci.nsIFileURL) {
return this.principal.URI.pathQueryRef.split("/")[1];
}
return this.principal.URI.hostPort;
},
_createPopupContent() {
log("_createPopupContent");
if (!this._devices.length) {
log("No available devices can be listed!");
return;
}
let chromeDoc = this.browser.ownerDocument;
let popupnotification = chromeDoc.getElementById(kPopupNotificationId);
if (!popupnotification) {
log("No available popupnotification element to be inserted!");
return;
}
let popupnotificationcontent =
chromeDoc.createElement("popupnotificationcontent");
this._listbox = chromeDoc.createElement("richlistbox");
this._listbox.setAttribute("flex", "1");
this._devices.forEach((device) => {
let listitem = chromeDoc.createElement("richlistitem");
let label = chromeDoc.createElement("label");
label.setAttribute("value", device.name);
listitem.appendChild(label);
this._listbox.appendChild(listitem);
});
popupnotificationcontent.appendChild(this._listbox);
popupnotification.appendChild(popupnotificationcontent);
},
};
/*
* nsIPresentationDevicePrompt
*/
// For XPCOM registration
const PRESENTATIONDEVICEPROMPT_CONTRACTID = "@mozilla.org/presentation-device/prompt;1";
const PRESENTATIONDEVICEPROMPT_CID = Components.ID("{388bd149-c919-4a43-b646-d7ec57877689}");
function PresentationDevicePrompt() {}
PresentationDevicePrompt.prototype = {
// properties required for XPCOM registration:
classID: PRESENTATIONDEVICEPROMPT_CID,
classDescription: "Presentation API Device Prompt",
contractID: PRESENTATIONDEVICEPROMPT_CONTRACTID,
QueryInterface: XPCOMUtils.generateQI([Ci.nsIPresentationDevicePrompt]),
// This will be fired when window.PresentationRequest(URL).start() is called.
promptDeviceSelection(aRequest) {
log("promptDeviceSelection");
// Cancel request if no available device.
let devices = this._loadDevices();
if (!devices.length) {
log("No available device.");
aRequest.cancel(Cr.NS_ERROR_NOT_AVAILABLE);
return;
}
// Show the prompt to users.
let promptUI = new PresentationPermissionPrompt(aRequest, devices);
promptUI.prompt();
},
_loadDevices() {
let deviceManager = Cc["@mozilla.org/presentation-device/manager;1"]
.getService(Ci.nsIPresentationDeviceManager);
let devices = deviceManager.getAvailableDevices().QueryInterface(Ci.nsIArray);
let list = [];
for (let i = 0; i < devices.length; i++) {
let device = devices.queryElementAt(i, Ci.nsIPresentationDevice);
list.push(device);
}
return list;
},
};

View File

@ -1,33 +0,0 @@
<?xml version="1.0"?>
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
#filter substitution
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
<Description about="urn:mozilla:install-manifest">
<em:id>presentation@mozilla.org</em:id>
<em:version>1.0.0</em:version>
<em:type>2</em:type>
<em:bootstrap>true</em:bootstrap>
<em:multiprocessCompatible>true</em:multiprocessCompatible>
<!-- Target Application this theme can install into,
with minimum and maximum supported versions. -->
<em:targetApplication>
<Description>
<!-- Firefox GUID -->
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
<em:minVersion>@MOZ_APP_VERSION@</em:minVersion>
<em:maxVersion>@MOZ_APP_MAXVERSION@</em:maxVersion>
</Description>
</em:targetApplication>
<!-- Front End MetaData -->
<em:name>Presentation</em:name>
<em:description>Discover nearby devices in the browser</em:description>
</Description>
</RDF>

View File

@ -1,5 +0,0 @@
[features/presentation@mozilla.org] chrome.jar:
% content presentation %content/
content/ (content/*)
% skin presentation-shared classic/1.0 %skin/shared/
skin/ (skin/*)

View File

@ -1,6 +0,0 @@
presentation.message=Select one device to send the content.
presentation.urlbar.tooltiptext=View the device-selection request
presentation.deviceprompt.select.label=Send
presentation.deviceprompt.select.accessKey=S
presentation.deviceprompt.cancel.label=Cancel
presentation.deviceprompt.cancel.accessKey=C

View File

@ -1,8 +0,0 @@
#filter substitution
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
[features/presentation@mozilla.org] @AB_CD@.jar:
% locale presentation @AB_CD@ %locale/@AB_CD@/
locale/@AB_CD@/ (en-US/*)

View File

@ -1,7 +0,0 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
JAR_MANIFESTS += ['jar.mn']

View File

@ -1,17 +0,0 @@
with Files("**"):
BUG_COMPONENT = ("Firefox", "Device Permissions")
DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION']
DEFINES['MOZ_APP_MAXVERSION'] = CONFIG['MOZ_APP_MAXVERSION']
DIRS += ['locale']
FINAL_TARGET_FILES.features['presentation@mozilla.org'] += [
'bootstrap.js'
]
FINAL_TARGET_PP_FILES.features['presentation@mozilla.org'] += [
'install.rdf.in'
]
JAR_MANIFESTS += ['jar.mn']

View File

@ -1,11 +0,0 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg"
width="32px" height="32px" viewBox="0 0 32 32">
<path fill="context-fill gray" d="M15.246,17.992c0,1.064,0.862,1.926,1.926,1.926c1.064,0,1.926-0.862,1.926-1.926c0-1.064-0.862-1.926-1.926-1.926C16.108,16.066,15.246,16.929,15.246,17.992z"/>
<path fill="context-fill gray" d="M17.135,13.964c1.948,0,3.566,1.397,3.917,3.244l1.779-1.35c-0.849-2.276-3.023-3.904-5.595-3.904c-2.669,0-4.904,1.758-5.677,4.171l1.647,1.205C13.509,15.424,15.145,13.964,17.135,13.964z"/>
<path fill="context-fill gray" d="M16.968,9.895c3.293,0,6.117,1.995,7.338,4.841l1.603-1.216c-1.628-3.3-4.994-5.591-8.923-5.591c-3.942,0-7.319,2.305-8.94,5.624l1.594,1.166C10.865,11.883,13.682,9.895,16.968,9.895z"/>
<path fill="context-fill gray" d="M29.962,2.034H4.011c-1.102,0-1.996,0.894-1.996,1.996v10.008h4.042V5.937c0-1.102,0.894-1.996,1.996-1.996h17.973c1.103,0,1.996,0.894,1.996,1.996v16.075c0,1.103-0.894,1.996-1.996,1.996H12.089v1.918h17.873c1.102,0,1.996-0.894,1.996-1.996V4.03C31.958,2.927,31.064,2.034,29.962,2.034z"/>
<path fill="context-fill gray" fill-rule="evenodd"
d="M8.756,16.025H1.833c-0.737,0-1.729,0.598-1.729,1.335v11.271c0,0.738,0.596,1.335,1.334,1.335h7.318c0.738,0,1.336-0.597,1.336-1.335V17.36C10.092,16.623,9.494,16.025,8.756,16.025z
M8.113,27.472c0,0.299-0.243,0.541-0.541,0.541H2.599c-0.298,0-0.541-0.242-0.541-0.541v-8.949c0-0.299,0.243-0.541,0.541-0.541h4.973c0.298,0,0.541,0.242,0.541,0.541V27.472z"/>
</svg>


View File

@ -75,9 +75,6 @@ ifneq (,$(wildcard ../extensions/formautofill/locales))
endif
@$(MAKE) -C ../extensions/onboarding/locales AB_CD=$* XPI_NAME=locale-$*
@$(MAKE) -C ../extensions/pocket/locale AB_CD=$* XPI_NAME=locale-$*
ifndef RELEASE_OR_BETA
@$(MAKE) -C ../extensions/presentation/locale AB_CD=$* XPI_NAME=locale-$*
endif
@$(MAKE) -C ../extensions/webcompat-reporter/locales AB_CD=$* XPI_NAME=locale-$*
@$(MAKE) -C ../../devtools/client/locales AB_CD=$* XPI_NAME=locale-$* XPI_ROOT_APPID='$(XPI_ROOT_APPID)'
@$(MAKE) -C ../../devtools/startup/locales AB_CD=$* XPI_NAME=locale-$* XPI_ROOT_APPID='$(XPI_ROOT_APPID)'
@ -97,9 +94,6 @@ ifneq (,$(wildcard ../extensions/formautofill/locales))
@$(MAKE) -C ../extensions/formautofill/locales chrome AB_CD=$*
endif
@$(MAKE) -C ../extensions/pocket/locale chrome AB_CD=$*
ifndef RELEASE_OR_BETA
@$(MAKE) -C ../extensions/presentation/locale chrome AB_CD=$*
endif
@$(MAKE) -C ../../devtools/client/locales chrome AB_CD=$*
@$(MAKE) -C ../../devtools/startup/locales chrome AB_CD=$*
@$(MAKE) chrome AB_CD=$*

View File

@ -16,11 +16,13 @@ XPCOMUtils.defineLazyModuleGetters(this, {
DownloadsCommon: "resource:///modules/DownloadsCommon.jsm",
TelemetryStopwatch: "resource://gre/modules/TelemetryStopwatch.jsm",
setTimeout: "resource://gre/modules/Timer.jsm",
ServiceWorkerCleanUp: "resource://gre/modules/ServiceWorkerCleanUp.jsm",
OfflineAppCacheHelper: "resource:///modules/offlineAppCache.jsm",
});
XPCOMUtils.defineLazyServiceGetter(this, "serviceWorkerManager",
"@mozilla.org/serviceworkers/manager;1",
"nsIServiceWorkerManager");
XPCOMUtils.defineLazyServiceGetter(this, "sas",
"@mozilla.org/storage/activity-service;1",
"nsIStorageActivityService");
XPCOMUtils.defineLazyServiceGetter(this, "quotaManagerService",
"@mozilla.org/dom/quota-manager-service;1",
"nsIQuotaManagerService");
@ -364,37 +366,48 @@ var Sanitizer = {
offlineApps: {
async clear(range) {
// AppCache
ChromeUtils.import("resource:///modules/offlineAppCache.jsm");
// This doesn't wait for the cleanup to be complete.
// AppCache: this doesn't wait for the cleanup to be complete.
OfflineAppCacheHelper.clear();
if (range) {
let principals = sas.getActiveOrigins(range[0], range[1])
.QueryInterface(Ci.nsIArray);
let promises = [];
for (let i = 0; i < principals.length; ++i) {
let principal = principals.queryElementAt(i, Ci.nsIPrincipal);
if (principal.URI.scheme != "http" &&
principal.URI.scheme != "https" &&
principal.URI.scheme != "file") {
continue;
}
// LocalStorage
Services.obs.notifyObservers(null, "browser:purge-domain-data", principal.URI.host);
// ServiceWorkers
await ServiceWorkerCleanUp.removeFromPrincipal(principal);
// QuotaManager
promises.push(new Promise(r => {
let req = quotaManagerService.clearStoragesForPrincipal(principal, null, false);
req.callback = () => { r(); };
}));
}
return Promise.all(promises);
}
// LocalStorage
Services.obs.notifyObservers(null, "extension:purge-localStorage");
// ServiceWorkers
let promises = [];
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
promises.push(new Promise(resolve => {
let unregisterCallback = {
unregisterSucceeded: () => { resolve(true); },
// We don't care about failures.
unregisterFailed: () => { resolve(true); },
QueryInterface: XPCOMUtils.generateQI(
[Ci.nsIServiceWorkerUnregisterCallback])
};
serviceWorkerManager.propagateUnregister(sw.principal, unregisterCallback, sw.scope);
}));
}
await Promise.all(promises);
await ServiceWorkerCleanUp.removeAll();
// QuotaManager
promises = [];
let promises = [];
await new Promise(resolve => {
quotaManagerService.getUsage(request => {
if (request.resultCode != Cr.NS_OK) {

View File

@ -5,9 +5,8 @@ ChromeUtils.import("resource://gre/modules/Services.jsm");
ChromeUtils.defineModuleGetter(this, "OfflineAppCacheHelper",
"resource:///modules/offlineAppCache.jsm");
XPCOMUtils.defineLazyServiceGetter(this, "serviceWorkerManager",
"@mozilla.org/serviceworkers/manager;1",
"nsIServiceWorkerManager");
ChromeUtils.defineModuleGetter(this, "ServiceWorkerCleanUp",
"resource://gre/modules/ServiceWorkerCleanUp.jsm");
var EXPORTED_SYMBOLS = [
"SiteDataManager"
@ -30,7 +29,8 @@ var SiteDataManager = {
// A Map of sites and their disk usage according to Quota Manager and appcache
// Key is host (group sites based on host across scheme, port, origin attributes).
// Value is one object holding:
// - principals: instances of nsIPrincipal.
// - principals: instances of nsIPrincipal (only when the site has
// quota storage or AppCache).
// - persisted: the persistent-storage status.
// - quotaUsage: the usage of indexedDB and localStorage.
// - appCacheList: an array of app cache; instances of nsIApplicationCache
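Spelled out, the per-host value described by the comment above looks roughly like this (field names from the comment, values purely illustrative):

    // One entry of SiteDataManager._sites, keyed by host:
    // this._sites.get("example.com") ->
    let exampleSite = {
      principals: [],       // nsIPrincipal instances; only present when the host
                            // actually has quota storage or AppCache
      persisted: false,     // persistent-storage permission status
      quotaUsage: 1048576,  // indexedDB/localStorage usage, in bytes
      appCacheList: [],     // nsIApplicationCache instances
      cookies: [],          // cookie entries collected for the host (see _removeCookies)
    };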
@ -315,30 +315,6 @@ var SiteDataManager = {
site.cookies = [];
},
_unregisterServiceWorker(serviceWorker) {
return new Promise(resolve => {
let unregisterCallback = {
unregisterSucceeded: resolve,
unregisterFailed: resolve, // We don't care about failures.
QueryInterface: XPCOMUtils.generateQI([Ci.nsIServiceWorkerUnregisterCallback])
};
serviceWorkerManager.propagateUnregister(serviceWorker.principal, unregisterCallback, serviceWorker.scope);
});
},
_removeServiceWorkersForSites(sites) {
let promises = [];
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
// Sites are grouped and removed by host so we unregister service workers by the same host as well
if (sites.has(sw.principal.URI.host)) {
promises.push(this._unregisterServiceWorker(sw));
}
}
return Promise.all(promises);
},
/**
* Removes all site data for the specified list of hosts.
*
@ -352,29 +328,24 @@ var SiteDataManager = {
this._updateAppCache();
let unknownHost = "";
let targetSites = new Map();
let promises = [];
for (let host of hosts) {
let site = this._sites.get(host);
if (site) {
// Clear localstorage.
Services.obs.notifyObservers(null, "browser:purge-domain-data", host);
this._removePermission(site);
this._removeAppCache(site);
this._removeCookies(site);
Services.obs.notifyObservers(null, "browser:purge-domain-data", host);
targetSites.set(host, site);
promises.push(ServiceWorkerCleanUp.removeFromHost(host));
promises.push(this._removeQuotaUsage(site));
} else {
unknownHost = host;
break;
}
}
if (targetSites.size > 0) {
await this._removeServiceWorkersForSites(targetSites);
let promises = [];
for (let [, site] of targetSites) {
promises.push(this._removeQuotaUsage(site));
}
await Promise.all(promises);
}
await Promise.all(promises);
if (unknownHost) {
throw `SiteDataManager: removing unknown site of ${unknownHost}`;
@ -438,6 +409,7 @@ var SiteDataManager = {
* Clears all site data, which currently means
* - Cookies
* - AppCache
* - LocalStorage
* - ServiceWorkers
* - Quota Managed Storage
* - persistent-storage permissions
@ -445,17 +417,13 @@ var SiteDataManager = {
* @returns a Promise that resolves with the cache size on disk in bytes
*/
async removeSiteData() {
// LocalStorage
Services.obs.notifyObservers(null, "extension:purge-localStorage");
Services.cookies.removeAll();
OfflineAppCacheHelper.clear();
// Iterate through the service workers and remove them.
let promises = [];
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
promises.push(this._unregisterServiceWorker(sw));
}
await Promise.all(promises);
await ServiceWorkerCleanUp.removeAll();
// Refresh sites using quota usage again.
// This is for the case:
@ -469,7 +437,7 @@ var SiteDataManager = {
// see https://bugzilla.mozilla.org/show_bug.cgi?id=1312361#c9
this._sites.clear();
await this._getQuotaUsage();
promises = [];
let promises = [];
for (let site of this._sites.values()) {
this._removePermission(site);
promises.push(this._removeQuotaUsage(site));

View File

@ -36,7 +36,7 @@ function* spawnTest() {
output: [/The following/, /Mochitest/, /Special Powers/],
notinoutput: [
/Web Compat/, /Pocket/, /Multi-process staged rollout/,
/Form Autofill/, /Application Update Service Helper/, /Presentation/,
/Form Autofill/, /Application Update Service Helper/,
/Shield Recipe Client/]
}
},

View File

@ -1,9 +1,9 @@
This is the debugger.html project output.
See https://github.com/devtools-html/debugger.html
Version 39.0
Version 40.0
Comparison: https://github.com/devtools-html/debugger.html/compare/release-38...release-39
Comparison: https://github.com/devtools-html/debugger.html/compare/release-39...release-40
Packages:
- babel-plugin-transform-es2015-modules-commonjs @6.26.0

View File

@ -915,6 +915,7 @@ html[dir="rtl"] .tree-node img.arrow {
flex-direction: row;
justify-content: center;
align-items: center;
padding: 0;
}
.close-btn .close {
@ -936,14 +937,23 @@ html[dir="rtl"] .tree-node img.arrow {
background-color: var(--theme-selection-background);
}
.close-btn.big {
width: 16px;
height: 16px;
.close-btn:focus {
background-color: var(--theme-selection-background);
}
.close-btn.big .close {
width: 9px;
height: 9px;
.close-btn:focus img.close {
background-color: white;
}
.close-btn.big {
width: 16px;
height: 18px;
}
img.close::before {
width: 100%;
height: 100%;
padding: 6px;
}
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this

View File

@ -3235,7 +3235,7 @@ function createPendingBreakpoint(bp) {
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.replaceOriginalVariableName = exports.getPausePoints = exports.getFramework = exports.mapOriginalExpression = exports.hasSyntaxError = exports.clearSources = exports.setSource = exports.hasSource = exports.isInvalidPauseLocation = exports.getNextStep = exports.clearASTs = exports.clearScopes = exports.clearSymbols = exports.findOutOfScopeLocations = exports.getScopes = exports.getSymbols = exports.getClosestExpression = exports.stopParserWorker = exports.startParserWorker = undefined;
exports.replaceOriginalVariableName = exports.getPausePoints = exports.getFramework = exports.mapOriginalExpression = exports.hasSyntaxError = exports.clearSources = exports.setSource = exports.hasSource = exports.getNextStep = exports.clearASTs = exports.clearScopes = exports.clearSymbols = exports.findOutOfScopeLocations = exports.getScopes = exports.getSymbols = exports.getClosestExpression = exports.stopParserWorker = exports.startParserWorker = undefined;
var _devtoolsUtils = __webpack_require__(1363);
@ -3255,7 +3255,6 @@ const clearSymbols = exports.clearSymbols = dispatcher.task("clearSymbols");
const clearScopes = exports.clearScopes = dispatcher.task("clearScopes");
const clearASTs = exports.clearASTs = dispatcher.task("clearASTs");
const getNextStep = exports.getNextStep = dispatcher.task("getNextStep");
const isInvalidPauseLocation = exports.isInvalidPauseLocation = dispatcher.task("isInvalidPauseLocation");
const hasSource = exports.hasSource = dispatcher.task("hasSource");
const setSource = exports.setSource = dispatcher.task("setSource");
const clearSources = exports.clearSources = dispatcher.task("clearSources");
@ -4194,7 +4193,7 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
function CloseButton({ handleClick, buttonClass, tooltip }) {
return _react2.default.createElement(
"div",
"button",
{
className: buttonClass ? `close-btn ${buttonClass}` : "close-btn",
onClick: handleClick,
@ -4885,6 +4884,7 @@ exports.isSymbolsLoading = isSymbolsLoading;
exports.isEmptyLineInSource = isEmptyLineInSource;
exports.getEmptyLines = getEmptyLines;
exports.getPausePoints = getPausePoints;
exports.getPausePoint = getPausePoint;
exports.hasPausePoints = hasPausePoints;
exports.getOutOfScopeLocations = getOutOfScopeLocations;
exports.getPreview = getPreview;
@ -5038,6 +5038,21 @@ function getPausePoints(state, sourceId) {
return state.ast.pausePoints.get(sourceId);
}
function getPausePoint(state, location) {
if (!location) {
return;
}
const { column, line, sourceId } = location;
const pausePoints = getPausePoints(state, sourceId);
if (!pausePoints) {
return;
}
const linePoints = pausePoints[line];
return linePoints && linePoints[column];
}
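getPausePoint assumes the per-source pause point table is keyed first by line and then by column (convertToList, further down, walks it the same way). A sketch of the assumed shape; the step flag is the one shouldStep() reads, and any other fields on a point are not visible in this diff:

    // Pause points for one source, as consumed by getPausePoint(state, location):
    const pausePointsForSource = {
      5: {                    // line
        4: { step: true },    // column -> point; a valid place to stop
        12: { step: false },  // mapped location, but stepping should skip it
      },
    };
    // getPausePoint(state, { sourceId, line: 5, column: 4 }) -> { step: true }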
function hasPausePoints(state, sourceId) {
const pausePoints = getPausePoints(state, sourceId);
return !!pausePoints;
@ -15406,7 +15421,6 @@ const svg = {
file: __webpack_require__(354),
folder: __webpack_require__(355),
globe: __webpack_require__(356),
help: __webpack_require__(3633),
home: __webpack_require__(3604),
javascript: __webpack_require__(2251),
jquery: __webpack_require__(999),
@ -23485,7 +23499,7 @@ class Breakpoints extends _react.Component {
const groupedBreakpoints = (0, _lodash.groupBy)((0, _lodash.sortBy)([...breakpoints.valueSeq()], bp => bp.location.line), bp => getBreakpointFilename(bp.source));
return [...Object.keys(groupedBreakpoints).map(filename => {
return [...Object.keys(groupedBreakpoints).sort().map(filename => {
return [_react2.default.createElement(
"div",
{ className: "breakpoint-heading", title: filename, key: filename },
@ -24454,12 +24468,16 @@ class Accordion extends _react.Component {
{ className: item.className, key: i },
_react2.default.createElement(
"div",
{ className: "_header", onClick: () => this.handleHeaderClick(i) },
{
className: "_header",
tabIndex: "0",
onClick: () => this.handleHeaderClick(i)
},
_react2.default.createElement(_Svg2.default, { name: "arrow", className: opened ? "expanded" : "" }),
item.header,
item.buttons ? _react2.default.createElement(
"div",
{ className: "header-buttons" },
{ className: "header-buttons", tabIndex: "-1" },
item.buttons
) : null
),
@ -27181,7 +27199,7 @@ function paused(pauseInfo) {
// Ensure that the original file has loaded if there is one.
await dispatch((0, _loadSourceText.loadSourceText)(source));
if (await (0, _pause.shouldStep)(mappedFrame, getState(), sourceMaps)) {
if ((0, _pause.shouldStep)(mappedFrame, getState(), sourceMaps)) {
dispatch((0, _commands.command)("stepOver"));
return;
}
@ -34877,10 +34895,10 @@ async function findGeneratedBindingFromPosition(sourceMaps, client, source, pos,
function filterApplicableBindings(bindings, mapped) {
// Any binding overlapping a part of the mapping range.
return (0, _filtering.filterSortedArray)(bindings, binding => {
if (positionCmp(binding.loc.end, mapped.start) < 0) {
if (positionCmp(binding.loc.end, mapped.start) <= 0) {
return -1;
}
if (positionCmp(binding.loc.start, mapped.end) > 0) {
if (positionCmp(binding.loc.start, mapped.end) >= 0) {
return 1;
}
@ -34943,14 +34961,48 @@ async function findGeneratedImportReference(type, generatedAstBindings, mapped)
async function findGeneratedImportDeclaration(generatedAstBindings, mapped) {
const bindings = filterApplicableBindings(generatedAstBindings, mapped);
return bindings.reduce(async (acc, val) => {
const accVal = await acc;
if (accVal) {
return accVal;
let result = null;
for (const binding of bindings) {
if (binding.loc.type !== "decl") {
continue;
}
return await mapImportDeclarationToDescriptor(val, mapped);
}, null);
const namespaceDesc = await binding.desc();
if (isPrimitiveValue(namespaceDesc)) {
continue;
}
if (!isObjectValue(namespaceDesc)) {
// We want to handle cases like
//
// var _mod = require(...);
// var _mod2 = _interopRequire(_mod);
//
// where "_mod" is optimized out because it is only referenced once. To
// allow that, we track the optimized-out value as a possible result,
// but allow later binding values to overwrite the result.
result = {
name: binding.name,
desc: namespaceDesc,
expression: binding.name
};
continue;
}
const desc = await readDescriptorProperty(namespaceDesc, mapped.importName);
const expression = `${binding.name}.${mapped.importName}`;
if (desc) {
result = {
name: binding.name,
desc,
expression
};
break;
}
}
return result;
}
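The descriptor built here is what lets the debugger preview original import bindings on top of Babel 6 output: the original import is resolved to a generated namespace variable, and the preview expression is assembled from the "${binding.name}.${mapped.importName}" template above. A rough illustration (the generated names follow the _modN pattern seen in the Babel preview test expectations later in this commit):

    // Original source:
    //   import aDefault from "./mod";
    //   import { aNamed } from "./mod3";
    //   console.log(aDefault, aNamed);
    //
    // Babel 6 output, roughly:
    //   var _mod = _interopRequireDefault(require("./mod"));
    //   var _mod3 = require("./mod3");
    //   console.log(_mod.default, _mod3.aNamed);
    //
    // Hovering "aNamed" in the original view finds the generated binding "_mod3",
    // reads the "aNamed" property off its namespace descriptor, and previews it
    // with the expression "_mod3.aNamed".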
/**
@ -34978,40 +35030,6 @@ async function mapBindingReferenceToDescriptor(binding, mapped, isFirst) {
return null;
}
/**
* Given an generated binding, and a range over the generated code, statically
* resolve the module namespace object and attempt to access the imported
* property on the namespace.
*
* This is mostly hard-coded to work for Babel 6's imports.
*/
async function mapImportDeclarationToDescriptor(binding, mapped) {
// When trying to map an actual import declaration binding, we can try
// to map it back to the namespace object in the original code.
if (!mappingContains(mapped, binding.loc)) {
return null;
}
const desc = await readDescriptorProperty((await binding.desc()), mapped.importName,
// If the value was optimized out or otherwise unavailable, we skip it
// entirely because there is a good chance that this means that this
// isn't the right binding. This allows us to catch cases like
//
// var _mod = require(...);
// var _mod2 = _interopRequire(_mod);
//
// where "_mod" is optimized out because it is only referenced once, and
// we want to continue searching to try to find "_mod2".
true);
const expression = `${binding.name}.${mapped.importName}`;
return desc ? {
name: binding.name,
desc,
expression
} : null;
}
/**
* Given a generated binding, and a range over the generated code, statically
* evaluate accessed properties within the mapped range to resolve the actual
@ -35086,16 +35104,22 @@ async function mapImportReferenceToDescriptor(binding, mapped) {
} : null;
}
async function readDescriptorProperty(desc, property, requireValidObject = false) {
function isPrimitiveValue(desc) {
return desc && (!desc.value || typeof desc.value !== "object");
}
function isObjectValue(desc) {
return desc && !isPrimitiveValue(desc) && desc.value.type === "object" &&
// Note: The check for `.type` might already cover the optimizedOut case
// but not 100% sure, so just being cautious.
!desc.value.optimizedOut;
}
async function readDescriptorProperty(desc, property) {
if (!desc) {
return null;
}
if (typeof desc.value !== "object" || !desc.value) {
if (requireValidObject) {
return null;
}
// If accessing a property on a primitive type, just return 'undefined'
// as the value.
return {
@ -35105,13 +35129,7 @@ async function readDescriptorProperty(desc, property, requireValidObject = false
};
}
// Note: The check for `.type` might already cover the optimizedOut case
// but not 100% sure, so just being cautious.
if (desc.value.type !== "object" || desc.value.optimizedOut) {
if (requireValidObject) {
return null;
}
if (!isObjectValue(desc)) {
// If we got a non-primitive descriptor but it isn't an object, then
// it's definitely not the namespace and it is probably an error.
return desc;
@ -35152,7 +35170,7 @@ async function getGeneratedLocationRange(pos, source, type, sourceMaps) {
return null;
}
// If the stand and end positions collapse into eachother, it means that
// If the start and end positions collapse into eachother, it means that
// the range in the original content didn't _start_ at the start position.
// Since this likely means that the range doesn't logically apply to this
// binding location, we skip it.
@ -35430,32 +35448,37 @@ var _devtoolsSourceMap = __webpack_require__(1360);
var _selectors = __webpack_require__(3590);
var _parser = __webpack_require__(1365);
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
async function shouldStep(rootFrame, state, sourceMaps) {
if (!rootFrame) {
return false;
function getFrameLocation(source, frame) {
if (!frame) {
return null;
}
return (0, _devtoolsSourceMap.isOriginalId)(source.id) ? frame.location : frame.generatedLocation;
} /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
function shouldStep(rootFrame, state, sourceMaps) {
const selectedSource = (0, _selectors.getSelectedSource)(state);
const previousFrameInfo = (0, _selectors.getPreviousPauseFrameLocation)(state);
let previousFrameLoc;
let currentFrameLoc;
if (selectedSource && (0, _devtoolsSourceMap.isOriginalId)(selectedSource.get("id"))) {
currentFrameLoc = rootFrame.location;
previousFrameLoc = previousFrameInfo && previousFrameInfo.location;
} else {
currentFrameLoc = rootFrame.generatedLocation;
previousFrameLoc = previousFrameInfo && previousFrameInfo.generatedLocation;
if (!rootFrame || !selectedSource) {
return false;
}
return (0, _devtoolsSourceMap.isOriginalId)(currentFrameLoc.sourceId) && (previousFrameLoc && (0, _lodash.isEqual)(previousFrameLoc, currentFrameLoc) || (await (0, _parser.isInvalidPauseLocation)(currentFrameLoc)));
const previousFrameLoc = getFrameLocation(selectedSource, previousFrameInfo);
const frameLoc = getFrameLocation(selectedSource, rootFrame);
const sameLocation = previousFrameLoc && (0, _lodash.isEqual)(previousFrameLoc, frameLoc);
const pausePoint = (0, _selectors.getPausePoint)(state, frameLoc);
const invalidPauseLocation = pausePoint && !pausePoint.step;
// We always want to pause in generated locations
if (!frameLoc || (0, _devtoolsSourceMap.isGeneratedId)(frameLoc.sourceId)) {
return false;
}
return sameLocation || invalidPauseLocation;
}
/***/ }),
@ -38938,8 +38961,8 @@ function insertStrtAt(string, index, newString) {
function convertToList(pausePoints) {
const list = [];
for (let line in pausePoints) {
for (let column in pausePoints[line]) {
for (const line in pausePoints) {
for (const column in pausePoints[line]) {
const point = pausePoints[line][column];
list.push({
location: { line: parseInt(line, 10), column: parseInt(column, 10) },
@ -39188,13 +39211,6 @@ module.exports = "<!-- This Source Code Form is subject to the terms of the Mozi
/***/ }),
/***/ 3633:
/***/ (function(module, exports) {
module.exports = "<!-- This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. --><svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 16 16\"><path fill=\"context-fill\" d=\"M8 1a7 7 0 1 0 7 7 7.008 7.008 0 0 0-7-7zm0 13a6 6 0 1 1 6-6 6.007 6.007 0 0 1-6 6zM8 3.125A2.7 2.7 0 0 0 5.125 6a.875.875 0 0 0 1.75 0c0-1 .6-1.125 1.125-1.125a1.105 1.105 0 0 1 1.13.744.894.894 0 0 1-.53 1.016A2.738 2.738 0 0 0 7.125 9v.337a.875.875 0 0 0 1.75 0v-.37a1.041 1.041 0 0 1 .609-.824A2.637 2.637 0 0 0 10.82 5.16 2.838 2.838 0 0 0 8 3.125zm0 7.625A1.25 1.25 0 1 0 9.25 12 1.25 1.25 0 0 0 8 10.75z\"></path></svg>"
/***/ }),
/***/ 3634:
/***/ (function(module, exports, __webpack_require__) {

View File

@ -2207,8 +2207,6 @@ var _validate = __webpack_require__(1629);
var _frameworks = __webpack_require__(1703);
var _pauseLocation = __webpack_require__(2422);
var _pausePoints = __webpack_require__(3612);
var _mapOriginalExpression = __webpack_require__(3613);
@ -2219,11 +2217,9 @@ var _devtoolsUtils = __webpack_require__(1363);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
const { workerHandler } = _devtoolsUtils.workerUtils;
const { workerHandler } = _devtoolsUtils.workerUtils; /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
self.onmessage = workerHandler({
findOutOfScopeLocations: _findOutOfScopeLocations2.default,
@ -2235,7 +2231,6 @@ self.onmessage = workerHandler({
hasSource: _sources.hasSource,
setSource: _sources.setSource,
clearSources: _sources.clearSources,
isInvalidPauseLocation: _pauseLocation.isInvalidPauseLocation,
getNextStep: _steps.getNextStep,
hasSyntaxError: _validate.hasSyntaxError,
getFramework: _frameworks.getFramework,
@ -19985,76 +19980,6 @@ function stripModuleScope(rootScope) {
/***/ }),
/***/ 2422:
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isInvalidPauseLocation = isInvalidPauseLocation;
var _types = __webpack_require__(2268);
var t = _interopRequireWildcard(_types);
var _ast = __webpack_require__(1375);
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
const STOP = {};
function isInvalidPauseLocation(location) {
const state = {
invalid: false,
location
};
try {
(0, _ast.traverseAst)(location.sourceId, { enter: invalidLocationVisitor }, state);
} catch (e) {
if (e !== STOP) {
throw e;
}
}
return state.invalid;
}
function invalidLocationVisitor(node, ancestors, state) {
const { location } = state;
if (node.loc.end.line < location.line) {
return;
}
if (node.loc.start.line > location.line) {
throw STOP;
}
if (location.line === node.loc.start.line && location.column >= node.loc.start.column && t.isFunction(node) && !t.isArrowFunctionExpression(node) && (location.line < node.body.loc.start.line || location.line === node.body.loc.start.line && location.column <= node.body.loc.start.column)) {
// Disallow pausing _inside_ in function arguments to avoid pausing inside
// of destructuring and other logic.
state.invalid = true;
throw STOP;
}
if (location.line === node.loc.start.line && location.column === node.loc.start.column && t.isBlockStatement(node)) {
// Disallow pausing directly before the opening curly of a block statement.
// Babel occasionally maps statements with unknown original positions to
// this location.
state.invalid = true;
throw STOP;
}
}
/***/ }),
/***/ 248:
/***/ (function(module, exports, __webpack_require__) {
@ -21415,11 +21340,13 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
const isControlFlow = node => t.isForStatement(node) || t.isWhileStatement(node) || t.isIfStatement(node) || t.isSwitchCase(node) || t.isSwitchStatement(node); /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
const isForStatement = node => t.isForStatement(node) || t.isForOfStatement(node); /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */
const isAssignment = node => t.isVariableDeclarator(node) || t.isAssignmentExpression(node);
const isControlFlow = node => isForStatement(node) || t.isWhileStatement(node) || t.isIfStatement(node) || t.isSwitchCase(node) || t.isSwitchStatement(node);
const isAssignment = node => t.isVariableDeclarator(node) || t.isAssignmentExpression(node) || t.isAssignmentPattern(node);
const isImport = node => t.isImport(node) || t.isImportDeclaration(node);
const isReturn = node => t.isReturnStatement(node);
@ -21449,7 +21376,11 @@ function onEnter(node, ancestors, state) {
}
if (isControlFlow(node)) {
addEmptyPoint(state, startLocation);
if (isForStatement(node)) {
addStopPoint(state, startLocation);
} else {
addEmptyPoint(state, startLocation);
}
const test = node.test || node.discriminant;
if (test) {
@ -21458,6 +21389,10 @@ function onEnter(node, ancestors, state) {
return;
}
if (t.isBlockStatement(node)) {
return addEmptyPoint(state, startLocation);
}
if (isReturn(node)) {
// We do not want to pause at the return and the call e.g. return foo()
if (isCall(node.argument)) {
@ -21469,7 +21404,10 @@ function onEnter(node, ancestors, state) {
if (isAssignment(node)) {
// We only want to pause at literal assignments `var a = foo()`
const value = node.right || node.init;
if (!isCall(value)) {
if (isCall(value) || t.isFunction(parentNode)) {
return addEmptyPoint(state, startLocation);
} else {
return addStopPoint(state, startLocation);
}
}

View File

@ -4,36 +4,12 @@
// Tests for preview through Babel's compile output.
requestLongerTimeout(3);
function getCoordsFromPosition(cm, { line, ch }) {
return cm.charCoords({ line: ~~line, ch: ~~ch });
}
async function assertPreviews(dbg, previews) {
for (const { line, column, expression, result, fields } of previews) {
hoverAtPos(dbg, { line, ch: column });
if (fields && result) {
throw new Error("Invalid test fixture");
}
if (fields) {
for (const [field, value] of fields) {
await assertPreviewPopup(dbg, { expression, field, value });
}
} else {
await assertPreviewTextValue(dbg, { expression, text: result });
}
// Move to column 0 after to make sure that the preview created by this
// test does not affect later attempts to hover and preview.
hoverAtPos(dbg, { line: line - 1, ch: 0 });
}
}
async function breakpointPreviews(dbg, fixture, { line, column }, previews) {
const filename = `fixtures/${fixture}/input.js`;
const fnName = fixture.replace(/-([a-z])/g, (s, c) => c.toUpperCase());
log(`Starting ${fixture} tests`);
await invokeWithBreakpoint(dbg, fnName, filename, { line, column }, async () => {
await assertPreviews(dbg, previews);
});
@ -41,12 +17,8 @@ async function breakpointPreviews(dbg, fixture, { line, column }, previews) {
ok(true, `Ran tests for ${fixture} at line ${line} column ${column}`);
}
add_task(async function() {
await pushPref("devtools.debugger.features.map-scopes", true);
const dbg = await initDebugger("doc-babel.html");
await breakpointPreviews(dbg, "for-of", { line: 5, column: 4 }, [
function testForOf(dbg) {
return breakpointPreviews(dbg, "for-of", { line: 5, column: 4 }, [
{
line: 5,
column: 7,
@ -55,7 +27,7 @@ add_task(async function() {
},
{
line: 5,
column: 12,
column: 13,
expression: "x",
result: "1",
},
@ -66,136 +38,150 @@ add_task(async function() {
result: "doThing(arg)",
},
]);
}
await breakpointPreviews(dbg, "shadowed-vars", { line: 18, column: 6 }, [
// These aren't what the user would expect, but we test them anyway since
// they reflect what this actually returns. These shadowed bindings read
// the binding closest to the current frame's scope even though their
// actual value is different.
{
line: 2,
column: 9,
expression: "aVar",
result: '"var3"',
},
{
line: 3,
column: 9,
expression: "_aLet2;",
result: '"let3"',
},
{
line: 4,
column: 11,
expression: "_aConst2;",
result: '"const3"',
},
{
line: 10,
column: 11,
expression: "aVar",
result: '"var3"',
},
{
line: 11,
column: 11,
expression: "_aLet2;",
result: '"let3"',
},
{
line: 12,
column: 13,
expression: "_aConst2;",
result: '"const3"',
},
function testShadowing(dbg) {
return breakpointPreviews(dbg, "shadowed-vars", { line: 18, column: 6 }, [
// These aren't what the user would expect, but we test them anyway since
// they reflect what this actually returns. These shadowed bindings read
// the binding closest to the current frame's scope even though their
// actual value is different.
{
line: 2,
column: 9,
expression: "aVar",
result: '"var3"',
},
{
line: 3,
column: 9,
expression: "_aLet2;",
result: '"let3"',
},
{
line: 4,
column: 11,
expression: "_aConst2;",
result: '"const3"',
},
{
line: 10,
column: 11,
expression: "aVar",
result: '"var3"',
},
{
line: 11,
column: 11,
expression: "_aLet2;",
result: '"let3"',
},
{
line: 12,
column: 13,
expression: "_aConst2;",
result: '"const3"',
},
// These actually result in the values the user would expect.
{
line: 14,
column: 13,
expression: "aVar",
result: '"var3"',
},
{
line: 15,
column: 13,
expression: "_aLet2;",
result: '"let3"',
},
{
line: 16,
column: 13,
expression: "_aConst2;",
result: '"const3"',
},
]);
// These actually result in the values the user would expect.
{
line: 14,
column: 13,
expression: "aVar",
result: '"var3"',
},
{
line: 15,
column: 13,
expression: "_aLet2;",
result: '"let3"',
},
{
line: 16,
column: 13,
expression: "_aConst2;",
result: '"const3"',
},
]);
}
await breakpointPreviews(dbg, "imported-bindings", { line: 20, column: 2 }, [
{
line: 22,
column: 16,
expression: "_mod2.default;",
result: '"a-default"',
},
{
line: 23,
column: 16,
expression: "_mod4.original;",
result: '"an-original"',
},
{
line: 24,
column: 16,
expression: "_mod3.aNamed;",
result: '"a-named"',
},
{
line: 25,
column: 16,
expression: "_mod4.original;",
result: '"an-original"',
},
{
line: 26,
column: 16,
expression: "aNamespace",
fields: [
['aNamed', 'a-named'],
['default', 'a-default'],
],
},
{
line: 31,
column: 20,
expression: "_mod7.default;",
result: '"a-default2"',
},
{
line: 32,
column: 20,
expression: "_mod9.original;",
result: '"an-original2"',
},
{
line: 33,
column: 20,
expression: "_mod8.aNamed2;",
result: '"a-named2"',
},
{
line: 34,
column: 20,
expression: "_mod9.original;",
result: '"an-original2"',
},
{
line: 35,
column: 20,
expression: "aNamespace2",
fields: [
['aNamed', 'a-named2'],
['default', 'a-default2'],
],
},
]);
function testImportedBindings(dbg) {
return breakpointPreviews(dbg, "imported-bindings", { line: 20, column: 2 }, [
{
line: 22,
column: 16,
expression: "_mod2.default;",
result: '"a-default"',
},
{
line: 23,
column: 16,
expression: "_mod4.original;",
result: '"an-original"',
},
{
line: 24,
column: 16,
expression: "_mod3.aNamed;",
result: '"a-named"',
},
{
line: 25,
column: 16,
expression: "_mod4.original;",
result: '"an-original"',
},
{
line: 26,
column: 16,
expression: "aNamespace",
fields: [
['aNamed', 'a-named'],
['default', 'a-default'],
],
},
{
line: 31,
column: 20,
expression: "_mod7.default;",
result: '"a-default2"',
},
{
line: 32,
column: 20,
expression: "_mod9.original;",
result: '"an-original2"',
},
{
line: 33,
column: 20,
expression: "_mod8.aNamed2;",
result: '"a-named2"',
},
{
line: 34,
column: 20,
expression: "_mod9.original;",
result: '"an-original2"',
},
{
line: 35,
column: 20,
expression: "aNamespace2",
fields: [
['aNamed', 'a-named2'],
['default', 'a-default2'],
],
},
]);
}
add_task(async function() {
await pushPref("devtools.debugger.features.map-scopes", true);
const dbg = await initDebugger("doc-babel.html");
await testForOf(dbg);
await testShadowing(dbg);
await testImportedBindings(dbg);
});

View File

@ -103,7 +103,7 @@ add_task(async function() {
{ line: 8, column: 6 },
[
"arrow",
["argArrow", "(optimized away)"],
["argArrow", "(unmapped)"],
"Block",
"arrow()",
"fn",
@ -220,9 +220,13 @@ add_task(async function() {
["val", "(optimized away)"],
"Module",
// This value is currently unmapped because import declarations don't map
// very well and ones at the end of the file map especially badly.
["aDefault", "(unmapped)"],
// This value is currently optimized away, which isn't 100% accurate.
// Because the import declaration is the last thing in the file, our current
// logic doesn't cover _both_ 'var' statements that it generates,
// so we end up using the first, optimized-out binding. Given that imports
// are almost never the last thing in a file, this is probably not
// a huge deal for now.
["aDefault", "(optimized away)"],
["root", "(optimized away)"],
["val", "(optimized away)"],
]);

View File

@ -173,6 +173,7 @@ function waitForState(dbg, predicate, msg) {
return new Promise(resolve => {
info(`Waiting for state change: ${msg || ""}`);
if (predicate(dbg.store.getState())) {
info(`Finished waiting for state change: ${msg || ""}`);
return resolve();
}
@ -1260,8 +1261,9 @@ async function assertPreviewPopup(dbg, { field, value, expression }) {
const properties =
preview.result.preview.ownProperties || preview.result.preview.items;
const property = properties[field];
const propertyValue = property.value || property;
is(`${property.value || property}`, value, "Preview.result");
is(`${propertyValue}`, value, "Preview.result");
is(preview.updating, false, "Preview.updating");
is(preview.expression, expression, "Preview.expression");
}
@ -1282,9 +1284,7 @@ async function assertPreviews(dbg, previews) {
await assertPreviewTextValue(dbg, { expression, text: result });
}
// Move to column 0 after to make sure that the preview created by this
// test does not affect later attempts to hover and preview.
hoverAtPos(dbg, { line: line, ch: 0 });
dbg.actions.clearPreview();
}
}

View File

@ -81,6 +81,7 @@ class ShapesHighlighter extends AutoRefreshHighlighter {
this.fillRule = "";
this.numInsetPoints = 0;
this.transformMode = false;
this.viewport = {};
this.markup = new CanvasFrameAnonymousContentHelper(this.highlighterEnv,
this._buildMarkup.bind(this));
@ -433,6 +434,27 @@ class ShapesHighlighter extends AutoRefreshHighlighter {
`${style}pointer-events:${pointerEvents};cursor:${cursorType};`);
}
/**
* Set the absolute pixel offsets which define the current viewport in relation to
* the full page size.
*
* If a padding value is given, inset the viewport by this value. This is used to define
* a virtual viewport which ensures some element remains visible even when at the edges
* of the actual viewport.
*
* @param {Number} padding
* Optional. Amount by which to inset the viewport in all directions.
*/
setViewport(padding = 0) {
const { pageXOffset, pageYOffset, innerWidth, innerHeight } =
this.currentNode.ownerGlobal;
const left = pageXOffset + padding;
const right = innerWidth + pageXOffset - padding;
const top = pageYOffset + padding;
const bottom = innerHeight + pageYOffset - padding;
this.viewport = { left, right, top, bottom, padding };
}
handleEvent(event, id) {
// No event handling if the highlighter is hidden
if (this.areShapesHidden()) {
@ -481,6 +503,10 @@ class ShapesHighlighter extends AutoRefreshHighlighter {
}
event.stopPropagation();
event.preventDefault();
// Calculate constraints for a virtual viewport which ensures that a dragged
// marker remains visible even at the edges of the actual viewport.
this.setViewport(BASE_MARKER_SIZE);
break;
case "mouseup":
if (this[_dragging]) {
@ -496,6 +522,22 @@ class ShapesHighlighter extends AutoRefreshHighlighter {
event.stopPropagation();
event.preventDefault();
// Set constraints for mouse position to ensure dragged marker stays in viewport.
const { left, right, top, bottom, padding } = this.viewport;
const { x, y } = this[_dragging];
// If marker was within viewport at mousedown, clamp its changes to the viewport.
// If marker was outside, do not clamp and allow dragging outside of the viewport.
// The latter applies to shapes in iframes which exceed the iframe viewport,
// but their markers are visible in the viewport of the iframe's parent.
if (x > left - padding && x < right + padding) {
pageX = Math.min(Math.max(left, pageX), right);
}
if (y > top - padding && y < bottom + padding) {
pageY = Math.min(Math.max(top, pageY), bottom);
}
let { point } = this[_dragging];
if (this.transformMode) {
this._handleTransformMove(pageX, pageY);

View File

@ -144,17 +144,6 @@ interface nsIServiceWorkerManager : nsISupports
[notxpcom, nostdcall] bool StartControlling(in const_ClientInfoRef aClientInfo,
in const_ServiceWorkerDescriptorRef aServiceWorker);
/*
* Clears ServiceWorker registrations from memory and disk for the specified
* host.
* - All ServiceWorker instances change their state to redundant.
* - Existing ServiceWorker instances handling fetches will keep running.
* - All documents will immediately stop being controlled.
* - Unregister jobs will be queued for all registrations.
* This eventually results in the registration being deleted from disk too.
*/
void removeAndPropagate(in AUTF8String aHost);
// Testing
DOMString getScopeForUrl(in nsIPrincipal aPrincipal, in DOMString aPath);

View File

@ -10,6 +10,7 @@ with Files("**"):
XPIDL_SOURCES += [
'nsIDOMStorage.idl',
'nsIDOMStorageManager.idl',
'nsIStorageActivityService.idl',
]
XPIDL_MODULE = 'dom_storage'

View File

@ -0,0 +1,42 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "domstubs.idl"
interface nsIArray;
interface nsIPrincipal;
/**
* nsIStorageActivityService is a service that can be used to know which
* origins have been active in a time range. This information can be used to
* implement "Clear Recent History" or similar features.
*
* If you are implementing a new Storage component, you should use
* QuotaManager. If you don't, remember to call the StorageActivityService
* methods to inform this service about
* 'writing' operations executed by origins.
*/
[scriptable, builtinclass, uuid(fd1310ba-d1be-4327-988e-92b39fcff6f4)]
interface nsIStorageActivityService : nsISupports
{
// This returns an array of nsIPrincipals, active between |from| and |to|
// timestamps. Note activities older than 1 day are forgotten.
// Activity details are not persisted, so this only covers activity since
// Firefox was started. All codebase principals are logged, which includes
// non-system principals like "moz-extension://ID", "moz-safe-about:home",
// "about:newtab", so principals may need to be filtered before being used.
nsIArray getActiveOrigins(in PRTime from, in PRTime to);
// NOTE: This method is meant to be used for testing only.
// The activity of |origin| is moved to the specified timestamp |when|.
void moveOriginInTime(in nsIPrincipal origin, in PRTime when);
// TEST-ONLY method to support clearing all previously known activity.
void testOnlyReset();
};
%{ C++
#define STORAGE_ACTIVITY_SERVICE_CONTRACTID "@mozilla.org/storage/activity-service;1"
%}
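The interface comments above describe how getActiveOrigins() is meant to be consumed, but this patch itself only adds producers. The following is a minimal, hypothetical C++ sketch of a caller listing origins active in the last hour; the function name ListRecentlyActiveOrigins, the one-hour window, and the printf output are illustrative assumptions, not part of this change.
#include "nsCOMPtr.h"
#include "nsIArray.h"
#include "nsIPrincipal.h"
#include "nsIStorageActivityService.h"
#include "nsArrayUtils.h"          // do_QueryElementAt
#include "nsServiceManagerUtils.h" // do_GetService
#include "nsString.h"
#include "prtime.h"
#include <cstdio>
static void ListRecentlyActiveOrigins()
{
  nsCOMPtr<nsIStorageActivityService> activityService =
    do_GetService("@mozilla.org/storage/activity-service;1");
  if (!activityService) {
    return;
  }
  // Origins active during the last hour. PR_Now() is in microseconds, and the
  // service forgets activity older than 24 hours.
  PRTime now = PR_Now();
  PRTime oneHourAgo = now - PRTime(PR_USEC_PER_SEC) * 60 * 60;
  nsCOMPtr<nsIArray> origins;
  if (NS_FAILED(activityService->GetActiveOrigins(oneHourAgo, now,
                                                  getter_AddRefs(origins)))) {
    return;
  }
  uint32_t length = 0;
  origins->GetLength(&length);
  for (uint32_t i = 0; i < length; ++i) {
    nsCOMPtr<nsIPrincipal> principal = do_QueryElementAt(origins, i);
    if (!principal) {
      continue;
    }
    nsAutoCString origin;
    if (NS_SUCCEEDED(principal->GetOrigin(origin))) {
      // As the comments above note, all codebase principals are logged, so a
      // real caller would likely filter out moz-extension:// and about: here.
      printf("active origin: %s\n", origin.get());
    }
  }
}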

View File

@ -36,6 +36,7 @@
#include "mozilla/dom/quota/PQuotaParent.h"
#include "mozilla/dom/quota/PQuotaRequestParent.h"
#include "mozilla/dom/quota/PQuotaUsageRequestParent.h"
#include "mozilla/dom/StorageActivityService.h"
#include "mozilla/ipc/BackgroundParent.h"
#include "mozilla/ipc/BackgroundUtils.h"
#include "mozilla/IntegerRange.h"
@ -3025,6 +3026,11 @@ QuotaObject::LockedMaybeUpdateSize(int64_t aSize, bool aTruncate)
quotaManager->mQuotaMutex.AssertCurrentThreadOwns();
if (mWritingDone == false && mOriginInfo) {
mWritingDone = true;
StorageActivityService::SendActivity(mOriginInfo->mOrigin);
}
if (mQuotaCheckDisabled) {
return true;
}

View File

@ -56,6 +56,7 @@ private:
, mPath(aPath)
, mSize(aSize)
, mQuotaCheckDisabled(false)
, mWritingDone(false)
{
MOZ_COUNT_CTOR(QuotaObject);
}
@ -86,6 +87,7 @@ private:
int64_t mSize;
bool mQuotaCheckDisabled;
bool mWritingDone;
};
END_QUOTA_NAMESPACE

View File

@ -100,8 +100,6 @@ using namespace mozilla::ipc;
namespace mozilla {
namespace dom {
#define PURGE_DOMAIN_DATA "browser:purge-domain-data"
#define PURGE_SESSION_HISTORY "browser:purge-session-history"
#define CLEAR_ORIGIN_DATA "clear-origin-attributes-data"
static_assert(nsIHttpChannelInternal::CORS_MODE_SAME_ORIGIN == static_cast<uint32_t>(RequestMode::Same_origin),
@ -292,10 +290,6 @@ ServiceWorkerManager::Init(ServiceWorkerRegistrar* aRegistrar)
if (obs) {
DebugOnly<nsresult> rv;
rv = obs->AddObserver(this, PURGE_SESSION_HISTORY, false /* ownsWeak */);
MOZ_ASSERT(NS_SUCCEEDED(rv));
rv = obs->AddObserver(this, PURGE_DOMAIN_DATA, false /* ownsWeak */);
MOZ_ASSERT(NS_SUCCEEDED(rv));
rv = obs->AddObserver(this, CLEAR_ORIGIN_DATA, false /* ownsWeak */);
MOZ_ASSERT(NS_SUCCEEDED(rv));
}
@ -416,8 +410,6 @@ ServiceWorkerManager::MaybeStartShutdown()
obs->RemoveObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID);
if (XRE_IsParentProcess()) {
obs->RemoveObserver(this, PURGE_SESSION_HISTORY);
obs->RemoveObserver(this, PURGE_DOMAIN_DATA);
obs->RemoveObserver(this, CLEAR_ORIGIN_DATA);
}
}
@ -3063,14 +3055,6 @@ ServiceWorkerManager::ForceUnregister(RegistrationDataPerPrincipal* aRegistratio
Unregister(aRegistration->Principal(), nullptr, NS_ConvertUTF8toUTF16(aRegistration->Scope()));
}
NS_IMETHODIMP
ServiceWorkerManager::RemoveAndPropagate(const nsACString& aHost)
{
Remove(aHost);
PropagateRemove(aHost);
return NS_OK;
}
void
ServiceWorkerManager::Remove(const nsACString& aHost)
{
@ -3183,20 +3167,6 @@ ServiceWorkerManager::Observe(nsISupports* aSubject,
const char* aTopic,
const char16_t* aData)
{
if (strcmp(aTopic, PURGE_SESSION_HISTORY) == 0) {
MOZ_ASSERT(XRE_IsParentProcess());
RemoveAll();
PropagateRemoveAll();
return NS_OK;
}
if (strcmp(aTopic, PURGE_DOMAIN_DATA) == 0) {
MOZ_ASSERT(XRE_IsParentProcess());
nsAutoString domain(aData);
RemoveAndPropagate(NS_ConvertUTF16toUTF8(domain));
return NS_OK;
}
if (strcmp(aTopic, CLEAR_ORIGIN_DATA) == 0) {
MOZ_ASSERT(XRE_IsParentProcess());
OriginAttributesPattern pattern;

View File

@ -19,6 +19,7 @@
#include "MainThreadUtils.h"
#include "mozilla/ClearOnShutdown.h"
#include "mozilla/CycleCollectedJSContext.h"
#include "mozilla/dom/StorageActivityService.h"
#include "mozilla/ErrorNames.h"
#include "mozilla/ipc/BackgroundChild.h"
#include "mozilla/ipc/BackgroundParent.h"
@ -265,6 +266,7 @@ ServiceWorkerRegistrar::RegisterServiceWorker(
}
MaybeScheduleSaveData();
StorageActivityService::SendActivity(aData.principal());
}
void
@ -301,6 +303,7 @@ ServiceWorkerRegistrar::UnregisterServiceWorker(
if (deleted) {
MaybeScheduleSaveData();
StorageActivityService::SendActivity(aPrincipalInfo);
}
}
@ -316,18 +319,28 @@ ServiceWorkerRegistrar::RemoveAll()
bool deleted = false;
nsTArray<ServiceWorkerRegistrationData> data;
{
MonitorAutoLock lock(mMonitor);
MOZ_ASSERT(mDataLoaded);
// Let's take a copy in order to inform StorageActivityService.
data = mData;
deleted = !mData.IsEmpty();
mData.Clear();
mDataGeneration = GetNextGeneration();
}
if (deleted) {
MaybeScheduleSaveData();
if (!deleted) {
return;
}
MaybeScheduleSaveData();
for (uint32_t i = 0, len = data.Length(); i < len; ++i) {
StorageActivityService::SendActivity(data[i].principal());
}
}

View File

@ -38,7 +38,7 @@
is(body, "intercepted", "Expected serviceworker to intercept request");
});
}).then(function() {
SpecialPowers.removeServiceWorkerDataForExampleDomain();
return SpecialPowers.removeServiceWorkerDataForExampleDomain();
}).then(function() {
return checkDomainRegistration("prefixexample.com", true /* exists */)
.then(function(e) {

View File

@ -0,0 +1,318 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "StorageActivityService.h"
#include "mozilla/ipc/BackgroundUtils.h"
#include "mozilla/StaticPtr.h"
#include "nsIMutableArray.h"
#include "nsSupportsPrimitives.h"
#include "nsXPCOM.h"
// This const is used to know when origin activities should be purged because
// they are too old. This value should be in sync with what the UI needs.
#define TIME_MAX_SECS 86400 /* 24 hours */
namespace mozilla {
namespace dom {
static StaticRefPtr<StorageActivityService> gStorageActivityService;
static bool gStorageActivityShutdown = false;
/* static */ void
StorageActivityService::SendActivity(nsIPrincipal* aPrincipal)
{
MOZ_ASSERT(NS_IsMainThread());
if (!aPrincipal ||
BasePrincipal::Cast(aPrincipal)->Kind() != BasePrincipal::eCodebasePrincipal) {
// Only codebase principals.
return;
}
RefPtr<StorageActivityService> service = GetOrCreate();
if (NS_WARN_IF(!service)) {
return;
}
service->SendActivityInternal(aPrincipal);
}
/* static */ void
StorageActivityService::SendActivity(const mozilla::ipc::PrincipalInfo& aPrincipalInfo)
{
if (aPrincipalInfo.type() !=
mozilla::ipc::PrincipalInfo::TContentPrincipalInfo) {
// Only content principals.
return;
}
RefPtr<Runnable> r = NS_NewRunnableFunction(
"StorageActivityService::SendActivity",
[aPrincipalInfo] () {
MOZ_ASSERT(NS_IsMainThread());
nsCOMPtr<nsIPrincipal> principal =
mozilla::ipc::PrincipalInfoToPrincipal(aPrincipalInfo);
StorageActivityService::SendActivity(principal);
});
SystemGroup::Dispatch(TaskCategory::Other, r.forget());
}
/* static */ void
StorageActivityService::SendActivity(const nsACString& aOrigin)
{
MOZ_ASSERT(XRE_IsParentProcess());
nsCString origin;
origin.Assign(aOrigin);
RefPtr<Runnable> r = NS_NewRunnableFunction(
"StorageActivityService::SendActivity",
[origin] () {
MOZ_ASSERT(NS_IsMainThread());
RefPtr<StorageActivityService> service = GetOrCreate();
if (NS_WARN_IF(!service)) {
return;
}
service->SendActivityInternal(origin);
});
if (NS_IsMainThread()) {
Unused << r->Run();
} else {
SystemGroup::Dispatch(TaskCategory::Other, r.forget());
}
}
/* static */ already_AddRefed<StorageActivityService>
StorageActivityService::GetOrCreate()
{
MOZ_ASSERT(NS_IsMainThread());
if (!gStorageActivityService && !gStorageActivityShutdown) {
RefPtr<StorageActivityService> service = new StorageActivityService();
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
if (NS_WARN_IF(!obs)) {
return nullptr;
}
nsresult rv = obs->AddObserver(service, NS_XPCOM_SHUTDOWN_OBSERVER_ID, true);
if (NS_WARN_IF(NS_FAILED(rv))) {
return nullptr;
}
gStorageActivityService = service;
}
RefPtr<StorageActivityService> service = gStorageActivityService;
return service.forget();
}
StorageActivityService::StorageActivityService()
{
MOZ_ASSERT(NS_IsMainThread());
}
StorageActivityService::~StorageActivityService()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mTimer);
}
void
StorageActivityService::SendActivityInternal(nsIPrincipal* aPrincipal)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aPrincipal);
MOZ_ASSERT(BasePrincipal::Cast(aPrincipal)->Kind() == BasePrincipal::eCodebasePrincipal);
if (!XRE_IsParentProcess()) {
SendActivityToParent(aPrincipal);
return;
}
nsAutoCString origin;
nsresult rv = aPrincipal->GetOrigin(origin);
if (NS_WARN_IF(NS_FAILED(rv))) {
return;
}
SendActivityInternal(origin);
}
void
StorageActivityService::SendActivityInternal(const nsACString& aOrigin)
{
MOZ_ASSERT(XRE_IsParentProcess());
mActivities.Put(aOrigin, PR_Now());
MaybeStartTimer();
}
void
StorageActivityService::SendActivityToParent(nsIPrincipal* aPrincipal)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!XRE_IsParentProcess());
PBackgroundChild* actor = BackgroundChild::GetOrCreateForCurrentThread();
if (NS_WARN_IF(!actor)) {
return;
}
mozilla::ipc::PrincipalInfo principalInfo;
nsresult rv =
mozilla::ipc::PrincipalToPrincipalInfo(aPrincipal, &principalInfo);
if (NS_WARN_IF(NS_FAILED(rv))) {
return;
}
actor->SendStorageActivity(principalInfo);
}
NS_IMETHODIMP
StorageActivityService::Observe(nsISupports* aSubject, const char* aTopic,
const char16_t* aData)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID));
MaybeStopTimer();
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
if (obs) {
obs->RemoveObserver(this, NS_XPCOM_SHUTDOWN_OBSERVER_ID);
}
gStorageActivityShutdown = true;
gStorageActivityService = nullptr;
return NS_OK;
}
void
StorageActivityService::MaybeStartTimer()
{
MOZ_ASSERT(NS_IsMainThread());
if (!mTimer) {
mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
mTimer->InitWithCallback(this,
1000 * 5 * 60 /* every 5 minutes */,
nsITimer::TYPE_REPEATING_SLACK);
}
}
void
StorageActivityService::MaybeStopTimer()
{
MOZ_ASSERT(NS_IsMainThread());
if (mTimer) {
mTimer->Cancel();
mTimer = nullptr;
}
}
NS_IMETHODIMP
StorageActivityService::Notify(nsITimer* aTimer)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mTimer == aTimer);
uint64_t now = PR_Now();
for (auto iter = mActivities.Iter(); !iter.Done(); iter.Next()) {
if ((now - iter.UserData()) / PR_USEC_PER_SEC > TIME_MAX_SECS) {
iter.Remove();
}
}
// If no activities, let's stop the timer.
if (mActivities.Count() == 0) {
MaybeStopTimer();
}
return NS_OK;
}
NS_IMETHODIMP
StorageActivityService::GetActiveOrigins(PRTime aFrom, PRTime aTo,
nsIArray** aRetval)
{
uint64_t now = PR_Now();
if (((now - aFrom) / PR_USEC_PER_SEC) > TIME_MAX_SECS ||
aFrom >= aTo) {
return NS_ERROR_RANGE_ERR;
}
nsresult rv = NS_OK;
nsCOMPtr<nsIMutableArray> devices =
do_CreateInstance(NS_ARRAY_CONTRACTID, &rv);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
for (auto iter = mActivities.Iter(); !iter.Done(); iter.Next()) {
if (iter.UserData() >= aFrom && iter.UserData() <= aTo) {
RefPtr<BasePrincipal> principal =
BasePrincipal::CreateCodebasePrincipal(iter.Key());
MOZ_ASSERT(principal);
rv = devices->AppendElement(principal);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
}
}
devices.forget(aRetval);
return NS_OK;
}
NS_IMETHODIMP
StorageActivityService::MoveOriginInTime(nsIPrincipal* aPrincipal,
PRTime aWhen)
{
if (!XRE_IsParentProcess()) {
return NS_ERROR_FAILURE;
}
nsAutoCString origin;
nsresult rv = aPrincipal->GetOrigin(origin);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
mActivities.Put(origin, aWhen / PR_USEC_PER_SEC);
return NS_OK;
}
NS_IMETHODIMP
StorageActivityService::TestOnlyReset()
{
mActivities.Clear();
return NS_OK;
}
NS_INTERFACE_MAP_BEGIN(StorageActivityService)
NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, nsIStorageActivityService)
NS_INTERFACE_MAP_ENTRY(nsIStorageActivityService)
NS_INTERFACE_MAP_ENTRY(nsIObserver)
NS_INTERFACE_MAP_ENTRY(nsITimerCallback)
NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
NS_INTERFACE_MAP_END
NS_IMPL_ADDREF(StorageActivityService)
NS_IMPL_RELEASE(StorageActivityService)
} // dom namespace
} // mozilla namespace

View File

@ -0,0 +1,78 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_dom_StorageActivityService_h
#define mozilla_dom_StorageActivityService_h
#include "nsDataHashtable.h"
#include "nsIStorageActivityService.h"
#include "nsITimer.h"
#include "nsWeakReference.h"
namespace mozilla {
namespace ipc {
class PrincipalInfo;
} // ipc
namespace dom {
class StorageActivityService final : public nsIStorageActivityService
, public nsIObserver
, public nsITimerCallback
, public nsSupportsWeakReference
{
public:
NS_DECL_ISUPPORTS
NS_DECL_NSISTORAGEACTIVITYSERVICE
NS_DECL_NSIOBSERVER
NS_DECL_NSITIMERCALLBACK
// Main-thread only.
static void
SendActivity(nsIPrincipal* aPrincipal);
// Thread-safe.
static void
SendActivity(const mozilla::ipc::PrincipalInfo& aPrincipalInfo);
// Thread-safe but for parent process only!
static void
SendActivity(const nsACString& aOrigin);
// Used by XPCOM. Don't use it, use SendActivity() instead.
static already_AddRefed<StorageActivityService>
GetOrCreate();
private:
StorageActivityService();
~StorageActivityService();
void
SendActivityInternal(nsIPrincipal* aPrincipal);
void
SendActivityInternal(const nsACString& aOrigin);
void
SendActivityToParent(nsIPrincipal* aPrincipal);
void
MaybeStartTimer();
void
MaybeStopTimer();
// Activities grouped by origin (+OriginAttributes).
nsDataHashtable<nsCStringHashKey, PRTime> mActivities;
nsCOMPtr<nsITimer> mTimer;
};
} // namespace dom
} // namespace mozilla
#endif // mozilla_dom_StorageActivityService_h
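On the producer side, the static SendActivity() overloads declared above are what this patch wires into QuotaObject and ServiceWorkerRegistrar. Below is a hedged sketch of how a hypothetical, non-QuotaManager storage backend would report a write; MyStorageBackendWriteEntry is an illustrative name, not an API added by this change.
#include "mozilla/dom/StorageActivityService.h"
#include "nsIPrincipal.h"
// Illustrative only: after a successful write on behalf of aPrincipal, report
// the activity so nsIStorageActivityService::getActiveOrigins() can later
// surface this origin.
static void MyStorageBackendWriteEntry(nsIPrincipal* aPrincipal)
{
  // ... perform the actual write for aPrincipal here ...
  // Main-thread-only overload; the PrincipalInfo overload is the thread-safe
  // alternative, and the nsACString overload is parent-process only.
  mozilla::dom::StorageActivityService::SendActivity(aPrincipal);
}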

View File

@ -12,6 +12,7 @@ EXPORTS.mozilla.dom += [
'LocalStorageManager.h',
'SessionStorageManager.h',
'Storage.h',
'StorageActivityService.h',
'StorageIPC.h',
'StorageNotifierService.h',
'StorageUtils.h',
@ -25,6 +26,7 @@ UNIFIED_SOURCES += [
'SessionStorageCache.cpp',
'SessionStorageManager.cpp',
'Storage.cpp',
'StorageActivityService.cpp',
'StorageDBThread.cpp',
'StorageDBUpdater.cpp',
'StorageIPC.cpp',

View File

@ -2136,31 +2136,35 @@ nsEventStatus AsyncPanZoomController::OnScrollWheel(const ScrollWheelInput& aEve
// first, and then get the delta values in parent-layer pixels based on the
// adjusted values.
bool adjustedByAutoDir = false;
auto deltaX = aEvent.mDeltaX;
auto deltaY = aEvent.mDeltaY;
ParentLayerPoint delta;
if (aEvent.IsAutoDir()) {
// It's an auto-dir scroll, so check whether its delta should be adjusted
// and, if so, adjust it.
RecursiveMutexAutoLock lock(mRecursiveMutex);
auto deltaX = aEvent.mDeltaX;
auto deltaY = aEvent.mDeltaY;
bool isRTL = IsContentOfHonouredTargetRightToLeft(aEvent.HonoursRoot());
APZAutoDirWheelDeltaAdjuster adjuster(deltaX, deltaY, mX, mY, isRTL);
if (adjuster.ShouldBeAdjusted()) {
adjuster.Adjust();
// If the original delta values have been adjusted, we pass them to
// replace the original delta values in |aEvent| so that the delta values
// in parent-layer pixels are calculated based on the adjusted values, not
// the original ones.
// Pay special attention to the last two parameters. They are in a swapped
// order so that they still correspond to their delta after adjustment.
delta = GetScrollWheelDelta(aEvent,
deltaX, deltaY,
aEvent.mUserDeltaMultiplierY,
aEvent.mUserDeltaMultiplierX);
adjustedByAutoDir = true;
}
}
if (!adjustedByAutoDir) {
// Ensure the calls to GetScrollWheelDelta are outside the mRecursiveMutex
// lock since these calls may acquire the APZ tree lock. Holding mRecursiveMutex
// while acquiring the APZ tree lock is a lock ordering violation.
if (adjustedByAutoDir) {
// If the original delta values have been adjusted, we pass them to
// replace the original delta values in |aEvent| so that the delta values
// in parent-layer pixels are calculated based on the adjusted values, not
// the original ones.
// Pay special attention to the last two parameters. They are in a swapped
// order so that they still correspond to their delta after adjustment.
delta = GetScrollWheelDelta(aEvent,
deltaX, deltaY,
aEvent.mUserDeltaMultiplierY,
aEvent.mUserDeltaMultiplierX);
} else {
// If the original delta values haven't been adjusted by auto-dir, just pass
// the |aEvent| and calculate the delta values in parent-layer pixels based
// on the original delta values from |aEvent|.

View File

@ -25,6 +25,7 @@
#include "mozilla/dom/MessagePortParent.h"
#include "mozilla/dom/ServiceWorkerManagerParent.h"
#include "mozilla/dom/ServiceWorkerRegistrar.h"
#include "mozilla/dom/StorageActivityService.h"
#include "mozilla/dom/asmjscache/AsmJSCache.h"
#include "mozilla/dom/cache/ActorUtils.h"
#include "mozilla/dom/indexedDB/ActorsParent.h"
@ -289,6 +290,9 @@ BackgroundParentImpl::RecvBroadcastLocalStorageChange(
const PrincipalInfo& aPrincipalInfo,
const bool& aIsPrivate)
{
// Let's inform the StorageActivityService about this change.
dom::StorageActivityService::SendActivity(aPrincipalInfo);
nsTArray<PBackgroundParent*> liveActorArray;
if (NS_WARN_IF(!BackgroundParent::GetLiveActorArray(this, liveActorArray))) {
return IPC_FAIL_NO_REASON(this);
@ -1036,6 +1040,13 @@ BackgroundParentImpl::RecvPClientManagerConstructor(mozilla::dom::PClientManager
return IPC_OK();
}
IPCResult
BackgroundParentImpl::RecvStorageActivity(const PrincipalInfo& aPrincipalInfo)
{
dom::StorageActivityService::SendActivity(aPrincipalInfo);
return IPC_OK();
}
} // namespace ipc
} // namespace mozilla

View File

@ -284,6 +284,9 @@ protected:
virtual bool
DeallocPMIDIManagerParent(PMIDIManagerParent* aActor) override;
virtual mozilla::ipc::IPCResult
RecvStorageActivity(const PrincipalInfo& aPrincipalInfo) override;
};
} // namespace ipc

View File

@ -148,6 +148,10 @@ parent:
async PMIDIManager();
async PMIDIPort(MIDIPortInfo portInfo, bool sysexEnabled);
// This method is used to propagate storage activities from the child actor
// to the parent actor. See StorageActivityService.
async StorageActivity(PrincipalInfo principalInfo);
child:
async PCache();
async PCacheStreamControl();

View File

@ -150,6 +150,12 @@ ref = exports.nested(baguette, 0);
assertEq(ref, baguette);
assertEq(ref.calories, baguette.calories);
if (wasmDebuggingIsSupported()) {
let g = newGlobal();
let dbg = new Debugger(g);
g.eval(`o = new WebAssembly.Instance(new WebAssembly.Module(wasmTextToBinary('(module (func (result anyref) (param anyref) get_local 0) (export "" 0))')));`);
}
// More interesting use cases about control flow joins.
function assertJoin(body) {

View File

@ -81,6 +81,7 @@ class NoTypePolicy
class BoxInputsPolicy final : public TypePolicy
{
public:
constexpr BoxInputsPolicy() { }
SPECIALIZATION_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -91,6 +92,7 @@ class BoxInputsPolicy final : public TypePolicy
class ArithPolicy final : public TypePolicy
{
public:
constexpr ArithPolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -98,6 +100,7 @@ class ArithPolicy final : public TypePolicy
class AllDoublePolicy final : public TypePolicy
{
public:
constexpr AllDoublePolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -108,6 +111,7 @@ class AllDoublePolicy final : public TypePolicy
class BitwisePolicy final : public TypePolicy
{
public:
constexpr BitwisePolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -115,6 +119,7 @@ class BitwisePolicy final : public TypePolicy
class ComparePolicy final : public TypePolicy
{
public:
constexpr ComparePolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -122,6 +127,7 @@ class ComparePolicy final : public TypePolicy
class SameValuePolicy final : public TypePolicy
{
public:
constexpr SameValuePolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -130,6 +136,7 @@ class SameValuePolicy final : public TypePolicy
class TestPolicy final : public TypePolicy
{
public:
constexpr TestPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -137,6 +144,7 @@ class TestPolicy final : public TypePolicy
class TypeBarrierPolicy final : public TypePolicy
{
public:
constexpr TypeBarrierPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -144,6 +152,7 @@ class TypeBarrierPolicy final : public TypePolicy
class CallPolicy final : public TypePolicy
{
public:
constexpr CallPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -152,6 +161,7 @@ class CallPolicy final : public TypePolicy
class PowPolicy final : public TypePolicy
{
public:
constexpr PowPolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -161,6 +171,7 @@ template <unsigned Op>
class StringPolicy final : public TypePolicy
{
public:
constexpr StringPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -173,6 +184,7 @@ template <unsigned Op>
class ConvertToStringPolicy final : public TypePolicy
{
public:
constexpr ConvertToStringPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -185,6 +197,7 @@ template <unsigned Op>
class BooleanPolicy final : private TypePolicy
{
public:
constexpr BooleanPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -197,6 +210,7 @@ template <unsigned Op>
class UnboxedInt32Policy final : private TypePolicy
{
public:
constexpr UnboxedInt32Policy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -209,6 +223,7 @@ template <unsigned Op>
class ConvertToInt32Policy final : public TypePolicy
{
public:
constexpr ConvertToInt32Policy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -221,6 +236,7 @@ template <unsigned Op>
class TruncateToInt32Policy final : public TypePolicy
{
public:
constexpr TruncateToInt32Policy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -233,6 +249,7 @@ template <unsigned Op>
class DoublePolicy final : public TypePolicy
{
public:
constexpr DoublePolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -245,6 +262,7 @@ template <unsigned Op>
class Float32Policy final : public TypePolicy
{
public:
constexpr Float32Policy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -258,6 +276,7 @@ template <unsigned Op>
class FloatingPointPolicy final : public TypePolicy
{
public:
constexpr FloatingPointPolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -266,6 +285,7 @@ template <unsigned Op>
class NoFloatPolicy final : public TypePolicy
{
public:
constexpr NoFloatPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -279,6 +299,7 @@ template <unsigned FirstOp>
class NoFloatPolicyAfter final : public TypePolicy
{
public:
constexpr NoFloatPolicyAfter() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -287,6 +308,7 @@ class NoFloatPolicyAfter final : public TypePolicy
class ToDoublePolicy final : public TypePolicy
{
public:
constexpr ToDoublePolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -298,6 +320,7 @@ class ToDoublePolicy final : public TypePolicy
class ToInt32Policy final : public TypePolicy
{
public:
constexpr ToInt32Policy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -309,6 +332,7 @@ class ToInt32Policy final : public TypePolicy
class ToStringPolicy final : public TypePolicy
{
public:
constexpr ToStringPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -320,6 +344,7 @@ template <unsigned Op>
class ObjectPolicy final : public TypePolicy
{
public:
constexpr ObjectPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* ins);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override {
@ -337,6 +362,7 @@ template <unsigned Op>
class SimdScalarPolicy final : public TypePolicy
{
public:
constexpr SimdScalarPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* def);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override {
@ -347,6 +373,7 @@ class SimdScalarPolicy final : public TypePolicy
class SimdAllPolicy final : public TypePolicy
{
public:
constexpr SimdAllPolicy () { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -355,6 +382,7 @@ template <unsigned Op>
class SimdPolicy final : public TypePolicy
{
public:
constexpr SimdPolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -362,6 +390,7 @@ class SimdPolicy final : public TypePolicy
class SimdSelectPolicy final : public TypePolicy
{
public:
constexpr SimdSelectPolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -369,6 +398,7 @@ class SimdSelectPolicy final : public TypePolicy
class SimdShufflePolicy final : public TypePolicy
{
public:
constexpr SimdShufflePolicy() { }
SPECIALIZATION_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -379,6 +409,7 @@ template <unsigned Op>
class SimdSameAsReturnedTypePolicy final : public TypePolicy
{
public:
constexpr SimdSameAsReturnedTypePolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* ins);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override {
@ -390,6 +421,7 @@ template <unsigned Op>
class BoxPolicy final : public TypePolicy
{
public:
constexpr BoxPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* ins);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override {
@ -402,6 +434,7 @@ template <unsigned Op, MIRType Type>
class BoxExceptPolicy final : public TypePolicy
{
public:
constexpr BoxExceptPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* ins);
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override {
@ -438,6 +471,7 @@ class MixPolicy final : public TypePolicy
}
public:
constexpr MixPolicy() { }
EMPTY_DATA_;
static MOZ_MUST_USE bool staticAdjustInputs(TempAllocator& alloc, MInstruction* ins) {
return MixPolicy::staticAdjustInputsHelper<Policies...>(alloc, ins);
@ -450,6 +484,7 @@ class MixPolicy final : public TypePolicy
class CallSetElementPolicy final : public TypePolicy
{
public:
constexpr CallSetElementPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -459,6 +494,7 @@ class CallSetElementPolicy final : public TypePolicy
class InstanceOfPolicy final : public TypePolicy
{
public:
constexpr InstanceOfPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -468,6 +504,7 @@ class StoreTypedArrayHolePolicy;
class StoreUnboxedScalarPolicy : public TypePolicy
{
private:
constexpr StoreUnboxedScalarPolicy() { }
static MOZ_MUST_USE bool adjustValueInput(TempAllocator& alloc, MInstruction* ins,
Scalar::Type arrayType, MDefinition* value,
int valueOperand);
@ -482,6 +519,7 @@ class StoreUnboxedScalarPolicy : public TypePolicy
class StoreTypedArrayHolePolicy final : public StoreUnboxedScalarPolicy
{
public:
constexpr StoreTypedArrayHolePolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -489,6 +527,7 @@ class StoreTypedArrayHolePolicy final : public StoreUnboxedScalarPolicy
class StoreUnboxedObjectOrNullPolicy final : public TypePolicy
{
public:
constexpr StoreUnboxedObjectOrNullPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -496,6 +535,7 @@ class StoreUnboxedObjectOrNullPolicy final : public TypePolicy
class StoreUnboxedStringPolicy final : public TypePolicy
{
public:
constexpr StoreUnboxedStringPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) const override;
};
@ -504,6 +544,7 @@ class StoreUnboxedStringPolicy final : public TypePolicy
class ClampPolicy final : public TypePolicy
{
public:
constexpr ClampPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};
@ -511,6 +552,7 @@ class ClampPolicy final : public TypePolicy
class FilterTypeSetPolicy final : public TypePolicy
{
public:
constexpr FilterTypeSetPolicy() { }
EMPTY_DATA_;
MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* ins) const override;
};

View File

@ -3429,6 +3429,9 @@ class BaseCompiler final : public BaseCompilerInterface
case ExprType::F32:
masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress);
break;
case ExprType::AnyRef:
masm.storePtr(RegPtr(ReturnReg), resultsAddress);
break;
default:
MOZ_CRASH("Function return type");
}
@ -3453,6 +3456,9 @@ class BaseCompiler final : public BaseCompilerInterface
case ExprType::F32:
masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg));
break;
case ExprType::AnyRef:
masm.loadPtr(resultsAddress, RegPtr(ReturnReg));
break;
default:
MOZ_CRASH("Function return type");
}

View File

@ -71,6 +71,10 @@
#define SERVICEWORKERMANAGER_CID \
{ 0xc74bde32, 0xbcc7, 0x4840, { 0x84, 0x30, 0xc7, 0x33, 0x35, 0x1b, 0x21, 0x2a } }
// {69da374a-fda3-4a93-9fbc-d9304f66a7fe}
#define STORAGEACTIVITYSERVICE_CID \
{ 0x69da374a, 0xfda3, 0x4a93, { 0x9f, 0xbc, 0xd9, 0x30, 0x4f, 0x66, 0xa7, 0xfe } }
#define NOTIFICATIONTELEMETRYSERVICE_CID \
{ 0x5995b782, 0x6a0e, 0x4066, { 0xaa, 0xc5, 0x27, 0x6f, 0x0a, 0x9a, 0xd8, 0xcf } }

View File

@ -74,6 +74,7 @@
#include "mozilla/dom/quota/QuotaManagerService.h"
#include "mozilla/dom/ServiceWorkerManager.h"
#include "mozilla/dom/SessionStorageManager.h"
#include "mozilla/dom/StorageActivityService.h"
#include "mozilla/dom/WorkerDebuggerManager.h"
#include "mozilla/dom/Notification.h"
#include "mozilla/OSFileConstants.h"
@ -211,6 +212,8 @@ NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(ServiceWorkerManager,
ServiceWorkerManager::GetInstance)
NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(WorkerDebuggerManager,
WorkerDebuggerManager::GetInstance)
NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(StorageActivityService,
StorageActivityService::GetOrCreate)
#ifdef MOZ_WEBSPEECH
NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(nsSynthVoiceRegistry,
@ -557,6 +560,7 @@ NS_DEFINE_NAMED_CID(NS_TEXTEDITOR_CID);
NS_DEFINE_NAMED_CID(DOMREQUEST_SERVICE_CID);
NS_DEFINE_NAMED_CID(QUOTAMANAGER_SERVICE_CID);
NS_DEFINE_NAMED_CID(SERVICEWORKERMANAGER_CID);
NS_DEFINE_NAMED_CID(STORAGEACTIVITYSERVICE_CID);
NS_DEFINE_NAMED_CID(NOTIFICATIONTELEMETRYSERVICE_CID);
NS_DEFINE_NAMED_CID(PUSHNOTIFIER_CID);
NS_DEFINE_NAMED_CID(WORKERDEBUGGERMANAGER_CID);
@ -799,6 +803,7 @@ static const mozilla::Module::CIDEntry kLayoutCIDs[] = {
{ &kDOMREQUEST_SERVICE_CID, false, nullptr, DOMRequestServiceConstructor },
{ &kQUOTAMANAGER_SERVICE_CID, false, nullptr, QuotaManagerServiceConstructor },
{ &kSERVICEWORKERMANAGER_CID, false, nullptr, ServiceWorkerManagerConstructor },
{ &kSTORAGEACTIVITYSERVICE_CID, false, nullptr, StorageActivityServiceConstructor },
{ &kNOTIFICATIONTELEMETRYSERVICE_CID, false, nullptr, NotificationTelemetryServiceConstructor },
{ &kPUSHNOTIFIER_CID, false, nullptr, PushNotifierConstructor },
{ &kWORKERDEBUGGERMANAGER_CID, true, nullptr, WorkerDebuggerManagerConstructor },
@ -906,6 +911,7 @@ static const mozilla::Module::ContractIDEntry kLayoutContracts[] = {
{ DOMREQUEST_SERVICE_CONTRACTID, &kDOMREQUEST_SERVICE_CID },
{ QUOTAMANAGER_SERVICE_CONTRACTID, &kQUOTAMANAGER_SERVICE_CID },
{ SERVICEWORKERMANAGER_CONTRACTID, &kSERVICEWORKERMANAGER_CID },
{ STORAGE_ACTIVITY_SERVICE_CONTRACTID, &kSTORAGEACTIVITYSERVICE_CID },
{ NOTIFICATIONTELEMETRYSERVICE_CONTRACTID, &kNOTIFICATIONTELEMETRYSERVICE_CID },
{ PUSHNOTIFIER_CONTRACTID, &kPUSHNOTIFIER_CID },
{ WORKERDEBUGGERMANAGER_CONTRACTID, &kWORKERDEBUGGERMANAGER_CID },

View File

@ -80,6 +80,7 @@
* scope.
*/
#include "mozilla/Attributes.h"
#include "mozilla/GuardObjects.h"
#include "mozilla/Move.h"
@ -124,7 +125,7 @@ private:
};
template <typename ExitFunction>
ScopeExit<ExitFunction>
MOZ_MUST_USE ScopeExit<ExitFunction>
MakeScopeExit(ExitFunction&& exitFunction)
{
return ScopeExit<ExitFunction>(mozilla::Move(exitFunction));

View File

@ -5,7 +5,7 @@
//! Applicable declarations management.
use properties::PropertyDeclarationBlock;
use rule_tree::{CascadeLevel, StyleSource};
use rule_tree::{CascadeLevel, ShadowCascadeOrder, StyleSource};
use servo_arc::Arc;
use shared_lock::Locked;
use smallvec::SmallVec;
@ -83,6 +83,8 @@ pub struct ApplicableDeclarationBlock {
order_and_level: SourceOrderAndCascadeLevel,
/// The specificity of the selector this block is represented by.
pub specificity: u32,
/// The shadow cascade order of the tree this block came from.
pub shadow_cascade_order: ShadowCascadeOrder,
}
impl ApplicableDeclarationBlock {
@ -97,16 +99,24 @@ impl ApplicableDeclarationBlock {
source: StyleSource::Declarations(declarations),
order_and_level: SourceOrderAndCascadeLevel::new(0, level),
specificity: 0,
shadow_cascade_order: 0,
}
}
/// Constructs an applicable declaration block from the given components
#[inline]
pub fn new(source: StyleSource, order: u32, level: CascadeLevel, specificity: u32) -> Self {
pub fn new(
source: StyleSource,
order: u32,
level: CascadeLevel,
specificity: u32,
shadow_cascade_order: u32,
) -> Self {
ApplicableDeclarationBlock {
source: source,
source,
order_and_level: SourceOrderAndCascadeLevel::new(order, level),
specificity: specificity,
specificity,
shadow_cascade_order,
}
}
@ -122,11 +132,11 @@ impl ApplicableDeclarationBlock {
self.order_and_level.level()
}
/// Convenience method to consume self and return the source alongside the
/// level.
/// Convenience method to consume self and return the (source, cascade level,
/// shadow cascade order) triple that the rule tree iterates over.
#[inline]
pub fn order_and_level(self) -> (StyleSource, CascadeLevel) {
pub fn for_rule_tree(self) -> (StyleSource, CascadeLevel, ShadowCascadeOrder) {
let level = self.level();
(self.source, level)
(self.source, level, self.shadow_cascade_order)
}
}

View File

@ -162,6 +162,16 @@ const FREE_LIST_SENTINEL: *mut RuleNode = 0x01 as *mut RuleNode;
/// another thread is currently adding an entry). We spin if we find this value.
const FREE_LIST_LOCKED: *mut RuleNode = 0x02 as *mut RuleNode;
/// A counter to track how many inner shadow roots rules deep we are.
///
/// This is used to handle:
///
/// https://drafts.csswg.org/css-scoping/#shadow-cascading
///
/// In particular, it'd be `0` for the innermost shadow host, `1` for the next,
/// and so on.
pub type ShadowCascadeOrder = u32;
impl RuleTree {
/// Construct a new rule tree.
pub fn new() -> Self {
@ -198,7 +208,7 @@ impl RuleTree {
guards: &StylesheetGuards,
) -> StrongRuleNode
where
I: Iterator<Item = (StyleSource, CascadeLevel)>,
I: Iterator<Item = (StyleSource, CascadeLevel, ShadowCascadeOrder)>,
{
use self::CascadeLevel::*;
let mut current = self.root.clone();
@ -206,13 +216,18 @@ impl RuleTree {
let mut found_important = false;
let mut important_style_attr = None;
let mut important_author = SmallVec::<[StyleSource; 4]>::new();
let mut important_same_tree = SmallVec::<[StyleSource; 4]>::new();
let mut important_inner_shadow = SmallVec::<[SmallVec<[StyleSource; 4]>; 4]>::new();
important_inner_shadow.push(SmallVec::new());
let mut important_user = SmallVec::<[StyleSource; 4]>::new();
let mut important_ua = SmallVec::<[StyleSource; 4]>::new();
let mut transition = None;
for (source, level) in iter {
debug_assert!(last_level <= level, "Not really ordered");
let mut last_cascade_order = 0;
for (source, level, shadow_cascade_order) in iter {
debug_assert!(level >= last_level, "Not really ordered");
debug_assert!(!level.is_important(), "Important levels handled internally");
let any_important = {
let pdb = source.read(level.guard(guards));
@ -222,7 +237,22 @@ impl RuleTree {
if any_important {
found_important = true;
match level {
AuthorNormal => important_author.push(source.clone()),
InnerShadowNormal => {
debug_assert!(
shadow_cascade_order >= last_cascade_order,
"Not really ordered"
);
if shadow_cascade_order > last_cascade_order &&
!important_inner_shadow.last().unwrap().is_empty()
{
last_cascade_order = shadow_cascade_order;
important_inner_shadow.push(SmallVec::new());
}
important_inner_shadow.last_mut().unwrap().push(source.clone())
}
SameTreeAuthorNormal => {
important_same_tree.push(source.clone())
},
UANormal => important_ua.push(source.clone()),
UserNormal => important_user.push(source.clone()),
StyleAttributeNormal => {
@ -265,14 +295,20 @@ impl RuleTree {
// followed by any transition rule.
//
for source in important_author.drain() {
current = current.ensure_child(self.root.downgrade(), source, AuthorImportant);
for source in important_same_tree.drain() {
current = current.ensure_child(self.root.downgrade(), source, SameTreeAuthorImportant);
}
if let Some(source) = important_style_attr {
current = current.ensure_child(self.root.downgrade(), source, StyleAttributeImportant);
}
for mut list in important_inner_shadow.drain().rev() {
for source in list.drain() {
current = current.ensure_child(self.root.downgrade(), source, InnerShadowImportant);
}
}
for source in important_user.drain() {
current = current.ensure_child(self.root.downgrade(), source, UserImportant);
}
@ -295,9 +331,10 @@ impl RuleTree {
applicable_declarations: &mut ApplicableDeclarationList,
guards: &StylesheetGuards,
) -> StrongRuleNode {
let rules = applicable_declarations.drain().map(|d| d.order_and_level());
let rule_node = self.insert_ordered_rules_with_important(rules, guards);
rule_node
self.insert_ordered_rules_with_important(
applicable_declarations.drain().map(|d| d.for_rule_tree()),
guards,
)
}
/// Insert the given rules, that must be in proper order by specifity, and
@ -381,8 +418,8 @@ impl RuleTree {
// also equally valid. This is less likely, and would require an
// in-place mutation of the source, which is, at best, fiddly,
// so let's skip it for now.
let is_here_already = match &current.get().source {
&StyleSource::Declarations(ref already_here) => {
let is_here_already = match current.get().source {
StyleSource::Declarations(ref already_here) => {
pdb.with_arc(|arc| Arc::ptr_eq(arc, already_here))
},
_ => unreachable!("Replacing non-declarations style?"),
@ -500,9 +537,22 @@ const RULE_TREE_GC_INTERVAL: usize = 300;
/// The order of variants declared here is significant, and must be in
/// _ascending_ order of precedence.
///
/// See also [4] for the Shadow DOM bits. We rely on the invariant that rules
/// from outside the tree the element is in can't affect the element.
///
/// The opposite is not true (i.e., :host and ::slotted) from an "inner" shadow
/// tree may affect an element connected to the document or an "outer" shadow
/// tree.
///
/// We need to differentiate between rules from the same tree and "inner" shadow
/// trees in order to be able to find the right position for the style attribute
/// easily. Otherwise we wouldn't be able to avoid selector-matching when a
/// style attribute is added or removed.
///
/// [1]: https://drafts.csswg.org/css-cascade/#cascade-origin
/// [2]: https://drafts.csswg.org/css-cascade/#preshint
/// [3]: https://html.spec.whatwg.org/multipage/#presentational-hints
/// [4]: https://drafts.csswg.org/css-scoping/#shadow-cascading
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
@ -513,18 +563,27 @@ pub enum CascadeLevel {
UserNormal,
/// Presentational hints.
PresHints,
/// Author normal rules.
AuthorNormal,
/// Shadow DOM styles from "inner" shadow trees.
///
/// See above for why this is needed instead of merging InnerShadowNormal,
/// SameTreeAuthorNormal and StyleAttributeNormal inside something like
/// AuthorNormal.
InnerShadowNormal,
/// Author normal rules from the same tree the element is in.
SameTreeAuthorNormal,
/// Style attribute normal rules.
StyleAttributeNormal,
/// SVG SMIL animations.
SMILOverride,
/// CSS animations and script-generated animations.
Animations,
/// Author-supplied important rules.
AuthorImportant,
/// Author-supplied important rules from the same tree the element came
/// from.
SameTreeAuthorImportant,
/// Style attribute important rules.
StyleAttributeImportant,
/// Shadow DOM important rules.
InnerShadowImportant,
/// User important rules.
UserImportant,
/// User-agent important rules.
@ -571,7 +630,8 @@ impl CascadeLevel {
#[inline]
pub fn is_important(&self) -> bool {
match *self {
CascadeLevel::AuthorImportant |
CascadeLevel::SameTreeAuthorImportant |
CascadeLevel::InnerShadowImportant |
CascadeLevel::StyleAttributeImportant |
CascadeLevel::UserImportant |
CascadeLevel::UAImportant => true,
@ -1302,11 +1362,13 @@ impl StrongRuleNode {
},
// Author rules:
CascadeLevel::PresHints |
CascadeLevel::AuthorNormal |
CascadeLevel::SameTreeAuthorNormal |
CascadeLevel::InnerShadowNormal |
CascadeLevel::StyleAttributeNormal |
CascadeLevel::SMILOverride |
CascadeLevel::Animations |
CascadeLevel::AuthorImportant |
CascadeLevel::SameTreeAuthorImportant |
CascadeLevel::InnerShadowImportant |
CascadeLevel::StyleAttributeImportant |
CascadeLevel::Transitions => {
for (id, declaration) in longhands {

View File

@ -14,7 +14,7 @@ use hash::{HashMap, HashSet};
use hash::map as hash_map;
use hashglobe::FailedAllocationError;
use precomputed_hash::PrecomputedHash;
use rule_tree::CascadeLevel;
use rule_tree::{CascadeLevel, ShadowCascadeOrder};
use selector_parser::SelectorImpl;
use selectors::matching::{matches_selector, ElementSelectorFlags, MatchingContext};
use selectors::parser::{Combinator, Component, SelectorIter};
@ -163,6 +163,7 @@ impl SelectorMap<Rule> {
context: &mut MatchingContext<E::Impl>,
flags_setter: &mut F,
cascade_level: CascadeLevel,
shadow_cascade_order: ShadowCascadeOrder,
) where
E: TElement,
F: FnMut(&E, ElementSelectorFlags),
@ -185,6 +186,7 @@ impl SelectorMap<Rule> {
context,
flags_setter,
cascade_level,
shadow_cascade_order,
)
}
}
@ -198,6 +200,7 @@ impl SelectorMap<Rule> {
context,
flags_setter,
cascade_level,
shadow_cascade_order,
)
}
});
@ -210,6 +213,7 @@ impl SelectorMap<Rule> {
context,
flags_setter,
cascade_level,
shadow_cascade_order,
)
}
@ -220,6 +224,7 @@ impl SelectorMap<Rule> {
context,
flags_setter,
cascade_level,
shadow_cascade_order,
);
// Sort only the rules we just added.
@ -235,6 +240,7 @@ impl SelectorMap<Rule> {
context: &mut MatchingContext<E::Impl>,
flags_setter: &mut F,
cascade_level: CascadeLevel,
shadow_cascade_order: ShadowCascadeOrder,
) where
E: TElement,
F: FnMut(&E, ElementSelectorFlags),
@ -248,7 +254,7 @@ impl SelectorMap<Rule> {
context,
flags_setter,
) {
matching_rules.push(rule.to_applicable_declaration_block(cascade_level));
matching_rules.push(rule.to_applicable_declaration_block(cascade_level, shadow_cascade_order));
}
}
}

View File

@ -23,7 +23,7 @@ use media_queries::Device;
use properties::{self, CascadeFlags, ComputedValues};
use properties::{AnimationRules, PropertyDeclarationBlock};
use rule_cache::{RuleCache, RuleCacheConditions};
use rule_tree::{CascadeLevel, RuleTree, StrongRuleNode, StyleSource};
use rule_tree::{CascadeLevel, RuleTree, ShadowCascadeOrder, StrongRuleNode, StyleSource};
use selector_map::{PrecomputedHashMap, SelectorMap, SelectorMapEntry};
use selector_parser::{PerPseudoElementMap, PseudoElement, SelectorImpl, SnapshotMap};
use selectors::NthIndexCache;
@@ -693,7 +693,7 @@ impl Stylist {
match declarations {
Some(decls) => self.rule_tree.insert_ordered_rules_with_important(
decls.into_iter().map(|a| (a.source.clone(), a.level())),
decls.into_iter().map(|a| a.clone().for_rule_tree()),
guards,
),
None => self.rule_tree.root().clone(),
@@ -1020,7 +1020,7 @@ impl Stylist {
);
if !declarations.is_empty() {
let rule_node = self.rule_tree.insert_ordered_rules_with_important(
declarations.drain().map(|a| a.order_and_level()),
declarations.drain().map(|a| a.for_rule_tree()),
guards,
);
if rule_node != *self.rule_tree.root() {
@@ -1187,6 +1187,7 @@ impl Stylist {
context,
flags_setter,
CascadeLevel::UANormal,
0,
);
}
@@ -1208,6 +1209,7 @@ impl Stylist {
context,
flags_setter,
CascadeLevel::UserNormal,
0,
);
}
}
@@ -1232,6 +1234,7 @@ impl Stylist {
}
let mut match_document_author_rules = matches_author_rules;
let mut shadow_cascade_order = 0;
// XBL / Shadow DOM rules, which are author rules too.
//
@@ -1249,9 +1252,11 @@ impl Stylist {
applicable_declarations,
context,
flags_setter,
CascadeLevel::AuthorNormal,
CascadeLevel::InnerShadowNormal,
shadow_cascade_order,
);
});
shadow_cascade_order += 1;
}
}
@@ -1275,9 +1280,11 @@ impl Stylist {
applicable_declarations,
context,
flags_setter,
CascadeLevel::AuthorNormal,
CascadeLevel::InnerShadowNormal,
shadow_cascade_order,
);
});
shadow_cascade_order += 1;
}
}
@@ -1291,9 +1298,11 @@ impl Stylist {
applicable_declarations,
context,
flags_setter,
CascadeLevel::AuthorNormal,
CascadeLevel::SameTreeAuthorNormal,
shadow_cascade_order,
);
});
shadow_cascade_order += 1;
}
match_document_author_rules = false;
@@ -1309,10 +1318,6 @@ impl Stylist {
if let Some(map) = cascade_data.normal_rules(pseudo_element) {
// NOTE(emilio): This is needed because the XBL stylist may
// think it has a different quirks mode than the document.
//
// FIXME(emilio): this should use the same VisitedMatchingMode
// as `context`, write a test-case of :visited not working on
// Shadow DOM and fix it!
let mut matching_context = MatchingContext::new(
context.matching_mode(),
context.bloom_filter,
@@ -1322,13 +1327,16 @@ impl Stylist {
matching_context.pseudo_element_matching_fn =
context.pseudo_element_matching_fn;
// SameTreeAuthorNormal instead of InnerShadowNormal to
// preserve behavior, though that's kinda fishy...
map.get_all_matching_rules(
element,
rule_hash_target,
applicable_declarations,
&mut matching_context,
flags_setter,
CascadeLevel::AuthorNormal,
CascadeLevel::SameTreeAuthorNormal,
shadow_cascade_order,
);
}
});
@@ -1344,7 +1352,8 @@ impl Stylist {
applicable_declarations,
context,
flags_setter,
CascadeLevel::AuthorNormal,
CascadeLevel::SameTreeAuthorNormal,
shadow_cascade_order,
);
}
}
@@ -2172,6 +2181,7 @@ impl CascadeData {
self.rules_source_order,
CascadeLevel::UANormal,
selector.specificity(),
0,
));
continue;
}
@@ -2468,9 +2478,10 @@ impl Rule {
pub fn to_applicable_declaration_block(
&self,
level: CascadeLevel,
shadow_cascade_order: ShadowCascadeOrder,
) -> ApplicableDeclarationBlock {
let source = StyleSource::Style(self.style_rule.clone());
ApplicableDeclarationBlock::new(source, self.source_order, level, self.specificity())
ApplicableDeclarationBlock::new(source, self.source_order, level, self.specificity(), shadow_cascade_order)
}
/// Creates a new Rule.

View File
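The Stylist hunks above thread a ShadowCascadeOrder value through matching: the counter starts at zero, each tree's rules are collected with the current value, the counter is bumped after each tree, and the value ends up in the ApplicableDeclarationBlock alongside the cascade level. Below is a minimal standalone sketch of that counter pattern (not part of the patch; the types, the u8 alias, and the traversal order are assumptions for illustration):

// Standalone sketch only: stand-in types for the counter pattern above. In the
// real code the order value is passed into get_all_matching_rules and stored
// in the ApplicableDeclarationBlock that each matched rule produces.
type ShadowCascadeOrder = u8; // assumed width; the real alias lives in rule_tree

#[derive(Debug)]
struct MatchedRule {
    tree: &'static str,
    shadow_cascade_order: ShadowCascadeOrder,
}

fn collect_tree_rules(tree: &'static str,
                      shadow_cascade_order: ShadowCascadeOrder,
                      out: &mut Vec<MatchedRule>) {
    // Placeholder for one tree's rule collection.
    out.push(MatchedRule { tree, shadow_cascade_order });
}

fn main() {
    let mut shadow_cascade_order: ShadowCascadeOrder = 0;
    let mut matched = Vec::new();
    // The traversal order here is made up; what matters is that the counter is
    // bumped once per tree whose rules were collected, as in the patch.
    for &tree in &["inner shadow tree", "outer shadow tree", "element's own tree"] {
        collect_tree_rules(tree, shadow_cascade_order, &mut matched);
        shadow_cascade_order += 1;
    }
    // The stored value lets the rule tree tell apart declarations that share a
    // cascade level but came from different trees.
    println!("{:#?}", matched);
}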

@@ -2708,7 +2708,7 @@ pub unsafe extern "C" fn Servo_ComputedValues_GetForAnonymousBox(
let level = match origin {
Origin::UserAgent => CascadeLevel::UANormal,
Origin::User => CascadeLevel::UserNormal,
Origin::Author => CascadeLevel::AuthorNormal,
Origin::Author => CascadeLevel::SameTreeAuthorNormal,
};
for rule in data.pages.iter() {
declarations.push(ApplicableDeclarationBlock::from_declarations(

View File

@@ -35,7 +35,7 @@ size_of_test!(test_size_of_element_data, ElementData, 24);
size_of_test!(test_size_of_property_declaration, style::properties::PropertyDeclaration, 32);
size_of_test!(test_size_of_application_declaration_block, ApplicableDeclarationBlock, 24);
size_of_test!(test_size_of_application_declaration_block, ApplicableDeclarationBlock, 32);
size_of_test!(test_size_of_rule_node, RuleNode, 80);
// This is huge, but we allocate it on the stack and then never move it,

View File

@@ -91,8 +91,6 @@ jobs:
symbol: I(gps)
funsize-balrog-submitter:
symbol: I(fbs)
beet-mover:
symbol: I(bm)
update-verify:
symbol: I(uv)
diffoscope:

View File

@@ -1,23 +0,0 @@
FROM ubuntu:xenial
RUN apt-get -q update \
&& apt-get install --yes -q \
mercurial \
python-dev \
python-pip \
python-virtualenv \
libffi-dev \
liblzma-dev \
libssl-dev \
libyaml-dev \
libmysqlclient-dev \
clamav \
clamav-freshclam \
curl \
wget \
&& apt-get clean
COPY requirements.txt /tmp/
RUN pip install -r /tmp/requirements.txt
# Freshclam may be flaky, retry if it fails
RUN for i in 1 2 3 4 5; do freshclam --verbose && break || sleep 15; done

View File

@@ -1,2 +0,0 @@
sh
redo

View File

@@ -32,7 +32,8 @@ generate, and each entry includes the earlier (or 'from') version, and the most
recent (or 'to') version, which for most releases will likely be a taskcluster
artifact.
.. code-block::
.. code-block:: json
{
"to_mar": "https://queue.taskcluster.net/v1/task/EWtBFqVuT-WqG3tGLxWhmA/artifacts/public/build/ach/target.complete.mar",
"product": "Firefox",

View File

@@ -1,87 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for the en-US locale"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
buildinfo:
artifact: {{ artifact_base_url }}/target.json
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.json
mozinfo:
artifact: {{ artifact_base_url }}/target.mozinfo.json
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.mozinfo.json
socorroinfo:
artifact: {{ artifact_base_url }}/target.txt
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.txt
jsshell:
artifact: {{ artifact_base_url }}/target.jsshell.zip
s3_key: {{ s3_prefix }}jsshell/jsshell-{{ platform }}.zip
mozharness_package:
artifact: {{ artifact_base_url }}/mozharness.zip
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/mozharness.zip
xpi:
artifact: {{ artifact_base_url }}/target.langpack.xpi
s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
symbols:
artifact: {{ artifact_base_url }}/target.crashreporter-symbols.zip
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.crashreporter-symbols.zip
{% if platform == "win32" %}
buildid_info:
artifact: {{ artifact_base_url }}/target_info.txt
s3_key: {{ s3_prefix }}win32_info.txt
mar_tools_mar:
artifact: {{ artifact_base_url }}/host/bin/mar.exe
s3_key: {{ s3_prefix }}mar-tools/win32/mar.exe
mar_tools_mbdiff:
artifact: {{ artifact_base_url }}/host/bin/mbsdiff.exe
s3_key: {{ s3_prefix }}mar-tools/win32/mbsdiff.exe
{% endif %}
{% if platform == "win64" %}
buildid_info:
artifact: {{ artifact_base_url }}/target_info.txt
s3_key: {{ s3_prefix }}win64_info.txt
mar_tools_mar:
artifact: {{ artifact_base_url }}/host/bin/mar.exe
s3_key: {{ s3_prefix }}mar-tools/win64/mar.exe
mar_tools_mbdiff:
artifact: {{ artifact_base_url }}/host/bin/mbsdiff.exe
s3_key: {{ s3_prefix }}mar-tools/win64/mbsdiff.exe
{% endif %}
{% if platform == "linux-i686" %}
buildid_info:
artifact: {{ artifact_base_url }}/target_info.txt
s3_key: {{ s3_prefix }}linux_info.txt
mar_tools_mar:
artifact: {{ artifact_base_url }}/host/bin/mar
s3_key: {{ s3_prefix }}mar-tools/linux/mar
mar_tools_mbdiff:
artifact: {{ artifact_base_url }}/host/bin/mbsdiff
s3_key: {{ s3_prefix }}mar-tools/linux/mbsdiff
{% endif %}
{% if platform == "linux-x86_64" %}
buildid_info:
artifact: {{ artifact_base_url }}/target_info.txt
s3_key: {{ s3_prefix }}linux64_info.txt
mar_tools_mar:
artifact: {{ artifact_base_url }}/host/bin/mar
s3_key: {{ s3_prefix }}mar-tools/linux64/mar
mar_tools_mbdiff:
artifact: {{ artifact_base_url }}/host/bin/mbsdiff
s3_key: {{ s3_prefix }}mar-tools/linux64/mbsdiff
{% endif %}
{% if platform == "mac" %}
buildid_info:
artifact: {{ artifact_base_url }}/target_info.txt
s3_key: {{ s3_prefix }}macosx64_info.txt
{% endif %}
{% endfor %}

View File

@@ -1,16 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for the en-US locale"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
{% if platform == "mac" %}
package:
artifact: {{ artifact_base_url }}/target.dmg
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox {{ version }}.dmg
{% endif %}
{% endfor %}

View File

@@ -1,35 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for the en-US locale"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
{% if platform == "win32" %}
complete_mar:
artifact: {{ artifact_base_url }}/target.complete.mar
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
full_installer:
artifact: {{ artifact_base_url }}/target.installer.exe
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
{% if "esr" not in version %}
stub_installer:
artifact: {{ artifact_base_url }}/target.stub-installer.exe
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Installer.exe
{% endif %}
{% elif platform == "win64" %}
complete_mar:
artifact: {{ artifact_base_url }}/target.complete.mar
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
full_installer:
artifact: {{ artifact_base_url }}/target.installer.exe
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
{% else %}
complete_mar:
artifact: {{ artifact_base_url }}/target.complete.mar
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
{% endif %}
{% endfor %}

View File

@@ -1,38 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for the en-US locale"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
{% if platform == "win32" %}
package:
artifact: {{ artifact_base_url }}/target.zip
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
{% endif %}
{% if platform == "win64" %}
package:
artifact: {{ artifact_base_url }}/target.zip
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
{% endif %}
{% if platform == "linux-i686" %}
package:
artifact: {{ artifact_base_url }}/target.tar.bz2
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
{% endif %}
{% if platform == "linux-x86_64" %}
package:
artifact: {{ artifact_base_url }}/target.tar.bz2
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
{% endif %}
{% if platform == "mac" %}
# nothing to see here
{% endif %}
{% endfor %}

View File

@@ -1,11 +0,0 @@
---
metadata:
name: "Beet Mover L10N Changesets"
description: "Maps artifact locations to s3 key names for L10N changesets"
owner: "release@mozilla.com"
mapping:
all:
l10n_changesets:
artifact: {{ artifact_base_url }}/l10n_changesets.txt
s3_key: {{ s3_prefix }}l10n_changesets.txt

View File

@@ -1,16 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for partials"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
partial_mar:
artifact: {{ artifact_base_url }}/firefox-{{ partial_version }}-{{ version }}.{{ locale }}.{{ platform }}.partial.mar
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ partial_version }}-{{ version }}.partial.mar
partial_mar_sig:
artifact: {{ artifact_base_url }}/firefox-{{ partial_version }}-{{ version }}.{{ locale }}.{{ platform }}.partial.mar.asc
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ partial_version }}-{{ version }}.partial.mar.asc
{% endfor %}

View File

@@ -1,16 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for recompressed completes"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
complete_mar:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.{{ locale }}.{{ platform }}.bz2.complete.mar
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.bz2.complete.mar
complete_mar_sig:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.{{ locale }}.{{ platform }}.bz2.complete.mar.asc
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.bz2.complete.mar.asc
{% endfor %}

View File

@@ -1,65 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for the non en-US locales"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
# common deliverables
{{ locale }}:
complete_mar:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
checksum:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.checksums
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.checksums
checksum_sig:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.checksums.asc
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.checksums.asc
xpi:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
{% if platform == "win32" %}
full_installer:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer.exe
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
{% if "esr" not in version %}
stub_installer:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer-stub.exe
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Installer.exe
{% endif %}
package:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.zip
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
{% endif %}
{% if platform == "win64" %}
full_installer:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer.exe
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
package:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.zip
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
{% endif %}
{% if platform == "linux-i686" %}
package:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.tar.bz2
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
{% endif %}
{% if platform == "linux-x86_64" %}
package:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.tar.bz2
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
{% endif %}
{% if platform == "mac" %}
package:
artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.dmg
s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox {{ version }}.dmg
{% endif %}
{% endfor %}

View File

@@ -1,11 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for snap iamge"
owner: "release@mozilla.com"
mapping:
all:
snap:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.snap
s3_key: {{ s3_prefix }}snap/firefox-{{ version }}.snap

View File

@@ -1,14 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for snap checksums"
owner: "release@mozilla.com"
mapping:
all:
snap_checksum:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.snap.checksums
s3_key: {{ s3_prefix }}snap/firefox-{{ version }}.snap.checksums
snap_checksum_asc:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.snap.checksums.asc
s3_key: {{ s3_prefix }}snap/firefox-{{ version }}.snap.checksums.asc

View File

@@ -1,14 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for source bundles"
owner: "release@mozilla.com"
mapping:
all:
source_bundle:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.bundle
s3_key: {{ s3_prefix }}source/firefox-{{ version }}.bundle
source_tar:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.source.tar.xz
s3_key: {{ s3_prefix }}source/firefox-{{ version }}.source.tar.xz

View File

@@ -1,14 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for source bundle checksums"
owner: "release@mozilla.com"
mapping:
all:
source_checksum:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.source.checksums
s3_key: {{ s3_prefix }}source/firefox-{{ version }}.source.checksums
source_checksum_asc:
artifact: {{ artifact_base_url }}/firefox-{{ version }}.source.checksums.asc
s3_key: {{ s3_prefix }}source/firefox-{{ version }}.source.checksums.asc

View File

@@ -1,16 +0,0 @@
---
metadata:
name: "Beet Mover Manifest"
description: "Maps artifact locations to s3 key names for partials"
owner: "release@mozilla.com"
mapping:
{% for locale in locales %}
{{ locale }}:
partial_mar:
artifact: {{ artifact_base_url }}/firefox-{{ partial_version }}-{{ version }}.{{ locale }}.win32-to-win64.partial.mar
s3_key: {{ s3_prefix }}update/win32-to-win64/{{ locale }}/firefox-{{ partial_version }}-{{ version }}.partial.mar
partial_mar_sig:
artifact: {{ artifact_base_url }}/firefox-{{ partial_version }}-{{ version }}.{{ locale }}.win32-to-win64.partial.mar.asc
s3_key: {{ s3_prefix }}update/win32-to-win64/{{ locale }}/firefox-{{ partial_version }}-{{ version }}.partial.mar.asc
{% endfor %}

View File

@@ -1,18 +0,0 @@
config = {
"log_name": "bump_beta",
"version_files": [{"file": "browser/config/version_display.txt"}],
"repo": {
"repo": "https://hg.mozilla.org/releases/mozilla-beta",
"branch": "default",
"dest": "mozilla-beta",
"vcs": "hg",
"clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
},
"vcs_share_base": "/builds/hg-shared",
"push_dest": "ssh://hg.mozilla.org/releases/mozilla-beta",
"ignore_no_changes": True,
"ssh_user": "ffxbld",
"ssh_key": "~/.ssh/ffxbld_rsa",
"ship_it_root": "https://ship-it.mozilla.org",
"ship_it_username": "ship_it-ffxbld",
}

View File

@@ -1,22 +0,0 @@
config = {
"log_name": "bump_release",
"version_files": [
{"file": "browser/config/version.txt"},
{"file": "browser/config/version_display.txt"},
{"file": "config/milestone.txt"},
],
"repo": {
"repo": "https://hg.mozilla.org/releases/mozilla-release",
"branch": "default",
"dest": "mozilla-release",
"vcs": "hg",
"clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
},
"vcs_share_base": "/builds/hg-shared",
"push_dest": "ssh://hg.mozilla.org/releases/mozilla-release",
"ignore_no_changes": True,
"ssh_user": "ffxbld",
"ssh_key": "~/.ssh/ffxbld_rsa",
"ship_it_root": "https://ship-it.mozilla.org",
"ship_it_username": "ship_it-ffxbld",
}

View File

@@ -1,18 +0,0 @@
config = {
"log_name": "bump_beta",
"version_files": [{"file": "browser/config/version_display.txt"}],
"repo": {
"repo": "https://hg.mozilla.org/releases/mozilla-beta",
"branch": "default",
"dest": "mozilla-beta",
"vcs": "hg",
"clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
},
"vcs_share_base": "/builds/hg-shared",
"push_dest": "ssh://hg.mozilla.org/releases/mozilla-beta",
"ignore_no_changes": True,
"ssh_user": "ffxbld",
"ssh_key": "~/.ssh/ffxbld_rsa",
"ship_it_root": "https://ship-it.mozilla.org",
"ship_it_username": "ship_it-ffxbld",
}

View File

@@ -1,22 +0,0 @@
config = {
"log_name": "bump_esr52",
"version_files": [
{"file": "browser/config/version.txt"},
{"file": "browser/config/version_display.txt"},
{"file": "config/milestone.txt"},
],
"repo": {
"repo": "https://hg.mozilla.org/releases/mozilla-esr52",
"branch": "default",
"dest": "mozilla-esr52",
"vcs": "hg",
"clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
},
"vcs_share_base": "/builds/hg-shared",
"push_dest": "ssh://hg.mozilla.org/releases/mozilla-esr52",
"ignore_no_changes": True,
"ssh_user": "ffxbld",
"ssh_key": "~/.ssh/ffxbld_rsa",
"ship_it_root": "https://ship-it.mozilla.org",
"ship_it_username": "ship_it-ffxbld",
}

View File

@@ -1,22 +0,0 @@
config = {
"log_name": "bump_release",
"version_files": [
{"file": "browser/config/version.txt"},
{"file": "browser/config/version_display.txt"},
{"file": "config/milestone.txt"},
],
"repo": {
"repo": "https://hg.mozilla.org/releases/mozilla-release",
"branch": "default",
"dest": "mozilla-release",
"vcs": "hg",
"clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
},
"vcs_share_base": "/builds/hg-shared",
"push_dest": "ssh://hg.mozilla.org/releases/mozilla-release",
"ignore_no_changes": True,
"ssh_user": "ffxbld",
"ssh_key": "~/.ssh/ffxbld_rsa",
"ship_it_root": "https://ship-it.mozilla.org",
"ship_it_username": "ship_it-ffxbld",
}

View File

@@ -1,378 +0,0 @@
#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
"""beet_mover.py.
downloads artifacts, scans them and uploads them to s3
"""
import hashlib
import sys
import os
import pprint
import re
from os import listdir
from os.path import isfile, join
import sh
import redo
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.log import FATAL
from mozharness.base.python import VirtualenvMixin
from mozharness.base.script import BaseScript
from mozharness.mozilla.aws import pop_aws_auth_from_env
import mozharness
import mimetypes
def get_hash(content, hash_type="md5"):
h = hashlib.new(hash_type)
h.update(content)
return h.hexdigest()
CONFIG_OPTIONS = [
[["--template"], {
"dest": "template",
"help": "Specify jinja2 template file",
}],
[['--locale', ], {
"action": "extend",
"dest": "locales",
"type": "string",
"help": "Specify the locale(s) to upload."}],
[["--platform"], {
"dest": "platform",
"help": "Specify the platform of the build",
}],
[["--version"], {
"dest": "version",
"help": "full release version based on gecko and tag/stage identifier. e.g. '44.0b1'"
}],
[["--app-version"], {
"dest": "app_version",
"help": "numbered version based on gecko. e.g. '44.0'"
}],
[["--partial-version"], {
"dest": "partial_version",
"help": "the partial version the mar is based off of"
}],
[["--artifact-subdir"], {
"dest": "artifact_subdir",
"default": 'build',
"help": "subdir location for taskcluster artifacts after public/ base.",
}],
[["--build-num"], {
"dest": "build_num",
"help": "the release build identifier"
}],
[["--taskid"], {
"dest": "taskid",
"help": "taskcluster task id to download artifacts from",
}],
[["--bucket"], {
"dest": "bucket",
"help": "s3 bucket to move beets to.",
}],
[["--product"], {
"dest": "product",
"help": "product for which artifacts are beetmoved",
}],
[["--exclude"], {
"dest": "excludes",
"action": "append",
"help": "List of filename patterns to exclude. See script source for default",
}],
[["-s", "--scan-parallelization"], {
"dest": "scan_parallelization",
"default": 4,
"type": "int",
"help": "Number of concurrent file scans",
}],
]
DEFAULT_EXCLUDES = [
r"^.*tests.*$",
r"^.*crashreporter.*$",
r"^.*\.zip(\.asc)?$",
r"^.*\.log$",
r"^.*\.txt$",
r"^.*\.asc$",
r"^.*/partner-repacks.*$",
r"^.*.checksums(\.asc)?$",
r"^.*/logs/.*$",
r"^.*json$",
r"^.*/host.*$",
r"^.*/mar-tools/.*$",
r"^.*robocop.apk$",
r"^.*contrib.*"
]
CACHE_DIR = 'cache'
MIME_MAP = {
'': 'text/plain',
'.asc': 'text/plain',
'.beet': 'text/plain',
'.bundle': 'application/octet-stream',
'.bz2': 'application/octet-stream',
'.checksums': 'text/plain',
'.dmg': 'application/x-iso9660-image',
'.mar': 'application/octet-stream',
'.xpi': 'application/x-xpinstall'
}
HASH_FORMATS = ["sha512", "sha256"]
class BeetMover(BaseScript, VirtualenvMixin, object):
def __init__(self, aws_creds):
beetmover_kwargs = {
'config_options': CONFIG_OPTIONS,
'all_actions': [
# 'clobber',
'create-virtualenv',
'activate-virtualenv',
'generate-candidates-manifest',
'refresh-antivirus',
'verify-bits', # beets
'download-bits', # beets
'scan-bits', # beets
'upload-bits', # beets
],
'require_config_file': False,
# Default configuration
'config': {
# base index url where to find taskcluster artifact based on taskid
"artifact_base_url": \
'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
"virtualenv_modules": [
"boto",
"PyYAML",
"Jinja2",
"redo",
"cryptography==2.0.3",
"mar",
],
"virtualenv_path": "venv",
},
}
# todo do excludes need to be configured via command line for specific builds?
super(BeetMover, self).__init__(**beetmover_kwargs)
c = self.config
self.manifest = {}
# assigned in _post_create_virtualenv
self.virtualenv_imports = None
self.bucket = c['bucket']
if not all(aws_creds):
self.fatal('credentials must be passed in env: '
'"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
self.aws_key_id, self.aws_secret_key = aws_creds
# if excludes is set from command line, use it otherwise use defaults
self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
dirs = self.query_abs_dirs()
self.dest_dir = os.path.join(dirs['abs_work_dir'], CACHE_DIR)
self.mime_fix()
def activate_virtualenv(self):
"""
activates the virtualenv and adds module imports to an instance-wide namespace.
Creating and activating a virtualenv on the currently executing Python interpreter is a
bit of black magic. Rather than having import statements added in various places within the
script, we import them here immediately after we activate the newly created virtualenv.
"""
VirtualenvMixin.activate_virtualenv(self)
import boto
import yaml
import jinja2
self.virtualenv_imports = {
'boto': boto,
'yaml': yaml,
'jinja2': jinja2,
}
self.log("activated virtualenv with the modules: {}".format(str(self.virtualenv_imports)))
def _get_template_vars(self):
return {
"platform": self.config['platform'],
"locales": self.config.get('locales'),
"version": self.config['version'],
"app_version": self.config.get('app_version', ''),
"partial_version": self.config.get('partial_version', ''),
"build_num": self.config['build_num'],
# keep the trailing slash
"s3_prefix": 'pub/{prod}/candidates/{ver}-candidates/{n}/'.format(
prod=self.config['product'], ver=self.config['version'],
n=self.config['build_num']
),
"artifact_base_url": self.config['artifact_base_url'].format(
taskid=self.config['taskid'], subdir=self.config['artifact_subdir']
)
}
def generate_candidates_manifest(self):
"""
generates and outputs a manifest that maps expected Taskcluster artifact names
to release deliverable names
"""
self.log('generating manifest from {}...'.format(self.config['template']))
template_dir, template_file = os.path.split(os.path.abspath(self.config['template']))
jinja2 = self.virtualenv_imports['jinja2']
yaml = self.virtualenv_imports['yaml']
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
undefined=jinja2.StrictUndefined)
template = jinja_env.get_template(template_file)
self.manifest = yaml.safe_load(template.render(**self._get_template_vars()))
self.log("manifest generated:")
self.log(pprint.pformat(self.manifest['mapping']))
def verify_bits(self):
"""
inspects each artifact and verifies that they were created by trustworthy tasks
"""
# TODO
self.log('skipping verification. unimplemented...')
def refresh_antivirus(self):
self.info("Refreshing clamav db...")
try:
redo.retry(lambda:
sh.freshclam("--stdout", "--verbose", _timeout=300,
_err_to_out=True))
self.info("Done.")
except sh.ErrorReturnCode:
self.warning("Freshclam failed, skipping DB update")
def download_bits(self):
"""
downloads list of artifacts to self.dest_dir dir based on a given manifest
"""
self.log('downloading and uploading artifacts to self_dest_dir...')
dirs = self.query_abs_dirs()
for locale in self.manifest['mapping']:
for deliverable in self.manifest['mapping'][locale]:
self.log("downloading '{}' deliverable for '{}' locale".format(deliverable,
locale))
source = self.manifest['mapping'][locale][deliverable]['artifact']
self.retry(
self.download_file,
args=[source],
kwargs={'parent_dir': dirs['abs_work_dir']},
error_level=FATAL)
self.log('Success!')
def _strip_prefix(self, s3_key):
"""Return file name relative to prefix"""
# "abc/def/hfg".split("abc/de")[-1] == "f/hfg"
return s3_key.split(self._get_template_vars()["s3_prefix"])[-1]
def upload_bits(self):
"""
uploads list of artifacts to s3 candidates dir based on a given manifest
"""
self.log('uploading artifacts to s3...')
dirs = self.query_abs_dirs()
# connect to s3
boto = self.virtualenv_imports['boto']
conn = boto.connect_s3(self.aws_key_id, self.aws_secret_key)
bucket = conn.get_bucket(self.bucket)
for locale in self.manifest['mapping']:
for deliverable in self.manifest['mapping'][locale]:
self.log("uploading '{}' deliverable for '{}' locale".format(deliverable, locale))
# we have already downloaded the files locally so we can use that version
source = self.manifest['mapping'][locale][deliverable]['artifact']
s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
downloaded_file = os.path.join(dirs['abs_work_dir'],
self.get_filename_from_url(source))
# generate checksums for every uploaded file
beet_file_name = '{}.beet'.format(downloaded_file)
# upload checksums to a separate subdirectory
beet_dest = '{prefix}beetmover-checksums/{f}.beet'.format(
prefix=self._get_template_vars()["s3_prefix"],
f=self._strip_prefix(s3_key)
)
beet_contents = '\n'.join([
'{hash} {fmt} {size} {name}'.format(
hash=self.get_hash_for_file(downloaded_file, hash_type=fmt),
fmt=fmt,
size=os.path.getsize(downloaded_file),
name=self._strip_prefix(s3_key)) for fmt in HASH_FORMATS
])
self.write_to_file(beet_file_name, beet_contents)
self.upload_bit(source=downloaded_file, s3_key=s3_key,
bucket=bucket)
self.upload_bit(source=beet_file_name, s3_key=beet_dest,
bucket=bucket)
self.log('Success!')
def upload_bit(self, source, s3_key, bucket):
boto = self.virtualenv_imports['boto']
self.info('uploading to s3 with key: {}'.format(s3_key))
key = boto.s3.key.Key(bucket) # create new key
key.key = s3_key # set key name
self.info("Checking if `{}` already exists".format(s3_key))
key = bucket.get_key(s3_key)
if not key:
self.info("Uploading to `{}`".format(s3_key))
key = bucket.new_key(s3_key)
# set key value
mime_type, _ = mimetypes.guess_type(source)
self.retry(lambda: key.set_contents_from_filename(
source, headers={'Content-Type': mime_type}), error_level=FATAL),
else:
if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
# for now, let's halt. If necessary, we can revisit this and allow for overwrites
# to the same buildnum release with different bits
self.fatal("`{}` already exists with different checksum.".format(s3_key))
self.log("`{}` has the same MD5 checksum, not uploading".format(s3_key))
def scan_bits(self):
dirs = self.query_abs_dirs()
filenames = [f for f in listdir(dirs['abs_work_dir'])
if isfile(join(dirs['abs_work_dir'], f))]
self.mkdir_p(self.dest_dir)
for file_name in filenames:
if self._matches_exclude(file_name):
self.info("Excluding {} from virus scan".format(file_name))
else:
self.info('Copying {} to {}'.format(file_name, self.dest_dir))
self.copyfile(os.path.join(dirs['abs_work_dir'], file_name),
os.path.join(self.dest_dir, file_name))
self._scan_files()
self.info('Emptying {}'.format(self.dest_dir))
self.rmtree(self.dest_dir)
def _scan_files(self):
"""Scan the files we've collected. We do the download and scan concurrently to make
it easier to have a coherent log afterwards. Uses the venv python."""
external_tools_path = os.path.join(os.path.abspath(os.path.dirname(
os.path.dirname(mozharness.__file__))), 'external_tools')
self.run_command([self.query_python_path(), os.path.join(external_tools_path,
'extract_and_run_command.py'),
'-j{}'.format(self.config['scan_parallelization']),
'clamscan', '--no-summary', '--', self.dest_dir])
def _matches_exclude(self, keyname):
return any(re.search(exclude, keyname) for exclude in self.excludes)
def mime_fix(self):
""" Add mimetypes for custom extensions """
mimetypes.init()
map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
if __name__ == '__main__':
beet_mover = BeetMover(pop_aws_auth_from_env())
beet_mover.run_and_exit()

View File

@@ -1,107 +0,0 @@
#!/usr/bin/env python
# lint_ignore=E501
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
""" postrelease_bouncer_aliases.py
A script to replace the old-fashioned way of updating the bouncer aliases through
the tools script.
"""
import os
import sys
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
from mozharness.base.script import BaseScript
from mozharness.mozilla.buildbot import BuildbotMixin
# PostReleaseBouncerAliases {{{1
class PostReleaseBouncerAliases(BaseScript, VirtualenvMixin, BuildbotMixin):
config_options = virtualenv_config_options
def __init__(self, require_config_file=True):
super(PostReleaseBouncerAliases, self).__init__(
config_options=self.config_options,
require_config_file=require_config_file,
config={
"virtualenv_modules": [
"redo",
"requests",
],
"virtualenv_path": "venv",
'credentials_file': 'oauth.txt',
'buildbot_json_path': 'buildprops.json',
},
all_actions=[
"create-virtualenv",
"activate-virtualenv",
"update-bouncer-aliases",
],
default_actions=[
"create-virtualenv",
"activate-virtualenv",
"update-bouncer-aliases",
],
)
def _pre_config_lock(self, rw_config):
super(PostReleaseBouncerAliases, self)._pre_config_lock(rw_config)
# override properties from buildbot properties here as defined by
# taskcluster properties
self.read_buildbot_config()
if not self.buildbot_config:
self.warning("Skipping buildbot properties overrides")
return
props = self.buildbot_config["properties"]
for prop in ['tuxedo_server_url', 'version']:
if props.get(prop):
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
else:
self.warning("%s could not be found within buildprops" % prop)
return
def _update_bouncer_alias(self, tuxedo_server_url, auth,
related_product, alias):
from redo import retry
import requests
url = "%s/create_update_alias" % tuxedo_server_url
data = {"alias": alias, "related_product": related_product}
self.log("Updating {} to point to {} using {}".format(alias,
related_product,
url))
# Wrap the real call to hide credentials from retry's logging
def do_update_bouncer_alias():
r = requests.post(url, data=data, auth=auth,
verify=False, timeout=60)
r.raise_for_status()
retry(do_update_bouncer_alias)
def update_bouncer_aliases(self):
tuxedo_server_url = self.config['tuxedo_server_url']
credentials_file = os.path.join(os.getcwd(),
self.config['credentials_file'])
credentials = {}
execfile(credentials_file, credentials)
auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
version = self.config['version']
for product, info in self.config["products"].iteritems():
if "alias" in info:
product_template = info["product-name"]
related_product = product_template % {"version": version}
self._update_bouncer_alias(tuxedo_server_url, auth,
related_product, info["alias"])
# __main__ {{{1
if __name__ == '__main__':
PostReleaseBouncerAliases().run_and_exit()

View File

@@ -1,110 +0,0 @@
#!/usr/bin/env python
# lint_ignore=E501
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
""" postrelease_mark_as_shipped.py
A script to automate the manual way of updating a release as shipped in Ship-it
following its successful ship-to-the-door operation.
"""
import os
import sys
from datetime import datetime
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
from mozharness.base.script import BaseScript
from mozharness.mozilla.buildbot import BuildbotMixin
def build_release_name(product, version, buildno):
"""Function to reconstruct the name of the release based on product,
version and buildnumber
"""
return "{}-{}-build{}".format(product.capitalize(),
str(version), str(buildno))
class MarkReleaseAsShipped(BaseScript, VirtualenvMixin, BuildbotMixin):
config_options = virtualenv_config_options
def __init__(self, require_config_file=True):
super(MarkReleaseAsShipped, self).__init__(
config_options=self.config_options,
require_config_file=require_config_file,
config={
"virtualenv_modules": [
"shipitapi",
],
"virtualenv_path": "venv",
"credentials_file": "oauth.txt",
"buildbot_json_path": "buildprops.json",
"timeout": 60,
},
all_actions=[
"create-virtualenv",
"activate-virtualenv",
"mark-as-shipped",
],
default_actions=[
"create-virtualenv",
"activate-virtualenv",
"mark-as-shipped",
],
)
def _pre_config_lock(self, rw_config):
super(MarkReleaseAsShipped, self)._pre_config_lock(rw_config)
# override properties from buildbot properties here as defined by
# taskcluster properties
self.read_buildbot_config()
if not self.buildbot_config:
self.warning("Skipping buildbot properties overrides")
return
props = self.buildbot_config['properties']
mandatory_props = ['product', 'version', 'build_number']
missing_props = []
for prop in mandatory_props:
if prop in props:
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
else:
self.warning("%s could not be found within buildprops" % prop)
missing_props.append(prop)
if missing_props:
raise Exception("%s not found in configs" % missing_props)
self.config['name'] = build_release_name(self.config['product'],
self.config['version'],
self.config['build_number'])
def mark_as_shipped(self):
"""Method to make a simple call to Ship-it API to change a release
status to 'shipped'
"""
credentials_file = os.path.join(os.getcwd(),
self.config["credentials_file"])
credentials = {}
execfile(credentials_file, credentials)
ship_it_credentials = credentials["ship_it_credentials"]
auth = (self.config["ship_it_username"],
ship_it_credentials.get(self.config["ship_it_username"]))
api_root = self.config['ship_it_root']
from shipitapi import Release
release_api = Release(auth, api_root=api_root,
timeout=self.config['timeout'])
shipped_at = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
self.info("Mark the release as shipped with %s timestamp" % shipped_at)
release_api.update(self.config['name'],
status='shipped', shippedAt=shipped_at)
if __name__ == '__main__':
MarkReleaseAsShipped().run_and_exit()

View File

@@ -1,231 +0,0 @@
#!/usr/bin/env python
# lint_ignore=E501
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
""" postrelease_version_bump.py
A script to increase in-tree version number after shipping a release.
"""
from distutils.version import StrictVersion
import os
import sys
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.base.vcs.mercurial import MercurialVCS
from mozharness.mozilla.buildbot import BuildbotMixin
from mozharness.mozilla.repo_manipulation import MercurialRepoManipulationMixin
# PostReleaseVersionBump {{{1
class PostReleaseVersionBump(MercurialScript, BuildbotMixin,
MercurialRepoManipulationMixin):
config_options = [
[['--hg-user', ], {
"action": "store",
"dest": "hg_user",
"type": "string",
"default": "ffxbld <release@mozilla.com>",
"help": "Specify what user to use to commit to hg.",
}],
[['--next-version', ], {
"action": "store",
"dest": "next_version",
"type": "string",
"help": "Next version used in version bump",
}],
[['--ssh-user', ], {
"action": "store",
"dest": "ssh_user",
"type": "string",
"help": "SSH username with hg.mozilla.org permissions",
}],
[['--ssh-key', ], {
"action": "store",
"dest": "ssh_key",
"type": "string",
"help": "Path to SSH key.",
}],
[['--product', ], {
"action": "store",
"dest": "product",
"type": "string",
"help": "Product name",
}],
[['--version', ], {
"action": "store",
"dest": "version",
"type": "string",
"help": "Version",
}],
[['--build-number', ], {
"action": "store",
"dest": "build_number",
"type": "string",
"help": "Build number",
}],
[['--revision', ], {
"action": "store",
"dest": "revision",
"type": "string",
"help": "HG revision to tag",
}],
]
def __init__(self, require_config_file=True):
super(PostReleaseVersionBump, self).__init__(
config_options=self.config_options,
all_actions=[
'clobber',
'clean-repos',
'pull',
'bump_postrelease',
'commit-changes',
'tag',
'push',
],
default_actions=[
'clean-repos',
'pull',
'bump_postrelease',
'commit-changes',
'tag',
'push',
],
config={
'buildbot_json_path': 'buildprops.json',
},
require_config_file=require_config_file
)
def _pre_config_lock(self, rw_config):
super(PostReleaseVersionBump, self)._pre_config_lock(rw_config)
# override properties from buildbot properties here as defined by
# taskcluster properties
self.read_buildbot_config()
if not self.buildbot_config:
self.warning("Skipping buildbot properties overrides")
else:
props = self.buildbot_config["properties"]
for prop in ['next_version', 'product', 'version', 'build_number',
'revision']:
if props.get(prop):
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
if not self.config.get("next_version"):
self.fatal("Next version has to be set. Use --next-version or "
"pass `next_version' via buildbot properties.")
def query_abs_dirs(self):
""" Allow for abs_from_dir and abs_to_dir
"""
if self.abs_dirs:
return self.abs_dirs
self.abs_dirs = super(PostReleaseVersionBump, self).query_abs_dirs()
self.abs_dirs["abs_gecko_dir"] = os.path.join(
self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
return self.abs_dirs
def query_repos(self):
"""Build a list of repos to clone."""
return [self.config["repo"]]
def query_commit_dirs(self):
return [self.query_abs_dirs()["abs_gecko_dir"]]
def query_commit_message(self):
return "Automatic version bump. CLOSED TREE NO BUG a=release"
def query_push_dirs(self):
return self.query_commit_dirs()
def query_push_args(self, cwd):
# cwd is not used here
hg_ssh_opts = "ssh -l {user} -i {key}".format(
user=self.config["ssh_user"],
key=os.path.expanduser(self.config["ssh_key"])
)
return ["-e", hg_ssh_opts, "-r", "."]
def pull(self):
dirs = self.query_abs_dirs()
# bug 1417697 - clone default first, then pull to get the revision.
# This is to deal with relbranches, which don't show up in mozilla-unified.
super(PostReleaseVersionBump, self).pull(
repos=self.query_repos())
vcs_obj = MercurialVCS(log_obj=self.log_obj, config=self.config)
vcs_obj.pull(
self.config['repo']['repo'],
dirs['abs_gecko_dir'],
update_dest=False,
revision=self.config['revision']
)
def bump_postrelease(self, *args, **kwargs):
"""Bump version"""
dirs = self.query_abs_dirs()
for f in self.config["version_files"]:
curr_version = ".".join(self.get_version(dirs['abs_gecko_dir'], f["file"]))
next_version = self.config['next_version']
if StrictVersion(next_version) < StrictVersion(curr_version):
self.warning("Version bumping skipped due to conflicting values")
continue
elif StrictVersion(next_version) == StrictVersion(curr_version):
self.info("Version bumping skipped due to unchanged values")
continue
else:
self.replace(os.path.join(dirs['abs_gecko_dir'], f["file"]),
curr_version, self.config["next_version"])
def check_tags(self, tag_names):
dirs = self.query_abs_dirs()
existing_tags = self.query_existing_tags(cwd=dirs['abs_gecko_dir'])
tags = []
for tag in tag_names:
if tag in existing_tags:
if self.config['revision'] == existing_tags[tag]:
self.info(
"Tag {} already exists on revision {}. Skipping...".format(
tag, self.config['revision']
)
)
continue
else:
self.warning(
"Tag {} exists on mismatched revision {}! Retagging...".format(
tag, existing_tags[tag]
)
)
tags.append(tag)
return tags
def tag(self):
dirs = self.query_abs_dirs()
tags = ["{product}_{version}_BUILD{build_number}",
"{product}_{version}_RELEASE"]
tags = [t.format(product=self.config["product"].upper(),
version=self.config["version"].replace(".", "_"),
build_number=self.config["build_number"])
for t in tags]
tags = self.check_tags(tags)
if not tags:
self.info("No unique tags to add; skipping tagging.")
return
message = "No bug - Tagging {revision} with {tags} a=release CLOSED TREE"
message = message.format(
revision=self.config["revision"],
tags=', '.join(tags))
self.hg_tag(cwd=dirs["abs_gecko_dir"], tags=tags,
revision=self.config["revision"], message=message,
user=self.config["hg_user"], force=True)
# __main__ {{{1
if __name__ == '__main__':
PostReleaseVersionBump().run_and_exit()

View File

@@ -20,6 +20,7 @@ ChromeUtils.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
ChromeUtils.import("resource://gre/modules/AppConstants.jsm");
ChromeUtils.import("resource://gre/modules/ServiceWorkerCleanUp.jsm");
// We're loaded with "this" not set to the global in some cases, so we
// have to play some games to get at the global object here. Normally
@@ -1961,11 +1962,11 @@ SpecialPowersAPI.prototype = {
},
removeAllServiceWorkerData() {
this.notifyObserversInParentProcess(null, "browser:purge-session-history", "");
return wrapIfUnwrapped(ServiceWorkerCleanUp.removeAll());
},
removeServiceWorkerDataForExampleDomain() {
this.notifyObserversInParentProcess(null, "browser:purge-domain-data", "example.com");
return wrapIfUnwrapped(ServiceWorkerCleanUp.removeFromHost("example.com"));
},
cleanUpSTSData(origin, flags) {

View File

@@ -1,43 +0,0 @@
[shadow-cascade-order-001.html]
[D1. document vs ::slotted both with !important, ::slotted rule should win for open mode.]
expected: FAIL
[D2. document vs :host both with !important, :host rule should win for open mode.]
expected: FAIL
[D4. ::slotted vs :host both with !important, later in tree-of-trees rule should win for open mode.]
expected: FAIL
[D5. ::slotted vs inline both with !important, ::slotted rule should win for open mode.]
expected: FAIL
[D6. :host vs inline both with !important, :host rule should win for open mode.]
expected: FAIL
[E2. all styles with !important applied, rule in the last tree-of-trees should win for open mode.]
expected: FAIL
[F6. all rules with !important, the last rule in tree-of-trees should win for open mode.]
expected: FAIL
[D1. document vs ::slotted both with !important, ::slotted rule should win for closed mode.]
expected: FAIL
[D2. document vs :host both with !important, :host rule should win for closed mode.]
expected: FAIL
[D4. ::slotted vs :host both with !important, later in tree-of-trees rule should win for closed mode.]
expected: FAIL
[D5. ::slotted vs inline both with !important, ::slotted rule should win for closed mode.]
expected: FAIL
[D6. :host vs inline both with !important, :host rule should win for closed mode.]
expected: FAIL
[E2. all styles with !important applied, rule in the last tree-of-trees should win for closed mode.]
expected: FAIL
[F6. all rules with !important, the last rule in tree-of-trees should win for closed mode.]
expected: FAIL

View File

@@ -219,7 +219,7 @@
oncommand="close();"/>
</content>
<implementation implements="nsIEditActionListener">
<implementation>
<!-- Please keep in sync with toolkit/content/browser-content.js -->
<field name="FIND_NORMAL">0</field>
<field name="FIND_TYPEAHEAD">1</field>

View File

@@ -11,6 +11,8 @@ ChromeUtils.defineModuleGetter(this, "PlacesUtils",
"resource://gre/modules/PlacesUtils.jsm");
ChromeUtils.defineModuleGetter(this, "Downloads",
"resource://gre/modules/Downloads.jsm");
ChromeUtils.defineModuleGetter(this, "ServiceWorkerCleanUp",
"resource://gre/modules/ServiceWorkerCleanUp.jsm");
var EXPORTED_SYMBOLS = ["ForgetAboutSite"];
@@ -142,7 +144,11 @@ var ForgetAboutSite = {
}));
}
// Offline Storages
// ServiceWorkers
await ServiceWorkerCleanUp.removeFromHost("http://" + aDomain);
await ServiceWorkerCleanUp.removeFromHost("https://" + aDomain);
// Offline Storages. This must run after the ServiceWorkers promises.
promises.push((async function() {
// delete data from both HTTP and HTTPS sites
let httpURI = NetUtil.newURI("http://" + aDomain);

View File

@@ -0,0 +1,60 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
XPCOMUtils.defineLazyServiceGetter(this, "serviceWorkerManager",
"@mozilla.org/serviceworkers/manager;1",
"nsIServiceWorkerManager");
this.EXPORTED_SYMBOLS = ["ServiceWorkerCleanUp"];
function unregisterServiceWorker(aSW) {
return new Promise(resolve => {
let unregisterCallback = {
unregisterSucceeded: resolve,
unregisterFailed: resolve, // We don't care about failures.
QueryInterface: XPCOMUtils.generateQI([Ci.nsIServiceWorkerUnregisterCallback])
};
serviceWorkerManager.propagateUnregister(aSW.principal, unregisterCallback, aSW.scope);
});
}
this.ServiceWorkerCleanUp = {
removeFromHost(aHost) {
let promises = [];
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
if (sw.principal.URI.host == aHost) {
promises.push(unregisterServiceWorker(sw));
}
}
return Promise.all(promises);
},
removeFromPrincipal(aPrincipal) {
let promises = [];
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
if (sw.principal.equals(aPrincipal)) {
promises.push(unregisterServiceWorker(sw));
}
}
return Promise.all(promises);
},
removeAll() {
let promises = [];
let serviceWorkers = serviceWorkerManager.getAllRegistrations();
for (let i = 0; i < serviceWorkers.length; i++) {
let sw = serviceWorkers.queryElementAt(i, Ci.nsIServiceWorkerRegistrationInfo);
promises.push(unregisterServiceWorker(sw));
}
return Promise.all(promises);
},
};

View File

@@ -9,6 +9,7 @@ XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini']
EXTRA_JS_MODULES += [
'ForgetAboutSite.jsm',
'ServiceWorkerCleanUp.jsm',
]
with Files('**'):

View File

@@ -185,7 +185,7 @@ already_AddRefed<nsIRunnable>
PrioritizedEventQueue<InnerQueueT>::GetEvent(EventPriority* aPriority,
const MutexAutoLock& aProofOfLock)
{
MakeScopeExit([&] {
auto guard = MakeScopeExit([&] {
mHasPendingEventsPromisedIdleEvent = false;
});