commit 4a05296446
Mirror of https://github.com/mozilla/gecko-dev.git, synced 2024-11-24 13:21:05 +00:00

    Merge m-c to inbound, a=merge

    MozReview-Commit-ID: Cb0b59wJ0vy
@@ -18,6 +18,8 @@ module.exports = {
 
     // No (!foo in bar) or (!object instanceof Class)
     "no-unsafe-negation": "error",
+    // No eval() and no strings in the first param of setTimeout or setInterval
+    "no-implied-eval": "error",
   },
   "env": {
     "es6": true
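The two lines added above enable ESLint's `no-implied-eval` rule, which rejects string arguments to `setTimeout` and `setInterval` because such strings are compiled and evaluated like `eval` when the timer fires. A minimal sketch of what the rule flags and the form it expects instead (the `doCleanup` name is only a placeholder):

```javascript
// Flagged by no-implied-eval: the callback is a string that gets evaluated
// in the global scope when the timer fires.
setTimeout("doCleanup()", 100);

// Accepted: pass a real function (or arrow) instead of a string.
setTimeout(() => doCleanup(), 100);

function doCleanup() {}
```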
@@ -522,6 +522,10 @@ toolbar:not(#TabsToolbar) > #personal-bookmarks {
   display: none;
 }
 
+#PopupAutoComplete[firstresultstyle="insecureWarning"] {
+  min-width: 200px;
+}
+
 #PopupAutoComplete > richlistbox > richlistitem[originaltype="insecureWarning"] {
   -moz-binding: url("chrome://global/content/bindings/autocomplete.xml#autocomplete-richlistitem-insecure-field");
   height: auto;
@@ -1071,17 +1071,6 @@
       <deck id="content-deck" flex="1">
         <hbox flex="1" id="browser">
           <vbox id="browser-border-start" hidden="true" layer="true"/>
-          <vbox id="sidebar-box" hidden="true" class="chromeclass-extrachrome">
-            <sidebarheader id="sidebar-header" align="center">
-              <label id="sidebar-title" persist="value" flex="1" crop="end" control="sidebar"/>
-              <image id="sidebar-throbber"/>
-              <toolbarbutton class="close-icon tabbable" tooltiptext="&sidebarCloseButton.tooltip;" oncommand="SidebarUI.hide();"/>
-            </sidebarheader>
-            <browser id="sidebar" flex="1" autoscroll="false" disablehistory="true" disablefullscreen="true"
-                     style="min-width: 14em; width: 18em; max-width: 36em;" tooltip="aHTMLTooltip"/>
-          </vbox>
-
-          <splitter id="sidebar-splitter" class="chromeclass-extrachrome sidebar-splitter" hidden="true"/>
           <vbox id="appcontent" flex="1">
             <notificationbox id="high-priority-global-notificationbox" notificationside="top"/>
             <tabbrowser id="content"
@@ -1092,6 +1081,16 @@
                         selectmenulist="ContentSelectDropdown"
                         datetimepicker="DateTimePickerPanel"/>
           </vbox>
+          <splitter id="sidebar-splitter" class="chromeclass-extrachrome sidebar-splitter" hidden="true"/>
+          <vbox id="sidebar-box" hidden="true" class="chromeclass-extrachrome">
+            <sidebarheader id="sidebar-header" align="center">
+              <label id="sidebar-title" persist="value" flex="1" crop="end" control="sidebar"/>
+              <image id="sidebar-throbber"/>
+              <toolbarbutton class="close-icon tabbable" tooltiptext="&sidebarCloseButton.tooltip;" oncommand="SidebarUI.hide();"/>
+            </sidebarheader>
+            <browser id="sidebar" flex="1" autoscroll="false" disablehistory="true" disablefullscreen="true"
+                     style="min-width: 14em; width: 18em; max-width: 36em;" tooltip="aHTMLTooltip"/>
+          </vbox>
           <vbox id="browser-border-end" hidden="true" layer="true"/>
         </hbox>
 #include ../../components/customizableui/content/customizeMode.inc.xul
@@ -145,25 +145,27 @@ add_task(function* () {
   let sidebar = document.getElementById("sidebar");
 
   let loadPromise = BrowserTestUtils.waitForEvent(sidebar, "load", true);
+  let focusPromise = BrowserTestUtils.waitForEvent(sidebar, "focus", true);
   SidebarUI.toggle("viewBookmarksSidebar");
   yield loadPromise;
+  yield focusPromise;
 
   gURLBar.focus();
 
+  yield* expectFocusOnF6(false, "html1", "html1",
+                         true, "focus with sidebar open content");
   yield* expectFocusOnF6(false, "bookmarksPanel",
                          sidebar.contentDocument.getElementById("search-box").inputField,
                          false, "focus with sidebar open sidebar");
-  yield* expectFocusOnF6(false, "html1", "html1",
-                         true, "focus with sidebar open content");
   yield* expectFocusOnF6(false, "main-window", gURLBar.inputField,
                          false, "focus with sidebar urlbar");
 
   // Now go backwards
-  yield* expectFocusOnF6(true, "html1", "html1",
-                         true, "back focus with sidebar open content");
   yield* expectFocusOnF6(true, "bookmarksPanel",
                          sidebar.contentDocument.getElementById("search-box").inputField,
                          false, "back focus with sidebar open sidebar");
+  yield* expectFocusOnF6(true, "html1", "html1",
+                         true, "back focus with sidebar open content");
   yield* expectFocusOnF6(true, "main-window", gURLBar.inputField,
                          false, "back focus with sidebar urlbar");
 
@@ -159,11 +159,6 @@ var whitelist = new Set([
    platforms: ["linux"]},
   {file: "chrome://global/skin/arrow/panelarrow-vertical.svg",
    platforms: ["linux"]},
-  // Bug 1348359
-  {file: "chrome://global/skin/dirListing/folder.png", platforms: ["linux"]},
-  {file: "chrome://global/skin/dirListing/local.png", platforms: ["linux", "win"]},
-  {file: "chrome://global/skin/dirListing/remote.png"},
-  {file: "chrome://global/skin/dirListing/up.png", platforms: ["linux"]},
   // Bug 1348362
   {file: "chrome://global/skin/icons/Close.gif", platforms: ["win"]},
   {file: "chrome://global/skin/icons/Error.png", platforms: ["linux", "macosx"]},
@@ -124,6 +124,12 @@ file, You can obtain one at http://mozilla.org/MPL/2.0/.
         this.inputField.removeEventListener("mouseout", this);
         this.inputField.removeEventListener("overflow", this);
         this.inputField.removeEventListener("underflow", this);
+
+        // Null out the one-offs' popup and textbox so that it cleans up its
+        // internal state for both. Most importantly, it removes the event
+        // listeners that it added to both.
+        this.popup.oneOffSearchButtons.popup = null;
+        this.popup.oneOffSearchButtons.textbox = null;
       ]]></destructor>
 
       <field name="_value">""</field>
@@ -100,6 +100,11 @@ add_task(function* () {
   yield expectEvent("on-input-started-fired");
   EventUtils.synthesizeKey("t", {});
   yield expectEvent("on-input-changed-fired", {text: "t"});
+  // Wait for the autocomplete search. Note that we cannot wait for the search
+  // to be complete, since the add-on doesn't communicate when it's done, so
+  // just check matches count.
+  yield BrowserTestUtils.waitForCondition(() => gURLBar.controller.matchCount >= 2,
+                                          "waiting urlbar search to complete");
   return "t";
 }
 
@@ -267,7 +272,6 @@ add_task(function* () {
   });
 
   // Start monitoring the console.
-  SimpleTest.waitForExplicitFinish();
   let waitForConsole = new Promise(resolve => {
     SimpleTest.monitorConsole(resolve, [{
       message: new RegExp(`The keyword provided is already registered: "${keyword}"`),
@@ -19,7 +19,9 @@ var gLanguagesDialog = {
   // see bug 1194346.
   forceReflow() {
     this._activeLanguages.style.fontKerning = "none";
-    setTimeout("gLanguagesDialog._activeLanguages.style.removeProperty('font-kerning')", 0);
+    setTimeout(() => {
+      this._activeLanguages.style.removeProperty("font-kerning")
+    }, 0);
   },
 
   get _activeLanguages() {
@@ -1243,10 +1243,6 @@
         return this._popup;
       ]]></getter>
       <setter><![CDATA[
-        if (this._popup == val) {
-          return val;
-        }
-
         let events = [
           "popupshowing",
           "popuphidden",
@@ -1285,9 +1281,6 @@
         return this._textbox;
       ]]></getter>
       <setter><![CDATA[
-        if (this._textbox == val) {
-          return val;
-        }
         if (this._textbox) {
           this._textbox.removeEventListener("input", this);
         }
@@ -594,7 +594,7 @@ Experiments.Experiments.prototype = {
   }),
 
   _telemetryStatusChanged() {
-    this._toggleExperimentsEnabled(gExperimentsEnabled);
+    this._toggleExperimentsEnabled(gPrefs.get(PREF_ENABLED, false));
   },
 
   /**
@@ -16,30 +16,19 @@ XPCOMUtils.defineLazyModuleGetter(this, "OS",
                                   "resource://gre/modules/osfile.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "CommonUtils",
                                   "resource://services-common/utils.js");
+XPCOMUtils.defineLazyModuleGetter(this, "TelemetryUtils",
+                                  "resource://gre/modules/TelemetryUtils.jsm");
+
 
 const PREF_EXPERIMENTS_ENABLED = "experiments.enabled";
 const PREF_ACTIVE_EXPERIMENT = "experiments.activeExperiment"; // whether we have an active experiment
-const PREF_TELEMETRY_ENABLED = "toolkit.telemetry.enabled";
-const PREF_TELEMETRY_UNIFIED = "toolkit.telemetry.unified";
 const DELAY_INIT_MS = 30 * 1000;
 
-// Whether the FHR/Telemetry unification features are enabled.
-// Changing this pref requires a restart.
-const IS_UNIFIED_TELEMETRY = Preferences.get(PREF_TELEMETRY_UNIFIED, false);
-
 XPCOMUtils.defineLazyGetter(
   this, "gPrefs", () => {
     return new Preferences();
   });
 
-XPCOMUtils.defineLazyGetter(
-  this, "gExperimentsEnabled", () => {
-    // We can enable experiments if either unified Telemetry or FHR is on, and the user
-    // has opted into Telemetry.
-    return gPrefs.get(PREF_EXPERIMENTS_ENABLED, false) &&
-           IS_UNIFIED_TELEMETRY && gPrefs.get(PREF_TELEMETRY_ENABLED, false);
-  });
-
 XPCOMUtils.defineLazyGetter(
   this, "gActiveExperiment", () => {
     return gPrefs.get(PREF_ACTIVE_EXPERIMENT);
@@ -54,8 +43,15 @@ ExperimentsService.prototype = {
   classID: Components.ID("{f7800463-3b97-47f9-9341-b7617e6d8d49}"),
   QueryInterface: XPCOMUtils.generateQI([Ci.nsITimerCallback, Ci.nsIObserver]),
 
+  get _experimentsEnabled() {
+    // We can enable experiments if either unified Telemetry or FHR is on, and the user
+    // has opted into Telemetry.
+    return gPrefs.get(PREF_EXPERIMENTS_ENABLED, false) &&
+           TelemetryUtils.isTelemetryEnabled;
+  },
+
   notify(timer) {
-    if (!gExperimentsEnabled) {
+    if (!this._experimentsEnabled) {
       return;
     }
     if (OS.Constants.Path.profileDir === undefined) {
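The substantive change in this hunk is replacing the module-level `gExperimentsEnabled` value (a `defineLazyGetter`, removed in the previous hunk) with a live `_experimentsEnabled` getter on the service: the lazy getter computed its answer once and cached it for the lifetime of the process, so flipping the Telemetry prefs afterwards was never noticed. A self-contained sketch of that difference, using a plain `Map` as a stand-in for the real preferences service (all names here are illustrative):

```javascript
const prefs = new Map([["experiments.enabled", false]]);

// Old shape: the value is computed on first access and then frozen,
// which is what XPCOMUtils.defineLazyGetter does.
const cached = {};
Object.defineProperty(cached, "enabled", {
  configurable: true,
  get() {
    const value = prefs.get("experiments.enabled");
    // Replace the accessor with the computed value.
    Object.defineProperty(cached, "enabled", { value });
    return value;
  },
});

// New shape: a plain getter re-reads the preference on every access.
const live = {
  get enabled() {
    return prefs.get("experiments.enabled");
  },
};

console.log(cached.enabled, live.enabled); // false false
prefs.set("experiments.enabled", true);
console.log(cached.enabled, live.enabled); // false true: only the live getter notices
```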
@@ -63,7 +59,11 @@ ExperimentsService.prototype = {
     }
     let instance = Experiments.instance();
     if (instance.isReady) {
-      instance.updateManifest();
+      instance.updateManifest().catch(error => {
+        // Don't throw, as this breaks tests. In any case the best we can do here
+        // is to log the failure.
+        Cu.reportError(error);
+      });
     }
   },
 
@@ -77,7 +77,7 @@ ExperimentsService.prototype = {
   observe(subject, topic, data) {
     switch (topic) {
       case "profile-after-change":
-        if (gExperimentsEnabled) {
+        if (this._experimentsEnabled) {
           Services.obs.addObserver(this, "quit-application");
           Services.obs.addObserver(this, "sessionstore-state-finalized");
           Services.obs.addObserver(this, "EM-loaded");
@@ -13,9 +13,16 @@ add_test(function test_experiments_activation() {
   Services.prefs.setBoolPref(PREF_TELEMETRY_ENABLED, false);
 
   let experiments = Experiments.instance();
 
   Assert.ok(!experiments.enabled, "Experiments must be disabled if Telemetry is disabled.");
 
-  // TODO: Test that Experiments are turned back on when bug 1232648 lands.
+  // Patch updateManifest to not do anything when the pref is switched back to true,
+  // otherwise it attempts to connect to the server.
+  experiments.updateManifest = () => Promise.resolve();
+
+  Services.prefs.setBoolPref(PREF_TELEMETRY_ENABLED, true);
+
+  Assert.ok(experiments.enabled, "Experiments must be re-enabled if Telemetry is re-enabled");
+
   run_next_test();
 });
@@ -1,12 +1,8 @@
 "use strict";
 
 module.exports = {
-  "extends": "../../.eslintrc.js",
-
   "globals": {
     "addMessageListener": false,
-    "Components": true,
-    "dump": true,
     "removeMessageListener": false,
     "sendAsyncMessage": false,
     "TextDecoder": false,
@@ -34,10 +30,7 @@ module.exports = {
       "requireReturn": false,
       "requireReturnDescription": false,
     }],
 
-    // Braces only needed for multi-line arrow function blocks
-    // "arrow-body-style": ["error", "as-needed"],
-
     // Forbid spaces inside the square brackets of array literals.
     "array-bracket-spacing": ["error", "never"],
 
@@ -46,11 +39,7 @@ module.exports = {
 
     // No space padding in parentheses
     "space-in-parens": ["error", "never"],
 
-    // Enforce one true brace style (opening brace on the same line) and avoid
-    // start and end braces on the same line.
-    "brace-style": ["error", "1tbs", {"allowSingleLine": true}],
-
     // Commas at the end of the line not the start
     "comma-style": "error",
 
@@ -63,18 +52,12 @@ module.exports = {
     // Two space indent
     "indent": ["error", 2, {"SwitchCase": 1}],
 
-    // Space after colon not before in property declarations
-    "key-spacing": ["error", {"beforeColon": false, "afterColon": true, "mode": "minimum"}],
-
     // Always require parenthesis for new calls
     "new-parens": "error",
 
     // Use [] instead of Array()
     "no-array-constructor": "error",
 
-    // If an if block ends with a return no need for an else block
-    // "no-else-return": "error",
-
     // Disallow empty statements. This will report an error for:
     // try { something(); } catch (e) {}
     // but will not report it for:
@@ -97,9 +80,6 @@ module.exports = {
     // Always require semicolon at end of statement
     "semi": ["error", "always"],
 
-    // Require spaces around operators, except for a|"off".
-    "space-infix-ops": ["error", {"int32Hint": true}],
-
     // Disallow using variables outside the blocks they are defined (especially
     // since only let and const are used, see "no-var").
     "block-scoped-var": "error",
@@ -111,10 +91,6 @@ module.exports = {
     // Warn about cyclomatic complexity in functions.
     "complexity": ["error", {"max": 20}],
 
-    // Don't warn for inconsistent naming when capturing this (not so important
-    // with auto-binding fat arrow functions).
-    // "consistent-this": ["error", "self"],
-
     // Enforce dots on the next line with property name.
     "dot-location": ["error", "property"],
 
@@ -239,6 +239,7 @@ inline bool isIgnoredPathForSprintfLiteral(const CallExpr *Call, const SourceMan
         Begin->compare_lower(StringRef("google-breakpad")) == 0 ||
         Begin->compare_lower(StringRef("gflags")) == 0 ||
         Begin->compare_lower(StringRef("harfbuzz")) == 0 ||
+        Begin->compare_lower(StringRef("jsoncpp")) == 0 ||
         Begin->compare_lower(StringRef("libstagefright")) == 0 ||
         Begin->compare_lower(StringRef("mtransport")) == 0 ||
         Begin->compare_lower(StringRef("protobuf")) == 0 ||
@@ -50,7 +50,7 @@ def rust_compiler(rustc_info, cargo_info):
         die(dedent('''\
         Rust compiler not found.
         To compile rust language sources, you must have 'rustc' in your path.
-        See https//www.rust-lang.org/ for more information.
+        See https://www.rust-lang.org/ for more information.
 
         You can install rust by running './mach bootstrap'
         or by directly running the installer from https://rustup.rs/
@@ -233,6 +233,8 @@ module.exports = {
     "no-fallthrough": "error",
     // Allow the use of leading or trailing decimal points in numeric literals.
     "no-floating-decimal": "off",
+    // disallow use of eval()-like methods
+    "no-implied-eval": "error",
     // Allow comments inline after code.
     "no-inline-comments": "off",
     // Disallow if as the only statement in an else block.
@@ -427,8 +429,6 @@ module.exports = {
     "no-eq-null": "off",
     // disallow overwriting functions written as function declarations
     "no-func-assign": "off",
-    // disallow use of eval()-like methods
-    "no-implied-eval": "off",
     // disallow function or variable declarations in nested blocks
     "no-inner-declarations": "off",
     // disallow invalid regular expression strings in the RegExp constructor
@@ -13,7 +13,7 @@ const TreeView = React.createFactory(require("devtools/client/shared/components/
 
 // Reps
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = React.createFactory(REPS.Rep);
+const { Rep } = REPS;
 const Grip = REPS.Grip;
 
 // DOM Panel
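This hunk, and the near-identical ones that follow, replace `const Rep = React.createFactory(REPS.Rep)` with plain destructuring, which implies the reps bundle now hands out `Rep` in an already-callable form, so each consumer no longer has to wrap it itself. A small stand-alone sketch of the pattern change (the `React` and `REPS` objects below are simplified stand-ins, not the real modules):

```javascript
// Minimal stand-in for React.createFactory: turn a component into a callable.
const React = {
  createFactory: component => props => ({ component, props }),
};

// Old style: the bundle exports a raw component and every consumer wraps it.
const RawRep = { displayName: "Rep" };
const RepOld = React.createFactory(RawRep);

// New style: the bundle exports an already-callable factory, so consumers
// just destructure it, as the hunks above now do.
const REPS = { Rep: React.createFactory(RawRep) };
const { Rep } = REPS;

console.log(RepOld({ mode: "tiny" }));
console.log(Rep({ mode: "tiny" })); // same call shape, one less wrapping step
```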
@@ -15,7 +15,7 @@ const BoxModelEditable = createFactory(require("./BoxModelEditable"));
 
 // Reps
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 const Types = require("../types");
 
@@ -4,12 +4,12 @@
 
 "use strict";
 
-const { addons, createClass, createFactory, DOM: dom, PropTypes } = require("devtools/client/shared/vendor/react");
+const { addons, createClass, DOM: dom, PropTypes } = require("devtools/client/shared/vendor/react");
 const { findDOMNode } = require("devtools/client/shared/vendor/react-dom");
 
 // Reps
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 const ElementNode = REPS.ElementNode;
 
 const Types = require("../types");
@@ -12,7 +12,7 @@ define(function (require, exports, module) {
 
   const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
   const { createFactories } = require("devtools/client/shared/react-utils");
-  const Rep = createFactory(REPS.Rep);
+  const { Rep } = REPS;
 
   const { SearchBox } = createFactories(require("./search-box"));
   const { Toolbar, ToolbarButton } = createFactories(require("./reps/toolbar"));
@@ -24,7 +24,7 @@ const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
 const MDNLink = createFactory(require("./mdn-link"));
 const PropertiesView = createFactory(require("./properties-view"));
 
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 const { button, div, input, textarea } = DOM;
 
 const EDIT_AND_RESEND = L10N.getStr("netmonitor.summary.editAndResend");
@@ -14,7 +14,7 @@ const {
 } = require("devtools/client/shared/vendor/react");
 
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 const { FILTER_SEARCH_DELAY } = require("../constants");
 
@@ -8,7 +8,7 @@ const React = require("devtools/client/shared/vendor/react");
 const TreeView = React.createFactory(require("devtools/client/shared/components/tree/tree-view"));
 
 const { REPS, MODE, parseURLEncodedText } = require("devtools/client/shared/components/reps/reps");
-const Rep = React.createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 // Network
 const NetInfoParams = React.createFactory(require("./net-info-params"));
@@ -8,7 +8,7 @@ const React = require("devtools/client/shared/vendor/react");
 // Reps
 const TreeView = React.createFactory(require("devtools/client/shared/components/tree/tree-view"));
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const Rep = React.createFactory(REPS.Rep);
+const { Rep } = REPS;
 
 // Network
 const SizeLimit = React.createFactory(require("./size-limit"));
@@ -21,10 +21,9 @@ const {
 const VariablesViewLink = createFactory(require("devtools/client/webconsole/new-console-output/components/variables-view-link"));
 
 const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
-const { createFactories } = require("devtools/client/shared/react-utils");
-const Rep = createFactory(REPS.Rep);
+const Rep = REPS.Rep;
 const Grip = REPS.Grip;
-const StringRep = createFactories(REPS.StringRep).rep;
+const StringRep = REPS.StringRep.rep;
 
 GripMessageBody.displayName = "GripMessageBody";
 
@@ -39,6 +38,7 @@ GripMessageBody.propTypes = {
   }),
   userProvidedStyle: PropTypes.string,
   useQuotes: PropTypes.bool,
+  escapeWhitespace: PropTypes.bool,
 };
 
 GripMessageBody.defaultProps = {
@@ -46,7 +46,13 @@ GripMessageBody.defaultProps = {
 };
 
 function GripMessageBody(props) {
-  const { grip, userProvidedStyle, serviceContainer, useQuotes } = props;
+  const {
+    grip,
+    userProvidedStyle,
+    serviceContainer,
+    useQuotes,
+    escapeWhitespace
+  } = props;
 
   let styleObject;
   if (userProvidedStyle && userProvidedStyle !== "") {
@@ -66,6 +72,7 @@ function GripMessageBody(props) {
     ? StringRep({
         object: grip,
         useQuotes: useQuotes,
+        escapeWhitespace: escapeWhitespace,
         mode: props.mode,
         style: styleObject
       })
@@ -43,7 +43,12 @@ function EvaluationResult(props) {
   if (message.messageText) {
     messageBody = message.messageText;
   } else {
-    messageBody = GripMessageBody({grip: parameters, serviceContainer, useQuotes: true});
+    messageBody = GripMessageBody({
+      grip: parameters,
+      serviceContainer,
+      useQuotes: true,
+      escapeWhitespace: false,
+    });
   }
 
   const topLevelClasses = ["cm-s-mozilla"];
@@ -11,7 +11,7 @@ add_task(function* () {
   let hud = yield openNewTabAndConsole(TEST_URI);
 
   info("console.log with a string argument");
-  let receievedMessages = waitForMessages({
+  let receivedMessages = waitForMessages({
     hud,
     messages: [{
       // Test that the output does not include quotes.
@@ -23,13 +23,14 @@ add_task(function* () {
     content.wrappedJSObject.stringLog();
   });
 
-  yield receievedMessages;
+  yield receivedMessages;
 
   info("evaluating a string constant");
   let jsterm = hud.jsterm;
-  yield jsterm.execute("\"string constant\"");
+  yield jsterm.execute("\"string\\nconstant\"");
   let msg = yield waitFor(() => findMessage(hud, "constant"));
   let body = msg.querySelector(".message-body");
-  // On the other hand, a string constant result should be quoted.
-  ok(body.textContent.includes("\"string constant\""), "found expected text");
+  // On the other hand, a string constant result should be quoted, but
+  // newlines should be let through.
+  ok(body.textContent.includes("\"string\nconstant\""), "found expected text");
 });
@@ -7,16 +7,44 @@ We use telemetry to get metrics of usage of the different features and panels in
 The process to add metrics to a tool roughly consists in:
 
 1. Adding the probe to Firefox
-2. Using the probe in DevTools code
-3. Getting approval from the data team
+2. Using Histograms.json probes in DevTools code
+3. Using Scalars.yaml probes in DevTools code
+4. Getting approval from the data team
 
 ### 1. Adding the probe to Firefox
 
-The first step involves creating entries for the probe in the file that contains declarations for all data that Firefox might report to Mozilla.
+The first step involves creating entries for the probe in one of the files that contain declarations for all data that Firefox might report to Mozilla.
 
-This file is at `toolkit/components/telemetry/Histograms.json`.
+These files are:
+- `toolkit/components/telemetry/Histograms.json`
+- `toolkit/components/telemetry/Scalars.yaml`
 
-If it's the first time you add one of these, it's advised to follow the style of existing entries. Our entries are prepended with `DEVTOOLS_`. For example:
+Scalars allow collection of simple values, like counts, booleans and strings and are to be used whenever possible instead of histograms.
+
+Histograms allow collection of multiple different values, but aggregate them into a number of buckets. Each bucket has a value range and a count of how many values we recorded.
+
+Both scalars & histograms allow recording by keys. This allows for more flexible, two-level data collection.
+
+#### Why the different file formats?
+
+The data team chose YAML for `Scalars.yaml` because it is easy to write and provides a number of features not available in JSON including comments, extensible data types, relational anchors, strings without quotation marks, and mapping types preserving key order.
+
+While we previously used JSON for similar purposes in histograms.json, we have used YAML here because it allows for comments and is generally easier to write.
+
+When the YAML format is proven the data team are considering moving the histograms over to YAML format at some point.
+
+If it's the first time you add one of these, it's advised to follow the style of existing entries.
+
+New data types have been added over the years, so it's quite feasible that some of our probes are not the most suitable nowadays.
+
+There's more information about types (and telemetry in general) on [this page](https://developer.mozilla.org/en-US/docs/Mozilla/Performance/Adding_a_new_Telemetry_probe) and [this other page](https://gecko.readthedocs.io/en/latest/toolkit/components/telemetry/telemetry/collection/index.html).
+
+And of course, in case of doubt, ask!
+
+### Adding probes to `Histograms.json`
+
+Our entries are prefixed with `DEVTOOLS_`. For example:
 
 ```javascript
 "DEVTOOLS_DOM_OPENED_COUNT": {
@@ -40,13 +68,41 @@ If it's the first time you add one of these, it's advised to follow the style of
 
 There are different types of probes you can use. These are specified by the `kind` field. Normally we use `count` for counting how many times the tools are opened, and `exponential` for how many times a panel is active.
 
-New data types have been added over the years, so it's quite feasible that some of our probes are not the most suitable nowadays.
+### Adding probes to `Scalars.yaml`
 
-There's more information about types (and telemetry in general) on [this page](https://developer.mozilla.org/en-US/docs/Mozilla/Performance/Adding_a_new_Telemetry_probe) and [this other page](https://gecko.readthedocs.io/en/latest/toolkit/components/telemetry/telemetry/collection/index.html).
+Our entries are prefixed with `devtools.`. For example:
 
-And of course, in case of doubt, ask!
+```javascript
+devtools.toolbar.eyedropper:
+  opened:
+    bug_numbers:
+      - 1247985
+      - 1352115
+    description: Number of times the DevTools Eyedropper has been opened via the inspector toolbar.
+    expires: never
+    kind: uint
+    notification_emails:
+      - dev-developer-tools@lists.mozilla.org
+    release_channel_collection: opt-out
+    record_in_processes:
+      - 'main'
 
-### 2. Using the probe in DevTools code
+devtools.copy.unique.css.selector:
+  opened:
+    bug_numbers:
+      - 1323700
+      - 1352115
+    description: Number of times the DevTools copy unique CSS selector has been used.
+    expires: "57"
+    kind: uint
+    notification_emails:
+      - dev-developer-tools@lists.mozilla.org
+    release_channel_collection: opt-out
+    record_in_processes:
+      - 'main'
+```
+
+### 2. Using Histograms.json probes in DevTools code
 
 Once the probe has been declared in the `Histograms.json` file, you'll need to actually use it in our code.
 
@@ -67,7 +123,7 @@ Then, include that module on each tool that requires telemetry:
 let Telemetry = require("devtools/client/shared/telemetry");
 ```
 
-Create telemetry instance on the tool constructor:
+Create a telemetry instance on the tool constructor:
 
 ```javascript
 this._telemetry = new Telemetry();
@@ -87,6 +143,48 @@ this._telemetry.toolClosed("mytoolname");
 
 Note that `mytoolname` is the id we declared in the `telemetry.js` module.
 
+### 3. Using Scalars.yaml probes in DevTools code
+
+Once the probe has been declared in the `Scalars.yaml` file, you'll need to actually use it in our code.
+
+First, you need to give it an id in `devtools/client/shared/telemetry.js`. You will want to follow the style of existing lowercase histogram entries. For example:
+
+```javascript
+  toolbareyedropper: {
+    scalar: "devtools.toolbar.eyedropper.opened", // Note that the scalar is lowercase
+  },
+  copyuniquecssselector: {
+    scalar: "devtools.copy.unique.css.selector.opened",
+  },
+```
+
+... would correspond to the probes we declared in the previous section.
+
+Then, include that module on each tool that requires telemetry:
+
+```javascript
+let Telemetry = require("devtools/client/shared/telemetry");
+```
+
+Create a telemetry instance on the tool constructor:
+
+```javascript
+this._telemetry = new Telemetry();
+```
+
+And use the instance to report e.g. tool opening...
+
+```javascript
+this._telemetry.toolOpened("mytoolname");
+```
+
+Notes:
+
+- `mytoolname` is the id we declared in the `Scalars.yaml` module.
+- Because we are not logging tool's time opened in `Scalars.yaml` we don't care
+about toolClosed. Of course, if there was an accompanying `timerHistogram` field defined
+in `telemetry.js` and `histograms.json` then `toolClosed` should also be added.
+
 #### Note on top level panels
 
 The code for the tabs uses their ids to automatically report telemetry when you switch between panels, so you don't need to explicitly call `toolOpened` and `toolClosed` on top level panels.
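For context on what the `scalar:` mapping above ultimately drives: scalar probes are recorded through the `nsITelemetry` interface, so a direct, unwrapped recording of the eyedropper probe would look roughly like the sketch below. The DevTools `telemetry.js` wrapper hides this behind `toolOpened()`, so treat this as an illustration of the underlying API rather than the code path the docs prescribe:

```javascript
// Privileged (chrome) code; Cu is Components.utils, and Services.jsm
// exposes the telemetry service as Services.telemetry.
Cu.import("resource://gre/modules/Services.jsm");

// Bump the count-style scalar declared in Scalars.yaml by one.
Services.telemetry.scalarAdd("devtools.toolbar.eyedropper.opened", 1);
```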
@@ -116,7 +214,7 @@ So watch out for errors.
 
 #### Compile it!
 
-It's strongly recommended that you do a full Firefox build if you have edited `Histograms.json`, as it is processed at build time, and various checks will be run on it to guarantee it is valid.
+It's strongly recommended that you do a full Firefox build if you have edited either `Histograms.json` or `Scalars.yaml`, as they are processed at build time, and various checks will be run on it to guarantee it is valid.
 
 ```
 ./mach build
@@ -126,7 +224,7 @@ If you use `mach build faster` or artifact builds, the checks will not be perfor
 
 Save yourself some time and run the checks locally.
 
-### 3. Getting approval from the data team
+### 4. Getting approval from the data team
 
 This is required before the changes make their way into `mozilla-central`.
 
@@ -162,4 +260,3 @@ It's also recommended to take small steps and run the queries often to detect er
 Slow queries will be interrupted by the system, so don't worry about "fetching too much data" or "using too many resources". There's built-in protection to avoid your code eating up the Telemetry database.
 
 Funnily, if you're based in Europe, you might be in luck, as the website tends to be more responsive during European working hours than it is at Pacific working hours, as seemingly there's less people in Europe interacting with it.
-
@@ -6,7 +6,7 @@
 
 const { Cc, Ci, Cu } = require("chrome");
 const { getCurrentZoom, getWindowDimensions, getViewportDimensions,
-        getRootBindingParent } = require("devtools/shared/layout/utils");
+        getRootBindingParent, loadSheet } = require("devtools/shared/layout/utils");
 const { on, emit } = require("sdk/event/core");
 
 const lazyContainer = {};
@@ -104,17 +104,14 @@ exports.isXUL = isXUL;
 /**
  * Inject a helper stylesheet in the window.
  */
-var installedHelperSheets = new WeakMap();
+var installedHelperSheets = new WeakSet();
 
-function installHelperSheet(win, source, type = "agent") {
+function installHelperSheet(win, url = STYLESHEET_URI, type = "agent") {
   if (installedHelperSheets.has(win.document)) {
     return;
   }
-  let {Style} = require("sdk/stylesheet/style");
-  let {attach} = require("sdk/content/mod");
-  let style = Style({source, type});
-  attach(style, win);
-  installedHelperSheets.set(win.document, style);
+  loadSheet(win, url, type);
+  installedHelperSheets.add(win.document);
 }
 exports.installHelperSheet = installHelperSheet;
 
@@ -278,15 +275,7 @@ CanvasFrameAnonymousContentHelper.prototype = {
     // <style scoped> doesn't work inside anonymous content (see bug 1086532).
    // If it did, highlighters.css would be injected as an anonymous content
     // node using CanvasFrameAnonymousContentHelper instead.
-    if (!installedHelperSheets.has(doc)) {
-      installedHelperSheets.set(doc, true);
-      let source = "@import url('" + STYLESHEET_URI + "');";
-      let url = "data:text/css;charset=utf-8," + encodeURIComponent(source);
-      let winUtils = this.highlighterEnv.window
-        .QueryInterface(Ci.nsIInterfaceRequestor)
-        .getInterface(Ci.nsIDOMWindowUtils);
-      winUtils.loadSheetUsingURIString(url, winUtils.AGENT_SHEET);
-    }
+    installHelperSheet(this.highlighterEnv.window);
 
     let node = this.nodeBuilder();
 
@@ -72,7 +72,8 @@ const {
   isNativeAnonymous,
   isXBLAnonymous,
   isShadowAnonymous,
-  getFrameElement
+  getFrameElement,
+  loadSheet
 } = require("devtools/shared/layout/utils");
 const {getLayoutChangesObserver, releaseLayoutChangesObserver} = require("devtools/server/actors/reflow");
 const nodeFilterConstants = require("devtools/shared/dom-node-filter-constants");
@@ -127,7 +128,7 @@ const PSEUDO_SELECTORS = [
   ["::selection", 0]
 ];
 
-var HELPER_SHEET = `
+var HELPER_SHEET = `data:text/css;charset=utf-8,
   .__fx-devtools-hide-shortcut__ {
     visibility: hidden !important;
   }
@@ -1865,15 +1866,12 @@ var WalkerActor = protocol.ActorClassWithSpec(walkerSpec, {
 
   _installHelperSheet: function (node) {
     if (!this.installedHelpers) {
-      this.installedHelpers = new WeakMap();
+      this.installedHelpers = new WeakSet();
     }
     let win = node.rawNode.ownerGlobal;
     if (!this.installedHelpers.has(win)) {
-      let { Style } = require("sdk/stylesheet/style");
-      let { attach } = require("sdk/content/mod");
-      let style = Style({source: HELPER_SHEET, type: "agent" });
-      attach(style, win);
-      this.installedHelpers.set(win, style);
+      loadSheet(win, HELPER_SHEET, "agent");
+      this.installedHelpers.add(win);
     }
   },
 
@@ -106,8 +106,12 @@ ObjectActor.prototype = {
     // to lazily display them when there is a bunch.
     // Throws on some MouseEvent object in tests.
     try {
-      // Bug 1163520: Assert on internal functions
-      if (!["Function", "Proxy"].includes(g.class)) {
+      if (TYPED_ARRAY_CLASSES.indexOf(g.class) != -1) {
+        // Bug 1348761: getOwnPropertyNames is unecessary slow on TypedArrays
+        let length = DevToolsUtils.getProperty(this.obj, "length");
+        g.ownPropertyLength = length;
+      } else if (!["Function", "Proxy"].includes(g.class)) {
+        // Bug 1163520: Assert on internal functions
         g.ownPropertyLength = this.obj.getOwnPropertyNames().length;
       }
     } catch (e) {}
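The special case added above exists because asking a typed array for all of its own property names materialises one string per element, while the only number the actor needs is the length. The Debugger API used in the actor is chrome-only, but the cost profile is visible with plain JavaScript too; a stand-alone sketch (sizes and timings are illustrative):

```javascript
// One million elements means one million index names from getOwnPropertyNames.
const bigArray = new Float64Array(1000000);

console.time("getOwnPropertyNames");
const names = Object.getOwnPropertyNames(bigArray);
console.timeEnd("getOwnPropertyNames"); // typically tens of milliseconds

console.time("length read");
const length = bigArray.length;         // the only value the actor needs
console.timeEnd("length read");         // effectively free

console.log(names.length, length);      // 1000000 1000000
```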
@@ -253,7 +253,9 @@ const ProfilerManager = (function () {
    * profiler is stopped.
    */
   get sharedLibraries() {
-    return nsIProfilerModule.sharedLibraries;
+    return {
+      sharedLibraries: nsIProfilerModule.sharedLibraries
+    };
   },
 
   /**
@@ -21,7 +21,8 @@ function run_test() {
 }
 
 function test_sharedlibraries(client, actor, callback) {
-  client.request({ to: actor, type: "sharedLibraries" }, libs => {
+  client.request({ to: actor, type: "sharedLibraries" }, response => {
+    const libs = response.sharedLibraries;
     do_check_eq(typeof libs, "object");
     do_check_true(Array.isArray(libs));
     do_check_eq(typeof libs, "object");
@@ -7,6 +7,12 @@
 const { Ci, Cc } = require("chrome");
 const nodeFilterConstants = require("devtools/shared/dom-node-filter-constants");
 
+const SHEET_TYPE = {
+  "agent": "AGENT_SHEET",
+  "user": "USER_SHEET",
+  "author": "AUTHOR_SHEET"
+};
+
 loader.lazyRequireGetter(this, "setIgnoreLayoutChanges", "devtools/server/actors/reflow", true);
 exports.setIgnoreLayoutChanges = (...args) =>
   this.setIgnoreLayoutChanges(...args);
@@ -715,3 +721,49 @@ function getWindowFor(node) {
   }
   return null;
 }
+
+/**
+ * Synchronously loads a style sheet from `uri` and adds it to the list of
+ * additional style sheets of the document.
+ * The sheets added takes effect immediately, and only on the document of the
+ * `window` given.
+ *
+ * @param {DOMWindow} window
+ * @param {String} url
+ * @param {String} [type="author"]
+ */
+function loadSheet(window, url, type = "author") {
+  if (!(type in SHEET_TYPE)) {
+    type = "author";
+  }
+
+  let windowUtils = utilsFor(window);
+  try {
+    windowUtils.loadSheetUsingURIString(url, windowUtils[SHEET_TYPE[type]]);
+  } catch (e) {
+    // The method fails if the url is already loaded.
+  }
+}
+exports.loadSheet = loadSheet;
+
+/**
+ * Remove the document style sheet at `sheetURI` from the list of additional
+ * style sheets of the document. The removal takes effect immediately.
+ *
+ * @param {DOMWindow} window
+ * @param {String} url
+ * @param {String} [type="author"]
+ */
+function removeSheet(window, url, type = "author") {
+  if (!(type in SHEET_TYPE)) {
+    type = "author";
+  }
+
+  let windowUtils = utilsFor(window);
+  try {
+    windowUtils.removeSheetUsingURIString(url, windowUtils[SHEET_TYPE[type]]);
+  } catch (e) {
+    // The method fails if the url is already removed.
+  }
+}
+exports.removeSheet = removeSheet;
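The `loadSheet`/`removeSheet` helpers added above are thin wrappers around `nsIDOMWindowUtils.loadSheetUsingURIString`/`removeSheetUsingURIString`, which is what lets the highlighter and walker code earlier in this commit drop the SDK `sdk/stylesheet/style` and `sdk/content/mod` modules. A hedged usage sketch from a caller's point of view, assuming `window` is a chrome-privileged DOMWindow and using an illustrative data: URL:

```javascript
const { loadSheet, removeSheet } = require("devtools/shared/layout/utils");

// Hide nodes carrying a marker class, in this window's document only.
// The class name is a placeholder chosen for the example.
const SHEET_URL = "data:text/css;charset=utf-8," +
  encodeURIComponent(".my-devtools-helper { visibility: hidden !important; }");

loadSheet(window, SHEET_URL, "agent");   // takes effect immediately
// ...later, undo it:
removeSheet(window, SHEET_URL, "agent");
```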
@@ -1012,7 +1012,13 @@ WebGLContext::SetDimensions(int32_t signedWidth, int32_t signedHeight)
     if (!CreateAndInitGL(forceEnabled, &failReasons)) {
         nsCString text("WebGL creation failed: ");
         for (const auto& cur : failReasons) {
-            Telemetry::Accumulate(Telemetry::CANVAS_WEBGL_FAILURE_ID, cur.key);
+            // Don't try to accumulate using an empty key if |cur.key| is empty.
+            if (cur.key.IsEmpty()) {
+                Telemetry::Accumulate(Telemetry::CANVAS_WEBGL_FAILURE_ID,
+                                      NS_LITERAL_CSTRING("FEATURE_FAILURE_REASON_UNKNOWN"));
+            } else {
+                Telemetry::Accumulate(Telemetry::CANVAS_WEBGL_FAILURE_ID, cur.key);
+            }
 
             text.AppendASCII("\n* ");
             text.Append(cur.info);
@@ -146,6 +146,7 @@ TextCompositionArray* IMEStateManager::sTextCompositions = nullptr;
 bool IMEStateManager::sInstalledMenuKeyboardListener = false;
 bool IMEStateManager::sIsGettingNewIMEState = false;
 bool IMEStateManager::sCheckForIMEUnawareWebApps = false;
+bool IMEStateManager::sInputModeSupported = false;
 bool IMEStateManager::sRemoteHasFocus = false;
 
 // static
@@ -156,6 +157,11 @@ IMEStateManager::Init()
     &sCheckForIMEUnawareWebApps,
     "intl.ime.hack.on_ime_unaware_apps.fire_key_events_for_composition",
     false);
+
+  Preferences::AddBoolVarCache(
+    &sInputModeSupported,
+    "dom.forms.inputmode",
+    false);
 }
 
 // static
@@ -1000,7 +1006,7 @@ IMEStateManager::SetIMEState(const IMEState& aState,
       context.mHTMLInputType.Assign(nsGkAtoms::textarea->GetUTF16String());
     }
 
-    if (Preferences::GetBool("dom.forms.inputmode", false) ||
+    if (sInputModeSupported ||
         nsContentUtils::IsChromeDoc(aContent->OwnerDoc())) {
       aContent->GetAttr(kNameSpaceID_None, nsGkAtoms::inputmode,
                         context.mHTMLInputInputmode);
@@ -285,6 +285,7 @@ protected:
   static bool sInstalledMenuKeyboardListener;
   static bool sIsGettingNewIMEState;
   static bool sCheckForIMEUnawareWebApps;
+  static bool sInputModeSupported;
   static bool sRemoteHasFocus;
 
   class MOZ_STACK_CLASS GettingNewIMEStateBlocker final
@@ -131,7 +131,7 @@ function UpdateSessionFunc(test, token, sessionType, resolve, reject) {
         "k":HexToBase64(key)
       });
     } else {
-      bail(token + " couldn't find key for key id " + idHex)("No such key");
+      reject(`${token} couldn't find key for key id ${idHex}`);
     }
   }
 
@@ -145,8 +145,7 @@ function UpdateSessionFunc(test, token, sessionType, resolve, reject) {
       Log(token, "MediaKeySession update ok!");
       resolve(ev.target);
     }).catch(function(reason) {
-      bail(token + " MediaKeySession update failed")(reason);
-      reject();
+      reject(`${token} MediaKeySession update failed: ${reason}`);
     });
   }
 }
@@ -192,8 +191,12 @@ function AppendTrack(test, ms, track, token, loadParams)
      sb.appendBuffer(new Uint8Array(req.response));
    });
 
-    req.addEventListener("error", function(){info(token + " error fetching " + fragmentFile);});
-    req.addEventListener("abort", function(){info(token + " aborted fetching " + fragmentFile);});
+    req.addEventListener("error", function() {
+      reject(`${token} - ${track.name}: error fetching ${fragmentFile}`);
+    });
+    req.addEventListener("abort", function() {
+      reject(`${token} - ${track.name}: aborted fetching ${fragmentFile}`);
+    });
 
    Log(token, track.name + ": addNextFragment() fetching next fragment " + fragmentFile);
    req.send(null);
@@ -238,16 +241,9 @@ function LoadTest(test, elem, token, loadParams)
  elem.src = URL.createObjectURL(ms);
 
  return new Promise(function (resolve, reject) {
-    var firstOpen = true;
    ms.addEventListener("sourceopen", function () {
-      if (!firstOpen) {
-        Log(token, "sourceopen again?");
-        return;
-      }
-
-      firstOpen = false;
      Log(token, "sourceopen");
-      return Promise.all(test.tracks.map(function(track) {
+      Promise.all(test.tracks.map(function(track) {
        return AppendTrack(test, ms, track, token, loadParams);
      })).then(function() {
        if (loadParams && loadParams.noEndOfStream) {
@@ -257,10 +253,8 @@ function LoadTest(test, elem, token, loadParams)
          ms.endOfStream();
        }
        resolve();
-      }).catch(function() {
-        Log(token, "error while loading tracks");
-      });
-    })
+      }).catch(reject);
+    }, {once: true});
  });
 }
 
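The change above replaces the bail()-style logging callbacks with direct Promise rejection and registers the "sourceopen" handler as a one-shot listener. A hedged sketch of the resulting pattern (the function and variable names here are illustrative, not the test's own):

  function loadMedia(ms, tracks, token) {
    return new Promise(function (resolve, reject) {
      ms.addEventListener("sourceopen", function () {
        Promise.all(tracks.map(track => appendTrack(ms, track, token)))
          .then(resolve)
          .catch(reject);      // failures now propagate to the caller
      }, {once: true});        // replaces the manual "firstOpen" guard
    });
  }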
@@ -1415,13 +1415,6 @@ CompositorD3D11::EndFrame()
     return;
   }
 
-  if (mDevice->GetDeviceRemovedReason() != S_OK) {
-    gfxCriticalNote << "GFX: D3D11 skip EndFrame with device-removed.";
-    Compositor::EndFrame();
-    mCurrentRT = nullptr;
-    return;
-  }
-
   LayoutDeviceIntSize oldSize = mSize;
   EnsureSize();
   if (mSize.width <= 0 || mSize.height <= 0) {
@@ -23,7 +23,7 @@ if CONFIG['OS_ARCH'] == 'WINNT':
     # POSIX
 
 LOCAL_INCLUDES += [
-    '/toolkit/crashreporter/jsoncpp/include',
+    '/toolkit/components/jsoncpp/include',
 ]
 
 USE_LIBS += [
@@ -208,5 +208,23 @@ interface imgIRequest : nsIRequest
    * underlying call.
    */
  void decrementAnimationConsumers();
+
+  /**
+   * Request a loading-priority boost for the requested category; each
+   * category of request increases the priority only once.
+   *
+   * CATEGORY_FRAME_INIT: increase priority when the imgRequest is associated
+   * with an nsImageFrame.
+   *
+   * CATEGORY_SIZE_QUERY: increase priority when size decoding is necessary to
+   * determine the layout size of the associated nsImageFrame.
+   *
+   * CATEGORY_DISPLAY: increase priority when the image is about to be displayed
+   * in the viewport.
+   */
+  const uint32_t CATEGORY_FRAME_INIT = 1 << 0;
+  const uint32_t CATEGORY_SIZE_QUERY = 1 << 1;
+  const uint32_t CATEGORY_DISPLAY = 1 << 2;
+  void boostPriority(in uint32_t aCategory);
 };
 
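A hedged chrome-JS sketch of the new interface method. The CATEGORY_* constants and boostPriority() are the additions above; reaching the request through nsIImageLoadingContent is an assumption made only for illustration.

  // Hypothetical privileged script: boost an <img>'s current request.
  let loader = img.QueryInterface(Ci.nsIImageLoadingContent);
  let request = loader.getRequest(Ci.nsIImageLoadingContent.CURRENT_REQUEST);
  request.boostPriority(Ci.imgIRequest.CATEGORY_DISPLAY);
  // Repeating the same category is a no-op: each category boosts only once.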
@@ -541,12 +541,50 @@ imgRequest::AdjustPriority(imgRequestProxy* proxy, int32_t delta)
     return;
   }
 
+  AdjustPriorityInternal(delta);
+}
+
+void
+imgRequest::AdjustPriorityInternal(int32_t aDelta)
+{
   nsCOMPtr<nsISupportsPriority> p = do_QueryInterface(mChannel);
   if (p) {
-    p->AdjustPriority(delta);
+    p->AdjustPriority(aDelta);
   }
 }
 
+void
+imgRequest::BoostPriority(uint32_t aCategory)
+{
+  uint32_t newRequestedCategory =
+    (mBoostCategoriesRequested & aCategory) ^ aCategory;
+  if (!newRequestedCategory) {
+    // priority boost for each category can only apply once.
+    return;
+  }
+
+  MOZ_LOG(gImgLog, LogLevel::Debug,
+          ("[this=%p] imgRequest::BoostPriority for category %x",
+           this, newRequestedCategory));
+
+  int32_t delta = 0;
+
+  if (newRequestedCategory & imgIRequest::CATEGORY_FRAME_INIT) {
+    --delta;
+  }
+
+  if (newRequestedCategory & imgIRequest::CATEGORY_SIZE_QUERY) {
+    --delta;
+  }
+
+  if (newRequestedCategory & imgIRequest::CATEGORY_DISPLAY) {
+    delta += nsISupportsPriority::PRIORITY_HIGH;
+  }
+
+  AdjustPriorityInternal(delta);
+  mBoostCategoriesRequested |= newRequestedCategory;
+}
+
 bool
 imgRequest::HasTransferredData() const
 {
@@ -171,6 +171,8 @@ public:
   /// of @aProxy.
   void AdjustPriority(imgRequestProxy* aProxy, int32_t aDelta);
 
+  void BoostPriority(uint32_t aCategory);
+
   /// Returns a weak pointer to the underlying request.
   nsIRequest* GetRequest() const { return mRequest; }
 
@@ -223,6 +225,8 @@ private:
   /// Returns true if StartDecoding() was called.
   bool IsDecodeRequested() const;
 
+  void AdjustPriorityInternal(int32_t aDelta);
+
   // Weak reference to parent loader; this request cannot outlive its owner.
   imgLoader* mLoader;
   nsCOMPtr<nsIRequest> mRequest;
@@ -275,6 +279,9 @@ private:
 
   nsresult mImageErrorCode;
 
+  // The categories of prioritization strategy that have been requested.
+  uint32_t mBoostCategoriesRequested = 0;
+
   mutable mozilla::Mutex mMutex;
 
   // Member variables protected by mMutex. Note that *all* flags in our bitfield
@@ -718,6 +718,14 @@ imgRequestProxy::GetCORSMode(int32_t* aCorsMode)
   return NS_OK;
 }
 
+NS_IMETHODIMP
+imgRequestProxy::BoostPriority(uint32_t aCategory)
+{
+  NS_ENSURE_STATE(GetOwner() && !mCanceled);
+  GetOwner()->BoostPriority(aCategory);
+  return NS_OK;
+}
+
 /** nsISupportsPriority methods **/
 
 NS_IMETHODIMP
@@ -6,6 +6,7 @@ support-files =
   head.js
   image.html
   imageX2.html
+  browser_docshell_type_editor/**
 
 [browser_bug666317.js]
 skip-if = true || e10s # Bug 1207012 - Permaorange from an uncaught exception that isn't actually turning the suite orange until it hits beta, Bug 948194 - Decoded Images seem to not be discarded on memory-pressure notification with e10s enabled
@@ -4,14 +4,31 @@
 const Ci = Components.interfaces;
 const SIMPLE_HTML = "data:text/html,<html><head></head><body></body></html>";
 
+/**
+ * Returns the directory where the chrome.manifest file for the test can be found.
+ *
+ * @return nsILocalFile of the manifest directory
+ */
+function getManifestDir() {
+  let path = getTestFilePath("browser_docshell_type_editor");
+  let file = Components.classes["@mozilla.org/file/local;1"]
+                       .createInstance(Components.interfaces.nsILocalFile);
+  file.initWithPath(path);
+  return file;
+}
+
 // The following URI is *not* accessible to content, hence loading that URI
 // from an unprivileged site should be blocked. If docshell is of appType
 // APP_TYPE_EDITOR however the load should be allowed.
-// >> chrome://devtools/content/framework/dev-edition-promo/dev-edition-logo.png
+// >> chrome://test1/skin/privileged.png
 
 add_task(function* () {
   info("docshell of appType APP_TYPE_EDITOR can access privileged images.");
 
+  // Load a temporary manifest adding a route to a privileged image
+  let manifestDir = getManifestDir();
+  Components.manager.addBootstrappedManifestLocation(manifestDir);
+
   yield BrowserTestUtils.withNewTab({
     gBrowser,
     url: SIMPLE_HTML
@@ -28,6 +45,7 @@ add_task(function* () {
      is(rootDocShell.appType, Ci.nsIDocShell.APP_TYPE_EDITOR,
        "sanity check: appType after update should be type editor");
 
+
      return new Promise(resolve => {
        let doc = content.document;
        let image = doc.createElement("img");
@@ -44,15 +62,21 @@ add_task(function* () {
          resolve();
        }
        doc.body.appendChild(image);
-        image.src = "chrome://devtools/content/framework/dev-edition-promo/dev-edition-logo.png";
+        image.src = "chrome://test1/skin/privileged.png";
      });
    });
  });
+
+  Components.manager.removeBootstrappedManifestLocation(manifestDir);
 });
 
 add_task(function* () {
   info("docshell of appType APP_TYPE_UNKNOWN can *not* access privileged images.");
 
+  // Load a temporary manifest adding a route to a privileged image
+  let manifestDir = getManifestDir();
+  Components.manager.addBootstrappedManifestLocation(manifestDir);
+
   yield BrowserTestUtils.withNewTab({
     gBrowser,
     url: SIMPLE_HTML
@@ -85,8 +109,10 @@ add_task(function* () {
          resolve();
        }
        doc.body.appendChild(image);
-        image.src = "chrome://devtools/content/framework/dev-edition-promo/dev-edition-logo.png";
+        image.src = "chrome://test1/skin/privileged.png";
      });
    });
  });
+
+  Components.manager.removeBootstrappedManifestLocation(manifestDir);
 });
@@ -0,0 +1 @@
+skin test1 test img/
Binary file not shown (new image, 90 B).
@@ -9231,41 +9231,6 @@ BytecodeEmitter::isRestParameter(ParseNode* pn)
     return false;
 }
 
-bool
-BytecodeEmitter::emitOptimizeSpread(ParseNode* arg0, JumpList* jmp, bool* emitted)
-{
-    // Emit a pereparation code to optimize the spread call with a rest
-    // parameter:
-    //
-    //   function f(...args) {
-    //     g(...args);
-    //   }
-    //
-    // If the spread operand is a rest parameter and it's optimizable array,
-    // skip spread operation and pass it directly to spread call operation.
-    // See the comment in OptimizeSpreadCall in Interpreter.cpp for the
-    // optimizable conditons.
-    if (!isRestParameter(arg0)) {
-        *emitted = false;
-        return true;
-    }
-
-    if (!emitTree(arg0))
-        return false;
-
-    if (!emit1(JSOP_OPTIMIZE_SPREADCALL))
-        return false;
-
-    if (!emitJump(JSOP_IFNE, jmp))
-        return false;
-
-    if (!emit1(JSOP_POP))
-        return false;
-
-    *emitted = true;
-    return true;
-}
-
 bool
 BytecodeEmitter::emitCallOrNew(ParseNode* pn, ValueUsage valueUsage /* = ValueUsage::WantValue */)
 {
@@ -9423,18 +9388,43 @@ BytecodeEmitter::emitCallOrNew(ParseNode* pn, ValueUsage valueUsage /* = ValueUsage::WantValue */)
             }
         } else {
             ParseNode* args = pn2->pn_next;
-            JumpList jmp;
-            bool optCodeEmitted = false;
-            if (argc == 1) {
-                if (!emitOptimizeSpread(args->pn_kid, &jmp, &optCodeEmitted))
+            bool emitOptCode = (argc == 1) && isRestParameter(args->pn_kid);
+            IfThenElseEmitter ifNotOptimizable(this);
+
+            if (emitOptCode) {
+                // Emit a preparation code to optimize the spread call with a rest
+                // parameter:
+                //
+                //   function f(...args) {
+                //     g(...args);
+                //   }
+                //
+                // If the spread operand is a rest parameter and it's optimizable
+                // array, skip spread operation and pass it directly to spread call
+                // operation. See the comment in OptimizeSpreadCall in
+                // Interpreter.cpp for the optimizable conditions.
+
+                if (!emitTree(args->pn_kid))
+                    return false;
+
+                if (!emit1(JSOP_OPTIMIZE_SPREADCALL))
+                    return false;
+
+                if (!emit1(JSOP_NOT))
+                    return false;
+
+                if (!ifNotOptimizable.emitIf())
+                    return false;
+
+                if (!emit1(JSOP_POP))
                     return false;
             }
 
             if (!emitArray(args, argc, JSOP_SPREADCALLARRAY))
                 return false;
 
-            if (optCodeEmitted) {
-                if (!emitJumpTargetAndPatch(jmp))
+            if (emitOptCode) {
+                if (!ifNotOptimizable.emitEnd())
                     return false;
             }
 
@@ -753,7 +753,6 @@ struct MOZ_STACK_CLASS BytecodeEmitter
                                  ValueUsage valueUsage = ValueUsage::WantValue);
 
     bool isRestParameter(ParseNode* pn);
-    MOZ_MUST_USE bool emitOptimizeSpread(ParseNode* arg0, JumpList* jmp, bool* emitted);
 
     MOZ_MUST_USE bool emitCallOrNew(ParseNode* pn, ValueUsage valueUsage = ValueUsage::WantValue);
     MOZ_MUST_USE bool emitSelfHostedCallFunction(ParseNode* pn);
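For orientation, a small JS sketch of what the emitter change above keys on: the fast path is only prepared when the call has a single spread argument and that argument is the enclosing function's own rest parameter (argc == 1 && isRestParameter); everything else keeps the generic spread path. The function names are illustrative.

  function forward(...args) {
    return target(...args);   // candidate: lone spread of the rest parameter
  }

  function notCandidate(list) {
    return target(...list);   // not a rest parameter: generic spread path
  }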
@@ -2316,6 +2316,16 @@ IonBuilder::inspectOpcode(JSOp op)
         pushConstant(MagicValue(JS_IS_CONSTRUCTING));
         return Ok();
 
+      case JSOP_OPTIMIZE_SPREADCALL:
+      {
+        // Assuming optimization isn't available doesn't affect correctness.
+        // TODO: Investigate dynamic checks.
+        MDefinition* arr = current->peek(-1);
+        arr->setImplicitlyUsedUnchecked();
+        pushConstant(BooleanValue(false));
+        return Ok();
+      }
+
       default:
         break;
     }
@@ -336,7 +336,6 @@ ControlFlowGenerator::snoopControlFlow(JSOp op)
       case JSOP_TRY:
         return processTry();
 
-      case JSOP_OPTIMIZE_SPREADCALL:
       case JSOP_THROWMSG:
         // Not implemented yet.
         return ControlStatus::Abort;
@@ -236,7 +236,14 @@ private:
 bool
 xpc_LocalizeContext(JSContext* cx)
 {
-    JS_SetLocaleCallbacks(cx, new XPCLocaleCallbacks());
+    // We want to assign the locale callbacks only the first time we
+    // localize the context.
+    // All subsequent calls to this function are the result of language
+    // changes and should not assign them again.
+    const JSLocaleCallbacks* lc = JS_GetLocaleCallbacks(cx);
+    if (!lc) {
+        JS_SetLocaleCallbacks(cx, new XPCLocaleCallbacks());
+    }
 
     // Set the default locale.
 
@@ -163,6 +163,9 @@ SERVO_BINDING_FUNC(Servo_AnimationValues_Interpolate,
 SERVO_BINDING_FUNC(Servo_AnimationValues_IsInterpolable, bool,
                    RawServoAnimationValueBorrowed from,
                    RawServoAnimationValueBorrowed to)
+SERVO_BINDING_FUNC(Servo_AnimationValues_ComputeDistance, double,
+                   RawServoAnimationValueBorrowed from,
+                   RawServoAnimationValueBorrowed to)
 SERVO_BINDING_FUNC(Servo_AnimationValue_Serialize, void,
                    RawServoAnimationValueBorrowed value,
                    nsCSSPropertyID property,
@@ -224,6 +224,7 @@ skip-if = toolkit == 'android'
 skip-if = toolkit == 'android'
 [test_initial_storage.html]
 [test_keyframes_rules.html]
+[test_keyframes_vendor_prefix.html]
 [test_load_events_on_stylesheets.html]
 [test_logical_properties.html]
 [test_media_queries.html]
layout/style/test/test_keyframes_vendor_prefix.html (new file, 193 lines)
@@ -0,0 +1,193 @@
+<!DOCTYPE html>
+<meta charset=utf-8>
+<title>
+Test for interaction between prefixed and non-prefixed @keyframes rules with
+the same name
+</title>
+<script src='/resources/testharness.js'></script>
+<script src='/resources/testharnessreport.js'></script>
+<div id='log'></div>
+<script>
+/**
+ * Appends a style element to the document head.
+ *
+ * @param t  The testharness.js Test object. If provided, this will be used
+ *           to register a cleanup callback to remove the style element
+ *           when the test finishes.
+ *
+ * @param rules  A dictionary object with selector names and rules to set on
+ *               the style sheet.
+ */
+function addStyle(t, rules) {
+  var extraStyle = document.createElement('style');
+  document.head.appendChild(extraStyle);
+  if (rules) {
+    var sheet = extraStyle.sheet;
+    for (var selector in rules) {
+      sheet.insertRule(selector + '{' + rules[selector] + '}',
+                       sheet.cssRules.length);
+    }
+  }
+
+  if (t && typeof t.add_cleanup === 'function') {
+    t.add_cleanup(function() {
+      extraStyle.remove();
+    });
+  }
+}
+
+/**
+ * Appends a div to the document body.
+ *
+ * @param t  The testharness.js Test object. If provided, this will be used
+ *           to register a cleanup callback to remove the div when the test
+ *           finishes.
+ *
+ * @param attrs  A dictionary object with attribute names and values to set on
+ *               the div.
+ */
+function addDiv(t, attrs) {
+  var div = document.createElement('div');
+  if (attrs) {
+    for (var attrName in attrs) {
+      div.setAttribute(attrName, attrs[attrName]);
+    }
+  }
+  document.body.appendChild(div);
+  if (t && typeof t.add_cleanup === 'function') {
+    t.add_cleanup(function() {
+      div.remove();
+    });
+  }
+  return div;
+}
+
+var isStylo = false;
+// 'layout.css.servo.enabled' is not yet defined on gecko, so we need a try
+// block.
+try {
+  isStylo = SpecialPowers.getBoolPref('layout.css.servo.enabled');
+} catch(e) {
+}
+
+test(function(t) {
+  addStyle(t,
+           { 'dummy': '', // XXX bug 1336863 hackaround: a single insertRule is broken
+                          // on stylo.
+             '@-webkit-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-webkit- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+             '@-moz-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-moz- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+             '@-WEBKIT-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-WEBKIT- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+             '@-MOZ-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-MOZ- prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { 'dummy': '', // XXX bug 1336863 hackaround, as above.
+             '@-webkit-KEYFRAMES anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-webkit- prefix KEYFRAMES');
+
+test(function(t) {
+  if (!isStylo) {
+    // FIXME: Bug 1312918: later prefixed rule incorrectly overrides earlier
+    // non-prefixed on gecko.
+    return;
+  }
+
+  addStyle(t,
+           { '@keyframes anim': 'from,to { color: rgb(0, 255, 0); }',
+             '@-webkit-keyframes anim': 'from,to { color: rgb(255, 0, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-webkit-keyframes should not override earlier non-prefix keyframes');
+
+test(function(t) {
+  if (!isStylo) {
+    // FIXME: Bug 1312918: later prefixed rule incorrectly overrides earlier
+    // non-prefixed on gecko.
+    return;
+  }
+
+  addStyle(t,
+           { '@keyframes anim': 'from,to { color: rgb(0, 255, 0); }',
+             '@-moz-keyframes anim': 'from,to { color: rgb(255, 0, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, '-moz-keyframes should not override earlier non-prefix keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { '@-moz-keyframes anim': 'from,to { color: rgb(255, 0, 0); }',
+             '@keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, 'non-prefix keyframes should override earlier -moz-keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { '@-webkit-keyframes anim': 'from,to { color: rgb(255, 0, 0); }',
+             '@keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, 'non-prefix keyframes should override earlier -webkit-keyframes');
+
+test(function(t) {
+  addStyle(t,
+           { '@-webkit-keyframes anim': 'from,to { color: rgb(255, 0, 0); }',
+             '@-moz-keyframes anim': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+
+  addStyle(t,
+           { '@-moz-keyframes anim2': 'from,to { color: rgb(255, 0, 0); }',
+             '@-webkit-keyframes anim2': 'from,to { color: rgb(0, 255, 0); }' });
+
+  var div = addDiv(t, { style: 'animation: anim2 100s' });
+
+  assert_equals(getComputedStyle(div).color, 'rgb(0, 255, 0)');
+}, 'last prefixed keyframes should override earlier prefixed keyframes');
+</script>
@@ -234,6 +234,9 @@ static const DllBlockInfo sWindowsDllBlocklist[] = {
   { "idmcchandler7.dll", ALL_VERSIONS },
   { "idmcchandler7_64.dll", ALL_VERSIONS },
 
+  // Nahimic 2 breaks application update (bug 1356637)
+  { "nahimic2devprops.dll", ALL_VERSIONS },
+
   { nullptr, 0 }
 };
 
@@ -48,18 +48,123 @@ namespace net {
 //
 static LazyLogModule gChannelClassifierLog("nsChannelClassifier");
 
-// Whether channels should be annotated as being on the tracking protection
-// list.
-static bool sAnnotateChannelEnabled = false;
-// Whether the priority of the channels annotated as being on the tracking
-// protection list should be lowered.
-static bool sLowerNetworkPriority = false;
-static bool sIsInited = false;
-
 #undef LOG
 #define LOG(args) MOZ_LOG(gChannelClassifierLog, LogLevel::Debug, args)
 #define LOG_ENABLED() MOZ_LOG_TEST(gChannelClassifierLog, LogLevel::Debug)
 
+#define URLCLASSIFIER_SKIP_HOSTNAMES "urlclassifier.skipHostnames"
+#define URLCLASSIFIER_TRACKING_WHITELIST "urlclassifier.trackingWhitelistTable"
+
+// Put CachedPrefs in an anonymous namespace to avoid any collision from
+// outside of this file.
+namespace {
+
+/**
+ * It is not recommended to read from Preferences every time a channel is
+ * connected. That is not fast, so we cache the preference values and reuse
+ * them.
+ */
+class CachedPrefs final
+{
+public:
+  static CachedPrefs* GetInstance();
+
+  void Init();
+  bool IsAllowListExample() { return sAllowListExample;}
+  bool IsLowerNetworkPriority() { return sLowerNetworkPriority;}
+  bool IsAnnotateChannelEnabled() { return sAnnotateChannelEnabled;}
+  nsCString GetTrackingWhiteList() { return mTrackingWhitelist; }
+  void SetTrackingWhiteList(const nsACString& aList) { mTrackingWhitelist = aList; }
+  nsCString GetSkipHostnames() { return mSkipHostnames; }
+  void SetSkipHostnames(const nsACString& aHostnames) { mSkipHostnames = aHostnames; }
+
+private:
+  friend class StaticAutoPtr<CachedPrefs>;
+  CachedPrefs();
+  ~CachedPrefs();
+
+  static void OnPrefsChange(const char* aPrefName, void* );
+
+  // Whether channels should be annotated as being on the tracking protection
+  // list.
+  static bool sAnnotateChannelEnabled;
+  // Whether the priority of the channels annotated as being on the tracking
+  // protection list should be lowered.
+  static bool sLowerNetworkPriority;
+  static bool sAllowListExample;
+
+  nsCString mTrackingWhitelist;
+  nsCString mSkipHostnames;
+
+  static StaticAutoPtr<CachedPrefs> sInstance;
+};
+
+bool CachedPrefs::sAllowListExample = false;
+bool CachedPrefs::sLowerNetworkPriority = false;
+bool CachedPrefs::sAnnotateChannelEnabled = false;
+
+StaticAutoPtr<CachedPrefs> CachedPrefs::sInstance;
+
+// static
+void
+CachedPrefs::OnPrefsChange(const char* aPref, void* aClosure)
+{
+  CachedPrefs* prefs = static_cast<CachedPrefs*> (aClosure);
+
+  if (!strcmp(aPref, URLCLASSIFIER_SKIP_HOSTNAMES)) {
+    nsCString skipHostnames = Preferences::GetCString(URLCLASSIFIER_SKIP_HOSTNAMES);
+    ToLowerCase(skipHostnames);
+    prefs->SetSkipHostnames(skipHostnames);
+  } else if (!strcmp(aPref, URLCLASSIFIER_TRACKING_WHITELIST)) {
+    nsCString trackingWhitelist = Preferences::GetCString(URLCLASSIFIER_TRACKING_WHITELIST);
+    prefs->SetTrackingWhiteList(trackingWhitelist);
+  }
+}
+
+void
+CachedPrefs::Init()
+{
+  Preferences::AddBoolVarCache(&sAnnotateChannelEnabled,
+                               "privacy.trackingprotection.annotate_channels");
+  Preferences::AddBoolVarCache(&sLowerNetworkPriority,
+                               "privacy.trackingprotection.lower_network_priority");
+  Preferences::AddBoolVarCache(&sAllowListExample,
+                               "channelclassifier.allowlist_example");
+  Preferences::RegisterCallbackAndCall(CachedPrefs::OnPrefsChange,
+                                       URLCLASSIFIER_SKIP_HOSTNAMES, this);
+  Preferences::RegisterCallbackAndCall(CachedPrefs::OnPrefsChange,
+                                       URLCLASSIFIER_TRACKING_WHITELIST, this);
+}
+
+// static
+CachedPrefs*
+CachedPrefs::GetInstance()
+{
+  if (!sInstance) {
+    sInstance = new CachedPrefs();
+    sInstance->Init();
+    ClearOnShutdown(&sInstance);
+  }
+  MOZ_ASSERT(sInstance);
+  return sInstance;
+}
+
+CachedPrefs::CachedPrefs()
+{
+  MOZ_COUNT_CTOR(CachedPrefs);
+}
+
+CachedPrefs::~CachedPrefs()
+{
+  MOZ_COUNT_DTOR(CachedPrefs);
+
+  Preferences::UnregisterCallback(CachedPrefs::OnPrefsChange, URLCLASSIFIER_SKIP_HOSTNAMES, this);
+  Preferences::UnregisterCallback(CachedPrefs::OnPrefsChange, URLCLASSIFIER_TRACKING_WHITELIST, this);
+}
+} // anonymous namespace
+
 NS_IMPL_ISUPPORTS(nsChannelClassifier,
                   nsIURIClassifierCallback,
                   nsIObserver)
@@ -71,13 +176,6 @@ nsChannelClassifier::nsChannelClassifier(nsIChannel *aChannel)
     mTrackingProtectionEnabled(Nothing())
 {
   MOZ_ASSERT(mChannel);
-  if (!sIsInited) {
-    sIsInited = true;
-    Preferences::AddBoolVarCache(&sAnnotateChannelEnabled,
-                                 "privacy.trackingprotection.annotate_channels");
-    Preferences::AddBoolVarCache(&sLowerNetworkPriority,
-                                 "privacy.trackingprotection.lower_network_priority");
-  }
 }
 
 nsresult
@@ -156,8 +254,7 @@ nsChannelClassifier::ShouldEnableTrackingProtectionInternal(nsIChannel *aChannel
     nsCOMPtr<nsIIOService> ios = do_GetService(NS_IOSERVICE_CONTRACTID, &rv);
     NS_ENSURE_SUCCESS(rv, rv);
 
-    const char ALLOWLIST_EXAMPLE_PREF[] = "channelclassifier.allowlist_example";
-    if (!topWinURI && Preferences::GetBool(ALLOWLIST_EXAMPLE_PREF, false)) {
+    if (!topWinURI && CachedPrefs::GetInstance()->IsAllowListExample()) {
       LOG(("nsChannelClassifier[%p]: Allowlisting test domain\n", this));
       rv = ios->NewURI(NS_LITERAL_CSTRING("http://allowlisted.example.com"),
                        nullptr, nullptr, getter_AddRefs(topWinURI));
@@ -365,14 +462,11 @@ nsChannelClassifier::StartInternal()
     NS_ENSURE_SUCCESS(rv, rv);
     if (hasFlags) return NS_ERROR_UNEXPECTED;
 
-    // Skip whitelisted hostnames.
-    nsAutoCString whitelisted;
-    Preferences::GetCString("urlclassifier.skipHostnames", &whitelisted);
-    if (!whitelisted.IsEmpty()) {
-        ToLowerCase(whitelisted);
+    nsCString skipHostnames = CachedPrefs::GetInstance()->GetSkipHostnames();
+    if (!skipHostnames.IsEmpty()) {
         LOG(("nsChannelClassifier[%p]:StartInternal whitelisted hostnames = %s",
-             this, whitelisted.get()));
-        if (IsHostnameWhitelisted(uri, whitelisted)) {
+             this, skipHostnames.get()));
+        if (IsHostnameWhitelisted(uri, skipHostnames)) {
             return NS_ERROR_UNEXPECTED;
         }
     }
@@ -411,7 +505,9 @@ nsChannelClassifier::StartInternal()
     }
     // The classify is running in parent process, no need to give a valid event
    // target
-    rv = uriClassifier->Classify(principal, nullptr, sAnnotateChannelEnabled | trackingProtectionEnabled,
+    rv = uriClassifier->Classify(principal, nullptr,
+                                 CachedPrefs::GetInstance()->IsAnnotateChannelEnabled() ||
+                                 trackingProtectionEnabled,
                                  this, &expectCallback);
    if (NS_FAILED(rv)) {
        return rv;
@@ -733,10 +829,8 @@ nsChannelClassifier::IsTrackerWhitelisted(const nsACString& aList,
       do_GetService(NS_URICLASSIFIERSERVICE_CONTRACTID, &rv);
   NS_ENSURE_SUCCESS(rv, rv);
 
-  nsAutoCString tables;
-  Preferences::GetCString("urlclassifier.trackingWhitelistTable", &tables);
-
-  if (tables.IsEmpty()) {
+  nsCString trackingWhitelist = CachedPrefs::GetInstance()->GetTrackingWhiteList();
+  if (trackingWhitelist.IsEmpty()) {
     LOG(("nsChannelClassifier[%p]:IsTrackerWhitelisted whitelist disabled",
          this));
     return NS_ERROR_TRACKING_URI;
@@ -780,7 +874,7 @@ nsChannelClassifier::IsTrackerWhitelisted(const nsACString& aList,
       new IsTrackerWhitelistedCallback(this, aList, aProvider, aPrefix,
                                        whitelistEntry);
 
-  return uriClassifier->AsyncClassifyLocalWithTables(whitelistURI, tables, cb);
+  return uriClassifier->AsyncClassifyLocalWithTables(whitelistURI, trackingWhitelist, cb);
 }
 
 NS_IMETHODIMP
@@ -823,7 +917,7 @@ nsChannelClassifier::OnClassifyCompleteInternal(nsresult aErrorCode,
 
   if (aErrorCode == NS_ERROR_TRACKING_URI &&
       !mTrackingProtectionEnabled.valueOr(false)) {
-    if (sAnnotateChannelEnabled) {
+    if (CachedPrefs::GetInstance()->IsAnnotateChannelEnabled()) {
      nsCOMPtr<nsIParentChannel> parentChannel;
      NS_QueryNotificationCallbacks(mChannel, parentChannel);
      if (parentChannel) {
@@ -837,7 +931,7 @@ nsChannelClassifier::OnClassifyCompleteInternal(nsresult aErrorCode,
    }
  }
 
-  if (sLowerNetworkPriority) {
+  if (CachedPrefs::GetInstance()->IsLowerNetworkPriority()) {
    if (LOG_ENABLED()) {
      nsCOMPtr<nsIURI> uri;
      mChannel->GetURI(getter_AddRefs(uri));
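The CachedPrefs singleton above caches the preferences the classifier consults on every channel. A hedged chrome-JS sketch of the knobs involved (Services.prefs is the standard API; the comma-separated skipHostnames value is an assumption for illustration only):

  // Annotate tracking channels and lower their network priority.
  Services.prefs.setBoolPref("privacy.trackingprotection.annotate_channels", true);
  Services.prefs.setBoolPref("privacy.trackingprotection.lower_network_priority", true);
  // Hostnames to skip classification for (lower-cased when cached).
  Services.prefs.setCharPref("urlclassifier.skipHostnames", "example.com,example.org");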
@@ -432,7 +432,6 @@ nsIndexedToHTML::DoOnStartRequest(nsIRequest* request, nsISupports *aContext,
     nsCOMPtr<nsIFileURL> fileURL(do_QueryInterface(innerUri));
     //XXX bug 388553: can't use skinnable icons here due to security restrictions
     if (fileURL) {
-      //buffer.AppendLiteral("chrome://global/skin/dirListing/local.png");
       buffer.AppendLiteral("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB"
                            "AAAAAQCAYAAAAf8%2F9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i"
                            "ZSBJbWFnZVJlYWR5ccllPAAAAjFJREFUeNqsU8uOElEQPffR"
@@ -454,7 +453,6 @@ nsIndexedToHTML::DoOnStartRequest(nsIRequest* request, nsISupports *aContext,
                            "4pQ1%2FlPF0RGM9Ns91Wmptk0GfB4EJkt77vXYj%2F8m%2B8"
                            "y%2FkrwABHbz2H9V68DQAAAABJRU5ErkJggg%3D%3D");
     } else {
-      //buffer.AppendLiteral("chrome://global/skin/dirListing/remote.png");
       buffer.AppendLiteral("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB"
                            "AAAAAQCAYAAAAf8%2F9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i"
                            "ZSBJbWFnZVJlYWR5ccllPAAAAeBJREFUeNqcU81O20AQ%2Ft"
@@ -25,6 +25,7 @@ const CONFIG_PREFS = [
   "identity.sync.tokenserver.uri",
   "identity.fxaccounts.remote.webchannel.uri",
   "identity.fxaccounts.settings.uri",
+  "identity.fxaccounts.settings.devices.uri",
   "identity.fxaccounts.remote.signup.uri",
   "identity.fxaccounts.remote.signin.uri",
   "identity.fxaccounts.remote.force_auth.uri",
@@ -153,6 +154,7 @@ this.FxAccountsConfig = {
 
     Services.prefs.setCharPref("identity.fxaccounts.remote.webchannel.uri", rootURL);
     Services.prefs.setCharPref("identity.fxaccounts.settings.uri", rootURL + "/settings?service=sync&context=" + contextParam);
+    Services.prefs.setCharPref("identity.fxaccounts.settings.devices.uri", rootURL + "/settings/clients?service=sync&context=" + contextParam);
     Services.prefs.setCharPref("identity.fxaccounts.remote.signup.uri", rootURL + "/signup?service=sync&context=" + contextParam);
     Services.prefs.setCharPref("identity.fxaccounts.remote.signin.uri", rootURL + "/signin?service=sync&context=" + contextParam);
     Services.prefs.setCharPref("identity.fxaccounts.remote.force_auth.uri", rootURL + "/force_auth?service=sync&context=" + contextParam);
@@ -639,7 +639,7 @@ class BookmarkRepairResponder extends CollectionRepairResponder {
     let itemSource = engine.itemSource();
     itemSource.ids = repairable.map(item => item.syncId);
     log.trace(`checking the server for items`, itemSource.ids);
-    let itemsResponse = itemSource.get();
+    let itemsResponse = await itemSource.get();
     // If the response failed, don't bother trying to parse the output.
     // Throwing here means we abort the repair, which isn't ideal for transient
     // errors (eg, no network, 500 service outage etc), but we don't currently
@@ -813,7 +813,7 @@ class BookmarkValidator {
     return inspectionInfo;
   }
 
-  _getServerState(engine) {
+  async _getServerState(engine) {
     // XXXXX - todo - we need to capture last-modified of the server here and
     // ensure the repairer only applys with if-unmodified-since that date.
     let collection = engine.itemSource();
@@ -824,7 +824,7 @@ class BookmarkValidator {
       item.decrypt(collectionKey);
       items.push(item.cleartext);
     };
-    let resp = collection.getBatched();
+    let resp = await collection.getBatched();
     if (!resp.success) {
       throw resp;
     }
@@ -836,7 +836,7 @@ class BookmarkValidator {
     let clientTree = await PlacesUtils.promiseBookmarksTree("", {
       includeItemIds: true
     });
-    let serverState = this._getServerState(engine);
+    let serverState = await this._getServerState(engine);
     let serverRecordCount = serverState.length;
     let result = await this.compareServerWithClient(serverState, clientTree);
     let end = Date.now();
@@ -59,7 +59,7 @@ class CollectionValidator {
     return new CollectionProblemData();
   }
 
-  getServerItems(engine) {
+  async getServerItems(engine) {
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
@@ -68,7 +68,7 @@ class CollectionValidator {
       item.decrypt(collectionKey);
      items.push(item.cleartext);
    };
-    let resp = collection.getBatched();
+    let resp = await collection.getBatched();
    if (!resp.success) {
      throw resp;
    }
@@ -968,7 +968,7 @@ SyncEngine.prototype = {
   _syncStartup() {
 
     // Determine if we need to wipe on outdated versions
-    let metaGlobal = this.service.recordManager.get(this.metaURL);
+    let metaGlobal = Async.promiseSpinningly(this.service.recordManager.get(this.metaURL));
     let engines = metaGlobal.payload.engines || {};
     let engineData = engines[this.name] || {};
 
@@ -1233,7 +1233,7 @@ SyncEngine.prototype = {
 
     // Only bother getting data from the server if there's new things
     if (this.lastModified == null || this.lastModified > this.lastSync) {
-      let resp = newitems.getBatched();
+      let resp = Async.promiseSpinningly(newitems.getBatched());
      doApplyBatchAndPersistFailed.call(this);
      if (!resp.success) {
        resp.failureCode = ENGINE_DOWNLOAD_FAIL;
@@ -1256,7 +1256,7 @@ SyncEngine.prototype = {
     // index: Orders by the sortindex descending (highest weight first).
     guidColl.sort = "index";
 
-    let guids = guidColl.get();
+    let guids = Async.promiseSpinningly(guidColl.get());
     if (!guids.success)
       throw guids;
 
@@ -1289,7 +1289,7 @@ SyncEngine.prototype = {
       newitems.ids = fetchBatch.slice(0, batchSize);
 
       // Reuse the existing record handler set earlier
-      let resp = newitems.get();
+      let resp = Async.promiseSpinningly(newitems.get());
      if (!resp.success) {
        resp.failureCode = ENGINE_DOWNLOAD_FAIL;
        throw resp;
@@ -1760,7 +1760,7 @@ SyncEngine.prototype = {
     let doDelete = Utils.bind2(this, function(key, val) {
       let coll = new Collection(this.engineURL, this._recordObj, this.service);
       coll[key] = val;
-      coll.delete();
+      Async.promiseSpinningly(coll.delete());
     });
 
     for (let [key, val] of Object.entries(this._delete)) {
@@ -1826,7 +1826,7 @@ SyncEngine.prototype = {
     // Any failure fetching/decrypting will just result in false
     try {
       this._log.trace("Trying to decrypt a record from the server..");
-      test.get();
+      Async.promiseSpinningly(test.get());
     } catch (ex) {
       if (Async.isShutdownException(ex)) {
         throw ex;
@@ -1844,14 +1844,14 @@ SyncEngine.prototype = {
   },
 
   wipeServer() {
-    let response = this.service.resource(this.engineURL).delete();
+    let response = Async.promiseSpinningly(this.service.resource(this.engineURL).delete());
     if (response.status != 200 && response.status != 404) {
       throw response;
     }
     this._resetClient();
   },
 
-  removeClientData() {
+  async removeClientData() {
     // Implement this method in engines that store client specific data
     // on the server.
   },
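The pattern used throughout the still-synchronous Sync code above: the network helpers are now async, and legacy callers block on them with Async.promiseSpinningly, which spins the event loop until the promise settles and returns its value (or rethrows the rejection). A minimal sketch mirroring the calls in this patch:

  // Synchronous caller bridging to a now-async helper.
  let resp = Async.promiseSpinningly(collection.getBatched());
  if (!resp.success) {
    resp.failureCode = ENGINE_DOWNLOAD_FAIL;
    throw resp;
  }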
@@ -509,9 +509,9 @@ ClientEngine.prototype = {
     );
   },
 
-  removeClientData: function removeClientData() {
+  async removeClientData() {
     let res = this.service.resource(this.engineURL + "/" + this.localID);
-    res.delete();
+    await res.delete();
   },
 
   // Override the default behavior to delete bad records from the server.
@@ -73,9 +73,9 @@ TabEngine.prototype = {
     this.hasSyncedThisSession = false;
   },
 
-  removeClientData() {
+  async removeClientData() {
     let url = this.engineURL + "/" + this.service.clientsEngine.localID;
-    this.service.resource(url).delete();
+    await this.service.resource(url).delete();
   },
 
   /**
@@ -42,12 +42,12 @@ WBORecord.prototype = {
 
   // Get thyself from your URI, then deserialize.
   // Set thine 'response' field.
-  fetch: function fetch(resource) {
+  async fetch(resource) {
     if (!(resource instanceof Resource)) {
       throw new Error("First argument must be a Resource instance.");
     }
 
-    let r = resource.get();
+    let r = await resource.get();
     if (r.success) {
       this.deserialize(r);   // Warning! Muffles exceptions!
     }
@@ -55,7 +55,7 @@ WBORecord.prototype = {
     return this;
   },
 
-  upload: function upload(resource) {
+  upload(resource) {
     if (!(resource instanceof Resource)) {
       throw new Error("First argument must be a Resource instance.");
     }
@@ -223,12 +223,12 @@ RecordManager.prototype = {
   _recordType: CryptoWrapper,
   _logName: "Sync.RecordManager",
 
-  import: function RecordMgr_import(url) {
+  async import(url) {
     this._log.trace("Importing record: " + (url.spec ? url.spec : url));
     try {
       // Clear out the last response with empty object if GET fails
       this.response = {};
-      this.response = this.service.resource(url).get();
+      this.response = await this.service.resource(url).get();
 
       // Don't parse and save the record on failure
       if (!this.response.success)
@@ -247,11 +247,11 @@ RecordManager.prototype = {
     }
   },
 
-  get: function RecordMgr_get(url) {
+  get(url) {
     // Use a url string as the key to the hash
     let spec = url.spec ? url.spec : url;
     if (spec in this._records)
-      return this._records[spec];
+      return Promise.resolve(this._records[spec]);
     return this.import(url);
   },
 
@ -703,7 +703,7 @@ Collection.prototype = {
|
|||||||
// Returns the last response processed, and doesn't run the record handler
|
// Returns the last response processed, and doesn't run the record handler
|
||||||
// on any items if a non-success status is received while downloading the
|
// on any items if a non-success status is received while downloading the
|
||||||
// records (or if a network error occurs).
|
// records (or if a network error occurs).
|
||||||
getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
|
async getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
|
||||||
let totalLimit = Number(this.limit) || Infinity;
|
let totalLimit = Number(this.limit) || Infinity;
|
||||||
if (batchSize <= 0 || batchSize >= totalLimit) {
|
if (batchSize <= 0 || batchSize >= totalLimit) {
|
||||||
// Invalid batch sizes should arguably be an error, but they're easy to handle
|
// Invalid batch sizes should arguably be an error, but they're easy to handle
|
||||||
@ -733,7 +733,7 @@ Collection.prototype = {
|
|||||||
}
|
}
|
||||||
this._log.trace("Performing batched GET", { limit: this.limit, offset: this.offset });
|
this._log.trace("Performing batched GET", { limit: this.limit, offset: this.offset });
|
||||||
// Actually perform the request
|
// Actually perform the request
|
||||||
resp = this.get();
|
resp = await this.get();
|
||||||
if (!resp.success) {
|
if (!resp.success) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -996,7 +996,8 @@ PostQueue.prototype = {
|
|||||||
}
|
}
|
||||||
this.queued = "";
|
this.queued = "";
|
||||||
this.numQueued = 0;
|
this.numQueued = 0;
|
||||||
let response = this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null));
|
let response = Async.promiseSpinningly(
|
||||||
|
this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null)));
|
||||||
|
|
||||||
if (!response.success) {
|
if (!response.success) {
|
||||||
this.log.trace("Server error response during a batch", response);
|
this.log.trace("Server error response during a batch", response);
|
||||||
|
@@ -133,7 +133,8 @@ AsyncResource.prototype = {
// through. It is never called directly, only {{{_doRequest}}} uses it
// to obtain a request channel.
//
-_createRequest: function Res__createRequest(method) {
+_createRequest(method) {
+this.method = method;
let channel = NetUtil.newChannel({uri: this.spec, loadUsingSystemPrincipal: true})
.QueryInterface(Ci.nsIRequest)
.QueryInterface(Ci.nsIHttpChannel);
@@ -172,55 +173,55 @@ AsyncResource.prototype = {
return channel;
},

-_onProgress: function Res__onProgress(channel) {},
+_onProgress(channel) {},

-_doRequest: function _doRequest(action, data, callback) {
+_doRequest(action, data) {
this._log.trace("In _doRequest.");
-this._callback = callback;
+return new Promise((resolve, reject) => {
-let channel = this._createRequest(action);
+this._deferred = { resolve, reject };
+let channel = this._createRequest(action);

if ("undefined" != typeof(data))
this._data = data;

// PUT and POST are treated differently because they have payload data.
if ("PUT" == action || "POST" == action) {
// Convert non-string bodies into JSON
if (this._data.constructor.toString() != String)
this._data = JSON.stringify(this._data);

this._log.debug(action + " Length: " + this._data.length);
this._log.trace(action + " Body: " + this._data);

let type = ("content-type" in this._headers) ?
this._headers["content-type"] : "text/plain";

let stream = Cc["@mozilla.org/io/string-input-stream;1"].
createInstance(Ci.nsIStringInputStream);
stream.setData(this._data, this._data.length);

channel.QueryInterface(Ci.nsIUploadChannel);
channel.setUploadStream(stream, type, this._data.length);
}

// Setup a channel listener so that the actual network operation
// is performed asynchronously.
let listener = new ChannelListener(this._onComplete, this._onProgress,
this._log, this.ABORT_TIMEOUT);
channel.requestMethod = action;
-try {
channel.asyncOpen2(listener);
-} catch (ex) {
+});
-// asyncOpen2 can throw in a bunch of cases -- e.g., a forbidden port.
-this._log.warn("Caught an error in asyncOpen2", ex);
-CommonUtils.nextTick(callback.bind(this, ex));
-}
},

-_onComplete: function _onComplete(error, data, channel) {
+_onComplete(ex, data, channel) {
-this._log.trace("In _onComplete. Error is " + error + ".");
+this._log.trace("In _onComplete. Error is " + ex + ".");

-if (error) {
+if (ex) {
-this._callback(error);
+if (!Async.isShutdownException(ex)) {
+this._log.warn("${action} request to ${url} failed: ${ex}",
+{ action: this.method, url: this.uri.spec, ex});
+}
+this._deferred.reject(ex);
return;
}

@@ -324,117 +325,29 @@ AsyncResource.prototype = {
}
}.bind(this));

-this._callback(null, ret);
+this._deferred.resolve(ret);
},

-get: function get(callback) {
+get() {
-this._doRequest("GET", undefined, callback);
+return this._doRequest("GET", undefined);
},

-put: function put(data, callback) {
+put(data) {
-if (typeof data == "function")
+return this._doRequest("PUT", data);
-[data, callback] = [undefined, data];
-this._doRequest("PUT", data, callback);
},

-post: function post(data, callback) {
+post(data) {
-if (typeof data == "function")
+return this._doRequest("POST", data);
-[data, callback] = [undefined, data];
-this._doRequest("POST", data, callback);
},

-delete: function delete_(callback) {
+delete() {
-this._doRequest("DELETE", undefined, callback);
+return this._doRequest("DELETE", undefined);
}
};

+// TODO: We still export both "Resource" and "AsyncRecourse" as the same
-/*
+// object, but we should decide on one and unify all references.
-* Represent a remote network resource, identified by a URI, with a
+this.Resource = AsyncResource;
-* synchronous API.
-*
-* 'Resource' is not recommended for new code. Use the asynchronous API of
-* 'AsyncResource' instead.
-*/
-this.Resource = function Resource(uri) {
-AsyncResource.call(this, uri);
-}
-Resource.prototype = {
-
-__proto__: AsyncResource.prototype,
-
-_logName: "Sync.Resource",
-
-// ** {{{ Resource._request }}} **
-//
-// Perform a particular HTTP request on the resource. This method
-// is never called directly, but is used by the high-level
-// {{{get}}}, {{{put}}}, {{{post}}} and {{delete}} methods.
-_request: function Res__request(action, data) {
-let cb = Async.makeSyncCallback();
-function callback(error, ret) {
-if (error)
-cb.throw(error);
-else
-cb(ret);
-}
-
-// The channel listener might get a failure code
-try {
-this._doRequest(action, data, callback);
-return Async.waitForSyncCallback(cb);
-} catch (ex) {
-if (Async.isShutdownException(ex)) {
-throw ex;
-}
-this._log.warn("${action} request to ${url} failed: ${ex}",
-{ action, url: this.uri.spec, ex });
-// Combine the channel stack with this request stack. Need to create
-// a new error object for that.
-let error = Error(ex.message);
-error.result = ex.result;
-let chanStack = [];
-if (ex.stack)
-chanStack = ex.stack.trim().split(/\n/).slice(1);
-let requestStack = error.stack.split(/\n/).slice(1);
-
-// Strip out the args for the last 2 frames because they're usually HUGE!
-for (let i = 0; i <= 1; i++)
-requestStack[i] = requestStack[i].replace(/\(".*"\)@/, "(...)@");
-
-error.stack = chanStack.concat(requestStack).join("\n");
-throw error;
-}
-},
-
-// ** {{{ Resource.get }}} **
-//
-// Perform an asynchronous HTTP GET for this resource.
-get: function Res_get() {
-return this._request("GET");
-},
-
-// ** {{{ Resource.put }}} **
-//
-// Perform a HTTP PUT for this resource.
-put: function Res_put(data) {
-return this._request("PUT", data);
-},
-
-// ** {{{ Resource.post }}} **
-//
-// Perform a HTTP POST for this resource.
-post: function Res_post(data) {
-return this._request("POST", data);
-},
-
-// ** {{{ Resource.delete }}} **
-//
-// Perform a HTTP DELETE for this resource.
-delete: function Res_delete() {
-return this._request("DELETE");
-}
-};
-
// = ChannelListener =
//
@@ -206,7 +206,7 @@ Sync11Service.prototype = {
// Fetch keys.
let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
try {
-let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;

// Save out the ciphertext for when we reupload. If there's a bug in
// CollectionKeyManager, this will prevent us from uploading junk.
@@ -465,7 +465,7 @@ Sync11Service.prototype = {
this._log.trace("In _fetchInfo: " + infoURL);
let info;
try {
-info = this.resource(infoURL).get();
+info = Async.promiseSpinningly(this.resource(infoURL).get());
} catch (ex) {
this.errorHandler.checkServerError(ex);
throw ex;
@@ -518,7 +518,7 @@ Sync11Service.prototype = {
if (infoCollections && (CRYPTO_COLLECTION in infoCollections)) {
try {
cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
-let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;

if (cryptoResp.success) {
this.handleFetchedKeys(syncKeyBundle, cryptoKeys);
@@ -613,7 +613,7 @@ Sync11Service.prototype = {
}

// Fetch collection info on every startup.
-let test = this.resource(this.infoURL).get();
+let test = Async.promiseSpinningly(this.resource(this.infoURL).get());

switch (test.status) {
case 200:
@@ -717,7 +717,7 @@ Sync11Service.prototype = {

// Download and install them.
let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
-let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
if (cryptoResp.status != 200) {
this._log.warn("Failed to download keys.");
throw new Error("Symmetric key download failed.");
@@ -739,7 +739,11 @@ Sync11Service.prototype = {
// Clear client-specific data from the server, including disabled engines.
for (let engine of [this.clientsEngine].concat(this.engineManager.getAll())) {
try {
-engine.removeClientData();
+// Note the additional Promise.resolve here is to handle the fact that
+// some 3rd party engines probably don't return a promise. We can
+// probably nuke this once webextensions become mandatory as then
+// no 3rd party engines will be allowed to exist.
+Async.promiseSpinningly(Promise.resolve().then(() => engine.removeClientData()));
} catch (ex) {
this._log.warn(`Deleting client data for ${engine.name} failed`, ex);
}
@@ -780,7 +784,7 @@ Sync11Service.prototype = {
this._clusterManager = this.identity.createClusterManager(this);
Svc.Obs.notify("weave:service:start-over:finish");
} catch (err) {
-this._log.error("startOver failed to re-initialize the identity manager: " + err);
+this._log.error("startOver failed to re-initialize the identity manager", err);
// Still send the observer notification so the current state is
// reflected in the UI.
Svc.Obs.notify("weave:service:start-over:finish");
@@ -847,7 +851,7 @@ Sync11Service.prototype = {
this._log.debug("Fetching server configuration", infoURL);
let configResponse;
try {
-configResponse = this.resource(infoURL).get();
+configResponse = Async.promiseSpinningly(this.resource(infoURL).get());
} catch (ex) {
// This is probably a network or similar error.
this._log.warn("Failed to fetch info/configuration", ex);
@@ -877,7 +881,7 @@ Sync11Service.prototype = {
}

this._log.debug("Fetching global metadata record");
-let meta = this.recordManager.get(this.metaURL);
+let meta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));

// Checking modified time of the meta record.
if (infoResponse &&
@@ -892,7 +896,7 @@ Sync11Service.prototype = {
this.recordManager.del(this.metaURL);

// ... fetch the current record from the server, and COPY THE FLAGS.
-let newMeta = this.recordManager.get(this.metaURL);
+let newMeta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));

// If we got a 401, we do not want to create a new meta/global - we
// should be able to get the existing meta after we get a new node.
@@ -1092,7 +1096,7 @@ Sync11Service.prototype = {
// Now let's update our declined engines (but only if we have a metaURL;
// if Sync failed due to no node we will not have one)
if (this.metaURL) {
-let meta = this.recordManager.get(this.metaURL);
+let meta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
if (!meta) {
this._log.warn("No meta/global; can't update declined state.");
return;
@@ -1134,7 +1138,7 @@ Sync11Service.prototype = {
this._log.debug("Uploading meta/global", meta);
let res = this.resource(this.metaURL);
res.setHeader("X-If-Unmodified-Since", meta.modified);
-let response = res.put(meta);
+let response = Async.promiseSpinningly(res.put(meta));
if (!response.success) {
throw response;
}
@@ -1154,7 +1158,7 @@ Sync11Service.prototype = {
this._log.debug(`Uploading crypto/keys (lastModified: ${lastModified})`);
let res = this.resource(this.cryptoKeysURL);
res.setHeader("X-If-Unmodified-Since", lastModified);
-return res.put(cryptoKeys);
+return Async.promiseSpinningly(res.put(cryptoKeys));
},

_freshStart: function _freshStart() {
@@ -1197,7 +1201,7 @@ Sync11Service.prototype = {
let res = this.resource(this.storageURL.slice(0, -1));
res.setHeader("X-Confirm-Delete", "1");
try {
-response = res.delete();
+response = Async.promiseSpinningly(res.delete());
} catch (ex) {
this._log.debug("Failed to wipe server", ex);
histogram.add(false);
@@ -1217,7 +1221,7 @@ Sync11Service.prototype = {
for (let name of collections) {
let url = this.storageURL + name;
try {
-response = this.resource(url).delete();
+response = Async.promiseSpinningly(this.resource(url).delete());
} catch (ex) {
this._log.debug("Failed to wipe '" + name + "' collection", ex);
histogram.add(false);
@@ -180,7 +180,7 @@ EngineSynchronizer.prototype = {
}

// Upload meta/global if any engines changed anything.
-let meta = this.service.recordManager.get(this.service.metaURL);
+let meta = Async.promiseSpinningly(this.service.recordManager.get(this.service.metaURL));
if (meta.isNew || meta.changed) {
this._log.info("meta/global changed locally: reuploading.");
try {
@@ -342,7 +342,7 @@ EngineSynchronizer.prototype = {
},

_updateEnabledEngines() {
-let meta = this.service.recordManager.get(this.service.metaURL);
+let meta = Async.promiseSpinningly(this.service.recordManager.get(this.service.metaURL));
let numClients = this.service.scheduler.numClients;
let engineManager = this.service.engineManager;

@@ -100,7 +100,7 @@ const EHTestsCommon = {
let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562");
let keys = Service.collectionKeys.asWBO();
keys.encrypt(newSyncKeyBundle);
-keys.upload(Service.resource(Service.cryptoKeysURL));
+return keys.upload(Service.resource(Service.cryptoKeysURL));
},

async setUp(server) {
@@ -108,10 +108,11 @@ const EHTestsCommon = {
return EHTestsCommon.generateAndUploadKeys()
},

-generateAndUploadKeys() {
+async generateAndUploadKeys() {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
-return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+let response = await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+return response.success;
}
};
@@ -723,7 +723,7 @@ add_task(async function test_misreconciled_root() {
getBatched() {
return this.get();
},
-get() {
+async get() {
this.recordHandler(encrypted);
return {success: true}
},
@@ -92,11 +92,11 @@ add_task(async function test_bad_hmac() {
return !wbo || !wbo.payload;
}

-function uploadNewKeys() {
+async function uploadNewKeys() {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
-ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
}

try {
@@ -126,7 +126,7 @@ add_task(async function test_bad_hmac() {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
-ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);

_("Sync.");
engine._sync();
@@ -161,14 +161,14 @@ add_task(async function test_bad_hmac() {
deletedItems = [];
check_clients_count(0);

-uploadNewKeys();
+await uploadNewKeys();

// Sync once to upload a record.
engine._sync();
check_clients_count(1);

// Generate and upload new keys, so the old client record is wrong.
-uploadNewKeys();
+await uploadNewKeys();

// Create a new client record and new keys. Now our keys are wrong, as well
// as the object on the server. We'll download the new keys and also delete
@@ -317,7 +317,7 @@ add_task(async function test_sync() {
ok(engine.lastRecordUpload > lastweek);

_("Remove client record.");
-engine.removeClientData();
+await engine.removeClientData();
equal(clientWBO().payload, undefined);

_("Time travel one day back, no record uploaded.");
@@ -26,7 +26,7 @@ function get_test_collection_info({ totalRecords, batchSize, lastModified,
let requests = [];
let responses = [];
let sawRecord = false;
-coll.get = function() {
+coll.get = async function() {
ok(!sawRecord); // make sure we call record handler after all requests.
let limit = +this.limit;
let offset = 0;
@@ -76,7 +76,7 @@ function get_test_collection_info({ totalRecords, batchSize, lastModified,
return { records, responses, requests, coll };
}

-add_test(function test_success() {
+add_task(async function test_success() {
const totalRecords = 11;
const batchSize = 2;
const lastModified = "111111";
@@ -85,7 +85,7 @@ add_test(function test_success() {
batchSize,
lastModified,
});
-let response = coll.getBatched(batchSize);
+let response = await coll.getBatched(batchSize);

equal(requests.length, Math.ceil(totalRecords / batchSize));

@@ -116,11 +116,9 @@ add_test(function test_success() {
ok(!coll._headers["x-if-unmodified-since"]);
ok(!coll.offset);
ok(!coll.limit || (coll.limit == Infinity));
-
-run_next_test();
});

-add_test(function test_total_limit() {
+add_task(async function test_total_limit() {
_("getBatched respects the (initial) value of the limit property");
const totalRecords = 100;
const recordLimit = 11;
@@ -132,7 +130,7 @@ add_test(function test_total_limit() {
lastModified,
});
coll.limit = recordLimit;
-coll.getBatched(batchSize);
+await coll.getBatched(batchSize);

equal(requests.length, Math.ceil(recordLimit / batchSize));
equal(records.length, recordLimit);
@@ -147,11 +145,9 @@ add_test(function test_total_limit() {
}

equal(coll._limit, recordLimit);
-
-run_next_test();
});

-add_test(function test_412() {
+add_task(async function test_412() {
_("We shouldn't record records if we get a 412 in the middle of a batch");
const totalRecords = 11;
const batchSize = 2;
@@ -162,7 +158,7 @@ add_test(function test_412() {
lastModified,
interruptedAfter: 3
});
-let response = coll.getBatched(batchSize);
+let response = await coll.getBatched(batchSize);

equal(requests.length, 3);
equal(records.length, 0); // record handler shouldn't be called for anything
@@ -172,10 +168,9 @@ add_test(function test_412() {

ok(!response.success);
equal(response.status, 412);
-run_next_test();
});

-add_test(function test_get_throws() {
+add_task(async function test_get_throws() {
_("We shouldn't record records if get() throws for some reason");
const totalRecords = 11;
const batchSize = 2;
@@ -187,9 +182,8 @@ add_test(function test_get_throws() {
throwAfter: 3
});

-throws(() => coll.getBatched(batchSize), "Some Network Error");
+await Assert.rejects(coll.getBatched(batchSize), "Some Network Error");

equal(requests.length, 3);
equal(records.length, 0);
-run_next_test();
});
@@ -71,7 +71,7 @@ add_task(async function test_locally_changed_keys() {
let m = new WBORecord("meta", "global");
m.payload = {"syncID": "foooooooooooooooooooooooooo",
"storageVersion": STORAGE_VERSION};
-m.upload(Service.resource(Service.metaURL));
+await m.upload(Service.resource(Service.metaURL));

_("New meta/global: " + JSON.stringify(johndoe.collection("meta").wbo("global")));

@@ -79,7 +79,7 @@ add_task(async function test_locally_changed_keys() {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
-do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+do_check_true((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);

// Check that login works.
do_check_true(Service.login());
@@ -123,7 +123,7 @@ add_task(async function test_locally_changed_keys() {

// Check that we can decrypt one.
let rec = new CryptoWrapper("history", "record-no--0");
-rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
+await rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
_(JSON.stringify(rec));
do_check_true(!!rec.decrypt(liveKeys));

@@ -120,7 +120,7 @@ add_task(async function test_credentials_changed_logout() {
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);

-EHTestsCommon.generateCredentialsChangedFailure();
+await EHTestsCommon.generateCredentialsChangedFailure();

let ping = await sync_and_validate_telem(true);
equal(ping.status.sync, CREDENTIALS_CHANGED);
@@ -397,7 +397,7 @@ add_task(async function test_sync_syncAndReportErrors_non_network_error() {
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);

-EHTestsCommon.generateCredentialsChangedFailure();
+await EHTestsCommon.generateCredentialsChangedFailure();

let promiseObserved = promiseOneObserver("weave:ui:sync:error");

@@ -450,7 +450,7 @@ add_task(async function test_sync_syncAndReportErrors_prolonged_non_network_erro
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);

-EHTestsCommon.generateCredentialsChangedFailure();
+await EHTestsCommon.generateCredentialsChangedFailure();

let promiseObserved = promiseOneObserver("weave:ui:sync:error");

@@ -579,7 +579,7 @@ add_task(async function test_sync_prolonged_non_network_error() {
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);

-EHTestsCommon.generateCredentialsChangedFailure();
+await EHTestsCommon.generateCredentialsChangedFailure();

let promiseObserved = promiseOneObserver("weave:ui:sync:error");

@@ -668,7 +668,7 @@ add_task(async function test_sync_non_network_error() {
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);

-EHTestsCommon.generateCredentialsChangedFailure();
+await EHTestsCommon.generateCredentialsChangedFailure();

let promiseObserved = promiseOneObserver("weave:ui:sync:error");

@@ -57,12 +57,12 @@ async function setUp(server) {
new FakeCryptoService();
}

-function generateAndUploadKeys(server) {
+async function generateAndUploadKeys(server) {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
-return serverKeys.upload(res).success;
+return (await serverKeys.upload(res)).success;
}


@@ -81,7 +81,7 @@ add_task(async function test_backoff500() {
do_check_false(Status.enforceBackoff);

// Forcibly create and upload keys here -- otherwise we don't get to the 500!
-do_check_true(generateAndUploadKeys(server));
+do_check_true(await generateAndUploadKeys(server));

Service.login();
Service.sync();
@@ -116,7 +116,7 @@ add_task(async function test_backoff503() {
try {
do_check_false(Status.enforceBackoff);

-do_check_true(generateAndUploadKeys(server));
+do_check_true(await generateAndUploadKeys(server));

Service.login();
Service.sync();
@@ -150,7 +150,7 @@ add_task(async function test_overQuota() {
try {
do_check_eq(Status.sync, SYNC_SUCCEEDED);

-do_check_true(generateAndUploadKeys(server));
+do_check_true(await generateAndUploadKeys(server));

Service.login();
Service.sync();
@@ -230,7 +230,7 @@ add_task(async function test_engine_networkError() {
try {
do_check_eq(Status.sync, SYNC_SUCCEEDED);

-do_check_true(generateAndUploadKeys(server));
+do_check_true(await generateAndUploadKeys(server));

Service.login();
Service.sync();
@@ -259,7 +259,7 @@ add_task(async function test_resource_timeout() {
try {
do_check_eq(Status.sync, SYNC_SUCCEEDED);

-do_check_true(generateAndUploadKeys(server));
+do_check_true(await generateAndUploadKeys(server));

Service.login();
Service.sync();
@@ -22,7 +22,7 @@ function makePostQueue(config, lastModTime, responseGenerator) {
thisPost.headers = headers;
}
stats.posts.push(thisPost);
-return responseGenerator.next().value;
+return Promise.resolve(responseGenerator.next().value);
}

let done = () => {}
@@ -7,8 +7,9 @@ Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");

+initTestLogging("Trace");

-function test_toJSON() {
+add_test(function test_toJSON() {
_("Create a record, for now without a TTL.");
let wbo = new WBORecord("coll", "a_record");
wbo.modified = 12345;
@@ -26,10 +27,11 @@ function test_toJSON() {
wbo.ttl = 30 * 60;
json = JSON.parse(JSON.stringify(wbo));
do_check_eq(json.ttl, 30 * 60);
-}
+run_next_test();
+});


-function test_fetch() {
+add_task(async function test_fetch() {
let record = {id: "asdf-1234-asdf-1234",
modified: 2454725.98283,
payload: JSON.stringify({cheese: "roquefort"})};
@@ -46,12 +48,11 @@ function test_fetch() {
"/record2": httpd_handler(200, "OK", JSON.stringify(record2)),
"/coll": httpd_handler(200, "OK", JSON.stringify(coll))
});
-do_test_pending();

try {
_("Fetching a WBO record");
let rec = new WBORecord("coll", "record");
-rec.fetch(Service.resource(server.baseURI + "/record"));
+await rec.fetch(Service.resource(server.baseURI + "/record"));
do_check_eq(rec.id, "asdf-1234-asdf-1234"); // NOT "record"!

do_check_eq(rec.modified, 2454725.98283);
@@ -59,7 +60,7 @@ function test_fetch() {
do_check_eq(rec.payload.cheese, "roquefort");

_("Fetching a WBO record using the record manager");
-let rec2 = Service.recordManager.get(server.baseURI + "/record2");
+let rec2 = await Service.recordManager.get(server.baseURI + "/record2");
do_check_eq(rec2.id, "record2");
do_check_eq(rec2.modified, 2454725.98284);
do_check_eq(typeof(rec2.payload), "object");
@@ -72,13 +73,6 @@ function test_fetch() {
do_check_eq(rec3.collection, "tabs");

} finally {
-server.stop(do_test_finished);
+await promiseStopServer(server);
}
-}
+});
-
-function run_test() {
-initTestLogging("Trace");
-
-test_toJSON();
-test_fetch();
-}
@@ -148,11 +148,9 @@ function server_headers(metadata, response) {
response.bodyOutputStream.write(body, body.length);
}

-function run_test() {
+add_task(async function test() {
initTestLogging("Trace");

-do_test_pending();
-
let logger = Log.repository.getLogger("Test");
Log.repository.rootLogger.addAppender(new Log.DumpAppender());

@@ -179,7 +177,7 @@ function run_test() {
PACSystemSettings.PACURI = server.baseURI + "/pac1";
installFakePAC();
let proxiedRes = new Resource(server.baseURI + "/open");
-let content = proxiedRes.get();
+let content = await proxiedRes.get();
do_check_true(pacFetched);
do_check_true(fetched);
do_check_eq(content, "This path exists");
@@ -198,7 +196,7 @@ function run_test() {
do_check_eq(res.data, null);

_("GET a non-password-protected resource");
-content = res.get();
+content = await res.get();
do_check_eq(content, "This path exists");
do_check_eq(content.status, 200);
do_check_true(content.success);
@@ -231,7 +229,7 @@ function run_test() {

_("GET a password protected resource (test that it'll fail w/o pass, no throw)");
let res2 = new Resource(server.baseURI + "/protected");
-content = res2.get();
+content = await res2.get();
do_check_eq(content, "This path exists and is protected - failed");
do_check_eq(content.status, 401);
do_check_false(content.success);
@@ -244,14 +242,14 @@ function run_test() {
let auth = browseridManager.getResourceAuthenticator();
res3.authenticator = auth;
do_check_eq(res3.authenticator, auth);
-content = res3.get();
+content = await res3.get();
do_check_eq(content, "This path exists and is protected");
do_check_eq(content.status, 200);
do_check_true(content.success);

_("GET a non-existent resource (test that it'll fail, but not throw)");
let res4 = new Resource(server.baseURI + "/404");
-content = res4.get();
+content = await res4.get();
do_check_eq(content, "File not found");
do_check_eq(content.status, 404);
do_check_false(content.success);
@@ -263,66 +261,66 @@ function run_test() {

_("PUT to a resource (string)");
let res5 = new Resource(server.baseURI + "/upload");
-content = res5.put(JSON.stringify(sample_data));
+content = await res5.put(JSON.stringify(sample_data));
do_check_eq(content, "Valid data upload via PUT");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("PUT to a resource (object)");
-content = res5.put(sample_data);
+content = await res5.put(sample_data);
do_check_eq(content, "Valid data upload via PUT");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("PUT without data arg (uses resource.data) (string)");
res5.data = JSON.stringify(sample_data);
-content = res5.put();
+content = await res5.put();
do_check_eq(content, "Valid data upload via PUT");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("PUT without data arg (uses resource.data) (object)");
res5.data = sample_data;
-content = res5.put();
+content = await res5.put();
do_check_eq(content, "Valid data upload via PUT");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("POST to a resource (string)");
-content = res5.post(JSON.stringify(sample_data));
+content = await res5.post(JSON.stringify(sample_data));
do_check_eq(content, "Valid data upload via POST");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("POST to a resource (object)");
-content = res5.post(sample_data);
+content = await res5.post(sample_data);
do_check_eq(content, "Valid data upload via POST");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("POST without data arg (uses resource.data) (string)");
res5.data = JSON.stringify(sample_data);
-content = res5.post();
+content = await res5.post();
do_check_eq(content, "Valid data upload via POST");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("POST without data arg (uses resource.data) (object)");
res5.data = sample_data;
-content = res5.post();
+content = await res5.post();
do_check_eq(content, "Valid data upload via POST");
do_check_eq(content.status, 200);
do_check_eq(res5.data, content);

_("DELETE a resource");
let res6 = new Resource(server.baseURI + "/delete");
-content = res6.delete();
+content = await res6.delete();
do_check_eq(content, "This resource has been deleted")
do_check_eq(content.status, 200);

_("JSON conversion of response body");
let res7 = new Resource(server.baseURI + "/json");
-content = res7.get();
+content = await res7.get();
do_check_eq(content, JSON.stringify(sample_data));
do_check_eq(content.status, 200);
do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
@@ -332,26 +330,26 @@ function run_test() {
// X-Weave-Timestamp header, AsyncResource.serverTime is null.
do_check_eq(AsyncResource.serverTime, null);
let res8 = new Resource(server.baseURI + "/timestamp");
-content = res8.get();
+content = await res8.get();
do_check_eq(AsyncResource.serverTime, TIMESTAMP);

_("GET: no special request headers");
let res9 = new Resource(server.baseURI + "/headers");
-content = res9.get();
+content = await res9.get();
do_check_eq(content, "{}");

_("PUT: Content-Type defaults to text/plain");
-content = res9.put("data");
+content = await res9.put("data");
do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));

_("POST: Content-Type defaults to text/plain");
-content = res9.post("data");
+content = await res9.post("data");
do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));

_("setHeader(): setting simple header");
res9.setHeader("X-What-Is-Weave", "awesome");
do_check_eq(res9.headers["x-what-is-weave"], "awesome");
-content = res9.get();
+content = await res9.get();
do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));

_("setHeader(): setting multiple headers, overwriting existing header");
@@ -359,21 +357,21 @@ function run_test() {
res9.setHeader("X-Another-Header", "hello world");
do_check_eq(res9.headers["x-what-is-weave"], "more awesomer");
do_check_eq(res9.headers["x-another-header"], "hello world");
-content = res9.get();
+content = await res9.get();
do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
"x-what-is-weave": "more awesomer"}));

_("Setting headers object");
res9.headers = {};
-content = res9.get();
+content = await res9.get();
do_check_eq(content, "{}");

_("PUT/POST: override default Content-Type");
res9.setHeader("Content-Type", "application/foobar");
do_check_eq(res9.headers["content-type"], "application/foobar");
-content = res9.put("data");
+content = await res9.put("data");
do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-content = res9.post("data");
+content = await res9.post("data");
do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));


@@ -385,7 +383,7 @@ function run_test() {
Observers.add("weave:service:backoff:interval", onBackoff);

let res10 = new Resource(server.baseURI + "/backoff");
-content = res10.get();
+content = await res10.get();
do_check_eq(backoffInterval, 600);


@@ -397,12 +395,12 @@ function run_test() {
Observers.add("weave:service:quota:remaining", onQuota);

res10 = new Resource(server.baseURI + "/quota-error");
-content = res10.get();
+content = await res10.get();
do_check_eq(content.status, 400);
do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.

res10 = new Resource(server.baseURI + "/quota-notice");
-content = res10.get();
+content = await res10.get();
do_check_eq(content.status, 200);
do_check_eq(quotaValue, 1048576);

@ -411,7 +409,7 @@ function run_test() {
|
|||||||
let error;
|
let error;
|
||||||
let res11 = new Resource("http://localhost:12345/does/not/exist");
|
let res11 = new Resource("http://localhost:12345/does/not/exist");
|
||||||
try {
|
try {
|
||||||
content = res11.get();
|
content = await res11.get();
|
||||||
} catch (ex) {
|
} catch (ex) {
|
||||||
error = ex;
|
error = ex;
|
||||||
}
|
}
|
||||||
@ -430,14 +428,14 @@ function run_test() {
|
|||||||
res18._log.warn = function(msg) { warnings.push(msg) };
|
res18._log.warn = function(msg) { warnings.push(msg) };
|
||||||
error = undefined;
|
error = undefined;
|
||||||
try {
|
try {
|
||||||
content = res18.get();
|
content = await res18.get();
|
||||||
} catch (ex) {
|
} catch (ex) {
|
||||||
error = ex;
|
error = ex;
|
||||||
}
|
}
|
||||||
|
|
||||||
// It throws and logs.
|
// It throws and logs.
|
||||||
do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
|
do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
|
||||||
do_check_eq(error, "Error: NS_ERROR_MALFORMED_URI");
|
do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
|
||||||
// Note the strings haven't been formatted yet, but that's OK for this test.
|
// Note the strings haven't been formatted yet, but that's OK for this test.
|
||||||
do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
|
do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
|
||||||
do_check_eq(warnings.pop(),
|
do_check_eq(warnings.pop(),
|
||||||
@ -455,14 +453,14 @@ function run_test() {
|
|||||||
res18._log.warn = function(msg) { warnings.push(msg) };
|
res18._log.warn = function(msg) { warnings.push(msg) };
|
||||||
error = undefined;
|
error = undefined;
|
||||||
try {
|
try {
|
||||||
content = res18.get();
|
content = await res18.get();
|
||||||
} catch (ex) {
|
} catch (ex) {
|
||||||
error = ex;
|
error = ex;
|
||||||
}
|
}
|
||||||
|
|
||||||
// It throws and logs.
|
// It throws and logs.
|
||||||
do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
|
do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
|
||||||
do_check_eq(error, "Error: NS_ERROR_XPC_JS_THREW_STRING");
|
do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
|
||||||
do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
|
do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
|
||||||
do_check_eq(warnings.pop(),
|
do_check_eq(warnings.pop(),
|
||||||
"Got exception calling onProgress handler during fetch of " +
|
"Got exception calling onProgress handler during fetch of " +
|
||||||
@ -475,7 +473,7 @@ function run_test() {
|
|||||||
res19.ABORT_TIMEOUT = 0;
|
res19.ABORT_TIMEOUT = 0;
|
||||||
error = undefined;
|
error = undefined;
|
||||||
try {
|
try {
|
||||||
content = res19.get();
|
content = await res19.get();
|
||||||
} catch (ex) {
|
} catch (ex) {
|
||||||
error = ex;
|
error = ex;
|
||||||
}
|
}
|
||||||
@ -496,4 +494,4 @@ function run_test() {
|
|||||||
uri2.query = query;
|
uri2.query = query;
|
||||||
do_check_eq(uri1.query, uri2.query);
|
do_check_eq(uri1.query, uri2.query);
|
||||||
server.stop(do_test_finished);
|
server.stop(do_test_finished);
|
||||||
}
|
});
|
||||||
|
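The hunks above all apply the same mechanical change: a callback-style or synchronous resource call becomes an awaited call inside an async test body, so assertions read top to bottom and failures surface as rejections. A minimal, self-contained sketch of that before/after shape, using hypothetical fetchBody and assertEqual helpers rather than the real Sync Resource or xpcshell APIs:

// Hypothetical helpers standing in for Resource.get() and do_check_eq().
function fetchBodyLegacy(path, callback) {
  // Callback style: the continuation receives (error, body).
  Promise.resolve("This path exists").then(body => callback(null, body), callback);
}

async function fetchBody(path) {
  // Promise style: the body is simply returned from an async function.
  return "This path exists";
}

function assertEqual(actual, expected) {
  if (actual !== expected) {
    throw new Error(`Expected "${expected}", got "${actual}"`);
  }
}

// Before: assertions live inside the callback.
fetchBodyLegacy("/open", (error, body) => {
  assertEqual(error, null);
  assertEqual(body, "This path exists");
});

// After: the test body awaits the call and asserts on the result directly.
(async () => {
  let body = await fetchBody("/open");
  assertEqual(body, "This path exists");
})();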
@@ -163,7 +163,7 @@ function run_test() {

// This apparently has to come first in order for our PAC URL to be hit.
// Don't put any other HTTP requests earlier in the file!
-add_test(function test_proxy_auth_redirect() {
+add_task(async function test_proxy_auth_redirect() {
  _("Ensure that a proxy auth redirect (which switches out our channel) " +
    "doesn't break AsyncResource.");
  let server = httpd_setup({

@@ -174,18 +174,16 @@ add_test(function test_proxy_auth_redirect() {
  PACSystemSettings.PACURI = server.baseURI + "/pac2";
  installFakePAC();
  let res = new AsyncResource(server.baseURI + "/open");
-  res.get(function(error, result) {
-    do_check_true(!error);
-    do_check_true(pacFetched);
-    do_check_true(fetched);
-    do_check_eq("This path exists", result);
-    pacFetched = fetched = false;
-    uninstallFakePAC();
-    server.stop(run_next_test);
-  });
+  let result = await res.get();
+  do_check_true(pacFetched);
+  do_check_true(fetched);
+  do_check_eq("This path exists", result);
+  pacFetched = fetched = false;
+  uninstallFakePAC();
+  await promiseStopServer(server);
});

-add_test(function test_new_channel() {
+add_task(async function test_new_channel() {
  _("Ensure a redirect to a new channel is handled properly.");

  let resourceRequested = false;

@@ -210,15 +208,13 @@ add_test(function test_new_channel() {
  locationURL = server.baseURI + "/resource";

  let request = new AsyncResource(server.baseURI + "/redirect");
-  request.get(function onRequest(error, content) {
-    do_check_null(error);
-    do_check_true(resourceRequested);
-    do_check_eq(200, content.status);
-    do_check_true("content-type" in content.headers);
-    do_check_eq("text/plain", content.headers["content-type"]);
-
-    server.stop(run_next_test);
-  });
+  let content = await request.get()
+  do_check_true(resourceRequested);
+  do_check_eq(200, content.status);
+  do_check_true("content-type" in content.headers);
+  do_check_eq("text/plain", content.headers["content-type"]);
+
+  await promiseStopServer(server);
});


@@ -259,42 +255,38 @@ add_test(function test_members() {
  run_next_test();
});

-add_test(function test_get() {
+add_task(async function test_get() {
  _("GET a non-password-protected resource");
  let res = new AsyncResource(server.baseURI + "/open");
-  res.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res.get();
  do_check_eq(content, "This path exists");
  do_check_eq(content.status, 200);
  do_check_true(content.success);
  // res.data has been updated with the result from the request
  do_check_eq(res.data, content);

  // Observe logging messages.
  let resLogger = res._log;
  let dbg = resLogger.debug;
  let debugMessages = [];
  resLogger.debug = function(msg) {
    debugMessages.push(msg);
    dbg.call(this, msg);
  }

  // Since we didn't receive proper JSON data, accessing content.obj
  // will result in a SyntaxError from JSON.parse
  let didThrow = false;
  try {
    content.obj;
  } catch (ex) {
    didThrow = true;
  }
  do_check_true(didThrow);
  do_check_eq(debugMessages.length, 1);
  do_check_eq(debugMessages[0],
              "Parse fail: Response body starts: \"\"This path exists\"\".");
  resLogger.debug = dbg;

-  run_next_test();
-  });
});

add_test(function test_basicauth() {
@@ -307,19 +299,16 @@ add_test(function test_basicauth() {
  run_next_test();
});

-add_test(function test_get_protected_fail() {
+add_task(async function test_get_protected_fail() {
  _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
  let res2 = new AsyncResource(server.baseURI + "/protected");
-  res2.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res2.get()
  do_check_eq(content, "This path exists and is protected - failed");
  do_check_eq(content.status, 401);
  do_check_false(content.success);
-
-    run_next_test();
-  });
});

-add_test(function test_get_protected_success() {
+add_task(async function test_get_protected_success() {
  _("GET a password protected resource");
  let identityConfig = makeIdentityConfig();
  let browseridManager = new BrowserIDManager();

@@ -328,262 +317,197 @@ add_test(function test_get_protected_success() {
  let res3 = new AsyncResource(server.baseURI + "/protected");
  res3.authenticator = auth;
  do_check_eq(res3.authenticator, auth);
-  res3.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res3.get();
  do_check_eq(content, "This path exists and is protected");
  do_check_eq(content.status, 200);
  do_check_true(content.success);
-
-    run_next_test();
-  });
});

-add_test(function test_get_404() {
+add_task(async function test_get_404() {
  _("GET a non-existent resource (test that it'll fail, but not throw)");
  let res4 = new AsyncResource(server.baseURI + "/404");
-  res4.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res4.get();
  do_check_eq(content, "File not found");
  do_check_eq(content.status, 404);
  do_check_false(content.success);

  // Check some headers of the 404 response
  do_check_eq(content.headers.connection, "close");
  do_check_eq(content.headers.server, "httpd.js");
  do_check_eq(content.headers["content-length"], 14);
-
-    run_next_test();
-  });
});

-add_test(function test_put_string() {
+add_task(async function test_put_string() {
  _("PUT to a resource (string)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.put(JSON.stringify(sample_data), function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.put(JSON.stringify(sample_data));
  do_check_eq(content, "Valid data upload via PUT");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_put_object() {
+add_task(async function test_put_object() {
  _("PUT to a resource (object)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.put(sample_data, function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.put(sample_data);
  do_check_eq(content, "Valid data upload via PUT");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_put_data_string() {
+add_task(async function test_put_data_string() {
  _("PUT without data arg (uses resource.data) (string)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
  res_upload.data = JSON.stringify(sample_data);
-  res_upload.put(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.put();
  do_check_eq(content, "Valid data upload via PUT");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_put_data_object() {
+add_task(async function test_put_data_object() {
  _("PUT without data arg (uses resource.data) (object)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
  res_upload.data = sample_data;
-  res_upload.put(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.put();
  do_check_eq(content, "Valid data upload via PUT");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_post_string() {
+add_task(async function test_post_string() {
  _("POST to a resource (string)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.post(JSON.stringify(sample_data), function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.post(JSON.stringify(sample_data));
  do_check_eq(content, "Valid data upload via POST");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_post_object() {
+add_task(async function test_post_object() {
  _("POST to a resource (object)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.post(sample_data, function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.post(sample_data);
  do_check_eq(content, "Valid data upload via POST");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_post_data_string() {
+add_task(async function test_post_data_string() {
  _("POST without data arg (uses resource.data) (string)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
  res_upload.data = JSON.stringify(sample_data);
-  res_upload.post(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.post();
  do_check_eq(content, "Valid data upload via POST");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});

-add_test(function test_post_data_object() {
+add_task(async function test_post_data_object() {
  _("POST without data arg (uses resource.data) (object)");
  let res_upload = new AsyncResource(server.baseURI + "/upload");
  res_upload.data = sample_data;
-  res_upload.post(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_upload.post();
  do_check_eq(content, "Valid data upload via POST");
  do_check_eq(content.status, 200);
  do_check_eq(res_upload.data, content);
-
-    run_next_test();
-  });
});
-add_test(function test_delete() {
+add_task(async function test_delete() {
  _("DELETE a resource");
  let res6 = new AsyncResource(server.baseURI + "/delete");
-  res6.delete(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res6.delete();
  do_check_eq(content, "This resource has been deleted");
  do_check_eq(content.status, 200);
-
-    run_next_test();
-  });
});

-add_test(function test_json_body() {
+add_task(async function test_json_body() {
  _("JSON conversion of response body");
  let res7 = new AsyncResource(server.baseURI + "/json");
-  res7.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res7.get();
  do_check_eq(content, JSON.stringify(sample_data));
  do_check_eq(content.status, 200);
  do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
-
-    run_next_test();
-  });
});

-add_test(function test_weave_timestamp() {
+add_task(async function test_weave_timestamp() {
  _("X-Weave-Timestamp header updates AsyncResource.serverTime");
  // Before having received any response containing the
  // X-Weave-Timestamp header, AsyncResource.serverTime is null.
  do_check_eq(AsyncResource.serverTime, null);
  let res8 = new AsyncResource(server.baseURI + "/timestamp");
-  res8.get(function(error, content) {
-    do_check_eq(error, null);
+  await res8.get();
  do_check_eq(AsyncResource.serverTime, TIMESTAMP);
-
-    run_next_test();
-  });
});

-add_test(function test_get_no_headers() {
+add_task(async function test_get_no_headers() {
  _("GET: no special request headers");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.get();
  do_check_eq(content, "{}");
-
-    run_next_test();
-  });
});

-add_test(function test_put_default_content_type() {
+add_task(async function test_put_default_content_type() {
  _("PUT: Content-Type defaults to text/plain");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.put("data", function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.put("data");
  do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
-
-    run_next_test();
-  });
});

-add_test(function test_post_default_content_type() {
+add_task(async function test_post_default_content_type() {
  _("POST: Content-Type defaults to text/plain");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.post("data", function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.post("data");
  do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
-
-    run_next_test();
-  });
});

-add_test(function test_setHeader() {
+add_task(async function test_setHeader() {
  _("setHeader(): setting simple header");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
  res_headers.setHeader("X-What-Is-Weave", "awesome");
  do_check_eq(res_headers.headers["x-what-is-weave"], "awesome");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.get();
  do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
-
-    run_next_test();
-  });
});

-add_test(function test_setHeader_overwrite() {
+add_task(async function test_setHeader_overwrite() {
  _("setHeader(): setting multiple headers, overwriting existing header");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
  res_headers.setHeader("X-WHAT-is-Weave", "more awesomer");
  res_headers.setHeader("X-Another-Header", "hello world");
  do_check_eq(res_headers.headers["x-what-is-weave"], "more awesomer");
  do_check_eq(res_headers.headers["x-another-header"], "hello world");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.get();
  do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
                                       "x-what-is-weave": "more awesomer"}));
-
-    run_next_test();
-  });
});

-add_test(function test_headers_object() {
+add_task(async function test_headers_object() {
  _("Setting headers object");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
  res_headers.headers = {};
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.get();
  do_check_eq(content, "{}");
-
-    run_next_test();
-  });
});

-add_test(function test_put_override_content_type() {
+add_task(async function test_put_override_content_type() {
  _("PUT: override default Content-Type");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
  res_headers.setHeader("Content-Type", "application/foobar");
  do_check_eq(res_headers.headers["content-type"], "application/foobar");
-  res_headers.put("data", function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.put("data");
  do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-
-    run_next_test();
-  });
});

-add_test(function test_post_override_content_type() {
+add_task(async function test_post_override_content_type() {
  _("POST: override default Content-Type");
  let res_headers = new AsyncResource(server.baseURI + "/headers");
  res_headers.setHeader("Content-Type", "application/foobar");
-  res_headers.post("data", function(error, content) {
-    do_check_eq(error, null);
+  let content = await res_headers.post("data");
  do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-
-    run_next_test();
-  });
});

-add_test(function test_weave_backoff() {
+add_task(async function test_weave_backoff() {
  _("X-Weave-Backoff header notifies observer");
  let backoffInterval;
  function onBackoff(subject, data) {

@@ -592,46 +516,37 @@ add_test(function test_weave_backoff() {
  Observers.add("weave:service:backoff:interval", onBackoff);

  let res10 = new AsyncResource(server.baseURI + "/backoff");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
+  await res10.get();
  do_check_eq(backoffInterval, 600);
-
-    run_next_test();
-  });
});

-add_test(function test_quota_error() {
+add_task(async function test_quota_error() {
  _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
  let res10 = new AsyncResource(server.baseURI + "/quota-error");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res10.get();
  do_check_eq(content.status, 400);
  do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
-
-    run_next_test();
-  });
});

-add_test(function test_quota_notice() {
+add_task(async function test_quota_notice() {
  let res10 = new AsyncResource(server.baseURI + "/quota-notice");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
+  let content = await res10.get();
  do_check_eq(content.status, 200);
  do_check_eq(quotaValue, 1048576);
-
-    run_next_test();
-  });
});

-add_test(function test_preserve_exceptions() {
+add_task(async function test_preserve_exceptions() {
  _("Error handling in ChannelListener etc. preserves exception information");
  let res11 = new AsyncResource("http://localhost:12345/does/not/exist");
-  res11.get(function(error, content) {
+  await Assert.rejects(res11.get(), error => {
    do_check_neq(error, null);
    do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
    do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
-    run_next_test();
+    return true;
  });
});

-add_test(function test_xpc_exception_handling() {
+add_task(async function test_xpc_exception_handling() {
  _("Exception handling inside fetches.");
  let res14 = new AsyncResource(server.baseURI + "/json");
  res14._onProgress = function(rec) {

@@ -641,19 +556,19 @@ add_test(function test_xpc_exception_handling() {
  let warnings = [];
  res14._log.warn = function(msg) { warnings.push(msg); };

-  res14.get(function(error, content) {
+  await Assert.rejects(res14.get(), error => {
    do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
    do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
-    do_check_eq(content, null);
-    do_check_eq(warnings.pop(),
-                "Got exception calling onProgress handler during fetch of " +
-                server.baseURI + "/json");
-
-    run_next_test();
+    return true;
  });
+  do_check_eq(warnings.pop(),
+              "${action} request to ${url} failed: ${ex}");
+  do_check_eq(warnings.pop(),
+              "Got exception calling onProgress handler during fetch of " +
+              server.baseURI + "/json");
});

-add_test(function test_js_exception_handling() {
+add_task(async function test_js_exception_handling() {
  _("JS exception handling inside fetches.");
  let res15 = new AsyncResource(server.baseURI + "/json");
  res15._onProgress = function(rec) {

@@ -662,25 +577,25 @@ add_test(function test_js_exception_handling() {
  let warnings = [];
  res15._log.warn = function(msg) { warnings.push(msg); };

-  res15.get(function(error, content) {
+  await Assert.rejects(res15.get(), error => {
    do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
    do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
-    do_check_eq(content, null);
-    do_check_eq(warnings.pop(),
-                "Got exception calling onProgress handler during fetch of " +
-                server.baseURI + "/json");
-
-    run_next_test();
+    return true;
  });
+  do_check_eq(warnings.pop(),
+              "${action} request to ${url} failed: ${ex}");
+  do_check_eq(warnings.pop(),
+              "Got exception calling onProgress handler during fetch of " +
+              server.baseURI + "/json");
});

-add_test(function test_timeout() {
+add_task(async function test_timeout() {
  _("Ensure channel timeouts are thrown appropriately.");
  let res19 = new AsyncResource(server.baseURI + "/json");
  res19.ABORT_TIMEOUT = 0;
-  res19.get(function(error, content) {
+  await Assert.rejects(res19.get(), error => {
    do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
-    run_next_test();
+    return true;
  });
});

@@ -703,21 +618,6 @@ add_test(function test_uri_construction() {
  run_next_test();
});

-add_test(function test_not_sending_cookie() {
-  let cookieSer = Cc["@mozilla.org/cookieService;1"]
-                    .getService(Ci.nsICookieService);
-  let uri = CommonUtils.makeURI(server.baseURI);
-  cookieSer.setCookieString(uri, null, "test=test; path=/;", null);
-
-  let res = new AsyncResource(server.baseURI + "/test");
-  res.get(function(error) {
-    do_check_null(error);
-    do_check_true(this.response.success);
-    do_check_eq("COOKIE!", this.response.body);
-    server.stop(run_next_test);
-  });
-});
-
/**
 * End of tests that rely on a single HTTP server.
 * All tests after this point must begin and end their own.
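Where the old callbacks inspected an error argument, the rewritten tests above assert on the rejection itself via Assert.rejects(promise, predicate). A rough, self-contained sketch of that pattern with a hand-rolled assertRejects helper; this mirrors the shape used above and is an assumption about the helper's behaviour, not its actual implementation:

// Resolve only if the promise rejects and the rejection satisfies the predicate.
async function assertRejects(promise, predicate) {
  let rejection = null;
  try {
    await promise;
  } catch (error) {
    rejection = error;
  }
  if (rejection === null) {
    throw new Error("Expected the promise to reject, but it resolved");
  }
  if (!predicate(rejection)) {
    throw new Error(`Rejection did not match predicate: ${rejection.message}`);
  }
}

// Stand-in for res.get() against an unreachable host.
function failingGet() {
  return Promise.reject(new Error("NS_ERROR_CONNECTION_REFUSED"));
}

(async () => {
  await assertRejects(failingGet(), error => {
    // Same check as test_preserve_exceptions above, minus the XPCOM result code.
    return error.message === "NS_ERROR_CONNECTION_REFUSED";
  });
  console.log("rejection asserted");
})();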
@@ -46,7 +46,7 @@ function triggerRedirect() {
  prefs.setCharPref("autoconfig_url", "data:text/plain," + PROXY_FUNCTION);
}

-add_test(function test_headers_copied() {
+add_task(async function test_headers_copied() {
  triggerRedirect();

  _("Issuing request.");

@@ -54,12 +54,12 @@ add_test(function test_headers_copied() {
  resource.setHeader("Authorization", "Basic foobar");
  resource.setHeader("X-Foo", "foofoo");

-  let result = resource.get(TEST_URL);
+  let result = await resource.get(TEST_URL);
  _("Result: " + result);

  do_check_eq(result, BODY);
  do_check_eq(auth, "Basic foobar");
  do_check_eq(foo, "foofoo");

-  httpServer.stop(run_next_test);
+  await promiseStopServer(httpServer);
});

@@ -57,39 +57,33 @@ add_test(function test_fetchInfo() {
  run_next_test();
});

-add_test(function test_desktop_post() {
+add_task(async function test_desktop_post() {
  _("Testing direct Resource POST.");
  let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.post("foo=bar", function(error, content) {
+  await r.post("foo=bar");
  _("User-Agent: " + ua);
  do_check_eq(ua, expectedUA + ".desktop");
  ua = "";
-    run_next_test();
-  });
});

-add_test(function test_desktop_get() {
+add_task(async function test_desktop_get() {
  _("Testing async.");
  Svc.Prefs.set("client.type", "desktop");
  let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.get(function(error, content) {
+  await r.get();
  _("User-Agent: " + ua);
  do_check_eq(ua, expectedUA + ".desktop");
  ua = "";
-    run_next_test();
-  });
});

-add_test(function test_mobile_get() {
+add_task(async function test_mobile_get() {
  _("Testing mobile.");
  Svc.Prefs.set("client.type", "mobile");
  let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.get(function(error, content) {
+  await r.get();
  _("User-Agent: " + ua);
  do_check_eq(ua, expectedUA + ".mobile");
  ua = "";
-    run_next_test();
-  });
});

add_test(function tear_down() {
@@ -113,11 +113,11 @@ add_task(async function v4_upgrade() {
  let serverResp;


-  function retrieve_server_default() {
+  async function retrieve_server_default() {
    serverKeys = serverResp = serverDecrypted = null;

    serverKeys = new CryptoWrapper("crypto", "keys");
-    serverResp = serverKeys.fetch(Service.resource(Service.cryptoKeysURL)).response;
+    serverResp = (await serverKeys.fetch(Service.resource(Service.cryptoKeysURL))).response;
    do_check_true(serverResp.success);

    serverDecrypted = serverKeys.decrypt(Service.identity.syncKeyBundle);

@@ -127,8 +127,8 @@ add_task(async function v4_upgrade() {
    return serverDecrypted.default;
  }

-  function retrieve_and_compare_default(should_succeed) {
-    let serverDefault = retrieve_server_default();
+  async function retrieve_and_compare_default(should_succeed) {
+    let serverDefault = await retrieve_server_default();
    let localDefault = Service.collectionKeys.keyForCollection().keyPairB64;

    _("Retrieved keyBundle: " + JSON.stringify(serverDefault));

@@ -141,26 +141,26 @@ add_task(async function v4_upgrade() {
  }

  // Uses the objects set above.
-  function set_server_keys(pair) {
+  async function set_server_keys(pair) {
    serverDecrypted.default = pair;
    serverKeys.cleartext = serverDecrypted;
    serverKeys.encrypt(Service.identity.syncKeyBundle);
-    serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+    await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
  }

  _("Checking we have the latest keys.");
-  retrieve_and_compare_default(true);
+  await retrieve_and_compare_default(true);

  _("Update keys on server.");
-  set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
+  await set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
                    "aaaaaaaaaaaapxMO6TEWtLIOv9dj6kBAJdzhWDkkkis="]);

  _("Checking that we no longer have the latest keys.");
-  retrieve_and_compare_default(false);
+  await retrieve_and_compare_default(false);

  _("Indeed, they're what we set them to...");
  do_check_eq("KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
-              retrieve_server_default()[0]);
+              (await retrieve_server_default())[0]);

  _("Sync. Should download changed keys automatically.");
  let oldClientsModified = collections.clients;

@@ -175,7 +175,7 @@ add_task(async function v4_upgrade() {
  do_check_true(collections.tabs > oldTabsModified);

  _("... and keys will now match.");
-  retrieve_and_compare_default(true);
+  await retrieve_and_compare_default(true);

  // Clean up.
  Service.startOver();

@@ -237,12 +237,12 @@ add_task(async function v5_upgrade() {
  // Test an upgrade where the contents of the server would cause us to error
  // -- keys decrypted with a different sync key, for example.
  _("Testing v4 -> v5 (or similar) upgrade.");
-  function update_server_keys(syncKeyBundle, wboName, collWBO) {
+  async function update_server_keys(syncKeyBundle, wboName, collWBO) {
    generateNewKeys(Service.collectionKeys);
    let serverKeys = Service.collectionKeys.asWBO("crypto", wboName);
    serverKeys.encrypt(syncKeyBundle);
    let res = Service.resource(Service.storageURL + collWBO);
-    do_check_true(serverKeys.upload(res).success);
+    do_check_true((await serverKeys.upload(res)).success);
  }

  _("Bumping version.");

@@ -250,14 +250,14 @@ add_task(async function v5_upgrade() {
  let m = new WBORecord("meta", "global");
  m.payload = {"syncID": "foooooooooooooooooooooooooo",
               "storageVersion": STORAGE_VERSION + 1};
-  m.upload(Service.resource(Service.metaURL));
+  await m.upload(Service.resource(Service.metaURL));

  _("New meta/global: " + JSON.stringify(meta_global));

  // Fill the keys with bad data.
  let badKeys = new SyncKeyBundle("foobar", "aaaaaaaaaaaaaaaaaaaaaaaaaa");
-  update_server_keys(badKeys, "keys", "crypto/keys"); // v4
-  update_server_keys(badKeys, "bulk", "crypto/bulk"); // v5
+  await update_server_keys(badKeys, "keys", "crypto/keys"); // v4
+  await update_server_keys(badKeys, "bulk", "crypto/bulk"); // v5

  _("Generating new keys.");
  generateNewKeys(Service.collectionKeys);

@@ -14,7 +14,7 @@ BlaEngine.prototype = {
  __proto__: SyncEngine.prototype,

  removed: false,
-  removeClientData() {
+  async removeClientData() {
    this.removed = true;
  }

@@ -85,7 +85,7 @@ add_task(async function run_test() {

  Service.login();
  _("Checking that remoteSetup returns true when credentials have changed.");
-  Service.recordManager.get(Service.metaURL).payload.syncID = "foobar";
+  (await Service.recordManager.get(Service.metaURL)).payload.syncID = "foobar";
  do_check_true(Service._remoteSetup());

  let returnStatusCode = (method, code) => (oldMethod) => (req, res) => {

@@ -265,7 +265,7 @@ add_task(async function test_enabledRemotely() {
  _("Upload some keys to avoid a fresh start.");
  let wbo = Service.collectionKeys.generateNewKeysWBO();
  wbo.encrypt(Service.identity.syncKeyBundle);
-  do_check_eq(200, wbo.upload(Service.resource(Service.cryptoKeysURL)).status);
+  do_check_eq(200, (await wbo.upload(Service.resource(Service.cryptoKeysURL))).status);

  _("Engine is disabled.");
  do_check_false(engine.enabled);

@@ -99,7 +99,7 @@ add_task(async function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {

  // Confirm initial environment
  do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
-  let metaGlobal = Service.recordManager.get(engine.metaURL);
+  let metaGlobal = await Service.recordManager.get(engine.metaURL);
  do_check_eq(metaGlobal.payload.engines, undefined);
  do_check_true(!!collection.payload("flying"));
  do_check_true(!!collection.payload("scotsman"));

@@ -63,7 +63,7 @@ async function setUp(server) {
  generateNewKeys(Service.collectionKeys);
  let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
  serverKeys.encrypt(Service.identity.syncKeyBundle);
-  let result = serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+  let result = (await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success;
  return result;
}
@@ -38,35 +38,33 @@ function getWarningMessages(log) {
  return warnMessages;
}

-add_test(function test_resource_logs_content_length_mismatch() {
+add_task(async function test_resource_logs_content_length_mismatch() {
  _("Issuing request.");
  let httpServer = httpd_setup({"/content": contentHandler});
  let resource = new Resource(httpServer.baseURI + "/content");

  let warnMessages = getWarningMessages(resource._log);
-  let result = resource.get();
+  let result = await resource.get();

  notEqual(warnMessages.length, 0, "test that a warning was logged");
  notEqual(result.length, contentLength);
  equal(result, BODY);

-  httpServer.stop(run_next_test);
+  await promiseStopServer(httpServer);
});

-add_test(function test_async_resource_logs_content_length_mismatch() {
+add_task(async function test_async_resource_logs_content_length_mismatch() {
  _("Issuing request.");
  let httpServer = httpd_setup({"/content": contentHandler});
  let asyncResource = new AsyncResource(httpServer.baseURI + "/content");

  let warnMessages = getWarningMessages(asyncResource._log);

-  asyncResource.get(function(error, content) {
-    equal(error, null);
+  let content = await asyncResource.get();
  equal(content, BODY);
  notEqual(warnMessages.length, 0, "test that warning was logged");
  notEqual(content.length, contentLength);
-
-    httpServer.stop(run_next_test);
-  });
+  await promiseStopServer(httpServer);
});

add_test(function test_sync_storage_request_logs_content_length_mismatch() {
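promiseStopServer replaces the server.stop(run_next_test) continuation so a task can simply await server shutdown. Presumably it is a thin promise wrapper over the callback-taking stop(); a sketch under that assumption, with a stub server rather than the real httpd.js one:

// Assumed shape: turn a callback-taking stop() into an awaitable promise.
function promiseStopServer(server) {
  return new Promise(resolve => server.stop(resolve));
}

// Stub with the same stop(callback) contract as the test HTTP server.
const stubServer = {
  stop(callback) {
    setTimeout(callback, 0); // pretend shutdown completes asynchronously
  },
};

(async () => {
  await promiseStopServer(stubServer);
  console.log("server stopped");
})();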
@@ -672,7 +672,7 @@ var TPS = {
  Logger.logInfo(`About to perform validation for "${engineName}"`);
  let engine = Weave.Service.engineManager.get(engineName);
  let validator = new ValidatorType(engine);
-  let serverRecords = validator.getServerItems(engine);
+  let serverRecords = Async.promiseSpinningly(validator.getServerItems(engine));
  let clientRecords = Async.promiseSpinningly(validator.getClientItems());
  try {
    // This substantially improves the logs for addons while not making a
@@ -16,9 +16,9 @@ use gecko_bindings::structs::RawGeckoPresContextOwned;
 use media_queries::MediaType;
 use parser::ParserContext;
 use properties::ComputedValues;
-use std::ascii::AsciiExt;
 use std::fmt::{self, Write};
 use std::sync::Arc;
+use str::starts_with_ignore_ascii_case;
 use string_cache::Atom;
 use style_traits::ToCss;
 use style_traits::viewport::ViewportConstraints;
@@ -340,11 +340,6 @@ impl MediaExpressionValue {
 }
 }

-fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
-string.len() > prefix.len() &&
-string[0..prefix.len()].eq_ignore_ascii_case(prefix)
-}
-
 fn find_feature<F>(mut f: F) -> Option<&'static nsMediaFeature>
 where F: FnMut(&'static nsMediaFeature) -> bool,
 {
@@ -1753,6 +1753,13 @@ extern "C" {
 RawServoAnimationValueBorrowed)
 -> bool;
 }
+extern "C" {
+pub fn Servo_AnimationValues_ComputeDistance(from:
+RawServoAnimationValueBorrowed,
+to:
+RawServoAnimationValueBorrowed)
+-> f64;
+}
 extern "C" {
 pub fn Servo_AnimationValue_Serialize(value:
 RawServoAnimationValueBorrowed,
@@ -18,7 +18,7 @@ use shared_lock::{SharedRwLock, SharedRwLockReadGuard, Locked, ToCssWithGuard};
 use std::fmt;
 use std::sync::Arc;
 use style_traits::ToCss;
-use stylesheets::{CssRuleType, MemoryHoleReporter, Stylesheet};
+use stylesheets::{CssRuleType, MemoryHoleReporter, Stylesheet, VendorPrefix};

 /// A number from 0 to 1, indicating the percentage of the animation when this
 /// keyframe should run.
@@ -239,6 +239,8 @@ pub struct KeyframesAnimation {
 pub steps: Vec<KeyframesStep>,
 /// The properties that change in this animation.
 pub properties_changed: Vec<TransitionProperty>,
+/// Vendor prefix type the @keyframes has.
+pub vendor_prefix: Option<VendorPrefix>,
 }

 /// Get all the animated properties in a keyframes animation.
@@ -275,11 +277,14 @@ impl KeyframesAnimation {
 ///
 /// Otherwise, this will compute and sort the steps used for the animation,
 /// and return the animation object.
-pub fn from_keyframes(keyframes: &[Arc<Locked<Keyframe>>], guard: &SharedRwLockReadGuard)
+pub fn from_keyframes(keyframes: &[Arc<Locked<Keyframe>>],
+vendor_prefix: Option<VendorPrefix>,
+guard: &SharedRwLockReadGuard)
 -> Self {
 let mut result = KeyframesAnimation {
 steps: vec![],
 properties_changed: vec![],
+vendor_prefix: vendor_prefix,
 };

 if keyframes.is_empty() {
@@ -95,6 +95,19 @@
 self.0.interpolate(&other.0, progress).map(T)
 }
 }

+use properties::animated_properties::ComputeDistance;
+impl ComputeDistance for T {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_distance(&other.0)
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_squared_distance(&other.0)
+}
+}
 % endif
 }
@@ -785,3 +798,41 @@
 }
 }
 </%def>
+
+/// Macro for defining ComputeDistance trait for tuple struct which has Option<T>,
+/// e.g. struct T(pub Option<Au>).
+<%def name="impl_compute_distance_for_option_tuple(value_for_none)">
+impl ComputeDistance for T {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&T(Some(ref this)), &T(Some(ref other))) => {
+this.compute_distance(other)
+},
+(&T(Some(ref value)), &T(None)) |
+(&T(None), &T(Some(ref value)))=> {
+value.compute_distance(&${value_for_none})
+},
+(&T(None), &T(None)) => {
+Ok(0.0)
+},
+}
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&T(Some(ref this)), &T(Some(ref other))) => {
+this.compute_squared_distance(other)
+},
+(&T(Some(ref value)), &T(None)) |
+(&T(None), &T(Some(ref value))) => {
+value.compute_squared_distance(&${value_for_none})
+},
+(&T(None), &T(None)) => {
+Ok(0.0)
+},
+}
+}
+}
+</%def>
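For reference, a minimal self-contained Rust sketch of the behaviour the new impl_compute_distance_for_option_tuple macro generates once ${value_for_none} is substituted, here with Au(0) as in the doc comment's struct T(pub Option<Au>) example. The Au and T definitions below are illustrative stand-ins and not part of this commit: a missing side is measured against the supplied zero value, and two missing sides are at distance zero.

// Illustrative stand-ins; the real macro expands inside each longhand's
// computed_value module and defers to the inner type's ComputeDistance impl.
struct Au(i32);
struct T(pub Option<Au>);

fn compute_distance(a: &T, b: &T) -> Result<f64, ()> {
    match (a, b) {
        // Both values present: distance between the inner values.
        (&T(Some(ref this)), &T(Some(ref other))) => Ok((this.0 - other.0).abs() as f64),
        // One side missing: measure against the zero value handed to the macro, Au(0).
        (&T(Some(ref v)), &T(None)) | (&T(None), &T(Some(ref v))) => Ok(v.0.abs() as f64),
        // Both missing: nothing changes, so the distance is zero.
        (&T(None), &T(None)) => Ok(0.0),
    }
}

fn main() {
    assert_eq!(compute_distance(&T(Some(Au(8))), &T(None)), Ok(8.0));
    assert_eq!(compute_distance(&T(None), &T(None)), Ok(0.0));
}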
|
@ -10,7 +10,7 @@ use euclid::{Point2D, Size2D};
|
|||||||
#[cfg(feature = "gecko")] use gecko_bindings::structs::nsCSSPropertyID;
|
#[cfg(feature = "gecko")] use gecko_bindings::structs::nsCSSPropertyID;
|
||||||
use properties::{CSSWideKeyword, PropertyDeclaration};
|
use properties::{CSSWideKeyword, PropertyDeclaration};
|
||||||
use properties::longhands;
|
use properties::longhands;
|
||||||
use properties::longhands::background_size::computed_value::T as BackgroundSize;
|
use properties::longhands::background_size::computed_value::T as BackgroundSizeList;
|
||||||
use properties::longhands::font_weight::computed_value::T as FontWeight;
|
use properties::longhands::font_weight::computed_value::T as FontWeight;
|
||||||
use properties::longhands::line_height::computed_value::T as LineHeight;
|
use properties::longhands::line_height::computed_value::T as LineHeight;
|
||||||
use properties::longhands::text_shadow::computed_value::T as TextShadowList;
|
use properties::longhands::text_shadow::computed_value::T as TextShadowList;
|
||||||
@@ -696,14 +696,14 @@ impl Interpolate for VerticalAlign {
 }
 }
 }
-impl Interpolate for BackgroundSize {
+impl Interpolate for BackgroundSizeList {
 #[inline]
 fn interpolate(&self, other: &Self, progress: f64) -> Result<Self, ()> {
-self.0.interpolate(&other.0, progress).map(BackgroundSize)
+self.0.interpolate(&other.0, progress).map(BackgroundSizeList)
 }
 }


 /// https://drafts.csswg.org/css-transitions/#animtype-color
 impl Interpolate for RGBA {
 #[inline]
@@ -2032,3 +2032,614 @@ impl<T, U> Interpolate for Either<T, U>
 }
 }
 }
+
+
+/// We support ComputeDistance for an API in gecko to test the transition per property.
+impl ComputeDistance for AnimationValue {
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+% for prop in data.longhands:
+% if prop.animatable:
+% if prop.animation_type == "normal":
+(&AnimationValue::${prop.camel_case}(ref from),
+&AnimationValue::${prop.camel_case}(ref to)) => {
+from.compute_distance(to)
+},
+% else:
+(&AnimationValue::${prop.camel_case}(ref _from),
+&AnimationValue::${prop.camel_case}(ref _to)) => {
+Err(())
+},
+% endif
+% endif
+% endfor
+_ => {
+panic!("Expected compute_distance of computed values of the same \
+property, got: {:?}, {:?}", self, other);
+}
+}
+}
+}
+
+/// A trait used to implement [compute_distance].
+/// In order to compute the Euclidean distance of a list, we need to compute squared distance
+/// for each element, so the vector can sum it and then get its squared root as the distance.
+pub trait ComputeDistance: Sized {
+/// Compute distance between a value and another for a given property.
+fn compute_distance(&self, other: &Self) -> Result<f64, ()>;
+
+/// Compute squared distance between a value and another for a given property.
+/// This is used for list or if there are many components in a property value.
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_distance(other).map(|d| d * d)
+}
+}
+
+impl<T: ComputeDistance> ComputeDistance for Vec<T> {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+if self.len() != other.len() {
+return Err(());
+}
+
+let mut squared_dist = 0.0f64;
+for (this, other) in self.iter().zip(other) {
+let diff = try!(this.compute_squared_distance(other));
+squared_dist += diff;
+}
+Ok(squared_dist)
+}
+}
+
+impl ComputeDistance for Au {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_distance(&other.0)
+}
+}
+
+impl ComputeDistance for Auto {
+#[inline]
+fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+Err(())
+}
+}
+
+impl ComputeDistance for Normal {
+#[inline]
+fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+Err(())
+}
+}
+
+impl <T> ComputeDistance for Option<T>
+where T: ComputeDistance,
+{
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&Some(ref this), &Some(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(()),
+}
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&Some(ref this), &Some(ref other)) => {
+this.compute_squared_distance(other)
+},
+_ => Err(()),
+}
+}
+}
+
+impl ComputeDistance for f32 {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+Ok((*self - *other).abs() as f64)
+}
+}
+
+impl ComputeDistance for f64 {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+Ok((*self - *other).abs())
+}
+}
+
+impl ComputeDistance for i32 {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+Ok((*self - *other).abs() as f64)
+}
+}
+
+impl ComputeDistance for Visibility {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+if *self == *other {
+Ok(0.0)
+} else {
+Ok(1.0)
+}
+}
+}
+
+/// https://www.w3.org/TR/smil-animation/#animateColorElement says we should use Euclidean RGB-cube distance.
+impl ComputeDistance for RGBA {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+fn clamp(val: f32) -> f32 {
+val.max(0.).min(1.)
+}
+
+let start_a = clamp(self.alpha_f32());
+let end_a = clamp(other.alpha_f32());
+let start = [ start_a,
+self.red_f32() * start_a,
+self.green_f32() * start_a,
+self.blue_f32() * start_a ];
+let end = [ end_a,
+other.red_f32() * end_a,
+other.green_f32() * end_a,
+other.blue_f32() * end_a ];
+let diff = start.iter().zip(&end)
+.fold(0.0f64, |n, (&a, &b)| {
+let diff = (a - b) as f64;
+n + diff * diff
+});
+Ok(diff)
+}
+}
+
+impl ComputeDistance for CSSParserColor {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sq| sq.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(CSSParserColor::RGBA(ref this), CSSParserColor::RGBA(ref other)) => {
+this.compute_squared_distance(other)
+},
+_ => Ok(0.0),
+}
+}
+}
+
+impl ComputeDistance for CalcLengthOrPercentage {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sq| sq.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+let length_diff = (self.length().0 - other.length().0) as f64;
+let percentage_diff = (self.percentage() - other.percentage()) as f64;
+Ok(length_diff * length_diff + percentage_diff * percentage_diff)
+}
+}
+
+impl ComputeDistance for LengthOrPercentage {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(LengthOrPercentage::Length(ref this),
+LengthOrPercentage::Length(ref other)) => {
+this.compute_distance(other)
+},
+(LengthOrPercentage::Percentage(ref this),
+LengthOrPercentage::Percentage(ref other)) => {
+this.compute_distance(other)
+},
+(this, other) => {
+let this: CalcLengthOrPercentage = From::from(this);
+let other: CalcLengthOrPercentage = From::from(other);
+this.compute_distance(&other)
+}
+}
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(LengthOrPercentage::Length(ref this),
+LengthOrPercentage::Length(ref other)) => {
+let diff = (this.0 - other.0) as f64;
+Ok(diff * diff)
+},
+(LengthOrPercentage::Percentage(ref this),
+LengthOrPercentage::Percentage(ref other)) => {
+let diff = (this - other) as f64;
+Ok(diff * diff)
+},
+(this, other) => {
+let this: CalcLengthOrPercentage = From::from(this);
+let other: CalcLengthOrPercentage = From::from(other);
+let length_diff = (this.length().0 - other.length().0) as f64;
+let percentage_diff = (this.percentage() - other.percentage()) as f64;
+Ok(length_diff * length_diff + percentage_diff * percentage_diff)
+}
+}
+}
+}
+
+impl ComputeDistance for LengthOrPercentageOrAuto {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(LengthOrPercentageOrAuto::Length(ref this),
+LengthOrPercentageOrAuto::Length(ref other)) => {
+this.compute_distance(other)
+},
+(LengthOrPercentageOrAuto::Percentage(ref this),
+LengthOrPercentageOrAuto::Percentage(ref other)) => {
+this.compute_distance(other)
+},
+(this, other) => {
+// If one of the element is Auto, Option<> will be None, and the returned distance is Err(())
+let this: Option<CalcLengthOrPercentage> = From::from(this);
+let other: Option<CalcLengthOrPercentage> = From::from(other);
+this.compute_distance(&other)
+}
+}
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(LengthOrPercentageOrAuto::Length(ref this),
+LengthOrPercentageOrAuto::Length(ref other)) => {
+let diff = (this.0 - other.0) as f64;
+Ok(diff * diff)
+},
+(LengthOrPercentageOrAuto::Percentage(ref this),
+LengthOrPercentageOrAuto::Percentage(ref other)) => {
+let diff = (this - other) as f64;
+Ok(diff * diff)
+},
+(this, other) => {
+let this: Option<CalcLengthOrPercentage> = From::from(this);
+let other: Option<CalcLengthOrPercentage> = From::from(other);
+if this.is_none() || other.is_none() {
+Err(())
+} else {
+let length_diff = (this.unwrap().length().0 - other.unwrap().length().0) as f64;
+let percentage_diff = (this.unwrap().percentage() - other.unwrap().percentage()) as f64;
+Ok(length_diff * length_diff + percentage_diff * percentage_diff)
+}
+}
+}
+}
+}
+
+impl ComputeDistance for LengthOrPercentageOrNone {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(LengthOrPercentageOrNone::Length(ref this),
+LengthOrPercentageOrNone::Length(ref other)) => {
+this.compute_distance(other)
+},
+(LengthOrPercentageOrNone::Percentage(ref this),
+LengthOrPercentageOrNone::Percentage(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(())
+}
+}
+}
+
+impl ComputeDistance for LengthOrNone {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(Either::First(ref length), Either::First(ref other)) => {
+length.compute_distance(other)
+},
+_ => Err(()),
+}
+}
+}
+
+impl ComputeDistance for MinLength {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(MinLength::LengthOrPercentage(ref this),
+MinLength::LengthOrPercentage(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(()),
+}
+}
+}
+
+impl ComputeDistance for MaxLength {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(MaxLength::LengthOrPercentage(ref this),
+MaxLength::LengthOrPercentage(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(()),
+}
+}
+}
+
+impl ComputeDistance for VerticalAlign {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(VerticalAlign::LengthOrPercentage(ref this),
+VerticalAlign::LengthOrPercentage(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(()),
+}
+}
+}
+
+impl ComputeDistance for BorderRadiusSize {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+Ok(try!(self.0.width.compute_squared_distance(&other.0.width)) +
+try!(self.0.height.compute_squared_distance(&other.0.height)))
+}
+}
+
+impl ComputeDistance for BackgroundSizeList {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_distance(&other.0)
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_squared_distance(&other.0)
+}
+}
+
+impl ComputeDistance for LineHeight {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(LineHeight::Length(ref this),
+LineHeight::Length(ref other)) => {
+this.compute_distance(other)
+},
+(LineHeight::Number(ref this),
+LineHeight::Number(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(()),
+}
+}
+}
+
+impl ComputeDistance for FontWeight {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+let a = (*self as u32) as f64;
+let b = (*other as u32) as f64;
+a.compute_distance(&b)
+}
+}
+
+impl ComputeDistance for Position {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+Ok(try!(self.horizontal.compute_squared_distance(&other.horizontal)) +
+try!(self.vertical.compute_squared_distance(&other.vertical)))
+}
+}
+
+impl ComputeDistance for HorizontalPosition {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_distance(&other.0)
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_squared_distance(&other.0)
+}
+}
+
+impl ComputeDistance for VerticalPosition {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_distance(&other.0)
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+self.0.compute_squared_distance(&other.0)
+}
+}
+
+impl ComputeDistance for ClipRect {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+let list = [ try!(self.top.compute_distance(&other.top)),
+try!(self.right.compute_distance(&other.right)),
+try!(self.bottom.compute_distance(&other.bottom)),
+try!(self.left.compute_distance(&other.left)) ];
+Ok(list.iter().fold(0.0f64, |sum, diff| sum + diff * diff))
+}
+}
+
+impl ComputeDistance for TextShadow {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+let list = [ try!(self.offset_x.compute_distance(&other.offset_x)),
+try!(self.offset_y.compute_distance(&other.offset_y)),
+try!(self.blur_radius.compute_distance(&other.blur_radius)),
+try!(self.color.compute_distance(&other.color)) ];
+Ok(list.iter().fold(0.0f64, |sum, diff| sum + diff * diff))
+}
+}
+
+impl ComputeDistance for TextShadowList {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+let zero = TextShadow {
+offset_x: Au(0),
+offset_y: Au(0),
+blur_radius: Au(0),
+color: CSSParserColor::RGBA(RGBA::transparent()),
+};
+
+let max_len = cmp::max(self.0.len(), other.0.len());
+let mut diff_squared = 0.0f64;
+for i in 0..max_len {
+diff_squared += match (self.0.get(i), other.0.get(i)) {
+(Some(shadow), Some(other)) => {
+try!(shadow.compute_squared_distance(other))
+},
+(Some(shadow), None) |
+(None, Some(shadow)) => {
+try!(shadow.compute_squared_distance(&zero))
+},
+(None, None) => unreachable!(),
+};
+}
+Ok(diff_squared)
+}
+}
+
+impl ComputeDistance for BoxShadow {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+if self.inset != other.inset {
+return Err(());
+}
+let list = [ try!(self.offset_x.compute_distance(&other.offset_x)),
+try!(self.offset_y.compute_distance(&other.offset_y)),
+try!(self.color.compute_distance(&other.color)),
+try!(self.spread_radius.compute_distance(&other.spread_radius)),
+try!(self.blur_radius.compute_distance(&other.blur_radius)) ];
+Ok(list.iter().fold(0.0f64, |sum, diff| sum + diff * diff))
+}
+}
+
+impl ComputeDistance for BoxShadowList {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+// The inset value must change
+let mut zero = BoxShadow {
+offset_x: Au(0),
+offset_y: Au(0),
+spread_radius: Au(0),
+blur_radius: Au(0),
+color: CSSParserColor::RGBA(RGBA::transparent()),
+inset: false,
+};
+
+let max_len = cmp::max(self.0.len(), other.0.len());
+let mut diff_squared = 0.0f64;
+for i in 0..max_len {
+diff_squared += match (self.0.get(i), other.0.get(i)) {
+(Some(shadow), Some(other)) => {
+try!(shadow.compute_squared_distance(other))
+},
+(Some(shadow), None) |
+(None, Some(shadow)) => {
+zero.inset = shadow.inset;
+try!(shadow.compute_squared_distance(&zero))
+}
+(None, None) => unreachable!(),
+};
+}
+Ok(diff_squared)
+}
+}
+
+impl ComputeDistance for TransformList {
+#[inline]
+fn compute_distance(&self, _other: &Self) -> Result<f64, ()> {
+Err(())
+}
+}
+
+impl<T, U> ComputeDistance for Either<T, U>
+where T: ComputeDistance, U: ComputeDistance
+{
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&Either::First(ref this), &Either::First(ref other)) => {
+this.compute_distance(other)
+},
+(&Either::Second(ref this), &Either::Second(ref other)) => {
+this.compute_distance(other)
+},
+_ => Err(())
+}
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&Either::First(ref this), &Either::First(ref other)) => {
+this.compute_squared_distance(other)
+},
+(&Either::Second(ref this), &Either::Second(ref other)) => {
+this.compute_squared_distance(other)
+},
+_ => Err(())
+}
+}
+}
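As background for the large block above, a small self-contained Rust sketch (simplified names, not the Servo code itself) of the ComputeDistance idea: compute_squared_distance defaults to squaring compute_distance, and list-like values sum per-element squared distances and take the square root, which yields a Euclidean distance over the whole list.

// Simplified version of the trait added above, for illustration only.
pub trait ComputeDistance: Sized {
    /// Distance between two values of the same animatable property.
    fn compute_distance(&self, other: &Self) -> Result<f64, ()>;

    /// Squared distance; compound values override this to sum components cheaply.
    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
        self.compute_distance(other).map(|d| d * d)
    }
}

impl ComputeDistance for f64 {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
        Ok((*self - *other).abs())
    }
}

impl<T: ComputeDistance> ComputeDistance for Vec<T> {
    fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
        self.compute_squared_distance(other).map(|sq| sq.sqrt())
    }

    fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
        // Lists of different lengths are treated as non-animatable.
        if self.len() != other.len() {
            return Err(());
        }
        let mut sum = 0.0;
        for (a, b) in self.iter().zip(other) {
            sum += a.compute_squared_distance(b)?;
        }
        Ok(sum)
    }
}

fn main() {
    let a = vec![0.0, 3.0];
    let b = vec![4.0, 0.0];
    // Per-element squared distances are 16 and 9, so the distance is sqrt(25) = 5.
    assert_eq!(a.compute_distance(&b), Ok(5.0));
}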
@@ -334,7 +334,7 @@ ${helpers.single_keyword("background-origin",
 #[allow(missing_docs)]
 pub mod computed_value {
 use values::computed::LengthOrPercentageOrAuto;
-use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
+use properties::animated_properties::{ComputeDistance, Interpolate, RepeatableListInterpolate};

 #[derive(PartialEq, Clone, Debug)]
 #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
@@ -367,6 +367,24 @@ ${helpers.single_keyword("background-origin",
 }
 }
 }
+
+impl ComputeDistance for T {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+match (self, other) {
+(&T::Explicit(ref me), &T::Explicit(ref other)) => {
+Ok(try!(me.width.compute_squared_distance(&other.width)) +
+try!(me.height.compute_squared_distance(&other.height)))
+},
+_ => Err(())
+}
+}
+}
 }

 impl ToCss for computed_value::T {
@@ -2197,7 +2197,7 @@ ${helpers.single_keyword("transform-style",
 use values::specified::{NoCalcLength, LengthOrPercentage, Percentage};

 pub mod computed_value {
-use properties::animated_properties::Interpolate;
+use properties::animated_properties::{ComputeDistance, Interpolate};
 use values::computed::{Length, LengthOrPercentage};

 #[derive(Clone, Copy, Debug, PartialEq)]
@@ -2209,6 +2209,7 @@ ${helpers.single_keyword("transform-style",
 }

 impl Interpolate for T {
+#[inline]
 fn interpolate(&self, other: &Self, time: f64) -> Result<Self, ()> {
 Ok(T {
 horizontal: try!(self.horizontal.interpolate(&other.horizontal, time)),
@@ -2217,6 +2218,20 @@ ${helpers.single_keyword("transform-style",
 })
 }
 }
+
+impl ComputeDistance for T {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+self.compute_squared_distance(other).map(|sd| sd.sqrt())
+}
+
+#[inline]
+fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
+Ok(try!(self.horizontal.compute_squared_distance(&other.horizontal)) +
+try!(self.vertical.compute_squared_distance(&other.vertical)) +
+try!(self.depth.compute_squared_distance(&other.depth)))
+}
+}
 }

 impl HasViewportPercentage for SpecifiedValue {
@@ -818,7 +818,7 @@ ${helpers.single_keyword("font-variant-caps",
 }

 pub mod computed_value {
-use properties::animated_properties::Interpolate;
+use properties::animated_properties::{ComputeDistance, Interpolate};
 use std::fmt;
 use style_traits::ToCss;
 use values::CSSFloat;
@@ -850,6 +850,17 @@ ${helpers.single_keyword("font-variant-caps",
 }
 }
 }
+
+impl ComputeDistance for T {
+#[inline]
+fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
+match (*self, *other) {
+(T::Number(ref number), T::Number(ref other)) =>
+number.compute_distance(other),
+_ => Err(()),
+}
+}
+}
 }

 #[inline]
@@ -439,13 +439,14 @@ ${helpers.single_keyword("text-align-last",

 pub mod computed_value {
 use app_units::Au;
-use properties::animated_properties::Interpolate;
+use properties::animated_properties::{ComputeDistance, Interpolate};

 #[derive(Debug, Clone, PartialEq)]
 #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
 pub struct T(pub Option<Au>);

 ${helpers.impl_interpolate_for_option_tuple('Au(0)')}
+${helpers.impl_compute_distance_for_option_tuple('Au(0)')}
 }

 impl ToCss for computed_value::T {
@@ -523,13 +524,14 @@ ${helpers.single_keyword("text-align-last",
 }

 pub mod computed_value {
-use properties::animated_properties::Interpolate;
+use properties::animated_properties::{ComputeDistance, Interpolate};
 use values::computed::LengthOrPercentage;
 #[derive(Debug, Clone, PartialEq)]
 #[cfg_attr(feature = "servo", derive(HeapSizeOf))]
 pub struct T(pub Option<LengthOrPercentage>);

 ${helpers.impl_interpolate_for_option_tuple('LengthOrPercentage::zero()')}
+${helpers.impl_compute_distance_for_option_tuple('LengthOrPercentage::zero()')}
 }

 impl ToCss for computed_value::T {
Some files were not shown because too many files have changed in this diff.