Merge fx-team to central, a=merge CLOSED TREE

This commit is contained in:
Wes Kocher 2016-08-16 16:37:28 -07:00
commit 073e095b66
59 changed files with 1504 additions and 610 deletions

View File

@ -419,11 +419,11 @@
// display debug information about the cert error.
var errorCode = document.getElementById("errorCode");
if (errorCode) {
errorCode.href = "#technicalInformation";
errorCode.href = "javascript:void(0)";
errorCode.addEventListener("click", () => {
var div = document.getElementById("certificateErrorDebugInformation");
if (toggleDisplay(div) == "block") {
div.scrollIntoView({block: "start", behavior: "smooth"});
let debugInfo = document.getElementById("certificateErrorDebugInformation");
if (toggleDisplay(debugInfo) == "block") {
debugInfo.scrollIntoView({block: "start", behavior: "smooth"});
}
}, false);
}
@ -646,7 +646,6 @@
</div>
<div id="certificateErrorDebugInformation">
<a name="technicalInformation"></a>
<button id="copyToClipboard">&certerror.copyToClipboard.label;</button>
<div id="certificateErrorText"/>
<button id="copyToClipboard">&certerror.copyToClipboard.label;</button>

View File

@ -6787,7 +6787,6 @@ var gIdentityHandler = {
if (this._identityPopup.state == "open") {
this.updateSitePermissions();
this._identityPopupMultiView.setHeightToFit();
}
},
@ -7306,7 +7305,6 @@ var gIdentityHandler = {
button.setAttribute("class", "identity-popup-permission-remove-button");
button.addEventListener("command", () => {
this._permissionList.removeChild(container);
this._identityPopupMultiView.setHeightToFit();
if (aPermission.inUse &&
["camera", "microphone", "screen"].includes(aPermission.id)) {
let windowId = this._sharingState.windowId;

View File

@ -16,7 +16,7 @@
<panelmultiview id="identity-popup-multiView"
mainViewId="identity-popup-mainView">
<panelview id="identity-popup-mainView" flex="1">
<panelview id="identity-popup-mainView">
<!-- Security Section -->
<hbox id="identity-popup-security" class="identity-popup-section">

View File

@ -318,7 +318,7 @@
break;
case "popuphidden":
this.removeAttribute("panelopen");
this._mainView.style.removeProperty("height");
this._mainView.style.removeProperty("max-height");
this.showMainView();
this._mainViewObserver.disconnect();
break;
@ -346,7 +346,7 @@
// Ignore the mutation that'll fire when we set the height of
// the main view.
this.ignoreMutations = true;
this._mainView.style.height =
this._mainView.style.maxHeight =
this.getBoundingClientRect().height + "px";
this.ignoreMutations = false;
]]></body>

View File

@ -155,6 +155,7 @@ def old_configure_options(*options):
@old_configure_options(
'--cache-file',
'--datadir',
'--enable-accessibility',
'--enable-address-sanitizer',
'--enable-alsa',
@ -260,6 +261,8 @@ def old_configure_options(*options):
'--enable-webspeechtestbackend',
'--enable-xul',
'--enable-zipwriter',
'--includedir',
'--libdir',
'--no-create',
'--prefix',
'--with-adjust-sdk-keyfile',

View File

@ -119,6 +119,7 @@ support-files =
doc_terminate-on-tab-close.html
doc_watch-expressions.html
doc_watch-expression-button.html
doc_whitespace-property-names.html
doc_with-frame.html
doc_worker-source-map.html
doc_WorkerActor.attach-tab1.html
@ -521,6 +522,8 @@ skip-if = e10s && debug
skip-if = e10s && debug
[browser_dbg_variables-view-07.js]
skip-if = e10s && debug
[browser_dbg_variables-view-08.js]
skip-if = e10s && debug
[browser_dbg_variables-view-accessibility.js]
subsuite = clipboard
skip-if = e10s && debug

View File

@ -121,7 +121,5 @@ var test = Task.async(function* () {
}
}
debugger;
resumeDebuggerThenCloseAndFinish(panel);
});

View File

@ -28,9 +28,11 @@ var test = Task.async(function* () {
ok(scope, "Should get the current function's scope.");
let proxy;
[...scope].forEach(function([name, value]) {
if(name === "proxy") proxy = value;
});
for (let [name, value] of scope) {
if (name === "proxy") {
proxy = value;
}
}
ok(proxy, "Should have found the proxy variable");
info("Expanding variable 'proxy'");
@ -52,16 +54,16 @@ var test = Task.async(function* () {
}
} else {
is(property, "<handler>", "There shouldn't be properties other than <target> and <handler>");
for(let [subprop, subdata] of data) if(subprop === "name") {
is(subdata.value, "handler", "The value of '<handler>' should be the [[ProxyHandler]]");
foundHandler = true;
for (let [subprop, subdata] of data) {
if(subprop === "name") {
is(subdata.value, "handler", "The value of '<handler>' should be the [[ProxyHandler]]");
foundHandler = true;
}
}
}
}
ok(foundTarget, "Should have found the '<target>' property containing the [[ProxyTarget]]");
ok(foundHandler, "Should have found the '<handler>' property containing the [[ProxyHandler]]");
debugger;
resumeDebuggerThenCloseAndFinish(panel);
});

View File

@ -0,0 +1,61 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Test that property values are not missing when the property names only contain whitespace.
*/
const TAB_URL = EXAMPLE_URL + "doc_whitespace-property-names.html";

/**
 * Check that properties whose names consist only of whitespace characters
 * still get the correct name, stringified value and value label in the
 * variables view.
 */
var test = Task.async(function* () {
  const options = {
    source: TAB_URL,
    line: 1
  };
  // initDebugger resolves to [tab, debuggee, panel] once the debugger
  // front-end is ready for TAB_URL.
  const dbg = initDebugger(TAB_URL, options);
  const [tab,, panel] = yield dbg;
  // Line inside doc_whitespace-property-names.html where the `debugger;`
  // statement pauses execution.
  const debuggerLineNumber = 24;

  const scopes = waitForCaretAndScopes(panel, debuggerLineNumber);
  callInTab(tab, "doPause");
  yield scopes;

  const variables = panel.panelWin.DebuggerView.Variables;
  ok(variables, "Should get the variables view.");

  const scope = [...variables][0];
  ok(scope, "Should get the current function's scope.");

  // Locate the `obj` binding defined by doPause() in the test page.
  let obj;
  for (const [name, value] of scope) {
    if (name === "obj") {
      obj = value;
    }
  }
  ok(obj, "Should have found the 'obj' variable");

  info("Expanding variable 'obj'");
  const expanded = once(variables, "fetched");
  obj.expand();
  yield expanded;

  // Whitespace-only property names from the test page; each name maps to
  // its index in this array as its value.
  const values = [" ", "\r", "\n", "\t", "\f", "\uFEFF", "\xA0"];
  let count = values.length;
  for (const [property, value] of obj) {
    const index = values.indexOf(property);
    if (index >= 0) {
      --count;
      is(value._nameString, property,
        "The _nameString is different than the property name");
      is(value._valueString, index + "",
        "The _valueString is different than the stringified value");
      is(value._valueLabel.getAttribute("value"), index + "",
        "The _valueLabel value is different than the stringified value");
    }
  }
  is(count, 0, "There are " + count + " missing properties");

  resumeDebuggerThenCloseAndFinish(panel);
});

View File

@ -0,0 +1,29 @@
<!-- Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>Debugger + Whitespace property name test page</title>
</head>
<body>
<script>
window.obj = {
" ": 0,
"\r": 1,
"\n": 2,
"\t": 3,
"\f": 4,
"\uFEFF": 5,
"\xA0": 6
};
window.doPause = function () {
var obj = window.obj;
debugger;
};
</script>
</body>
</html>

View File

@ -23,6 +23,9 @@ const SCROLL_REPEAT_MS = 100;
const EventEmitter = require("devtools/shared/event-emitter");
const {KeyShortcuts} = require("devtools/client/shared/key-shortcuts");
// Some margin may be required for visible element detection.
const SCROLL_MARGIN = 1;
/**
* Component to replicate functionality of XUL arrowscrollbox
* for breadcrumbs
@ -40,6 +43,9 @@ function ArrowScrollBox(win, container) {
ArrowScrollBox.prototype = {
// Scroll behavior, exposed for testing
scrollBehavior: "smooth",
/**
* Build the HTML, add to the DOM and start listening to
* events
@ -68,10 +74,26 @@ ArrowScrollBox.prototype = {
this.inner.addEventListener("overflow", this.onOverflow, false);
},
/**
* Determine whether the current text directionality is RTL
*/
isRtl: function () {
return this.win.getComputedStyle(this.container).direction === "rtl";
},
/**
* Scroll to the specified element using the current scroll behavior
* @param {Element} element element to scroll
* @param {String} block desired alignment of element after scrolling
*/
scrollToElement: function (element, block) {
element.scrollIntoView({ block: block, behavior: this.scrollBehavior });
},
/**
* Call the given function once; then continuously
* while the mouse button is held
* @param {repeatFn} the function to repeat while the button is held
* @param {Function} repeatFn the function to repeat while the button is held
*/
clickOrHold: function (repeatFn) {
let timer;
@ -109,7 +131,7 @@ ArrowScrollBox.prototype = {
}
let element = this.inner.childNodes[0];
element.scrollIntoView({ block: "start", behavior: "smooth" });
this.scrollToElement(element, "start");
},
/**
@ -122,7 +144,7 @@ ArrowScrollBox.prototype = {
}
let element = children[children.length - 1];
element.scrollIntoView({ block: "start", behavior: "smooth" });
this.scrollToElement(element, "start");
},
/**
@ -135,7 +157,8 @@ ArrowScrollBox.prototype = {
return;
}
element.scrollIntoView({ block: "start", behavior: "smooth" });
let block = this.isRtl() ? "end" : "start";
this.scrollToElement(element, block);
};
this.clickOrHold(scrollToStart);
@ -151,7 +174,8 @@ ArrowScrollBox.prototype = {
return;
}
element.scrollIntoView({ block: "end", behavior: "smooth" });
let block = this.isRtl() ? "start" : "end";
this.scrollToElement(element, block);
};
this.clickOrHold(scrollToEnd);
@ -195,47 +219,73 @@ ArrowScrollBox.prototype = {
this.emit("overflow");
},
/**
* Check whether the element is to the left of its container but does
* not also span the entire container.
* @param {Number} left the left scroll point of the container
* @param {Number} right the right edge of the container
* @param {Number} elementLeft the left edge of the element
* @param {Number} elementRight the right edge of the element
*/
elementLeftOfContainer: function (left, right, elementLeft, elementRight) {
return elementLeft < (left - SCROLL_MARGIN)
&& elementRight < (right - SCROLL_MARGIN);
},
/**
* Check whether the element is to the right of its container but does
* not also span the entire container.
* @param {Number} left the left scroll point of the container
* @param {Number} right the right edge of the container
* @param {Number} elementLeft the left edge of the element
* @param {Number} elementRight the right edge of the element
*/
elementRightOfContainer: function (left, right, elementLeft, elementRight) {
return elementLeft > (left + SCROLL_MARGIN)
&& elementRight > (right + SCROLL_MARGIN);
},
/**
* Get the first (i.e. furthest left for LTR)
* non visible element in the scroll box
* non or partly visible element in the scroll box
*/
getFirstInvisibleElement: function () {
let start = this.inner.scrollLeft;
let end = this.inner.scrollLeft + this.inner.clientWidth;
let crumbs = this.inner.childNodes;
for (let i = crumbs.length - 1; i > -1; i--) {
let element = crumbs[i];
let elementRight = element.offsetLeft + element.offsetWidth;
if (element.offsetLeft < start) {
// edge case, check the element isn't already visible
if (elementRight >= end) {
continue;
}
return element;
}
}
let elementsList = Array.from(this.inner.childNodes).reverse();
return null;
let predicate = this.isRtl() ?
this.elementRightOfContainer : this.elementLeftOfContainer;
return this.findFirstWithBounds(elementsList, predicate);
},
/**
* Get the last (i.e. furthest right for LTR)
* non-visible element in the scroll box
* non or partly visible element in the scroll box
*/
getLastInvisibleElement: function () {
let end = this.inner.scrollLeft + this.inner.clientWidth;
let elementStart = 0;
for (let element of this.inner.childNodes) {
let elementEnd = elementStart + element.offsetWidth;
if (elementEnd > end) {
// Edge case: check the element isn't bigger than the
// container and thus already in view
if (elementStart > this.inner.scrollLeft) {
return element;
}
}
let predicate = this.isRtl() ?
this.elementLeftOfContainer : this.elementRightOfContainer;
return this.findFirstWithBounds(this.inner.childNodes, predicate);
},
elementStart = elementEnd;
/**
* Find the first element that matches the given predicate, called with bounds
* information
* @param {Array} elements an ordered list of elements
* @param {Function} predicate a function to be called with bounds
* information
*/
findFirstWithBounds: function (elements, predicate) {
let left = this.inner.scrollLeft;
let right = left + this.inner.clientWidth;
for (let element of elements) {
let elementLeft = element.offsetLeft - element.parentElement.offsetLeft;
let elementRight = elementLeft + element.offsetWidth;
// Check that the starting edge of the element is out of the visible area
// and that the ending edge does not span the whole container
if (predicate(left, right, elementLeft, elementRight)) {
return element;
}
}
return null;
@ -725,7 +775,7 @@ HTMLBreadcrumbs.prototype = {
// FIXME bug 684352: make sure its immediate neighbors are visible too.
if (!this.isDestroyed) {
let element = this.nodeHierarchy[this.currentIndex].button;
element.scrollIntoView({ block: "end", behavior: "smooth" });
this.arrowScrollBox.scrollToElement(element, "end");
}
},

View File

@ -483,6 +483,9 @@ CssComputedView.prototype = {
onItem: (propView) => {
propView.refresh();
},
onCancel: () => {
deferred.reject("_refreshProcess of computed view cancelled");
},
onDone: () => {
this._refreshProcess = null;
this.noResults.hidden = this.numVisibleProperties > 0;

View File

@ -182,17 +182,21 @@ InspectorPanel.prototype = {
this._supportsScrollIntoView = false;
this._supportsResolveRelativeURL = false;
return promise.all([
this._target.actorHasMethod("domwalker", "duplicateNode").then(value => {
this._supportsDuplicateNode = value;
}).catch(e => console.error(e)),
this._target.actorHasMethod("domnode", "scrollIntoView").then(value => {
this._supportsScrollIntoView = value;
}).catch(e => console.error(e)),
this._target.actorHasMethod("inspector", "resolveRelativeURL").then(value => {
this._supportsResolveRelativeURL = value;
}).catch(e => console.error(e)),
]);
// Use getActorDescription first so that all actorHasMethod calls use
// a cached response from the server.
return this._target.getActorDescription("domwalker").then(desc => {
return promise.all([
this._target.actorHasMethod("domwalker", "duplicateNode").then(value => {
this._supportsDuplicateNode = value;
}).catch(e => console.error(e)),
this._target.actorHasMethod("domnode", "scrollIntoView").then(value => {
this._supportsScrollIntoView = value;
}).catch(e => console.error(e)),
this._target.actorHasMethod("inspector", "resolveRelativeURL").then(value => {
this._supportsResolveRelativeURL = value;
}).catch(e => console.error(e)),
]);
});
},
_deferredOpen: function (defaultSelection) {

View File

@ -132,6 +132,7 @@ InspectorSearch.prototype = {
},
_onClearSearch: function () {
this.searchBox.classList.remove("devtools-style-searchbox-no-match");
this.searchBox.value = "";
this.searchClearButton.hidden = true;
}

View File

@ -23,9 +23,7 @@ support-files =
[browser_layout_rotate-labels-on-sides.js]
[browser_layout_sync.js]
[browser_layout_tooltips.js]
# [browser_layout_update-after-navigation.js]
# Disabled for too many intermittent failures (bug 1288213)
# [browser_layout_update-after-reload.js]
# Disabled for too many intermittent failures (bug 1287745)
[browser_layout_update-after-navigation.js]
[browser_layout_update-after-reload.js]
# [browser_layout_update-in-iframes.js]
# Bug 1020038 layout-view updates for iframe elements changes

View File

@ -4,6 +4,7 @@ subsuite = devtools
support-files =
doc_inspector_add_node.html
doc_inspector_breadcrumbs.html
doc_inspector_breadcrumbs_visibility.html
doc_inspector_delete-selected-node-01.html
doc_inspector_delete-selected-node-02.html
doc_inspector_embed.html
@ -48,6 +49,7 @@ support-files =
skip-if = os == "mac" # Full keyboard navigation on OSX only works if Full Keyboard Access setting is set to All Control in System Keyboard Preferences
[browser_inspector_breadcrumbs_mutations.js]
[browser_inspector_breadcrumbs_namespaced.js]
[browser_inspector_breadcrumbs_visibility.js]
[browser_inspector_delete-selected-node-01.js]
[browser_inspector_delete-selected-node-02.js]
[browser_inspector_delete-selected-node-03.js]

View File

@ -0,0 +1,106 @@
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Test that the start and end buttons on the breadcrumb trail bring the right
// crumbs into the visible area, for both LTR and RTL
let { Toolbox } = require("devtools/client/framework/toolbox");

const TEST_URI = URL_ROOT + "doc_inspector_breadcrumbs_visibility.html";

// Selectors for the nested divs in the test document. Each id is long
// enough that the corresponding crumb exceeds the breadcrumb truncation
// limit, forcing the trail to overflow.
const NODE_ONE = "div#aVeryLongIdToExceedTheBreadcrumbTruncationLimit";
const NODE_TWO = "div#anotherVeryLongIdToExceedTheBreadcrumbTruncationLimit";
const NODE_THREE = "div#aThirdVeryLongIdToExceedTheTruncationLimit";
const NODE_FOUR = "div#aFourthOneToExceedTheTruncationLimit";
const NODE_FIVE = "div#aFifthOneToExceedTheTruncationLimit";
const NODE_SIX = "div#aSixthOneToExceedTheTruncationLimit";
const NODE_SEVEN = "div#aSeventhOneToExceedTheTruncationLimit";

// Scroll-button click sequence: press "start" repeatedly to walk back to
// the first crumb, then "end" repeatedly to walk forward again. `title`
// is the crumb expected to be visible after each click.
const NODES = [
  { action: "start", title: NODE_SIX },
  { action: "start", title: NODE_FIVE },
  { action: "start", title: NODE_FOUR },
  { action: "start", title: NODE_THREE },
  { action: "start", title: NODE_TWO },
  { action: "start", title: NODE_ONE },
  { action: "end", title: NODE_TWO },
  { action: "end", title: NODE_THREE },
  { action: "end", title: NODE_FOUR },
  { action: "end", title: NODE_FIVE },
  { action: "end", title: NODE_SIX }
];
/**
 * Open the inspector in a separate window host, shrink the window so the
 * breadcrumb trail overflows, and run the visibility checks under both LTR
 * and RTL directionality.
 */
add_task(function* () {
  let { inspector, toolbox } = yield openInspectorForURL(TEST_URI);

  // No way to wait for scrolling to end (Bug 1172171)
  // Rather than wait a max time; limit test to instant scroll behavior
  inspector.breadcrumbs.arrowScrollBox.scrollBehavior = "instant";

  // A window host gives the test full control over the toolbox size.
  yield toolbox.switchHost(Toolbox.HostType.WINDOW);
  let hostWindow = toolbox._host._window;
  let originalWidth = hostWindow.outerWidth;
  let originalHeight = hostWindow.outerHeight;
  // Small enough that the breadcrumb trail cannot fit entirely on screen.
  hostWindow.resizeTo(640, 300);

  info("Testing transitions ltr");
  yield pushPref("intl.uidirection.en-US", "ltr");
  yield testBreadcrumbTransitions(hostWindow, inspector);

  info("Testing transitions rtl");
  yield pushPref("intl.uidirection.en-US", "rtl");
  yield testBreadcrumbTransitions(hostWindow, inspector);

  // Restore the original window size for subsequent tests.
  hostWindow.resizeTo(originalWidth, originalHeight);
});
/**
 * Click the breadcrumb start/end scroll buttons in the order given by NODES
 * and check that each expected crumb ends up inside the host window's
 * viewport.
 * @param {Window} hostWindow the window hosting the toolbox, used as the
 *        viewport for visibility checks
 * @param {InspectorPanel} inspector the inspector whose breadcrumbs are tested
 */
function* testBreadcrumbTransitions(hostWindow, inspector) {
  let breadcrumbs = inspector.panelDoc.getElementById("inspector-breadcrumbs");
  let startBtn = breadcrumbs.querySelector(".scrollbutton-up");
  let endBtn = breadcrumbs.querySelector(".scrollbutton-down");
  let container = breadcrumbs.querySelector(".html-arrowscrollbox-inner");
  let breadcrumbsUpdated = inspector.once("breadcrumbs-updated");
  info("Selecting initial node");
  yield selectNode(NODE_SEVEN, inspector);
  // Wait for the breadcrumbs update triggered by the selection above.
  yield breadcrumbsUpdated;
  let initialCrumb = container.querySelector("button[checked]");
  is(isElementInViewport(hostWindow, initialCrumb), true,
    "initial element was visible");
  for (let node of NODES) {
    info("Checking for visibility of crumb " + node.title);
    if (node.action === "end") {
      info("Simulating click of end button");
      EventUtils.synthesizeMouseAtCenter(endBtn, {}, inspector.panelWin);
    } else if (node.action === "start") {
      info("Simulating click of start button");
      EventUtils.synthesizeMouseAtCenter(startBtn, {}, inspector.panelWin);
    }
    // NOTE(review): breadcrumbsUpdated was created (and already resolved)
    // before the loop, so this yield does not actually wait for a new
    // update; the test appears to rely on scrollBehavior being "instant"
    // (set by the caller). Confirm whether a fresh once() per iteration
    // was intended.
    yield breadcrumbsUpdated;
    let selector = "button[title=\"" + node.title + "\"]";
    let relevantCrumb = container.querySelector(selector);
    is(isElementInViewport(hostWindow, relevantCrumb), true,
      node.title + " crumb is visible");
  }
}
/**
 * Check whether an element lies entirely within the window's viewport.
 * @param {Window} window the window whose inner dimensions bound the viewport
 * @param {Element} el the element to test
 * @return {Boolean} true when every edge of the element is inside the viewport
 */
function isElementInViewport(window, el) {
  const { top, left, bottom, right } = el.getBoundingClientRect();
  const fitsVertically = top >= 0 && bottom <= window.innerHeight;
  const fitsHorizontally = left >= 0 && right <= window.innerWidth;
  return fitsVertically && fitsHorizontally;
}
// Switching to a WINDOW host above persists devtools.toolbox.host; clear
// it so later tests start with the default host.
registerCleanupFunction(function () {
  // Restore the host type for other tests.
  Services.prefs.clearUserPref("devtools.toolbox.host");
});

View File

@ -0,0 +1,22 @@
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=windows-1252">
</head>
<body>
<div id="aVeryLongIdToExceedTheBreadcrumbTruncationLimit">
<div id="anotherVeryLongIdToExceedTheBreadcrumbTruncationLimit">
<div id="aThirdVeryLongIdToExceedTheTruncationLimit">
<div id="aFourthOneToExceedTheTruncationLimit">
<div id="aFifthOneToExceedTheTruncationLimit">
<div id="aSixthOneToExceedTheTruncationLimit">
<div id="aSeventhOneToExceedTheTruncationLimit">
A text node at the end
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>

View File

@ -165,6 +165,7 @@ HarBuilder.prototype = {
request.httpVersion = file.httpVersion || "";
request.headers = this.buildHeaders(file.requestHeaders);
request.headers = this.appendHeadersPostData(request.headers, file);
request.cookies = this.buildCookies(file.requestCookies);
request.queryString = NetworkHelper.parseQueryString(
@ -199,6 +200,33 @@ HarBuilder.prototype = {
return this.buildNameValuePairs(input.headers);
},
appendHeadersPostData: function (input = [], file) {
if (!file.requestPostData) {
return input;
}
this.fetchData(file.requestPostData.postData.text).then(value => {
let contentType = value.match(/Content-Type: ([^;\s]+)/);
let contentLength = value.match(/Content-Length: (.+)/);
if (contentType && contentType.length > 1) {
input.push({
name: "Content-Type",
value: contentType[1]
});
}
if (contentLength && contentLength.length > 1) {
input.push({
name: "Content-Length",
value: contentLength[1]
});
}
});
return input;
},
buildCookies: function (input) {
if (!input) {
return [];

View File

@ -37,24 +37,19 @@ define(function (require, exports, module) {
delim = (i == array.length - 1 ? "" : ", ");
if (value === array) {
items.push(Reference({
key: i,
object: value,
delim: delim
}));
} else {
items.push(ItemRep({
key: i,
object: value,
delim: delim
}));
}
items.push(ItemRep({
key: i,
object: value,
// Hardcode tiny mode to avoid recursive handling.
mode: "tiny",
delim: delim
}));
} catch (exc) {
items.push(ItemRep({
key: i,
object: exc,
delim: delim,
key: i
mode: "tiny",
delim: delim
}));
}
}
@ -173,30 +168,16 @@ define(function (require, exports, module) {
let object = this.props.object;
let delim = this.props.delim;
let mode = this.props.mode;
return (
DOM.span({},
Rep({object: object}),
Rep({object: object, mode: mode}),
delim
)
);
}
}));
/**
* Renders cycle references in an array.
*/
let Reference = React.createFactory(React.createClass({
displayName: "Reference",
render: function () {
let tooltip = "Circular reference";
return (
DOM.span({title: tooltip},
"[…]")
);
}
}));
function supportsObject(object, type) {
return Array.isArray(object) ||
Object.prototype.toString.call(object) === "[object Arguments]";

View File

@ -11,10 +11,8 @@ define(function (require, exports, module) {
const React = require("devtools/client/shared/vendor/react");
// Reps
const { isGrip } = require("./rep-utils");
// Shortcuts
const { span } = React.DOM;
const { createFactories, isGrip } = require("./rep-utils");
const { rep } = createFactories(require("./grip").Grip);
/**
* Renders DOM event objects.
@ -26,40 +24,18 @@ define(function (require, exports, module) {
object: React.PropTypes.object.isRequired
},
getTitle: function (grip) {
if (this.props.objectLink) {
return this.props.objectLink({
object: grip
}, grip.preview.type);
}
return grip.preview.type;
},
summarizeEvent: function (grip) {
let info = [];
let eventFamily = grip.class;
let props = grip.preview.properties;
if (eventFamily == "MouseEvent") {
info.push("clientX=", props.clientX, ", clientY=", props.clientY);
} else if (eventFamily == "KeyboardEvent") {
info.push("charCode=", props.charCode, ", keyCode=", props.keyCode);
} else if (eventFamily == "MessageEvent") {
info.push("origin=", props.origin, ", data=", props.data);
}
return info.join("");
},
render: function () {
let grip = this.props.object;
return (
span({className: "objectBox objectBox-event"},
this.getTitle(grip),
this.summarizeEvent(grip)
)
);
// Use `Object.assign` to keep `this.props` without changes because:
// 1. JSON.stringify/JSON.parse is slow.
// 2. Immutable.js is planned for the future.
let props = Object.assign({}, this.props);
props.object = Object.assign({}, this.props.object);
props.object.preview = Object.assign({}, this.props.object.preview);
props.object.preview.ownProperties = props.object.preview.properties;
delete props.object.preview.properties;
props.object.ownPropertyLength =
Object.keys(props.object.preview.ownProperties).length;
return rep(props);
},
});

View File

@ -56,6 +56,10 @@ define(function (require, exports, module) {
}
let delim;
// number of grip.preview.items is limited to 10, but we may have more
// items in grip-array
let delimMax = grip.preview.length > array.length ?
array.length : array.length - 1;
let provider = this.props.provider;
for (let i = 0; i < array.length && i < max; i++) {
@ -63,7 +67,7 @@ define(function (require, exports, module) {
let itemGrip = array[i];
let value = provider ? provider.getValue(itemGrip) : itemGrip;
delim = (i == array.length - 1 ? "" : ", ");
delim = (i == delimMax ? "" : ", ");
if (value === array) {
items.push(Reference({
@ -86,14 +90,15 @@ define(function (require, exports, module) {
)));
}
}
if (array.length > max) {
if (array.length > max || grip.preview.length > array.length) {
let objectLink = this.props.objectLink || span;
let leftItemNum = grip.preview.length - max > 0 ?
grip.preview.length - max : grip.preview.length - array.length;
items.push(Caption({
key: "more",
object: objectLink({
object: this.props.object
}, (grip.preview.length - max) + " more…")
}, leftItemNum + " more…")
}));
}

View File

@ -110,7 +110,8 @@ define(function (require, exports, module) {
indexes.forEach((i) => {
let name = Object.keys(ownProperties)[i];
let value = ownProperties[name].value;
let prop = ownProperties[name];
let value = prop.value !== undefined ? prop.value : prop;
props.push(PropRep(Object.assign({}, this.props, {
key: name,
mode: "tiny",
@ -144,7 +145,7 @@ define(function (require, exports, module) {
}
let prop = ownProperties[name];
let value = prop.value;
let value = prop.value !== undefined ? prop.value : prop;
// Type is specified in grip's "class" field and for primitive
// values use typeof.

View File

@ -96,7 +96,8 @@ define(function (require, exports, module) {
return props;
}
let mode = this.props.mode;
// Hardcode tiny mode to avoid recursive handling.
let mode = "tiny";
try {
for (let name in object) {

View File

@ -35,8 +35,11 @@ define(function (require, exports, module) {
let croppedString = this.props.cropLimit ?
cropMultipleLines(text, this.props.cropLimit) : cropMultipleLines(text);
let formattedString = this.props.omitQuotes ?
croppedString : "\"" + croppedString + "\"";
return (
span({className: "objectBox objectBox-string"}, "\"" + croppedString + "\""
span({className: "objectBox objectBox-string"}, formattedString
)
);
},

View File

@ -35,7 +35,6 @@
height: calc(100% - 24px);
}
.tabs .tab-panel-box,
.tabs .tab-panel {
height: 100%;
}

View File

@ -161,7 +161,7 @@ window.onload = Task.async(function* () {
function testRecursiveArray() {
let stub = [1];
stub.push(stub);
const defaultOutput = `[ 1, [] ]`;
const defaultOutput = `[ 1, [2] ]`;
const modeTests = [
{
@ -194,7 +194,7 @@ window.onload = Task.async(function* () {
p4: "s4"
}
];
const defaultOutput = `[ Object { p1: "s1", p3: "s3", p4: "s4", 1 more… } ]`;
const defaultOutput = `[ Object ]`;
const modeTests = [
{

View File

@ -35,22 +35,30 @@ window.onload = Task.async(function* () {
function testEvent() {
const renderedComponent = renderComponent(Event.rep, { object: getGripStub("testEvent") });
is(renderedComponent.textContent, "beforeprint", "Event rep has expected text content for an event");
is(renderedComponent.textContent,
"Event { isTrusted: true, eventPhase: 2, bubbles: false, 7 more… }",
"Event rep has expected text content for an event");
}
function testMouseEvent() {
const renderedComponent = renderComponent(Event.rep, { object: getGripStub("testMouseEvent") });
is(renderedComponent.textContent, "clickclientX=62, clientY=18", "Event rep has expected text content for a mouse event");
is(renderedComponent.textContent,
"MouseEvent { buttons: 0, clientX: 62, clientY: 18, 2 more… }",
"Event rep has expected text content for a mouse event");
}
function testKeyboardEvent() {
const renderedComponent = renderComponent(Event.rep, { object: getGripStub("testKeyboardEvent") });
is(renderedComponent.textContent, "keyupcharCode=0, keyCode=17", "Event rep has expected text content for a keyboard event");
is(renderedComponent.textContent,
"KeyboardEvent { key: \"Control\", charCode: 0, keyCode: 17 }",
"Event rep has expected text content for a keyboard event");
}
function testMessageEvent() {
const renderedComponent = renderComponent(Event.rep, { object: getGripStub("testMessageEvent") });
is(renderedComponent.textContent, "messageorigin=null, data=test data", "Event rep has expected text content for a message event");
is(renderedComponent.textContent,
"MessageEvent { isTrusted: false, data: \"test data\", origin: \"null\", 7 more… }",
"Event rep has expected text content for a message event");
}
function getGripStub(name) {

View File

@ -32,6 +32,7 @@ window.onload = Task.async(function* () {
yield testMoreThanShortMaxProps();
yield testMoreThanLongMaxProps();
yield testRecursiveArray();
yield testPreviewLimit();
yield testNamedNodeMap();
} catch(e) {
@ -190,6 +191,34 @@ window.onload = Task.async(function* () {
testRepRenderModes(modeTests, testName, componentUnderTest, getGripStub(testName));
}
function testPreviewLimit() {
const testName = "testPreviewLimit";
const shortOutput = `Array[ 0, 1, 2, 8 more… ]`;
const defaultOutput = `Array[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1 more… ]`;
const modeTests = [
{
mode: undefined,
expectedOutput: shortOutput,
},
{
mode: "tiny",
expectedOutput: `[11]`,
},
{
mode: "short",
expectedOutput: shortOutput,
},
{
mode: "long",
expectedOutput: defaultOutput,
}
];
testRepRenderModes(modeTests, testName, componentUnderTest, getGripStub(testName));
}
function testNamedNodeMap() {
const testName = "testNamedNodeMap";
@ -311,6 +340,22 @@ window.onload = Task.async(function* () {
return longArrayGrip;
case "testPreviewLimit":
return {
"type": "object",
"class": "Array",
"actor": "server1.conn1.obj31",
"extensible": true,
"frozen": false,
"sealed": false,
"ownPropertyLength": 12,
"preview": {
"kind": "ArrayLike",
"length": 11,
"items": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
}
};
case "testRecursiveArray":
return {
"type": "object",

View File

@ -160,7 +160,7 @@ window.onload = Task.async(function* () {
strProp: "test string",
arrProp: [1]
};
const defaultOutput = `Object { strProp: "test string", objProp: Object { id: 1, arr: [ 2 ] }, arrProp: [ 1 ] }`;
const defaultOutput = `Object { strProp: "test string", objProp: Object, arrProp: [1] }`;
const modeTests = [
{

View File

@ -27,6 +27,7 @@ window.onload = Task.async(function* () {
yield testMultiline();
yield testMultilineOpen();
yield testMultilineLimit();
yield testOmitQuotes();
} catch(e) {
ok(false, "Got an error: " + DevToolsUtils.safeErrorString(e));
} finally {
@ -48,10 +49,18 @@ window.onload = Task.async(function* () {
is(renderedComponent.textContent, "\"aaaaaaaaaaaaaaaaaaaaa\nbbbbbbbbbbbbbbbbbbb\ncccccccccccccccc\n\"", "String rep has expected text content for multiline string when open");
}
function testOmitQuotes(){
const renderedComponent = renderComponent(StringRep.rep, { object: getGripStub("testOmitQuotes"), omitQuotes: true });
is(renderedComponent.textContent, "abc","String rep has expected to omit quotes");
}
function getGripStub(name) {
switch (name) {
case "testMultiline":
return "aaaaaaaaaaaaaaaaaaaaa\nbbbbbbbbbbbbbbbbbbb\ncccccccccccccccc\n";
return "aaaaaaaaaaaaaaaaaaaaa\nbbbbbbbbbbbbbbbbbbb\ncccccccccccccccc\n";
break;
case "testOmitQuotes":
return "abc";
}
}
});

View File

@ -1257,7 +1257,7 @@ function Scope(aView, aName, aFlags = {}) {
this.contextMenuId = aView.contextMenuId;
this.separatorStr = aView.separatorStr;
this._init(aName.trim(), aFlags);
this._init(aName, aFlags);
}
Scope.prototype = {
@ -1822,7 +1822,7 @@ Scope.prototype = {
let name = this._name = document.createElement("label");
name.className = "plain name";
name.setAttribute("value", aName);
name.setAttribute("value", aName.trim());
name.setAttribute("crop", "end");
let title = this._title = document.createElement("hbox");

View File

@ -37,7 +37,6 @@
/* TODO: bug 1265759: should apply to .devtools-searchinput once all searchbox
is converted to html*/
#inspector-searchbox {
flex: 1;
width: 100%;
}

View File

@ -462,7 +462,7 @@
.devtools-searchinput-clear {
position: absolute;
top: 3.5px;
right: 7px;
offset-inline-end: 7px;
padding: 0;
border: 0;
width: 16px;

View File

@ -567,125 +567,119 @@ exports.createEmptyNodeList = function(doc) {
* See BUG 664991: GCLI's keyboard handling should be updated to use DOM-L3
* https://bugzilla.mozilla.org/show_bug.cgi?id=664991
*/
if (typeof 'KeyEvent' === 'undefined') {
/* jshint -W040 */
exports.KeyEvent = this.KeyEvent;
}
else {
exports.KeyEvent = {
DOM_VK_CANCEL: 3,
DOM_VK_HELP: 6,
DOM_VK_BACK_SPACE: 8,
DOM_VK_TAB: 9,
DOM_VK_CLEAR: 12,
DOM_VK_RETURN: 13,
DOM_VK_SHIFT: 16,
DOM_VK_CONTROL: 17,
DOM_VK_ALT: 18,
DOM_VK_PAUSE: 19,
DOM_VK_CAPS_LOCK: 20,
DOM_VK_ESCAPE: 27,
DOM_VK_SPACE: 32,
DOM_VK_PAGE_UP: 33,
DOM_VK_PAGE_DOWN: 34,
DOM_VK_END: 35,
DOM_VK_HOME: 36,
DOM_VK_LEFT: 37,
DOM_VK_UP: 38,
DOM_VK_RIGHT: 39,
DOM_VK_DOWN: 40,
DOM_VK_PRINTSCREEN: 44,
DOM_VK_INSERT: 45,
DOM_VK_DELETE: 46,
DOM_VK_0: 48,
DOM_VK_1: 49,
DOM_VK_2: 50,
DOM_VK_3: 51,
DOM_VK_4: 52,
DOM_VK_5: 53,
DOM_VK_6: 54,
DOM_VK_7: 55,
DOM_VK_8: 56,
DOM_VK_9: 57,
DOM_VK_SEMICOLON: 59,
DOM_VK_EQUALS: 61,
DOM_VK_A: 65,
DOM_VK_B: 66,
DOM_VK_C: 67,
DOM_VK_D: 68,
DOM_VK_E: 69,
DOM_VK_F: 70,
DOM_VK_G: 71,
DOM_VK_H: 72,
DOM_VK_I: 73,
DOM_VK_J: 74,
DOM_VK_K: 75,
DOM_VK_L: 76,
DOM_VK_M: 77,
DOM_VK_N: 78,
DOM_VK_O: 79,
DOM_VK_P: 80,
DOM_VK_Q: 81,
DOM_VK_R: 82,
DOM_VK_S: 83,
DOM_VK_T: 84,
DOM_VK_U: 85,
DOM_VK_V: 86,
DOM_VK_W: 87,
DOM_VK_X: 88,
DOM_VK_Y: 89,
DOM_VK_Z: 90,
DOM_VK_CONTEXT_MENU: 93,
DOM_VK_NUMPAD0: 96,
DOM_VK_NUMPAD1: 97,
DOM_VK_NUMPAD2: 98,
DOM_VK_NUMPAD3: 99,
DOM_VK_NUMPAD4: 100,
DOM_VK_NUMPAD5: 101,
DOM_VK_NUMPAD6: 102,
DOM_VK_NUMPAD7: 103,
DOM_VK_NUMPAD8: 104,
DOM_VK_NUMPAD9: 105,
DOM_VK_MULTIPLY: 106,
DOM_VK_ADD: 107,
DOM_VK_SEPARATOR: 108,
DOM_VK_SUBTRACT: 109,
DOM_VK_DECIMAL: 110,
DOM_VK_DIVIDE: 111,
DOM_VK_F1: 112,
DOM_VK_F2: 113,
DOM_VK_F3: 114,
DOM_VK_F4: 115,
DOM_VK_F5: 116,
DOM_VK_F6: 117,
DOM_VK_F7: 118,
DOM_VK_F8: 119,
DOM_VK_F9: 120,
DOM_VK_F10: 121,
DOM_VK_F11: 122,
DOM_VK_F12: 123,
DOM_VK_F13: 124,
DOM_VK_F14: 125,
DOM_VK_F15: 126,
DOM_VK_F16: 127,
DOM_VK_F17: 128,
DOM_VK_F18: 129,
DOM_VK_F19: 130,
DOM_VK_F20: 131,
DOM_VK_F21: 132,
DOM_VK_F22: 133,
DOM_VK_F23: 134,
DOM_VK_F24: 135,
DOM_VK_NUM_LOCK: 144,
DOM_VK_SCROLL_LOCK: 145,
DOM_VK_COMMA: 188,
DOM_VK_PERIOD: 190,
DOM_VK_SLASH: 191,
DOM_VK_BACK_QUOTE: 192,
DOM_VK_OPEN_BRACKET: 219,
DOM_VK_BACK_SLASH: 220,
DOM_VK_CLOSE_BRACKET: 221,
DOM_VK_QUOTE: 222,
DOM_VK_META: 224
};
}
// Hand-rolled table of legacy DOM_VK_* key codes, mirroring the platform's
// KeyEvent constants for environments that do not provide them natively
// (see the BUG 664991 note above this export).
exports.KeyEvent = {
  DOM_VK_CANCEL: 3,
  DOM_VK_HELP: 6,
  DOM_VK_BACK_SPACE: 8,
  DOM_VK_TAB: 9,
  DOM_VK_CLEAR: 12,
  DOM_VK_RETURN: 13,
  DOM_VK_SHIFT: 16,
  DOM_VK_CONTROL: 17,
  DOM_VK_ALT: 18,
  DOM_VK_PAUSE: 19,
  DOM_VK_CAPS_LOCK: 20,
  DOM_VK_ESCAPE: 27,
  DOM_VK_SPACE: 32,
  DOM_VK_PAGE_UP: 33,
  DOM_VK_PAGE_DOWN: 34,
  DOM_VK_END: 35,
  DOM_VK_HOME: 36,
  DOM_VK_LEFT: 37,
  DOM_VK_UP: 38,
  DOM_VK_RIGHT: 39,
  DOM_VK_DOWN: 40,
  DOM_VK_PRINTSCREEN: 44,
  DOM_VK_INSERT: 45,
  DOM_VK_DELETE: 46,
  DOM_VK_0: 48,
  DOM_VK_1: 49,
  DOM_VK_2: 50,
  DOM_VK_3: 51,
  DOM_VK_4: 52,
  DOM_VK_5: 53,
  DOM_VK_6: 54,
  DOM_VK_7: 55,
  DOM_VK_8: 56,
  DOM_VK_9: 57,
  DOM_VK_SEMICOLON: 59,
  DOM_VK_EQUALS: 61,
  DOM_VK_A: 65,
  DOM_VK_B: 66,
  DOM_VK_C: 67,
  DOM_VK_D: 68,
  DOM_VK_E: 69,
  DOM_VK_F: 70,
  DOM_VK_G: 71,
  DOM_VK_H: 72,
  DOM_VK_I: 73,
  DOM_VK_J: 74,
  DOM_VK_K: 75,
  DOM_VK_L: 76,
  DOM_VK_M: 77,
  DOM_VK_N: 78,
  DOM_VK_O: 79,
  DOM_VK_P: 80,
  DOM_VK_Q: 81,
  DOM_VK_R: 82,
  DOM_VK_S: 83,
  DOM_VK_T: 84,
  DOM_VK_U: 85,
  DOM_VK_V: 86,
  DOM_VK_W: 87,
  DOM_VK_X: 88,
  DOM_VK_Y: 89,
  DOM_VK_Z: 90,
  DOM_VK_CONTEXT_MENU: 93,
  DOM_VK_NUMPAD0: 96,
  DOM_VK_NUMPAD1: 97,
  DOM_VK_NUMPAD2: 98,
  DOM_VK_NUMPAD3: 99,
  DOM_VK_NUMPAD4: 100,
  DOM_VK_NUMPAD5: 101,
  DOM_VK_NUMPAD6: 102,
  DOM_VK_NUMPAD7: 103,
  DOM_VK_NUMPAD8: 104,
  DOM_VK_NUMPAD9: 105,
  DOM_VK_MULTIPLY: 106,
  DOM_VK_ADD: 107,
  DOM_VK_SEPARATOR: 108,
  DOM_VK_SUBTRACT: 109,
  DOM_VK_DECIMAL: 110,
  DOM_VK_DIVIDE: 111,
  DOM_VK_F1: 112,
  DOM_VK_F2: 113,
  DOM_VK_F3: 114,
  DOM_VK_F4: 115,
  DOM_VK_F5: 116,
  DOM_VK_F6: 117,
  DOM_VK_F7: 118,
  DOM_VK_F8: 119,
  DOM_VK_F9: 120,
  DOM_VK_F10: 121,
  DOM_VK_F11: 122,
  DOM_VK_F12: 123,
  DOM_VK_F13: 124,
  DOM_VK_F14: 125,
  DOM_VK_F15: 126,
  DOM_VK_F16: 127,
  DOM_VK_F17: 128,
  DOM_VK_F18: 129,
  DOM_VK_F19: 130,
  DOM_VK_F20: 131,
  DOM_VK_F21: 132,
  DOM_VK_F22: 133,
  DOM_VK_F23: 134,
  DOM_VK_F24: 135,
  DOM_VK_NUM_LOCK: 144,
  DOM_VK_SCROLL_LOCK: 145,
  DOM_VK_COMMA: 188,
  DOM_VK_PERIOD: 190,
  DOM_VK_SLASH: 191,
  DOM_VK_BACK_QUOTE: 192,
  DOM_VK_OPEN_BRACKET: 219,
  DOM_VK_BACK_SLASH: 220,
  DOM_VK_CLOSE_BRACKET: 221,
  DOM_VK_QUOTE: 222,
  DOM_VK_META: 224
};

View File

@ -185,7 +185,7 @@ SessionStore.prototype = {
this._clearDisk();
// Clear all data about closed tabs
for (let [ssid, win] in Iterator(this._windows))
for (let [ssid, win] of Object.entries(this._windows))
win.closedTabs = [];
this._lastClosedTabIndex = -1;
@ -300,7 +300,7 @@ SessionStore.prototype = {
break;
case "last-pb-context-exited":
// Clear private closed tab data when we leave private browsing.
for (let [, window] in Iterator(this._windows)) {
for (let window of Object.values(this._windows)) {
window.closedTabs = window.closedTabs.filter(tab => !tab.isPrivate);
}
this._lastClosedTabIndex = -1;

View File

@ -217,9 +217,6 @@
}
.volumeStack,
.controlBar[firstshow="true"] .muteButton,
.controlBar[firstshow="true"] .scrubberStack,
.controlBar[firstshow="true"] .durationBox,
.timeLabel {
display: none;
}

View File

@ -14,24 +14,18 @@ class TestSafeBrowsingInitialDownload(FirefoxTestCase):
'platforms': ['linux', 'windows_nt', 'darwin'],
'files': [
# Phishing
"goog-badbinurl-shavar.cache",
"goog-badbinurl-shavar.pset",
"goog-badbinurl-shavar.sbstore",
"goog-malware-shavar.cache",
"goog-malware-shavar.pset",
"goog-malware-shavar.sbstore",
"goog-phish-shavar.cache",
"goog-phish-shavar.pset",
"goog-phish-shavar.sbstore",
"goog-unwanted-shavar.cache",
"goog-unwanted-shavar.pset",
"goog-unwanted-shavar.sbstore",
# Tracking Protections
"base-track-digest256.cache",
"base-track-digest256.pset",
"base-track-digest256.sbstore",
"mozstd-trackwhite-digest256.cache",
"mozstd-trackwhite-digest256.pset",
"mozstd-trackwhite-digest256.sbstore"
]
@ -39,7 +33,6 @@ class TestSafeBrowsingInitialDownload(FirefoxTestCase):
{
'platforms': ['windows_nt'],
'files': [
"goog-downloadwhite-digest256.cache",
"goog-downloadwhite-digest256.pset",
"goog-downloadwhite-digest256.sbstore"
]

View File

@ -304,11 +304,11 @@ Classifier::ApplyUpdates(nsTArray<TableUpdate*>* aUpdates)
LOG(("Applying %d table updates.", aUpdates->Length()));
for (uint32_t i = 0; i < aUpdates->Length(); i++) {
// Previous ApplyTableUpdates() may have consumed this update..
// Previous UpdateHashStore() may have consumed this update..
if ((*aUpdates)[i]) {
// Run all updates for one table
nsCString updateTable(aUpdates->ElementAt(i)->TableName());
rv = ApplyTableUpdates(aUpdates, updateTable);
rv = UpdateHashStore(aUpdates, updateTable);
if (NS_FAILED(rv)) {
if (rv != NS_ERROR_OUT_OF_MEMORY) {
Reset();
@ -344,6 +344,25 @@ Classifier::ApplyUpdates(nsTArray<TableUpdate*>* aUpdates)
return NS_OK;
}
// Feed every gethash result in aUpdates into the per-table completion
// caches, consuming (deleting) each update and emptying the array.
nsresult
Classifier::ApplyFullHashes(nsTArray<TableUpdate*>* aUpdates)
{
  LOG(("Applying %d table gethashes.", aUpdates->Length()));

  for (uint32_t idx = 0; idx < aUpdates->Length(); idx++) {
    TableUpdate* consumed = aUpdates->ElementAt(idx);

    nsresult rv = UpdateCache(consumed);
    NS_ENSURE_SUCCESS(rv, rv);

    // Null the slot before freeing, matching the "consumed update" protocol
    // used elsewhere (ApplyUpdates).
    aUpdates->ElementAt(idx) = nullptr;
    delete consumed;
  }

  aUpdates->Clear();
  return NS_OK;
}
nsresult
Classifier::MarkSpoiled(nsTArray<nsCString>& aTables)
{
@ -354,12 +373,20 @@ Classifier::MarkSpoiled(nsTArray<nsCString>& aTables)
// Remove any cached Completes for this table
LookupCache *cache = GetLookupCache(aTables[i]);
if (cache) {
cache->ClearCompleteCache();
cache->ClearCache();
}
}
return NS_OK;
}
// Return the table's last successful update time in milliseconds, or 0 if
// the table has no recorded freshness entry.
int64_t
Classifier::GetLastUpdateTime(const nsACString& aTableName)
{
  int64_t lastUpdateSec;
  if (!mTableFreshness.Get(aTableName, &lastUpdateSec)) {
    return 0;
  }
  // Freshness is stored in seconds; callers expect milliseconds.
  return lastUpdateSec * PR_MSEC_PER_SEC;
}
void
Classifier::SetLastUpdateTime(const nsACString &aTable,
uint64_t updateTime)
@ -550,24 +577,17 @@ Classifier::RecoverBackups()
return NS_OK;
}
/*
* This will consume+delete updates from the passed nsTArray.
*/
nsresult
Classifier::ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
const nsACString& aTable)
bool
Classifier::CheckValidUpdate(nsTArray<TableUpdate*>* aUpdates,
const nsACString& aTable)
{
LOG(("Classifier::ApplyTableUpdates(%s)", PromiseFlatCString(aTable).get()));
HashStore store(aTable, mStoreDirectory);
// take the quick exit if there is no valid update for us
// (common case)
uint32_t validupdates = 0;
for (uint32_t i = 0; i < aUpdates->Length(); i++) {
TableUpdate *update = aUpdates->ElementAt(i);
if (!update || !update->TableName().Equals(store.TableName()))
if (!update || !update->TableName().Equals(aTable))
continue;
if (update->Empty()) {
aUpdates->ElementAt(i) = nullptr;
@ -579,6 +599,24 @@ Classifier::ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
if (!validupdates) {
// This can happen if the update was only valid for one table.
return false;
}
return true;
}
/*
* This will consume+delete updates from the passed nsTArray.
*/
nsresult
Classifier::UpdateHashStore(nsTArray<TableUpdate*>* aUpdates,
const nsACString& aTable)
{
LOG(("Classifier::UpdateHashStore(%s)", PromiseFlatCString(aTable).get()));
HashStore store(aTable, mStoreDirectory);
if (!CheckValidUpdate(aUpdates, store.TableName())) {
return NS_OK;
}
@ -588,20 +626,22 @@ Classifier::ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
NS_ENSURE_SUCCESS(rv, rv);
// Read the part of the store that is (only) in the cache
LookupCache *prefixSet = GetLookupCache(store.TableName());
if (!prefixSet) {
LookupCache *lookupCache = GetLookupCache(store.TableName());
if (!lookupCache) {
return NS_ERROR_FAILURE;
}
// Clear cache when update
lookupCache->ClearCache();
FallibleTArray<uint32_t> AddPrefixHashes;
rv = prefixSet->GetPrefixes(AddPrefixHashes);
rv = lookupCache->GetPrefixes(AddPrefixHashes);
NS_ENSURE_SUCCESS(rv, rv);
rv = store.AugmentAdds(AddPrefixHashes);
NS_ENSURE_SUCCESS(rv, rv);
AddPrefixHashes.Clear();
uint32_t applied = 0;
bool updateFreshness = false;
bool hasCompletes = false;
for (uint32_t i = 0; i < aUpdates->Length(); i++) {
TableUpdate *update = aUpdates->ElementAt(i);
@ -623,17 +663,6 @@ Classifier::ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
LOG((" %d add expirations", update->AddExpirations().Length()));
LOG((" %d sub expirations", update->SubExpirations().Length()));
if (!update->IsLocalUpdate()) {
updateFreshness = true;
LOG(("Remote update, updating freshness"));
}
if (update->AddCompletes().Length() > 0
|| update->SubCompletes().Length() > 0) {
hasCompletes = true;
LOG(("Contains Completes, keeping cache."));
}
aUpdates->ElementAt(i) = nullptr;
delete update;
}
@ -643,11 +672,6 @@ Classifier::ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
rv = store.Rebuild();
NS_ENSURE_SUCCESS(rv, rv);
// Not an update with Completes, clear all completes data.
if (!hasCompletes) {
store.ClearCompletes();
}
LOG(("Table %s now has:", store.TableName().get()));
LOG((" %d add chunks", store.AddChunks().Length()));
LOG((" %d add prefixes", store.AddPrefixes().Length()));
@ -661,21 +685,41 @@ Classifier::ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
// At this point the store is updated and written out to disk, but
// the data is still in memory. Build our quick-lookup table here.
rv = prefixSet->Build(store.AddPrefixes(), store.AddCompletes());
rv = lookupCache->Build(store.AddPrefixes(), store.AddCompletes());
NS_ENSURE_SUCCESS(rv, rv);
#if defined(DEBUG)
prefixSet->Dump();
lookupCache->Dump();
#endif
rv = prefixSet->WriteFile();
rv = lookupCache->WriteFile();
NS_ENSURE_SUCCESS(rv, rv);
if (updateFreshness) {
int64_t now = (PR_Now() / PR_USEC_PER_SEC);
LOG(("Successfully updated %s", store.TableName().get()));
mTableFreshness.Put(store.TableName(), now);
int64_t now = (PR_Now() / PR_USEC_PER_SEC);
LOG(("Successfully updated %s", store.TableName().get()));
mTableFreshness.Put(store.TableName(), now);
return NS_OK;
}
// Push the completions carried by a single gethash update into the
// corresponding table's LookupCache. A null update (already consumed by an
// earlier pass) is a no-op.
nsresult
Classifier::UpdateCache(TableUpdate* aUpdate)
{
  if (!aUpdate) {
    return NS_OK;
  }

  nsAutoCString table(aUpdate->TableName());
  LOG(("Classifier::UpdateCache(%s)", table.get()));

  LookupCache* cache = GetLookupCache(table);
  NS_ENSURE_TRUE(cache, NS_ERROR_FAILURE);

  cache->AddCompletionsToCache(aUpdate->AddCompletes());

#if defined(DEBUG)
  cache->DumpCache();
#endif

  return NS_OK;
}

View File

@ -55,12 +55,19 @@ public:
* the updates in the array and clears it. Wacky!
*/
nsresult ApplyUpdates(nsTArray<TableUpdate*>* aUpdates);
/**
* Apply full hashes retrieved from gethash to the cache.
*/
nsresult ApplyFullHashes(nsTArray<TableUpdate*>* aUpdates);
/**
* Failed update. Spoil the entries so we don't block hosts
* unnecessarily
*/
nsresult MarkSpoiled(nsTArray<nsCString>& aTables);
void SetLastUpdateTime(const nsACString& aTableName, uint64_t updateTime);
int64_t GetLastUpdateTime(const nsACString& aTableName);
nsresult CacheCompletions(const CacheResultArray& aResults);
uint32_t GetHashKey(void) { return mHashKey; }
/*
@ -84,11 +91,16 @@ private:
nsresult RegenActiveTables();
nsresult ScanStoreDir(nsTArray<nsCString>& aTables);
nsresult ApplyTableUpdates(nsTArray<TableUpdate*>* aUpdates,
const nsACString& aTable);
nsresult UpdateHashStore(nsTArray<TableUpdate*>* aUpdates,
const nsACString& aTable);
nsresult UpdateCache(TableUpdate* aUpdates);
LookupCache *GetLookupCache(const nsACString& aTable);
bool CheckValidUpdate(nsTArray<TableUpdate*>* aUpdates,
const nsACString& aTable);
// Root dir of the Local profile.
nsCOMPtr<nsIFile> mCacheDirectory;
// Main directory where to store the databases.

View File

@ -165,6 +165,7 @@ HashStore::HashStore(const nsACString& aTableName, nsIFile* aStoreDir)
: mTableName(aTableName)
, mStoreDirectory(aStoreDir)
, mInUpdate(false)
, mFileSize(0)
{
}
@ -187,13 +188,18 @@ HashStore::Reset()
rv = storeFile->Remove(false);
NS_ENSURE_SUCCESS(rv, rv);
mFileSize = 0;
return NS_OK;
}
nsresult
HashStore::CheckChecksum(nsIFile* aStoreFile,
uint32_t aFileSize)
HashStore::CheckChecksum(uint32_t aFileSize)
{
if (!mInputStream) {
return NS_OK;
}
// Check for file corruption by
// comparing the stored checksum to actual checksum of data
nsAutoCString hash;
@ -255,11 +261,8 @@ HashStore::Open()
return NS_ERROR_FAILURE;
}
uint32_t fileSize32 = static_cast<uint32_t>(fileSize);
mInputStream = NS_BufferInputStream(origStream, fileSize32);
rv = CheckChecksum(storeFile, fileSize32);
SUCCESS_OR_RESET(rv);
mFileSize = static_cast<uint32_t>(fileSize);
mInputStream = NS_BufferInputStream(origStream, mFileSize);
rv = ReadHeader();
SUCCESS_OR_RESET(rv);
@ -267,9 +270,6 @@ HashStore::Open()
rv = SanityCheck();
SUCCESS_OR_RESET(rv);
rv = ReadChunkNumbers();
SUCCESS_OR_RESET(rv);
return NS_OK;
}
@ -363,7 +363,9 @@ HashStore::UpdateHeader()
nsresult
HashStore::ReadChunkNumbers()
{
NS_ENSURE_STATE(mInputStream);
if (!mInputStream || AlreadyReadChunkNumbers()) {
return NS_OK;
}
nsCOMPtr<nsISeekableStream> seekable = do_QueryInterface(mInputStream);
nsresult rv = seekable->Seek(nsISeekableStream::NS_SEEK_SET,
@ -403,6 +405,45 @@ HashStore::ReadHashes()
rv = ReadSubPrefixes();
NS_ENSURE_SUCCESS(rv, rv);
// If completions was read before, then we are done here.
if (AlreadyReadCompletions()) {
return NS_OK;
}
rv = ReadTArray(mInputStream, &mAddCompletes, mHeader.numAddCompletes);
NS_ENSURE_SUCCESS(rv, rv);
rv = ReadTArray(mInputStream, &mSubCompletes, mHeader.numSubCompletes);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
nsresult
HashStore::ReadCompletions()
{
if (!mInputStream || AlreadyReadCompletions()) {
return NS_OK;
}
nsCOMPtr<nsIFile> storeFile;
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(storeFile));
NS_ENSURE_SUCCESS(rv, rv);
rv = storeFile->AppendNative(mTableName + NS_LITERAL_CSTRING(STORE_SUFFIX));
NS_ENSURE_SUCCESS(rv, rv);
uint32_t offset = mFileSize -
sizeof(struct AddComplete) * mHeader.numAddCompletes -
sizeof(struct SubComplete) * mHeader.numSubCompletes -
nsCheckSummedOutputStream::CHECKSUM_SIZE;
nsCOMPtr<nsISeekableStream> seekable = do_QueryInterface(mInputStream);
rv = seekable->Seek(nsISeekableStream::NS_SEEK_SET, offset);
NS_ENSURE_SUCCESS(rv, rv);
rv = ReadTArray(mInputStream, &mAddCompletes, mHeader.numAddCompletes);
NS_ENSURE_SUCCESS(rv, rv);
@ -412,12 +453,28 @@ HashStore::ReadHashes()
return NS_OK;
}
// Get the store ready for an update: verify the on-disk checksum, then load
// chunk numbers and hash data into memory.
// NOTE(review): SUCCESS_OR_RESET presumably resets the store before
// propagating a failure (name suggests it; confirm the macro definition).
nsresult
HashStore::PrepareForUpdate()
{
  // Detect file corruption up front before trusting any of the contents.
  nsresult rv = CheckChecksum(mFileSize);
  SUCCESS_OR_RESET(rv);

  rv = ReadChunkNumbers();
  SUCCESS_OR_RESET(rv);

  rv = ReadHashes();
  SUCCESS_OR_RESET(rv);

  return NS_OK;
}
nsresult
HashStore::BeginUpdate()
{
// Read the rest of the store in memory.
nsresult rv = ReadHashes();
SUCCESS_OR_RESET(rv);
// Check whether the file is corrupted and read the rest of the store
// in memory.
nsresult rv = PrepareForUpdate();
NS_ENSURE_SUCCESS(rv, rv);
// Close input stream, won't be needed any more and
// we will rewrite ourselves.
@ -1066,5 +1123,61 @@ HashStore::AugmentAdds(const nsTArray<uint32_t>& aPrefixes)
return NS_OK;
}
// Lazy accessors: chunk numbers and completions are only pulled off disk on
// first use. ReadChunkNumbers()/ReadCompletions() are no-ops once the data
// has been read (see AlreadyReadChunkNumbers/AlreadyReadCompletions).
// NOTE(review): the nsresult of the lazy read is ignored here; on failure the
// member set is returned as-is (possibly empty).

ChunkSet&
HashStore::AddChunks()
{
  ReadChunkNumbers();

  return mAddChunks;
}

ChunkSet&
HashStore::SubChunks()
{
  ReadChunkNumbers();

  return mSubChunks;
}

AddCompleteArray&
HashStore::AddCompletes()
{
  ReadCompletions();

  return mAddCompletes;
}

SubCompleteArray&
HashStore::SubCompletes()
{
  ReadCompletions();

  return mSubCompletes;
}
// True once the chunk-number sets hold the data the header promises.
// If the header says a set is non-empty but the in-memory set is still
// empty, the chunk numbers have not been read from disk yet.
bool
HashStore::AlreadyReadChunkNumbers()
{
  bool addReady = (mHeader.numAddChunks == 0) || (mAddChunks.Length() != 0);
  bool subReady = (mHeader.numSubChunks == 0) || (mSubChunks.Length() != 0);
  return addReady && subReady;
}
// True once the completion arrays hold the data the header promises.
// If the header says an array is non-empty but the in-memory array is still
// empty, the completions have not been read from disk yet.
bool
HashStore::AlreadyReadCompletions()
{
  bool addReady = (mHeader.numAddCompletes == 0) || (mAddCompletes.Length() != 0);
  bool subReady = (mHeader.numSubCompletes == 0) || (mSubCompletes.Length() != 0);
  return addReady && subReady;
}
} // namespace safebrowsing
} // namespace mozilla

View File

@ -23,7 +23,7 @@ namespace safebrowsing {
class TableUpdate {
public:
explicit TableUpdate(const nsACString& aTable)
: mTable(aTable), mLocalUpdate(false) {}
: mTable(aTable) {}
const nsCString& TableName() const { return mTable; }
bool Empty() const {
@ -60,8 +60,6 @@ public:
MOZ_MUST_USE nsresult NewSubComplete(uint32_t aAddChunk,
const Completion& aCompletion,
uint32_t aSubChunk);
void SetLocalUpdate(void) { mLocalUpdate = true; }
bool IsLocalUpdate(void) { return mLocalUpdate; }
ChunkSet& AddChunks() { return mAddChunks; }
ChunkSet& SubChunks() { return mSubChunks; }
@ -78,8 +76,6 @@ public:
private:
nsCString mTable;
// Update not from the remote server (no freshness)
bool mLocalUpdate;
// The list of chunk numbers that we have for each of the type of chunks.
ChunkSet mAddChunks;
@ -112,12 +108,12 @@ public:
// prefixes+chunknumbers dataset.
nsresult AugmentAdds(const nsTArray<uint32_t>& aPrefixes);
ChunkSet& AddChunks() { return mAddChunks; }
ChunkSet& SubChunks() { return mSubChunks; }
ChunkSet& AddChunks();
ChunkSet& SubChunks();
AddPrefixArray& AddPrefixes() { return mAddPrefixes; }
AddCompleteArray& AddCompletes() { return mAddCompletes; }
SubPrefixArray& SubPrefixes() { return mSubPrefixes; }
SubCompleteArray& SubCompletes() { return mSubCompletes; }
AddCompleteArray& AddCompletes();
SubCompleteArray& SubCompletes();
// =======
// Updates
@ -149,9 +145,10 @@ private:
nsresult SanityCheck();
nsresult CalculateChecksum(nsAutoCString& aChecksum, uint32_t aFileSize,
bool aChecksumPresent);
nsresult CheckChecksum(nsIFile* aStoreFile, uint32_t aFileSize);
nsresult CheckChecksum(uint32_t aFileSize);
void UpdateHeader();
nsresult ReadCompletions();
nsresult ReadChunkNumbers();
nsresult ReadHashes();
@ -163,6 +160,11 @@ private:
nsresult ProcessSubs();
nsresult PrepareForUpdate();
bool AlreadyReadChunkNumbers();
bool AlreadyReadCompletions();
// This is used for checking that the database is correct and for figuring out
// the number of chunks, etc. to read from disk on restart.
struct Header {
@ -202,6 +204,8 @@ private:
// updates from the completion server and updates from the regular server.
AddCompleteArray mAddCompletes;
SubCompleteArray mSubCompletes;
uint32_t mFileSize;
};
} // namespace safebrowsing

View File

@ -6,7 +6,6 @@
#include "LookupCache.h"
#include "HashStore.h"
#include "nsISeekableStream.h"
#include "nsISafeOutputStream.h"
#include "mozilla/Telemetry.h"
#include "mozilla/Logging.h"
#include "nsNetUtil.h"
@ -17,19 +16,17 @@
// The latter solely exists to store the data needed to handle
// the updates from the protocol.
// This module has its own store, which stores the Completions,
// mostly caching lookups that have happened over the net.
// The prefixes are cached/checked by looking them up in the
// PrefixSet.
// This module provides a front for PrefixSet, mUpdateCompletions,
// and mGetHashCache, which together contain everything needed to
// provide a classification as long as the data is up to date.
// Data format for the ".cache" files:
// uint32_t magic Identify the file type
// uint32_t version Version identifier for file format
// uint32_t numCompletions Amount of completions stored
// 0...numCompletions 256-bit Completions
// Name of the lookupcomplete cache
#define CACHE_SUFFIX ".cache"
// PrefixSet stores and provides lookups for 4-byte prefixes.
// mUpdateCompletions contains 32-byte completions which were
// contained in updates. They are retrieved from HashStore/.sbtore
// on startup.
// mGetHashCache contains 32-byte completions which were
// returned from the gethash server. They are not serialized,
// only cached until the next update.
// Name of the persistent PrefixSet storage
#define PREFIXSET_SUFFIX ".pset"
@ -42,9 +39,6 @@ extern mozilla::LazyLogModule gUrlClassifierDbServiceLog;
namespace mozilla {
namespace safebrowsing {
const uint32_t LOOKUPCACHE_MAGIC = 0x1231af3e;
const uint32_t CURRENT_VERSION = 2;
LookupCache::LookupCache(const nsACString& aTableName, nsIFile* aStoreDir)
: mPrimed(false)
, mTableName(aTableName)
@ -69,40 +63,10 @@ LookupCache::~LookupCache()
nsresult
LookupCache::Open()
{
nsCOMPtr<nsIFile> storeFile;
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(storeFile));
LOG(("Reading Completions"));
nsresult rv = ReadCompletions();
NS_ENSURE_SUCCESS(rv, rv);
rv = storeFile->AppendNative(mTableName + NS_LITERAL_CSTRING(CACHE_SUFFIX));
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIInputStream> inputStream;
rv = NS_NewLocalFileInputStream(getter_AddRefs(inputStream), storeFile,
PR_RDONLY | nsIFile::OS_READAHEAD);
if (NS_FAILED(rv) && rv != NS_ERROR_FILE_NOT_FOUND) {
Reset();
return rv;
}
if (rv == NS_ERROR_FILE_NOT_FOUND) {
// Simply lacking a .cache file is a recoverable error,
// as unlike the .pset/.sbstore files it is a pure cache.
// Just create a new empty one.
ClearCompleteCache();
} else {
// Read in the .cache file
rv = ReadHeader(inputStream);
NS_ENSURE_SUCCESS(rv, rv);
LOG(("ReadCompletions"));
rv = ReadCompletions(inputStream);
NS_ENSURE_SUCCESS(rv, rv);
rv = inputStream->Close();
NS_ENSURE_SUCCESS(rv, rv);
}
LOG(("Loading PrefixSet"));
rv = LoadPrefixSet();
NS_ENSURE_SUCCESS(rv, rv);
@ -121,20 +85,13 @@ LookupCache::Reset()
{
LOG(("LookupCache resetting"));
nsCOMPtr<nsIFile> storeFile;
nsCOMPtr<nsIFile> prefixsetFile;
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(storeFile));
NS_ENSURE_SUCCESS(rv, rv);
rv = mStoreDirectory->Clone(getter_AddRefs(prefixsetFile));
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(prefixsetFile));
NS_ENSURE_SUCCESS(rv, rv);
rv = storeFile->AppendNative(mTableName + NS_LITERAL_CSTRING(CACHE_SUFFIX));
NS_ENSURE_SUCCESS(rv, rv);
rv = prefixsetFile->AppendNative(mTableName + NS_LITERAL_CSTRING(PREFIXSET_SUFFIX));
NS_ENSURE_SUCCESS(rv, rv);
rv = storeFile->Remove(false);
NS_ENSURE_SUCCESS(rv, rv);
rv = prefixsetFile->Remove(false);
NS_ENSURE_SUCCESS(rv, rv);
@ -151,13 +108,13 @@ LookupCache::Build(AddPrefixArray& aAddPrefixes,
Telemetry::Accumulate(Telemetry::URLCLASSIFIER_LC_COMPLETIONS,
static_cast<uint32_t>(aAddCompletes.Length()));
mCompletions.Clear();
mCompletions.SetCapacity(aAddCompletes.Length());
mUpdateCompletions.Clear();
mUpdateCompletions.SetCapacity(aAddCompletes.Length());
for (uint32_t i = 0; i < aAddCompletes.Length(); i++) {
mCompletions.AppendElement(aAddCompletes[i].CompleteHash());
mUpdateCompletions.AppendElement(aAddCompletes[i].CompleteHash());
}
aAddCompletes.Clear();
mCompletions.Sort();
mUpdateCompletions.Sort();
Telemetry::Accumulate(Telemetry::URLCLASSIFIER_LC_PREFIXES,
static_cast<uint32_t>(aAddPrefixes.Length()));
@ -169,17 +126,43 @@ LookupCache::Build(AddPrefixArray& aAddPrefixes,
return NS_OK;
}
// Merge gethash completions into mGetHashCache, skipping duplicates, and
// keep the cache sorted for later BinaryIndexOf lookups (see Has()).
//
// Fix: the duplicate check previously used BinaryIndexOf, but elements are
// appended un-sorted inside this loop, so binary search could run on an
// unsorted array and miss duplicates added within the same batch. Use a
// linear Contains() check instead; the array is re-sorted once at the end.
nsresult
LookupCache::AddCompletionsToCache(AddCompleteArray& aAddCompletes)
{
  for (uint32_t i = 0; i < aAddCompletes.Length(); i++) {
    if (!mGetHashCache.Contains(aAddCompletes[i].CompleteHash())) {
      mGetHashCache.AppendElement(aAddCompletes[i].CompleteHash());
    }
  }
  mGetHashCache.Sort();

  return NS_OK;
}
#if defined(DEBUG)
void
LookupCache::DumpCache()
{
if (!LOG_ENABLED())
return;
for (uint32_t i = 0; i < mGetHashCache.Length(); i++) {
nsAutoCString str;
mGetHashCache[i].ToHexString(str);
LOG(("Caches: %s", str.get()));
}
}
// Debug helper: log every update-provided completion as hex.
//
// Fix: this span contained two interleaved versions of the loop (merge
// residue) — two `for` headers, statements over both the removed
// mCompletions member and its replacement mUpdateCompletions, and
// unbalanced braces. Reconstructed as the post-rename version that
// iterates mUpdateCompletions only.
void
LookupCache::Dump()
{
  if (!LOG_ENABLED())
    return;

  for (uint32_t i = 0; i < mUpdateCompletions.Length(); i++) {
    nsAutoCString str;
    mUpdateCompletions[i].ToHexString(str);
    LOG(("Update: %s", str.get()));
  }
}
#endif
@ -202,7 +185,9 @@ LookupCache::Has(const Completion& aCompletion,
*aHas = true;
}
if (mCompletions.BinaryIndexOf(aCompletion) != nsTArray<Completion>::NoIndex) {
// TODO: We may need to distinguish completions found in cache or update in the future
if ((mGetHashCache.BinaryIndexOf(aCompletion) != nsTArray<Completion>::NoIndex) ||
(mUpdateCompletions.BinaryIndexOf(aCompletion) != nsTArray<Completion>::NoIndex)) {
LOG(("Complete in %s", mTableName.get()));
*aComplete = true;
*aHas = true;
@ -214,36 +199,8 @@ LookupCache::Has(const Completion& aCompletion,
nsresult
LookupCache::WriteFile()
{
nsCOMPtr<nsIFile> storeFile;
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(storeFile));
NS_ENSURE_SUCCESS(rv, rv);
rv = storeFile->AppendNative(mTableName + NS_LITERAL_CSTRING(CACHE_SUFFIX));
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIOutputStream> out;
rv = NS_NewSafeLocalFileOutputStream(getter_AddRefs(out), storeFile,
PR_WRONLY | PR_TRUNCATE | PR_CREATE_FILE);
NS_ENSURE_SUCCESS(rv, rv);
UpdateHeader();
LOG(("Writing %d completions", mHeader.numCompletions));
uint32_t written;
rv = out->Write(reinterpret_cast<char*>(&mHeader), sizeof(mHeader), &written);
NS_ENSURE_SUCCESS(rv, rv);
rv = WriteTArray(out, mCompletions);
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsISafeOutputStream> safeOut = do_QueryInterface(out);
rv = safeOut->Finish();
NS_ENSURE_SUCCESS(rv, rv);
rv = EnsureSizeConsistent();
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIFile> psFile;
rv = mStoreDirectory->Clone(getter_AddRefs(psFile));
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(psFile));
NS_ENSURE_SUCCESS(rv, rv);
rv = psFile->AppendNative(mTableName + NS_LITERAL_CSTRING(PREFIXSET_SUFFIX));
@ -258,102 +215,39 @@ LookupCache::WriteFile()
void
LookupCache::ClearAll()
{
ClearCompleteCache();
ClearCache();
ClearUpdatedCompletions();
mPrefixSet->SetPrefixes(nullptr, 0);
mPrimed = false;
}
void
LookupCache::ClearCompleteCache()
LookupCache::ClearUpdatedCompletions()
{
mCompletions.Clear();
UpdateHeader();
mUpdateCompletions.Clear();
}
void
LookupCache::UpdateHeader()
LookupCache::ClearCache()
{
mHeader.magic = LOOKUPCACHE_MAGIC;
mHeader.version = CURRENT_VERSION;
mHeader.numCompletions = mCompletions.Length();
mGetHashCache.Clear();
}
nsresult
LookupCache::EnsureSizeConsistent()
LookupCache::ReadCompletions()
{
nsCOMPtr<nsIFile> storeFile;
nsresult rv = mStoreDirectory->Clone(getter_AddRefs(storeFile));
NS_ENSURE_SUCCESS(rv, rv);
rv = storeFile->AppendNative(mTableName + NS_LITERAL_CSTRING(CACHE_SUFFIX));
HashStore store(mTableName, mStoreDirectory);
nsresult rv = store.Open();
NS_ENSURE_SUCCESS(rv, rv);
int64_t fileSize;
rv = storeFile->GetFileSize(&fileSize);
NS_ENSURE_SUCCESS(rv, rv);
mUpdateCompletions.Clear();
if (fileSize < 0) {
return NS_ERROR_FAILURE;
const AddCompleteArray& addComplete = store.AddCompletes();
for (uint32_t i = 0; i < addComplete.Length(); i++) {
mUpdateCompletions.AppendElement(addComplete[i].complete);
}
int64_t expectedSize = sizeof(mHeader)
+ mHeader.numCompletions*sizeof(Completion);
if (expectedSize != fileSize) {
NS_WARNING("File length does not match. Probably corrupted.");
Reset();
return NS_ERROR_FILE_CORRUPTED;
}
return NS_OK;
}
nsresult
LookupCache::ReadHeader(nsIInputStream* aInputStream)
{
if (!aInputStream) {
ClearCompleteCache();
return NS_OK;
}
nsCOMPtr<nsISeekableStream> seekable = do_QueryInterface(aInputStream);
nsresult rv = seekable->Seek(nsISeekableStream::NS_SEEK_SET, 0);
NS_ENSURE_SUCCESS(rv, rv);
void *buffer = &mHeader;
rv = NS_ReadInputStreamToBuffer(aInputStream,
&buffer,
sizeof(Header));
NS_ENSURE_SUCCESS(rv, rv);
if (mHeader.magic != LOOKUPCACHE_MAGIC || mHeader.version != CURRENT_VERSION) {
NS_WARNING("Unexpected header data in the store.");
Reset();
return NS_ERROR_FILE_CORRUPTED;
}
LOG(("%d completions present", mHeader.numCompletions));
rv = EnsureSizeConsistent();
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
nsresult
LookupCache::ReadCompletions(nsIInputStream* aInputStream)
{
if (!mHeader.numCompletions) {
mCompletions.Clear();
return NS_OK;
}
nsCOMPtr<nsISeekableStream> seekable = do_QueryInterface(aInputStream);
nsresult rv = seekable->Seek(nsISeekableStream::NS_SEEK_SET, sizeof(Header));
NS_ENSURE_SUCCESS(rv, rv);
rv = ReadTArray(aInputStream, &mCompletions, mHeader.numCompletions);
NS_ENSURE_SUCCESS(rv, rv);
LOG(("Read %d completions", mCompletions.Length()));
return NS_OK;
}

View File

@ -108,10 +108,13 @@ public:
// This will Clear() the passed arrays when done.
nsresult Build(AddPrefixArray& aAddPrefixes,
AddCompleteArray& aAddCompletes);
nsresult AddCompletionsToCache(AddCompleteArray& aAddCompletes);
nsresult GetPrefixes(FallibleTArray<uint32_t>& aAddPrefixes);
void ClearCompleteCache();
void ClearUpdatedCompletions();
void ClearCache();
#if DEBUG
void DumpCache();
void Dump();
#endif
nsresult WriteFile();
@ -122,28 +125,22 @@ public:
private:
void ClearAll();
nsresult Reset();
void UpdateHeader();
nsresult ReadHeader(nsIInputStream* aInputStream);
nsresult ReadCompletions(nsIInputStream* aInputStream);
nsresult EnsureSizeConsistent();
nsresult ReadCompletions();
nsresult LoadPrefixSet();
nsresult LoadCompletions();
// Construct a Prefix Set with known prefixes.
// This will Clear() aAddPrefixes when done.
nsresult ConstructPrefixSet(AddPrefixArray& aAddPrefixes);
struct Header {
uint32_t magic;
uint32_t version;
uint32_t numCompletions;
};
Header mHeader;
bool mPrimed;
nsCString mTableName;
nsCOMPtr<nsIFile> mStoreDirectory;
CompletionArray mCompletions;
// Set of prefixes known to be in the database
RefPtr<nsUrlClassifierPrefixSet> mPrefixSet;
// Full length hashes obtained in update request
CompletionArray mUpdateCompletions;
// Full length hashes obtained in gethash request
CompletionArray mGetHashCache;
};
} // namespace safebrowsing

View File

@ -191,6 +191,13 @@ interface nsIUrlClassifierDBService : nsISupports
* database, emptying all tables. Mostly intended for use in unit tests.
*/
void resetDatabase();
/**
* Reload the url-classifier database. This empties all cached gethash
* completions and reloads the data from the store. Mostly intended
* for use in tests.
*/
void reloadDatabase();
};
/**

View File

@ -627,6 +627,38 @@ nsUrlClassifierDBServiceWorker::ResetDatabase()
return NS_OK;
}
// Close and re-open the classifier (reloading prefix sets and completions
// from disk), preserving each active table's last-update time across the
// reload.
NS_IMETHODIMP
nsUrlClassifierDBServiceWorker::ReloadDatabase()
{
  nsTArray<nsCString> tables;
  nsTArray<int64_t> lastUpdateTimes;
  nsresult rv = mClassifier->ActiveTables(tables);
  NS_ENSURE_SUCCESS(rv, rv);
  // We need to make sure lastupdatetime is set after reloading the database,
  // otherwise requests will be skipped if the table is not confirmed.
  for (uint32_t table = 0; table < tables.Length(); table++) {
    lastUpdateTimes.AppendElement(mClassifier->GetLastUpdateTime(tables[table]));
  }
  // This will null out mClassifier.
  rv = CloseDb();
  NS_ENSURE_SUCCESS(rv, rv);
  // Create a new mClassifier and load prefixset and completions from disk.
  rv = OpenDb();
  NS_ENSURE_SUCCESS(rv, rv);
  // Restore the saved last-update times; 0 means "never updated", skip it.
  for (uint32_t table = 0; table < tables.Length(); table++) {
    int64_t time = lastUpdateTimes[table];
    if (time) {
      mClassifier->SetLastUpdateTime(tables[table], lastUpdateTimes[table]);
    }
  }
  return NS_OK;
}
NS_IMETHODIMP
nsUrlClassifierDBServiceWorker::CancelUpdate()
{
@ -720,7 +752,6 @@ nsUrlClassifierDBServiceWorker::CacheCompletions(CacheResultArray *results)
if (NS_FAILED(rv)) {
return rv;
}
tu->SetLocalUpdate();
updates.AppendElement(tu);
pParse->ForgetTableUpdates();
} else {
@ -728,7 +759,7 @@ nsUrlClassifierDBServiceWorker::CacheCompletions(CacheResultArray *results)
}
}
mClassifier->ApplyUpdates(&updates);
mClassifier->ApplyFullHashes(&updates);
mLastResults = *resultsPtr;
return NS_OK;
}
@ -1590,6 +1621,14 @@ nsUrlClassifierDBService::ResetDatabase()
return mWorkerProxy->ResetDatabase();
}
// Main-thread entry point: forwards the reload request to the worker proxy,
// failing if the background thread has not been initialized.
NS_IMETHODIMP
nsUrlClassifierDBService::ReloadDatabase()
{
  NS_ENSURE_TRUE(gDbBackgroundThread, NS_ERROR_NOT_INITIALIZED);
  return mWorkerProxy->ReloadDatabase();
}
nsresult
nsUrlClassifierDBService::CacheCompletions(CacheResultArray *results)
{

View File

@ -170,6 +170,15 @@ UrlClassifierDBServiceWorkerProxy::ResetDatabase()
return DispatchToWorkerThread(r);
}
// Dispatch ReloadDatabase to the worker thread as an async runnable; the
// actual reload happens on the worker (see
// nsUrlClassifierDBServiceWorker::ReloadDatabase).
NS_IMETHODIMP
UrlClassifierDBServiceWorkerProxy::ReloadDatabase()
{
  nsCOMPtr<nsIRunnable> r =
    NewRunnableMethod(mTarget,
                      &nsUrlClassifierDBServiceWorker::ReloadDatabase);
  return DispatchToWorkerThread(r);
}
nsresult
UrlClassifierDBServiceWorkerProxy::OpenDb()
{

View File

@ -3,6 +3,9 @@
const { classes: Cc, interfaces: Ci, results: Cr } = Components;
var dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
.getService(Ci.nsIUrlClassifierDBService);
function setTimeout(callback, delay) {
let timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
timer.initWithCallback({ notify: callback },
@ -11,8 +14,6 @@ function setTimeout(callback, delay) {
}
function doUpdate(update) {
const { classes: Cc, interfaces: Ci, results: Cr } = Components;
let listener = {
QueryInterface: function(iid)
{
@ -48,6 +49,63 @@ function doUpdate(update) {
}
}
// Ask the url-classifier service to reload its database, then notify the
// mochitest page that the reload call has returned.
function doReload() {
  dbService.reloadDatabase();
  sendAsyncMessage("reloadSuccess");
}
// SafeBrowsing.jsm is initialized after the mozEntries are added. Register an
// observer to receive the "finished" event. For the case where this function
// is called after that event has already fired, also look up one known entry
// to see whether it is already present in the database.
function waitForInit() {
  const observerService = Cc["@mozilla.org/observer-service;1"]
                            .getService(Ci.nsIObserverService);
  observerService.addObserver(
    () => sendAsyncMessage("safeBrowsingInited"),
    "mozentries-update-finished", false);

  // This url must stay in sync with the table/url pair added by
  // SafeBrowsing.jsm addMozEntries.
  const table = "test-phish-simple";
  const url = "http://itisatrap.org/firefox/its-a-trap.html";

  const secMan = Cc["@mozilla.org/scriptsecuritymanager;1"]
                   .getService(Ci.nsIScriptSecurityManager);
  const iosvc = Cc["@mozilla.org/network/io-service;1"]
                  .getService(Ci.nsIIOService);
  const principal = secMan.createCodebasePrincipal(
    iosvc.newURI(url, null, null), {});

  dbService.lookup(principal, table, {
    QueryInterface(iid) {
      if (iid.equals(Ci.nsISupports) ||
          iid.equals(Ci.nsIUrlClassifierUpdateObserver)) {
        return this;
      }
      throw Cr.NS_ERROR_NO_INTERFACE;
    },
    handleEvent(value) {
      if (value === table) {
        sendAsyncMessage("safeBrowsingInited");
      }
    },
  });
}
// Wire up the entry points the mochitest page drives via the message
// manager. "doReload" and "waitForInit" carry no payload, so their handlers
// are registered directly; the extra message argument is ignored.
addMessageListener("doUpdate", msg => {
  doUpdate(msg.testUpdate);
});
addMessageListener("doReload", doReload);
addMessageListener("waitForInit", waitForInit);

View File

@ -23,6 +23,17 @@ classifierHelper._updates = [];
// removed after test complete.
classifierHelper._updatesToCleanup = [];
classifierHelper._initsCB = [];
// Returns a Promise that is resolved once SafeBrowsing.jsm has finished its
// initialization (signalled back by the chrome script).
classifierHelper.waitForInit = function() {
  return new Promise(resolve => {
    classifierHelper._initsCB.push(resolve);
    gScript.sendAsyncMessage("waitForInit");
  });
};
// This function is used to allow completion for specific "list",
// some lists like "test-malware-simple" is default disabled to ask for complete.
// "list" is the db we would like to allow it
@ -115,6 +126,17 @@ classifierHelper.resetDB = function() {
});
};
// Simulates a restart of the url-classifier: asks the chrome script to close
// and re-open the database, resolving once the reload has completed.
classifierHelper.reloadDatabase = function() {
  return new Promise(resolve => {
    const onReloaded = () => {
      gScript.removeMessageListener("reloadSuccess", onReloaded);
      resolve();
    };
    gScript.addMessageListener("reloadSuccess", onReloaded);
    gScript.sendAsyncMessage("doReload");
  });
};
classifierHelper._update = function(testUpdate, onsuccess, onerror) {
// Queue the task if there is still an on-going update
classifierHelper._updates.push({"data": testUpdate,
@ -147,9 +169,17 @@ classifierHelper._updateError = function(errorCode) {
}
};
// Resolve every promise handed out by waitForInit(), then drop the stored
// callbacks so they cannot fire a second time.
classifierHelper._inited = function() {
  for (const cb of classifierHelper._initsCB) {
    cb();
  }
  classifierHelper._initsCB = [];
};
classifierHelper._setup = function() {
gScript.addMessageListener("updateSuccess", classifierHelper._updateSuccess);
gScript.addMessageListener("updateError", classifierHelper._updateError);
gScript.addMessageListener("safeBrowsingInited", classifierHelper._inited);
// cleanup will be called at end of each testcase to remove all the urls added to database.
SimpleTest.registerCleanupFunction(classifierHelper._cleanup);

View File

@ -11,6 +11,8 @@ function handleRequest(request, response)
query[val.slice(0, idx)] = unescape(val.slice(idx + 1));
});
var responseBody;
// Store fullhash in the server side.
if ("list" in query && "fullhash" in query) {
// In the server side we will store:
@ -31,18 +33,27 @@ function handleRequest(request, response)
}
return;
// gethash count return how many gethash request received.
// This is used by client to know if a gethash request is triggered by gecko
} else if ("gethashcount" == request.queryString) {
var counter = getState("counter");
responseBody = counter == "" ? "0" : counter;
} else {
var body = new BinaryInputStream(request.bodyInputStream);
var avail;
var bytes = [];
while ((avail = body.available()) > 0) {
Array.prototype.push.apply(bytes, body.readByteArray(avail));
}
var counter = getState("counter");
counter = counter == "" ? "1" : (parseInt(counter) + 1).toString();
setState("counter", counter);
responseBody = parseV2Request(bytes);
}
var body = new BinaryInputStream(request.bodyInputStream);
var avail;
var bytes = [];
while ((avail = body.available()) > 0) {
Array.prototype.push.apply(bytes, body.readByteArray(avail));
}
var responseBody = parseV2Request(bytes);
response.setHeader("Content-Type", "text/plain", false);
response.write(responseBody);

View File

@ -33,3 +33,4 @@ skip-if = (os == 'linux' && debug) #Bug 1199778
[test_classify_ping.html]
[test_classify_track.html]
[test_gethash.html]
[test_bug1254766.html]

View File

@ -0,0 +1,299 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Bug 1272239 - Test gethash.</title>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="classifierHelper.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<p id="display"></p>
<div id="content" style="display: none">
</div>
<pre id="test">
<script class="testbody" type="text/javascript">
// Tables and hosts used by the test updates.
const MALWARE_LIST = "test-malware-simple";
const MALWARE_HOST1 = "malware.example.com/";
const MALWARE_HOST2 = "test1.example.com/";

const UNWANTED_LIST = "test-unwanted-simple";
const UNWANTED_HOST1 = "unwanted.example.com/";
const UNWANTED_HOST2 = "test2.example.com/";

// Hosts that are added to the database but never loaded by any test frame
// (used to trigger updates without touching the hosts under test).
const UNUSED_MALWARE_HOST = "unused.malware.com/";
const UNUSED_UNWANTED_HOST = "unused.unwanted.com/";

const GETHASH_URL =
  "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/gethash.sjs";

// Server-side gethash request counts sampled before/after each frame load;
// comparing them tells whether a load triggered a gethash request.
var gPreGethashCounter = 0;
var gCurGethashCounter = 0;

// NOTE(review): not referenced anywhere in this file; presumably read by
// gethashFrame.html through its parent window — confirm before removing.
var expectLoad = false;
// Load gethashFrame.html in a temporary iframe; once it has finished
// loading, remove the iframe and refresh the gethash counters.
function loadTestFrame() {
  return new Promise(resolve => {
    const iframe = document.createElement("iframe");
    iframe.onload = () => {
      document.body.removeChild(iframe);
      resolve();
    };
    iframe.setAttribute("src", "gethashFrame.html");
    document.body.appendChild(iframe);
  }).then(getGethashCounter);
}
// Fetch the server-side count of gethash requests received so far, rolling
// the previous sample into gPreGethashCounter so callers can compare the
// two after a frame load.
function getGethashCounter() {
  return new Promise(resolve => {
    const xhr = new XMLHttpRequest();
    xhr.open("PUT", GETHASH_URL + "?gethashcount");
    xhr.setRequestHeader("Content-Type", "text/plain");
    xhr.onreadystatechange = function() {
      if (this.readyState == this.DONE) {
        gPreGethashCounter = gCurGethashCounter;
        // Always pass an explicit radix: the server replies with a decimal
        // count (original call omitted it).
        gCurGethashCounter = parseInt(xhr.response, 10);
        resolve();
      }
    };
    xhr.send();
  });
}
// Calculate the fullhash of `url` and upload it to the gethash server so the
// server can answer completion requests for it.
function addCompletionToServer(list, url) {
  return new Promise(resolve => {
    const query = "list=" + list + "&fullhash=" + hash(url);
    const xhr = new XMLHttpRequest();
    xhr.open("PUT", GETHASH_URL + "?" + query, true);
    xhr.setRequestHeader("Content-Type", "text/plain");
    xhr.onreadystatechange = function() {
      if (this.readyState == this.DONE) {
        resolve();
      }
    };
    xhr.send();
  });
}
// Compute the base64-encoded SHA-256 digest of the UTF-8 bytes of `str`.
function hash(str) {
  const converter =
    SpecialPowers.Cc["@mozilla.org/intl/scriptableunicodeconverter"]
                 .createInstance(SpecialPowers.Ci.nsIScriptableUnicodeConverter);
  converter.charset = "UTF-8";
  const data = converter.convertToByteArray(str);

  const hasher = SpecialPowers.Cc["@mozilla.org/security/hash;1"]
                              .createInstance(SpecialPowers.Ci.nsICryptoHash);
  hasher.init(hasher.SHA256);
  hasher.update(data, data.length);
  // true -> base64-encode the digest.
  return hasher.finish(true);
}
// Allow the classifier to send gethash (completion) requests for the test
// lists, and store the fullhash of every test host on the gethash server so
// it can answer those requests.
function setup() {
  classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], GETHASH_URL);

  return Promise.all([
    addCompletionToServer(MALWARE_LIST, MALWARE_HOST1),
    addCompletionToServer(MALWARE_LIST, MALWARE_HOST2),
    addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST1),
    addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST2),
  ]);
}
// Reset the database; the helper's reset tries to simulate the behavior of
// restarting firefox.
function reset() {
  return classifierHelper.resetDB()
    .catch(err => {
      // The original handler referenced an undefined `errorCode` here, which
      // raised a ReferenceError inside the catch; report the real rejection.
      ok(false, "Couldn't reset classifier. Error: " + err);
      // Abort test.
      SimpleTest.finish();
    });
}
// Push an update containing only hosts that no test frame ever loads; used
// to trigger an update (and its cache-clearing side effects) without
// touching the hosts under test.
function updateUnusedUrl() {
  const testData = [
    { url: UNUSED_MALWARE_HOST, db: MALWARE_LIST },
    { url: UNUSED_UNWANTED_HOST, db: UNWANTED_LIST }
  ];

  return classifierHelper.addUrlToDB(testData)
    .catch(err => {
      ok(false, "Couldn't update classifier. Error code: " + err);
      // Abort test.
      SimpleTest.finish();
    });
}
// Add the four test hosts as 4-byte hash prefixes.
function addPrefixToDB() {
  return update(true);
}

// Add the four test hosts as 32-byte full-length hashes.
function addCompletionToDB() {
  return update(false);
}
// Push an update for all four test hosts.
// `prefix` selects the stored hash length: true -> 4-byte prefixes,
// false -> 32-byte full-length hashes.
function update(prefix = false) {
  const length = prefix ? 4 : 32;
  const testData = [
    { url: MALWARE_HOST1, db: MALWARE_LIST, len: length },
    { url: MALWARE_HOST2, db: MALWARE_LIST, len: length },
    { url: UNWANTED_HOST1, db: UNWANTED_LIST, len: length },
    { url: UNWANTED_HOST2, db: UNWANTED_LIST, len: length }
  ];

  return classifierHelper.addUrlToDB(testData)
    .catch(err => {
      // The original handler referenced an undefined `errorCode` here, which
      // raised a ReferenceError inside the catch; report the real rejection.
      ok(false, "Couldn't update classifier. Error code: " + err);
      // Abort test.
      SimpleTest.finish();
    });
}
// This testcase is to make sure gethash works:
// 1. Add prefixes to DB.
// 2. Load a test frame containing the malware & unwanted urls; those urls
//    should be blocked.
// 3. The second step should also trigger a gethash request, since the
//    completions are in neither the cache nor the DB.
// 4. Load the test frame again; since the completions are now cached, no
//    gethash request should be triggered.
function testGethash() {
  return Promise.resolve()
    .then(addPrefixToDB)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
    .then(reset);
}
// This testcase is to make sure an update request clears the completion
// cache:
// 1. Add prefixes to DB.
// 2. Load the test frame; this should trigger a gethash request.
// 3. Trigger an update; the completion cache should be cleared now.
// 4. Load the test frame again; since the cache was cleared, a gethash
//    request should be triggered.
function testUpdateClearCache() {
  return Promise.resolve()
    .then(addPrefixToDB)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
    .then(updateUnusedUrl)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
    .then(reset);
}
// This testcase is to make sure completions delivered via update work:
// 1. Add completions to DB.
// 2. Load the test frame; since the completions are stored in the DB, no
//    gethash request should be triggered.
function testUpdate() {
  return Promise.resolve()
    .then(addCompletionToDB)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
    .then(reset);
}
// This testcase is to make sure an update request does not clear completions
// stored in the DB:
// 1. Add completions to DB.
// 2. Load the test frame to make sure the completions are stored in the
//    database; in this case gethash should not be triggered.
// 3. Trigger an update; the cache is cleared, but completions in the DB
//    should still remain.
// 4. Load the test frame again; since the completions are in the DB, no
//    gethash request should be triggered.
function testUpdateNotClearCompletions() {
  return Promise.resolve()
    .then(addCompletionToDB)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
    .then(updateUnusedUrl)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
    .then(reset);
}
// This testcase is to make sure completions stored in the DB are properly
// loaded again after a restart:
// 1. Add completions to DB.
// 2. Simulate a firefox restart by calling reloadDatabase.
// 3. Load the test frame; since the completions should be loaded from the
//    DB, no gethash request should be triggered.
function testUpdateCompletionsAfterReload() {
  return Promise.resolve()
    .then(addCompletionToDB)
    .then(classifierHelper.reloadDatabase)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
    .then(reset);
}
// This testcase is to make sure the completion cache is cleared after a
// restart:
// 1. Add prefixes to DB.
// 2. Load the test frame; this should trigger a gethash request and the
//    completions are stored in the cache.
// 3. Load the test frame again; no gethash should be triggered because of
//    the cache.
// 4. Simulate a firefox restart by calling reloadDatabase.
// 5. Load the test frame again; since the cache was cleared, a gethash
//    request should be triggered.
function testGethashCompletionsAfterReload() {
  return Promise.resolve()
    .then(addPrefixToDB)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
    .then(classifierHelper.reloadDatabase)
    .then(loadTestFrame)
    .then(() => {
      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
    .then(reset);
}
// Run all testcases sequentially; any rejection anywhere in the chain fails
// and finishes the test.
function runTest() {
  Promise.resolve()
    .then(classifierHelper.waitForInit)
    .then(setup)
    .then(testGethash)
    .then(testUpdateClearCache)
    .then(testUpdate)
    .then(testUpdateNotClearCompletions)
    .then(testUpdateCompletionsAfterReload)
    .then(testGethashCompletionsAfterReload)
    .then(function() {
      SimpleTest.finish();
    }).catch(function(e) {
      ok(false, "Some test failed with error " + e);
      SimpleTest.finish();
    });
}
SimpleTest.waitForExplicitFinish();

// Malware protection must be enabled for the classifier to block the test
// frames.
SpecialPowers.pushPrefEnv({"set": [
  ["browser.safebrowsing.malware.enabled", true]
]}, runTest);
</script>
</pre>
</body>
</html>

View File

@ -58,12 +58,6 @@ function cleanUp() {
delFile("safebrowsing/test-block-simple.sbstore");
delFile("safebrowsing/test-track-simple.sbstore");
delFile("safebrowsing/test-trackwhite-simple.sbstore");
delFile("safebrowsing/test-phish-simple.cache");
delFile("safebrowsing/test-malware-simple.cache");
delFile("safebrowsing/test-unwanted-simple.cache");
delFile("safebrowsing/test-block-simple.cache");
delFile("safebrowsing/test-track-simple.cache");
delFile("safebrowsing/test-trackwhite-simple.cache");
delFile("safebrowsing/test-phish-simple.pset");
delFile("safebrowsing/test-malware-simple.pset");
delFile("safebrowsing/test-unwanted-simple.pset");

View File

@ -82,7 +82,6 @@ Finder.prototype = {
this._searchString = options.searchString;
this.clipboardSearchString = options.searchString
}
this._outlineLink(options.drawOutline);
let foundLink = this._fastFind.foundLink;
let linkURL = null;
@ -100,6 +99,8 @@ Finder.prototype = {
options.searchString = this._searchString;
if (!this.iterator.continueRunning({
caseSensitive: this._fastFind.caseSensitive,
entireWord: this._fastFind.entireWord,
linksOnly: options.linksOnly,
word: options.searchString
})) {
@ -107,6 +108,8 @@ Finder.prototype = {
}
this.highlighter.update(options);
this._outlineLink(options.drawOutline);
for (let l of this._listeners) {
try {
l.onFindResult(options);
@ -403,6 +406,8 @@ Finder.prototype = {
let foundRange = this._fastFind.getFoundRange();
this.iterator.start({
caseSensitive: this._fastFind.caseSensitive,
entireWord: this._fastFind.entireWord,
finder: this,
limit: aMatchLimit,
linksOnly: aLinksOnly,

View File

@ -219,6 +219,8 @@ FinderHighlighter.prototype = {
if (highlight) {
yield this.iterator.start({
caseSensitive: this.finder._fastFind.caseSensitive,
entireWord: this.finder._fastFind.entireWord,
linksOnly, word,
finder: this.finder,
onRange: range => {
@ -227,6 +229,8 @@ FinderHighlighter.prototype = {
},
useCache: true
});
if (found)
this.finder._outlineLink(true);
} else {
this.hide(window);
this.clear();
@ -360,10 +364,15 @@ FinderHighlighter.prototype = {
let foundRange = this.finder._fastFind.getFoundRange();
if (!this._modal) {
if (this._highlightAll) {
this.hide(window, foundRange);
let params = this.iterator.params;
if (this._lastIteratorParams &&
this.iterator._areParamsEqual(params, this._lastIteratorParams)) {
return;
}
this.hide(window, foundRange);
if (params.word)
this.highlight(true, params.word, params.linksOnly);
this._lastIteratorParams = params;
}
return;
}
@ -489,18 +498,16 @@ FinderHighlighter.prototype = {
* controller. Optionally skips a specific range.
*
* @param {nsISelectionController} controller
* @param {nsIDOMRange} skipRange
* @param {nsIDOMRange} restoreRange
*/
_clearSelection(controller, skipRange = null) {
_clearSelection(controller, restoreRange = null) {
let sel = controller.getSelection(Ci.nsISelectionController.SELECTION_FIND);
if (!skipRange) {
sel.removeAllRanges();
} else {
for (let i = sel.rangeCount - 1; i >= 0; --i) {
let range = sel.getRangeAt(i);
if (range !== skipRange)
sel.removeRange(range);
}
sel.removeAllRanges();
if (restoreRange) {
sel = controller.getSelection(Ci.nsISelectionController.SELECTION_NORMAL);
sel.addRange(restoreRange);
controller.setDisplaySelection(Ci.nsISelectionController.SELECTION_ATTENTION);
controller.repaintSelection(Ci.nsISelectionController.SELECTION_NORMAL);
}
},

View File

@ -48,21 +48,24 @@ this.FinderIterator = {
* The returned promise is resolved when 1) the limit is reached, 2) when all
* the ranges have been found or 3) when `stop()` is called whilst iterating.
*
* @param {Finder} options.finder Currently active Finder instance
* @param {Number} [options.limit] Limit the amount of results to be
* passed back. Optional, defaults to no
* limit.
* @param {Boolean} [options.linksOnly] Only yield ranges that are inside a
* hyperlink (used by QuickFind).
* Optional, defaults to `false`.
* @param {Function} options.onRange Callback invoked when a range is found
* @param {Boolean} [options.useCache] Whether to allow results already
* present in the cache or demand fresh.
* Optional, defaults to `false`.
* @param {String} options.word Word to search for
* @param {Boolean} options.caseSensitive Whether to search in case sensitive
* mode
* @param {Boolean} options.entireWord Whether to search in entire-word mode
* @param {Finder} options.finder Currently active Finder instance
* @param {Number} [options.limit] Limit the amount of results to be
* passed back. Optional, defaults to no
* limit.
* @param {Boolean} [options.linksOnly] Only yield ranges that are inside a
* hyperlink (used by QuickFind).
* Optional, defaults to `false`.
* @param {Function} options.onRange Callback invoked when a range is found
* @param {Boolean} [options.useCache] Whether to allow results already
* present in the cache or demand fresh.
* Optional, defaults to `false`.
* @param {String} options.word Word to search for
* @return {Promise}
*/
start({ finder, limit, linksOnly, onRange, useCache, word }) {
start({ caseSensitive, entireWord, finder, limit, linksOnly, onRange, useCache, word }) {
// Take care of default values for non-required options.
if (typeof limit != "number")
limit = -1;
@ -72,6 +75,10 @@ this.FinderIterator = {
useCache = false;
// Validate the options.
if (typeof caseSensitive != "boolean")
throw new Error("Missing required option 'caseSensitive'");
if (typeof entireWord != "boolean")
throw new Error("Missing required option 'entireWord'");
if (!finder)
throw new Error("Missing required option 'finder'");
if (!word)
@ -86,7 +93,7 @@ this.FinderIterator = {
let window = finder._getWindow();
let resolver;
let promise = new Promise(resolve => resolver = resolve);
let iterParams = { linksOnly, useCache, word };
let iterParams = { caseSensitive, entireWord, linksOnly, useCache, word };
this._listeners.set(onRange, { limit, onEnd: resolver });
@ -168,13 +175,18 @@ this.FinderIterator = {
* passed through the arguments. When `true`, we can keep it running as-is and
* the consumer should stop the iterator when `false`.
*
* @param {Boolean} options.linksOnly Whether to search for the word to be
* present in links only
* @param {String} options.word The word being searched for
* @param {Boolean} options.caseSensitive Whether to search in case sensitive
* mode
* @param {Boolean} options.entireWord Whether to search in entire-word mode
* @param {Boolean} options.linksOnly Whether to search for the word to be
* present in links only
* @param {String} options.word The word being searched for
* @return {Boolean}
*/
continueRunning({ linksOnly, word }) {
continueRunning({ caseSensitive, entireWord, linksOnly, word }) {
return (this.running &&
this._currentParams.caseSensitive === caseSensitive &&
this._currentParams.entireWord === entireWord &&
this._currentParams.linksOnly === linksOnly &&
this._currentParams.word == word);
},
@ -183,16 +195,19 @@ this.FinderIterator = {
* Internal; check if an iteration request is available in the previous result
* that we cached.
*
* @param {Boolean} options.linksOnly Whether to search for the word to be
* present in links only
* @param {Boolean} options.useCache Whether the consumer wants to use the
* cached previous result at all
* @param {String} options.word The word being searched for
* @param {Boolean} options.caseSensitive Whether to search in case sensitive
* mode
* @param {Boolean} options.entireWord Whether to search in entire-word mode
* @param {Boolean} options.linksOnly Whether to search for the word to be
* present in links only
* @param {Boolean} options.useCache Whether the consumer wants to use the
* cached previous result at all
* @param {String} options.word The word being searched for
* @return {Boolean}
*/
_previousResultAvailable({ linksOnly, useCache, word }) {
_previousResultAvailable({ caseSensitive, entireWord, linksOnly, useCache, word }) {
return !!(useCache &&
this._areParamsEqual(this._previousParams, { word, linksOnly }) &&
this._areParamsEqual(this._previousParams, { caseSensitive, entireWord, linksOnly, word }) &&
this._previousRanges.length);
},
@ -205,6 +220,8 @@ this.FinderIterator = {
*/
_areParamsEqual(paramSet1, paramSet2) {
return (!!paramSet1 && !!paramSet2 &&
paramSet1.caseSensitive === paramSet2.caseSensitive &&
paramSet1.entireWord === paramSet2.entireWord &&
paramSet1.linksOnly === paramSet2.linksOnly &&
paramSet1.word == paramSet2.word);
},
@ -318,7 +335,7 @@ this.FinderIterator = {
let { linksOnly, word } = this._currentParams;
let iterCount = 0;
for (let frame of frames) {
for (let range of this._iterateDocument(word, frame, finder)) {
for (let range of this._iterateDocument(this._currentParams, frame)) {
// Between iterations, for example after a sleep of one cycle, we could
// have gotten the signal to stop iterating. Make sure we do here.
if (!this.running || spawnId !== this._spawnId)
@ -367,12 +384,15 @@ this.FinderIterator = {
* Internal; basic wrapper around nsIFind that provides a generator yielding
* a range each time an occurence of `word` string is found.
*
* @param {String} word The word to search for
* @param {nsIDOMWindow} window The window to search in
* @param {Finder} finder The Finder instance
* @param {Boolean} options.caseSensitive Whether to search in case
* sensitive mode
* @param {Boolean} options.entireWord Whether to search in entire-word
* mode
* @param {String} options.word The word to search for
* @param {nsIDOMWindow} window The window to search in
* @yield {nsIDOMRange}
*/
_iterateDocument: function* (word, window, finder) {
_iterateDocument: function* ({ caseSensitive, entireWord, word }, window) {
let doc = window.document;
let body = (doc instanceof Ci.nsIDOMHTMLDocument && doc.body) ?
doc.body : doc.documentElement;
@ -394,8 +414,8 @@ this.FinderIterator = {
let nsIFind = Cc["@mozilla.org/embedcomp/rangefind;1"]
.createInstance()
.QueryInterface(Ci.nsIFind);
nsIFind.caseSensitive = finder._fastFind.caseSensitive;
nsIFind.entireWord = finder._fastFind.entireWord;
nsIFind.caseSensitive = caseSensitive;
nsIFind.entireWord = entireWord;
while ((retRange = nsIFind.Find(word, searchRange, startPt, endPt))) {
yield retRange;

View File

@ -42,6 +42,8 @@ add_task(function* test_start() {
let count = 0;
yield FinderIterator.start({
caseSensitive: false,
entireWord: false,
finder: gMockFinder,
onRange: range => {
++count;
@ -63,6 +65,8 @@ add_task(function* test_valid_arguments() {
let count = 0;
yield FinderIterator.start({
caseSensitive: false,
entireWord: false,
finder: gMockFinder,
onRange: range => ++count,
word: findText
@ -75,18 +79,38 @@ add_task(function* test_valid_arguments() {
count = 0;
Assert.throws(() => FinderIterator.start({
entireWord: false,
onRange: range => ++count,
word: findText
}), /Missing required option 'caseSensitive'/, "Should throw when missing an argument");
FinderIterator.reset();
Assert.throws(() => FinderIterator.start({
caseSensitive: false,
onRange: range => ++count,
word: findText
}), /Missing required option 'entireWord'/, "Should throw when missing an argument");
FinderIterator.reset();
Assert.throws(() => FinderIterator.start({
caseSensitive: false,
entireWord: false,
onRange: range => ++count,
word: findText
}), /Missing required option 'finder'/, "Should throw when missing an argument");
FinderIterator.reset();
Assert.throws(() => FinderIterator.start({
caseSensitive: true,
entireWord: false,
finder: gMockFinder,
word: findText
}), /Missing valid, required option 'onRange'/, "Should throw when missing an argument");
FinderIterator.reset();
Assert.throws(() => FinderIterator.start({
caseSensitive: false,
entireWord: true,
finder: gMockFinder,
onRange: range => ++count
}), /Missing required option 'word'/, "Should throw when missing an argument");
@ -102,6 +126,8 @@ add_task(function* test_stop() {
let count = 0;
let whenDone = FinderIterator.start({
caseSensitive: false,
entireWord: false,
finder: gMockFinder,
onRange: range => ++count,
word: findText
@ -121,6 +147,8 @@ add_task(function* test_reset() {
let count = 0;
let whenDone = FinderIterator.start({
caseSensitive: false,
entireWord: false,
finder: gMockFinder,
onRange: range => ++count,
word: findText
@ -150,6 +178,8 @@ add_task(function* test_parallel_starts() {
// Start off the iterator.
let count = 0;
let whenDone = FinderIterator.start({
caseSensitive: false,
entireWord: false,
finder: gMockFinder,
onRange: range => ++count,
word: findText
@ -161,6 +191,8 @@ add_task(function* test_parallel_starts() {
let count2 = 0;
let whenDone2 = FinderIterator.start({
caseSensitive: false,
entireWord: false,
finder: gMockFinder,
onRange: range => ++count2,
word: findText

View File

@ -131,7 +131,7 @@ id ConnectToUpdateServer()
}
}
} @catch (NSException* e) {
// Ignore exceptions.
NSLog(@"%@: %@", e.name, e.reason);
return nil;
}
return updateServer;

View File

@ -169,7 +169,7 @@ void AbortElevatedUpdate()
currTry++;
}
} @catch (NSException* e) {
// Ignore exceptions.
NSLog(@"%@: %@", e.name, e.reason);
}
NSLog(@"Unable to clean up updater.");
}