Merge autoland to mozilla-central a=merge

arthur.iakab 2018-05-01 00:49:30 +03:00
commit 52b7451194
66 changed files with 1011 additions and 577 deletions

View File

@ -1089,7 +1089,8 @@ pref("security.sandbox.gmp.win32k-disable", false);
pref("security.sandbox.content.level", 3);
#endif
#if defined(NIGHTLY_BUILD) && defined(XP_MACOSX) && defined(MOZ_SANDBOX)
// Enable the Mac Flash sandbox on Nightly and Beta, not Release
#if defined(EARLY_BETA_OR_EARLIER) && defined(XP_MACOSX) && defined(MOZ_SANDBOX)
// Controls whether and how the Mac NPAPI Flash plugin process is sandboxed.
// On Mac these levels are:
// 0 - "no sandbox"
@ -1535,9 +1536,7 @@ pref("browser.tabs.remote.desktopbehavior", true);
// This feature is enabled on macOS only on the Nightly channel
// until bug 1453080 is fixed.
//
#if defined(XP_LINUX) || defined(XP_WIN)
pref("browser.tabs.remote.warmup.enabled", true);
#elif defined(NIGHTLY_BUILD) && defined(XP_MACOSX)
#if !defined(XP_MACOSX) || defined(NIGHTLY_BUILD)
pref("browser.tabs.remote.warmup.enabled", true);
#else
pref("browser.tabs.remote.warmup.enabled", false);

View File

@ -864,10 +864,21 @@
let wrap = n => n.parentNode.localName == "toolbarpaletteitem" ? n.parentNode : n;
let unwrap = n => n && n.localName == "toolbarpaletteitem" ? n.firstElementChild : n;
// Starting from the tabs element, find the next sibling that:
// - isn't hidden; and
// - isn't one of the titlebar placeholder elements; and
// - isn't the all-tabs button.
// If it's the new tab button, consider the new tab button adjacent to the tabs.
// If the new tab button is marked as adjacent and the tabstrip doesn't
// overflow, we'll display the 'new tab' button inline in the tabstrip.
// In all other cases, the separate new tab button is displayed in its
// customized location.
let sib = this;
do {
sib = unwrap(wrap(sib).nextElementSibling);
} while (sib && sib.hidden);
} while (sib && (sib.hidden ||
sib.getAttribute("skipintoolbarset") == "true" ||
sib.id == "alltabs-button"));
const kAttr = "hasadjacentnewtabbutton";
if (sib && sib.id == "new-tab-button") {

View File

@ -16,7 +16,6 @@ support-files =
skip-if = true # Bug 1409054 to remove; previously skipped for intermittents, e.g., Bug 1399648
[browser_aboutHome_search_POST.js]
[browser_aboutHome_search_composing.js]
skip-if = !debug && (os == "mac" || (os == "linux" && bits == 32)) # Bug 1400491, bug 1399648
[browser_aboutHome_search_searchbar.js]
[browser_aboutHome_search_suggestion.js]
skip-if = os == "mac" || (os == "linux" && (!debug || bits == 64)) # Bug 1399648, bug 1402502

View File

@ -21,18 +21,8 @@ add_task(async function() {
input.focus();
});
// FYI: "compositionstart" will be dispatched automatically.
await BrowserTestUtils.synthesizeCompositionChange({
composition: {
string: "x",
clauses: [
{ length: 1, attr: Ci.nsITextInputProcessor.ATTR_RAW_CLAUSE }
]
},
caret: { start: 1, length: 0 }
}, browser);
await ContentTask.spawn(browser, null, async function() {
info("Setting up the mutation observer before synthesizing composition");
let mutationPromise = ContentTask.spawn(browser, null, async function() {
let searchController = content.wrappedJSObject.gContentSearchController;
// Wait for the search suggestions to become visible.
@ -63,6 +53,20 @@ add_task(async function() {
searchController.selectedIndex = 1;
});
// FYI: "compositionstart" will be dispatched automatically.
await BrowserTestUtils.synthesizeCompositionChange({
composition: {
string: "x",
clauses: [
{ length: 1, attr: Ci.nsITextInputProcessor.ATTR_RAW_CLAUSE }
]
},
caret: { start: 1, length: 0 }
}, browser);
info("Waiting for search suggestion table unhidden");
await mutationPromise;
// Click the second suggestion.
let expectedURL = Services.search.currentEngine
.getSubmission("xbar", null, "homepage").uri.spec;

View File

@ -13,27 +13,9 @@
* for tips on how to do that.
*/
const EXPECTED_REFLOWS = [
{
stack: [
"onOverflow@resource:///modules/CustomizableUI.jsm",
],
maxCount: 48,
},
{
stack: [
"_moveItemsBackToTheirOrigin@resource:///modules/CustomizableUI.jsm",
"_onLazyResize@resource:///modules/CustomizableUI.jsm",
],
maxCount: 5,
},
{
stack: [
"_onLazyResize@resource:///modules/CustomizableUI.jsm",
],
maxCount: 4,
},
/**
* Nothing here! Please don't add anything new!
*/
];
const gToolbar = document.getElementById("PersonalToolbar");

View File

@ -4408,18 +4408,36 @@ OverflowableToolbar.prototype = {
}
},
onOverflow(aEvent) {
// The rangeParent check is here because of bug 1111986 and ensuring that
// overflow events from the bookmarks toolbar items or similar things that
// manage their own overflow don't trigger an overflow on the entire toolbar
if (!this._enabled ||
(aEvent && aEvent.target != this._toolbar.customizationTarget) ||
(aEvent && aEvent.rangeParent))
/**
* Avoid re-entrancy in the overflow handling by keeping track of invocations:
*/
_lastOverflowCounter: 0,
/**
* Handle overflow in the toolbar by moving items to the overflow menu.
* @param {Event} aEvent
* The overflow event that triggered handling overflow. May be omitted
* in some cases (e.g. when we run this method after overflow handling
* is re-enabled from customize mode, to ensure correct handling of
* initial overflow).
*/
async onOverflow(aEvent) {
if (!this._enabled)
return;
let child = this._target.lastChild;
while (child && this._target.scrollLeftMin != this._target.scrollLeftMax) {
let thisOverflowResponse = ++this._lastOverflowCounter;
let win = this._target.ownerGlobal;
let [scrollLeftMin, scrollLeftMax] = await win.promiseDocumentFlushed(() => {
return [this._target.scrollLeftMin, this._target.scrollLeftMax];
});
if (win.closed || this._lastOverflowCounter != thisOverflowResponse) {
return;
}
while (child && scrollLeftMin != scrollLeftMax) {
let prevChild = child.previousSibling;
if (child.getAttribute("overflows") != "false") {
@ -4438,13 +4456,26 @@ OverflowableToolbar.prototype = {
}
}
child = prevChild;
[scrollLeftMin, scrollLeftMax] = await win.promiseDocumentFlushed(() => {
return [this._target.scrollLeftMin, this._target.scrollLeftMax];
});
// If the window has closed or if we re-enter because we were waiting
// for layout, stop.
if (win.closed || this._lastOverflowCounter != thisOverflowResponse) {
return;
}
}
let win = this._target.ownerGlobal;
win.UpdateUrlbarSearchSplitterState();
// Reset the counter because we finished handling overflow.
this._lastOverflowCounter = 0;
},
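A minimal standalone sketch of the generation-counter pattern onOverflow uses above (hypothetical names, not part of the patch): each run records the counter value it started with and bails out if a newer run began while it was awaiting layout.
let lastRun = 0;
async function guardedOverflowHandler(readLayoutAsync, act) {
  const thisRun = ++lastRun;
  // e.g. win.promiseDocumentFlushed(() => { /* read layout here */ })
  const measurement = await readLayoutAsync();
  if (thisRun != lastRun) {
    return; // a newer invocation superseded this one while we were waiting
  }
  act(measurement);
}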
_onResize(aEvent) {
// Ignore bubbled-up resize events.
if (aEvent.target != aEvent.target.ownerGlobal.top) {
return;
}
if (!this._lazyResizeHandler) {
this._lazyResizeHandler = new DeferredTask(this._onLazyResize.bind(this),
LAZY_RESIZE_INTERVAL_MS, 0);
@ -4452,16 +4483,33 @@ OverflowableToolbar.prototype = {
this._lazyResizeHandler.arm();
},
_moveItemsBackToTheirOrigin(shouldMoveAllItems) {
/**
* Try to move toolbar items back to the toolbar from the overflow menu.
* @param {boolean} shouldMoveAllItems
* Whether we should move everything (e.g. because we're being disabled)
* @param {number} targetWidth
* Optional; the width of the toolbar in which we can put things.
* Some consumers pass this to avoid reflows.
* While there are items in the list, this width won't change, and so
* we can avoid flushing layout by providing it and/or caching it.
* Note that if `shouldMoveAllItems` is true, we never need the width
* anyway.
*/
_moveItemsBackToTheirOrigin(shouldMoveAllItems, targetWidth) {
let placements = gPlacements.get(this._toolbar.id);
let win = this._target.ownerGlobal;
while (this._list.firstChild) {
let child = this._list.firstChild;
let minSize = this._collapsed.get(child.id);
if (!shouldMoveAllItems &&
minSize &&
this._target.clientWidth <= minSize) {
break;
if (!shouldMoveAllItems && minSize) {
if (!targetWidth) {
let dwu = win.QueryInterface(Ci.nsIInterfaceRequestor).getInterface(Ci.nsIDOMWindowUtils);
targetWidth = Math.floor(dwu.getBoundsWithoutFlushing(this._target).width);
}
if (targetWidth <= minSize) {
break;
}
}
this._collapsed.delete(child.id);
@ -4493,7 +4541,6 @@ OverflowableToolbar.prototype = {
CustomizableUIInternal.notifyListeners("onWidgetUnderflow", child, this._target);
}
let win = this._target.ownerGlobal;
win.UpdateUrlbarSearchSplitterState();
let collapsedWidgetIds = Array.from(this._collapsed.keys());
@ -4506,14 +4553,21 @@ OverflowableToolbar.prototype = {
}
},
_onLazyResize() {
async _onLazyResize() {
if (!this._enabled)
return;
if (this._target.scrollLeftMin != this._target.scrollLeftMax) {
let win = this._target.ownerGlobal;
let [min, max, targetWidth] = await win.promiseDocumentFlushed(() => {
return [this._target.scrollLeftMin, this._target.scrollLeftMax, this._target.clientWidth];
});
if (win.closed) {
return;
}
if (min != max) {
this.onOverflow();
} else {
this._moveItemsBackToTheirOrigin();
this._moveItemsBackToTheirOrigin(false, targetWidth);
}
},
@ -4608,7 +4662,7 @@ OverflowableToolbar.prototype = {
} else {
// If it's now the first item in the overflow list,
// maybe we can return it:
this._moveItemsBackToTheirOrigin();
this._moveItemsBackToTheirOrigin(false);
}
},

View File

@ -49,7 +49,7 @@ add_task(async function() {
});
// Ctrl+K should open the overflow panel and focus the search bar if the search bar is overflowed.
add_task(async function() {
add_task(async function check_shortcut_when_in_overflow() {
this.originalWindowWidth = window.outerWidth;
let navbar = document.getElementById(CustomizableUI.AREA_NAVBAR);
ok(!navbar.hasAttribute("overflowing"), "Should start with a non-overflowing toolbar.");
@ -58,7 +58,10 @@ add_task(async function() {
Services.prefs.setBoolPref("browser.search.widget.inNavBar", true);
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => navbar.getAttribute("overflowing") == "true");
await waitForCondition(() => {
return navbar.getAttribute("overflowing") == "true" &&
!navbar.querySelector("#search-container");
});
ok(!navbar.querySelector("#search-container"), "Search container should be overflowing");
let shownPanelPromise = promiseOverflowShown(window);

View File

@ -68,7 +68,7 @@ add_task(async function() {
let originalWindowWidth = window.outerWidth;
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => navbar.hasAttribute("overflowing"));
await waitForCondition(() => navbar.hasAttribute("overflowing") && !navbar.querySelector("#" + widgetIds[0]));
let testWidgetId = kTestWidgetPrefix + 3;
@ -117,7 +117,7 @@ add_task(async function() {
let originalWindowWidth = window.outerWidth;
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => navbar.hasAttribute("overflowing"));
await waitForCondition(() => navbar.hasAttribute("overflowing") && !navbar.querySelector("#" + widgetIds[0]));
let testWidgetId = kTestWidgetPrefix + 3;
@ -167,7 +167,7 @@ add_task(async function() {
let originalWindowWidth = window.outerWidth;
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => navbar.hasAttribute("overflowing"));
await waitForCondition(() => navbar.hasAttribute("overflowing") && !navbar.querySelector("#" + widgetIds[0]));
let testWidgetId = kTestWidgetPrefix + 3;
@ -226,7 +226,14 @@ add_task(async function() {
let originalWindowWidth = window.outerWidth;
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => navbar.hasAttribute("overflowing"));
// Wait for all the widgets to overflow. We can't just wait for the
// `overflowing` attribute because we leave time for layout flushes
// in between, so it's possible for the timeout to run before the
// navbar has "settled".
await waitForCondition(() => {
return navbar.hasAttribute("overflowing") &&
navbar.customizationTarget.lastChild.getAttribute("overflows") == "false";
});
// Find last widget that doesn't allow overflowing
let nonOverflowing = navbar.customizationTarget.lastChild;
@ -287,7 +294,7 @@ add_task(async function() {
let originalWindowWidth = window.outerWidth;
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => toolbarNode.hasAttribute("overflowing"));
await waitForCondition(() => toolbarNode.hasAttribute("overflowing") && !toolbarNode.querySelector("#" + widgetIds[1]));
ok(toolbarNode.hasAttribute("overflowing"), "Should have an overflowing toolbar.");
let btnId = kTestWidgetPrefix + missingId;

View File

@ -91,7 +91,7 @@ add_task(async function test_panelui_opened() {
add_task(async function test_panelui_customize_to_toolbar() {
await startCustomizing();
let navbar = document.getElementById("nav-bar");
simulateItemDrag(document.getElementById("edit-controls"), navbar.customizationTarget);
simulateItemDrag(document.getElementById("edit-controls"), navbar.customizationTarget, "end");
await endCustomizing();
// updateEditUIVisibility should be called when customization ends but isn't. See bug 1359790.
@ -125,7 +125,8 @@ add_task(async function test_panelui_customize_to_toolbar() {
});
window.resizeTo(kForceOverflowWidthPx, window.outerHeight);
await waitForCondition(() => navbar.hasAttribute("overflowing"));
await waitForCondition(() =>
navbar.hasAttribute("overflowing") && !navbar.querySelector("#edit-controls"));
// Mac will update the enabled state even when the buttons are overflowing,
// so main menubar shortcuts will work properly.

View File

@ -1170,11 +1170,15 @@ PlacesToolbar.prototype = {
case "overflow":
if (!this._isOverflowStateEventRelevant(aEvent))
return;
// Avoid triggering overflow in containers if possible
aEvent.stopPropagation();
this._onOverflow();
break;
case "underflow":
if (!this._isOverflowStateEventRelevant(aEvent))
return;
// Avoid triggering underflow in containers if possible
aEvent.stopPropagation();
this._onUnderflow();
break;
case "TabOpen":

View File

@ -6,7 +6,7 @@ add_task(async function() {
let tab1, tab1Zoom, tab2, tab2Zoom, tab3, tab3Zoom;
tab1 = await BrowserTestUtils.openNewForegroundTab(gBrowser, testPage);
FullZoom.enlarge();
await FullZoom.enlarge();
tab1Zoom = ZoomManager.getZoomForBrowser(tab1.linkedBrowser);
tab2 = await BrowserTestUtils.openNewForegroundTab(gBrowser, testPage);

View File

@ -16,9 +16,7 @@ module.exports = {
// XXX Bug 1230193. We're still working on enabling no-undef for these test
// directories.
"files": [
"client/scratchpad/**",
"server/tests/mochitest/**",
"shared/tests/unit/**",
],
"rules": {
"no-undef": "off",

View File

@ -79,6 +79,8 @@ Converter.prototype = {
request.QueryInterface(Ci.nsIChannel);
request.contentType = "text/html";
let headers = getHttpHeaders(request);
// Enforce strict CSP:
try {
request.QueryInterface(Ci.nsIHttpChannel);
@ -105,7 +107,7 @@ Converter.prototype = {
// Initialize stuff.
let win = NetworkHelper.getWindowForRequest(request);
this.data = exportData(win, request);
this.data = exportData(win, headers);
insertJsonData(win, this.data.json);
win.addEventListener("contentMessage", onContentMessage, false, true);
keepThemeUpdated(win);
@ -164,8 +166,30 @@ function fixSave(request) {
request.setProperty("contentType", originalType);
}
function getHttpHeaders(request) {
let headers = {
response: [],
request: []
};
// The request isn't always an nsIHttpChannel
// (e.g. in the case of data: URLs).
if (request instanceof Ci.nsIHttpChannel) {
request.visitResponseHeaders({
visitHeader: function(name, value) {
headers.response.push({name: name, value: value});
}
});
request.visitRequestHeaders({
visitHeader: function(name, value) {
headers.request.push({name: name, value: value});
}
});
}
return headers;
}
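For reference, the object built above has roughly this shape for an HTTP channel (hypothetical example values, not from the patch):
// {
//   response: [{ name: "Content-Type", value: "application/json" }, ...],
//   request:  [{ name: "Accept", value: "*/*" }, ...]
// }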
// Exports variables that will be accessed by the non-privileged scripts.
function exportData(win, request) {
function exportData(win, headers) {
let data = Cu.createObjectIn(win, {
defineAs: "JSONView"
});
@ -188,24 +212,6 @@ function exportData(win, request) {
};
data.Locale = Cu.cloneInto(Locale, win, {cloneFunctions: true});
let headers = {
response: [],
request: []
};
// The request doesn't have to be always nsIHttpChannel
// (e.g. in case of data: URLs)
if (request instanceof Ci.nsIHttpChannel) {
request.visitResponseHeaders({
visitHeader: function(name, value) {
headers.response.push({name: name, value: value});
}
});
request.visitRequestHeaders({
visitHeader: function(name, value) {
headers.request.push({name: name, value: value});
}
});
}
data.headers = Cu.cloneInto(headers, win);
return data;

View File

@ -10,8 +10,26 @@ const TEST_JSON_URL = URL_ROOT + "csp_json.json";
add_task(async function() {
info("Test CSP JSON started");
await addJsonViewTab(TEST_JSON_URL);
let tab = await addJsonViewTab(TEST_JSON_URL);
let count = await getElementCount(".jsonPanelBox .treeTable .treeRow");
is(count, 1, "There must be one row");
// The JSON Viewer alters the CSP, but the displayed header should be the original one
await selectJsonViewContentTab("headers");
await ContentTask.spawn(tab.linkedBrowser, null, async function() {
let responseHeaders = content.document.querySelector(".netHeadersGroup");
let names = responseHeaders.querySelectorAll(".netInfoParamName");
let found = false;
for (let name of names) {
if (name.textContent.toLowerCase() == "content-security-policy") {
ok(!found, "The CSP header only appears once");
found = true;
let value = name.nextElementSibling.textContent;
let expected = "default-src 'none'; base-uri 'none';";
is(value, expected, "The CSP value has not been altered");
}
}
ok(found, "The CSP header is present");
});
});

View File

@ -93,3 +93,16 @@
white-space: nowrap;
margin-top: 1px;
}
/* Search box */
.devtools-searchbox {
height: 100%;
}
.devtools-plaininput:focus {
border: 1px solid var(--blue-50);
margin-bottom: 0;
margin-top: 0;
box-shadow: none;
}

View File

@ -12,6 +12,11 @@
* https://bugzilla.mozilla.org/show_bug.cgi?id=653934
*/
// Via scratchpad.xul
/* import-globals-from ../../../toolkit/content/globalOverlay.js */
// Via editMenuCommands.inc.xul
/* import-globals-from ../../../toolkit/content/editMenuOverlay.js */
"use strict";
const SCRATCHPAD_CONTEXT_CONTENT = 1;

View File

@ -461,11 +461,12 @@ class ShapesHighlighter extends AutoRefreshHighlighter {
yOffset = bounds.top - nodeWin.scrollY + win.scrollY;
}
const { pageXOffset, pageYOffset, innerWidth, innerHeight } = this.win;
const { pageXOffset, pageYOffset } = this.win;
const { clientHeight, clientWidth } = this.win.document.documentElement;
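// documentElement.clientWidth/clientHeight give the viewport size excluding any
// scrollbars, whereas window.innerWidth/innerHeight include them, so the viewport
// computed below no longer extends underneath a visible scrollbar.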
const left = pageXOffset + padding - xOffset;
const right = innerWidth + pageXOffset - padding - xOffset;
const right = clientWidth + pageXOffset - padding - xOffset;
const top = pageYOffset + padding - yOffset;
const bottom = innerHeight + pageYOffset - padding - yOffset;
const bottom = clientHeight + pageYOffset - padding - yOffset;
this.viewport = { left, right, top, bottom, padding };
}

View File

@ -21,6 +21,9 @@ registerCleanupFunction(() => {
// failures, set this to true.
var ALLOW_CONSOLE_ERRORS = false;
// XXX This listener is broken (see bug 1456634); turn off no-undef here for now.
// This needs turning back on!
/* eslint-disable no-undef */
var listener = {
observe: function(message) {
let string;
@ -50,5 +53,6 @@ var listener = {
}
}
};
/* eslint-enable no-undef */
Services.console.registerListener(listener);

View File

@ -2679,8 +2679,8 @@ ContentParent::RecvPlaySound(const URIParams& aURI)
bool isChrome = false;
// If the check here fails, it can only mean that this message was spoofed.
if (!soundURI || NS_FAILED(soundURI->SchemeIs("chrome", &isChrome)) || !isChrome) {
KillHard("PlaySound only accepts a valid chrome URI.");
return IPC_OK();
// PlaySound only accepts a valid chrome URI.
return IPC_FAIL_NO_REASON(this);
}
nsCOMPtr<nsIURL> soundURL(do_QueryInterface(soundURI));
if (!soundURL) {
@ -5585,7 +5585,6 @@ ContentParent::RecvFileCreationRequest(const nsID& aID,
// or for testing.
if (!mRemoteType.EqualsLiteral(FILE_REMOTE_TYPE) &&
!Preferences::GetBool("dom.file.createInChild", false)) {
KillHard("FileCreationRequest is not supported.");
return IPC_FAIL_NO_REASON(this);
}

View File

@ -120,12 +120,9 @@ AnalyserNode::Create(AudioContext& aAudioContext,
return nullptr;
}
analyserNode->SetMinDecibels(aOptions.mMinDecibels, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
analyserNode->SetMaxDecibels(aOptions.mMaxDecibels, aRv);
analyserNode->SetMinAndMaxDecibels(aOptions.mMinDecibels,
aOptions.mMaxDecibels,
aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
@ -219,6 +216,17 @@ AnalyserNode::SetMaxDecibels(double aValue, ErrorResult& aRv)
mMaxDecibels = aValue;
}
void
AnalyserNode::SetMinAndMaxDecibels(double aMinValue, double aMaxValue, ErrorResult& aRv)
{
if (aMinValue >= aMaxValue) {
aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
return;
}
mMinDecibels = aMinValue;
mMaxDecibels = aMaxValue;
}
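A hedged JS usage sketch of what this pairwise validation means for Web Audio callers (standard AnalyserNode constructor assumed; not taken from the patch):
const ctx = new AudioContext();
// A valid pair that could previously be rejected, because minDecibels was checked
// against the default maxDecibels (-30) before maxDecibels had been applied:
const analyser = new AnalyserNode(ctx, { minDecibels: -20, maxDecibels: -10 });
// An invalid pair (min >= max) still throws an "IndexSizeError" DOMException:
// new AnalyserNode(ctx, { minDecibels: -10, maxDecibels: -90 });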
void
AnalyserNode::SetSmoothingTimeConstant(double aValue, ErrorResult& aRv)
{

View File

@ -72,6 +72,8 @@ public:
virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override;
virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override;
void SetMinAndMaxDecibels(double aMinValue, double aMaxValue, ErrorResult& aRv);
private:
~AnalyserNode() = default;

View File

@ -171,7 +171,7 @@ AudioBuffer::AudioBuffer(nsPIDOMWindowInner* aWindow,
aSampleRate > WebAudioUtils::MaxSampleRate ||
aNumberOfChannels > WebAudioUtils::MaxChannelCount ||
!aLength || aLength > INT32_MAX) {
aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
@ -194,7 +194,7 @@ AudioBuffer::Constructor(const GlobalObject& aGlobal,
ErrorResult& aRv)
{
if (!aOptions.mNumberOfChannels) {
aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return nullptr;
}
@ -331,7 +331,7 @@ AudioBuffer::CopyFromChannel(const Float32Array& aDestination, uint32_t aChannel
end += length;
if (aChannelNumber >= NumberOfChannels() ||
!end.isValid() || end.value() > Length()) {
aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
@ -375,7 +375,7 @@ AudioBuffer::CopyToChannel(JSContext* aJSContext, const Float32Array& aSource,
end += length;
if (aChannelNumber >= NumberOfChannels() ||
!end.isValid() || end.value() > Length()) {
aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
@ -406,7 +406,7 @@ AudioBuffer::GetChannelData(JSContext* aJSContext, uint32_t aChannel,
ErrorResult& aRv)
{
if (aChannel >= NumberOfChannels()) {
aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}

View File

@ -81,17 +81,17 @@ addLoadEvent(function() {
expectException(function() {
context.createBuffer(2, 2048, 7999);
}, DOMException.INDEX_SIZE_ERR);
}, DOMException.NOT_SUPPORTED_ERR);
expectException(function() {
context.createBuffer(2, 2048, 192001);
}, DOMException.INDEX_SIZE_ERR);
}, DOMException.NOT_SUPPORTED_ERR);
context.createBuffer(2, 2048, 8000); // no exception
context.createBuffer(2, 2048, 192000); // no exception
context.createBuffer(32, 2048, 48000); // no exception
// Null length
expectException(function() {
context.createBuffer(2, 0, 48000);
}, DOMException.INDEX_SIZE_ERR);
}, DOMException.NOT_SUPPORTED_ERR);
// Null number of channels
expectException(function() {
context.createBuffer(0, 2048, 48000);

View File

@ -12,9 +12,9 @@
dictionary AnalyserOptions : AudioNodeOptions {
unsigned long fftSize = 2048;
float maxDecibels = -30;
float minDecibels = -100;
float smoothingTimeConstant = 0.8;
double maxDecibels = -30;
double minDecibels = -100;
double smoothingTimeConstant = 0.8;
};
[Pref="dom.webaudio.enabled",

View File

@ -2102,25 +2102,31 @@ CompositorBridgeParent::NotifyPipelineRemoved(const wr::PipelineId& aPipelineId)
}
void
CompositorBridgeParent::NotifyDidCompositeToPipeline(const wr::PipelineId& aPipelineId, const wr::Epoch& aEpoch, TimeStamp& aCompositeStart, TimeStamp& aCompositeEnd)
CompositorBridgeParent::NotifyPipelineRendered(const wr::PipelineId& aPipelineId,
const wr::Epoch& aEpoch,
TimeStamp& aCompositeStart,
TimeStamp& aCompositeEnd)
{
if (!mWrBridge || !mAsyncImageManager) {
return;
if (mAsyncImageManager) {
mAsyncImageManager->PipelineRendered(aPipelineId, aEpoch);
}
mAsyncImageManager->PipelineRendered(aPipelineId, aEpoch);
if (mPaused) {
if (!mWrBridge) {
return;
}
if (mWrBridge->PipelineId() == aPipelineId) {
TransactionId transactionId = mWrBridge->FlushTransactionIdsForEpoch(aEpoch, aCompositeEnd);
Unused << SendDidComposite(LayersId{0}, transactionId, aCompositeStart, aCompositeEnd);
mWrBridge->RemoveEpochDataPriorTo(aEpoch);
nsTArray<ImageCompositeNotificationInfo> notifications;
mWrBridge->ExtractImageCompositeNotifications(&notifications);
if (!notifications.IsEmpty()) {
Unused << ImageBridgeParent::NotifyImageComposites(notifications);
if (!mPaused) {
TransactionId transactionId = mWrBridge->FlushTransactionIdsForEpoch(aEpoch, aCompositeEnd);
Unused << SendDidComposite(LayersId{0}, transactionId, aCompositeStart, aCompositeEnd);
nsTArray<ImageCompositeNotificationInfo> notifications;
mWrBridge->ExtractImageCompositeNotifications(&notifications);
if (!notifications.IsEmpty()) {
Unused << ImageBridgeParent::NotifyImageComposites(notifications);
}
}
return;
}
@ -2130,9 +2136,14 @@ CompositorBridgeParent::NotifyDidCompositeToPipeline(const wr::PipelineId& aPipe
if (lts->mCrossProcessParent &&
lts->mWrBridge &&
lts->mWrBridge->PipelineId() == aPipelineId) {
CrossProcessCompositorBridgeParent* cpcp = lts->mCrossProcessParent;
TransactionId transactionId = lts->mWrBridge->FlushTransactionIdsForEpoch(aEpoch, aCompositeEnd);
Unused << cpcp->SendDidComposite(aLayersId, transactionId, aCompositeStart, aCompositeEnd);
lts->mWrBridge->RemoveEpochDataPriorTo(aEpoch);
if (!mPaused) {
CrossProcessCompositorBridgeParent* cpcp = lts->mCrossProcessParent;
TransactionId transactionId = lts->mWrBridge->FlushTransactionIdsForEpoch(aEpoch, aCompositeEnd);
Unused << cpcp->SendDidComposite(aLayersId, transactionId, aCompositeStart, aCompositeEnd);
}
}
});
}

View File

@ -140,10 +140,10 @@ public:
virtual void DidComposite(LayersId aId, TimeStamp& aCompositeStart, TimeStamp& aCompositeEnd) {}
virtual void NotifyDidCompositeToPipeline(const wr::PipelineId& aPipelineId,
const wr::Epoch& aEpoch,
TimeStamp& aCompositeStart,
TimeStamp& aCompositeEnd) { MOZ_ASSERT_UNREACHABLE("WebRender only"); }
virtual void NotifyPipelineRendered(const wr::PipelineId& aPipelineId,
const wr::Epoch& aEpoch,
TimeStamp& aCompositeStart,
TimeStamp& aCompositeEnd) { MOZ_ASSERT_UNREACHABLE("WebRender only"); }
virtual void NotifyPipelineRemoved(const wr::PipelineId& aPipelineId) { MOZ_ASSERT_UNREACHABLE("WebRender only"); }
// HostIPCAllocator
@ -580,10 +580,10 @@ protected:
using CompositorBridgeParentBase::DidComposite;
void DidComposite(TimeStamp& aCompositeStart, TimeStamp& aCompositeEnd);
void NotifyDidCompositeToPipeline(const wr::PipelineId& aPipelineId,
const wr::Epoch& aEpoch,
TimeStamp& aCompositeStart,
TimeStamp& aCompositeEnd) override;
void NotifyPipelineRendered(const wr::PipelineId& aPipelineId,
const wr::Epoch& aEpoch,
TimeStamp& aCompositeStart,
TimeStamp& aCompositeEnd) override;
void NotifyPipelineRemoved(const wr::PipelineId& aPipelineId) override;
void NotifyDidComposite(TransactionId aTransactionId, TimeStamp& aCompositeStart, TimeStamp& aCompositeEnd);

View File

@ -478,17 +478,29 @@ WebRenderBridgeParent::RecvDeleteCompositorAnimations(InfallibleTArray<uint64_t>
return IPC_OK();
}
for (uint32_t i = 0; i < aIds.Length(); i++) {
if (mActiveAnimations.erase(aIds[i]) > 0) {
mAnimStorage->ClearById(aIds[i]);
} else {
NS_ERROR("Tried to delete invalid animation");
}
}
// Once mWrEpoch has been rendered, we can delete these compositor animations
mCompositorAnimationsToDelete.push(CompositorAnimationIdsForEpoch(mWrEpoch, Move(aIds)));
return IPC_OK();
}
void
WebRenderBridgeParent::RemoveEpochDataPriorTo(const wr::Epoch& aRenderedEpoch)
{
while (!mCompositorAnimationsToDelete.empty()) {
if (mCompositorAnimationsToDelete.front().mEpoch.mHandle > aRenderedEpoch.mHandle) {
break;
}
for (uint64_t id : mCompositorAnimationsToDelete.front().mIds) {
if (mActiveAnimations.erase(id) > 0) {
mAnimStorage->ClearById(id);
} else {
NS_ERROR("Tried to delete invalid animation");
}
}
mCompositorAnimationsToDelete.pop();
}
}
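A sketch of the epoch-keyed deferred-deletion queue implemented above, written as JS with hypothetical names (the real code is the surrounding C++): deletions are queued with the epoch that was current when they were requested and only applied once that epoch has actually been rendered.
const pendingDeletes = []; // entries: { epoch, ids }, pushed in epoch order
function deferDelete(currentEpoch, ids) {
  pendingDeletes.push({ epoch: currentEpoch, ids });
}
function removeEpochDataPriorTo(renderedEpoch, clearById) {
  while (pendingDeletes.length && pendingDeletes[0].epoch <= renderedEpoch) {
    for (const id of pendingDeletes.shift().ids) {
      clearById(id);
    }
  }
}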
CompositorBridgeParent*
WebRenderBridgeParent::GetRootCompositorBridgeParent() const
{
@ -728,6 +740,7 @@ WebRenderBridgeParent::RecvParentCommands(nsTArray<WebRenderParentCommand>&& aCo
void
WebRenderBridgeParent::ProcessWebRenderParentCommands(const InfallibleTArray<WebRenderParentCommand>& aCommands)
{
wr::TransactionBuilder txn;
for (InfallibleTArray<WebRenderParentCommand>::index_type i = 0; i < aCommands.Length(); ++i) {
const WebRenderParentCommand& cmd = aCommands[i];
switch (cmd.type()) {
@ -740,7 +753,7 @@ WebRenderBridgeParent::ProcessWebRenderParentCommands(const InfallibleTArray<Web
}
case WebRenderParentCommand::TOpRemovePipelineIdForCompositable: {
const OpRemovePipelineIdForCompositable& op = cmd.get_OpRemovePipelineIdForCompositable();
RemovePipelineIdForCompositable(op.pipelineId());
RemovePipelineIdForCompositable(op.pipelineId(), txn);
break;
}
case WebRenderParentCommand::TOpAddExternalImageIdForCompositable: {
@ -794,6 +807,7 @@ WebRenderBridgeParent::ProcessWebRenderParentCommands(const InfallibleTArray<Web
}
}
}
mApi->SendTransaction(txn);
}
mozilla::ipc::IPCResult
@ -896,7 +910,8 @@ WebRenderBridgeParent::AddPipelineIdForCompositable(const wr::PipelineId& aPipel
}
void
WebRenderBridgeParent::RemovePipelineIdForCompositable(const wr::PipelineId& aPipelineId)
WebRenderBridgeParent::RemovePipelineIdForCompositable(const wr::PipelineId& aPipelineId,
wr::TransactionBuilder& aTxn)
{
if (mDestroyed) {
return;
@ -907,12 +922,9 @@ WebRenderBridgeParent::RemovePipelineIdForCompositable(const wr::PipelineId& aPi
return;
}
wr::TransactionBuilder txn;
wrHost->ClearWrBridge();
mAsyncImageManager->RemoveAsyncImagePipeline(aPipelineId, txn);
txn.RemovePipeline(aPipelineId);
mApi->SendTransaction(txn);
mAsyncImageManager->RemoveAsyncImagePipeline(aPipelineId, aTxn);
aTxn.RemovePipeline(aPipelineId);
mAsyncCompositables.Remove(wr::AsUint64(aPipelineId));
return;
}
@ -997,6 +1009,7 @@ WebRenderBridgeParent::RecvClearCachedResources()
mAnimStorage->ClearById(*iter);
}
mActiveAnimations.clear();
std::queue<CompositorAnimationIdsForEpoch>().swap(mCompositorAnimationsToDelete); // clear queue
return IPC_OK();
}
@ -1461,6 +1474,7 @@ WebRenderBridgeParent::ClearResources()
mAnimStorage->ClearById(*iter);
}
mActiveAnimations.clear();
std::queue<CompositorAnimationIdsForEpoch>().swap(mCompositorAnimationsToDelete); // clear queue
if (mWidget) {
mCompositorScheduler->Destroy();

View File

@ -183,6 +183,8 @@ public:
AsyncImagePipelineManager* aImageMgr,
CompositorAnimationStorage* aAnimStorage);
void RemoveEpochDataPriorTo(const wr::Epoch& aRenderedEpoch);
private:
explicit WebRenderBridgeParent(const wr::PipelineId& aPipelineId);
virtual ~WebRenderBridgeParent();
@ -200,7 +202,8 @@ private:
void AddPipelineIdForCompositable(const wr::PipelineId& aPipelineIds,
const CompositableHandle& aHandle,
const bool& aAsync);
void RemovePipelineIdForCompositable(const wr::PipelineId& aPipelineId);
void RemovePipelineIdForCompositable(const wr::PipelineId& aPipelineId,
wr::TransactionBuilder& aTxn);
void AddExternalImageIdForCompositable(const ExternalImageId& aImageId,
const CompositableHandle& aHandle);
@ -239,6 +242,17 @@ private:
TimeStamp mFwdTime;
};
struct CompositorAnimationIdsForEpoch {
CompositorAnimationIdsForEpoch(const wr::Epoch& aEpoch, InfallibleTArray<uint64_t>&& aIds)
: mEpoch(aEpoch)
, mIds(Move(aIds))
{
}
wr::Epoch mEpoch;
InfallibleTArray<uint64_t> mIds;
};
CompositorBridgeParentBase* MOZ_NON_OWNING_REF mCompositorBridge;
wr::PipelineId mPipelineId;
RefPtr<widget::CompositorWidget> mWidget;
@ -262,6 +276,7 @@ private:
uint64_t mParentLayerObserverEpoch;
std::queue<PendingTransactionId> mPendingTransactionIds;
std::queue<CompositorAnimationIdsForEpoch> mCompositorAnimationsToDelete;
wr::Epoch mWrEpoch;
wr::IdNamespace mIdNamespace;

View File

@ -40,24 +40,6 @@ ImageBrushData fetch_image_data(int address) {
return data;
}
struct ImageBrushExtraData {
RectWithSize rendered_task_rect;
vec2 offset;
};
ImageBrushExtraData fetch_image_extra_data(int address) {
vec4[2] raw_data = fetch_from_resource_cache_2(address);
RectWithSize rendered_task_rect = RectWithSize(
raw_data[0].xy,
raw_data[0].zw
);
ImageBrushExtraData data = ImageBrushExtraData(
rendered_task_rect,
raw_data[1].xy
);
return data;
}
#ifdef WR_FEATURE_ALPHA_PASS
vec2 transform_point_snapped(
vec2 local_pos,
@ -105,7 +87,7 @@ void brush_vs(
max_uv - vec2(0.5)
) / texture_size.xyxy;
vec2 f;
vec2 f = (vi.local_pos - local_rect.p0) / local_rect.size;
#ifdef WR_FEATURE_ALPHA_PASS
int color_mode = user_data.y >> 16;
@ -121,41 +103,18 @@ void brush_vs(
// image.
switch (raster_space) {
case RASTER_SCREEN: {
ImageBrushExtraData extra_data = fetch_image_extra_data(user_data.z);
vec2 snapped_device_pos;
// For drop-shadows, we need to apply a local offset
// in order to generate the correct screen-space UV.
// For other effects, we can use the 1:1 mapping of
// the vertex device position for the UV generation.
switch (color_mode) {
case COLOR_MODE_ALPHA: {
vec2 local_pos = vi.local_pos - extra_data.offset;
snapped_device_pos = transform_point_snapped(
local_pos,
local_rect,
transform
);
break;
}
default:
snapped_device_pos = vi.snapped_device_pos;
break;
}
f = (snapped_device_pos - extra_data.rendered_task_rect.p0) / extra_data.rendered_task_rect.size;
// Since the screen space UVs specify an arbitrary quad, do
// a bilinear interpolation to get the correct UV for this
// local position.
ImageResourceExtra extra_data = fetch_image_resource_extra(user_data.x);
vec2 x = mix(extra_data.st_tl, extra_data.st_tr, f.x);
vec2 y = mix(extra_data.st_bl, extra_data.st_br, f.x);
f = mix(x, y, f.y);
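// i.e. the standard bilinear interpolation:
//   uv = (1 - f.y) * ((1 - f.x) * st_tl + f.x * st_tr)
//      +       f.y * ((1 - f.x) * st_bl + f.x * st_br)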
break;
}
case RASTER_LOCAL:
default: {
f = (vi.local_pos - local_rect.p0) / local_rect.size;
default:
break;
}
}
#else
f = (vi.local_pos - local_rect.p0) / local_rect.size;
#endif
// Offset and scale vUv here to avoid doing it in the fragment shader.

View File

@ -58,8 +58,6 @@ void brush_vs(
vRepeatedSize = local_rect.size / tile_repeat.xy;
vRepeatedSize.y *= ratio_xy;
vPos;
vGradientAddress = user_data.x;
// Whether to repeat the gradient instead of clamping.

View File

@ -15,6 +15,7 @@ flat varying vec4 vPoint_Tangent0;
flat varying vec4 vPoint_Tangent1;
flat varying vec3 vDotParams;
flat varying vec2 vAlphaMask;
flat varying vec4 vTaskRect;
#ifdef WR_VERTEX_SHADER
// Matches BorderCorner enum in border.rs
@ -145,9 +146,13 @@ void main(void) {
vec2 device_pos = world_pos.xy * uDevicePixelRatio;
// Position vertex within the render task area.
vec2 final_pos = device_pos -
area.screen_origin +
area.common_data.task_rect.p0;
vec2 task_rect_origin = area.common_data.task_rect.p0;
vec2 final_pos = device_pos - area.screen_origin + task_rect_origin;
// We pass the task rectangle to the fragment shader so that we can do one last clip
// in order to ensure that we don't draw outside the task rectangle.
vTaskRect.xy = task_rect_origin;
vTaskRect.zw = task_rect_origin + area.common_data.task_rect.size;
// Calculate the local space position for this vertex.
vec4 node_pos = get_node_pos(world_pos.xy, scroll_node);
@ -190,6 +195,9 @@ void main(void) {
// Completely mask out the clip if zeroing out the rect.
d = d * vAlphaMask.y;
// Make sure that we don't draw outside the task rectangle.
d = d * point_inside_rect(gl_FragCoord.xy, vTaskRect.xy, vTaskRect.zw);
oFragColor = vec4(d, 0.0, 0.0, 1.0);
}
#endif

View File

@ -4,6 +4,8 @@
uniform HIGHP_SAMPLER_FLOAT sampler2D sResourceCache;
#define VECS_PER_IMAGE_RESOURCE 2
// TODO(gw): This is here temporarily while we have
// both GPU store and cache. When the GPU
// store code is removed, we can change the
@ -113,4 +115,23 @@ ImageResource fetch_image_resource_direct(ivec2 address) {
return ImageResource(uv_rect, data[1].x, data[1].yzw);
}
// Fetch optional extra data for a texture cache resource. This can contain
// a polygon defining a UV rect within the texture cache resource.
struct ImageResourceExtra {
vec2 st_tl;
vec2 st_tr;
vec2 st_bl;
vec2 st_br;
};
ImageResourceExtra fetch_image_resource_extra(int address) {
vec4 data[2] = fetch_from_resource_cache_2(address + VECS_PER_IMAGE_RESOURCE);
return ImageResourceExtra(
data[0].xy,
data[0].zw,
data[1].xy,
data[1].zw
);
}
#endif //WR_VERTEX_SHADER

View File

@ -18,7 +18,6 @@ use gpu_types::{PrimitiveInstance, RasterizationSpace, SimplePrimitiveInstance,
use gpu_types::ZBufferIdGenerator;
use internal_types::{FastHashMap, SavedTargetIndex, SourceTexture};
use picture::{PictureCompositeMode, PicturePrimitive, PictureSurface};
use picture::{IMAGE_BRUSH_BLOCKS, IMAGE_BRUSH_EXTRA_BLOCKS};
use plane_split::{BspSplitter, Polygon, Splitter};
use prim_store::{CachedGradient, ImageSource, PrimitiveIndex, PrimitiveKind, PrimitiveMetadata, PrimitiveStore};
use prim_store::{BrushPrimitive, BrushKind, DeferredResolve, EdgeAaSegmentMask, PictureIndex, PrimitiveRun};
@ -700,7 +699,7 @@ impl AlphaBatchBuilder {
uv_rect_address.as_int(),
(ShaderColorMode::ColorBitmap as i32) << 16 |
RasterizationSpace::Screen as i32,
picture.extra_gpu_data_handle.as_int(gpu_cache),
0,
],
};
batch.push(PrimitiveInstance::from(instance));
@ -750,11 +749,7 @@ impl AlphaBatchBuilder {
.as_int();
// Get the GPU cache address of the extra data handle.
let extra_data_address = gpu_cache.get_address(&picture.extra_gpu_data_handle);
let shadow_prim_address = extra_data_address
.offset(IMAGE_BRUSH_EXTRA_BLOCKS);
let shadow_data_address = extra_data_address
.offset(IMAGE_BRUSH_EXTRA_BLOCKS + IMAGE_BRUSH_BLOCKS);
let shadow_prim_address = gpu_cache.get_address(&picture.extra_gpu_data_handle);
let shadow_instance = BrushInstance {
picture_address: task_address,
@ -770,7 +765,7 @@ impl AlphaBatchBuilder {
shadow_uv_rect_address,
(ShaderColorMode::Alpha as i32) << 16 |
RasterizationSpace::Screen as i32,
shadow_data_address.as_int(),
0,
],
};
@ -780,7 +775,7 @@ impl AlphaBatchBuilder {
content_uv_rect_address,
(ShaderColorMode::ColorBitmap as i32) << 16 |
RasterizationSpace::Screen as i32,
extra_data_address.as_int(),
0,
],
..shadow_instance
};
@ -953,7 +948,7 @@ impl AlphaBatchBuilder {
uv_rect_address,
(ShaderColorMode::ColorBitmap as i32) << 16 |
RasterizationSpace::Screen as i32,
picture.extra_gpu_data_handle.as_int(gpu_cache),
0,
],
};
batch.push(PrimitiveInstance::from(instance));

View File

@ -101,6 +101,9 @@ pub struct ClipScrollNode {
/// World transform for content transformed by this node.
pub world_content_transform: LayoutToWorldFastTransform,
/// The current transform kind of world_content_transform.
pub transform_kind: TransformedRectKind,
/// Pipeline that this layer belongs to
pub pipeline_id: PipelineId,
@ -142,6 +145,7 @@ impl ClipScrollNode {
local_viewport_rect: *rect,
world_viewport_transform: LayoutToWorldFastTransform::identity(),
world_content_transform: LayoutToWorldFastTransform::identity(),
transform_kind: TransformedRectKind::AxisAligned,
parent: parent_index,
children: Vec::new(),
pipeline_id,
@ -285,15 +289,10 @@ impl ClipScrollNode {
}
};
let transform_kind = if self.world_content_transform.preserves_2d_axis_alignment() {
TransformedRectKind::AxisAligned
} else {
TransformedRectKind::Complex
};
let data = ClipScrollNodeData {
transform: self.world_content_transform.into(),
inv_transform,
transform_kind: transform_kind as u32 as f32,
transform_kind: self.transform_kind as u32 as f32,
padding: [0.0; 3],
};
@ -321,6 +320,12 @@ impl ClipScrollNode {
self.update_transform(state, next_coordinate_system_id, scene_properties);
self.transform_kind = if self.world_content_transform.preserves_2d_axis_alignment() {
TransformedRectKind::AxisAligned
} else {
TransformedRectKind::Complex
};
// If this node is a reference frame, we check if it has a non-invertible matrix.
// For non-reference-frames we assume that they will produce only additional
// translations which should be invertible.

View File

@ -1843,6 +1843,7 @@ impl<'a> DisplayListFlattener<'a> {
stops_count: usize,
extend_mode: ExtendMode,
gradient_index: CachedGradientIndex,
stretch_size: LayoutSize,
) {
// Try to ensure that if the gradient is specified in reverse, then so long as the stops
// are also supplied in reverse that the rendered result will be equivalent. To do this,
@ -1871,6 +1872,7 @@ impl<'a> DisplayListFlattener<'a> {
start_point: sp,
end_point: ep,
gradient_index,
stretch_size,
},
None,
);
@ -1889,43 +1891,49 @@ impl<'a> DisplayListFlattener<'a> {
stops: ItemRange<GradientStop>,
stops_count: usize,
extend_mode: ExtendMode,
tile_size: LayoutSize,
stretch_size: LayoutSize,
tile_spacing: LayoutSize,
) {
let gradient_index = CachedGradientIndex(self.cached_gradients.len());
self.cached_gradients.push(CachedGradient::new());
let prim_infos = info.decompose(
tile_size,
tile_spacing,
64 * 64,
);
if prim_infos.is_empty() {
self.add_gradient_impl(
clip_and_scroll,
info,
start_point,
end_point,
stops,
stops_count,
extend_mode,
gradient_index,
if tile_spacing != LayoutSize::zero() {
let prim_infos = info.decompose(
stretch_size,
tile_spacing,
64 * 64,
);
} else {
for prim_info in prim_infos {
self.add_gradient_impl(
clip_and_scroll,
&prim_info,
start_point,
end_point,
stops,
stops_count,
extend_mode,
gradient_index,
);
if !prim_infos.is_empty() {
for prim_info in prim_infos {
self.add_gradient_impl(
clip_and_scroll,
&prim_info,
start_point,
end_point,
stops,
stops_count,
extend_mode,
gradient_index,
prim_info.rect.size,
);
}
return;
}
}
self.add_gradient_impl(
clip_and_scroll,
info,
start_point,
end_point,
stops,
stops_count,
extend_mode,
gradient_index,
stretch_size,
);
}
fn add_radial_gradient_impl(
@ -1939,6 +1947,7 @@ impl<'a> DisplayListFlattener<'a> {
stops: ItemRange<GradientStop>,
extend_mode: ExtendMode,
gradient_index: CachedGradientIndex,
stretch_size: LayoutSize,
) {
let prim = BrushPrimitive::new(
BrushKind::RadialGradient {
@ -1949,6 +1958,7 @@ impl<'a> DisplayListFlattener<'a> {
end_radius,
ratio_xy,
gradient_index,
stretch_size,
},
None,
);
@ -1971,45 +1981,51 @@ impl<'a> DisplayListFlattener<'a> {
ratio_xy: f32,
stops: ItemRange<GradientStop>,
extend_mode: ExtendMode,
tile_size: LayoutSize,
stretch_size: LayoutSize,
tile_spacing: LayoutSize,
) {
let gradient_index = CachedGradientIndex(self.cached_gradients.len());
self.cached_gradients.push(CachedGradient::new());
let prim_infos = info.decompose(
tile_size,
tile_spacing,
64 * 64,
);
if prim_infos.is_empty() {
self.add_radial_gradient_impl(
clip_and_scroll,
info,
center,
start_radius,
end_radius,
ratio_xy,
stops,
extend_mode,
gradient_index,
if tile_spacing != LayoutSize::zero() {
let prim_infos = info.decompose(
stretch_size,
tile_spacing,
64 * 64,
);
} else {
for prim_info in prim_infos {
self.add_radial_gradient_impl(
clip_and_scroll,
&prim_info,
center,
start_radius,
end_radius,
ratio_xy,
stops,
extend_mode,
gradient_index,
);
if !prim_infos.is_empty() {
for prim_info in prim_infos {
self.add_radial_gradient_impl(
clip_and_scroll,
&prim_info,
center,
start_radius,
end_radius,
ratio_xy,
stops,
extend_mode,
gradient_index,
stretch_size,
);
}
return;
}
}
self.add_radial_gradient_impl(
clip_and_scroll,
info,
center,
start_radius,
end_radius,
ratio_xy,
stops,
extend_mode,
gradient_index,
stretch_size,
);
}
pub fn add_text(
@ -2137,7 +2153,6 @@ impl<'a> DisplayListFlattener<'a> {
// See if conditions are met to run through the new
// image brush shader, which supports segments.
if tile_spacing == LayoutSize::zero() &&
stretch_size == info.rect.size &&
tile_offset.is_none() {
let prim = BrushPrimitive::new(
BrushKind::Image {

View File

@ -10,7 +10,7 @@ use clip_scroll_node::{ClipScrollNode};
use clip_scroll_tree::{ClipScrollNodeIndex, ClipScrollTree};
use display_list_flattener::{DisplayListFlattener};
use gpu_cache::GpuCache;
use gpu_types::{ClipChainRectIndex, ClipScrollNodeData};
use gpu_types::{ClipChainRectIndex, ClipScrollNodeData, UvRectKind};
use hit_test::{HitTester, HitTestingRun};
use internal_types::{FastHashMap};
use picture::PictureSurface;
@ -233,6 +233,7 @@ impl FrameBuilder {
PrimitiveIndex(0),
DeviceIntPoint::zero(),
pic_state.tasks,
UvRectKind::Rect,
);
let render_task_id = frame_state.render_tasks.add(root_render_task);

View File

@ -21,6 +21,7 @@ use device::TextureFilter;
use euclid::{TypedPoint2D, TypedSize2D, TypedVector2D};
use glyph_cache::{CachedGlyphInfo, GlyphCache, GlyphCacheEntry};
use gpu_cache::GpuCache;
use gpu_types::UvRectKind;
use internal_types::ResourceCacheError;
#[cfg(feature = "pathfinder")]
use pathfinder_font_renderer;
@ -800,6 +801,7 @@ impl GlyphRasterizer {
None,
gpu_cache,
Some(glyph_key_cache.eviction_notice()),
UvRectKind::Rect,
);
GlyphCacheEntry::Cached(CachedGlyphInfo {
texture_cache_handle,

View File

@ -151,13 +151,6 @@ impl GpuCacheAddress {
v: u16::MAX,
}
}
pub fn offset(&self, offset: usize) -> Self {
GpuCacheAddress {
u: self.u + offset as u16,
v: self.v
}
}
}
impl Add<usize> for GpuCacheAddress {

View File

@ -277,15 +277,38 @@ impl ClipScrollNodeData {
#[repr(C)]
pub struct ClipChainRectIndex(pub usize);
// Texture cache resources can be either a simple rect, or define
// a polygon within a rect by specifying a UV coordinate for each
// corner. This is useful for rendering screen-space rasterized
// off-screen surfaces.
#[derive(Debug, Copy, Clone)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub enum UvRectKind {
// The 2d bounds of the texture cache entry define the
// valid UV space for this texture cache entry.
Rect,
// The four vertices below define a quad within
// the texture cache entry rect. The shader can
// use a bilerp() to correctly interpolate a
// UV coord in the vertex shader.
Quad {
top_left: DevicePoint,
top_right: DevicePoint,
bottom_left: DevicePoint,
bottom_right: DevicePoint,
},
}
#[derive(Debug, Copy, Clone)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[repr(C)]
pub struct ImageSource {
pub p0: DevicePoint,
pub p1: DevicePoint,
pub texture_layer: f32,
pub user_data: [f32; 3],
pub uv_rect_kind: UvRectKind,
}
impl ImageSource {
@ -302,5 +325,22 @@ impl ImageSource {
self.user_data[1],
self.user_data[2],
]);
// If this is a polygon uv kind, then upload the four vertices.
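// These two extra vec4 blocks are what fetch_image_resource_extra() in
// resource_cache.glsl reads back as st_tl/st_tr (data[0].xy/.zw) and
// st_bl/st_br (data[1].xy/.zw).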
if let UvRectKind::Quad { top_left, top_right, bottom_left, bottom_right } = self.uv_rect_kind {
request.push([
top_left.x,
top_left.y,
top_right.x,
top_right.y,
]);
request.push([
bottom_left.x,
bottom_left.y,
bottom_right.x,
bottom_right.y,
]);
}
}
}

View File

@ -2,13 +2,15 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use api::{FilterOp, MixBlendMode, PipelineId, PremultipliedColorF};
use api::{DeviceIntRect, DeviceIntSize, LayoutRect};
use api::{PictureIntPoint, PictureIntRect, PictureIntSize};
use api::{DeviceRect, FilterOp, MixBlendMode, PipelineId, PremultipliedColorF};
use api::{DeviceIntRect, DeviceIntSize, DevicePoint, LayoutPoint, LayoutRect};
use api::{DevicePixelScale, PictureIntPoint, PictureIntRect, PictureIntSize};
use box_shadow::{BLUR_SAMPLE_SCALE};
use clip_scroll_node::ClipScrollNode;
use clip_scroll_tree::ClipScrollNodeIndex;
use frame_builder::{FrameBuildingContext, FrameBuildingState, PictureState};
use frame_builder::{FrameBuildingContext, FrameBuildingState, PictureState, PrimitiveRunContext};
use gpu_cache::{GpuCacheHandle};
use gpu_types::UvRectKind;
use prim_store::{PrimitiveIndex, PrimitiveRun, PrimitiveRunLocalRect};
use prim_store::{PrimitiveMetadata, ScrollNodeAndClipChain};
use render_task::{ClearMode, RenderTask, RenderTaskCacheEntryHandle};
@ -16,6 +18,7 @@ use render_task::{RenderTaskCacheKey, RenderTaskCacheKeyKind, RenderTaskId, Rend
use scene::{FilterOpHelpers, SceneProperties};
use std::mem;
use tiling::RenderTargetKind;
use util::TransformedRectKind;
/*
A picture represents a dynamically rendered image. It consists of:
@ -27,9 +30,6 @@ use tiling::RenderTargetKind;
this picture (e.g. in screen space or local space).
*/
pub const IMAGE_BRUSH_EXTRA_BLOCKS: usize = 2;
pub const IMAGE_BRUSH_BLOCKS: usize = 6;
/// Specifies how this Picture should be composited
/// onto the target it belongs to.
#[derive(Debug, Copy, Clone, PartialEq)]
@ -127,10 +127,6 @@ pub struct PicturePrimitive {
// in this picture.
pub apply_local_clip_rect: bool,
// The current screen-space rect of the rendered
// portion of this picture.
task_rect: DeviceIntRect,
// If a mix-blend-mode, contains the render task for
// the readback of the framebuffer that we use to sample
// from in the mix-blend-mode shader.
@ -199,7 +195,6 @@ impl PicturePrimitive {
extra_gpu_data_handle: GpuCacheHandle::new(),
apply_local_clip_rect,
pipeline_id,
task_rect: DeviceIntRect::zero(),
id,
}
}
@ -281,11 +276,12 @@ impl PicturePrimitive {
&mut self,
prim_index: PrimitiveIndex,
prim_metadata: &mut PrimitiveMetadata,
prim_run_context: &PrimitiveRunContext,
mut pic_state_for_children: PictureState,
pic_state: &mut PictureState,
frame_context: &FrameBuildingContext,
frame_state: &mut FrameBuildingState,
) -> Option<DeviceIntRect> {
) {
let prim_screen_rect = prim_metadata
.screen_rect
.as_ref()
@ -293,10 +289,9 @@ impl PicturePrimitive {
if self.can_draw_directly_to_parent_surface() {
pic_state.tasks.extend(pic_state_for_children.tasks);
self.surface = None;
return None;
return;
}
// TODO(gw): Almost all of the Picture types below use extra_gpu_cache_data
// to store the same type of data. The exception is the filter
// with a ColorMatrix, which stores the color matrix here. It's
@ -322,6 +317,13 @@ impl PicturePrimitive {
.intersection(&prim_screen_rect.unclipped)
.unwrap();
let uv_rect_kind = calculate_uv_rect_kind(
&prim_metadata.local_rect,
&prim_run_context.scroll_node,
&device_rect,
frame_context.device_pixel_scale,
);
// If we are drawing a blur that has primitives or clips that contain
// a complex coordinate system, don't bother caching them (for now).
// It's likely that they are animating and caching may not help here
@ -334,6 +336,7 @@ impl PicturePrimitive {
prim_index,
device_rect.origin,
pic_state_for_children.tasks,
uv_rect_kind,
);
let picture_task_id = frame_state.render_tasks.add(picture_task);
@ -388,6 +391,7 @@ impl PicturePrimitive {
prim_index,
device_rect.origin,
child_tasks,
uv_rect_kind,
);
let picture_task_id = render_tasks.add(picture_task);
@ -412,10 +416,8 @@ impl PicturePrimitive {
};
self.surface = Some(surface);
Some(device_rect)
}
Some(PictureCompositeMode::Filter(FilterOp::DropShadow(_, blur_radius, _))) => {
Some(PictureCompositeMode::Filter(FilterOp::DropShadow(offset, blur_radius, color))) => {
let blur_std_deviation = blur_radius * frame_context.device_pixel_scale.0;
let blur_range = (blur_std_deviation * BLUR_SAMPLE_SCALE).ceil() as i32;
@ -433,11 +435,19 @@ impl PicturePrimitive {
.intersection(&prim_screen_rect.unclipped)
.unwrap();
let uv_rect_kind = calculate_uv_rect_kind(
&prim_metadata.local_rect,
&prim_run_context.scroll_node,
&device_rect,
frame_context.device_pixel_scale,
);
let mut picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(device_rect.size)),
prim_index,
device_rect.origin,
pic_state_for_children.tasks,
uv_rect_kind,
);
picture_task.mark_for_saving();
@ -457,112 +467,7 @@ impl PicturePrimitive {
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
Some(device_rect)
}
Some(PictureCompositeMode::MixBlend(..)) => {
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(prim_screen_rect.clipped.size)),
prim_index,
prim_screen_rect.clipped.origin,
pic_state_for_children.tasks,
);
let readback_task_id = frame_state.render_tasks.add(
RenderTask::new_readback(prim_screen_rect.clipped)
);
self.secondary_render_task_id = Some(readback_task_id);
pic_state.tasks.push(readback_task_id);
let render_task_id = frame_state.render_tasks.add(picture_task);
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
Some(prim_screen_rect.clipped)
}
Some(PictureCompositeMode::Filter(filter)) => {
let device_rect = match filter {
FilterOp::ColorMatrix(m) => {
if let Some(mut request) = frame_state.gpu_cache.request(&mut self.extra_gpu_data_handle) {
for i in 0..5 {
request.push([m[i*4], m[i*4+1], m[i*4+2], m[i*4+3]]);
}
}
None
}
_ => Some(prim_screen_rect.clipped),
};
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(prim_screen_rect.clipped.size)),
prim_index,
prim_screen_rect.clipped.origin,
pic_state_for_children.tasks,
);
let render_task_id = frame_state.render_tasks.add(picture_task);
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
device_rect
}
Some(PictureCompositeMode::Blit) | None => {
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(prim_screen_rect.clipped.size)),
prim_index,
prim_screen_rect.clipped.origin,
pic_state_for_children.tasks,
);
let render_task_id = frame_state.render_tasks.add(picture_task);
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
Some(prim_screen_rect.clipped)
}
}
}
pub fn prepare_for_render(
&mut self,
prim_index: PrimitiveIndex,
prim_metadata: &mut PrimitiveMetadata,
pic_state_for_children: PictureState,
pic_state: &mut PictureState,
frame_context: &FrameBuildingContext,
frame_state: &mut FrameBuildingState,
) {
let device_rect = self.prepare_for_render_inner(
prim_index,
prim_metadata,
pic_state_for_children,
pic_state,
frame_context,
frame_state,
);
// If this picture type uses the common / general GPU data
// format, then write it now.
if let Some(device_rect) = device_rect {
// If scrolling or property animation has resulted in the task
// rect being different than last time, invalidate the GPU
// cache entry for this picture to ensure that the correct
// task rect is provided to the image shader.
if self.task_rect != device_rect {
frame_state.gpu_cache.invalidate(&self.extra_gpu_data_handle);
self.task_rect = device_rect;
}
if let Some(mut request) = frame_state.gpu_cache.request(&mut self.extra_gpu_data_handle) {
// [GLSL ImageBrushExtraData: task_rect, offset]
request.push(self.task_rect.to_f32());
request.push([0.0; 4]);
// TODO(gw): It would make the shaders a bit simpler if the offset
// was provided as part of the brush::picture instance,
// rather than in the Picture data itself.
if let Some(PictureCompositeMode::Filter(FilterOp::DropShadow(offset, _, color))) = self.composite_mode {
if let Some(mut request) = frame_state.gpu_cache.request(&mut self.extra_gpu_data_handle) {
// TODO(gw): This is very hacky code below! It stores an extra
// brush primitive below for the special case of a
// drop-shadow where we need a different local
@ -570,9 +475,6 @@ impl PicturePrimitive {
// we could consider abstracting the code in prim_store.rs
// that writes a brush primitive header.
// NOTE: If any of the layout below changes, the IMAGE_BRUSH_EXTRA_BLOCKS and
// IMAGE_BRUSH_BLOCKS fields above *must* be updated.
// Basic brush primitive header is (see end of prepare_prim_for_render_inner in prim_store.rs)
// local_rect
// clip_rect
@ -592,12 +494,175 @@ impl PicturePrimitive {
// segment rect / repetitions
request.push(shadow_rect);
request.push([1.0, 1.0, 0.0, 0.0]);
// Now write another GLSL ImageBrush struct, for the shadow to reference.
request.push(self.task_rect.to_f32());
request.push([offset.x, offset.y, 0.0, 0.0]);
}
}
Some(PictureCompositeMode::MixBlend(..)) => {
let uv_rect_kind = calculate_uv_rect_kind(
&prim_metadata.local_rect,
&prim_run_context.scroll_node,
&prim_screen_rect.clipped,
frame_context.device_pixel_scale,
);
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(prim_screen_rect.clipped.size)),
prim_index,
prim_screen_rect.clipped.origin,
pic_state_for_children.tasks,
uv_rect_kind,
);
let readback_task_id = frame_state.render_tasks.add(
RenderTask::new_readback(prim_screen_rect.clipped)
);
self.secondary_render_task_id = Some(readback_task_id);
pic_state.tasks.push(readback_task_id);
let render_task_id = frame_state.render_tasks.add(picture_task);
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
}
Some(PictureCompositeMode::Filter(filter)) => {
if let FilterOp::ColorMatrix(m) = filter {
if let Some(mut request) = frame_state.gpu_cache.request(&mut self.extra_gpu_data_handle) {
for i in 0..5 {
request.push([m[i*4], m[i*4+1], m[i*4+2], m[i*4+3]]);
}
}
}
let uv_rect_kind = calculate_uv_rect_kind(
&prim_metadata.local_rect,
&prim_run_context.scroll_node,
&prim_screen_rect.clipped,
frame_context.device_pixel_scale,
);
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(prim_screen_rect.clipped.size)),
prim_index,
prim_screen_rect.clipped.origin,
pic_state_for_children.tasks,
uv_rect_kind,
);
let render_task_id = frame_state.render_tasks.add(picture_task);
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
}
Some(PictureCompositeMode::Blit) | None => {
let uv_rect_kind = calculate_uv_rect_kind(
&prim_metadata.local_rect,
&prim_run_context.scroll_node,
&prim_screen_rect.clipped,
frame_context.device_pixel_scale,
);
let picture_task = RenderTask::new_picture(
RenderTaskLocation::Dynamic(None, Some(prim_screen_rect.clipped.size)),
prim_index,
prim_screen_rect.clipped.origin,
pic_state_for_children.tasks,
uv_rect_kind,
);
let render_task_id = frame_state.render_tasks.add(picture_task);
pic_state.tasks.push(render_task_id);
self.surface = Some(PictureSurface::RenderTask(render_task_id));
}
}
}
pub fn prepare_for_render(
&mut self,
prim_index: PrimitiveIndex,
prim_metadata: &mut PrimitiveMetadata,
prim_run_context: &PrimitiveRunContext,
pic_state_for_children: PictureState,
pic_state: &mut PictureState,
frame_context: &FrameBuildingContext,
frame_state: &mut FrameBuildingState,
) {
self.prepare_for_render_inner(
prim_index,
prim_metadata,
prim_run_context,
pic_state_for_children,
pic_state,
frame_context,
frame_state,
);
}
}
// Calculate a single screen-space UV for a picture.
fn calculate_screen_uv(
local_pos: &LayoutPoint,
clip_scroll_node: &ClipScrollNode,
rendered_rect: &DeviceRect,
device_pixel_scale: DevicePixelScale,
) -> DevicePoint {
let world_pos = clip_scroll_node
.world_content_transform
.transform_point2d(local_pos);
let mut device_pos = world_pos * device_pixel_scale;
// Apply snapping for axis-aligned scroll nodes, as per prim_shared.glsl.
if clip_scroll_node.transform_kind == TransformedRectKind::AxisAligned {
device_pos.x = (device_pos.x + 0.5).floor();
device_pos.y = (device_pos.y + 0.5).floor();
}
DevicePoint::new(
(device_pos.x - rendered_rect.origin.x) / rendered_rect.size.width,
(device_pos.y - rendered_rect.origin.y) / rendered_rect.size.height,
)
}
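// A worked example for calculate_screen_uv (illustrative values only, not from
// this patch): with a rendered rect at origin (100.0, 50.0) of size 200.0 x 100.0,
// a snapped device position of (150.0, 100.0) maps to the UV
// ((150.0 - 100.0) / 200.0, (100.0 - 50.0) / 100.0) = (0.25, 0.5).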
// Calculate a UV rect within an image based on the screen space
// vertex positions of a picture.
fn calculate_uv_rect_kind(
local_rect: &LayoutRect,
clip_scroll_node: &ClipScrollNode,
rendered_rect: &DeviceIntRect,
device_pixel_scale: DevicePixelScale,
) -> UvRectKind {
let rendered_rect = rendered_rect.to_f32();
let top_left = calculate_screen_uv(
&local_rect.origin,
clip_scroll_node,
&rendered_rect,
device_pixel_scale,
);
let top_right = calculate_screen_uv(
&local_rect.top_right(),
clip_scroll_node,
&rendered_rect,
device_pixel_scale,
);
let bottom_left = calculate_screen_uv(
&local_rect.bottom_left(),
clip_scroll_node,
&rendered_rect,
device_pixel_scale,
);
let bottom_right = calculate_screen_uv(
&local_rect.bottom_right(),
clip_scroll_node,
&rendered_rect,
device_pixel_scale,
);
UvRectKind::Quad {
top_left,
top_right,
bottom_left,
bottom_right,
}
}

View File

@ -131,7 +131,10 @@ impl FontContext {
}
let system_fc = dwrote::FontCollection::system();
let font = system_fc.get_font_from_descriptor(&font_handle).unwrap();
let font = match system_fc.get_font_from_descriptor(&font_handle) {
Some(font) => font,
None => { panic!("missing descriptor {:?}", font_handle) }
};
let face = font.create_font_face();
self.fonts.insert(*font_key, face);
}

View File

@ -276,6 +276,7 @@ pub enum BrushKind {
start_radius: f32,
end_radius: f32,
ratio_xy: f32,
stretch_size: LayoutSize,
},
LinearGradient {
gradient_index: CachedGradientIndex,
@ -285,6 +286,7 @@ pub enum BrushKind {
reverse_stops: bool,
start_point: LayoutPoint,
end_point: LayoutPoint,
stretch_size: LayoutSize,
}
}
@ -1641,6 +1643,7 @@ impl PrimitiveStore {
pic.prepare_for_render(
prim_index,
metadata,
prim_run_context,
pic_state_for_children,
pic_state,
frame_context,
@ -1684,15 +1687,32 @@ impl PrimitiveStore {
PrimitiveKind::Brush => {
let brush = &self.cpu_brushes[metadata.cpu_prim_index.0];
brush.write_gpu_blocks(&mut request);
let repeat = match brush.kind {
BrushKind::Image { stretch_size, .. } |
BrushKind::LinearGradient { stretch_size, .. } |
BrushKind::RadialGradient { stretch_size, .. } => {
[
metadata.local_rect.size.width / stretch_size.width,
metadata.local_rect.size.height / stretch_size.height,
0.0,
0.0,
]
}
_ => {
[1.0, 1.0, 0.0, 0.0]
}
};
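// Illustrative example (values assumed, not from this patch): an image or
// gradient brush with a 200x100 local rect and a 50x50 stretch_size yields
// repeat counts of [4.0, 2.0, 0.0, 0.0]; all other brush kinds fall back to
// the non-repeating [1.0, 1.0, 0.0, 0.0].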
match brush.segment_desc {
Some(ref segment_desc) => {
for segment in &segment_desc.segments {
// has to match VECS_PER_SEGMENT
request.write_segment(segment.local_rect);
request.write_segment(segment.local_rect, repeat);
}
}
None => {
request.write_segment(metadata.local_rect);
request.write_segment(metadata.local_rect, repeat);
}
}
}
@ -2462,13 +2482,9 @@ impl<'a> GpuDataRequest<'a> {
fn write_segment(
&mut self,
local_rect: LayoutRect,
extra_params: [f32; 4],
) {
self.push(local_rect);
self.push([
1.0,
1.0,
0.0,
0.0
]);
self.push(extra_params);
}
}

View File

@ -732,6 +732,9 @@ impl RenderBackend {
);
}
},
SceneBuilderResult::FlushComplete(tx) => {
tx.send(()).ok();
}
SceneBuilderResult::Stopped => {
panic!("We haven't sent a Stop yet, how did we get a Stopped back?");
}
@ -754,6 +757,13 @@ impl RenderBackend {
// inflight messages, otherwise the scene builder might panic.
while let Ok(msg) = self.scene_rx.recv() {
match msg {
SceneBuilderResult::FlushComplete(tx) => {
// If somebody's blocked waiting for a flush, how did they
// trigger the RB thread to shut down? This shouldn't happen
// but handle it gracefully anyway.
debug_assert!(false);
tx.send(()).ok();
}
SceneBuilderResult::Stopped => break,
_ => continue,
}
@ -778,6 +788,9 @@ impl RenderBackend {
ApiMsg::WakeSceneBuilder => {
self.scene_tx.send(SceneBuilderRequest::WakeUp).unwrap();
}
ApiMsg::FlushSceneBuilder(tx) => {
self.scene_tx.send(SceneBuilderRequest::Flush(tx)).unwrap();
}
ApiMsg::UpdateResources(updates) => {
self.resource_cache
.update_resources(updates, &mut profile_counters.resources);

View File

@ -14,7 +14,7 @@ use euclid::{TypedPoint2D, TypedVector2D};
use freelist::{FreeList, FreeListHandle, WeakFreeListHandle};
use glyph_rasterizer::GpuGlyphCacheKey;
use gpu_cache::{GpuCache, GpuCacheAddress, GpuCacheHandle};
use gpu_types::{ImageSource, RasterizationSpace};
use gpu_types::{ImageSource, RasterizationSpace, UvRectKind};
use internal_types::{FastHashMap, SavedTargetIndex, SourceTexture};
#[cfg(feature = "pathfinder")]
use pathfinder_partitioner::mesh::Mesh;
@ -196,6 +196,7 @@ pub struct PictureTask {
pub prim_index: PrimitiveIndex,
pub content_origin: DeviceIntPoint,
pub uv_rect_handle: GpuCacheHandle,
uv_rect_kind: UvRectKind,
}
#[derive(Debug)]
@ -205,6 +206,7 @@ pub struct BlurTask {
pub blur_std_deviation: f32,
pub target_kind: RenderTargetKind,
pub uv_rect_handle: GpuCacheHandle,
uv_rect_kind: UvRectKind,
}
impl BlurTask {
@ -306,6 +308,7 @@ impl RenderTask {
prim_index: PrimitiveIndex,
content_origin: DeviceIntPoint,
children: Vec<RenderTaskId>,
uv_rect_kind: UvRectKind,
) -> Self {
RenderTask {
children,
@ -314,6 +317,7 @@ impl RenderTask {
prim_index,
content_origin,
uv_rect_handle: GpuCacheHandle::new(),
uv_rect_kind,
}),
clear_mode: ClearMode::Transparent,
saved_index: None,
@ -488,7 +492,10 @@ impl RenderTask {
) -> Self {
// Adjust large std deviation value.
let mut adjusted_blur_std_deviation = blur_std_deviation;
let blur_target_size = render_tasks[src_task_id].get_dynamic_size();
let (blur_target_size, uv_rect_kind) = {
let src_task = &render_tasks[src_task_id];
(src_task.get_dynamic_size(), src_task.uv_rect_kind())
};
let mut adjusted_blur_target_size = blur_target_size;
let mut downscaling_src_task_id = src_task_id;
let mut scale_factor = 1.0;
@ -515,6 +522,7 @@ impl RenderTask {
blur_std_deviation: adjusted_blur_std_deviation,
target_kind,
uv_rect_handle: GpuCacheHandle::new(),
uv_rect_kind,
}),
clear_mode,
saved_index: None,
@ -529,6 +537,7 @@ impl RenderTask {
blur_std_deviation: adjusted_blur_std_deviation,
target_kind,
uv_rect_handle: GpuCacheHandle::new(),
uv_rect_kind,
}),
clear_mode,
saved_index: None,
@ -575,6 +584,31 @@ impl RenderTask {
}
}
fn uv_rect_kind(&self) -> UvRectKind {
match self.kind {
RenderTaskKind::CacheMask(..) |
RenderTaskKind::Glyph(_) |
RenderTaskKind::Readback(..) |
RenderTaskKind::Scaling(..) => {
unreachable!("bug: unexpected render task");
}
RenderTaskKind::Picture(ref task) => {
task.uv_rect_kind
}
RenderTaskKind::VerticalBlur(ref task) |
RenderTaskKind::HorizontalBlur(ref task) => {
task.uv_rect_kind
}
RenderTaskKind::ClipRegion(..) |
RenderTaskKind::Blit(..) => {
UvRectKind::Rect
}
}
}
// Write (up to) 8 floats of data specific to the type
// of render task that is provided to the GPU shaders
// via a vertex texture.
@ -778,13 +812,13 @@ impl RenderTask {
) {
let (target_rect, target_index) = self.get_target_rect();
let cache_handle = match self.kind {
let (cache_handle, uv_rect_kind) = match self.kind {
RenderTaskKind::HorizontalBlur(ref mut info) |
RenderTaskKind::VerticalBlur(ref mut info) => {
&mut info.uv_rect_handle
(&mut info.uv_rect_handle, info.uv_rect_kind)
}
RenderTaskKind::Picture(ref mut info) => {
&mut info.uv_rect_handle
(&mut info.uv_rect_handle, info.uv_rect_kind)
}
RenderTaskKind::Readback(..) |
RenderTaskKind::Scaling(..) |
@ -797,11 +831,15 @@ impl RenderTask {
};
if let Some(mut request) = gpu_cache.request(cache_handle) {
let p0 = target_rect.origin.to_f32();
let p1 = target_rect.bottom_right().to_f32();
let image_source = ImageSource {
p0: target_rect.origin.to_f32(),
p1: target_rect.bottom_right().to_f32(),
p0,
p1,
texture_layer: target_index.0 as f32,
user_data: [0.0; 3],
uv_rect_kind,
};
image_source.write_gpu_blocks(&mut request);
}
@ -1012,6 +1050,7 @@ impl RenderTaskCache {
None,
gpu_cache,
None,
render_task.uv_rect_kind(),
);
// Get the allocation details in the texture cache, and store

View File

@ -24,6 +24,7 @@ use glyph_cache::GlyphCache;
use glyph_cache::GlyphCacheEntry;
use glyph_rasterizer::{FontInstance, GlyphFormat, GlyphRasterizer, GlyphRequest};
use gpu_cache::{GpuCache, GpuCacheAddress, GpuCacheHandle};
use gpu_types::UvRectKind;
use internal_types::{FastHashMap, FastHashSet, SourceTexture, TextureUpdateList};
use profiler::{ResourceProfileCounters, TextureCacheProfileCounters};
use render_backend::FrameId;
@ -1057,6 +1058,7 @@ impl ResourceCache {
dirty_rect,
gpu_cache,
None,
UvRectKind::Rect,
);
image_template.dirty_rect = None;
}

View File

@ -25,6 +25,7 @@ pub enum SceneBuilderRequest {
current_epochs: FastHashMap<PipelineId, Epoch>,
},
WakeUp,
Flush(MsgSender<()>),
Stop
}
@ -38,6 +39,7 @@ pub enum SceneBuilderResult {
render: bool,
result_tx: Sender<SceneSwapResult>,
},
FlushComplete(MsgSender<()>),
Stopped,
}
@ -125,6 +127,10 @@ impl SceneBuilder {
fn process_message(&mut self, msg: SceneBuilderRequest) -> bool {
match msg {
SceneBuilderRequest::WakeUp => {}
SceneBuilderRequest::Flush(tx) => {
self.tx.send(SceneBuilderResult::FlushComplete(tx)).unwrap();
let _ = self.api_tx.send(ApiMsg::WakeUp);
}
SceneBuilderRequest::Transaction {
document_id,
scene,

View File

@ -8,7 +8,7 @@ use api::ImageDescriptor;
use device::TextureFilter;
use freelist::{FreeList, FreeListHandle, UpsertResult, WeakFreeListHandle};
use gpu_cache::{GpuCache, GpuCacheHandle};
use gpu_types::ImageSource;
use gpu_types::{ImageSource, UvRectKind};
use internal_types::{CacheTextureId, FastHashMap, TextureUpdateList, TextureUpdateSource};
use internal_types::{RenderTargetInfo, SourceTexture, TextureUpdate, TextureUpdateOp};
use profiler::{ResourceProfileCounter, TextureCacheProfileCounters};
@ -110,6 +110,8 @@ struct CacheEntry {
texture_id: CacheTextureId,
// Optional notice when the entry is evicted from the cache.
eviction_notice: Option<EvictionNotice>,
// The type of UV rect this entry specifies.
uv_rect_kind: UvRectKind,
}
impl CacheEntry {
@ -121,6 +123,7 @@ impl CacheEntry {
filter: TextureFilter,
user_data: [f32; 3],
last_access: FrameId,
uv_rect_kind: UvRectKind,
) -> Self {
CacheEntry {
size,
@ -132,6 +135,7 @@ impl CacheEntry {
filter,
uv_rect_handle: GpuCacheHandle::new(),
eviction_notice: None,
uv_rect_kind,
}
}
@ -154,6 +158,7 @@ impl CacheEntry {
p1: (origin + self.size).to_f32(),
texture_layer: layer_index,
user_data: self.user_data,
uv_rect_kind: self.uv_rect_kind,
};
image_source.write_gpu_blocks(&mut request);
}
@ -394,6 +399,7 @@ impl TextureCache {
mut dirty_rect: Option<DeviceUintRect>,
gpu_cache: &mut GpuCache,
eviction_notice: Option<&EvictionNotice>,
uv_rect_kind: UvRectKind,
) {
// Determine if we need to allocate texture cache memory
// for this item. We need to reallocate if any of the following
@ -422,7 +428,13 @@ impl TextureCache {
};
if realloc {
self.allocate(handle, descriptor, filter, user_data);
self.allocate(
handle,
descriptor,
filter,
user_data,
uv_rect_kind,
);
// If we reallocated, we need to upload the whole item again.
dirty_rect = None;
@ -639,7 +651,7 @@ impl TextureCache {
// - We have freed an item that will definitely allow us to
// fit the currently requested allocation.
let needed_slab_size =
SlabSize::new(required_alloc.width, required_alloc.height).get_size();
SlabSize::new(required_alloc.width, required_alloc.height);
let mut found_matching_slab = false;
let mut freed_complete_page = false;
let mut evicted_items = 0;
@ -698,6 +710,7 @@ impl TextureCache {
descriptor: &ImageDescriptor,
filter: TextureFilter,
user_data: [f32; 3],
uv_rect_kind: UvRectKind,
) -> Option<CacheEntry> {
// Work out which cache it goes in, based on format.
let texture_array = match (descriptor.format, filter) {
@ -745,6 +758,7 @@ impl TextureCache {
descriptor.height,
user_data,
self.frame_id,
uv_rect_kind,
)
}
@ -765,11 +779,12 @@ impl TextureCache {
allowed_in_shared_cache = false;
}
// Anything larger than 512 goes in a standalone texture.
// Anything larger than TEXTURE_REGION_DIMENSIONS goes in a standalone texture.
// TODO(gw): If we find pages that suffer from batch breaks in this
// case, add support for storing these in a standalone
// texture array.
if descriptor.width > 512 || descriptor.height > 512 {
if descriptor.width > TEXTURE_REGION_DIMENSIONS ||
descriptor.height > TEXTURE_REGION_DIMENSIONS {
allowed_in_shared_cache = false;
}
@ -785,6 +800,7 @@ impl TextureCache {
descriptor: ImageDescriptor,
filter: TextureFilter,
user_data: [f32; 3],
uv_rect_kind: UvRectKind,
) {
assert!(descriptor.width > 0 && descriptor.height > 0);
@ -803,7 +819,8 @@ impl TextureCache {
new_cache_entry = self.allocate_from_shared_cache(
&descriptor,
filter,
user_data
user_data,
uv_rect_kind,
);
// If we failed to allocate in the shared cache, run an
@ -814,7 +831,8 @@ impl TextureCache {
new_cache_entry = self.allocate_from_shared_cache(
&descriptor,
filter,
user_data
user_data,
uv_rect_kind,
);
}
}
@ -847,6 +865,7 @@ impl TextureCache {
filter,
user_data,
frame_id,
uv_rect_kind,
));
allocated_in_shared_cache = false;
@ -900,43 +919,48 @@ impl TextureCache {
}
}
// A list of the block sizes that a region can be initialized with.
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
#[derive(Copy, Clone, PartialEq)]
enum SlabSize {
Size16x16,
Size32x32,
Size64x64,
Size128x128,
Size256x256,
Size512x512,
struct SlabSize {
width: u32,
height: u32,
}
impl SlabSize {
fn new(width: u32, height: u32) -> SlabSize {
// TODO(gw): Consider supporting non-square
// allocator sizes here.
let max_dim = cmp::max(width, height);
let x_size = quantize_dimension(width);
let y_size = quantize_dimension(height);
match max_dim {
0 => unreachable!(),
1...16 => SlabSize::Size16x16,
17...32 => SlabSize::Size32x32,
33...64 => SlabSize::Size64x64,
65...128 => SlabSize::Size128x128,
129...256 => SlabSize::Size256x256,
257...512 => SlabSize::Size512x512,
_ => panic!("Invalid dimensions for cache!"),
assert!(x_size > 0 && x_size <= TEXTURE_REGION_DIMENSIONS);
assert!(y_size > 0 && y_size <= TEXTURE_REGION_DIMENSIONS);
let (width, height) = match (x_size, y_size) {
// Special cased rectangular slab pages.
(512, 256) => (512, 256),
(512, 128) => (512, 128),
(512, 64) => (512, 64),
(256, 512) => (256, 512),
(128, 512) => (128, 512),
( 64, 512) => ( 64, 512),
// If none of those fit, use a square slab size.
(x_size, y_size) => {
let square_size = cmp::max(x_size, y_size);
(square_size, square_size)
}
};
SlabSize {
width,
height,
}
}
fn get_size(&self) -> u32 {
match *self {
SlabSize::Size16x16 => 16,
SlabSize::Size32x32 => 32,
SlabSize::Size64x64 => 64,
SlabSize::Size128x128 => 128,
SlabSize::Size256x256 => 256,
SlabSize::Size512x512 => 512,
fn invalid() -> SlabSize {
SlabSize {
width: 0,
height: 0,
}
}
}
@ -960,9 +984,8 @@ impl TextureLocation {
struct TextureRegion {
layer_index: i32,
region_size: u32,
slab_size: u32,
slab_size: SlabSize,
free_slots: Vec<TextureLocation>,
slots_per_axis: u32,
total_slot_count: usize,
origin: DeviceUintPoint,
}
@ -972,9 +995,8 @@ impl TextureRegion {
TextureRegion {
layer_index,
region_size,
slab_size: 0,
slab_size: SlabSize::invalid(),
free_slots: Vec::new(),
slots_per_axis: 0,
total_slot_count: 0,
origin,
}
@ -982,15 +1004,16 @@ impl TextureRegion {
// Initialize a region to be an allocator for a specific slab size.
fn init(&mut self, slab_size: SlabSize) {
debug_assert!(self.slab_size == 0);
debug_assert!(self.slab_size == SlabSize::invalid());
debug_assert!(self.free_slots.is_empty());
self.slab_size = slab_size.get_size();
self.slots_per_axis = self.region_size / self.slab_size;
self.slab_size = slab_size;
let slots_per_x_axis = self.region_size / self.slab_size.width;
let slots_per_y_axis = self.region_size / self.slab_size.height;
// Add each block to a freelist.
for y in 0 .. self.slots_per_axis {
for x in 0 .. self.slots_per_axis {
for y in 0 .. slots_per_y_axis {
for x in 0 .. slots_per_x_axis {
self.free_slots.push(TextureLocation::new(x, y));
}
}
@ -1001,30 +1024,31 @@ impl TextureRegion {
// Deinit a region, allowing it to become a region with
// a different allocator size.
fn deinit(&mut self) {
self.slab_size = 0;
self.slab_size = SlabSize::invalid();
self.free_slots.clear();
self.slots_per_axis = 0;
self.total_slot_count = 0;
}
fn is_empty(&self) -> bool {
self.slab_size == 0
self.slab_size == SlabSize::invalid()
}
// Attempt to allocate a fixed size block from this region.
fn alloc(&mut self) -> Option<DeviceUintPoint> {
debug_assert!(self.slab_size != SlabSize::invalid());
self.free_slots.pop().map(|location| {
DeviceUintPoint::new(
self.origin.x + self.slab_size * location.0 as u32,
self.origin.y + self.slab_size * location.1 as u32,
self.origin.x + self.slab_size.width * location.0 as u32,
self.origin.y + self.slab_size.height * location.1 as u32,
)
})
}
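// Illustrative example (values assumed, not from this patch): a region whose
// origin is (0, 512) and whose slab_size is 64x512 places the free slot at
// location (3, 0) at (0 + 64 * 3, 512 + 512 * 0) = (192, 512).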
// Free a block in this region.
fn free(&mut self, point: DeviceUintPoint) {
let x = (point.x - self.origin.x) / self.slab_size;
let y = (point.y - self.origin.y) / self.slab_size;
let x = (point.x - self.origin.x) / self.slab_size.width;
let y = (point.y - self.origin.y) / self.slab_size.height;
self.free_slots.push(TextureLocation::new(x, y));
// If this region is completely unused, deinit it
@ -1089,6 +1113,7 @@ impl TextureArray {
height: u32,
user_data: [f32; 3],
frame_id: FrameId,
uv_rect_kind: UvRectKind,
) -> Option<CacheEntry> {
// Lazily allocate the regions if not already created.
// This means that very rarely used image formats can be
@ -1118,7 +1143,6 @@ impl TextureArray {
// Quantize the size of the allocation to select a region to
// allocate from.
let slab_size = SlabSize::new(width, height);
let slab_size_dim = slab_size.get_size();
// TODO(gw): For simplicity, the initial implementation just
// has a single vec<> of regions. We could easily
@ -1134,9 +1158,9 @@ impl TextureArray {
// Run through the existing regions of this size, and see if
// we can find a free block in any of them.
for (i, region) in self.regions.iter_mut().enumerate() {
if region.slab_size == 0 {
if region.is_empty() {
empty_region_index = Some(i);
} else if region.slab_size == slab_size_dim {
} else if region.slab_size == slab_size {
if let Some(location) = region.alloc() {
entry_kind = Some(EntryKind::Cache {
layer_index: region.layer_index as u16,
@ -1174,6 +1198,7 @@ impl TextureArray {
filter: self.filter,
texture_id: self.texture_id.unwrap(),
eviction_notice: None,
uv_rect_kind,
}
})
}
@ -1244,3 +1269,16 @@ impl TextureUpdate {
}
}
}
fn quantize_dimension(size: u32) -> u32 {
match size {
0 => unreachable!(),
1...16 => 16,
17...32 => 32,
33...64 => 64,
65...128 => 128,
129...256 => 256,
257...512 => 512,
_ => panic!("Invalid dimensions for cache!"),
}
}
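// Illustrative example (values assumed, not from this patch): a request of
// 300x40 quantizes to (512, 64), which hits one of the special-cased
// rectangular slab pages in SlabSize::new, so the region is initialized with
// 512x64 slots; a request of 100x90 quantizes to (128, 128) and falls back to
// the square 128x128 slab size.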

View File

@ -633,6 +633,7 @@ pub enum ApiMsg {
/// through another channel.
WakeUp,
WakeSceneBuilder,
FlushSceneBuilder(MsgSender<()>),
ShutDown,
}
@ -653,6 +654,7 @@ impl fmt::Debug for ApiMsg {
ApiMsg::ShutDown => "ApiMsg::ShutDown",
ApiMsg::WakeUp => "ApiMsg::WakeUp",
ApiMsg::WakeSceneBuilder => "ApiMsg::WakeSceneBuilder",
ApiMsg::FlushSceneBuilder(..) => "ApiMsg::FlushSceneBuilder",
})
}
}
@ -965,6 +967,15 @@ impl RenderApi {
self.send_message(ApiMsg::WakeSceneBuilder);
}
/// Block until a round-trip to the scene builder thread has completed. This
/// ensures that any transactions (including ones deferred to the scene
/// builder thread) have been processed.
pub fn flush_scene_builder(&self) {
let (tx, rx) = channel::msg_channel().unwrap();
self.send_message(ApiMsg::FlushSceneBuilder(tx));
rx.recv().unwrap(); // block until done
}
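    // Usage sketch for flush_scene_builder (hypothetical caller, not part of
    // this patch): block on the scene builder thread before an operation that
    // needs the latest built scene, e.g.
    //
    //     api.flush_scene_builder();       // round-trip to the scene builder thread
    //     api.save_capture(path, bits);    // the capture now reflects the built scene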
/// Save a capture of the current frame state for debugging.
pub fn save_capture(&self, path: PathBuf, bits: CaptureBits) {
let msg = ApiMsg::DebugCommand(DebugCommand::SaveCapture(path, bits));

View File

@ -247,7 +247,7 @@ NotifyDidRender(layers::CompositorBridgeParentBase* aBridge,
TimeStamp aEnd)
{
for (uintptr_t i = 0; i < aInfo.epochs.length; i++) {
aBridge->NotifyDidCompositeToPipeline(
aBridge->NotifyPipelineRendered(
aInfo.epochs.data[i].pipeline_id,
aInfo.epochs.data[i].epoch,
aStart,

View File

@ -1 +1 @@
751236199b39bb8dac78522713133ca18c603fb3
4b65822a2f7e1fed246a492f9fe193ede2f37d74

View File

@ -1,3 +1,3 @@
== scaled-color-stop-position.html scaled-color-stop-position-ref.html
== color-stop-clamp-interpolation.html color-stop-clamp-interpolation-ref.html
fuzzy-if(webrender&&winWidget,2-2,72-72) == linear-gradient-repeated.html linear-gradient-repeated-ref.html
== linear-gradient-repeated.html linear-gradient-repeated-ref.html

View File

@ -172,7 +172,7 @@ fuzzy(16,69) fuzzy-if(skiaContent,95,2206) == attachment-local-clipping-image-5.
fuzzy(80,500) fuzzy-if(skiaContent,109,908) == attachment-local-clipping-image-6.html attachment-local-clipping-image-6-ref.html
fuzzy-if(skiaContent,1,8) fuzzy-if(webrender,1,84) == background-multiple-with-border-radius.html background-multiple-with-border-radius-ref.html
== background-repeat-large-area.html background-repeat-large-area-ref.html
fuzzy-if(webrender&&winWidget,73-73,49600-49600) == background-repeat-large-area.html background-repeat-large-area-ref.html
fuzzy(30,474) fuzzy-if(skiaContent,31,474) == background-tiling-zoom-1.html background-tiling-zoom-1-ref.html

View File

@ -305,7 +305,7 @@ fuzzy-if(Android,3,50) fuzzy-if(skiaContent,1,133) == 273681-1.html 273681-1-ref
== 283686-2.html 283686-2-ref.html
== 283686-3.html about:blank
== 289384-1.xhtml 289384-ref.xhtml
random-if(d2d) fuzzy-if(Android,8,1439) HTTP == 289480.html#top 289480-ref.html # basically-verbatim acid2 test, HTTP for a 404 page -- bug 578114 for the d2d failures
fails-if(webrender) random-if(d2d) fuzzy-if(Android,8,1439) HTTP == 289480.html#top 289480-ref.html # basically-verbatim acid2 test, HTTP for a 404 page -- bug 578114 for the d2d failures
== 290129-1.html 290129-1-ref.html
== 291078-1.html 291078-1-ref.html
== 291078-2.html 291078-2-ref.html
@ -1391,7 +1391,7 @@ fuzzy-if(skiaContent,1,17000) == 498228-1.xul 498228-1-ref.xul
== 501257-1a.html 501257-1-ref.html
== 501257-1b.html 501257-1-ref.html
== 501257-1.xhtml 501257-1-ref.xhtml
== 501627-1.html 501627-1-ref.html
fuzzy-if(webrender&&winWidget,5-5,83252-83252) == 501627-1.html 501627-1-ref.html
== 502288-1.html 502288-1-ref.html
fuzzy-if(gtkWidget,1,2) == 502447-1.html 502447-1-ref.html #Bug 1315834
== 502795-1.html 502795-1-ref.html
@ -1980,7 +1980,7 @@ random-if(!winWidget) == 1273154-2.html 1273154-2-ref.html # depends on Windows
!= 1276161-1a.html 1276161-1b.html
== 1275411-1.html 1275411-1-ref.html
== 1288255.html 1288255-ref.html
fuzzy(8,1900) == 1291528.html 1291528-ref.html
fuzzy(8,1900) fails-if(webrender) == 1291528.html 1291528-ref.html
# Buttons in 2 pages have different position and the rendering result can be
# different, but they should use the same button style and the background color
# should be same. |fuzzy()| here allows the difference in border, but not

View File

@ -46,8 +46,8 @@ fuzzy-if(Android,8,771) == radial-shape-farthest-corner-1a.html radial-shape-far
fails-if(gtkWidget&&/x86_64-/.test(xulRuntime.XPCOMABI)) fuzzy(1,1622) fuzzy-if(cocoaWidget,2,41281) fuzzy-if(Android,8,1091) fuzzy-if(skiaContent,2,500) == radial-shape-farthest-corner-1b.html radial-shape-farthest-corner-1-ref.html
fuzzy-if(Android,17,13320) == radial-shape-farthest-side-1a.html radial-shape-farthest-side-1-ref.html
fuzzy-if(Android,17,13320) == radial-shape-farthest-side-1b.html radial-shape-farthest-side-1-ref.html
== radial-size-1a.html radial-size-1-ref.html
== radial-size-1b.html radial-size-1-ref.html
fuzzy-if(webrender,1-2,4-9) == radial-size-1a.html radial-size-1-ref.html
fuzzy-if(webrender,1-2,4-9) == radial-size-1b.html radial-size-1-ref.html
fuzzy-if(Android,4,248) == radial-zero-length-1a.html radial-zero-length-1-ref.html
fuzzy-if(Android,4,248) == radial-zero-length-1b.html radial-zero-length-1-ref.html
fuzzy-if(Android,4,248) == radial-zero-length-1c.html radial-zero-length-1-ref.html

View File

@ -289,6 +289,10 @@ class CodeCoverageMixin(SingleTestMixin):
return
if self.per_test_coverage:
if not self.per_test_reports:
self.info("No tests were found...not saving coverage data.")
return
dest = os.path.join(dirs['abs_blob_upload_dir'], 'per-test-coverage-reports.zip')
with zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED) as z:
for suite, data in self.per_test_reports.items():

View File

@ -24,7 +24,7 @@ class PreferenceRollbackAction extends BaseAction {
const rollout = await PreferenceRollouts.get(rolloutSlug);
if (!rollout) {
TelemetryEvents.sendEvent("unenrollFailure", "preference_rollout", rolloutSlug, {"reason": "rollout missing"});
TelemetryEvents.sendEvent("unenrollFailed", "preference_rollout", rolloutSlug, {"reason": "rollout missing"});
this.log.info(`Cannot rollback ${rolloutSlug}: no rollout found with that slug`);
return;
}
@ -46,7 +46,7 @@ class PreferenceRollbackAction extends BaseAction {
}
case PreferenceRollouts.STATE_GRADUATED: {
// graduated rollouts can't be rolled back
TelemetryEvents.sendEvent("unenrollFailure", "preference_rollout", rolloutSlug, {"reason": "graduated"});
TelemetryEvents.sendEvent("unenrollFailed", "preference_rollout", rolloutSlug, {"reason": "graduated"});
throw new Error(`Cannot rollback already graduated rollout ${rolloutSlug}`);
}
default: {

View File

@ -113,7 +113,7 @@ class PreferenceRolloutAction extends BaseAction {
"enrollFailed",
"preference_rollout",
slug,
{reason: "invalid type", pref: prefSpec.preferenceName},
{reason: "invalid type", preference: prefSpec.preferenceName},
);
throw new Error(
`Cannot start rollout "${slug}" on "${prefSpec.preferenceName}". ` +

View File

@ -11,50 +11,52 @@ var EXPORTED_SYMBOLS = ["TelemetryEvents"];
const TELEMETRY_CATEGORY = "normandy";
const TelemetryEvents = {
eventSchema: {
enroll: {
methods: ["enroll"],
objects: ["preference_study", "addon_study", "preference_rollout"],
extra_keys: ["experimentType", "branch", "addonId", "addonVersion"],
record_on_release: true,
},
enroll_failed: {
methods: ["enrollFailed"],
objects: ["addon_study", "preference_rollout"],
extra_keys: ["reason", "preference"],
record_on_release: true,
},
update: {
methods: ["update"],
objects: ["preference_rollout"],
extra_keys: ["previousState"],
record_on_release: true,
},
unenroll: {
methods: ["unenroll"],
objects: ["preference_study", "addon_study", "preference_rollout"],
extra_keys: ["reason", "didResetValue", "addonId", "addonVersion"],
record_on_release: true,
},
unenroll_failed: {
methods: ["unenrollFailed"],
objects: ["preference_rollout"],
extra_keys: ["reason"],
record_on_release: true,
},
graduate: {
methods: ["graduate"],
objects: ["preference_rollout"],
extra_keys: [],
record_on_release: true,
},
},
init() {
Services.telemetry.registerEvents(TELEMETRY_CATEGORY, {
enroll: {
methods: ["enroll"],
objects: ["preference_study", "addon_study", "preference_rollout"],
extra_keys: ["experimentType", "branch", "addonId", "addonVersion"],
record_on_release: true,
},
enroll_failure: {
methods: ["enrollFailed"],
objects: ["addon_study", "preference_rollout"],
extra_keys: ["reason", "preference"],
record_on_release: true,
},
update: {
methods: ["update"],
objects: ["preference_rollout"],
extra_keys: ["previousState"],
record_on_release: true,
},
unenroll: {
methods: ["unenroll"],
objects: ["preference_study", "addon_study", "preference_addon"],
extra_keys: ["reason", "didResetValue", "addonId", "addonVersion"],
record_on_release: true,
},
unenroll_failure: {
methods: ["unenrollFailed"],
objects: ["preference_rollout"],
extra_keys: ["reason"],
record_on_release: true,
},
graduate: {
methods: ["graduate"],
objects: ["preference_rollout"],
extra_keys: [],
record_on_release: true,
},
});
Services.telemetry.registerEvents(TELEMETRY_CATEGORY, this.eventSchema);
},
sendEvent(method, object, value, extra) {

View File

@ -142,7 +142,7 @@ decorate_task(
decorate_task(
withStub(Addons, "applyInstall"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
withWebExtension(),
async function testStartAddonCleanup(applyInstallStub, sendEventStub, [addonId, addonFile]) {
const fakeError = new Error("Fake failure");
@ -188,7 +188,7 @@ decorate_task(
decorate_task(
withWebExtension({version: "2.0"}),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
AddonStudies.withStudies(),
async function testStart([addonId, addonFile], sendEventStub) {
const startupPromise = AddonTestUtils.promiseWebExtensionStartup(addonId);
@ -265,7 +265,7 @@ decorate_task(
studyFactory({active: true, addonId: testStopId, studyEndDate: null}),
]),
withInstalledWebExtension({id: testStopId}),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStop([study], [addonId, addonFile], sendEventStub) {
await AddonStudies.stop(study.recipeId, "test-reason");
const newStudy = await AddonStudies.get(study.recipeId);
@ -311,7 +311,7 @@ decorate_task(
studyFactory({active: true, addonId: "installed@example.com"}),
studyFactory({active: false, addonId: "already.gone@example.com", studyEndDate: new Date(2012, 1)}),
]),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
withInstalledWebExtension({id: "installed@example.com"}),
async function testInit([activeUninstalledStudy, activeInstalledStudy, inactiveStudy], sendEventStub) {
await AddonStudies.init();
@ -372,7 +372,7 @@ decorate_task(
// stop should pass "unknown" to TelemetryEvents for `reason` if none specified
decorate_task(
AddonStudies.withStudies([studyFactory({ active: true })]),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStopUnknownReason([study], sendEventStub) {
await AddonStudies.stop(study.recipeId);
is(

View File

@ -102,7 +102,7 @@ decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "startObserver"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStart(experiments, mockPreferences, startObserverStub, sendEventStub) {
mockPreferences.set("fake.preference", "oldvalue", "default");
mockPreferences.set("fake.preference", "uservalue", "user");
@ -409,7 +409,7 @@ decorate_task(
withMockExperiments,
withMockPreferences,
withSpy(PreferenceExperiments, "stopObserver"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStop(experiments, mockPreferences, stopObserverSpy, sendEventStub) {
// this assertion is mostly useful for --verify test runs, to make
// sure that tests clean up correctly.
@ -520,7 +520,7 @@ decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "stopObserver"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStopReset(experiments, mockPreferences, stopObserverStub, sendEventStub) {
mockPreferences.set("fake.preference", "customvalue", "default");
experiments.test = experimentFactory({
@ -701,7 +701,7 @@ decorate_task(
withMockExperiments,
withStub(TelemetryEnvironment, "setExperimentActive"),
withStub(TelemetryEnvironment, "setExperimentInactive"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStartAndStopTelemetry(experiments, setActiveStub, setInactiveStub, sendEventStub) {
await PreferenceExperiments.start({
name: "test",
@ -745,7 +745,7 @@ decorate_task(
withMockExperiments,
withStub(TelemetryEnvironment, "setExperimentActive"),
withStub(TelemetryEnvironment, "setExperimentInactive"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testInitTelemetryExperimentType(experiments, setActiveStub, setInactiveStub, sendEventStub) {
await PreferenceExperiments.start({
name: "test",
@ -1002,7 +1002,7 @@ decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "stopObserver"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStopUnknownReason(experiments, mockPreferences, stopObserverStub, sendEventStub) {
mockPreferences.set("fake.preference", "default value", "default");
experiments.test = experimentFactory({ name: "test", preferenceName: "fake.preference" });
@ -1020,7 +1020,7 @@ decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "stopObserver"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testStopResetValue(experiments, mockPreferences, stopObserverStub, sendEventStub) {
mockPreferences.set("fake.preference1", "default value", "default");
experiments.test1 = experimentFactory({ name: "test1", preferenceName: "fake.preference1" });
@ -1048,7 +1048,7 @@ decorate_task(
// the user changed preferences during a browser run.
decorate_task(
withMockPreferences,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
withMockExperiments,
async function testPrefChangeEventTelemetry(mockPreferences, sendEventStub, mockExperiments) {
is(Preferences.get("fake.preference"), null, "preference should start unset");

View File

@ -145,7 +145,7 @@ decorate_task(
// recordOriginalValue should graduate a study when it is no longer relevant.
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function testRecordOriginalValuesUpdatesPreviousValues(sendEventStub) {
await PreferenceRollouts.add({
slug: "test-rollout",

View File

@ -12,7 +12,7 @@ ChromeUtils.import("resource://normandy/lib/TelemetryEvents.jsm", this);
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEnvironment, "setExperimentInactive"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function simple_rollback(setExperimentInactiveStub, sendEventStub) {
Services.prefs.getDefaultBranch("").setIntPref("test.pref1", 2);
Services.prefs.getDefaultBranch("").setCharPref("test.pref2", "rollout value");
@ -77,7 +77,7 @@ decorate_task(
// Test that a graduated rollout can't be rolled back
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function cant_rollback_graduated(sendEventStub) {
Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
await PreferenceRollouts.add({
@ -108,7 +108,7 @@ decorate_task(
Assert.deepEqual(
sendEventStub.args,
[["unenrollFailure", "preference_rollout", "graduated-rollout", {reason: "graduated"}]],
[["unenrollFailed", "preference_rollout", "graduated-rollout", {reason: "graduated"}]],
"correct event was sent"
);
@ -120,7 +120,7 @@ decorate_task(
// Test that a rollback without a matching rollout reports an unenroll
// failure but still counts the recipe run as a success
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
withStub(Uptake, "reportRecipe"),
async function rollback_without_rollout(sendEventStub, reportRecipeStub) {
let recipe = {id: 1, arguments: {rolloutSlug: "missing-rollout"}};
@ -131,7 +131,7 @@ decorate_task(
Assert.deepEqual(
sendEventStub.args,
[["unenrollFailure", "preference_rollout", "missing-rollout", {reason: "rollout missing"}]],
[["unenrollFailed", "preference_rollout", "missing-rollout", {reason: "rollout missing"}]],
"an unenrollFailure event should be sent",
);
// This is too common a case for an error, so it should be reported as success
@ -147,7 +147,7 @@ decorate_task(
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEnvironment, "setExperimentInactive"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function rollback_already_rolled_back(setExperimentInactiveStub, sendEventStub) {
Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);

View File

@ -11,7 +11,7 @@ ChromeUtils.import("resource://normandy/lib/TelemetryEvents.jsm", this);
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEnvironment, "setExperimentActive"),
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function simple_recipe_enrollment(setExperimentActiveStub, sendEventStub) {
const recipe = {
id: 1,
@ -75,7 +75,7 @@ decorate_task(
// Test that an enrollment's values can change, be removed, and be added
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function update_enrollment(sendEventStub) {
// first enrollment
const recipe = {
@ -164,7 +164,7 @@ decorate_task(
// Test that a graduated rollout can be ungraduated
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function ungraduate_enrollment(sendEventStub) {
Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
await PreferenceRollouts.add({
@ -215,7 +215,7 @@ decorate_task(
// Test that when recipes conflict, only one is applied
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function conflicting_recipes(sendEventStub) {
// create two recipes that each share a pref and have a unique pref.
const recipe1 = {
@ -291,7 +291,7 @@ decorate_task(
// Test that when the wrong value type is given, the recipe is not applied
decorate_task(
PreferenceRollouts.withTestMock,
withStub(TelemetryEvents, "sendEvent"),
withSendEventStub,
async function wrong_preference_value(sendEventStub) {
Services.prefs.getDefaultBranch("").setCharPref("test.pref", "not an int");
const recipe = {
@ -312,7 +312,7 @@ decorate_task(
Assert.deepEqual(await PreferenceRollouts.getAll(), [], "no rollout is stored in the db");
Assert.deepEqual(
sendEventStub.args,
[["enrollFailed", "preference_rollout", recipe.arguments.slug, {reason: "invalid type", pref: "test.pref"}]],
[["enrollFailed", "preference_rollout", recipe.arguments.slug, {reason: "invalid type", preference: "test.pref"}]],
"an enrollment failed event should be sent",
);

View File

@ -318,3 +318,39 @@ this.studyEndObserved = function(recipeId) {
(subject, endedRecipeId) => Number.parseInt(endedRecipeId) === recipeId,
);
};
this.withSendEventStub = function(testFunction) {
return async function wrappedTestFunction(...args) {
/* Checks that calls match the event schema. */
function checkEventMatchesSchema(method, object, value, extra) {
let match = true;
const spec = Array.from(Object.values(TelemetryEvents.eventSchema))
.filter(spec => spec.methods.includes(method))[0];
if (spec) {
if (!spec.objects.includes(object)) {
match = false;
}
for (const key of Object.keys(extra)) {
if (!spec.extra_keys.includes(key)) {
match = false;
}
}
} else {
match = false;
}
ok(match, `sendEvent(${method}, ${object}, ${value}, ${JSON.stringify(extra)}) should match spec`);
}
const stub = sinon.stub(TelemetryEvents, "sendEvent");
stub.callsFake(checkEventMatchesSchema);
try {
await testFunction(...args, stub);
} finally {
stub.restore();
}
};
};

View File

@ -227,7 +227,10 @@ function promiseFindFinished(searchText, highlightOn) {
};
resultListener = {
onFindResult: foundOrTimedout
onFindResult: foundOrTimedout,
onCurrentSelection() {},
onMatchesCountResult() {},
onHighlightFinished() {},
};
findbar.browser.finder.addResultListener(resultListener);
findbar._find();

View File

@ -91,8 +91,9 @@ RemoteFinder.prototype = {
} catch (e) {
if (!l[callback]) {
Cu.reportError(`Missing ${callback} callback on RemoteFinderListener`);
} else {
Cu.reportError(e);
}
Cu.reportError(e);
}
}
},