Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-11-27)

Merge autoland to mozilla-central. a=merge
Commit: 44d1a86f71
@@ -46,6 +46,28 @@
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>pdf</string>
</array>
<key>CFBundleTypeIconFile</key>
<string>document.icns</string>
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/pdf</string>
</array>
<key>CFBundleTypeName</key>
<string>PDF document</string>
<key>CFBundleTypeOSTypes</key>
<array>
<string>TEXT</string>
</array>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
<key>LSHandlerRank</key>
<string>Alternate</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
@@ -1683,7 +1683,7 @@ toolbar[keyNav=true]:not([collapsed=true], [customizing=true]) toolbartabstop {
}

/* Hide tab-modal dialogs when a window-modal one is up. */
:root[window-modal-open] .browserContainer > .dialogStack {
:root[window-modal-open] .browserStack > .dialogStack {
visibility: hidden;
}

@@ -9116,7 +9116,7 @@ const SafeBrowsingNotificationBox = {
*/
class TabDialogBox {
static _containerFor(browser) {
return browser.closest(".browserContainer, .webextension-popup-stack");
return browser.closest(".browserStack, .webextension-popup-stack");
}

constructor(browser) {
@@ -53,7 +53,7 @@ add_task(async function test_old_modal_ui() {
// switch tab back, and check the checkbox is displayed:
await BrowserTestUtils.switchTab(gBrowser, openedTab);
// check the prompt is there, and the extra row is present
let promptElements = openedTab.linkedBrowser.parentNode.parentNode.querySelectorAll(
let promptElements = openedTab.linkedBrowser.parentNode.querySelectorAll(
"tabmodalprompt"
);
is(promptElements.length, 1, "There should be 1 prompt");
@@ -164,7 +164,7 @@ add_task(async function test_new_modal_ui() {
// switch tab back, and check the checkbox is displayed:
await BrowserTestUtils.switchTab(gBrowser, openedTab);
// check the prompt is there
let promptElements = openedTab.linkedBrowser.parentNode.parentNode.querySelectorAll(
let promptElements = openedTab.linkedBrowser.parentNode.querySelectorAll(
".content-prompt-dialog"
);

@@ -5267,6 +5267,11 @@ class OverflowableToolbar {
*/
#overflowedInfo = new Map();

/**
* The set of overflowed DOM nodes that were hidden at the time of overflowing.
*/
#hiddenOverflowedNodes = new WeakSet();

/**
* True if the overflowable toolbar is actively handling overflows and
* underflows. This value is set internally by the private #enable() and
@@ -5303,11 +5308,12 @@ class OverflowableToolbar {
/**
* A reference to the the element that overflowed extension browser action
* toolbar items will be appended to as children upon overflow if the
* Unified Extension UI is enabled.
* Unified Extension UI is enabled. This is created lazily and might be null,
* so you should use the #webExtList memoizing getter instead to get this.
*
* @type {Element}
* @type {Element|null}
*/
#webExtList = null;
#webExtListRef = null;

/**
* An empty object that is created in #checkOverflow to identify individual
@@ -5605,16 +5611,21 @@ class OverflowableToolbar {
}
}

let overflowList = CustomizableUI.isWebExtensionWidget(aNode.id)
? this.#webExtList
: this.#defaultList;

let containerForAppending =
this.#overflowedInfo.size && newNodeCanOverflow
? this.#defaultList
? overflowList
: this.#target;
return [containerForAppending, null];
}

/**
* Allows callers to query for the current parent of a toolbar item that may
* or may not be overflowed. That parent will either be #defaultList or #target.
* or may not be overflowed. That parent will either be #defaultList,
* #webExtList (if it's an extension button) or #target.
*
* Note: It is assumed that the caller has verified that aNode is placed
* within the toolbar customizable area according to CustomizableUI.
@@ -5625,7 +5636,9 @@ class OverflowableToolbar {
*/
getContainerFor(aNode) {
if (aNode.getAttribute("overflowedItem") == "true") {
return this.#defaultList;
return CustomizableUI.isWebExtensionWidget(aNode.id)
? this.#webExtList
: this.#defaultList;
}
return this.#target;
}
@@ -5657,7 +5670,7 @@ class OverflowableToolbar {
return;
}

let webExtList = this.#getWebExtList();
let webExtList = this.#webExtList;

let child = this.#target.lastElementChild;
while (child && isOverflowing) {
@@ -5665,6 +5678,13 @@ class OverflowableToolbar {

if (child.getAttribute("overflows") != "false") {
this.#overflowedInfo.set(child.id, targetContentWidth);
let { width: childWidth } = win.windowUtils.getBoundsWithoutFlushing(
child
);
if (!childWidth) {
this.#hiddenOverflowedNodes.add(child);
}

child.setAttribute("overflowedItem", true);
CustomizableUIInternal.ensureButtonContextMenu(
child,
@@ -5690,7 +5710,7 @@ class OverflowableToolbar {
child,
this.#defaultList.firstElementChild
);
if (!CustomizableUI.isSpecialWidget(child.id)) {
if (!CustomizableUI.isSpecialWidget(child.id) && childWidth) {
this.#toolbar.setAttribute("overflowing", "true");
}
}
@@ -5886,8 +5906,13 @@ class OverflowableToolbar {
win.UpdateUrlbarSearchSplitterState();

let defaultListItems = Array.from(this.#defaultList.children);
let collapsedWidgetIds = defaultListItems.map(item => item.id);
if (collapsedWidgetIds.every(w => CustomizableUI.isSpecialWidget(w))) {
if (
defaultListItems.every(
item =>
CustomizableUI.isSpecialWidget(item.id) ||
this.#hiddenOverflowedNodes.has(item)
)
) {
this.#toolbar.removeAttribute("overflowing");
}
}
@@ -5991,16 +6016,31 @@ class OverflowableToolbar {
* buttons should go to if the Unified Extensions UI is enabled, or null
* if no such list exists.
*/
#getWebExtList() {
if (!this.#webExtList) {
get #webExtList() {
if (!this.#webExtListRef) {
let targetID = this.#toolbar.getAttribute("addon-webext-overflowtarget");
if (targetID) {
let win = this.#toolbar.ownerGlobal;
let { panel } = win.gUnifiedExtensions;
this.#webExtList = panel.querySelector(`#${targetID}`);
if (!targetID) {
throw new Error(
"addon-webext-overflowtarget was not defined on the " +
`overflowable toolbar with id: ${this.#toolbar.id}`
);
}
let win = this.#toolbar.ownerGlobal;
let { panel } = win.gUnifiedExtensions;
this.#webExtListRef = panel.querySelector(`#${targetID}`);
}
return this.#webExtList;
return this.#webExtListRef;
}

/**
* Returns true if aNode is not null and is one of either this.#webExtList or
* this.#defaultList.
*
* @param {DOMElement} aNode The node to test.
* @returns {boolean}
*/
#isOverflowList(aNode) {
return aNode == this.#defaultList || aNode == this.#webExtList;
}

/**
@@ -6075,30 +6115,25 @@ class OverflowableToolbar {
// moved or removed from an area via the CustomizableUI API while
// overflowed. It reorganizes the internal state of this OverflowableToolbar
// to handle that change.
if (
!this.#enabled ||
(aContainer != this.#target && aContainer != this.#defaultList)
) {
if (!this.#enabled || !this.#isOverflowList(aContainer)) {
return;
}
// When we (re)move an item, update all the items that come after it in the list
// with the minsize *of the item before the to-be-removed node*. This way, we
// ensure that we try to move items back as soon as that's possible.
if (aNode.parentNode == this.#defaultList) {
let updatedMinSize;
if (aNode.previousElementSibling) {
updatedMinSize = this.#overflowedInfo.get(
aNode.previousElementSibling.id
);
} else {
// Force (these) items to try to flow back into the bar:
updatedMinSize = 1;
}
let nextItem = aNode.nextElementSibling;
while (nextItem) {
this.#overflowedInfo.set(nextItem.id, updatedMinSize);
nextItem = nextItem.nextElementSibling;
}
let updatedMinSize;
if (aNode.previousElementSibling) {
updatedMinSize = this.#overflowedInfo.get(
aNode.previousElementSibling.id
);
} else {
// Force (these) items to try to flow back into the bar:
updatedMinSize = 1;
}
let nextItem = aNode.nextElementSibling;
while (nextItem) {
this.#overflowedInfo.set(nextItem.id, updatedMinSize);
nextItem = nextItem.nextElementSibling;
}
}

@@ -6109,12 +6144,12 @@ class OverflowableToolbar {
// causes overflow or underflow of the toolbar.
if (
!this.#enabled ||
(aContainer != this.#target && aContainer != this.#defaultList)
(aContainer != this.#target && !this.#isOverflowList(aContainer))
) {
return;
}

let nowOverflowed = aNode.parentNode == this.#defaultList;
let nowOverflowed = this.#isOverflowList(aNode.parentNode);
let wasOverflowed = this.#overflowedInfo.has(aNode.id);

// If this wasn't overflowed before...
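For context, the hunks above replace the #getWebExtList() method with a lazily-memoized private getter backed by #webExtListRef. The general shape of that pattern (a minimal sketch with illustrative names, not the actual Firefox code) is:

class OverflowHost {
  // Backing field; stays null until the getter is first used.
  #listRef = null;

  get #list() {
    if (!this.#listRef) {
      // Resolve the element once and cache it; later reads reuse the cache.
      this.#listRef = document.getElementById("overflow-target");
      if (!this.#listRef) {
        throw new Error("overflow-target element is missing");
      }
    }
    return this.#listRef;
  }

  appendOverflowed(node) {
    this.#list.appendChild(node);
  }
}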
@@ -83,10 +83,12 @@ tags = overflowable-toolbar
skip-if = verify
[browser_972267_customizationchange_events.js]
[browser_976792_insertNodeInWindow.js]
tags = overflowable-toolbar
skip-if = os == "linux"
[browser_978084_dragEnd_after_move.js]
skip-if = verify
[browser_980155_add_overflow_toolbar.js]
tags = overflowable-toolbar
skip-if = verify
[browser_981305_separator_insertion.js]
[browser_981418-widget-onbeforecreated-handler.js]
@@ -141,6 +143,7 @@ tags = overflowable-toolbar
https_first_disabled = true
[browser_flexible_space_area.js]
[browser_help_panel_cloning.js]
[browser_hidden_widget_overflow.js]
[browser_history_after_appMenu.js]
[browser_history_recently_closed.js]
[browser_history_recently_closed_middleclick.js]
@@ -0,0 +1,122 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */

"use strict";

/**
* Tests that if only hidden widgets are overflowed that the
* OverflowableToolbar won't show the overflow panel anchor.
*/

const kHiddenButtonID = "fake-hidden-button";
const kDisplayNoneButtonID = "display-none-button";
const kWebExtensionButtonID1 = "fake-webextension-button-1";
const kWebExtensionButtonID2 = "fake-webextension-button-2";
let gWin = null;

add_setup(async function() {
await SpecialPowers.pushPrefEnv({
set: [["extensions.unifiedExtensions.enabled", true]],
});

gWin = await BrowserTestUtils.openNewBrowserWindow();

// To make it easier to write a test where we can control overflowing
// for a test that can run in a bunch of environments with slightly
// different rules on when things will overflow, we'll go ahead and
// just remove everything removable from the nav-bar by default. Then
// we'll add our hidden item, and a single WebExtension item, and
// force toolbar overflow.
let widgetIDs = CustomizableUI.getWidgetIdsInArea(CustomizableUI.AREA_NAVBAR);
for (let widgetID of widgetIDs) {
if (CustomizableUI.isWidgetRemovable(widgetID)) {
CustomizableUI.removeWidgetFromArea(widgetID);
}
}

CustomizableUI.createWidget({
id: kWebExtensionButtonID1,
label: "Test WebExtension widget 1",
defaultArea: CustomizableUI.AREA_NAVBAR,
webExtension: true,
});

CustomizableUI.createWidget({
id: kWebExtensionButtonID2,
label: "Test WebExtension widget 2",
defaultArea: CustomizableUI.AREA_NAVBAR,
webExtension: true,
});

// Let's force the WebExtension widgets to be significantly wider. This
// just makes it easier to ensure that both of these (which are to the left
// of the hidden widget) get overflowed.
for (let webExtID of [kWebExtensionButtonID1, kWebExtensionButtonID2]) {
let webExtNode = CustomizableUI.getWidget(webExtID).forWindow(gWin).node;
webExtNode.style.width = "100px";
}

CustomizableUI.createWidget({
id: kHiddenButtonID,
label: "Test hidden=true widget",
defaultArea: CustomizableUI.AREA_NAVBAR,
});

// Now hide the button with hidden=true so that it has no dimensions.
let hiddenButtonNode = CustomizableUI.getWidget(kHiddenButtonID).forWindow(
gWin
).node;
hiddenButtonNode.hidden = true;

CustomizableUI.createWidget({
id: kDisplayNoneButtonID,
label: "Test display:none widget",
defaultArea: CustomizableUI.AREA_NAVBAR,
});

// Now hide the button with display: none so that it has no dimensions.
let displayNoneButtonNode = CustomizableUI.getWidget(
kDisplayNoneButtonID
).forWindow(gWin).node;
displayNoneButtonNode.style.display = "none";

registerCleanupFunction(async () => {
CustomizableUI.destroyWidget(kWebExtensionButtonID1);
CustomizableUI.destroyWidget(kWebExtensionButtonID2);
CustomizableUI.destroyWidget(kHiddenButtonID);
CustomizableUI.destroyWidget(kDisplayNoneButtonID);
await BrowserTestUtils.closeWindow(gWin);
await CustomizableUI.reset();
});
});

add_task(async function test_hidden_widget_overflow() {
gWin.resizeTo(kForceOverflowWidthPx, window.outerHeight);

// Wait until the left-most fake WebExtension button is overflowing.
let webExtNode = CustomizableUI.getWidget(kWebExtensionButtonID1).forWindow(
gWin
).node;
await BrowserTestUtils.waitForMutationCondition(
webExtNode,
{ attributes: true },
() => {
return webExtNode.hasAttribute("overflowedItem");
}
);

let hiddenButtonNode = CustomizableUI.getWidget(kHiddenButtonID).forWindow(
gWin
).node;
Assert.ok(
hiddenButtonNode.hasAttribute("overflowedItem"),
"Hidden button should be overflowed."
);

let overflowButton = gWin.document.getElementById("nav-bar-overflow-button");

Assert.ok(
!BrowserTestUtils.is_visible(overflowButton),
"Overflow panel button should be hidden."
);
});
@@ -94,6 +94,8 @@ function createOverflowableToolbarWithPlacements(id, placements) {
tb.setAttribute("default-overflowpanel", overflowPanel.id);
tb.setAttribute("default-overflowtarget", overflowList.id);
tb.setAttribute("default-overflowbutton", chevron.id);
tb.setAttribute("addon-webext-overflowbutton", "unified-extensions-button");
tb.setAttribute("addon-webext-overflowtarget", "overflowed-extensions-list");

gNavToolbox.appendChild(tb);
CustomizableUI.registerToolbarNode(tb);
@@ -218,6 +218,7 @@ async function removeTab(tab) {
var AppUiTestInternals = {
awaitBrowserLoaded,
getBrowserActionWidget,
getBrowserActionWidgetId,
getPageActionButton,
getPageActionPopup,
getPanelForNode,
@@ -50,6 +50,11 @@ let win;
add_setup(async function() {
win = await promiseEnableUnifiedExtensions();

// Make sure extension buttons added to the navbar will not overflow in the
// panel, which could happen when a previous test file resizes the current
// window.
await ensureMaximizedWindow(win);

registerCleanupFunction(async () => {
await BrowserTestUtils.closeWindow(win);
});
@@ -77,8 +77,18 @@ function getVisibleMenuItems(popup) {
* 5. Unloads all of the test WebExtensions
*
* @param {DOMWindow} win The browser window to perform the test on.
* @param {Function} taskFn The async function to run once the window is in
* the overflow state. The function is called with the following arguments:
* @param {object} options Additional options when running this test.
* @param {Function} options.beforeOverflowed This optional async function will
* be run after the extensions are created and added to the toolbar, but
* before the toolbar overflows. The function is called with the following
* arguments:
*
* {string[]} extensionIDs: The IDs of the test WebExtensions.
*
* The return value of the function is ignored.
* @param {Function} options.whenOverflowed This optional async function will
* run once the window is in the overflow state. The function is called
* with the following arguments:
*
* {Element} defaultList: The DOM element that holds overflowed default
* items.
@@ -86,24 +96,26 @@ function getVisibleMenuItems(popup) {
* WebExtension browser_actions when Unified Extensions is enabled.
* {string[]} extensionIDs: The IDs of the test WebExtensions.
*
* The function is expected to return a Promise that does not resolve
* with anything.
* The return value of the function is ignored.
* @param {Function} options.afterUnderflowed This optional async function will
* be run after the window is expanded and the toolbar has underflowed, but
* before the extensions are removed. This function is not passed any
* arguments. The return value of the function is ignored.
*
*/
async function withWindowOverflowed(win, taskFn) {
async function withWindowOverflowed(
win,
{
beforeOverflowed = async () => {},
whenOverflowed = async () => {},
afterUnderflowed = async () => {},
} = {}
) {
const doc = win.document;
doc.documentElement.removeAttribute("persist");
const navbar = doc.getElementById(CustomizableUI.AREA_NAVBAR);

win.moveTo(0, 0);

const widthDiff = win.screen.availWidth - win.outerWidth;
const heightDiff = win.screen.availHeight - win.outerHeight;

if (widthDiff || heightDiff) {
let resizeDone = BrowserTestUtils.waitForEvent(win, "resize", false);
win.resizeBy(widthDiff, heightDiff);
await resizeDone;
}
await ensureMaximizedWindow(win);

// The OverflowableToolbar operates asynchronously at times, so we will
// poll a widget's overflowedItem attribute to detect whether or not the
@@ -223,61 +235,83 @@ async function withWindowOverflowed(win, taskFn) {
await listener.promise;
CustomizableUI.removeListener(listener);

const originalWindowWidth = win.outerWidth;

let widgetOverflowListener = {
_remainingOverflowables: NUM_EXTENSIONS + DEFAULT_WIDGET_IDS.length,
_deferred: PromiseUtils.defer(),

get promise() {
return this._deferred.promise;
},

onWidgetOverflow(widgetNode, areaNode) {
this._remainingOverflowables--;
if (!this._remainingOverflowables) {
this._deferred.resolve();
}
},
};
CustomizableUI.addListener(widgetOverflowListener);

win.resizeTo(OVERFLOW_WINDOW_WIDTH_PX, win.outerHeight);
await widgetOverflowListener.promise;
CustomizableUI.removeListener(widgetOverflowListener);

Assert.ok(
navbar.hasAttribute("overflowing"),
"Should have an overflowing toolbar."
);

const defaultList = doc.getElementById(
navbar.getAttribute("default-overflowtarget")
);

const unifiedExtensionList = doc.getElementById(
navbar.getAttribute("addon-webext-overflowtarget")
);

const extensionIDs = extensions.map(extension => extension.id);

try {
await taskFn(defaultList, unifiedExtensionList, extensionIDs);
info("Running beforeOverflowed task");
await beforeOverflowed(extensionIDs);
} finally {
win.resizeTo(originalWindowWidth, win.outerHeight);
await BrowserTestUtils.waitForEvent(win, "resize");
const originalWindowWidth = win.outerWidth;

// Notably, we don't wait for the nav-bar to not have the "overflowing"
// attribute. This is because we might be running in an environment
// where the nav-bar was overflowing to begin with. Let's just hope that
// our sign-post widget has stopped overflowing.
await TestUtils.waitForCondition(() => {
return !doc
.getElementById(signpostWidgetID)
.hasAttribute("overflowedItem");
// The beforeOverflowed task may have moved some items out from the navbar,
// so only listen for overflows for items still in there.
const browserActionIDs = extensionIDs.map(id =>
AppUiTestInternals.getBrowserActionWidgetId(id)
);
const browserActionsInNavBar = browserActionIDs.filter(widgetID => {
let placement = CustomizableUI.getPlacementOfWidget(widgetID);
return placement.area == CustomizableUI.AREA_NAVBAR;
});

await Promise.all(extensions.map(extension => extension.unload()));
let widgetOverflowListener = {
_remainingOverflowables:
browserActionsInNavBar.length + DEFAULT_WIDGET_IDS.length,
_deferred: PromiseUtils.defer(),

get promise() {
return this._deferred.promise;
},

onWidgetOverflow(widgetNode, areaNode) {
this._remainingOverflowables--;
if (!this._remainingOverflowables) {
this._deferred.resolve();
}
},
};
CustomizableUI.addListener(widgetOverflowListener);

win.resizeTo(OVERFLOW_WINDOW_WIDTH_PX, win.outerHeight);
await widgetOverflowListener.promise;
CustomizableUI.removeListener(widgetOverflowListener);

Assert.ok(
navbar.hasAttribute("overflowing"),
"Should have an overflowing toolbar."
);

const defaultList = doc.getElementById(
navbar.getAttribute("default-overflowtarget")
);

const unifiedExtensionList = doc.getElementById(
navbar.getAttribute("addon-webext-overflowtarget")
);

try {
info("Running whenOverflowed task");
await whenOverflowed(defaultList, unifiedExtensionList, extensionIDs);
} finally {
win.resizeTo(originalWindowWidth, win.outerHeight);
await BrowserTestUtils.waitForEvent(win, "resize");

// Notably, we don't wait for the nav-bar to not have the "overflowing"
// attribute. This is because we might be running in an environment
// where the nav-bar was overflowing to begin with. Let's just hope that
// our sign-post widget has stopped overflowing.
await TestUtils.waitForCondition(() => {
return !doc
.getElementById(signpostWidgetID)
.hasAttribute("overflowedItem");
});

try {
info("Running afterUnderflowed task");
await afterUnderflowed();
} finally {
await Promise.all(extensions.map(extension => extension.unload()));
}
}
}
}

@@ -394,10 +428,10 @@ async function verifyExtensionWidget(win, widget, unifiedExtensionsEnabled) {
*/
add_task(async function test_overflowable_toolbar() {
let win = await promiseEnableUnifiedExtensions();
let movedNode;

await withWindowOverflowed(
win,
async (defaultList, unifiedExtensionList, extensionIDs) => {
await withWindowOverflowed(win, {
whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
// Ensure that there are 5 items in the Unified Extensions overflow
// list, and the default widgets should all be in the default overflow
// list (though there might be more items from the nav-bar in there that
@@ -423,8 +457,35 @@ add_task(async function test_overflowable_toolbar() {
);
await verifyExtensionWidget(win, child, true);
}
}
);

let extensionWidgetID = AppUiTestInternals.getBrowserActionWidgetId(
extensionIDs.at(-1)
);
movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win)
.node;
Assert.equal(movedNode.getAttribute("cui-areatype"), "toolbar");

CustomizableUI.addWidgetToArea(
extensionWidgetID,
CustomizableUI.AREA_ADDONS
);

Assert.equal(
movedNode.getAttribute("cui-areatype"),
"panel",
"The moved browser action button should have the right cui-areatype set."
);
},
afterUnderflowed: async () => {
// Ensure that the moved node's parent is still the add-ons panel.
Assert.equal(
movedNode.parentElement.id,
CustomizableUI.AREA_ADDONS,
"The browser action should still be in the addons panel"
);
CustomizableUI.addWidgetToArea(movedNode.id, CustomizableUI.AREA_NAVBAR);
},
});

await BrowserTestUtils.closeWindow(win);
});
@@ -436,9 +497,8 @@ add_task(async function test_overflowable_toolbar() {
add_task(async function test_overflowable_toolbar_legacy() {
let win = await promiseDisableUnifiedExtensions();

await withWindowOverflowed(
win,
async (defaultList, unifiedExtensionList, extensionIDs) => {
await withWindowOverflowed(win, {
whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
// First, ensure that all default items are in the default overflow list.
// (though there might be more items from the nav-bar in there that
// already existed in the nav-bar before we put the default widgets in
@@ -465,8 +525,8 @@ add_task(async function test_overflowable_toolbar_legacy() {
0,
"Unified Extension overflow list should be empty."
);
}
);
},
});

await BrowserTestUtils.closeWindow(win);
await SpecialPowers.popPrefEnv();
@@ -475,9 +535,8 @@ add_task(async function test_overflowable_toolbar_legacy() {
add_task(async function test_menu_button() {
let win = await promiseEnableUnifiedExtensions();

await withWindowOverflowed(
win,
async (defaultList, unifiedExtensionList, extensionIDs) => {
await withWindowOverflowed(win, {
whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
Assert.ok(
unifiedExtensionList.children.length,
"Should have items in the Unified Extension list."
@@ -631,8 +690,8 @@ add_task(async function test_menu_button() {
);

await closeExtensionsPanel(win);
}
);
},
});

await BrowserTestUtils.closeWindow(win);
});
@@ -640,9 +699,8 @@ add_task(async function test_menu_button() {
add_task(async function test_context_menu() {
let win = await promiseEnableUnifiedExtensions();

await withWindowOverflowed(
win,
async (defaultList, unifiedExtensionList, extensionIDs) => {
await withWindowOverflowed(win, {
whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
Assert.ok(
unifiedExtensionList.children.length,
"Should have items in the Unified Extension list."
@@ -730,8 +788,8 @@ add_task(async function test_context_menu() {

// We can close the unified extensions panel now.
await closeExtensionsPanel(win);
}
);
},
});

await BrowserTestUtils.closeWindow(win);
});
@@ -739,9 +797,8 @@ add_task(async function test_context_menu() {
add_task(async function test_action_button() {
let win = await promiseEnableUnifiedExtensions();

await withWindowOverflowed(
win,
async (defaultList, unifiedExtensionList, extensionIDs) => {
await withWindowOverflowed(win, {
whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
Assert.ok(
unifiedExtensionList.children.length,
"Should have items in the Unified Extension list."
@@ -876,8 +933,56 @@ add_task(async function test_action_button() {
await closeExtensionsPanel(win);
}
);
}
);
},
});

await BrowserTestUtils.closeWindow(win);
});

/**
* Tests that if we pin a browser action button listed in the addons panel
* to the toolbar when that button would immediately overflow, that the
* button is put into the addons panel overflow list.
*/
add_task(async function test_pinning_to_toolbar_when_overflowed() {
let win = await promiseEnableUnifiedExtensions();
let movedNode;
let extensionWidgetID;

await withWindowOverflowed(win, {
beforeOverflowed: async extensionIDs => {
// Before we overflow the toolbar, let's move the last item to the addons
// panel.
extensionWidgetID = AppUiTestInternals.getBrowserActionWidgetId(
extensionIDs.at(-1)
);

movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win)
.node;

CustomizableUI.addWidgetToArea(
extensionWidgetID,
CustomizableUI.AREA_ADDONS
);
},
whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
// Now that the window is overflowed, let's move the widget in the addons
// panel back to the navbar. This should cause the widget to overflow back
// into the addons panel.
CustomizableUI.addWidgetToArea(
extensionWidgetID,
CustomizableUI.AREA_NAVBAR
);
await TestUtils.waitForCondition(() => {
return movedNode.hasAttribute("overflowedItem");
});
Assert.equal(
movedNode.parentElement,
unifiedExtensionList,
"Should have overflowed the extension button to the right list."
);
},
});

await BrowserTestUtils.closeWindow(win);
});

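The refactor above replaces the single taskFn callback of withWindowOverflowed() with an options object. Based on the documented parameters, a call under the new shape would look roughly like this (a sketch only, with the callback bodies left empty):

await withWindowOverflowed(win, {
  beforeOverflowed: async extensionIDs => {
    // Runs after the test extensions are created and placed in the toolbar,
    // but before the toolbar is forced to overflow.
  },
  whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => {
    // Runs while the window is narrow enough for the toolbar to overflow.
  },
  afterUnderflowed: async () => {
    // Runs after the window is widened again, before the extensions unload.
  },
});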
@@ -6,6 +6,7 @@
/* exported clickUnifiedExtensionsItem,
closeExtensionsPanel,
createExtensions,
ensureMaximizedWindow,
getUnifiedExtensionsItem,
openExtensionsPanel,
openUnifiedExtensionsContextMenu,
@@ -139,3 +140,24 @@ const createExtensions = (
})
);
};

/**
* Given a window, this test helper resizes it so that the window takes most of
* the available screen size (unless the window is already maximized).
*/
const ensureMaximizedWindow = async win => {
let resizeDone = Promise.resolve();

win.moveTo(0, 0);

const widthDiff = win.screen.availWidth - win.outerWidth;
const heightDiff = win.screen.availHeight - win.outerHeight;

if (widthDiff || heightDiff) {
resizeDone = BrowserTestUtils.waitForEvent(win, "resize", false);
win.windowUtils.ensureDirtyRootFrame();
win.resizeBy(widthDiff, heightDiff);
}

return resizeDone;
};

@@ -4,7 +4,6 @@

import { AppConstants } from "resource://gre/modules/AppConstants.sys.mjs";

const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm");
import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs";

import {
@@ -506,7 +505,7 @@ EdgeProfileMigrator.prototype.getLastUsedDate = async function() {
if (sourceProfiles !== null || !lazy.gEdgeDatabase) {
return Promise.resolve(new Date(0));
}
let logFilePath = OS.Path.join(
let logFilePath = PathUtils.join(
lazy.gEdgeDatabase.parent.path,
"LogFiles",
"edb.log"
@@ -517,11 +516,9 @@ EdgeProfileMigrator.prototype.getLastUsedDate = async function() {
);
let cookiePaths = cookieMigrator._cookiesFolders.map(f => f.path);
let datePromises = [logFilePath, dbPath, ...cookiePaths].map(path => {
return OS.File.stat(path)
.catch(() => null)
.then(info => {
return info ? info.lastModificationDate : 0;
});
return IOUtils.stat(path)
.then(info => info.lastModified)
.catch(() => 0);
});
datePromises.push(
new Promise(resolve => {

@@ -28,7 +28,6 @@ ChromeUtils.defineModuleGetter(
"SessionMigration",
"resource:///modules/sessionstore/SessionMigration.jsm"
);
ChromeUtils.defineModuleGetter(lazy, "OS", "resource://gre/modules/osfile.jsm");

export function FirefoxProfileMigrator() {
this.wrappedJSObject = this; // for testing...
@@ -243,32 +242,28 @@ FirefoxProfileMigrator.prototype._getResourcesInternal = function(
// if we can, copy it to the new profile and set sync's username pref
// (which acts as a de-facto flag to indicate if sync is configured)
try {
let oldPath = lazy.OS.Path.join(
let oldPath = PathUtils.join(
sourceProfileDir.path,
"signedInUser.json"
);
let exists = await lazy.OS.File.exists(oldPath);
let exists = await IOUtils.exists(oldPath);
if (exists) {
let raw = await lazy.OS.File.read(oldPath, { encoding: "utf-8" });
let data = JSON.parse(raw);
let data = await IOUtils.readJSON(oldPath);
if (data && data.accountData && data.accountData.email) {
let username = data.accountData.email;
// copy the file itself.
await lazy.OS.File.copy(
await IOUtils.copy(
oldPath,
lazy.OS.Path.join(currentProfileDir.path, "signedInUser.json")
PathUtils.join(currentProfileDir.path, "signedInUser.json")
);
// Now we need to know whether Sync is actually configured for this
// user. The only way we know is by looking at the prefs file from
// the old profile. We avoid trying to do a full parse of the prefs
// file and even avoid parsing the single string value we care
// about.
let prefsPath = lazy.OS.Path.join(
sourceProfileDir.path,
"prefs.js"
);
if (await lazy.OS.File.exists(oldPath)) {
let rawPrefs = await lazy.OS.File.read(prefsPath, {
let prefsPath = PathUtils.join(sourceProfileDir.path, "prefs.js");
if (await IOUtils.exists(oldPath)) {
let rawPrefs = await IOUtils.readUTF8(prefsPath, {
encoding: "utf-8",
});
if (/^user_pref\("services\.sync\.username"/m.test(rawPrefs)) {
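The migrator changes above swap the legacy OS.File / OS.Path API for IOUtils / PathUtils. A rough mapping of the calls used in this patch, with an illustrative path (both APIs are asynchronous and promise-based):

// was OS.Path.join(dir, "signedInUser.json")
const path = PathUtils.join(PathUtils.profileDir, "signedInUser.json");
// was OS.File.exists(path)
if (await IOUtils.exists(path)) {
  // was OS.File.read(path, { encoding: "utf-8" }) followed by JSON.parse()
  const data = await IOUtils.readJSON(path);
  // was OS.File.stat(path) and info.lastModificationDate
  const { lastModified } = await IOUtils.stat(path);
}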
@@ -51,8 +51,8 @@ export class _TopSites extends React.PureComponent {
const link = {
customScreenshotURL: topSiteSpoc.image_src,
type: "SPOC",
label: topSiteSpoc.sponsor,
title: topSiteSpoc.sponsor,
label: topSiteSpoc.title || topSiteSpoc.sponsor,
title: topSiteSpoc.title || topSiteSpoc.sponsor,
url: topSiteSpoc.url,
flightId: topSiteSpoc.flight_id,
id: topSiteSpoc.id,

@@ -13660,8 +13660,8 @@ class TopSites_TopSites_TopSites extends (external_React_default()).PureComponen
const link = {
customScreenshotURL: topSiteSpoc.image_src,
type: "SPOC",
label: topSiteSpoc.sponsor,
title: topSiteSpoc.sponsor,
label: topSiteSpoc.title || topSiteSpoc.sponsor,
title: topSiteSpoc.title || topSiteSpoc.sponsor,
url: topSiteSpoc.url,
flightId: topSiteSpoc.flight_id,
id: topSiteSpoc.id,

@@ -1059,6 +1059,9 @@ class TelemetryFeed {
}
Glean.newtab.newtabCategory.set(newtabCategory);
Glean.newtab.homepageCategory.set(homePageCategory);
if (lazy.NimbusFeatures.glean.getVariable("newtabPingEnabled") ?? true) {
GleanPings.newtab.submit("component_init");
}
}
}

@@ -1275,12 +1278,6 @@ class TelemetryFeed {
}
setNewtabPrefMetrics();
Glean.pocket.isSignedIn.set(lazy.pktApi.isUserLoggedIn());
if (
this.telemetryEnabled &&
(lazy.NimbusFeatures.glean.getVariable("newtabPingEnabled") ?? true)
) {
GleanPings.newtab.submit("component_init");
}
}

uninit() {

@@ -15,7 +15,8 @@ newtab:
The newtab visit ended.
Could be by navigation, being closed, etc.
component_init: |
The newtab component init'd.
The newtab component init'd,
and the newtab and homepage settings have been categorized.
This is mostly to ensure we hear at least once from clients configured to
not show a newtab UI.
include_client_id: true

@@ -199,7 +199,7 @@ add_task(async function test_newtab_doesnt_send_nimbus() {
await SpecialPowers.popPrefEnv();
});

add_task(async function test_newtab_init_sends_ping() {
add_task(async function test_newtab_categorization_sends_ping() {
await SpecialPowers.pushPrefEnv({
set: [["browser.newtabpage.activity-stream.telemetry", true]],
});
@@ -214,7 +214,7 @@ add_task(async function test_newtab_init_sends_ping() {
pingSent = true;
Assert.equal(reason, "component_init");
});
TelemetryFeed.init(); // INIT action doesn't happen by default.
await TelemetryFeed.sendPageTakeoverData();
Assert.ok(pingSent, "ping was sent");

await SpecialPowers.popPrefEnv();

@@ -181,13 +181,6 @@ describe("TelemetryFeed", () => {
assert.calledWithExactly(stub, "unload", instance.handleEvent);
assert.calledWithExactly(stub, "TabPinned", instance.handleEvent);
});
it("should send a 'newtab' ping", () => {
instance._prefs.set(TELEMETRY_PREF, true);
sandbox.spy(GleanPings.newtab, "submit");
instance.init();
assert.calledOnce(GleanPings.newtab.submit);
assert.calledWithExactly(GleanPings.newtab.submit, "component_init");
});
describe("telemetry pref changes from false to true", () => {
beforeEach(() => {
FakePrefs.prototype.prefs = {};
@@ -1725,6 +1718,13 @@ describe("TelemetryFeed", () => {
assert.calledOnce(Glean.newtab.homepageCategory.set);
assert.calledWith(Glean.newtab.homepageCategory.set, "disabled");
});
it("should send a 'newtab' ping", async () => {
instance._prefs.set(TELEMETRY_PREF, true);
sandbox.spy(GleanPings.newtab, "submit");
await instance.sendPageTakeoverData();
assert.calledOnce(GleanPings.newtab.submit);
assert.calledWithExactly(GleanPings.newtab.submit, "component_init");
});
});
describe("#sendDiscoveryStreamImpressions", () => {
it("should not send impression pings if there is no impression data", () => {
@@ -483,7 +483,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "9884a10915a5d27531be1848bb3056bfb541661e"
"revision": "51b4794b186d52193f1d647e7399e2557c113dae"
},
"es-CL": {
"pin": false,
@@ -645,7 +645,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "e270b824dd33e62364be110b6d15723af68e1b1e"
"revision": "cae65157bbcebc26625586b1920d1f34d9683653"
},
"fy-NL": {
"pin": false,
@@ -843,7 +843,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "268da803b62cbe9b579100ade7c3735dcf42baef"
"revision": "9432435c581d8839ad71ce11acf5098090c6e9e5"
},
"hy-AM": {
"pin": false,
@@ -933,7 +933,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "4d395af1ad99bbef4e8e631e415d3ece59910cb5"
"revision": "5f4dedafbdf70c69d98352064b3a3cf4c58a94a9"
},
"it": {
"pin": false,
@@ -1425,7 +1425,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "e2004dd62601de421adba549123e28cf12667900"
"revision": "ffb56c5ea7907321595a1308c6232a26f4660a40"
},
"rm": {
"pin": false,
@@ -1803,7 +1803,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "8b56fdc7d226a359747095054358cc0808438b8e"
"revision": "8d24e72d9f81161e2207a68f56cdc169b8c8110c"
},
"trs": {
"pin": false,
@@ -1947,7 +1947,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "edededa35a86a31ec0472037da9581f588eedf47"
"revision": "574722005ef12d41efe4b2261e91fb21974fee7c"
},
"zh-TW": {
"pin": false,
@@ -5,45 +5,79 @@
:root {
/* uei = unified extensions item */
--uei-icon-size: 32px;
--uei-dot-position: calc(var(--uei-icon-size) / 2 + var(--arrowpanel-menuitem-margin-inline) + var(--arrowpanel-menuitem-padding-inline) - 4px);
--uei-attention-dot-size: 8px;
--uei-button-hover-bgcolor: var(--panel-item-hover-bgcolor);
--uei-button-hover-color: inherit;
--uei-button-active-bgcolor: var(--panel-item-active-bgcolor);
--uei-button-active-color: inherit;
--uei-button-attention-dot-color: var(--tab-attention-icon-color);
}

:root[uidensity="compact"] {
--uei-icon-size: 24px;
}

/* Align extensions rendered with custom elements. */
unified-extensions-item {
align-items: center;
display: flex;
#unified-extensions-panel {
--uei-dot-horizontal-position-in-panel: calc(var(--uei-icon-size) / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2);
--uei-dot-vertical-position-in-panel: max(0px, calc(var(--arrowpanel-menuitem-padding-block) / 2 - var(--uei-attention-dot-size) / 2));
}

.unified-extensions-item {
/* Align extensions rendered with custom elements. */
unified-extensions-item {
display: flex;
align-items: center;
}

#unified-extensions-panel .unified-extensions-item {
/* Have some spacing between items in the panel; mainly useful for when HCM is enabled. */
padding-block: 2px;
}

/* This is based on the attention UI defined in:
/* The "attention UI" for the unified extensions is based on:
* https://searchfox.org/mozilla-central/rev/560b7b1b17/browser/themes/shared/tabs.css#624 */
#unified-extensions-button[attention],
.unified-extensions-item[attention] {
background-image: radial-gradient(circle, var(--tab-attention-icon-color), var(--tab-attention-icon-color) 2px, transparent 2px);
background-position: center bottom max(0px, calc(var(--arrowpanel-menuitem-padding-block) - 4px));
background-size: 8px 8px;

/* On the main unified extensions button, we draw the attention on the icon element. */
#unified-extensions-button[attention] > .toolbarbutton-icon,
/* For extension widgets placed in a toolbar, we use the stack element (containing the icon)
* of the action button to draw the attention dot.
* Otherwise (in the extensions panel), we use the action button itself. */
toolbar .unified-extensions-item[attention] > .unified-extensions-item-action > .toolbarbutton-badge-stack,
#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action,
.widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action {
background-image: radial-gradient(circle, var(--uei-button-attention-dot-color), var(--uei-button-attention-dot-color) 2px, transparent 2px);
background-size: var(--uei-attention-dot-size) var(--uei-attention-dot-size);
background-repeat: no-repeat;
}

/* Adjust attention dots for the custom elements. */
.unified-extensions-list > unified-extensions-item[attention] {
background-position: left var(--uei-dot-position) bottom 0px;
/* Adjust attention dots position in the toolbar. */
#unified-extensions-button[attention] > .toolbarbutton-icon,
toolbar .unified-extensions-item[attention] > .unified-extensions-item-action > .toolbarbutton-badge-stack {
background-position: center bottom calc(var(--toolbarbutton-inner-padding) / 2 - var(--uei-attention-dot-size) / 2);
}

/* Adjust attention dots for the custom elements. */
.unified-extensions-list > unified-extensions-item[attention]:-moz-locale-dir(rtl) {
background-position-x: right var(--uei-dot-position);
/* Adjust attention dots position in the unified extensions panel. */
#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action {
background-position: left var(--uei-dot-horizontal-position-in-panel) bottom var(--uei-dot-vertical-position-in-panel);
}

/* Adjust attention dots position in the unified extensions panel for RTL. */
#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action:-moz-locale-dir(rtl) {
background-position-x: right var(--uei-dot-horizontal-position-in-panel);
}

/* Adjust attention dots position in the overflow panel. */
.widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action {
background-position-x: left calc(16px / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2);
background-position-y: bottom calc(var(--arrowpanel-menuitem-padding-block) / 2 - var(--uei-attention-dot-size) / 2);
}

:root[uidensity="compact"] .widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action {
background-position-y: bottom -2px;
}

/* Adjust attention dots position in the overflow panel for RTL. */
.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action:-moz-locale-dir(rtl) {
background-position-x: right calc(16px / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2);
}

.unified-extensions-item-action {
@@ -165,25 +199,9 @@ toolbaritem.unified-extensions-item[unified-extensions="true"] .unified-extensio
display: block;
}

:is(#unified-extensions-panel, .widget-overflow-list) toolbaritem.unified-extensions-item[attention] {
background-position: left calc(12px + var(--arrowpanel-menuitem-margin-inline)) bottom;
}

:is(#unified-extensions-panel, .widget-overflow-list) toolbaritem.unified-extensions-item[attention]:-moz-locale-dir(rtl) {
background-position-x: right calc(12px + var(--arrowpanel-menuitem-margin-inline));
}

.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"] {
background-position-x: left 12px;
}

.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"]:-moz-locale-dir(rtl) {
background-position-x: right 12px;
}

@media (prefers-contrast) {
.unified-extensions-item[attention] {
background-image: radial-gradient(circle, ButtonText, ButtonText 2px, transparent 2px);
:root {
--uei-button-attention-dot-color: ButtonText;
}

.unified-extensions-item-action:not([disabled]).subviewbutton,
@@ -3,12 +3,14 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function

import os
import subprocess

import buildconfig
import mozpack.path as mozpath
import os
import six
import subprocess
import pytoml
import six


# Try to read the package name or otherwise assume same name as the crate path.

@@ -6,21 +6,20 @@
# Only necessary for flake8 to be happy...
from __future__ import print_function

import os
import os.path
import shutil
import subprocess
import platform
import json
import argparse
import errno
import fnmatch
import glob
import errno
import json
import os
import os.path
import platform
import re
import shutil
import subprocess
import sys
import tarfile
from contextlib import contextmanager

from shutil import which

import zstandard

@@ -5,6 +5,7 @@
"cxx": "{MOZ_FETCHES_DIR}/clang/bin/clang++",
"as": "{MOZ_FETCHES_DIR}/clang/bin/clang",
"patches": [
"clang_include_cleaner.patch",
"clang-tidy-ci.patch"
]
}

@@ -10,6 +10,7 @@
"libtool": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-libtool",
"ld": "{MOZ_FETCHES_DIR}/clang/bin/clang",
"patches": [
"clang_include_cleaner.patch",
"clang-tidy-ci.patch"
]
}

@@ -5,6 +5,7 @@
"cxx": "cl.exe",
"ml": "ml64.exe",
"patches": [
"clang_include_cleaner.patch",
"clang-tidy-ci.patch"
]
}

build/build-clang/clang_include_cleaner.patch (new file, 2235 lines)
File diff suppressed because it is too large.
@@ -3,8 +3,9 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
from mozbuild.base import MozbuildObject

from mozbuild.backend.configenvironment import PartialConfigEnvironment
from mozbuild.base import MozbuildObject

config = MozbuildObject.from_environment()
partial_config = PartialConfigEnvironment(config.topobjdir)

@@ -27,7 +27,6 @@
import os
import sys


SANITIZERS = {
"asan": "address",
"hwasan": "hwaddress",

@@ -5,10 +5,10 @@

from __future__ import with_statement

from optparse import OptionParser
import hashlib
import logging
import os
from optparse import OptionParser

logger = logging.getLogger("checksums.py")


@@ -4,7 +4,6 @@
import json
import os
import posixpath

from os import PathLike

# `typing.Literal` not available until Python 3.8;

@@ -3,10 +3,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import glob
import shutil
import errno
import glob
import os
import shutil

import ThirdPartyPaths
import ThreadAllows

@@ -7,9 +7,9 @@

from __future__ import unicode_literals

import difflib
import logging
import os
import difflib
import unittest

import buildconfig

@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys

from mozbuild.preprocessor import Preprocessor



@@ -5,7 +5,6 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import json

from argparse import ArgumentParser

ALL_HARNESSES = [

@@ -20,10 +20,8 @@ if sys.version_info[0] < 3:
else:
from importlib.abc import MetaPathFinder


from types import ModuleType


STATE_DIR_FIRST_RUN = """
Mach and the build system store shared state in a common directory
on the filesystem. The following directory will be created:
@@ -145,7 +143,7 @@ def initialize(topsrcdir):
)
]

from mach.util import setenv, get_state_dir
from mach.util import get_state_dir, setenv

state_dir = _create_state_dir()

@@ -157,7 +155,6 @@ def initialize(topsrcdir):

import mach.base
import mach.main

from mach.main import MachCommandReference

# Centralized registry of available mach commands

@@ -2,12 +2,13 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import buildconfig
import os
import shutil
import subprocess
import os
import sys

import buildconfig


def relativize(path, base=None):
# For absolute path in Unix builds, we need relative paths because

@@ -7,18 +7,18 @@
# certificates used for SSL testing in Mochitest. The already generated
# certs are located at $topsrcdir/build/pgo/certs/ .

import mozinfo
import os
import random
import re
import shutil
import subprocess
import sys
from distutils.spawn import find_executable

from mozbuild.base import MozbuildObject, BinaryNotFoundException
import mozinfo
from mozbuild.base import BinaryNotFoundException, MozbuildObject
from mozfile import NamedTemporaryFile, TemporaryDirectory
from mozprofile.permissions import ServerLocations
from distutils.spawn import find_executable

dbFiles = [
re.compile("^cert[0-9]+\.db$"),

@@ -4,19 +4,19 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import glob
import json
import os
import sys
import glob
import subprocess
import sys

import mozcrash
from mozbuild.base import MozbuildObject, BinaryNotFoundException
from mozbuild.base import BinaryNotFoundException, MozbuildObject
from mozfile import TemporaryDirectory
from mozhttpd import MozHttpd
from mozprofile import FirefoxProfile, Preferences
from mozprofile.permissions import ServerLocations
from mozrunner import FirefoxRunner, CLI
from mozrunner import CLI, FirefoxRunner
from six import string_types

PORT = 8888

@@ -2,9 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import buildconfig
import textwrap
import string
import textwrap

import buildconfig


def generate_bool(name):

@@ -2,13 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from argparse import ArgumentParser
import os
from pathlib import Path
import re
import shutil
import subprocess
import sys
from argparse import ArgumentParser
from pathlib import Path

from buildconfig import substs

"""

@@ -15,9 +15,9 @@
# to indicate that files should be uploaded including their paths relative
# to the base path.

import sys
import os
import shutil
import sys
from optparse import OptionParser



@@ -6,22 +6,23 @@
from __future__ import absolute_import, print_function, unicode_literals

import argparse
from contextlib import contextmanager
import gzip
import io
import logging
import os
import sys
import tarfile
import time
from contextlib import contextmanager
from threading import Event, Thread

import requests
from mozbuild.generated_sources import (
get_filename_with_digest,
get_s3_region_and_bucket,
)
import os
from six.moves.queue import Queue
import requests
import sys
import tarfile
from requests.packages.urllib3.util.retry import Retry
from threading import Event, Thread
import time
from six.moves.queue import Queue

# Arbitrary, should probably measure this.
NUM_WORKER_THREADS = 10

@@ -6,18 +6,13 @@ from __future__ import absolute_import, unicode_literals

import json
import logging
import mozinfo
import os
import time

from mach.decorators import (
Command,
CommandArgument,
)
from mozbuild.base import (
MachCommandConditions as conditions,
BinaryNotFoundException,
)
import mozinfo
from mach.decorators import Command, CommandArgument
from mozbuild.base import BinaryNotFoundException
from mozbuild.base import MachCommandConditions as conditions


def is_valgrind_build(cls):

@@ -3,17 +3,17 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys

import yaml
from vsdownload import (
    getArgsParser,
    getManifest,
    getPackages,
    getSelectedPackages,
    setPackageSelection,
    lowercaseIgnores,
    setPackageSelection,
)
import sys
import yaml


if __name__ == "__main__":
parser = getArgsParser()

@@ -3,17 +3,15 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from vsdownload import (
    downloadPackages,
    extractPackages,
)
from pathlib import Path
from tempfile import TemporaryDirectory
from zstandard import ZstdCompressor
import argparse
import os
import tarfile
from pathlib import Path
from tempfile import TemporaryDirectory

import yaml
from vsdownload import downloadPackages, extractPackages
from zstandard import ZstdCompressor


def tzstd_path(path):

@@ -7,11 +7,12 @@
# run the Winchecksec tool (https://github.com/trailofbits/winchecksec)
# against a given Windows binary.
|
||||
|
||||
import buildconfig
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import buildconfig
|
||||
|
||||
# usage
|
||||
if len(sys.argv) != 2:
|
||||
print("""usage : autowinchecksec.by path_to_binary""")
|
||||
|
@ -4,6 +4,7 @@
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
from buildconfig import substs
|
||||
|
||||
|
||||
|
@ -38,8 +38,8 @@ mozilla::LogModule* gfxUserFontSet::GetUserFontsLog() {
|
||||
static Atomic<uint64_t> sFontSetGeneration(0);
|
||||
|
||||
gfxUserFontEntry::gfxUserFontEntry(
|
||||
gfxUserFontSet* aFontSet, const nsTArray<gfxFontFaceSrc>& aFontFaceSrcList,
|
||||
WeightRange aWeight, StretchRange aStretch, SlantStyleRange aStyle,
|
||||
const nsTArray<gfxFontFaceSrc>& aFontFaceSrcList, WeightRange aWeight,
|
||||
StretchRange aStretch, SlantStyleRange aStyle,
|
||||
const nsTArray<gfxFontFeature>& aFeatureSettings,
|
||||
const nsTArray<gfxFontVariation>& aVariationSettings,
|
||||
uint32_t aLanguageOverride, gfxCharacterMap* aUnicodeRanges,
|
||||
@ -52,8 +52,7 @@ gfxUserFontEntry::gfxUserFontEntry(
|
||||
mSeenLocalSource(false),
|
||||
mUnsupportedFormat(false),
|
||||
mFontDisplay(aFontDisplay),
|
||||
mLoader(nullptr),
|
||||
mFontSet(aFontSet) {
|
||||
mLoader(nullptr) {
|
||||
mIsUserFontContainer = true;
|
||||
mSrcList = aFontFaceSrcList.Clone();
|
||||
mCurrentSrcIndex = 0;
|
||||
@ -410,6 +409,15 @@ static bool IgnorePrincipal(gfxFontSrcURI* aURI) {
|
||||
}
|
||||
|
||||
void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
RefPtr<gfxUserFontSet> fontSet = GetUserFontSet();
|
||||
if (NS_WARN_IF(!fontSet)) {
|
||||
LOG(("userfonts (%p) failed expired font set for (%s)\n", fontSet.get(),
|
||||
mFamilyName.get()));
|
||||
mFontDataLoadingState = LOADING_FAILED;
|
||||
SetLoadState(STATUS_FAILED);
|
||||
return;
|
||||
}
|
||||
|
||||
uint32_t numSrc = mSrcList.Length();
|
||||
|
||||
// load each src entry in turn, until a local face is found
|
||||
@ -425,7 +433,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
gfxFontEntry* fe = nullptr;
|
||||
if (!pfl->IsFontFamilyWhitelistActive()) {
|
||||
fe = gfxPlatform::GetPlatform()->LookupLocalFont(
|
||||
mFontSet->GetPresContext(), currSrc.mLocalName, Weight(), Stretch(),
|
||||
fontSet->GetPresContext(), currSrc.mLocalName, Weight(), Stretch(),
|
||||
SlantStyle());
|
||||
// Note that we've attempted a local lookup, even if it failed,
|
||||
// as this means we are dependent on any updates to the font list.
|
||||
@ -440,8 +448,8 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
}
|
||||
if (fe) {
|
||||
LOG(("userfonts (%p) [src %d] loaded local: (%s) for (%s) gen: %8.8x\n",
|
||||
mFontSet, mCurrentSrcIndex, currSrc.mLocalName.get(),
|
||||
mFamilyName.get(), uint32_t(mFontSet->mGeneration)));
|
||||
fontSet.get(), mCurrentSrcIndex, currSrc.mLocalName.get(),
|
||||
mFamilyName.get(), uint32_t(fontSet->mGeneration)));
|
||||
fe->mFeatureSettings.AppendElements(mFeatureSettings);
|
||||
fe->mVariationSettings.AppendElements(mVariationSettings);
|
||||
fe->mLanguageOverride = mLanguageOverride;
|
||||
@ -462,8 +470,9 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
currSrc.mSourceType + 1);
|
||||
return;
|
||||
} else {
|
||||
LOG(("userfonts (%p) [src %d] failed local: (%s) for (%s)\n", mFontSet,
|
||||
mCurrentSrcIndex, currSrc.mLocalName.get(), mFamilyName.get()));
|
||||
LOG(("userfonts (%p) [src %d] failed local: (%s) for (%s)\n",
|
||||
fontSet.get(), mCurrentSrcIndex, currSrc.mLocalName.get(),
|
||||
mFamilyName.get()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -494,7 +503,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
LOG(
|
||||
("userfonts (%p) [src %d] "
|
||||
"loaded uri from cache: (%s) for (%s)\n",
|
||||
mFontSet, mCurrentSrcIndex,
|
||||
fontSet.get(), mCurrentSrcIndex,
|
||||
currSrc.mURI->GetSpecOrDefault().get(), mFamilyName.get()));
|
||||
}
|
||||
return;
|
||||
@ -510,7 +519,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
|
||||
// record the principal we should use for the load for use when
|
||||
// creating a channel and when caching the loaded entry.
|
||||
mPrincipal = currSrc.LoadPrincipal(*mFontSet);
|
||||
mPrincipal = currSrc.LoadPrincipal(*fontSet);
|
||||
|
||||
bool loadDoesntSpin = !aForceAsync && currSrc.mURI->SyncLoadIsOK();
|
||||
|
||||
@ -520,7 +529,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
|
||||
// sync load font immediately
|
||||
nsresult rv =
|
||||
mFontSet->SyncLoadFontData(this, &currSrc, buffer, bufferLength);
|
||||
fontSet->SyncLoadFontData(this, &currSrc, buffer, bufferLength);
|
||||
|
||||
if (NS_SUCCEEDED(rv) &&
|
||||
LoadPlatformFontSync(mCurrentSrcIndex, buffer, bufferLength)) {
|
||||
@ -529,26 +538,26 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
currSrc.mSourceType + 1);
|
||||
return;
|
||||
} else {
|
||||
mFontSet->LogMessage(this, mCurrentSrcIndex, "font load failed",
|
||||
nsIScriptError::errorFlag, rv);
|
||||
fontSet->LogMessage(this, mCurrentSrcIndex, "font load failed",
|
||||
nsIScriptError::errorFlag, rv);
|
||||
}
|
||||
|
||||
} else {
|
||||
// otherwise load font async
|
||||
nsresult rv = mFontSet->StartLoad(this, mCurrentSrcIndex);
|
||||
nsresult rv = fontSet->StartLoad(this, mCurrentSrcIndex);
|
||||
bool loadOK = NS_SUCCEEDED(rv);
|
||||
|
||||
if (loadOK) {
|
||||
if (LOG_ENABLED()) {
|
||||
LOG(("userfonts (%p) [src %d] loading uri: (%s) for (%s)\n",
|
||||
mFontSet, mCurrentSrcIndex,
|
||||
fontSet.get(), mCurrentSrcIndex,
|
||||
currSrc.mURI->GetSpecOrDefault().get(), mFamilyName.get()));
|
||||
}
|
||||
return;
|
||||
} else {
|
||||
mFontSet->LogMessage(this, mCurrentSrcIndex,
|
||||
"failed to start download",
|
||||
nsIScriptError::errorFlag, rv);
|
||||
fontSet->LogMessage(this, mCurrentSrcIndex,
|
||||
"failed to start download",
|
||||
nsIScriptError::errorFlag, rv);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -577,8 +586,8 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
currSrc.mSourceType + 1);
|
||||
return;
|
||||
} else {
|
||||
mFontSet->LogMessage(this, mCurrentSrcIndex, "font load failed",
|
||||
nsIScriptError::errorFlag);
|
||||
fontSet->LogMessage(this, mCurrentSrcIndex, "font load failed",
|
||||
nsIScriptError::errorFlag);
|
||||
}
|
||||
}
|
||||
|
||||
@ -586,12 +595,12 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) {
|
||||
}
|
||||
|
||||
if (mUnsupportedFormat) {
|
||||
mFontSet->LogMessage(this, mCurrentSrcIndex, "no supported format found",
|
||||
nsIScriptError::warningFlag);
|
||||
fontSet->LogMessage(this, mCurrentSrcIndex, "no supported format found",
|
||||
nsIScriptError::warningFlag);
|
||||
}
|
||||
|
||||
// all src's failed; mark this entry as unusable (so fallback will occur)
|
||||
LOG(("userfonts (%p) failed all src for (%s)\n", mFontSet,
|
||||
LOG(("userfonts (%p) failed all src for (%s)\n", fontSet.get(),
|
||||
mFamilyName.get()));
|
||||
mFontDataLoadingState = LOADING_FAILED;
|
||||
SetLoadState(STATUS_FAILED);
|
||||
@ -658,22 +667,27 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex,
|
||||
uint32_t aSanitizedLength,
|
||||
nsTArray<OTSMessage>&& aMessages) {
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
RefPtr<gfxUserFontSet> fontSet = GetUserFontSet();
|
||||
if (NS_WARN_IF(!fontSet)) {
|
||||
free((void*)aOriginalFontData);
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const auto& msg : aMessages) {
|
||||
mFontSet->LogMessage(this, aSrcIndex, msg.mMessage.get(),
|
||||
msg.mLevel > 0 ? nsIScriptError::warningFlag
|
||||
: nsIScriptError::errorFlag);
|
||||
fontSet->LogMessage(this, aSrcIndex, msg.mMessage.get(),
|
||||
msg.mLevel > 0 ? nsIScriptError::warningFlag
|
||||
: nsIScriptError::errorFlag);
|
||||
}
|
||||
|
||||
if (!aSanitizedFontData) {
|
||||
mFontSet->LogMessage(this, aSrcIndex, "rejected by sanitizer");
|
||||
fontSet->LogMessage(this, aSrcIndex, "rejected by sanitizer");
|
||||
} else {
|
||||
// Check whether aSanitizedFontData is a known OpenType format; it might be
|
||||
// a TrueType Collection, which OTS would accept but we don't yet
|
||||
// know how to handle. If so, discard.
|
||||
if (gfxFontUtils::DetermineFontDataType(
|
||||
aSanitizedFontData, aSanitizedLength) != GFX_USERFONT_OPENTYPE) {
|
||||
mFontSet->LogMessage(this, aSrcIndex, "not a supported OpenType format");
|
||||
fontSet->LogMessage(this, aSrcIndex, "not a supported OpenType format");
|
||||
free((void*)aSanitizedFontData);
|
||||
aSanitizedFontData = nullptr;
|
||||
}
|
||||
@ -721,7 +735,7 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex,
|
||||
mName, Weight(), Stretch(), SlantStyle(), aSanitizedFontData,
|
||||
aSanitizedLength);
|
||||
if (!fe) {
|
||||
mFontSet->LogMessage(this, aSrcIndex, "not usable by platform");
|
||||
fontSet->LogMessage(this, aSrcIndex, "not usable by platform");
|
||||
}
|
||||
}
|
||||
|
||||
@ -755,15 +769,15 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex,
|
||||
fe->mDescentOverride = mDescentOverride;
|
||||
fe->mLineGapOverride = mLineGapOverride;
|
||||
fe->mSizeAdjust = mSizeAdjust;
|
||||
StoreUserFontData(fe, aSrcIndex, mFontSet->GetPrivateBrowsing(),
|
||||
StoreUserFontData(fe, aSrcIndex, fontSet->GetPrivateBrowsing(),
|
||||
originalFullName, &metadata, metaOrigLen, compression);
|
||||
if (LOG_ENABLED()) {
|
||||
LOG((
|
||||
"userfonts (%p) [src %d] loaded uri: (%s) for (%s) "
|
||||
"(%p) gen: %8.8x compress: %d%%\n",
|
||||
mFontSet, aSrcIndex,
|
||||
fontSet.get(), aSrcIndex,
|
||||
mSrcList[aSrcIndex].mURI->GetSpecOrDefault().get(), mFamilyName.get(),
|
||||
this, uint32_t(mFontSet->mGeneration), fontCompressionRatio));
|
||||
this, uint32_t(fontSet->mGeneration), fontCompressionRatio));
|
||||
}
|
||||
mPlatformFontEntry = fe;
|
||||
SetLoadState(STATUS_LOADED);
|
||||
@ -773,7 +787,7 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex,
|
||||
LOG(
|
||||
("userfonts (%p) [src %d] failed uri: (%s) for (%s)"
|
||||
" error making platform font\n",
|
||||
mFontSet, aSrcIndex,
|
||||
fontSet.get(), aSrcIndex,
|
||||
mSrcList[aSrcIndex].mURI->GetSpecOrDefault().get(),
|
||||
mFamilyName.get()));
|
||||
}
|
||||
@ -830,14 +844,17 @@ void gfxUserFontEntry::FontDataDownloadComplete(
|
||||
return;
|
||||
}
|
||||
|
||||
// download failed or font-display timeout passed
|
||||
if (mFontDataLoadingState == LOADING_TIMED_OUT) {
|
||||
mFontSet->LogMessage(this, aSrcIndex,
|
||||
"font-display timeout, webfont not used",
|
||||
nsIScriptError::infoFlag, aDownloadStatus);
|
||||
} else {
|
||||
mFontSet->LogMessage(this, aSrcIndex, "download failed",
|
||||
nsIScriptError::errorFlag, aDownloadStatus);
|
||||
RefPtr<gfxUserFontSet> fontSet = GetUserFontSet();
|
||||
if (fontSet) {
|
||||
// download failed or font-display timeout passed
|
||||
if (mFontDataLoadingState == LOADING_TIMED_OUT) {
|
||||
fontSet->LogMessage(this, aSrcIndex,
|
||||
"font-display timeout, webfont not used",
|
||||
nsIScriptError::infoFlag, aDownloadStatus);
|
||||
} else {
|
||||
fontSet->LogMessage(this, aSrcIndex, "download failed",
|
||||
nsIScriptError::errorFlag, aDownloadStatus);
|
||||
}
|
||||
}
|
||||
|
||||
if (aFontData) {
|
||||
@ -860,8 +877,12 @@ void gfxUserFontEntry::LoadPlatformFontAsync(
|
||||
// We hold a strong reference to the gfxUserFontSet during this work, since
|
||||
// the document might be closed while we are OMT, and release it at the end
|
||||
// of ContinuePlatformFontLoadOnMainThread.
|
||||
//
|
||||
// If the set has already been freed, then the loading will fail when we
|
||||
// resume on the main thread.
|
||||
|
||||
mFontSet->AddRef();
|
||||
MOZ_ASSERT(!mLoadingFontSet);
|
||||
mLoadingFontSet = GetUserFontSet();
|
||||
|
||||
nsCOMPtr<nsIRunnable> event =
|
||||
NewRunnableMethod<uint32_t, const uint8_t*, uint32_t,
|
||||
@ -893,7 +914,10 @@ void gfxUserFontEntry::ContinuePlatformFontLoadOnMainThread(
|
||||
FontLoadFailed(aCallback);
|
||||
}
|
||||
|
||||
mFontSet->Release(); // for the AddRef in LoadPlatformFontAsync
|
||||
// Set in LoadPlatformFontAsync. If it is null, then the font set should have
|
||||
// already been freed and we would not succeed in loading the font.
|
||||
MOZ_ASSERT_IF(loaded, mLoadingFontSet);
|
||||
mLoadingFontSet = nullptr;
|
||||
}
|
||||
|
||||
void gfxUserFontEntry::FontLoadFailed(nsIFontLoadCompleteCallback* aCallback) {
|
||||
@ -919,7 +943,10 @@ void gfxUserFontEntry::FontLoadFailed(nsIFontLoadCompleteCallback* aCallback) {
|
||||
void gfxUserFontEntry::GetUserFontSets(
|
||||
nsTArray<RefPtr<gfxUserFontSet>>& aResult) {
|
||||
aResult.Clear();
|
||||
aResult.AppendElement(mFontSet);
|
||||
RefPtr<gfxUserFontSet> fontSet = GetUserFontSet();
|
||||
if (fontSet) {
|
||||
aResult.AppendElement(std::move(fontSet));
|
||||
}
|
||||
}
|
||||
|
||||
gfxUserFontSet::gfxUserFontSet()
|
||||
@ -1260,19 +1287,23 @@ void gfxUserFontSet::UserFontCache::ForgetFont(gfxFontEntry* aFontEntry) {
|
||||
|
||||
gfxFontEntry* gfxUserFontSet::UserFontCache::GetFont(
|
||||
const gfxFontFaceSrc& aSrc, const gfxUserFontEntry& aUserFontEntry) {
|
||||
if (!sUserFonts || aUserFontEntry.mFontSet->BypassCache() ||
|
||||
if (!sUserFonts ||
|
||||
Preferences::GetBool("gfx.downloadable_fonts.disable_cache")) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
RefPtr<gfxUserFontSet> srcFontSet = aUserFontEntry.GetUserFontSet();
|
||||
if (NS_WARN_IF(!srcFontSet) || srcFontSet->BypassCache()) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Ignore principal when looking up a data: URI.
|
||||
RefPtr<gfxFontSrcPrincipal> principal =
|
||||
IgnorePrincipal(aSrc.mURI) ? nullptr
|
||||
: aSrc.LoadPrincipal(*aUserFontEntry.mFontSet);
|
||||
IgnorePrincipal(aSrc.mURI) ? nullptr : aSrc.LoadPrincipal(*srcFontSet);
|
||||
|
||||
Entry* entry = sUserFonts->GetEntry(
|
||||
Key(aSrc.mURI, principal, const_cast<gfxUserFontEntry*>(&aUserFontEntry),
|
||||
aUserFontEntry.mFontSet->GetPrivateBrowsing()));
|
||||
srcFontSet->GetPrivateBrowsing()));
|
||||
if (!entry) {
|
||||
return nullptr;
|
||||
}
|
||||
@ -1280,7 +1311,7 @@ gfxFontEntry* gfxUserFontSet::UserFontCache::GetFont(
|
||||
// We have to perform another content policy check here to prevent
|
||||
// cache poisoning. E.g. a.com loads a font into the cache but
|
||||
// b.com has a CSP not allowing any fonts to be loaded.
|
||||
if (!aUserFontEntry.mFontSet->IsFontLoadAllowed(aSrc)) {
|
||||
if (!srcFontSet->IsFontLoadAllowed(aSrc)) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
|
@ -56,7 +56,7 @@ enum class StyleFontDisplay : uint8_t;
|
||||
} // namespace mozilla
|
||||
class nsFontFaceLoader;
|
||||
|
||||
//#define DEBUG_USERFONT_CACHE
|
||||
// #define DEBUG_USERFONT_CACHE
|
||||
|
||||
class gfxFontFaceBufferSource {
|
||||
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(gfxFontFaceBufferSource)
|
||||
@ -561,7 +561,6 @@ class gfxUserFontEntry : public gfxFontEntry {
|
||||
};
|
||||
|
||||
gfxUserFontEntry(
|
||||
gfxUserFontSet* aFontSet,
|
||||
const nsTArray<gfxFontFaceSrc>& aFontFaceSrcList, WeightRange aWeight,
|
||||
StretchRange aStretch, SlantStyleRange aStyle,
|
||||
const nsTArray<gfxFontFeature>& aFeatureSettings,
|
||||
@ -603,6 +602,8 @@ class gfxUserFontEntry : public gfxFontEntry {
|
||||
UserFontLoadState LoadState() const { return mUserFontLoadState; }
|
||||
|
||||
void LoadCanceled() {
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
|
||||
mUserFontLoadState = STATUS_NOT_LOADED;
|
||||
mFontDataLoadingState = NOT_LOADING;
|
||||
mLoader = nullptr;
|
||||
@ -647,8 +648,16 @@ class gfxUserFontEntry : public gfxFontEntry {
|
||||
|
||||
// methods to expose some information to FontFaceSet::UserFontSet
|
||||
// since we can't make that class a friend
|
||||
void SetLoader(nsFontFaceLoader* aLoader) { mLoader = aLoader; }
|
||||
nsFontFaceLoader* GetLoader() const { return mLoader; }
|
||||
void SetLoader(nsFontFaceLoader* aLoader) {
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
mLoader = aLoader;
|
||||
}
|
||||
|
||||
nsFontFaceLoader* GetLoader() const {
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
return mLoader;
|
||||
}
|
||||
|
||||
gfxFontSrcPrincipal* GetPrincipal() const { return mPrincipal; }
|
||||
void GetFamilyNameAndURIForLogging(uint32_t aSrcIndex,
|
||||
nsACString& aFamilyName, nsACString& aURI);
|
||||
@ -658,9 +667,7 @@ class gfxUserFontEntry : public gfxFontEntry {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
gfxUserFontSet* GetUserFontSet() const { return mFontSet; }
|
||||
#endif
|
||||
virtual already_AddRefed<gfxUserFontSet> GetUserFontSet() const = 0;
|
||||
|
||||
const nsTArray<gfxFontFaceSrc>& SourceList() const { return mSrcList; }
|
||||
|
||||
@ -752,8 +759,8 @@ class gfxUserFontEntry : public gfxFontEntry {
|
||||
uint32_t aMetaOrigLen, uint8_t aCompression);
|
||||
|
||||
// Clears and then adds to aResult all of the user font sets that this user
|
||||
// font entry has been added to. This will at least include mFontSet, the
|
||||
// owner of this user font entry.
|
||||
// font entry has been added to. This will at least include the owner of this
|
||||
// user font entry.
|
||||
virtual void GetUserFontSets(nsTArray<RefPtr<gfxUserFontSet>>& aResult);
|
||||
|
||||
// Calls IncrementGeneration() on all user font sets that contain this
|
||||
@ -789,8 +796,7 @@ class gfxUserFontEntry : public gfxFontEntry {
|
||||
// Cancel() methods of nsFontFaceLoader this reference is nulled out.
|
||||
nsFontFaceLoader* MOZ_NON_OWNING_REF
|
||||
mLoader; // current loader for this entry, if any
|
||||
gfxUserFontSet* MOZ_NON_OWNING_REF
|
||||
mFontSet; // font-set which owns this userfont entry
|
||||
RefPtr<gfxUserFontSet> mLoadingFontSet;
|
||||
RefPtr<gfxFontSrcPrincipal> mPrincipal;
|
||||
};
|
||||
|
||||
|
@ -670,6 +670,23 @@ void ModuleNamespaceObject::ProxyHandler::finalize(JS::GCContext* gcx,
|
||||
class js::CyclicModuleFields {
|
||||
public:
|
||||
ModuleStatus status = ModuleStatus::Unlinked;
|
||||
|
||||
bool hasTopLevelAwait : 1;
|
||||
|
||||
private:
|
||||
// Flag bits that determine whether other fields are present.
|
||||
bool hasDfsIndex : 1;
|
||||
bool hasDfsAncestorIndex : 1;
|
||||
bool isAsyncEvaluating : 1;
|
||||
bool hasPendingAsyncDependencies : 1;
|
||||
|
||||
// Fields whose presence is conditional on the flag bits above.
|
||||
uint32_t dfsIndex = 0;
|
||||
uint32_t dfsAncestorIndex = 0;
|
||||
uint32_t asyncEvaluatingPostOrder = 0;
|
||||
uint32_t pendingAsyncDependencies = 0;
|
||||
|
||||
public:
|
||||
HeapPtr<Value> evaluationError;
|
||||
HeapPtr<JSObject*> metaObject;
|
||||
HeapPtr<ScriptSourceObject*> scriptSourceObject;
|
||||
@ -680,18 +697,37 @@ class js::CyclicModuleFields {
|
||||
HeapPtr<ArrayObject*> starExportEntries;
|
||||
IndirectBindingMap importBindings;
|
||||
UniquePtr<FunctionDeclarationVector> functionDeclarations;
|
||||
Maybe<uint32_t> dfsIndex;
|
||||
Maybe<uint32_t> dfsAncestorIndex;
|
||||
bool hasTopLevelAwait = false;
|
||||
Maybe<uint32_t> asyncEvaluatingPostOrder;
|
||||
HeapPtr<PromiseObject*> topLevelCapability;
|
||||
HeapPtr<ListObject*> asyncParentModules;
|
||||
Maybe<uint32_t> pendingAsyncDependencies;
|
||||
HeapPtr<ModuleObject*> cycleRoot;
|
||||
|
||||
public:
|
||||
CyclicModuleFields();
|
||||
|
||||
void trace(JSTracer* trc);
|
||||
|
||||
void setDfsIndex(uint32_t index);
|
||||
Maybe<uint32_t> maybeDfsIndex() const;
|
||||
void setDfsAncestorIndex(uint32_t index);
|
||||
Maybe<uint32_t> maybeDfsAncestorIndex() const;
|
||||
void clearDfsIndexes();
|
||||
|
||||
void setAsyncEvaluating(uint32_t postOrder);
|
||||
bool getIsAsyncEvaluating() const;
|
||||
Maybe<uint32_t> maybeAsyncEvaluatingPostOrder() const;
|
||||
void clearAsyncEvaluatingPostOrder();
|
||||
|
||||
void setPendingAsyncDependencies(uint32_t newValue);
|
||||
Maybe<uint32_t> maybePendingAsyncDependencies() const;
|
||||
};
|
||||
|
||||
CyclicModuleFields::CyclicModuleFields()
|
||||
: hasTopLevelAwait(false),
|
||||
hasDfsIndex(false),
|
||||
hasDfsAncestorIndex(false),
|
||||
isAsyncEvaluating(false),
|
||||
hasPendingAsyncDependencies(false) {}
|
||||
|
||||
void CyclicModuleFields::trace(JSTracer* trc) {
|
||||
TraceEdge(trc, &evaluationError, "CyclicModuleFields::evaluationError");
|
||||
TraceNullableEdge(trc, &metaObject, "CyclicModuleFields::metaObject");
|
||||
@ -706,12 +742,69 @@ void CyclicModuleFields::trace(JSTracer* trc) {
|
||||
"CyclicModuleFields::indirectExportEntries");
|
||||
TraceNullableEdge(trc, &starExportEntries,
|
||||
"CyclicModuleFields::starExportEntries");
|
||||
importBindings.trace(trc);
|
||||
TraceNullableEdge(trc, &topLevelCapability,
|
||||
"CyclicModuleFields::topLevelCapability");
|
||||
TraceNullableEdge(trc, &asyncParentModules,
|
||||
"CyclicModuleFields::asyncParentModules");
|
||||
TraceNullableEdge(trc, &cycleRoot, "CyclicModuleFields::cycleRoot");
|
||||
importBindings.trace(trc);
|
||||
}
|
||||
|
||||
void CyclicModuleFields::setDfsIndex(uint32_t index) {
|
||||
dfsIndex = index;
|
||||
hasDfsIndex = true;
|
||||
}
|
||||
|
||||
Maybe<uint32_t> CyclicModuleFields::maybeDfsIndex() const {
|
||||
return hasDfsIndex ? Some(dfsIndex) : Nothing();
|
||||
}
|
||||
|
||||
void CyclicModuleFields::setDfsAncestorIndex(uint32_t index) {
|
||||
dfsAncestorIndex = index;
|
||||
hasDfsAncestorIndex = true;
|
||||
}
|
||||
|
||||
Maybe<uint32_t> CyclicModuleFields::maybeDfsAncestorIndex() const {
|
||||
return hasDfsAncestorIndex ? Some(dfsAncestorIndex) : Nothing();
|
||||
}
|
||||
|
||||
void CyclicModuleFields::clearDfsIndexes() {
|
||||
dfsIndex = 0;
|
||||
hasDfsIndex = false;
|
||||
dfsAncestorIndex = 0;
|
||||
hasDfsAncestorIndex = false;
|
||||
}
|
||||
|
||||
void CyclicModuleFields::setAsyncEvaluating(uint32_t postOrder) {
|
||||
isAsyncEvaluating = true;
|
||||
asyncEvaluatingPostOrder = postOrder;
|
||||
}
|
||||
|
||||
bool CyclicModuleFields::getIsAsyncEvaluating() const {
|
||||
return isAsyncEvaluating;
|
||||
}
|
||||
|
||||
Maybe<uint32_t> CyclicModuleFields::maybeAsyncEvaluatingPostOrder() const {
|
||||
if (!isAsyncEvaluating ||
|
||||
asyncEvaluatingPostOrder == ASYNC_EVALUATING_POST_ORDER_CLEARED) {
|
||||
return Nothing();
|
||||
}
|
||||
|
||||
return Some(asyncEvaluatingPostOrder);
|
||||
}
|
||||
|
||||
void CyclicModuleFields::clearAsyncEvaluatingPostOrder() {
|
||||
asyncEvaluatingPostOrder = ASYNC_EVALUATING_POST_ORDER_CLEARED;
|
||||
}
|
||||
|
||||
void CyclicModuleFields::setPendingAsyncDependencies(uint32_t newValue) {
|
||||
pendingAsyncDependencies = newValue;
|
||||
hasPendingAsyncDependencies = true;
|
||||
}
|
||||
|
||||
Maybe<uint32_t> CyclicModuleFields::maybePendingAsyncDependencies() const {
|
||||
return hasPendingAsyncDependencies ? Some(pendingAsyncDependencies)
|
||||
: Nothing();
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
@ -851,7 +944,7 @@ void ModuleObject::initAsyncSlots(JSContext* cx, bool hasTopLevelAwait,
|
||||
|
||||
static uint32_t NextPostOrder(JSRuntime* rt) {
|
||||
uint32_t ordinal = rt->moduleAsyncEvaluatingPostOrder;
|
||||
MOZ_ASSERT(ordinal != ASYNC_EVALUATING_POST_ORDER_TRUE);
|
||||
MOZ_ASSERT(ordinal != ASYNC_EVALUATING_POST_ORDER_CLEARED);
|
||||
MOZ_ASSERT(ordinal < MAX_UINT32);
|
||||
rt->moduleAsyncEvaluatingPostOrder++;
|
||||
return ordinal;
|
||||
@ -870,8 +963,9 @@ static void MaybeResetPostOrderCounter(JSRuntime* rt,
|
||||
}
|
||||
|
||||
void ModuleObject::setAsyncEvaluating() {
|
||||
cyclicModuleFields()->asyncEvaluatingPostOrder =
|
||||
Some(NextPostOrder(runtimeFromMainThread()));
|
||||
MOZ_ASSERT(!isAsyncEvaluating());
|
||||
uint32_t postOrder = NextPostOrder(runtimeFromMainThread());
|
||||
cyclicModuleFields()->setAsyncEvaluating(postOrder);
|
||||
}
|
||||
|
||||
void ModuleObject::initScriptSlots(HandleScript script) {
|
||||
@ -1017,21 +1111,21 @@ bool ModuleObject::hasTopLevelAwait() const {
|
||||
}
|
||||
|
||||
bool ModuleObject::isAsyncEvaluating() const {
|
||||
return cyclicModuleFields()->asyncEvaluatingPostOrder.isSome();
|
||||
return cyclicModuleFields()->getIsAsyncEvaluating();
|
||||
}
|
||||
|
||||
Maybe<uint32_t> ModuleObject::maybeDfsIndex() const {
|
||||
return cyclicModuleFields()->dfsIndex;
|
||||
return cyclicModuleFields()->maybeDfsIndex();
|
||||
}
|
||||
|
||||
uint32_t ModuleObject::dfsIndex() const { return maybeDfsIndex().value(); }
|
||||
|
||||
void ModuleObject::setDfsIndex(uint32_t index) {
|
||||
cyclicModuleFields()->dfsIndex = Some(index);
|
||||
cyclicModuleFields()->setDfsIndex(index);
|
||||
}
|
||||
|
||||
Maybe<uint32_t> ModuleObject::maybeDfsAncestorIndex() const {
|
||||
return cyclicModuleFields()->dfsAncestorIndex;
|
||||
return cyclicModuleFields()->maybeDfsAncestorIndex();
|
||||
}
|
||||
|
||||
uint32_t ModuleObject::dfsAncestorIndex() const {
|
||||
@ -1039,12 +1133,11 @@ uint32_t ModuleObject::dfsAncestorIndex() const {
|
||||
}
|
||||
|
||||
void ModuleObject::setDfsAncestorIndex(uint32_t index) {
|
||||
cyclicModuleFields()->dfsAncestorIndex = Some(index);
|
||||
cyclicModuleFields()->setDfsAncestorIndex(index);
|
||||
}
|
||||
|
||||
void ModuleObject::clearDfsIndexes() {
|
||||
cyclicModuleFields()->dfsIndex = Nothing();
|
||||
cyclicModuleFields()->dfsAncestorIndex = Nothing();
|
||||
cyclicModuleFields()->clearDfsIndexes();
|
||||
}
|
||||
|
||||
PromiseObject* ModuleObject::maybeTopLevelCapability() const {
|
||||
@ -1088,25 +1181,19 @@ bool ModuleObject::appendAsyncParentModule(JSContext* cx,
|
||||
}
|
||||
|
||||
Maybe<uint32_t> ModuleObject::maybePendingAsyncDependencies() const {
|
||||
return cyclicModuleFields()->pendingAsyncDependencies;
|
||||
return cyclicModuleFields()->maybePendingAsyncDependencies();
|
||||
}
|
||||
|
||||
uint32_t ModuleObject::pendingAsyncDependencies() const {
|
||||
return maybePendingAsyncDependencies().value();
|
||||
}
|
||||
|
||||
bool ModuleObject::hasAsyncEvaluatingPostOrder() const {
|
||||
Maybe<uint32_t> value = cyclicModuleFields()->asyncEvaluatingPostOrder;
|
||||
return value.isSome() && *value != ASYNC_EVALUATING_POST_ORDER_TRUE;
|
||||
}
|
||||
|
||||
Maybe<uint32_t> ModuleObject::maybeAsyncEvaluatingPostOrder() const {
|
||||
return cyclicModuleFields()->asyncEvaluatingPostOrder;
|
||||
return cyclicModuleFields()->maybeAsyncEvaluatingPostOrder();
|
||||
}
|
||||
|
||||
uint32_t ModuleObject::getAsyncEvaluatingPostOrder() const {
|
||||
MOZ_ASSERT(hasAsyncEvaluatingPostOrder());
|
||||
return maybeAsyncEvaluatingPostOrder().value();
|
||||
return cyclicModuleFields()->maybeAsyncEvaluatingPostOrder().value();
|
||||
}
|
||||
|
||||
void ModuleObject::clearAsyncEvaluatingPostOrder() {
|
||||
@ -1115,12 +1202,11 @@ void ModuleObject::clearAsyncEvaluatingPostOrder() {
|
||||
JSRuntime* rt = runtimeFromMainThread();
|
||||
MaybeResetPostOrderCounter(rt, getAsyncEvaluatingPostOrder());
|
||||
|
||||
cyclicModuleFields()->asyncEvaluatingPostOrder =
|
||||
Some(ASYNC_EVALUATING_POST_ORDER_TRUE);
|
||||
cyclicModuleFields()->clearAsyncEvaluatingPostOrder();
|
||||
}
|
||||
|
||||
void ModuleObject::setPendingAsyncDependencies(uint32_t newValue) {
|
||||
cyclicModuleFields()->pendingAsyncDependencies = Some(newValue);
|
||||
cyclicModuleFields()->setPendingAsyncDependencies(newValue);
|
||||
}
|
||||
|
||||
void ModuleObject::setCycleRoot(ModuleObject* cycleRoot) {
|
||||
|
@ -250,7 +250,7 @@ class ModuleNamespaceObject : public ProxyObject {
|
||||
|
||||
// Value types of [[Status]] in a Cyclic Module Record
|
||||
// https://tc39.es/ecma262/#table-cyclic-module-fields
|
||||
enum class ModuleStatus : int32_t {
|
||||
enum class ModuleStatus : int8_t {
|
||||
Unlinked,
|
||||
Linking,
|
||||
Linked,
|
||||
@ -265,30 +265,26 @@ enum class ModuleStatus : int32_t {
|
||||
Evaluated_Error
|
||||
};
|
||||
|
||||
// Special values for ModuleObject's AsyncEvaluatingPostOrderSlot slot, which is
|
||||
// used to implement the AsyncEvaluation field of cyclic module records.
|
||||
// Special values for CyclicModuleFields' asyncEvaluatingPostOrderSlot field,
|
||||
// which is used as part of the implementation of the AsyncEvaluation field of
|
||||
// cyclic module records.
|
||||
//
|
||||
// The spec requires us to distinguish true, false, and 'never previously set to
|
||||
// true', as well as the order in which the field was set to true for async
|
||||
// evaluating modules.
|
||||
// The spec requires us to be able to tell the order in which the field was set
|
||||
// to true for async evaluating modules.
|
||||
//
|
||||
// This is arranged by using an integer to record the order. Undefined is used
|
||||
// to mean false and any integer value true. While a module is async evaluating
|
||||
// the integer value gives the order that the field was set to true. After
|
||||
// evaluation is complete the value is set to ASYNC_EVALUATING_POST_ORDER_TRUE,
|
||||
// which still signifies true but loses the order information.
|
||||
// This is arranged by using an integer to record the order. After evaluation is
|
||||
// complete the value is set to ASYNC_EVALUATING_POST_ORDER_CLEARED.
|
||||
//
|
||||
// See https://tc39.es/ecma262/#sec-cyclic-module-records for field defintion.
|
||||
// See https://tc39.es/ecma262/#sec-async-module-execution-fulfilled for sort
|
||||
// requirement.
|
||||
|
||||
// True value that also indicates that the field was previously true.
|
||||
constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_TRUE = 0;
|
||||
|
||||
// Initial value for the runtime's counter used to generate these values; the
|
||||
// first non-false value.
|
||||
// Initial value for the runtime's counter used to generate these values.
|
||||
constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_INIT = 1;
|
||||
|
||||
// Value that the field is set to after being cleared.
|
||||
constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_CLEARED = 0;
|
||||
|
||||
class ModuleObject : public NativeObject {
|
||||
public:
|
||||
// Module fields including those for AbstractModuleRecords described by:
|
||||
@ -365,7 +361,6 @@ class ModuleObject : public NativeObject {
|
||||
ListObject* asyncParentModules() const;
|
||||
mozilla::Maybe<uint32_t> maybePendingAsyncDependencies() const;
|
||||
uint32_t pendingAsyncDependencies() const;
|
||||
bool hasAsyncEvaluatingPostOrder() const;
|
||||
mozilla::Maybe<uint32_t> maybeAsyncEvaluatingPostOrder() const;
|
||||
uint32_t getAsyncEvaluatingPostOrder() const;
|
||||
void clearAsyncEvaluatingPostOrder();
|
||||
|
@ -34,7 +34,7 @@ const StatusEvaluated = 5;
|
||||
drainJobQueue();
|
||||
assertEq(m.isAsyncEvaluating, true);
|
||||
assertEq(m.status, StatusEvaluated);
|
||||
assertEq(m.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(m.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -50,7 +50,7 @@ const StatusEvaluated = 5;
|
||||
assertEq(m.isAsyncEvaluating, true);
|
||||
assertEq(m.status, StatusEvaluated);
|
||||
assertEq(m.evaluationError, 2);
|
||||
assertEq(m.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(m.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -65,7 +65,7 @@ const StatusEvaluated = 5;
|
||||
assertEq(m.isAsyncEvaluating, true);
|
||||
assertEq(m.status, StatusEvaluated);
|
||||
assertEq(m.evaluationError, 1);
|
||||
assertEq(m.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(m.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -86,7 +86,7 @@ const StatusEvaluated = 5;
|
||||
assertEq(a.status, StatusEvaluated);
|
||||
assertEq(b.isAsyncEvaluating, true);
|
||||
assertEq(b.status, StatusEvaluated);
|
||||
assertEq(b.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(b.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -106,10 +106,10 @@ const StatusEvaluated = 5;
|
||||
drainJobQueue();
|
||||
assertEq(a.isAsyncEvaluating, true);
|
||||
assertEq(a.status, StatusEvaluated);
|
||||
assertEq(a.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(a.asyncEvaluatingPostOrder, undefined);
|
||||
assertEq(b.isAsyncEvaluating, true);
|
||||
assertEq(b.status, StatusEvaluated);
|
||||
assertEq(b.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(b.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -136,13 +136,13 @@ const StatusEvaluated = 5;
|
||||
drainJobQueue();
|
||||
assertEq(a.isAsyncEvaluating, true);
|
||||
assertEq(a.status, StatusEvaluated);
|
||||
assertEq(a.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(a.asyncEvaluatingPostOrder, undefined);
|
||||
assertEq(b.isAsyncEvaluating, true);
|
||||
assertEq(b.status, StatusEvaluated);
|
||||
assertEq(b.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(b.asyncEvaluatingPostOrder, undefined);
|
||||
assertEq(c.isAsyncEvaluating, true);
|
||||
assertEq(c.status, StatusEvaluated);
|
||||
assertEq(c.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(c.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -176,11 +176,11 @@ const StatusEvaluated = 5;
|
||||
assertEq(a.isAsyncEvaluating, true);
|
||||
assertEq(a.status, StatusEvaluated);
|
||||
assertEq(a.evaluationError, 1);
|
||||
assertEq(a.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(a.asyncEvaluatingPostOrder, undefined);
|
||||
assertEq(b.isAsyncEvaluating, true);
|
||||
assertEq(b.status, StatusEvaluated);
|
||||
assertEq(b.evaluationError, 1);
|
||||
assertEq(b.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(b.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
||||
{
|
||||
@ -199,9 +199,9 @@ const StatusEvaluated = 5;
|
||||
assertEq(a.isAsyncEvaluating, true);
|
||||
assertEq(a.status, StatusEvaluated);
|
||||
assertEq(a.evaluationError, 2);
|
||||
assertEq(a.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(a.asyncEvaluatingPostOrder, undefined);
|
||||
assertEq(b.isAsyncEvaluating, true);
|
||||
assertEq(b.status, StatusEvaluated);
|
||||
assertEq(b.evaluationError, 2);
|
||||
assertEq(b.asyncEvaluatingPostOrder, 0);
|
||||
assertEq(b.asyncEvaluatingPostOrder, undefined);
|
||||
}
|
||||
|
@ -156,7 +156,7 @@ add_test(function test_categoryRegistration()
|
||||
const XULAPPINFO_CID = Components.ID("{fc937916-656b-4fb3-a395-8c63569e27a8}");
|
||||
|
||||
// Create a fake app entry for our category registration apps filter.
|
||||
let { newAppInfo } = ChromeUtils.import("resource://testing-common/AppInfo.jsm");
|
||||
let { newAppInfo } = ChromeUtils.importESModule("resource://testing-common/AppInfo.sys.mjs");
|
||||
let XULAppInfo = newAppInfo({
|
||||
name: "catRegTest",
|
||||
ID: "{adb42a9a-0d19-4849-bf4d-627614ca19be}",
|
||||
|
@ -3,7 +3,7 @@
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
# Frame class definitions, used to generate FrameIdList.h and FrameTypeList.h
|
||||
|
||||
from FrameClass import Frame, AbstractFrame, LEAF, NOT_LEAF, DYNAMIC_LEAF
|
||||
from FrameClass import DYNAMIC_LEAF, LEAF, NOT_LEAF, AbstractFrame, Frame
|
||||
|
||||
FRAME_CLASSES = [
|
||||
Frame("BRFrame", "Br", LEAF),
|
||||
|
@ -4,7 +4,6 @@
|
||||
|
||||
from FrameClasses import FRAME_CLASSES
|
||||
|
||||
|
||||
HEADER = "// THIS IS AUTOGENERATED BY GenerateFrameLists.py. DO NOT EDIT\n"
|
||||
|
||||
|
||||
|
@ -519,14 +519,12 @@ void FontFaceImpl::SetUserFontEntry(gfxUserFontEntry* aEntry) {
|
||||
}
|
||||
|
||||
if (mUserFontEntry) {
|
||||
MutexAutoLock lock(mUserFontEntry->mMutex);
|
||||
mUserFontEntry->mFontFaces.RemoveElement(this);
|
||||
mUserFontEntry->RemoveFontFace(this);
|
||||
}
|
||||
|
||||
auto* entry = static_cast<Entry*>(aEntry);
|
||||
if (entry) {
|
||||
MutexAutoLock lock(entry->mMutex);
|
||||
entry->mFontFaces.AppendElement(this);
|
||||
entry->AddFontFace(this);
|
||||
}
|
||||
|
||||
mUserFontEntry = entry;
|
||||
@ -535,7 +533,7 @@ void FontFaceImpl::SetUserFontEntry(gfxUserFontEntry* aEntry) {
|
||||
return;
|
||||
}
|
||||
|
||||
MOZ_ASSERT(mUserFontEntry->GetUserFontSet() == mFontFaceSet,
|
||||
MOZ_ASSERT(mUserFontEntry->HasUserFontSet(mFontFaceSet),
|
||||
"user font entry must be associated with the same user font set "
|
||||
"as the FontFace");
|
||||
|
||||
@ -700,6 +698,11 @@ void FontFaceImpl::RemoveFontFaceSet(FontFaceSetImpl* aFontFaceSet) {
|
||||
} else {
|
||||
mOtherFontFaceSets.RemoveElement(aFontFaceSet);
|
||||
}
|
||||
|
||||
// The caller should be holding a strong reference to the FontFaceSetImpl.
|
||||
if (mUserFontEntry) {
|
||||
mUserFontEntry->CheckUserFontSet();
|
||||
}
|
||||
}
|
||||
|
||||
gfxCharacterMap* FontFaceImpl::GetUnicodeRangeAsCharacterMap() {
|
||||
@ -766,6 +769,11 @@ void FontFaceImpl::Entry::GetUserFontSets(
|
||||
MutexAutoLock lock(mMutex);
|
||||
|
||||
aResult.Clear();
|
||||
|
||||
if (mFontSet) {
|
||||
aResult.AppendElement(mFontSet);
|
||||
}
|
||||
|
||||
for (FontFaceImpl* f : mFontFaces) {
|
||||
if (f->mInFontFaceSet) {
|
||||
aResult.AppendElement(f->mFontFaceSet);
|
||||
@ -781,6 +789,40 @@ void FontFaceImpl::Entry::GetUserFontSets(
|
||||
aResult.TruncateLength(it - aResult.begin());
|
||||
}
|
||||
|
||||
/* virtual */ already_AddRefed<gfxUserFontSet>
|
||||
FontFaceImpl::Entry::GetUserFontSet() const {
|
||||
MutexAutoLock lock(mMutex);
|
||||
if (mFontSet) {
|
||||
return do_AddRef(mFontSet);
|
||||
}
|
||||
if (NS_IsMainThread() && mLoadingFontSet) {
|
||||
return do_AddRef(mLoadingFontSet);
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
void FontFaceImpl::Entry::CheckUserFontSetLocked() {
|
||||
// If this is the last font containing a strong reference to the set, we need
|
||||
// to clear the reference as there is no longer anything guaranteeing the set
|
||||
// will be kept alive.
|
||||
if (mFontSet) {
|
||||
auto* set = static_cast<FontFaceSetImpl*>(mFontSet);
|
||||
for (FontFaceImpl* f : mFontFaces) {
|
||||
if (f->mFontFaceSet == set || f->mOtherFontFaceSets.Contains(set)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If possible, promote the most recently added FontFace and its owning
|
||||
// FontFaceSetImpl as the primary set.
|
||||
if (!mFontFaces.IsEmpty()) {
|
||||
mFontSet = mFontFaces.LastElement()->mFontFaceSet;
|
||||
} else {
|
||||
mFontSet = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void FontFaceImpl::Entry::FindFontFaceOwners(nsTHashSet<FontFace*>& aOwners) {
|
||||
MutexAutoLock lock(mMutex);
|
||||
for (FontFaceImpl* f : mFontFaces) {
|
||||
@ -790,5 +832,17 @@ void FontFaceImpl::Entry::FindFontFaceOwners(nsTHashSet<FontFace*>& aOwners) {
|
||||
}
|
||||
}
|
||||
|
||||
void FontFaceImpl::Entry::AddFontFace(FontFaceImpl* aFontFace) {
|
||||
MutexAutoLock lock(mMutex);
|
||||
mFontFaces.AppendElement(aFontFace);
|
||||
CheckUserFontSetLocked();
|
||||
}
|
||||
|
||||
void FontFaceImpl::Entry::RemoveFontFace(FontFaceImpl* aFontFace) {
|
||||
MutexAutoLock lock(mMutex);
|
||||
mFontFaces.RemoveElement(aFontFace);
|
||||
CheckUserFontSetLocked();
|
||||
}
|
||||
|
||||
} // namespace dom
|
||||
} // namespace mozilla
|
||||
|
@ -56,19 +56,41 @@ class FontFaceImpl final {
|
||||
StyleFontDisplay aFontDisplay, RangeFlags aRangeFlags,
|
||||
float aAscentOverride, float aDescentOverride, float aLineGapOverride,
|
||||
float aSizeAdjust)
|
||||
: gfxUserFontEntry(aFontSet, aFontFaceSrcList, aWeight, aStretch,
|
||||
aStyle, aFeatureSettings, aVariationSettings,
|
||||
: gfxUserFontEntry(aFontFaceSrcList, aWeight, aStretch, aStyle,
|
||||
aFeatureSettings, aVariationSettings,
|
||||
aLanguageOverride, aUnicodeRanges, aFontDisplay,
|
||||
aRangeFlags, aAscentOverride, aDescentOverride,
|
||||
aLineGapOverride, aSizeAdjust),
|
||||
mMutex("FontFaceImpl::Entry::mMutex") {}
|
||||
mMutex("FontFaceImpl::Entry::mMutex"),
|
||||
mFontSet(aFontSet) {}
|
||||
|
||||
void SetLoadState(UserFontLoadState aLoadState) override;
|
||||
void GetUserFontSets(nsTArray<RefPtr<gfxUserFontSet>>& aResult) override;
|
||||
already_AddRefed<gfxUserFontSet> GetUserFontSet() const override;
|
||||
|
||||
void CheckUserFontSet() {
|
||||
MutexAutoLock lock(mMutex);
|
||||
CheckUserFontSetLocked();
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
bool HasUserFontSet(gfxUserFontSet* aFontSet) const {
|
||||
MutexAutoLock lock(mMutex);
|
||||
return mFontSet == aFontSet;
|
||||
}
|
||||
#endif
|
||||
|
||||
void AddFontFace(FontFaceImpl* aOwner);
|
||||
void RemoveFontFace(FontFaceImpl* aOwner);
|
||||
void FindFontFaceOwners(nsTHashSet<FontFace*>& aOwners);
|
||||
|
||||
protected:
|
||||
Mutex mMutex;
|
||||
void CheckUserFontSetLocked() MOZ_REQUIRES(mMutex);
|
||||
|
||||
mutable Mutex mMutex;
|
||||
|
||||
// Font set which owns this entry;
|
||||
gfxUserFontSet* MOZ_NON_OWNING_REF mFontSet;
|
||||
|
||||
// The FontFace objects that use this user font entry. We need to store
|
||||
// an array of these, not just a single pointer, since the user font
|
||||
|
@ -53,7 +53,7 @@ reftest.jar:
|
||||
res/ReftestFissionChild.jsm (ReftestFissionChild.jsm)
|
||||
res/AsyncSpellCheckTestHelper.jsm (../../../editor/AsyncSpellCheckTestHelper.jsm)
|
||||
res/httpd.jsm (../../../netwerk/test/httpserver/httpd.js)
|
||||
res/StructuredLog.jsm (../../../testing/modules/StructuredLog.jsm)
|
||||
res/StructuredLog.sys.mjs (../../../testing/modules/StructuredLog.sys.mjs)
|
||||
res/PerTestCoverageUtils.jsm (../../../tools/code-coverage/PerTestCoverageUtils.jsm)
|
||||
res/input.css (../../../editor/reftests/xul/input.css)
|
||||
res/progress.css (../../../layout/reftests/forms/progress/style.css)
|
||||
|
@ -2,22 +2,16 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from argparse import Namespace
|
||||
|
||||
from mozbuild.base import (
|
||||
MachCommandConditions as conditions,
|
||||
MozbuildObject,
|
||||
)
|
||||
|
||||
from mach.decorators import (
|
||||
Command,
|
||||
)
|
||||
|
||||
from mach.decorators import Command
|
||||
from mozbuild.base import MachCommandConditions as conditions
|
||||
from mozbuild.base import MozbuildObject
|
||||
|
||||
parser = None
|
||||
|
||||
@ -279,8 +273,8 @@ def _run_reftest(command_context, **kwargs):
|
||||
reftest.log_manager.enable_unstructured()
|
||||
if conditions.is_android(command_context):
|
||||
from mozrunner.devices.android_device import (
|
||||
verify_android_device,
|
||||
InstallIntent,
|
||||
verify_android_device,
|
||||
)
|
||||
|
||||
install = InstallIntent.NO if kwargs.get("no_install") else InstallIntent.YES
|
||||
|
@ -2,16 +2,14 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
from argparse import Namespace
|
||||
from functools import partial
|
||||
|
||||
from mach.decorators import (
|
||||
Command,
|
||||
)
|
||||
from mach.decorators import Command
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
logger = None
|
||||
|
@ -48,8 +48,8 @@ const { HttpServer } = ChromeUtils.import("resource://reftest/httpd.jsm");
|
||||
const { ReadTopManifest, CreateUrls } = ChromeUtils.import(
|
||||
"resource://reftest/manifest.jsm"
|
||||
);
|
||||
const { StructuredLogger } = ChromeUtils.import(
|
||||
"resource://reftest/StructuredLog.jsm"
|
||||
const { StructuredLogger } = ChromeUtils.importESModule(
|
||||
"resource://reftest/StructuredLog.sys.mjs"
|
||||
);
|
||||
const { PerTestCoverageUtils } = ChromeUtils.import(
|
||||
"resource://reftest/PerTestCoverageUtils.jsm"
|
||||
@ -915,7 +915,7 @@ function DoneTests()
|
||||
g.suiteStarted = false
|
||||
logger.suiteEnd({'results': g.testResults});
|
||||
} else {
|
||||
logger._logData('results', {results: g.testResults});
|
||||
logger.logData('results', {results: g.testResults});
|
||||
}
|
||||
logger.info("Slowest test took " + g.slowestTestTime + "ms (" + g.slowestTestURL + ")");
|
||||
logger.info("Total canvas count = " + g.recycledCanvases.length);
|
||||
|
@ -2,11 +2,12 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals, absolute_import, print_function
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
|
||||
import six
|
||||
|
||||
RE_COMMENT = re.compile(r"\s+#")
|
||||
|
@ -4,9 +4,9 @@ import argparse
|
||||
import os
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
|
||||
import mozinfo
|
||||
import mozlog
|
||||
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
@ -16,14 +16,12 @@ import time
|
||||
import traceback
|
||||
from contextlib import closing
|
||||
|
||||
from six.moves.urllib_request import urlopen
|
||||
|
||||
from mozdevice import ADBDeviceFactory, RemoteProcessMonitor
|
||||
import mozcrash
|
||||
|
||||
import reftestcommandline
|
||||
from mozdevice import ADBDeviceFactory, RemoteProcessMonitor
|
||||
from output import OutputHandler
|
||||
from runreftest import RefTest, ReftestResolver, build_obj
|
||||
import reftestcommandline
|
||||
from six.moves.urllib_request import urlopen
|
||||
|
||||
# We need to know our current directory so that we can serve our test files from it.
|
||||
SCRIPT_DIRECTORY = os.path.abspath(os.path.realpath(os.path.dirname(__file__)))
|
||||
|
@ -5,8 +5,6 @@
|
||||
"""
|
||||
Runs the reftest test harness.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import json
|
||||
@ -37,9 +35,10 @@ import mozlog
|
||||
import mozprocess
|
||||
import mozprofile
|
||||
import mozrunner
|
||||
from manifestparser import TestManifest, filters as mpf
|
||||
from manifestparser import TestManifest
|
||||
from manifestparser import filters as mpf
|
||||
from mozrunner.utils import get_stack_fixer_function, test_environment
|
||||
from mozscreenshot import printstatus, dump_screen
|
||||
from mozscreenshot import dump_screen, printstatus
|
||||
from six import reraise, string_types
|
||||
from six.moves import range
|
||||
|
||||
@ -57,8 +56,8 @@ except ImportError as e: # noqa
|
||||
|
||||
Marionette = reraise_
|
||||
|
||||
from output import OutputHandler, ReftestFormatter
|
||||
import reftestcommandline
|
||||
from output import OutputHandler, ReftestFormatter
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
@ -12,14 +12,14 @@ try:
|
||||
except ImportError:
|
||||
# Python3
|
||||
from io import StringIO
|
||||
|
||||
from functools import partial
|
||||
|
||||
import mozunit
|
||||
import pytest
|
||||
from moztest.selftest.output import get_mozharness_status, filter_action
|
||||
|
||||
from mozharness.base.log import INFO, WARNING, ERROR
|
||||
from mozharness.mozilla.automation import TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE
|
||||
from mozharness.base.log import ERROR, INFO, WARNING
|
||||
from mozharness.mozilla.automation import TBPL_FAILURE, TBPL_SUCCESS, TBPL_WARNING
|
||||
from moztest.selftest.output import filter_action, get_mozharness_status
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
get_mozharness_status = partial(get_mozharness_status, "reftest")
|
||||
|
@ -1,3 +1,38 @@
|
||||
2.1.4
|
||||
=====
|
||||
|
||||
### Significant changes relative to 2.1.3
|
||||
|
||||
1. Fixed a regression introduced in 2.1.3 that caused build failures with
|
||||
Visual Studio 2010.
|
||||
|
||||
2. The `tjDecompressHeader3()` function in the TurboJPEG C API and the
|
||||
`TJDecompressor.setSourceImage()` method in the TurboJPEG Java API now accept
|
||||
"abbreviated table specification" (AKA "tables-only") datastreams, which can be
|
||||
used to prime the decompressor with quantization and Huffman tables that can be
|
||||
used when decompressing subsequent "abbreviated image" datastreams.
|
||||
|
||||
3. libjpeg-turbo now performs run-time detection of AltiVec instructions on
|
||||
OS X/PowerPC systems if AltiVec instructions are not enabled at compile time.
|
||||
This allows both AltiVec-equipped (PowerPC G4 and G5) and non-AltiVec-equipped
|
||||
(PowerPC G3) CPUs to be supported using the same build of libjpeg-turbo.
|
||||
|
||||
4. Fixed an error ("Bogus virtual array access") that occurred when attempting
|
||||
to decompress a progressive JPEG image with a height less than or equal to one
|
||||
iMCU (8 * the vertical sampling factor) using buffered-image mode with
|
||||
interblock smoothing enabled. This was a regression introduced by
|
||||
2.1 beta1[6(b)].
|
||||
|
||||
5. Fixed two issues that prevented partial image decompression from working
|
||||
properly with buffered-image mode:
|
||||
|
||||
- Attempting to call `jpeg_crop_scanline()` after
|
||||
`jpeg_start_decompress()` but before `jpeg_start_output()` resulted in an error
|
||||
("Improper call to JPEG library in state 207".)
|
||||
- Attempting to use `jpeg_skip_scanlines()` resulted in an error ("Bogus
|
||||
virtual array access") under certain circumstances.
|
||||
|
||||
|
||||
2.1.3
|
||||
=====
|
||||
|
||||
|
@ -48,6 +48,10 @@ To upgrade to a new revision of libjpeg-turbo, do the following:
|
||||
|
||||
$ hg addremove
|
||||
|
||||
== November 10, 2022 (libjpeg-turbo v2.1.4 8162eddf041e0be26f5c671bb6528723c55fed9d 2022-08-12) ==
|
||||
|
||||
* Updated to v2.1.4 release.
|
||||
|
||||
== February 28, 2022 (libjpeg-turbo v2.1.3 c5f269eb9665435271c05fbcaf8721fa58e9eafa 2022-02-25) ==
|
||||
|
||||
* Updated to v2.1.3 release.
|
||||
|
@ -159,9 +159,12 @@ jpeg_crop_scanline(j_decompress_ptr cinfo, JDIMENSION *xoffset,
|
||||
JDIMENSION input_xoffset;
|
||||
boolean reinit_upsampler = FALSE;
|
||||
jpeg_component_info *compptr;
|
||||
#ifdef UPSAMPLE_MERGING_SUPPORTED
|
||||
my_master_ptr master = (my_master_ptr)cinfo->master;
|
||||
#endif
|
||||
|
||||
if (cinfo->global_state != DSTATE_SCANNING || cinfo->output_scanline != 0)
|
||||
if ((cinfo->global_state != DSTATE_SCANNING &&
|
||||
cinfo->global_state != DSTATE_BUFIMAGE) || cinfo->output_scanline != 0)
|
||||
ERREXIT1(cinfo, JERR_BAD_STATE, cinfo->global_state);
|
||||
|
||||
if (!xoffset || !width)
|
||||
@ -209,11 +212,13 @@ jpeg_crop_scanline(j_decompress_ptr cinfo, JDIMENSION *xoffset,
|
||||
*/
|
||||
*width = *width + input_xoffset - *xoffset;
|
||||
cinfo->output_width = *width;
|
||||
#ifdef UPSAMPLE_MERGING_SUPPORTED
|
||||
if (master->using_merged_upsample && cinfo->max_v_samp_factor == 2) {
|
||||
my_merged_upsample_ptr upsample = (my_merged_upsample_ptr)cinfo->upsample;
|
||||
upsample->out_row_width =
|
||||
cinfo->output_width * cinfo->out_color_components;
|
||||
}
|
||||
#endif
|
||||
|
||||
/* Set the first and last iMCU columns that we must decompress. These values
|
||||
* will be used in single-scan decompressions.
|
||||
@ -324,7 +329,9 @@ LOCAL(void)
|
||||
read_and_discard_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines)
|
||||
{
|
||||
JDIMENSION n;
|
||||
#ifdef UPSAMPLE_MERGING_SUPPORTED
|
||||
my_master_ptr master = (my_master_ptr)cinfo->master;
|
||||
#endif
|
||||
JSAMPLE dummy_sample[1] = { 0 };
|
||||
JSAMPROW dummy_row = dummy_sample;
|
||||
JSAMPARRAY scanlines = NULL;
|
||||
@ -348,10 +355,12 @@ read_and_discard_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines)
|
||||
cinfo->cquantize->color_quantize = noop_quantize;
|
||||
}
|
||||
|
||||
#ifdef UPSAMPLE_MERGING_SUPPORTED
|
||||
if (master->using_merged_upsample && cinfo->max_v_samp_factor == 2) {
|
||||
my_merged_upsample_ptr upsample = (my_merged_upsample_ptr)cinfo->upsample;
|
||||
scanlines = &upsample->spare_row;
|
||||
}
|
||||
#endif
|
||||
|
||||
for (n = 0; n < num_lines; n++)
|
||||
jpeg_read_scanlines(cinfo, scanlines, 1);
|
||||
@ -517,7 +526,7 @@ jpeg_skip_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines)
|
||||
* all of the entropy decoding occurs in jpeg_start_decompress(), assuming
|
||||
* that the input data source is non-suspending. This makes skipping easy.
|
||||
*/
|
||||
if (cinfo->inputctl->has_multiple_scans) {
|
||||
if (cinfo->inputctl->has_multiple_scans || cinfo->buffered_image) {
|
||||
if (cinfo->upsample->need_context_rows) {
|
||||
cinfo->output_scanline += lines_to_skip;
|
||||
cinfo->output_iMCU_row += lines_to_skip / lines_per_iMCU_row;
|
||||
|
@ -5,7 +5,7 @@
* Copyright (C) 1994-1997, Thomas G. Lane.
* libjpeg-turbo Modifications:
* Copyright 2009 Pierre Ossman <ossman@cendio.se> for Cendio AB
* Copyright (C) 2010, 2015-2016, 2019-2020, D. R. Commander.
* Copyright (C) 2010, 2015-2016, 2019-2020, 2022, D. R. Commander.
* Copyright (C) 2015, 2020, Google, Inc.
* For conditions of distribution and use, see the accompanying README.ijg
* file.

@ -475,7 +475,7 @@ decompress_smooth_data(j_decompress_ptr cinfo, JSAMPIMAGE output_buf)
if (!compptr->component_needed)
continue;
/* Count non-dummy DCT block rows in this iMCU row. */
if (cinfo->output_iMCU_row < last_iMCU_row - 1) {
if (cinfo->output_iMCU_row + 1 < last_iMCU_row) {
block_rows = compptr->v_samp_factor;
access_rows = block_rows * 3; /* this and next two iMCU rows */
} else if (cinfo->output_iMCU_row < last_iMCU_row) {

@ -560,7 +560,7 @@ decompress_smooth_data(j_decompress_ptr cinfo, JSAMPIMAGE output_buf)
next_block_row = buffer_ptr;

if (block_row < block_rows - 2 ||
cinfo->output_iMCU_row < last_iMCU_row - 1)
cinfo->output_iMCU_row + 1 < last_iMCU_row)
next_next_block_row =
buffer[block_row + 2] + cinfo->master->first_MCU_col[ci];
else
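Both decompress_smooth_data() hunks rewrite comparisons of the form `x < last_iMCU_row - 1` as `x + 1 < last_iMCU_row`. A plausible reason (an inference, not stated in the diff) is that these counters are unsigned, so subtracting 1 from a value of 0 wraps around; a tiny demonstration of the difference:

// Illustrative only: with unsigned arithmetic, `row < last - 1` misbehaves
// when last == 0, while the rewritten `row + 1 < last` does not.
#include <cstdio>

int main() {
  unsigned int last_iMCU_row = 0, output_iMCU_row = 0;
  std::printf("row < last - 1 : %d\n", output_iMCU_row < last_iMCU_row - 1u);  // 1 (0u - 1u wraps)
  std::printf("row + 1 < last : %d\n", output_iMCU_row + 1u < last_iMCU_row);  // 0
  return 0;
}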
@ -189,9 +189,9 @@ format_message(j_common_ptr cinfo, char *buffer)

/* Format the message into the passed buffer */
if (isstring)
snprintf(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.s);
SNPRINTF(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.s);
else
snprintf(buffer, JMSG_LENGTH_MAX, msgtext,
SNPRINTF(buffer, JMSG_LENGTH_MAX, msgtext,
err->msg_parm.i[0], err->msg_parm.i[1],
err->msg_parm.i[2], err->msg_parm.i[3],
err->msg_parm.i[4], err->msg_parm.i[5],

@ -45,6 +45,18 @@
*/

#ifdef _MSC_VER

#define SNPRINTF(str, n, format, ...) \
_snprintf_s(str, n, _TRUNCATE, format, ##__VA_ARGS__)

#else

#define SNPRINTF snprintf

#endif


#ifndef NO_GETENV

#ifdef _MSC_VER
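With the hunks above, format_message() now goes through a SNPRINTF wrapper that maps to _snprintf_s(..., _TRUNCATE, ...) on MSVC and to plain snprintf elsewhere, so the formatted error message stays NUL-terminated even when truncated. A small stand-alone sketch of that wrapper in use; the buffer size and message text are made up for illustration:

// Sketch of the SNPRINTF wrapper pattern from the hunk above; values are illustrative.
#include <stdio.h>

#ifdef _MSC_VER
#define SNPRINTF(str, n, format, ...) \
  _snprintf_s(str, n, _TRUNCATE, format, ##__VA_ARGS__)
#else
#define SNPRINTF snprintf
#endif

int main() {
  char buffer[16];
  // Longer than the buffer: the output is truncated but still NUL-terminated
  // on both code paths, which is the property the jerror.c change relies on.
  SNPRINTF(buffer, sizeof(buffer), "code %d: %s", 42, "a fairly long message");
  puts(buffer);
  return 0;
}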
@ -68,10 +68,13 @@ round_up_pow2(size_t a, size_t b)
* There isn't any really portable way to determine the worst-case alignment
* requirement. This module assumes that the alignment requirement is
* multiples of ALIGN_SIZE.
* By default, we define ALIGN_SIZE as sizeof(double). This is necessary on
* some workstations (where doubles really do need 8-byte alignment) and will
* work fine on nearly everything. If your machine has lesser alignment needs,
* you can save a few bytes by making ALIGN_SIZE smaller.
* By default, we define ALIGN_SIZE as the maximum of sizeof(double) and
* sizeof(void *). This is necessary on some workstations (where doubles
* really do need 8-byte alignment) and will work fine on nearly everything.
* We use the maximum of sizeof(double) and sizeof(void *) since sizeof(double)
* may be insufficient, for example, on CHERI-enabled platforms with 16-byte
* pointers and a 16-byte alignment requirement. If your machine has lesser
* alignment needs, you can save a few bytes by making ALIGN_SIZE smaller.
* The only place I know of where this will NOT work is certain Macintosh
* 680x0 compilers that define double as a 10-byte IEEE extended float.
* Doing 10-byte alignment is counterproductive because longwords won't be

@ -81,7 +84,7 @@ round_up_pow2(size_t a, size_t b)

#ifndef ALIGN_SIZE /* so can override from jconfig.h */
#ifndef WITH_SIMD
#define ALIGN_SIZE sizeof(double)
#define ALIGN_SIZE MAX(sizeof(void *), sizeof(double))
#else
#define ALIGN_SIZE 32 /* Most of the SIMD instructions we support require
16-byte (128-bit) alignment, but AVX2 requires
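The jmemmgr.c change above makes the non-SIMD ALIGN_SIZE the larger of sizeof(void *) and sizeof(double), so pool allocations stay aligned even where pointers are wider than doubles (the CHERI case called out in the comment). A hedged sketch of the power-of-two round-up this alignment feeds into; round_up_pow2() and MAX are re-declared here for illustration, and the request sizes are arbitrary:

// Sketch: round allocation sizes up to a power-of-two alignment, in the spirit
// of what jmemmgr.c does with ALIGN_SIZE.
#include <cstddef>
#include <cstdio>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

static size_t round_up_pow2(size_t a, size_t b) {
  // b must be a power of two; result is the smallest multiple of b >= a.
  return (a + b - 1) & ~(b - 1);
}

int main() {
  const size_t align = MAX(sizeof(void*), sizeof(double));  // non-SIMD ALIGN_SIZE
  for (size_t request : {1u, 13u, 64u, 100u}) {
    std::printf("request %zu -> aligned %zu (ALIGN_SIZE = %zu)\n",
                request, round_up_pow2(request, align), align);
  }
  return 0;
}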
@ -25,8 +25,6 @@
#include "../../../jsimddct.h"
#include "../../jsimd.h"

#include <stdio.h>
#include <string.h>
#include <ctype.h>

static unsigned int simd_support = ~0;

@ -25,8 +25,6 @@
#include "../../jsimd.h"
#include "jconfigint.h"

#include <stdio.h>
#include <string.h>
#include <ctype.h>

#define JSIMD_FASTLD3 1

@ -23,8 +23,6 @@
#include "../../jsimddct.h"
#include "../jsimd.h"

#include <stdio.h>
#include <string.h>
#include <ctype.h>

static unsigned int simd_support = ~0;

@ -24,8 +24,6 @@
#include "../../jsimddct.h"
#include "../jsimd.h"

#include <stdio.h>
#include <string.h>
#include <ctype.h>

static unsigned int simd_support = ~0;

@ -27,11 +27,12 @@
#include "../../jsimddct.h"
#include "../jsimd.h"

#include <stdio.h>
#include <string.h>
#include <ctype.h>

#if defined(__OpenBSD__)
#if defined(__APPLE__)
#include <sys/types.h>
#include <sys/sysctl.h>
#elif defined(__OpenBSD__)
#include <sys/param.h>
#include <sys/sysctl.h>
#include <machine/cpu.h>

@ -121,6 +122,10 @@ init_simd(void)
int bufsize = 1024; /* an initial guess for the line buffer size limit */
#elif defined(__amigaos4__)
uint32 altivec = 0;
#elif defined(__APPLE__)
int mib[2] = { CTL_HW, HW_VECTORUNIT };
int altivec;
size_t len = sizeof(altivec);
#elif defined(__OpenBSD__)
int mib[2] = { CTL_MACHDEP, CPU_ALTIVEC };
int altivec;

@ -134,7 +139,7 @@ init_simd(void)

simd_support = 0;

#if defined(__ALTIVEC__) || defined(__APPLE__)
#if defined(__ALTIVEC__)
simd_support |= JSIMD_ALTIVEC;
#elif defined(__linux__) || defined(ANDROID) || defined(__ANDROID__)
while (!parse_proc_cpuinfo(bufsize)) {

@ -146,7 +151,7 @@ init_simd(void)
IExec->GetCPUInfoTags(GCIT_VectorUnit, &altivec, TAG_DONE);
if (altivec == VECTORTYPE_ALTIVEC)
simd_support |= JSIMD_ALTIVEC;
#elif defined(__OpenBSD__)
#elif defined(__APPLE__) || defined(__OpenBSD__)
if (sysctl(mib, 2, &altivec, &len, NULL, 0) == 0 && altivec != 0)
simd_support |= JSIMD_ALTIVEC;
#elif defined(__FreeBSD__)
@ -491,15 +491,15 @@ uint32_t nsInputStreamPump::OnStateStart() {
}

{
nsCOMPtr<nsIStreamListener> listener = mListener;
// We're on the writing thread
AssertOnThread();

// Note: Must exit mutex for call to OnStartRequest to avoid
// deadlocks when calls to RetargetDeliveryTo for multiple
// nsInputStreamPumps are needed (e.g. nsHttpChannel).
RecursiveMutexAutoUnlock unlock(mMutex);
// We're on the writing thread
MOZ_PUSH_IGNORE_THREAD_SAFETY
AssertOnThread();
rv = mListener->OnStartRequest(this);
MOZ_POP_THREAD_SAFETY
rv = listener->OnStartRequest(this);
}

// an error returned from OnStartRequest should cause us to abort; however,

@ -562,6 +562,15 @@ uint32_t nsInputStreamPump::OnStateTransfer() {
mStreamOffset, avail, odaAvail));

{
// We may be called on non-MainThread even if mOffMainThread is
// false, due to RetargetDeliveryTo(), so don't use AssertOnThread()
if (mTargetThread) {
MOZ_ASSERT(mTargetThread->IsOnCurrentThread());
} else {
MOZ_ASSERT(NS_IsMainThread());
}

nsCOMPtr<nsIStreamListener> listener = mListener;
// Note: Must exit mutex for call to OnStartRequest to avoid
// deadlocks when calls to RetargetDeliveryTo for multiple
// nsInputStreamPumps are needed (e.g. nsHttpChannel).

@ -570,16 +579,9 @@ uint32_t nsInputStreamPump::OnStateTransfer() {
// mStreamOffset is only touched in OnStateTransfer, and AsyncRead
// shouldn't be called during OnDataAvailable()

// We may be called on non-MainThread even if mOffMainThread is
// false, due to RetargetDeliveryTo(), so don't use AssertOnThread()
MOZ_PUSH_IGNORE_THREAD_SAFETY
if (mTargetThread) {
MOZ_ASSERT(mTargetThread->IsOnCurrentThread());
} else {
MOZ_ASSERT(NS_IsMainThread());
}
rv = mListener->OnDataAvailable(this, mAsyncStream, mStreamOffset,
odaAvail);
rv = listener->OnDataAvailable(this, mAsyncStream, mStreamOffset,
odaAvail);
MOZ_POP_THREAD_SAFETY
}

@ -678,16 +680,18 @@ uint32_t nsInputStreamPump::OnStateStop() {
mAsyncStream = nullptr;
mIsPending = false;
{
// We're on the writing thread.
// We believe that mStatus can't be changed on us here.
AssertOnThread();

nsCOMPtr<nsIStreamListener> listener = mListener;
nsresult status = mStatus;
// Note: Must exit mutex for call to OnStartRequest to avoid
// deadlocks when calls to RetargetDeliveryTo for multiple
// nsInputStreamPumps are needed (e.g. nsHttpChannel).
RecursiveMutexAutoUnlock unlock(mMutex);
// We're on the writing thread.
// We believe that mStatus can't be changed on us here.
MOZ_PUSH_IGNORE_THREAD_SAFETY
AssertOnThread();
mListener->OnStopRequest(this, mStatus);
MOZ_POP_THREAD_SAFETY

listener->OnStopRequest(this, status);
}
mTargetThread = nullptr;
mListener = nullptr;

@ -80,14 +80,12 @@ class nsInputStreamPump final : public nsIInputStreamPump,
nsresult CreateBufferedStreamIfNeeded() MOZ_REQUIRES(mMutex);

// This should optimize away in non-DEBUG builds
MOZ_ALWAYS_INLINE void AssertOnThread() const {
MOZ_PUSH_IGNORE_THREAD_SAFETY
MOZ_ALWAYS_INLINE void AssertOnThread() const MOZ_REQUIRES(mMutex) {
if (mOffMainThread) {
MOZ_ASSERT(mTargetThread->IsOnCurrentThread());
} else {
MOZ_ASSERT(NS_IsMainThread());
}
MOZ_POP_THREAD_SAFETY
}

uint32_t mState MOZ_GUARDED_BY(mMutex){STATE_IDLE};
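All of the nsInputStreamPump hunks above follow one shape: take a strong local reference to mListener (and a copy of mStatus) while the recursive mutex is held, release the lock with RecursiveMutexAutoUnlock, and only then call into the listener, since holding the lock across the callback can deadlock when several pumps retarget delivery (e.g. nsHttpChannel). A generic, self-contained C++ sketch of that pattern, using standard-library stand-ins rather than the Gecko classes:

// Sketch of the "snapshot under the lock, unlock, then call out" pattern;
// the types here are stand-ins, not Gecko classes (std::recursive_mutex
// plays the role of the pump's RecursiveMutex).
#include <memory>
#include <mutex>

struct Listener {
  // In the real code this is an nsIStreamListener that may re-enter the
  // pump, e.g. to retarget delivery for another pump/channel.
  void OnStopRequest(int /*status*/) {}
};

class Pump {
 public:
  void OnStateStop() {
    std::unique_lock<std::recursive_mutex> lock(mMutex);
    // Snapshot everything the callback needs while the lock is held.
    std::shared_ptr<Listener> listener = mListener;
    int status = mStatus;

    // Release the lock before calling out; per the comment in the patch,
    // holding it across the callback risks deadlock when multiple pumps
    // need to retarget delivery.
    lock.unlock();
    if (listener) {
      listener->OnStopRequest(status);
    }
  }

 private:
  std::recursive_mutex mMutex;
  std::shared_ptr<Listener> mListener = std::make_shared<Listener>();
  int mStatus = 0;
};

int main() { Pump().OnStateStop(); }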
@ -178,6 +178,9 @@ const COMMON_PREFERENCES = new Map([
["dom.max_chrome_script_run_time", 0],
["dom.max_script_run_time", 0],

// Disable location change rate limitation
["dom.navigation.locationChangeRateLimit.count", 0],

// DOM Push
["dom.push.connection.enabled", false],
@ -24,12 +24,12 @@ hash: The name of a hash algorithm to use when signing. Optional.
Defaults to 'sha256'.
"""

from pyasn1.codec.der import encoder
import binascii

import os
import sys

from pyasn1.codec.der import encoder

sys.path.append(
os.path.join(os.path.dirname(__file__), "..", "..", "..", "manager", "tools")
)
@ -7,10 +7,11 @@
# This exists to paper over differences between gyp's `action` definitions
# and moz.build `GENERATED_FILES` semantics.

import buildconfig
import os
import subprocess

import buildconfig


def main(output, *inputs):
env = dict(os.environ)
@ -13,20 +13,17 @@ duplicates.
Requires Python 3.
"""
import argparse
import re
import requests
import sys
import io
import re
import sys

from pyasn1.codec.der import decoder
from pyasn1.codec.der import encoder
from pyasn1_modules import pem
from pyasn1_modules import rfc5280

import requests
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.x509.oid import NameOID
from pyasn1.codec.der import decoder, encoder
from pyasn1_modules import pem, rfc5280

assert sys.version_info >= (3, 2), "Requires Python 3.2 or later"
@ -15,7 +15,7 @@ https://cs.chromium.org/chromium/src/net/cert/ct_known_logs_static-inc.h
"""

from __future__ import print_function
from string import Template

import argparse
import base64
import datetime

@ -23,9 +23,10 @@ import json
import os.path
import sys
import textwrap
import urllib2
from string import Template

import six
import urllib2


def decodebytes(s):
@ -4,17 +4,12 @@

import os

from mach.decorators import Command, CommandArgument
from mach.util import UserError
from mozpack.files import FileFinder
from mozpack.path import basedir


from mach.decorators import (
CommandArgument,
Command,
)


def run_module_main_on(module, input_filename):
"""Run the given module (pycert or pykey) on the given
file."""
@ -84,21 +84,20 @@ If a serial number is not explicitly specified, it is automatically
generated based on the contents of the certificate.
"""

from pyasn1.codec.der import decoder
from pyasn1.codec.der import encoder
from pyasn1.type import constraint, tag, univ, useful
from pyasn1_modules import rfc2459
from struct import pack
import base64
import datetime
import hashlib
import re
import socket
import six
import sys
from struct import pack

import pyct
import pykey
import six
from pyasn1.codec.der import decoder, encoder
from pyasn1.type import constraint, tag, univ, useful
from pyasn1_modules import rfc2459


class Error(Exception):
@ -26,15 +26,15 @@ information).
The certificate specification must come last.
"""

from pyasn1.codec.der import decoder
from pyasn1.codec.der import encoder
from pyasn1.type import tag, univ
from pyasn1_modules import rfc2315, rfc2459
import base64
import sys
from io import StringIO

import pycert
import pykey
import sys
from pyasn1.codec.der import decoder, encoder
from pyasn1.type import tag, univ
from pyasn1_modules import rfc2315, rfc2459


class Error(Exception):
@ -10,13 +10,13 @@ details of a signing key, when to sign, and the certificate data to
sign. Currently only supports precert_entry types. See RFC 6962.
"""

from pyasn1.codec.der import encoder
from struct import pack
import binascii
import calendar
import hashlib
from struct import pack

import pykey
from pyasn1.codec.der import encoder


class InvalidKeyError(Exception):
@ -30,17 +30,18 @@ secp384r1: an ECC key on the curve secp384r1
secp521r1: an ECC key on the curve secp521r1
"""

from pyasn1.codec.der import encoder
from pyasn1.type import univ, namedtype, tag
from pyasn1_modules import rfc2459
import base64
import binascii
import ecdsa
import hashlib
import math
import sys

import ecdsa
import rsa
import six
import sys
from pyasn1.codec.der import encoder
from pyasn1.type import namedtype, tag, univ
from pyasn1_modules import rfc2459

# "constants" to make it easier for consumers to specify hash algorithms
HASH_MD5 = "hash:md5"
@ -11,11 +11,12 @@ Mac-specific utility command to register a font file with the OS.

from __future__ import print_function

import CoreText
import Cocoa
import argparse
import sys

import Cocoa
import CoreText


def main():
parser = argparse.ArgumentParser()
@ -120,6 +120,10 @@ eslint:

eslint-build:
description: ESLint checks with build data
always-target: false
run-on-projects: []
attributes:
code-review: false
treeherder:
symbol: js(ES-B)
tier: 3
@ -22,7 +22,7 @@ jobs:
platform: updatebot/all
symbol: cron
tier: 1
worker-type: b-linux
worker-type: b-linux-gcp
worker:
docker-image: {in-tree: updatebot}
max-run-time: 3600
@ -4,24 +4,22 @@


import copy
from datetime import datetime, timedelta
import os
import re
from datetime import datetime, timedelta

from gecko_taskgraph import GECKO, try_option_syntax
from gecko_taskgraph.util.attributes import (
match_run_on_hg_branches,
match_run_on_projects,
)
from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message
from gecko_taskgraph.util.platforms import platform_family
from redo import retry
from taskgraph.parameters import Parameters
from taskgraph.target_tasks import _target_task, get_method
from taskgraph.util.taskcluster import find_task_id

from gecko_taskgraph import try_option_syntax, GECKO
from gecko_taskgraph.util.attributes import (
match_run_on_projects,
match_run_on_hg_branches,
)
from gecko_taskgraph.util.platforms import platform_family
from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message


# Some tasks show up in the target task set, but are possibly special cases,
# uncommon tasks, or tasks running against limited hardware set that they
# should only be selectable with --full.

@ -1256,10 +1254,7 @@ def target_tasks_backfill_all_browsertime(full_task_graph, parameters, graph_con
and landed the day before the cron is running. Trigger backfill-all-browsertime action
task on each of them.
"""
from gecko_taskgraph.actions.util import (
get_decision_task_id,
get_pushes,
)
from gecko_taskgraph.actions.util import get_decision_task_id, get_pushes

def date_is_yesterday(date):
yesterday = datetime.today() - timedelta(days=1)

@ -1398,5 +1393,5 @@ def target_tasks_eslint_build(full_task_graph, parameters, graph_config):
for name, task in full_task_graph.tasks.items():
if task.kind != "source-test":
continue
if name == "eslint-build":
if "eslint-build" in name:
yield name
@ -18,14 +18,12 @@ import sys
import tempfile
import time
import traceback

from copy import deepcopy

import mozversion

from mozprofile import Profile
from mozrunner import Runner, FennecEmulatorRunner
import six
from mozprofile import Profile
from mozrunner import FennecEmulatorRunner, Runner
from six import reraise

from . import errors

@ -69,6 +67,8 @@ class GeckoInstance(object):
# No slow script dialogs
"dom.max_chrome_script_run_time": 0,
"dom.max_script_run_time": 0,
# Disable location change rate limitation
"dom.navigation.locationChangeRateLimit.count": 0,
# DOM Push
"dom.push.connection.enabled": False,
# Disable dialog abuse if alerts are triggered too quickly
@ -45,7 +45,7 @@ FINAL_TARGET_FILES.content.static += [

FINAL_TARGET_FILES.content.tests.SimpleTest += [
"../../docshell/test/chrome/docshell_helpers.js",
"../modules/StructuredLog.jsm",
"../modules/StructuredLog.sys.mjs",
"tests/SimpleTest/AccessibilityUtils.js",
"tests/SimpleTest/EventUtils.js",
"tests/SimpleTest/ExtensionTestUtils.js",
@ -766,10 +766,6 @@ function testListing(metadata, response) {
type: "text/css",
href: "/static/harness.css",
}),
SCRIPT({
type: "text/javascript",
src: "/tests/SimpleTest/StructuredLog.jsm",
}),
SCRIPT({
type: "text/javascript",
src: "/tests/SimpleTest/LogController.js",
@ -7,7 +7,6 @@
*/

// This file expects the following files to be loaded.
/* import-globals-from ../../../modules/StructuredLog.jsm */
/* import-globals-from LogController.js */
/* import-globals-from MemoryStats.js */
/* import-globals-from MozillaLogger.js */

@ -16,6 +15,13 @@

"use strict";

const {
StructuredLogger,
StructuredFormatter,
} = SpecialPowers.ChromeUtils.importESModule(
"resource://testing-common/StructuredLog.sys.mjs"
);

function getElement(id) {
return typeof id == "string" ? document.getElementById(id) : id;
}

@ -337,13 +343,15 @@ TestRunner._dumpMessage = function(message) {
// From https://searchfox.org/mozilla-central/source/testing/modules/StructuredLog.jsm
TestRunner.structuredLogger = new StructuredLogger(
"mochitest",
TestRunner._dumpMessage
TestRunner._dumpMessage,
[],
TestRunner
);
TestRunner.structuredLogger.deactivateBuffering = function() {
TestRunner.structuredLogger._logData("buffering_off");
TestRunner.structuredLogger.logData("buffering_off");
};
TestRunner.structuredLogger.activateBuffering = function() {
TestRunner.structuredLogger._logData("buffering_on");
TestRunner.structuredLogger.logData("buffering_on");
};

TestRunner.log = function(msg) {