diff --git a/browser/app/macbuild/Contents/Info.plist.in b/browser/app/macbuild/Contents/Info.plist.in index 9ceaf88f15c1..99849c6f06c6 100644 --- a/browser/app/macbuild/Contents/Info.plist.in +++ b/browser/app/macbuild/Contents/Info.plist.in @@ -46,6 +46,28 @@ CFBundleTypeRole Viewer + + CFBundleTypeExtensions + + pdf + + CFBundleTypeIconFile + document.icns + CFBundleTypeMIMETypes + + application/pdf + + CFBundleTypeName + PDF document + CFBundleTypeOSTypes + + TEXT + + CFBundleTypeRole + Viewer + LSHandlerRank + Alternate + CFBundleTypeExtensions diff --git a/browser/base/content/browser.css b/browser/base/content/browser.css index 9223a21ebcc3..b8cdd7effa45 100644 --- a/browser/base/content/browser.css +++ b/browser/base/content/browser.css @@ -1683,7 +1683,7 @@ toolbar[keyNav=true]:not([collapsed=true], [customizing=true]) toolbartabstop { } /* Hide tab-modal dialogs when a window-modal one is up. */ -:root[window-modal-open] .browserContainer > .dialogStack { +:root[window-modal-open] .browserStack > .dialogStack { visibility: hidden; } diff --git a/browser/base/content/browser.js b/browser/base/content/browser.js index e0beb73dea5f..481791be5c08 100644 --- a/browser/base/content/browser.js +++ b/browser/base/content/browser.js @@ -9116,7 +9116,7 @@ const SafeBrowsingNotificationBox = { */ class TabDialogBox { static _containerFor(browser) { - return browser.closest(".browserContainer, .webextension-popup-stack"); + return browser.closest(".browserStack, .webextension-popup-stack"); } constructor(browser) { diff --git a/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js b/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js index a63def6a599a..4280e88bef8f 100644 --- a/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js +++ b/browser/base/content/test/tabPrompts/browser_openPromptInBackgroundTab.js @@ -53,7 +53,7 @@ add_task(async function test_old_modal_ui() { // switch tab back, and check the 
checkbox is displayed: await BrowserTestUtils.switchTab(gBrowser, openedTab); // check the prompt is there, and the extra row is present - let promptElements = openedTab.linkedBrowser.parentNode.parentNode.querySelectorAll( + let promptElements = openedTab.linkedBrowser.parentNode.querySelectorAll( "tabmodalprompt" ); is(promptElements.length, 1, "There should be 1 prompt"); @@ -164,7 +164,7 @@ add_task(async function test_new_modal_ui() { // switch tab back, and check the checkbox is displayed: await BrowserTestUtils.switchTab(gBrowser, openedTab); // check the prompt is there - let promptElements = openedTab.linkedBrowser.parentNode.parentNode.querySelectorAll( + let promptElements = openedTab.linkedBrowser.parentNode.querySelectorAll( ".content-prompt-dialog" ); diff --git a/browser/components/customizableui/CustomizableUI.jsm b/browser/components/customizableui/CustomizableUI.jsm index b3f76d8acfbf..d43758e27586 100644 --- a/browser/components/customizableui/CustomizableUI.jsm +++ b/browser/components/customizableui/CustomizableUI.jsm @@ -5267,6 +5267,11 @@ class OverflowableToolbar { */ #overflowedInfo = new Map(); + /** + * The set of overflowed DOM nodes that were hidden at the time of overflowing. + */ + #hiddenOverflowedNodes = new WeakSet(); + /** * True if the overflowable toolbar is actively handling overflows and * underflows. This value is set internally by the private #enable() and @@ -5303,11 +5308,12 @@ class OverflowableToolbar { /** * A reference to the the element that overflowed extension browser action * toolbar items will be appended to as children upon overflow if the - * Unified Extension UI is enabled. + * Unified Extension UI is enabled. This is created lazily and might be null, + * so you should use the #webExtList memoizing getter instead to get this. 
* - * @type {Element} + * @type {Element|null} */ - #webExtList = null; + #webExtListRef = null; /** * An empty object that is created in #checkOverflow to identify individual @@ -5605,16 +5611,21 @@ class OverflowableToolbar { } } + let overflowList = CustomizableUI.isWebExtensionWidget(aNode.id) + ? this.#webExtList + : this.#defaultList; + let containerForAppending = this.#overflowedInfo.size && newNodeCanOverflow - ? this.#defaultList + ? overflowList : this.#target; return [containerForAppending, null]; } /** * Allows callers to query for the current parent of a toolbar item that may - * or may not be overflowed. That parent will either be #defaultList or #target. + * or may not be overflowed. That parent will either be #defaultList, + * #webExtList (if it's an extension button) or #target. * * Note: It is assumed that the caller has verified that aNode is placed * within the toolbar customizable area according to CustomizableUI. @@ -5625,7 +5636,9 @@ class OverflowableToolbar { */ getContainerFor(aNode) { if (aNode.getAttribute("overflowedItem") == "true") { - return this.#defaultList; + return CustomizableUI.isWebExtensionWidget(aNode.id) + ? 
this.#webExtList + : this.#defaultList; } return this.#target; } @@ -5657,7 +5670,7 @@ class OverflowableToolbar { return; } - let webExtList = this.#getWebExtList(); + let webExtList = this.#webExtList; let child = this.#target.lastElementChild; while (child && isOverflowing) { @@ -5665,6 +5678,13 @@ class OverflowableToolbar { if (child.getAttribute("overflows") != "false") { this.#overflowedInfo.set(child.id, targetContentWidth); + let { width: childWidth } = win.windowUtils.getBoundsWithoutFlushing( + child + ); + if (!childWidth) { + this.#hiddenOverflowedNodes.add(child); + } + child.setAttribute("overflowedItem", true); CustomizableUIInternal.ensureButtonContextMenu( child, @@ -5690,7 +5710,7 @@ class OverflowableToolbar { child, this.#defaultList.firstElementChild ); - if (!CustomizableUI.isSpecialWidget(child.id)) { + if (!CustomizableUI.isSpecialWidget(child.id) && childWidth) { this.#toolbar.setAttribute("overflowing", "true"); } } @@ -5886,8 +5906,13 @@ class OverflowableToolbar { win.UpdateUrlbarSearchSplitterState(); let defaultListItems = Array.from(this.#defaultList.children); - let collapsedWidgetIds = defaultListItems.map(item => item.id); - if (collapsedWidgetIds.every(w => CustomizableUI.isSpecialWidget(w))) { + if ( + defaultListItems.every( + item => + CustomizableUI.isSpecialWidget(item.id) || + this.#hiddenOverflowedNodes.has(item) + ) + ) { this.#toolbar.removeAttribute("overflowing"); } } @@ -5991,16 +6016,31 @@ class OverflowableToolbar { * buttons should go to if the Unified Extensions UI is enabled, or null * if no such list exists. 
*/ - #getWebExtList() { - if (!this.#webExtList) { + get #webExtList() { + if (!this.#webExtListRef) { let targetID = this.#toolbar.getAttribute("addon-webext-overflowtarget"); - if (targetID) { - let win = this.#toolbar.ownerGlobal; - let { panel } = win.gUnifiedExtensions; - this.#webExtList = panel.querySelector(`#${targetID}`); + if (!targetID) { + throw new Error( + "addon-webext-overflowtarget was not defined on the " + + `overflowable toolbar with id: ${this.#toolbar.id}` + ); } + let win = this.#toolbar.ownerGlobal; + let { panel } = win.gUnifiedExtensions; + this.#webExtListRef = panel.querySelector(`#${targetID}`); } - return this.#webExtList; + return this.#webExtListRef; + } + + /** + * Returns true if aNode is not null and is one of either this.#webExtList or + * this.#defaultList. + * + * @param {DOMElement} aNode The node to test. + * @returns {boolean} + */ + #isOverflowList(aNode) { + return aNode == this.#defaultList || aNode == this.#webExtList; } /** @@ -6075,30 +6115,25 @@ class OverflowableToolbar { // moved or removed from an area via the CustomizableUI API while // overflowed. It reorganizes the internal state of this OverflowableToolbar // to handle that change. - if ( - !this.#enabled || - (aContainer != this.#target && aContainer != this.#defaultList) - ) { + if (!this.#enabled || !this.#isOverflowList(aContainer)) { return; } // When we (re)move an item, update all the items that come after it in the list // with the minsize *of the item before the to-be-removed node*. This way, we // ensure that we try to move items back as soon as that's possible. 
- if (aNode.parentNode == this.#defaultList) { - let updatedMinSize; - if (aNode.previousElementSibling) { - updatedMinSize = this.#overflowedInfo.get( - aNode.previousElementSibling.id - ); - } else { - // Force (these) items to try to flow back into the bar: - updatedMinSize = 1; - } - let nextItem = aNode.nextElementSibling; - while (nextItem) { - this.#overflowedInfo.set(nextItem.id, updatedMinSize); - nextItem = nextItem.nextElementSibling; - } + let updatedMinSize; + if (aNode.previousElementSibling) { + updatedMinSize = this.#overflowedInfo.get( + aNode.previousElementSibling.id + ); + } else { + // Force (these) items to try to flow back into the bar: + updatedMinSize = 1; + } + let nextItem = aNode.nextElementSibling; + while (nextItem) { + this.#overflowedInfo.set(nextItem.id, updatedMinSize); + nextItem = nextItem.nextElementSibling; } } @@ -6109,12 +6144,12 @@ class OverflowableToolbar { // causes overflow or underflow of the toolbar. if ( !this.#enabled || - (aContainer != this.#target && aContainer != this.#defaultList) + (aContainer != this.#target && !this.#isOverflowList(aContainer)) ) { return; } - let nowOverflowed = aNode.parentNode == this.#defaultList; + let nowOverflowed = this.#isOverflowList(aNode.parentNode); let wasOverflowed = this.#overflowedInfo.has(aNode.id); // If this wasn't overflowed before... 
diff --git a/browser/components/customizableui/test/browser.ini b/browser/components/customizableui/test/browser.ini index 299f8855c373..1c1d856e7af6 100644 --- a/browser/components/customizableui/test/browser.ini +++ b/browser/components/customizableui/test/browser.ini @@ -83,10 +83,12 @@ tags = overflowable-toolbar skip-if = verify [browser_972267_customizationchange_events.js] [browser_976792_insertNodeInWindow.js] +tags = overflowable-toolbar skip-if = os == "linux" [browser_978084_dragEnd_after_move.js] skip-if = verify [browser_980155_add_overflow_toolbar.js] +tags = overflowable-toolbar skip-if = verify [browser_981305_separator_insertion.js] [browser_981418-widget-onbeforecreated-handler.js] @@ -141,6 +143,7 @@ tags = overflowable-toolbar https_first_disabled = true [browser_flexible_space_area.js] [browser_help_panel_cloning.js] +[browser_hidden_widget_overflow.js] [browser_history_after_appMenu.js] [browser_history_recently_closed.js] [browser_history_recently_closed_middleclick.js] diff --git a/browser/components/customizableui/test/browser_hidden_widget_overflow.js b/browser/components/customizableui/test/browser_hidden_widget_overflow.js new file mode 100644 index 000000000000..c4adced590be --- /dev/null +++ b/browser/components/customizableui/test/browser_hidden_widget_overflow.js @@ -0,0 +1,122 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +/** + * Tests that if only hidden widgets are overflowed that the + * OverflowableToolbar won't show the overflow panel anchor. 
+ */ + +const kHiddenButtonID = "fake-hidden-button"; +const kDisplayNoneButtonID = "display-none-button"; +const kWebExtensionButtonID1 = "fake-webextension-button-1"; +const kWebExtensionButtonID2 = "fake-webextension-button-2"; +let gWin = null; + +add_setup(async function() { + await SpecialPowers.pushPrefEnv({ + set: [["extensions.unifiedExtensions.enabled", true]], + }); + + gWin = await BrowserTestUtils.openNewBrowserWindow(); + + // To make it easier to write a test where we can control overflowing + // for a test that can run in a bunch of environments with slightly + // different rules on when things will overflow, we'll go ahead and + // just remove everything removable from the nav-bar by default. Then + // we'll add our hidden item, and a single WebExtension item, and + // force toolbar overflow. + let widgetIDs = CustomizableUI.getWidgetIdsInArea(CustomizableUI.AREA_NAVBAR); + for (let widgetID of widgetIDs) { + if (CustomizableUI.isWidgetRemovable(widgetID)) { + CustomizableUI.removeWidgetFromArea(widgetID); + } + } + + CustomizableUI.createWidget({ + id: kWebExtensionButtonID1, + label: "Test WebExtension widget 1", + defaultArea: CustomizableUI.AREA_NAVBAR, + webExtension: true, + }); + + CustomizableUI.createWidget({ + id: kWebExtensionButtonID2, + label: "Test WebExtension widget 2", + defaultArea: CustomizableUI.AREA_NAVBAR, + webExtension: true, + }); + + // Let's force the WebExtension widgets to be significantly wider. This + // just makes it easier to ensure that both of these (which are to the left + // of the hidden widget) get overflowed. + for (let webExtID of [kWebExtensionButtonID1, kWebExtensionButtonID2]) { + let webExtNode = CustomizableUI.getWidget(webExtID).forWindow(gWin).node; + webExtNode.style.width = "100px"; + } + + CustomizableUI.createWidget({ + id: kHiddenButtonID, + label: "Test hidden=true widget", + defaultArea: CustomizableUI.AREA_NAVBAR, + }); + + // Now hide the button with hidden=true so that it has no dimensions. 
+ let hiddenButtonNode = CustomizableUI.getWidget(kHiddenButtonID).forWindow( + gWin + ).node; + hiddenButtonNode.hidden = true; + + CustomizableUI.createWidget({ + id: kDisplayNoneButtonID, + label: "Test display:none widget", + defaultArea: CustomizableUI.AREA_NAVBAR, + }); + + // Now hide the button with display: none so that it has no dimensions. + let displayNoneButtonNode = CustomizableUI.getWidget( + kDisplayNoneButtonID + ).forWindow(gWin).node; + displayNoneButtonNode.style.display = "none"; + + registerCleanupFunction(async () => { + CustomizableUI.destroyWidget(kWebExtensionButtonID1); + CustomizableUI.destroyWidget(kWebExtensionButtonID2); + CustomizableUI.destroyWidget(kHiddenButtonID); + CustomizableUI.destroyWidget(kDisplayNoneButtonID); + await BrowserTestUtils.closeWindow(gWin); + await CustomizableUI.reset(); + }); +}); + +add_task(async function test_hidden_widget_overflow() { + gWin.resizeTo(kForceOverflowWidthPx, window.outerHeight); + + // Wait until the left-most fake WebExtension button is overflowing. + let webExtNode = CustomizableUI.getWidget(kWebExtensionButtonID1).forWindow( + gWin + ).node; + await BrowserTestUtils.waitForMutationCondition( + webExtNode, + { attributes: true }, + () => { + return webExtNode.hasAttribute("overflowedItem"); + } + ); + + let hiddenButtonNode = CustomizableUI.getWidget(kHiddenButtonID).forWindow( + gWin + ).node; + Assert.ok( + hiddenButtonNode.hasAttribute("overflowedItem"), + "Hidden button should be overflowed." + ); + + let overflowButton = gWin.document.getElementById("nav-bar-overflow-button"); + + Assert.ok( + !BrowserTestUtils.is_visible(overflowButton), + "Overflow panel button should be hidden." 
+ ); +}); diff --git a/browser/components/customizableui/test/head.js b/browser/components/customizableui/test/head.js index 9d5c52a41334..3f606abe6af3 100644 --- a/browser/components/customizableui/test/head.js +++ b/browser/components/customizableui/test/head.js @@ -94,6 +94,8 @@ function createOverflowableToolbarWithPlacements(id, placements) { tb.setAttribute("default-overflowpanel", overflowPanel.id); tb.setAttribute("default-overflowtarget", overflowList.id); tb.setAttribute("default-overflowbutton", chevron.id); + tb.setAttribute("addon-webext-overflowbutton", "unified-extensions-button"); + tb.setAttribute("addon-webext-overflowtarget", "overflowed-extensions-list"); gNavToolbox.appendChild(tb); CustomizableUI.registerToolbarNode(tb); diff --git a/browser/components/extensions/test/AppUiTestDelegate.jsm b/browser/components/extensions/test/AppUiTestDelegate.jsm index 710b4e03733d..f33669176def 100644 --- a/browser/components/extensions/test/AppUiTestDelegate.jsm +++ b/browser/components/extensions/test/AppUiTestDelegate.jsm @@ -218,6 +218,7 @@ async function removeTab(tab) { var AppUiTestInternals = { awaitBrowserLoaded, getBrowserActionWidget, + getBrowserActionWidgetId, getPageActionButton, getPageActionPopup, getPanelForNode, diff --git a/browser/components/extensions/test/browser/browser_unified_extensions.js b/browser/components/extensions/test/browser/browser_unified_extensions.js index e07184e84ed1..171275393ac4 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions.js @@ -50,6 +50,11 @@ let win; add_setup(async function() { win = await promiseEnableUnifiedExtensions(); + // Make sure extension buttons added to the navbar will not overflow in the + // panel, which could happen when a previous test file resizes the current + // window. 
+ await ensureMaximizedWindow(win); + registerCleanupFunction(async () => { await BrowserTestUtils.closeWindow(win); }); diff --git a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js index 662d25df9d61..6cd2ec4d97fb 100644 --- a/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js +++ b/browser/components/extensions/test/browser/browser_unified_extensions_overflowable_toolbar.js @@ -77,8 +77,18 @@ function getVisibleMenuItems(popup) { * 5. Unloads all of the test WebExtensions * * @param {DOMWindow} win The browser window to perform the test on. - * @param {Function} taskFn The async function to run once the window is in - * the overflow state. The function is called with the following arguments: + * @param {object} options Additional options when running this test. + * @param {Function} options.beforeOverflowed This optional async function will + * be run after the extensions are created and added to the toolbar, but + * before the toolbar overflows. The function is called with the following + * arguments: + * + * {string[]} extensionIDs: The IDs of the test WebExtensions. + * + * The return value of the function is ignored. + * @param {Function} options.whenOverflowed This optional async function will + * run once the window is in the overflow state. The function is called + * with the following arguments: * * {Element} defaultList: The DOM element that holds overflowed default * items. @@ -86,24 +96,26 @@ function getVisibleMenuItems(popup) { * WebExtension browser_actions when Unified Extensions is enabled. * {string[]} extensionIDs: The IDs of the test WebExtensions. * - * The function is expected to return a Promise that does not resolve - * with anything. + * The return value of the function is ignored. 
+ * @param {Function} options.afterUnderflowed This optional async function will + * be run after the window is expanded and the toolbar has underflowed, but + * before the extensions are removed. This function is not passed any + * arguments. The return value of the function is ignored. + * */ -async function withWindowOverflowed(win, taskFn) { +async function withWindowOverflowed( + win, + { + beforeOverflowed = async () => {}, + whenOverflowed = async () => {}, + afterUnderflowed = async () => {}, + } = {} +) { const doc = win.document; doc.documentElement.removeAttribute("persist"); const navbar = doc.getElementById(CustomizableUI.AREA_NAVBAR); - win.moveTo(0, 0); - - const widthDiff = win.screen.availWidth - win.outerWidth; - const heightDiff = win.screen.availHeight - win.outerHeight; - - if (widthDiff || heightDiff) { - let resizeDone = BrowserTestUtils.waitForEvent(win, "resize", false); - win.resizeBy(widthDiff, heightDiff); - await resizeDone; - } + await ensureMaximizedWindow(win); // The OverflowableToolbar operates asynchronously at times, so we will // poll a widget's overflowedItem attribute to detect whether or not the @@ -223,61 +235,83 @@ async function withWindowOverflowed(win, taskFn) { await listener.promise; CustomizableUI.removeListener(listener); - const originalWindowWidth = win.outerWidth; - - let widgetOverflowListener = { - _remainingOverflowables: NUM_EXTENSIONS + DEFAULT_WIDGET_IDS.length, - _deferred: PromiseUtils.defer(), - - get promise() { - return this._deferred.promise; - }, - - onWidgetOverflow(widgetNode, areaNode) { - this._remainingOverflowables--; - if (!this._remainingOverflowables) { - this._deferred.resolve(); - } - }, - }; - CustomizableUI.addListener(widgetOverflowListener); - - win.resizeTo(OVERFLOW_WINDOW_WIDTH_PX, win.outerHeight); - await widgetOverflowListener.promise; - CustomizableUI.removeListener(widgetOverflowListener); - - Assert.ok( - navbar.hasAttribute("overflowing"), - "Should have an overflowing 
toolbar." - ); - - const defaultList = doc.getElementById( - navbar.getAttribute("default-overflowtarget") - ); - - const unifiedExtensionList = doc.getElementById( - navbar.getAttribute("addon-webext-overflowtarget") - ); - const extensionIDs = extensions.map(extension => extension.id); try { - await taskFn(defaultList, unifiedExtensionList, extensionIDs); + info("Running beforeOverflowed task"); + await beforeOverflowed(extensionIDs); } finally { - win.resizeTo(originalWindowWidth, win.outerHeight); - await BrowserTestUtils.waitForEvent(win, "resize"); + const originalWindowWidth = win.outerWidth; - // Notably, we don't wait for the nav-bar to not have the "overflowing" - // attribute. This is because we might be running in an environment - // where the nav-bar was overflowing to begin with. Let's just hope that - // our sign-post widget has stopped overflowing. - await TestUtils.waitForCondition(() => { - return !doc - .getElementById(signpostWidgetID) - .hasAttribute("overflowedItem"); + // The beforeOverflowed task may have moved some items out from the navbar, + // so only listen for overflows for items still in there. 
+ const browserActionIDs = extensionIDs.map(id => + AppUiTestInternals.getBrowserActionWidgetId(id) + ); + const browserActionsInNavBar = browserActionIDs.filter(widgetID => { + let placement = CustomizableUI.getPlacementOfWidget(widgetID); + return placement.area == CustomizableUI.AREA_NAVBAR; }); - await Promise.all(extensions.map(extension => extension.unload())); + let widgetOverflowListener = { + _remainingOverflowables: + browserActionsInNavBar.length + DEFAULT_WIDGET_IDS.length, + _deferred: PromiseUtils.defer(), + + get promise() { + return this._deferred.promise; + }, + + onWidgetOverflow(widgetNode, areaNode) { + this._remainingOverflowables--; + if (!this._remainingOverflowables) { + this._deferred.resolve(); + } + }, + }; + CustomizableUI.addListener(widgetOverflowListener); + + win.resizeTo(OVERFLOW_WINDOW_WIDTH_PX, win.outerHeight); + await widgetOverflowListener.promise; + CustomizableUI.removeListener(widgetOverflowListener); + + Assert.ok( + navbar.hasAttribute("overflowing"), + "Should have an overflowing toolbar." + ); + + const defaultList = doc.getElementById( + navbar.getAttribute("default-overflowtarget") + ); + + const unifiedExtensionList = doc.getElementById( + navbar.getAttribute("addon-webext-overflowtarget") + ); + + try { + info("Running whenOverflowed task"); + await whenOverflowed(defaultList, unifiedExtensionList, extensionIDs); + } finally { + win.resizeTo(originalWindowWidth, win.outerHeight); + await BrowserTestUtils.waitForEvent(win, "resize"); + + // Notably, we don't wait for the nav-bar to not have the "overflowing" + // attribute. This is because we might be running in an environment + // where the nav-bar was overflowing to begin with. Let's just hope that + // our sign-post widget has stopped overflowing. 
+ await TestUtils.waitForCondition(() => { + return !doc + .getElementById(signpostWidgetID) + .hasAttribute("overflowedItem"); + }); + + try { + info("Running afterUnderflowed task"); + await afterUnderflowed(); + } finally { + await Promise.all(extensions.map(extension => extension.unload())); + } + } } } @@ -394,10 +428,10 @@ async function verifyExtensionWidget(win, widget, unifiedExtensionsEnabled) { */ add_task(async function test_overflowable_toolbar() { let win = await promiseEnableUnifiedExtensions(); + let movedNode; - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { // Ensure that there are 5 items in the Unified Extensions overflow // list, and the default widgets should all be in the default overflow // list (though there might be more items from the nav-bar in there that @@ -423,8 +457,35 @@ add_task(async function test_overflowable_toolbar() { ); await verifyExtensionWidget(win, child, true); } - } - ); + + let extensionWidgetID = AppUiTestInternals.getBrowserActionWidgetId( + extensionIDs.at(-1) + ); + movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win) + .node; + Assert.equal(movedNode.getAttribute("cui-areatype"), "toolbar"); + + CustomizableUI.addWidgetToArea( + extensionWidgetID, + CustomizableUI.AREA_ADDONS + ); + + Assert.equal( + movedNode.getAttribute("cui-areatype"), + "panel", + "The moved browser action button should have the right cui-areatype set." + ); + }, + afterUnderflowed: async () => { + // Ensure that the moved node's parent is still the add-ons panel. 
+ Assert.equal( + movedNode.parentElement.id, + CustomizableUI.AREA_ADDONS, + "The browser action should still be in the addons panel" + ); + CustomizableUI.addWidgetToArea(movedNode.id, CustomizableUI.AREA_NAVBAR); + }, + }); await BrowserTestUtils.closeWindow(win); }); @@ -436,9 +497,8 @@ add_task(async function test_overflowable_toolbar() { add_task(async function test_overflowable_toolbar_legacy() { let win = await promiseDisableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { // First, ensure that all default items are in the default overflow list. // (though there might be more items from the nav-bar in there that // already existed in the nav-bar before we put the default widgets in @@ -465,8 +525,8 @@ add_task(async function test_overflowable_toolbar_legacy() { 0, "Unified Extension overflow list should be empty." ); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); await SpecialPowers.popPrefEnv(); @@ -475,9 +535,8 @@ add_task(async function test_overflowable_toolbar_legacy() { add_task(async function test_menu_button() { let win = await promiseEnableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { Assert.ok( unifiedExtensionList.children.length, "Should have items in the Unified Extension list." 
@@ -631,8 +690,8 @@ add_task(async function test_menu_button() { ); await closeExtensionsPanel(win); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); }); @@ -640,9 +699,8 @@ add_task(async function test_menu_button() { add_task(async function test_context_menu() { let win = await promiseEnableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { Assert.ok( unifiedExtensionList.children.length, "Should have items in the Unified Extension list." @@ -730,8 +788,8 @@ add_task(async function test_context_menu() { // We can close the unified extensions panel now. await closeExtensionsPanel(win); - } - ); + }, + }); await BrowserTestUtils.closeWindow(win); }); @@ -739,9 +797,8 @@ add_task(async function test_context_menu() { add_task(async function test_action_button() { let win = await promiseEnableUnifiedExtensions(); - await withWindowOverflowed( - win, - async (defaultList, unifiedExtensionList, extensionIDs) => { + await withWindowOverflowed(win, { + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { Assert.ok( unifiedExtensionList.children.length, "Should have items in the Unified Extension list." @@ -876,8 +933,56 @@ add_task(async function test_action_button() { await closeExtensionsPanel(win); } ); - } - ); + }, + }); + + await BrowserTestUtils.closeWindow(win); +}); + +/** + * Tests that if we pin a browser action button listed in the addons panel + * to the toolbar when that button would immediately overflow, that the + * button is put into the addons panel overflow list. 
+ */ +add_task(async function test_pinning_to_toolbar_when_overflowed() { + let win = await promiseEnableUnifiedExtensions(); + let movedNode; + let extensionWidgetID; + + await withWindowOverflowed(win, { + beforeOverflowed: async extensionIDs => { + // Before we overflow the toolbar, let's move the last item to the addons + // panel. + extensionWidgetID = AppUiTestInternals.getBrowserActionWidgetId( + extensionIDs.at(-1) + ); + + movedNode = CustomizableUI.getWidget(extensionWidgetID).forWindow(win) + .node; + + CustomizableUI.addWidgetToArea( + extensionWidgetID, + CustomizableUI.AREA_ADDONS + ); + }, + whenOverflowed: async (defaultList, unifiedExtensionList, extensionIDs) => { + // Now that the window is overflowed, let's move the widget in the addons + // panel back to the navbar. This should cause the widget to overflow back + // into the addons panel. + CustomizableUI.addWidgetToArea( + extensionWidgetID, + CustomizableUI.AREA_NAVBAR + ); + await TestUtils.waitForCondition(() => { + return movedNode.hasAttribute("overflowedItem"); + }); + Assert.equal( + movedNode.parentElement, + unifiedExtensionList, + "Should have overflowed the extension button to the right list." 
+ ); + }, + }); await BrowserTestUtils.closeWindow(win); }); diff --git a/browser/components/extensions/test/browser/head_unified_extensions.js b/browser/components/extensions/test/browser/head_unified_extensions.js index d2cbe8d8f5a8..efd0c3395774 100644 --- a/browser/components/extensions/test/browser/head_unified_extensions.js +++ b/browser/components/extensions/test/browser/head_unified_extensions.js @@ -6,6 +6,7 @@ /* exported clickUnifiedExtensionsItem, closeExtensionsPanel, createExtensions, + ensureMaximizedWindow, getUnifiedExtensionsItem, openExtensionsPanel, openUnifiedExtensionsContextMenu, @@ -139,3 +140,24 @@ const createExtensions = ( }) ); }; + +/** + * Given a window, this test helper resizes it so that the window takes most of + * the available screen size (unless the window is already maximized). + */ +const ensureMaximizedWindow = async win => { + let resizeDone = Promise.resolve(); + + win.moveTo(0, 0); + + const widthDiff = win.screen.availWidth - win.outerWidth; + const heightDiff = win.screen.availHeight - win.outerHeight; + + if (widthDiff || heightDiff) { + resizeDone = BrowserTestUtils.waitForEvent(win, "resize", false); + win.windowUtils.ensureDirtyRootFrame(); + win.resizeBy(widthDiff, heightDiff); + } + + return resizeDone; +}; diff --git a/browser/components/migration/EdgeProfileMigrator.sys.mjs b/browser/components/migration/EdgeProfileMigrator.sys.mjs index 0644dcf73921..e0d47eddf5ac 100644 --- a/browser/components/migration/EdgeProfileMigrator.sys.mjs +++ b/browser/components/migration/EdgeProfileMigrator.sys.mjs @@ -4,7 +4,6 @@ import { AppConstants } from "resource://gre/modules/AppConstants.sys.mjs"; -const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm"); import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs"; import { @@ -506,7 +505,7 @@ EdgeProfileMigrator.prototype.getLastUsedDate = async function() { if (sourceProfiles !== null || !lazy.gEdgeDatabase) { return Promise.resolve(new Date(0)); } 
- let logFilePath = OS.Path.join( + let logFilePath = PathUtils.join( lazy.gEdgeDatabase.parent.path, "LogFiles", "edb.log" @@ -517,11 +516,9 @@ EdgeProfileMigrator.prototype.getLastUsedDate = async function() { ); let cookiePaths = cookieMigrator._cookiesFolders.map(f => f.path); let datePromises = [logFilePath, dbPath, ...cookiePaths].map(path => { - return OS.File.stat(path) - .catch(() => null) - .then(info => { - return info ? info.lastModificationDate : 0; - }); + return IOUtils.stat(path) + .then(info => info.lastModified) + .catch(() => 0); }); datePromises.push( new Promise(resolve => { diff --git a/browser/components/migration/FirefoxProfileMigrator.sys.mjs b/browser/components/migration/FirefoxProfileMigrator.sys.mjs index d387ba102a3c..7b39d68bc238 100644 --- a/browser/components/migration/FirefoxProfileMigrator.sys.mjs +++ b/browser/components/migration/FirefoxProfileMigrator.sys.mjs @@ -28,7 +28,6 @@ ChromeUtils.defineModuleGetter( "SessionMigration", "resource:///modules/sessionstore/SessionMigration.jsm" ); -ChromeUtils.defineModuleGetter(lazy, "OS", "resource://gre/modules/osfile.jsm"); export function FirefoxProfileMigrator() { this.wrappedJSObject = this; // for testing... @@ -243,32 +242,28 @@ FirefoxProfileMigrator.prototype._getResourcesInternal = function( // if we can, copy it to the new profile and set sync's username pref // (which acts as a de-facto flag to indicate if sync is configured) try { - let oldPath = lazy.OS.Path.join( + let oldPath = PathUtils.join( sourceProfileDir.path, "signedInUser.json" ); - let exists = await lazy.OS.File.exists(oldPath); + let exists = await IOUtils.exists(oldPath); if (exists) { - let raw = await lazy.OS.File.read(oldPath, { encoding: "utf-8" }); - let data = JSON.parse(raw); + let data = await IOUtils.readJSON(oldPath); if (data && data.accountData && data.accountData.email) { let username = data.accountData.email; // copy the file itself. 
- await lazy.OS.File.copy( + await IOUtils.copy( oldPath, - lazy.OS.Path.join(currentProfileDir.path, "signedInUser.json") + PathUtils.join(currentProfileDir.path, "signedInUser.json") ); // Now we need to know whether Sync is actually configured for this // user. The only way we know is by looking at the prefs file from // the old profile. We avoid trying to do a full parse of the prefs // file and even avoid parsing the single string value we care // about. - let prefsPath = lazy.OS.Path.join( - sourceProfileDir.path, - "prefs.js" - ); - if (await lazy.OS.File.exists(oldPath)) { - let rawPrefs = await lazy.OS.File.read(prefsPath, { + let prefsPath = PathUtils.join(sourceProfileDir.path, "prefs.js"); + if (await IOUtils.exists(oldPath)) { + let rawPrefs = await IOUtils.readUTF8(prefsPath, { encoding: "utf-8", }); if (/^user_pref\("services\.sync\.username"/m.test(rawPrefs)) { diff --git a/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx b/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx index e1a562d4d4cb..e1bb0cd50d87 100644 --- a/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx +++ b/browser/components/newtab/content-src/components/DiscoveryStreamComponents/TopSites/TopSites.jsx @@ -51,8 +51,8 @@ export class _TopSites extends React.PureComponent { const link = { customScreenshotURL: topSiteSpoc.image_src, type: "SPOC", - label: topSiteSpoc.sponsor, - title: topSiteSpoc.sponsor, + label: topSiteSpoc.title || topSiteSpoc.sponsor, + title: topSiteSpoc.title || topSiteSpoc.sponsor, url: topSiteSpoc.url, flightId: topSiteSpoc.flight_id, id: topSiteSpoc.id, diff --git a/browser/components/newtab/data/content/activity-stream.bundle.js b/browser/components/newtab/data/content/activity-stream.bundle.js index bd8a0613500a..2df9082ff645 100644 --- a/browser/components/newtab/data/content/activity-stream.bundle.js +++ 
b/browser/components/newtab/data/content/activity-stream.bundle.js @@ -13660,8 +13660,8 @@ class TopSites_TopSites_TopSites extends (external_React_default()).PureComponen const link = { customScreenshotURL: topSiteSpoc.image_src, type: "SPOC", - label: topSiteSpoc.sponsor, - title: topSiteSpoc.sponsor, + label: topSiteSpoc.title || topSiteSpoc.sponsor, + title: topSiteSpoc.title || topSiteSpoc.sponsor, url: topSiteSpoc.url, flightId: topSiteSpoc.flight_id, id: topSiteSpoc.id, diff --git a/browser/components/newtab/lib/TelemetryFeed.jsm b/browser/components/newtab/lib/TelemetryFeed.jsm index 39daa4af5847..20206e4ac5cf 100644 --- a/browser/components/newtab/lib/TelemetryFeed.jsm +++ b/browser/components/newtab/lib/TelemetryFeed.jsm @@ -1059,6 +1059,9 @@ class TelemetryFeed { } Glean.newtab.newtabCategory.set(newtabCategory); Glean.newtab.homepageCategory.set(homePageCategory); + if (lazy.NimbusFeatures.glean.getVariable("newtabPingEnabled") ?? true) { + GleanPings.newtab.submit("component_init"); + } } } @@ -1275,12 +1278,6 @@ class TelemetryFeed { } setNewtabPrefMetrics(); Glean.pocket.isSignedIn.set(lazy.pktApi.isUserLoggedIn()); - if ( - this.telemetryEnabled && - (lazy.NimbusFeatures.glean.getVariable("newtabPingEnabled") ?? true) - ) { - GleanPings.newtab.submit("component_init"); - } } uninit() { diff --git a/browser/components/newtab/pings.yaml b/browser/components/newtab/pings.yaml index 80b690097e1b..d9cf6151a492 100644 --- a/browser/components/newtab/pings.yaml +++ b/browser/components/newtab/pings.yaml @@ -15,7 +15,8 @@ newtab: The newtab visit ended. Could be by navigation, being closed, etc. component_init: | - The newtab component init'd. + The newtab component init'd, + and the newtab and homepage settings have been categorized. This is mostly to ensure we hear at least once from clients configured to not show a newtab UI. 
include_client_id: true diff --git a/browser/components/newtab/test/browser/browser_newtab_ping.js b/browser/components/newtab/test/browser/browser_newtab_ping.js index 039bcb7a514b..f8d642a2a8c2 100644 --- a/browser/components/newtab/test/browser/browser_newtab_ping.js +++ b/browser/components/newtab/test/browser/browser_newtab_ping.js @@ -199,7 +199,7 @@ add_task(async function test_newtab_doesnt_send_nimbus() { await SpecialPowers.popPrefEnv(); }); -add_task(async function test_newtab_init_sends_ping() { +add_task(async function test_newtab_categorization_sends_ping() { await SpecialPowers.pushPrefEnv({ set: [["browser.newtabpage.activity-stream.telemetry", true]], }); @@ -214,7 +214,7 @@ add_task(async function test_newtab_init_sends_ping() { pingSent = true; Assert.equal(reason, "component_init"); }); - TelemetryFeed.init(); // INIT action doesn't happen by default. + await TelemetryFeed.sendPageTakeoverData(); Assert.ok(pingSent, "ping was sent"); await SpecialPowers.popPrefEnv(); diff --git a/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js b/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js index 544b784ac1d6..aa381cbbdd99 100644 --- a/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js +++ b/browser/components/newtab/test/unit/lib/TelemetryFeed.test.js @@ -181,13 +181,6 @@ describe("TelemetryFeed", () => { assert.calledWithExactly(stub, "unload", instance.handleEvent); assert.calledWithExactly(stub, "TabPinned", instance.handleEvent); }); - it("should send a 'newtab' ping", () => { - instance._prefs.set(TELEMETRY_PREF, true); - sandbox.spy(GleanPings.newtab, "submit"); - instance.init(); - assert.calledOnce(GleanPings.newtab.submit); - assert.calledWithExactly(GleanPings.newtab.submit, "component_init"); - }); describe("telemetry pref changes from false to true", () => { beforeEach(() => { FakePrefs.prototype.prefs = {}; @@ -1725,6 +1718,13 @@ describe("TelemetryFeed", () => { 
assert.calledOnce(Glean.newtab.homepageCategory.set); assert.calledWith(Glean.newtab.homepageCategory.set, "disabled"); }); + it("should send a 'newtab' ping", async () => { + instance._prefs.set(TELEMETRY_PREF, true); + sandbox.spy(GleanPings.newtab, "submit"); + await instance.sendPageTakeoverData(); + assert.calledOnce(GleanPings.newtab.submit); + assert.calledWithExactly(GleanPings.newtab.submit, "component_init"); + }); }); describe("#sendDiscoveryStreamImpressions", () => { it("should not send impression pings if there is no impression data", () => { diff --git a/browser/locales/l10n-changesets.json b/browser/locales/l10n-changesets.json index 79b5ebc65c69..5f0b604d5715 100644 --- a/browser/locales/l10n-changesets.json +++ b/browser/locales/l10n-changesets.json @@ -483,7 +483,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "9884a10915a5d27531be1848bb3056bfb541661e" + "revision": "51b4794b186d52193f1d647e7399e2557c113dae" }, "es-CL": { "pin": false, @@ -645,7 +645,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "e270b824dd33e62364be110b6d15723af68e1b1e" + "revision": "cae65157bbcebc26625586b1920d1f34d9683653" }, "fy-NL": { "pin": false, @@ -843,7 +843,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "268da803b62cbe9b579100ade7c3735dcf42baef" + "revision": "9432435c581d8839ad71ce11acf5098090c6e9e5" }, "hy-AM": { "pin": false, @@ -933,7 +933,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "4d395af1ad99bbef4e8e631e415d3ece59910cb5" + "revision": "5f4dedafbdf70c69d98352064b3a3cf4c58a94a9" }, "it": { "pin": false, @@ -1425,7 +1425,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "e2004dd62601de421adba549123e28cf12667900" + "revision": "ffb56c5ea7907321595a1308c6232a26f4660a40" }, "rm": { "pin": false, @@ -1803,7 +1803,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "8b56fdc7d226a359747095054358cc0808438b8e" + "revision": 
"8d24e72d9f81161e2207a68f56cdc169b8c8110c" }, "trs": { "pin": false, @@ -1947,7 +1947,7 @@ "win64-aarch64-devedition", "win64-devedition" ], - "revision": "edededa35a86a31ec0472037da9581f588eedf47" + "revision": "574722005ef12d41efe4b2261e91fb21974fee7c" }, "zh-TW": { "pin": false, diff --git a/browser/themes/shared/addons/unified-extensions.css b/browser/themes/shared/addons/unified-extensions.css index a3cfec04f066..108cd9dc9499 100644 --- a/browser/themes/shared/addons/unified-extensions.css +++ b/browser/themes/shared/addons/unified-extensions.css @@ -5,45 +5,79 @@ :root { /* uei = unified extensions item */ --uei-icon-size: 32px; - --uei-dot-position: calc(var(--uei-icon-size) / 2 + var(--arrowpanel-menuitem-margin-inline) + var(--arrowpanel-menuitem-padding-inline) - 4px); + --uei-attention-dot-size: 8px; --uei-button-hover-bgcolor: var(--panel-item-hover-bgcolor); --uei-button-hover-color: inherit; --uei-button-active-bgcolor: var(--panel-item-active-bgcolor); --uei-button-active-color: inherit; + --uei-button-attention-dot-color: var(--tab-attention-icon-color); } :root[uidensity="compact"] { --uei-icon-size: 24px; } -/* Align extensions rendered with custom elements. */ -unified-extensions-item { - align-items: center; - display: flex; +#unified-extensions-panel { + --uei-dot-horizontal-position-in-panel: calc(var(--uei-icon-size) / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2); + --uei-dot-vertical-position-in-panel: max(0px, calc(var(--arrowpanel-menuitem-padding-block) / 2 - var(--uei-attention-dot-size) / 2)); } -.unified-extensions-item { +/* Align extensions rendered with custom elements. */ +unified-extensions-item { + display: flex; + align-items: center; +} + +#unified-extensions-panel .unified-extensions-item { + /* Have some spacing between items in the panel; mainly useful for when HCM is enabled. 
*/ padding-block: 2px; } -/* This is based on the attention UI defined in: +/* The "attention UI" for the unified extensions is based on: * https://searchfox.org/mozilla-central/rev/560b7b1b17/browser/themes/shared/tabs.css#624 */ -#unified-extensions-button[attention], -.unified-extensions-item[attention] { - background-image: radial-gradient(circle, var(--tab-attention-icon-color), var(--tab-attention-icon-color) 2px, transparent 2px); - background-position: center bottom max(0px, calc(var(--arrowpanel-menuitem-padding-block) - 4px)); - background-size: 8px 8px; + +/* On the main unified extensions button, we draw the attention on the icon element. */ +#unified-extensions-button[attention] > .toolbarbutton-icon, +/* For extension widgets placed in a toolbar, we use the stack element (containing the icon) + * of the action button to draw the attention dot. + * Otherwise (in the extensions panel), we use the action button itself. */ +toolbar .unified-extensions-item[attention] > .unified-extensions-item-action > .toolbarbutton-badge-stack, +#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action, +.widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action { + background-image: radial-gradient(circle, var(--uei-button-attention-dot-color), var(--uei-button-attention-dot-color) 2px, transparent 2px); + background-size: var(--uei-attention-dot-size) var(--uei-attention-dot-size); background-repeat: no-repeat; } -/* Adjust attention dots for the custom elements. */ -.unified-extensions-list > unified-extensions-item[attention] { - background-position: left var(--uei-dot-position) bottom 0px; +/* Adjust attention dots position in the toolbar. 
*/ +#unified-extensions-button[attention] > .toolbarbutton-icon, +toolbar .unified-extensions-item[attention] > .unified-extensions-item-action > .toolbarbutton-badge-stack { + background-position: center bottom calc(var(--toolbarbutton-inner-padding) / 2 - var(--uei-attention-dot-size) / 2); } -/* Adjust attention dots for the custom elements. */ -.unified-extensions-list > unified-extensions-item[attention]:-moz-locale-dir(rtl) { - background-position-x: right var(--uei-dot-position); +/* Adjust attention dots position in the unified extensions panel. */ +#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action { + background-position: left var(--uei-dot-horizontal-position-in-panel) bottom var(--uei-dot-vertical-position-in-panel); +} + +/* Adjust attention dots position in the unified extensions panel for RTL. */ +#unified-extensions-panel .unified-extensions-item[attention] > .unified-extensions-item-action:-moz-locale-dir(rtl) { + background-position-x: right var(--uei-dot-horizontal-position-in-panel); +} + +/* Adjust attention dots position in the overflow panel. */ +.widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action { + background-position-x: left calc(16px / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2); + background-position-y: bottom calc(var(--arrowpanel-menuitem-padding-block) / 2 - var(--uei-attention-dot-size) / 2); +} + +:root[uidensity="compact"] .widget-overflow-list .unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action { + background-position-y: bottom -2px; +} + +/* Adjust attention dots position in the overflow panel for RTL. 
*/ +.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"] > .unified-extensions-item-action:-moz-locale-dir(rtl) { + background-position-x: right calc(16px / 2 + var(--arrowpanel-menuitem-padding-inline) - var(--uei-attention-dot-size) / 2); } .unified-extensions-item-action { @@ -165,25 +199,9 @@ toolbaritem.unified-extensions-item[unified-extensions="true"] .unified-extensio display: block; } -:is(#unified-extensions-panel, .widget-overflow-list) toolbaritem.unified-extensions-item[attention] { - background-position: left calc(12px + var(--arrowpanel-menuitem-margin-inline)) bottom; -} - -:is(#unified-extensions-panel, .widget-overflow-list) toolbaritem.unified-extensions-item[attention]:-moz-locale-dir(rtl) { - background-position-x: right calc(12px + var(--arrowpanel-menuitem-margin-inline)); -} - -.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"] { - background-position-x: left 12px; -} - -.widget-overflow-list toolbaritem.unified-extensions-item[attention][unified-extensions="false"]:-moz-locale-dir(rtl) { - background-position-x: right 12px; -} - @media (prefers-contrast) { - .unified-extensions-item[attention] { - background-image: radial-gradient(circle, ButtonText, ButtonText 2px, transparent 2px); + :root { + --uei-button-attention-dot-color: ButtonText; } .unified-extensions-item-action:not([disabled]).subviewbutton, diff --git a/build/RunCbindgen.py b/build/RunCbindgen.py index 4c5bc8ce6edb..835bea5621c6 100644 --- a/build/RunCbindgen.py +++ b/build/RunCbindgen.py @@ -3,12 +3,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import print_function + +import os +import subprocess + import buildconfig import mozpack.path as mozpath -import os -import six -import subprocess import pytoml +import six # Try to read the package name or otherwise assume same name as the crate path. 
diff --git a/build/build-clang/build-clang.py b/build/build-clang/build-clang.py index ec22b3057856..0e856e9574fb 100755 --- a/build/build-clang/build-clang.py +++ b/build/build-clang/build-clang.py @@ -6,21 +6,20 @@ # Only necessary for flake8 to be happy... from __future__ import print_function -import os -import os.path -import shutil -import subprocess -import platform -import json import argparse +import errno import fnmatch import glob -import errno +import json +import os +import os.path +import platform import re +import shutil +import subprocess import sys import tarfile from contextlib import contextmanager - from shutil import which import zstandard diff --git a/build/build-clang/clang-tidy-linux64.json b/build/build-clang/clang-tidy-linux64.json index e654aeef92e2..53dee2120d02 100644 --- a/build/build-clang/clang-tidy-linux64.json +++ b/build/build-clang/clang-tidy-linux64.json @@ -5,6 +5,7 @@ "cxx": "{MOZ_FETCHES_DIR}/clang/bin/clang++", "as": "{MOZ_FETCHES_DIR}/clang/bin/clang", "patches": [ + "clang_include_cleaner.patch", "clang-tidy-ci.patch" ] } diff --git a/build/build-clang/clang-tidy-macosx64.json b/build/build-clang/clang-tidy-macosx64.json index a0d5bfa9b537..ac21a121fc43 100644 --- a/build/build-clang/clang-tidy-macosx64.json +++ b/build/build-clang/clang-tidy-macosx64.json @@ -10,6 +10,7 @@ "libtool": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-libtool", "ld": "{MOZ_FETCHES_DIR}/clang/bin/clang", "patches": [ + "clang_include_cleaner.patch", "clang-tidy-ci.patch" ] } diff --git a/build/build-clang/clang-tidy-win64.json b/build/build-clang/clang-tidy-win64.json index 9951d45fc433..38a017bc24c2 100644 --- a/build/build-clang/clang-tidy-win64.json +++ b/build/build-clang/clang-tidy-win64.json @@ -5,6 +5,7 @@ "cxx": "cl.exe", "ml": "ml64.exe", "patches": [ + "clang_include_cleaner.patch", "clang-tidy-ci.patch" ] } diff --git a/build/build-clang/clang_include_cleaner.patch b/build/build-clang/clang_include_cleaner.patch new file mode 
100644 index 000000000000..0bc3b5b019f8 --- /dev/null +++ b/build/build-clang/clang_include_cleaner.patch @@ -0,0 +1,2235 @@ +Ported from clangd, this still can be improved over time, but it can be landed. +This was based on the work from https://bit.ly/3TkV2N1 + + The utility makes the assumption that all header are self contained! + It only checkes Decls from the main translation file, where SourceLocarion is the passed cpp file. + It builds a list with all of the includes from the translation unit. + It matches all of the Decls from the main translation units with definitions from the included header files and builds a list with used header files. + All of the includes that are not part of the matched used header files are considered to be unused. Of course this is correct if the first assumption if followed by the coding guide, where all of the header are self contained. Since the mozilla code base doesn't follow this approach false positives might appear where the is the following situation: + +FOO.cpp + +#include +#Include + +If header A defines a symbol that is used by header B and B doesn't include A nor +it has symbols defined that are used by FOO.cpp then B it will be marked as potentially to be removed +by the tool. +This is the limitation determined by header that are not self contained. + +The limitation presented above can be fixed in the future with extra work, but it's very time expensive +during the runtime of the checker. 
+ +diff --git a/clang-tools-extra/CMakeLists.txt b/clang-tools-extra/CMakeLists.txt +index 6a3f741721ee..ff17c8e8472a 100644 +--- a/clang-tools-extra/CMakeLists.txt ++++ b/clang-tools-extra/CMakeLists.txt +@@ -16,6 +16,7 @@ endif() + add_subdirectory(clang-apply-replacements) + add_subdirectory(clang-reorder-fields) + add_subdirectory(modularize) ++add_subdirectory(include-cleaner) + add_subdirectory(clang-tidy) + + add_subdirectory(clang-change-namespace) +@@ -23,7 +24,6 @@ add_subdirectory(clang-doc) + add_subdirectory(clang-include-fixer) + add_subdirectory(clang-move) + add_subdirectory(clang-query) +-add_subdirectory(include-cleaner) + add_subdirectory(pp-trace) + add_subdirectory(pseudo) + add_subdirectory(tool-template) +diff --git a/clang-tools-extra/clang-tidy/CMakeLists.txt b/clang-tools-extra/clang-tidy/CMakeLists.txt +index 8a953eeea275..f2edc509acaf 100644 +--- a/clang-tools-extra/clang-tidy/CMakeLists.txt ++++ b/clang-tools-extra/clang-tidy/CMakeLists.txt +@@ -50,6 +50,7 @@ endif() + + # Checks. + # If you add a check, also add it to ClangTidyForceLinker.h in this directory. ++add_subdirectory(alpha) + add_subdirectory(android) + add_subdirectory(abseil) + add_subdirectory(altera) +@@ -77,6 +78,7 @@ add_subdirectory(portability) + add_subdirectory(readability) + add_subdirectory(zircon) + set(ALL_CLANG_TIDY_CHECKS ++ clangTidyAlphaModule + clangTidyAndroidModule + clangTidyAbseilModule + clangTidyAlteraModule +diff --git a/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h b/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h +index 2691d90fa521..2fa064cff22a 100644 +--- a/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h ++++ b/clang-tools-extra/clang-tidy/ClangTidyForceLinker.h +@@ -20,6 +20,11 @@ extern volatile int AbseilModuleAnchorSource; + static int LLVM_ATTRIBUTE_UNUSED AbseilModuleAnchorDestination = + AbseilModuleAnchorSource; + ++// This anchor is used to force the linker to link the AlphaModule. 
++extern volatile int AlphaModuleAnchorSource; ++static int LLVM_ATTRIBUTE_UNUSED AlphaModuleAnchorDestination = ++ AlphaModuleAnchorSource; ++ + // This anchor is used to force the linker to link the AlteraModule. + extern volatile int AlteraModuleAnchorSource; + static int LLVM_ATTRIBUTE_UNUSED AlteraModuleAnchorDestination = +diff --git a/clang-tools-extra/clang-tidy/alpha/AlphaTidyModule.cpp b/clang-tools-extra/clang-tidy/alpha/AlphaTidyModule.cpp +new file mode 100644 +index 000000000000..b598a36cebf7 +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/AlphaTidyModule.cpp +@@ -0,0 +1,38 @@ ++//===--- AlphaTidyModule.cpp - clang-tidy ----------------------------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "../ClangTidy.h" ++#include "../ClangTidyModule.h" ++#include "../ClangTidyModuleRegistry.h" ++#include "UnusedIncludesCheck.h" ++ ++ ++namespace clang { ++namespace tidy { ++namespace alpha { ++ ++class AlphaModule : public ClangTidyModule { ++public: ++ void addCheckFactories(ClangTidyCheckFactories &CheckFactories) override { ++ ++ CheckFactories.registerCheck("alpha-unused-includes"); ++ } ++}; ++ ++} // namespace alpha ++ ++// Register the AlphaTidyModule using this statically initialized variable. ++static ClangTidyModuleRegistry::Add ++ X("alpha-module", "Adds alpha lint checks."); ++ ++// This anchor is used to force the linker to link in the generated object file ++// and thus register the AlphaModule. 
++volatile int AlphaModuleAnchorSource = 0; ++ ++} // namespace tidy ++} // namespace clang +diff --git a/clang-tools-extra/clang-tidy/alpha/CMakeLists.txt b/clang-tools-extra/clang-tidy/alpha/CMakeLists.txt +new file mode 100644 +index 000000000000..b50576868645 +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/CMakeLists.txt +@@ -0,0 +1,32 @@ ++include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../../include-cleaner/include) ++ ++set(LLVM_LINK_COMPONENTS ++ Support ++ ) ++ ++add_clang_library(clangTidyAlphaModule ++ ++ AlphaTidyModule.cpp ++ UnusedIncludesCheck.cpp ++ ++ LINK_LIBS ++ clangAnalysis ++ clangIncludeCleaner ++ clangTidy ++ clangTidyUtils ++ ++ DEPENDS ++ omp_gen ++ ) ++ ++clang_target_link_libraries(clangTidyAlphaModule ++ PRIVATE ++ clangAnalysis ++ clangAST ++ clangASTMatchers ++ clangBasic ++ clangIncludeCleaner ++ clangLex ++ clangSerialization ++ clangTooling ++ ) +diff --git a/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.cpp b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.cpp +new file mode 100644 +index 000000000000..0d6a6bf7a367 +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.cpp +@@ -0,0 +1,76 @@ ++//===--- UnusedIncludesCheck.cpp - clang-tidy------------------------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "UnusedIncludesCheck.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Hooks.h" ++#include "clang/Basic/Diagnostic.h" ++#include "clang/Basic/LLVM.h" ++#include "clang/Basic/SourceLocation.h" ++#include "clang/Lex/Preprocessor.h" ++ ++using namespace clang::ast_matchers; ++ ++namespace clang { ++namespace tidy { ++namespace alpha { ++ ++UnusedIncludesCheck::UnusedIncludesCheck(StringRef Name, ++ ClangTidyContext *Context) ++ : ClangTidyCheck(Name, Context) {} ++ ++void UnusedIncludesCheck::registerPPCallbacks(const SourceManager &SM, ++ Preprocessor *PP, ++ Preprocessor *) { ++ Ctx = std::make_unique( ++ include_cleaner::Policy{}, *PP); ++ RecordedPP = std::make_unique(); ++ PP->addPPCallbacks(RecordedPP->record(*Ctx)); ++} ++ ++void UnusedIncludesCheck::registerMatchers(MatchFinder *Finder) { ++ Finder->addMatcher( ++ translationUnitDecl(forEach(decl(isExpansionInMainFile()).bind("top"))), ++ this); ++} ++ ++void UnusedIncludesCheck::check(const MatchFinder::MatchResult &Result) { ++ Top.push_back(const_cast(Result.Nodes.getNodeAs("top"))); ++} ++ ++void UnusedIncludesCheck::onEndOfTranslationUnit() { ++ llvm::DenseSet Used; ++ llvm::DenseSet Seen; ++ include_cleaner::walkUsed( ++ *Ctx, Top, RecordedPP->MacroReferences, ++ [&](SourceLocation Loc, include_cleaner::Symbol Sym, ++ llvm::ArrayRef Headers) { ++ for (const auto &Header : Headers) { ++ if (!Seen.insert(Header).second) ++ continue; ++ const auto& HeadersToInsert = RecordedPP->Includes.match(Header); ++ Used.insert(HeadersToInsert.begin(), HeadersToInsert.end()); ++ } ++ }); ++ for (const auto &I : RecordedPP->Includes.all()) { ++ if (!Used.contains(&I)) { ++ const auto &SM = Ctx->sourceManager(); ++ FileID FID = SM.getFileID(I.Location); ++ diag(I.Location, "there is a high probability that include is unused") ++ 
<< FixItHint::CreateRemoval(CharSourceRange::getCharRange( ++ SM.translateLineCol(FID, I.Line, 1), ++ SM.translateLineCol(FID, I.Line + 1, 1))); ++ } ++ } ++} ++ ++UnusedIncludesCheck::~UnusedIncludesCheck() = default; ++ ++} // namespace alpha ++} // namespace tidy ++} // namespace clang +diff --git a/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.h b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.h +new file mode 100644 +index 000000000000..f67c46e6cc3e +--- /dev/null ++++ b/clang-tools-extra/clang-tidy/alpha/UnusedIncludesCheck.h +@@ -0,0 +1,42 @@ ++//===--- UnusedIncludesCheck.h - clang-tidy----------------------*- C++ -*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MISC_UNUSED_INCLUDES_H ++#define LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MISC_UNUSED_INCLUDES_H ++ ++#include "../ClangTidyCheck.h" ++ ++namespace clang { ++namespace include_cleaner { ++class AnalysisContext; ++struct RecordedPP; ++} // namespace include_cleaner ++namespace tidy { ++namespace alpha { ++ ++class UnusedIncludesCheck : public ClangTidyCheck { ++public: ++ UnusedIncludesCheck(StringRef Name, ClangTidyContext *Context); ++ ~UnusedIncludesCheck(); ++ void registerPPCallbacks(const SourceManager &SM, Preprocessor *, ++ Preprocessor *) override; ++ void registerMatchers(ast_matchers::MatchFinder *Finder) override; ++ void check(const ast_matchers::MatchFinder::MatchResult &Result) override; ++ void onEndOfTranslationUnit() override; ++ ++private: ++ std::unique_ptr Ctx; ++ std::unique_ptr RecordedPP; ++ std::vector Top; ++}; ++ ++} // namespace misc ++} // namespace tidy ++} // namespace clang ++ ++#endif // LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MISC_UNUSED_INCLUDES_H +diff --git 
a/clang-tools-extra/clangd/CMakeLists.txt b/clang-tools-extra/clangd/CMakeLists.txt +index de8f087a52a5..14f605b1efaf 100644 +--- a/clang-tools-extra/clangd/CMakeLists.txt ++++ b/clang-tools-extra/clangd/CMakeLists.txt +@@ -2,6 +2,8 @@ + include_directories(${CMAKE_CURRENT_SOURCE_DIR}) + include_directories(${CMAKE_CURRENT_BINARY_DIR}) + ++include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include-cleaner/include) ++ + add_subdirectory(support) + + # Configure the Features.inc file. +@@ -153,6 +155,7 @@ clang_target_link_libraries(clangDaemon + clangDriver + clangFormat + clangFrontend ++ clangIncludeCleaner + clangIndex + clangLex + clangSema +diff --git a/clang-tools-extra/clangd/Hover.cpp b/clang-tools-extra/clangd/Hover.cpp +index 26eb2574195d..a3cbc8894f6d 100644 +--- a/clang-tools-extra/clangd/Hover.cpp ++++ b/clang-tools-extra/clangd/Hover.cpp +@@ -12,9 +12,11 @@ + #include "CodeCompletionStrings.h" + #include "Config.h" + #include "FindTarget.h" ++#include "IncludeCleaner.h" + #include "ParsedAST.h" + #include "Selection.h" + #include "SourceCode.h" ++#include "clang-include-cleaner/Analysis.h" + #include "index/SymbolCollector.h" + #include "support/Markup.h" + #include "clang/AST/ASTContext.h" +@@ -985,6 +987,23 @@ llvm::Optional getHover(ParsedAST &AST, Position Pos, + // FIXME: We don't have a fitting value for Kind. + HI.Definition = + URIForFile::canonicalize(Inc.Resolved, *MainFilePath).file().str(); ++ ++ // FIXME: share code, macros too... 
++ include_cleaner::AnalysisContext Ctx(include_cleaner::Policy{}, ++ AST.getPreprocessor()); ++ std::vector Provides; ++ include_cleaner::walkUsed( ++ Ctx, AST.getLocalTopLevelDecls(), /*Macros=*/{}, ++ [&](SourceLocation Loc, include_cleaner::Symbol S, ++ llvm::ArrayRef Headers) { ++ for (const auto &H : Headers) ++ if (match(H, Inc, AST.getIncludeStructure())) ++ Provides.push_back(S.name()); ++ }); ++ llvm::sort(Provides); ++ Provides.erase(std::unique(Provides.begin(), Provides.end()), ++ Provides.end()); ++ HI.Documentation = "provides " + llvm::join(Provides, ", "); + HI.DefinitionLanguage = ""; + return HI; + } +diff --git a/clang-tools-extra/clangd/IncludeCleaner.cpp b/clang-tools-extra/clangd/IncludeCleaner.cpp +index e5b5187e030c..3c0ba06316ac 100644 +--- a/clang-tools-extra/clangd/IncludeCleaner.cpp ++++ b/clang-tools-extra/clangd/IncludeCleaner.cpp +@@ -12,6 +12,8 @@ + #include "ParsedAST.h" + #include "Protocol.h" + #include "SourceCode.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Types.h" + #include "index/CanonicalIncludes.h" + #include "support/Logger.h" + #include "support/Trace.h" +@@ -40,181 +42,6 @@ void setIncludeCleanerAnalyzesStdlib(bool B) { AnalyzeStdlib = B; } + + namespace { + +-/// Crawler traverses the AST and feeds in the locations of (sometimes +-/// implicitly) used symbols into \p Result. 
+-class ReferencedLocationCrawler +- : public RecursiveASTVisitor { +-public: +- ReferencedLocationCrawler(ReferencedLocations &Result, +- const SourceManager &SM) +- : Result(Result), SM(SM) {} +- +- bool VisitDeclRefExpr(DeclRefExpr *DRE) { +- add(DRE->getDecl()); +- add(DRE->getFoundDecl()); +- return true; +- } +- +- bool VisitMemberExpr(MemberExpr *ME) { +- add(ME->getMemberDecl()); +- add(ME->getFoundDecl().getDecl()); +- return true; +- } +- +- bool VisitTagType(TagType *TT) { +- add(TT->getDecl()); +- return true; +- } +- +- bool VisitFunctionDecl(FunctionDecl *FD) { +- // Function definition will require redeclarations to be included. +- if (FD->isThisDeclarationADefinition()) +- add(FD); +- return true; +- } +- +- bool VisitCXXConstructExpr(CXXConstructExpr *CCE) { +- add(CCE->getConstructor()); +- return true; +- } +- +- bool VisitTemplateSpecializationType(TemplateSpecializationType *TST) { +- // Using templateName case is handled by the override TraverseTemplateName. +- if (TST->getTemplateName().getKind() == TemplateName::UsingTemplate) +- return true; +- add(TST->getAsCXXRecordDecl()); // Specialization +- return true; +- } +- +- // There is no VisitTemplateName in RAV, thus we override the Traverse version +- // to handle the Using TemplateName case. +- bool TraverseTemplateName(TemplateName TN) { +- VisitTemplateName(TN); +- return Base::TraverseTemplateName(TN); +- } +- // A pseudo VisitTemplateName, dispatched by the above TraverseTemplateName! +- bool VisitTemplateName(TemplateName TN) { +- if (const auto *USD = TN.getAsUsingShadowDecl()) { +- add(USD); +- return true; +- } +- add(TN.getAsTemplateDecl()); // Primary template. +- return true; +- } +- +- bool VisitUsingType(UsingType *UT) { +- add(UT->getFoundDecl()); +- return true; +- } +- +- bool VisitTypedefType(TypedefType *TT) { +- add(TT->getDecl()); +- return true; +- } +- +- // Consider types of any subexpression used, even if the type is not named. 
+- // This is helpful in getFoo().bar(), where Foo must be complete. +- // FIXME(kirillbobyrev): Should we tweak this? It may not be desirable to +- // consider types "used" when they are not directly spelled in code. +- bool VisitExpr(Expr *E) { +- TraverseType(E->getType()); +- return true; +- } +- +- bool TraverseType(QualType T) { +- if (isNew(T.getTypePtrOrNull())) // don't care about quals +- Base::TraverseType(T); +- return true; +- } +- +- bool VisitUsingDecl(UsingDecl *D) { +- for (const auto *Shadow : D->shadows()) +- add(Shadow->getTargetDecl()); +- return true; +- } +- +- // Enums may be usefully forward-declared as *complete* types by specifying +- // an underlying type. In this case, the definition should see the declaration +- // so they can be checked for compatibility. +- bool VisitEnumDecl(EnumDecl *D) { +- if (D->isThisDeclarationADefinition() && D->getIntegerTypeSourceInfo()) +- add(D); +- return true; +- } +- +- // When the overload is not resolved yet, mark all candidates as used. +- bool VisitOverloadExpr(OverloadExpr *E) { +- for (const auto *ResolutionDecl : E->decls()) +- add(ResolutionDecl); +- return true; +- } +- +-private: +- using Base = RecursiveASTVisitor; +- +- void add(const Decl *D) { +- if (!D || !isNew(D->getCanonicalDecl())) +- return; +- if (auto SS = StdRecognizer(D)) { +- Result.Stdlib.insert(*SS); +- return; +- } +- // Special case RecordDecls, as it is common for them to be forward +- // declared multiple times. The most common cases are: +- // - Definition available in TU, only mark that one as usage. The rest is +- // likely to be unnecessary. This might result in false positives when an +- // internal definition is visible. +- // - There's a forward declaration in the main file, no need for other +- // redecls. 
+- if (const auto *RD = llvm::dyn_cast(D)) { +- if (const auto *Definition = RD->getDefinition()) { +- Result.User.insert(Definition->getLocation()); +- return; +- } +- if (SM.isInMainFile(RD->getMostRecentDecl()->getLocation())) +- return; +- } +- for (const Decl *Redecl : D->redecls()) +- Result.User.insert(Redecl->getLocation()); +- } +- +- bool isNew(const void *P) { return P && Visited.insert(P).second; } +- +- ReferencedLocations &Result; +- llvm::DenseSet Visited; +- const SourceManager &SM; +- tooling::stdlib::Recognizer StdRecognizer; +-}; +- +-// Given a set of referenced FileIDs, determines all the potentially-referenced +-// files and macros by traversing expansion/spelling locations of macro IDs. +-// This is used to map the referenced SourceLocations onto real files. +-struct ReferencedFilesBuilder { +- ReferencedFilesBuilder(const SourceManager &SM) : SM(SM) {} +- llvm::DenseSet Files; +- llvm::DenseSet Macros; +- const SourceManager &SM; +- +- void add(SourceLocation Loc) { add(SM.getFileID(Loc), Loc); } +- +- void add(FileID FID, SourceLocation Loc) { +- if (FID.isInvalid()) +- return; +- assert(SM.isInFileID(Loc, FID)); +- if (Loc.isFileID()) { +- Files.insert(FID); +- return; +- } +- // Don't process the same macro FID twice. +- if (!Macros.insert(FID).second) +- return; +- const auto &Exp = SM.getSLocEntry(FID).getExpansion(); +- add(Exp.getSpellingLoc()); +- add(Exp.getExpansionLocStart()); +- add(Exp.getExpansionLocEnd()); +- } +-}; +- + // Returns the range starting at '#' and ending at EOL. Escaped newlines are not + // handled. + clangd::Range getDiagnosticRange(llvm::StringRef Code, unsigned HashOffset) { +@@ -231,10 +58,10 @@ clangd::Range getDiagnosticRange(llvm::StringRef Code, unsigned HashOffset) { + + // Finds locations of macros referenced from within the main file. That includes + // references that were not yet expanded, e.g `BAR` in `#define FOO BAR`. 
+-void findReferencedMacros(const SourceManager &SM, Preprocessor &PP, +- const syntax::TokenBuffer *Tokens, +- ReferencedLocations &Result) { ++std::vector ++findReferencedMacros(ParsedAST &AST, include_cleaner::AnalysisContext &Ctx) { + trace::Span Tracer("IncludeCleaner::findReferencedMacros"); ++ std::vector Result; + // FIXME(kirillbobyrev): The macros from the main file are collected in + // ParsedAST's MainFileMacros. However, we can't use it here because it + // doesn't handle macro references that were not expanded, e.g. in macro +@@ -244,15 +71,19 @@ void findReferencedMacros(const SourceManager &SM, Preprocessor &PP, + // this mechanism (as opposed to iterating through all tokens) will improve + // the performance of findReferencedMacros and also improve other features + // relying on MainFileMacros. +- for (const syntax::Token &Tok : Tokens->spelledTokens(SM.getMainFileID())) { +- auto Macro = locateMacroAt(Tok, PP); ++ for (const syntax::Token &Tok : ++ AST.getTokens().spelledTokens(AST.getSourceManager().getMainFileID())) { ++ auto Macro = locateMacroAt(Tok, AST.getPreprocessor()); + if (!Macro) + continue; + auto Loc = Macro->Info->getDefinitionLoc(); + if (Loc.isValid()) +- Result.User.insert(Loc); +- // FIXME: support stdlib macros ++ Result.push_back(include_cleaner::SymbolReference{ ++ Tok.location(), ++ Ctx.macro(AST.getPreprocessor().getIdentifierInfo(Macro->Name), ++ Loc)}); + } ++ return Result; + } + + static bool mayConsiderUnused(const Inclusion &Inc, ParsedAST &AST, +@@ -296,110 +127,8 @@ static bool mayConsiderUnused(const Inclusion &Inc, ParsedAST &AST, + } + return true; + } +- +-// In case symbols are coming from non self-contained header, we need to find +-// its first includer that is self-contained. This is the header users can +-// include, so it will be responsible for bringing the symbols from given +-// header into the scope. 
+-FileID headerResponsible(FileID ID, const SourceManager &SM, +- const IncludeStructure &Includes) { +- // Unroll the chain of non self-contained headers until we find the one that +- // can be included. +- for (const FileEntry *FE = SM.getFileEntryForID(ID); ID != SM.getMainFileID(); +- FE = SM.getFileEntryForID(ID)) { +- // If FE is nullptr, we consider it to be the responsible header. +- if (!FE) +- break; +- auto HID = Includes.getID(FE); +- assert(HID && "We're iterating over headers already existing in " +- "IncludeStructure"); +- if (Includes.isSelfContained(*HID)) +- break; +- // The header is not self-contained: put the responsibility for its symbols +- // on its includer. +- ID = SM.getFileID(SM.getIncludeLoc(ID)); +- } +- return ID; +-} +- + } // namespace + +-ReferencedLocations findReferencedLocations(ASTContext &Ctx, Preprocessor &PP, +- const syntax::TokenBuffer *Tokens) { +- trace::Span Tracer("IncludeCleaner::findReferencedLocations"); +- ReferencedLocations Result; +- const auto &SM = Ctx.getSourceManager(); +- ReferencedLocationCrawler Crawler(Result, SM); +- Crawler.TraverseAST(Ctx); +- if (Tokens) +- findReferencedMacros(SM, PP, Tokens, Result); +- return Result; +-} +- +-ReferencedLocations findReferencedLocations(ParsedAST &AST) { +- return findReferencedLocations(AST.getASTContext(), AST.getPreprocessor(), +- &AST.getTokens()); +-} +- +-ReferencedFiles findReferencedFiles( +- const ReferencedLocations &Locs, const SourceManager &SM, +- llvm::function_ref HeaderResponsible, +- llvm::function_ref(FileID)> UmbrellaHeader) { +- std::vector Sorted{Locs.User.begin(), Locs.User.end()}; +- llvm::sort(Sorted); // Group by FileID. +- ReferencedFilesBuilder Builder(SM); +- for (auto It = Sorted.begin(); It < Sorted.end();) { +- FileID FID = SM.getFileID(*It); +- Builder.add(FID, *It); +- // Cheaply skip over all the other locations from the same FileID. +- // This avoids lots of redundant Loc->File lookups for the same file. 
+- do +- ++It; +- while (It != Sorted.end() && SM.isInFileID(*It, FID)); +- } +- +- // If a header is not self-contained, we consider its symbols a logical part +- // of the including file. Therefore, mark the parents of all used +- // non-self-contained FileIDs as used. Perform this on FileIDs rather than +- // HeaderIDs, as each inclusion of a non-self-contained file is distinct. +- llvm::DenseSet UserFiles; +- llvm::StringSet<> PublicHeaders; +- for (FileID ID : Builder.Files) { +- UserFiles.insert(HeaderResponsible(ID)); +- if (auto PublicHeader = UmbrellaHeader(ID)) { +- PublicHeaders.insert(*PublicHeader); +- } +- } +- +- llvm::DenseSet StdlibFiles; +- for (const auto &Symbol : Locs.Stdlib) +- for (const auto &Header : Symbol.headers()) +- StdlibFiles.insert(Header); +- +- return {std::move(UserFiles), std::move(StdlibFiles), +- std::move(PublicHeaders)}; +-} +- +-ReferencedFiles findReferencedFiles(const ReferencedLocations &Locs, +- const IncludeStructure &Includes, +- const CanonicalIncludes &CanonIncludes, +- const SourceManager &SM) { +- return findReferencedFiles( +- Locs, SM, +- [&SM, &Includes](FileID ID) { +- return headerResponsible(ID, SM, Includes); +- }, +- [&SM, &CanonIncludes](FileID ID) -> Optional { +- auto Entry = SM.getFileEntryRefForID(ID); +- if (!Entry) +- return llvm::None; +- auto PublicHeader = CanonIncludes.mapHeader(*Entry); +- if (PublicHeader.empty()) +- return llvm::None; +- return PublicHeader; +- }); +-} +- + std::vector + getUnused(ParsedAST &AST, + const llvm::DenseSet &ReferencedFiles, +@@ -426,51 +155,50 @@ getUnused(ParsedAST &AST, + return Unused; + } + +-#ifndef NDEBUG +-// Is FID a , etc? 
+-static bool isSpecialBuffer(FileID FID, const SourceManager &SM) { +- const SrcMgr::FileInfo &FI = SM.getSLocEntry(FID).getFile(); +- return FI.getName().startswith("<"); +-} +-#endif +- +-llvm::DenseSet +-translateToHeaderIDs(const ReferencedFiles &Files, +- const IncludeStructure &Includes, +- const SourceManager &SM) { +- trace::Span Tracer("IncludeCleaner::translateToHeaderIDs"); +- llvm::DenseSet TranslatedHeaderIDs; +- TranslatedHeaderIDs.reserve(Files.User.size()); +- for (FileID FID : Files.User) { +- const FileEntry *FE = SM.getFileEntryForID(FID); +- if (!FE) { +- assert(isSpecialBuffer(FID, SM)); +- continue; +- } +- const auto File = Includes.getID(FE); +- assert(File); +- TranslatedHeaderIDs.insert(*File); +- } +- for (tooling::stdlib::Header StdlibUsed : Files.Stdlib) +- for (auto HID : Includes.StdlibHeaders.lookup(StdlibUsed)) +- TranslatedHeaderIDs.insert(HID); +- return TranslatedHeaderIDs; ++bool match(const include_cleaner::Header &H, const Inclusion &I, ++ const IncludeStructure &S) { ++ switch (H.kind()) { ++ case include_cleaner::Header::Physical: ++ if (auto HID = S.getID(H.getPhysical())) ++ if (static_cast(*HID) == I.HeaderID) ++ return true; ++ break; ++ case include_cleaner::Header::StandardLibrary: ++ return I.Written == H.getStandardLibrary().name(); ++ case include_cleaner::Header::Verbatim: ++ return llvm::StringRef(I.Written).trim("\"<>") == H.getVerbatimSpelling(); ++ case include_cleaner::Header::Builtin: ++ case include_cleaner::Header::MainFile: ++ break; ++ } ++ return false; + } + + std::vector computeUnusedIncludes(ParsedAST &AST) { +- const auto &SM = AST.getSourceManager(); +- +- auto Refs = findReferencedLocations(AST); +- auto ReferencedFiles = +- findReferencedFiles(Refs, AST.getIncludeStructure(), +- AST.getCanonicalIncludes(), AST.getSourceManager()); +- auto ReferencedHeaders = +- translateToHeaderIDs(ReferencedFiles, AST.getIncludeStructure(), SM); +- return getUnused(AST, ReferencedHeaders, 
ReferencedFiles.SpelledUmbrellas); ++ include_cleaner::AnalysisContext Ctx(include_cleaner::Policy{}, ++ AST.getPreprocessor()); ++ llvm::DenseSet Used; ++ include_cleaner::walkUsed( ++ Ctx, AST.getLocalTopLevelDecls(), ++ /*MacroRefs=*/findReferencedMacros(AST, Ctx), ++ [&](SourceLocation Loc, include_cleaner::Symbol Sym, ++ llvm::ArrayRef Headers) { ++ for (const auto &I : AST.getIncludeStructure().MainFileIncludes) ++ for (const auto &H : Headers) ++ if (match(H, I, AST.getIncludeStructure())) ++ Used.insert(&I); ++ }); ++ std::vector Unused; ++ const Config &Cfg = Config::current(); ++ for (const auto &I : AST.getIncludeStructure().MainFileIncludes) { ++ if (!Used.contains(&I) && mayConsiderUnused(I, AST, Cfg)) ++ Unused.push_back(&I); ++ } ++ return Unused; + } + +-std::vector issueUnusedIncludesDiagnostics(ParsedAST &AST, +- llvm::StringRef Code) { ++auto issueUnusedIncludesDiagnostics(ParsedAST &AST, ++ llvm::StringRef Code) -> std::vector { + const Config &Cfg = Config::current(); + if (Cfg.Diagnostics.UnusedIncludes != Config::UnusedIncludesPolicy::Strict || + Cfg.Diagnostics.SuppressAll || +diff --git a/clang-tools-extra/clangd/IncludeCleaner.h b/clang-tools-extra/clangd/IncludeCleaner.h +index 4ce31baaa067..c858a60c5db7 100644 +--- a/clang-tools-extra/clangd/IncludeCleaner.h ++++ b/clang-tools-extra/clangd/IncludeCleaner.h +@@ -23,6 +23,7 @@ + #include "index/CanonicalIncludes.h" + #include "clang/Basic/SourceLocation.h" + #include "clang/Tooling/Inclusions/StandardLibrary.h" ++#include "clang-include-cleaner/Types.h" + #include "llvm/ADT/DenseSet.h" + #include "llvm/ADT/STLFunctionalExtras.h" + #include "llvm/ADT/StringSet.h" +@@ -100,6 +101,10 @@ std::vector computeUnusedIncludes(ParsedAST &AST); + std::vector issueUnusedIncludesDiagnostics(ParsedAST &AST, + llvm::StringRef Code); + ++// Does an include-cleaner header spec match a clangd recorded inclusion? 
++bool match(const include_cleaner::Header &H, const Inclusion &I, ++ const IncludeStructure &S); ++ + /// Affects whether standard library includes should be considered for + /// removal. This is off by default for now due to implementation limitations: + /// - macros are not tracked +diff --git a/clang-tools-extra/include-cleaner/CMakeLists.txt b/clang-tools-extra/include-cleaner/CMakeLists.txt +index 0550b02f603b..325186879a47 100644 +--- a/clang-tools-extra/include-cleaner/CMakeLists.txt ++++ b/clang-tools-extra/include-cleaner/CMakeLists.txt +@@ -1,4 +1,8 @@ ++include_directories(include) ++include_directories(${CMAKE_CURRENT_BINARY_DIR}/include) + add_subdirectory(lib) ++add_subdirectory(tool) ++ + if(CLANG_INCLUDE_TESTS) + add_subdirectory(test) + add_subdirectory(unittests) +diff --git a/clang-tools-extra/include-cleaner/README.md b/clang-tools-extra/include-cleaner/README.md +deleted file mode 100644 +index e69de29bb2d1..000000000000 +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Analysis.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Analysis.h +new file mode 100644 +index 000000000000..4e5cc8d03814 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Analysis.h +@@ -0,0 +1,77 @@ ++//===--- Analysis.h - Analyze used files --------------------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_ANALYSIS_H ++#define CLANG_INCLUDE_CLEANER_ANALYSIS_H ++ ++#include "clang-include-cleaner/Policy.h" ++#include "clang-include-cleaner/Types.h" ++ ++namespace clang { ++namespace include_cleaner { ++class Cache; ++ ++// Bundles the policy, compiler state, and caches for one include-cleaner run. ++// (This is needed everywhere, but shouldn't be used to propagate state around!) ++class AnalysisContext { ++public: ++ AnalysisContext(const Policy &, const Preprocessor &); ++ AnalysisContext(AnalysisContext &&) = delete; ++ AnalysisContext &operator=(AnalysisContext &&) = delete; ++ ~AnalysisContext(); ++ ++ const Policy &policy() const { return P; } ++ ++ const SourceManager &sourceManager() const { return *SM; } ++ const Preprocessor &preprocessor() const { return *PP; } ++ ++ // Only for internal use (the Cache class definition is not exposed). ++ // This allows us to reuse e.g. mappings from symbols to their locations. ++ Cache &cache() { return *C; } ++ // FIXME: does this need to be public? ++ Symbol macro(const IdentifierInfo *, SourceLocation); ++ ++private: ++ Policy P; ++ const SourceManager *SM; ++ const Preprocessor *PP; ++ std::unique_ptr C; ++}; ++ ++// A UsedSymbolVisitor is a callback invoked for each symbol reference seen. ++// ++// References occur at a particular location, refer to a single symbol, and ++// that symbol may be provided by any of several headers. ++// ++// The first element of ProvidedBy is the *preferred* header, e.g. to insert. ++using UsedSymbolVisitor = ++ llvm::function_ref ProvidedBy)>; ++ ++// Find and report all references to symbols in a region of code. ++// ++// The AST traversal is rooted at ASTRoots - typically top-level declarations ++// of a single source file. 
MacroRefs are additional recorded references to ++// macros, which do not appear in the AST. ++// ++// This is the main entrypoint of the include-cleaner library, and can be used: ++// - to diagnose missing includes: a referenced symbol is provided by ++// headers which don't match any #include in the main file ++// - to diagnose unused includes: an #include in the main file does not match ++// the headers for any referenced symbol ++// ++// Mapping between Header and #include directives is not provided here, but see ++// RecordedPP::Includes::match() in Hooks.h. ++void walkUsed(AnalysisContext &, llvm::ArrayRef ASTRoots, ++ llvm::ArrayRef MacroRefs, ++ UsedSymbolVisitor Callback); ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Hooks.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Hooks.h +new file mode 100644 +index 000000000000..39e11653b210 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Hooks.h +@@ -0,0 +1,87 @@ ++//===--- Hooks.h - Record compiler events -------------------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// Where Analysis.h analyzes AST nodes and recorded preprocessor events, this ++// file defines ways to capture AST and preprocessor information from a parse. ++// ++// These are the simplest way to connect include-cleaner logic to the parser, ++// but other ways are possible (for example clangd records includes separately). 
++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_HOOKS_H ++#define CLANG_INCLUDE_CLEANER_HOOKS_H ++ ++#include "Analysis.h" ++#include "Types.h" ++#include "clang/Basic/FileEntry.h" ++#include "clang/Basic/SourceLocation.h" ++#include "llvm/ADT/DenseMap.h" ++#include "llvm/ADT/SmallVector.h" ++#include "llvm/ADT/StringMap.h" ++#include "llvm/ADT/StringRef.h" ++#include ++ ++namespace clang { ++class FileEntry; ++class PPCallbacks; ++namespace include_cleaner { ++class PPRecorder; ++ ++// Contains recorded preprocessor events relevant to include-cleaner. ++struct RecordedPP { ++ // The callback (when installed into clang) tracks macros/includes in this. ++ std::unique_ptr record(AnalysisContext &Ctx); ++ // FIXME: probably also want a comment handler to capture IWYU pragmas. ++ ++ // Describes where macros were used from the main file. ++ std::vector MacroReferences; ++ ++ // A single #include directive from the main file. ++ struct Include { ++ llvm::StringRef Spelled; // e.g. vector ++ const FileEntry *Resolved; // e.g. /path/to/c++/v1/vector ++ SourceLocation Location; // of hash in #include ++ unsigned Line; // 1-based line number for #include ++ }; ++ // The set of includes recorded from the main file. ++ class RecordedIncludes { ++ public: ++ // All #includes seen, in the order they appear. ++ llvm::ArrayRef all() const { return All; } ++ // Determine #includes that match a header (that provides a used symbol). ++ // ++ // Matching is based on the type of Header specified: ++ // - for a physical file like /path/to/foo.h, we check Resolved ++ // - for a logical file like , we check Spelled ++ llvm::SmallVector match(Header H) const; ++ ++ private: ++ std::vector All; ++ llvm::StringMap> BySpelling; ++ llvm::DenseMap> ByFile; ++ friend PPRecorder; ++ } Includes; ++}; ++ ++// Contains recorded parser events relevant to include-cleaner. 
++struct RecordedAST { ++ // The consumer (when installed into clang) tracks declarations in this. ++ std::unique_ptr record(AnalysisContext &Ctx); ++ ++ // The set of declarations written at file scope inside the main file. ++ // ++ // These are the roots of the subtrees that should be traversed to find uses. ++ // (Traversing the TranslationUnitDecl would find uses inside headers!) ++ std::vector TopLevelDecls; ++}; ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Policy.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Policy.h +new file mode 100644 +index 000000000000..142887b85529 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Policy.h +@@ -0,0 +1,35 @@ ++//===--- Policy.h - Tuning what is considered used ----------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_POLICY_H ++#define CLANG_INCLUDE_CLEANER_POLICY_H ++ ++namespace clang { ++namespace include_cleaner { ++ ++// Provides some fine-tuning of include-cleaner's choices about what is used. ++// ++// Changing the policy serves two purposes: ++// - marking more things used reduces the false-positives for "unused include", ++// while marking fewer things improves "missing include" in the same way. ++// - different coding styles may make different decisions about which includes ++// are required. ++struct Policy { ++ // Does construction count as use of the type, when the type is not named? ++ // e.g. printVector({x, y, z}); - is std::vector used? ++ bool Construction = false; ++ // Is member access tracked as a reference? 
++ bool Members = false; ++ // Are operator calls tracked as references? ++ bool Operators = false; ++}; ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif +diff --git a/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Types.h b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Types.h +new file mode 100644 +index 000000000000..2a91473b926e +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/include/clang-include-cleaner/Types.h +@@ -0,0 +1,219 @@ ++//===--- Types.h - Data structures for used-symbol analysis -------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// Find referenced files is mostly a matter of translating: ++// AST Node => declaration => source location => file ++// ++// clang has types for these (DynTypedNode, Decl, SourceLocation, FileID), but ++// there are special cases: macros are not declarations, the concrete file where ++// a standard library symbol was defined doesn't matter, etc. ++// ++// We define some slightly more abstract sum types to handle these cases while ++// keeping the API clean. For example, Symbol is Decl+DefinedMacro. ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_TYPES_H ++#define CLANG_INCLUDE_CLEANER_TYPES_H ++ ++#include "clang/AST/DeclBase.h" ++#include "clang/Tooling/Inclusions/StandardLibrary.h" ++#include "llvm/ADT/BitmaskEnum.h" ++#include "llvm/ADT/PointerSumType.h" ++ ++namespace clang { ++class IdentifierInfo; ++class MacroDirective; ++namespace include_cleaner { ++ ++// Identifies a macro, along with a particular definition of it. ++// We generally consider redefined macros to be different symbols. 
++struct DefinedMacro { ++ const IdentifierInfo *Name; ++ const SourceLocation Definition; ++}; ++ ++// A Symbol is an entity that can be referenced. ++// It is either a declaration (NamedDecl) or a macro (DefinedMacro). ++class Symbol { ++public: ++ enum Kind { ++ Macro, ++ Declaration, ++ }; ++ Symbol(NamedDecl *ND) : Target(ND) {} ++ Symbol(const DefinedMacro *M) : Target(M) {} ++ ++ std::string name() const; ++ std::string nodeName() const; ++ Kind kind() const { return Target.is() ? Declaration : Macro; } ++ ++ NamedDecl *getDeclaration() const { return Target.get(); } ++ const DefinedMacro *getMacro() const { ++ return Target.get(); ++ } ++ ++private: ++ llvm::PointerUnion Target; ++}; ++ ++// A usage of a Symbol seen in our source code. ++struct SymbolReference { ++ // The point in the code where the reference occurred. ++ // We could track the DynTypedNode we found it in if it's important. ++ SourceLocation Location; ++ Symbol Target; ++}; ++ ++// A Location is a place where a symbol can be provided. ++// It is either a physical part of the TU (SourceLocation) or a logical location ++// in the standard library (stdlib::Symbol). ++class Location { ++public: ++ enum Kind : uint8_t { ++ Physical, ++ StandardLibrary, ++ }; ++ ++ Location(SourceLocation S) : K(Physical), SrcLoc(S) {} ++ Location(tooling::stdlib::Symbol S) : K(StandardLibrary), StdlibSym(S) {} ++ ++ std::string name(const SourceManager &SM) const; ++ Kind kind() const { return K; } ++ ++ SourceLocation getPhysical() const { ++ assert(kind() == Physical); ++ return SrcLoc; ++ }; ++ tooling::stdlib::Symbol getStandardLibrary() const { ++ assert(kind() == StandardLibrary); ++ return StdlibSym; ++ }; ++ ++private: ++ Kind K; ++ union { ++ SourceLocation SrcLoc; ++ tooling::stdlib::Symbol StdlibSym; ++ }; ++}; ++ ++// A Header is an includable file that can provide access to Locations. 
++// It is either a physical file (FileEntry), a logical location in the standard
++// library (stdlib::Header), or a verbatim header spelling (StringRef).
++class Header {
++public:
++  enum Kind : uint8_t {
++    Physical,
++    StandardLibrary,
++    Verbatim,
++    Builtin,
++    MainFile,
++  };
++
++  Header(const FileEntry *FE) : K(Physical), PhysicalFile(FE) {}
++  Header(tooling::stdlib::Header H) : K(StandardLibrary), StdlibHeader(H) {}
++  Header(const char *V) : K(Verbatim), VerbatimSpelling(V) {}
++  static Header builtin() { return Header{Builtin}; };
++  static Header mainFile() { return Header{MainFile}; };
++
++  std::string name() const;
++  Kind kind() const { return K; }
++
++  const FileEntry *getPhysical() const {
++    assert(kind() == Physical);
++    return PhysicalFile;
++  };
++  tooling::stdlib::Header getStandardLibrary() const {
++    assert(kind() == StandardLibrary);
++    return StdlibHeader;
++  };
++  llvm::StringRef getVerbatimSpelling() const {
++    assert(kind() == Verbatim);
++    return VerbatimSpelling;
++  };
++
++private:
++  Header(Kind K) : K(K) {}
++
++  Kind K;
++  union {
++    const FileEntry *PhysicalFile;
++    tooling::stdlib::Header StdlibHeader;
++    const char *VerbatimSpelling;
++  };
++
++  friend bool operator==(const Header &L, const Header &R) {
++    if (L.kind() != R.kind())
++      return false;
++    switch (L.kind()) {
++    case Physical:
++      return L.getPhysical() == R.getPhysical();
++    case StandardLibrary:
++      return L.getStandardLibrary() == R.getStandardLibrary();
++    case Verbatim:
++      return L.getVerbatimSpelling() == R.getVerbatimSpelling();
++    case Builtin:
++    case MainFile:
++      return true; // no payload
++    }
++    llvm_unreachable("unhandled Header kind");
++  }
++
++  friend bool operator<(const Header &L, const Header &R) {
++    if (L.kind() != R.kind())
++      return L.kind() < R.kind();
++    switch (L.kind()) {
++    case Physical:
++      return L.getPhysical() < R.getPhysical();
++    case StandardLibrary:
++      return L.getStandardLibrary() < R.getStandardLibrary();
++    case Verbatim:
++      return L.getVerbatimSpelling() < R.getVerbatimSpelling();
++    case Builtin:
++    case MainFile:
++      return false; // no payload
++    }
++    llvm_unreachable("unhandled Header kind");
++  }
++
++  friend llvm::hash_code hash_value(const Header &H) {
++    switch (H.K) {
++    case Header::Physical:
++      return llvm::hash_combine(H.K, H.getPhysical());
++    case Header::StandardLibrary:
++      // FIXME: make StdlibHeader hashable instead.
++      return llvm::hash_combine(H.K, H.getStandardLibrary().name());
++    case Header::Verbatim:
++      return llvm::hash_combine(H.K, llvm::StringRef(H.VerbatimSpelling));
++    case Header::Builtin:
++    case Header::MainFile:
++      return llvm::hash_value(H.K);
++    }
++  }
++};
++
++template <typename T> struct DefaultDenseMapInfo {
++  static bool isEqual(const T &L, const T &R) { return L == R; }
++  static unsigned getHashValue(const T &V) { return hash_value(V); }
++};
++
++} // namespace include_cleaner
++} // namespace clang
++
++namespace llvm {
++template <> struct DenseMapInfo<clang::include_cleaner::Header> {
++  using Header = clang::include_cleaner::Header;
++  static Header getTombstoneKey() { return Header("__tombstone__"); }
++  static Header getEmptyKey() { return Header("__empty__"); }
++  static bool isEqual(const Header &L, const Header &R) { return L == R; }
++  static unsigned getHashValue(const Header &V) { return hash_value(V); }
++};
++} // namespace llvm
++
++#endif
+diff --git a/clang-tools-extra/include-cleaner/lib/Analysis.cpp b/clang-tools-extra/include-cleaner/lib/Analysis.cpp
+new file mode 100644
+index 000000000000..5ac0008b07e8
+--- /dev/null
++++ b/clang-tools-extra/include-cleaner/lib/Analysis.cpp
+@@ -0,0 +1,101 @@
++//===--- Analysis.cpp - Analyze used files --------------------------------===//
++//
++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
++// See https://llvm.org/LICENSE.txt for license information.
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Analysis.h" ++#include "AnalysisInternal.h" ++#include "clang/Lex/Preprocessor.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++AnalysisContext::AnalysisContext(const Policy &P, const Preprocessor &PP) ++ : P(P), SM(&PP.getSourceManager()), PP(&PP), C(std::make_unique()) {} ++AnalysisContext::~AnalysisContext() = default; ++ ++static bool prefer(AnalysisContext &Ctx, Hint L, Hint R) { ++ return std::make_tuple(bool(L & Hint::NameMatch), bool(L & Hint::Complete)) > ++ std::make_tuple(bool(R & Hint::NameMatch), bool(R & Hint::Complete)); ++} ++ ++// Is this hint actually useful? ++static void addNameMatchHint(const IdentifierInfo *II, ++ llvm::SmallVector> &H) { ++ if (!II) ++ return; ++ for (auto &HH : H) ++ if (HH->kind() == Header::Physical && ++ II->getName().equals_insensitive(HH->getPhysical()->getName())) ++ HH.Hint |= Hint::NameMatch; ++} ++ ++static llvm::SmallVector
++rank(AnalysisContext &Ctx, llvm::SmallVector> &Candidates) { ++ // Sort by Header, so we can deduplicate (and combine flags). ++ llvm::stable_sort(Candidates, ++ [&](const Hinted
&L, const Hinted
&R) { ++ return *L < *R; ++ }); ++ // Like unique(), but merge hints. ++ auto *Write = Candidates.begin(); ++ for (auto *Read = Candidates.begin(); Read != Candidates.end(); ++Write) { ++ *Write = *Read; ++ for (++Read; Read != Candidates.end() && Read->Value == Write->Value; ++ ++Read) ++ Write->Hint |= Read->Hint; ++ } ++ Candidates.erase(Write, Candidates.end()); ++ // Now sort by hints. ++ llvm::stable_sort(Candidates, ++ [&](const Hinted
&L, const Hinted
&R) { ++ return prefer(Ctx, L.Hint, R.Hint); ++ }); ++ // Drop hints to return clean result list. ++ llvm::SmallVector
Result; ++ for (const auto &H : Candidates) ++ Result.push_back(*H); ++ return Result; ++} ++ ++template void addHint(Hint H, T &Items) { ++ for (auto &Item : Items) ++ Item.Hint |= H; ++} ++ ++void walkUsed(AnalysisContext &Ctx, llvm::ArrayRef ASTRoots, ++ llvm::ArrayRef MacroRefs, ++ UsedSymbolVisitor Callback) { ++ for (Decl *Root : ASTRoots) { ++ walkAST(Ctx, *Root, [&](SourceLocation RefLoc, Hinted ND) { ++ auto Locations = locateDecl(Ctx, *ND); ++ llvm::SmallVector> Headers; ++ for (const auto &Loc : Locations) { ++ auto LocHeaders = includableHeader(Ctx, *Loc); ++ addHint(Loc.Hint, LocHeaders); ++ Headers.append(std::move(LocHeaders)); ++ } ++ addHint(ND.Hint, Headers); ++ addNameMatchHint(ND.Value.getDeclName().getAsIdentifierInfo(), Headers); ++ Callback(RefLoc, &ND.Value, rank(Ctx, Headers)); ++ }); ++ } ++ for (const SymbolReference &MacroRef : MacroRefs) { ++ assert(MacroRef.Target.kind() == Symbol::Macro); ++ auto Loc = locateMacro(Ctx, *MacroRef.Target.getMacro()); ++ auto Headers = includableHeader(Ctx, *Loc); ++ addHint(Loc.Hint, Headers); ++ addNameMatchHint(MacroRef.Target.getMacro()->Name, Headers); ++ Callback(MacroRef.Location, MacroRef.Target, rank(Ctx, Headers)); ++ } ++} ++ ++Symbol AnalysisContext::macro(const IdentifierInfo *II, SourceLocation Loc) { ++ return cache().macro(II, Loc); ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h b/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h +index 8b0c73fe7997..31b1ad8039d8 100644 +--- a/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h ++++ b/clang-tools-extra/include-cleaner/lib/AnalysisInternal.h +@@ -21,6 +21,95 @@ + #ifndef CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H + #define CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H + ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Types.h" ++#include "clang/Tooling/Inclusions/StandardLibrary.h" ++ ++namespace clang { ++namespace 
include_cleaner { ++ ++// FIXME: Right now we cache nothing, this is just used as an arena for macros. ++// Verify we're burning time in repeated analysis and cache partial operations. ++class Cache { ++public: ++ Symbol macro(const IdentifierInfo *Name, const SourceLocation Def) { ++ auto &DMS = DefinedMacros[Name->getName()]; ++ // Linear search. We probably only saw ~1 definition of each macro name. ++ for (const DefinedMacro &DM : DMS) ++ if (DM.Definition == Def) ++ return &DM; ++ DMS.push_back(DefinedMacro{Name, Def}); ++ return &DMS.back(); ++ } ++ ++ tooling::stdlib::Recognizer StdlibRecognizer; ++ ++private: ++ llvm::StringMap> DefinedMacros; ++}; ++ ++enum class Hint : uint16_t { ++ None = 0, ++ Complete = 1, // Provides a complete definition that is often needed. ++ // e.g. classes, templates. ++ NameMatch = 1, // Header name matches the symbol name. ++ LLVM_MARK_AS_BITMASK_ENUM(Hint::Complete) ++}; ++LLVM_ENABLE_BITMASK_ENUMS_IN_NAMESPACE(); ++ ++template struct Hinted { ++ Hinted(T Value, Hint H = Hint::None) : Value(Value), Hint(H) {} ++ T Value; ++ include_cleaner::Hint Hint; ++ ++ T &operator*() { return Value; } ++ const T &operator*() const { return Value; } ++ std::remove_reference_t *operator->() { return &Value; } ++ const std::remove_reference_t *operator->() const { return &Value; } ++}; ++ ++// Traverses a subtree of the AST, reporting declarations referenced. ++void walkAST(AnalysisContext &, Decl &Root, ++ llvm::function_ref)>); ++ ++// Finds the locations where a declaration is provided. ++llvm::SmallVector> locateDecl(AnalysisContext &, ++ const NamedDecl &); ++ ++// Finds the locations where a macro is provided. ++Hinted locateMacro(AnalysisContext &, const DefinedMacro &); ++ ++// Finds the headers that provide a location. 
++llvm::SmallVector> includableHeader(AnalysisContext &, ++ const Location &); ++ ++} // namespace include_cleaner ++} // namespace clang ++ ++#endif ++//===--- AnalysisInternal.h - Analysis building blocks ------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// This file provides smaller, testable pieces of the used-header analysis. ++// We find the headers by chaining together several mappings. ++// ++// AST => AST node => Symbol => Location => Header ++// / ++// Macro expansion => ++// ++// The individual steps are declared here. ++// (AST => AST Node => Symbol is one API to avoid materializing DynTypedNodes). ++// ++//===----------------------------------------------------------------------===// ++ ++#ifndef CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H ++#define CLANG_INCLUDE_CLEANER_ANALYSISINTERNAL_H ++ + #include "clang/Basic/SourceLocation.h" + #include "llvm/ADT/STLFunctionalExtras.h" + +diff --git a/clang-tools-extra/include-cleaner/lib/CMakeLists.txt b/clang-tools-extra/include-cleaner/lib/CMakeLists.txt +index 5e2807332f94..25d66b4f30df 100644 +--- a/clang-tools-extra/include-cleaner/lib/CMakeLists.txt ++++ b/clang-tools-extra/include-cleaner/lib/CMakeLists.txt +@@ -1,10 +1,15 @@ + set(LLVM_LINK_COMPONENTS Support) + + add_clang_library(clangIncludeCleaner ++ Analysis.cpp ++ Headers.cpp ++ Hooks.cpp ++ Locations.cpp ++ Types.cpp + WalkAST.cpp + + LINK_LIBS + clangBasic ++ clangLex + clangAST + ) +- +diff --git a/clang-tools-extra/include-cleaner/lib/Headers.cpp b/clang-tools-extra/include-cleaner/lib/Headers.cpp +new file mode 100644 +index 000000000000..f41bbe4c59c8 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Headers.cpp +@@ -0,0 +1,46 @@ ++//===--- Headers.cpp - 
Find headers that provide locations ----------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "AnalysisInternal.h" ++#include "clang/Basic/SourceManager.h" ++#include "clang/Lex/Preprocessor.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++llvm::SmallVector> includableHeader(AnalysisContext &Ctx, ++ const Location &Loc) { ++ switch (Loc.kind()) { ++ case Location::Physical: { ++ FileID FID = Ctx.sourceManager().getFileID( ++ Ctx.sourceManager().getExpansionLoc(Loc.getPhysical())); ++ if (FID == Ctx.sourceManager().getMainFileID()) ++ return {Header::mainFile()}; ++ if (FID == Ctx.preprocessor().getPredefinesFileID()) ++ return {Header::builtin()}; ++ // FIXME: if the file is not self-contained, find its umbrella header: ++ // - files that lack header guards (e.g. *.def) ++ // - IWYU private pragmas (and maybe export?) ++ // - #pragma clang include_instead ++ // - headers containing "#error ... 
include" clangd isDontIncludeMeHeader ++ // - apple framework header layout ++ if (auto *FE = Ctx.sourceManager().getFileEntryForID(FID)) ++ return {{FE}}; ++ return {}; ++ } ++ case Location::StandardLibrary: ++ // FIXME: some symbols are provided by multiple stdlib headers: ++ // - for historical reasons, like size_t ++ // - some headers are guaranteed to include others () ++ // - ::printf is de facto provided by cstdio and stdio.h, etc ++ return {{Loc.getStandardLibrary().header()}}; ++ } ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/Hooks.cpp b/clang-tools-extra/include-cleaner/lib/Hooks.cpp +new file mode 100644 +index 000000000000..decb83110c65 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Hooks.cpp +@@ -0,0 +1,166 @@ ++//===--- Hooks.cpp - Record events from the compiler --------------- C++-*-===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Hooks.h" ++#include "AnalysisInternal.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang/AST/ASTConsumer.h" ++#include "clang/AST/DeclCXX.h" ++#include "clang/AST/DeclGroup.h" ++#include "clang/AST/DeclObjC.h" ++#include "clang/Lex/MacroInfo.h" ++#include "clang/Lex/PPCallbacks.h" ++#include "clang/Lex/Preprocessor.h" ++#include "clang/Lex/Token.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++class PPRecorder : public PPCallbacks { ++public: ++ PPRecorder(AnalysisContext &Ctx, RecordedPP &Recorded) ++ : Ctx(Ctx), Recorded(Recorded) {} ++ ++ virtual void FileChanged(SourceLocation Loc, FileChangeReason Reason, ++ SrcMgr::CharacteristicKind FileType, ++ FileID PrevFID) override { ++ Active = Ctx.sourceManager().isWrittenInMainFile(Loc); ++ } ++ ++ void InclusionDirective(SourceLocation Hash, const Token &IncludeTok, ++ StringRef SpelledFilename, bool IsAngled, ++ CharSourceRange FilenameRange, Optional File, ++ StringRef SearchPath, StringRef RelativePath, ++ const Module *, SrcMgr::CharacteristicKind) override { ++ if (!Active) ++ return; ++ ++ unsigned Index = Recorded.Includes.All.size(); ++ Recorded.Includes.All.emplace_back(); ++ RecordedPP::Include &I = Recorded.Includes.All.back(); ++ const auto *const RawFile = &(*File).getFileEntry(); ++ I.Location = Hash; ++ I.Resolved = RawFile; ++ I.Line = Ctx.sourceManager().getSpellingLineNumber(Hash); ++ auto BySpellingIt = ++ Recorded.Includes.BySpelling.try_emplace(SpelledFilename).first; ++ I.Spelled = BySpellingIt->first(); ++ ++ BySpellingIt->second.push_back(Index); ++ Recorded.Includes.ByFile[RawFile].push_back(Index); ++ } ++ ++ void MacroExpands(const Token &MacroName, const MacroDefinition &MD, ++ SourceRange Range, const MacroArgs *Args) override { ++ if (!Active) ++ return; ++ 
recordMacroRef(MacroName, *MD.getMacroInfo()); ++ } ++ ++ void MacroDefined(const Token &MacroName, const MacroDirective *MD) override { ++ if (!Active) ++ return; ++ ++ const auto *MI = MD->getMacroInfo(); ++ // The tokens of a macro definition could refer to a macro. ++ // Formally this reference isn't resolved until this macro is expanded, ++ // but we want to treat it as a reference anyway. ++ for (const auto &Tok : MI->tokens()) { ++ auto *II = Tok.getIdentifierInfo(); ++ // Could this token be a reference to a macro? (Not param to this macro). ++ if (!II || !II->hadMacroDefinition() || ++ llvm::is_contained(MI->params(), II)) ++ continue; ++ if (const MacroInfo *MI = Ctx.preprocessor().getMacroInfo(II)) ++ recordMacroRef(Tok, *MI); ++ } ++ } ++ ++private: ++ void recordMacroRef(const Token &Tok, const MacroInfo &MI) { ++ if (MI.isBuiltinMacro()) ++ return; // __FILE__ is not a reference. ++ Recorded.MacroReferences.push_back(SymbolReference{ ++ Tok.getLocation(), ++ Ctx.cache().macro(Tok.getIdentifierInfo(), MI.getDefinitionLoc())}); ++ } ++ ++ bool Active = false; ++ AnalysisContext &Ctx; ++ RecordedPP &Recorded; ++}; ++ ++llvm::SmallVector ++RecordedPP::RecordedIncludes::match(Header H) const { ++ llvm::SmallVector Result; ++ switch (H.kind()) { ++ case Header::Physical: ++ for (unsigned I : ByFile.lookup(H.getPhysical())) ++ Result.push_back(&All[I]); ++ break; ++ case Header::StandardLibrary: ++ for (unsigned I : ++ BySpelling.lookup(H.getStandardLibrary().name().trim("<>"))) ++ Result.push_back(&All[I]); ++ break; ++ case Header::Verbatim: ++ for (unsigned I : BySpelling.lookup(H.getVerbatimSpelling())) ++ Result.push_back(&All[I]); ++ break; ++ case Header::Builtin: ++ case Header::MainFile: ++ break; ++ } ++ llvm::sort(Result); ++ Result.erase(std::unique(Result.begin(), Result.end()), Result.end()); ++ return Result; ++} ++ ++class ASTRecorder : public ASTConsumer { ++public: ++ ASTRecorder(AnalysisContext &Ctx, RecordedAST &Recorded) ++ : Ctx(Ctx), 
Recorded(Recorded) {} ++ ++ bool HandleTopLevelDecl(DeclGroupRef DG) override { ++ for (Decl *D : DG) { ++ if (!Ctx.sourceManager().isWrittenInMainFile( ++ Ctx.sourceManager().getExpansionLoc(D->getLocation()))) ++ continue; ++ if (const auto *T = llvm::dyn_cast(D)) ++ if (T->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) ++ continue; ++ if (const auto *T = llvm::dyn_cast(D)) ++ if (T->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) ++ continue; ++ if (const auto *T = llvm::dyn_cast(D)) ++ if (T->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) ++ continue; ++ // ObjCMethodDecl are not actually top-level! ++ if (isa(D)) ++ continue; ++ ++ Recorded.TopLevelDecls.push_back(D); ++ } ++ return true; ++ } ++ ++private: ++ AnalysisContext &Ctx; ++ RecordedAST &Recorded; ++}; ++ ++std::unique_ptr RecordedPP::record(AnalysisContext &Ctx) { ++ return std::make_unique(Ctx, *this); ++} ++ ++std::unique_ptr RecordedAST::record(AnalysisContext &Ctx) { ++ return std::make_unique(Ctx, *this); ++} ++ ++} // namespace include_cleaner ++} // namespace clang +\ No newline at end of file +diff --git a/clang-tools-extra/include-cleaner/lib/Locations.cpp b/clang-tools-extra/include-cleaner/lib/Locations.cpp +new file mode 100644 +index 000000000000..7e23c56c1dfc +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Locations.cpp +@@ -0,0 +1,60 @@ ++//===--- Locations.cpp - Find the locations that provide symbols ----------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. 
++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "AnalysisInternal.h" ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Types.h" ++#include "clang/AST/Decl.h" ++#include "clang/AST/DeclBase.h" ++#include "clang/AST/DeclTemplate.h" ++#include "clang/Basic/SourceLocation.h" ++#include "llvm/ADT/SmallVector.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++Hint declHint(const NamedDecl &D) { ++ Hint H = Hint::None; ++ if (auto *TD = llvm::dyn_cast(&D)) ++ if (TD->isThisDeclarationADefinition()) ++ H |= Hint::Complete; ++ if (auto *CTD = llvm::dyn_cast(&D)) ++ if (CTD->isThisDeclarationADefinition()) ++ H |= Hint::Complete; ++ // A function template being defined is similar to a class being defined. ++ if (auto *FTD = llvm::dyn_cast(&D)) ++ if (FTD->isThisDeclarationADefinition()) ++ H |= Hint::Complete; ++ return H; ++} ++ ++llvm::SmallVector> locateDecl(AnalysisContext &Ctx, ++ const NamedDecl &ND) { ++ if (auto StdlibSym = Ctx.cache().StdlibRecognizer(&ND)) ++ return {{*StdlibSym}}; ++ ++ llvm::SmallVector> Result; ++ // Is accepting all the redecls too naive? ++ for (const Decl *RD : ND.redecls()) { ++ // `friend X` is not an interesting location for X unless it's acting as a ++ // forward-declaration. 
++ if (RD->getFriendObjectKind() == Decl::FOK_Declared) ++ continue; ++ SourceLocation Loc = RD->getLocation(); ++ if (Loc.isValid()) ++ Result.push_back({Loc, declHint(*cast(RD))}); ++ } ++ return Result; ++} ++ ++Hinted locateMacro(AnalysisContext &Ctx, const DefinedMacro &M) { ++ return {M.Definition}; ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/Types.cpp b/clang-tools-extra/include-cleaner/lib/Types.cpp +new file mode 100644 +index 000000000000..6b79c603a70d +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/lib/Types.cpp +@@ -0,0 +1,61 @@ ++//===--- Types.cpp - Data structures for used-symbol analysis -------------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Types.h" ++#include "clang/AST/Decl.h" ++#include "clang/Basic/FileEntry.h" ++#include "clang/Basic/IdentifierTable.h" ++#include "clang/Tooling/Inclusions/StandardLibrary.h" ++ ++namespace clang { ++namespace include_cleaner { ++ ++std::string Symbol::name() const { ++ switch (kind()) { ++ case Macro: ++ return getMacro()->Name->getName().str(); ++ case Declaration: ++ return getDeclaration()->getNameAsString(); ++ } ++ llvm_unreachable("Unhandled Symbol kind"); ++} ++ ++std::string Symbol::nodeName() const { ++ if (kind() == Macro) ++ return "macro"; ++ return getDeclaration()->getDeclKindName(); ++} ++ ++std::string Location::name(const SourceManager &SM) const { ++ switch (K) { ++ case Physical: ++ return SrcLoc.printToString(SM); ++ case StandardLibrary: ++ return StdlibSym.name().str(); ++ } ++ llvm_unreachable("Unhandled Location kind"); ++} ++ ++std::string Header::name() const { ++ switch (K) { ++ case Physical: ++ return 
PhysicalFile->getName().str(); ++ case StandardLibrary: ++ return StdlibHeader.name().str(); ++ case Verbatim: ++ return VerbatimSpelling; ++ case Builtin: ++ return ""; ++ case MainFile: ++ return ""; ++ } ++ llvm_unreachable("Unhandled Header kind"); ++} ++ ++} // namespace include_cleaner ++} // namespace clang +diff --git a/clang-tools-extra/include-cleaner/lib/WalkAST.cpp b/clang-tools-extra/include-cleaner/lib/WalkAST.cpp +index b7354fe300e0..02a27977005f 100644 +--- a/clang-tools-extra/include-cleaner/lib/WalkAST.cpp ++++ b/clang-tools-extra/include-cleaner/lib/WalkAST.cpp +@@ -7,40 +7,132 @@ + //===----------------------------------------------------------------------===// + + #include "AnalysisInternal.h" ++#include "clang-include-cleaner/Analysis.h" + #include "clang/AST/RecursiveASTVisitor.h" ++#include "clang/Basic/SourceManager.h" ++#include "llvm/Support/SaveAndRestore.h" + + namespace clang { + namespace include_cleaner { + namespace { +-using DeclCallback = llvm::function_ref; + ++using DeclCallback = ++ llvm::function_ref)>; ++ ++// Traverses part of the AST, looking for references and reporting them. 
+ class ASTWalker : public RecursiveASTVisitor { +- DeclCallback Callback; ++public: ++ ASTWalker(AnalysisContext &Ctx, DeclCallback Callback) ++ : Ctx(Ctx), Callback(Callback) {} + +- void report(SourceLocation Loc, NamedDecl *ND) { +- if (!ND || Loc.isInvalid()) +- return; +- Callback(Loc, *cast(ND->getCanonicalDecl())); ++ bool VisitDeclRefExpr(DeclRefExpr *E) { ++ if (!Ctx.policy().Operators) ++ if (auto *FD = E->getDecl()->getAsFunction()) ++ if (FD->isOverloadedOperator()) ++ return true; ++ report(E->getLocation(), E->getFoundDecl()); ++ return true; + } + +-public: +- ASTWalker(DeclCallback Callback) : Callback(Callback) {} ++ bool VisitMemberExpr(MemberExpr *ME) { ++ if (Ctx.policy().Members) ++ report(ME->getMemberLoc(), ME->getFoundDecl().getDecl()); ++ return true; ++ } ++ ++ bool VisitTagType(TagType *TT) { ++ report(LocationOfType, TT->getDecl()); ++ return true; ++ } ++ ++ bool VisitFunctionDecl(FunctionDecl *FD) { ++ // Count function definitions as a reference to their declarations. ++ if (FD->isThisDeclarationADefinition() && FD->getCanonicalDecl() != FD) ++ report(FD->getLocation(), FD->getCanonicalDecl()); ++ return true; ++ } ++ ++ bool VisitCXXConstructExpr(CXXConstructExpr *E) { ++ if (!Ctx.policy().Construction) ++ return true; ++ SaveAndRestore Loc(LocationOfType, E->getLocation()); ++ LocationOfType = E->getLocation(); ++ return TraverseType(E->getType()); ++ } ++ ++ // We handle TypeLocs by saving their loc and consuming it in Visit*Type(). ++ // ++ // Handling Visit*TypeLoc() directly would be simpler, but sometimes unwritten ++ // types count as references (e.g. implicit conversions, with no TypeLoc). ++ // Stashing the location and visiting the contained type lets us handle both ++ // cases in VisitTagType() etc. 
++ bool TraverseTypeLoc(TypeLoc TL) { ++ SaveAndRestore Loc(LocationOfType, TL.getBeginLoc()); ++ // The base implementation calls: ++ // - Visit*TypeLoc() - does nothing ++ // - Visit*Type() - where we handle type references ++ // - TraverseTypeLoc for each lexically nested type. ++ return Base::TraverseTypeLoc(TL); ++ } + +- bool VisitTagTypeLoc(TagTypeLoc TTL) { +- report(TTL.getNameLoc(), TTL.getDecl()); ++ bool VisitTemplateSpecializationType(TemplateSpecializationType *TST) { ++ report(LocationOfType, ++ TST->getTemplateName().getAsTemplateDecl()); // Primary template. ++ report(LocationOfType, TST->getAsCXXRecordDecl()); // Specialization + return true; + } + +- bool VisitDeclRefExpr(DeclRefExpr *DRE) { +- report(DRE->getLocation(), DRE->getFoundDecl()); ++ bool VisitUsingType(UsingType *UT) { ++ report(LocationOfType, UT->getFoundDecl()); + return true; + } ++ ++ bool VisitTypedefType(TypedefType *TT) { ++ report(LocationOfType, TT->getDecl()); ++ return true; ++ } ++ ++ bool VisitUsingDecl(UsingDecl *UD) { ++ for (const auto *USD : UD->shadows()) ++ report(UD->getLocation(), USD->getTargetDecl()); ++ return true; ++ } ++ ++ bool VisitOverloadExpr(OverloadExpr *E) { ++ if (llvm::isa(E) && !Ctx.policy().Members) ++ return true; ++ for (auto *Candidate : E->decls()) ++ report(E->getExprLoc(), Candidate); ++ return true; ++ } ++ ++private: ++ void report(SourceLocation Loc, NamedDecl *ND) { ++ while (Loc.isMacroID()) { ++ auto DecLoc = Ctx.sourceManager().getDecomposedLoc(Loc); ++ const SrcMgr::ExpansionInfo &Expansion = ++ Ctx.sourceManager().getSLocEntry(DecLoc.first).getExpansion(); ++ if (!Expansion.isMacroArgExpansion()) ++ return; // Names within macro bodies are not considered references. ++ Loc = Expansion.getSpellingLoc().getLocWithOffset(DecLoc.second); ++ } ++ // FIXME: relevant ranking hints? 
++ if (ND) ++ Callback(Loc, *cast(ND->getCanonicalDecl())); ++ } ++ ++ using Base = RecursiveASTVisitor; ++ ++ AnalysisContext &Ctx; ++ DeclCallback Callback; ++ ++ SourceLocation LocationOfType; + }; + + } // namespace + +-void walkAST(Decl &Root, DeclCallback Callback) { +- ASTWalker(Callback).TraverseDecl(&Root); ++void walkAST(AnalysisContext &Ctx, Decl &Root, DeclCallback Callback) { ++ ASTWalker(Ctx, Callback).TraverseDecl(&Root); + } + + } // namespace include_cleaner +diff --git a/clang-tools-extra/include-cleaner/tool/CMakeLists.txt b/clang-tools-extra/include-cleaner/tool/CMakeLists.txt +new file mode 100644 +index 000000000000..f8f7c81c761b +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/tool/CMakeLists.txt +@@ -0,0 +1,17 @@ ++set(LLVM_LINK_COMPONENTS support) ++ ++add_clang_tool(clang-include-cleaner ++ ClangIncludeCleaner.cpp ++ ) ++ ++clang_target_link_libraries(clang-include-cleaner ++ PRIVATE ++ clangBasic ++ clangFrontend ++ clangTooling ++ ) ++ ++target_link_libraries(clang-include-cleaner ++ PRIVATE ++ clangIncludeCleaner ++ ) +\ No newline at end of file +diff --git a/clang-tools-extra/include-cleaner/tool/ClangIncludeCleaner.cpp b/clang-tools-extra/include-cleaner/tool/ClangIncludeCleaner.cpp +new file mode 100644 +index 000000000000..aad70eabdae9 +--- /dev/null ++++ b/clang-tools-extra/include-cleaner/tool/ClangIncludeCleaner.cpp +@@ -0,0 +1,187 @@ ++//===--- ClangIncludeCleaner.cpp - Standalone used-header analysis --------===// ++// ++// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. ++// See https://llvm.org/LICENSE.txt for license information. ++// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception ++// ++//===----------------------------------------------------------------------===// ++// ++// clang-include-cleaner finds violations of include-what-you-use policy. ++// ++// It scans a file, finding referenced symbols and headers providing them. 
++// - if a reference is satisfied only by indirect #include dependencies, ++// this violates the policy and direct #includes are suggested. ++// - if some #include directive doesn't satisfy any references, this violates ++// the policy (don't include what you don't use!) and removal is suggested. ++// ++// With the -satisfied flag, it will also explain things that were OK: ++// satisfied references and used #includes. ++// ++// This tool doesn't fix broken code where missing #includes prevent parsing, ++// try clang-include-fixer for this instead. ++// ++//===----------------------------------------------------------------------===// ++ ++#include "clang-include-cleaner/Analysis.h" ++#include "clang-include-cleaner/Hooks.h" ++#include "clang/Basic/Diagnostic.h" ++#include "clang/Frontend/CompilerInstance.h" ++#include "clang/Frontend/FrontendAction.h" ++#include "clang/Tooling/CommonOptionsParser.h" ++#include "clang/Tooling/Tooling.h" ++#include "llvm/Support/CommandLine.h" ++#include "llvm/Support/InitLLVM.h" ++ ++llvm::cl::OptionCategory OptionsCat{"clang-include-cleaner"}; ++llvm::cl::opt ShowSatisfied{ ++ "satisfied", ++ llvm::cl::cat(OptionsCat), ++ llvm::cl::desc( ++ "Show references whose header is included, and used includes"), ++ llvm::cl::init(false), ++}; ++llvm::cl::opt Recover{ ++ "recover", ++ llvm::cl::cat(OptionsCat), ++ llvm::cl::desc("Suppress further errors for the same header"), ++ llvm::cl::init(true), ++}; ++ ++namespace clang { ++namespace include_cleaner { ++namespace { ++ ++class Action : public clang::ASTFrontendAction { ++public: ++ bool BeginSourceFileAction(CompilerInstance &CI) override { ++ Diag = &CI.getDiagnostics(); ++ ID.emplace(Diag); ++ Ctx.emplace(Policy{}, CI.getPreprocessor()); ++ CI.getPreprocessor().addPPCallbacks(PP.record(*Ctx)); ++ return true; ++ } ++ ++ void EndSourceFile() override { ++ llvm::DenseSet
Recovered; ++ llvm::DenseMap Used; ++ walkUsed(*Ctx, AST.TopLevelDecls, PP.MacroReferences, ++ [&](SourceLocation Loc, Symbol Sym, ArrayRef
Headers) { ++ diagnoseReference(Loc, Sym, Headers, Recovered, Used); ++ }); ++ diagnoseIncludes(PP.Includes.all(), Used); ++ Ctx.reset(); ++ ++ ASTFrontendAction::EndSourceFile(); ++ } ++ ++ virtual std::unique_ptr ++ CreateASTConsumer(CompilerInstance &CI, StringRef InFile) override { ++ return AST.record(*Ctx); ++ } ++ ++private: ++ // The diagnostics that we issue. ++ struct CustomDiagnosticIDs { ++ // References ++ unsigned Satisfied; ++ unsigned Unsatisfied; ++ unsigned NoHeader; ++ unsigned NoteHeader; ++ // #includes ++ unsigned Used; ++ unsigned Unused; ++ ++ CustomDiagnosticIDs(DiagnosticsEngine *D) { ++ auto SatisfiedLevel = ShowSatisfied ? DiagnosticsEngine::Remark ++ : DiagnosticsEngine::Ignored; ++ auto Error = DiagnosticsEngine::Error; ++ auto Note = DiagnosticsEngine::Note; ++ auto Warn = DiagnosticsEngine::Warning; ++ ++ Satisfied = D->getCustomDiagID(SatisfiedLevel, "%0 '%1' provided by %2"); ++ Unsatisfied = D->getCustomDiagID(Error, "no header included for %0 '%1'"); ++ NoHeader = D->getCustomDiagID(Warn, "unknown header provides %0 '%1'"); ++ NoteHeader = D->getCustomDiagID(Note, "provided by %0"); ++ Used = D->getCustomDiagID(SatisfiedLevel, "include provides %0 '%1'"); ++ Unused = D->getCustomDiagID(Error, "include is unused"); ++ } ++ }; ++ ++ void ++ diagnoseReference(SourceLocation Loc, Symbol Sym, ArrayRef
Headers, ++ llvm::DenseSet
&Recovered, ++ llvm::DenseMap &Used) { ++ bool Diagnosed = false; ++ for (const auto &H : Headers) { ++ if (H.kind() == Header::Builtin || H.kind() == Header::MainFile) { ++ if (!Diagnosed) { ++ Diag->Report(Loc, ID->Satisfied) ++ << Sym.nodeName() << Sym.name() << H.name(); ++ Diagnosed = true; ++ } ++ } ++ for (const auto *I : PP.Includes.match(H)) { ++ Used.try_emplace(I, Sym); ++ if (!Diagnosed) { ++ Diag->Report(Loc, ID->Satisfied) ++ << Sym.nodeName() << Sym.name() << I->Spelled; ++ Diagnosed = true; ++ } ++ } ++ } ++ if (Diagnosed) ++ return; ++ for (const auto &H : Headers) { ++ if (Recovered.contains(H)) { ++ Diag->Report(Loc, ID->Satisfied) ++ << Sym.nodeName() << Sym.name() << H.name(); ++ return; ++ } ++ } ++ Diag->Report(Loc, Headers.empty() ? ID->NoHeader : ID->Unsatisfied) ++ << Sym.nodeName() << Sym.name(); ++ for (const auto &H : Headers) { ++ Recovered.insert(H); ++ Diag->Report(ID->NoteHeader) << H.name(); ++ } ++ } ++ ++ void diagnoseIncludes( ++ ArrayRef Includes, ++ const llvm::DenseMap &Used) { ++ for (const auto &I : Includes) { ++ auto It = Used.find(&I); ++ if (It == Used.end()) ++ Diag->Report(I.Location, ID->Unused); ++ else ++ Diag->Report(I.Location, ID->Used) ++ << It->second.nodeName() << It->second.name(); ++ } ++ } ++ ++ llvm::Optional Ctx; ++ RecordedPP PP; ++ RecordedAST AST; ++ DiagnosticsEngine *Diag; ++ llvm::Optional ID; ++}; ++ ++} // namespace ++} // namespace include_cleaner ++} // namespace clang ++ ++int main(int Argc, const char **Argv) { ++ llvm::InitLLVM X(Argc, Argv); ++ auto OptionsParser = ++ clang::tooling::CommonOptionsParser::create(Argc, Argv, OptionsCat); ++ if (!OptionsParser) { ++ llvm::errs() << toString(OptionsParser.takeError()); ++ return 1; ++ } ++ ++ return clang::tooling::ClangTool(OptionsParser->getCompilations(), ++ OptionsParser->getSourcePathList()) ++ .run(clang::tooling::newFrontendActionFactory< ++ clang::include_cleaner::Action>() ++ .get()); ++} +diff --git 
a/clang/include/clang/Tooling/Inclusions/StandardLibrary.h b/clang/include/clang/Tooling/Inclusions/StandardLibrary.h +index c6ce2780dae6..e94a7fb9304a 100644 +--- a/clang/include/clang/Tooling/Inclusions/StandardLibrary.h ++++ b/clang/include/clang/Tooling/Inclusions/StandardLibrary.h +@@ -49,6 +49,9 @@ private: + friend bool operator==(const Header &L, const Header &R) { + return L.ID == R.ID; + } ++ friend bool operator<(const Header &L, const Header &R) { ++ return L.ID < R.ID; ++ } + }; + + // A top-level standard library symbol, such as std::vector diff --git a/build/buildconfig.py b/build/buildconfig.py index 3fcc82ffdb5d..527690c2b0e8 100644 --- a/build/buildconfig.py +++ b/build/buildconfig.py @@ -3,8 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. import sys -from mozbuild.base import MozbuildObject + from mozbuild.backend.configenvironment import PartialConfigEnvironment +from mozbuild.base import MozbuildObject config = MozbuildObject.from_environment() partial_config = PartialConfigEnvironment(config.topobjdir) diff --git a/build/cargo-linker b/build/cargo-linker index e7546365c381..94b05f821394 100755 --- a/build/cargo-linker +++ b/build/cargo-linker @@ -27,7 +27,6 @@ import os import sys - SANITIZERS = { "asan": "address", "hwasan": "hwaddress", diff --git a/build/checksums.py b/build/checksums.py index d82c7211175f..970e44d80323 100755 --- a/build/checksums.py +++ b/build/checksums.py @@ -5,10 +5,10 @@ from __future__ import with_statement -from optparse import OptionParser import hashlib import logging import os +from optparse import OptionParser logger = logging.getLogger("checksums.py") diff --git a/build/clang-plugin/ThreadAllows.py b/build/clang-plugin/ThreadAllows.py index 782f32c006fe..f3e1ee894c8e 100644 --- a/build/clang-plugin/ThreadAllows.py +++ b/build/clang-plugin/ThreadAllows.py @@ -4,7 +4,6 @@ import json import os import posixpath - from os import PathLike # `typing.Literal` not available until Python 3.8; diff 
--git a/build/clang-plugin/import_mozilla_checks.py b/build/clang-plugin/import_mozilla_checks.py index f18abfae647f..d573dafcf1bf 100755 --- a/build/clang-plugin/import_mozilla_checks.py +++ b/build/clang-plugin/import_mozilla_checks.py @@ -3,10 +3,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import os -import glob -import shutil import errno +import glob +import os +import shutil import ThirdPartyPaths import ThreadAllows diff --git a/build/compare-mozconfig/compare-mozconfigs.py b/build/compare-mozconfig/compare-mozconfigs.py index 7e39d9c071c5..7a46d61d7623 100644 --- a/build/compare-mozconfig/compare-mozconfigs.py +++ b/build/compare-mozconfig/compare-mozconfigs.py @@ -7,9 +7,9 @@ from __future__ import unicode_literals +import difflib import logging import os -import difflib import unittest import buildconfig diff --git a/build/gen_symverscript.py b/build/gen_symverscript.py index f32554abc800..d1a5abd07d4e 100644 --- a/build/gen_symverscript.py +++ b/build/gen_symverscript.py @@ -5,6 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. import sys + from mozbuild.preprocessor import Preprocessor diff --git a/build/gen_test_packages_manifest.py b/build/gen_test_packages_manifest.py index e57bff3dd206..162f4e84209a 100644 --- a/build/gen_test_packages_manifest.py +++ b/build/gen_test_packages_manifest.py @@ -5,7 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. import json - from argparse import ArgumentParser ALL_HARNESSES = [ diff --git a/build/mach_initialize.py b/build/mach_initialize.py index 84da39f6fb29..13d5cac1483a 100644 --- a/build/mach_initialize.py +++ b/build/mach_initialize.py @@ -20,10 +20,8 @@ if sys.version_info[0] < 3: else: from importlib.abc import MetaPathFinder - from types import ModuleType - STATE_DIR_FIRST_RUN = """ Mach and the build system store shared state in a common directory on the filesystem. 
The following directory will be created: @@ -145,7 +143,7 @@ def initialize(topsrcdir): ) ] - from mach.util import setenv, get_state_dir + from mach.util import get_state_dir, setenv state_dir = _create_state_dir() @@ -157,7 +155,6 @@ def initialize(topsrcdir): import mach.base import mach.main - from mach.main import MachCommandReference # Centralized registry of available mach commands diff --git a/build/midl.py b/build/midl.py index 3480e8df3ad1..463a161113ad 100644 --- a/build/midl.py +++ b/build/midl.py @@ -2,12 +2,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import buildconfig +import os import shutil import subprocess -import os import sys +import buildconfig + def relativize(path, base=None): # For absolute path in Unix builds, we need relative paths because diff --git a/build/pgo/genpgocert.py b/build/pgo/genpgocert.py index 98b05355bfb6..e3d2c4c88f63 100644 --- a/build/pgo/genpgocert.py +++ b/build/pgo/genpgocert.py @@ -7,18 +7,18 @@ # certificates used for SSL testing in Mochitest. The already generated # certs are located at $topsrcdir/build/pgo/certs/ . -import mozinfo import os import random import re import shutil import subprocess import sys +from distutils.spawn import find_executable -from mozbuild.base import MozbuildObject, BinaryNotFoundException +import mozinfo +from mozbuild.base import BinaryNotFoundException, MozbuildObject from mozfile import NamedTemporaryFile, TemporaryDirectory from mozprofile.permissions import ServerLocations -from distutils.spawn import find_executable dbFiles = [ re.compile("^cert[0-9]+\.db$"), diff --git a/build/pgo/profileserver.py b/build/pgo/profileserver.py index 3b112562ad03..94f54cbd1736 100755 --- a/build/pgo/profileserver.py +++ b/build/pgo/profileserver.py @@ -4,19 +4,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import glob import json import os -import sys -import glob import subprocess +import sys import mozcrash -from mozbuild.base import MozbuildObject, BinaryNotFoundException +from mozbuild.base import BinaryNotFoundException, MozbuildObject from mozfile import TemporaryDirectory from mozhttpd import MozHttpd from mozprofile import FirefoxProfile, Preferences from mozprofile.permissions import ServerLocations -from mozrunner import FirefoxRunner, CLI +from mozrunner import CLI, FirefoxRunner from six import string_types PORT = 8888 diff --git a/build/rust/mozbuild/generate_buildconfig.py b/build/rust/mozbuild/generate_buildconfig.py index c5f8cce668f5..255135ae81a2 100644 --- a/build/rust/mozbuild/generate_buildconfig.py +++ b/build/rust/mozbuild/generate_buildconfig.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import buildconfig -import textwrap import string +import textwrap + +import buildconfig def generate_bool(name): diff --git a/build/unix/rewrite_sanitizer_dylib.py b/build/unix/rewrite_sanitizer_dylib.py index 405b3a97c9aa..2fa8a92b27f2 100644 --- a/build/unix/rewrite_sanitizer_dylib.py +++ b/build/unix/rewrite_sanitizer_dylib.py @@ -2,13 +2,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from argparse import ArgumentParser import os -from pathlib import Path import re import shutil import subprocess import sys +from argparse import ArgumentParser +from pathlib import Path + from buildconfig import substs """ diff --git a/build/upload.py b/build/upload.py index 1a9a69c971fb..75eb163b24f0 100644 --- a/build/upload.py +++ b/build/upload.py @@ -15,9 +15,9 @@ # to indicate that files should be uploaded including their paths relative # to the base path. 
-import sys import os import shutil +import sys from optparse import OptionParser diff --git a/build/upload_generated_sources.py b/build/upload_generated_sources.py index 15439565d91c..abfef92e3cec 100644 --- a/build/upload_generated_sources.py +++ b/build/upload_generated_sources.py @@ -6,22 +6,23 @@ from __future__ import absolute_import, print_function, unicode_literals import argparse -from contextlib import contextmanager import gzip import io import logging +import os +import sys +import tarfile +import time +from contextlib import contextmanager +from threading import Event, Thread + +import requests from mozbuild.generated_sources import ( get_filename_with_digest, get_s3_region_and_bucket, ) -import os -from six.moves.queue import Queue -import requests -import sys -import tarfile from requests.packages.urllib3.util.retry import Retry -from threading import Event, Thread -import time +from six.moves.queue import Queue # Arbitrary, should probably measure this. NUM_WORKER_THREADS = 10 diff --git a/build/valgrind/mach_commands.py b/build/valgrind/mach_commands.py index 74d5b05b5396..c3b8eab49db6 100644 --- a/build/valgrind/mach_commands.py +++ b/build/valgrind/mach_commands.py @@ -6,18 +6,13 @@ from __future__ import absolute_import, unicode_literals import json import logging -import mozinfo import os import time -from mach.decorators import ( - Command, - CommandArgument, -) -from mozbuild.base import ( - MachCommandConditions as conditions, - BinaryNotFoundException, -) +import mozinfo +from mach.decorators import Command, CommandArgument +from mozbuild.base import BinaryNotFoundException +from mozbuild.base import MachCommandConditions as conditions def is_valgrind_build(cls): diff --git a/build/vs/generate_yaml.py b/build/vs/generate_yaml.py index c2c5af24bde5..2d6a429d3ead 100755 --- a/build/vs/generate_yaml.py +++ b/build/vs/generate_yaml.py @@ -3,17 +3,17 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +import sys + +import yaml from vsdownload import ( getArgsParser, getManifest, getPackages, getSelectedPackages, - setPackageSelection, lowercaseIgnores, + setPackageSelection, ) -import sys -import yaml - if __name__ == "__main__": parser = getArgsParser() diff --git a/build/vs/pack_vs.py b/build/vs/pack_vs.py index 9ad7f0a36d30..978b3840175b 100755 --- a/build/vs/pack_vs.py +++ b/build/vs/pack_vs.py @@ -3,17 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from vsdownload import ( - downloadPackages, - extractPackages, -) -from pathlib import Path -from tempfile import TemporaryDirectory -from zstandard import ZstdCompressor import argparse import os import tarfile +from pathlib import Path +from tempfile import TemporaryDirectory + import yaml +from vsdownload import downloadPackages, extractPackages +from zstandard import ZstdCompressor def tzstd_path(path): diff --git a/build/win32/autowinchecksec.py b/build/win32/autowinchecksec.py index 1a394977dc14..5038dc56a68d 100644 --- a/build/win32/autowinchecksec.py +++ b/build/win32/autowinchecksec.py @@ -7,11 +7,12 @@ # run the Winchecksec tool (https://github.com/trailofbits/winchecksec) # against a given Windows binary. 
-import buildconfig import json import subprocess import sys +import buildconfig + # usage if len(sys.argv) != 2: print("""usage : autowinchecksec.by path_to_binary""") diff --git a/build/win32/dummy_libs.py b/build/win32/dummy_libs.py index 93faeef429b3..16947c6c846c 100644 --- a/build/win32/dummy_libs.py +++ b/build/win32/dummy_libs.py @@ -4,6 +4,7 @@ import os import subprocess + from buildconfig import substs diff --git a/gfx/thebes/gfxUserFontSet.cpp b/gfx/thebes/gfxUserFontSet.cpp index a0981d991f0e..f9f12aeb1d6e 100644 --- a/gfx/thebes/gfxUserFontSet.cpp +++ b/gfx/thebes/gfxUserFontSet.cpp @@ -38,8 +38,8 @@ mozilla::LogModule* gfxUserFontSet::GetUserFontsLog() { static Atomic sFontSetGeneration(0); gfxUserFontEntry::gfxUserFontEntry( - gfxUserFontSet* aFontSet, const nsTArray& aFontFaceSrcList, - WeightRange aWeight, StretchRange aStretch, SlantStyleRange aStyle, + const nsTArray& aFontFaceSrcList, WeightRange aWeight, + StretchRange aStretch, SlantStyleRange aStyle, const nsTArray& aFeatureSettings, const nsTArray& aVariationSettings, uint32_t aLanguageOverride, gfxCharacterMap* aUnicodeRanges, @@ -52,8 +52,7 @@ gfxUserFontEntry::gfxUserFontEntry( mSeenLocalSource(false), mUnsupportedFormat(false), mFontDisplay(aFontDisplay), - mLoader(nullptr), - mFontSet(aFontSet) { + mLoader(nullptr) { mIsUserFontContainer = true; mSrcList = aFontFaceSrcList.Clone(); mCurrentSrcIndex = 0; @@ -410,6 +409,15 @@ static bool IgnorePrincipal(gfxFontSrcURI* aURI) { } void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { + RefPtr fontSet = GetUserFontSet(); + if (NS_WARN_IF(!fontSet)) { + LOG(("userfonts (%p) failed expired font set for (%s)\n", fontSet.get(), + mFamilyName.get())); + mFontDataLoadingState = LOADING_FAILED; + SetLoadState(STATUS_FAILED); + return; + } + uint32_t numSrc = mSrcList.Length(); // load each src entry in turn, until a local face is found @@ -425,7 +433,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { gfxFontEntry* fe = nullptr; if 
(!pfl->IsFontFamilyWhitelistActive()) { fe = gfxPlatform::GetPlatform()->LookupLocalFont( - mFontSet->GetPresContext(), currSrc.mLocalName, Weight(), Stretch(), + fontSet->GetPresContext(), currSrc.mLocalName, Weight(), Stretch(), SlantStyle()); // Note that we've attempted a local lookup, even if it failed, // as this means we are dependent on any updates to the font list. @@ -440,8 +448,8 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { } if (fe) { LOG(("userfonts (%p) [src %d] loaded local: (%s) for (%s) gen: %8.8x\n", - mFontSet, mCurrentSrcIndex, currSrc.mLocalName.get(), - mFamilyName.get(), uint32_t(mFontSet->mGeneration))); + fontSet.get(), mCurrentSrcIndex, currSrc.mLocalName.get(), + mFamilyName.get(), uint32_t(fontSet->mGeneration))); fe->mFeatureSettings.AppendElements(mFeatureSettings); fe->mVariationSettings.AppendElements(mVariationSettings); fe->mLanguageOverride = mLanguageOverride; @@ -462,8 +470,9 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { currSrc.mSourceType + 1); return; } else { - LOG(("userfonts (%p) [src %d] failed local: (%s) for (%s)\n", mFontSet, - mCurrentSrcIndex, currSrc.mLocalName.get(), mFamilyName.get())); + LOG(("userfonts (%p) [src %d] failed local: (%s) for (%s)\n", + fontSet.get(), mCurrentSrcIndex, currSrc.mLocalName.get(), + mFamilyName.get())); } } @@ -494,7 +503,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { LOG( ("userfonts (%p) [src %d] " "loaded uri from cache: (%s) for (%s)\n", - mFontSet, mCurrentSrcIndex, + fontSet.get(), mCurrentSrcIndex, currSrc.mURI->GetSpecOrDefault().get(), mFamilyName.get())); } return; @@ -510,7 +519,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { // record the principal we should use for the load for use when // creating a channel and when caching the loaded entry. 
- mPrincipal = currSrc.LoadPrincipal(*mFontSet); + mPrincipal = currSrc.LoadPrincipal(*fontSet); bool loadDoesntSpin = !aForceAsync && currSrc.mURI->SyncLoadIsOK(); @@ -520,7 +529,7 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { // sync load font immediately nsresult rv = - mFontSet->SyncLoadFontData(this, &currSrc, buffer, bufferLength); + fontSet->SyncLoadFontData(this, &currSrc, buffer, bufferLength); if (NS_SUCCEEDED(rv) && LoadPlatformFontSync(mCurrentSrcIndex, buffer, bufferLength)) { @@ -529,26 +538,26 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { currSrc.mSourceType + 1); return; } else { - mFontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", - nsIScriptError::errorFlag, rv); + fontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", + nsIScriptError::errorFlag, rv); } } else { // otherwise load font async - nsresult rv = mFontSet->StartLoad(this, mCurrentSrcIndex); + nsresult rv = fontSet->StartLoad(this, mCurrentSrcIndex); bool loadOK = NS_SUCCEEDED(rv); if (loadOK) { if (LOG_ENABLED()) { LOG(("userfonts (%p) [src %d] loading uri: (%s) for (%s)\n", - mFontSet, mCurrentSrcIndex, + fontSet.get(), mCurrentSrcIndex, currSrc.mURI->GetSpecOrDefault().get(), mFamilyName.get())); } return; } else { - mFontSet->LogMessage(this, mCurrentSrcIndex, - "failed to start download", - nsIScriptError::errorFlag, rv); + fontSet->LogMessage(this, mCurrentSrcIndex, + "failed to start download", + nsIScriptError::errorFlag, rv); } } } else { @@ -577,8 +586,8 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { currSrc.mSourceType + 1); return; } else { - mFontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", - nsIScriptError::errorFlag); + fontSet->LogMessage(this, mCurrentSrcIndex, "font load failed", + nsIScriptError::errorFlag); } } @@ -586,12 +595,12 @@ void gfxUserFontEntry::DoLoadNextSrc(bool aForceAsync) { } if (mUnsupportedFormat) { - mFontSet->LogMessage(this, mCurrentSrcIndex, "no supported format found", 
- nsIScriptError::warningFlag); + fontSet->LogMessage(this, mCurrentSrcIndex, "no supported format found", + nsIScriptError::warningFlag); } // all src's failed; mark this entry as unusable (so fallback will occur) - LOG(("userfonts (%p) failed all src for (%s)\n", mFontSet, + LOG(("userfonts (%p) failed all src for (%s)\n", fontSet.get(), mFamilyName.get())); mFontDataLoadingState = LOADING_FAILED; SetLoadState(STATUS_FAILED); @@ -658,22 +667,27 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, uint32_t aSanitizedLength, nsTArray&& aMessages) { MOZ_ASSERT(NS_IsMainThread()); + RefPtr fontSet = GetUserFontSet(); + if (NS_WARN_IF(!fontSet)) { + free((void*)aOriginalFontData); + return false; + } for (const auto& msg : aMessages) { - mFontSet->LogMessage(this, aSrcIndex, msg.mMessage.get(), - msg.mLevel > 0 ? nsIScriptError::warningFlag - : nsIScriptError::errorFlag); + fontSet->LogMessage(this, aSrcIndex, msg.mMessage.get(), + msg.mLevel > 0 ? nsIScriptError::warningFlag + : nsIScriptError::errorFlag); } if (!aSanitizedFontData) { - mFontSet->LogMessage(this, aSrcIndex, "rejected by sanitizer"); + fontSet->LogMessage(this, aSrcIndex, "rejected by sanitizer"); } else { // Check whether aSanitizedFontData is a known OpenType format; it might be // a TrueType Collection, which OTS would accept but we don't yet // know how to handle. If so, discard. 
if (gfxFontUtils::DetermineFontDataType( aSanitizedFontData, aSanitizedLength) != GFX_USERFONT_OPENTYPE) { - mFontSet->LogMessage(this, aSrcIndex, "not a supported OpenType format"); + fontSet->LogMessage(this, aSrcIndex, "not a supported OpenType format"); free((void*)aSanitizedFontData); aSanitizedFontData = nullptr; } @@ -721,7 +735,7 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, mName, Weight(), Stretch(), SlantStyle(), aSanitizedFontData, aSanitizedLength); if (!fe) { - mFontSet->LogMessage(this, aSrcIndex, "not usable by platform"); + fontSet->LogMessage(this, aSrcIndex, "not usable by platform"); } } @@ -755,15 +769,15 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, fe->mDescentOverride = mDescentOverride; fe->mLineGapOverride = mLineGapOverride; fe->mSizeAdjust = mSizeAdjust; - StoreUserFontData(fe, aSrcIndex, mFontSet->GetPrivateBrowsing(), + StoreUserFontData(fe, aSrcIndex, fontSet->GetPrivateBrowsing(), originalFullName, &metadata, metaOrigLen, compression); if (LOG_ENABLED()) { LOG(( "userfonts (%p) [src %d] loaded uri: (%s) for (%s) " "(%p) gen: %8.8x compress: %d%%\n", - mFontSet, aSrcIndex, + fontSet.get(), aSrcIndex, mSrcList[aSrcIndex].mURI->GetSpecOrDefault().get(), mFamilyName.get(), - this, uint32_t(mFontSet->mGeneration), fontCompressionRatio)); + this, uint32_t(fontSet->mGeneration), fontCompressionRatio)); } mPlatformFontEntry = fe; SetLoadState(STATUS_LOADED); @@ -773,7 +787,7 @@ bool gfxUserFontEntry::LoadPlatformFont(uint32_t aSrcIndex, LOG( ("userfonts (%p) [src %d] failed uri: (%s) for (%s)" " error making platform font\n", - mFontSet, aSrcIndex, + fontSet.get(), aSrcIndex, mSrcList[aSrcIndex].mURI->GetSpecOrDefault().get(), mFamilyName.get())); } @@ -830,14 +844,17 @@ void gfxUserFontEntry::FontDataDownloadComplete( return; } - // download failed or font-display timeout passed - if (mFontDataLoadingState == LOADING_TIMED_OUT) { - mFontSet->LogMessage(this, aSrcIndex, - "font-display timeout, webfont not 
used", - nsIScriptError::infoFlag, aDownloadStatus); - } else { - mFontSet->LogMessage(this, aSrcIndex, "download failed", - nsIScriptError::errorFlag, aDownloadStatus); + RefPtr fontSet = GetUserFontSet(); + if (fontSet) { + // download failed or font-display timeout passed + if (mFontDataLoadingState == LOADING_TIMED_OUT) { + fontSet->LogMessage(this, aSrcIndex, + "font-display timeout, webfont not used", + nsIScriptError::infoFlag, aDownloadStatus); + } else { + fontSet->LogMessage(this, aSrcIndex, "download failed", + nsIScriptError::errorFlag, aDownloadStatus); + } } if (aFontData) { @@ -860,8 +877,12 @@ void gfxUserFontEntry::LoadPlatformFontAsync( // We hold a strong reference to the gfxUserFontSet during this work, since // the document might be closed while we are OMT, and release it at the end // of ContinuePlatformFontLoadOnMainThread. + // + // If the set has already been freed, then the loading will fail when we + // resume on the main thread. - mFontSet->AddRef(); + MOZ_ASSERT(!mLoadingFontSet); + mLoadingFontSet = GetUserFontSet(); nsCOMPtr event = NewRunnableMethodRelease(); // for the AddRef in LoadPlatformFontAsync + // Set in LoadPlatformFontAsync. If it is null, then the font set should have + // already been freed and we would not succeed in loading the font. 
+ MOZ_ASSERT_IF(loaded, mLoadingFontSet); + mLoadingFontSet = nullptr; } void gfxUserFontEntry::FontLoadFailed(nsIFontLoadCompleteCallback* aCallback) { @@ -919,7 +943,10 @@ void gfxUserFontEntry::FontLoadFailed(nsIFontLoadCompleteCallback* aCallback) { void gfxUserFontEntry::GetUserFontSets( nsTArray>& aResult) { aResult.Clear(); - aResult.AppendElement(mFontSet); + RefPtr fontSet = GetUserFontSet(); + if (fontSet) { + aResult.AppendElement(std::move(fontSet)); + } } gfxUserFontSet::gfxUserFontSet() @@ -1260,19 +1287,23 @@ void gfxUserFontSet::UserFontCache::ForgetFont(gfxFontEntry* aFontEntry) { gfxFontEntry* gfxUserFontSet::UserFontCache::GetFont( const gfxFontFaceSrc& aSrc, const gfxUserFontEntry& aUserFontEntry) { - if (!sUserFonts || aUserFontEntry.mFontSet->BypassCache() || + if (!sUserFonts || Preferences::GetBool("gfx.downloadable_fonts.disable_cache")) { return nullptr; } + RefPtr srcFontSet = aUserFontEntry.GetUserFontSet(); + if (NS_WARN_IF(!srcFontSet) || srcFontSet->BypassCache()) { + return nullptr; + } + // Ignore principal when looking up a data: URI. RefPtr principal = - IgnorePrincipal(aSrc.mURI) ? nullptr - : aSrc.LoadPrincipal(*aUserFontEntry.mFontSet); + IgnorePrincipal(aSrc.mURI) ? nullptr : aSrc.LoadPrincipal(*srcFontSet); Entry* entry = sUserFonts->GetEntry( Key(aSrc.mURI, principal, const_cast(&aUserFontEntry), - aUserFontEntry.mFontSet->GetPrivateBrowsing())); + srcFontSet->GetPrivateBrowsing())); if (!entry) { return nullptr; } @@ -1280,7 +1311,7 @@ gfxFontEntry* gfxUserFontSet::UserFontCache::GetFont( // We have to perform another content policy check here to prevent // cache poisoning. E.g. a.com loads a font into the cache but // b.com has a CSP not allowing any fonts to be loaded. 
- if (!aUserFontEntry.mFontSet->IsFontLoadAllowed(aSrc)) { + if (!srcFontSet->IsFontLoadAllowed(aSrc)) { return nullptr; } diff --git a/gfx/thebes/gfxUserFontSet.h b/gfx/thebes/gfxUserFontSet.h index 43b5063b78e7..c7b12c002768 100644 --- a/gfx/thebes/gfxUserFontSet.h +++ b/gfx/thebes/gfxUserFontSet.h @@ -56,7 +56,7 @@ enum class StyleFontDisplay : uint8_t; } // namespace mozilla class nsFontFaceLoader; -//#define DEBUG_USERFONT_CACHE +// #define DEBUG_USERFONT_CACHE class gfxFontFaceBufferSource { NS_INLINE_DECL_THREADSAFE_REFCOUNTING(gfxFontFaceBufferSource) @@ -561,7 +561,6 @@ class gfxUserFontEntry : public gfxFontEntry { }; gfxUserFontEntry( - gfxUserFontSet* aFontSet, const nsTArray& aFontFaceSrcList, WeightRange aWeight, StretchRange aStretch, SlantStyleRange aStyle, const nsTArray& aFeatureSettings, @@ -603,6 +602,8 @@ class gfxUserFontEntry : public gfxFontEntry { UserFontLoadState LoadState() const { return mUserFontLoadState; } void LoadCanceled() { + MOZ_ASSERT(NS_IsMainThread()); + mUserFontLoadState = STATUS_NOT_LOADED; mFontDataLoadingState = NOT_LOADING; mLoader = nullptr; @@ -647,8 +648,16 @@ class gfxUserFontEntry : public gfxFontEntry { // methods to expose some information to FontFaceSet::UserFontSet // since we can't make that class a friend - void SetLoader(nsFontFaceLoader* aLoader) { mLoader = aLoader; } - nsFontFaceLoader* GetLoader() const { return mLoader; } + void SetLoader(nsFontFaceLoader* aLoader) { + MOZ_ASSERT(NS_IsMainThread()); + mLoader = aLoader; + } + + nsFontFaceLoader* GetLoader() const { + MOZ_ASSERT(NS_IsMainThread()); + return mLoader; + } + gfxFontSrcPrincipal* GetPrincipal() const { return mPrincipal; } void GetFamilyNameAndURIForLogging(uint32_t aSrcIndex, nsACString& aFamilyName, nsACString& aURI); @@ -658,9 +667,7 @@ class gfxUserFontEntry : public gfxFontEntry { return nullptr; } -#ifdef DEBUG - gfxUserFontSet* GetUserFontSet() const { return mFontSet; } -#endif + virtual already_AddRefed GetUserFontSet() const = 0; 
const nsTArray& SourceList() const { return mSrcList; } @@ -752,8 +759,8 @@ class gfxUserFontEntry : public gfxFontEntry { uint32_t aMetaOrigLen, uint8_t aCompression); // Clears and then adds to aResult all of the user font sets that this user - // font entry has been added to. This will at least include mFontSet, the - // owner of this user font entry. + // font entry has been added to. This will at least include the owner of this + // user font entry. virtual void GetUserFontSets(nsTArray>& aResult); // Calls IncrementGeneration() on all user font sets that contain this @@ -789,8 +796,7 @@ class gfxUserFontEntry : public gfxFontEntry { // Cancel() methods of nsFontFaceLoader this reference is nulled out. nsFontFaceLoader* MOZ_NON_OWNING_REF mLoader; // current loader for this entry, if any - gfxUserFontSet* MOZ_NON_OWNING_REF - mFontSet; // font-set which owns this userfont entry + RefPtr mLoadingFontSet; RefPtr mPrincipal; }; diff --git a/js/src/builtin/ModuleObject.cpp b/js/src/builtin/ModuleObject.cpp index db4d58cc7641..b3eaac7bb6d6 100644 --- a/js/src/builtin/ModuleObject.cpp +++ b/js/src/builtin/ModuleObject.cpp @@ -670,6 +670,23 @@ void ModuleNamespaceObject::ProxyHandler::finalize(JS::GCContext* gcx, class js::CyclicModuleFields { public: ModuleStatus status = ModuleStatus::Unlinked; + + bool hasTopLevelAwait : 1; + + private: + // Flag bits that determine whether other fields are present. + bool hasDfsIndex : 1; + bool hasDfsAncestorIndex : 1; + bool isAsyncEvaluating : 1; + bool hasPendingAsyncDependencies : 1; + + // Fields whose presence is conditional on the flag bits above. 
+ uint32_t dfsIndex = 0; + uint32_t dfsAncestorIndex = 0; + uint32_t asyncEvaluatingPostOrder = 0; + uint32_t pendingAsyncDependencies = 0; + + public: HeapPtr evaluationError; HeapPtr metaObject; HeapPtr scriptSourceObject; @@ -680,18 +697,37 @@ class js::CyclicModuleFields { HeapPtr starExportEntries; IndirectBindingMap importBindings; UniquePtr functionDeclarations; - Maybe dfsIndex; - Maybe dfsAncestorIndex; - bool hasTopLevelAwait = false; - Maybe asyncEvaluatingPostOrder; HeapPtr topLevelCapability; HeapPtr asyncParentModules; - Maybe pendingAsyncDependencies; HeapPtr cycleRoot; + public: + CyclicModuleFields(); + void trace(JSTracer* trc); + + void setDfsIndex(uint32_t index); + Maybe maybeDfsIndex() const; + void setDfsAncestorIndex(uint32_t index); + Maybe maybeDfsAncestorIndex() const; + void clearDfsIndexes(); + + void setAsyncEvaluating(uint32_t postOrder); + bool getIsAsyncEvaluating() const; + Maybe maybeAsyncEvaluatingPostOrder() const; + void clearAsyncEvaluatingPostOrder(); + + void setPendingAsyncDependencies(uint32_t newValue); + Maybe maybePendingAsyncDependencies() const; }; +CyclicModuleFields::CyclicModuleFields() + : hasTopLevelAwait(false), + hasDfsIndex(false), + hasDfsAncestorIndex(false), + isAsyncEvaluating(false), + hasPendingAsyncDependencies(false) {} + void CyclicModuleFields::trace(JSTracer* trc) { TraceEdge(trc, &evaluationError, "CyclicModuleFields::evaluationError"); TraceNullableEdge(trc, &metaObject, "CyclicModuleFields::metaObject"); @@ -706,12 +742,69 @@ void CyclicModuleFields::trace(JSTracer* trc) { "CyclicModuleFields::indirectExportEntries"); TraceNullableEdge(trc, &starExportEntries, "CyclicModuleFields::starExportEntries"); + importBindings.trace(trc); TraceNullableEdge(trc, &topLevelCapability, "CyclicModuleFields::topLevelCapability"); TraceNullableEdge(trc, &asyncParentModules, "CyclicModuleFields::asyncParentModules"); TraceNullableEdge(trc, &cycleRoot, "CyclicModuleFields::cycleRoot"); - importBindings.trace(trc); 
+} + +void CyclicModuleFields::setDfsIndex(uint32_t index) { + dfsIndex = index; + hasDfsIndex = true; +} + +Maybe CyclicModuleFields::maybeDfsIndex() const { + return hasDfsIndex ? Some(dfsIndex) : Nothing(); +} + +void CyclicModuleFields::setDfsAncestorIndex(uint32_t index) { + dfsAncestorIndex = index; + hasDfsAncestorIndex = true; +} + +Maybe CyclicModuleFields::maybeDfsAncestorIndex() const { + return hasDfsAncestorIndex ? Some(dfsAncestorIndex) : Nothing(); +} + +void CyclicModuleFields::clearDfsIndexes() { + dfsIndex = 0; + hasDfsIndex = false; + dfsAncestorIndex = 0; + hasDfsAncestorIndex = false; +} + +void CyclicModuleFields::setAsyncEvaluating(uint32_t postOrder) { + isAsyncEvaluating = true; + asyncEvaluatingPostOrder = postOrder; +} + +bool CyclicModuleFields::getIsAsyncEvaluating() const { + return isAsyncEvaluating; +} + +Maybe CyclicModuleFields::maybeAsyncEvaluatingPostOrder() const { + if (!isAsyncEvaluating || + asyncEvaluatingPostOrder == ASYNC_EVALUATING_POST_ORDER_CLEARED) { + return Nothing(); + } + + return Some(asyncEvaluatingPostOrder); +} + +void CyclicModuleFields::clearAsyncEvaluatingPostOrder() { + asyncEvaluatingPostOrder = ASYNC_EVALUATING_POST_ORDER_CLEARED; +} + +void CyclicModuleFields::setPendingAsyncDependencies(uint32_t newValue) { + pendingAsyncDependencies = newValue; + hasPendingAsyncDependencies = true; +} + +Maybe CyclicModuleFields::maybePendingAsyncDependencies() const { + return hasPendingAsyncDependencies ? 
Some(pendingAsyncDependencies) + : Nothing(); } /////////////////////////////////////////////////////////////////////////// @@ -851,7 +944,7 @@ void ModuleObject::initAsyncSlots(JSContext* cx, bool hasTopLevelAwait, static uint32_t NextPostOrder(JSRuntime* rt) { uint32_t ordinal = rt->moduleAsyncEvaluatingPostOrder; - MOZ_ASSERT(ordinal != ASYNC_EVALUATING_POST_ORDER_TRUE); + MOZ_ASSERT(ordinal != ASYNC_EVALUATING_POST_ORDER_CLEARED); MOZ_ASSERT(ordinal < MAX_UINT32); rt->moduleAsyncEvaluatingPostOrder++; return ordinal; @@ -870,8 +963,9 @@ static void MaybeResetPostOrderCounter(JSRuntime* rt, } void ModuleObject::setAsyncEvaluating() { - cyclicModuleFields()->asyncEvaluatingPostOrder = - Some(NextPostOrder(runtimeFromMainThread())); + MOZ_ASSERT(!isAsyncEvaluating()); + uint32_t postOrder = NextPostOrder(runtimeFromMainThread()); + cyclicModuleFields()->setAsyncEvaluating(postOrder); } void ModuleObject::initScriptSlots(HandleScript script) { @@ -1017,21 +1111,21 @@ bool ModuleObject::hasTopLevelAwait() const { } bool ModuleObject::isAsyncEvaluating() const { - return cyclicModuleFields()->asyncEvaluatingPostOrder.isSome(); + return cyclicModuleFields()->getIsAsyncEvaluating(); } Maybe ModuleObject::maybeDfsIndex() const { - return cyclicModuleFields()->dfsIndex; + return cyclicModuleFields()->maybeDfsIndex(); } uint32_t ModuleObject::dfsIndex() const { return maybeDfsIndex().value(); } void ModuleObject::setDfsIndex(uint32_t index) { - cyclicModuleFields()->dfsIndex = Some(index); + cyclicModuleFields()->setDfsIndex(index); } Maybe ModuleObject::maybeDfsAncestorIndex() const { - return cyclicModuleFields()->dfsAncestorIndex; + return cyclicModuleFields()->maybeDfsAncestorIndex(); } uint32_t ModuleObject::dfsAncestorIndex() const { @@ -1039,12 +1133,11 @@ uint32_t ModuleObject::dfsAncestorIndex() const { } void ModuleObject::setDfsAncestorIndex(uint32_t index) { - cyclicModuleFields()->dfsAncestorIndex = Some(index); + 
cyclicModuleFields()->setDfsAncestorIndex(index); } void ModuleObject::clearDfsIndexes() { - cyclicModuleFields()->dfsIndex = Nothing(); - cyclicModuleFields()->dfsAncestorIndex = Nothing(); + cyclicModuleFields()->clearDfsIndexes(); } PromiseObject* ModuleObject::maybeTopLevelCapability() const { @@ -1088,25 +1181,19 @@ bool ModuleObject::appendAsyncParentModule(JSContext* cx, } Maybe ModuleObject::maybePendingAsyncDependencies() const { - return cyclicModuleFields()->pendingAsyncDependencies; + return cyclicModuleFields()->maybePendingAsyncDependencies(); } uint32_t ModuleObject::pendingAsyncDependencies() const { return maybePendingAsyncDependencies().value(); } -bool ModuleObject::hasAsyncEvaluatingPostOrder() const { - Maybe value = cyclicModuleFields()->asyncEvaluatingPostOrder; - return value.isSome() && *value != ASYNC_EVALUATING_POST_ORDER_TRUE; -} - Maybe ModuleObject::maybeAsyncEvaluatingPostOrder() const { - return cyclicModuleFields()->asyncEvaluatingPostOrder; + return cyclicModuleFields()->maybeAsyncEvaluatingPostOrder(); } uint32_t ModuleObject::getAsyncEvaluatingPostOrder() const { - MOZ_ASSERT(hasAsyncEvaluatingPostOrder()); - return maybeAsyncEvaluatingPostOrder().value(); + return cyclicModuleFields()->maybeAsyncEvaluatingPostOrder().value(); } void ModuleObject::clearAsyncEvaluatingPostOrder() { @@ -1115,12 +1202,11 @@ void ModuleObject::clearAsyncEvaluatingPostOrder() { JSRuntime* rt = runtimeFromMainThread(); MaybeResetPostOrderCounter(rt, getAsyncEvaluatingPostOrder()); - cyclicModuleFields()->asyncEvaluatingPostOrder = - Some(ASYNC_EVALUATING_POST_ORDER_TRUE); + cyclicModuleFields()->clearAsyncEvaluatingPostOrder(); } void ModuleObject::setPendingAsyncDependencies(uint32_t newValue) { - cyclicModuleFields()->pendingAsyncDependencies = Some(newValue); + cyclicModuleFields()->setPendingAsyncDependencies(newValue); } void ModuleObject::setCycleRoot(ModuleObject* cycleRoot) { diff --git a/js/src/builtin/ModuleObject.h 
b/js/src/builtin/ModuleObject.h index 905c21e4a5c9..268242d7ae5f 100644 --- a/js/src/builtin/ModuleObject.h +++ b/js/src/builtin/ModuleObject.h @@ -250,7 +250,7 @@ class ModuleNamespaceObject : public ProxyObject { // Value types of [[Status]] in a Cyclic Module Record // https://tc39.es/ecma262/#table-cyclic-module-fields -enum class ModuleStatus : int32_t { +enum class ModuleStatus : int8_t { Unlinked, Linking, Linked, @@ -265,30 +265,26 @@ enum class ModuleStatus : int32_t { Evaluated_Error }; -// Special values for ModuleObject's AsyncEvaluatingPostOrderSlot slot, which is -// used to implement the AsyncEvaluation field of cyclic module records. +// Special values for CyclicModuleFields' asyncEvaluatingPostOrderSlot field, +// which is used as part of the implementation of the AsyncEvaluation field of +// cyclic module records. // -// The spec requires us to distinguish true, false, and 'never previously set to -// true', as well as the order in which the field was set to true for async -// evaluating modules. +// The spec requires us to be able to tell the order in which the field was set +// to true for async evaluating modules. // -// This is arranged by using an integer to record the order. Undefined is used -// to mean false and any integer value true. While a module is async evaluating -// the integer value gives the order that the field was set to true. After -// evaluation is complete the value is set to ASYNC_EVALUATING_POST_ORDER_TRUE, -// which still signifies true but loses the order information. +// This is arranged by using an integer to record the order. After evaluation is +// complete the value is set to ASYNC_EVALUATING_POST_ORDER_CLEARED. // // See https://tc39.es/ecma262/#sec-cyclic-module-records for field defintion. // See https://tc39.es/ecma262/#sec-async-module-execution-fulfilled for sort // requirement. -// True value that also indicates that the field was previously true. 
-constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_TRUE = 0; - -// Initial value for the runtime's counter used to generate these values; the -// first non-false value. +// Initial value for the runtime's counter used to generate these values. constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_INIT = 1; +// Value that the field is set to after being cleared. +constexpr uint32_t ASYNC_EVALUATING_POST_ORDER_CLEARED = 0; + class ModuleObject : public NativeObject { public: // Module fields including those for AbstractModuleRecords described by: @@ -365,7 +361,6 @@ class ModuleObject : public NativeObject { ListObject* asyncParentModules() const; mozilla::Maybe maybePendingAsyncDependencies() const; uint32_t pendingAsyncDependencies() const; - bool hasAsyncEvaluatingPostOrder() const; mozilla::Maybe maybeAsyncEvaluatingPostOrder() const; uint32_t getAsyncEvaluatingPostOrder() const; void clearAsyncEvaluatingPostOrder(); diff --git a/js/src/jit-test/tests/modules/async-eval-state.js b/js/src/jit-test/tests/modules/async-eval-state.js index e7edbf13a8e5..b3c524d961b2 100644 --- a/js/src/jit-test/tests/modules/async-eval-state.js +++ b/js/src/jit-test/tests/modules/async-eval-state.js @@ -34,7 +34,7 @@ const StatusEvaluated = 5; drainJobQueue(); assertEq(m.isAsyncEvaluating, true); assertEq(m.status, StatusEvaluated); - assertEq(m.asyncEvaluatingPostOrder, 0); + assertEq(m.asyncEvaluatingPostOrder, undefined); } { @@ -50,7 +50,7 @@ const StatusEvaluated = 5; assertEq(m.isAsyncEvaluating, true); assertEq(m.status, StatusEvaluated); assertEq(m.evaluationError, 2); - assertEq(m.asyncEvaluatingPostOrder, 0); + assertEq(m.asyncEvaluatingPostOrder, undefined); } { @@ -65,7 +65,7 @@ const StatusEvaluated = 5; assertEq(m.isAsyncEvaluating, true); assertEq(m.status, StatusEvaluated); assertEq(m.evaluationError, 1); - assertEq(m.asyncEvaluatingPostOrder, 0); + assertEq(m.asyncEvaluatingPostOrder, undefined); } { @@ -86,7 +86,7 @@ const StatusEvaluated = 5; assertEq(a.status, 
StatusEvaluated); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } { @@ -106,10 +106,10 @@ const StatusEvaluated = 5; drainJobQueue(); assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } { @@ -136,13 +136,13 @@ const StatusEvaluated = 5; drainJobQueue(); assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); assertEq(c.isAsyncEvaluating, true); assertEq(c.status, StatusEvaluated); - assertEq(c.asyncEvaluatingPostOrder, 0); + assertEq(c.asyncEvaluatingPostOrder, undefined); } { @@ -176,11 +176,11 @@ const StatusEvaluated = 5; assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); assertEq(a.evaluationError, 1); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); assertEq(b.evaluationError, 1); - assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } { @@ -199,9 +199,9 @@ const StatusEvaluated = 5; assertEq(a.isAsyncEvaluating, true); assertEq(a.status, StatusEvaluated); assertEq(a.evaluationError, 2); - assertEq(a.asyncEvaluatingPostOrder, 0); + assertEq(a.asyncEvaluatingPostOrder, undefined); assertEq(b.isAsyncEvaluating, true); assertEq(b.status, StatusEvaluated); assertEq(b.evaluationError, 2); - 
assertEq(b.asyncEvaluatingPostOrder, 0); + assertEq(b.asyncEvaluatingPostOrder, undefined); } diff --git a/js/xpconnect/tests/unit/test_xpcomutils.js b/js/xpconnect/tests/unit/test_xpcomutils.js index ff11b60de70e..6aa73cb67025 100644 --- a/js/xpconnect/tests/unit/test_xpcomutils.js +++ b/js/xpconnect/tests/unit/test_xpcomutils.js @@ -156,7 +156,7 @@ add_test(function test_categoryRegistration() const XULAPPINFO_CID = Components.ID("{fc937916-656b-4fb3-a395-8c63569e27a8}"); // Create a fake app entry for our category registration apps filter. - let { newAppInfo } = ChromeUtils.import("resource://testing-common/AppInfo.jsm"); + let { newAppInfo } = ChromeUtils.importESModule("resource://testing-common/AppInfo.sys.mjs"); let XULAppInfo = newAppInfo({ name: "catRegTest", ID: "{adb42a9a-0d19-4849-bf4d-627614ca19be}", diff --git a/layout/generic/FrameClasses.py b/layout/generic/FrameClasses.py index 809842be62a8..61bb3e60d3fb 100644 --- a/layout/generic/FrameClasses.py +++ b/layout/generic/FrameClasses.py @@ -3,7 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Frame class definitions, used to generate FrameIdList.h and FrameTypeList.h -from FrameClass import Frame, AbstractFrame, LEAF, NOT_LEAF, DYNAMIC_LEAF +from FrameClass import DYNAMIC_LEAF, LEAF, NOT_LEAF, AbstractFrame, Frame FRAME_CLASSES = [ Frame("BRFrame", "Br", LEAF), diff --git a/layout/generic/GenerateFrameLists.py b/layout/generic/GenerateFrameLists.py index 32f8a2830849..af2c60922bcd 100644 --- a/layout/generic/GenerateFrameLists.py +++ b/layout/generic/GenerateFrameLists.py @@ -4,7 +4,6 @@ from FrameClasses import FRAME_CLASSES - HEADER = "// THIS IS AUTOGENERATED BY GenerateFrameLists.py. 
DO NOT EDIT\n" diff --git a/layout/style/FontFaceImpl.cpp b/layout/style/FontFaceImpl.cpp index 87686e11f1b8..7b9d44bd73a1 100644 --- a/layout/style/FontFaceImpl.cpp +++ b/layout/style/FontFaceImpl.cpp @@ -519,14 +519,12 @@ void FontFaceImpl::SetUserFontEntry(gfxUserFontEntry* aEntry) { } if (mUserFontEntry) { - MutexAutoLock lock(mUserFontEntry->mMutex); - mUserFontEntry->mFontFaces.RemoveElement(this); + mUserFontEntry->RemoveFontFace(this); } auto* entry = static_cast(aEntry); if (entry) { - MutexAutoLock lock(entry->mMutex); - entry->mFontFaces.AppendElement(this); + entry->AddFontFace(this); } mUserFontEntry = entry; @@ -535,7 +533,7 @@ void FontFaceImpl::SetUserFontEntry(gfxUserFontEntry* aEntry) { return; } - MOZ_ASSERT(mUserFontEntry->GetUserFontSet() == mFontFaceSet, + MOZ_ASSERT(mUserFontEntry->HasUserFontSet(mFontFaceSet), "user font entry must be associated with the same user font set " "as the FontFace"); @@ -700,6 +698,11 @@ void FontFaceImpl::RemoveFontFaceSet(FontFaceSetImpl* aFontFaceSet) { } else { mOtherFontFaceSets.RemoveElement(aFontFaceSet); } + + // The caller should be holding a strong reference to the FontFaceSetImpl. 
+ if (mUserFontEntry) { + mUserFontEntry->CheckUserFontSet(); + } } gfxCharacterMap* FontFaceImpl::GetUnicodeRangeAsCharacterMap() { @@ -766,6 +769,11 @@ void FontFaceImpl::Entry::GetUserFontSets( MutexAutoLock lock(mMutex); aResult.Clear(); + + if (mFontSet) { + aResult.AppendElement(mFontSet); + } + for (FontFaceImpl* f : mFontFaces) { if (f->mInFontFaceSet) { aResult.AppendElement(f->mFontFaceSet); @@ -781,6 +789,40 @@ void FontFaceImpl::Entry::GetUserFontSets( aResult.TruncateLength(it - aResult.begin()); } +/* virtual */ already_AddRefed +FontFaceImpl::Entry::GetUserFontSet() const { + MutexAutoLock lock(mMutex); + if (mFontSet) { + return do_AddRef(mFontSet); + } + if (NS_IsMainThread() && mLoadingFontSet) { + return do_AddRef(mLoadingFontSet); + } + return nullptr; +} + +void FontFaceImpl::Entry::CheckUserFontSetLocked() { + // If this is the last font containing a strong reference to the set, we need + // to clear the reference as there is no longer anything guaranteeing the set + // will be kept alive. + if (mFontSet) { + auto* set = static_cast(mFontSet); + for (FontFaceImpl* f : mFontFaces) { + if (f->mFontFaceSet == set || f->mOtherFontFaceSets.Contains(set)) { + return; + } + } + } + + // If possible, promote the most recently added FontFace and its owning + // FontFaceSetImpl as the primary set. 
+ if (!mFontFaces.IsEmpty()) { + mFontSet = mFontFaces.LastElement()->mFontFaceSet; + } else { + mFontSet = nullptr; + } +} + void FontFaceImpl::Entry::FindFontFaceOwners(nsTHashSet& aOwners) { MutexAutoLock lock(mMutex); for (FontFaceImpl* f : mFontFaces) { @@ -790,5 +832,17 @@ void FontFaceImpl::Entry::FindFontFaceOwners(nsTHashSet& aOwners) { } } +void FontFaceImpl::Entry::AddFontFace(FontFaceImpl* aFontFace) { + MutexAutoLock lock(mMutex); + mFontFaces.AppendElement(aFontFace); + CheckUserFontSetLocked(); +} + +void FontFaceImpl::Entry::RemoveFontFace(FontFaceImpl* aFontFace) { + MutexAutoLock lock(mMutex); + mFontFaces.RemoveElement(aFontFace); + CheckUserFontSetLocked(); +} + } // namespace dom } // namespace mozilla diff --git a/layout/style/FontFaceImpl.h b/layout/style/FontFaceImpl.h index bb97a46cc1ec..eb5bd52395c6 100644 --- a/layout/style/FontFaceImpl.h +++ b/layout/style/FontFaceImpl.h @@ -56,19 +56,41 @@ class FontFaceImpl final { StyleFontDisplay aFontDisplay, RangeFlags aRangeFlags, float aAscentOverride, float aDescentOverride, float aLineGapOverride, float aSizeAdjust) - : gfxUserFontEntry(aFontSet, aFontFaceSrcList, aWeight, aStretch, - aStyle, aFeatureSettings, aVariationSettings, + : gfxUserFontEntry(aFontFaceSrcList, aWeight, aStretch, aStyle, + aFeatureSettings, aVariationSettings, aLanguageOverride, aUnicodeRanges, aFontDisplay, aRangeFlags, aAscentOverride, aDescentOverride, aLineGapOverride, aSizeAdjust), - mMutex("FontFaceImpl::Entry::mMutex") {} + mMutex("FontFaceImpl::Entry::mMutex"), + mFontSet(aFontSet) {} void SetLoadState(UserFontLoadState aLoadState) override; void GetUserFontSets(nsTArray>& aResult) override; + already_AddRefed GetUserFontSet() const override; + + void CheckUserFontSet() { + MutexAutoLock lock(mMutex); + CheckUserFontSetLocked(); + } + +#ifdef DEBUG + bool HasUserFontSet(gfxUserFontSet* aFontSet) const { + MutexAutoLock lock(mMutex); + return mFontSet == aFontSet; + } +#endif + + void AddFontFace(FontFaceImpl* 
aOwner); + void RemoveFontFace(FontFaceImpl* aOwner); void FindFontFaceOwners(nsTHashSet& aOwners); protected: - Mutex mMutex; + void CheckUserFontSetLocked() MOZ_REQUIRES(mMutex); + + mutable Mutex mMutex; + + // Font set which owns this entry; + gfxUserFontSet* MOZ_NON_OWNING_REF mFontSet; // The FontFace objects that use this user font entry. We need to store // an array of these, not just a single pointer, since the user font diff --git a/layout/tools/reftest/jar.mn b/layout/tools/reftest/jar.mn index a2ed6f8b226d..2d6ea891e2a5 100644 --- a/layout/tools/reftest/jar.mn +++ b/layout/tools/reftest/jar.mn @@ -53,7 +53,7 @@ reftest.jar: res/ReftestFissionChild.jsm (ReftestFissionChild.jsm) res/AsyncSpellCheckTestHelper.jsm (../../../editor/AsyncSpellCheckTestHelper.jsm) res/httpd.jsm (../../../netwerk/test/httpserver/httpd.js) - res/StructuredLog.jsm (../../../testing/modules/StructuredLog.jsm) + res/StructuredLog.sys.mjs (../../../testing/modules/StructuredLog.sys.mjs) res/PerTestCoverageUtils.jsm (../../../tools/code-coverage/PerTestCoverageUtils.jsm) res/input.css (../../../editor/reftests/xul/input.css) res/progress.css (../../../layout/reftests/forms/progress/style.css) diff --git a/layout/tools/reftest/mach_commands.py b/layout/tools/reftest/mach_commands.py index baba9a940d0c..96595c3fe37a 100644 --- a/layout/tools/reftest/mach_commands.py +++ b/layout/tools/reftest/mach_commands.py @@ -2,22 +2,16 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-from __future__ import absolute_import, unicode_literals, print_function +from __future__ import absolute_import, print_function, unicode_literals import os import re import sys from argparse import Namespace -from mozbuild.base import ( - MachCommandConditions as conditions, - MozbuildObject, -) - -from mach.decorators import ( - Command, -) - +from mach.decorators import Command +from mozbuild.base import MachCommandConditions as conditions +from mozbuild.base import MozbuildObject parser = None @@ -279,8 +273,8 @@ def _run_reftest(command_context, **kwargs): reftest.log_manager.enable_unstructured() if conditions.is_android(command_context): from mozrunner.devices.android_device import ( - verify_android_device, InstallIntent, + verify_android_device, ) install = InstallIntent.NO if kwargs.get("no_install") else InstallIntent.YES diff --git a/layout/tools/reftest/mach_test_package_commands.py b/layout/tools/reftest/mach_test_package_commands.py index 6b6f2f8fd93b..56599647c5e7 100644 --- a/layout/tools/reftest/mach_test_package_commands.py +++ b/layout/tools/reftest/mach_test_package_commands.py @@ -2,16 +2,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-from __future__ import absolute_import, unicode_literals, print_function +from __future__ import absolute_import, print_function, unicode_literals import os import sys from argparse import Namespace from functools import partial -from mach.decorators import ( - Command, -) +from mach.decorators import Command here = os.path.abspath(os.path.dirname(__file__)) logger = None diff --git a/layout/tools/reftest/reftest.jsm b/layout/tools/reftest/reftest.jsm index a7aa1be926b9..90c49a1bacc9 100644 --- a/layout/tools/reftest/reftest.jsm +++ b/layout/tools/reftest/reftest.jsm @@ -48,8 +48,8 @@ const { HttpServer } = ChromeUtils.import("resource://reftest/httpd.jsm"); const { ReadTopManifest, CreateUrls } = ChromeUtils.import( "resource://reftest/manifest.jsm" ); -const { StructuredLogger } = ChromeUtils.import( - "resource://reftest/StructuredLog.jsm" +const { StructuredLogger } = ChromeUtils.importESModule( + "resource://reftest/StructuredLog.sys.mjs" ); const { PerTestCoverageUtils } = ChromeUtils.import( "resource://reftest/PerTestCoverageUtils.jsm" @@ -915,7 +915,7 @@ function DoneTests() g.suiteStarted = false logger.suiteEnd({'results': g.testResults}); } else { - logger._logData('results', {results: g.testResults}); + logger.logData('results', {results: g.testResults}); } logger.info("Slowest test took " + g.slowestTestTime + "ms (" + g.slowestTestURL + ")"); logger.info("Total canvas count = " + g.recycledCanvases.length); diff --git a/layout/tools/reftest/reftest/__init__.py b/layout/tools/reftest/reftest/__init__.py index 601014a6cac9..e3ed9b53e13d 100644 --- a/layout/tools/reftest/reftest/__init__.py +++ b/layout/tools/reftest/reftest/__init__.py @@ -2,11 +2,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-from __future__ import unicode_literals, absolute_import, print_function +from __future__ import absolute_import, print_function, unicode_literals import io import os import re + import six RE_COMMENT = re.compile(r"\s+#") diff --git a/layout/tools/reftest/reftestcommandline.py b/layout/tools/reftest/reftestcommandline.py index a6aac3df791b..70958b2dc9c6 100644 --- a/layout/tools/reftest/reftestcommandline.py +++ b/layout/tools/reftest/reftestcommandline.py @@ -4,9 +4,9 @@ import argparse import os import sys from collections import OrderedDict + import mozinfo import mozlog - from six.moves.urllib.parse import urlparse here = os.path.abspath(os.path.dirname(__file__)) diff --git a/layout/tools/reftest/remotereftest.py b/layout/tools/reftest/remotereftest.py index 77a670519600..8b22fb2c7884 100644 --- a/layout/tools/reftest/remotereftest.py +++ b/layout/tools/reftest/remotereftest.py @@ -16,14 +16,12 @@ import time import traceback from contextlib import closing -from six.moves.urllib_request import urlopen - -from mozdevice import ADBDeviceFactory, RemoteProcessMonitor import mozcrash - +import reftestcommandline +from mozdevice import ADBDeviceFactory, RemoteProcessMonitor from output import OutputHandler from runreftest import RefTest, ReftestResolver, build_obj -import reftestcommandline +from six.moves.urllib_request import urlopen # We need to know our current directory so that we can serve our test files from it. SCRIPT_DIRECTORY = os.path.abspath(os.path.realpath(os.path.dirname(__file__))) diff --git a/layout/tools/reftest/runreftest.py b/layout/tools/reftest/runreftest.py index b77e8567da2d..052f037693f5 100644 --- a/layout/tools/reftest/runreftest.py +++ b/layout/tools/reftest/runreftest.py @@ -5,8 +5,6 @@ """ Runs the reftest test harness. 
""" -from __future__ import print_function - from __future__ import absolute_import, print_function import json @@ -37,9 +35,10 @@ import mozlog import mozprocess import mozprofile import mozrunner -from manifestparser import TestManifest, filters as mpf +from manifestparser import TestManifest +from manifestparser import filters as mpf from mozrunner.utils import get_stack_fixer_function, test_environment -from mozscreenshot import printstatus, dump_screen +from mozscreenshot import dump_screen, printstatus from six import reraise, string_types from six.moves import range @@ -57,8 +56,8 @@ except ImportError as e: # noqa Marionette = reraise_ -from output import OutputHandler, ReftestFormatter import reftestcommandline +from output import OutputHandler, ReftestFormatter here = os.path.abspath(os.path.dirname(__file__)) diff --git a/layout/tools/reftest/selftest/test_reftest_output.py b/layout/tools/reftest/selftest/test_reftest_output.py index 15cffa925bd8..1ca48d42cdb5 100644 --- a/layout/tools/reftest/selftest/test_reftest_output.py +++ b/layout/tools/reftest/selftest/test_reftest_output.py @@ -12,14 +12,14 @@ try: except ImportError: # Python3 from io import StringIO + from functools import partial import mozunit import pytest -from moztest.selftest.output import get_mozharness_status, filter_action - -from mozharness.base.log import INFO, WARNING, ERROR -from mozharness.mozilla.automation import TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE +from mozharness.base.log import ERROR, INFO, WARNING +from mozharness.mozilla.automation import TBPL_FAILURE, TBPL_SUCCESS, TBPL_WARNING +from moztest.selftest.output import filter_action, get_mozharness_status here = os.path.abspath(os.path.dirname(__file__)) get_mozharness_status = partial(get_mozharness_status, "reftest") diff --git a/media/libjpeg/ChangeLog.md b/media/libjpeg/ChangeLog.md index e6700c3c271c..b0d166ea1011 100644 --- a/media/libjpeg/ChangeLog.md +++ b/media/libjpeg/ChangeLog.md @@ -1,3 +1,38 @@ +2.1.4 +===== 
+ +### Significant changes relative to 2.1.3 + +1. Fixed a regression introduced in 2.1.3 that caused build failures with +Visual Studio 2010. + +2. The `tjDecompressHeader3()` function in the TurboJPEG C API and the +`TJDecompressor.setSourceImage()` method in the TurboJPEG Java API now accept +"abbreviated table specification" (AKA "tables-only") datastreams, which can be +used to prime the decompressor with quantization and Huffman tables that can be +used when decompressing subsequent "abbreviated image" datastreams. + +3. libjpeg-turbo now performs run-time detection of AltiVec instructions on +OS X/PowerPC systems if AltiVec instructions are not enabled at compile time. +This allows both AltiVec-equipped (PowerPC G4 and G5) and non-AltiVec-equipped +(PowerPC G3) CPUs to be supported using the same build of libjpeg-turbo. + +4. Fixed an error ("Bogus virtual array access") that occurred when attempting +to decompress a progressive JPEG image with a height less than or equal to one +iMCU (8 * the vertical sampling factor) using buffered-image mode with +interblock smoothing enabled. This was a regression introduced by +2.1 beta1[6(b)]. + +5. Fixed two issues that prevented partial image decompression from working +properly with buffered-image mode: + + - Attempting to call `jpeg_crop_scanline()` after +`jpeg_start_decompress()` but before `jpeg_start_output()` resulted in an error +("Improper call to JPEG library in state 207".) + - Attempting to use `jpeg_skip_scanlines()` resulted in an error ("Bogus +virtual array access") under certain circumstances. + + 2.1.3 ===== diff --git a/media/libjpeg/MOZCHANGES b/media/libjpeg/MOZCHANGES index 4e65df22222e..1014df1341ee 100644 --- a/media/libjpeg/MOZCHANGES +++ b/media/libjpeg/MOZCHANGES @@ -48,6 +48,10 @@ To upgrade to a new revision of libjpeg-turbo, do the following: $ hg addremove +== November 10, 2022 (libjpeg-turbo v2.1.4 8162eddf041e0be26f5c671bb6528723c55fed9d 2022-08-12) == + +* Updated to v2.1.4 release. 
+ == February 28, 2022 (libjpeg-turbo v2.1.3 c5f269eb9665435271c05fbcaf8721fa58e9eafa 2022-02-25) == * Updated to v2.1.3 release. diff --git a/media/libjpeg/jdapistd.c b/media/libjpeg/jdapistd.c index 8827d8abf5c5..02cd0cb93a85 100644 --- a/media/libjpeg/jdapistd.c +++ b/media/libjpeg/jdapistd.c @@ -159,9 +159,12 @@ jpeg_crop_scanline(j_decompress_ptr cinfo, JDIMENSION *xoffset, JDIMENSION input_xoffset; boolean reinit_upsampler = FALSE; jpeg_component_info *compptr; +#ifdef UPSAMPLE_MERGING_SUPPORTED my_master_ptr master = (my_master_ptr)cinfo->master; +#endif - if (cinfo->global_state != DSTATE_SCANNING || cinfo->output_scanline != 0) + if ((cinfo->global_state != DSTATE_SCANNING && + cinfo->global_state != DSTATE_BUFIMAGE) || cinfo->output_scanline != 0) ERREXIT1(cinfo, JERR_BAD_STATE, cinfo->global_state); if (!xoffset || !width) @@ -209,11 +212,13 @@ jpeg_crop_scanline(j_decompress_ptr cinfo, JDIMENSION *xoffset, */ *width = *width + input_xoffset - *xoffset; cinfo->output_width = *width; +#ifdef UPSAMPLE_MERGING_SUPPORTED if (master->using_merged_upsample && cinfo->max_v_samp_factor == 2) { my_merged_upsample_ptr upsample = (my_merged_upsample_ptr)cinfo->upsample; upsample->out_row_width = cinfo->output_width * cinfo->out_color_components; } +#endif /* Set the first and last iMCU columns that we must decompress. These values * will be used in single-scan decompressions. 
@@ -324,7 +329,9 @@ LOCAL(void) read_and_discard_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines) { JDIMENSION n; +#ifdef UPSAMPLE_MERGING_SUPPORTED my_master_ptr master = (my_master_ptr)cinfo->master; +#endif JSAMPLE dummy_sample[1] = { 0 }; JSAMPROW dummy_row = dummy_sample; JSAMPARRAY scanlines = NULL; @@ -348,10 +355,12 @@ read_and_discard_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines) cinfo->cquantize->color_quantize = noop_quantize; } +#ifdef UPSAMPLE_MERGING_SUPPORTED if (master->using_merged_upsample && cinfo->max_v_samp_factor == 2) { my_merged_upsample_ptr upsample = (my_merged_upsample_ptr)cinfo->upsample; scanlines = &upsample->spare_row; } +#endif for (n = 0; n < num_lines; n++) jpeg_read_scanlines(cinfo, scanlines, 1); @@ -517,7 +526,7 @@ jpeg_skip_scanlines(j_decompress_ptr cinfo, JDIMENSION num_lines) * all of the entropy decoding occurs in jpeg_start_decompress(), assuming * that the input data source is non-suspending. This makes skipping easy. */ - if (cinfo->inputctl->has_multiple_scans) { + if (cinfo->inputctl->has_multiple_scans || cinfo->buffered_image) { if (cinfo->upsample->need_context_rows) { cinfo->output_scanline += lines_to_skip; cinfo->output_iMCU_row += lines_to_skip / lines_per_iMCU_row; diff --git a/media/libjpeg/jdcoefct.c b/media/libjpeg/jdcoefct.c index 15e6cded628e..88e10c08cb62 100644 --- a/media/libjpeg/jdcoefct.c +++ b/media/libjpeg/jdcoefct.c @@ -5,7 +5,7 @@ * Copyright (C) 1994-1997, Thomas G. Lane. * libjpeg-turbo Modifications: * Copyright 2009 Pierre Ossman for Cendio AB - * Copyright (C) 2010, 2015-2016, 2019-2020, D. R. Commander. + * Copyright (C) 2010, 2015-2016, 2019-2020, 2022, D. R. Commander. * Copyright (C) 2015, 2020, Google, Inc. * For conditions of distribution and use, see the accompanying README.ijg * file. @@ -475,7 +475,7 @@ decompress_smooth_data(j_decompress_ptr cinfo, JSAMPIMAGE output_buf) if (!compptr->component_needed) continue; /* Count non-dummy DCT block rows in this iMCU row. 
*/ - if (cinfo->output_iMCU_row < last_iMCU_row - 1) { + if (cinfo->output_iMCU_row + 1 < last_iMCU_row) { block_rows = compptr->v_samp_factor; access_rows = block_rows * 3; /* this and next two iMCU rows */ } else if (cinfo->output_iMCU_row < last_iMCU_row) { @@ -560,7 +560,7 @@ decompress_smooth_data(j_decompress_ptr cinfo, JSAMPIMAGE output_buf) next_block_row = buffer_ptr; if (block_row < block_rows - 2 || - cinfo->output_iMCU_row < last_iMCU_row - 1) + cinfo->output_iMCU_row + 1 < last_iMCU_row) next_next_block_row = buffer[block_row + 2] + cinfo->master->first_MCU_col[ci]; else diff --git a/media/libjpeg/jerror.c b/media/libjpeg/jerror.c index d54470293758..d0ab5b88b0c7 100644 --- a/media/libjpeg/jerror.c +++ b/media/libjpeg/jerror.c @@ -189,9 +189,9 @@ format_message(j_common_ptr cinfo, char *buffer) /* Format the message into the passed buffer */ if (isstring) - snprintf(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.s); + SNPRINTF(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.s); else - snprintf(buffer, JMSG_LENGTH_MAX, msgtext, + SNPRINTF(buffer, JMSG_LENGTH_MAX, msgtext, err->msg_parm.i[0], err->msg_parm.i[1], err->msg_parm.i[2], err->msg_parm.i[3], err->msg_parm.i[4], err->msg_parm.i[5], diff --git a/media/libjpeg/jinclude.h b/media/libjpeg/jinclude.h index 120614b25cf3..e8d983ac171f 100644 --- a/media/libjpeg/jinclude.h +++ b/media/libjpeg/jinclude.h @@ -45,6 +45,18 @@ */ +#ifdef _MSC_VER + +#define SNPRINTF(str, n, format, ...) \ + _snprintf_s(str, n, _TRUNCATE, format, ##__VA_ARGS__) + +#else + +#define SNPRINTF snprintf + +#endif + + #ifndef NO_GETENV #ifdef _MSC_VER diff --git a/media/libjpeg/jmemmgr.c b/media/libjpeg/jmemmgr.c index 8f5a4ab1c78b..a40446f6ac36 100644 --- a/media/libjpeg/jmemmgr.c +++ b/media/libjpeg/jmemmgr.c @@ -68,10 +68,13 @@ round_up_pow2(size_t a, size_t b) * There isn't any really portable way to determine the worst-case alignment * requirement. 
This module assumes that the alignment requirement is * multiples of ALIGN_SIZE. - * By default, we define ALIGN_SIZE as sizeof(double). This is necessary on - * some workstations (where doubles really do need 8-byte alignment) and will - * work fine on nearly everything. If your machine has lesser alignment needs, - * you can save a few bytes by making ALIGN_SIZE smaller. + * By default, we define ALIGN_SIZE as the maximum of sizeof(double) and + * sizeof(void *). This is necessary on some workstations (where doubles + * really do need 8-byte alignment) and will work fine on nearly everything. + * We use the maximum of sizeof(double) and sizeof(void *) since sizeof(double) + * may be insufficient, for example, on CHERI-enabled platforms with 16-byte + * pointers and a 16-byte alignment requirement. If your machine has lesser + * alignment needs, you can save a few bytes by making ALIGN_SIZE smaller. * The only place I know of where this will NOT work is certain Macintosh * 680x0 compilers that define double as a 10-byte IEEE extended float. 
* Doing 10-byte alignment is counterproductive because longwords won't be @@ -81,7 +84,7 @@ round_up_pow2(size_t a, size_t b) #ifndef ALIGN_SIZE /* so can override from jconfig.h */ #ifndef WITH_SIMD -#define ALIGN_SIZE sizeof(double) +#define ALIGN_SIZE MAX(sizeof(void *), sizeof(double)) #else #define ALIGN_SIZE 32 /* Most of the SIMD instructions we support require 16-byte (128-bit) alignment, but AVX2 requires diff --git a/media/libjpeg/simd/arm/aarch32/jsimd.c b/media/libjpeg/simd/arm/aarch32/jsimd.c index e3adf23d5013..920f7656ebfe 100644 --- a/media/libjpeg/simd/arm/aarch32/jsimd.c +++ b/media/libjpeg/simd/arm/aarch32/jsimd.c @@ -25,8 +25,6 @@ #include "../../../jsimddct.h" #include "../../jsimd.h" -#include -#include #include static unsigned int simd_support = ~0; diff --git a/media/libjpeg/simd/arm/aarch64/jsimd.c b/media/libjpeg/simd/arm/aarch64/jsimd.c index 604d5472f6a6..41c06d318010 100644 --- a/media/libjpeg/simd/arm/aarch64/jsimd.c +++ b/media/libjpeg/simd/arm/aarch64/jsimd.c @@ -25,8 +25,6 @@ #include "../../jsimd.h" #include "jconfigint.h" -#include -#include #include #define JSIMD_FASTLD3 1 diff --git a/media/libjpeg/simd/mips/jsimd.c b/media/libjpeg/simd/mips/jsimd.c index d2546eed3289..36ea865d41e4 100644 --- a/media/libjpeg/simd/mips/jsimd.c +++ b/media/libjpeg/simd/mips/jsimd.c @@ -23,8 +23,6 @@ #include "../../jsimddct.h" #include "../jsimd.h" -#include -#include #include static unsigned int simd_support = ~0; diff --git a/media/libjpeg/simd/mips64/jsimd.c b/media/libjpeg/simd/mips64/jsimd.c index e8f1af562bab..2e626b2d3d97 100644 --- a/media/libjpeg/simd/mips64/jsimd.c +++ b/media/libjpeg/simd/mips64/jsimd.c @@ -24,8 +24,6 @@ #include "../../jsimddct.h" #include "../jsimd.h" -#include -#include #include static unsigned int simd_support = ~0; diff --git a/media/libjpeg/simd/powerpc/jsimd.c b/media/libjpeg/simd/powerpc/jsimd.c index b9e86dcfac26..9a452a309074 100644 --- a/media/libjpeg/simd/powerpc/jsimd.c +++ 
b/media/libjpeg/simd/powerpc/jsimd.c @@ -27,11 +27,12 @@ #include "../../jsimddct.h" #include "../jsimd.h" -#include -#include #include -#if defined(__OpenBSD__) +#if defined(__APPLE__) +#include +#include +#elif defined(__OpenBSD__) #include #include #include @@ -121,6 +122,10 @@ init_simd(void) int bufsize = 1024; /* an initial guess for the line buffer size limit */ #elif defined(__amigaos4__) uint32 altivec = 0; +#elif defined(__APPLE__) + int mib[2] = { CTL_HW, HW_VECTORUNIT }; + int altivec; + size_t len = sizeof(altivec); #elif defined(__OpenBSD__) int mib[2] = { CTL_MACHDEP, CPU_ALTIVEC }; int altivec; @@ -134,7 +139,7 @@ init_simd(void) simd_support = 0; -#if defined(__ALTIVEC__) || defined(__APPLE__) +#if defined(__ALTIVEC__) simd_support |= JSIMD_ALTIVEC; #elif defined(__linux__) || defined(ANDROID) || defined(__ANDROID__) while (!parse_proc_cpuinfo(bufsize)) { @@ -146,7 +151,7 @@ init_simd(void) IExec->GetCPUInfoTags(GCIT_VectorUnit, &altivec, TAG_DONE); if (altivec == VECTORTYPE_ALTIVEC) simd_support |= JSIMD_ALTIVEC; -#elif defined(__OpenBSD__) +#elif defined(__APPLE__) || defined(__OpenBSD__) if (sysctl(mib, 2, &altivec, &len, NULL, 0) == 0 && altivec != 0) simd_support |= JSIMD_ALTIVEC; #elif defined(__FreeBSD__) diff --git a/netwerk/base/nsInputStreamPump.cpp b/netwerk/base/nsInputStreamPump.cpp index 678f1c9beca1..750e724aa18c 100644 --- a/netwerk/base/nsInputStreamPump.cpp +++ b/netwerk/base/nsInputStreamPump.cpp @@ -491,15 +491,15 @@ uint32_t nsInputStreamPump::OnStateStart() { } { + nsCOMPtr listener = mListener; + // We're on the writing thread + AssertOnThread(); + // Note: Must exit mutex for call to OnStartRequest to avoid // deadlocks when calls to RetargetDeliveryTo for multiple // nsInputStreamPumps are needed (e.g. nsHttpChannel). 
RecursiveMutexAutoUnlock unlock(mMutex); - // We're on the writing thread - MOZ_PUSH_IGNORE_THREAD_SAFETY - AssertOnThread(); - rv = mListener->OnStartRequest(this); - MOZ_POP_THREAD_SAFETY + rv = listener->OnStartRequest(this); } // an error returned from OnStartRequest should cause us to abort; however, @@ -562,6 +562,15 @@ uint32_t nsInputStreamPump::OnStateTransfer() { mStreamOffset, avail, odaAvail)); { + // We may be called on non-MainThread even if mOffMainThread is + // false, due to RetargetDeliveryTo(), so don't use AssertOnThread() + if (mTargetThread) { + MOZ_ASSERT(mTargetThread->IsOnCurrentThread()); + } else { + MOZ_ASSERT(NS_IsMainThread()); + } + + nsCOMPtr listener = mListener; // Note: Must exit mutex for call to OnStartRequest to avoid // deadlocks when calls to RetargetDeliveryTo for multiple // nsInputStreamPumps are needed (e.g. nsHttpChannel). @@ -570,16 +579,9 @@ uint32_t nsInputStreamPump::OnStateTransfer() { // mStreamOffset is only touched in OnStateTransfer, and AsyncRead // shouldn't be called during OnDataAvailable() - // We may be called on non-MainThread even if mOffMainThread is - // false, due to RetargetDeliveryTo(), so don't use AssertOnThread() MOZ_PUSH_IGNORE_THREAD_SAFETY - if (mTargetThread) { - MOZ_ASSERT(mTargetThread->IsOnCurrentThread()); - } else { - MOZ_ASSERT(NS_IsMainThread()); - } - rv = mListener->OnDataAvailable(this, mAsyncStream, mStreamOffset, - odaAvail); + rv = listener->OnDataAvailable(this, mAsyncStream, mStreamOffset, + odaAvail); MOZ_POP_THREAD_SAFETY } @@ -678,16 +680,18 @@ uint32_t nsInputStreamPump::OnStateStop() { mAsyncStream = nullptr; mIsPending = false; { + // We're on the writing thread. + // We believe that mStatus can't be changed on us here. + AssertOnThread(); + + nsCOMPtr listener = mListener; + nsresult status = mStatus; // Note: Must exit mutex for call to OnStartRequest to avoid // deadlocks when calls to RetargetDeliveryTo for multiple // nsInputStreamPumps are needed (e.g. 
nsHttpChannel). RecursiveMutexAutoUnlock unlock(mMutex); - // We're on the writing thread. - // We believe that mStatus can't be changed on us here. - MOZ_PUSH_IGNORE_THREAD_SAFETY - AssertOnThread(); - mListener->OnStopRequest(this, mStatus); - MOZ_POP_THREAD_SAFETY + + listener->OnStopRequest(this, status); } mTargetThread = nullptr; mListener = nullptr; diff --git a/netwerk/base/nsInputStreamPump.h b/netwerk/base/nsInputStreamPump.h index e9a3ae6b5981..7248364a6f94 100644 --- a/netwerk/base/nsInputStreamPump.h +++ b/netwerk/base/nsInputStreamPump.h @@ -80,14 +80,12 @@ class nsInputStreamPump final : public nsIInputStreamPump, nsresult CreateBufferedStreamIfNeeded() MOZ_REQUIRES(mMutex); // This should optimize away in non-DEBUG builds - MOZ_ALWAYS_INLINE void AssertOnThread() const { - MOZ_PUSH_IGNORE_THREAD_SAFETY + MOZ_ALWAYS_INLINE void AssertOnThread() const MOZ_REQUIRES(mMutex) { if (mOffMainThread) { MOZ_ASSERT(mTargetThread->IsOnCurrentThread()); } else { MOZ_ASSERT(NS_IsMainThread()); } - MOZ_POP_THREAD_SAFETY } uint32_t mState MOZ_GUARDED_BY(mMutex){STATE_IDLE}; diff --git a/remote/shared/RecommendedPreferences.sys.mjs b/remote/shared/RecommendedPreferences.sys.mjs index b286129e91c7..44b816c7c340 100644 --- a/remote/shared/RecommendedPreferences.sys.mjs +++ b/remote/shared/RecommendedPreferences.sys.mjs @@ -178,6 +178,9 @@ const COMMON_PREFERENCES = new Map([ ["dom.max_chrome_script_run_time", 0], ["dom.max_script_run_time", 0], + // Disable location change rate limitation + ["dom.navigation.locationChangeRateLimit.count", 0], + // DOM Push ["dom.push.connection.enabled", false], diff --git a/security/ct/tests/gtest/createSTHTestData.py b/security/ct/tests/gtest/createSTHTestData.py index c6fd7588e68c..ab61d4ba0eef 100755 --- a/security/ct/tests/gtest/createSTHTestData.py +++ b/security/ct/tests/gtest/createSTHTestData.py @@ -24,12 +24,12 @@ hash: The name of a hash algorithm to use when signing. Optional. Defaults to 'sha256'. 
""" -from pyasn1.codec.der import encoder import binascii - import os import sys +from pyasn1.codec.der import encoder + sys.path.append( os.path.join(os.path.dirname(__file__), "..", "..", "..", "manager", "tools") ) diff --git a/security/generate_certdata.py b/security/generate_certdata.py index 3dda68ec8af2..fee6b009ccf5 100644 --- a/security/generate_certdata.py +++ b/security/generate_certdata.py @@ -7,10 +7,11 @@ # This exists to paper over differences between gyp's `action` definitions # and moz.build `GENERATED_FILES` semantics. -import buildconfig import os import subprocess +import buildconfig + def main(output, *inputs): env = dict(os.environ) diff --git a/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py b/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py index f9748eff7ce3..05e0842e2a45 100644 --- a/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py +++ b/security/manager/tools/crtshToIdentifyingStruct/crtshToIdentifyingStruct.py @@ -13,20 +13,17 @@ duplicates. Requires Python 3. 
""" import argparse -import re -import requests -import sys import io +import re +import sys -from pyasn1.codec.der import decoder -from pyasn1.codec.der import encoder -from pyasn1_modules import pem -from pyasn1_modules import rfc5280 - +import requests from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.x509.oid import NameOID +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import pem, rfc5280 assert sys.version_info >= (3, 2), "Requires Python 3.2 or later" diff --git a/security/manager/tools/getCTKnownLogs.py b/security/manager/tools/getCTKnownLogs.py index a50be4f9de14..3270f567ed16 100755 --- a/security/manager/tools/getCTKnownLogs.py +++ b/security/manager/tools/getCTKnownLogs.py @@ -15,7 +15,7 @@ https://cs.chromium.org/chromium/src/net/cert/ct_known_logs_static-inc.h """ from __future__ import print_function -from string import Template + import argparse import base64 import datetime @@ -23,9 +23,10 @@ import json import os.path import sys import textwrap -import urllib2 +from string import Template import six +import urllib2 def decodebytes(s): diff --git a/security/manager/tools/mach_commands.py b/security/manager/tools/mach_commands.py index bd1d300fd629..58b5e972d321 100644 --- a/security/manager/tools/mach_commands.py +++ b/security/manager/tools/mach_commands.py @@ -4,17 +4,12 @@ import os +from mach.decorators import Command, CommandArgument from mach.util import UserError from mozpack.files import FileFinder from mozpack.path import basedir -from mach.decorators import ( - CommandArgument, - Command, -) - - def run_module_main_on(module, input_filename): """Run the given module (pycert or pykey) on the given file.""" diff --git a/security/manager/tools/pycert.py b/security/manager/tools/pycert.py index 896c1d20336a..cac01daff74a 100755 --- a/security/manager/tools/pycert.py +++ b/security/manager/tools/pycert.py @@ -84,21 
+84,20 @@ If a serial number is not explicitly specified, it is automatically generated based on the contents of the certificate. """ -from pyasn1.codec.der import decoder -from pyasn1.codec.der import encoder -from pyasn1.type import constraint, tag, univ, useful -from pyasn1_modules import rfc2459 -from struct import pack import base64 import datetime import hashlib import re import socket -import six import sys +from struct import pack import pyct import pykey +import six +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import constraint, tag, univ, useful +from pyasn1_modules import rfc2459 class Error(Exception): diff --git a/security/manager/tools/pycms.py b/security/manager/tools/pycms.py index befe68e34696..1717513fdf9a 100755 --- a/security/manager/tools/pycms.py +++ b/security/manager/tools/pycms.py @@ -26,15 +26,15 @@ information). The certificate specification must come last. """ -from pyasn1.codec.der import decoder -from pyasn1.codec.der import encoder -from pyasn1.type import tag, univ -from pyasn1_modules import rfc2315, rfc2459 import base64 +import sys from io import StringIO + import pycert import pykey -import sys +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import tag, univ +from pyasn1_modules import rfc2315, rfc2459 class Error(Exception): diff --git a/security/manager/tools/pyct.py b/security/manager/tools/pyct.py index 125b626fc2cc..8f9d61b72b06 100644 --- a/security/manager/tools/pyct.py +++ b/security/manager/tools/pyct.py @@ -10,13 +10,13 @@ details of a signing key, when to sign, and the certificate data to sign. Currently only supports precert_entry types. See RFC 6962. 
""" -from pyasn1.codec.der import encoder -from struct import pack import binascii import calendar import hashlib +from struct import pack import pykey +from pyasn1.codec.der import encoder class InvalidKeyError(Exception): diff --git a/security/manager/tools/pykey.py b/security/manager/tools/pykey.py index 05163adc3619..3f08c341de3b 100755 --- a/security/manager/tools/pykey.py +++ b/security/manager/tools/pykey.py @@ -30,17 +30,18 @@ secp384r1: an ECC key on the curve secp384r1 secp521r1: an ECC key on the curve secp521r1 """ -from pyasn1.codec.der import encoder -from pyasn1.type import univ, namedtype, tag -from pyasn1_modules import rfc2459 import base64 import binascii -import ecdsa import hashlib import math +import sys + +import ecdsa import rsa import six -import sys +from pyasn1.codec.der import encoder +from pyasn1.type import namedtype, tag, univ +from pyasn1_modules import rfc2459 # "constants" to make it easier for consumers to specify hash algorithms HASH_MD5 = "hash:md5" diff --git a/security/sandbox/test/mac_register_font.py b/security/sandbox/test/mac_register_font.py index e5996fcb9069..d537e5837609 100755 --- a/security/sandbox/test/mac_register_font.py +++ b/security/sandbox/test/mac_register_font.py @@ -11,11 +11,12 @@ Mac-specific utility command to register a font file with the OS. 
from __future__ import print_function -import CoreText -import Cocoa import argparse import sys +import Cocoa +import CoreText + def main(): parser = argparse.ArgumentParser() diff --git a/taskcluster/ci/source-test/mozlint.yml b/taskcluster/ci/source-test/mozlint.yml index d6798ba35e8d..f994dd88ef69 100644 --- a/taskcluster/ci/source-test/mozlint.yml +++ b/taskcluster/ci/source-test/mozlint.yml @@ -120,6 +120,10 @@ eslint: eslint-build: description: ESLint checks with build data + always-target: false + run-on-projects: [] + attributes: + code-review: false treeherder: symbol: js(ES-B) tier: 3 diff --git a/taskcluster/ci/updatebot/kind.yml b/taskcluster/ci/updatebot/kind.yml index f4809297d66b..1cbdbaaa77c1 100644 --- a/taskcluster/ci/updatebot/kind.yml +++ b/taskcluster/ci/updatebot/kind.yml @@ -22,7 +22,7 @@ jobs: platform: updatebot/all symbol: cron tier: 1 - worker-type: b-linux + worker-type: b-linux-gcp worker: docker-image: {in-tree: updatebot} max-run-time: 3600 diff --git a/taskcluster/gecko_taskgraph/target_tasks.py b/taskcluster/gecko_taskgraph/target_tasks.py index 01a3cb61b6e5..d35ddcc5997b 100644 --- a/taskcluster/gecko_taskgraph/target_tasks.py +++ b/taskcluster/gecko_taskgraph/target_tasks.py @@ -4,24 +4,22 @@ import copy -from datetime import datetime, timedelta import os import re +from datetime import datetime, timedelta +from gecko_taskgraph import GECKO, try_option_syntax +from gecko_taskgraph.util.attributes import ( + match_run_on_hg_branches, + match_run_on_projects, +) +from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message +from gecko_taskgraph.util.platforms import platform_family from redo import retry from taskgraph.parameters import Parameters from taskgraph.target_tasks import _target_task, get_method from taskgraph.util.taskcluster import find_task_id -from gecko_taskgraph import try_option_syntax, GECKO -from gecko_taskgraph.util.attributes import ( - match_run_on_projects, - match_run_on_hg_branches, 
-) -from gecko_taskgraph.util.platforms import platform_family -from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message - - # Some tasks show up in the target task set, but are possibly special cases, # uncommon tasks, or tasks running against limited hardware set that they # should only be selectable with --full. @@ -1256,10 +1254,7 @@ def target_tasks_backfill_all_browsertime(full_task_graph, parameters, graph_con and landed the day before the cron is running. Trigger backfill-all-browsertime action task on each of them. """ - from gecko_taskgraph.actions.util import ( - get_decision_task_id, - get_pushes, - ) + from gecko_taskgraph.actions.util import get_decision_task_id, get_pushes def date_is_yesterday(date): yesterday = datetime.today() - timedelta(days=1) @@ -1398,5 +1393,5 @@ def target_tasks_eslint_build(full_task_graph, parameters, graph_config): for name, task in full_task_graph.tasks.items(): if task.kind != "source-test": continue - if name == "eslint-build": + if "eslint-build" in name: yield name diff --git a/testing/marionette/client/marionette_driver/geckoinstance.py b/testing/marionette/client/marionette_driver/geckoinstance.py index f53a835e3dd2..83ca08295852 100644 --- a/testing/marionette/client/marionette_driver/geckoinstance.py +++ b/testing/marionette/client/marionette_driver/geckoinstance.py @@ -18,14 +18,12 @@ import sys import tempfile import time import traceback - from copy import deepcopy import mozversion - -from mozprofile import Profile -from mozrunner import Runner, FennecEmulatorRunner import six +from mozprofile import Profile +from mozrunner import FennecEmulatorRunner, Runner from six import reraise from . 
import errors @@ -69,6 +67,8 @@ class GeckoInstance(object): # No slow script dialogs "dom.max_chrome_script_run_time": 0, "dom.max_script_run_time": 0, + # Disable location change rate limitation + "dom.navigation.locationChangeRateLimit.count": 0, # DOM Push "dom.push.connection.enabled": False, # Disable dialog abuse if alerts are triggered too quickly diff --git a/testing/mochitest/moz.build b/testing/mochitest/moz.build index 3251059fa0e8..edae41386eb3 100644 --- a/testing/mochitest/moz.build +++ b/testing/mochitest/moz.build @@ -45,7 +45,7 @@ FINAL_TARGET_FILES.content.static += [ FINAL_TARGET_FILES.content.tests.SimpleTest += [ "../../docshell/test/chrome/docshell_helpers.js", - "../modules/StructuredLog.jsm", + "../modules/StructuredLog.sys.mjs", "tests/SimpleTest/AccessibilityUtils.js", "tests/SimpleTest/EventUtils.js", "tests/SimpleTest/ExtensionTestUtils.js", diff --git a/testing/mochitest/server.js b/testing/mochitest/server.js index 7bcffb7e133c..e8e514d34497 100644 --- a/testing/mochitest/server.js +++ b/testing/mochitest/server.js @@ -766,10 +766,6 @@ function testListing(metadata, response) { type: "text/css", href: "/static/harness.css", }), - SCRIPT({ - type: "text/javascript", - src: "/tests/SimpleTest/StructuredLog.jsm", - }), SCRIPT({ type: "text/javascript", src: "/tests/SimpleTest/LogController.js", diff --git a/testing/mochitest/tests/SimpleTest/TestRunner.js b/testing/mochitest/tests/SimpleTest/TestRunner.js index dbbd49d46f38..2c67b4673b7a 100644 --- a/testing/mochitest/tests/SimpleTest/TestRunner.js +++ b/testing/mochitest/tests/SimpleTest/TestRunner.js @@ -7,7 +7,6 @@ */ // This file expects the following files to be loaded. 
-/* import-globals-from ../../../modules/StructuredLog.jsm */ /* import-globals-from LogController.js */ /* import-globals-from MemoryStats.js */ /* import-globals-from MozillaLogger.js */ @@ -16,6 +15,13 @@ "use strict"; +const { + StructuredLogger, + StructuredFormatter, +} = SpecialPowers.ChromeUtils.importESModule( + "resource://testing-common/StructuredLog.sys.mjs" +); + function getElement(id) { return typeof id == "string" ? document.getElementById(id) : id; } @@ -337,13 +343,15 @@ TestRunner._dumpMessage = function(message) { // From https://searchfox.org/mozilla-central/source/testing/modules/StructuredLog.jsm TestRunner.structuredLogger = new StructuredLogger( "mochitest", - TestRunner._dumpMessage + TestRunner._dumpMessage, + [], + TestRunner ); TestRunner.structuredLogger.deactivateBuffering = function() { - TestRunner.structuredLogger._logData("buffering_off"); + TestRunner.structuredLogger.logData("buffering_off"); }; TestRunner.structuredLogger.activateBuffering = function() { - TestRunner.structuredLogger._logData("buffering_on"); + TestRunner.structuredLogger.logData("buffering_on"); }; TestRunner.log = function(msg) { diff --git a/testing/modules/StructuredLog.jsm b/testing/modules/StructuredLog.sys.mjs similarity index 73% rename from testing/modules/StructuredLog.jsm rename to testing/modules/StructuredLog.sys.mjs index 7aadc4574d68..a4524841a380 100644 --- a/testing/modules/StructuredLog.jsm +++ b/testing/modules/StructuredLog.sys.mjs @@ -2,38 +2,34 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -"use strict"; - -var EXPORTED_SYMBOLS = ["StructuredLogger", "StructuredFormatter"]; - /** * TestLogger: Logger class generating messages compliant with the * structured logging protocol for tests exposed by mozlog * - * @param name + * @param {string} name * The name of the logger to instantiate. 
- * @param dumpFun + * @param {function} [dumpFun] * An underlying function to be used to log raw messages. This function * will receive the complete serialized json string to log. - * @param mutators - * An array of functions used to add global context to log messages. - * These will each be called with the complete object to log as an - * argument. + * @param {object} [scope] + * The scope that the dumpFun is loaded in, so that messages are cloned + * into that scope before passing them. */ -var StructuredLogger = function(name, dumpFun = dump, mutators = []) { - this.name = name; - this._dumpFun = dumpFun; - this._mutatorFuns = mutators; -}; +export class StructuredLogger { + name = null; + #dumpFun = null; + #dumpScope = null; + + constructor(name, dumpFun = dump, scope = null) { + this.name = name; + this.#dumpFun = dumpFun; + this.#dumpScope = scope; + } -/** - * Log functions producing messages in the format specified by mozlog - */ -StructuredLogger.prototype = { testStart(test) { - var data = { test: this._testId(test) }; - this._logData("test_start", data); - }, + var data = { test: this.#testId(test) }; + this.logData("test_start", data); + } testStatus( test, @@ -50,7 +46,7 @@ StructuredLogger.prototype = { } var data = { - test: this._testId(test), + test: this.#testId(test), subtest, status, }; @@ -68,8 +64,8 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("test_status", data); - }, + this.logData("test_status", data); + } testEnd( test, @@ -79,7 +75,7 @@ StructuredLogger.prototype = { stack = null, extra = null ) { - var data = { test: this._testId(test), status }; + var data = { test: this.#testId(test), status }; if (expected != status && status != "SKIP") { data.expected = expected; @@ -94,19 +90,19 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("test_end", data); - }, + this.logData("test_end", data); + } assertionCount(test, count, minExpected = 0, maxExpected = 0) { var data = { - test: 
this._testId(test), + test: this.#testId(test), min_expected: minExpected, max_expected: maxExpected, count, }; - this._logData("assertion_count", data); - }, + this.logData("assertion_count", data); + } suiteStart( ids, @@ -117,7 +113,7 @@ StructuredLogger.prototype = { extra = null ) { Object.keys(ids).map(function(manifest) { - ids[manifest] = ids[manifest].map(x => this._testId(x)); + ids[manifest] = ids[manifest].map(x => this.#testId(x)); }, this); var data = { tests: ids }; @@ -141,8 +137,8 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("suite_start", data); - }, + this.logData("suite_start", data); + } suiteEnd(extra = null) { var data = {}; @@ -151,8 +147,8 @@ StructuredLogger.prototype = { data.extra = extra; } - this._logData("suite_end", data); - }, + this.logData("suite_end", data); + } /** * Unstructured logging functions. The "extra" parameter can always by used to @@ -172,37 +168,37 @@ StructuredLogger.prototype = { } } - this._logData("log", data); - }, + this.logData("log", data); + } debug(message, extra = null) { this.log("DEBUG", message, extra); - }, + } info(message, extra = null) { this.log("INFO", message, extra); - }, + } warning(message, extra = null) { this.log("WARNING", message, extra); - }, + } error(message, extra = null) { this.log("ERROR", message, extra); - }, + } critical(message, extra = null) { this.log("CRITICAL", message, extra); - }, + } processOutput(thread, message) { - this._logData("process_output", { + this.logData("process_output", { message, thread, }); - }, + } - _logData(action, data = {}) { + logData(action, data = {}) { var allData = { action, time: Date.now(), @@ -215,43 +211,44 @@ StructuredLogger.prototype = { allData[field] = data[field]; } - for (var fun of this._mutatorFuns) { - fun(allData); + if (this.#dumpScope) { + this.#dumpFun(Cu.cloneInto(allData, this.#dumpScope)); + } else { + this.#dumpFun(allData); } + } - this._dumpFun(allData); - }, - - _testId(test) { + #testId(test) { 
if (Array.isArray(test)) { return test.join(" "); } return test; - }, -}; + } +} /** * StructuredFormatter: Formatter class turning structured messages * into human-readable messages. */ -var StructuredFormatter = function() { - this.testStartTimes = {}; -}; +export class StructuredFormatter { + // The time at which the whole suite of tests started. + #suiteStartTime = null; + + #testStartTimes = new Map(); -StructuredFormatter.prototype = { log(message) { return message.message; - }, + } suite_start(message) { - this.suiteStartTime = message.time; + this.#suiteStartTime = message.time; return "SUITE-START | Running " + message.tests.length + " tests"; - }, + } test_start(message) { - this.testStartTimes[message.test] = new Date().getTime(); + this.#testStartTimes.set(message.test, new Date().getTime()); return "TEST-START | " + message.test; - }, + } test_status(message) { var statusInfo = @@ -270,11 +267,11 @@ StructuredFormatter.prototype = { ); } return "TEST-" + message.status + " | " + statusInfo; - }, + } test_end(message) { - var startTime = this.testStartTimes[message.test]; - delete this.testStartTimes[message.test]; + var startTime = this.#testStartTimes.get(message.test); + this.#testStartTimes.delete(message.test); var statusInfo = message.test + (message.message ? 
" | " + String(message.message) : ""); var result; @@ -291,9 +288,9 @@ StructuredFormatter.prototype = { } result = result + " | took " + message.time - startTime + "ms"; return result; - }, + } suite_end(message) { - return "SUITE-END | took " + message.time - this.suiteStartTime + "ms"; - }, -}; + return "SUITE-END | took " + message.time - this.#suiteStartTime + "ms"; + } +} diff --git a/testing/modules/moz.build b/testing/modules/moz.build index 9326347a3fe5..d201d9dd524c 100644 --- a/testing/modules/moz.build +++ b/testing/modules/moz.build @@ -16,7 +16,7 @@ TESTING_JS_MODULES += [ "MockRegistrar.sys.mjs", "sinon-7.2.7.js", "Sinon.jsm", - "StructuredLog.jsm", + "StructuredLog.sys.mjs", "TestUtils.sys.mjs", "XPCShellContentUtils.sys.mjs", ] @@ -27,7 +27,7 @@ if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": ] -TEST_HARNESS_FILES.testing.mochitest.tests.SimpleTest += ["StructuredLog.jsm"] +TEST_HARNESS_FILES.testing.mochitest.tests.SimpleTest += ["StructuredLog.sys.mjs"] with Files("**"): BUG_COMPONENT = ("Testing", "General") diff --git a/testing/modules/tests/xpcshell/test_structuredlog.js b/testing/modules/tests/xpcshell/test_structuredlog.js index 79f4162ccbf1..3802fcc0980e 100644 --- a/testing/modules/tests/xpcshell/test_structuredlog.js +++ b/testing/modules/tests/xpcshell/test_structuredlog.js @@ -2,8 +2,8 @@ http://creativecommons.org/publicdomain/zero/1.0/ */ function run_test() { - const { StructuredLogger } = ChromeUtils.import( - "resource://testing-common/StructuredLog.jsm" + const { StructuredLogger } = ChromeUtils.importESModule( + "resource://testing-common/StructuredLog.sys.mjs" ); let testBuffer = []; @@ -21,15 +21,9 @@ function run_test() { } // The logger should always set the source to the logger name. equal(lastMsg.source, "test_log"); - // The source_file field is always set by the mutator function. 
- equal(lastMsg.source_file, "test_structuredlog.js"); }; - let addFileName = function(data) { - data.source_file = "test_structuredlog.js"; - }; - - let logger = new StructuredLogger("test_log", appendBuffer, [addFileName]); + let logger = new StructuredLogger("test_log", appendBuffer); // Test unstructured logging logger.info("Test message"); diff --git a/testing/mozbase/mozcrash/setup.py b/testing/mozbase/mozcrash/setup.py index 4df75e5a9f7d..f3fb252a2039 100644 --- a/testing/mozbase/mozcrash/setup.py +++ b/testing/mozbase/mozcrash/setup.py @@ -7,7 +7,7 @@ from __future__ import absolute_import from setuptools import setup PACKAGE_NAME = "mozcrash" -PACKAGE_VERSION = "2.0.0" +PACKAGE_VERSION = "2.2.0" # dependencies deps = ["mozfile >= 1.0", "mozlog >= 6.0"] diff --git a/testing/xpcshell/head.js b/testing/xpcshell/head.js index b458d937876a..646c036badf1 100644 --- a/testing/xpcshell/head.js +++ b/testing/xpcshell/head.js @@ -90,8 +90,8 @@ var _dumpLog = function(raw_msg) { dump("\n" + JSON.stringify(raw_msg) + "\n"); }; -var { StructuredLogger: _LoggerClass } = ChromeUtils.import( - "resource://testing-common/StructuredLog.jsm" +var { StructuredLogger: _LoggerClass } = ChromeUtils.importESModule( + "resource://testing-common/StructuredLog.sys.mjs" ); var _testLogger = new _LoggerClass("xpcshell/head.js", _dumpLog, [_add_params]); diff --git a/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js b/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js index acea225a31b7..4b35e9153272 100644 --- a/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js +++ b/toolkit/components/extensions/test/xpcshell/test_ext_scripting_startupCache.js @@ -15,8 +15,8 @@ AddonTestUtils.createAppInfo( const { ExtensionScriptingStore } = ChromeUtils.import( "resource://gre/modules/ExtensionScriptingStore.jsm" ); -const { TestUtils } = ChromeUtils.import( - "resource://testing-common/TestUtils.jsm" +const { 
TestUtils } = ChromeUtils.importESModule( + "resource://testing-common/TestUtils.sys.mjs" ); const { sinon } = ChromeUtils.import("resource://testing-common/Sinon.jsm"); diff --git a/toolkit/components/telemetry/metrics.yaml b/toolkit/components/telemetry/metrics.yaml index 4b92b6ff1738..a03170e266b0 100644 --- a/toolkit/components/telemetry/metrics.yaml +++ b/toolkit/components/telemetry/metrics.yaml @@ -14,6 +14,7 @@ $tags: legacy.telemetry: client_id: type: uuid + lifetime: application description: | The client_id according to Telemetry. Might not always have a value due to being too early for it to have diff --git a/toolkit/mozapps/extensions/test/browser/browser.ini b/toolkit/mozapps/extensions/test/browser/browser.ini index 61c1dbecfcb4..483935e2892f 100644 --- a/toolkit/mozapps/extensions/test/browser/browser.ini +++ b/toolkit/mozapps/extensions/test/browser/browser.ini @@ -46,6 +46,10 @@ skip-if = fission && os == "linux" && asan # Bug 1713895 - new Fission platform triage os == "win" && os_version == "6.1" # Bug 1717250 +prefs = + dom.webmidi.enabled=true + midi.testing=true + [browser_about_debugging_link.js] [browser_addon_list_reordering.js] [browser_bug572561.js] diff --git a/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js b/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js index 02bf8c911305..7652048961a9 100644 --- a/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js +++ b/toolkit/mozapps/extensions/test/browser/browser_html_sitepermission_addons.js @@ -25,13 +25,6 @@ async function uninstallAllSitePermissionAddons() { } add_setup(async () => { - await SpecialPowers.pushPrefEnv({ - set: [ - ["midi.prompt.testing", false], - ["midi.testing", true], - ], - }); - registerCleanupFunction(uninstallAllSitePermissionAddons); }); diff --git a/toolkit/mozapps/installer/packager.mk b/toolkit/mozapps/installer/packager.mk index 0010a2692b6a..001512b1bdb0 100644 --- 
a/toolkit/mozapps/installer/packager.mk +++ b/toolkit/mozapps/installer/packager.mk @@ -114,7 +114,7 @@ ifndef MOZ_ARTIFACT_BUILDS else @echo 'Packaging existing XPT artifacts from artifact build into archive ($(XPT_ARTIFACTS_ARCHIVE_BASENAME).zip)' $(call py_action,zip,-C $(ABS_DIST)/xpt_artifacts '$(ABS_DIST)/$(PKG_PATH)$(XPT_ARTIFACTS_ARCHIVE_BASENAME).zip' '*.xpt') -endif # COMPILE_ENVIRONMENT +endif # MOZ_ARTIFACT_BUILDS prepare-package: stage-package