Merge mozilla-central to autoland. a=merge CLOSED TREE

This commit is contained in:
Csoregi Natalia 2018-02-09 18:54:37 +02:00
commit fca056426a
120 changed files with 3728 additions and 804 deletions

View File

@ -72,11 +72,11 @@ function observeChannels(onChannel) {
// We use a dummy proxy filter to catch all channels, even those that do not
// generate an "http-on-modify-request" notification, such as link preconnects.
let proxyFilter = {
applyFilter(aProxyService, aChannel, aProxy) {
applyFilter(aProxyService, aChannel, aProxy, aCallback) {
// We have the channel; provide it to the callback.
onChannel(aChannel);
// Pass on aProxy unmodified.
return aProxy;
aCallback.onProxyFilterResult(aProxy);
}
};
protocolProxyService.registerChannelFilter(proxyFilter, 0);

View File

@ -15,6 +15,8 @@ support-files =
[browser_addBookmarkForFrame.js]
[browser_bookmark_add_tags.js]
skip-if = (os == 'win' && ccov) # Bug 1423667
[browser_bookmark_change_location.js]
skip-if = (os == 'win' && ccov) # Bug 1423667
[browser_bookmark_folder_moveability.js]
skip-if = (os == 'win' && ccov) # Bug 1423667
[browser_bookmark_remove_tags.js]

View File

@ -0,0 +1,108 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/
*/
/**
* Test that the bookmark location (url) can be changed from the toolbar and the sidebar.
*/
"use strict";
const TEST_URL = "about:buildconfig";
const TEST_URL2 = "about:config";
const TEST_URL3 = "about:credits";
// Setup.
add_task(async function setup() {
  // The toolbar bookmark node is only reachable when the personal toolbar is
  // visible, so make sure it is shown for the duration of this test file.
  let toolbar = document.getElementById("PersonalToolbar");
  let wasCollapsed = toolbar.collapsed;
  // Uncollapse the personal toolbar if needed.
  if (wasCollapsed) {
    await promiseSetToolbarVisibility(toolbar, true);
  }
  // Cleanup.
  registerCleanupFunction(async () => {
    // Collapse the personal toolbar if needed.
    if (wasCollapsed) {
      await promiseSetToolbarVisibility(toolbar, false);
    }
    // Remove every bookmark created by the tasks in this file.
    await PlacesUtils.bookmarks.eraseEverything();
  });
});
// Tests that a bookmark's location (url) can be changed from the Properties
// dialog opened via the bookmarks toolbar context menu.
add_task(async function test_change_location_from_Toolbar() {
  let toolbarBookmark = await PlacesUtils.bookmarks.insert({
    parentGuid: PlacesUtils.bookmarks.toolbarGuid,
    title: "",
    url: TEST_URL
  });
  let toolbarNode = getToolbarNodeForItemGuid(toolbarBookmark.guid);
  await withBookmarksDialog(
    false,
    async function openPropertiesDialog() {
      // Right-click the toolbar bookmark and wait for its context menu.
      let placesContext = document.getElementById("placesContext");
      let promisePopup = BrowserTestUtils.waitForEvent(placesContext, "popupshown");
      EventUtils.synthesizeMouseAtCenter(toolbarNode, {
        button: 2,
        type: "contextmenu"
      });
      await promisePopup;
      // Activate the "Properties" menu item to open the bookmark dialog.
      let properties = document.getElementById("placesContext_show:info");
      EventUtils.synthesizeMouseAtCenter(properties, {});
    },
    async function test(dialogWin) {
      // Check the initial location.
      let locationPicker = dialogWin.document.getElementById("editBMPanel_locationField");
      Assert.equal(locationPicker.value, TEST_URL, "The location is the expected one.");
      // Listen for the bookmarks-store change before triggering it.
      let promiseLocationChange = PlacesTestUtils.waitForNotification("onItemChanged", (id, parentId, index, itemUrl) => itemUrl === TEST_URL2);
      // Update the "location" field.
      fillBookmarkTextField("editBMPanel_locationField", TEST_URL2, dialogWin, false);
      await waitForCondition(() => locationPicker.value === TEST_URL2, "The location is correct after update.");
      // Confirm and close the dialog.
      EventUtils.synthesizeKey("VK_RETURN", {}, dialogWin);
      await promiseLocationChange;
      let updatedBm = await PlacesUtils.bookmarks.fetch(toolbarBookmark.guid);
      Assert.equal(updatedBm.url, TEST_URL2, "Should have updated the bookmark location in the database.");
    }
  );
});
// Tests that a bookmark's location (url) can be changed from the Properties
// dialog opened via the bookmarks sidebar. Reuses the bookmark whose url was
// set to TEST_URL2 by the previous task.
add_task(async function test_change_location_from_Sidebar() {
  let bm = await PlacesUtils.bookmarks.fetch({url: TEST_URL2});
  await withSidebarTree("bookmarks", async function(tree) {
    tree.selectItems([bm.guid]);
    await withBookmarksDialog(
      false,
      function openPropertiesDialog() {
        // Open the Properties dialog for the selected sidebar item.
        tree.controller.doCommand("placesCmd_show:info");
      },
      async function test(dialogWin) {
        // Check the initial location.
        let locationPicker = dialogWin.document.getElementById("editBMPanel_locationField");
        Assert.equal(locationPicker.value, TEST_URL2, "The location is the expected one.");
        // Listen for the bookmarks-store change before triggering it.
        let promiseLocationChange = PlacesTestUtils.waitForNotification("onItemChanged", (id, parentId, index, itemUrl) => itemUrl === TEST_URL3);
        // Update the "location" field.
        fillBookmarkTextField("editBMPanel_locationField", TEST_URL3, dialogWin, false);
        await waitForCondition(() => locationPicker.value === TEST_URL3, "The location is correct after update.");
        // Confirm and close the dialog.
        EventUtils.synthesizeKey("VK_RETURN", {}, dialogWin);
        await promiseLocationChange;
        let updatedBm = await PlacesUtils.bookmarks.fetch(bm.guid);
        Assert.equal(updatedBm.url, TEST_URL3, "Should have updated the bookmark location in the database.");
      }
    );
  });
});

View File

@ -0,0 +1,47 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {
CLEAR_FLEXBOX,
UPDATE_FLEXBOX,
UPDATE_FLEXBOX_HIGHLIGHTED,
} = require("./index");
module.exports = {
/**
* Clears the flexbox state by resetting it back to the initial flexbox state.
*/
clearFlexbox() {
return {
type: CLEAR_FLEXBOX,
};
},
/**
* Updates the flexbox state with the newly selected flexbox.
*/
updateFlexbox(flexbox) {
return {
type: UPDATE_FLEXBOX,
flexbox,
};
},
/**
* Updates the flexbox highlighted state.
*
* @param {Boolean} highlighted
* Whether or not the flexbox highlighter is highlighting the flexbox.
*/
updateFlexboxHighlighted(highlighted) {
return {
type: UPDATE_FLEXBOX_HIGHLIGHTED,
highlighted,
};
},
};

View File

@ -8,7 +8,13 @@ const { createEnum } = require("devtools/client/shared/enum");
createEnum([
// Update the entire flexboxes state with the new list of flexboxes.
"UPDATE_FLEXBOXES",
// Clears the flexbox state by resetting it back to the initial flexbox state.
"CLEAR_FLEXBOX",
// Updates the flexbox state with the newly selected flexbox.
"UPDATE_FLEXBOX",
// Updates the flexbox highlighted state.
"UPDATE_FLEXBOX_HIGHLIGHTED",
], module.exports);

View File

@ -5,5 +5,6 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
DevToolsModules(
'flexbox.js',
'index.js',
)

View File

@ -4,22 +4,61 @@
"use strict";
const {
DOM: dom,
PureComponent,
} = require("devtools/client/shared/vendor/react");
const { createFactory, PureComponent } = require("devtools/client/shared/vendor/react");
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { getStr } = require("devtools/client/inspector/layout/utils/l10n");
const FlexboxItem = createFactory(require("./FlexboxItem"));
const Types = require("../types");
class Flexbox extends PureComponent {
static get propTypes() {
return {};
return {
flexbox: PropTypes.shape(Types.flexbox).isRequired,
setSelectedNode: PropTypes.func.isRequired,
onHideBoxModelHighlighter: PropTypes.func.isRequired,
onShowBoxModelHighlighterForNode: PropTypes.func.isRequired,
onToggleFlexboxHighlighter: PropTypes.func.isRequired,
};
}
render() {
return dom.div(
{
id: "layout-flexbox-container",
}
);
const {
flexbox,
setSelectedNode,
onHideBoxModelHighlighter,
onShowBoxModelHighlighterForNode,
onToggleFlexboxHighlighter,
} = this.props;
return flexbox.actorID ?
dom.div({ id: "layout-flexbox-container" },
dom.div({ className: "flexbox-content" },
dom.div({ className: "flexbox-container" },
dom.span({}, getStr("flexbox.overlayFlexbox")),
dom.ul(
{
id: "flexbox-list",
className: "devtools-monospace",
},
FlexboxItem({
key: flexbox.id,
flexbox,
setSelectedNode,
onHideBoxModelHighlighter,
onShowBoxModelHighlighterForNode,
onToggleFlexboxHighlighter,
})
)
)
)
)
:
dom.div({ className: "devtools-sidepanel-no-result" },
getStr("flexbox.noFlexboxeOnThisPage")
);
}
}

View File

@ -0,0 +1,99 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const { PureComponent } = require("devtools/client/shared/vendor/react");
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { translateNodeFrontToGrip } = require("devtools/client/inspector/shared/utils");
// Reps
const { REPS, MODE } = require("devtools/client/shared/components/reps/reps");
const { Rep } = REPS;
const ElementNode = REPS.ElementNode;
const Types = require("../types");
/**
 * Renders one flex container entry in the flexbox list: a checkbox that
 * toggles the flexbox highlighter, followed by an ElementNode rep of the
 * container element.
 */
class FlexboxItem extends PureComponent {
  static get propTypes() {
    return {
      flexbox: PropTypes.shape(Types.flexbox).isRequired,
      setSelectedNode: PropTypes.func.isRequired,
      onHideBoxModelHighlighter: PropTypes.func.isRequired,
      onShowBoxModelHighlighterForNode: PropTypes.func.isRequired,
      onToggleFlexboxHighlighter: PropTypes.func.isRequired,
    };
  }

  constructor(props) {
    super(props);
    // Bind once so the same function identities are passed to React on every
    // render.
    this.onFlexboxCheckboxClick = this.onFlexboxCheckboxClick.bind(this);
    this.onFlexboxInspectIconClick = this.onFlexboxInspectIconClick.bind(this);
  }

  /**
   * Toggles the flexbox highlighter for this item's container, unless the
   * click actually landed on the inspect (svg) icon rendered by the Rep.
   *
   * @param {SyntheticEvent} event
   *        The change event fired by the checkbox.
   */
  onFlexboxCheckboxClick(event) {
    // If the click was on the svg icon to select the node in the inspector, bail out.
    const clickedTarget = event.nativeEvent ?
      event.nativeEvent.explicitOriginalTarget : null;

    if (clickedTarget && clickedTarget.namespaceURI === "http://www.w3.org/2000/svg") {
      // We should be able to cancel the click event propagation after the following reps
      // issue is implemented : https://github.com/devtools-html/reps/issues/95 .
      event.preventDefault();
      return;
    }

    this.props.onToggleFlexboxHighlighter(this.props.flexbox.nodeFront);
  }

  /**
   * Selects the flex container in the markup view and scrolls it into view on
   * the page when the Rep's inspect icon is clicked.
   *
   * @param {NodeFront} nodeFront
   *        The NodeFront of the flex container element.
   */
  onFlexboxInspectIconClick(nodeFront) {
    this.props.setSelectedNode(nodeFront, "layout-panel").catch(e => console.error(e));
    nodeFront.scrollIntoView().catch(e => console.error(e));
  }

  render() {
    const { flexbox } = this.props;
    const { actorID, highlighted, nodeFront } = flexbox;

    // Checkbox reflecting (and toggling) the highlighter state.
    const highlighterToggle = dom.input(
      {
        type: "checkbox",
        value: actorID,
        checked: highlighted,
        onChange: this.onFlexboxCheckboxClick,
      }
    );

    // ElementNode rep of the container, wired to the box model highlighter.
    const containerRep = Rep(
      {
        defaultRep: ElementNode,
        mode: MODE.TINY,
        object: translateNodeFrontToGrip(nodeFront),
        onDOMNodeMouseOut: () => this.props.onHideBoxModelHighlighter(),
        onDOMNodeMouseOver: () => this.props.onShowBoxModelHighlighterForNode(nodeFront),
        onInspectIconClick: () => this.onFlexboxInspectIconClick(nodeFront),
      }
    );

    return dom.li({}, dom.label({}, highlighterToggle, containerRep));
  }
}

module.exports = FlexboxItem;

View File

@ -6,4 +6,5 @@
DevToolsModules(
'Flexbox.js',
'FlexboxItem.js',
)

View File

@ -4,17 +4,254 @@
"use strict";
const { throttle } = require("devtools/client/inspector/shared/utils");
const {
clearFlexbox,
updateFlexbox,
updateFlexboxHighlighted,
} = require("./actions/flexbox");
class FlexboxInspector {
  /**
   * @param {Inspector} inspector
   *        The inspector panel this FlexboxInspector is associated with.
   * @param {Window} window
   *        The window of the layout view document.
   */
  constructor(inspector, window) {
    this.document = window.document;
    this.highlighters = inspector.highlighters;
    this.inspector = inspector;
    this.store = inspector.store;
    this.walker = inspector.walker;

    this.onHighlighterChange = this.onHighlighterChange.bind(this);
    // Throttle reflow handling so rapid layout changes don't flood the store.
    this.onReflow = throttle(this.onReflow, 500, this);
    this.onSidebarSelect = this.onSidebarSelect.bind(this);
    this.onToggleFlexboxHighlighter = this.onToggleFlexboxHighlighter.bind(this);
    this.onUpdatePanel = this.onUpdatePanel.bind(this);

    this.init();
  }

  /**
   * Checks for server-side support of getCurrentFlexbox, fetches the layout
   * actor and subscribes to the highlighter and sidebar events.
   */
  async init() {
    if (!this.inspector) {
      return;
    }
    try {
      this.hasGetCurrentFlexbox = await this.inspector.target.actorHasMethod("layout",
        "getCurrentFlexbox");
      this.layoutInspector = await this.walker.getLayoutInspector();
    } catch (e) {
      // These calls might fail if called asynchronously after the toolbox is finished
      // closing.
      return;
    }

    this.highlighters.on("flexbox-highlighter-hidden", this.onHighlighterChange);
    this.highlighters.on("flexbox-highlighter-shown", this.onHighlighterChange);
    this.inspector.sidebar.on("select", this.onSidebarSelect);

    this.onSidebarSelect();
  }

  /**
   * Removes all event listeners and clears every object reference so this
   * instance can be garbage collected.
   */
  destroy() {
    this.highlighters.off("flexbox-highlighter-hidden", this.onHighlighterChange);
    this.highlighters.off("flexbox-highlighter-shown", this.onHighlighterChange);
    this.inspector.selection.off("new-node-front", this.onUpdatePanel);
    this.inspector.sidebar.off("select", this.onSidebarSelect);
    this.inspector.off("new-root", this.onUpdatePanel);

    this.inspector.reflowTracker.untrackReflows(this, this.onReflow);

    this.document = null;
    this.hasGetCurrentFlexbox = null;
    this.highlighters = null;
    this.inspector = null;
    this.layoutInspector = null;
    this.store = null;
    this.walker = null;
  }

  /**
   * Returns the props the React flexbox components need from this inspector.
   */
  getComponentProps() {
    return {
      onToggleFlexboxHighlighter: this.onToggleFlexboxHighlighter,
    };
  }

  /**
   * Returns true if the layout panel is visible, and false otherwise.
   */
  isPanelVisible() {
    return this.inspector && this.inspector.toolbox && this.inspector.sidebar &&
           this.inspector.toolbox.currentToolId === "inspector" &&
           this.inspector.sidebar.getCurrentTabID() === "layoutview";
  }

  /**
   * Handler for "flexbox-highlighter-shown" and "flexbox-highlighter-hidden" events
   * emitted from the HighlightersOverlay. Updates the flex container highlighted state
   * only if the provided NodeFront is the current selected flex container.
   *
   * @param {Event} event
   *        Event that was triggered.
   * @param {NodeFront} nodeFront
   *        The NodeFront of the flex container element for which the flexbox
   *        highlighter is shown for.
   */
  onHighlighterChange(event, nodeFront) {
    const { flexbox } = this.store.getState();
    const highlighted = event === "flexbox-highlighter-shown";

    // Only dispatch when the state actually changes for the selected container.
    if (flexbox.nodeFront === nodeFront && flexbox.highlighted !== highlighted) {
      this.store.dispatch(updateFlexboxHighlighted(highlighted));
    }
  }

  /**
   * Handler for the "reflow" event fired by the inspector's reflow tracker. On reflows,
   * updates the flexbox panel because the shape of the flexbox on the page may have
   * changed.
   *
   * TODO: In the future, we will want to compare the flex item fragment data returned
   * for rendering the flexbox outline.
   */
  async onReflow() {
    if (!this.isPanelVisible() || !this.store || !this.inspector.selection.nodeFront) {
      return;
    }

    const { flexbox } = this.store.getState();

    let flexboxFront;
    try {
      if (!this.hasGetCurrentFlexbox) {
        return;
      }

      flexboxFront = await this.layoutInspector.getCurrentFlexbox(
        this.inspector.selection.nodeFront);
    } catch (e) {
      // This call might fail if called asynchronously after the toolbox is finished
      // closing.
      return;
    }

    // Clear the flexbox panel if there is no flex container for the current node
    // selection.
    if (!flexboxFront) {
      this.store.dispatch(clearFlexbox());
      return;
    }

    // Do nothing because the same flex container is still selected.
    if (flexbox.actorID === flexboxFront.actorID) {
      return;
    }

    // Update the flexbox panel with the new flexbox front contents.
    this.update(flexboxFront);
  }

  /**
   * Handler for the inspector sidebar "select" event. Updates the flexbox panel if it
   * is visible.
   */
  onSidebarSelect() {
    if (!this.isPanelVisible()) {
      // Stop listening while the panel is hidden to avoid useless updates.
      this.inspector.reflowTracker.untrackReflows(this, this.onReflow);
      this.inspector.selection.off("new-node-front", this.onUpdatePanel);
      this.inspector.off("new-root", this.onUpdatePanel);
      return;
    }

    this.inspector.reflowTracker.trackReflows(this, this.onReflow);
    this.inspector.selection.on("new-node-front", this.onUpdatePanel);
    this.inspector.on("new-root", this.onUpdatePanel);

    this.update();
  }

  /**
   * Handler for a change in the input checkboxes in the FlexboxItem component.
   * Toggles on/off the flexbox highlighter for the provided flex container element.
   *
   * @param {NodeFront} node
   *        The NodeFront of the flex container element for which the flexbox
   *        highlighter is toggled on/off for.
   */
  onToggleFlexboxHighlighter(node) {
    this.highlighters.toggleFlexboxHighlighter(node);
    // After the toggle, the highlighter is shown iff flexboxHighlighterShown is
    // now this node; the state was previously read from the misspelled
    // "flexboxHighlighterShow" (always undefined), which made the dispatched
    // flag always true.
    this.store.dispatch(updateFlexboxHighlighted(node !==
      this.highlighters.flexboxHighlighterShown));
  }

  /**
   * Handler for "new-root" event fired by the inspector and "new-node-front" event fired
   * by the inspector selection. Updates the flexbox panel if it is visible.
   */
  onUpdatePanel() {
    if (!this.isPanelVisible()) {
      return;
    }

    this.update();
  }

  /**
   * Updates the flexbox panel by dispatching the new flexbox data. This is called when
   * the layout view becomes visible or a new node is selected and needs to be updated
   * with new flexbox data.
   *
   * @param {FlexboxFront|Null} flexboxFront
   *        The FlexboxFront of the flex container for the current node selection.
   */
  async update(flexboxFront) {
    // Stop refreshing if the inspector or store is already destroyed or no node is
    // selected.
    if (!this.inspector || !this.store || !this.inspector.selection.nodeFront) {
      return;
    }

    // Fetch the current flexbox if no flexbox front was passed into this update.
    if (!flexboxFront) {
      try {
        if (!this.hasGetCurrentFlexbox) {
          return;
        }

        flexboxFront = await this.layoutInspector.getCurrentFlexbox(
          this.inspector.selection.nodeFront);
      } catch (e) {
        // This call might fail if called asynchronously after the toolbox is finished
        // closing.
        return;
      }
    }

    // Clear the flexbox panel if there is no flex container for the current node
    // selection.
    if (!flexboxFront) {
      this.store.dispatch(clearFlexbox());
      return;
    }

    let nodeFront = flexboxFront.containerNodeFront;

    // If the FlexboxFront doesn't yet have access to the NodeFront for its container,
    // then get it from the walker. This happens when the walker hasn't seen this
    // particular DOM Node in the tree yet or when we are connected to an older server.
    if (!nodeFront) {
      try {
        nodeFront = await this.walker.getNodeFromActor(flexboxFront.actorID,
          ["containerEl"]);
      } catch (e) {
        // This call might fail if called asynchronously after the toolbox is finished
        // closing.
        return;
      }
    }

    this.store.dispatch(updateFlexbox({
      actorID: flexboxFront.actorID,
      highlighted: nodeFront === this.highlighters.flexboxHighlighterShown,
      nodeFront,
    }));
  }
}

View File

@ -0,0 +1,46 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {
CLEAR_FLEXBOX,
UPDATE_FLEXBOX,
UPDATE_FLEXBOX_HIGHLIGHTED,
} = require("../actions/index");
const INITIAL_FLEXBOX = {
// The actor ID of the flex container.
actorID: null,
// Whether or not the flexbox highlighter is highlighting the flex container.
highlighted: false,
// The NodeFront of the flex container.
nodeFront: null,
};
let reducers = {
[CLEAR_FLEXBOX](flexbox, _) {
return INITIAL_FLEXBOX;
},
[UPDATE_FLEXBOX](_, { flexbox }) {
return flexbox;
},
[UPDATE_FLEXBOX_HIGHLIGHTED](flexbox, { highlighted }) {
return Object.assign({}, flexbox, {
highlighted,
});
},
};
module.exports = function (flexbox = INITIAL_FLEXBOX, action) {
let reducer = reducers[action.type];
if (!reducer) {
return flexbox;
}
return reducer(flexbox, action);
};

View File

@ -1,19 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const INITIAL_FLEXBOXES = [];
let reducers = {
};
module.exports = function (flexboxes = INITIAL_FLEXBOXES, action) {
let reducer = reducers[action.type];
if (!reducer) {
return flexboxes;
}
return reducer(flexboxes, action);
};

View File

@ -4,4 +4,4 @@
"use strict";
exports.flexboxes = require("./flexboxes");
exports.flexbox = require("./flexbox");

View File

@ -5,6 +5,6 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
DevToolsModules(
'flexboxes.js',
'flexbox.js',
'index.js',
)

View File

@ -7,9 +7,14 @@
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
exports.flexbox = {
// The id of the flexbox container.
id: PropTypes.number,
// The node front of the flexbox container.
// The actor ID of the flex container.
actorID: PropTypes.number,
// Whether or not the flexbox highlighter is highlighting the flex container.
highlighted: PropTypes.bool,
// The NodeFront of the flex container.
nodeFront: PropTypes.object,
};

View File

@ -7,13 +7,13 @@
const { createFactory, PureComponent } = require("devtools/client/shared/vendor/react");
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { getStr } = require("devtools/client/inspector/layout/utils/l10n");
const GridDisplaySettings = createFactory(require("./GridDisplaySettings"));
const GridList = createFactory(require("./GridList"));
const GridOutline = createFactory(require("./GridOutline"));
const Types = require("../types");
const { getStr } = require("../utils/l10n");
class Grid extends PureComponent {
static get propTypes() {

View File

@ -7,9 +7,9 @@
const { PureComponent } = require("devtools/client/shared/vendor/react");
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { getStr } = require("devtools/client/inspector/layout/utils/l10n");
const Types = require("../types");
const { getStr } = require("../utils/l10n");
class GridDisplaySettings extends PureComponent {
static get propTypes() {

View File

@ -7,11 +7,11 @@
const { createFactory, PureComponent } = require("devtools/client/shared/vendor/react");
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { getStr } = require("devtools/client/inspector/layout/utils/l10n");
const GridItem = createFactory(require("./GridItem"));
const Types = require("../types");
const { getStr } = require("../utils/l10n");
class GridList extends PureComponent {
static get propTypes() {

View File

@ -8,9 +8,9 @@ const Services = require("Services");
const { PureComponent } = require("devtools/client/shared/vendor/react");
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { getStr } = require("devtools/client/inspector/layout/utils/l10n");
const Types = require("../types");
const { getStr } = require("../utils/l10n");
// The delay prior to executing the grid cell highlighting.
const GRID_HIGHLIGHTING_DEBOUNCE = 50;

View File

@ -5,6 +5,5 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
DevToolsModules(
'l10n.js',
'utils.js',
)

View File

@ -42,6 +42,12 @@ class LayoutView {
onToggleGeometryEditor,
} = this.inspector.getPanel("boxmodel").getComponentProps();
this.flexboxInspector = new FlexboxInspector(this.inspector,
this.inspector.panelWin);
let {
onToggleFlexboxHighlighter,
} = this.flexboxInspector.getComponentProps();
this.gridInspector = new GridInspector(this.inspector, this.inspector.panelWin);
let {
getSwatchColorPickerTooltip,
@ -71,6 +77,7 @@ class LayoutView {
onShowGridAreaHighlight,
onShowGridCellHighlight,
onShowGridLineNamesHighlight,
onToggleFlexboxHighlighter,
onToggleGeometryEditor,
onToggleGridHighlighter,
onToggleShowGridAreas,
@ -93,6 +100,7 @@ class LayoutView {
* Destruction function called when the inspector is destroyed. Cleans up references.
*/
destroy() {
this.flexboxInspector.destroy();
this.gridInspector.destroy();
this.document = null;

View File

@ -6,6 +6,7 @@
DIRS += [
'components',
'utils',
]
DevToolsModules(

View File

@ -0,0 +1,9 @@
# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
DevToolsModules(
'l10n.js',
)

View File

@ -12,7 +12,7 @@ exports.boxModel = require("devtools/client/inspector/boxmodel/reducers/box-mode
exports.changes = require("devtools/client/inspector/changes/reducers/changes");
exports.events = require("devtools/client/inspector/events/reducers/events");
exports.extensionsSidebar = require("devtools/client/inspector/extensions/reducers/sidebar");
exports.flexboxes = require("devtools/client/inspector/flexbox/reducers/flexboxes");
exports.flexbox = require("devtools/client/inspector/flexbox/reducers/flexbox");
exports.fontOptions = require("devtools/client/inspector/fonts/reducers/font-options");
exports.fonts = require("devtools/client/inspector/fonts/reducers/fonts");
exports.grids = require("devtools/client/inspector/grids/reducers/grids");

View File

@ -8,6 +8,14 @@
# LOCALIZATION NOTE (flexbox.header): The accordion header for the Flexbox pane.
flexbox.header=Flexbox
# LOCALIZATION NOTE (flexbox.noFlexboxeOnThisPage): In the case where there are no CSS
# flex containers to display.
flexbox.noFlexboxeOnThisPage=Select a Flex container or item to continue.
# LOCALIZATION NOTE (flexbox.overlayFlexbox): Header for the list of flex container
# elements if only one item can be selected.
flexbox.overlayFlexbox=Overlay Flexbox
# LOCALIZATION NOTE (layout.cannotShowGridOutline, layout.cannotSHowGridOutline.title):
# In the case where the grid outline cannot be effectively displayed.
layout.cannotShowGridOutline=Cannot show outline for this grid

View File

@ -13,6 +13,7 @@
* Common styles for shared components
*/
.flexbox-container,
.grid-container {
display: flex;
flex-direction: column;
@ -21,28 +22,37 @@
min-width: 140px;
}
.grid-container:first-child {
margin-bottom: 10px;
}
.flexbox-container > span,
.grid-container > span {
font-weight: 600;
margin-bottom: 5px;
pointer-events: none;
}
.flexbox-container > ul,
.grid-container > ul {
list-style: none;
margin: 0;
padding: 0;
}
.flexbox-container li,
.grid-container li {
display: flex;
align-items: center;
padding: 4px 0;
}
/* Checkbox spacing shared by the flexbox and grid lists. A comma is required
   here: without it the two selectors combine into a single descendant
   selector and the rule never matches, unlike the parallel grouped rules for
   span, ul, li and label above and below. */
.flexbox-container input,
.grid-container input {
  margin: 0 5px;
}
.flexbox-container label,
.grid-container label {
display: flex;
align-items: center;
@ -52,6 +62,7 @@
* Grid Container
*/
#layout-flexbox-container,
#layout-grid-container {
display: flex;
flex-direction: column;
@ -62,6 +73,7 @@
* Grid Content
*/
.flexbox-content,
.grid-content {
display: flex;
flex-wrap: wrap;
@ -69,10 +81,6 @@
margin: 5px 0;
}
.grid-container:first-child {
margin-bottom: 10px;
}
/**
* Grid Outline
*/

View File

@ -4,6 +4,7 @@
"use strict";
const { Cu } = require("chrome");
const { Actor, ActorClassWithSpec } = require("devtools/shared/protocol");
const { flexboxSpec, gridSpec, layoutSpec } = require("devtools/shared/specs/layout");
const nodeFilterConstants = require("devtools/shared/dom-node-filter-constants");
@ -30,7 +31,7 @@ const FlexboxActor = ActorClassWithSpec(flexboxSpec, {
* @param {LayoutActor} layoutActor
* The LayoutActor instance.
* @param {DOMNode} containerEl
* The flexbox container element.
* The flex container element.
*/
initialize(layoutActor, containerEl) {
Actor.prototype.initialize.call(this, layoutActor.conn);
@ -136,65 +137,61 @@ const LayoutActor = ActorClassWithSpec(layoutSpec, {
},
/**
* Returns an array of FlexboxActor objects for all the flexbox containers found by
* iterating below the given rootNode.
* Returns the flex container found by iterating on the given selected node. The current
* node can be a flex container or flex item. If it is a flex item, returns the parent
* flex container. Otherwise, return null if the current or parent node is not a flex
* container.
*
* @param {Node|NodeActor} rootNode
* The root node to start iterating at.
* @return {Array} An array of FlexboxActor objects.
* @param {Node|NodeActor} node
* The node to start iterating at.
* @return {FlexboxActor|Null} The FlexboxActor of the flex container of the given node.
* Otherwise, returns null.
*/
getFlexbox(rootNode) {
let flexboxes = [];
if (!rootNode) {
return flexboxes;
getCurrentFlexbox(node) {
if (isNodeDead(node)) {
return null;
}
let treeWalker = this.walker.getDocumentWalker(rootNode,
// Given node can either be a Node or a NodeActor.
if (node.rawNode) {
node = node.rawNode;
}
let treeWalker = this.walker.getDocumentWalker(node,
nodeFilterConstants.SHOW_ELEMENT);
let currentNode = treeWalker.currentNode;
let displayType = this.walker.getNode(currentNode).displayType;
while (treeWalker.nextNode()) {
let currentNode = treeWalker.currentNode;
let computedStyle = CssLogic.getComputedStyle(currentNode);
if (!displayType) {
return null;
}
if (!computedStyle) {
continue;
// Check if the current node is a flex container.
if (displayType == "inline-flex" || displayType == "flex") {
return new FlexboxActor(this, treeWalker.currentNode);
}
// Otherwise, check if this is a flex item or the parent node is a flex container.
while ((currentNode = treeWalker.parentNode())) {
if (!currentNode) {
break;
}
if (computedStyle.display == "inline-flex" || computedStyle.display == "flex") {
let flexboxActor = new FlexboxActor(this, currentNode);
flexboxes.push(flexboxActor);
displayType = this.walker.getNode(currentNode).displayType;
switch (displayType) {
case "inline-flex":
case "flex":
return new FlexboxActor(this, currentNode);
case "contents":
// Continue walking up the tree since the parent node is a content element.
continue;
}
break;
}
return flexboxes;
},
/**
* Returns an array of FlexboxActor objects for all existing flexbox containers found by
* iterating below the given rootNode and optionally including nested frames.
*
* @param {NodeActor} rootNode
* @param {Boolean} traverseFrames
* Whether or not we should iterate through nested frames.
* @return {Array} An array of FlexboxActor objects.
*/
getAllFlexbox(rootNode, traverseFrames) {
let flexboxes = [];
if (!rootNode) {
return flexboxes;
}
if (!traverseFrames) {
return this.getFlexbox(rootNode.rawNode);
}
for (let {document} of this.tabActor.windows) {
flexboxes = [...flexboxes, ...this.getFlexbox(document.documentElement)];
}
return flexboxes;
return null;
},
/**
@ -206,7 +203,7 @@ const LayoutActor = ActorClassWithSpec(layoutSpec, {
* @return {Array} An array of GridActor objects.
*/
getGrids(node) {
if (!node) {
if (isNodeDead(node)) {
return [];
}
@ -232,6 +229,10 @@ const LayoutActor = ActorClassWithSpec(layoutSpec, {
},
});
/**
 * Reports whether the given node can no longer be used: either no node was
 * provided at all, or its underlying raw node has become a dead wrapper.
 *
 * @param {Node|NodeActor} node
 *        The node to check.
 * @return {Boolean|undefined} A truthy value when the node is dead, a falsy
 *         value otherwise.
 */
function isNodeDead(node) {
  if (!node) {
    return true;
  }
  return node.rawNode && Cu.isDeadWrapper(node.rawNode);
}
exports.FlexboxActor = FlexboxActor;
exports.GridActor = GridActor;
exports.LayoutActor = LayoutActor;

View File

@ -112,7 +112,7 @@ const Types = exports.__TypesForTests = [
front: "devtools/shared/fronts/inspector",
},
{
types: ["grid", "layout"],
types: ["flexbox", "grid", "layout"],
spec: "devtools/shared/specs/layout",
front: "devtools/shared/fronts/layout",
},

View File

@ -22,13 +22,12 @@ const layoutSpec = generateActorSpec({
typeName: "layout",
methods: {
getAllFlexbox: {
getCurrentFlexbox: {
request: {
rootNode: Arg(0, "domnode"),
traverseFrames: Arg(1, "nullable:boolean")
node: Arg(0, "domnode"),
},
response: {
flexboxes: RetVal("array:flexbox")
flexbox: RetVal("nullable:flexbox")
}
},

View File

@ -1201,6 +1201,9 @@ nsDocShell::DispatchToTabGroup(TaskCategory aCategory,
return NS_ERROR_FAILURE;
}
if (win->GetDocGroup()) {
return win->GetDocGroup()->Dispatch(aCategory, runnable.forget());
}
RefPtr<mozilla::dom::TabGroup> tabGroup = win->TabGroup();
return tabGroup->Dispatch(aCategory, runnable.forget());
}

View File

@ -2083,12 +2083,13 @@ nsSHistory::SetRootDocShell(nsIDocShell* aDocShell)
mHistoryTracker->AgeAllGenerations();
}
RefPtr<mozilla::dom::TabGroup> tabGroup = win->TabGroup();
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(win);
mHistoryTracker = mozilla::MakeUnique<HistoryTracker>(
this,
mozilla::Preferences::GetUint(CONTENT_VIEWER_TIMEOUT_SECONDS,
CONTENT_VIEWER_TIMEOUT_SECONDS_DEFAULT),
tabGroup->EventTargetFor(mozilla::TaskCategory::Other));
global->EventTargetFor(mozilla::TaskCategory::Other));
}
return NS_OK;

View File

@ -10,6 +10,7 @@
#include "nsCRT.h"
#include "nsError.h"
#include "nsString.h"
#include "nsGlobalWindowInner.h"
#include "nsReadableUtils.h"
#include "nsJSProtocolHandler.h"
#include "nsStringStream.h"
@ -655,8 +656,9 @@ nsJSChannel::AsyncOpen(nsIStreamListener *aListener, nsISupports *aContext)
name = "nsJSChannel::NotifyListener";
}
nsresult rv = NS_DispatchToCurrentThread(
mozilla::NewRunnableMethod(name, this, method));
nsCOMPtr<nsIRunnable> runnable = mozilla::NewRunnableMethod(name, this, method);
nsGlobalWindowInner* window = nsGlobalWindowInner::Cast(mOriginalInnerWindow);
nsresult rv = window->Dispatch(mozilla::TaskCategory::Other, runnable.forget());
if (NS_FAILED(rv)) {
loadGroup->RemoveRequest(this, nullptr, rv);

View File

@ -513,7 +513,12 @@ Performance::RunNotificationObserversTask()
{
mPendingNotificationObserversTask = true;
nsCOMPtr<nsIRunnable> task = new NotifyObserversTask(this);
nsresult rv = NS_DispatchToCurrentThread(task);
nsresult rv;
if (GetOwnerGlobal()) {
rv = GetOwnerGlobal()->Dispatch(TaskCategory::Other, task.forget());
} else {
rv = NS_DispatchToCurrentThread(task);
}
if (NS_WARN_IF(NS_FAILED(rv))) {
mPendingNotificationObserversTask = false;
}

View File

@ -1376,10 +1376,15 @@ ScriptLoader::ProcessExternalScript(nsIScriptElement* aElement,
ReportErrorToConsole(request, rv);
// Asynchronously report the load failure
NS_DispatchToCurrentThread(
nsCOMPtr<nsIRunnable> runnable =
NewRunnableMethod("nsIScriptElement::FireErrorEvent",
aElement,
&nsIScriptElement::FireErrorEvent));
&nsIScriptElement::FireErrorEvent);
if (mDocument) {
mDocument->Dispatch(TaskCategory::Other, runnable.forget());
} else {
NS_DispatchToCurrentThread(runnable);
}
return false;
}
}

View File

@ -109,11 +109,6 @@ OSFileConstantsService::Paths
* The user's home directory
*/
nsString homeDir;
/**
* The user's desktop directory, if there is one. Otherwise this is
* the same as homeDir.
*/
nsString desktopDir;
/**
* The user's 'application data' directory.
* Windows:
@ -130,31 +125,11 @@ OSFileConstantsService::Paths
*/
nsString userApplicationDataDir;
#if defined(XP_WIN)
/**
* The user's application data directory.
*/
nsString winAppDataDir;
/**
* The programs subdirectory in the user's start menu directory.
*/
nsString winStartMenuProgsDir;
#endif // defined(XP_WIN)
#if defined(XP_MACOSX)
/**
* The user's Library directory.
*/
nsString macUserLibDir;
/**
* The Application directory, that stores applications installed in the
* system.
*/
nsString macLocalApplicationsDir;
/**
* The user's trash directory.
*/
nsString macTrashDir;
#endif // defined(XP_MACOSX)
Paths()
@ -164,18 +139,10 @@ OSFileConstantsService::Paths
profileDir.SetIsVoid(true);
localProfileDir.SetIsVoid(true);
homeDir.SetIsVoid(true);
desktopDir.SetIsVoid(true);
userApplicationDataDir.SetIsVoid(true);
#if defined(XP_WIN)
winAppDataDir.SetIsVoid(true);
winStartMenuProgsDir.SetIsVoid(true);
#endif // defined(XP_WIN)
#if defined(XP_MACOSX)
macUserLibDir.SetIsVoid(true);
macLocalApplicationsDir.SetIsVoid(true);
macTrashDir.SetIsVoid(true);
#endif // defined(XP_MACOSX)
}
};
@ -289,18 +256,10 @@ OSFileConstantsService::InitOSFileConstants()
GetPathToSpecialDir(NS_OS_TEMP_DIR, paths->tmpDir);
GetPathToSpecialDir(NS_OS_HOME_DIR, paths->homeDir);
GetPathToSpecialDir(NS_OS_DESKTOP_DIR, paths->desktopDir);
GetPathToSpecialDir(XRE_USER_APP_DATA_DIR, paths->userApplicationDataDir);
#if defined(XP_WIN)
GetPathToSpecialDir(NS_WIN_APPDATA_DIR, paths->winAppDataDir);
GetPathToSpecialDir(NS_WIN_PROGRAMS_DIR, paths->winStartMenuProgsDir);
#endif // defined(XP_WIN)
#if defined(XP_MACOSX)
GetPathToSpecialDir(NS_MAC_USER_LIB_DIR, paths->macUserLibDir);
GetPathToSpecialDir(NS_OSX_LOCAL_APPLICATIONS_DIR, paths->macLocalApplicationsDir);
GetPathToSpecialDir(NS_MAC_TRASH_DIR, paths->macTrashDir);
#endif // defined(XP_MACOSX)
mPaths = Move(paths);
@ -980,36 +939,14 @@ OSFileConstantsService::DefineOSFileConstants(JSContext* aCx,
return false;
}
if (!SetStringProperty(aCx, objPath, "desktopDir", mPaths->desktopDir)) {
return false;
}
if (!SetStringProperty(aCx, objPath, "userApplicationDataDir", mPaths->userApplicationDataDir)) {
return false;
}
#if defined(XP_WIN)
if (!SetStringProperty(aCx, objPath, "winAppDataDir", mPaths->winAppDataDir)) {
return false;
}
if (!SetStringProperty(aCx, objPath, "winStartMenuProgsDir", mPaths->winStartMenuProgsDir)) {
return false;
}
#endif // defined(XP_WIN)
#if defined(XP_MACOSX)
if (!SetStringProperty(aCx, objPath, "macUserLibDir", mPaths->macUserLibDir)) {
return false;
}
if (!SetStringProperty(aCx, objPath, "macLocalApplicationsDir", mPaths->macLocalApplicationsDir)) {
return false;
}
if (!SetStringProperty(aCx, objPath, "macTrashDir", mPaths->macTrashDir)) {
return false;
}
#endif // defined(XP_MACOSX)
// sqlite3 is linked from different places depending on the platform

View File

@ -109,17 +109,18 @@ DecoderFactory::GetDecoder(DecoderType aType,
return decoder.forget();
}
/* static */ already_AddRefed<IDecodingTask>
/* static */ nsresult
DecoderFactory::CreateDecoder(DecoderType aType,
NotNull<RasterImage*> aImage,
NotNull<SourceBuffer*> aSourceBuffer,
const IntSize& aIntrinsicSize,
const IntSize& aOutputSize,
DecoderFlags aDecoderFlags,
SurfaceFlags aSurfaceFlags)
SurfaceFlags aSurfaceFlags,
IDecodingTask** aOutTask)
{
if (aType == DecoderType::UNKNOWN) {
return nullptr;
return NS_ERROR_INVALID_ARG;
}
// Create an anonymous decoder. Interaction with the SurfaceCache and the
@ -135,8 +136,9 @@ DecoderFactory::CreateDecoder(DecoderType aType,
decoder->SetDecoderFlags(aDecoderFlags | DecoderFlags::FIRST_FRAME_ONLY);
decoder->SetSurfaceFlags(aSurfaceFlags);
if (NS_FAILED(decoder->Init())) {
return nullptr;
nsresult rv = decoder->Init();
if (NS_FAILED(rv)) {
return NS_ERROR_FAILURE;
}
// Create a DecodedSurfaceProvider which will manage the decoding process and
@ -151,25 +153,32 @@ DecoderFactory::CreateDecoder(DecoderType aType,
// Attempt to insert the surface provider into the surface cache right away so
// we won't trigger any more decoders with the same parameters.
if (SurfaceCache::Insert(provider) != InsertOutcome::SUCCESS) {
return nullptr;
switch (SurfaceCache::Insert(provider)) {
case InsertOutcome::SUCCESS:
break;
case InsertOutcome::FAILURE_ALREADY_PRESENT:
return NS_ERROR_ALREADY_INITIALIZED;
default:
return NS_ERROR_FAILURE;
}
// Return the surface provider in its IDecodingTask guise.
RefPtr<IDecodingTask> task = provider.get();
return task.forget();
task.forget(aOutTask);
return NS_OK;
}
/* static */ already_AddRefed<IDecodingTask>
/* static */ nsresult
DecoderFactory::CreateAnimationDecoder(DecoderType aType,
NotNull<RasterImage*> aImage,
NotNull<SourceBuffer*> aSourceBuffer,
const IntSize& aIntrinsicSize,
DecoderFlags aDecoderFlags,
SurfaceFlags aSurfaceFlags)
SurfaceFlags aSurfaceFlags,
IDecodingTask** aOutTask)
{
if (aType == DecoderType::UNKNOWN) {
return nullptr;
return NS_ERROR_INVALID_ARG;
}
MOZ_ASSERT(aType == DecoderType::GIF || aType == DecoderType::PNG,
@ -186,8 +195,9 @@ DecoderFactory::CreateAnimationDecoder(DecoderType aType,
decoder->SetDecoderFlags(aDecoderFlags | DecoderFlags::IS_REDECODE);
decoder->SetSurfaceFlags(aSurfaceFlags);
if (NS_FAILED(decoder->Init())) {
return nullptr;
nsresult rv = decoder->Init();
if (NS_FAILED(rv)) {
return NS_ERROR_FAILURE;
}
// Create an AnimationSurfaceProvider which will manage the decoding process
@ -199,13 +209,19 @@ DecoderFactory::CreateAnimationDecoder(DecoderType aType,
// Attempt to insert the surface provider into the surface cache right away so
// we won't trigger any more decoders with the same parameters.
if (SurfaceCache::Insert(provider) != InsertOutcome::SUCCESS) {
return nullptr;
switch (SurfaceCache::Insert(provider)) {
case InsertOutcome::SUCCESS:
break;
case InsertOutcome::FAILURE_ALREADY_PRESENT:
return NS_ERROR_ALREADY_INITIALIZED;
default:
return NS_ERROR_FAILURE;
}
// Return the surface provider in its IDecodingTask guise.
RefPtr<IDecodingTask> task = provider.get();
return task.forget();
task.forget(aOutTask);
return NS_OK;
}
/* static */ already_AddRefed<IDecodingTask>

View File

@ -64,15 +64,21 @@ public:
* @param aDecoderFlags Flags specifying the behavior of this decoder.
* @param aSurfaceFlags Flags specifying the type of output this decoder
* should produce.
* @param aOutTask Task representing the decoder.
* @return NS_OK if the decoder has been created/initialized successfully;
* NS_ERROR_ALREADY_INITIALIZED if there is already an active decoder
* for this image;
* Else some other unrecoverable error occurred.
*/
static already_AddRefed<IDecodingTask>
static nsresult
CreateDecoder(DecoderType aType,
NotNull<RasterImage*> aImage,
NotNull<SourceBuffer*> aSourceBuffer,
const gfx::IntSize& aIntrinsicSize,
const gfx::IntSize& aOutputSize,
DecoderFlags aDecoderFlags,
SurfaceFlags aSurfaceFlags);
SurfaceFlags aSurfaceFlags,
IDecodingTask** aOutTask);
/**
* Creates and initializes a decoder for animated images of type @aType.
@ -88,14 +94,20 @@ public:
* @param aDecoderFlags Flags specifying the behavior of this decoder.
* @param aSurfaceFlags Flags specifying the type of output this decoder
* should produce.
* @param aOutTask Task representing the decoder.
* @return NS_OK if the decoder has been created/initialized successfully;
* NS_ERROR_ALREADY_INITIALIZED if there is already an active decoder
* for this image;
* Else some other unrecoverable error occurred.
*/
static already_AddRefed<IDecodingTask>
static nsresult
CreateAnimationDecoder(DecoderType aType,
NotNull<RasterImage*> aImage,
NotNull<SourceBuffer*> aSourceBuffer,
const gfx::IntSize& aIntrinsicSize,
DecoderFlags aDecoderFlags,
SurfaceFlags aSurfaceFlags);
SurfaceFlags aSurfaceFlags,
IDecodingTask** aOutTask);
/**
* Creates and initializes a metadata decoder of type @aType. This decoder

View File

@ -27,8 +27,6 @@ Downscaler::Downscaler(const nsIntSize& aTargetSize)
, mHasAlpha(true)
, mFlipVertically(false)
{
MOZ_ASSERT(gfxPrefs::ImageDownscaleDuringDecodeEnabled(),
"Downscaling even though downscale-during-decode is disabled?");
MOZ_ASSERT(mTargetSize.width > 0 && mTargetSize.height > 0,
"Invalid target size");
}

View File

@ -98,10 +98,7 @@ public:
, mInputRow(0)
, mOutputRow(0)
, mHasAlpha(true)
{
MOZ_ASSERT(gfxPrefs::ImageDownscaleDuringDecodeEnabled(),
"Downscaling even though downscale-during-decode is disabled?");
}
{ }
~DownscalingFilter()
{

View File

@ -1249,10 +1249,29 @@ RasterImage::Decode(const IntSize& aSize,
// Create a decoder.
RefPtr<IDecodingTask> task;
if (mAnimationState && aPlaybackType == PlaybackType::eAnimated) {
task = DecoderFactory::CreateAnimationDecoder(mDecoderType, WrapNotNull(this),
mSourceBuffer, mSize,
decoderFlags, surfaceFlags);
nsresult rv;
bool animated = mAnimationState && aPlaybackType == PlaybackType::eAnimated;
if (animated) {
rv = DecoderFactory::CreateAnimationDecoder(mDecoderType, WrapNotNull(this),
mSourceBuffer, mSize,
decoderFlags, surfaceFlags,
getter_AddRefs(task));
} else {
rv = DecoderFactory::CreateDecoder(mDecoderType, WrapNotNull(this),
mSourceBuffer, mSize, aSize,
decoderFlags, surfaceFlags,
getter_AddRefs(task));
}
if (rv == NS_ERROR_ALREADY_INITIALIZED) {
// We raced with an already pending decoder, and it finished before we
// managed to insert the new decoder. Pretend we did a sync call to make
// the caller lookup in the surface cache again.
MOZ_ASSERT(!task);
return true;
}
if (animated) {
// We pass false for aAllowInvalidation because we may be asked to use
// async notifications. Any potential invalidation here will be sent when
// RequestRefresh is called, or NotifyDecodeComplete.
@ -1261,17 +1280,15 @@ RasterImage::Decode(const IntSize& aSize,
#endif
mAnimationState->UpdateState(mAnimationFinished, this, mSize, false);
MOZ_ASSERT(rect.IsEmpty());
} else {
task = DecoderFactory::CreateDecoder(mDecoderType, WrapNotNull(this),
mSourceBuffer, mSize, aSize,
decoderFlags, surfaceFlags);
}
// Make sure DecoderFactory was able to create a decoder successfully.
if (!task) {
if (NS_FAILED(rv)) {
MOZ_ASSERT(!task);
return false;
}
MOZ_ASSERT(task);
mDecodeCount++;
// We're ready to decode; start the decoder.

View File

@ -357,7 +357,7 @@ imgRequestProxy::AddToOwner(nsIDocument* aLoadingDocument)
mTabGroup = docGroup->GetTabGroup();
MOZ_ASSERT(mTabGroup);
mEventTarget = mTabGroup->EventTargetFor(mozilla::TaskCategory::Other);
mEventTarget = docGroup->EventTargetFor(mozilla::TaskCategory::Other);
MOZ_ASSERT(mEventTarget);
}
}

View File

@ -172,3 +172,141 @@ function wasmGetScriptBreakpoints(wasmScript) {
});
return result;
}
// Shared helpers for wasm jit-tests: feature-detect single-step profiling,
// normalize raw profiler stack strings into a compact notation, and compare
// observed stacks against expected sequences.
const WasmHelpers = {};

// Single-step profiling only exists in some shell builds; probe for it once
// so tests can bail out (or degrade) instead of throwing mid-run.
(function() {
    let enabled = false;
    try {
        enableSingleStepProfiling();
        disableSingleStepProfiling();
        enabled = true;
    } catch (e) {
        print(e.message);
    }
    WasmHelpers.isSingleStepProfilingEnabled = enabled;
})();

// Rewrite one raw profiler stack (comma-separated frames) into the compact
// notation used by the tests: wasm function indices stay as bare numbers,
// entry trampolines become ">" (or "!>" for the slow path when
// `preciseStacks` is true), exit trampolines become "<", native callouts
// keep their name. Frames matching none of the patterns are dropped.
WasmHelpers._normalizeStack = (stack, preciseStacks) => {
    var wasmFrameTypes = [
        {re:/^jit call to int64 wasm function$/, sub:"i64>"},
        {re:/^out-of-line coercion for jit entry arguments \(in wasm\)$/, sub:"ool>"},
        {re:/^wasm-function\[(\d+)\] \(.*\)$/, sub:"$1"},
        {re:/^(fast|slow) exit trampoline (to native )?\(in wasm\)$/, sub:"<"},
        {re:/^call to[ asm.js]? native (.*) \(in wasm\)$/, sub:"$1"},
        {re:/ \(in wasm\)$/, sub:""}
    ];

    // Entry trampolines are matched first so they aren't swallowed by the
    // generic "(in wasm)" pattern below.
    let entryRegexps;
    if (preciseStacks) {
        entryRegexps = [
            {re:/^slow entry trampoline \(in wasm\)$/, sub:"!>"},
            {re:/^fast entry trampoline \(in wasm\)$/, sub:">"},
        ];
    } else {
        entryRegexps = [
            {re:/^(fast|slow) entry trampoline \(in wasm\)$/, sub:">"}
        ];
    }
    wasmFrameTypes = entryRegexps.concat(wasmFrameTypes);

    var framesIn = stack.split(',');
    var framesOut = [];
    for (let frame of framesIn) {
        for (let {re, sub} of wasmFrameTypes) {
            if (re.test(frame)) {
                framesOut.push(frame.replace(re, sub));
                break;
            }
        }
    }
    return framesOut.join(',');
};

// Collapse runs of identical adjacent entries in place; single-step sampling
// records one stack per instruction, so long runs of duplicates are expected.
WasmHelpers._removeAdjacentDuplicates = array => {
    if (array.length < 2)
        return;
    let i = 0;
    for (let j = 1; j < array.length; j++) {
        if (array[i] !== array[j])
            array[++i] = array[j];
    }
    array.length = i + 1;
}

// Normalize every captured stack and drop adjacent duplicates; returns a new
// array and leaves `stacks` untouched.
WasmHelpers.normalizeStacks = (stacks, preciseStacks = false) => {
    let observed = [];
    for (let i = 0; i < stacks.length; i++)
        observed[i] = WasmHelpers._normalizeStack(stacks[i], preciseStacks);
    WasmHelpers._removeAdjacentDuplicates(observed);
    return observed;
};

// Element-wise equality of two normalized stack arrays.
WasmHelpers._compareStacks = (got, expect) => {
    if (got.length != expect.length) {
        return false;
    }
    for (let i = 0; i < got.length; i++) {
        if (got[i] !== expect[i])
            return false;
    }
    return true;
}

// Assert that the (imprecisely normalized) observed stacks equal `expect`,
// printing the first mismatch before failing via assertEq.
WasmHelpers.assertEqImpreciseStacks = (got, expect) => {
    let observed = WasmHelpers.normalizeStacks(got, /* precise */ false);
    let same = WasmHelpers._compareStacks(observed, expect);
    if (!same) {
        if (observed.length != expect.length) {
            print(`Got:\n${observed.toSource()}\nExpect:\n${expect.toSource()}`);
            assertEq(observed.length, expect.length);
        }
        for (let i = 0; i < observed.length; i++) {
            if (observed[i] !== expect[i]) {
                print(`On stack ${i}, Got:\n${observed[i]}\nExpect:\n${expect[i]}`);
                assertEq(observed[i], expect[i]);
            }
        }
    }
}

// Assert an exception's JS stack has exactly the expected callsite names
// (the part of each "name@url:line:col" frame before the '@').
WasmHelpers.assertStackTrace = (exception, expected) => {
    let callsites = exception.stack.trim().split('\n').map(line => line.split('@')[0]);
    assertEq(callsites.length, expected.length);
    for (let i = 0; i < callsites.length; i++) {
        assertEq(callsites[i], expected[i]);
    }
};

// Line number `n` lines after the caller's current line, parsed from a fresh
// Error stack. NOTE(review): depends on the shell's "name@file:line:col"
// stack format; frame [1] is the caller.
WasmHelpers.nextLineNumber = (n=1) => {
    return +(new Error().stack).split('\n')[1].split(':')[1] + n;
}

// Start single-step profiling, as a no-op when unsupported.
WasmHelpers.startProfiling = () => {
    if (!WasmHelpers.isSingleStepProfilingEnabled)
        return;
    enableSingleStepProfiling();
}

// Stop single-step profiling and return the captured stacks, or undefined
// when unsupported.
WasmHelpers.endProfiling = () => {
    if (!WasmHelpers.isSingleStepProfilingEnabled)
        return;
    return disableSingleStepProfiling();
}

// Compare observed stacks (precisely normalized) against a list of plausible
// expected stack sequences. Returns the index of the first matching sequence,
// null when single-step profiling is unsupported, and throws when nothing
// matches.
WasmHelpers.assertEqPreciseStacks = (observed, expectedStacks) => {
    if (!WasmHelpers.isSingleStepProfilingEnabled)
        return null;

    observed = WasmHelpers.normalizeStacks(observed, /* precise */ true);

    for (let i = 0; i < expectedStacks.length; i++) {
        if (WasmHelpers._compareStacks(observed, expectedStacks[i]))
            return i;
    }

    throw new Error(`no plausible stacks found, observed: ${observed.join('/')}
Expected one of:
${expectedStacks.map(stacks => stacks.join("/")).join('\n')}`);
}

View File

@ -37,10 +37,10 @@ function assertStackContainsSeq(got, expect)
for (var j = 0; j < parts.length; j++) {
var frame = parts[j];
frame = frame.replace(/ \([^\)]*\)/g, "");
frame = frame.replace(/fast FFI trampoline to native/g, "N");
frame = frame.replace(/fast exit trampoline to native/g, "N");
frame = frame.replace(/^call to( asm.js)? native .*\(in wasm\)$/g, "N");
frame = frame.replace(/(fast|slow) FFI trampoline/g, "<");
frame = frame.replace(/slow entry trampoline/g, ">");
frame = frame.replace(/(fast|slow) exit trampoline/g, "<");
frame = frame.replace(/(fast|slow) entry trampoline/g, ">");
frame = frame.replace(/(\/[^\/,<]+)*\/testProfiling.js/g, "");
frame = frame.replace(/testBuiltinD2D/g, "");
frame = frame.replace(/testBuiltinF2F/g, "");

View File

@ -0,0 +1,60 @@
// Test that the wasm jit entry applies the proper JS->wasm coercions
// (x|0 for i32, Math.fround for f32, +x for f64) to all kinds of JS values,
// including values whose coercion throws (e.g. Symbol), and that behavior
// stays consistent across JIT tier-up.
let { exports } = wasmEvalText(`(module
    (func (export "i32") (result i32) (param i32)
        get_local 0
    )
    (func (export "f32") (result f32) (param f32)
        get_local 0
    )
    (func (export "f64") (result f64) (param f64)
        get_local 0
    )
)`);

const options = getJitCompilerOptions();

// Iterate well past the Ion warmup trigger so the calling loop tiers up and
// the jit entry path is exercised, not just interpreter/baseline calls.
const jitThreshold = options['ion.warmup.trigger'] * 2;

// Reference implementations of the coercion each wasm signature implies.
let coercions = {
    i32(x) { return x|0; },
    f32(x) { return Math.fround(x); },
    f64(x) { return +x; }
}

// Call func(arg) repeatedly and check it agrees with the reference coercion:
// same result, or an exception carrying the same message when the coercion
// itself throws.
function call(func, coercion, arg) {
    let expected;
    try {
        expected = coercion(arg);
    } catch(e) {
        expected = e.message;
    }

    for (var i = jitThreshold; i --> 0;) {
        try {
            assertEq(func(arg), expected);
        } catch(e) {
            assertEq(e.message, expected);
        }
    }
}

// Inputs chosen to hit every coercion path: plain ints/doubles, -0,
// infinities, out-of-int32-range values, booleans, a throwing Symbol,
// undefined/null, objects with and without valueOf, and strings.
const inputs = [
    42,
    3.5,
    -0,
    -Infinity,
    2**32,
    true,
    Symbol(),
    undefined,
    null,
    {},
    { valueOf() { return 13.37; } },
    "bonjour"
];

for (let arg of inputs) {
    for (let func of ['i32', 'f32', 'f64']) {
        call(exports[func], coercions[func], arg);
    }
}

View File

@ -0,0 +1,15 @@
// Regression test (reduced fuzz testcase): compiling an asm.js module inside
// nested evaluate() calls, then instantiating it with bogus non-asm.js
// arguments, must not crash the engine.
if (typeof evaluate === 'undefined')
    quit();

evaluate(`
var f = (function module() {
    "use asm";
    function f(i) {
        i=i|0;
        if (!i)
            return;
    }
    return f;
})();
evaluate(\`new f({}, {});\`);
`);

View File

@ -0,0 +1,18 @@
// Regression test (reduced fuzz testcase): instantiating a wasm module while
// a Debugger with an onExceptionUnwind hook is attached to this global must
// not crash.
var g = newGlobal();
g.parent = this;
g.eval("Debugger(parent).onExceptionUnwind = function () {};");
lfModule = new WebAssembly.Module(wasmTextToBinary(`
(module
    (export "f" $func0)
    (func $func0 (result i32)
        i32.const -1
    )
)
`));
processModule(lfModule);
// Instantiate the module twice; `jscode` is unused, kept as-is from the
// original fuzz testcase (implicit globals included).
function processModule(module, jscode) {
    for (let i = 0; i < 2; ++i) {
        imports = {}
        instance = new WebAssembly.Instance(module, imports);
    }
}

View File

@ -0,0 +1,85 @@
// Test calls through the wasm jit entry when the callee's signature contains
// i64 (which cannot be passed to or from JS): the call must throw, and both
// the profiler stacks and the Error's callsites/line numbers must be correct.
const options = getJitCompilerOptions();

// These tests need at least baseline to make sense.
if (!options['baseline.enable'])
    quit();

const { nextLineNumber, startProfiling, endProfiling, assertEqPreciseStacks } = WasmHelpers;

// Iterate well past the Ion warmup trigger so the calling loop tiers up.
const TRIGGER = options['ion.warmup.trigger'] + 10;
const ITER = 2 * TRIGGER;
// The single (late) iteration on which the i64 function is called.
const EXCEPTION_ITER = ITER - 2;

enableGeckoProfiling();

var instance = wasmEvalText(`(module
    (func $add (export "add") (result i32) (param i32) (param i32)
        get_local 0
        get_local 1
        i32.add
    )
    (func $addi64 (export "add64") (result i64) (param i32) (param i32)
        get_local 0
        get_local 1
        call $add
        i64.extend_s/i32
    )
)`).exports;

var callToMain;

function main() {
    var arr = [instance.add, (x,y)=>x+y];
    // NOTE: line offsets are load-bearing: the arr[i%2](...) call must sit
    // exactly 6 lines after the nextLineNumber(6) call below.
    var arrayCallLine = nextLineNumber(6);

    for (var i = 0; i < ITER; i++) {
        var caught = null;
        startProfiling();
        try {
            arr[i%2](i, i);
        } catch(e) {
            caught = e;
        }
        let profilingStack = endProfiling();

        // Swap the i64-signature function in for exactly one iteration,
        // then restore the i32 one.
        if (i === EXCEPTION_ITER - 1) {
            arr[0] = instance.add64;
        } else if (i === EXCEPTION_ITER) {
            arr[0] = instance.add;
        }

        assertEq(!!caught, i === EXCEPTION_ITER);
        if (caught) {
            assertEqPreciseStacks(profilingStack, [
                // Error stack: control flow is redirected to a builtin thunk
                // then calling into C++ from the wasm entry before jumping to
                // the wasm jit entry exception handler.
                ['', '>', '<,>', 'i64>,>', '<,>', '>', ''],
                [''] // the jit path wasn't taken (interpreter/baseline only).
            ]);

            assertEq(caught.message, 'cannot pass i64 to or from JS');

            let stack = caught.stack.split('\n');

            // Which callsites appear on the error stack.
            let callsites = stack.map(s => s.split('@')[0]);
            assertEq(callsites[0], 'main');
            assertEq(callsites[1], ''); // global scope

            // Which line numbers appear in the error stack.
            let lines = stack.map(s => s.split(':')[1]);
            assertEq(+lines[0], arrayCallLine);
            assertEq(+lines[1], callToMain);
        } else if ((i % 2) == 0) {
            assertEqPreciseStacks(profilingStack, [
                ['', '>', '0,>', '>', ''], // fast path
                ['', '!>', '0,!>', '!>', ''], // slow path
            ]);
        }
    }
}

// `main();` must be on the very next line: callToMain records its line number.
callToMain = nextLineNumber();
main();

View File

@ -0,0 +1,78 @@
// Test that scripted valueOf callbacks invoked during jit-entry argument
// coercion show up correctly in both the single-step profiler stacks and in
// Error stack traces, including when a valueOf throws.
const options = getJitCompilerOptions();

// These tests need at least baseline to make sense.
if (!options['baseline.enable'])
    quit();

const { assertStackTrace, startProfiling, endProfiling, assertEqPreciseStacks } = WasmHelpers;

// Iterate past the baseline warmup trigger so the wasm function tiers up.
const TRIGGER = options['baseline.warmup.trigger'] + 10;
const ITER = 2 * TRIGGER;
const EXCEPTION_ITER = TRIGGER + 5;

// Expected normalized stacks (precise notation: ">" fast entry, "!>" slow
// entry, "ool>" out-of-line argument coercion, "<" exit trampoline).
const SLOW_ENTRY_STACK = ['', '!>', '0,!>', '!>', ''];
const FAST_ENTRY_STACK = ['', '>', '0,>', '>', ''];
const FAST_OOL_ENTRY_STACK = ['', '>', '<,>', 'ool>,>', '<,>', '>', '0,>', '>', ''];
const EXCEPTION_ENTRY_STACK = ['', '>', '<,>', 'ool>,>', '<,>', '>', ''];

enableGeckoProfiling();

for (let type of ['i32', 'f32', 'f64']) {
    var instance = wasmEvalText(`(module
        (func $add (export "add") (result ${type}) (param ${type}) (param ${type})
            get_local 0
            get_local 1
            ${type}.add
        )
    )`).exports;

    // Call add(a, b); only the iteration where b === EXCEPTION_ITER is
    // expected to throw (from innerValueOf below), and then the Error stack
    // must list both nested valueOf frames.
    function loopBody(a, b) {
        var caught = null;
        try {
            instance.add(a, b);
        } catch(e) {
            assertEq(e.message, "ph34r");
            assertStackTrace(e, ['innerValueOf', 'outerValueOf', 'loopBody', 'main', '']);
            caught = e;
        }
        assertEq(!!caught, b === EXCEPTION_ITER);
    }

    var x = 0;
    function main() {
        let observedStacks = [0, 0, 0];
        for (var i = 0; i < ITER; i++) {
            // Plain-number call: fast or slow entry, no OOL coercion.
            startProfiling();
            loopBody(i + 1, i + EXCEPTION_ITER + 1);
            assertEqPreciseStacks(endProfiling(), [FAST_ENTRY_STACK, SLOW_ENTRY_STACK]);

            // On the EXCEPTION_ITER iteration, make the inner valueOf throw.
            if (i === EXCEPTION_ITER) {
                x = { valueOf: function innerValueOf() { throw new Error("ph34r"); }};
            } else {
                x = i;
            }

            // Object argument: coercion goes through the OOL path.
            startProfiling();
            loopBody({valueOf: function outerValueOf() { return x|0; }}, i);
            let stack = endProfiling();

            let which = assertEqPreciseStacks(stack, [FAST_OOL_ENTRY_STACK, SLOW_ENTRY_STACK, EXCEPTION_ENTRY_STACK]);
            if (which !== null) {
                if (i === EXCEPTION_ITER) {
                    assertEq(which, 2);
                }
                observedStacks[which]++;
            }
        }

        // Either profiling was unsupported (sum 0) or every iteration was
        // classified (sum ITER).
        let sum = observedStacks.reduce((acc, x) => acc + x);
        assertEq(sum === 0 || sum === ITER, true);
        if (sum === ITER) {
            assertEq(observedStacks[0] > 0, true, "the fast entry should have been taken at least once");
            assertEq(observedStacks[2], 1, "the error path should have been taken exactly once");
        }
    }

    main();
}

disableGeckoProfiling();

View File

@ -0,0 +1,126 @@
///////////////////////////////////////////////////////////////////////////////
// FIRST TEST /////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////

// An imported JS function, called from deep inside wasm on exactly one
// iteration, throws; the exception must propagate cleanly back out through
// the wasm jit entry after many warm iterations.
(function() {
    function debug() {
        throw new Error('gotcha');
    }

    var imports = {
        numCalls:0,
        main: {
            f() {
                debug();
            }
        }
    };

    var instance = new WebAssembly.Instance(new WebAssembly.Module(wasmTextToBinary(`(module
        (import $main "main" "f" (func))
        (func $lol (export "add") (result i32) (param i32) (param i32)
            get_local 0
            get_local 1
            call $add
        )
        (func $add (result i32) (param i32) (param i32)
            get_local 0
            i32.const 5000
            i32.eq
            if
                call $main
            end
            get_local 0
            get_local 1
            i32.add
        )
    )`)), imports).exports;

    // Only i === 5000 routes through the throwing import.
    function loopBody(i) {
        var caught = null;
        try {
            assertEq(instance.add(i, i), 2 * i);
        } catch(e) {
            // TODO check stack trace
            print(e.stack);
            caught = e;
        }
        assertEq(!!caught, i === 5000);
    }

    function main() {
        for (var i = 0; i < 100000; i++) {
            loopBody(i);
        }
        assertEq(i, 100000);
    }
    main();
})();
///////////////////////////////////////////////////////////////////////////////
// SECOND TEST ////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////

// Same shape as the first test, but the imported function triggers a GC
// instead of throwing, and the `unreachable` right after the callout then
// traps — so i === 5000 must still throw, and the GC must not corrupt the
// in-flight wasm activation.
(function() {
    function debug() {
        gc();
    }

    var imports = {
        numCalls:0,
        main: {
            f() {
                debug();
            }
        }
    };

    var instance = new WebAssembly.Instance(new WebAssembly.Module(wasmTextToBinary(`(module
        (import $main "main" "f" (func))
        (func $lol (export "add") (result i32) (param i32) (param i32)
            get_local 0
            get_local 1
            call $add
        )
        (func $add (result i32) (param i32) (param i32)
            get_local 0
            i32.const 5000
            i32.eq
            if
                call $main
                unreachable
            end
            get_local 0
            get_local 1
            i32.add
        )
    )`)), imports).exports;

    // Only i === 5000 hits the gc + unreachable path.
    function loopBody(i) {
        var caught = null;
        try {
            assertEq(instance.add(i, i), 2 * i);
        } catch(e) {
            // TODO check stack trace
            print(e.stack);
            caught = e;
        }
        assertEq(!!caught, i === 5000);
    }

    function main() {
        for (var i = 0; i < 100000; i++) {
            loopBody(i);
        }
        assertEq(i, 100000);
    }
    main();
})();

View File

@ -0,0 +1,55 @@
// Check that the wasm jit entry keeps the callee and already-evaluated
// arguments alive across argument coercion: a scripted valueOf drops the
// last visible references (the instance binding, the other argument) and
// forces a GC while the call is being set up.
const options = getJitCompilerOptions();

// These tests need at least baseline to make sense.
if (!options['baseline.enable'])
    quit();

// Iterate past the baseline warmup trigger so the wasm function tiers up.
const TRIGGER = options['baseline.warmup.trigger'] + 10;
const ITER = 2 * TRIGGER;
const EXCEPTION_ITER = TRIGGER + 5;

for (let type of ['i32', 'f32', 'f64']) {
    var instance = wasmEvalText(`(module
        (func $add (export "add") (result ${type}) (param ${type}) (param ${type})
            get_local 0
            get_local 1
            ${type}.add
        )
    )`).exports;

    // Call add(a, b). No call here is expected to throw (b never equals
    // EXCEPTION_ITER); the assert just flags unexpected exceptions.
    function loopBody(a, b) {
        var caught = null;
        try {
            instance.add(a, b);
        } catch(e) {
            caught = e;
        }
        assertEq(!!caught, b === EXCEPTION_ITER);
    }

    var x = 0;
    function main() {
        for (var i = 0; i <= EXCEPTION_ITER; i++) {
            loopBody(i + 1, i + EXCEPTION_ITER + 1);

            let otherArg = { valueOf() { return i|0; } };
            if (i === EXCEPTION_ITER) {
                // On the final iteration, coercing the first argument runs
                // this valueOf, which drops all visible references and GCs
                // before the call proceeds.
                x = { valueOf: function innerValueOf() {
                    // Suppress callee.
                    instance = null;
                    // Suppress other arguments.
                    otherArg = null;
                    gc();
                    return 42;
                }};
            } else {
                x = i;
            }
            loopBody({valueOf: function outerValueOf() { return x|0; }}, otherArg);
        }
    }
    main();
}

View File

@ -0,0 +1,174 @@
// Exercise the various JS -> wasm call paths that can use the wasm jit
// entry: direct known calls, generic (megamorphic) calls, scripted
// getters/setters, and Function.prototype.apply/call — each with and without
// an argument-count mismatch (which goes through the arguments rectifier).
var ITERATIONS = 10;
var INNER_ITERATIONS = 100;

let instance = wasmEvalText(`(module
    (func (export "add") (result i32) (param i32) (param i32)
        get_local 0
        get_local 1
        i32.add
    )
    (func (export "no_arg") (result i32)
        i32.const 42
        i32.const 58
        i32.add
    )
    (global $g (mut i32) (i32.const 0))
    (func (export "set_global_one") (param i32)
        get_local 0
        set_global $g
    )
    (func (export "set_global_two") (param i32) (param i32)
        get_local 0
        get_local 1
        i32.add
        set_global $g
    )
    (func (export "glob") (result i32)
        get_global $g
    )
)`).exports;

// Run `func` ITERATIONS times; `name` is unused, kept for readability at the
// call sites.
function run(name, func) {
    for (let i = ITERATIONS; i --> 0;) {
        func();
    }
}

// Direct call with the exact argument count.
function testCallKnown() {
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(instance.add(i, i + 1), 2*i + 1);
    }
}

// Direct call with one missing argument: the rectifier pads with undefined,
// which coerces to 0 for i32.
function testCallKnownRectifying() {
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(instance.add(i + 1), i+1);
    }
}
// Plain-JS counterpart of the wasm "add" export: coerce both operands with
// ToInt32 and wrap the sum back into int32 range, matching i32.add.
function jsAdd(x, y) {
    const lhs = x | 0;
    const rhs = y | 0;
    return (lhs + rhs) | 0;
}
// Generic call site alternating between the wasm export and a plain JS
// function, so the call IC stays polymorphic.
function testCallGeneric() {
    var arr = [instance.add, jsAdd];
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(arr[i%2](i, i+1), 2*i + 1);
    }
}

// Same, with a missing argument (undefined coerces to 0 for i32).
function testCallGenericRectifying() {
    var arr = [instance.add, jsAdd];
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(arr[i%2](i+1), i + 1);
    }
}

// Wasm export installed as a scripted getter.
function testCallScriptedGetter() {
    var obj = {};
    Object.defineProperty(obj, 'x', {
        get: instance.no_arg
    });
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(obj.x, 100);
    }
}

// Getter whose wasm callee expects two arguments; the rectifier supplies
// undefined, so add(0, 0) === 0.
function testCallScriptedGetterRectifying() {
    var obj = {};
    Object.defineProperty(obj, 'x', {
        // Missing two arguments.
        get: instance.add
    });
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(obj.x, 0);
    }
}

// Wasm export installed as a scripted setter; the wasm global records the
// last value written.
function testCallScriptedSetter() {
    var obj = {};
    Object.defineProperty(obj, 'x', {
        set: instance.set_global_one
    });
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        obj.x = i;
    }
    assertEq(instance.glob(), INNER_ITERATIONS-1);
}

// Setter whose wasm callee expects two arguments; the second is rectified to
// undefined -> 0, so the global still ends up holding the last i.
function testCallScriptedSetterRectifying() {
    var obj = {};
    Object.defineProperty(obj, 'x', {
        set: instance.set_global_two
    });
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        obj.x = i;
    }
    assertEq(instance.glob(), INNER_ITERATIONS-1);
}

// Function.prototype.apply with an array of arguments.
function testFunctionApplyArray() {
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(instance.add.apply(null, [i, i + 1]), 2*i+1);
    }
}

// apply with a too-short array (rectifier path).
function testFunctionApplyArrayRectifying() {
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(instance.add.apply(null, [i + 1]), i+1);
    }
}

// Function.prototype.apply with an `arguments` object.
function testFunctionApplyArgs() {
    function wrapper() {
        assertEq(instance.add.apply(null, arguments), 2*arguments[0]+1);
    }
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        wrapper(i, i + 1);
    }
}

// apply with a too-short `arguments` object (rectifier path).
function testFunctionApplyArgsRectifying() {
    function wrapper() {
        assertEq(instance.add.apply(null, arguments), arguments[0]);
    }
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        wrapper(i + 1);
    }
}

// Function.prototype.call with the exact argument count.
function testFunctionCall() {
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(instance.add.call(null, i, i + 1), 2*i+1);
    }
}

// Function.prototype.call with a missing argument (rectifier path).
function testFunctionCallRectifying() {
    for (let i = 0; i < INNER_ITERATIONS; i++) {
        assertEq(instance.add.call(null, i + 1), i+1);
    }
}

run('call known', testCallKnown);
run('call known rectifying', testCallKnownRectifying);
run('call generic', testCallGeneric);
run('call generic rectifying', testCallGenericRectifying);
run('scripted getter', testCallScriptedGetter);
run('scripted getter rectifiying', testCallScriptedGetterRectifying);
run('scripted setter', testCallScriptedSetter);
run('scripted setter rectifiying', testCallScriptedSetterRectifying);
run('function.apply array', testFunctionApplyArray);
run('function.apply array rectifying', testFunctionApplyArrayRectifying);
run('function.apply args', testFunctionApplyArgs);
run('function.apply args rectifying', testFunctionApplyArgsRectifying);
run('function.call', testFunctionCall);
run('function.call rectifying', testFunctionCallRectifying);

View File

@ -1,78 +1,20 @@
try {
enableSingleStepProfiling();
disableSingleStepProfiling();
} catch(e) {
// Single step profiling not supported here.
if (!WasmHelpers.isSingleStepProfilingEnabled)
quit();
}
const Module = WebAssembly.Module;
const Instance = WebAssembly.Instance;
const Table = WebAssembly.Table;
function normalize(stack)
{
var wasmFrameTypes = [
{re:/^slow entry trampoline \(in wasm\)$/, sub:">"},
{re:/^wasm-function\[(\d+)\] \(.*\)$/, sub:"$1"},
{re:/^(fast|slow) FFI trampoline (to native )?\(in wasm\)$/, sub:"<"},
{re:/^call to[ asm.js]? native (.*) \(in wasm\)$/, sub:"$1"},
{re:/ \(in wasm\)$/, sub:""}
];
const { assertEqImpreciseStacks, startProfiling, endProfiling } = WasmHelpers;
var framesIn = stack.split(',');
var framesOut = [];
for (let frame of framesIn) {
for (let {re, sub} of wasmFrameTypes) {
if (re.test(frame)) {
framesOut.push(frame.replace(re, sub));
break;
}
}
}
return framesOut.join(',');
}
function removeAdjacentDuplicates(array) {
if (array.length < 2)
return;
let i = 0;
for (let j = 1; j < array.length; j++) {
if (array[i] !== array[j])
array[++i] = array[j];
}
array.length = i + 1;
}
function assertEqStacks(got, expect)
{
for (let i = 0; i < got.length; i++)
got[i] = normalize(got[i]);
removeAdjacentDuplicates(got);
if (got.length != expect.length) {
print(`Got:\n${got.toSource()}\nExpect:\n${expect.toSource()}`);
assertEq(got.length, expect.length);
}
for (let i = 0; i < got.length; i++) {
if (got[i] !== expect[i]) {
print(`On stack ${i}, Got:\n${got[i]}\nExpect:\n${expect[i]}`);
assertEq(got[i], expect[i]);
}
}
}
function test(code, importObj, expect)
function test(code, importObj, expectedStacks)
{
enableGeckoProfiling();
var f = wasmEvalText(code, importObj).exports[""];
enableSingleStepProfiling();
startProfiling();
f();
assertEqStacks(disableSingleStepProfiling(), expect);
assertEqImpreciseStacks(endProfiling(), expectedStacks);
disableGeckoProfiling();
}
@ -137,7 +79,8 @@ if (getBuildConfiguration()["arm-simulator"]) {
)
)`,
this,
["", ">", "0,>", "<,0,>", `i64.${op},0,>`, "<,0,>", "0,>", ">", ""]);
["", ">", "0,>", "<,0,>", `i64.${op},0,>`, "<,0,>", "0,>", ">", ""],
);
}
}
@ -149,7 +92,8 @@ test(`(module
)
)`,
this,
["", ">", "0,>", "<,0,>", "current_memory,0,>", "<,0,>", "0,>", ">", ""]);
["", ">", "0,>", "<,0,>", "current_memory,0,>", "<,0,>", "0,>", ">", ""],
);
// grow_memory is a callout.
test(`(module
@ -160,7 +104,8 @@ test(`(module
)
)`,
this,
["", ">", "0,>", "<,0,>", "grow_memory,0,>", "<,0,>", "0,>", ">", ""]);
["", ">", "0,>", "<,0,>", "grow_memory,0,>", "<,0,>", "0,>", ">", ""],
);
// A few math builtins.
for (let type of ['f32', 'f64']) {
@ -184,7 +129,7 @@ for (let type of ['f32', 'f64']) {
var f = wasmEvalText(code).exports[""];
enableSingleStepProfiling();
assertThrowsInstanceOf(f, error);
assertEqStacks(disableSingleStepProfiling(), expect);
assertEqImpreciseStacks(disableSingleStepProfiling(), expect);
disableGeckoProfiling();
}
@ -230,7 +175,7 @@ for (let type of ['f32', 'f64']) {
enableGeckoProfiling();
enableSingleStepProfiling();
assertEq(e.tbl.get(0)(), 42);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "0,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "0,>", ">", ""]);
disableGeckoProfiling();
assertEq(e.foo(), 42);
@ -240,7 +185,7 @@ for (let type of ['f32', 'f64']) {
enableGeckoProfiling();
enableSingleStepProfiling();
assertEq(e.tbl.get(1)(), 13);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "1,>", ">", ""]);
disableGeckoProfiling();
assertEq(e.tbl.get(0)(), 42);
@ -251,7 +196,7 @@ for (let type of ['f32', 'f64']) {
enableSingleStepProfiling();
assertEq(e.foo(), 42);
assertEq(e.tbl.get(1)(), 13);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "0,>", ">", "", ">", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "0,>", ">", "", ">", "1,>", ">", ""]);
disableGeckoProfiling();
var e2 = wasmEvalText(`
@ -267,19 +212,19 @@ for (let type of ['f32', 'f64']) {
enableGeckoProfiling();
enableSingleStepProfiling();
assertEq(e2.baz(0), 42);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
disableGeckoProfiling();
enableGeckoProfiling();
enableSingleStepProfiling();
assertEq(e2.baz(1), 13);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "1,>", "1,1,>", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "1,>", "1,1,>", "1,>", ">", ""]);
disableGeckoProfiling();
enableGeckoProfiling();
enableSingleStepProfiling();
assertEq(e2.baz(2), 99);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
disableGeckoProfiling();
})();
@ -301,7 +246,7 @@ for (let type of ['f32', 'f64']) {
enableGeckoProfiling();
enableSingleStepProfiling();
assertEq(e2.bar(), 42);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
disableGeckoProfiling();
assertEq(e2.bar(), 42);
@ -311,7 +256,7 @@ for (let type of ['f32', 'f64']) {
var e4 = new Instance(m2, {a:e3}).exports;
enableSingleStepProfiling();
assertEq(e4.bar(), 42);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "1,>", "0,1,>", "1,>", ">", ""]);
disableGeckoProfiling();
assertEq(e4.bar(), 42);
})();
@ -377,7 +322,7 @@ for (let type of ['f32', 'f64']) {
// Test normal conditions.
enableSingleStepProfiling();
assertEq(i.foo(0), 42);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "2,>", "<,2,>",
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "2,>", "<,2,>",
// Losing stack information while the JIT func prologue sets profiler
// virtual FP.
"",
@ -394,7 +339,7 @@ for (let type of ['f32', 'f64']) {
// Test rectifier frame.
enableSingleStepProfiling();
assertEq(i.id(100), 100);
assertEqStacks(disableSingleStepProfiling(), ["", ">", "3,>", "<,3,>",
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "3,>", "<,3,>",
// Rectifier frame time is spent here (lastProfilingFrame has not been
// set).
"",
@ -409,7 +354,7 @@ for (let type of ['f32', 'f64']) {
enableSingleStepProfiling();
assertEq(i.foo(1337), -(2**31));
assertEqStacks(disableSingleStepProfiling(), ["", ">", "2,>", "<,2,>", "", "<,2,>", "",
assertEqImpreciseStacks(disableSingleStepProfiling(), ["", ">", "2,>", "<,2,>", "", "<,2,>", "",
// Back into the jit exit (frame info has been recovered).
// Inline conversion fails, we skip to the OOL path, call from there
// and get back to the jit exit.

View File

@ -659,7 +659,7 @@ BaselineCacheIRCompiler::emitCallScriptedGetterResult()
return false;
masm.loadPtr(getterAddr, callee);
masm.branchIfFunctionHasNoScript(callee, failure->label());
masm.branchIfFunctionHasNoJitEntry(callee, /* constructing */ false, failure->label());
masm.loadJitCodeRaw(callee, code);
}
@ -1759,7 +1759,7 @@ BaselineCacheIRCompiler::emitCallScriptedSetter()
return false;
masm.loadPtr(setterAddr, scratch1);
masm.branchIfFunctionHasNoScript(scratch1, failure->label());
masm.branchIfFunctionHasNoJitEntry(scratch1, /* constructing */ false, failure->label());
}
allocator.discardStack(masm);

View File

@ -1668,11 +1668,9 @@ TryAttachFunApplyStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script,
return true;
RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
bool isScripted = target->hasScript();
// right now, only handle situation where second argument is |arguments|
if (argv[1].isMagic(JS_OPTIMIZED_ARGUMENTS) && !script->needsArgsObj()) {
if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
if (target->hasJitEntry() && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
JitSpew(JitSpew_BaselineIC, " Generating Call_ScriptedApplyArguments stub");
ICCall_ScriptedApplyArguments::Compiler compiler(
@ -1690,7 +1688,7 @@ TryAttachFunApplyStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script,
}
if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
if (target->hasJitEntry() && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
JitSpew(JitSpew_BaselineIC, " Generating Call_ScriptedApplyArray stub");
ICCall_ScriptedApplyArray::Compiler compiler(
@ -1722,7 +1720,8 @@ TryAttachFunCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script,
// Attach a stub if the script can be Baseline-compiled. We do this also
// if the script is not yet compiled to avoid attaching a CallNative stub
// that handles everything, even after the callee becomes hot.
if (target->hasScript() && target->nonLazyScript()->canBaselineCompile() &&
if (((target->hasScript() && target->nonLazyScript()->canBaselineCompile()) ||
(target->isNativeWithJitEntry())) &&
!stub->hasStub(ICStub::Call_ScriptedFunCall))
{
JitSpew(JitSpew_BaselineIC, " Generating Call_ScriptedFunCall stub");
@ -2012,7 +2011,8 @@ TryAttachCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsb
RootedFunction fun(cx, &obj->as<JSFunction>());
if (fun->isInterpreted()) {
bool nativeWithJitEntry = fun->isNativeWithJitEntry();
if (fun->isInterpreted() || nativeWithJitEntry) {
// Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
// MagicArguments may escape the frame through them.
if (op == JSOP_FUNAPPLY)
@ -2026,7 +2026,7 @@ TryAttachCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsb
if (!constructing && fun->isClassConstructor())
return true;
if (!fun->hasScript()) {
if (!fun->hasJitEntry()) {
// Don't treat this as an unoptimizable case, as we'll add a stub
// when the callee is delazified.
*handled = true;
@ -2116,10 +2116,17 @@ TryAttachCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsb
templateObject = thisObject;
}
JitSpew(JitSpew_BaselineIC,
" Generating Call_Scripted stub (fun=%p, %s:%zu, cons=%s, spread=%s)",
fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
constructing ? "yes" : "no", isSpread ? "yes" : "no");
if (nativeWithJitEntry) {
JitSpew(JitSpew_BaselineIC,
" Generating Call_Scripted stub (native=%p with jit entry, cons=%s, spread=%s)",
fun->native(), constructing ? "yes" : "no", isSpread ? "yes" : "no");
} else {
JitSpew(JitSpew_BaselineIC,
" Generating Call_Scripted stub (fun=%p, %s:%zu, cons=%s, spread=%s)",
fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
constructing ? "yes" : "no", isSpread ? "yes" : "no");
}
ICCallScriptedCompiler compiler(cx, typeMonitorFallback->firstMonitorStub(),
fun, templateObject,
constructing, isSpread, script->pcToOffset(pc));
@ -2649,7 +2656,7 @@ ICCallStubCompiler::guardFunApply(MacroAssembler& masm, AllocatableGeneralRegist
// Ensure no holes. Loop through values in array and make sure none are magic.
// Start address is secondArgObj, end address is secondArgObj + (lenReg * sizeof(Value))
JS_STATIC_ASSERT(sizeof(Value) == 8);
static_assert(sizeof(Value) == 8, "shift by 3 below assumes Value is 8 bytes");
masm.lshiftPtr(Imm32(3), lenReg);
masm.addPtr(secondArgObj, lenReg);
@ -2696,7 +2703,7 @@ ICCallStubCompiler::guardFunApply(MacroAssembler& masm, AllocatableGeneralRegist
failure);
Register temp = regs.takeAny();
masm.branchIfFunctionHasNoScript(target, failure);
masm.branchIfFunctionHasNoJitEntry(target, /* constructing */ false, failure);
masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor, callee, temp, failure);
regs.add(temp);
return target;
@ -2937,7 +2944,7 @@ ICCallScriptedCompiler::generateStubCode(MacroAssembler& masm)
masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);
// Guard against relazification.
masm.branchIfFunctionHasNoScript(callee, &failure);
masm.branchIfFunctionHasNoJitEntry(callee, isConstructing_, &failure);
} else {
// Ensure the object is a function.
masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
@ -2945,7 +2952,7 @@ ICCallScriptedCompiler::generateStubCode(MacroAssembler& masm)
if (isConstructing_) {
masm.branchIfNotInterpretedConstructor(callee, regs.getAny(), &failure);
} else {
masm.branchIfFunctionHasNoScript(callee, &failure);
masm.branchIfFunctionHasNoJitEntry(callee, /* constructing */ false, &failure);
masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor, callee,
regs.getAny(), &failure);
}
@ -3570,7 +3577,7 @@ ICCall_ScriptedApplyArray::Compiler::generateStubCode(MacroAssembler& masm)
masm.bind(&noUnderflow);
regs.add(argcReg);
// Do call
// Do call.
masm.callJit(target);
leaveStubFrame(masm, true);
@ -3698,7 +3705,7 @@ ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler& masm)
masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrEnv()), callee);
masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(fun_call), &failure);
// Ensure |this| is a scripted function with JIT code.
// Ensure |this| is a function with a jit entry.
BaseIndex thisSlot(masm.getStackPointer(), argcReg, TimesEight, ICStackValueOffset);
masm.loadValue(thisSlot, R1);
@ -3707,7 +3714,7 @@ ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler& masm)
masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
&failure);
masm.branchIfFunctionHasNoScript(callee, &failure);
masm.branchIfFunctionHasNoJitEntry(callee, /* constructing */ false, &failure);
masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor,
callee, regs.getAny(), &failure);

View File

@ -339,7 +339,7 @@ IsCacheableGetPropCallNative(JSObject* obj, JSObject* holder, Shape* shape)
return false;
JSFunction& getter = shape->getterValue().toObject().as<JSFunction>();
if (!getter.isNative())
if (!getter.isNativeWithCppEntry())
return false;
if (getter.isClassConstructor())
@ -374,9 +374,13 @@ IsCacheableGetPropCallScripted(JSObject* obj, JSObject* holder, Shape* shape,
return false;
JSFunction& getter = shape->getterValue().toObject().as<JSFunction>();
if (getter.isNative())
if (getter.isNativeWithCppEntry())
return false;
// Natives with jit entry can use the scripted path.
if (getter.isNativeWithJitEntry())
return true;
if (!getter.hasScript()) {
if (isTemporarilyUnoptimizable)
*isTemporarilyUnoptimizable = true;
@ -684,7 +688,7 @@ EmitCallGetterResultNoGuards(CacheIRWriter& writer, JSObject* obj, JSObject* hol
{
if (IsCacheableGetPropCallNative(obj, holder, shape)) {
JSFunction* target = &shape->getterValue().toObject().as<JSFunction>();
MOZ_ASSERT(target->isNative());
MOZ_ASSERT(target->isNativeWithCppEntry());
writer.callNativeGetterResult(receiverId, target);
writer.typeMonitorResult();
return;
@ -693,7 +697,7 @@ EmitCallGetterResultNoGuards(CacheIRWriter& writer, JSObject* obj, JSObject* hol
MOZ_ASSERT(IsCacheableGetPropCallScripted(obj, holder, shape));
JSFunction* target = &shape->getterValue().toObject().as<JSFunction>();
MOZ_ASSERT(target->hasScript());
MOZ_ASSERT(target->hasJitEntry());
writer.callScriptedGetterResult(receiverId, target);
writer.typeMonitorResult();
}
@ -3208,7 +3212,7 @@ IsCacheableSetPropCallNative(JSObject* obj, JSObject* holder, Shape* shape)
return false;
JSFunction& setter = shape->setterObject()->as<JSFunction>();
if (!setter.isNative())
if (!setter.isNativeWithCppEntry())
return false;
if (setter.isClassConstructor())
@ -3237,9 +3241,13 @@ IsCacheableSetPropCallScripted(JSObject* obj, JSObject* holder, Shape* shape,
return false;
JSFunction& setter = shape->setterObject()->as<JSFunction>();
if (setter.isNative())
if (setter.isNativeWithCppEntry())
return false;
// Natives with jit entry can use the scripted path.
if (setter.isNativeWithJitEntry())
return true;
if (!setter.hasScript()) {
if (isTemporarilyUnoptimizable)
*isTemporarilyUnoptimizable = true;
@ -3283,7 +3291,7 @@ EmitCallSetterNoGuards(CacheIRWriter& writer, JSObject* obj, JSObject* holder,
{
if (IsCacheableSetPropCallNative(obj, holder, shape)) {
JSFunction* target = &shape->setterValue().toObject().as<JSFunction>();
MOZ_ASSERT(target->isNative());
MOZ_ASSERT(target->isNativeWithCppEntry());
writer.callNativeSetter(objId, target, rhsId);
writer.returnFromIC();
return;
@ -3292,7 +3300,7 @@ EmitCallSetterNoGuards(CacheIRWriter& writer, JSObject* obj, JSObject* holder,
MOZ_ASSERT(IsCacheableSetPropCallScripted(obj, holder, shape));
JSFunction* target = &shape->setterValue().toObject().as<JSFunction>();
MOZ_ASSERT(target->hasScript());
MOZ_ASSERT(target->hasJitEntry());
writer.callScriptedSetter(objId, target, rhsId);
writer.returnFromIC();
}

View File

@ -4035,7 +4035,7 @@ CodeGenerator::visitCallNative(LCallNative* call)
{
WrappedFunction* target = call->getSingleTarget();
MOZ_ASSERT(target);
MOZ_ASSERT(target->isNative());
MOZ_ASSERT(target->isNativeWithCppEntry());
int callargslot = call->argslot();
int unusedStack = StackOffsetOfPassedArg(callargslot);
@ -4060,8 +4060,9 @@ CodeGenerator::visitCallNative(LCallNative* call)
// Allocate space for the outparam, moving the StackPointer to what will be &vp[1].
masm.adjustStack(unusedStack);
// Push a Value containing the callee object: natives are allowed to access their callee before
// setitng the return value. The StackPointer is moved to &vp[0].
// Push a Value containing the callee object: natives are allowed to access
// their callee before setting the return value. The StackPointer is moved
// to &vp[0].
masm.Push(ObjectValue(*target->rawJSFunction()));
// Preload arguments into registers.
@ -4310,16 +4311,17 @@ CodeGenerator::visitCallGeneric(LCallGeneric* call)
// Guard that calleereg is actually a function object.
masm.branchTestObjClass(Assembler::NotEqual, calleereg, nargsreg, &JSFunction::class_, &invoke);
// Guard that calleereg is an interpreted function with a JSScript.
// Guard that calleereg is an interpreted function with a JSScript or a
// wasm function.
// If we are constructing, also ensure the callee is a constructor.
if (call->mir()->isConstructing()) {
masm.branchIfNotInterpretedConstructor(calleereg, nargsreg, &invoke);
} else {
masm.branchIfFunctionHasNoScript(calleereg, &invoke);
masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor, calleereg, objreg, &invoke);
masm.branchIfFunctionHasNoJitEntry(calleereg, /* isConstructing */ false, &invoke);
masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor, calleereg, objreg,
&invoke);
}
// Knowing that calleereg is a non-native function, load the jit code.
masm.loadJitCodeRaw(calleereg, objreg);
// Nestle the StackPointer up to the argument vector.
@ -4404,10 +4406,9 @@ CodeGenerator::visitCallKnown(LCallKnown* call)
Register objreg = ToRegister(call->getTempObject());
uint32_t unusedStack = StackOffsetOfPassedArg(call->argslot());
WrappedFunction* target = call->getSingleTarget();
Label end, uncompiled;
// Native single targets are handled by LCallNative.
MOZ_ASSERT(!target->isNative());
// Native single targets (except wasm) are handled by LCallNative.
MOZ_ASSERT(!target->isNativeWithCppEntry());
// Missing arguments must have been explicitly appended by the IonBuilder.
DebugOnly<unsigned> numNonArgsOnStack = 1 + call->isConstructing();
MOZ_ASSERT(target->nargs() <= call->mir()->numStackArgs() - numNonArgsOnStack);
@ -4424,11 +4425,13 @@ CodeGenerator::visitCallKnown(LCallKnown* call)
MOZ_ASSERT_IF(target->isClassConstructor(), call->isConstructing());
// The calleereg is known to be a non-native function, but might point to
// a LazyScript instead of a JSScript.
masm.branchIfFunctionHasNoScript(calleereg, &uncompiled);
Label uncompiled;
if (!target->isNativeWithJitEntry()) {
// The calleereg is known to be a non-native function, but might point
// to a LazyScript instead of a JSScript.
masm.branchIfFunctionHasNoJitEntry(calleereg, call->isConstructing(), &uncompiled);
}
// Load non-native jitcode from the script.
if (call->mir()->needsArgCheck())
masm.loadJitCodeRaw(calleereg, objreg);
else
@ -4452,17 +4455,22 @@ CodeGenerator::visitCallKnown(LCallKnown* call)
// The return address has already been removed from the Ion frame.
int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
masm.adjustStack(prefixGarbage - unusedStack);
masm.jump(&end);
// Handle uncompiled functions.
masm.bind(&uncompiled);
if (call->isConstructing() && target->nargs() > call->numActualArgs())
emitCallInvokeFunctionShuffleNewTarget(call, calleereg, target->nargs(), unusedStack);
else
emitCallInvokeFunction(call, calleereg, call->isConstructing(), call->ignoresReturnValue(),
call->numActualArgs(), unusedStack);
if (uncompiled.used()) {
Label end;
masm.jump(&end);
masm.bind(&end);
// Handle uncompiled functions.
masm.bind(&uncompiled);
if (call->isConstructing() && target->nargs() > call->numActualArgs()) {
emitCallInvokeFunctionShuffleNewTarget(call, calleereg, target->nargs(), unusedStack);
} else {
emitCallInvokeFunction(call, calleereg, call->isConstructing(),
call->ignoresReturnValue(), call->numActualArgs(), unusedStack);
}
masm.bind(&end);
}
// If the return value of the constructing function is Primitive,
// replace the return value with the Object from CreateThis.
@ -4716,7 +4724,7 @@ CodeGenerator::emitApplyGeneric(T* apply)
masm.checkStackAlignment();
// If the function is native, only emit the call to InvokeFunction.
if (apply->hasSingleTarget() && apply->getSingleTarget()->isNative()) {
if (apply->hasSingleTarget() && apply->getSingleTarget()->isNativeWithCppEntry()) {
emitCallInvokeFunction(apply, extraStackSpace);
emitPopArguments(extraStackSpace);
return;
@ -4725,13 +4733,13 @@ CodeGenerator::emitApplyGeneric(T* apply)
Label end, invoke;
// Guard that calleereg is an interpreted function with a JSScript.
masm.branchIfFunctionHasNoScript(calleereg, &invoke);
masm.branchIfFunctionHasNoJitEntry(calleereg, /* constructing */ false, &invoke);
// Guard that calleereg is not a class constrcuctor
masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor,
calleereg, objreg, &invoke);
// Knowing that calleereg is a non-native function, load script's jitcode.
// Knowing that calleereg is a non-native function, load jitcode.
masm.loadJitCodeRaw(calleereg, objreg);
// Call with an Ion frame or a rectifier frame.

View File

@ -2767,13 +2767,18 @@ InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool
for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter, ++frameno) {
const JSJitFrameIter& frame = iter.frame();
MOZ_ASSERT_IF(frameno == 1, frame.isExitFrame() || frame.type() == JitFrame_Bailout);
MOZ_ASSERT_IF(frameno == 1, frame.isExitFrame() ||
frame.type() == JitFrame_Bailout ||
frame.type() == JitFrame_JSJitToWasm);
#ifdef JS_JITSPEW
switch (frame.type()) {
case JitFrame_Exit:
JitSpew(JitSpew_IonInvalidate, "#%zu exit frame @ %p", frameno, frame.fp());
break;
case JitFrame_JSJitToWasm:
JitSpew(JitSpew_IonInvalidate, "#%zu wasm exit frame @ %p", frameno, frame.fp());
break;
case JitFrame_BaselineJS:
case JitFrame_IonJS:
case JitFrame_Bailout:

View File

@ -5041,6 +5041,13 @@ IonBuilder::createThis(JSFunction* target, MDefinition* callee, MDefinition* new
if (!target->isConstructor())
return nullptr;
if (target->isNativeWithJitEntry()) {
// Do not bother inlining constructor calls to asm.js, since it is
// not used much in practice.
MOZ_ASSERT(target->isWasmOptimized());
return nullptr;
}
MConstant* magic = MConstant::New(alloc(), MagicValue(JS_IS_CONSTRUCTING));
current->add(magic);
return magic;
@ -5525,7 +5532,7 @@ IonBuilder::makeCallHelper(JSFunction* target, CallInfo& callInfo)
// Collect number of missing arguments provided that the target is
// scripted. Native functions are passed an explicit 'argc' parameter.
if (target && !target->isNative())
if (target && !target->isNativeWithCppEntry())
targetArgs = Max<uint32_t>(target->nargs(), callInfo.argc());
bool isDOMCall = false;
@ -5554,8 +5561,8 @@ IonBuilder::makeCallHelper(JSFunction* target, CallInfo& callInfo)
// Explicitly pad any missing arguments with |undefined|.
// This permits skipping the argumentsRectifier.
MOZ_ASSERT_IF(target && targetArgs > callInfo.argc(), !target->isNativeWithCppEntry());
for (int i = targetArgs; i > (int)callInfo.argc(); i--) {
MOZ_ASSERT_IF(target, !target->isNative());
MConstant* undef = constant(UndefinedValue());
if (!alloc().ensureBallast())
return abort(AbortReason::Alloc);

View File

@ -1092,9 +1092,10 @@ IonCacheIRCompiler::emitCallScriptedGetterResult()
// Check stack alignment. Add sizeof(uintptr_t) for the return address.
MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) == 0);
// The getter currently has a non-lazy script. We will only relazify when
// we do a shrinking GC and when that happens we will also purge IC stubs.
MOZ_ASSERT(target->hasScript());
// The getter currently has a jit entry or a non-lazy script. We will only
// relazify when we do a shrinking GC and when that happens we will also
// purge IC stubs.
MOZ_ASSERT(target->hasJitEntry());
masm.loadJitCodeRaw(scratch, scratch);
masm.callJit(scratch);
masm.storeCallResultValue(output);
@ -2117,9 +2118,10 @@ IonCacheIRCompiler::emitCallScriptedSetter()
// Check stack alignment. Add sizeof(uintptr_t) for the return address.
MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) == 0);
// The setter currently has a non-lazy script. We will only relazify when
// we do a shrinking GC and when that happens we will also purge IC stubs.
MOZ_ASSERT(target->hasScript());
// The setter currently has a jit entry or a non-lazy script. We will only
// relazify when we do a shrinking GC and when that happens we will also
// purge IC stubs.
MOZ_ASSERT(target->hasJitEntry());
masm.loadJitCodeRaw(scratch, scratch);
masm.callJit(scratch);

View File

@ -31,6 +31,18 @@ JSJitFrameIter::JSJitFrameIter(const JitActivation* activation)
}
}
JSJitFrameIter::JSJitFrameIter(const JitActivation* activation, uint8_t* fp)
: current_(fp),
type_(JitFrame_JSJitToWasm),
returnAddressToFp_(nullptr),
frameSize_(0),
cachedSafepointIndex_(nullptr),
activation_(activation)
{
MOZ_ASSERT(!activation_->bailoutData());
MOZ_ASSERT(!TlsContext.get()->inUnsafeCallWithABI);
}
bool
JSJitFrameIter::checkInvalidation() const
{
@ -371,6 +383,9 @@ JSJitFrameIter::dump() const
case JitFrame_Exit:
fprintf(stderr, " Exit frame\n");
break;
case JitFrame_JSJitToWasm:
fprintf(stderr, " Wasm exit frame\n");
break;
};
fputc('\n', stderr);
}
@ -450,8 +465,7 @@ JSJitFrameIter::verifyReturnAddressUsingNativeToBytecodeMap()
}
#endif // DEBUG
JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(
JSContext* cx, const JS::ProfilingFrameIterator::RegisterState& state)
JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(JSContext* cx, void* pc)
{
// If no profilingActivation is live, initialize directly to
// end-of-iteration state.
@ -478,22 +492,21 @@ JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(
// Get the fp from the current profilingActivation
fp_ = (uint8_t*) act->lastProfilingFrame();
void* lastCallSite = act->lastProfilingCallSite();
JitcodeGlobalTable* table = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
// Profiler sampling must NOT be suppressed if we are here.
MOZ_ASSERT(cx->isProfilerSamplingEnabled());
// Try initializing with sampler pc
if (tryInitWithPC(state.pc))
if (tryInitWithPC(pc))
return;
// Try initializing with sampler pc using native=>bytecode table.
if (tryInitWithTable(table, state.pc, cx->runtime(), /* forLastCallSite = */ false))
JitcodeGlobalTable* table = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
if (tryInitWithTable(table, pc, cx->runtime(), /* forLastCallSite = */ false))
return;
// Try initializing with lastProfilingCallSite pc
void* lastCallSite = act->lastProfilingCallSite();
if (lastCallSite) {
if (tryInitWithPC(lastCallSite))
return;
@ -519,11 +532,9 @@ GetPreviousRawFrame(CommonFrameLayout* frame)
return ReturnType((uint8_t*)frame + prevSize);
}
JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(void* exitFrame)
JSJitProfilingFrameIterator::JSJitProfilingFrameIterator(CommonFrameLayout* fp)
{
// Skip the exit frame.
ExitFrameLayout* frame = (ExitFrameLayout*) exitFrame;
moveToNextFrame(frame);
moveToNextFrame(fp);
}
bool

View File

@ -60,6 +60,11 @@ enum FrameType
// jits, used as a marker to interleave JS jit and wasm frames. From the
// point of view of JS JITs, this is just another kind of entry frame.
JitFrame_WasmToJSJit,
// A JS to wasm frame is constructed during fast calls from any JS jits to
// wasm, and is a special kind of exit frame that doesn't have the exit
// footer. From the point of view of the jit, it can be skipped as an exit.
JitFrame_JSJitToWasm,
};
enum ReadFrameArgsBehavior {
@ -113,6 +118,10 @@ class JSJitFrameIter
// See comment above the class.
explicit JSJitFrameIter(const JitActivation* activation);
// A constructor specialized for jit->wasm frames, which starts at a
// specific FP.
JSJitFrameIter(const JitActivation* activation, uint8_t* fp);
// Used only by DebugModeOSRVolatileJitFrameIter.
void exchangeReturnAddressIfMatch(uint8_t* oldAddr, uint8_t* newAddr) {
if (returnAddressToFp_ == oldAddr)
@ -301,9 +310,8 @@ class JSJitProfilingFrameIterator
void moveToNextFrame(CommonFrameLayout* frame);
public:
JSJitProfilingFrameIterator(JSContext* cx,
const JS::ProfilingFrameIterator::RegisterState& state);
explicit JSJitProfilingFrameIterator(void* exitFrame);
JSJitProfilingFrameIterator(JSContext* cx, void* pc);
explicit JSJitProfilingFrameIterator(CommonFrameLayout* exitFP);
void operator++();
bool done() const { return fp_ == nullptr; }

View File

@ -821,11 +821,12 @@ ReadAllocation(const JSJitFrameIter& frame, const LAllocation* a)
static void
TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame, JitFrameLayout* layout)
{
// Trace |this| and any extra actual arguments for an Ion frame. Tracinging
// Trace |this| and any extra actual arguments for an Ion frame. Tracing
// of formal arguments is taken care of by the frame's safepoint/snapshot,
// except when the script might have lazy arguments or rest, in which case
// we trace them as well. We also have to trace formals if we have a
// LazyLink frame or an InterpreterStub frame.
// LazyLink frame or an InterpreterStub frame or a special JSJit to wasm
// frame (since wasm doesn't use snapshots).
if (!CalleeTokenIsFunction(layout->calleeToken()))
return;
@ -834,7 +835,8 @@ TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame, JitFrameLayout
size_t nformals = 0;
JSFunction* fun = CalleeTokenToFunction(layout->calleeToken());
if (!frame.isExitFrameLayout<LazyLinkExitFrameLayout>() &&
if (frame.type() != JitFrame_JSJitToWasm &&
!frame.isExitFrameLayout<LazyLinkExitFrameLayout>() &&
!frame.isExitFrameLayout<InterpreterStubExitFrameLayout>() &&
!fun->nonLazyScript()->mayReadFrameArgsDirectly())
{
@ -1239,6 +1241,16 @@ TraceRectifierFrame(JSTracer* trc, const JSJitFrameIter& frame)
TraceRoot(trc, &layout->argv()[0], "ion-thisv");
}
static void
TraceJSJitToWasmFrame(JSTracer* trc, const JSJitFrameIter& frame)
{
// This is doing a subset of TraceIonJSFrame, since the callee doesn't
// have a script.
JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
TraceThisAndArguments(trc, frame, layout);
}
static void
TraceJitActivation(JSTracer* trc, JitActivation* activation)
{
@ -1280,8 +1292,12 @@ TraceJitActivation(JSTracer* trc, JitActivation* activation)
TraceIonICCallFrame(trc, jitFrame);
break;
case JitFrame_WasmToJSJit:
// Ignore: this is a marked used to let the JitFrameIter the
// frame above is a wasm frame, handled in the next iteration.
// Ignore: this is a special marker used to let the
// JitFrameIter know the frame above is a wasm frame, handled
// in the next iteration.
break;
case JitFrame_JSJitToWasm:
TraceJSJitToWasmFrame(trc, jitFrame);
break;
default:
MOZ_CRASH("unexpected frame type");

View File

@ -477,6 +477,7 @@ enum class ExitFrameType : uint8_t
IonDOMMethod = 0x4,
IonOOLNative = 0x5,
IonOOLProxy = 0x6,
WasmJitEntry = 0x7,
InterpreterStub = 0xFC,
VMFunction = 0xFD,
LazyLink = 0xFE,

View File

@ -165,7 +165,7 @@ void*
JitcodeGlobalEntry::BaselineEntry::canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const
{
// TODO: We can't yet normalize Baseline addresses until we unify
// BaselineScript's PCMappingEntries with JitcodeGlobalMap.
// BaselineScript's PCMappingEntries with JitcodeGlobalTable.
return ptr;
}

View File

@ -484,9 +484,11 @@ LDefinition::dump() const
void
LNode::printOperands(GenericPrinter& out)
{
for (size_t i = 0, e = numOperands(); i < e; i++) {
size_t numOperands = isPhi() ? toPhi()->numOperands() : toInstruction()->numOperands();
for (size_t i = 0; i < numOperands; i++) {
out.printf(" (%s)", getOperand(i)->toString().get());
if (i != numOperands() - 1)
if (i != numOperands - 1)
out.printf(",");
}
}

View File

@ -666,18 +666,24 @@ class LNode
protected:
// Bitfields below are all uint32_t to make sure MSVC packs them correctly.
uint32_t isCall_ : 1;
// LPhi::numOperands() may not fit in this bitfield, so we only use this
// field for LInstruction.
uint32_t nonPhiNumOperands_ : 6;
uint32_t numDefs_ : 4;
uint32_t numTemps_ : 4;
public:
LNode(uint32_t numDefs, uint32_t numTemps)
LNode(uint32_t nonPhiNumOperands, uint32_t numDefs, uint32_t numTemps)
: mir_(nullptr),
block_(nullptr),
id_(0),
isCall_(false),
nonPhiNumOperands_(nonPhiNumOperands),
numDefs_(numDefs),
numTemps_(numTemps)
{
MOZ_ASSERT(nonPhiNumOperands_ == nonPhiNumOperands,
"nonPhiNumOperands must fit in bitfield");
MOZ_ASSERT(numDefs_ == numDefs, "numDefs must fit in bitfield");
MOZ_ASSERT(numTemps_ == numTemps, "numTemps must fit in bitfield");
}
@ -723,7 +729,6 @@ class LNode
virtual void setDef(size_t index, const LDefinition& def) = 0;
// Returns information about operands.
virtual size_t numOperands() const = 0;
virtual LAllocation* getOperand(size_t index) = 0;
virtual void setOperand(size_t index, const LAllocation& a) = 0;
@ -748,9 +753,7 @@ class LNode
// Does this call preserve the given register?
// By default, it is assumed that all registers are clobbered by a call.
virtual bool isCallPreserved(AnyRegister reg) const {
return false;
}
inline bool isCallPreserved(AnyRegister reg) const;
uint32_t id() const {
return id_;
@ -776,9 +779,7 @@ class LNode
// For an instruction which has a MUST_REUSE_INPUT output, whether that
// output register will be restored to its original value when bailing out.
virtual bool recoversInput() const {
return false;
}
inline bool recoversInput() const;
virtual void dump(GenericPrinter& out);
void dump();
@ -827,8 +828,8 @@ class LInstruction
LMoveGroup* movesAfter_;
protected:
LInstruction(uint32_t numDefs, uint32_t numTemps)
: LNode(numDefs, numTemps),
LInstruction(uint32_t numOperands, uint32_t numDefs, uint32_t numTemps)
: LNode(numOperands, numDefs, numTemps),
snapshot_(nullptr),
safepoint_(nullptr),
inputMoves_(nullptr),
@ -865,6 +866,9 @@ class LInstruction
void setMovesAfter(LMoveGroup* moves) {
movesAfter_ = moves;
}
uint32_t numOperands() const {
return nonPhiNumOperands_;
}
void assignSnapshot(LSnapshot* snapshot);
void initSafepoint(TempAllocator& alloc);
@ -937,7 +941,9 @@ class LPhi final : public LNode
LIR_HEADER(Phi)
LPhi(MPhi* ins, LAllocation* inputs)
: LNode(/* numDefs = */ 1, /* numTemps = */ 0),
: LNode(/* nonPhiNumOperands = */ 0,
/* numDefs = */ 1,
/* numTemps = */ 0),
inputs_(inputs)
{
setMir(ins);
@ -951,7 +957,7 @@ class LPhi final : public LNode
MOZ_ASSERT(index == 0);
def_ = def;
}
size_t numOperands() const override {
size_t numOperands() const {
return mir_->toPhi()->numOperands();
}
LAllocation* getOperand(size_t index) override {
@ -1084,8 +1090,8 @@ namespace details {
mozilla::Array<LDefinition, Temps> temps_;
protected:
LInstructionFixedDefsTempsHelper()
: LInstruction(Defs, Temps)
explicit LInstructionFixedDefsTempsHelper(uint32_t numOperands)
: LInstruction(numOperands, Defs, Temps)
{}
public:
@ -1139,10 +1145,12 @@ class LInstructionHelper : public details::LInstructionFixedDefsTempsHelper<Defs
{
mozilla::Array<LAllocation, Operands> operands_;
protected:
LInstructionHelper()
: details::LInstructionFixedDefsTempsHelper<Defs, Temps>(Operands)
{}
public:
size_t numOperands() const final override {
return Operands;
}
LAllocation* getOperand(size_t index) final override {
return &operands_[index];
}
@ -1180,12 +1188,14 @@ class LVariadicInstruction : public details::LInstructionFixedDefsTempsHelper<De
{
FixedList<LAllocation> operands_;
protected:
explicit LVariadicInstruction(size_t numOperands)
: details::LInstructionFixedDefsTempsHelper<Defs, Temps>(numOperands)
{}
public:
MOZ_MUST_USE bool init(TempAllocator& alloc, size_t length) {
return operands_.init(alloc, length);
}
size_t numOperands() const final override {
return operands_.length();
MOZ_MUST_USE bool init(TempAllocator& alloc) {
return operands_.init(alloc, this->nonPhiNumOperands_);
}
LAllocation* getOperand(size_t index) final override {
return &operands_[index];

View File

@ -536,7 +536,7 @@ LIRGenerator::visitCall(MCall* call)
tempFixed(privReg), tempFixed(argsReg));
} else if (target) {
// Call known functions.
if (target->isNative()) {
if (target->isNativeWithCppEntry()) {
Register cxReg, numReg, vpReg, tmpReg;
GetTempRegForIntArg(0, 0, &cxReg);
GetTempRegForIntArg(1, 0, &numReg);
@ -3293,16 +3293,18 @@ LIRGenerator::visitLoadElementFromState(MLoadElementFromState* ins)
#ifdef JS_NUNBOX32
temp1 = temp();
#endif
LLoadElementFromStateV* lir = new(alloc()) LLoadElementFromStateV(temp(), temp1, tempDouble());
MOZ_ASSERT(ins->array()->isArgumentState(),
"LIRGenerator::visitLoadElementFromState: Unsupported state object");
MArgumentState* array = ins->array()->toArgumentState();
size_t numOperands = 1 + BOX_PIECES * array->numElements();
LLoadElementFromStateV* lir = new(alloc()) LLoadElementFromStateV(temp(), temp1, tempDouble(),
numOperands);
// 1 -- for the index as a register
// BOX_PIECES * array->numElements() -- for using as operand all the
// elements of the inlined array.
size_t numOperands = 1 + BOX_PIECES * array->numElements();
if (!lir->init(alloc(), numOperands)) {
if (!lir->init(alloc())) {
abort(AbortReason::Alloc, "OOM: LIRGenerator::visitLoadElementFromState");
return;
}

View File

@ -2041,6 +2041,7 @@ WrappedFunction::WrappedFunction(JSFunction* fun)
: fun_(fun),
nargs_(fun->nargs()),
isNative_(fun->isNative()),
isNativeWithJitEntry_(fun->isNativeWithJitEntry()),
isConstructor_(fun->isConstructor()),
isClassConstructor_(fun->isClassConstructor()),
isSelfHostedBuiltin_(fun->isSelfHostedBuiltin())

View File

@ -4158,6 +4158,7 @@ class WrappedFunction : public TempObject
CompilerFunction fun_;
uint16_t nargs_;
bool isNative_ : 1;
bool isNativeWithJitEntry_ : 1;
bool isConstructor_ : 1;
bool isClassConstructor_ : 1;
bool isSelfHostedBuiltin_ : 1;
@ -4165,7 +4166,11 @@ class WrappedFunction : public TempObject
public:
explicit WrappedFunction(JSFunction* fun);
size_t nargs() const { return nargs_; }
bool isNative() const { return isNative_; }
bool isNativeWithJitEntry() const { return isNativeWithJitEntry_; }
bool isNativeWithCppEntry() const { return isNative() && !isNativeWithJitEntry(); }
bool isConstructor() const { return isConstructor_; }
bool isClassConstructor() const { return isClassConstructor_; }
bool isSelfHostedBuiltin() const { return isSelfHostedBuiltin_; }

View File

@ -434,16 +434,21 @@ MacroAssembler::branchTwoByteString(Register string, Label* label)
}
void
MacroAssembler::branchIfFunctionHasNoScript(Register fun, Label* label)
MacroAssembler::branchIfFunctionHasNoJitEntry(Register fun, bool isConstructing, Label* label)
{
// 16-bit loads are slow and unaligned 32-bit loads may be too so
// perform an aligned 32-bit load and adjust the bitmask accordingly.
static_assert(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0,
"The code in this function and the ones below must change");
static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
"The code in this function and the ones below must change");
Address address(fun, JSFunction::offsetOfNargs());
int32_t bit = IMM32_16ADJ(JSFunction::INTERPRETED);
int32_t bit = JSFunction::INTERPRETED;
if (!isConstructing)
bit |= JSFunction::WASM_OPTIMIZED;
bit = IMM32_16ADJ(bit);
branchTest32(Assembler::Zero, address, Imm32(bit), label);
}
@ -688,6 +693,12 @@ MacroAssembler::storeDouble(FloatRegister src, const T& dest)
template void MacroAssembler::storeDouble(FloatRegister src, const Address& dest);
template void MacroAssembler::storeDouble(FloatRegister src, const BaseIndex& dest);
void
MacroAssembler::boxDouble(FloatRegister src, const Address& dest)
{
storeDouble(src, dest);
}
template<class T> void
MacroAssembler::storeFloat32(FloatRegister src, const T& dest)
{

View File

@ -201,9 +201,9 @@ using mozilla::FloatingPoint;
# define OOL_IN_HEADER
#if MOZ_LITTLE_ENDIAN
#define IMM32_16ADJ(X) X << 16
#define IMM32_16ADJ(X) (X) << 16
#else
#define IMM32_16ADJ(X) X
#define IMM32_16ADJ(X) (X)
#endif
namespace js {
@ -1178,7 +1178,7 @@ class MacroAssembler : public MacroAssemblerSpecific
inline void branchLatin1String(Register string, Label* label);
inline void branchTwoByteString(Register string, Label* label);
inline void branchIfFunctionHasNoScript(Register fun, Label* label);
inline void branchIfFunctionHasNoJitEntry(Register fun, bool isConstructing, Label* label);
inline void branchIfInterpreted(Register fun, Label* label);
inline void branchFunctionKind(Condition cond, JSFunction::FunctionKind kind, Register fun,
@ -1415,6 +1415,9 @@ class MacroAssembler : public MacroAssemblerSpecific
template<class T>
inline void storeDouble(FloatRegister src, const T& dest);
inline void boxDouble(FloatRegister src, const Address& dest);
using MacroAssemblerSpecific::boxDouble;
inline void storeUncanonicalizedFloat32(FloatRegister src, const Address& dest)
DEFINED_ON(x86_shared, arm, arm64, mips32, mips64);
inline void storeUncanonicalizedFloat32(FloatRegister src, const BaseIndex& dest)

View File

@ -120,6 +120,10 @@ static constexpr Register ABINonArgReg0 = r4;
static constexpr Register ABINonArgReg1 = r5;
static constexpr Register ABINonArgReg2 = r6;
// This register may be volatile or nonvolatile. Avoid d15 which is the
// ScratchDoubleReg.
static constexpr FloatRegister ABINonArgDoubleReg { FloatRegisters::d8, VFPRegister::Double };
// These registers may be volatile or nonvolatile.
// Note: these three registers are all guaranteed to be different
static constexpr Register ABINonArgReturnReg0 = r4;

View File

@ -3043,8 +3043,7 @@ void
MacroAssemblerARMCompat::unboxDouble(const ValueOperand& operand, FloatRegister dest)
{
MOZ_ASSERT(dest.isDouble());
as_vxfer(operand.payloadReg(), operand.typeReg(),
VFPRegister(dest), CoreToFloat);
as_vxfer(operand.payloadReg(), operand.typeReg(), VFPRegister(dest), CoreToFloat);
}
void

View File

@ -1580,7 +1580,7 @@ Simulator::exclusiveMonitorClear()
exclusiveMonitorHeld_ = false;
}
void
bool
Simulator::startWasmInterrupt(JitActivation* activation)
{
JS::ProfilingFrameIterator::RegisterState state;
@ -1588,7 +1588,7 @@ Simulator::startWasmInterrupt(JitActivation* activation)
state.fp = (void*) get_register(fp);
state.sp = (void*) get_register(sp);
state.lr = (void*) get_register(lr);
activation->startWasmInterrupt(state);
return activation->startWasmInterrupt(state);
}
// The signal handler only redirects the PC to the interrupt stub when the PC is
@ -1603,17 +1603,14 @@ Simulator::handleWasmInterrupt()
return;
uint8_t* pc = (uint8_t*)get_pc();
uint8_t* fp = (uint8_t*)get_register(r11);
const wasm::CodeSegment* cs = nullptr;
if (!wasm::InInterruptibleCode(cx_, pc, &cs))
return;
// fp can be null during the prologue/epilogue of the entry function.
if (!fp)
if (!startWasmInterrupt(cx_->activation()->asJit()))
return;
startWasmInterrupt(cx_->activation()->asJit());
set_pc(int32_t(cs->interruptCode()));
}
@ -1653,7 +1650,7 @@ Simulator::handleWasmSegFault(int32_t addr, unsigned numBytes)
const wasm::MemoryAccess* memoryAccess = instance->code().lookupMemoryAccess(pc);
if (!memoryAccess) {
startWasmInterrupt(act);
MOZ_ALWAYS_TRUE(startWasmInterrupt(act));
if (!instance->code().containsCodePC(pc))
MOZ_CRASH("Cannot map PC to trap handler");
set_pc(int32_t(segment->outOfBoundsCode()));

View File

@ -293,7 +293,7 @@ class Simulator
// Handle a wasm interrupt triggered by an async signal handler.
void handleWasmInterrupt();
void startWasmInterrupt(JitActivation* act);
bool startWasmInterrupt(JitActivation* act);
// Handle any wasm faults, returning true if the fault was handled.
bool handleWasmSegFault(int32_t addr, unsigned numBytes);

View File

@ -457,6 +457,10 @@ static constexpr Register ABINonArgReg0 = r8;
static constexpr Register ABINonArgReg1 = r9;
static constexpr Register ABINonArgReg2 = r10;
// This register may be volatile or nonvolatile. Avoid d31 which is the
// ScratchDoubleReg.
static constexpr FloatRegister ABINonArgDoubleReg = { FloatRegisters::s16, FloatRegisters::Single };
// These registers may be volatile or nonvolatile.
// Note: these three registers are all guaranteed to be different
static constexpr Register ABINonArgReturnReg0 = r8;

View File

@ -248,16 +248,14 @@ void Simulator::handle_wasm_interrupt() {
if (!js::wasm::InInterruptibleCode(cx_, pc, &cs))
return;
// fp can be null during the prologue/epilogue of the entry function.
if (!fp)
return;
JS::ProfilingFrameIterator::RegisterState state;
state.pc = pc;
state.fp = fp;
state.lr = (uint8_t*) xreg(30);
state.sp = (uint8_t*) xreg(31);
cx_->activation_->asJit()->startWasmInterrupt(state);
if (!cx_->activation_->asJit()->startWasmInterrupt(state))
return;
set_pc((Instruction*)cs->interruptCode());
}

View File

@ -1649,10 +1649,6 @@ Simulator::handleWasmInterrupt()
if (!segment || !segment->containsCodePC(pc))
return;
// fp can be null during the prologue/epilogue of the entry function.
if (!fp)
return;
startInterrupt(activation);
set_pc(int32_t(segment->interruptCode()));
}

View File

@ -78,10 +78,13 @@ static constexpr Register64 ReturnReg64(InvalidReg);
static constexpr Register ABINonArgReg0 { Registers::invalid_reg };
static constexpr Register ABINonArgReg1 { Registers::invalid_reg };
static constexpr Register ABINonArgReg2 { Registers::invalid_reg };
static constexpr Register ABINonArgReturnReg0 { Registers::invalid_reg };
static constexpr Register ABINonArgReturnReg1 { Registers::invalid_reg };
static constexpr Register ABINonArgReturnVolatileReg { Registers::invalid_reg };
static constexpr FloatRegister ABINonArgDoubleReg = { FloatRegisters::invalid_reg };
static constexpr Register WasmTableCallScratchReg { Registers::invalid_reg };
static constexpr Register WasmTableCallSigReg { Registers::invalid_reg };
static constexpr Register WasmTableCallIndexReg { Registers::invalid_reg };
@ -320,6 +323,7 @@ class MacroAssemblerNone : public Assembler
template <typename T> void unboxSymbol(T, Register) { MOZ_CRASH(); }
template <typename T> void unboxObject(T, Register) { MOZ_CRASH(); }
template <typename T> void unboxDouble(T, FloatRegister) { MOZ_CRASH(); }
template <typename T> void unboxPrivate(T, Register) { MOZ_CRASH(); }
void unboxValue(const ValueOperand&, AnyRegister, JSValueType) { MOZ_CRASH(); }
void unboxNonDouble(const ValueOperand&, Register, JSValueType) { MOZ_CRASH();}
void unboxNonDouble(const Address&, Register, JSValueType) { MOZ_CRASH();}

View File

@ -405,7 +405,9 @@ class LSimdSwizzleF : public LSimdSwizzleBase
class LSimdGeneralShuffleBase : public LVariadicInstruction<1, 1>
{
public:
explicit LSimdGeneralShuffleBase(const LDefinition& temp) {
LSimdGeneralShuffleBase(const LDefinition& temp, uint32_t numOperands)
: LVariadicInstruction<1, 1>(numOperands)
{
setTemp(0, temp);
}
const LAllocation* vector(unsigned i) {
@ -428,8 +430,9 @@ class LSimdGeneralShuffleI : public LSimdGeneralShuffleBase
{
public:
LIR_HEADER(SimdGeneralShuffleI);
explicit LSimdGeneralShuffleI(const LDefinition& temp)
: LSimdGeneralShuffleBase(temp)
LSimdGeneralShuffleI(const LDefinition& temp, uint32_t numOperands)
: LSimdGeneralShuffleBase(temp, numOperands)
{}
};
@ -437,8 +440,9 @@ class LSimdGeneralShuffleF : public LSimdGeneralShuffleBase
{
public:
LIR_HEADER(SimdGeneralShuffleF);
explicit LSimdGeneralShuffleF(const LDefinition& temp)
: LSimdGeneralShuffleBase(temp)
LSimdGeneralShuffleF(const LDefinition& temp, uint32_t numOperands)
: LSimdGeneralShuffleBase(temp, numOperands)
{}
};
@ -3938,7 +3942,7 @@ class LAddI : public LBinaryMath<0>
return snapshot() ? "OverflowCheck" : nullptr;
}
virtual bool recoversInput() const override {
bool recoversInput() const {
return recoversInput_;
}
void setRecoversInput() {
@ -3975,7 +3979,7 @@ class LSubI : public LBinaryMath<0>
return snapshot() ? "OverflowCheck" : nullptr;
}
virtual bool recoversInput() const override {
bool recoversInput() const {
return recoversInput_;
}
void setRecoversInput() {
@ -3986,6 +3990,19 @@ class LSubI : public LBinaryMath<0>
}
};
inline bool
LNode::recoversInput() const
{
switch (op()) {
case LOp_AddI:
return toAddI()->recoversInput();
case LOp_SubI:
return toSubI()->recoversInput();
default:
return false;
}
}
class LSubI64 : public LInstructionHelper<INT64_PIECES, 2 * INT64_PIECES, 0>
{
public:
@ -5824,7 +5841,8 @@ class LLoadElementFromStateV : public LVariadicInstruction<BOX_PIECES, 3>
LIR_HEADER(LoadElementFromStateV)
LLoadElementFromStateV(const LDefinition& temp0, const LDefinition& temp1,
const LDefinition& tempD)
const LDefinition& tempD, uint32_t numOperands)
: LVariadicInstruction<BOX_PIECES, 3>(numOperands)
{
setTemp(0, temp0);
setTemp(1, temp1);
@ -8995,16 +9013,14 @@ class LWasmStackArgI64 : public LInstructionHelper<0, INT64_PIECES, 0>
class LWasmCallBase : public LInstruction
{
LAllocation* operands_;
uint32_t numOperands_;
uint32_t needsBoundsCheck_;
public:
LWasmCallBase(LAllocation* operands, uint32_t numOperands, uint32_t numDefs,
bool needsBoundsCheck)
: LInstruction(numDefs, /* numTemps = */ 0),
: LInstruction(numOperands, numDefs, /* numTemps = */ 0),
operands_(operands),
numOperands_(numOperands),
needsBoundsCheck_(needsBoundsCheck)
{
setIsCall();
@ -9014,7 +9030,7 @@ class LWasmCallBase : public LInstruction
return mir_->toWasmCall();
}
bool isCallPreserved(AnyRegister reg) const override {
static bool isCallPreserved(AnyRegister reg) {
// All MWasmCalls preserve the TLS register:
// - internal/indirect calls do by the internal wasm ABI
// - import calls do by explicitly saving/restoring at the callsite
@ -9024,15 +9040,12 @@ class LWasmCallBase : public LInstruction
}
// LInstruction interface
size_t numOperands() const override {
return numOperands_;
}
LAllocation* getOperand(size_t index) override {
MOZ_ASSERT(index < numOperands_);
MOZ_ASSERT(index < numOperands());
return &operands_[index];
}
void setOperand(size_t index, const LAllocation& a) override {
MOZ_ASSERT(index < numOperands_);
MOZ_ASSERT(index < numOperands());
operands_[index] = a;
}
LDefinition* getTemp(size_t index) override {
@ -9104,6 +9117,18 @@ class LWasmCallI64 : public LWasmCallBase
}
};
inline bool
LNode::isCallPreserved(AnyRegister reg) const
{
switch (op()) {
case LOp_WasmCallI64:
case LOp_WasmCall:
return LWasmCallBase::isCallPreserved(reg);
default:
return false;
}
}
class LAssertRangeI : public LInstructionHelper<0, 1, 0>
{
public:

View File

@ -195,6 +195,10 @@ static constexpr Register ABINonArgReg0 = rax;
static constexpr Register ABINonArgReg1 = rbx;
static constexpr Register ABINonArgReg2 = r10;
// This register may be volatile or nonvolatile. Avoid xmm15 which is the
// ScratchDoubleReg.
static constexpr FloatRegister ABINonArgDoubleReg = FloatRegister(X86Encoding::xmm8, FloatRegisters::Double);
// These registers may be volatile or nonvolatile.
// Note: these three registers are all guaranteed to be different
static constexpr Register ABINonArgReturnReg0 = r10;

View File

@ -942,6 +942,8 @@ LIRGeneratorX86Shared::visitSimdGeneralShuffle(MSimdGeneralShuffle* ins)
{
MOZ_ASSERT(IsSimdType(ins->type()));
size_t numOperands = ins->numVectors() + ins->numLanes();
LSimdGeneralShuffleBase* lir;
if (IsIntegerSimdType(ins->type())) {
#if defined(JS_CODEGEN_X86)
@ -955,14 +957,14 @@ LIRGeneratorX86Shared::visitSimdGeneralShuffle(MSimdGeneralShuffle* ins)
#else
LDefinition t = temp();
#endif
lir = new (alloc()) LSimdGeneralShuffleI(t);
lir = new (alloc()) LSimdGeneralShuffleI(t, numOperands);
} else if (ins->type() == MIRType::Float32x4) {
lir = new (alloc()) LSimdGeneralShuffleF(temp());
lir = new (alloc()) LSimdGeneralShuffleF(temp(), numOperands);
} else {
MOZ_CRASH("Unknown SIMD kind when doing a shuffle");
}
if (!lir->init(alloc(), ins->numVectors() + ins->numLanes()))
if (!lir->init(alloc()))
return;
for (unsigned i = 0; i < ins->numVectors(); i++) {

View File

@ -84,6 +84,10 @@ static constexpr Register ABINonArgReg0 = eax;
static constexpr Register ABINonArgReg1 = ebx;
static constexpr Register ABINonArgReg2 = ecx;
// This register may be volatile or nonvolatile. Avoid xmm7 which is the
// ScratchDoubleReg.
static constexpr FloatRegister ABINonArgDoubleReg = FloatRegister(X86Encoding::xmm0, FloatRegisters::Double);
// These registers may be volatile or nonvolatile.
// Note: these three registers are all guaranteed to be different
static constexpr Register ABINonArgReturnReg0 = ecx;

View File

@ -2002,7 +2002,10 @@ js::NewFunctionWithProto(JSContext* cx, Native native,
} else {
MOZ_ASSERT(fun->isNative());
MOZ_ASSERT(native);
fun->initNative(native, nullptr);
if (fun->isWasmOptimized())
fun->initWasmNative(native);
else
fun->initNative(native, nullptr);
}
if (allocKind == AllocKind::FUNCTION_EXTENDED)
fun->initializeExtended();

View File

@ -91,7 +91,6 @@ class JSFunction : public js::NativeObject
NATIVE_CTOR = NATIVE_FUN | CONSTRUCTOR,
NATIVE_CLASS_CTOR = NATIVE_FUN | CONSTRUCTOR | CLASSCONSTRUCTOR_KIND,
ASMJS_CTOR = ASMJS_KIND | NATIVE_CTOR,
ASMJS_OPT_CTOR = ASMJS_CTOR | WASM_OPTIMIZED,
ASMJS_LAMBDA_CTOR = ASMJS_KIND | NATIVE_CTOR | LAMBDA,
WASM_FUN = NATIVE_FUN | WASM_OPTIMIZED,
INTERPRETED_METHOD = INTERPRETED | METHOD_KIND,
@ -123,8 +122,13 @@ class JSFunction : public js::NativeObject
class {
friend class JSFunction;
js::Native func_; /* native method pointer or null */
const JSJitInfo* jitinfo_; /* Information about this function to be
used by the JIT; use the accessor! */
union {
// Information about this function to be used by the JIT, only
// used if isBuiltinNative(); use the accessor!
const JSJitInfo* jitInfo_;
// asm.js function index, only used if isAsmJSNative().
size_t asmJSFuncIndex_;
} extra;
} native;
struct {
JSObject* env_; /* environment for new activations */
@ -137,7 +141,7 @@ class JSFunction : public js::NativeObject
class {
friend class JSFunction;
js::Native native_; // The native for interpreter wasm calls.
void* jitEntry_; // A pointer to a fast jit->wasm table entry.
void** jitEntry_; // A pointer to a fast jit->wasm table entry.
} wasm;
} u;
js::GCPtrAtom atom_; /* name for diagnostics and decompiling */
@ -194,7 +198,7 @@ class JSFunction : public js::NativeObject
/* Possible attributes of a native function: */
bool isAsmJSNative() const { return kind() == AsmJS; }
bool isWasmOptimized() const { return (flags() & WASM_OPTIMIZED); }
bool isBuiltinNative() const { return isNative() && !isAsmJSNative() && !isWasmOptimized(); }
bool isBuiltinNative() const { return isNativeWithCppEntry() && !isAsmJSNative(); }
// May be called from the JIT with the jitEntry_ field.
bool isNativeWithJitEntry() const { return isNative() && isWasmOptimized(); }
@ -247,6 +251,9 @@ class JSFunction : public js::NativeObject
return nonLazyScript()->hasBaselineScript() || nonLazyScript()->hasIonScript();
}
bool hasJitEntry() const {
return hasScript() || isNativeWithJitEntry();
}
/* Compound attributes: */
bool isBuiltin() const {
@ -583,36 +590,59 @@ class JSFunction : public js::NativeObject
return isInterpreted() ? nullptr : native();
}
void initNative(js::Native native, const JSJitInfo* jitinfo) {
void initNative(js::Native native, const JSJitInfo* jitInfo) {
MOZ_ASSERT(isNativeWithCppEntry());
MOZ_ASSERT_IF(jitInfo, isBuiltinNative());
MOZ_ASSERT(native);
u.native.func_ = native;
u.native.jitinfo_ = jitinfo;
u.native.extra.jitInfo_ = jitInfo;
}
bool hasJitInfo() const {
return isNativeWithCppEntry() && u.native.jitinfo_;
return isBuiltinNative() && u.native.extra.jitInfo_;
}
const JSJitInfo* jitInfo() const {
MOZ_ASSERT(hasJitInfo());
return u.native.jitinfo_;
return u.native.extra.jitInfo_;
}
void setJitInfo(const JSJitInfo* data) {
MOZ_ASSERT(isNativeWithCppEntry());
u.native.jitinfo_ = data;
MOZ_ASSERT(isBuiltinNative());
u.native.extra.jitInfo_ = data;
}
// Wasm natives are optimized and have a jit entry.
void initWasmNative(js::Native native) {
MOZ_ASSERT(isNativeWithJitEntry());
MOZ_ASSERT(native);
u.wasm.native_ = native;
u.wasm.jitEntry_ = nullptr;
}
void setWasmJitEntry(void* entry) {
void setWasmJitEntry(void** entry) {
MOZ_ASSERT(isNativeWithJitEntry());
MOZ_ASSERT(entry);
MOZ_ASSERT(!u.wasm.jitEntry_);
u.wasm.jitEntry_ = entry;
}
void** wasmJitEntry() const {
MOZ_ASSERT(isNativeWithJitEntry());
MOZ_ASSERT(u.wasm.jitEntry_);
return u.wasm.jitEntry_;
}
// AsmJS functions store the func index in the jitinfo slot, since these
// don't have a jit info associated.
void setAsmJSIndex(uint32_t funcIndex) {
MOZ_ASSERT(isAsmJSNative());
MOZ_ASSERT(!isWasmOptimized());
MOZ_ASSERT(!u.native.extra.asmJSFuncIndex_);
static_assert(offsetof(U, native.extra.asmJSFuncIndex_) == offsetof(U, wasm.jitEntry_),
"asm.js func index and wasm jit entry pointer must be at the same location");
u.native.extra.asmJSFuncIndex_ = funcIndex;
}
uint32_t asmJSFuncIndex() const {
MOZ_ASSERT(isAsmJSNative());
MOZ_ASSERT(!isWasmOptimized());
return u.native.extra.asmJSFuncIndex_;
}
bool isDerivedClassConstructor();
@ -638,7 +668,7 @@ class JSFunction : public js::NativeObject
}
static unsigned offsetOfJitInfo() {
return offsetof(JSFunction, u.native.jitinfo_);
return offsetof(JSFunction, u.native.extra.jitInfo_);
}
inline void trace(JSTracer* trc);
@ -821,10 +851,10 @@ class FunctionExtended : public JSFunction
static const unsigned WASM_INSTANCE_SLOT = 0;
/*
* wasm/asm.js exported functions store the function index of the exported
* function in the original module.
* wasm/asm.js exported functions store the wasm::TlsData pointer of their
* instance.
*/
static const unsigned WASM_FUNC_INDEX_SLOT = 1;
static const unsigned WASM_TLSDATA_SLOT = 1;
/*
* asm.js module functions store their WasmModuleObject in the first slot.

View File

@ -896,6 +896,12 @@ class JSScript : public js::gc::TenuredCell
js::MutableHandle<JS::GCVector<js::Scope*>> scopes);
private:
// Pointer to baseline->method()->raw(), ion->method()->raw(), a wasm jit
// entry, the JIT's EnterInterpreter stub, or the lazy link stub. Must be
// non-null.
uint8_t* jitCodeRaw_;
uint8_t* jitCodeSkipArgCheck_;
js::SharedScriptData* scriptData_;
public:
uint8_t* data; /* pointer to variable-length data array (see
@ -927,13 +933,6 @@ class JSScript : public js::gc::TenuredCell
/* Information used to re-lazify a lazily-parsed interpreted function. */
js::LazyScript* lazyScript;
/*
* Pointer to baseline->method()->raw(), ion->method()->raw(), the JIT's
* EnterInterpreter stub, or the lazy link stub. Must be non-null.
*/
uint8_t* jitCodeRaw_;
uint8_t* jitCodeSkipArgCheck_;
// 32-bit fields.
uint32_t dataSize_; /* size of the used part of the data array */
@ -1584,10 +1583,10 @@ class JSScript : public js::gc::TenuredCell
static size_t offsetOfIonScript() {
return offsetof(JSScript, ion);
}
static size_t offsetOfJitCodeRaw() {
static constexpr size_t offsetOfJitCodeRaw() {
return offsetof(JSScript, jitCodeRaw_);
}
static size_t offsetOfJitCodeSkipArgCheck() {
static constexpr size_t offsetOfJitCodeSkipArgCheck() {
return offsetof(JSScript, jitCodeSkipArgCheck_);
}
uint8_t* jitCodeRaw() const {

View File

@ -859,7 +859,7 @@ js::CreateWasmBuffer(JSContext* cx, const wasm::Limits& memory,
}
#ifndef WASM_HUGE_MEMORY
if (sizeof(void*) == 8 && maxSize && maxSize.value() == UINT32_MAX) {
if (sizeof(void*) == 8 && maxSize && maxSize.value() >= (UINT32_MAX - wasm::PageSize)) {
// On 64-bit platforms that don't define WASM_HUGE_MEMORY
// clamp maxSize to smaller value that satisfies the 32-bit invariants
// maxSize + wasm::PageSize < UINT32_MAX and maxSize % wasm::PageSize == 0

View File

@ -76,14 +76,11 @@ GetTopProfilingJitFrame(Activation* act)
return nullptr;
// Skip wasm frames that might be in the way.
JitFrameIter iter(jitActivation);
while (!iter.done() && iter.isWasm())
++iter;
if (!iter.isJSJit())
OnlyJSJitFrameIter iter(jitActivation);
if (iter.done())
return nullptr;
jit::JSJitProfilingFrameIterator jitIter(iter.asJSJit().fp());
jit::JSJitProfilingFrameIterator jitIter((jit::CommonFrameLayout*) iter.frame().fp());
MOZ_ASSERT(!jitIter.done());
return jitIter.fp();
}

View File

@ -351,16 +351,17 @@ struct JSRuntime : public js::MallocProvider<JSRuntime>
/*
* The start of the range stored in the profiler sample buffer, as measured
* after the most recent sample.
* All JitcodeGlobalMap entries referenced from a given sample are assigned
* the buffer position of the START of the sample. The buffer entries that
* reference the JitcodeGlobalMap entries will only ever be read from the
* buffer while the entire sample is still inside the buffer; if some
* buffer entries at the start of the sample have left the buffer, the
* entire sample will be considered inaccessible.
* All JitcodeGlobalTable entries referenced from a given sample are
* assigned the buffer position of the START of the sample. The buffer
* entries that reference the JitcodeGlobalTable entries will only ever be
* read from the buffer while the entire sample is still inside the buffer;
* if some buffer entries at the start of the sample have left the buffer,
* the entire sample will be considered inaccessible.
* This means that, once profilerSampleBufferRangeStart_ advances beyond
* the sample position that's stored on a JitcodeGlobalMap entry, the buffer
* entries that reference this JitcodeGlobalMap entry will be considered
* inaccessible, and those JitcodeGlobalMap entry can be disposed of.
* the sample position that's stored on a JitcodeGlobalTable entry, the
* buffer entries that reference this JitcodeGlobalTable entry will be
* considered inaccessible, and those JitcodeGlobalTable entry can be
* disposed of.
*/
mozilla::Atomic<uint64_t, mozilla::ReleaseAcquire> profilerSampleBufferRangeStart_;

View File

@ -568,6 +568,33 @@ JitFrameIter::settle()
MOZ_ASSERT(!asWasm().done());
return;
}
if (isWasm()) {
const wasm::WasmFrameIter& wasmFrame = asWasm();
if (!wasmFrame.unwoundIonCallerFP())
return;
// Transition from wasm frames to jit frames: we're on the
// jit-to-wasm fast path. The current stack layout is as follows:
// (stack grows downward)
//
// [--------------------]
// [JIT FRAME ]
// [WASM JIT ENTRY FRAME] <-- we're here
//
// The wasm iterator has saved the previous jit frame pointer for us.
MOZ_ASSERT(wasmFrame.done());
uint8_t* prevFP = wasmFrame.unwoundIonCallerFP();
if (mustUnwindActivation_)
act_->setJSExitFP(prevFP);
iter_.destroy();
iter_.construct<jit::JSJitFrameIter>(act_, prevFP);
MOZ_ASSERT(!asJSJit().done());
return;
}
}
void
@ -1719,28 +1746,43 @@ jit::JitActivation::traceIonRecovery(JSTracer* trc)
it->trace(trc);
}
void
bool
jit::JitActivation::startWasmInterrupt(const JS::ProfilingFrameIterator::RegisterState& state)
{
// fp may be null when first entering wasm code from an interpreter entry
// stub.
if (!state.fp)
return false;
MOZ_ASSERT(state.pc);
MOZ_ASSERT(state.fp);
// Execution can only be interrupted in function code. Afterwards, control
// flow does not reenter function code and thus there can be no
// interrupt-during-interrupt.
bool ignoredUnwound;
bool unwound;
wasm::UnwindState unwindState;
MOZ_ALWAYS_TRUE(wasm::StartUnwinding(state, &unwindState, &ignoredUnwound));
MOZ_ALWAYS_TRUE(wasm::StartUnwinding(state, &unwindState, &unwound));
void* pc = unwindState.pc;
MOZ_ASSERT(wasm::LookupCode(pc)->lookupRange(pc)->isFunction());
if (unwound) {
// In the prologue/epilogue, FP might have been fixed up to the
// caller's FP, and the caller could be the jit entry. Ignore this
// interrupt, in this case, because FP points to a jit frame and not a
// wasm one.
const wasm::CodeRange* codeRange = wasm::LookupCode(pc)->lookupRange(pc);
if (codeRange->isJitEntry())
return false;
MOZ_ASSERT(codeRange->isFunction());
}
cx_->runtime()->wasmUnwindData.ref().construct<wasm::InterruptData>(pc, state.pc);
setWasmExitFP(unwindState.fp);
MOZ_ASSERT(compartment() == unwindState.fp->tls->instance->compartment());
MOZ_ASSERT(isWasmInterrupted());
return true;
}
void
@ -1961,6 +2003,19 @@ JS::ProfilingFrameIterator::settleFrames()
new (storage()) wasm::ProfilingFrameIterator(*activation_->asJit(), fp);
kind_ = Kind::Wasm;
MOZ_ASSERT(!wasmIter().done());
return;
}
if (isWasm() && wasmIter().done() && wasmIter().unwoundIonCallerFP()) {
uint8_t* fp = wasmIter().unwoundIonCallerFP();
iteratorDestroy();
// Using this ctor will skip the first ion->wasm frame, which is
// needed because the profiling iterator doesn't know how to unwind
// when the callee has no script.
new (storage()) jit::JSJitProfilingFrameIterator((jit::CommonFrameLayout*)fp);
kind_ = Kind::JSJit;
MOZ_ASSERT(!jsJitIter().done());
return;
}
}
@ -1999,7 +2054,7 @@ JS::ProfilingFrameIterator::iteratorConstruct(const RegisterState& state)
return;
}
new (storage()) jit::JSJitProfilingFrameIterator(cx_, state);
new (storage()) jit::JSJitProfilingFrameIterator(cx_, state.pc);
kind_ = Kind::JSJit;
}
@ -2020,7 +2075,8 @@ JS::ProfilingFrameIterator::iteratorConstruct()
return;
}
new (storage()) jit::JSJitProfilingFrameIterator(activation->jsExitFP());
auto* fp = (jit::ExitFrameLayout*) activation->jsExitFP();
new (storage()) jit::JSJitProfilingFrameIterator(fp);
kind_ = Kind::JSJit;
}

View File

@ -1667,7 +1667,8 @@ class JitActivation : public Activation
// simulator) and cleared by WasmHandleExecutionInterrupt or WasmHandleThrow
// when the interrupt is handled.
void startWasmInterrupt(const JS::ProfilingFrameIterator::RegisterState& state);
// Returns true iff we've entered interrupted state.
bool startWasmInterrupt(const JS::ProfilingFrameIterator::RegisterState& state);
void finishWasmInterrupt();
bool isWasmInterrupted() const;
void* wasmInterruptUnwindPC() const;

View File

@ -177,10 +177,6 @@ wasm::HandleThrow(JSContext* cx, WasmFrameIter& iter)
// is necessary to prevent a DebugFrame from being observed again after we
// just called onLeaveFrame (which would lead to the frame being re-added
// to the map of live frames, right as it becomes trash).
//
// TODO(bug 1360211): when JitActivation and WasmActivation get merged,
// we'll be able to switch to ion / other wasm state from here, and we'll
// need to do things differently.
MOZ_ASSERT(CallingActivation() == iter.activation());
MOZ_ASSERT(!iter.done());
@ -311,6 +307,13 @@ WasmReportUnalignedAccess()
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_WASM_UNALIGNED_ACCESS);
}
static void
WasmReportInt64JSCall()
{
JSContext* cx = TlsContext.get();
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_I64_TYPE);
}
static int32_t
CoerceInPlace_ToInt32(Value* rawVal)
{
@ -343,6 +346,43 @@ CoerceInPlace_ToNumber(Value* rawVal)
return true;
}
static int32_t
CoerceInPlace_JitEntry(int funcExportIndex, TlsData* tlsData, Value* argv)
{
JSContext* cx = CallingActivation()->cx();
const Code& code = tlsData->instance->code();
const FuncExport& fe = code.metadata(code.stableTier()).funcExports[funcExportIndex];
for (size_t i = 0; i < fe.sig().args().length(); i++) {
HandleValue arg = HandleValue::fromMarkedLocation(&argv[i]);
switch (fe.sig().args()[i]) {
case ValType::I32: {
int32_t i32;
if (!ToInt32(cx, arg, &i32))
return false;
argv[i] = Int32Value(i32);
break;
}
case ValType::F32:
case ValType::F64: {
double dbl;
if (!ToNumber(cx, arg, &dbl))
return false;
// No need to convert double-to-float for f32, it's done inline
// in the wasm stub later.
argv[i] = DoubleValue(dbl);
break;
}
default: {
MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
}
}
}
return true;
}
static int64_t
DivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi, uint32_t y_lo)
{
@ -465,6 +505,9 @@ AddressOf(SymbolicAddress imm, ABIFunctionType* abiType)
case SymbolicAddress::ReportUnalignedAccess:
*abiType = Args_General0;
return FuncCast(WasmReportUnalignedAccess, *abiType);
case SymbolicAddress::ReportInt64JSCall:
*abiType = Args_General0;
return FuncCast(WasmReportInt64JSCall, *abiType);
case SymbolicAddress::CallImport_Void:
*abiType = Args_General4;
return FuncCast(Instance::callImport_void, *abiType);
@ -483,6 +526,9 @@ AddressOf(SymbolicAddress imm, ABIFunctionType* abiType)
case SymbolicAddress::CoerceInPlace_ToNumber:
*abiType = Args_General1;
return FuncCast(CoerceInPlace_ToNumber, *abiType);
case SymbolicAddress::CoerceInPlace_JitEntry:
*abiType = Args_General3;
return FuncCast(CoerceInPlace_JitEntry, *abiType);
case SymbolicAddress::ToInt32:
*abiType = Args_Int_Double;
return FuncCast<int32_t (double)>(JS::ToInt32, *abiType);
@ -619,7 +665,7 @@ wasm::NeedsBuiltinThunk(SymbolicAddress sym)
case SymbolicAddress::ReportTrap: // GenerateTrapExit
case SymbolicAddress::OldReportTrap: // GenerateOldTrapExit
case SymbolicAddress::ReportOutOfBounds: // GenerateOutOfBoundsExit
case SymbolicAddress::ReportUnalignedAccess: // GeneratesUnalignedExit
case SymbolicAddress::ReportUnalignedAccess: // GenerateUnalignedExit
case SymbolicAddress::CallImport_Void: // GenerateImportInterpExit
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
@ -669,6 +715,8 @@ wasm::NeedsBuiltinThunk(SymbolicAddress sym)
case SymbolicAddress::WaitI32:
case SymbolicAddress::WaitI64:
case SymbolicAddress::Wake:
case SymbolicAddress::CoerceInPlace_JitEntry:
case SymbolicAddress::ReportInt64JSCall:
return true;
case SymbolicAddress::Limit:
break;

View File

@ -187,6 +187,10 @@ SendCodeRangesToProfiler(const CodeSegment& cs, const Bytes& bytecode, const Met
if (!AppendToString(" slow entry", &name))
return;
writePerfSpewerWasmMap(start, size, file, name.begin());
} else if (codeRange.isJitEntry()) {
if (!AppendToString(" fast entry", &name))
return;
writePerfSpewerWasmMap(start, size, file, name.begin());
} else if (codeRange.isImportInterpExit()) {
if (!AppendToString(" slow exit", &name))
return;
@ -715,10 +719,60 @@ Metadata::getFuncName(const Bytes* maybeBytecode, uint32_t funcIndex, UTF8Bytes*
name->append(afterFuncIndex, strlen(afterFuncIndex));
}
Code::Code(UniqueCodeSegment tier, const Metadata& metadata, UniqueJumpTable maybeJumpTable)
bool
JumpTables::init(CompileMode mode, const CodeSegment& cs, const CodeRangeVector& codeRanges)
{
// Note a fast jit entry has two addresses, to be compatible with
// ion/baseline functions which have the raw vs checked args entries,
// both used all over the place in jit calls. This allows the fast entries
// to be compatible with jit code pointer loading routines.
// We can use the same entry for both kinds of jit entries since a wasm
// entry knows how to convert any kind of arguments and doesn't assume
// any input types.
static_assert(JSScript::offsetOfJitCodeRaw() == 0,
"wasm fast jit entry is at (void*) jit[2*funcIndex]");
static_assert(JSScript::offsetOfJitCodeSkipArgCheck() == sizeof(void*),
"wasm fast jit entry is also at (void*) jit[2*funcIndex+1]");
mode_ = mode;
size_t numFuncs = 0;
for (const CodeRange& cr : codeRanges) {
if (cr.isFunction())
numFuncs++;
}
numFuncs_ = numFuncs;
if (mode_ == CompileMode::Tier1) {
tiering_ = TablePointer(js_pod_calloc<void*>(numFuncs));
if (!tiering_)
return false;
}
// The number of jit entries is overestimated, but it is simpler when
// filling/looking up the jit entries and safe (worst case we'll crash
// because of a null deref when trying to call the jit entry of an
// unexported function).
jit_ = TablePointer(js_pod_calloc<void*>(2 * numFuncs));
if (!jit_)
return false;
uint8_t* codeBase = cs.base();
for (const CodeRange& cr : codeRanges) {
if (cr.isFunction())
setTieringEntry(cr.funcIndex(), codeBase + cr.funcTierEntry());
else if (cr.isJitEntry())
setJitEntry(cr.funcIndex(), codeBase + cr.begin());
}
return true;
}
Code::Code(UniqueCodeSegment tier, const Metadata& metadata, JumpTables&& maybeJumpTables)
: metadata_(&metadata),
profilingLabels_(mutexid::WasmCodeProfilingLabels, CacheableCharsVector()),
jumpTable_(Move(maybeJumpTable))
jumpTables_(Move(maybeJumpTables))
{
segment1_ = takeOwnership(Move(tier));
}
@ -736,6 +790,14 @@ Code::setTier2(UniqueCodeSegment segment) const
segment2_ = takeOwnership(Move(segment));
}
uint32_t
Code::lookupFuncIndex(JSFunction* fun) const
{
if (fun->isAsmJSNative())
return fun->asmJSFuncIndex();
return lookupRange(*fun->wasmJitEntry())->funcIndex();
}
Tiers
Code::tiers() const
{
@ -986,7 +1048,8 @@ Code::addSizeOfMiscIfNotSeen(MallocSizeOf mallocSizeOf,
*data += mallocSizeOf(this) +
metadata().sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenMetadata) +
profilingLabels_.lock()->sizeOfExcludingThis(mallocSizeOf);
profilingLabels_.lock()->sizeOfExcludingThis(mallocSizeOf) +
jumpTables_.sizeOfMiscIncludingThis(mallocSizeOf);
for (auto t : tiers())
segment(t).addSizeOfMisc(mallocSizeOf, code, data);
@ -1029,5 +1092,9 @@ Code::deserialize(const uint8_t* cursor, const SharedBytes& bytecode, const Link
segment1_ = takeOwnership(Move(codeSegment));
metadata_ = &metadata;
if (!jumpTables_.init(CompileMode::Once, *segment1_,
metadata.metadata(Tier::Serialized).codeRanges))
return nullptr;
return cursor;
}

View File

@ -450,7 +450,49 @@ class Metadata : public ShareableBase<Metadata>, public MetadataCacheablePod
typedef RefPtr<Metadata> MutableMetadata;
typedef RefPtr<const Metadata> SharedMetadata;
typedef mozilla::UniquePtr<void*[], JS::FreePolicy> UniqueJumpTable;
// Jump tables to take tiering into account, when calling either from wasm to
// wasm (through rabaldr) or from jit to wasm (jit entry).
class JumpTables
{
using TablePointer = mozilla::UniquePtr<void*[], JS::FreePolicy>;
CompileMode mode_;
TablePointer tiering_;
TablePointer jit_;
size_t numFuncs_;
public:
bool init(CompileMode mode, const CodeSegment& cs, const CodeRangeVector& codeRanges);
void setJitEntry(size_t i, void* target) const {
// See comment in wasm::Module::finishTier2 and JumpTables::init.
MOZ_ASSERT(i < numFuncs_);
jit_.get()[2 * i] = target;
jit_.get()[2 * i + 1] = target;
}
void** getAddressOfJitEntry(size_t i) const {
MOZ_ASSERT(i < numFuncs_);
MOZ_ASSERT(jit_.get()[2 * i]);
return &jit_.get()[2 * i];
}
void setTieringEntry(size_t i, void* target) const {
MOZ_ASSERT(i < numFuncs_);
// See comment in wasm::Module::finishTier2.
if (mode_ == CompileMode::Tier1)
tiering_.get()[i] = target;
}
void** tiering() const {
return tiering_.get();
}
size_t sizeOfMiscIncludingThis(MallocSizeOf mallocSizeOf) const {
return mallocSizeOf(this) +
2 * sizeof(void*) * numFuncs_ +
(tiering_ ? sizeof(void*) : numFuncs_);
}
};
// Code objects own executable code and the metadata that describe it. A single
// Code object is normally shared between a module and all its instances.
@ -463,7 +505,7 @@ class Code : public ShareableBase<Code>
mutable UniqueConstCodeSegment segment2_; // Access only when hasTier2() is true
SharedMetadata metadata_;
ExclusiveData<CacheableCharsVector> profilingLabels_;
UniqueJumpTable jumpTable_;
JumpTables jumpTables_;
UniqueConstCodeSegment takeOwnership(UniqueCodeSegment segment) const {
segment->initCode(this);
@ -472,9 +514,14 @@ class Code : public ShareableBase<Code>
public:
Code();
Code(UniqueCodeSegment tier, const Metadata& metadata, UniqueJumpTable maybeJumpTable);
Code(UniqueCodeSegment tier, const Metadata& metadata, JumpTables&& maybeJumpTables);
void** jumpTable() const { return jumpTable_.get(); }
void setTieringEntry(size_t i, void* target) const { jumpTables_.setTieringEntry(i, target); }
void** tieringJumpTable() const { return jumpTables_.tiering(); }
void setJitEntry(size_t i, void* target) const { jumpTables_.setJitEntry(i, target); }
void** getAddressOfJitEntry(size_t i) const { return jumpTables_.getAddressOfJitEntry(i); }
uint32_t lookupFuncIndex(JSFunction* fun) const;
bool hasTier2() const { return metadata_->hasTier2(); }
void setTier2(UniqueCodeSegment segment) const;

View File

@ -19,6 +19,7 @@
#include "wasm/WasmFrameIter.h"
#include "wasm/WasmInstance.h"
#include "wasm/WasmStubs.h"
#include "jit/MacroAssembler-inl.h"
@ -38,6 +39,7 @@ WasmFrameIter::WasmFrameIter(JitActivation* activation, wasm::Frame* fp)
codeRange_(nullptr),
lineOrBytecode_(0),
fp_(fp ? fp : activation->wasmExitFP()),
unwoundIonCallerFP_(nullptr),
unwind_(Unwind::False)
{
MOZ_ASSERT(fp_);
@ -83,10 +85,11 @@ WasmFrameIter::WasmFrameIter(JitActivation* activation, wasm::Frame* fp)
// Otherwise, execution exits wasm code via an exit stub which sets exitFP
// to the exit stub's frame. Thus, in this case, we want to start iteration
// at the caller of the exit frame, whose Code, CodeRange and CallSite are
// indicated by the returnAddress of the exit stub's frame.
// indicated by the returnAddress of the exit stub's frame. If the caller
// was Ion, we can just skip the wasm frames.
popFrame();
MOZ_ASSERT(!done());
MOZ_ASSERT(!done() || unwoundIonCallerFP_);
}
bool
@ -135,7 +138,8 @@ WasmFrameIter::popFrame()
codeRange_ = nullptr;
if (unwind_ == Unwind::True) {
// TODO with bug 1319203, there may be other JIT frames above.
// We're exiting via the interpreter entry; we can safely reset
// exitFP.
activation_->setWasmExitFP(nullptr);
unwoundAddressOfReturnAddress_ = &prevFP->returnAddress;
}
@ -146,10 +150,26 @@ WasmFrameIter::popFrame()
void* returnAddress = prevFP->returnAddress;
code_ = &fp_->tls->instance->code();
MOZ_ASSERT(code_ == LookupCode(returnAddress));
code_ = LookupCode(returnAddress);
codeRange_ = code_->lookupRange(returnAddress);
if (codeRange_->isJitEntry()) {
unwoundIonCallerFP_ = (uint8_t*) fp_;
fp_ = nullptr;
code_ = nullptr;
codeRange_ = nullptr;
if (unwind_ == Unwind::True) {
activation_->setJSExitFP(unwoundIonCallerFP_);
unwoundAddressOfReturnAddress_ = &prevFP->returnAddress;
}
MOZ_ASSERT(done());
return;
}
MOZ_ASSERT(code_ == &fp_->tls->instance->code());
MOZ_ASSERT(codeRange_->kind() == CodeRange::Function);
const CallSite* callsite = code_->lookupCallSite(returnAddress);
@ -294,6 +314,7 @@ static const unsigned PoppedTLSReg = 5;
#else
# error "Unknown architecture!"
#endif
static constexpr unsigned SetJitEntryFP = PushedRetAddr + SetFP - PushedFP;
static void
@ -551,8 +572,8 @@ void
wasm::GenerateJitExitPrologue(MacroAssembler& masm, unsigned framePushed, CallableOffsets* offsets)
{
masm.haltingAlign(CodeAlignment);
GenerateCallablePrologue(masm, framePushed, ExitReason::None(),
&offsets->begin, nullptr, CompileMode::Once, 0);
GenerateCallablePrologue(masm, framePushed, ExitReason::None(), &offsets->begin, nullptr,
CompileMode::Once, 0);
AssertNoWasmExitFPInJitExit(masm);
masm.setFramePushed(framePushed);
}
@ -567,6 +588,31 @@ wasm::GenerateJitExitEpilogue(MacroAssembler& masm, unsigned framePushed, Callab
masm.setFramePushed(0);
}
void
wasm::GenerateJitEntryPrologue(MacroAssembler& masm, Offsets* offsets)
{
masm.haltingAlign(CodeAlignment);
{
#if defined(JS_CODEGEN_ARM)
AutoForbidPools afp(&masm, /* number of instructions in scope = */ 2);
offsets->begin = masm.currentOffset();
MOZ_ASSERT(BeforePushRetAddr == 0);
masm.push(lr);
#else
// The x86/x64 call instruction pushes the return address.
offsets->begin = masm.currentOffset();
#endif
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - offsets->begin);
// Save jit frame pointer, so unwinding from wasm to jit frames is trivial.
masm.moveStackPtrTo(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), SetJitEntryFP == masm.currentOffset() - offsets->begin);
}
masm.setFramePushed(0);
}
/*****************************************************************************/
// ProfilingFrameIterator
@ -576,6 +622,7 @@ ProfilingFrameIterator::ProfilingFrameIterator()
callerFP_(nullptr),
callerPC_(nullptr),
stackAddress_(nullptr),
unwoundIonCallerFP_(nullptr),
exitReason_(ExitReason::Fixed::None)
{
MOZ_ASSERT(done());
@ -587,6 +634,7 @@ ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation)
callerFP_(nullptr),
callerPC_(nullptr),
stackAddress_(nullptr),
unwoundIonCallerFP_(nullptr),
exitReason_(activation.wasmExitReason())
{
initFromExitFP(activation.wasmExitFP());
@ -598,6 +646,7 @@ ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation,
callerFP_(nullptr),
callerPC_(nullptr),
stackAddress_(nullptr),
unwoundIonCallerFP_(nullptr),
exitReason_(ExitReason::Fixed::ImportJit)
{
MOZ_ASSERT(fp);
@ -614,11 +663,16 @@ AssertMatchesCallSite(void* callerPC, Frame* callerFP)
const CodeRange* callerCodeRange = code->lookupRange(callerPC);
MOZ_ASSERT(callerCodeRange);
if (callerCodeRange->kind() == CodeRange::InterpEntry) {
if (callerCodeRange->isInterpEntry()) {
MOZ_ASSERT(callerFP == nullptr);
return;
}
if (callerCodeRange->isJitEntry()) {
MOZ_ASSERT(callerFP != nullptr);
return;
}
const CallSite* callsite = code->lookupCallSite(callerPC);
MOZ_ASSERT(callsite);
#endif
@ -642,14 +696,20 @@ ProfilingFrameIterator::initFromExitFP(const Frame* fp)
// This means that the innermost frame is skipped. This is fine because:
// - for import exit calls, the innermost frame is a thunk, so the first
// frame that shows up is the function calling the import;
// - for Math and other builtin calls as well as interrupts, we note the absence
// of an exit reason and inject a fake "builtin" frame; and
// - for async interrupts, we just accept that we'll lose the innermost frame.
// - for Math and other builtin calls as well as interrupts, we note the
// absence of an exit reason and inject a fake "builtin" frame; and
// - for async interrupts, we just accept that we'll lose the innermost
// frame.
switch (codeRange_->kind()) {
case CodeRange::InterpEntry:
callerPC_ = nullptr;
callerFP_ = nullptr;
break;
case CodeRange::JitEntry:
callerPC_ = nullptr;
callerFP_ = nullptr;
unwoundIonCallerFP_ = (uint8_t*) fp->callerFP;
break;
case CodeRange::Function:
fp = fp->callerFP;
callerPC_ = fp->returnAddress;
@ -838,6 +898,22 @@ js::wasm::StartUnwinding(const RegisterState& registers, UnwindState* unwindStat
// entry trampoline also doesn't GeneratePrologue/Epilogue so we can't
// use the general unwinding logic above.
break;
case CodeRange::JitEntry:
// There's a jit frame above the current one; we don't care about pc
// since the Jit entry frame is a jit frame which can be considered as
// an exit frame.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
if (offsetFromEntry < PushedRetAddr) {
// We haven't pushed the jit return address yet, thus the jit
// frame is incomplete. During profiling frame iteration, it means
// that the jit profiling frame iterator won't be able to unwind
// this frame; drop it.
return false;
}
#endif
fixedFP = offsetFromEntry < SetJitEntryFP ? (Frame*) sp : fp;
fixedPC = nullptr;
break;
case CodeRange::Throw:
// The throw stub executes a small number of instructions before popping
// the entire activation. To simplify testing, we simply pretend throw
@ -864,6 +940,7 @@ ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation,
callerFP_(nullptr),
callerPC_(nullptr),
stackAddress_(nullptr),
unwoundIonCallerFP_(nullptr),
exitReason_(ExitReason::Fixed::None)
{
// Let wasmExitFP take precedence to StartUnwinding when it is set since
@ -890,6 +967,9 @@ ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation,
callerPC_ = unwindState.fp->returnAddress;
}
if (unwindState.codeRange->isJitEntry())
unwoundIonCallerFP_ = (uint8_t*) callerFP_;
code_ = unwindState.code;
codeRange_ = unwindState.codeRange;
stackAddress_ = state.sp;
@ -906,6 +986,15 @@ ProfilingFrameIterator::operator++()
return;
}
if (unwoundIonCallerFP_) {
MOZ_ASSERT(codeRange_->isJitEntry());
callerPC_ = nullptr;
callerFP_ = nullptr;
codeRange_ = nullptr;
MOZ_ASSERT(done());
return;
}
if (!callerPC_) {
MOZ_ASSERT(!callerFP_);
codeRange_ = nullptr;
@ -921,12 +1010,18 @@ ProfilingFrameIterator::operator++()
return;
}
code_ = &callerFP_->tls->instance->code();
MOZ_ASSERT(code_ == LookupCode(callerPC_));
code_ = LookupCode(callerPC_);
codeRange_ = code_->lookupRange(callerPC_);
MOZ_ASSERT(codeRange_);
if (codeRange_->isJitEntry()) {
unwoundIonCallerFP_ = (uint8_t*) callerFP_;
MOZ_ASSERT(!done());
return;
}
MOZ_ASSERT(code_ == &callerFP_->tls->instance->code());
switch (codeRange_->kind()) {
case CodeRange::Function:
case CodeRange::ImportJitExit:
@ -945,6 +1040,8 @@ ProfilingFrameIterator::operator++()
break;
case CodeRange::InterpEntry:
MOZ_CRASH("should have had null caller fp");
case CodeRange::JitEntry:
MOZ_CRASH("should have been guarded above");
case CodeRange::Interrupt:
case CodeRange::Throw:
MOZ_CRASH("code range doesn't have frame");
@ -1049,6 +1146,10 @@ ThunkedNativeToDescription(SymbolicAddress func)
return "call to native i64.wait (in wasm)";
case SymbolicAddress::Wake:
return "call to native wake (in wasm)";
case SymbolicAddress::CoerceInPlace_JitEntry:
return "out-of-line coercion for jit entry arguments (in wasm)";
case SymbolicAddress::ReportInt64JSCall:
return "jit call to int64 wasm function";
#if defined(JS_CODEGEN_MIPS32)
case SymbolicAddress::js_jit_gAtomic64Lock:
MOZ_CRASH();
@ -1066,9 +1167,10 @@ ProfilingFrameIterator::label() const
// Use the same string for both time inside and under so that the two
// entries will be coalesced by the profiler.
static const char* importJitDescription = "fast FFI trampoline (in wasm)";
static const char* importInterpDescription = "slow FFI trampoline (in wasm)";
static const char* builtinNativeDescription = "fast FFI trampoline to native (in wasm)";
// Must be kept in sync with /tools/profiler/tests/test_asm.js
static const char* importJitDescription = "fast exit trampoline (in wasm)";
static const char* importInterpDescription = "slow exit trampoline (in wasm)";
static const char* builtinNativeDescription = "fast exit trampoline to native (in wasm)";
static const char* trapDescription = "trap handling (in wasm)";
static const char* debugTrapDescription = "debug trap handling (in wasm)";
@ -1093,6 +1195,7 @@ ProfilingFrameIterator::label() const
switch (codeRange_->kind()) {
case CodeRange::Function: return code_->profilingLabel(codeRange_->funcIndex());
case CodeRange::InterpEntry: return "slow entry trampoline (in wasm)";
case CodeRange::JitEntry: return "fast entry trampoline (in wasm)";
case CodeRange::ImportJitExit: return importJitDescription;
case CodeRange::BuiltinThunk: return builtinNativeDescription;
case CodeRange::ImportInterpExit: return importInterpDescription;

View File

@ -29,6 +29,7 @@ namespace js {
namespace jit {
class MacroAssembler;
struct Register;
class Label;
} // namespace jit
namespace wasm {
@ -69,6 +70,7 @@ class WasmFrameIter
const CodeRange* codeRange_;
unsigned lineOrBytecode_;
Frame* fp_;
uint8_t* unwoundIonCallerFP_;
Unwind unwind_;
void** unwoundAddressOfReturnAddress_;
@ -91,6 +93,7 @@ class WasmFrameIter
void** unwoundAddressOfReturnAddress() const;
bool debugEnabled() const;
DebugFrame* debugFrame() const;
uint8_t* unwoundIonCallerFP() const { return unwoundIonCallerFP_; }
};
enum class SymbolicAddress;
@ -164,6 +167,7 @@ class ProfilingFrameIterator
Frame* callerFP_;
void* callerPC_;
void* stackAddress_;
uint8_t* unwoundIonCallerFP_;
ExitReason exitReason_;
void initFromExitFP(const Frame* fp);
@ -188,6 +192,7 @@ class ProfilingFrameIterator
bool done() const { return !codeRange_; }
void* stackAddress() const { MOZ_ASSERT(!done()); return stackAddress_; }
uint8_t* unwoundIonCallerFP() const { MOZ_ASSERT(done()); return unwoundIonCallerFP_; }
const char* label() const;
};
@ -204,10 +209,15 @@ GenerateExitPrologue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason
void
GenerateExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
CallableOffsets* offsets);
void
GenerateJitExitPrologue(jit::MacroAssembler& masm, unsigned framePushed, CallableOffsets* offsets);
void
GenerateJitExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, CallableOffsets* offsets);
void
GenerateJitEntryPrologue(jit::MacroAssembler& masm, Offsets* offsets);
void
GenerateFunctionPrologue(jit::MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
FuncOffsets* offsets, CompileMode mode = CompileMode::Once,

View File

@ -510,6 +510,9 @@ ModuleGenerator::noteCodeRange(uint32_t codeRangeIndex, const CodeRange& codeRan
case CodeRange::InterpEntry:
metadataTier_->lookupFuncExport(codeRange.funcIndex()).initInterpEntryOffset(codeRange.begin());
break;
case CodeRange::JitEntry:
// Nothing to do: jit entries are linked in the jump tables.
break;
case CodeRange::ImportJitExit:
metadataTier_->funcImports[codeRange.funcIndex()].initJitExitOffset(codeRange.begin());
break;
@ -972,26 +975,6 @@ ModuleGenerator::finish(const ShareableBytes& bytecode)
return CodeSegment::create(tier(), masm_, bytecode, *linkDataTier_, *metadata_);
}
UniqueJumpTable
ModuleGenerator::createJumpTable(const CodeSegment& codeSegment)
{
MOZ_ASSERT(mode() == CompileMode::Tier1);
MOZ_ASSERT(!isAsmJS());
uint32_t tableSize = env_->numFuncs();
UniqueJumpTable jumpTable(js_pod_calloc<void*>(tableSize));
if (!jumpTable)
return nullptr;
uint8_t* codeBase = codeSegment.base();
for (const CodeRange& codeRange : metadataTier_->codeRanges) {
if (codeRange.isFunction())
jumpTable[codeRange.funcIndex()] = codeBase + codeRange.funcTierEntry();
}
return jumpTable;
}
SharedModule
ModuleGenerator::finishModule(const ShareableBytes& bytecode)
{
@ -1001,12 +984,9 @@ ModuleGenerator::finishModule(const ShareableBytes& bytecode)
if (!codeSegment)
return nullptr;
UniqueJumpTable maybeJumpTable;
if (mode() == CompileMode::Tier1) {
maybeJumpTable = createJumpTable(*codeSegment);
if (!maybeJumpTable)
return nullptr;
}
JumpTables jumpTables;
if (!jumpTables.init(mode(), *codeSegment, metadataTier_->codeRanges))
return nullptr;
UniqueConstBytes maybeDebuggingBytes;
if (env_->debugEnabled()) {
@ -1020,7 +1000,7 @@ ModuleGenerator::finishModule(const ShareableBytes& bytecode)
return nullptr;
}
SharedCode code = js_new<Code>(Move(codeSegment), *metadata_, Move(maybeJumpTable));
SharedCode code = js_new<Code>(Move(codeSegment), *metadata_, Move(jumpTables));
if (!code)
return nullptr;

View File

@ -204,7 +204,6 @@ class MOZ_STACK_CLASS ModuleGenerator
bool finishCode();
bool finishMetadata(const ShareableBytes& bytecode);
UniqueCodeSegment finish(const ShareableBytes& bytecode);
UniqueJumpTable createJumpTable(const CodeSegment& codeSegment);
bool isAsmJS() const { return env_->isAsmJS(); }
Tier tier() const { return env_->tier; }

View File

@ -391,7 +391,6 @@ Instance::Instance(JSContext* cx,
const ValVector& globalImports)
: compartment_(cx->compartment()),
object_(object),
jsJitArgsRectifier_(),
code_(code),
debug_(Move(debug)),
tlsData_(Move(tlsDataIn)),
@ -412,7 +411,7 @@ Instance::Instance(JSContext* cx,
tlsData()->instance = this;
tlsData()->cx = cx;
tlsData()->stackLimit = cx->stackLimitForJitCode(JS::StackForUntrustedScript);
tlsData()->jumpTable = code_->jumpTable();
tlsData()->jumpTable = code_->tieringJumpTable();
Tier callerTier = code_->bestTier();
@ -508,13 +507,11 @@ Instance::init(JSContext* cx)
}
}
if (!metadata(code_->bestTier()).funcImports.empty()) {
JitRuntime* jitRuntime = cx->runtime()->getJitRuntime(cx);
if (!jitRuntime)
return false;
jsJitArgsRectifier_ = jitRuntime->getArgumentsRectifier();
}
JitRuntime* jitRuntime = cx->runtime()->getJitRuntime(cx);
if (!jitRuntime)
return false;
jsJitArgsRectifier_ = jitRuntime->getArgumentsRectifier();
jsJitExceptionHandler_ = jitRuntime->getExceptionTail();
return true;
}

Some files were not shown because too many files have changed in this diff Show More