Merge autoland to mozilla-central. a=merge

Csoregi Natalia 2021-07-30 00:43:06 +03:00
commit 2aa97aea10
370 changed files with 39672 additions and 39203 deletions

View File

@ -82,6 +82,16 @@ git = "https://github.com/hsivonen/chardetng"
replace-with = "vendored-sources"
rev = "302c995f91f44cf26e77dc4758ad56c3ff0153ad"
[source."https://github.com/gfx-rs/naga"]
git = "https://github.com/gfx-rs/naga"
replace-with = "vendored-sources"
tag = "gfx-25"
[source."https://github.com/gfx-rs/gfx"]
git = "https://github.com/gfx-rs/gfx"
replace-with = "vendored-sources"
rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
[source."https://github.com/bytecodealliance/wasmtime"]
git = "https://github.com/bytecodealliance/wasmtime"
replace-with = "vendored-sources"

Cargo.lock generated
View File

@ -1192,15 +1192,6 @@ dependencies = [
"smallbitvec",
]
[[package]]
name = "drm-fourcc"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebbf3a5ed4671aabffefce172ff43d69c1f27dd2c6aea28e5212a70f32ada0cf"
dependencies = [
"serde",
]
[[package]]
name = "dtoa"
version = "0.4.8"
@ -1322,16 +1313,6 @@ dependencies = [
"serde",
]
[[package]]
name = "external-memory"
version = "0.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4dfe8d292b014422776a8c516862d2bff8a81b223a4461dfdc45f3862dc9d39"
dependencies = [
"bitflags",
"drm-fourcc",
]
[[package]]
name = "failure"
version = "0.1.8"
@ -1776,9 +1757,8 @@ dependencies = [
[[package]]
name = "gfx-auxil"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1694991b11d642680e82075a75c7c2bd75556b805efa7660b705689f05b1ab1c"
version = "0.9.0"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"fxhash",
"gfx-hal",
@ -1787,15 +1767,13 @@ dependencies = [
[[package]]
name = "gfx-backend-dx11"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f9e453baf3aaef2b0c354ce0b3d63d76402e406a59b64b7182d123cfa6635ae"
version = "0.8.0"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"arrayvec",
"bitflags",
"gfx-auxil",
"gfx-hal",
"gfx-renderdoc",
"libloading 0.7.0",
"log",
"parking_lot",
@ -1810,9 +1788,8 @@ dependencies = [
[[package]]
name = "gfx-backend-dx12"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21506399f64a3c4d389182a89a30073856ae33eb712315456b4fd8f39ee7682a"
version = "0.8.0"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"arrayvec",
"bit-set",
@ -1820,7 +1797,6 @@ dependencies = [
"d3d12",
"gfx-auxil",
"gfx-hal",
"gfx-renderdoc",
"log",
"parking_lot",
"range-alloc",
@ -1833,9 +1809,8 @@ dependencies = [
[[package]]
name = "gfx-backend-empty"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29c8f813c47791918aa00dc9c9ddf961d23fa8c2a5d869e6cb8ea84f944820f4"
version = "0.8.0"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"gfx-hal",
"log",
@ -1844,16 +1819,14 @@ dependencies = [
[[package]]
name = "gfx-backend-metal"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de85808e2a98994c6af925253f8a9593bc57180ef1ea137deab6d35cc949517"
version = "0.8.1"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"arrayvec",
"bitflags",
"block",
"cocoa-foundation",
"copyless",
"core-graphics-types",
"foreign-types",
"fxhash",
"gfx-auxil",
@ -1872,50 +1845,37 @@ dependencies = [
[[package]]
name = "gfx-backend-vulkan"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9861ec855acbbc65c0e4f966d761224886e811dc2c6d413a4776e9293d0e5c0"
version = "0.8.0"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"arrayvec",
"ash",
"byteorder",
"core-graphics-types",
"gfx-hal",
"gfx-renderdoc",
"inplace_it",
"libloading 0.7.0",
"log",
"naga",
"objc",
"parking_lot",
"raw-window-handle",
"renderdoc-sys",
"smallvec",
"winapi",
]
[[package]]
name = "gfx-hal"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fbb575ea793dd0507b3082f4f2cde62dc9f3cebd98f5cd49ba2a4da97a976fd"
version = "0.8.0"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
dependencies = [
"bitflags",
"external-memory",
"naga",
"raw-window-handle",
"thiserror",
]
[[package]]
name = "gfx-renderdoc"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8027995e247e2426d3a00d13f5191dd56c314bff02dc4b54cbf727f1ba9c40a"
dependencies = [
"libloading 0.7.0",
"log",
"renderdoc-sys",
]
[[package]]
name = "gkrust"
version = "0.1.0"
@ -2965,13 +2925,13 @@ dependencies = [
[[package]]
name = "metal"
version = "0.23.0"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79d7d769f1c104b8388294d6594d491d2e21240636f5f94d37f8a0f3d7904450"
checksum = "1c12e48c737ee9a55e8bb2352bcde588f79ae308d3529ee888f7cc0f469b5777"
dependencies = [
"bitflags",
"block",
"core-graphics-types",
"cocoa-foundation",
"foreign-types",
"log",
"objc",
@ -3240,9 +3200,8 @@ checksum = "a2983372caf4480544083767bf2d27defafe32af49ab4df3a0b7fc90793a3664"
[[package]]
name = "naga"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef670817eef03d356d5a509ea275e7dd3a78ea9e24261ea3cb2dfed1abb08f64"
version = "0.4.0"
source = "git+https://github.com/gfx-rs/naga?tag=gfx-25#057d03ad86f18e3bb3866b20901d8d4e892dd3d6"
dependencies = [
"bit-set",
"bitflags",
@ -4001,8 +3960,7 @@ dependencies = [
[[package]]
name = "range-alloc"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63e935c45e09cc6dcf00d2f0b2d630a58f4095320223d47fc68918722f0538b6"
source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
[[package]]
name = "raw-cpuid"
@ -5646,7 +5604,7 @@ dependencies = [
[[package]]
name = "wgpu-core"
version = "0.9.2"
version = "0.8.0"
dependencies = [
"arrayvec",
"bitflags",
@ -5674,7 +5632,7 @@ dependencies = [
[[package]]
name = "wgpu-types"
version = "0.9.0"
version = "0.8.0"
dependencies = [
"bitflags",
"serde",

View File

@ -81,12 +81,17 @@ const KNOWN_ERROR_TITLE_IDS = new Set([
let searchParams = new URLSearchParams(document.documentURI.split("?")[1]);
// Set to true on init if the error code is nssBadCert.
let gIsCertError;
let gErrorCode = searchParams.get("e");
function getErrorCode() {
return searchParams.get("e");
}
let gIsCertError = gErrorCode == "nssBadCert";
// If the location of the favicon changes, FAVICON_CERTERRORPAGE_URL and/or
// FAVICON_ERRORPAGE_URL in toolkit/components/places/nsFaviconService.idl
// should also be updated.
document.getElementById("favicon").href =
gIsCertError || gErrorCode == "nssFailure2"
? "chrome://global/skin/icons/warning.svg"
: "chrome://global/skin/icons/info.svg";
function getCSSClass() {
return searchParams.get("s");
@ -257,7 +262,7 @@ function initPage() {
});
}
var err = getErrorCode();
var err = gErrorCode;
// List of error pages with an illustration.
let illustratedErrors = [
"malformedURI",
@ -278,7 +283,6 @@ function initPage() {
document.body.classList.add("blocked");
}
gIsCertError = err == "nssBadCert";
// Only worry about captive portals if this is a cert error.
let showCaptivePortalUI = isCaptive() && gIsCertError;
if (showCaptivePortalUI) {
@ -494,7 +498,7 @@ function reportBlockingError() {
return;
}
let err = getErrorCode();
let err = gErrorCode;
// Ensure we only deal with XFO and CSP here.
if (!["xfoBlocked", "cspBlocked"].includes(err)) {
return;
@ -793,7 +797,7 @@ function setCertErrorDetails(event) {
let es = document.getElementById("errorWhatToDoText");
let errWhatToDoTitle = document.getElementById("edd_nssBadCert");
let est = document.getElementById("errorWhatToDoTitleText");
let error = getErrorCode();
let error = gErrorCode;
if (error == "sslv3Used") {
learnMoreLink.setAttribute("href", baseURL + "sslv3-error-messages");
@ -1052,7 +1056,7 @@ async function setTechnicalDetailsOnCertError(
}
let cssClass = getCSSClass();
let error = getErrorCode();
let error = gErrorCode;
let hostString = HOST_NAME;
let port = document.location.port;

View File

@ -22,9 +22,7 @@
<meta http-equiv="Content-Security-Policy" content="default-src chrome:; object-src 'none'" />
<title>&loadError.label;</title>
<link rel="stylesheet" href="chrome://browser/skin/aboutNetError.css" type="text/css" media="all" />
<!-- If the location of the favicon is changed here, the FAVICON_ERRORPAGE_URL symbol in
toolkit/components/places/src/nsFaviconService.h should be updated. -->
<link rel="icon" id="favicon" href="chrome://global/skin/icons/info.svg"/>
<link rel="icon" id="favicon"/>
<link rel="localization" href="browser/aboutCertError.ftl" />
<link rel="localization" href="browser/nsserrors.ftl" />
<link rel="localization" href="branding/brand.ftl"/>

View File

@ -52,6 +52,7 @@ XPCOMUtils.defineLazyModuleGetters(this, {
Interactions: "resource:///modules/Interactions.jsm",
Log: "resource://gre/modules/Log.jsm",
LoginBreaches: "resource:///modules/LoginBreaches.jsm",
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
NetUtil: "resource://gre/modules/NetUtil.jsm",
NewTabUtils: "resource://gre/modules/NewTabUtils.jsm",
NimbusFeatures: "resource://nimbus/ExperimentAPI.jsm",
@ -1933,6 +1934,7 @@ BrowserGlue.prototype = {
BrowserUsageTelemetry.uninit();
SearchSERPTelemetry.uninit();
Interactions.uninit();
PageDataService.uninit();
PageThumbs.uninit();
NewTabUtils.uninit();
@ -2148,6 +2150,7 @@ BrowserGlue.prototype = {
SearchSERPTelemetry.init();
Interactions.init();
PageDataService.init();
ExtensionsUI.init();
let signingRequired;

View File

@ -0,0 +1,228 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
var EXPORTED_SYMBOLS = ["PageDataChild"];
const { XPCOMUtils } = ChromeUtils.import(
"resource://gre/modules/XPCOMUtils.jsm"
);
XPCOMUtils.defineLazyModuleGetters(this, {
PrivateBrowsingUtils: "resource://gre/modules/PrivateBrowsingUtils.jsm",
SchemaOrgPageData: "resource:///modules/pagedata/SchemaOrgPageData.jsm",
Services: "resource://gre/modules/Services.jsm",
});
XPCOMUtils.defineLazyGetter(this, "logConsole", function() {
return console.createInstance({
prefix: "PageData",
maxLogLevel: Services.prefs.getBoolPref("browser.pagedata.log", false)
? "Debug"
: "Warn",
});
});
// We defer any attempt to check for page data for a short time after a page
// loads to allow JS to operate.
XPCOMUtils.defineLazyPreferenceGetter(
this,
"READY_DELAY",
"browser.pagedata.readyDelay",
500
);
/**
* Returns the list of page data collectors for a document.
*
* @param {Document} document
* The DOM document to collect data for.
* @returns {PageDataCollector[]}
*/
function getCollectors(document) {
return [new SchemaOrgPageData(document)];
}
/**
* The actor responsible for monitoring a page for page data.
*/
class PageDataChild extends JSWindowActorChild {
#isContentWindowPrivate = true;
/**
* Used to debounce notifications about a page being ready.
* @type {Timer | null}
*/
#deferTimer = null;
/**
* The current set of page data collectors for the page and their current data
* or null if data collection has not begun.
* @type {Map<PageDataCollector, Data[]> | null}
*/
#collectors = null;
/**
* Called when the actor is created for a new page.
*/
actorCreated() {
this.#isContentWindowPrivate = PrivateBrowsingUtils.isContentWindowPrivate(
this.contentWindow
);
}
/**
* Called when the page is destroyed.
*/
didDestroy() {
if (this.#deferTimer) {
this.#deferTimer.cancel();
}
}
/**
* Called when the page has signalled it is done loading. This signal is
* debounced by READY_DELAY.
*/
#deferReady() {
if (!this.#deferTimer) {
this.#deferTimer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
}
// If the timer was already running this re-starts it.
this.#deferTimer.initWithCallback(
() => {
this.#deferTimer = null;
this.sendAsyncMessage("PageData:DocumentReady", {
url: this.document.documentURI,
});
},
READY_DELAY,
Ci.nsITimer.TYPE_ONE_SHOT_LOW_PRIORITY
);
}
/**
* Coalesces the data from the page data collectors into a single array.
*
* @returns {Data[]}
*/
#buildData() {
if (!this.#collectors) {
return [];
}
let results = [];
for (let data of this.#collectors.values()) {
if (data !== null) {
results = results.concat(data);
}
}
return results;
}
/**
* Begins page data collection on the page.
*/
async #beginCollection() {
if (this.#collectors !== null) {
// Already collecting.
return this.#buildData();
}
logConsole.debug("Starting collection", this.document.documentURI);
// let initialCollection = true;
this.#collectors = new Map();
let pending = [];
for (let collector of getCollectors(this.document)) {
// TODO: Implement monitoring of pages for changes, e.g. for SPAs changing
// video without reloading.
//
// The commented out code below is a first attempt, that would allow
// individual collectors to provide updates. It will need fixing to
// ensure that listeners are either removed or not re-added on fresh
// page loads, as would happen currently.
//
// collector.on("data", (type, data) => {
// this.#collectors.set(collector, data);
//
// // Do nothing if initial collection is still ongoing.
// if (!initialCollection) {
// // TODO debounce this.
// this.sendAsyncMessage("PageData:Collected", {
// url: this.document.documentURI,
// data: this.#buildData(),
// });
// }
// });
pending.push(
collector.init().then(
data => {
this.#collectors.set(collector, data);
},
error => {
this.#collectors.set(collector, []);
logConsole.error(`Failed collecting page data`, error);
}
)
);
}
await Promise.all(pending);
// initialCollection = false;
return this.#buildData();
}
/**
* Called when a message is received from the parent process.
*
* @param {ReceiveMessageArgument} msg
* The received message.
*
* @returns {Promise | undefined}
* A promise for the requested data or undefined if no data was requested.
*/
receiveMessage(msg) {
if (this.#isContentWindowPrivate) {
return undefined;
}
switch (msg.name) {
case "PageData:CheckLoaded":
// The service just started in the parent. Check if this document is
// already loaded.
if (this.document.readyState == "complete") {
this.#deferReady();
}
break;
case "PageData:Collect":
return this.#beginCollection();
}
return undefined;
}
/**
* DOM event handler.
*
* @param {Event} event
* The DOM event.
*/
handleEvent(event) {
if (this.#isContentWindowPrivate) {
return;
}
switch (event.type) {
case "DOMContentLoaded":
case "pageshow":
this.#deferReady();
break;
}
}
}

View File

@ -0,0 +1,67 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
var EXPORTED_SYMBOLS = ["PageDataCollector"];
const { EventEmitter } = ChromeUtils.import(
"resource://gre/modules/EventEmitter.jsm"
);
/**
* Each `PageDataCollector` is responsible for finding data about a DOM
* document. When initialized it must asynchronously discover available data and
* either report what was found or an empty array if there was no relevant data
* in the page. Following this it may continue to monitor the page and report as
* the available data changes.
*/
class PageDataCollector extends EventEmitter {
/**
* Supported data types.
*/
static get DATA_TYPE() {
return {
PRODUCT: 1,
};
}
/**
* Internal, should generally not need to be overridden by child classes.
*
* @param {Document} document
* The DOM Document for the page.
*/
constructor(document) {
super();
this.document = document;
}
/**
* Starts collection of data, should be overridden by child classes. The
* current state of data in the page should be asynchronously returned from
* this method.
*
* @returns {Data[]} The data found for the page which may be an empty array.
*/
async init() {
return [];
}
/**
* Signals that the page has been destroyed.
*/
destroy() {}
/**
* Should not be overridden by child classes. Call to signal that the data in
* the page changed.
*
* @param {Data[]} data
* The data found which may be an empty array to signal that no data was found.
*/
dataFound(data) {
this.emit("data", data);
}
}

View File

@ -0,0 +1,69 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
var EXPORTED_SYMBOLS = ["PageDataParent"];
const { XPCOMUtils } = ChromeUtils.import(
"resource://gre/modules/XPCOMUtils.jsm"
);
XPCOMUtils.defineLazyModuleGetters(this, {
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
PromiseUtils: "resource://gre/modules/PromiseUtils.jsm",
});
/**
* Receives messages from PageDataChild and passes them to the PageData service.
*/
class PageDataParent extends JSWindowActorParent {
#deferredCollection = null;
/**
* Starts data collection in the child process. Returns a promise that
* resolves to the initial set of data discovered.
*
* @returns {Promise<Data[]>}
*/
collectPageData() {
if (!this.#deferredCollection) {
this.#deferredCollection = PromiseUtils.defer();
this.sendQuery("PageData:Collect").then(
this.#deferredCollection.resolve,
this.#deferredCollection.reject
);
}
return this.#deferredCollection.promise;
}
/**
* Called when the page is destroyed.
*/
didDestroy() {
this.#deferredCollection?.reject(
new Error("Page destroyed before collection completed.")
);
}
/**
* Called when a message is received from the content process.
*
* @param {ReceiveMessageArgument} msg
* The received message.
*/
receiveMessage(msg) {
switch (msg.name) {
case "PageData:DocumentReady":
PageDataService.pageLoaded(this, msg.data.url);
break;
// TODO: This is for supporting listening to dynamic changes. See
// PageDataChild.jsm for more information.
// case "PageData:Collected":
// PageDataService.pageDataDiscovered(msg.data.url, msg.data.data);
// break;
}
}
}

View File

@ -11,6 +11,7 @@ const { XPCOMUtils } = ChromeUtils.import(
);
XPCOMUtils.defineLazyModuleGetters(this, {
BrowserWindowTracker: "resource:///modules/BrowserWindowTracker.jsm",
Services: "resource://gre/modules/Services.jsm",
EventEmitter: "resource://gre/modules/EventEmitter.jsm",
});
@ -24,11 +25,13 @@ XPCOMUtils.defineLazyGetter(this, "logConsole", function() {
});
});
const ALLOWED_SCHEMES = ["http", "https", "data", "blob"];
/**
* @typedef {object} Data
* An individual piece of data about a page.
* @property {number} type
* The type of data, see Snapshots.DATA_TYPE.*
* The type of data, see PageDataCollector.DATA_TYPE.*
* @property {object} data
* The data in a format specific to the type of data.
*
@ -44,23 +47,83 @@ XPCOMUtils.defineLazyGetter(this, "logConsole", function() {
const PageDataService = new (class PageDataService extends EventEmitter {
/**
* Caches page data discovered from browsers. The key is the url of the data. Currently the cache
* never expires.
* Caches page data discovered from browsers. The key is the url of the data.
*
* TODO: Currently the cache never expires.
*
* @type {Map<string, PageData[]>}
*/
#pageDataCache = new Map();
/**
* Constructs a new instance of the service, not called externally.
* Initializes a new instance of the service, not called externally.
*/
constructor() {
super();
init() {
if (!Services.prefs.getBoolPref("browser.pagedata.enabled", false)) {
return;
}
ChromeUtils.registerWindowActor("PageData", {
parent: {
moduleURI: "resource:///actors/PageDataParent.jsm",
},
child: {
moduleURI: "resource:///actors/PageDataChild.jsm",
events: {
DOMContentLoaded: {},
pageshow: {},
},
},
});
logConsole.debug("Service started");
for (let win of BrowserWindowTracker.orderedWindows) {
if (!win.closed) {
// Ask any existing tabs to report
for (let tab of win.gBrowser.tabs) {
let parent = tab.linkedBrowser.browsingContext?.currentWindowGlobal.getActor(
"PageData"
);
parent.sendAsyncMessage("PageData:CheckLoaded");
}
}
}
}
/**
* Called when the service is destroyed. This is generally on shutdown so we
* don't really need to do much cleanup.
*/
uninit() {
logConsole.debug("Service stopped");
}
/**
* Called when the content process signals that a page is ready for data
* collection.
*
* @param {PageDataParent} actor
* The parent actor for the page.
* @param {string} url
* The url of the page.
*/
async pageLoaded(actor, url) {
let uri = Services.io.newURI(url);
if (!ALLOWED_SCHEMES.includes(uri.scheme)) {
return;
}
let browser = actor.browsingContext?.embedderElement;
// If we don't have a browser then it went away before we could record,
// so we don't know where the data came from.
if (!browser || !this.#isATabBrowser(browser)) {
return;
}
let data = await actor.collectPageData();
this.pageDataDiscovered(url, data);
}
/**
@ -73,6 +136,8 @@ const PageDataService = new (class PageDataService extends EventEmitter {
* The set of data discovered.
*/
pageDataDiscovered(url, data) {
logConsole.debug("Discovered page data", url, data);
let pageData = {
url,
date: Date.now(),
@ -81,10 +146,9 @@ const PageDataService = new (class PageDataService extends EventEmitter {
this.#pageDataCache.set(url, pageData);
// Send out a notification if there was some data found.
if (data.length) {
this.emit("page-data", pageData);
}
// Send out a notification. The `no-page-data` notification is intended
// for test use only.
this.emit(data.length ? "page-data" : "no-page-data", pageData);
}
/**
@ -103,21 +167,16 @@ const PageDataService = new (class PageDataService extends EventEmitter {
}
/**
* Queues page data retrieval for a url.
* Queues page data retrieval for a url. The page-data notification will be
* generated if data becomes available.
*
* Check `getCached` first to ensure that data is not already in the cache.
*
* @param {string} url
* The url to retrieve data for.
* @returns {Promise<PageData>}
* Resolves to a `PageData` (which may not contain any items of data) when the page has been
* successfully checked for data. Will resolve immediately if there is cached data available.
* Rejects if there was some failure to collect data.
*/
async queueFetch(url) {
let cached = this.#pageDataCache.get(url);
if (cached) {
return cached;
}
// Stub-implementation that generates an empty record.
let pageData = {
url,
date: Date.now(),
@ -126,11 +185,20 @@ const PageDataService = new (class PageDataService extends EventEmitter {
this.#pageDataCache.set(url, pageData);
// Send out a notification if there was some data found.
if (pageData.data.length) {
this.emit("page-data", pageData);
}
// Send out a notification. The `no-page-data` notification is intended
// for test use only.
this.emit(pageData.data.length ? "page-data" : "no-page-data", pageData);
}
return pageData;
/**
* Determines if the given browser is contained within a tab.
*
* @param {DOMElement} browser
* The browser element to check.
* @returns {boolean}
* True if the browser element is contained within a tab.
*/
#isATabBrowser(browser) {
return browser.ownerGlobal.gBrowser?.getTabForBrowser(browser);
}
})();

View File

@ -0,0 +1,175 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
var EXPORTED_SYMBOLS = ["SchemaOrgPageData"];
const { PageDataCollector } = ChromeUtils.import(
"resource:///modules/pagedata/PageDataCollector.jsm"
);
/**
* @typedef {object} ProductData
* Data about a product.
* @property {string | undefined} gtin
* The Global Trade Item Number for the product.
* @property {string | undefined} name
* The name of the product.
* @property {URL | undefined} url
* The url of the product.
* @property {string | undefined} image
* the url of a product image.
* @property {string | undefined} price
* The price of the product.
* @property {string | undefined} currency
* The currency of the price.
*/
/**
* Finds the values for a given property.
* See https://html.spec.whatwg.org/multipage/microdata.html#values for the parsing spec
*
* TODO: Currently this will find item properties of inner-items. Need to use itemscope as a
* boundary.
*
* @param {Element} element
* The item scope.
* @param {string} prop
* The property to find.
* @returns {any[]}
* The value of the property.
*/
function getProp(element, prop) {
const parseUrl = (urlElement, attr) => {
if (!urlElement.hasAttribute(attr)) {
return "";
}
try {
let url = new URL(
urlElement.getAttribute(attr),
urlElement.ownerDocument.documentURI
);
return url.toString();
} catch (e) {
return "";
}
};
return Array.from(
// Ignores properties that are scopes.
element.querySelectorAll(`[itemprop~='${prop}']:not([itemscope])`),
propElement => {
switch (propElement.localName) {
case "meta":
return propElement.getAttribute("content") ?? "";
case "audio":
case "embed":
case "iframe":
case "img":
case "source":
case "track":
case "video":
return parseUrl(propElement, "src");
case "object":
return parseUrl(propElement, "data");
case "a":
case "area":
case "link":
return parseUrl(propElement, "href");
case "data":
case "meter":
return propElement.getAttribute("value");
case "time":
if (propElement.hasAttribute("datetime")) {
return propElement.getAttribute("datetime");
}
return propElement.textContent;
default:
// Not mentioned in the spec but sites seem to use it.
if (propElement.hasAttribute("content")) {
return propElement.getAttribute("content");
}
return propElement.textContent;
}
}
);
}
/**
* Collects schema.org related data from a page.
*
* Currently only supports HTML Microdata, not RDFa or JSON-LD formats.
* Currently only collects product data.
*
* TODO: Respond to DOM mutations to trigger recollection.
*/
class SchemaOrgPageData extends PageDataCollector {
/**
* @see PageDataCollector.init
*/
async init() {
return this.#collect();
}
/**
* Collects product data from an element.
*
* @param {Element} element
* The DOM element representing the product.
*
* @returns {ProductData}
* The product data.
*/
#collectProduct(element) {
// At the moment we simply grab the first element found for each property.
// In future we may need to do something better.
return {
gtin: getProp(element, "gtin")[0],
name: getProp(element, "name")[0],
image: getProp(element, "image")[0] || undefined,
url: getProp(element, "url")[0] || undefined,
price: getProp(element, "price")[0],
currency: getProp(element, "priceCurrency")[0],
};
}
/**
* Collects the existing data from the page.
*
* @returns {Data[]}
*/
#collect() {
/**
* A map from item type to an array of the items found in the page.
*/
let items = new Map();
let insert = (type, item) => {
let data = items.get(type);
if (!data) {
data = [];
items.set(type, data);
}
data.push(item);
};
let scopes = this.document.querySelectorAll(
"[itemscope][itemtype^='https://schema.org/']"
);
for (let scope of scopes) {
switch (scope.getAttribute("itemtype")) {
case "https://schema.org/Product":
insert(
PageDataCollector.DATA_TYPE.PRODUCT,
this.#collectProduct(scope)
);
break;
}
}
return Array.from(items, ([type, data]) => ({ type, data }));
}
}

View File

@ -29,14 +29,8 @@ a short delay and then updated when necessary. Any data is cached in memory for
When page data has been found a `page-data` event is emitted. The event's argument holds the
`PageData` structure. The `getCached` function can be used to access any cached data for a url.
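For example (a minimal sketch using only the APIs exercised by the tests in this patch), a privileged caller can wait for the next `page-data` notification and then consult the cache:

```js
const { PageDataService } = ChromeUtils.import(
  "resource:///modules/pagedata/PageDataService.jsm"
);

async function logNextPageData() {
  // `once` resolves with the PageData structure emitted by the service.
  let pageData = await PageDataService.once("page-data");
  console.log(pageData.url, pageData.data);

  // Data that has already been collected for a url stays available from the
  // in-memory cache.
  let cached = PageDataService.getCached(pageData.url);
  console.log("Cached copy:", cached);
}
```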
Page data can also be requested for a URL that is not currently open. In this case the service will
load the page in the background to find its data. The service operates a queueing system to reduce
resource usage. As above when any new data is found the `page-data` event is emitted. The
`queueFetch` method starts this process and returns a promise that resolves to the `PageData` or
rejects in the event of failure.
## Supported Types of page data
The following types of page data are currently supported:
The following types of page data (`PageDataCollector.DATA_TYPE`) are currently supported:
... TBD
- [`PRODUCT`](./schema-org.html#Product)

View File

@ -0,0 +1,26 @@
# Schema.org page data
Collects data marked up in pages in the [schema.org](https://schema.org/) microdata format.
## Product
Collects product information from the page. Since a page may contain multiple products the data
type is an array.
```json
{
"type": PageDataCollector.DATA_TYPE.PRODUCT,
"data": [
{
"gtin": <The Global Trade Item Number for the product>,
"name": <The name of the product>,
"image": <An image for the product>,
"url": <A canonical url for the product>,
"price": <The price of the product>,
"currency": <The currency of the product (ISO 4217)>,
},
... more products
]
}
```

View File

@ -7,9 +7,19 @@
XPCSHELL_TESTS_MANIFESTS += [
"tests/unit/xpcshell.ini",
]
BROWSER_CHROME_MANIFESTS += [
"tests/browser/browser.ini",
]
EXTRA_JS_MODULES.pagedata += [
"PageDataCollector.jsm",
"PageDataService.jsm",
"SchemaOrgPageData.jsm",
]
FINAL_TARGET_FILES.actors += [
"PageDataChild.jsm",
"PageDataParent.jsm",
]
SPHINX_TREES["docs"] = "docs"

View File

@ -0,0 +1,15 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
[DEFAULT]
prefs =
browser.pagedata.log=true
browser.pagedata.enabled=true
support-files =
head.js
product1.html
product2.html
[browser_pagedata_basic.js]
[browser_pagedata_product.js]

View File

@ -0,0 +1,64 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/**
* Basic tests for the page data service.
*/
const TEST_URL = "https://example.com/";
const TEST_URL2 = "https://example.com/browser";
add_task(async function test_pagedata_no_data() {
let promise = PageDataService.once("no-page-data");
await BrowserTestUtils.withNewTab(TEST_URL, async browser => {
let pageData = await promise;
Assert.equal(pageData.url, TEST_URL, "Should have returned the loaded URL");
Assert.deepEqual(pageData.data, [], "Should have returned no data");
Assert.deepEqual(
PageDataService.getCached(TEST_URL),
pageData,
"Should return the same data from the cache"
);
promise = PageDataService.once("no-page-data");
BrowserTestUtils.loadURI(browser, TEST_URL2);
await BrowserTestUtils.browserLoaded(browser, false, TEST_URL2);
pageData = await promise;
Assert.equal(
pageData.url,
TEST_URL2,
"Should have returned the loaded URL"
);
Assert.deepEqual(pageData.data, [], "Should have returned no data");
Assert.deepEqual(
PageDataService.getCached(TEST_URL2),
pageData,
"Should return the same data from the cache"
);
info("Test going back still triggers collection");
promise = PageDataService.once("no-page-data");
let locationChangePromise = BrowserTestUtils.waitForLocationChange(
gBrowser,
TEST_URL
);
browser.goBack();
await locationChangePromise;
pageData = await promise;
Assert.equal(
pageData.url,
TEST_URL,
"Should have returned the URL of the previous page"
);
Assert.deepEqual(pageData.data, [], "Should have returned no data");
Assert.deepEqual(
PageDataService.getCached(TEST_URL),
pageData,
"Should return the same data from the cache"
);
});
});

View File

@ -0,0 +1,82 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/**
* Tests for product data collection by the page data service.
*/
const BASE_URL = getRootDirectory(gTestPath).replace(
"chrome://mochitests/content",
"https://example.com"
);
add_task(async function test_single_product_data() {
let promise = PageDataService.once("page-data");
const TEST_URL = BASE_URL + "product1.html";
await BrowserTestUtils.withNewTab(TEST_URL, async browser => {
let pageData = await promise;
Assert.equal(pageData.url, TEST_URL, "Should have returned the loaded URL");
Assert.equal(pageData.data.length, 1, "Should have only one data item");
Assert.deepEqual(
pageData.data,
[
{
type: PageDataCollector.DATA_TYPE.PRODUCT,
data: [
{
gtin: "13572468",
name: "Bon Echo Microwave",
image: BASE_URL + "bon-echo-microwave-17in.jpg",
url: BASE_URL + "microwave.html",
price: "3.00",
currency: "GBP",
},
],
},
],
"Should have returned the expected data"
);
});
});
add_task(async function test_single_multiple_data() {
let promise = PageDataService.once("page-data");
const TEST_URL = BASE_URL + "product2.html";
await BrowserTestUtils.withNewTab(TEST_URL, async browser => {
let pageData = await promise;
Assert.equal(pageData.url, TEST_URL, "Should have returned the loaded URL");
Assert.equal(pageData.data.length, 1, "Should have only one data item");
Assert.deepEqual(
pageData.data,
[
{
type: PageDataCollector.DATA_TYPE.PRODUCT,
data: [
{
gtin: "13572468",
name: "Bon Echo Microwave",
image: BASE_URL + "bon-echo-microwave-17in.jpg",
url: BASE_URL + "microwave.html",
price: "3.00",
currency: "GBP",
},
{
gtin: "15263748",
name: "Gran Paradiso Toaster",
image: BASE_URL + "gran-paradiso-toaster-17in.jpg",
url: BASE_URL + "toaster.html",
price: undefined,
currency: undefined,
},
],
},
],
"Should have returned the expected data"
);
});
});

View File

@ -0,0 +1,13 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
const { XPCOMUtils } = ChromeUtils.import(
"resource://gre/modules/XPCOMUtils.jsm"
);
XPCOMUtils.defineLazyModuleGetters(this, {
PageDataCollector: "resource:///modules/pagedata/PageDataCollector.jsm",
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
Snapshots: "resource:///modules/Snapshots.jsm",
});

View File

@ -0,0 +1,19 @@
<html>
<head>
<title>Product Info 1</title>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8"></meta>
</head>
<body>
<div itemscope itemtype="https://schema.org/Product">
<img itemprop="image" src="bon-echo-microwave-17in.jpg" />
<a href="microwave.html" itemprop="url">
<span itemprop="name">Bon Echo Microwave</span>
</a>
<span itemprop="price" content="3.00">£3.00</span>
<span itemprop="priceCurrency" content="GBP"></span>
<span itemprop="gtin" content="13572468"></span>
</div>
</body>
</html>

View File

@ -0,0 +1,27 @@
<html>
<head>
<title>Product Info 2</title>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8"></meta>
</head>
<body>
<div itemscope itemtype="https://schema.org/Product">
<img itemprop="image" src="bon-echo-microwave-17in.jpg" />
<a href="microwave.html" itemprop="url">
<span itemprop="name">Bon Echo Microwave</span>
</a>
<span itemprop="price" content="3.00">£3.00</span>
<span itemprop="priceCurrency" content="GBP"></span>
<span itemprop="gtin" content="13572468"></span>
</div>
<div itemscope itemtype="https://schema.org/Product">
<img itemprop="image" src="gran-paradiso-toaster-17in.jpg" />
<a href="toaster.html" itemprop="url">
<span itemprop="name">Gran Paradiso Toaster</span>
</a>
<span itemprop="gtin" content="15263748"></span>
</div>
</body>
</html>

View File

@ -11,12 +11,13 @@ const { XPCOMUtils } = ChromeUtils.import(
);
XPCOMUtils.defineLazyModuleGetters(this, {
Services: "resource://gre/modules/Services.jsm",
PageDataCollector: "resource:///modules/pagedata/PageDataCollector.jsm",
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
Services: "resource://gre/modules/Services.jsm",
Snapshots: "resource:///modules/Snapshots.jsm",
});
add_task(async function notifies() {
add_task(async function test_pageDataDiscovered_notifies() {
let url = "https://www.mozilla.org/";
Assert.equal(
@ -25,38 +26,53 @@ add_task(async function notifies() {
"Should be no cached data."
);
let listener = () => {
Assert.ok(false, "Should not notify for no data.");
};
PageDataService.on("page-data", listener);
let pageData = await PageDataService.queueFetch(url);
Assert.equal(pageData.url, "https://www.mozilla.org/");
Assert.equal(pageData.data.length, 0);
pageData = PageDataService.getCached(url);
Assert.equal(pageData.url, "https://www.mozilla.org/");
Assert.equal(pageData.data.length, 0);
PageDataService.off("page-data", listener);
let promise = PageDataService.once("page-data");
PageDataService.pageDataDiscovered(url, [
{
type: Snapshots.DATA_TYPE.PRODUCT,
type: PageDataCollector.DATA_TYPE.PRODUCT,
data: {
price: 276,
},
},
]);
pageData = await promise;
Assert.equal(pageData.url, "https://www.mozilla.org/");
Assert.equal(pageData.data.length, 1);
Assert.equal(pageData.data[0].type, Snapshots.DATA_TYPE.PRODUCT);
let pageData = await promise;
Assert.equal(
pageData.url,
"https://www.mozilla.org/",
"Should have notified data for the expected url"
);
Assert.deepEqual(
pageData.data,
[
{
type: PageDataCollector.DATA_TYPE.PRODUCT,
data: {
price: 276,
},
},
],
"Should have returned the correct product data"
);
Assert.equal(PageDataService.getCached(url), pageData);
Assert.equal(await PageDataService.queueFetch(url), pageData);
Assert.deepEqual(
PageDataService.getCached(url),
pageData,
"Should return the same pageData from the cache as was notified."
);
});
add_task(async function test_queueFetch_notifies() {
let promise = PageDataService.once("no-page-data");
PageDataService.queueFetch("https://example.org");
let pageData = await promise;
Assert.equal(
pageData.url,
"https://example.org",
"Should have notified data for the expected url"
);
Assert.equal(pageData.data.length, 0, "Should have returned no data");
});

View File

@ -13,9 +13,10 @@ const { XPCOMUtils } = ChromeUtils.import(
const VERSION_PREF = "browser.places.snapshots.version";
XPCOMUtils.defineLazyModuleGetters(this, {
PageDataCollector: "resource:///modules/pagedata/PageDataCollector.jsm",
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
PlacesUtils: "resource://gre/modules/PlacesUtils.jsm",
Services: "resource://gre/modules/Services.jsm",
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
});
/**
@ -116,15 +117,6 @@ const Snapshots = new (class Snapshots {
// PageDataService.on("page-data", this.#onPageData);
}
/**
* Supported data types.
*/
get DATA_TYPE() {
return {
PRODUCT: 1,
};
}
#notify(topic, urls) {
Services.obs.notifyObservers(null, topic, JSON.stringify(urls));
}
@ -141,7 +133,7 @@ const Snapshots = new (class Snapshots {
let pageData = PageDataService.getCached(url);
if (pageData?.data.length) {
for (let data of pageData.data) {
if (Object.values(this.DATA_TYPE).includes(data.type)) {
if (Object.values(PageDataCollector.DATA_TYPE).includes(data.type)) {
bindings[`id${index}`] = placeId;
bindings[`type${index}`] = data.type;
// We store the whole data object that also includes type because

View File

@ -146,6 +146,10 @@ class TableViewer {
* the rows being the keys of the columnMap.
*/
displayData(rows) {
if (gCurrentHandler != this) {
/* Data is no longer relevant to the current view. */
return;
}
let viewer = document.getElementById("tableViewer");
let index = this.columnMap.size;
for (let row of rows) {
@ -422,21 +426,19 @@ function show(selectedButton) {
return;
}
gCurrentHandler.pause();
currentButton.classList.remove("selected");
selectedButton.classList.add("selected");
switch (selectedButton.getAttribute("value")) {
case "snapshots":
metadataHandler.pause();
snapshotHandler.start();
(gCurrentHandler = snapshotHandler).start();
break;
case "metadata":
snapshotHandler.pause();
(gCurrentHandler = metadataHandler).start();
metadataHandler.start();
break;
case "places-stats":
placesStatsHandler.pause();
placesStatsHandler.start();
(gCurrentHandler = placesStatsHandler).start();
break;
}
}
@ -463,5 +465,7 @@ function setupListeners() {
}
checkPrefs();
snapshotHandler.start().catch(console.error);
// Set the initial handler here.
let gCurrentHandler = snapshotHandler;
gCurrentHandler.start().catch(console.error);
setupListeners();

View File

@ -5,6 +5,7 @@
* Tests that adding a snapshot also adds related page data.
*/
XPCOMUtils.defineLazyModuleGetters(this, {
PageDataCollector: "resource:///modules/pagedata/PageDataCollector.jsm",
PageDataService: "resource:///modules/pagedata/PageDataService.jsm",
});
@ -16,7 +17,7 @@ add_task(async function pagedata() {
// Register some page data.
PageDataService.pageDataDiscovered(TEST_URL1, [
{
type: Snapshots.DATA_TYPE.PRODUCT,
type: PageDataCollector.DATA_TYPE.PRODUCT,
data: {
price: 276,
},
@ -32,7 +33,7 @@ add_task(async function pagedata() {
PageDataService.pageDataDiscovered(TEST_URL2, [
{
type: Snapshots.DATA_TYPE.PRODUCT,
type: PageDataCollector.DATA_TYPE.PRODUCT,
data: {
price: 384,
},
@ -88,7 +89,7 @@ add_task(async function pagedata() {
let snap = await Snapshots.get(TEST_URL1);
Assert.equal(snap.pageData.size, 1, "Should have some page data.");
Assert.equal(
snap.pageData.get(Snapshots.DATA_TYPE.PRODUCT).price,
snap.pageData.get(PageDataCollector.DATA_TYPE.PRODUCT).price,
276,
"Should have the right price."
);
@ -97,7 +98,7 @@ add_task(async function pagedata() {
snap = await Snapshots.get(TEST_URL2);
Assert.equal(snap.pageData.size, 1, "Should have some page data.");
Assert.equal(
snap.pageData.get(Snapshots.DATA_TYPE.PRODUCT).price,
snap.pageData.get(PageDataCollector.DATA_TYPE.PRODUCT).price,
384,
"Should have the right price."
);
@ -119,7 +120,7 @@ add_task(async function pagedata() {
documentType: Interactions.DOCUMENT_TYPE.GENERIC,
},
],
{ type: Snapshots.DATA_TYPE.PRODUCT }
{ type: PageDataCollector.DATA_TYPE.PRODUCT }
);
info("Ensure that removing a snapshot removes pagedata for it");
@ -141,7 +142,7 @@ add_task(async function pagedata() {
snap = await Snapshots.get(TEST_URL1);
Assert.equal(snap.pageData.size, 1, "Should have some page data.");
Assert.equal(
snap.pageData.get(Snapshots.DATA_TYPE.PRODUCT).price,
snap.pageData.get(PageDataCollector.DATA_TYPE.PRODUCT).price,
276,
"Should have the right price."
);

View File

@ -9,11 +9,11 @@
#UITourHighlightContainer {
appearance: none;
-moz-window-shadow: none;
border: none;
background-color: transparent;
--panel-border-color: transparent;
--panel-background: transparent;
/* This is a buffer to compensate for the movement in the "wobble" effect,
and for the box-shadow of #UITourHighlight. */
padding: 4px;
--panel-padding: 4px;
/* Compensate the displacement caused by padding. */
margin: -4px;
}

View File

@ -160,8 +160,8 @@ var DevToolsServer = {
return;
}
for (const connID of Object.getOwnPropertyNames(this._connections)) {
this._connections[connID].close();
for (const connection of Object.values(this._connections)) {
connection.close();
}
ActorRegistry.destroy();

View File

@ -26,3 +26,8 @@ table.docutils td, table.docutils th
span.strikethrough {
text-decoration: line-through;
}
/* Better control over the table on this page */
.matcher-cookbook td {
white-space: break-spaces !important;
}

View File

@ -10,6 +10,7 @@ or seriously develop one we can land and run internally. While being written fo
clang-query.rst
compiler-explorer.rst
writing-matchers.rst
matcher-cookbook.rst
adding-a-check.rst
advanced-check-features.rst

View File

@ -0,0 +1,23 @@
.. _matcher_cookbook:
Matcher Cookbook
=================
This page is a selection of common ingredients for building more complicated matchers.
.. list-table::
:widths: 35 65
:header-rows: 1
:class: matcher-cookbook
* - Desired Outcome
- Syntax
* - Ignore header files
*If you have an #include in your example code, your matcher may match things in the header files.*
- Add **isExpansionInMainFile()** to the matcher. e.g.
``m functionDecl(isExpansionInMainFile())``
*More coming*

View File

@ -87,6 +87,10 @@ var tests = (function*() {
{ set: [["security.mixed_content.block_active_content", false]] },
advance
);
yield SpecialPowers.pushPrefEnv(
{ set: [["network.http.referer.disallowRelaxingDefault", false]] },
advance
);
yield SpecialPowers.pushPermissions(
[{ type: "systemXHR", allow: true, context: document }],
advance

View File

@ -82,6 +82,11 @@ var tests = (function*() {
var iframe = document.getElementById("testframe");
var sjs = "/tests/dom/base/test/referrer_change_server.sjs?action=generate-policy-test";
yield SpecialPowers.pushPrefEnv(
{ set: [["network.http.referer.disallowCrossSiteRelaxingDefault", false]] },
advance
);
yield resetState();
var name = "no-referrer-unsafe-url";
yield iframe.src = sjs + "&policy=" + escape('no-referrer') + "&name=" + name + "&newPolicy=" + escape('unsafe-url');

View File

@ -1329,6 +1329,9 @@ DOMInterfaces = {
'GPUAdapterFeatures': {
'nativeType': 'mozilla::webgpu::AdapterFeatures',
},
'GPUAdapterLimits': {
'nativeType': 'mozilla::webgpu::AdapterLimits',
},
'GPUBindGroup': {
'nativeType': 'mozilla::webgpu::BindGroup',
},
@ -1396,8 +1399,8 @@ DOMInterfaces = {
'GPUShaderModule': {
'nativeType': 'mozilla::webgpu::ShaderModule',
},
'GPUSupportedLimits': {
'nativeType': 'mozilla::webgpu::SupportedLimits',
'GPUSwapChain': {
'nativeType': 'mozilla::webgpu::SwapChain',
},
'GPUTexture': {
'nativeType': 'mozilla::webgpu::Texture',

View File

@ -8,5 +8,7 @@ These linked pages contain design documents for the DOM implementation in Gecko.
ipc/index
navigation/index
scriptSecurity/index
scriptSecurity/xray_vision
workersAndStorage/index
webIdlBindings/index

8 binary image files added (not shown).
View File

@ -0,0 +1,322 @@
Script Security
===============
.. container:: summary
This page provides an overview of the script security architecture in
Gecko.
Like any web browser, Gecko can load JavaScript from untrusted and
potentially hostile web pages and run it on the user's computer. The
security model for web content is based on the `same-origin policy
<https://developer.mozilla.org/en-US/docs/Web/Security/Same-origin_policy>`__,
in which code
gets full access to objects from its origin but highly restricted access
to objects from a different origin. The rules for determining whether an
object is same-origin with another, and what access is allowed
cross-origin, are now mostly standardized across browsers.
Gecko has an additional problem, though: while its core is written in
C++, the front-end code is written in JavaScript. This JavaScript code,
which is commonly referred to as *chrome code*, runs with system
privileges. If the code is compromised, the attacker can take over the
user's computer. Legacy SDK extensions also run with chrome privileges.
Having the browser front end in JavaScript has benefits: it can be much
quicker to develop in JavaScript than in C++, and contributors do not
need to learn C++. However, JavaScript is a highly dynamic, malleable
language, and without help it's difficult to write system-privileged
code that interacts safely with untrusted web content. From the point of
view of chrome code, the script security model in Gecko is intended to
provide that help to make writing secure, system-privileged JavaScript a
realistic expectation.
.. _Security_policy:
Security policy
---------------
Gecko implements the following security policy:
- **Objects that are same-origin** are able to access each other
freely. For example, the objects associated with a document served
from *https://example.org/* can access each other, and they can also
access objects served from *https://example.org/foo*.
- **Objects that are cross-origin** get highly restricted access to
each other, according to the same-origin policy.
For example, code served from *https://example.org/* trying to access
objects from *https://somewhere-else.org/* will have restricted
access.
- **Objects in a privileged scope** are allowed complete access to
objects in a less privileged scope, but by default they see a
`restricted view <#privileged-to-unprivileged-code>`__
of such objects, designed to prevent them from being tricked by the
untrusted code. An example of this scope is chrome-privileged
JavaScript accessing web content.
- **Objects in a less privileged scope** don't get any access to
objects in a more privileged scope, unless the more privileged scope
`explicitly clones those objects <#unprivileged-to-privileged-code>`__.
An example of this scope is web content accessing objects in a
chrome-privileged scope. 
.. _Compartments:
Compartments
------------
Compartments are the foundation for Gecko's script security
architecture. A compartment is a specific, separate area of memory. In
Gecko, there's a separate compartment for every global object. This
means that each global object and the objects associated with it live in
their own region of memory.
.. image:: images/compartments.png
Normal content windows are globals, of course, but so are chrome
windows, sandboxes, workers, the ``ContentFrameMessageManager`` in a frame
script, and so on.
Gecko guarantees that JavaScript code running in a given compartment is
only allowed to access objects in the same compartment. When code from
compartment A tries to access an object in compartment B, Gecko gives it
a *cross-compartment wrapper*. This is a proxy in compartment A for the
real object, which lives in compartment B.
.. image:: images/cross-compartment-wrapper.png
Inside the same compartment, all objects share a global and are
therefore same-origin with each other. Therefore there's no need for any
security checks, there are no wrappers, and there is no performance
overhead for the common case of objects in a single window interacting
with each other.
Whenever cross-compartment access happens, the wrappers enable us to
implement the appropriate security policy. Because the wrapper we choose
is specific to the relationship between the two compartments, the
security policy it implements can be static: when the caller uses the
wrapper, there's no need to check who is making the call or where it is
going.
.. _Cross-compartment_access:
Cross-compartment access
------------------------
.. _Same-origin:
Same-origin
~~~~~~~~~~~
As we've already seen, the most common scenario for same-origin access
is when objects belonging to the same window object interact. This all
takes place within the same compartment, with no need for security
checks or wrappers.
When objects share an origin but not a global - for example two web
pages from the same protocol, port, and domain - they belong to two
different compartments, and the caller gets a *transparent wrapper* to
the target object.
.. image:: images/same-origin-wrapper.png
Transparent wrappers allow access to all the target's properties:
functionally, it's as if the target is in the caller's compartment.
.. _Cross-origin:
Cross-origin
~~~~~~~~~~~~
If the two compartments are cross-origin, the caller gets a
*cross-origin wrapper*.
.. image:: images/cross-origin-wrapper.png
This denies access to all the object's properties, except for a few
properties of Window and Location objects, as defined by
the `same-origin
policy <https://developer.mozilla.org/en-US/docs/Web/Security/Same-origin_policy#cross-origin_script_api_access>`__.
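For example, a script on *https://example.org/* holding a reference to a
window from *https://somewhere-else.org/* (``otherWindow`` below is
illustrative) can only reach the whitelisted cross-origin members:

.. code:: js

   otherWindow.postMessage("hello", "*"); // allowed cross-origin
   otherWindow.closed;                    // allowed cross-origin
   otherWindow.document;                  // denied: throws a SecurityError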
.. _Privileged_to_unprivileged_code:
Privileged to unprivileged code
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The most obvious example of this kind of security relation is between
system-privileged chrome code and untrusted web content, but there are
other examples in Gecko. The Add-on SDK runs content scripts in
sandboxes, which are initialized with an `expanded
principal <#expanded-principal>`__,
giving them elevated privileges with respect to the web content they
operate on, but reduced privileges with respect to chrome.
If the caller has a higher privilege than the target object, the caller
gets an *Xray wrapper* for the object.
.. image:: images/xray-wrapper.png
Xrays are designed to prevent untrusted code from confusing trusted code
by redefining objects in unexpected ways. For example, privileged code
using an Xray to a DOM object sees only the original version of the DOM
object. Any expando properties are not visible, and if any native DOM properties have been
redefined, they are not visible in the Xray.
The privileged code is able to waive Xrays if it wants unfiltered access to the untrusted object.
See `Xray vision <xray_vision.html>`__ for much more information on Xrays.
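As a rough sketch, chrome code holding an Xray to a content window can waive
it when it deliberately wants the page's own view of an object
(``contentWindow`` here stands for any content window the chrome code already
has; ``Cu`` is ``Components.utils``):

.. code:: js

   // Through the Xray, chrome sees only the native DOM object.
   let title = contentWindow.document.title;

   // Waiving the Xray exposes the page's own (possibly redefined) objects.
   let waived = Cu.waiveXrays(contentWindow);
   waived.confirm("Calls whatever the page defined confirm() to be");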
.. _Unprivileged_to_privileged_code:
Unprivileged to privileged code
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the caller has lower privileges than the target object, then the
caller gets an *opaque wrapper.*
.. image:: images/opaque-wrapper.png
An opaque wrapper denies all access to the target object.
However, the privileged target is able to copy objects and functions
into the less privileged scope using the ``exportFunction()`` and
``cloneInto()`` functions, and the less privileged scope is then able
to use them.
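For example, chrome code might share a function and a plain data object with
a content window roughly like this (``contentWindow`` is illustrative):

.. code:: js

   // Make a chrome-defined function callable by the page as window.sayHello().
   Cu.exportFunction(
     function sayHello(name) {
       return "Hello " + name;
     },
     contentWindow,
     { defineAs: "sayHello" }
   );

   // Copy a plain data object into the content scope.
   contentWindow.wrappedJSObject.sharedConfig = Cu.cloneInto(
     { enabled: true, retries: 3 },
     contentWindow
   );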
.. _Security_checks:
Security checks
---------------
To determine the security relation between two compartments, Gecko uses
two concepts: *security principals* and the act of *subsuming*. To
establish the security relationship between two compartments A and B,
Gecko asks:
*Does the security principal for compartment A subsume the security
principal for compartment B, and vice versa?*
.. _Subsumes:
Subsumes
~~~~~~~~
+-----------------------------------+-----------------------------------+
| *A subsumes B*                    | A has all of the privileges of B, |
|                                   | and possibly more, and therefore  |
|                                   | A is allowed to see and do        |
|                                   | anything that B can see and do.   |
+-----------------------------------+-----------------------------------+
| *A subsumes B && B subsumes A*    | A and B are same-origin.          |
+-----------------------------------+-----------------------------------+
| *A subsumes B && B !subsumes A*   | A is more privileged than B.      |
|                                   |                                   |
|                                   | A gets access to all of B, by     |
|                                   | default with Xray vision, which   |
|                                   | it may choose to waive.           |
|                                   |                                   |
|                                   | B gets no access to A, although A |
|                                   | may choose to export objects to   |
|                                   | B.                                |
+-----------------------------------+-----------------------------------+
| *A !subsumes B && B !subsumes A*  | A and B are cross-origin.         |
+-----------------------------------+-----------------------------------+
.. _Security_principals:
Security principals
~~~~~~~~~~~~~~~~~~~
.. container::
There are four types of security principal: the system principal,
content principals, expanded principals, and the null principal.
.. container::
.. _System_principal:
System principal
^^^^^^^^^^^^^^^^
The system principal passes all security checks. It subsumes itself and
all other principals. Chrome code, by definition, runs with the system
principal, as do frame scripts.
.. _Content_principal:
Content principal
^^^^^^^^^^^^^^^^^
A content principal is associated with some web content and is defined
by the origin
of the content. For example, a normal DOM window has a content principal
defined by the window's origin. A content principal subsumes only other
content principals with the same origin. It is subsumed by the system
principal, any expanded principals that include its origin, and any
other content principals with the same origin.
.. _Expanded_principal:
Expanded principal
^^^^^^^^^^^^^^^^^^
An expanded principal is specified as an array of origins:
.. code:: js

   ["http://mozilla.org", "http://moz.org"]
The expanded principal subsumes every content principal it contains. The
content principals do not subsume the expanded principal, even if the
expanded principal only contains a single content principal.
Thus ``["http://moz.org"]`` subsumes ``"http://moz.org"`` but not vice
versa. The expanded principal gets full access to the content principals
it contains, with Xray vision by default, and the content principals get
no access to the expanded principal.
This also enables the script security model to treat compartments that
have expanded principals more like part of the browser than like web
content. This means, for example, that such code can run even when
JavaScript is disabled for web content.
Expanded principals are useful when you want to give code extra
privileges, including cross-origin access, but don't want to give the
code full system privileges. For example, expanded principals are used
in the Add-on SDK to give content scripts cross-domain privileges for a predefined set of
domains,
and to protect content scripts from access by untrusted web content,
without having to give content scripts system privileges.
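For example, passing an array of origins to ``Components.utils.Sandbox()``
creates a sandbox whose principal is the corresponding expanded principal
(a minimal sketch):
.. code:: js
// chrome code
var sandbox = Components.utils.Sandbox(["http://mozilla.org", "http://moz.org"]);
// Code evaluated here gets cross-origin access to both origins (with Xrays),
// but pages from those origins get no access to the sandbox.
Components.utils.evalInSandbox("1 + 1", sandbox);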
.. _Null_principal:
Null principal
^^^^^^^^^^^^^^
The null principal fails almost all security checks. It has no
privileges and can't be accessed by anything but itself and chrome. It
subsumes no other principals, even other null principals. (This is what
is used when HTML5 and other specs say "origin is a globally unique
identifier".)
.. _Principal_relationships:
Principal relationships
~~~~~~~~~~~~~~~~~~~~~~~
The diagram below summarizes the relationships between the different
principals. The arrow connecting principals A and B means "A subsumes
B".  (A is the start of the arrow, and B is the end.)
.. image:: images/principal-relationships.png
.. _Computing_a_wrapper:
Computing a wrapper
-------------------
The following diagram shows the factors that determine the kind of
wrapper that compartment A would get when trying to access an object in
compartment B.
.. image:: images/computing-a-wrapper.png

View File

@ -0,0 +1,411 @@
Xray Vision
===========
.. container:: summary
Xray vision helps JavaScript running in a privileged security context
safely access objects created by less privileged code, by showing the
caller only the native version of the objects.
Gecko runs JavaScript from a variety of different sources and at a
variety of different privilege levels.
- The JavaScript code that, along with the C++ core, implements the
browser itself is called *chrome code* and runs with system
privileges. If chrome-privileged code is compromised, the attacker
can take over the user's computer.
- JavaScript loaded from normal web pages is called *content code*.
Because this code is being loaded from arbitrary web pages, it is
regarded as untrusted and potentially hostile, both to other websites
and to the user.
- As well as these two levels of privilege, chrome code can create
sandboxes. The security principal defined for the sandbox determines
its privilege level. If an expanded principal is used, the sandbox is
granted certain privileges over content code and is protected from
direct access by content code.
| The security machinery in Gecko ensures that there's asymmetric access
between code at different privilege levels: so for example, content
code can't access objects created by chrome code, but chrome code can
access objects created by content.
| However, even the ability to access content objects can be a security
risk for chrome code. JavaScript is a highly malleable language.
Scripts running in web pages can add extra properties to DOM objects
(also known as expando properties)
and even redefine standard DOM objects to do something unexpected. If
chrome code relies on such modified objects, it can be tricked into
doing things it shouldn't.
| For example: ``window.confirm()`` is a DOM
API that's supposed to ask the user to confirm an action, and return a
boolean depending on whether they clicked "OK" or "Cancel". A web page
could redefine it to return ``true``:
.. code:: js
window.confirm = function() {
return true;
}
Any privileged code calling this function and expecting its result to
represent user confirmation would be deceived. This would be very naive,
of course, but there are more subtle ways in which accessing content
objects from chrome can cause security problems.
| This is the problem that Xray vision is designed to solve. When a
script accesses an object using Xray vision it sees only the native
version of the object. Any expandos are invisible, and if any
properties of the object have been redefined, it sees the original
implementation, not the redefined version.
| So in the example above, chrome code calling the content's
``window.confirm()`` would get the original version of ``confirm()``,
not the redefined version.
.. note::
It's worth emphasizing that even if content tricks chrome into
running some unexpected code, that code does not run with chrome
privileges. So this is not a straightforward privilege escalation
attack, although it might lead to one if the chrome code is
sufficiently confused.
.. _How_you_get_Xray_vision:
How you get Xray vision
-----------------------
Privileged code automatically gets Xray vision whenever it accesses
objects belonging to less-privileged code. So when chrome code accesses
content objects, it sees them with Xray vision:
.. code:: js
// chrome code
var transfer = gBrowser.contentWindow.confirm("Transfer all my money?");
// calls the native implementation
.. note::
Note that using ``window.confirm()`` would be a terrible way to implement
a security policy, and is only shown here to illustrate how Xray
vision works.
.. _Waiving_Xray_vision:
Waiving Xray vision
-------------------
| Xray vision is a kind of security heuristic, designed to make most
common operations on untrusted objects simple and safe. However, there
are some operations for which it is too restrictive: for example,
if you need to see expandos on DOM objects. In cases like this you can
waive Xray protection, but then you can no longer rely on any
properties or functions being, or doing, what you expect. Any of them,
even setters and getters, could have been redefined by untrusted code.
| To waive Xray vision for an object you can use
``Components.utils.waiveXrays(object)``,
or use the object's ``wrappedJSObject`` property:
.. code:: js
// chrome code
var waivedWindow = Components.utils.waiveXrays(gBrowser.contentWindow);
var transfer = waivedWindow.confirm("Transfer all my money?");
// calls the redefined implementation
.. code:: js
// chrome code
var waivedWindow = gBrowser.contentWindow.wrappedJSObject;
var transfer = waivedWindow.confirm("Transfer all my money?");
// calls the redefined implementation
Waivers are transitive: so if you waive Xray vision for an object, then
you automatically waive it for all the object's properties. For example,
``window.wrappedJSObject.document`` gets you the waived version of
``document``.
To undo the waiver, call ``Components.utils.unwaiveXrays(waivedObject)``:
.. code:: js
var unwaived = Components.utils.unwaiveXrays(waivedWindow);
unwaived.confirm("Transfer all my money?");
// calls the native implementation
.. _Xrays_for_DOM_objects:
Xrays for DOM objects
---------------------
The primary use of Xray vision is for DOM objects: that is, the
objects that represent parts of the web page.
In Gecko, DOM objects have a dual representation: the canonical
representation is in C++, and this is reflected into JavaScript for the
benefit of JavaScript code. Any modifications to these objects, such as
adding expandos or redefining standard properties, stay in the
JavaScript reflection and do not affect the C++ representation.
The dual representation enables an elegant implementation of Xrays: the
Xray just directly accesses the C++ representation of the original
object, and doesn't go to the content's JavaScript reflection at all.
Instead of filtering out modifications made by content, the Xray
short-circuits the content completely.
This also makes the semantics of Xrays for DOM objects clear: they are
the same as the DOM specification, since that is defined using
`WebIDL <http://www.w3.org/TR/WebIDL/>`__, and WebIDL also defines
the C++ representation.
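As a sketch of what this means in practice (assuming content has redefined a
standard DOM method on a prototype):
.. code:: js
// chrome code
// Suppose content ran: Node.prototype.appendChild = function() { return "hijacked"; };
// The Xray goes straight to the C++ implementation, so the native
// appendChild still runs, exactly as WebIDL defines it.
var doc = gBrowser.contentWindow.document;
doc.body.appendChild(doc.createElement("div"));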
.. _Xrays_for_JavaScript_objects:
Xrays for JavaScript objects
----------------------------
Until recently, built-in JavaScript objects that are not part of the
DOM, such as
``Date``, ``Error``, and ``Object``, did not get Xray vision when
accessed by more-privileged code.
Most of the time this is not a problem: the main concern Xrays solve is
with untrusted web content manipulating objects, and web content is
usually working with DOM objects. For example, if content code creates a
new ``Date`` object, it will usually be created as a property of a DOM
object, and then it will be filtered out by the DOM Xray:
.. code:: js
// content code
// redefine Date.getFullYear()
Date.prototype.getFullYear = function() {return 1000};
var date = new Date();
.. code:: js
// chrome code
// contentWindow is an Xray, and date is an expando on contentWindow
// so date is filtered out
gBrowser.contentWindow.date.getFullYear()
// -> TypeError: gBrowser.contentWindow.date is undefined
The chrome code will only see ``date`` if it waives Xrays, and
then, because waiving is transitive, it should expect to be vulnerable
to redefinition:
.. code:: js
// chrome code
Components.utils.waiveXrays(gBrowser.contentWindow).date.getFullYear();
// -> 1000
However, there are some situations in which privileged code will access
JavaScript objects that are not themselves DOM objects and are not
properties of DOM objects. For example:
- the ``detail`` property of a CustomEvent fired by content could be a JavaScript
Object or Date as well as a string or a primitive
- the return value of ``evalInSandbox()`` and any properties attached to the
``Sandbox`` object may be pure JavaScript objects
Also, the WebIDL specifications are starting to use JavaScript types
such as ``Date`` and ``Promise``: since the WebIDL definition is the basis
of DOM Xrays, not having Xrays for these JavaScript types starts to seem
arbitrary.
So, in Gecko 31 and 32 we've added Xray support for most JavaScript
built-in objects.
Like DOM objects, most JavaScript built-in objects have an underlying
C++ state that is separate from their JavaScript representation, so the
Xray implementation can go straight to the C++ state and guarantee that
the object will behave as its specification defines:
.. code:: js
// chrome code
var sandboxScript = 'Date.prototype.getFullYear = function() {return 1000};' +
'var date = new Date(); ';
var sandbox = Components.utils.Sandbox("https://example.org/");
Components.utils.evalInSandbox(sandboxScript, sandbox);
// Date objects are Xrayed
console.log(sandbox.date.getFullYear());
// -> 2014
// But you can waive Xray vision
console.log(Components.utils.waiveXrays(sandbox.date).getFullYear());
// -> 1000
.. note::
To test out examples like this, you can use the Scratchpad in
browser context
for the code snippet, and the Browser Console to see the expected
output.
Because code running in Scratchpad's browser context has chrome
privileges, any time you use it to run code, you need to understand
exactly what the code is doing. That includes the code samples in
this article.
.. _Xray_semantics_for_Object_and_Array:
Xray semantics for Object and Array
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The exceptions are ``Object``
and ``Array``: their interesting state is in JavaScript, not C++. This
means that the semantics of their Xrays have to be independently
defined: they can't simply be defined as "the C++ representation".
The aim of Xray vision is to make most common operations simple and
safe, avoiding the need to access the underlying object except in more
involved cases. So the semantics defined for ``Object`` and ``Array``
Xrays aim to make it easy for privileged code to treat untrusted objects
like simple dictionaries.
Any value properties
of the object are visible in the Xray. If the object has properties
which are themselves objects, and these objects are same-origin with the
content, then their value properties are visible as well.
There are two main sorts of restrictions:
- First, the chrome code might expect to rely on the prototype's
integrity, so the object's prototype is protected:
- the Xray has the standard ``Object`` or ``Array`` prototype,
without any modifications that content may have done to that
prototype. The Xray always inherits from this standard prototype,
even if the underlying instance has a different prototype.
- if a script has created a property on an object instance that
shadows a property on the prototype, the shadowing property is not
visible in the Xray.
- Second, we want to prevent the chrome code from running content code,
so functions and accessor properties
of the object are not visible in the Xray.
These rules are demonstrated in the script below, which evaluates a
script in a sandbox, then examines the object attached to the sandbox.
.. note::
To test out examples like this, you can use the Scratchpad in
browser context for the code snippet, and the Browser Console
to see the expected output.
Because code running in Scratchpad's browser context has chrome
privileges, any time you use it to run code, you need to understand
exactly what the code is doing. That includes the code samples in
this article.
.. code:: js
/*
The sandbox script:
* redefines Object.prototype.toSource()
* creates a Person() constructor that:
* defines a value property "firstName" using assignment
* defines a value property which shadows "constructor"
* defines a value property "address" which is a simple object
* defines a function fullName()
* using defineProperty, defines a value property on Person "lastName"
* using defineProperty, defines an accessor property on Person "middleName",
which has some unexpected accessor behavior
*/
var sandboxScript = 'Object.prototype.toSource = function() {'+
' return "not what you expected?";' +
'};' +
'function Person() {' +
' this.constructor = "not a constructor";' +
' this.firstName = "Joe";' +
' this.address = {"street" : "Main Street"};' +
' this.fullName = function() {' +
' return this.firstName + " " + this.lastName;'+
' };' +
'};' +
'var me = new Person();' +
'Object.defineProperty(me, "lastName", {' +
' enumerable: true,' +
' configurable: true,' +
' writable: true,' +
' value: "Smith"' +
'});' +
'Object.defineProperty(me, "middleName", {' +
' enumerable: true,' +
' configurable: true,' +
' get: function() { return "wait, is this really a getter?"; }' +
'});';
var sandbox = Components.utils.Sandbox("https://example.org/");
Components.utils.evalInSandbox(sandboxScript, sandbox);
// 1) trying to access properties in the prototype that have been redefined
// (non-own properties) will show the original 'native' version
// note that functions are not included in the output
console.log("1) Property redefined in the prototype:");
console.log(sandbox.me.toSource());
// -> "({firstName:"Joe", address:{street:"Main Street"}, lastName:"Smith"})"
// 2) trying to access properties on the object that shadow properties
// on the prototype will show the original 'native' version
console.log("2) Property that shadows the prototype:");
console.log(sandbox.me.constructor);
// -> function()
// 3) value properties defined by assignment to this are visible:
console.log("3) Value property defined by assignment to this:");
console.log(sandbox.me.firstName);
// -> "Joe"
// 4) value properties defined using defineProperty are visible:
console.log("4) Value property defined by defineProperty");
console.log(sandbox.me.lastName);
// -> "Smith"
// 5) accessor properties are not visible
console.log("5) Accessor property");
console.log(sandbox.me.middleName);
// -> undefined
// 6) accessing a value property of a value-property object is fine
console.log("6) Value property of a value-property object");
console.log(sandbox.me.address.street);
// -> "Main Street"
// 7) functions defined on the sandbox-defined object are not visible in the Xray
console.log("7) Call a function defined on the object");
try {
console.log(sandbox.me.fullName());
}
catch (e) {
console.error(e);
}
// -> TypeError: sandbox.me.fullName is not a function
// now with waived Xrays
console.log("Now with waived Xrays");
console.log("1) Property redefined in the prototype:");
console.log(Components.utils.waiveXrays(sandbox.me).toSource());
// -> "not what you expected?"
console.log("2) Property that shadows the prototype:");
console.log(Components.utils.waiveXrays(sandbox.me).constructor);
// -> "not a constructor"
console.log("3) Accessor property");
console.log(Components.utils.waiveXrays(sandbox.me).middleName);
// -> "wait, is this really a getter?"
console.log("4) Call a function defined on the object");
console.log(Components.utils.waiveXrays(sandbox.me).fullName());
// -> "Joe Smith"

View File

@ -4910,16 +4910,22 @@ void EventStateManager::GenerateDragDropEnterExit(nsPresContext* aPresContext,
nsCOMPtr<nsIContent> targetContent;
mCurrentTarget->GetContentForEvent(aDragEvent,
getter_AddRefs(targetContent));
if (targetContent && targetContent->IsText()) {
targetContent = targetContent->GetFlattenedTreeParent();
}
if (sLastDragOverFrame) {
// The frame has changed but the content may not have. Check before
// dispatching to content
sLastDragOverFrame->GetContentForEvent(aDragEvent,
getter_AddRefs(lastContent));
if (lastContent && lastContent->IsText()) {
lastContent = lastContent->GetFlattenedTreeParent();
}
FireDragEnterOrExit(sLastDragOverFrame->PresContext(), aDragEvent,
eDragExit, targetContent, lastContent,
sLastDragOverFrame);
RefPtr<nsPresContext> presContext = sLastDragOverFrame->PresContext();
FireDragEnterOrExit(presContext, aDragEvent, eDragExit, targetContent,
lastContent, sLastDragOverFrame);
nsIContent* target = sLastDragOverFrame
? sLastDragOverFrame.GetFrame()->GetContent()
: nullptr;
@ -4945,9 +4951,9 @@ void EventStateManager::GenerateDragDropEnterExit(nsPresContext* aPresContext,
targetContent, currentTraget);
if (sLastDragOverFrame) {
FireDragEnterOrExit(sLastDragOverFrame->PresContext(), aDragEvent,
eDragLeave, targetContent, lastContent,
sLastDragOverFrame);
RefPtr<nsPresContext> presContext = sLastDragOverFrame->PresContext();
FireDragEnterOrExit(presContext, aDragEvent, eDragLeave,
targetContent, lastContent, sLastDragOverFrame);
}
sLastDragOverFrame = mCurrentTarget;

View File

@ -415,8 +415,8 @@ class EventStateManager : public nsSupportsWeakReference, public nsIObserver {
* in that case the caller is responsible for updating hover state.
*/
void NotifyMouseOut(WidgetMouseEvent* aMouseEvent, nsIContent* aMovingInto);
void GenerateDragDropEnterExit(nsPresContext* aPresContext,
WidgetDragEvent* aDragEvent);
MOZ_CAN_RUN_SCRIPT void GenerateDragDropEnterExit(
nsPresContext* aPresContext, WidgetDragEvent* aDragEvent);
/**
* Return mMouseEnterLeaveHelper or relevant mPointersEnterLeaveHelper
@ -433,11 +433,12 @@ class EventStateManager : public nsSupportsWeakReference, public nsIObserver {
* @param aTargetContent target to set for the event
* @param aTargetFrame target frame for the event
*/
void FireDragEnterOrExit(nsPresContext* aPresContext,
WidgetDragEvent* aDragEvent, EventMessage aMessage,
nsIContent* aRelatedTarget,
nsIContent* aTargetContent,
AutoWeakFrame& aTargetFrame);
MOZ_CAN_RUN_SCRIPT void FireDragEnterOrExit(nsPresContext* aPresContext,
WidgetDragEvent* aDragEvent,
EventMessage aMessage,
nsIContent* aRelatedTarget,
nsIContent* aTargetContent,
AutoWeakFrame& aTargetFrame);
/**
* Update the initial drag session data transfer with any changes that occur
* on cloned data transfer objects used for events.

View File

@ -295,6 +295,9 @@ run-if = toolkit == 'windows' # Only Windows supports pen input synthesis
support-files =
!/gfx/layers/apz/test/mochitest/apz_test_native_event_utils.js
!/dom/base/test/Ahem.ttf
[test_bug1709832.html]
support-files =
!/dom/base/test/Ahem.ttf
[test_bug1710509.html]
disabled = Enable this when the taskcluster Windows machine upgrades to RS5+
run-if = toolkit == 'windows' # Only Windows supports pen input synthesis

View File

@ -0,0 +1,46 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>Test for Bug 1709832</title>
<script src="/tests/SimpleTest/SimpleTest.js"></script>
<script src="/tests/SimpleTest/EventUtils.js"></script>
<link rel="stylesheet" href="/tests/SimpleTest/test.css" />
<style>
@font-face {
font-family: Ahem;
src: url("/tests/dom/base/test/Ahem.ttf");
}
#container {
font: 16px/1 Ahem;
}
</style>
<div id="container">
<span id="start" draggable="true">start</span><span id="dest" draggable="true">dest</span>
</div>
<script>
SimpleTest.waitForExplicitFinish();
dest.addEventListener("dragenter", ev => {
is(ev.target, dest, "dragenter target should be element");
is(ev.relatedTarget, start, "dragenter relatedTarget should be element");
});
start.addEventListener("dragleave", ev => {
is(ev.target, start, "dragleave target should be element");
is(ev.relatedTarget, dest, "dragleave relatedTarget should be element");
SimpleTest.finish();
});
synthesizeMouse(start, 5, 5, { type: "mousedown" });
const utils = SpecialPowers.getDOMWindowUtils(window);
// Intentionally passing a text node to test EventStateManager behavior
// when GetContentForEvent gives a text node
// EventUtils.sendDragEvent is unusable because it requires an element as a target
utils.dispatchDOMEventViaPresShellForTesting(start.childNodes[0], new DragEvent("dragover", {
...createDragEventObject("dragover", start, window, null, {})
}));
utils.dispatchDOMEventViaPresShellForTesting(dest.childNodes[0], new DragEvent("dragover", {
...createDragEventObject("dragover", dest, window, null, {})
}));
</script>

View File

@ -20,6 +20,7 @@
#include "mozilla/ipc/Endpoint.h"
#include "mozilla/layers/ImageDataSerializer.h"
#include "mozilla/layers/VideoBridgeChild.h"
#include "mozilla/layers/VideoBridgeParent.h"
namespace mozilla {
@ -93,8 +94,10 @@ void RemoteDecoderManagerParent::ShutdownThreads() {
void RemoteDecoderManagerParent::ShutdownVideoBridge() {
if (sRemoteDecoderManagerParentThread) {
RefPtr<Runnable> task = NS_NewRunnableFunction(
"RemoteDecoderManagerParent::ShutdownVideoBridge",
[]() { VideoBridgeChild::Shutdown(); });
"RemoteDecoderManagerParent::ShutdownVideoBridge", []() {
VideoBridgeParent::Shutdown();
VideoBridgeChild::Shutdown();
});
SyncRunnable::DispatchToThread(sRemoteDecoderManagerParentThread, task);
}
}

View File

@ -664,6 +664,51 @@ nsresult ReferrerInfo::TrimReferrerWithPolicy(nsIURI* aReferrer,
return NS_OK;
}
bool ReferrerInfo::ShouldIgnoreLessRestrictedPolicies(
nsIHttpChannel* aChannel, const ReferrerPolicyEnum aPolicy) const {
MOZ_ASSERT(aChannel);
if (!StaticPrefs::network_http_referer_disallowCrossSiteRelaxingDefault()) {
return false;
}
// We only care about the less restricted policies.
if (aPolicy != ReferrerPolicy::Unsafe_url &&
aPolicy != ReferrerPolicy::No_referrer_when_downgrade &&
aPolicy != ReferrerPolicy::Origin_when_cross_origin) {
return false;
}
nsCOMPtr<nsILoadInfo> loadInfo = aChannel->LoadInfo();
// Check if the channel is triggered by the system or the extension.
auto* triggerBasePrincipal =
BasePrincipal::Cast(loadInfo->TriggeringPrincipal());
if (triggerBasePrincipal->IsSystemPrincipal() ||
triggerBasePrincipal->AddonPolicy()) {
return false;
}
if (!loadInfo->TriggeringPrincipal()->GetIsContentPrincipal()) {
LOG(("no triggering URI via loadInfo, assuming load is cross-site"));
return true;
}
nsCOMPtr<nsIURI> uri;
nsresult rv = aChannel->GetURI(getter_AddRefs(uri));
if (NS_WARN_IF(NS_FAILED(rv))) {
return true;
}
bool isCrossSite = true;
rv = loadInfo->TriggeringPrincipal()->IsThirdPartyURI(uri, &isCrossSite);
if (NS_WARN_IF(NS_FAILED(rv))) {
return true;
}
return isCrossSite;
}
void ReferrerInfo::LogMessageToConsole(
nsIHttpChannel* aChannel, const char* aMsg,
const nsTArray<nsString>& aParams) const {
@ -1169,7 +1214,8 @@ nsresult ReferrerInfo::ComputeReferrer(nsIHttpChannel* aChannel) {
return NS_OK;
}
if (mPolicy == ReferrerPolicy::_empty) {
if (mPolicy == ReferrerPolicy::_empty ||
ShouldIgnoreLessRestrictedPolicies(aChannel, mPolicy)) {
nsCOMPtr<nsILoadInfo> loadInfo = aChannel->LoadInfo();
OriginAttributes attrs = loadInfo->GetOriginAttributes();
bool isPrivate = attrs.mPrivateBrowsingId > 0;

View File

@ -383,6 +383,20 @@ class ReferrerInfo : public nsIReferrerInfo {
TrimmingPolicy aTrimmingPolicy,
nsACString& aResult) const;
/**
* Returns true if we should ignore less restricted referrer policies,
* including 'unsafe_url', 'no_referrer_when_downgrade' and
* 'origin_when_cross_origin', for the given channel. We only apply this
* restriction for cross-site requests. For the same-site request, we will
* still allow overriding the default referrer policy with less restricted
* one.
*
* Note that the channel triggered by the system and the extension will be
* exempt from this restriction.
*/
bool ShouldIgnoreLessRestrictedPolicies(
nsIHttpChannel* aChannel, const ReferrerPolicyEnum aPolicy) const;
/*
* Limit referrer length using the following ruleset:
* - If the length of referrer URL is over max length, strip down to origin.

View File

@ -169,7 +169,8 @@ async function runTest() {
}
SpecialPowers.pushPrefEnv({ set: [
["dom.security.https_first", true]
["dom.security.https_first", true],
["network.http.referer.disallowCrossSiteRelaxingDefault", false],
]}, runTest);
</script>

View File

@ -38,5 +38,6 @@ BROWSER_CHROME_MANIFESTS += [
"https-first/browser.ini",
"https-only/browser.ini",
"mixedcontentblocker/browser.ini",
"referrer-policy/browser.ini",
"sec-fetch/browser.ini",
]

View File

@ -1,5 +1,5 @@
"use strict";
module.exports = {
extends: ["plugin:mozilla/mochitest-test"],
extends: ["plugin:mozilla/browser-test", "plugin:mozilla/mochitest-test"],
};

View File

@ -0,0 +1,4 @@
[DEFAULT]
support-files = referrer_page.sjs
[browser_referrer_disallow_cross_site_relaxing.js]

View File

@ -0,0 +1,194 @@
/**
* Bug 1720294 - Testing disallow relaxing default referrer policy for
* cross-site requests.
*/
"use strict";
if (SpecialPowers.useRemoteSubframes) {
requestLongerTimeout(3);
}
const TEST_DOMAIN = "https://example.com/";
const TEST_SAME_SITE_DOMAIN = "https://test1.example.com/";
const TEST_SAME_SITE_DOMAIN_HTTP = "http://test1.example.com/";
const TEST_CROSS_SITE_DOMAIN = "https://test1.example.org/";
const TEST_CROSS_SITE_DOMAIN_HTTP = "http://test1.example.org/";
const TEST_PATH = "browser/dom/security/test/referrer-policy/";
const TEST_PAGE = `${TEST_DOMAIN}${TEST_PATH}referrer_page.sjs`;
const TEST_SAME_SITE_PAGE = `${TEST_SAME_SITE_DOMAIN}${TEST_PATH}referrer_page.sjs`;
const TEST_SAME_SITE_PAGE_HTTP = `${TEST_SAME_SITE_DOMAIN_HTTP}${TEST_PATH}referrer_page.sjs`;
const TEST_CROSS_SITE_PAGE = `${TEST_CROSS_SITE_DOMAIN}${TEST_PATH}referrer_page.sjs`;
const TEST_CROSS_SITE_PAGE_HTTP = `${TEST_CROSS_SITE_DOMAIN_HTTP}${TEST_PATH}referrer_page.sjs`;
const REFERRER_FULL = 0;
const REFERRER_ORIGIN = 1;
const REFERRER_NONE = 2;
function getExpectedReferrer(referrer, type) {
let res;
switch (type) {
case REFERRER_FULL:
res = referrer;
break;
case REFERRER_ORIGIN:
let url = new URL(referrer);
res = `${url.origin}/`;
break;
case REFERRER_NONE:
res = "";
break;
default:
ok(false, "unknown type");
}
return res;
}
async function verifyResultInPage(browser, expected) {
await SpecialPowers.spawn(browser, [expected], value => {
is(content.document.referrer, value, "The document.referrer is correct.");
let result = content.document.getElementById("result");
is(result.textContent, value, "The referer header is correct");
});
}
const TEST_CASES = [
// Testing that the referrer policy can be overridden with less restricted
// policy in the same-origin scenario.
{
policy: "unsafe-url",
referrer: TEST_PAGE,
test_url: TEST_PAGE,
expect: REFERRER_FULL,
},
// Testing that the referrer policy can be overridden with less restricted
// policy in the same-site scenario.
{
policy: "unsafe-url",
referrer: TEST_PAGE,
test_url: TEST_SAME_SITE_PAGE,
expect: REFERRER_FULL,
},
{
policy: "no-referrer-when-downgrade",
referrer: TEST_PAGE,
test_url: TEST_SAME_SITE_PAGE,
expect: REFERRER_FULL,
},
{
policy: "origin-when-cross-origin",
referrer: TEST_PAGE,
test_url: TEST_SAME_SITE_PAGE_HTTP,
expect: REFERRER_ORIGIN,
},
// Testing that the referrer policy cannot be overridden with less restricted
// policy in the cross-site scenario.
{
policy: "unsafe-url",
referrer: TEST_PAGE,
test_url: TEST_CROSS_SITE_PAGE,
expect: REFERRER_ORIGIN,
},
{
policy: "no-referrer-when-downgrade",
referrer: TEST_PAGE,
test_url: TEST_CROSS_SITE_PAGE,
expect: REFERRER_ORIGIN,
},
{
policy: "origin-when-cross-origin",
referrer: TEST_PAGE,
test_url: TEST_CROSS_SITE_PAGE_HTTP,
expect: REFERRER_NONE,
},
// Testing that the referrer policy can still be overridden with more
// restricted policy in the cross-site scenario.
{
policy: "no-referrer",
referrer: TEST_PAGE,
test_url: TEST_CROSS_SITE_PAGE,
expect: REFERRER_NONE,
},
];
add_task(async function setup() {
await SpecialPowers.pushPrefEnv({
set: [
["network.http.referer.disallowCrossSiteRelaxingDefault", true],
// Disable mixed content blocking to be able to test downgrade scenario.
["security.mixed_content.block_active_content", false],
],
});
});
add_task(async function test_iframe() {
for (let type of ["meta", "header"]) {
for (let test of TEST_CASES) {
info(`Test iframe: ${test.toSource()}`);
let referrerURL = `${test.referrer}?${type}=${test.policy}`;
let expected = getExpectedReferrer(referrerURL, test.expect);
await BrowserTestUtils.withNewTab(referrerURL, async browser => {
let iframeURL = test.test_url + "?show";
// Create an iframe and load the url.
let bc = await SpecialPowers.spawn(browser, [iframeURL], async url => {
let iframe = content.document.createElement("iframe");
iframe.src = url;
await new content.Promise(resolve => {
iframe.onload = () => {
resolve();
};
content.document.body.appendChild(iframe);
});
return iframe.browsingContext;
});
await verifyResultInPage(bc, expected);
});
}
}
});
add_task(async function test_link_click() {
for (let type of ["meta", "header"]) {
for (let test of TEST_CASES) {
info(`Test link click: ${test.toSource()}`);
let referrerURL = `${test.referrer}?${type}=${test.policy}`;
let expected = getExpectedReferrer(referrerURL, test.expect);
await BrowserTestUtils.withNewTab(referrerURL, async browser => {
let linkURL = test.test_url + "?show";
// Create the promise to wait for the navigation finishes.
let loadedPromise = BrowserTestUtils.browserLoaded(
browser,
false,
linkURL
);
// Generate the link and click it to navigate.
await SpecialPowers.spawn(browser, [linkURL], async url => {
let link = content.document.createElement("a");
link.textContent = "Link";
link.setAttribute("href", url);
content.document.body.appendChild(link);
link.click();
});
await loadedPromise;
await verifyResultInPage(browser, expected);
});
}
}
});

View File

@ -87,6 +87,10 @@ var tests = (function*() {
{ set: [["security.mixed_content.block_active_content", false]] },
advance
);
yield SpecialPowers.pushPrefEnv(
{ set: [["network.http.referer.disallowCrossSiteRelaxingDefault", false]] },
advance
);
yield SpecialPowers.pushPermissions(
[{ type: "systemXHR", allow: true, context: document }],
advance

View File

@ -0,0 +1,39 @@
Components.utils.importGlobalProperties(["URLSearchParams"]);
function handleRequest(request, response) {
let params = new URLSearchParams(request.queryString);
let referrerPolicyHeader = params.get("header") || "";
let metaReferrerPolicy = params.get("meta") || "";
let showReferrer = params.has("show");
if (referrerPolicyHeader) {
response.setHeader("Referrer-Policy", referrerPolicyHeader, false);
}
let metaString = "";
let resultString = "";
if (metaReferrerPolicy) {
metaString = `<meta name="referrer" content="${metaReferrerPolicy}">`;
}
if (showReferrer) {
if (request.hasHeader("Referer")) {
resultString = `Referer Header: <a id="result">${request.getHeader("Referer")}</a>`;
} else {
resultString = `Referer Header: <a id="result"></a>`;
}
}
response.write(
`<!DOCTYPE HTML>
<html>
<head>
${metaString}
</head>
<body>
${resultString}
</body>
</html>`);
}

View File

@ -84,6 +84,11 @@ function resetState() {
*/
var tests = (function*() {
yield SpecialPowers.pushPrefEnv(
{ set: [["network.http.referer.disallowCrossSiteRelaxingDefault", false]] },
advance
);
var iframe = document.getElementById("testframe");
var sjs = "/tests/dom/security/test/referrer-policy/img_referrer_testserver.sjs?action=generate-img-policy-test";

View File

@ -7,9 +7,9 @@
#include "Adapter.h"
#include "AdapterFeatures.h"
#include "AdapterLimits.h"
#include "Device.h"
#include "Instance.h"
#include "SupportedLimits.h"
#include "ipc/WebGPUChild.h"
#include "mozilla/dom/Promise.h"
@ -25,8 +25,7 @@ Adapter::Adapter(Instance* const aParent,
mBridge(aParent->mBridge),
mId(aInfo.id),
mFeatures(new AdapterFeatures(this)),
mLimits(new SupportedLimits(this, aInfo.limits)),
mIsSoftware(aInfo.ty == ffi::WGPUDeviceType_Cpu) {}
mLimits(new AdapterLimits(this, aInfo.limits)) {}
Adapter::~Adapter() { Cleanup(); }
@ -38,8 +37,7 @@ void Adapter::Cleanup() {
}
const RefPtr<AdapterFeatures>& Adapter::Features() const { return mFeatures; }
const RefPtr<SupportedLimits>& Adapter::Limits() const { return mLimits; }
bool Adapter::IsSoftware() const { return mIsSoftware; }
const RefPtr<AdapterLimits>& Adapter::Limits() const { return mLimits; }
already_AddRefed<dom::Promise> Adapter::RequestDevice(
const dom::GPUDeviceDescriptor& aDesc, ErrorResult& aRv) {

View File

@ -22,9 +22,9 @@ struct GPUFeatures;
namespace webgpu {
class AdapterFeatures;
class AdapterLimits;
class Device;
class Instance;
class SupportedLimits;
class WebGPUChild;
namespace ffi {
struct WGPUAdapterInformation;
@ -46,15 +46,13 @@ class Adapter final : public ObjectBase, public ChildOf<Instance> {
// Cant have them as `const` right now, since we wouldn't be able
// to unlink them in CC unlink.
RefPtr<AdapterFeatures> mFeatures;
RefPtr<SupportedLimits> mLimits;
const bool mIsSoftware = false;
RefPtr<AdapterLimits> mLimits;
public:
Adapter(Instance* const aParent, const ffi::WGPUAdapterInformation& aInfo);
void GetName(nsString& out) const { out = mName; }
const RefPtr<AdapterFeatures>& Features() const;
const RefPtr<SupportedLimits>& Limits() const;
bool IsSoftware() const;
const RefPtr<AdapterLimits>& Limits() const;
already_AddRefed<dom::Promise> RequestDevice(
const dom::GPUDeviceDescriptor& aDesc, ErrorResult& aRv);

View File

@ -3,7 +3,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SupportedLimits.h"
#include "AdapterLimits.h"
#include "Adapter.h"
#include "mozilla/dom/WebGPUBinding.h"
#include "mozilla/webgpu/ffi/wgpu.h"
@ -11,64 +11,64 @@
namespace mozilla {
namespace webgpu {
GPU_IMPL_CYCLE_COLLECTION(SupportedLimits, mParent)
GPU_IMPL_JS_WRAP(SupportedLimits)
GPU_IMPL_CYCLE_COLLECTION(AdapterLimits, mParent)
GPU_IMPL_JS_WRAP(AdapterLimits)
SupportedLimits::SupportedLimits(Adapter* const aParent,
const ffi::WGPULimits& aLimits)
AdapterLimits::AdapterLimits(Adapter* const aParent,
const ffi::WGPULimits& aLimits)
: ChildOf(aParent), mLimits(new ffi::WGPULimits(aLimits)) {}
SupportedLimits::~SupportedLimits() = default;
AdapterLimits::~AdapterLimits() = default;
uint32_t SupportedLimits::MaxTextureDimension1D() const {
uint32_t AdapterLimits::MaxTextureDimension1D() const {
return mLimits->max_texture_dimension_1d;
}
uint32_t SupportedLimits::MaxTextureDimension2D() const {
uint32_t AdapterLimits::MaxTextureDimension2D() const {
return mLimits->max_texture_dimension_2d;
}
uint32_t SupportedLimits::MaxTextureDimension3D() const {
uint32_t AdapterLimits::MaxTextureDimension3D() const {
return mLimits->max_texture_dimension_3d;
}
uint32_t SupportedLimits::MaxTextureArrayLayers() const {
uint32_t AdapterLimits::MaxTextureArrayLayers() const {
return mLimits->max_texture_array_layers;
}
uint32_t SupportedLimits::MaxBindGroups() const {
uint32_t AdapterLimits::MaxBindGroups() const {
return mLimits->max_bind_groups;
}
uint32_t SupportedLimits::MaxDynamicUniformBuffersPerPipelineLayout() const {
uint32_t AdapterLimits::MaxDynamicUniformBuffersPerPipelineLayout() const {
return mLimits->max_dynamic_uniform_buffers_per_pipeline_layout;
}
uint32_t SupportedLimits::MaxDynamicStorageBuffersPerPipelineLayout() const {
uint32_t AdapterLimits::MaxDynamicStorageBuffersPerPipelineLayout() const {
return mLimits->max_dynamic_storage_buffers_per_pipeline_layout;
}
uint32_t SupportedLimits::MaxSampledTexturesPerShaderStage() const {
uint32_t AdapterLimits::MaxSampledTexturesPerShaderStage() const {
return mLimits->max_sampled_textures_per_shader_stage;
}
uint32_t SupportedLimits::MaxSamplersPerShaderStage() const {
uint32_t AdapterLimits::MaxSamplersPerShaderStage() const {
return mLimits->max_samplers_per_shader_stage;
}
uint32_t SupportedLimits::MaxStorageBuffersPerShaderStage() const {
uint32_t AdapterLimits::MaxStorageBuffersPerShaderStage() const {
return mLimits->max_storage_buffers_per_shader_stage;
}
uint32_t SupportedLimits::MaxStorageTexturesPerShaderStage() const {
uint32_t AdapterLimits::MaxStorageTexturesPerShaderStage() const {
return mLimits->max_storage_textures_per_shader_stage;
}
uint32_t SupportedLimits::MaxUniformBuffersPerShaderStage() const {
uint32_t AdapterLimits::MaxUniformBuffersPerShaderStage() const {
return mLimits->max_uniform_buffers_per_shader_stage;
}
uint32_t SupportedLimits::MaxUniformBufferBindingSize() const {
uint32_t AdapterLimits::MaxUniformBufferBindingSize() const {
return mLimits->max_uniform_buffer_binding_size;
}
uint32_t SupportedLimits::MaxStorageBufferBindingSize() const {
uint32_t AdapterLimits::MaxStorageBufferBindingSize() const {
return mLimits->max_storage_buffer_binding_size;
}
uint32_t SupportedLimits::MaxVertexBuffers() const {
uint32_t AdapterLimits::MaxVertexBuffers() const {
return mLimits->max_vertex_buffers;
}
uint32_t SupportedLimits::MaxVertexAttributes() const {
uint32_t AdapterLimits::MaxVertexAttributes() const {
return mLimits->max_vertex_attributes;
}
uint32_t SupportedLimits::MaxVertexBufferArrayStride() const {
uint32_t AdapterLimits::MaxVertexBufferArrayStride() const {
return mLimits->max_vertex_buffer_array_stride;
}

View File

@ -3,8 +3,8 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GPU_SupportedLimits_H_
#define GPU_SupportedLimits_H_
#ifndef GPU_AdapterLimitss_H_
#define GPU_AdapterLimitss_H_
#include "nsWrapperCache.h"
#include "ObjectModel.h"
@ -16,12 +16,12 @@ struct WGPULimits;
}
class Adapter;
class SupportedLimits final : public nsWrapperCache, public ChildOf<Adapter> {
class AdapterLimits final : public nsWrapperCache, public ChildOf<Adapter> {
const UniquePtr<ffi::WGPULimits> mLimits;
public:
GPU_DECL_CYCLE_COLLECTION(SupportedLimits)
GPU_DECL_JS_WRAP(SupportedLimits)
GPU_DECL_CYCLE_COLLECTION(AdapterLimits)
GPU_DECL_JS_WRAP(AdapterLimits)
uint32_t MaxTextureDimension1D() const;
uint32_t MaxTextureDimension2D() const;
@ -41,14 +41,14 @@ class SupportedLimits final : public nsWrapperCache, public ChildOf<Adapter> {
uint32_t MaxVertexAttributes() const;
uint32_t MaxVertexBufferArrayStride() const;
SupportedLimits(Adapter* const aParent, const ffi::WGPULimits& aLimits);
AdapterLimits(Adapter* const aParent, const ffi::WGPULimits& aLimits);
private:
~SupportedLimits();
~AdapterLimits();
void Cleanup() {}
};
} // namespace webgpu
} // namespace mozilla
#endif // GPU_SupportedLimits_H_
#endif // GPU_AdapterLimitss_H_

View File

@ -5,12 +5,11 @@
#include "mozilla/dom/WebGPUBinding.h"
#include "CanvasContext.h"
#include "SwapChain.h"
#include "nsDisplayList.h"
#include "LayerUserData.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "mozilla/layers/CompositorManagerChild.h"
#include "mozilla/layers/ImageDataSerializer.h"
#include "mozilla/layers/LayersSurfaces.h"
#include "mozilla/layers/RenderRootStateManager.h"
#include "mozilla/layers/WebRenderBridgeChild.h"
#include "ipc/WebGPUChild.h"
@ -21,7 +20,7 @@ namespace webgpu {
NS_IMPL_CYCLE_COLLECTING_ADDREF(CanvasContext)
NS_IMPL_CYCLE_COLLECTING_RELEASE(CanvasContext)
GPU_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(CanvasContext, mTexture, mBridge,
GPU_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(CanvasContext, mSwapChain,
mCanvasElement, mOffscreenCanvas)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(CanvasContext)
@ -40,7 +39,10 @@ CanvasContext::~CanvasContext() {
}
void CanvasContext::Cleanup() {
Unconfigure();
if (mSwapChain) {
mSwapChain->Destroy(mExternalImageId);
mSwapChain = nullptr;
}
if (mRenderRootStateManager && mImageKey) {
mRenderRootStateManager->AddImageKeyForDiscard(mImageKey.value());
mRenderRootStateManager = nullptr;
@ -64,32 +66,41 @@ bool CanvasContext::UpdateWebRenderCanvasData(
return true;
}
void CanvasContext::Configure(const dom::GPUCanvasConfiguration& aDesc) {
Unconfigure();
dom::GPUTextureFormat CanvasContext::GetSwapChainPreferredFormat(
Adapter&) const {
return dom::GPUTextureFormat::Bgra8unorm;
}
RefPtr<SwapChain> CanvasContext::ConfigureSwapChain(
const dom::GPUSwapChainDescriptor& aDesc, ErrorResult& aRv) {
Cleanup();
gfx::SurfaceFormat format;
switch (aDesc.mFormat) {
case dom::GPUTextureFormat::Rgba8unorm:
mGfxFormat = gfx::SurfaceFormat::R8G8B8A8;
format = gfx::SurfaceFormat::R8G8B8A8;
break;
case dom::GPUTextureFormat::Bgra8unorm:
mGfxFormat = gfx::SurfaceFormat::B8G8R8A8;
format = gfx::SurfaceFormat::B8G8R8A8;
break;
default:
NS_WARNING("Specified swap chain format is not supported");
return;
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return nullptr;
}
gfx::IntSize actualSize(mWidth, mHeight);
mTexture = aDesc.mDevice->InitSwapChain(aDesc, mExternalImageId, mGfxFormat,
&actualSize);
mTexture->mTargetCanvasElement = mCanvasElement;
mBridge = aDesc.mDevice->GetBridge();
mGfxSize = actualSize;
dom::GPUExtent3DDict extent;
extent.mWidth = mWidth;
extent.mHeight = mHeight;
extent.mDepthOrArrayLayers = 1;
mSwapChain = new SwapChain(aDesc, extent, mExternalImageId, format);
// Force a new frame to be built, which will execute the
// `CanvasContextType::WebGPU` switch case in `CreateWebRenderCommands` and
// populate the WR user data.
mCanvasElement->InvalidateCanvas();
mSwapChain->GetCurrentTexture()->mTargetCanvasElement = mCanvasElement;
return mSwapChain;
}
Maybe<wr::ImageKey> CanvasContext::GetImageKey() const { return mImageKey; }
@ -102,40 +113,37 @@ wr::ImageKey CanvasContext::CreateImageKey(
return key;
}
void CanvasContext::Unconfigure() {
if (mBridge && mBridge->IsOpen()) {
mBridge->SendSwapChainDestroy(mExternalImageId);
}
mBridge = nullptr;
mTexture = nullptr;
}
dom::GPUTextureFormat CanvasContext::GetPreferredFormat(Adapter&) const {
return dom::GPUTextureFormat::Bgra8unorm;
}
RefPtr<Texture> CanvasContext::GetCurrentTexture() { return mTexture; }
bool CanvasContext::UpdateWebRenderLocalCanvasData(
layers::WebRenderLocalCanvasData* aCanvasData) {
if (!mTexture) {
if (!mSwapChain || !mSwapChain->GetParent()) {
return false;
}
aCanvasData->mGpuBridge = mBridge.get();
aCanvasData->mGpuTextureId = mTexture->mId;
aCanvasData->mExternalImageId = mExternalImageId;
aCanvasData->mFormat = mGfxFormat;
return true;
}
const auto size =
nsIntSize(AssertedCast<int>(mWidth), AssertedCast<int>(mHeight));
if (mSwapChain->mSize != size) {
const auto gfxFormat = mSwapChain->mGfxFormat;
dom::GPUSwapChainDescriptor desc;
desc.mFormat = static_cast<dom::GPUTextureFormat>(mSwapChain->mFormat);
desc.mUsage = mSwapChain->mUsage;
desc.mDevice = mSwapChain->GetParent();
wr::ImageDescriptor CanvasContext::MakeImageDescriptor() const {
const layers::RGBDescriptor rgbDesc(mGfxSize, mGfxFormat, false);
const auto targetStride = layers::ImageDataSerializer::GetRGBStride(rgbDesc);
const bool preferCompositorSurface = true;
return wr::ImageDescriptor(mGfxSize, targetStride, mGfxFormat,
wr::OpacityType::HasAlphaChannel,
preferCompositorSurface);
mSwapChain->Destroy(mExternalImageId);
mExternalImageId =
layers::CompositorManagerChild::GetInstance()->GetNextExternalImageId();
dom::GPUExtent3DDict extent;
extent.mWidth = size.width;
extent.mHeight = size.height;
extent.mDepthOrArrayLayers = 1;
mSwapChain = new SwapChain(desc, extent, mExternalImageId, gfxFormat);
}
aCanvasData->mGpuBridge = mSwapChain->GetParent()->GetBridge().get();
aCanvasData->mGpuTextureId = mSwapChain->GetCurrentTexture()->mId;
aCanvasData->mExternalImageId = mExternalImageId;
aCanvasData->mFormat = mSwapChain->mGfxFormat;
return true;
}
} // namespace webgpu

View File

@ -9,12 +9,12 @@
#include "nsICanvasRenderingContextInternal.h"
#include "nsWrapperCache.h"
#include "ObjectModel.h"
#include "SwapChain.h"
#include "mozilla/webrender/WebRenderAPI.h"
namespace mozilla {
namespace dom {
class Promise;
struct GPUCanvasConfiguration;
enum class GPUTextureFormat : uint8_t;
} // namespace dom
namespace layers {
@ -22,6 +22,7 @@ class WebRenderLocalCanvasData;
};
namespace webgpu {
class Adapter;
class SwapChain;
class Texture;
class CanvasContext final : public nsICanvasRenderingContextInternal,
@ -40,13 +41,13 @@ class CanvasContext final : public nsICanvasRenderingContextInternal,
JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
void RemoveSwapChain();
Maybe<wr::ImageKey> GetImageKey() const;
wr::ImageKey CreateImageKey(layers::RenderRootStateManager* aManager);
bool UpdateWebRenderLocalCanvasData(
layers::WebRenderLocalCanvasData* aCanvasData);
wr::ImageDescriptor MakeImageDescriptor() const;
wr::ExternalImageId mExternalImageId;
public: // nsICanvasRenderingContextInternal
@ -97,19 +98,14 @@ class CanvasContext final : public nsICanvasRenderingContextInternal,
bool IsContextCleanForFrameCapture() override { return false; }
public:
void Configure(const dom::GPUCanvasConfiguration& aDesc);
void Unconfigure();
dom::GPUTextureFormat GetPreferredFormat(Adapter& aAdapter) const;
RefPtr<Texture> GetCurrentTexture();
dom::GPUTextureFormat GetSwapChainPreferredFormat(Adapter& aAdapter) const;
RefPtr<SwapChain> ConfigureSwapChain(const dom::GPUSwapChainDescriptor& aDesc,
ErrorResult& aRv);
private:
uint32_t mWidth = 0, mHeight = 0;
RefPtr<WebGPUChild> mBridge;
RefPtr<Texture> mTexture;
gfx::SurfaceFormat mGfxFormat = gfx::SurfaceFormat::R8G8B8A8;
gfx::IntSize mGfxSize;
RefPtr<SwapChain> mSwapChain;
RefPtr<layers::RenderRootStateManager> mRenderRootStateManager;
Maybe<wr::ImageKey> mImageKey;
};

View File

@ -22,8 +22,6 @@ class CompilationMessage final : public nsWrapperCache,
dom::GPUCompilationMessageType mType = dom::GPUCompilationMessageType::Error;
uint64_t mLineNum = 0;
uint64_t mLinePos = 0;
uint64_t mOffset = 0;
uint64_t mLength = 0;
public:
GPU_DECL_CYCLE_COLLECTION(CompilationMessage)
@ -33,8 +31,6 @@ class CompilationMessage final : public nsWrapperCache,
dom::GPUCompilationMessageType Type() const { return mType; }
uint64_t LineNum() const { return mLineNum; }
uint64_t LinePos() const { return mLinePos; }
uint64_t Offset() const { return mOffset; }
uint64_t Length() const { return mLength; }
private:
explicit CompilationMessage(CompilationInfo* const aParent);

View File

@ -228,28 +228,13 @@ already_AddRefed<RenderPipeline> Device::CreateRenderPipeline(
}
already_AddRefed<Texture> Device::InitSwapChain(
const dom::GPUCanvasConfiguration& aDesc,
wr::ExternalImageId aExternalImageId, gfx::SurfaceFormat aFormat,
gfx::IntSize* aCanvasSize) {
gfx::IntSize size = *aCanvasSize;
if (aDesc.mSize.WasPassed()) {
const auto& descSize = aDesc.mSize.Value();
if (descSize.IsRangeEnforcedUnsignedLongSequence()) {
const auto& seq = descSize.GetAsRangeEnforcedUnsignedLongSequence();
// TODO: add a check for `seq.Length()`
size.width = AssertedCast<int>(seq[0]);
size.height = AssertedCast<int>(seq[1]);
} else if (descSize.IsGPUExtent3DDict()) {
const auto& dict = descSize.GetAsGPUExtent3DDict();
size.width = AssertedCast<int>(dict.mWidth);
size.height = AssertedCast<int>(dict.mHeight);
} else {
MOZ_CRASH("Unexpected union");
}
}
*aCanvasSize = size;
const layers::RGBDescriptor rgbDesc(size, aFormat, false);
const dom::GPUSwapChainDescriptor& aDesc,
const dom::GPUExtent3DDict& aExtent3D, wr::ExternalImageId aExternalImageId,
gfx::SurfaceFormat aFormat) {
const layers::RGBDescriptor rgbDesc(
gfx::IntSize(AssertedCast<int>(aExtent3D.mWidth),
AssertedCast<int>(aExtent3D.mHeight)),
aFormat, false);
// buffer count doesn't matter much, will be created on demand
const size_t maxBufferCount = 10;
mBridge->DeviceCreateSwapChain(mId, rgbDesc, maxBufferCount,
@ -257,10 +242,7 @@ already_AddRefed<Texture> Device::InitSwapChain(
dom::GPUTextureDescriptor desc;
desc.mDimension = dom::GPUTextureDimension::_2d;
auto& sizeDict = desc.mSize.SetAsGPUExtent3DDict();
sizeDict.mWidth = size.width;
sizeDict.mHeight = size.height;
sizeDict.mDepthOrArrayLayers = 1;
desc.mSize.SetAsGPUExtent3DDict() = aExtent3D;
desc.mFormat = aDesc.mFormat;
desc.mMipLevelCount = 1;
desc.mSampleCount = 1;

View File

@ -35,7 +35,7 @@ struct GPUComputePipelineDescriptor;
struct GPURenderBundleEncoderDescriptor;
struct GPURenderPipelineDescriptor;
struct GPUCommandEncoderDescriptor;
struct GPUCanvasConfiguration;
struct GPUSwapChainDescriptor;
class EventHandlerNonNull;
class Promise;
@ -90,9 +90,9 @@ class Device final : public DOMEventTargetHelper {
void UnmapBuffer(RawId aId, ipc::Shmem&& aShmem, bool aFlush,
bool aKeepShmem);
already_AddRefed<Texture> InitSwapChain(
const dom::GPUCanvasConfiguration& aDesc,
wr::ExternalImageId aExternalImageId, gfx::SurfaceFormat aFormat,
gfx::IntSize* aDefaultSize);
const dom::GPUSwapChainDescriptor& aDesc,
const dom::GPUExtent3DDict& aExtent3D,
wr::ExternalImageId aExternalImageId, gfx::SurfaceFormat aFormat);
private:
~Device();

View File

@ -40,7 +40,7 @@ ffi::WGPUStoreOp ConvertStoreOp(const dom::GPUStoreOp& aOp) {
switch (aOp) {
case dom::GPUStoreOp::Store:
return ffi::WGPUStoreOp_Store;
case dom::GPUStoreOp::Discard:
case dom::GPUStoreOp::Clear:
return ffi::WGPUStoreOp_Clear;
default:
MOZ_CRASH("Unexpected load op");

52
dom/webgpu/SwapChain.cpp Normal file
View File

@ -0,0 +1,52 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SwapChain.h"
#include "Texture.h"
#include "mozilla/dom/WebGPUBinding.h"
#include "ipc/WebGPUChild.h"
namespace mozilla {
namespace webgpu {
GPU_IMPL_CYCLE_COLLECTION(SwapChain, mParent, mTexture)
GPU_IMPL_JS_WRAP(SwapChain)
SwapChain::SwapChain(const dom::GPUSwapChainDescriptor& aDesc,
const dom::GPUExtent3DDict& aExtent3D,
wr::ExternalImageId aExternalImageId,
gfx::SurfaceFormat aFormat)
: ChildOf(aDesc.mDevice),
mGfxFormat(aFormat),
mFormat(static_cast<uint8_t>(aDesc.mFormat)),
mUsage(aDesc.mUsage),
mSize(aExtent3D.mWidth, aExtent3D.mHeight),
mTexture(aDesc.mDevice->InitSwapChain(aDesc, aExtent3D, aExternalImageId,
aFormat)) {}
SwapChain::~SwapChain() { Cleanup(); }
void SwapChain::Cleanup() {
if (mValid) {
mValid = false;
}
}
RefPtr<Device> SwapChain::GetParent() const { return mParent; }
void SwapChain::Destroy(wr::ExternalImageId aExternalImageId) {
if (mValid && mParent && mParent->GetBridge()) {
mValid = false;
auto bridge = mParent->GetBridge();
if (bridge && bridge->IsOpen()) {
bridge->SendSwapChainDestroy(aExternalImageId);
}
}
}
RefPtr<Texture> SwapChain::GetCurrentTexture() { return mTexture; }
} // namespace webgpu
} // namespace mozilla

54
dom/webgpu/SwapChain.h Normal file
View File

@ -0,0 +1,54 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GPU_SwapChain_H_
#define GPU_SwapChain_H_
#include "nsWrapperCache.h"
#include "ObjectModel.h"
#include "mozilla/webrender/WebRenderAPI.h"
namespace mozilla {
namespace dom {
struct GPUExtent3DDict;
struct GPUSwapChainDescriptor;
} // namespace dom
namespace webgpu {
class Device;
class Texture;
class SwapChain final : public ObjectBase, public ChildOf<Device> {
public:
GPU_DECL_CYCLE_COLLECTION(SwapChain)
GPU_DECL_JS_WRAP(SwapChain)
SwapChain(const dom::GPUSwapChainDescriptor& aDesc,
const dom::GPUExtent3DDict& aExtent3D,
wr::ExternalImageId aExternalImageId, gfx::SurfaceFormat aFormat);
RefPtr<Device> GetParent() const;
void Destroy(wr::ExternalImageId aExternalImageId);
const gfx::SurfaceFormat mGfxFormat;
const uint8_t
mFormat; // This is `dom::GPUTextureFormat` but without the includes
const uint32_t mUsage;
const nsIntSize mSize;
private:
virtual ~SwapChain();
void Cleanup();
RefPtr<Texture> mTexture;
public:
RefPtr<Texture> GetCurrentTexture();
};
} // namespace webgpu
} // namespace mozilla
#endif // GPU_SwapChain_H_

View File

@ -208,8 +208,8 @@ Maybe<RawId> WebGPUChild::AdapterRequestDevice(
ffi::WGPUDeviceDescriptor desc = {};
ffi::wgpu_client_fill_default_limits(&desc.limits);
if (aDesc.mRequiredLimits.WasPassed()) {
for (const auto& entry : aDesc.mRequiredLimits.Value().Entries()) {
if (aDesc.mNonGuaranteedLimits.WasPassed()) {
for (const auto& entry : aDesc.mNonGuaranteedLimits.Value().Entries()) {
Unused << entry; // TODO
}
/*desc.limits.max_bind_groups = lim.mMaxBindGroups;

View File

@ -17,6 +17,7 @@ DIRS += []
h_and_cpp = [
"Adapter",
"AdapterFeatures",
"AdapterLimits",
"BindGroup",
"BindGroupLayout",
"Buffer",
@ -41,7 +42,7 @@ h_and_cpp = [
"RenderPipeline",
"Sampler",
"ShaderModule",
"SupportedLimits",
"SwapChain",
"Texture",
"TextureView",
"ValidationError",

View File

@ -91,8 +91,8 @@ interface GPUAdapterFeatures {
};
dictionary GPUDeviceDescriptor {
sequence<GPUFeatureName> requiredFeatures = [];
record<DOMString, GPUSize32> requiredLimits;
sequence<GPUFeatureName> nonGuaranteedFeatures = [];
record<DOMString, GPUSize32> nonGuaranteedLimits;
};
enum GPUFeatureName {
@ -106,7 +106,7 @@ enum GPUFeatureName {
[Pref="dom.webgpu.enabled",
Exposed=Window]
interface GPUSupportedLimits {
interface GPUAdapterLimits {
readonly attribute unsigned long maxTextureDimension1D;
readonly attribute unsigned long maxTextureDimension2D;
readonly attribute unsigned long maxTextureDimension3D;
@ -131,8 +131,7 @@ interface GPUSupportedLimits {
interface GPUAdapter {
readonly attribute DOMString name;
[SameObject] readonly attribute GPUAdapterFeatures features;
[SameObject] readonly attribute GPUSupportedLimits limits;
readonly attribute boolean isSoftware;
[SameObject] readonly attribute GPUAdapterLimits limits;
[NewObject]
Promise<GPUDevice> requestDevice(optional GPUDeviceDescriptor descriptor = {});
@ -529,11 +528,12 @@ dictionary GPUTextureBindingLayout {
};
enum GPUStorageTextureAccess {
"read-only",
"write-only",
};
dictionary GPUStorageTextureBindingLayout {
GPUStorageTextureAccess access = "write-only";
required GPUStorageTextureAccess access;
required GPUTextureFormat format;
GPUTextureViewDimension viewDimension = "2d";
};
@ -599,8 +599,6 @@ interface GPUCompilationMessage {
readonly attribute GPUCompilationMessageType type;
readonly attribute unsigned long long lineNum;
readonly attribute unsigned long long linePos;
readonly attribute unsigned long long offset;
readonly attribute unsigned long long length;
};
[Pref="dom.webgpu.enabled",
@ -696,7 +694,7 @@ enum GPUVertexFormat {
"sint32x4",
};
enum GPUVertexStepMode {
enum GPUInputStepMode {
"vertex",
"instance",
};
@ -709,7 +707,7 @@ dictionary GPUVertexAttribute {
dictionary GPUVertexBufferLayout {
required GPUSize64 arrayStride;
GPUVertexStepMode stepMode = "vertex";
GPUInputStepMode stepMode = "vertex";
required sequence<GPUVertexAttribute> attributes;
};
@ -872,7 +870,7 @@ enum GPULoadOp {
enum GPUStoreOp {
"store",
"discard"
"clear"
};
dictionary GPURenderPassColorAttachment {
@ -1067,9 +1065,10 @@ GPURenderBundle includes GPUObjectBase;
dictionary GPURenderBundleDescriptor : GPUObjectDescriptorBase {
};
dictionary GPURenderBundleEncoderDescriptor : GPURenderPassLayout {
boolean depthReadOnly = false;
boolean stencilReadOnly = false;
dictionary GPURenderBundleEncoderDescriptor : GPUObjectDescriptorBase {
required sequence<GPUTextureFormat> colorFormats;
GPUTextureFormat depthStencilFormat;
GPUSize32 sampleCount = 1;
};
[Pref="dom.webgpu.enabled",
@ -1081,14 +1080,8 @@ GPURenderBundleEncoder includes GPUObjectBase;
GPURenderBundleEncoder includes GPUProgrammablePassEncoder;
GPURenderBundleEncoder includes GPURenderEncoderBase;
dictionary GPURenderPassLayout: GPUObjectDescriptorBase {
required sequence<GPUTextureFormat> colorFormats;
GPUTextureFormat depthStencilFormat;
GPUSize32 sampleCount = 1;
};
// ****************************************************************************
// OTHER (Canvas, Query, Queue, Device)
// OTHER (Query, Queue, SwapChain, Device)
// ****************************************************************************
// Query set
@ -1149,13 +1142,18 @@ interface GPUQueue {
};
GPUQueue includes GPUObjectBase;
dictionary GPUCanvasConfiguration {
[Pref="dom.webgpu.enabled",
Exposed=Window]
interface GPUSwapChain {
GPUTexture getCurrentTexture();
};
GPUSwapChain includes GPUObjectBase;
dictionary GPUSwapChainDescriptor : GPUObjectDescriptorBase {
required GPUDevice device;
required GPUTextureFormat format;
GPUTextureUsageFlags usage = 0x10; //GPUTextureUsage.OUTPUT_ATTACHMENT
//GPUPredefinedColorSpace colorSpace = "srgb"; //TODO
GPUCanvasCompositingAlphaMode compositingAlphaMode = "opaque";
GPUExtent3D size;
};
enum GPUCanvasCompositingAlphaMode {
@ -1166,11 +1164,10 @@ enum GPUCanvasCompositingAlphaMode {
[Pref="dom.webgpu.enabled",
Exposed=Window]
interface GPUCanvasContext {
// Calling configure() a second time invalidates the previous one,
// Calling configureSwapChain a second time invalidates the previous one,
// and all of the textures it's produced.
void configure(GPUCanvasConfiguration descriptor);
void unconfigure();
[Throws]
GPUSwapChain configureSwapChain(GPUSwapChainDescriptor descriptor);
GPUTextureFormat getPreferredFormat(GPUAdapter adapter);
GPUTexture getCurrentTexture();
GPUTextureFormat getSwapChainPreferredFormat(GPUAdapter adapter);
};
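For orientation, the swap-chain shape this hunk restores in the WebIDL matches the Rust-side API that wgpu-rs exposed in the same era. A minimal sketch against an assumed wgpu-rs 0.8-style dependency; `device` and `surface` are presumed to already exist, and the function name, size, and format are illustrative:

```rust
// Sketch only: Rust analogue of the GPUSwapChainDescriptor / GPUSwapChain shapes above.
fn configure_swap_chain(device: &wgpu::Device, surface: &wgpu::Surface) -> wgpu::SwapChain {
    // Mirrors GPUCanvasContext.configureSwapChain(); the caller later asks the
    // returned swap chain for its current texture (GPUSwapChain.getCurrentTexture()).
    device.create_swap_chain(
        surface,
        &wgpu::SwapChainDescriptor {
            // The IDL default usage 0x10 is the output/render attachment bit.
            usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
            format: wgpu::TextureFormat::Bgra8UnormSrgb,
            width: 800,
            height: 600,
            present_mode: wgpu::PresentMode::Fifo,
        },
    )
}
```

As the comment in the interface above notes, configuring the swap chain a second time invalidates the previous one and all of the textures it has produced.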

View File

@ -369,7 +369,7 @@ class RecordedFillGlyphs : public RecordedDrawingEvent<RecordedFillGlyphs> {
ReferencePtr mScaledFont;
PatternStorage mPattern;
DrawOptions mOptions;
Glyph* mGlyphs;
Glyph* mGlyphs = nullptr;
uint32_t mNumGlyphs;
};
@ -878,7 +878,7 @@ class RecordedSourceSurfaceCreation
friend class RecordedEvent;
ReferencePtr mRefPtr;
uint8_t* mData;
uint8_t* mData = nullptr;
int32_t mStride;
IntSize mSize;
SurfaceFormat mFormat;
@ -1047,7 +1047,7 @@ class RecordedGradientStopsCreation
friend class RecordedEvent;
ReferencePtr mRefPtr;
GradientStop* mStops;
GradientStop* mStops = nullptr;
uint32_t mNumStops;
ExtendMode mExtendMode;
bool mDataOwned;
@ -1186,7 +1186,6 @@ class RecordedFontData : public RecordedEventDerived<RecordedFontData> {
explicit RecordedFontData(UnscaledFont* aUnscaledFont)
: RecordedEventDerived(FONTDATA),
mType(aUnscaledFont->GetType()),
mData(nullptr),
mFontDetails() {
mGetFontFileDataSucceeded =
aUnscaledFont->GetFontFileData(&FontDataProc, this) && mData;
@ -1212,7 +1211,7 @@ class RecordedFontData : public RecordedEventDerived<RecordedFontData> {
friend class RecordedEvent;
FontType mType;
uint8_t* mData;
uint8_t* mData = nullptr;
RecordedFontDetails mFontDetails;
bool mGetFontFileDataSucceeded;
@ -3471,7 +3470,7 @@ inline bool RecordedFontData::GetFontDetails(RecordedFontDetails& fontDetails) {
template <class S>
RecordedFontData::RecordedFontData(S& aStream)
: RecordedEventDerived(FONTDATA), mType(FontType::UNKNOWN), mData(nullptr) {
: RecordedEventDerived(FONTDATA), mType(FontType::UNKNOWN) {
ReadElementConstrained(aStream, mType, FontType::DWRITE, FontType::UNKNOWN);
ReadElement(aStream, mFontDetails.fontDataKey);
ReadElement(aStream, mFontDetails.size);

View File

@ -89,8 +89,22 @@ void VideoBridgeParent::ActorDestroy(ActorDestroyReason aWhy) {
mClosed = true;
}
/* static */
void VideoBridgeParent::Shutdown() {
if (sVideoBridgeFromRddProcess) {
sVideoBridgeFromRddProcess->ReleaseCompositorThread();
} else if (sVideoBridgeFromGpuProcess) {
sVideoBridgeFromGpuProcess->ReleaseCompositorThread();
}
}
void VideoBridgeParent::ReleaseCompositorThread() {
mCompositorThreadHolder = nullptr;
}
void VideoBridgeParent::ActorDealloc() {
mCompositorThreadHolder = nullptr;
ReleaseCompositorThread();
mSelfRef = nullptr;
}

View File

@ -27,6 +27,7 @@ class VideoBridgeParent final : public PVideoBridgeParent,
static void Open(Endpoint<PVideoBridgeParent>&& aEndpoint,
VideoBridgeSource aSource);
static void Shutdown();
TextureHost* LookupTexture(uint64_t aSerial);
@ -65,6 +66,7 @@ class VideoBridgeParent final : public PVideoBridgeParent,
void Bind(Endpoint<PVideoBridgeParent>&& aEndpoint);
void ActorDealloc() override;
void ReleaseCompositorThread();
// This keeps us alive until ActorDestroy(), at which point we do a
// deferred destruction of ourselves.

View File

@ -1,2 +0,0 @@
*.mtl binary
*.obj binary

View File

@ -1,8 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: Issues with shaders
url: https://github.com/gfx-rs/naga/issues/new/choose
about: Issues with or enhancements for the shader translation.
- name: Question about wgpu
url: https://github.com/gfx-rs/wgpu-rs/discussions/new
about: Any questions about how to use wgpu should go here.

View File

@ -4,4 +4,7 @@ about: Strange things you want to tell us
title: ''
labels: question
assignees: ''
---

View File

@ -6,3 +6,12 @@ _Describe what problem this is solving, and how it's solved._
**Testing**
_Explain how this change is tested._
<!--
Non-trivial functional changes would need to be tested through:
- [wgpu-rs](https://github.com/gfx-rs/wgpu-rs) - test the examples.
- [wgpu-native](https://github.com/gfx-rs/wgpu-native/) - check the generated C header for sanity.
Ideally, a PR needs to link to the draft PRs in these projects with relevant modifications.
See https://github.com/gfx-rs/wgpu/pull/666 for an example.
If you can add a unit/integration test here in `wgpu`, that would be best.
-->

View File

@ -34,18 +34,13 @@ jobs:
- name: Additional core features
run: cargo check --manifest-path wgpu-core/Cargo.toml --features trace --target ${{ env.TARGET }}
wasm:
webgl_build:
name: Web Assembly
runs-on: ubuntu-18.04
env:
RUSTFLAGS: --cfg=web_sys_unstable_apis
steps:
- uses: actions/checkout@v2
- run: rustup target add wasm32-unknown-unknown
- name: Check WebGPU
run: cargo check --all-targets --target=wasm32-unknown-unknown
- name: Check WebGL
run: cargo check --all-targets --target=wasm32-unknown-unknown --features webgl
- uses: actions/checkout@v2
- run: rustup target add wasm32-unknown-unknown
- run: cargo build --manifest-path wgpu-core/Cargo.toml --target wasm32-unknown-unknown
build:
name: ${{ matrix.name }}
@ -122,20 +117,6 @@ jobs:
- if: matrix.channel == 'nightly'
run: cargo test -- --nocapture
docs:
runs-on: [ubuntu-18.04]
steps:
- uses: actions/checkout@v2
- name: Install latest nightly
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
continue-on-error: true
- name: cargo doc
run: cargo --version; cargo doc --no-deps
continue-on-error: true
lint:
name: Clippy
runs-on: ubuntu-latest
@ -151,7 +132,3 @@ jobs:
with:
command: clippy
args: -- -D warnings
- uses: actions-rs/cargo@v1
with:
command: fmt
args: -- --check

View File

@ -1,45 +0,0 @@
name: Documentation
on:
push:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout the code
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Install latest nightly
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
continue-on-error: true
- name: Add EGL for OpenGL
run: |
sudo apt-get update -y -qq
sudo apt-get install -y -qq libegl1-mesa-dev
- name: Build the docs (nightly)
run: |
cargo +nightly doc --lib --all-features
- name: Build the docs (stable)
run: cargo +stable doc --lib --all-features
if: ${{ failure() }}
- name: Deploy the docs
uses: JamesIves/github-pages-deploy-action@releases/v3
with:
ACCESS_TOKEN: ${{ secrets.WEB_DEPLOY }}
FOLDER: target/doc
REPOSITORY_NAME: gfx-rs/wgpu-rs.github.io
BRANCH: master
TARGET_FOLDER: doc

View File

@ -1,47 +0,0 @@
name: Publish
on:
push:
branches:
- gecko
env:
RUSTFLAGS: --cfg=web_sys_unstable_apis
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout the code
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Install Rust WASM toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: wasm32-unknown-unknown
- name: Build the examples
run: cargo build --release --target wasm32-unknown-unknown --examples
- name: Install wasm-bindgen-cli
run: cargo install wasm-bindgen-cli
- name: Generate JS bindings for the examples
run: |
for i in target/wasm32-unknown-unknown/release/examples/*.wasm;
do
wasm-bindgen --no-typescript --out-dir target/generated --web "$i";
done
- name: Deploy the examples
uses: JamesIves/github-pages-deploy-action@releases/v3
with:
ACCESS_TOKEN: ${{ secrets.WEB_DEPLOY }}
FOLDER: target/generated
REPOSITORY_NAME: gfx-rs/wgpu-rs.github.io
BRANCH: master
TARGET_FOLDER: examples/wasm

15
gfx/wgpu/.gitignore vendored
View File

@ -1,17 +1,8 @@
# Generated by Cargo
# will have compiled files and executables
/target/
# These are backup files generated by rustfmt
/target
**/*.rs.bk
# Other
#Cargo.lock
.fuse_hidden*
.DS_Store
# IDE/Editor configuration files
.vscode
.vs
.idea
# Output from capture example
wgpu/red.png

View File

@ -1,56 +1,4 @@
# Change Log
## wgpu-core-0.9.2
- fix `Features::TEXTURE_SPECIFIC_FORMAT_FEATURES` not being supported for rendertargets
## wgpu-core-0.9.1 (2021-07-13)
- fix buffer inits delayed by a frame
- fix query resolves to initialize buffers
- fix pipeline statistics stride
- fix the check for maximum query count
## v0.9 (2021-06-18)
- Updated:
- naga to `v0.5`.
- Added:
- `Features::VERTEX_WRITABLE_STORAGE`.
- `Features::CLEAR_COMMANDS` which allows you to use `cmd_buf.clear_texture` and `cmd_buf.clear_buffer`.
- Changed:
- Updated default storage buffer/image limit to `8` from `4`.
- Fixed:
- `Buffer::get_mapped_range` can now have a range of zero.
- Fixed output SPIR-V requiring the "Kernel" capability.
- Fixed segfault due to improper drop order.
- Fixed incorrect dynamic stencil reference for Replace ops.
- Fixed tracking of temporary resources.
- Stopped unconditionally adding cubemap flags when the backend doesn't support cubemaps.
- Validation:
- Ensure that if resources are viewed from the vertex stage, they are read only unless `Features::VERTEX_WRITABLE_STORAGE` is true.
- Ensure storage class (i.e. storage vs uniform) is consistent between the shader and the pipeline layout.
- Error when a color texture is used as a depth/stencil texture.
- Check that pipeline output formats are logical
- Added shader label to log messages if validation fails.
- Tracing:
- Make renderpasses show up in the trace before they are run.
- Docs:
- Fix typo in `PowerPreference::LowPower` description.
- Player:
- Automatically start and stop RenderDoc captures.
- Examples:
- Handle winit's unconditional exception.
- Internal:
- Merged wgpu-rs and wgpu back into a single repository.
- The tracker was split into two different stateful/stateless trackers to reduce overhead.
- Added code coverage testing
- CI can now test on lavapipe
- Add missing extern "C" in wgpu-core on `wgpu_render_pass_execute_bundles`
- Renamed the incorrectly named `wgpu_render_pass_bundle_indexed_indirect` to `wgpu_render_bundle_draw_indexed_indirect`.
## wgpu-types-0.8.1 (2021-06-08)
- fix dynamic stencil reference for Replace ops
## v0.8.1 (2021-05-06)
- fix SPIR-V generation from WGSL, which was broken due to "Kernel" capability
- validate buffer storage classes
## v0.8 (2021-04-29)
- Naga is used by default to translate shaders, SPIRV-Cross is optional behind `cross` feature
@ -60,7 +8,7 @@
- conservative rasterization (native-only)
- buffer resource indexing (native-only)
- API adjustments to the spec:
- Renamed `RenderPassColorAttachmentDescriptor` to `RenderPassColorAttachment`:
- Renamed `RenderPassDepthStencilAttachmentDescriptor` to `RenderPassDepthStencilAttachment`:
- Renamed the `attachment` member to `view`
- Renamed `RenderPassDepthStencilAttachmentDescriptor` to `RenderPassDepthStencilAttachment`:
- Renamed the `attachment` member to `view`
@ -91,7 +39,7 @@
- interpolation qualifiers
- allow vertex components to be underspecified
## wgpu-core-0.7.1 (2021-02-25)
## v0.7.1 (2021-02-25)
- expose `wgc::device::queue` sub-module in public
- fix the indexed buffer check
- fix command allocator race condition
@ -100,12 +48,9 @@
- Major API changes:
- `RenderPipelineDescriptor`
- `BindingType`
- new `ShaderModuleDescriptor`
- new `RenderEncoder`
- Features:
- (beta) WGSL support, including the ability to bypass SPIR-V entirely
- (beta) implicit bind group layout support
- better error messages
- timestamp and pipeline statistics queries
- ETC2 and ASTC compressed textures
- (beta) targeting WASM with WebGL backend
@ -120,9 +65,6 @@
- render pipeline descriptor
- vertex buffers
### wgpu-0.6.2 (2020-11-24)
- don't panic in the staging belt if the channel is dropped
## v0.6 (2020-08-17)
- Crates:
- C API is moved to [another repository](https://github.com/gfx-rs/wgpu-native)
@ -150,28 +92,28 @@
- bind group matching to the layout
- experimental shader interface matching with Naga
## wgpu-core-0.5.6 (2020-07-09)
## v0.5.6 (2020-07-09)
- add debug markers support
## wgpu-core-0.5.5 (2020-05-20)
## v0.5.5 (2020-05-20)
- fix destruction of adapters, swap chains, and bind group layouts
- fix command pool leak with temporary threads
- improve assertion messages
- implement `From<TextureFormat>` for `TextureComponentType`
## wgpu-core-0.5.4 (2020-04-24)
## v0.5.4 (2020-04-24)
- fix memory management of staging buffers
## wgpu-core-0.5.3 (2020-04-18)
## v0.5.3 (2020-04-18)
- fix reading access to storage textures
- another fix to layout transitions for swapchain images
## wgpu-core-0.5.2 (2020-04-15)
## v0.5.2 (2020-04-15)
- fix read-only storage flags
- fix pipeline layout life time
- improve various assert messages
## wgpu-core-0.5.1 (2020-04-10)
## v0.5.1 (2020-04-10)
- fix tracking of swapchain images that are used multiple times in a command buffer
- fix tracking of initial usage of a resource across a command buffer
@ -196,13 +138,13 @@
- unmapping dropped buffers
- better error messages on misused swapchain frames
## wgpu-core-0.4.3 (2020-01-20)
## v0.4.3 (2020-01-20)
- improved swap chain error handling
## wgpu-core-0.4.2 (2019-12-15)
## v0.4.2 (2019-12-15)
- fixed render pass transitions
## wgpu-core-0.4.1 (2019-11-28)
## v0.4.1 (2019-11-28)
- fixed depth/stencil transitions
- fixed dynamic offset iteration
@ -216,10 +158,10 @@
- Validation:
- buffer and texture usage
## wgpu-core-0.3.3 (2019-08-22)
## v0.3.3 (2019-08-22)
- fixed instance creation on Windows
## wgpu-core-0.3.1 (2019-08-21)
## v0.3.1 (2019-08-21)
- fixed pipeline barriers that aren't transitions
## v0.3 (2019-08-21)
@ -242,16 +184,16 @@
- bind group buffer ranges
- required stencil reference, blend color
## wgpu-core-0.2.6 (2019-04-04)
## v0.2.6 (2019-04-04)
- fixed frame acquisition GPU waits
## wgpu-core-0.2.5 (2019-03-31)
## v0.2.5 (2019-03-31)
- fixed submission tracking
- added support for blend colors
- fixed bind group compatibility at the gfx-hal level
- validating the bind groups and blend colors
## wgpu-core-0.2.3 (2019-03-20)
## v0.2.3 (2019-03-20)
- fixed vertex format mapping
- fixed building with "empty" backend on Windows
- bumped the default descriptor pool size
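As a concrete illustration of the `RenderPassColorAttachment` rename noted in the v0.8 entry above (the `attachment` member became `view`), here is a minimal sketch against the wgpu-rs 0.8-era API; the function name and labels are illustrative and not part of the changelog:

```rust
// Sketch only: beginning a render pass with the renamed attachment type.
fn begin_clear_pass<'a>(
    encoder: &'a mut wgpu::CommandEncoder,
    color_view: &'a wgpu::TextureView,
) -> wgpu::RenderPass<'a> {
    encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
        label: Some("example pass"),
        color_attachments: &[wgpu::RenderPassColorAttachment {
            view: color_view, // formerly the `attachment` member
            resolve_target: None,
            ops: wgpu::Operations {
                load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),
                store: true,
            },
        }],
        depth_stencil_attachment: None,
    })
}
```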

434
gfx/wgpu/Cargo.lock generated

File diff suppressed because it is too large

View File

@ -2,31 +2,18 @@
members = [
"dummy",
"player",
#"wgpu",
"wgpu-core",
"wgpu-types",
]
default-members = ["player"]
[patch."https://github.com/gfx-rs/gfx"]
#hal = { package = "gfx-hal", path = "../gfx/src/hal" }
#gfx-backend-vulkan = { path = "../gfx/src/backend/vulkan", features = ["naga"] }
#gfx-backend-metal = { path = "../gfx/src/backend/metal", features = ["naga"] }
#gfx-backend-gl = { path = "../gfx/src/backend/gl", features = ["naga"] }
#gfx-backend-dx12 = { path = "../gfx/src/backend/dx12" }
#gfx-backend-dx11 = { path = "../gfx/src/backend/dx11" }
#gfx-backend-empty = { path = "../gfx/src/backend/empty" }
[patch."https://github.com/gfx-rs/naga"]
#naga = { path = "../naga" }
[patch."https://github.com/zakarumych/gpu-descriptor"]
#gpu-descriptor = { path = "../gpu-descriptor/gpu-descriptor" }
[patch."https://github.com/zakarumych/gpu-alloc"]
#gpu-alloc = { path = "../gpu-alloc/gpu-alloc" }
[patch."https://github.com/gfx-rs/gfx"]
#gfx-hal = { path = "../gfx/src/hal" }
#gfx-backend-empty = { path = "../gfx/src/backend/empty" }
#gfx-backend-vulkan = { path = "../gfx/src/backend/vulkan" }
#gfx-backend-gl = { path = "../gfx/src/backend/gl" }
#gfx-backend-dx12 = { path = "../gfx/src/backend/dx12" }
#gfx-backend-dx11 = { path = "../gfx/src/backend/dx11" }
#gfx-backend-metal = { path = "../gfx/src/backend/metal" }
[patch.crates-io]
#web-sys = { path = "../wasm-bindgen/crates/web-sys" }
#js-sys = { path = "../wasm-bindgen/crates/js-sys" }
#wasm-bindgen = { path = "../wasm-bindgen" }

373
gfx/wgpu/LICENSE Normal file
View File

@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View File

@ -1,176 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 The gfx-rs developers
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,23 +1,24 @@
<img align="right" width="25%" src="logo.png">
This GitHub project is mirrored in "gfx/wgpu" of [Mozilla-central](https://hg.mozilla.org/mozilla-central/file/tip/gfx/wgpu).
Issues and pull requests are welcome, but some bidirectional synchronization may be involved.
# wgpu
[![Matrix](https://img.shields.io/badge/Dev_Matrix-%23wgpu%3Amatrix.org-blueviolet.svg)](https://matrix.to/#/#wgpu:matrix.org) [![Matrix](https://img.shields.io/badge/User_Matrix-%23wgpu--users%3Amatrix.org-blueviolet.svg)](https://matrix.to/#/#wgpu-users:matrix.org)
[![Build Status](https://github.com/gfx-rs/wgpu/workflows/CI/badge.svg)](https://github.com/gfx-rs/wgpu/actions)
[![codecov.io](https://codecov.io/gh/gfx-rs/wgpu/branch/master/graph/badge.svg?token=84qJTesmeS)](https://codecov.io/gh/gfx-rs/wgpu)
This is an implementation of [WebGPU](https://www.w3.org/community/gpu/) API in Rust, targeting both native and the Web.
It's written in Rust and is based on [gfx-hal](https://github.com/gfx-rs/gfx) with help of [gpu-alloc](https://github.com/zakarumych/gpu-alloc) and [gpu-descriptor](https://github.com/zakarumych/gpu-descriptor). See the upstream [WebGPU specification](https://gpuweb.github.io/gpuweb/) (work in progress).
This is the core logic of an experimental [WebGPU](https://www.w3.org/community/gpu/) implementation. It's written in Rust and is based on [gfx-hal](https://github.com/gfx-rs/gfx) with help of [gpu-alloc](https://github.com/zakarumych/gpu-alloc) and [gpu-descriptor](https://github.com/zakarumych/gpu-descriptor). See the upstream [WebGPU specification](https://gpuweb.github.io/gpuweb/) (work in progress).
The repository hosts the following parts:
The implementation consists of the following parts:
- [![Crates.io](https://img.shields.io/crates/v/wgpu.svg?label=wgpu)](https://crates.io/crates/wgpu) [![docs.rs](https://docs.rs/wgpu/badge.svg)](https://docs.rs/wgpu/) - public Rust API for users
- [![Crates.io](https://img.shields.io/crates/v/wgpu-core.svg?label=wgpu-core)](https://crates.io/crates/wgpu-core) [![docs.rs](https://docs.rs/wgpu-core/badge.svg)](https://docs.rs/wgpu-core/) - internal Rust API for WebGPU implementations to use
- [![Crates.io](https://img.shields.io/crates/v/wgpu-types.svg?label=wgpu-types)](https://crates.io/crates/wgpu-types) [![docs.rs](https://docs.rs/wgpu-types/badge.svg)](https://docs.rs/wgpu-types/) - Rust types shared between `wgpu-core` and `wgpu-rs`
- `player` - standalone application for replaying the API traces, uses `winit`
Rust examples can be found at `wgpu/examples`. `wgpu` is a default member, so you can run the examples directly from the root, e.g. `cargo run --example boids`.
This repository contains the core of `wgpu`, and is not usable directly by applications.
If you are looking for the user-facing Rust API, you need [wgpu-rs](https://github.com/gfx-rs/wgpu-rs).
If you are looking for the native implementation or bindings to the API in other languages, you need [wgpu-native](https://github.com/gfx-rs/wgpu-native).
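To make the crate layout above concrete, here is a minimal sketch of driving the public `wgpu` crate with the 0.8-era Rust API; `pollster` is an assumed helper crate for blocking on the async requests, and the labels are illustrative:

```rust
// Sketch only: request an adapter and device, the first step of any wgpu example.
fn main() {
    let instance = wgpu::Instance::new(wgpu::BackendBit::PRIMARY);
    let adapter = pollster::block_on(instance.request_adapter(&wgpu::RequestAdapterOptions {
        power_preference: wgpu::PowerPreference::HighPerformance,
        compatible_surface: None, // no window needed for a headless check
    }))
    .expect("no suitable GPU adapter found");
    println!("using adapter: {:?}", adapter.get_info());
    let (_device, _queue) = pollster::block_on(adapter.request_device(
        &wgpu::DeviceDescriptor {
            label: Some("example device"),
            features: wgpu::Features::empty(),
            limits: wgpu::Limits::default(),
        },
        None, // no API trace directory
    ))
    .expect("failed to create device");
}
```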
## Supported Platforms

View File

@ -7,6 +7,4 @@ status = [
"Ubuntu Nightly",
"Windows Stable",
"Windows Nightly",
"Web Assembly",
#"Clippy",
]

View File

@ -5,7 +5,6 @@ authors = [
"Dzmitry Malyshau <kvark@mozilla.com>",
]
edition = "2018"
license = "MIT OR Apache-2.0"
publish = false
[features]

View File

@ -9,7 +9,7 @@ description = "WebGPU trace player"
homepage = "https://github.com/gfx-rs/wgpu"
repository = "https://github.com/gfx-rs/wgpu"
keywords = ["graphics"]
license = "MIT OR Apache-2.0"
license = "MPL-2.0"
publish = false
[features]

View File

@ -88,7 +88,7 @@ impl GlobalPlay for wgc::hub::Global<IdentityPassThroughFactory> {
dst,
subresource_range,
} => self
.command_encoder_clear_image::<B>(encoder, dst, &subresource_range)
.command_encoder_clear_image::<B>(encoder, dst, subresource_range)
.unwrap(),
trace::Command::WriteTimestamp {
query_set_id,

View File

@ -5,7 +5,6 @@
"buffer-copy.ron",
"clear-buffer-image.ron",
"buffer-zero-init.ron",
"pipeline-statistics-query.ron",
"quad.ron",
],
)

View File

@ -1,81 +0,0 @@
(
features: (bits: 0x0000_0000_0000_0008), // PIPELINE_STATISTICS_QUERY
expectations: [
(
name: "Queried number of compute invocations is correct",
buffer: (index: 0, epoch: 1),
offset: 0,
data: Raw([0x2A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
),
],
actions: [
CreatePipelineLayout(Id(0, 1, Empty), (
label: Some("empty"),
bind_group_layouts: [],
push_constant_ranges: [],
)),
CreateShaderModule(
id: Id(0, 1, Empty),
desc: (
label: None,
flags: (bits: 3),
),
data: "empty.wgsl",
),
CreateComputePipeline(
id: Id(0, 1, Empty),
desc: (
label: None,
layout: Some(Id(0, 1, Empty)),
stage: (
module: Id(0, 1, Empty),
entry_point: "main",
),
),
),
CreateQuerySet(
id: Id(0, 1, Empty),
desc: (
label: Some("Compute Invocation QuerySet"),
count: 2,
ty: PipelineStatistics((bits: 0x10)), // COMPUTE_SHADER_INVOCATIONS
),
),
CreateBuffer(
Id(0, 1, Empty),
(
label: Some("Compute Invocation Result Buffer"),
size: 8,
usage: (
bits: 9, // COPY_DST | MAP_READ
),
mapped_at_creation: false,
),
),
Submit(1, [
RunComputePass(
base: (
commands: [
SetPipeline(Id(0, 1, Empty)),
BeginPipelineStatisticsQuery(
query_set_id: Id(0, 1, Empty),
query_index: 0,
),
Dispatch((2, 3, 7,)),
EndPipelineStatisticsQuery,
],
dynamic_offsets: [],
string_data: [],
push_constant_data: [],
),
),
ResolveQuerySet(
query_set_id: Id(0, 1, Empty),
start_query: 0,
query_count: 1,
destination: Id(0, 1, Empty),
destination_offset: 0,
)
]),
],
)
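The deleted trace above expects the raw bytes `0x2A, 0x00, …` in its 8-byte result buffer. A short sketch of why, assuming the referenced `empty.wgsl` entry point runs a 1×1×1 workgroup (that file is not shown here): dispatching (2, 3, 7) workgroups yields 2 × 3 × 7 = 42 compute-shader invocations, stored as one little-endian 64-bit counter per query element:

```rust
// Sketch only: decode the expected query result from the trace's raw expectation bytes.
fn main() {
    let raw = [0x2Au8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
    let invocations = u64::from_le_bytes(raw);
    assert_eq!(invocations, 2 * 3 * 7); // 42 workgroups at one invocation each
    println!("queried compute invocations: {}", invocations);
}
```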

View File

@ -1,13 +1,13 @@
[package]
name = "wgpu-core"
version = "0.9.2"
version = "0.8.0"
authors = ["wgpu developers"]
edition = "2018"
description = "WebGPU core logic on gfx-hal"
homepage = "https://github.com/gfx-rs/wgpu"
repository = "https://github.com/gfx-rs/wgpu"
keywords = ["graphics"]
license = "MIT OR Apache-2.0"
license = "MPL-2.0"
[lib]
@ -39,33 +39,34 @@ thiserror = "1"
gpu-alloc = "0.4"
gpu-descriptor = "0.1"
hal = { package = "gfx-hal", version = "0.9" }
gfx-backend-empty = { version = "0.9" }
hal = { package = "gfx-hal", git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
gfx-backend-empty = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
[target.'cfg(all(not(target_arch = "wasm32"), all(unix, not(target_os = "ios"), not(target_os = "macos"))))'.dependencies]
gfx-backend-vulkan = { version = "0.9", features = ["naga"] }
#gfx-backend-gl = { version = "0.9" }
gfx-backend-vulkan = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521", features = ["naga"] }
#gfx-backend-gl = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
[target.'cfg(all(not(target_arch = "wasm32"), any(target_os = "ios", target_os = "macos")))'.dependencies]
gfx-backend-metal = { version = "0.9" }
gfx-backend-metal = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
#TODO: could also depend on gfx-backend-vulkan for Vulkan Portability
[target.'cfg(all(not(target_arch = "wasm32"), windows))'.dependencies]
gfx-backend-dx12 = { version = "0.9" }
gfx-backend-dx11 = { version = "0.9" }
gfx-backend-vulkan = { version = "0.9", features = ["naga"] }
gfx-backend-dx12 = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
gfx-backend-dx11 = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
gfx-backend-vulkan = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521", features = ["naga"] }
[target.'cfg(target_arch = "wasm32")'.dependencies]
#gfx-backend-gl = { version = "0.9" }
#gfx-backend-gl = { git = "https://github.com/gfx-rs/gfx", rev = "27a1dae3796d33d23812f2bb8c7e3b5aea18b521" }
[dependencies.naga]
version = "0.5"
git = "https://github.com/gfx-rs/naga"
tag = "gfx-25"
features = ["spv-in", "spv-out", "wgsl-in"]
[dependencies.wgt]
path = "../wgpu-types"
package = "wgpu-types"
version = "0.9"
version = "0.8"
[dev-dependencies]
loom = "0.3"

View File

@ -169,7 +169,7 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
&self,
command_encoder_id: CommandEncoderId,
dst: TextureId,
subresource_range: &ImageSubresourceRange,
subresource_range: ImageSubresourceRange,
) -> Result<(), ClearError> {
profiling::scope!("CommandEncoder::clear_image");

View File

@ -11,7 +11,6 @@ use crate::{
device::all_buffer_stages,
hub::{GfxBackend, Global, GlobalIdentityHandlerFactory, Storage, Token},
id::{self, Id, TypedId},
memory_init_tracker::{MemoryInitKind, MemoryInitTrackerAction},
resource::{BufferUse, QuerySet},
track::UseExtendError,
Epoch, FastHashMap, Index,
@ -382,11 +381,7 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
.into());
}
let elements_per_query = match query_set.desc.ty {
wgt::QueryType::PipelineStatistics(ps) => ps.bits().count_ones(),
wgt::QueryType::Timestamp => 1,
};
let stride = elements_per_query * wgt::QUERY_SIZE;
let stride = query_set.elements * wgt::QUERY_SIZE;
let bytes_used = (stride * query_count) as BufferAddress;
let buffer_start_offset = destination_offset;
@ -404,17 +399,6 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
.into());
}
cmd_buf.buffer_memory_init_actions.extend(
dst_buffer
.initialization_status
.check(buffer_start_offset..buffer_end_offset)
.map(|range| MemoryInitTrackerAction {
id: destination,
range,
kind: MemoryInitKind::ImplicitlyInitialized,
}),
);
unsafe {
cmd_buf_raw.pipeline_barrier(
all_buffer_stages()..hal::pso::PipelineStage::TRANSFER,

View File

@ -253,6 +253,7 @@ impl<B: hal::Backend> LifetimeTracker<B> {
&mut self,
index: SubmissionIndex,
fence: B::Fence,
new_suspects: &SuspectedResources,
temp_resources: impl Iterator<Item = (TempResource<B>, alloc::MemoryBlock<B>)>,
) {
let mut last_resources = NonReferencedResources::new();
@ -273,6 +274,7 @@ impl<B: hal::Backend> LifetimeTracker<B> {
.drain(..)
.map(|stored| stored.value),
);
self.suspected_resources.extend(new_suspects);
self.active.alloc().init(ActiveSubmission {
index,

Some files were not shown because too many files have changed in this diff