Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-11-26 14:22:01 +00:00)

Commit b6361891bd: Merge m-c to fx-team.

CLOBBER (2 lines changed)
@@ -18,4 +18,4 @@
 # Modifying this file will now automatically clobber the buildbot machines \o/
 #

-Removal of XPIDL for bug 893117 requires a clobber to make sure interfaces aren't generated.
+Add an WebIDL interface for bug 892978 requires a clobber for Windows.
@ -35,11 +35,6 @@ let gBrowserThumbnails = {
|
||||
if (gMultiProcessBrowser)
|
||||
return;
|
||||
|
||||
try {
|
||||
if (Services.prefs.getBoolPref("browser.pagethumbnails.capturing_disabled"))
|
||||
return;
|
||||
} catch (e) {}
|
||||
|
||||
PageThumbs.addExpirationFilter(this);
|
||||
gBrowser.addTabsProgressListener(this);
|
||||
Services.prefs.addObserver(this.PREF_DISK_CACHE_SSL, this, false);
|
||||
|
@ -84,9 +84,6 @@ this.__defineSetter__("PluralForm", function (val) {
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "TelemetryStopwatch",
|
||||
"resource://gre/modules/TelemetryStopwatch.jsm");
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "AboutHomeUtils",
|
||||
"resource:///modules/AboutHomeUtils.jsm");
|
||||
|
||||
#ifdef MOZ_SERVICES_SYNC
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "Weave",
|
||||
"resource://services-sync/main.js");
|
||||
@ -2312,64 +2309,6 @@ function PageProxyClickHandler(aEvent)
|
||||
middleMousePaste(aEvent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle load of some pages (about:*) so that we can make modifications
|
||||
* to the DOM for unprivileged pages.
|
||||
*/
|
||||
function BrowserOnAboutPageLoad(doc) {
|
||||
if (doc.documentURI.toLowerCase() == "about:home") {
|
||||
// XXX bug 738646 - when Marketplace is launched, remove this statement and
|
||||
// the hidden attribute set on the apps button in aboutHome.xhtml
|
||||
if (getBoolPref("browser.aboutHome.apps", false))
|
||||
doc.getElementById("apps").removeAttribute("hidden");
|
||||
|
||||
let ss = Components.classes["@mozilla.org/browser/sessionstore;1"].
|
||||
getService(Components.interfaces.nsISessionStore);
|
||||
if (ss.canRestoreLastSession &&
|
||||
!PrivateBrowsingUtils.isWindowPrivate(window))
|
||||
doc.getElementById("launcher").setAttribute("session", "true");
|
||||
|
||||
// Inject search engine and snippets URL.
|
||||
let docElt = doc.documentElement;
|
||||
// set the following attributes BEFORE searchEngineURL, which triggers to
|
||||
// show the snippets when it's set.
|
||||
docElt.setAttribute("snippetsURL", AboutHomeUtils.snippetsURL);
|
||||
if (AboutHomeUtils.showKnowYourRights) {
|
||||
docElt.setAttribute("showKnowYourRights", "true");
|
||||
// Set pref to indicate we've shown the notification.
|
||||
let currentVersion = Services.prefs.getIntPref("browser.rights.version");
|
||||
Services.prefs.setBoolPref("browser.rights." + currentVersion + ".shown", true);
|
||||
}
|
||||
docElt.setAttribute("snippetsVersion", AboutHomeUtils.snippetsVersion);
|
||||
|
||||
let updateSearchEngine = function() {
|
||||
let engine = AboutHomeUtils.defaultSearchEngine;
|
||||
docElt.setAttribute("searchEngineName", engine.name);
|
||||
docElt.setAttribute("searchEnginePostData", engine.postDataString || "");
|
||||
// Again, keep the searchEngineURL as the last attribute, because the
|
||||
// mutation observer in aboutHome.js is counting on that.
|
||||
docElt.setAttribute("searchEngineURL", engine.searchURL);
|
||||
};
|
||||
updateSearchEngine();
|
||||
|
||||
// Listen for the event that's triggered when the user changes search engine.
|
||||
// At this point we simply reload about:home to reflect the change.
|
||||
Services.obs.addObserver(updateSearchEngine, "browser-search-engine-modified", false);
|
||||
|
||||
// Remove the observer when the page is reloaded or closed.
|
||||
doc.defaultView.addEventListener("pagehide", function removeObserver() {
|
||||
doc.defaultView.removeEventListener("pagehide", removeObserver);
|
||||
Services.obs.removeObserver(updateSearchEngine, "browser-search-engine-modified");
|
||||
}, false);
|
||||
|
||||
#ifdef MOZ_SERVICES_HEALTHREPORT
|
||||
doc.addEventListener("AboutHomeSearchEvent", function onSearch(e) {
|
||||
BrowserSearch.recordSearchInHealthReport(e.detail, "abouthome");
|
||||
}, true, true);
|
||||
#endif
|
||||
}
|
||||
}
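The comment above insists that searchEngineURL be set after the other attributes because a mutation observer in aboutHome.js keys off it. As an illustration only (this is not the actual aboutHome.js code), the page-side half of that contract looks roughly like this:

// Illustrative sketch, not the real aboutHome.js: the page watches its root
// element and treats a change to "searchEngineURL" as the signal that every
// other attribute (snippetsURL, snippetsVersion, searchEngineName, ...) has
// already been written by the chrome code above.
const root = document.documentElement;
new MutationObserver(function (mutations) {
  for (const mutation of mutations) {
    if (mutation.attributeName == "searchEngineURL") {
      // Safe to read everything now.
      console.log("engine:", root.getAttribute("searchEngineName"));
      console.log("snippets:", root.getAttribute("snippetsURL"));
    }
  }
}).observe(root, { attributes: true });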
|
||||
|
||||
/**
|
||||
* Handle command events bubbling up from error page content
|
||||
*/
|
||||
@ -2394,9 +2333,6 @@ let BrowserOnClick = {
|
||||
else if (ownerDoc.documentURI.startsWith("about:neterror")) {
|
||||
this.onAboutNetError(originalTarget, ownerDoc);
|
||||
}
|
||||
else if (ownerDoc.documentURI.toLowerCase() == "about:home") {
|
||||
this.onAboutHome(originalTarget, ownerDoc);
|
||||
}
|
||||
},
|
||||
|
||||
onAboutCertError: function BrowserOnClick_onAboutCertError(aTargetElm, aOwnerDoc) {
|
||||
@ -2573,49 +2509,6 @@ let BrowserOnClick = {
|
||||
return;
|
||||
Services.io.offline = false;
|
||||
},
|
||||
|
||||
onAboutHome: function BrowserOnClick_onAboutHome(aTargetElm, aOwnerDoc) {
|
||||
let elmId = aTargetElm.getAttribute("id");
|
||||
|
||||
switch (elmId) {
|
||||
case "restorePreviousSession":
|
||||
let ss = Cc["@mozilla.org/browser/sessionstore;1"].
|
||||
getService(Ci.nsISessionStore);
|
||||
if (ss.canRestoreLastSession) {
|
||||
ss.restoreLastSession();
|
||||
}
|
||||
aOwnerDoc.getElementById("launcher").removeAttribute("session");
|
||||
break;
|
||||
|
||||
case "downloads":
|
||||
BrowserDownloadsUI();
|
||||
break;
|
||||
|
||||
case "bookmarks":
|
||||
PlacesCommandHook.showPlacesOrganizer("AllBookmarks");
|
||||
break;
|
||||
|
||||
case "history":
|
||||
PlacesCommandHook.showPlacesOrganizer("History");
|
||||
break;
|
||||
|
||||
case "apps":
|
||||
openUILinkIn("https://marketplace.mozilla.org/", "tab");
|
||||
break;
|
||||
|
||||
case "addons":
|
||||
BrowserOpenAddonsMgr();
|
||||
break;
|
||||
|
||||
case "sync":
|
||||
openPreferences("paneSync");
|
||||
break;
|
||||
|
||||
case "settings":
|
||||
openPreferences();
|
||||
break;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
@ -4295,6 +4188,7 @@ var TabsProgressListener = {
|
||||
Components.isSuccessCode(aStatus) &&
|
||||
doc.documentURI.startsWith("about:") &&
|
||||
!doc.documentURI.toLowerCase().startsWith("about:blank") &&
|
||||
!doc.documentURI.toLowerCase().startsWith("about:home") &&
|
||||
!doc.documentElement.hasAttribute("hasBrowserHandlers")) {
|
||||
// STATE_STOP may be received twice for documents, thus store an
|
||||
// attribute to ensure handling it just once.
|
||||
@ -4308,9 +4202,6 @@ var TabsProgressListener = {
|
||||
if (event.target.documentElement)
|
||||
event.target.documentElement.removeAttribute("hasBrowserHandlers");
|
||||
}, true);
|
||||
|
||||
// We also want to make changes to page UI for unprivileged about pages.
|
||||
BrowserOnAboutPageLoad(doc);
|
||||
}
|
||||
},
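The "STATE_STOP may be received twice" comment above is the reason for the hasBrowserHandlers attribute. Reduced to a sketch (the helper name is invented for illustration), the guard is simply:

// Hypothetical helper showing the idempotency guard used above.
function handleTopLevelStop(doc) {
  if (doc.documentElement.hasAttribute("hasBrowserHandlers")) {
    return; // a second STATE_STOP for the same document is a no-op
  }
  doc.documentElement.setAttribute("hasBrowserHandlers", "true");
  // install the click/pagehide listeners exactly once here
}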
|
||||
|
||||
|
@ -13,6 +13,8 @@ XPCOMUtils.defineLazyModuleGetter(this,
|
||||
"LoginManagerContent", "resource://gre/modules/LoginManagerContent.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this,
|
||||
"InsecurePasswordUtils", "resource://gre/modules/InsecurePasswordUtils.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "PrivateBrowsingUtils",
|
||||
"resource://gre/modules/PrivateBrowsingUtils.jsm");
|
||||
|
||||
// Bug 671101 - directly using webNavigation in this context
|
||||
// causes docshells to leak
|
||||
@ -50,3 +52,142 @@ if (!Services.prefs.getBoolPref("browser.tabs.remote")) {
|
||||
LoginManagerContent.onUsernameInput(event);
|
||||
});
|
||||
}
|
||||
|
||||
let AboutHomeListener = {
|
||||
init: function() {
|
||||
let webProgress = docShell.QueryInterface(Ci.nsIInterfaceRequestor)
|
||||
.getInterface(Ci.nsIWebProgress);
|
||||
webProgress.addProgressListener(this, Ci.nsIWebProgress.NOTIFY_STATE_WINDOW);
|
||||
|
||||
addMessageListener("AboutHome:Update", this);
|
||||
},
|
||||
|
||||
receiveMessage: function(aMessage) {
|
||||
switch (aMessage.name) {
|
||||
case "AboutHome:Update":
|
||||
this.onUpdate(aMessage.data);
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
onUpdate: function(aData) {
|
||||
let doc = content.document;
|
||||
if (doc.documentURI.toLowerCase() != "about:home")
|
||||
return;
|
||||
|
||||
if (aData.showRestoreLastSession && !PrivateBrowsingUtils.isWindowPrivate(content))
|
||||
doc.getElementById("launcher").setAttribute("session", "true");
|
||||
|
||||
// Inject search engine and snippets URL.
|
||||
let docElt = doc.documentElement;
|
||||
// set the following attributes BEFORE searchEngineURL, which triggers to
|
||||
// show the snippets when it's set.
|
||||
docElt.setAttribute("snippetsURL", aData.snippetsURL);
|
||||
if (aData.showKnowYourRights)
|
||||
docElt.setAttribute("showKnowYourRights", "true");
|
||||
docElt.setAttribute("snippetsVersion", aData.snippetsVersion);
|
||||
|
||||
let engine = aData.defaultSearchEngine;
|
||||
docElt.setAttribute("searchEngineName", engine.name);
|
||||
docElt.setAttribute("searchEnginePostData", engine.postDataString || "");
|
||||
// Again, keep the searchEngineURL as the last attribute, because the
|
||||
// mutation observer in aboutHome.js is counting on that.
|
||||
docElt.setAttribute("searchEngineURL", engine.searchURL);
|
||||
},
|
||||
|
||||
onPageLoad: function(aDocument) {
|
||||
// XXX bug 738646 - when Marketplace is launched, remove this statement and
|
||||
// the hidden attribute set on the apps button in aboutHome.xhtml
|
||||
if (Services.prefs.getPrefType("browser.aboutHome.apps") == Services.prefs.PREF_BOOL &&
|
||||
Services.prefs.getBoolPref("browser.aboutHome.apps"))
|
||||
aDocument.getElementById("apps").removeAttribute("hidden");
|
||||
|
||||
sendAsyncMessage("AboutHome:RequestUpdate");
|
||||
|
||||
aDocument.addEventListener("AboutHomeSearchEvent", function onSearch(e) {
|
||||
sendAsyncMessage("AboutHome:Search", { engineName: e.detail });
|
||||
}, true, true);
|
||||
},
|
||||
|
||||
onStateChange: function(aWebProgress, aRequest, aStateFlags, aStatus) {
|
||||
let doc = aWebProgress.DOMWindow.document;
|
||||
if (aStateFlags & Ci.nsIWebProgressListener.STATE_STOP &&
|
||||
aStateFlags & Ci.nsIWebProgressListener.STATE_IS_WINDOW &&
|
||||
Components.isSuccessCode(aStatus) &&
|
||||
doc.documentURI.toLowerCase() == "about:home" &&
|
||||
!doc.documentElement.hasAttribute("hasBrowserHandlers")) {
|
||||
// STATE_STOP may be received twice for documents, thus store an
|
||||
// attribute to ensure handling it just once.
|
||||
doc.documentElement.setAttribute("hasBrowserHandlers", "true");
|
||||
addEventListener("click", this.onClick, true);
|
||||
addEventListener("pagehide", function onPageHide(event) {
|
||||
if (event.target.defaultView.frameElement)
|
||||
return;
|
||||
removeEventListener("click", this.onClick, true);
|
||||
removeEventListener("pagehide", onPageHide, true);
|
||||
if (event.target.documentElement)
|
||||
event.target.documentElement.removeAttribute("hasBrowserHandlers");
|
||||
}, true);
|
||||
|
||||
// We also want to make changes to page UI for unprivileged about pages.
|
||||
this.onPageLoad(doc);
|
||||
}
|
||||
},
|
||||
|
||||
onClick: function(aEvent) {
|
||||
if (!aEvent.isTrusted || // Don't trust synthetic events
|
||||
aEvent.button == 2 || aEvent.target.localName != "button") {
|
||||
return;
|
||||
}
|
||||
|
||||
let originalTarget = aEvent.originalTarget;
|
||||
let ownerDoc = originalTarget.ownerDocument;
|
||||
let elmId = originalTarget.getAttribute("id");
|
||||
|
||||
switch (elmId) {
|
||||
case "restorePreviousSession":
|
||||
sendAsyncMessage("AboutHome:RestorePreviousSession");
|
||||
ownerDoc.getElementById("launcher").removeAttribute("session");
|
||||
break;
|
||||
|
||||
case "downloads":
|
||||
sendAsyncMessage("AboutHome:Downloads");
|
||||
break;
|
||||
|
||||
case "bookmarks":
|
||||
sendAsyncMessage("AboutHome:Bookmarks");
|
||||
break;
|
||||
|
||||
case "history":
|
||||
sendAsyncMessage("AboutHome:History");
|
||||
break;
|
||||
|
||||
case "apps":
|
||||
sendAsyncMessage("AboutHome:Apps");
|
||||
break;
|
||||
|
||||
case "addons":
|
||||
sendAsyncMessage("AboutHome:Addons");
|
||||
break;
|
||||
|
||||
case "sync":
|
||||
sendAsyncMessage("AboutHome:Sync");
|
||||
break;
|
||||
|
||||
case "settings":
|
||||
sendAsyncMessage("AboutHome:Settings");
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
QueryInterface: function QueryInterface(aIID) {
|
||||
if (aIID.equals(Ci.nsIWebProgressListener) ||
|
||||
aIID.equals(Ci.nsISupportsWeakReference) ||
|
||||
aIID.equals(Ci.nsISupports)) {
|
||||
return this;
|
||||
}
|
||||
|
||||
throw Components.results.NS_ERROR_NO_INTERFACE;
|
||||
}
|
||||
};
|
||||
AboutHomeListener.init();
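AboutHomeListener only consumes "AboutHome:Update"; the producer is the new browser/modules/AboutHome.jsm later in this commit. As a rough sketch of the chrome-side contract (the message-manager call mirrors sendAboutHomeData() further down; field names come from the onUpdate handler above; all values are placeholders):

// Chrome-side sketch: broadcast the data that onUpdate() above expects.
let globalMM = Cc["@mozilla.org/globalmessagemanager;1"]
                 .getService(Ci.nsIMessageListenerManager);
globalMM.broadcastAsyncMessage("AboutHome:Update", {
  showRestoreLastSession: true,                      // toggles the launcher "session" attribute
  snippetsURL: "https://example.invalid/snippets",   // placeholder, not the real pref value
  showKnowYourRights: false,
  snippetsVersion: 4,
  defaultSearchEngine: {
    name: "Example Search",                          // placeholder engine
    searchURL: "https://example.invalid/search?q=_searchTerms_",
    postDataString: ""
  }
});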
|
||||
|
@ -144,10 +144,6 @@ Site.prototype = {
|
||||
refreshThumbnail: function Site_refreshThumbnail() {
|
||||
let thumbnailURL = PageThumbs.getThumbnailURL(this.url);
|
||||
let thumbnail = this._querySelector(".newtab-thumbnail");
|
||||
// If this is being called because the thumbnail was updated, we will be
// setting it to the same value it had before. To be confident the change
// won't be optimized away, we remove the property first.
|
||||
thumbnail.style.removeProperty("backgroundImage");
|
||||
thumbnail.style.backgroundImage = "url(" + thumbnailURL + ")";
|
||||
},
|
||||
|
||||
|
@@ -7,7 +7,7 @@ XPCOMUtils.defineLazyModuleGetter(this, "Promise",
 XPCOMUtils.defineLazyModuleGetter(this, "Task",
   "resource://gre/modules/Task.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "AboutHomeUtils",
-  "resource:///modules/AboutHomeUtils.jsm");
+  "resource:///modules/AboutHome.jsm");

 let gRightsVersion = Services.prefs.getIntPref("browser.rights.version");
|
||||
|
||||
@ -106,14 +106,17 @@ let gTests = [
|
||||
let doc = gBrowser.contentDocument;
|
||||
let engineName = doc.documentElement.getAttribute("searchEngineName");
|
||||
|
||||
// We rely on the listener in browser.js being installed and fired before
|
||||
// this one. If this ever changes, we should add an executeSoon() or similar.
|
||||
doc.addEventListener("AboutHomeSearchEvent", function onSearch(e) {
|
||||
is(e.detail, engineName, "Detail is search engine name");
|
||||
|
||||
getNumberOfSearches(engineName).then(num => {
|
||||
is(num, numSearchesBefore + 1, "One more search recorded.");
|
||||
deferred.resolve();
|
||||
// We use executeSoon() to ensure that this code runs after the
|
||||
// count has been updated in browser.js, since it uses the same
|
||||
// event.
|
||||
executeSoon(function () {
|
||||
getNumberOfSearches(engineName).then(num => {
|
||||
is(num, numSearchesBefore + 1, "One more search recorded.");
|
||||
deferred.resolve();
|
||||
});
|
||||
});
|
||||
}, true, true);
|
||||
|
||||
@ -275,19 +278,35 @@ let gTests = [
|
||||
if (engine.name != "POST Search")
|
||||
return;
|
||||
|
||||
Services.search.defaultEngine = engine;
|
||||
|
||||
registerCleanupFunction(function() {
|
||||
Services.search.removeEngine(engine);
|
||||
Services.search.defaultEngine = currEngine;
|
||||
});
|
||||
|
||||
|
||||
// Ready to execute the tests!
|
||||
let needle = "Search for something awesome.";
|
||||
let document = gBrowser.selectedTab.linkedBrowser.contentDocument;
|
||||
let searchText = document.getElementById("searchText");
|
||||
|
||||
// We're about to change the search engine. Once the change has
|
||||
// propagated to the about:home content, we want to perform a search.
|
||||
let mutationObserver = new MutationObserver(function (mutations) {
|
||||
for (let mutation of mutations) {
|
||||
if (mutation.attributeName == "searchEngineURL") {
|
||||
searchText.value = needle;
|
||||
searchText.focus();
|
||||
EventUtils.synthesizeKey("VK_RETURN", {});
|
||||
}
|
||||
}
|
||||
});
|
||||
mutationObserver.observe(document.documentElement, { attributes: true });
|
||||
|
||||
// Change the search engine, triggering the observer above.
|
||||
Services.search.defaultEngine = engine;
|
||||
|
||||
registerCleanupFunction(function() {
|
||||
mutationObserver.disconnect();
|
||||
Services.search.removeEngine(engine);
|
||||
Services.search.defaultEngine = currEngine;
|
||||
});
|
||||
|
||||
|
||||
// When the search results load, check them for correctness.
|
||||
waitForLoad(function() {
|
||||
let loadedText = gBrowser.contentDocument.body.textContent;
|
||||
ok(loadedText, "search page loaded");
|
||||
@ -295,10 +314,6 @@ let gTests = [
|
||||
"Search text should arrive correctly");
|
||||
deferred.resolve();
|
||||
});
|
||||
|
||||
searchText.value = needle;
|
||||
searchText.focus();
|
||||
EventUtils.synthesizeKey("VK_RETURN", {});
|
||||
};
|
||||
Services.obs.addObserver(searchObserver, "browser-search-engine-modified", false);
|
||||
registerCleanupFunction(function () {
|
||||
|
@ -14,6 +14,9 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
|
||||
Cu.import("resource://gre/modules/Services.jsm");
|
||||
Cu.import("resource:///modules/SignInToWebsite.jsm");
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "AboutHome",
|
||||
"resource:///modules/AboutHome.jsm");
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "AddonManager",
|
||||
"resource://gre/modules/AddonManager.jsm");
|
||||
|
||||
@ -462,6 +465,7 @@ BrowserGlue.prototype = {
|
||||
SignInToWebsiteUX.init();
|
||||
PdfJs.init();
|
||||
webrtcUI.init();
|
||||
AboutHome.init();
|
||||
|
||||
Services.obs.notifyObservers(null, "browser-ui-startup-complete", "");
|
||||
},
|
||||
|
209
browser/modules/AboutHome.jsm
Normal file
209
browser/modules/AboutHome.jsm
Normal file
@ -0,0 +1,209 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
let Cc = Components.classes;
|
||||
let Ci = Components.interfaces;
|
||||
let Cu = Components.utils;
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "AboutHomeUtils", "AboutHome" ];
|
||||
|
||||
Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");
|
||||
Components.utils.import("resource://gre/modules/Services.jsm");
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "PrivateBrowsingUtils",
|
||||
"resource://gre/modules/PrivateBrowsingUtils.jsm");
|
||||
|
||||
// Url to fetch snippets, in the urlFormatter service format.
|
||||
const SNIPPETS_URL_PREF = "browser.aboutHomeSnippets.updateUrl";
|
||||
|
||||
// Should be bumped up if the snippets content format changes.
|
||||
const STARTPAGE_VERSION = 4;
|
||||
|
||||
this.AboutHomeUtils = {
|
||||
get snippetsVersion() STARTPAGE_VERSION,
|
||||
|
||||
/**
|
||||
* Returns an object containing the name and searchURL of the original default
|
||||
* search engine.
|
||||
*/
|
||||
get defaultSearchEngine() {
|
||||
let defaultEngine = Services.search.defaultEngine;
|
||||
let submission = defaultEngine.getSubmission("_searchTerms_", null, "homepage");
|
||||
|
||||
return Object.freeze({
|
||||
name: defaultEngine.name,
|
||||
searchURL: submission.uri.spec,
|
||||
postDataString: submission.postDataString
|
||||
});
|
||||
},
|
||||
|
||||
/*
|
||||
* showKnowYourRights - Determines if the user should be shown the
|
||||
* about:rights notification. The notification should *not* be shown if
|
||||
* we've already shown the current version, or if the override pref says to
|
||||
* never show it. The notification *should* be shown if it's never been seen
|
||||
* before, if a newer version is available, or if the override pref says to
|
||||
* always show it.
|
||||
*/
|
||||
get showKnowYourRights() {
|
||||
// Look for an unconditional override pref. If set, do what it says.
|
||||
// (true --> never show, false --> always show)
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.rights.override");
|
||||
} catch (e) { }
|
||||
// Ditto, for the legacy EULA pref.
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.EULA.override");
|
||||
} catch (e) { }
|
||||
|
||||
#ifndef MOZILLA_OFFICIAL
|
||||
// Non-official builds shouldn't show the notification.
|
||||
return false;
|
||||
#endif
|
||||
|
||||
// Look to see if the user has seen the current version or not.
|
||||
var currentVersion = Services.prefs.getIntPref("browser.rights.version");
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.rights." + currentVersion + ".shown");
|
||||
} catch (e) { }
|
||||
|
||||
// Legacy: If the user accepted a EULA, we won't annoy them with the
|
||||
// equivalent about:rights page until the version changes.
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.EULA." + currentVersion + ".accepted");
|
||||
} catch (e) { }
|
||||
|
||||
// We haven't shown the notification before, so do so now.
|
||||
return true;
|
||||
}
|
||||
};
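defaultSearchEngine above calls getSubmission() with the literal placeholder "_searchTerms_" so that the consumer can splice the user's real query in later. A hedged sketch of that substitution (assumed page-side behavior, not code from this patch):

// engine.searchURL might look like (hypothetical engine):
//   "https://search.example.invalid/?q=_searchTerms_"
function buildSearchURL(searchURLTemplate, userQuery) {
  // Replace the placeholder with the user's escaped query.
  return searchURLTemplate.replace("_searchTerms_", encodeURIComponent(userQuery));
}

// buildSearchURL("https://search.example.invalid/?q=_searchTerms_", "firefox os")
//   -> "https://search.example.invalid/?q=firefox%20os"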
|
||||
|
||||
/**
|
||||
* Returns the URL to fetch snippets from, in the urlFormatter service format.
|
||||
*/
|
||||
XPCOMUtils.defineLazyGetter(AboutHomeUtils, "snippetsURL", function() {
|
||||
let updateURL = Services.prefs
|
||||
.getCharPref(SNIPPETS_URL_PREF)
|
||||
.replace("%STARTPAGE_VERSION%", STARTPAGE_VERSION);
|
||||
return Services.urlFormatter.formatURL(updateURL);
|
||||
});
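For illustration, the lazy getter above boils down to one string substitution followed by urlFormatter expansion; the pref value shown here is invented, not Firefox's actual default:

// Hypothetical value of browser.aboutHomeSnippets.updateUrl:
let updateURL = "https://snippets.example.invalid/%STARTPAGE_VERSION%/%LOCALE%/"
                  .replace("%STARTPAGE_VERSION%", 4);
// -> "https://snippets.example.invalid/4/%LOCALE%/"
// Services.urlFormatter.formatURL() then fills in the remaining %...%
// variables (locale, channel, and so on) at runtime.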
|
||||
|
||||
/**
|
||||
* This code provides services to the about:home page. Whenever
|
||||
* about:home needs to do something chrome-privileged, it sends a
|
||||
* message that's handled here.
|
||||
*/
|
||||
let AboutHome = {
|
||||
MESSAGES: [
|
||||
"AboutHome:RestorePreviousSession",
|
||||
"AboutHome:Downloads",
|
||||
"AboutHome:Bookmarks",
|
||||
"AboutHome:History",
|
||||
"AboutHome:Apps",
|
||||
"AboutHome:Addons",
|
||||
"AboutHome:Sync",
|
||||
"AboutHome:Settings",
|
||||
"AboutHome:RequestUpdate",
|
||||
"AboutHome:Search",
|
||||
],
|
||||
|
||||
init: function() {
|
||||
let mm = Cc["@mozilla.org/globalmessagemanager;1"].getService(Ci.nsIMessageListenerManager);
|
||||
|
||||
for (let msg of this.MESSAGES) {
|
||||
mm.addMessageListener(msg, this);
|
||||
}
|
||||
|
||||
Services.obs.addObserver(this, "browser-search-engine-modified", false);
|
||||
},
|
||||
|
||||
observe: function(aEngine, aTopic, aVerb) {
|
||||
switch (aTopic) {
|
||||
case "browser-search-engine-modified":
|
||||
this.sendAboutHomeData(null);
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
receiveMessage: function(aMessage) {
|
||||
let window = aMessage.target.ownerDocument.defaultView;
|
||||
|
||||
switch (aMessage.name) {
|
||||
case "AboutHome:RestorePreviousSession":
|
||||
let ss = Cc["@mozilla.org/browser/sessionstore;1"].
|
||||
getService(Ci.nsISessionStore);
|
||||
if (ss.canRestoreLastSession) {
|
||||
ss.restoreLastSession();
|
||||
}
|
||||
break;
|
||||
|
||||
case "AboutHome:Downloads":
|
||||
window.BrowserDownloadsUI();
|
||||
break;
|
||||
|
||||
case "AboutHome:Bookmarks":
|
||||
window.PlacesCommandHook.showPlacesOrganizer("AllBookmarks");
|
||||
break;
|
||||
|
||||
case "AboutHome:History":
|
||||
window.PlacesCommandHook.showPlacesOrganizer("History");
|
||||
break;
|
||||
|
||||
case "AboutHome:Apps":
|
||||
window.openUILinkIn("https://marketplace.mozilla.org/", "tab");
|
||||
break;
|
||||
|
||||
case "AboutHome:Addons":
|
||||
window.BrowserOpenAddonsMgr();
|
||||
break;
|
||||
|
||||
case "AboutHome:Sync":
|
||||
window.openPreferences("paneSync");
|
||||
break;
|
||||
|
||||
case "AboutHome:Settings":
|
||||
window.openPreferences();
|
||||
break;
|
||||
|
||||
case "AboutHome:RequestUpdate":
|
||||
this.sendAboutHomeData(aMessage.target);
|
||||
break;
|
||||
|
||||
case "AboutHome:Search":
|
||||
#ifdef MOZ_SERVICES_HEALTHREPORT
|
||||
window.BrowserSearch.recordSearchInHealthReport(aMessage.data.engineName, "abouthome");
|
||||
#endif
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
// Send all the chrome-privileged data needed by about:home. This
|
||||
// gets re-sent when the search engine changes.
|
||||
sendAboutHomeData: function(target) {
|
||||
let ss = Cc["@mozilla.org/browser/sessionstore;1"].
|
||||
getService(Ci.nsISessionStore);
|
||||
let data = {
|
||||
showRestoreLastSession: ss.canRestoreLastSession,
|
||||
snippetsURL: AboutHomeUtils.snippetsURL,
|
||||
showKnowYourRights: AboutHomeUtils.showKnowYourRights,
|
||||
snippetsVersion: AboutHomeUtils.snippetsVersion,
|
||||
defaultSearchEngine: AboutHomeUtils.defaultSearchEngine
|
||||
};
|
||||
|
||||
if (AboutHomeUtils.showKnowYourRights) {
|
||||
// Set pref to indicate we've shown the notification.
|
||||
let currentVersion = Services.prefs.getIntPref("browser.rights.version");
|
||||
Services.prefs.setBoolPref("browser.rights." + currentVersion + ".shown", true);
|
||||
}
|
||||
|
||||
if (target) {
|
||||
target.messageManager.sendAsyncMessage("AboutHome:Update", data);
|
||||
} else {
|
||||
let mm = Cc["@mozilla.org/globalmessagemanager;1"].getService(Ci.nsIMessageListenerManager);
|
||||
mm.broadcastAsyncMessage("AboutHome:Update", data);
|
||||
}
|
||||
},
|
||||
};
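The MESSAGES array plus the receiveMessage() switch define the whole chrome-side protocol, so extending about:home means touching both ends. A sketch with an invented message name, purely to show the wiring (not part of this commit):

// Content side (content.js): report a hypothetical "AboutHome:Feedback" action.
sendAsyncMessage("AboutHome:Feedback", { page: "about:home" });

// Chrome side (AboutHome.jsm): add "AboutHome:Feedback" to MESSAGES so init()
// registers a listener for it, then handle it in receiveMessage(), e.g.:
//   case "AboutHome:Feedback":
//     window.openUILinkIn("https://example.invalid/feedback", "tab");
//     break;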
|
@ -1,85 +0,0 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "AboutHomeUtils" ];
|
||||
|
||||
Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");
|
||||
Components.utils.import("resource://gre/modules/Services.jsm");
|
||||
|
||||
// Url to fetch snippets, in the urlFormatter service format.
|
||||
const SNIPPETS_URL_PREF = "browser.aboutHomeSnippets.updateUrl";
|
||||
|
||||
// Should be bumped up if the snippets content format changes.
|
||||
const STARTPAGE_VERSION = 4;
|
||||
|
||||
this.AboutHomeUtils = {
|
||||
get snippetsVersion() STARTPAGE_VERSION,
|
||||
|
||||
/**
|
||||
* Returns an object containing the name and searchURL of the original default
|
||||
* search engine.
|
||||
*/
|
||||
get defaultSearchEngine() {
|
||||
let defaultEngine = Services.search.defaultEngine;
|
||||
let submission = defaultEngine.getSubmission("_searchTerms_", null, "homepage");
|
||||
|
||||
return Object.freeze({
|
||||
name: defaultEngine.name,
|
||||
searchURL: submission.uri.spec,
|
||||
postDataString: submission.postDataString
|
||||
});
|
||||
},
|
||||
|
||||
/*
|
||||
* showKnowYourRights - Determines if the user should be shown the
|
||||
* about:rights notification. The notification should *not* be shown if
|
||||
* we've already shown the current version, or if the override pref says to
|
||||
* never show it. The notification *should* be shown if it's never been seen
|
||||
* before, if a newer version is available, or if the override pref says to
|
||||
* always show it.
|
||||
*/
|
||||
get showKnowYourRights() {
|
||||
// Look for an unconditional override pref. If set, do what it says.
|
||||
// (true --> never show, false --> always show)
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.rights.override");
|
||||
} catch (e) { }
|
||||
// Ditto, for the legacy EULA pref.
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.EULA.override");
|
||||
} catch (e) { }
|
||||
|
||||
#ifndef MOZILLA_OFFICIAL
|
||||
// Non-official builds shouldn't show the notification.
|
||||
return false;
|
||||
#endif
|
||||
|
||||
// Look to see if the user has seen the current version or not.
|
||||
var currentVersion = Services.prefs.getIntPref("browser.rights.version");
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.rights." + currentVersion + ".shown");
|
||||
} catch (e) { }
|
||||
|
||||
// Legacy: If the user accepted a EULA, we won't annoy them with the
|
||||
// equivalent about:rights page until the version changes.
|
||||
try {
|
||||
return !Services.prefs.getBoolPref("browser.EULA." + currentVersion + ".accepted");
|
||||
} catch (e) { }
|
||||
|
||||
// We haven't shown the notification before, so do so now.
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the URL to fetch snippets from, in the urlFormatter service format.
|
||||
*/
|
||||
XPCOMUtils.defineLazyGetter(AboutHomeUtils, "snippetsURL", function() {
|
||||
let updateURL = Services.prefs
|
||||
.getCharPref(SNIPPETS_URL_PREF)
|
||||
.replace("%STARTPAGE_VERSION%", STARTPAGE_VERSION);
|
||||
return Services.urlFormatter.formatURL(updateURL);
|
||||
});
|
@@ -26,7 +26,7 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows':
     ]

 EXTRA_PP_JS_MODULES += [
-    'AboutHomeUtils.jsm',
+    'AboutHome.jsm',
     'RecentWindow.jsm',
 ]
|
||||
|
||||
|
@ -573,7 +573,8 @@ class Pattern(object):
|
||||
def __init__(self, s):
|
||||
r = []
|
||||
i = 0
|
||||
while i < len(s):
|
||||
slen = len(s)
|
||||
while i < slen:
|
||||
c = s[i]
|
||||
if c == '\\':
|
||||
nc = s[i + 1]
|
||||
|
@@ -47,7 +47,7 @@ nsSecurityNameSet::~nsSecurityNameSet()

 NS_IMPL_ISUPPORTS1(nsSecurityNameSet, nsIScriptExternalNameSet)

-static JSBool
+static bool
 netscape_security_enablePrivilege(JSContext *cx, unsigned argc, JS::Value *vp)
 {
     Telemetry::Accumulate(Telemetry::ENABLE_PRIVILEGE_EVER_CALLED, true);
|
||||
|
@@ -184,7 +184,7 @@ class FileKind(object):

 def get_all_filenames():
     """Get a list of all the files in the (Mercurial or Git) repository."""
-    cmds = [['hg', 'manifest'], ['git', 'ls-files']]
+    cmds = [['hg', 'manifest', '-q'], ['git', 'ls-files']]
     for cmd in cmds:
         try:
             all_filenames = subprocess.check_output(cmd, universal_newlines=True,
|
||||
|
@@ -25,10 +25,13 @@ endef
 export::
 	@echo "BUILDSTATUS SUBTIERS IPDL WebIDL"

-export:: ipdl webidl
+export:: ipdl webidl xpidl-parser

 ipdl:
 	$(call make_subtier_dir,IPDL,$(DEPTH)/ipc/ipdl,ipdl)

 webidl:
 	$(call make_subtier_dir,WebIDL,$(DEPTH)/dom/bindings,webidl)
+
+xpidl-parser:
+	$(call make_subtier_dir,XPIDLParser,$(DEPTH)/xpcom/idl-parser,xpidl-parser)
|
||||
|
@ -44,6 +44,7 @@ class nsIChannel;
|
||||
class nsIConsoleService;
|
||||
class nsIContent;
|
||||
class nsIContentPolicy;
|
||||
class nsIContentSecurityPolicy;
|
||||
class nsIDocShell;
|
||||
class nsIDocument;
|
||||
class nsIDocumentLoaderFactory;
|
||||
@ -471,6 +472,12 @@ public:
|
||||
return sSecurityManager;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the ContentSecurityPolicy for a JS context.
|
||||
**/
|
||||
static bool GetContentSecurityPolicy(JSContext* aCx,
|
||||
nsIContentSecurityPolicy** aCSP);
|
||||
|
||||
// Returns the subject principal. Guaranteed to return non-null. May only
|
||||
// be called when nsContentUtils is initialized.
|
||||
static nsIPrincipal* GetSubjectPrincipal();
|
||||
|
@ -14,11 +14,13 @@ interface nsIContent;
|
||||
interface nsINode;
|
||||
|
||||
%{C++
|
||||
class nsIFrame;
|
||||
struct nsTextRangeStyle;
|
||||
struct nsPoint;
|
||||
struct ScrollAxis;
|
||||
#include "nsDirection.h"
|
||||
#include "nsTArray.h"
|
||||
#include "nsIFrame.h"
|
||||
#include "nsIPresShell.h" // TODO: Remove this include
|
||||
%}
|
||||
|
||||
[ptr] native nsIFrame(nsIFrame);
|
||||
|
@ -13,6 +13,7 @@ include $(DEPTH)/config/autoconf.mk
|
||||
LIBRARY_NAME = gkconbase_s
|
||||
MSVC_ENABLE_PGO := 1
|
||||
LIBXUL_LIBRARY = 1
|
||||
FAIL_ON_WARNINGS = 1
|
||||
|
||||
ifdef MOZ_WEBRTC
|
||||
LOCAL_INCLUDES += \
|
||||
|
@ -6026,6 +6026,34 @@ nsContentUtils::FindInternalContentViewer(const char* aType,
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
bool
|
||||
nsContentUtils::GetContentSecurityPolicy(JSContext* aCx,
|
||||
nsIContentSecurityPolicy** aCSP)
|
||||
{
|
||||
NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
|
||||
|
||||
// Get the security manager
|
||||
nsCOMPtr<nsIScriptSecurityManager> ssm = nsContentUtils::GetSecurityManager();
|
||||
|
||||
if (!ssm) {
|
||||
NS_ERROR("Failed to get security manager service");
|
||||
return false;
|
||||
}
|
||||
|
||||
nsCOMPtr<nsIPrincipal> subjectPrincipal = ssm->GetCxSubjectPrincipal(aCx);
|
||||
NS_ASSERTION(subjectPrincipal, "Failed to get subjectPrincipal");
|
||||
|
||||
nsCOMPtr<nsIContentSecurityPolicy> csp;
|
||||
nsresult rv = subjectPrincipal->GetCsp(getter_AddRefs(csp));
|
||||
if (NS_FAILED(rv)) {
|
||||
NS_ERROR("CSP: Failed to get CSP from principal.");
|
||||
return false;
|
||||
}
|
||||
|
||||
csp.forget(aCSP);
|
||||
return true;
|
||||
}
|
||||
|
||||
// static
|
||||
bool
|
||||
nsContentUtils::IsPatternMatching(nsAString& aValue, nsAString& aPattern,
|
||||
|
@ -5155,7 +5155,7 @@ nsIDocument::CreateAttributeNS(const nsAString& aNamespaceURI,
|
||||
return attribute.forget();
|
||||
}
|
||||
|
||||
static JSBool
|
||||
static bool
|
||||
CustomElementConstructor(JSContext *aCx, unsigned aArgc, JS::Value* aVp)
|
||||
{
|
||||
JS::CallArgs args = JS::CallArgsFromVp(aArgc, aVp);
|
||||
|
@@ -462,7 +462,8 @@ nsDocumentEncoder::SerializeToStringRecursive(nsINode* aNode,
   if (!maybeFixedNode)
     maybeFixedNode = aNode;

-  if (mFlags & SkipInvisibleContent & ~OutputNonTextContentAsPlaceholder) {
+  if ((mFlags & SkipInvisibleContent) &&
+      !(mFlags & OutputNonTextContentAsPlaceholder)) {
     if (aNode->IsNodeOfType(nsINode::eCONTENT)) {
       nsIFrame* frame = static_cast<nsIContent*>(aNode)->GetPrimaryFrame();
       if (frame) {
|
||||
|
@ -462,23 +462,6 @@ nsFrameLoader::ReallyStartLoadingInternal()
|
||||
mDocShell->CreateLoadInfo(getter_AddRefs(loadInfo));
|
||||
NS_ENSURE_TRUE(loadInfo, NS_ERROR_FAILURE);
|
||||
|
||||
// Does this frame have a parent which is already sandboxed or is this
|
||||
// an <iframe> with a sandbox attribute?
|
||||
uint32_t sandboxFlags = 0;
|
||||
uint32_t parentSandboxFlags = mOwnerContent->OwnerDoc()->GetSandboxFlags();
|
||||
|
||||
HTMLIFrameElement* iframe = HTMLIFrameElement::FromContent(mOwnerContent);
|
||||
|
||||
if (iframe) {
|
||||
sandboxFlags = iframe->GetSandboxFlags();
|
||||
}
|
||||
|
||||
if (sandboxFlags || parentSandboxFlags) {
|
||||
// The child can only add restrictions, never remove them.
|
||||
sandboxFlags |= parentSandboxFlags;
|
||||
mDocShell->SetSandboxFlags(sandboxFlags);
|
||||
}
|
||||
|
||||
// If this frame is sandboxed with respect to origin we will set it up with
|
||||
// a null principal later in nsDocShell::DoURILoad.
|
||||
// We do it there to correctly sandbox content that was loaded into
|
||||
@ -1569,6 +1552,15 @@ nsFrameLoader::MaybeCreateDocShell()
|
||||
mDocShell = do_CreateInstance("@mozilla.org/docshell;1");
|
||||
NS_ENSURE_TRUE(mDocShell, NS_ERROR_FAILURE);
|
||||
|
||||
// Apply sandbox flags even if our owner is not an iframe, as this copies
|
||||
// flags from our owning content's owning document.
|
||||
uint32_t sandboxFlags = 0;
|
||||
HTMLIFrameElement* iframe = HTMLIFrameElement::FromContent(mOwnerContent);
|
||||
if (iframe) {
|
||||
sandboxFlags = iframe->GetSandboxFlags();
|
||||
}
|
||||
ApplySandboxFlags(sandboxFlags);
|
||||
|
||||
if (!mNetworkCreated) {
|
||||
if (mDocShell) {
|
||||
mDocShell->SetCreatedDynamically(true);
|
||||
@ -2471,6 +2463,18 @@ nsFrameLoader::GetDetachedSubdocView(nsIDocument** aContainerDoc) const
|
||||
return mDetachedSubdocViews;
|
||||
}
|
||||
|
||||
void
|
||||
nsFrameLoader::ApplySandboxFlags(uint32_t sandboxFlags)
|
||||
{
|
||||
if (mDocShell) {
|
||||
uint32_t parentSandboxFlags = mOwnerContent->OwnerDoc()->GetSandboxFlags();
|
||||
|
||||
// The child can only add restrictions, never remove them.
|
||||
sandboxFlags |= parentSandboxFlags;
|
||||
mDocShell->SetSandboxFlags(sandboxFlags);
|
||||
}
|
||||
}
|
||||
|
||||
/* virtual */ void
|
||||
nsFrameLoader::AttributeChanged(nsIDocument* aDocument,
|
||||
mozilla::dom::Element* aElement,
|
||||
|
@ -303,6 +303,13 @@ public:
|
||||
*/
|
||||
nsView* GetDetachedSubdocView(nsIDocument** aContainerDoc) const;
|
||||
|
||||
/**
|
||||
* Applies a new set of sandbox flags. These are merged with the sandbox
* flags from our owning content's owning document with a logical OR; this
* ensures that we can only add restrictions, never remove them.
|
||||
*/
|
||||
void ApplySandboxFlags(uint32_t sandboxFlags);
|
||||
|
||||
private:
|
||||
|
||||
void SetOwnerContent(mozilla::dom::Element* aContent);
|
||||
|
@ -60,6 +60,7 @@
|
||||
#include "nsGUIEvent.h"
|
||||
#include "nsUnicharUtils.h"
|
||||
#include "mozilla/Preferences.h"
|
||||
#include "nsSandboxFlags.h"
|
||||
|
||||
// Concrete classes
|
||||
#include "nsFrameLoader.h"
|
||||
@ -2289,9 +2290,20 @@ nsObjectLoadingContent::OpenChannel()
|
||||
httpChan->SetReferrer(doc->GetDocumentURI());
|
||||
}
|
||||
|
||||
// Set up the channel's principal and such, like nsDocShell::DoURILoad does
|
||||
nsContentUtils::SetUpChannelOwner(thisContent->NodePrincipal(),
|
||||
chan, mURI, true);
|
||||
// Set up the channel's principal and such, like nsDocShell::DoURILoad does.
|
||||
// If the content being loaded should be sandboxed with respect to origin we
|
||||
// create a new null principal here. nsContentUtils::SetUpChannelOwner is
|
||||
// used with a flag to force it to be set as the channel owner.
|
||||
nsCOMPtr<nsIPrincipal> ownerPrincipal;
|
||||
uint32_t sandboxFlags = doc->GetSandboxFlags();
|
||||
if (sandboxFlags & SANDBOXED_ORIGIN) {
|
||||
ownerPrincipal = do_CreateInstance("@mozilla.org/nullprincipal;1");
|
||||
} else {
|
||||
// Not sandboxed - we allow the content to assume its natural owner.
|
||||
ownerPrincipal = thisContent->NodePrincipal();
|
||||
}
|
||||
nsContentUtils::SetUpChannelOwner(ownerPrincipal, chan, mURI, true,
|
||||
sandboxFlags & SANDBOXED_ORIGIN);
|
||||
|
||||
nsCOMPtr<nsIScriptChannel> scriptChannel = do_QueryInterface(chan);
|
||||
if (scriptChannel) {
|
||||
|
@ -102,6 +102,8 @@ nsPlainTextSerializer::nsPlainTextSerializer()
|
||||
mOLStackIndex = 0;
|
||||
|
||||
mULCount = 0;
|
||||
|
||||
mIgnoredChildNodeLevel = 0;
|
||||
}
|
||||
|
||||
nsPlainTextSerializer::~nsPlainTextSerializer()
|
||||
@ -232,6 +234,27 @@ nsPlainTextSerializer::PopBool(nsTArray<bool>& aStack)
|
||||
return returnValue;
|
||||
}
|
||||
|
||||
bool
|
||||
nsPlainTextSerializer::ShouldReplaceContainerWithPlaceholder(nsIAtom* aTag)
|
||||
{
|
||||
// If nsIDocumentEncoder::OutputNonTextContentAsPlaceholder is set, a
// non-textual container element should be serialized as a placeholder
// character and its child nodes should be ignored. See bug 895239.
|
||||
if (!(mFlags & nsIDocumentEncoder::OutputNonTextContentAsPlaceholder)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return
|
||||
(aTag == nsGkAtoms::audio) ||
|
||||
(aTag == nsGkAtoms::canvas) ||
|
||||
(aTag == nsGkAtoms::iframe) ||
|
||||
(aTag == nsGkAtoms::meter) ||
|
||||
(aTag == nsGkAtoms::progress) ||
|
||||
(aTag == nsGkAtoms::object) ||
|
||||
(aTag == nsGkAtoms::svg) ||
|
||||
(aTag == nsGkAtoms::video);
|
||||
}
|
||||
|
||||
NS_IMETHODIMP
|
||||
nsPlainTextSerializer::AppendText(nsIContent* aText,
|
||||
int32_t aStartOffset,
|
||||
@ -403,6 +426,18 @@ nsPlainTextSerializer::AppendDocumentStart(nsIDocument *aDocument,
|
||||
nsresult
|
||||
nsPlainTextSerializer::DoOpenContainer(nsIAtom* aTag)
|
||||
{
|
||||
// Check if we need to output the current node as a placeholder character
// and ignore its child nodes.
|
||||
if (ShouldReplaceContainerWithPlaceholder(mElement->Tag())) {
|
||||
if (mIgnoredChildNodeLevel == 0) {
|
||||
// Serialize current node as placeholder character
|
||||
Write(NS_LITERAL_STRING("\xFFFC"));
|
||||
}
|
||||
// Ignore child nodes.
|
||||
mIgnoredChildNodeLevel++;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
if (mFlags & nsIDocumentEncoder::OutputRaw) {
|
||||
// Raw means raw. Don't even think about doing anything fancy
|
||||
// here like indenting, adding line breaks or any other
|
||||
@ -728,6 +763,11 @@ nsPlainTextSerializer::DoOpenContainer(nsIAtom* aTag)
|
||||
nsresult
|
||||
nsPlainTextSerializer::DoCloseContainer(nsIAtom* aTag)
|
||||
{
|
||||
if (ShouldReplaceContainerWithPlaceholder(mElement->Tag())) {
|
||||
mIgnoredChildNodeLevel--;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
if (mFlags & nsIDocumentEncoder::OutputRaw) {
|
||||
// Raw means raw. Don't even think about doing anything fancy
|
||||
// here like indenting, adding line breaks or any other
|
||||
@ -923,6 +963,10 @@ nsPlainTextSerializer::DoCloseContainer(nsIAtom* aTag)
|
||||
bool
|
||||
nsPlainTextSerializer::MustSuppressLeaf()
|
||||
{
|
||||
if (mIgnoredChildNodeLevel > 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if ((mTagStackIndex > 1 &&
|
||||
mTagStack[mTagStackIndex-2] == nsGkAtoms::select) ||
|
||||
(mTagStackIndex > 0 &&
|
||||
@ -1031,7 +1075,7 @@ nsPlainTextSerializer::DoAddLeaf(nsIAtom* aTag)
|
||||
EnsureVerticalSpace(0);
|
||||
}
|
||||
else if (mFlags & nsIDocumentEncoder::OutputNonTextContentAsPlaceholder) {
|
||||
Write(NS_LITERAL_STRING("\uFFFC"));
|
||||
Write(NS_LITERAL_STRING("\xFFFC"));
|
||||
}
|
||||
else if (aTag == nsGkAtoms::img) {
|
||||
/* Output (in decreasing order of preference)
|
||||
|
@ -109,6 +109,8 @@ protected:
|
||||
void PushBool(nsTArray<bool>& aStack, bool aValue);
|
||||
bool PopBool(nsTArray<bool>& aStack);
|
||||
|
||||
bool ShouldReplaceContainerWithPlaceholder(nsIAtom* aTag);
|
||||
|
||||
protected:
|
||||
nsString mCurrentLine;
|
||||
uint32_t mHeadLevel;
|
||||
@ -208,6 +210,15 @@ protected:
|
||||
// Convenience constant. It would be nice to have it as a const static
|
||||
// variable, but that causes issues with OpenBSD and module unloading.
|
||||
const nsString kSpace;
|
||||
|
||||
// If nsIDocumentEncoder::OutputNonTextContentAsPlaceholder is set, the child
|
||||
// nodes of specific nodes - <iframe>, <canvas>, etc. should be ignored.
|
||||
// mIgnoredChildNodeLevel is used to tell if current node is an ignorable
|
||||
// child node. The initial value of mIgnoredChildNodeLevel is 0. When
|
||||
// serializer enters those specific nodes, mIgnoredChildNodeLevel increases
|
||||
// and is greater than 0. Otherwise when serializer leaves those nodes,
|
||||
// mIgnoredChildNodeLevel decreases.
|
||||
uint32_t mIgnoredChildNodeLevel;
|
||||
};
|
||||
|
||||
nsresult
|
||||
|
@ -16,7 +16,15 @@ template<> struct Non8BitParameters<4> {
|
||||
};
|
||||
|
||||
template<> struct Non8BitParameters<8> {
|
||||
static inline size_t mask() { return 0xff00ff00ff00ff00; }
|
||||
static inline size_t mask() {
|
||||
static const uint64_t maskAsUint64 = 0xff00ff00ff00ff00ULL;
|
||||
// We have to explicitly cast this 64-bit value to a size_t, or else
|
||||
// compilers for 32-bit platforms will warn about it being too large to fit
|
||||
// in the size_t return type. (Fortunately, this code isn't actually
|
||||
// invoked on 32-bit platforms -- they'll use the <4> specialization above.
|
||||
// So it is, in fact, OK that this value is too large for a 32-bit size_t.)
|
||||
return (size_t)maskAsUint64;
|
||||
}
|
||||
static inline uint32_t alignMask() { return 0x7; }
|
||||
static inline uint32_t numUnicharsPerWord() { return 4; }
|
||||
};
|
||||
|
@ -242,6 +242,7 @@ MOCHITEST_FILES_A = \
|
||||
test_bug704063.html \
|
||||
test_bug894874.html \
|
||||
test_bug895974.html \
|
||||
test_bug895239.html \
|
||||
$(NULL)
|
||||
|
||||
MOCHITEST_FILES_B = \
|
||||
@ -363,6 +364,18 @@ MOCHITEST_FILES_B = \
|
||||
file_CSP_evalscript_main_spec_compliant.html^headers^ \
|
||||
file_CSP_evalscript_main_spec_compliant_allowed.html \
|
||||
file_CSP_evalscript_main_spec_compliant_allowed.html^headers^ \
|
||||
test_CSP_evalscript_getCRMFRequest.html \
|
||||
file_CSP_evalscript_main_getCRMFRequest.html \
|
||||
file_CSP_evalscript_main_getCRMFRequest.html^headers^ \
|
||||
file_CSP_evalscript_main_getCRMFRequest.js \
|
||||
file_CSP_evalscript_main_allowed_getCRMFRequest.js \
|
||||
file_CSP_evalscript_main_spec_compliant_getCRMFRequest.html \
|
||||
file_CSP_evalscript_main_spec_compliant_getCRMFRequest.html^headers^ \
|
||||
file_CSP_evalscript_main_spec_compliant_allowed_getCRMFRequest.html \
|
||||
file_CSP_evalscript_main_spec_compliant_allowed_getCRMFRequest.html^headers^ \
|
||||
file_CSP_evalscript_no_CSP_at_all.html \
|
||||
file_CSP_evalscript_no_CSP_at_all.html^headers^ \
|
||||
file_CSP_evalscript_no_CSP_at_all.js \
|
||||
test_CSP_inlinestyle.html \
|
||||
file_CSP_inlinestyle_main.html \
|
||||
file_CSP_inlinestyle_main.html^headers^ \
|
||||
|
@ -30,7 +30,6 @@ var onevalblocked = (function(window) {
|
||||
// Defer until document is loaded so that we can write the pretty result boxes
|
||||
// out.
|
||||
addEventListener('load', function() {
|
||||
|
||||
// setTimeout(String) test -- mutate something in the window._testResults
|
||||
// obj, then check it.
|
||||
{
|
||||
|
@ -28,7 +28,6 @@ var onevalblocked = (function(window) {
|
||||
// Defer until document is loaded so that we can write the pretty result boxes
|
||||
// out.
|
||||
addEventListener('load', function() {
|
||||
|
||||
// setTimeout(String) test -- should pass
|
||||
try {
|
||||
setTimeout('onevalexecuted(true, "setTimeout(String)", "setTimeout with a string was enabled.");', 10);
|
||||
|
@ -0,0 +1,42 @@
|
||||
// some javascript for the CSP eval() tests
|
||||
// all of these evals should succeed, as the document loading this script
|
||||
// has script-src 'self' 'unsafe-eval'
|
||||
|
||||
function logResult(str, passed) {
|
||||
var elt = document.createElement('div');
|
||||
var color = passed ? "#cfc;" : "#fcc";
|
||||
elt.setAttribute('style', 'background-color:' + color + '; width:100%; border:1px solid black; padding:3px; margin:4px;');
|
||||
elt.innerHTML = str;
|
||||
document.body.appendChild(elt);
|
||||
}
|
||||
|
||||
// callback for when stuff is allowed by CSP
|
||||
var onevalexecuted = (function(window) {
|
||||
return function(shouldrun, what, data) {
|
||||
window.parent.scriptRan(shouldrun, what, data);
|
||||
logResult((shouldrun ? "PASS: " : "FAIL: ") + what + " : " + data, shouldrun);
|
||||
};})(window);
|
||||
|
||||
// callback for when stuff is blocked
|
||||
var onevalblocked = (function(window) {
|
||||
return function(shouldrun, what, data) {
|
||||
window.parent.scriptBlocked(shouldrun, what, data);
|
||||
logResult((shouldrun ? "FAIL: " : "PASS: ") + what + " : " + data, !shouldrun);
|
||||
};})(window);
|
||||
|
||||
|
||||
// Defer until document is loaded so that we can write the pretty result boxes
|
||||
// out.
|
||||
addEventListener('load', function() {
|
||||
// test that allows crypto.generateCRMFRequest eval to run
|
||||
try {
|
||||
var script =
|
||||
'console.log("dynamic script passed to crypto.generateCRMFRequest should execute")';
|
||||
crypto.generateCRMFRequest('CN=0', 0, 0, null, script, 384, null, 'rsa-dual-use');
|
||||
onevalexecuted(true, "eval(script) inside crypto.generateCRMFRequest",
|
||||
"eval executed during crypto.generateCRMFRequest");
|
||||
} catch (e) {
|
||||
onevalblocked(true, "eval(script) inside crypto.generateCRMFRequest",
|
||||
"eval was blocked during crypto.generateCRMFRequest");
|
||||
}
|
||||
}, false);
|
@ -0,0 +1,12 @@
|
||||
<html>
|
||||
<head>
|
||||
<title>CSP eval script tests</title>
|
||||
<script type="application/javascript"
|
||||
src="file_CSP_evalscript_main_getCRMFRequest.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
Foo.
|
||||
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,2 @@
|
||||
Cache-Control: no-cache
|
||||
X-Content-Security-Policy: default-src 'self'
|
48
content/base/test/file_CSP_evalscript_main_getCRMFRequest.js
Normal file
48
content/base/test/file_CSP_evalscript_main_getCRMFRequest.js
Normal file
@ -0,0 +1,48 @@
|
||||
// some javascript for the CSP eval() tests
|
||||
|
||||
function logResult(str, passed) {
|
||||
var elt = document.createElement('div');
|
||||
var color = passed ? "#cfc;" : "#fcc";
|
||||
elt.setAttribute('style', 'background-color:' + color + '; width:100%; border:1px solid black; padding:3px; margin:4px;');
|
||||
elt.innerHTML = str;
|
||||
document.body.appendChild(elt);
|
||||
}
|
||||
|
||||
window._testResults = {};
|
||||
|
||||
// callback for when stuff is allowed by CSP
|
||||
var onevalexecuted = (function(window) {
|
||||
return function(shouldrun, what, data) {
|
||||
window._testResults[what] = "ran";
|
||||
window.parent.scriptRan(shouldrun, what, data);
|
||||
logResult((shouldrun ? "PASS: " : "FAIL: ") + what + " : " + data, shouldrun);
|
||||
};})(window);
|
||||
|
||||
// callback for when stuff is blocked
|
||||
var onevalblocked = (function(window) {
|
||||
return function(shouldrun, what, data) {
|
||||
window._testResults[what] = "blocked";
|
||||
window.parent.scriptBlocked(shouldrun, what, data);
|
||||
logResult((shouldrun ? "FAIL: " : "PASS: ") + what + " : " + data, !shouldrun);
|
||||
};})(window);
|
||||
|
||||
|
||||
// Defer until document is loaded so that we can write the pretty result boxes
|
||||
// out.
|
||||
addEventListener('load', function() {
|
||||
// generateCRMFRequest test -- make sure we cannot eval the callback if CSP is in effect
|
||||
try {
|
||||
var script = 'console.log("dynamic script eval\'d in crypto.generateCRMFRequest should be disallowed")';
|
||||
crypto.generateCRMFRequest('CN=0', 0, 0, null, script, 384, null, 'rsa-dual-use');
|
||||
onevalexecuted(false, "crypto.generateCRMFRequest()",
|
||||
"crypto.generateCRMFRequest() should not run!");
|
||||
} catch (e) {
|
||||
onevalblocked(false, "eval(script) inside crypto.generateCRMFRequest",
|
||||
"eval was blocked during crypto.generateCRMFRequest");
|
||||
}
|
||||
|
||||
|
||||
}, false);
|
||||
|
||||
|
||||
|
@ -0,0 +1,12 @@
|
||||
<html>
|
||||
<head>
|
||||
<title>CSP eval script tests</title>
|
||||
<script type="application/javascript"
|
||||
src="file_CSP_evalscript_main_allowed_getCRMFRequest.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
Foo.
|
||||
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,2 @@
|
||||
Cache-Control: no-cache
|
||||
Content-Security-Policy: default-src 'self' ; script-src 'self' 'unsafe-eval'
|
@ -0,0 +1,12 @@
|
||||
<html>
|
||||
<head>
|
||||
<title>CSP eval script tests</title>
|
||||
<script type="application/javascript"
|
||||
src="file_CSP_evalscript_main_getCRMFRequest.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
Foo.
|
||||
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,2 @@
|
||||
Cache-Control: no-cache
|
||||
Content-Security-Policy: default-src 'self'
|
12
content/base/test/file_CSP_evalscript_no_CSP_at_all.html
Normal file
12
content/base/test/file_CSP_evalscript_no_CSP_at_all.html
Normal file
@ -0,0 +1,12 @@
|
||||
<html>
|
||||
<head>
|
||||
<title>CSP eval script tests: no CSP specified</title>
|
||||
<script type="application/javascript"
|
||||
src="file_CSP_evalscript_no_CSP_at_all.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
Foo. See bug 824652
|
||||
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1 @@
|
||||
Cache-Control: no-cache
|
42
content/base/test/file_CSP_evalscript_no_CSP_at_all.js
Normal file
42
content/base/test/file_CSP_evalscript_no_CSP_at_all.js
Normal file
@ -0,0 +1,42 @@
|
||||
// some javascript for the CSP eval() tests
|
||||
// all of these evals should succeed, as the document loading this script
|
||||
// has script-src 'self' 'unsafe-eval'
|
||||
|
||||
function logResult(str, passed) {
|
||||
var elt = document.createElement('div');
|
||||
var color = passed ? "#cfc;" : "#fcc";
|
||||
elt.setAttribute('style', 'background-color:' + color + '; width:100%; border:1px solid black; padding:3px; margin:4px;');
|
||||
elt.innerHTML = str;
|
||||
document.body.appendChild(elt);
|
||||
}
|
||||
|
||||
// callback for when stuff is allowed by CSP
|
||||
var onevalexecuted = (function(window) {
|
||||
return function(shouldrun, what, data) {
|
||||
window.parent.scriptRan(shouldrun, what, data);
|
||||
logResult((shouldrun ? "PASS: " : "FAIL: ") + what + " : " + data, shouldrun);
|
||||
};})(window);
|
||||
|
||||
// callback for when stuff is blocked
|
||||
var onevalblocked = (function(window) {
|
||||
return function(shouldrun, what, data) {
|
||||
window.parent.scriptBlocked(shouldrun, what, data);
|
||||
logResult((shouldrun ? "FAIL: " : "PASS: ") + what + " : " + data, !shouldrun);
|
||||
};})(window);
|
||||
|
||||
|
||||
// Defer until document is loaded so that we can write the pretty result boxes
|
||||
// out.
|
||||
addEventListener('load', function() {
|
||||
// test that allows crypto.generateCRMFRequest eval to run when there is no CSP at all in place
|
||||
try {
|
||||
var script =
|
||||
'console.log("dynamic script passed to crypto.generateCRMFRequest should execute")';
|
||||
crypto.generateCRMFRequest('CN=0', 0, 0, null, script, 384, null, 'rsa-dual-use');
|
||||
onevalexecuted(true, "eval(script) inside crypto.generateCRMFRequest: no CSP at all",
|
||||
"eval executed during crypto.generateCRMFRequest where no CSP is set at all");
|
||||
} catch (e) {
|
||||
onevalblocked(true, "eval(script) inside crypto.generateCRMFRequest",
|
||||
"eval was blocked during crypto.generateCRMFRequest");
|
||||
}
|
||||
}, false);
|
@ -61,6 +61,7 @@ SpecialPowers.pushPrefEnv(
|
||||
document.getElementById('cspframe').src = 'file_CSP_evalscript_main.html';
|
||||
document.getElementById('cspframe2').src = 'file_CSP_evalscript_main_spec_compliant.html';
|
||||
document.getElementById('cspframe3').src = 'file_CSP_evalscript_main_spec_compliant_allowed.html';
|
||||
// document.getElementById('cspframe4').src = 'file_CSP_evalscript_no_CSP_at_all.html';
|
||||
});
|
||||
</script>
|
||||
</pre>
|
||||
|
70
content/base/test/test_CSP_evalscript_getCRMFRequest.html
Normal file
70
content/base/test/test_CSP_evalscript_getCRMFRequest.html
Normal file
@ -0,0 +1,70 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<title>Test for Content Security Policy "no eval" in crypto.getCRMFRequest()</title>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
|
||||
</head>
|
||||
<body>
|
||||
<p id="display"></p>
|
||||
<div id="content" style="display: none">
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
<iframe style="width:100%;height:300px;" id='cspframe'></iframe>
|
||||
<iframe style="width:100%;height:300px;" id='cspframe2'></iframe>
|
||||
<iframe style="width:100%;height:300px;" id='cspframe3'></iframe>
|
||||
<iframe style="width:100%;height:300px;" id='cspframe4'></iframe>
|
||||
<script class="testbody" type="text/javascript">
|
||||
|
||||
var path = "/tests/content/base/test/";
|
||||
|
||||
var evalScriptsThatRan = 0;
|
||||
var evalScriptsBlocked = 0;
|
||||
var evalScriptsTotal = 4;
|
||||
|
||||
|
||||
// called by scripts that run
|
||||
var scriptRan = function(shouldrun, testname, data) {
|
||||
evalScriptsThatRan++;
|
||||
ok(shouldrun, 'EVAL SCRIPT RAN: ' + testname + '(' + data + ')');
|
||||
checkTestResults();
|
||||
}
|
||||
|
||||
// called when a script is blocked
|
||||
var scriptBlocked = function(shouldrun, testname, data) {
|
||||
evalScriptsBlocked++;
|
||||
ok(!shouldrun, 'EVAL SCRIPT BLOCKED: ' + testname + '(' + data + ')');
|
||||
checkTestResults();
|
||||
}
|
||||
|
||||
|
||||
// Check to see if all the tests have run
|
||||
var checkTestResults = function() {
|
||||
// if any test is incomplete, keep waiting
|
||||
if (evalScriptsTotal - evalScriptsBlocked - evalScriptsThatRan > 0)
|
||||
return;
|
||||
|
||||
// ... otherwise, finish
|
||||
SimpleTest.finish();
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
// set up and go
|
||||
SimpleTest.waitForExplicitFinish();
|
||||
|
||||
SpecialPowers.pushPrefEnv(
|
||||
{'set':[["security.csp.speccompliant", true]]},
|
||||
function() {
|
||||
// save this for last so that our listeners are registered.
|
||||
// ... this loads the testbed of good and bad requests.
|
||||
document.getElementById('cspframe').src = 'file_CSP_evalscript_main_getCRMFRequest.html';
|
||||
document.getElementById('cspframe2').src = 'file_CSP_evalscript_main_spec_compliant_getCRMFRequest.html';
|
||||
document.getElementById('cspframe3').src = 'file_CSP_evalscript_main_spec_compliant_allowed_getCRMFRequest.html';
|
||||
document.getElementById('cspframe4').src = 'file_CSP_evalscript_no_CSP_at_all.html';
|
||||
});
|
||||
</script>
|
||||
</pre>
|
||||
</body>
|
||||
</html>
|
123
content/base/test/test_bug895239.html
Normal file
@@ -0,0 +1,123 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=895239
-->
<head>
<meta charset="utf-8">
<title>Test for Bug 895239</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
<script type="application/javascript">
function testPaintextSerializerWithPlaceHolder() {

const de = SpecialPowers.Ci.nsIDocumentEncoder;
const Cc = SpecialPowers.Cc;

// Create a plaintext encoder with the flag OutputNonTextContentAsPlaceholder.
var encoder = Cc["@mozilla.org/layout/documentEncoder;1?type=text/plain"]
.createInstance(de);
var flags = de.OutputRaw |
de.OutputNonTextContentAsPlaceholder;
encoder.init(document, "text/plain", flags);

function toPlaintext(id) {
var element = document.getElementById(id);
var range = document.createRange();
range.selectNodeContents(element);
encoder.setRange(range);
return encoder.encodeToString();
}

// Test cases to serialize all nodes including invisible nodes.
is(toPlaintext("case1"), "This is an audio \uFFFC! ", "test with <audio>");
is(toPlaintext("case2"), "This is a canvas \uFFFC! ", "test with <canvas>");
is(toPlaintext("case3"), "This is an iframe \uFFFC! ", "test with one <iframe>");
is(toPlaintext("case4"), "One iframe \uFFFC with another iframe \uFFFC. ", "test with two <iframes>");
is(toPlaintext("case5"), "This is a meter \uFFFC! ", "test with <meter>");
is(toPlaintext("case6"), "This is a progress \uFFFC! ", "test with <progress>");
is(toPlaintext("case7"), "This is an object \uFFFC! ", "test with <object>");
is(toPlaintext("case8"), "This is a svg \uFFFC! ", "test with <svg>");
is(toPlaintext("case9"), "This is a video \uFFFC! ", "test with <video>");
is(toPlaintext("case10"), "This is a video \uFFFC! ", "test with nested tags");

// Test cases to serialize visible nodes only.
encoder.init(document, "text/plain", flags | de.SkipInvisibleContent);
is(toPlaintext("case1"), "This is an audio \uFFFC! ", "test with <audio> for visible nodes");
is(toPlaintext("case2"), "This is a canvas \uFFFC! ", "test with <canvas> for visible nodes");
is(toPlaintext("case3"), "This is an iframe \uFFFC! ", "test with one <iframe> for visible nodes");
is(toPlaintext("case4"), "One iframe \uFFFC with another iframe . ", "test with two <iframes> for visible nodes");
is(toPlaintext("case5"), "This is a meter \uFFFC! ", "test with <meter> for visible nodes");
is(toPlaintext("case6"), "This is a progress \uFFFC! ", "test with <progress> for visible nodes");
is(toPlaintext("case7"), "This is an object \uFFFC! ", "test with <object> for visible nodes");
is(toPlaintext("case8"), "This is a svg \uFFFC! ", "test with <svg> for visible nodes");
is(toPlaintext("case9"), "This is a video \uFFFC! ", "test with <video> for visible nodes");
is(toPlaintext("case10"), "This is a video \uFFFC! ", "test with nested tags for visible nodes");
SimpleTest.finish();
}

addLoadEvent(testPaintextSerializerWithPlaceHolder);
SimpleTest.waitForExplicitFinish();
</script>
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=895239">Mozilla Bug 895239</a>
<p id="display"></p>
<div id="content">
<span id="case1">This is an audio
<audio controls="controls">
Your browser does not support <code>audio</code> element.
</audio>!
</span>
<span id="case2">This is a canvas
<canvas height="100" width="100">
Your browser does not support canvas element.
</canvas>!
</span>
<span id="case3">This is an iframe
<iframe src="about:blank">
Your browser does not support iframes.
</iframe>!
</span>
<span id="case4">One iframe
<iframe src="about:blank">
Your browser does not support iframes.
</iframe> with another iframe
<iframe src="about:blank" style="display: none"></iframe>.
</span>
<span id="case5">This is a meter
<meter min="0" max="100" value="50">
50%
</meter>!
</span>
<span id="case6">This is a progress
<progress max="100" value="70">
70%
</progress>!
</span>
<span id="case7">This is an object
<object type="application/x-shockware-flash">
<a href="#">Download the plugin.</a>
</object>!
</span>
<span id="case8">This is a svg
<svg height="100" width="100">
Your browser does not support svg.
<circle cx="100" cy="100" r="80" fill="green"></circle>
</svg>!
</span>
<span id="case9">This is a video
<video>
Your browser does not support videos.
</video>!
</span>
<span id="case10">This is a video
<video>
Your browser does not support videos.<iframe src="about:blank"></iframe>
</video>!
</span>
</div>
<pre id="test">
</pre>
</body>
</html>
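The \uFFFC expected throughout the assertions above is U+FFFC OBJECT REPLACEMENT CHARACTER, the placeholder that OutputNonTextContentAsPlaceholder substitutes for each non-text node. A quick, self-contained check of that code point and of the shape of the expected strings (plain JS, no Gecko APIs required):

var placeholder = "\uFFFC";
console.log(placeholder.charCodeAt(0).toString(16));    // "fffc"
console.log("This is an audio " + placeholder + "! ");  // matches the expected output above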
@ -96,7 +96,7 @@
|
||||
"set": [
|
||||
["dom.ipc.browser_frames.oop_by_default", true],
|
||||
["dom.mozBrowserFramesEnabled", true],
|
||||
["browser.pageThumbs.enabled", false]
|
||||
["browser.pagethumbnails.capturing_disabled", false]
|
||||
]
|
||||
}, runTests);
|
||||
});
|
||||
|
@ -13,6 +13,7 @@
|
||||
#include "WebGLMemoryMultiReporterWrapper.h"
|
||||
#include "WebGLFramebuffer.h"
|
||||
#include "WebGLVertexArray.h"
|
||||
#include "WebGLQuery.h"
|
||||
|
||||
#include "AccessCheck.h"
|
||||
#include "nsIConsoleService.h"
|
||||
@ -196,8 +197,9 @@ WebGLContext::WebGLContext()
|
||||
|
||||
mLastUseIndex = 0;
|
||||
|
||||
mMinInUseAttribArrayLengthCached = false;
|
||||
mMinInUseAttribArrayLength = 0;
|
||||
mBufferFetchingIsVerified = false;
|
||||
mMaxFetchedVertices = 0;
|
||||
mMaxFetchedInstances = 0;
|
||||
|
||||
mIsScreenCleared = false;
|
||||
|
||||
@ -237,6 +239,7 @@ WebGLContext::DestroyResourcesAndContext()
|
||||
mBoundArrayBuffer = nullptr;
|
||||
mCurrentProgram = nullptr;
|
||||
mBoundFramebuffer = nullptr;
|
||||
mActiveOcclusionQuery = nullptr;
|
||||
mBoundRenderbuffer = nullptr;
|
||||
mBoundVertexArray = nullptr;
|
||||
mDefaultVertexArray = nullptr;
|
||||
@ -255,6 +258,8 @@ WebGLContext::DestroyResourcesAndContext()
|
||||
mShaders.getLast()->DeleteOnce();
|
||||
while (!mPrograms.isEmpty())
|
||||
mPrograms.getLast()->DeleteOnce();
|
||||
while (!mQueries.isEmpty())
|
||||
mQueries.getLast()->DeleteOnce();
|
||||
|
||||
if (mBlackTexturesAreInitialized) {
|
||||
gl->fDeleteTextures(1, &mBlackTexture2D);
|
||||
@ -1599,7 +1604,7 @@ WebGLContext::GetSupportedExtensions(JSContext *cx, Nullable< nsTArray<nsString>
|
||||
NS_IMPL_CYCLE_COLLECTING_ADDREF(WebGLContext)
|
||||
NS_IMPL_CYCLE_COLLECTING_RELEASE(WebGLContext)
|
||||
|
||||
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_9(WebGLContext,
|
||||
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_10(WebGLContext,
|
||||
mCanvasElement,
|
||||
mExtensions,
|
||||
mBound2DTextures,
|
||||
@ -1608,7 +1613,8 @@ NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_9(WebGLContext,
|
||||
mCurrentProgram,
|
||||
mBoundFramebuffer,
|
||||
mBoundRenderbuffer,
|
||||
mBoundVertexArray)
|
||||
mBoundVertexArray,
|
||||
mActiveOcclusionQuery)
|
||||
|
||||
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(WebGLContext)
|
||||
NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
|
||||
|
@ -71,6 +71,7 @@ class WebGLBuffer;
|
||||
class WebGLVertexAttribData;
|
||||
class WebGLShader;
|
||||
class WebGLProgram;
|
||||
class WebGLQuery;
|
||||
class WebGLUniformLocation;
|
||||
class WebGLFramebuffer;
|
||||
class WebGLRenderbuffer;
|
||||
@ -389,10 +390,8 @@ public:
|
||||
void DepthRange(WebGLclampf zNear, WebGLclampf zFar);
|
||||
void DetachShader(WebGLProgram *program, WebGLShader *shader);
|
||||
void Disable(WebGLenum cap);
|
||||
void DisableVertexAttribArray(WebGLuint index);
|
||||
void DrawBuffers(const dom::Sequence<GLenum>& buffers);
|
||||
void Enable(WebGLenum cap);
|
||||
void EnableVertexAttribArray(WebGLuint index);
|
||||
void Flush() {
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
@ -462,9 +461,6 @@ public:
|
||||
WebGLUniformLocation *location, ErrorResult& rv);
|
||||
already_AddRefed<WebGLUniformLocation>
|
||||
GetUniformLocation(WebGLProgram *prog, const nsAString& name);
|
||||
JS::Value GetVertexAttrib(JSContext* cx, WebGLuint index, WebGLenum pname,
|
||||
ErrorResult& rv);
|
||||
WebGLsizeiptr GetVertexAttribOffset(WebGLuint index, WebGLenum pname);
|
||||
void Hint(WebGLenum target, WebGLenum mode);
|
||||
bool IsBuffer(WebGLBuffer *buffer);
|
||||
bool IsEnabled(WebGLenum cap);
|
||||
@ -745,6 +741,39 @@ public:
|
||||
WebGLUniformLocation *location,
|
||||
WebGLint value);
|
||||
|
||||
void Viewport(WebGLint x, WebGLint y, WebGLsizei width, WebGLsizei height);
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Asynchronous Queries (WebGLContextAsyncQueries.cpp)
|
||||
public:
|
||||
already_AddRefed<WebGLQuery> CreateQuery();
|
||||
void DeleteQuery(WebGLQuery *query);
|
||||
void BeginQuery(WebGLenum target, WebGLQuery *query);
|
||||
void EndQuery(WebGLenum target);
|
||||
bool IsQuery(WebGLQuery *query);
|
||||
already_AddRefed<WebGLQuery> GetQuery(WebGLenum target, WebGLenum pname);
|
||||
JS::Value GetQueryObject(JSContext* cx, WebGLQuery *query, WebGLenum pname);
|
||||
|
||||
private:
|
||||
bool ValidateTargetParameter(WebGLenum target, const char* infos);
|
||||
WebGLRefPtr<WebGLQuery>& GetActiveQueryByTarget(WebGLenum target);
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Vertices Feature (WebGLContextVertices.cpp)
|
||||
public:
|
||||
void DrawArrays(GLenum mode, WebGLint first, WebGLsizei count);
|
||||
void DrawArraysInstanced(GLenum mode, WebGLint first, WebGLsizei count, WebGLsizei primcount);
|
||||
void DrawElements(WebGLenum mode, WebGLsizei count, WebGLenum type, WebGLintptr byteOffset);
|
||||
void DrawElementsInstanced(WebGLenum mode, WebGLsizei count, WebGLenum type,
|
||||
WebGLintptr byteOffset, WebGLsizei primcount);
|
||||
|
||||
void EnableVertexAttribArray(WebGLuint index);
|
||||
void DisableVertexAttribArray(WebGLuint index);
|
||||
|
||||
JS::Value GetVertexAttrib(JSContext* cx, WebGLuint index, WebGLenum pname,
|
||||
ErrorResult& rv);
|
||||
WebGLsizeiptr GetVertexAttribOffset(WebGLuint index, WebGLenum pname);
|
||||
|
||||
void VertexAttrib1f(WebGLuint index, WebGLfloat x0);
|
||||
void VertexAttrib2f(WebGLuint index, WebGLfloat x0, WebGLfloat x1);
|
||||
void VertexAttrib3f(WebGLuint index, WebGLfloat x0, WebGLfloat x1,
|
||||
@ -758,8 +787,6 @@ public:
|
||||
void VertexAttrib1fv(WebGLuint idx, const dom::Sequence<WebGLfloat>& arr) {
|
||||
VertexAttrib1fv_base(idx, arr.Length(), arr.Elements());
|
||||
}
|
||||
void VertexAttrib1fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr);
|
||||
|
||||
void VertexAttrib2fv(WebGLuint idx, const dom::Float32Array &arr) {
|
||||
VertexAttrib2fv_base(idx, arr.Length(), arr.Data());
|
||||
@ -767,8 +794,6 @@ public:
|
||||
void VertexAttrib2fv(WebGLuint idx, const dom::Sequence<WebGLfloat>& arr) {
|
||||
VertexAttrib2fv_base(idx, arr.Length(), arr.Elements());
|
||||
}
|
||||
void VertexAttrib2fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr);
|
||||
|
||||
void VertexAttrib3fv(WebGLuint idx, const dom::Float32Array &arr) {
|
||||
VertexAttrib3fv_base(idx, arr.Length(), arr.Data());
|
||||
@ -776,8 +801,6 @@ public:
|
||||
void VertexAttrib3fv(WebGLuint idx, const dom::Sequence<WebGLfloat>& arr) {
|
||||
VertexAttrib3fv_base(idx, arr.Length(), arr.Elements());
|
||||
}
|
||||
void VertexAttrib3fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr);
|
||||
|
||||
void VertexAttrib4fv(WebGLuint idx, const dom::Float32Array &arr) {
|
||||
VertexAttrib4fv_base(idx, arr.Length(), arr.Data());
|
||||
@ -785,22 +808,11 @@ public:
|
||||
void VertexAttrib4fv(WebGLuint idx, const dom::Sequence<WebGLfloat>& arr) {
|
||||
VertexAttrib4fv_base(idx, arr.Length(), arr.Elements());
|
||||
}
|
||||
void VertexAttrib4fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr);
|
||||
|
||||
void VertexAttribPointer(WebGLuint index, WebGLint size, WebGLenum type,
|
||||
WebGLboolean normalized, WebGLsizei stride,
|
||||
WebGLintptr byteOffset);
|
||||
void Viewport(WebGLint x, WebGLint y, WebGLsizei width, WebGLsizei height);
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Vertices Feature (WebGLContextVertices.cpp)
|
||||
public:
|
||||
void DrawArrays(GLenum mode, WebGLint first, WebGLsizei count);
|
||||
void DrawArraysInstanced(GLenum mode, WebGLint first, WebGLsizei count, WebGLsizei primcount);
|
||||
void DrawElements(WebGLenum mode, WebGLsizei count, WebGLenum type, WebGLintptr byteOffset);
|
||||
void DrawElementsInstanced(WebGLenum mode, WebGLsizei count, WebGLenum type,
|
||||
WebGLintptr byteOffset, WebGLsizei primcount);
|
||||
void VertexAttribDivisor(WebGLuint index, WebGLuint divisor);
|
||||
|
||||
private:
|
||||
bool DrawArrays_check(WebGLint first, WebGLsizei count, WebGLsizei primcount, const char* info);
|
||||
@ -808,6 +820,13 @@ private:
|
||||
WebGLsizei primcount, const char* info);
|
||||
void Draw_cleanup();
|
||||
|
||||
void VertexAttrib1fv_base(WebGLuint idx, uint32_t arrayLength, const WebGLfloat* ptr);
|
||||
void VertexAttrib2fv_base(WebGLuint idx, uint32_t arrayLength, const WebGLfloat* ptr);
|
||||
void VertexAttrib3fv_base(WebGLuint idx, uint32_t arrayLength, const WebGLfloat* ptr);
|
||||
void VertexAttrib4fv_base(WebGLuint idx, uint32_t arrayLength, const WebGLfloat* ptr);
|
||||
|
||||
bool ValidateBufferFetching(const char *info);
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// PROTECTED
|
||||
protected:
|
||||
@ -876,15 +895,17 @@ protected:
|
||||
int32_t mGLMaxColorAttachments;
|
||||
int32_t mGLMaxDrawBuffers;
|
||||
|
||||
// Cache the max number of elements that can be read from bound VBOs
|
||||
// (result of ValidateBuffers).
|
||||
bool mMinInUseAttribArrayLengthCached;
|
||||
uint32_t mMinInUseAttribArrayLength;
|
||||
// Cache the max number of vertices and instances that can be read from
|
||||
// bound VBOs (result of ValidateBuffers).
|
||||
bool mBufferFetchingIsVerified;
|
||||
uint32_t mMaxFetchedVertices;
|
||||
uint32_t mMaxFetchedInstances;
|
||||
|
||||
inline void InvalidateCachedMinInUseAttribArrayLength()
|
||||
inline void InvalidateBufferFetching()
|
||||
{
|
||||
mMinInUseAttribArrayLengthCached = false;
|
||||
mMinInUseAttribArrayLength = 0;
|
||||
mBufferFetchingIsVerified = false;
|
||||
mMaxFetchedVertices = 0;
|
||||
mMaxFetchedInstances = 0;
|
||||
}
|
||||
|
||||
// Represents current status, or state, of the context. That is, is it lost
|
||||
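The renamed members above implement a small memoization: the buffer-fetching limits are computed once per draw state, reused by every draw call, and thrown away whenever any state that feeds the computation changes. A plain-JS sketch of the same pattern (names are illustrative, not part of the WebGL API):

function makeFetchLimitCache(computeLimits) {
  let verified = false;
  let limits = { maxVertices: 0, maxInstances: 0 };
  return {
    get() {
      if (!verified) {        // recompute only after an invalidation
        limits = computeLimits();
        verified = true;
      }
      return limits;
    },
    invalidate() {            // mirrors InvalidateBufferFetching(): called on bufferData,
      verified = false;       // vertexAttribPointer, useProgram, bindVertexArray, ...
      limits = { maxVertices: 0, maxInstances: 0 };
    },
  };
}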
@ -937,8 +958,9 @@ protected:
|
||||
|
||||
nsTArray<WebGLenum> mCompressedTextureFormats;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Validation functions (implemented in WebGLContextValidate.cpp)
|
||||
bool InitAndValidateGL();
|
||||
bool ValidateBuffers(uint32_t *maxAllowedCount, const char *info);
|
||||
bool ValidateCapabilityEnum(WebGLenum cap, const char *info);
|
||||
bool ValidateBlendEquationEnum(WebGLenum cap, const char *info);
|
||||
bool ValidateBlendFuncDstEnum(WebGLenum mode, const char *info);
|
||||
@ -1088,10 +1110,12 @@ protected:
|
||||
WebGLRefPtr<WebGLFramebuffer> mBoundFramebuffer;
|
||||
WebGLRefPtr<WebGLRenderbuffer> mBoundRenderbuffer;
|
||||
WebGLRefPtr<WebGLVertexArray> mBoundVertexArray;
|
||||
WebGLRefPtr<WebGLQuery> mActiveOcclusionQuery;
|
||||
|
||||
LinkedList<WebGLTexture> mTextures;
|
||||
LinkedList<WebGLBuffer> mBuffers;
|
||||
LinkedList<WebGLProgram> mPrograms;
|
||||
LinkedList<WebGLQuery> mQueries;
|
||||
LinkedList<WebGLShader> mShaders;
|
||||
LinkedList<WebGLRenderbuffer> mRenderbuffers;
|
||||
LinkedList<WebGLFramebuffer> mFramebuffers;
|
||||
@ -1180,6 +1204,7 @@ public:
|
||||
friend class WebGLFramebuffer;
|
||||
friend class WebGLRenderbuffer;
|
||||
friend class WebGLProgram;
|
||||
friend class WebGLQuery;
|
||||
friend class WebGLBuffer;
|
||||
friend class WebGLShader;
|
||||
friend class WebGLUniformLocation;
|
||||
|
331
content/canvas/src/WebGLContextAsyncQueries.cpp
Normal file
@@ -0,0 +1,331 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "WebGLContext.h"
|
||||
#include "WebGLQuery.h"
|
||||
|
||||
using namespace mozilla;
|
||||
|
||||
/*
|
||||
* We fake ANY_SAMPLES_PASSED and ANY_SAMPLES_PASSED_CONSERVATIVE with
|
||||
* SAMPLES_PASSED on desktop.
|
||||
*
|
||||
* OpenGL ES 3.0 spec 4.1.6
|
||||
* If the target of the query is ANY_SAMPLES_PASSED_CONSERVATIVE, an implementation
|
||||
* may choose to use a less precise version of the test which can additionally set
|
||||
* the samples-boolean state to TRUE in some other implementation-dependent cases.
|
||||
*/
|
||||
|
||||
static const char*
|
||||
GetQueryTargetEnumString(WebGLenum target)
|
||||
{
|
||||
switch (target)
|
||||
{
|
||||
case LOCAL_GL_ANY_SAMPLES_PASSED:
|
||||
return "ANY_SAMPLES_PASSED";
|
||||
case LOCAL_GL_ANY_SAMPLES_PASSED_CONSERVATIVE:
|
||||
return "ANY_SAMPLES_PASSED_CONSERVATIVE";
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
MOZ_ASSERT(false, "Unknown query `target`.");
|
||||
return "UNKNOWN_QUERY_TARGET";
|
||||
}
|
||||
|
||||
already_AddRefed<WebGLQuery>
|
||||
WebGLContext::CreateQuery()
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return nullptr;
|
||||
|
||||
if (mActiveOcclusionQuery && !gl->IsGLES2()) {
|
||||
/* http://www.opengl.org/registry/specs/ARB/occlusion_query.txt
|
||||
* Calling either GenQueriesARB or DeleteQueriesARB while any query of
|
||||
* any target is active causes an INVALID_OPERATION error to be
|
||||
* generated.
|
||||
*/
|
||||
GenerateWarning("createQuery: the WebGL 2 prototype might generate INVALID_OPERATION "
                "when creating a query object while one other is active.");
|
||||
/*
|
||||
* We *need* to lock webgl2 to GL>=3.0 on desktop, but we don't have a good
|
||||
* mechanism to do this yet. See bug 898404.
|
||||
*/
|
||||
}
|
||||
|
||||
nsRefPtr<WebGLQuery> globj = new WebGLQuery(this);
|
||||
|
||||
return globj.forget();
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::DeleteQuery(WebGLQuery *query)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!query)
|
||||
return;
|
||||
|
||||
if (query->IsDeleted())
|
||||
return;
|
||||
|
||||
if (query->IsActive()) {
|
||||
EndQuery(query->mType);
|
||||
}
|
||||
|
||||
if (mActiveOcclusionQuery && !gl->IsGLES2()) {
|
||||
/* http://www.opengl.org/registry/specs/ARB/occlusion_query.txt
|
||||
* Calling either GenQueriesARB or DeleteQueriesARB while any query of
|
||||
* any target is active causes an INVALID_OPERATION error to be
|
||||
* generated.
|
||||
*/
|
||||
GenerateWarning("deleteQuery: the WebGL 2 prototype might generate INVALID_OPERATION "
                "when deleting a query object while one other is active.");
|
||||
}
|
||||
|
||||
query->RequestDelete();
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::BeginQuery(WebGLenum target, WebGLQuery *query)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!ValidateTargetParameter(target, "beginQuery")) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!query) {
|
||||
/* SPECS BeginQuery.1
|
||||
* http://www.khronos.org/registry/gles/extensions/EXT/EXT_occlusion_query_boolean.txt
|
||||
* BeginQueryEXT sets the active query object name for the query type given
|
||||
* by <target> to <id>. If BeginQueryEXT is called with an <id> of zero, if
|
||||
* the active query object name for <target> is non-zero (for the targets
|
||||
* ANY_SAMPLES_PASSED_EXT and ANY_SAMPLES_PASSED_CONSERVATIVE_EXT, if the
|
||||
* active query for either target is non-zero), if <id> is the name of an
|
||||
* existing query object whose type does not match <target>, or if <id> is the
|
||||
* active query object name for any query type, the error INVALID_OPERATION is
|
||||
* generated.
|
||||
*/
|
||||
ErrorInvalidOperation("beginQuery: query should not be null");
|
||||
return;
|
||||
}
|
||||
|
||||
if (query->IsDeleted()) {
|
||||
/* http://www.khronos.org/registry/gles/extensions/EXT/EXT_occlusion_query_boolean.txt
|
||||
* BeginQueryEXT fails and an INVALID_OPERATION error is generated if <id>
|
||||
* is not a name returned from a previous call to GenQueriesEXT, or if such
|
||||
* a name has since been deleted with DeleteQueriesEXT.
|
||||
*/
|
||||
ErrorInvalidOperation("beginQuery: query has been deleted");
|
||||
return;
|
||||
}
|
||||
|
||||
if (query->HasEverBeenActive() &&
|
||||
query->mType != target)
|
||||
{
|
||||
/*
|
||||
* See SPECS BeginQuery.1
|
||||
*/
|
||||
ErrorInvalidOperation("beginQuery: target doesn't match with the query type");
|
||||
return;
|
||||
}
|
||||
|
||||
if (GetActiveQueryByTarget(target)) {
|
||||
/*
|
||||
* See SPECS BeginQuery.1
|
||||
*/
|
||||
ErrorInvalidOperation("beginQuery: another query is already active");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!query->HasEverBeenActive()) {
|
||||
query->mType = target;
|
||||
}
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (!gl->IsGLES2()) {
|
||||
gl->fBeginQuery(LOCAL_GL_SAMPLES_PASSED, query->mGLName);
|
||||
} else {
|
||||
gl->fBeginQuery(target, query->mGLName);
|
||||
}
|
||||
|
||||
GetActiveQueryByTarget(target) = query;
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::EndQuery(WebGLenum target)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!ValidateTargetParameter(target, "endQuery")) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!GetActiveQueryByTarget(target) ||
|
||||
target != GetActiveQueryByTarget(target)->mType)
|
||||
{
|
||||
/* http://www.khronos.org/registry/gles/extensions/EXT/EXT_occlusion_query_boolean.txt
|
||||
* marks the end of the sequence of commands to be tracked for the query type
|
||||
* given by <target>. The active query object for <target> is updated to
|
||||
* indicate that query results are not available, and the active query object
|
||||
* name for <target> is reset to zero. When the commands issued prior to
|
||||
* EndQueryEXT have completed and a final query result is available, the
|
||||
* query object active when EndQueryEXT is called is updated by the GL. The
|
||||
* query object is updated to indicate that the query results are available
|
||||
* and to contain the query result. If the active query object name for
|
||||
* <target> is zero when EndQueryEXT is called, the error INVALID_OPERATION
|
||||
* is generated.
|
||||
*/
|
||||
ErrorInvalidOperation("endQuery: There is no active query of type %s.",
|
||||
GetQueryTargetEnumString(target));
|
||||
return;
|
||||
}
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (!gl->IsGLES2()) {
|
||||
gl->fEndQuery(LOCAL_GL_SAMPLES_PASSED);
|
||||
} else {
|
||||
gl->fEndQuery(target);
|
||||
}
|
||||
|
||||
GetActiveQueryByTarget(target) = nullptr;
|
||||
}
|
||||
|
||||
bool
|
||||
WebGLContext::IsQuery(WebGLQuery *query)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return false;
|
||||
|
||||
if (!query)
|
||||
return false;
|
||||
|
||||
return ValidateObjectAllowDeleted("isQuery", query) &&
|
||||
!query->IsDeleted() &&
|
||||
query->HasEverBeenActive();
|
||||
}
|
||||
|
||||
already_AddRefed<WebGLQuery>
|
||||
WebGLContext::GetQuery(WebGLenum target, WebGLenum pname)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return nullptr;
|
||||
|
||||
if (!ValidateTargetParameter(target, "getQuery")) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (pname != LOCAL_GL_CURRENT_QUERY) {
|
||||
/* OpenGL ES 3.0 spec 6.1.7
|
||||
* pname must be CURRENT_QUERY.
|
||||
*/
|
||||
ErrorInvalidEnum("getQuery: pname must be CURRENT_QUERY");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
nsRefPtr<WebGLQuery> tmp = GetActiveQueryByTarget(target).get();
|
||||
return tmp.forget();
|
||||
}
|
||||
|
||||
JS::Value
|
||||
WebGLContext::GetQueryObject(JSContext* cx, WebGLQuery *query, WebGLenum pname)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return JS::NullValue();
|
||||
|
||||
if (!query) {
|
||||
/* OpenGL ES 3.0 spec 6.1.7 (spec getQueryObject 1)
|
||||
* If id is not the name of a query object, or if the query object named by id is
|
||||
* currently active, then an INVALID_OPERATION error is generated. pname must be
|
||||
* QUERY_RESULT or QUERY_RESULT_AVAILABLE.
|
||||
*/
|
||||
ErrorInvalidOperation("getQueryObject: query should not be null");
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
if (query->IsDeleted()) {
|
||||
// See (spec getQueryObject 1)
|
||||
ErrorInvalidOperation("getQueryObject: query has been deleted");
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
if (query->IsActive()) {
|
||||
// See (spec getQueryObject 1)
|
||||
ErrorInvalidOperation("getQueryObject: query is active");
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
if (!query->HasEverBeenActive()) {
|
||||
/* See (spec getQueryObject 1)
|
||||
* If this instance of WebGLQuery has never been active before, that means that
|
||||
* query->mGLName is not a query object yet.
|
||||
*/
|
||||
ErrorInvalidOperation("getQueryObject: query has never been active");
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
switch (pname)
|
||||
{
|
||||
case LOCAL_GL_QUERY_RESULT_AVAILABLE:
|
||||
{
|
||||
GLuint returned = 0;
|
||||
|
||||
MakeContextCurrent();
|
||||
gl->fGetQueryObjectuiv(query->mGLName, LOCAL_GL_QUERY_RESULT_AVAILABLE, &returned);
|
||||
|
||||
return JS::BooleanValue(returned != 0);
|
||||
}
|
||||
|
||||
case LOCAL_GL_QUERY_RESULT:
|
||||
{
|
||||
GLuint returned = 0;
|
||||
|
||||
MakeContextCurrent();
|
||||
gl->fGetQueryObjectuiv(query->mGLName, LOCAL_GL_QUERY_RESULT, &returned);
|
||||
|
||||
/*
|
||||
* The test (returned != 0) is important because ARB_occlusion_query on desktop drivers
* returns the number of samples drawn, whereas the OpenGL ES extension
* ARB_occlusion_query_boolean returns only a boolean indicating whether any sample has been drawn.
|
||||
*/
|
||||
return JS::BooleanValue(returned != 0);
|
||||
}
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
ErrorInvalidEnum("getQueryObject: pname must be QUERY_RESULT{_AVAILABLE}");
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
bool
|
||||
WebGLContext::ValidateTargetParameter(WebGLenum target, const char* infos)
|
||||
{
|
||||
if (target != LOCAL_GL_ANY_SAMPLES_PASSED &&
|
||||
target != LOCAL_GL_ANY_SAMPLES_PASSED_CONSERVATIVE)
|
||||
{
|
||||
ErrorInvalidEnum("%s: target must be ANY_SAMPLES_PASSED{_CONSERVATIVE}", infos);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
WebGLRefPtr<WebGLQuery>&
|
||||
WebGLContext::GetActiveQueryByTarget(WebGLenum target)
|
||||
{
|
||||
MOZ_ASSERT(ValidateTargetParameter(target, "private WebGLContext::GetActiveQueryByTarget"));
|
||||
|
||||
return mActiveOcclusionQuery;
|
||||
}
|
||||
|
||||
|
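For reference, a hedged sketch of how the query entry points defined in this new file are meant to be driven from script on the WebGL 2 prototype; gl is an already-created prototype context, and the JS-visible names are assumed to mirror the C++ methods above (they may differ from what eventually ships). Query results are asynchronous, so availability is polled:

var query = gl.createQuery();
gl.beginQuery(gl.ANY_SAMPLES_PASSED, query);
gl.drawArrays(gl.TRIANGLES, 0, 3);
gl.endQuery(gl.ANY_SAMPLES_PASSED);

(function poll() {
  if (gl.getQueryObject(query, gl.QUERY_RESULT_AVAILABLE)) {
    // On desktop this is backed by SAMPLES_PASSED, hence the (result != 0) conversion above.
    var anySamplesPassed = gl.getQueryObject(query, gl.QUERY_RESULT);
    console.log("any samples passed:", anySamplesPassed);
  } else {
    requestAnimationFrame(poll);   // result not ready yet; keep polling
  }
})();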
@ -22,6 +22,10 @@ WebGLContext::Clear(WebGLbitfield mask)
|
||||
if (mask != m)
|
||||
return ErrorInvalidValue("clear: invalid mask bits");
|
||||
|
||||
if (mask == 0) {
|
||||
GenerateWarning("Calling gl.clear(0) has no effect.");
|
||||
}
|
||||
|
||||
if (mBoundFramebuffer) {
|
||||
if (!mBoundFramebuffer->CheckAndInitializeRenderbuffers())
|
||||
return ErrorInvalidFramebufferOperation("clear: incomplete framebuffer");
|
||||
|
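With the check added above, clearing with an empty mask is now reported instead of silently doing nothing (gl being any WebGL context):

gl.clear(0);                                          // warns: "Calling gl.clear(0) has no effect."
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);  // normal clear, no warning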
@ -383,7 +383,7 @@ WebGLContext::BufferData(WebGLenum target, WebGLsizeiptr size,
|
||||
return ErrorOutOfMemory("bufferData: out of memory");
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
InvalidateBufferFetching();
|
||||
|
||||
GLenum error = CheckedBufferData(target, size, zeroBuffer, usage);
|
||||
free(zeroBuffer);
|
||||
@ -431,7 +431,7 @@ WebGLContext::BufferData(WebGLenum target,
|
||||
return ErrorInvalidOperation("bufferData: no buffer bound!");
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
InvalidateBufferFetching();
|
||||
|
||||
GLenum error = CheckedBufferData(target, data.Length(), data.Data(), usage);
|
||||
|
||||
@ -469,7 +469,7 @@ WebGLContext::BufferData(WebGLenum target, const ArrayBufferView& data,
|
||||
if (!boundBuffer)
|
||||
return ErrorInvalidOperation("bufferData: no buffer bound!");
|
||||
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
InvalidateBufferFetching();
|
||||
MakeContextCurrent();
|
||||
|
||||
GLenum error = CheckedBufferData(target, data.Length(), data.Data(), usage);
|
||||
@ -1114,24 +1114,6 @@ WebGLContext::DepthRange(WebGLfloat zNear, WebGLfloat zFar)
|
||||
gl->fDepthRange(zNear, zFar);
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::DisableVertexAttribArray(WebGLuint index)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!ValidateAttribIndex(index, "disableVertexAttribArray"))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
|
||||
if (index || gl->IsGLES2())
|
||||
gl->fDisableVertexAttribArray(index);
|
||||
|
||||
mBoundVertexArray->mAttribBuffers[index].enabled = false;
|
||||
}
|
||||
|
||||
int
|
||||
WebGLContext::WhatDoesVertexAttrib0Need()
|
||||
{
|
||||
@ -1394,22 +1376,6 @@ WebGLContext::Disable(WebGLenum cap)
|
||||
gl->fDisable(cap);
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::EnableVertexAttribArray(WebGLuint index)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!ValidateAttribIndex(index, "enableVertexAttribArray"))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
|
||||
gl->fEnableVertexAttribArray(index);
|
||||
mBoundVertexArray->mAttribBuffers[index].enabled = true;
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::FramebufferRenderbuffer(WebGLenum target, WebGLenum attachment, WebGLenum rbtarget, WebGLRenderbuffer *wrb)
|
||||
{
|
||||
@ -2671,99 +2637,6 @@ WebGLContext::GetUniformLocation(WebGLProgram *prog, const nsAString& name)
|
||||
return loc.forget();
|
||||
}
|
||||
|
||||
JS::Value
|
||||
WebGLContext::GetVertexAttrib(JSContext* cx, WebGLuint index, WebGLenum pname,
|
||||
ErrorResult& rv)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return JS::NullValue();
|
||||
|
||||
if (!mBoundVertexArray->EnsureAttribIndex(index, "getVertexAttrib"))
|
||||
return JS::NullValue();
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
switch (pname) {
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING:
|
||||
{
|
||||
return WebGLObjectAsJSValue(cx, mBoundVertexArray->mAttribBuffers[index].buf.get(), rv);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_STRIDE:
|
||||
return JS::Int32Value(mBoundVertexArray->mAttribBuffers[index].stride);
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_SIZE:
|
||||
{
|
||||
if (!ValidateAttribIndex(index, "enableVertexAttribArray"))
|
||||
return JS::NullValue();
|
||||
|
||||
if (!mBoundVertexArray->mAttribBuffers[index].enabled)
|
||||
return JS::Int32Value(4);
|
||||
|
||||
// Don't break; fall through.
|
||||
}
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_TYPE:
|
||||
{
|
||||
GLint i = 0;
|
||||
gl->fGetVertexAttribiv(index, pname, &i);
|
||||
if (pname == LOCAL_GL_VERTEX_ATTRIB_ARRAY_SIZE)
|
||||
return JS::Int32Value(i);
|
||||
MOZ_ASSERT(pname == LOCAL_GL_VERTEX_ATTRIB_ARRAY_TYPE);
|
||||
return JS::NumberValue(uint32_t(i));
|
||||
}
|
||||
|
||||
case LOCAL_GL_CURRENT_VERTEX_ATTRIB:
|
||||
{
|
||||
WebGLfloat vec[4] = {0, 0, 0, 1};
|
||||
if (index) {
|
||||
gl->fGetVertexAttribfv(index, LOCAL_GL_CURRENT_VERTEX_ATTRIB, &vec[0]);
|
||||
} else {
|
||||
vec[0] = mVertexAttrib0Vector[0];
|
||||
vec[1] = mVertexAttrib0Vector[1];
|
||||
vec[2] = mVertexAttrib0Vector[2];
|
||||
vec[3] = mVertexAttrib0Vector[3];
|
||||
}
|
||||
JSObject* obj = Float32Array::Create(cx, this, 4, vec);
|
||||
if (!obj) {
|
||||
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
return JS::ObjectOrNullValue(obj);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_ENABLED:
|
||||
{
|
||||
return JS::BooleanValue(mBoundVertexArray->mAttribBuffers[index].enabled);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_NORMALIZED:
|
||||
{
|
||||
return JS::BooleanValue(mBoundVertexArray->mAttribBuffers[index].normalized);
|
||||
}
|
||||
|
||||
default:
|
||||
ErrorInvalidEnumInfo("getVertexAttrib: parameter", pname);
|
||||
}
|
||||
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
WebGLsizeiptr
|
||||
WebGLContext::GetVertexAttribOffset(WebGLuint index, WebGLenum pname)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return 0;
|
||||
|
||||
if (!ValidateAttribIndex(index, "getVertexAttribOffset"))
|
||||
return 0;
|
||||
|
||||
if (pname != LOCAL_GL_VERTEX_ATTRIB_ARRAY_POINTER) {
|
||||
ErrorInvalidEnum("getVertexAttribOffset: bad parameter");
|
||||
return 0;
|
||||
}
|
||||
|
||||
return mBoundVertexArray->mAttribBuffers[index].byteOffset;
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::Hint(WebGLenum target, WebGLenum mode)
|
||||
{
|
||||
@ -2873,7 +2746,7 @@ WebGLContext::LinkProgram(WebGLProgram *program)
|
||||
if (!ValidateObject("linkProgram", program))
|
||||
return;
|
||||
|
||||
InvalidateCachedMinInUseAttribArrayLength(); // we do it early in this function
|
||||
InvalidateBufferFetching(); // we do it early in this function
|
||||
// as some of the validation below changes program state
|
||||
|
||||
GLuint progname = program->GLName();
|
||||
@ -3787,168 +3660,6 @@ WebGLContext::UniformMatrix4fv_base(WebGLUniformLocation* location_object,
|
||||
gl->fUniformMatrix4fv(location, numElementsToUpload, false, data);
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib1f(WebGLuint index, WebGLfloat x0)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib1f(index, x0);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = 0;
|
||||
mVertexAttrib0Vector[2] = 0;
|
||||
mVertexAttrib0Vector[3] = 1;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib1f(index, x0);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib2f(WebGLuint index, WebGLfloat x0, WebGLfloat x1)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib2f(index, x0, x1);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = x1;
|
||||
mVertexAttrib0Vector[2] = 0;
|
||||
mVertexAttrib0Vector[3] = 1;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib2f(index, x0, x1);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib3f(WebGLuint index, WebGLfloat x0, WebGLfloat x1, WebGLfloat x2)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib3f(index, x0, x1, x2);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = x1;
|
||||
mVertexAttrib0Vector[2] = x2;
|
||||
mVertexAttrib0Vector[3] = 1;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib3f(index, x0, x1, x2);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib4f(WebGLuint index, WebGLfloat x0, WebGLfloat x1,
|
||||
WebGLfloat x2, WebGLfloat x3)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib4f(index, x0, x1, x2, x3);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = x1;
|
||||
mVertexAttrib0Vector[2] = x2;
|
||||
mVertexAttrib0Vector[3] = x3;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib4f(index, x0, x1, x2, x3);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib1fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib1fv", 1, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib1fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = WebGLfloat(0);
|
||||
mVertexAttrib0Vector[2] = WebGLfloat(0);
|
||||
mVertexAttrib0Vector[3] = WebGLfloat(1);
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib1fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib2fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib2fv", 2, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib2fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = ptr[1];
|
||||
mVertexAttrib0Vector[2] = WebGLfloat(0);
|
||||
mVertexAttrib0Vector[3] = WebGLfloat(1);
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib2fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib3fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib3fv", 3, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib3fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = ptr[1];
|
||||
mVertexAttrib0Vector[2] = ptr[2];
|
||||
mVertexAttrib0Vector[3] = WebGLfloat(1);
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib3fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib4fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib4fv", 4, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib4fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = ptr[1];
|
||||
mVertexAttrib0Vector[2] = ptr[2];
|
||||
mVertexAttrib0Vector[3] = ptr[3];
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib4fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::UseProgram(WebGLProgram *prog)
|
||||
{
|
||||
@ -3959,7 +3670,8 @@ WebGLContext::UseProgram(WebGLProgram *prog)
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
|
||||
InvalidateBufferFetching();
|
||||
|
||||
WebGLuint progname = prog ? prog->GLName() : 0;
|
||||
|
||||
@ -4604,85 +4316,6 @@ WebGLContext::ShaderSource(WebGLShader *shader, const nsAString& source)
|
||||
shader->SetNeedsTranslation();
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttribPointer(WebGLuint index, WebGLint size, WebGLenum type,
|
||||
WebGLboolean normalized, WebGLsizei stride,
|
||||
WebGLintptr byteOffset)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (mBoundArrayBuffer == nullptr)
|
||||
return ErrorInvalidOperation("vertexAttribPointer: must have valid GL_ARRAY_BUFFER binding");
|
||||
|
||||
WebGLsizei requiredAlignment = 1;
|
||||
switch (type) {
|
||||
case LOCAL_GL_BYTE:
|
||||
case LOCAL_GL_UNSIGNED_BYTE:
|
||||
requiredAlignment = 1;
|
||||
break;
|
||||
case LOCAL_GL_SHORT:
|
||||
case LOCAL_GL_UNSIGNED_SHORT:
|
||||
requiredAlignment = 2;
|
||||
break;
|
||||
// XXX case LOCAL_GL_FIXED:
|
||||
case LOCAL_GL_FLOAT:
|
||||
requiredAlignment = 4;
|
||||
break;
|
||||
default:
|
||||
return ErrorInvalidEnumInfo("vertexAttribPointer: type", type);
|
||||
}
|
||||
|
||||
// requiredAlignment should always be a power of two.
|
||||
WebGLsizei requiredAlignmentMask = requiredAlignment - 1;
|
||||
|
||||
if ( !mBoundVertexArray->EnsureAttribIndex(index, "vertexAttribPointer") ) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (size < 1 || size > 4)
|
||||
return ErrorInvalidValue("vertexAttribPointer: invalid element size");
|
||||
|
||||
if (stride < 0 || stride > 255) // see WebGL spec section 6.6 "Vertex Attribute Data Stride"
|
||||
return ErrorInvalidValue("vertexAttribPointer: negative or too large stride");
|
||||
|
||||
if (byteOffset < 0)
|
||||
return ErrorInvalidValue("vertexAttribPointer: negative offset");
|
||||
|
||||
if (stride & requiredAlignmentMask) {
|
||||
return ErrorInvalidOperation("vertexAttribPointer: stride doesn't satisfy the alignment "
|
||||
"requirement of given type");
|
||||
}
|
||||
|
||||
if (byteOffset & requiredAlignmentMask) {
|
||||
return ErrorInvalidOperation("vertexAttribPointer: byteOffset doesn't satisfy the alignment "
|
||||
"requirement of given type");
|
||||
|
||||
}
|
||||
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
|
||||
/* XXX make work with bufferSubData & heterogeneous types
|
||||
if (type != mBoundArrayBuffer->GLType())
|
||||
return ErrorInvalidOperation("vertexAttribPointer: type must match bound VBO type: %d != %d", type, mBoundArrayBuffer->GLType());
|
||||
*/
|
||||
|
||||
WebGLVertexAttribData &vd = mBoundVertexArray->mAttribBuffers[index];
|
||||
|
||||
vd.buf = mBoundArrayBuffer;
|
||||
vd.stride = stride;
|
||||
vd.size = size;
|
||||
vd.byteOffset = byteOffset;
|
||||
vd.type = type;
|
||||
vd.normalized = normalized;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
gl->fVertexAttribPointer(index, size, type, normalized,
|
||||
stride,
|
||||
reinterpret_cast<void*>(byteOffset));
|
||||
}
|
||||
|
||||
GLenum WebGLContext::CheckedTexImage2D(GLenum target,
|
||||
GLint level,
|
||||
GLenum internalFormat,
|
||||
|
@ -89,87 +89,6 @@ WebGLProgram::UpdateInfo()
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
* Verify that state is consistent for drawing, and compute max number of elements (maxAllowedCount)
|
||||
* that will be legal to be read from bound VBOs.
|
||||
*/
|
||||
|
||||
bool
|
||||
WebGLContext::ValidateBuffers(uint32_t *maxAllowedCount, const char *info)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
GLint currentProgram = 0;
|
||||
MakeContextCurrent();
|
||||
gl->fGetIntegerv(LOCAL_GL_CURRENT_PROGRAM, ¤tProgram);
|
||||
NS_ASSERTION(GLuint(currentProgram) == mCurrentProgram->GLName(),
|
||||
"WebGL: current program doesn't agree with GL state");
|
||||
if (GLuint(currentProgram) != mCurrentProgram->GLName())
|
||||
return false;
|
||||
#endif
|
||||
|
||||
if (mMinInUseAttribArrayLengthCached) {
|
||||
*maxAllowedCount = mMinInUseAttribArrayLength;
|
||||
return true;
|
||||
}
|
||||
|
||||
uint32_t maxAllowed = UINT32_MAX;
|
||||
uint32_t attribs = mBoundVertexArray->mAttribBuffers.Length();
|
||||
for (uint32_t i = 0; i < attribs; ++i) {
|
||||
const WebGLVertexAttribData& vd = mBoundVertexArray->mAttribBuffers[i];
|
||||
|
||||
// If the attrib array isn't enabled, there's nothing to check;
|
||||
// it's a static value.
|
||||
if (!vd.enabled)
|
||||
continue;
|
||||
|
||||
if (vd.buf == nullptr) {
|
||||
ErrorInvalidOperation("%s: no VBO bound to enabled vertex attrib index %d!", info, i);
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the attrib is not in use, then we don't have to validate
|
||||
// it, just need to make sure that the binding is non-null.
|
||||
if (!mCurrentProgram->IsAttribInUse(i))
|
||||
continue;
|
||||
|
||||
// the base offset
|
||||
CheckedUint32 checked_byteLength
|
||||
= CheckedUint32(vd.buf->ByteLength()) - vd.byteOffset;
|
||||
CheckedUint32 checked_sizeOfLastElement
|
||||
= CheckedUint32(vd.componentSize()) * vd.size;
|
||||
|
||||
if (!checked_byteLength.isValid() ||
|
||||
!checked_sizeOfLastElement.isValid())
|
||||
{
|
||||
ErrorInvalidOperation("%s: integer overflow occured while checking vertex attrib %d", info, i);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (checked_byteLength.value() < checked_sizeOfLastElement.value()) {
|
||||
maxAllowed = 0;
|
||||
break;
|
||||
} else {
|
||||
CheckedUint32 checked_maxAllowedCount
|
||||
= ((checked_byteLength - checked_sizeOfLastElement) / vd.actualStride()) + 1;
|
||||
|
||||
if (!checked_maxAllowedCount.isValid()) {
|
||||
ErrorInvalidOperation("%s: integer overflow occured while checking vertex attrib %d", info, i);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (maxAllowed > checked_maxAllowedCount.value())
|
||||
maxAllowed = checked_maxAllowedCount.value();
|
||||
}
|
||||
}
|
||||
|
||||
*maxAllowedCount = maxAllowed;
|
||||
|
||||
mMinInUseAttribArrayLengthCached = true;
|
||||
mMinInUseAttribArrayLength = *maxAllowedCount;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool WebGLContext::ValidateCapabilityEnum(WebGLenum cap, const char *info)
|
||||
{
|
||||
switch (cap) {
|
||||
@ -1078,9 +997,12 @@ WebGLContext::InitAndValidateGL()
|
||||
!IsExtensionSupported(WEBGL_draw_buffers) ||
|
||||
!gl->IsExtensionSupported(gl::GLContext::EXT_gpu_shader4) ||
|
||||
!gl->IsExtensionSupported(gl::GLContext::EXT_blend_minmax) ||
|
||||
!gl->IsExtensionSupported(gl::GLContext::XXX_draw_instanced)
|
||||
!gl->IsExtensionSupported(gl::GLContext::XXX_draw_instanced) ||
|
||||
!gl->IsExtensionSupported(gl::GLContext::XXX_instanced_arrays) ||
|
||||
(gl->IsGLES2() && !gl->IsExtensionSupported(gl::GLContext::EXT_occlusion_query_boolean))
|
||||
))
|
||||
{
|
||||
// Todo: Bug 898404: Only allow WebGL2 on GL>=3.0 on desktop GL.
|
||||
return false;
|
||||
}
|
||||
|
||||
|
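The prototype context gated by these extension checks is not exposed by default. A hypothetical feature-detection snippet; the "experimental-webgl2" context id and the pref-gated behaviour are assumptions about the prototype wiring, not a shipped API:

var canvas = document.createElement("canvas");
var gl = canvas.getContext("experimental-webgl2");   // assumed prototype context id
if (!gl) {
  console.log("WebGL 2 prototype unavailable; falling back to WebGL 1");
  gl = canvas.getContext("experimental-webgl") || canvas.getContext("webgl");
}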
@ -30,7 +30,7 @@ WebGLContext::BindVertexArray(WebGLVertexArray *array)
|
||||
return;
|
||||
}
|
||||
|
||||
InvalidateCachedMinInUseAttribArrayLength();
|
||||
InvalidateBufferFetching();
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
|
@ -10,12 +10,417 @@
|
||||
#include "WebGLTexture.h"
|
||||
#include "WebGLRenderbuffer.h"
|
||||
#include "WebGLFramebuffer.h"
|
||||
#include "WebGLUniformInfo.h"
|
||||
#include "WebGLShader.h"
|
||||
#include "WebGLProgram.h"
|
||||
|
||||
using namespace mozilla;
|
||||
using namespace dom;
|
||||
|
||||
// For a Tegra workaround.
|
||||
static const int MAX_DRAW_CALLS_SINCE_FLUSH = 100;
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib1f(WebGLuint index, WebGLfloat x0)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib1f(index, x0);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = 0;
|
||||
mVertexAttrib0Vector[2] = 0;
|
||||
mVertexAttrib0Vector[3] = 1;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib1f(index, x0);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib2f(WebGLuint index, WebGLfloat x0, WebGLfloat x1)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib2f(index, x0, x1);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = x1;
|
||||
mVertexAttrib0Vector[2] = 0;
|
||||
mVertexAttrib0Vector[3] = 1;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib2f(index, x0, x1);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib3f(WebGLuint index, WebGLfloat x0, WebGLfloat x1, WebGLfloat x2)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib3f(index, x0, x1, x2);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = x1;
|
||||
mVertexAttrib0Vector[2] = x2;
|
||||
mVertexAttrib0Vector[3] = 1;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib3f(index, x0, x1, x2);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib4f(WebGLuint index, WebGLfloat x0, WebGLfloat x1,
|
||||
WebGLfloat x2, WebGLfloat x3)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (index) {
|
||||
gl->fVertexAttrib4f(index, x0, x1, x2, x3);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = x0;
|
||||
mVertexAttrib0Vector[1] = x1;
|
||||
mVertexAttrib0Vector[2] = x2;
|
||||
mVertexAttrib0Vector[3] = x3;
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib4f(index, x0, x1, x2, x3);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib1fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib1fv", 1, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib1fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = WebGLfloat(0);
|
||||
mVertexAttrib0Vector[2] = WebGLfloat(0);
|
||||
mVertexAttrib0Vector[3] = WebGLfloat(1);
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib1fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib2fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib2fv", 2, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib2fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = ptr[1];
|
||||
mVertexAttrib0Vector[2] = WebGLfloat(0);
|
||||
mVertexAttrib0Vector[3] = WebGLfloat(1);
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib2fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib3fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib3fv", 3, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib3fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = ptr[1];
|
||||
mVertexAttrib0Vector[2] = ptr[2];
|
||||
mVertexAttrib0Vector[3] = WebGLfloat(1);
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib3fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttrib4fv_base(WebGLuint idx, uint32_t arrayLength,
|
||||
const WebGLfloat* ptr)
|
||||
{
|
||||
if (!ValidateAttribArraySetter("VertexAttrib4fv", 4, arrayLength))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
if (idx) {
|
||||
gl->fVertexAttrib4fv(idx, ptr);
|
||||
} else {
|
||||
mVertexAttrib0Vector[0] = ptr[0];
|
||||
mVertexAttrib0Vector[1] = ptr[1];
|
||||
mVertexAttrib0Vector[2] = ptr[2];
|
||||
mVertexAttrib0Vector[3] = ptr[3];
|
||||
if (gl->IsGLES2())
|
||||
gl->fVertexAttrib4fv(idx, ptr);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::EnableVertexAttribArray(WebGLuint index)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!ValidateAttribIndex(index, "enableVertexAttribArray"))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateBufferFetching();
|
||||
|
||||
gl->fEnableVertexAttribArray(index);
|
||||
mBoundVertexArray->mAttribBuffers[index].enabled = true;
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::DisableVertexAttribArray(WebGLuint index)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (!ValidateAttribIndex(index, "disableVertexAttribArray"))
|
||||
return;
|
||||
|
||||
MakeContextCurrent();
|
||||
InvalidateBufferFetching();
|
||||
|
||||
if (index || gl->IsGLES2())
|
||||
gl->fDisableVertexAttribArray(index);
|
||||
|
||||
mBoundVertexArray->mAttribBuffers[index].enabled = false;
|
||||
}
|
||||
|
||||
|
||||
JS::Value
|
||||
WebGLContext::GetVertexAttrib(JSContext* cx, WebGLuint index, WebGLenum pname,
|
||||
ErrorResult& rv)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return JS::NullValue();
|
||||
|
||||
if (!mBoundVertexArray->EnsureAttribIndex(index, "getVertexAttrib"))
|
||||
return JS::NullValue();
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
switch (pname) {
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING:
|
||||
{
|
||||
return WebGLObjectAsJSValue(cx, mBoundVertexArray->mAttribBuffers[index].buf.get(), rv);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_STRIDE:
|
||||
{
|
||||
return JS::Int32Value(mBoundVertexArray->mAttribBuffers[index].stride);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_SIZE:
|
||||
{
|
||||
if (!ValidateAttribIndex(index, "getVertexAttrib"))
|
||||
return JS::NullValue();
|
||||
|
||||
if (!mBoundVertexArray->mAttribBuffers[index].enabled)
|
||||
return JS::Int32Value(4);
|
||||
|
||||
// Don't break; fall through.
|
||||
}
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_TYPE:
|
||||
{
|
||||
GLint i = 0;
|
||||
gl->fGetVertexAttribiv(index, pname, &i);
|
||||
if (pname == LOCAL_GL_VERTEX_ATTRIB_ARRAY_SIZE)
|
||||
return JS::Int32Value(i);
|
||||
MOZ_ASSERT(pname == LOCAL_GL_VERTEX_ATTRIB_ARRAY_TYPE);
|
||||
return JS::NumberValue(uint32_t(i));
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_DIVISOR:
|
||||
{
|
||||
if (IsWebGL2())
|
||||
{
|
||||
return JS::Int32Value(mBoundVertexArray->mAttribBuffers[index].divisor);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case LOCAL_GL_CURRENT_VERTEX_ATTRIB:
|
||||
{
|
||||
WebGLfloat vec[4] = {0, 0, 0, 1};
|
||||
if (index) {
|
||||
gl->fGetVertexAttribfv(index, LOCAL_GL_CURRENT_VERTEX_ATTRIB, &vec[0]);
|
||||
} else {
|
||||
vec[0] = mVertexAttrib0Vector[0];
|
||||
vec[1] = mVertexAttrib0Vector[1];
|
||||
vec[2] = mVertexAttrib0Vector[2];
|
||||
vec[3] = mVertexAttrib0Vector[3];
|
||||
}
|
||||
JSObject* obj = Float32Array::Create(cx, this, 4, vec);
|
||||
if (!obj) {
|
||||
rv.Throw(NS_ERROR_OUT_OF_MEMORY);
|
||||
}
|
||||
return JS::ObjectOrNullValue(obj);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_ENABLED:
|
||||
{
|
||||
return JS::BooleanValue(mBoundVertexArray->mAttribBuffers[index].enabled);
|
||||
}
|
||||
|
||||
case LOCAL_GL_VERTEX_ATTRIB_ARRAY_NORMALIZED:
|
||||
{
|
||||
return JS::BooleanValue(mBoundVertexArray->mAttribBuffers[index].normalized);
|
||||
}
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
ErrorInvalidEnumInfo("getVertexAttrib: parameter", pname);
|
||||
|
||||
return JS::NullValue();
|
||||
}
|
||||
|
||||
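From script, this entry point reads back per-attribute state; the divisor query is only answered on the WebGL 2 prototype (note the IsWebGL2() guard above). Assuming gl is such a context and attribute 1 has already been configured:

var buf     = gl.getVertexAttrib(1, gl.VERTEX_ATTRIB_ARRAY_BUFFER_BINDING);
var stride  = gl.getVertexAttrib(1, gl.VERTEX_ATTRIB_ARRAY_STRIDE);
var enabled = gl.getVertexAttrib(1, gl.VERTEX_ATTRIB_ARRAY_ENABLED);
var divisor = gl.getVertexAttrib(1, gl.VERTEX_ATTRIB_ARRAY_DIVISOR);   // WebGL 2 prototype only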
WebGLsizeiptr
|
||||
WebGLContext::GetVertexAttribOffset(WebGLuint index, WebGLenum pname)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return 0;
|
||||
|
||||
if (!ValidateAttribIndex(index, "getVertexAttribOffset"))
|
||||
return 0;
|
||||
|
||||
if (pname != LOCAL_GL_VERTEX_ATTRIB_ARRAY_POINTER) {
|
||||
ErrorInvalidEnum("getVertexAttribOffset: bad parameter");
|
||||
return 0;
|
||||
}
|
||||
|
||||
return mBoundVertexArray->mAttribBuffers[index].byteOffset;
|
||||
}
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttribPointer(WebGLuint index, WebGLint size, WebGLenum type,
|
||||
WebGLboolean normalized, WebGLsizei stride,
|
||||
WebGLintptr byteOffset)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if (mBoundArrayBuffer == nullptr)
|
||||
return ErrorInvalidOperation("vertexAttribPointer: must have valid GL_ARRAY_BUFFER binding");
|
||||
|
||||
WebGLsizei requiredAlignment = 1;
|
||||
switch (type) {
|
||||
case LOCAL_GL_BYTE:
|
||||
case LOCAL_GL_UNSIGNED_BYTE:
|
||||
requiredAlignment = 1;
|
||||
break;
|
||||
case LOCAL_GL_SHORT:
|
||||
case LOCAL_GL_UNSIGNED_SHORT:
|
||||
requiredAlignment = 2;
|
||||
break;
|
||||
// XXX case LOCAL_GL_FIXED:
|
||||
case LOCAL_GL_FLOAT:
|
||||
requiredAlignment = 4;
|
||||
break;
|
||||
default:
|
||||
return ErrorInvalidEnumInfo("vertexAttribPointer: type", type);
|
||||
}
|
||||
|
||||
// requiredAlignment should always be a power of two.
|
||||
WebGLsizei requiredAlignmentMask = requiredAlignment - 1;
|
||||
|
||||
if ( !mBoundVertexArray->EnsureAttribIndex(index, "vertexAttribPointer") ) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (size < 1 || size > 4)
|
||||
return ErrorInvalidValue("vertexAttribPointer: invalid element size");
|
||||
|
||||
if (stride < 0 || stride > 255) // see WebGL spec section 6.6 "Vertex Attribute Data Stride"
|
||||
return ErrorInvalidValue("vertexAttribPointer: negative or too large stride");
|
||||
|
||||
if (byteOffset < 0)
|
||||
return ErrorInvalidValue("vertexAttribPointer: negative offset");
|
||||
|
||||
if (stride & requiredAlignmentMask) {
|
||||
return ErrorInvalidOperation("vertexAttribPointer: stride doesn't satisfy the alignment "
|
||||
"requirement of given type");
|
||||
}
|
||||
|
||||
if (byteOffset & requiredAlignmentMask) {
|
||||
return ErrorInvalidOperation("vertexAttribPointer: byteOffset doesn't satisfy the alignment "
|
||||
"requirement of given type");
|
||||
|
||||
}
|
||||
|
||||
InvalidateBufferFetching();
|
||||
|
||||
/* XXX make work with bufferSubData & heterogeneous types
|
||||
if (type != mBoundArrayBuffer->GLType())
|
||||
return ErrorInvalidOperation("vertexAttribPointer: type must match bound VBO type: %d != %d", type, mBoundArrayBuffer->GLType());
|
||||
*/
|
||||
|
||||
WebGLVertexAttribData &vd = mBoundVertexArray->mAttribBuffers[index];
|
||||
|
||||
vd.buf = mBoundArrayBuffer;
|
||||
vd.stride = stride;
|
||||
vd.size = size;
|
||||
vd.byteOffset = byteOffset;
|
||||
vd.type = type;
|
||||
vd.normalized = normalized;
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
gl->fVertexAttribPointer(index, size, type, normalized,
|
||||
stride,
|
||||
reinterpret_cast<void*>(byteOffset));
|
||||
}
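Note on the two alignment checks above: requiredAlignment is always 1, 2 or 4, i.e. a power of two, so requiredAlignmentMask = requiredAlignment - 1 and "value & requiredAlignmentMask" is zero exactly when value is a multiple of requiredAlignment. A minimal standalone sketch of that test, with illustrative values that are not taken from the patch:

    #include <cassert>
    #include <cstdint>

    // Power-of-two alignment test, as applied to stride and byteOffset above.
    static bool IsAligned(int64_t value, int64_t requiredAlignment) {
        // Assumes requiredAlignment is a power of two (1, 2 or 4 here).
        return (value & (requiredAlignment - 1)) == 0;
    }

    int main() {
        assert(IsAligned(8, 4));   // FLOAT data at byteOffset 8: OK
        assert(!IsAligned(6, 4));  // byteOffset 6 breaks 4-byte alignment
        assert(IsAligned(6, 2));   // but is acceptable for SHORT data
        return 0;
    }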
|
||||
|
||||
void
|
||||
WebGLContext::VertexAttribDivisor(WebGLuint index, WebGLuint divisor)
|
||||
{
|
||||
if (!IsContextStable())
|
||||
return;
|
||||
|
||||
if ( !mBoundVertexArray->EnsureAttribIndex(index, "vertexAttribDivisor") ) {
|
||||
return;
|
||||
}
|
||||
|
||||
WebGLVertexAttribData& vd = mBoundVertexArray->mAttribBuffers[index];
|
||||
vd.divisor = divisor;
|
||||
|
||||
InvalidateBufferFetching();
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
gl->fVertexAttribDivisor(index, divisor);
|
||||
}
|
||||
|
||||
bool WebGLContext::DrawArrays_check(WebGLint first, WebGLsizei count, WebGLsizei primcount, const char* info)
|
||||
{
|
||||
@ -24,6 +429,11 @@ bool WebGLContext::DrawArrays_check(WebGLint first, WebGLsizei count, WebGLsizei
|
||||
return false;
|
||||
}
|
||||
|
||||
if (primcount < 0) {
|
||||
ErrorInvalidValue("%s: negative primcount", info);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!ValidateStencilParamsForDrawCall()) {
|
||||
return false;
|
||||
}
|
||||
@ -39,8 +449,7 @@ bool WebGLContext::DrawArrays_check(WebGLint first, WebGLsizei count, WebGLsizei
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t maxAllowedCount = 0;
|
||||
if (!ValidateBuffers(&maxAllowedCount, info)) {
|
||||
if (!ValidateBufferFetching(info)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -51,11 +460,16 @@ bool WebGLContext::DrawArrays_check(WebGLint first, WebGLsizei count, WebGLsizei
|
||||
return false;
|
||||
}
|
||||
|
||||
if (uint32_t(checked_firstPlusCount.value()) > maxAllowedCount) {
|
||||
if (uint32_t(checked_firstPlusCount.value()) > mMaxFetchedVertices) {
|
||||
ErrorInvalidOperation("%s: bound vertex attribute buffers do not have sufficient size for given first and count", info);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (uint32_t(primcount) > mMaxFetchedInstances) {
|
||||
ErrorInvalidOperation("%s: bound instance attribute buffers do not have sufficient size for given primcount", info);
|
||||
return false;
|
||||
}
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (mBoundFramebuffer) {
|
||||
@ -117,6 +531,11 @@ WebGLContext::DrawElements_check(WebGLsizei count, WebGLenum type, WebGLintptr b
|
||||
return false;
|
||||
}
|
||||
|
||||
if (primcount < 0) {
|
||||
ErrorInvalidValue("%s: negative primcount", info);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!ValidateStencilParamsForDrawCall()) {
|
||||
return false;
|
||||
}
|
||||
@ -188,12 +607,11 @@ WebGLContext::DrawElements_check(WebGLsizei count, WebGLenum type, WebGLintptr b
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t maxAllowedCount = 0;
|
||||
if (!ValidateBuffers(&maxAllowedCount, info))
|
||||
if (!ValidateBufferFetching(info))
|
||||
return false;
|
||||
|
||||
if (!maxAllowedCount ||
|
||||
!mBoundVertexArray->mBoundElementArrayBuffer->Validate(type, maxAllowedCount - 1, first, count))
|
||||
if (!mMaxFetchedVertices ||
|
||||
!mBoundVertexArray->mBoundElementArrayBuffer->Validate(type, mMaxFetchedVertices - 1, first, count))
|
||||
{
|
||||
ErrorInvalidOperation(
|
||||
"%s: bound vertex attribute buffers do not have sufficient "
|
||||
@ -201,6 +619,11 @@ WebGLContext::DrawElements_check(WebGLsizei count, WebGLenum type, WebGLintptr b
|
||||
return false;
|
||||
}
|
||||
|
||||
if (uint32_t(primcount) > mMaxFetchedInstances) {
|
||||
ErrorInvalidOperation("%s: bound instance attribute buffers do not have sufficient size for given primcount", info);
|
||||
return false;
|
||||
}
|
||||
|
||||
MakeContextCurrent();
|
||||
|
||||
if (mBoundFramebuffer) {
|
||||
@ -210,7 +633,7 @@ WebGLContext::DrawElements_check(WebGLsizei count, WebGLenum type, WebGLintptr b
|
||||
}
|
||||
}
|
||||
|
||||
if (!DoFakeVertexAttrib0(maxAllowedCount)) {
|
||||
if (!DoFakeVertexAttrib0(mMaxFetchedVertices)) {
|
||||
return false;
|
||||
}
|
||||
BindFakeBlackTextures();
|
||||
@ -247,7 +670,7 @@ WebGLContext::DrawElementsInstanced(WebGLenum mode, WebGLsizei count, WebGLenum
|
||||
if (!ValidateDrawModeEnum(mode, "drawElementsInstanced: mode"))
|
||||
return;
|
||||
|
||||
if (!DrawElements_check(count, type, byteOffset, 1, "drawElementsInstanced"))
|
||||
if (!DrawElements_check(count, type, byteOffset, primcount, "drawElementsInstanced"))
|
||||
return;
|
||||
|
||||
SetupContextLossTimer();
|
||||
@ -279,3 +702,82 @@ void WebGLContext::Draw_cleanup()
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Verify that state is consistent for drawing, and compute max number of elements (maxAllowedCount)
|
||||
* that will be legal to be read from bound VBOs.
|
||||
*/
|
||||
|
||||
bool
|
||||
WebGLContext::ValidateBufferFetching(const char *info)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
GLint currentProgram = 0;
|
||||
MakeContextCurrent();
|
||||
gl->fGetIntegerv(LOCAL_GL_CURRENT_PROGRAM, &currentProgram);
|
||||
MOZ_ASSERT(GLuint(currentProgram) == mCurrentProgram->GLName(),
|
||||
"WebGL: current program doesn't agree with GL state");
|
||||
#endif
|
||||
|
||||
if (mBufferFetchingIsVerified) {
|
||||
return true;
|
||||
}
|
||||
|
||||
uint32_t maxVertices = UINT32_MAX;
|
||||
uint32_t maxInstances = UINT32_MAX;
|
||||
uint32_t attribs = mBoundVertexArray->mAttribBuffers.Length();
|
||||
|
||||
for (uint32_t i = 0; i < attribs; ++i) {
|
||||
const WebGLVertexAttribData& vd = mBoundVertexArray->mAttribBuffers[i];
|
||||
|
||||
// If the attrib array isn't enabled, there's nothing to check;
|
||||
// it's a static value.
|
||||
if (!vd.enabled)
|
||||
continue;
|
||||
|
||||
if (vd.buf == nullptr) {
|
||||
ErrorInvalidOperation("%s: no VBO bound to enabled vertex attrib index %d!", info, i);
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the attrib is not in use, then we don't have to validate
|
||||
// it, just need to make sure that the binding is non-null.
|
||||
if (!mCurrentProgram->IsAttribInUse(i))
|
||||
continue;
|
||||
|
||||
// the base offset
|
||||
CheckedUint32 checked_byteLength = CheckedUint32(vd.buf->ByteLength()) - vd.byteOffset;
|
||||
CheckedUint32 checked_sizeOfLastElement = CheckedUint32(vd.componentSize()) * vd.size;
|
||||
|
||||
if (!checked_byteLength.isValid() ||
|
||||
!checked_sizeOfLastElement.isValid())
|
||||
{
|
||||
ErrorInvalidOperation("%s: integer overflow occured while checking vertex attrib %d", info, i);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (checked_byteLength.value() < checked_sizeOfLastElement.value()) {
|
||||
maxVertices = 0;
|
||||
maxInstances = 0;
|
||||
break;
|
||||
}
|
||||
|
||||
CheckedUint32 checked_maxAllowedCount = ((checked_byteLength - checked_sizeOfLastElement) / vd.actualStride()) + 1;
|
||||
|
||||
if (!checked_maxAllowedCount.isValid()) {
|
||||
ErrorInvalidOperation("%s: integer overflow occured while checking vertex attrib %d", info, i);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (vd.divisor == 0)
|
||||
maxVertices = std::min(maxVertices, checked_maxAllowedCount.value());
|
||||
else
|
||||
maxInstances = std::min(maxInstances, checked_maxAllowedCount.value() / vd.divisor);
|
||||
}
|
||||
|
||||
mBufferFetchingIsVerified = true;
|
||||
mMaxFetchedVertices = maxVertices;
|
||||
mMaxFetchedInstances = maxInstances;
|
||||
|
||||
return true;
|
||||
}
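As a worked example of the per-attribute bound computed above (the numbers are illustrative, not taken from the patch): a 1000-byte VBO feeding a FLOAT vec3 attribute with byteOffset 0 has componentSize() == 4, size == 3 and an actual stride of 12, so maxAllowedCount = (1000 - 12) / 12 + 1 = 83 and mMaxFetchedVertices is clamped to 83 by that attribute. A tiny self-contained sketch of the same arithmetic:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Illustrative values for one enabled, in-use FLOAT vec3 attribute.
        uint32_t byteLength = 1000;          // buffer size minus byteOffset
        uint32_t sizeOfLastElement = 4 * 3;  // componentSize() * size
        uint32_t actualStride = 12;          // tightly packed
        uint32_t maxAllowedCount =
            (byteLength - sizeOfLastElement) / actualStride + 1;
        printf("max fetchable vertices: %u\n", (unsigned)maxAllowedCount); // prints 83
        return 0;
    }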
|
||||
|
||||
|
content/canvas/src/WebGLQuery.cpp (new file, +45)
@ -0,0 +1,45 @@
|
||||
/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "WebGLContext.h"
|
||||
#include "WebGLQuery.h"
|
||||
#include "mozilla/dom/WebGL2RenderingContextBinding.h"
|
||||
#include "nsContentUtils.h"
|
||||
|
||||
using namespace mozilla;
|
||||
|
||||
JSObject*
|
||||
WebGLQuery::WrapObject(JSContext *cx, JS::Handle<JSObject*> scope) {
|
||||
return dom::WebGLQueryBinding::Wrap(cx, scope, this);
|
||||
}
|
||||
|
||||
WebGLQuery::WebGLQuery(WebGLContext* context)
|
||||
: WebGLContextBoundObject(context)
|
||||
, mGLName(0)
|
||||
, mType(0)
|
||||
{
|
||||
SetIsDOMBinding();
|
||||
mContext->mQueries.insertBack(this);
|
||||
|
||||
mContext->MakeContextCurrent();
|
||||
mContext->gl->fGenQueries(1, &mGLName);
|
||||
}
|
||||
|
||||
void WebGLQuery::Delete() {
|
||||
mContext->MakeContextCurrent();
|
||||
mContext->gl->fDeleteQueries(1, &mGLName);
|
||||
LinkedListElement<WebGLQuery>::removeFrom(mContext->mQueries);
|
||||
}
|
||||
|
||||
|
||||
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_0(WebGLQuery)
|
||||
|
||||
NS_IMPL_CYCLE_COLLECTING_ADDREF(WebGLQuery)
|
||||
NS_IMPL_CYCLE_COLLECTING_RELEASE(WebGLQuery)
|
||||
|
||||
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(WebGLQuery)
|
||||
NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
|
||||
NS_INTERFACE_MAP_ENTRY(nsISupports)
|
||||
NS_INTERFACE_MAP_END
|
content/canvas/src/WebGLQuery.h (new file, +89)
@ -0,0 +1,89 @@
|
||||
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#ifndef WEBGLQUERY_H_
|
||||
#define WEBGLQUERY_H_
|
||||
|
||||
#include "WebGLObjectModel.h"
|
||||
#include "WebGLContext.h"
|
||||
|
||||
#include "nsWrapperCache.h"
|
||||
|
||||
#include "mozilla/LinkedList.h"
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
class WebGLQuery MOZ_FINAL
|
||||
: public nsISupports
|
||||
, public WebGLRefCountedObject<WebGLQuery>
|
||||
, public LinkedListElement<WebGLQuery>
|
||||
, public WebGLContextBoundObject
|
||||
, public nsWrapperCache
|
||||
{
|
||||
// -----------------------------------------------------------------------------
|
||||
// PUBLIC
|
||||
public:
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// CONSTRUCTOR & DESTRUCTOR
|
||||
|
||||
WebGLQuery(WebGLContext *context);
|
||||
|
||||
~WebGLQuery() {
|
||||
DeleteOnce();
|
||||
};
|
||||
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// MEMBER FUNCTIONS
|
||||
|
||||
bool IsActive() const
|
||||
{
|
||||
return mContext->GetActiveQueryByTarget(mType) == this;
|
||||
}
|
||||
|
||||
bool HasEverBeenActive()
|
||||
{
|
||||
return mType != 0;
|
||||
}
|
||||
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// IMPLEMENT WebGLRefCountedObject and WebGLContextBoundObject
|
||||
|
||||
void Delete();
|
||||
|
||||
WebGLContext* GetParentObject() const
|
||||
{
|
||||
return Context();
|
||||
}
|
||||
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// IMPLEMENT NS
|
||||
virtual JSObject* WrapObject(JSContext *cx,
|
||||
JS::Handle<JSObject*> scope) MOZ_OVERRIDE;
|
||||
|
||||
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
|
||||
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(WebGLQuery)
|
||||
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// PRIVATE
|
||||
private:
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// MEMBERS
|
||||
WebGLuint mGLName;
|
||||
WebGLenum mType;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// FRIENDSHIPS
|
||||
friend class WebGLContext;
|
||||
};
|
||||
|
||||
} // namespace mozilla
|
||||
|
||||
#endif
|
@ -23,39 +23,63 @@ class WebGLVertexArray MOZ_FINAL
|
||||
, public WebGLContextBoundObject
|
||||
, public nsWrapperCache
|
||||
{
|
||||
// -----------------------------------------------------------------------------
|
||||
// PUBLIC
|
||||
public:
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// CONSTRUCTOR & DESTRUCTOR
|
||||
|
||||
WebGLVertexArray(WebGLContext *context);
|
||||
|
||||
~WebGLVertexArray() {
|
||||
DeleteOnce();
|
||||
};
|
||||
|
||||
void Delete();
|
||||
|
||||
bool HasEverBeenBound() { return mHasEverBeenBound; }
|
||||
void SetHasEverBeenBound(bool x) { mHasEverBeenBound = x; }
|
||||
WebGLuint GLName() const { return mGLName; }
|
||||
// -------------------------------------------------------------------------
|
||||
// IMPLEMENT PARENT CLASSES
|
||||
|
||||
void Delete();
|
||||
|
||||
WebGLContext* GetParentObject() const {
|
||||
return Context();
|
||||
}
|
||||
|
||||
bool EnsureAttribIndex(WebGLuint index, const char *info);
|
||||
|
||||
virtual JSObject* WrapObject(JSContext *cx,
|
||||
JS::Handle<JSObject*> scope) MOZ_OVERRIDE;
|
||||
|
||||
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
|
||||
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(WebGLVertexArray)
|
||||
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// MEMBER FUNCTIONS
|
||||
|
||||
bool HasEverBeenBound() { return mHasEverBeenBound; }
|
||||
void SetHasEverBeenBound(bool x) { mHasEverBeenBound = x; }
|
||||
WebGLuint GLName() const { return mGLName; }
|
||||
|
||||
bool EnsureAttribIndex(WebGLuint index, const char *info);
|
||||
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// PRIVATE
|
||||
private:
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// MEMBERS
|
||||
|
||||
WebGLuint mGLName;
|
||||
bool mHasEverBeenBound;
|
||||
|
||||
nsTArray<WebGLVertexAttribData> mAttribBuffers;
|
||||
WebGLRefPtr<WebGLBuffer> mBoundElementArrayBuffer;
|
||||
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// FRIENDSHIPS
|
||||
|
||||
friend class WebGLContext;
|
||||
friend class WebGLExtensionVertexArray;
|
||||
};
|
||||
|
||||
} // namespace mozilla
|
||||
|
@ -13,13 +13,20 @@ class WebGLBuffer;
|
||||
struct WebGLVertexAttribData {
|
||||
// note that these initial values are what GL initializes vertex attribs to
|
||||
WebGLVertexAttribData()
|
||||
: buf(0), stride(0), size(4), byteOffset(0),
|
||||
type(LOCAL_GL_FLOAT), enabled(false), normalized(false)
|
||||
: buf(0)
|
||||
, stride(0)
|
||||
, size(4)
|
||||
, divisor(0) // OpenGL ES 3.0 spec, paragraph 6.2, p. 240
|
||||
, byteOffset(0)
|
||||
, type(LOCAL_GL_FLOAT)
|
||||
, enabled(false)
|
||||
, normalized(false)
|
||||
{ }
|
||||
|
||||
WebGLRefPtr<WebGLBuffer> buf;
|
||||
WebGLuint stride;
|
||||
WebGLuint size;
|
||||
WebGLuint divisor;
|
||||
GLuint byteOffset;
|
||||
GLenum type;
|
||||
bool enabled;
|
||||
|
@ -31,6 +31,7 @@ if CONFIG['MOZ_WEBGL']:
|
||||
'WebGL1Context.cpp',
|
||||
'WebGL2Context.cpp',
|
||||
'WebGLContext.cpp',
|
||||
'WebGLContextAsyncQueries.cpp',
|
||||
'WebGLContextGL.cpp',
|
||||
'WebGLContextUtils.cpp',
|
||||
'WebGLContextReporter.cpp',
|
||||
@ -56,6 +57,7 @@ if CONFIG['MOZ_WEBGL']:
|
||||
'WebGLFramebuffer.cpp',
|
||||
'WebGLObjectModel.cpp',
|
||||
'WebGLProgram.cpp',
|
||||
'WebGLQuery.cpp',
|
||||
'WebGLRenderbuffer.cpp',
|
||||
'WebGLShader.cpp',
|
||||
'WebGLShaderPrecisionFormat.cpp',
|
||||
|
@ -909,6 +909,9 @@ protected:
|
||||
nsMediaNetworkState mNetworkState;
|
||||
nsMediaReadyState mReadyState;
|
||||
|
||||
// Last value passed from codec or stream source to UpdateReadyStateForData.
|
||||
NextFrameStatus mLastNextFrameStatus;
|
||||
|
||||
enum LoadAlgorithmState {
|
||||
// No load algorithm instance is waiting for a source to be added to the
|
||||
// media in order to continue loading.
|
||||
@ -1121,6 +1124,9 @@ protected:
|
||||
// True if the media has an audio track
|
||||
bool mHasAudio;
|
||||
|
||||
// True if the media has a video track
|
||||
bool mHasVideo;
|
||||
|
||||
// True if the media's channel's download has been suspended.
|
||||
bool mDownloadSuspendedByCache;
|
||||
|
||||
|
@ -222,23 +222,18 @@ HTMLIFrameElement::AfterSetAttr(int32_t aNameSpaceID, nsIAtom* aName,
|
||||
bool aNotify)
|
||||
{
|
||||
if (aName == nsGkAtoms::sandbox && aNameSpaceID == kNameSpaceID_None) {
|
||||
// Parse the new value of the sandbox attribute, and if we have a docshell
|
||||
// set its sandbox flags appropriately.
|
||||
// If we have an nsFrameLoader, parse the new value of the sandbox
|
||||
// attribute and apply the new sandbox flags.
|
||||
if (mFrameLoader) {
|
||||
nsCOMPtr<nsIDocShell> docshell = mFrameLoader->GetExistingDocShell();
|
||||
|
||||
if (docshell) {
|
||||
uint32_t newFlags = 0;
|
||||
// If a nullptr aValue is passed in, we want to clear the sandbox flags
|
||||
// which we will do by setting them to 0.
|
||||
if (aValue) {
|
||||
nsAutoString strValue;
|
||||
aValue->ToString(strValue);
|
||||
newFlags = nsContentUtils::ParseSandboxAttributeToFlags(
|
||||
strValue);
|
||||
}
|
||||
docshell->SetSandboxFlags(newFlags);
|
||||
// If a nullptr aValue is passed in, we want to clear the sandbox flags
|
||||
// which we will do by setting them to 0.
|
||||
uint32_t newFlags = 0;
|
||||
if (aValue) {
|
||||
nsAutoString strValue;
|
||||
aValue->ToString(strValue);
|
||||
newFlags = nsContentUtils::ParseSandboxAttributeToFlags(strValue);
|
||||
}
|
||||
mFrameLoader->ApplySandboxFlags(newFlags);
|
||||
}
|
||||
}
|
||||
return nsGenericHTMLElement::AfterSetAttr(aNameSpaceID, aName, aValue,
|
||||
|
@ -68,6 +68,8 @@
|
||||
#include "nsHostObjectProtocolHandler.h"
|
||||
#include "mozilla/dom/MediaSource.h"
|
||||
#include "MediaMetadataManager.h"
|
||||
#include "AudioStreamTrack.h"
|
||||
#include "VideoStreamTrack.h"
|
||||
|
||||
#include "AudioChannelService.h"
|
||||
|
||||
@ -621,7 +623,10 @@ void HTMLMediaElement::AbortExistingLoads()
|
||||
mHaveQueuedSelectResource = false;
|
||||
mSuspendedForPreloadNone = false;
|
||||
mDownloadSuspendedByCache = false;
|
||||
mHasAudio = false;
|
||||
mHasVideo = false;
|
||||
mSourcePointer = nullptr;
|
||||
mLastNextFrameStatus = NEXT_FRAME_UNINITIALIZED;
|
||||
|
||||
mChannels = 0;
|
||||
mRate = 0;
|
||||
@ -1906,6 +1911,7 @@ HTMLMediaElement::HTMLMediaElement(already_AddRefed<nsINodeInfo> aNodeInfo)
|
||||
mCurrentLoadID(0),
|
||||
mNetworkState(nsIDOMHTMLMediaElement::NETWORK_EMPTY),
|
||||
mReadyState(nsIDOMHTMLMediaElement::HAVE_NOTHING),
|
||||
mLastNextFrameStatus(NEXT_FRAME_UNINITIALIZED),
|
||||
mLoadWaitStatus(NOT_WAITING),
|
||||
mVolume(1.0),
|
||||
mChannels(0),
|
||||
@ -1946,6 +1952,7 @@ HTMLMediaElement::HTMLMediaElement(already_AddRefed<nsINodeInfo> aNodeInfo)
|
||||
mMediaSecurityVerified(false),
|
||||
mCORSMode(CORS_NONE),
|
||||
mHasAudio(false),
|
||||
mHasVideo(false),
|
||||
mDownloadSuspendedByCache(false),
|
||||
mAudioChannelType(AUDIO_CHANNEL_NORMAL),
|
||||
mPlayingThroughTheAudioChannel(false)
|
||||
@ -2514,12 +2521,21 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
||||
mPausedForInactiveDocumentOrChannel = false;
|
||||
mEventDeliveryPaused = false;
|
||||
mPendingEvents.Clear();
|
||||
// Set mDecoder now so if methods like GetCurrentSrc get called between
|
||||
// here and Load(), they work.
|
||||
mDecoder = aDecoder;
|
||||
|
||||
// Tell aDecoder about its MediaResource now so things like principals are
|
||||
// available immediately.
|
||||
aDecoder->SetResource(aStream);
|
||||
aDecoder->SetAudioChannelType(mAudioChannelType);
|
||||
aDecoder->SetAudioCaptured(mAudioCaptured);
|
||||
aDecoder->SetVolume(mMuted ? 0.0 : mVolume);
|
||||
aDecoder->SetPreservesPitch(mPreservesPitch);
|
||||
aDecoder->SetPlaybackRate(mPlaybackRate);
|
||||
// Update decoder principal before we start decoding, since it
|
||||
// can affect how we feed data to MediaStreams
|
||||
NotifyDecoderPrincipalChanged();
|
||||
|
||||
for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
|
||||
OutputMediaStream* ms = &mOutputStreams[i];
|
||||
@ -2527,8 +2543,9 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
||||
ms->mFinishWhenEnded);
|
||||
}
|
||||
|
||||
nsresult rv = aDecoder->Load(aStream, aListener, aCloneDonor);
|
||||
nsresult rv = aDecoder->Load(aListener, aCloneDonor);
|
||||
if (NS_FAILED(rv)) {
|
||||
mDecoder = nullptr;
|
||||
LOG(PR_LOG_DEBUG, ("%p Failed to load for decoder %p", this, aDecoder));
|
||||
return rv;
|
||||
}
|
||||
@ -2537,9 +2554,7 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
||||
// which owns the channel.
|
||||
mChannel = nullptr;
|
||||
|
||||
mDecoder = aDecoder;
|
||||
AddMediaElementToURITable();
|
||||
NotifyDecoderPrincipalChanged();
|
||||
|
||||
// We may want to suspend the new stream now.
|
||||
// This will also do an AddRemoveSelfReference.
|
||||
@ -2685,6 +2700,19 @@ void HTMLMediaElement::SetupSrcMediaStreamPlayback(DOMMediaStream* aStream)
|
||||
if (mPausedForInactiveDocumentOrChannel) {
|
||||
GetSrcMediaStream()->ChangeExplicitBlockerCount(1);
|
||||
}
|
||||
|
||||
nsAutoTArray<nsRefPtr<AudioStreamTrack>,1> audioTracks;
|
||||
aStream->GetAudioTracks(audioTracks);
|
||||
nsAutoTArray<nsRefPtr<VideoStreamTrack>,1> videoTracks;
|
||||
aStream->GetVideoTracks(videoTracks);
|
||||
|
||||
// Clear aChannels, aRate and aTags, but set mHasAudio and mHasVideo
|
||||
MetadataLoaded(0, 0,
|
||||
!audioTracks.IsEmpty(), !videoTracks.IsEmpty(),
|
||||
nullptr);
|
||||
DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
|
||||
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
|
||||
|
||||
ChangeDelayLoadStatus(false);
|
||||
GetSrcMediaStream()->AddAudioOutput(this);
|
||||
GetSrcMediaStream()->SetAudioOutputVolume(this, float(mMuted ? 0.0 : mVolume));
|
||||
@ -2692,11 +2720,7 @@ void HTMLMediaElement::SetupSrcMediaStreamPlayback(DOMMediaStream* aStream)
|
||||
if (container) {
|
||||
GetSrcMediaStream()->AddVideoOutput(container);
|
||||
}
|
||||
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
|
||||
DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
|
||||
DispatchAsyncEvent(NS_LITERAL_STRING("loadedmetadata"));
|
||||
DispatchAsyncEvent(NS_LITERAL_STRING("suspend"));
|
||||
mNetworkState = nsIDOMHTMLMediaElement::NETWORK_IDLE;
|
||||
|
||||
AddRemoveSelfReference();
|
||||
// FirstFrameLoaded(false) will be called when the stream has current data,
|
||||
// to complete the setup by entering the HAVE_CURRENT_DATA state.
|
||||
@ -2753,6 +2777,7 @@ void HTMLMediaElement::MetadataLoaded(int aChannels,
|
||||
mChannels = aChannels;
|
||||
mRate = aRate;
|
||||
mHasAudio = aHasAudio;
|
||||
mHasVideo = aHasVideo;
|
||||
mTags = aTags;
|
||||
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
|
||||
DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
|
||||
@ -2772,10 +2797,8 @@ void HTMLMediaElement::MetadataLoaded(int aChannels,
|
||||
|
||||
void HTMLMediaElement::FirstFrameLoaded(bool aResourceFullyLoaded)
|
||||
{
|
||||
ChangeReadyState(aResourceFullyLoaded ?
|
||||
nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA :
|
||||
nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
|
||||
ChangeDelayLoadStatus(false);
|
||||
UpdateReadyStateForData(NEXT_FRAME_UNAVAILABLE);
|
||||
|
||||
NS_ASSERTION(!mSuspendedAfterFirstFrame, "Should not have already suspended");
|
||||
|
||||
@ -2962,6 +2985,8 @@ bool HTMLMediaElement::ShouldCheckAllowOrigin()
|
||||
|
||||
void HTMLMediaElement::UpdateReadyStateForData(MediaDecoderOwner::NextFrameStatus aNextFrame)
|
||||
{
|
||||
mLastNextFrameStatus = aNextFrame;
|
||||
|
||||
if (mReadyState < nsIDOMHTMLMediaElement::HAVE_METADATA) {
|
||||
// aNextFrame might have a next frame because the decoder can advance
|
||||
// on its own thread before ResourceLoaded or MetadataLoaded gets
|
||||
@ -2985,6 +3010,14 @@ void HTMLMediaElement::UpdateReadyStateForData(MediaDecoderOwner::NextFrameStatu
|
||||
return;
|
||||
}
|
||||
|
||||
if (mReadyState < nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA && mHasVideo) {
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (container && mMediaSize == nsIntSize(-1,-1)) {
|
||||
// No frame has been set yet. Don't advance.
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (aNextFrame != MediaDecoderOwner::NEXT_FRAME_AVAILABLE) {
|
||||
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
|
||||
if (!mWaitingFired && aNextFrame == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING) {
|
||||
@ -3119,26 +3152,21 @@ void HTMLMediaElement::CheckAutoplayDataReady()
|
||||
|
||||
VideoFrameContainer* HTMLMediaElement::GetVideoFrameContainer()
|
||||
{
|
||||
// If we have loaded the metadata, and the size of the video is still
|
||||
// (-1, -1), the media has no video. Don't go and create a video frame
|
||||
// container.
|
||||
if (mReadyState >= nsIDOMHTMLMediaElement::HAVE_METADATA &&
|
||||
mMediaSize == nsIntSize(-1, -1)) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (mVideoFrameContainer)
|
||||
if (mVideoFrameContainer) {
|
||||
return mVideoFrameContainer;
|
||||
}
|
||||
|
||||
// If we have a print surface, this is just a static image so
|
||||
// no image container is required
|
||||
if (mPrintSurface)
|
||||
if (mPrintSurface) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Only video frames need an image container.
|
||||
nsCOMPtr<nsIDOMHTMLVideoElement> video = do_QueryObject(this);
|
||||
if (!video)
|
||||
if (!video) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
mVideoFrameContainer =
|
||||
new VideoFrameContainer(this, LayerManager::CreateAsynchronousImageContainer());
|
||||
@ -3253,9 +3281,14 @@ already_AddRefed<nsIPrincipal> HTMLMediaElement::GetCurrentPrincipal()
|
||||
|
||||
void HTMLMediaElement::NotifyDecoderPrincipalChanged()
|
||||
{
|
||||
nsRefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
|
||||
|
||||
bool subsumes;
|
||||
mDecoder->UpdateSameOriginStatus(
|
||||
NS_SUCCEEDED(NodePrincipal()->Subsumes(principal, &subsumes)) && subsumes);
|
||||
|
||||
for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
|
||||
OutputMediaStream* ms = &mOutputStreams[i];
|
||||
nsRefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
|
||||
ms->mStream->CombineWithPrincipal(principal);
|
||||
}
|
||||
}
|
||||
@ -3263,6 +3296,7 @@ void HTMLMediaElement::NotifyDecoderPrincipalChanged()
|
||||
void HTMLMediaElement::UpdateMediaSize(nsIntSize size)
|
||||
{
|
||||
mMediaSize = size;
|
||||
UpdateReadyStateForData(mLastNextFrameStatus);
|
||||
}
|
||||
|
||||
void HTMLMediaElement::SuspendOrResumeElement(bool aPauseElement, bool aSuspendEvents)
|
||||
|
@ -288,6 +288,13 @@ MOCHITEST_FILES = \
|
||||
file_iframe_sandbox_a_if10.html \
|
||||
file_iframe_sandbox_a_if11.html \
|
||||
file_iframe_sandbox_a_if12.html \
|
||||
file_iframe_sandbox_a_if13.html \
|
||||
file_iframe_sandbox_a_if14.html \
|
||||
file_iframe_sandbox_a_if15.html \
|
||||
file_iframe_sandbox_a_if16.html \
|
||||
file_iframe_sandbox_a_if17.html \
|
||||
file_iframe_sandbox_a_if18.html \
|
||||
file_iframe_sandbox_a_if19.html \
|
||||
test_iframe_sandbox_same_origin.html \
|
||||
file_iframe_sandbox_b_if1.html \
|
||||
file_iframe_sandbox_b_if2.html \
|
||||
|
@ -7,5 +7,6 @@
|
||||
</head>
|
||||
<frameset>
|
||||
<frame src="file_iframe_sandbox_a_if11.html">
|
||||
<frame src="file_iframe_sandbox_a_if16.html">
|
||||
</frameset>
|
||||
</html>
|
||||
|
@ -18,6 +18,6 @@
|
||||
<frameset>
|
||||
<frame onload='doStuff()' src="file_iframe_sandbox_a_if12.html">
|
||||
</frameset>
|
||||
I'm a <frame> inside an iframe which is sandboxed with 'allow-scripts'
|
||||
I'm a <frame> inside an iframe which is sandboxed with 'allow-scripts allow-forms'
|
||||
</html>
|
||||
|
||||
|
@ -17,7 +17,7 @@ function doStuff() {
|
||||
}
|
||||
</script>
|
||||
<body onload='doStuff()'>
|
||||
I'm a <frame> inside a <frame> inside an iframe which is sandboxed with 'allow-scripts'
|
||||
I'm a <frame> inside a <frame> inside an iframe which is sandboxed with 'allow-scripts allow-forms'
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
content/html/content/test/file_iframe_sandbox_a_if13.html (new file, +13)
@ -0,0 +1,13 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<object data="file_iframe_sandbox_a_if14.html"></object>
|
||||
</body>
|
||||
|
||||
</html>
|
content/html/content/test/file_iframe_sandbox_a_if14.html (new file, +34)
@ -0,0 +1,34 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
</head>
|
||||
|
||||
<script>
|
||||
window.addEventListener("message", receiveMessage, false);
|
||||
|
||||
function receiveMessage(event)
|
||||
{
|
||||
window.parent.parent.postMessage({ok: event.data.ok, desc: "objects containing " + event.data.desc}, "*");
|
||||
}
|
||||
|
||||
function doStuff() {
|
||||
try {
|
||||
window.parent.parent.ok_wrapper(false, "an object inside a sandboxed iframe should NOT be same origin with the iframe's parent");
|
||||
}
|
||||
catch (e) {
|
||||
window.parent.parent.postMessage({ok: true, desc: "an object inside a sandboxed iframe is not same origin with the iframe's parent"}, "*");
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<body onload='doStuff()'>
|
||||
I'm an <object> inside an iframe which is sandboxed with 'allow-scripts allow-forms'
|
||||
|
||||
<object data="file_iframe_sandbox_a_if15.html"></object>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
content/html/content/test/file_iframe_sandbox_a_if15.html (new file, +33)
@ -0,0 +1,33 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
</head>
|
||||
|
||||
<script>
|
||||
function doStuff() {
|
||||
try {
|
||||
window.parent.parent.parent.ok_wrapper(false, "an object inside a frame or object inside a sandboxed iframe should NOT be same origin with the iframe's parent");
|
||||
}
|
||||
catch (e) {
|
||||
window.parent.parent.parent.postMessage({ok: true, desc: "an object inside a frame or object inside a sandboxed iframe is not same origin with the iframe's parent"}, "*");
|
||||
}
|
||||
|
||||
// Check that the sandboxed forms browsing context flag is NOT set by attempting to submit a form.
|
||||
document.getElementById('a_form').submit();
|
||||
}
|
||||
</script>
|
||||
|
||||
<body onload='doStuff()'>
|
||||
I'm a <object> inside a <frame> or <object> inside an iframe which is sandboxed with 'allow-scripts allow-forms'
|
||||
|
||||
<form method="get" action="file_iframe_sandbox_form_pass.html" id="a_form">
|
||||
First name: <input type="text" name="firstname">
|
||||
Last name: <input type="text" name="lastname">
|
||||
<input type="submit" id="a_button">
|
||||
</form>
|
||||
</body>
|
||||
</html>
|
||||
|
content/html/content/test/file_iframe_sandbox_a_if16.html (new file, +25)
@ -0,0 +1,25 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
</head>
|
||||
|
||||
<script>
|
||||
window.addEventListener("message", receiveMessage, false);
|
||||
|
||||
function receiveMessage(event)
|
||||
{
|
||||
window.parent.parent.postMessage({ok: event.data.ok, desc: "objects containing " + event.data.desc}, "*");
|
||||
}
|
||||
</script>
|
||||
|
||||
<body>
|
||||
I'm a <frame> inside an iframe which is sandboxed with 'allow-scripts allow-forms'
|
||||
|
||||
<object data="file_iframe_sandbox_a_if15.html"></object>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
content/html/content/test/file_iframe_sandbox_a_if17.html (new file, +27)
@ -0,0 +1,27 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
</head>
|
||||
|
||||
<script>
|
||||
function doTest() {
|
||||
var if_18_19 = document.getElementById('if_18_19');
|
||||
if_18_19.sandbox = "allow-scripts allow-same-origin";
|
||||
if_18_19.contentWindow.postMessage("go", "*");
|
||||
}
|
||||
</script>
|
||||
|
||||
<body onload="doTest()">
|
||||
I am sandboxed but with "allow-scripts". I change the sandbox flags on if_18_19 to
|
||||
"allow-scripts allow-same-origin" then get it to re-navigate itself to
|
||||
file_iframe_sandbox_a_if18.html, which attempts to call a function in my parent.
|
||||
This should fail since my sandbox flags should be copied to it when the sandbox
|
||||
flags are changed.
|
||||
|
||||
<iframe sandbox="allow-scripts" id="if_18_19" src="file_iframe_sandbox_a_if19.html" height="10" width="10"></iframe>
|
||||
</body>
|
||||
</html>
|
||||
|
content/html/content/test/file_iframe_sandbox_a_if18.html (new file, +26)
@ -0,0 +1,26 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
</head>
|
||||
|
||||
<script>
|
||||
function doTest() {
|
||||
try {
|
||||
window.parent.parent.ok_wrapper(false, "an iframe in an iframe SHOULD copy its parent's sandbox flags when its sandbox flags are changed");
|
||||
}
|
||||
catch (e) {
|
||||
window.parent.parent.postMessage({ok: true, desc: "an iframe in an iframe copies its parent's sandbox flags when its sandbox flags are changed"}, "*");
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<body onload="doTest()">
|
||||
I'm an iframe whose sandbox flags have been changed to include allow-same-origin.
|
||||
I should not be able to call a function in my parent's parent because my parent's
|
||||
iframe does not have allow-same-origin set.
|
||||
</body>
|
||||
</html>
|
||||
|
content/html/content/test/file_iframe_sandbox_a_if19.html (new file, +21)
@ -0,0 +1,21 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test for Bug 886262</title>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/EventUtils.js"></script>
|
||||
</head>
|
||||
|
||||
<script>
|
||||
window.addEventListener("message", function(e){
|
||||
window.open("file_iframe_sandbox_a_if18.html", "_self");
|
||||
}, false);
|
||||
</script>
|
||||
|
||||
<body>
|
||||
I'm just here to navigate to file_iframe_sandbox_a_if18.html after my owning
|
||||
iframe has had allow-same-origin added.
|
||||
</body>
|
||||
</html>
|
||||
|
@ -14,6 +14,9 @@ Implement HTML5 sandbox attribute for IFRAMEs - inheritance tests
|
||||
/** Test for Bug 341604 - Implement HTML5 sandbox attribute for IFRAMEs **/
|
||||
/** Inheritance Tests **/
|
||||
|
||||
// Assertion failure in docshell/shistory/src/nsSHEntry.cpp (currently line 625).
|
||||
// Bug 901876 raised.
|
||||
SimpleTest.expectAssertions(1);
|
||||
SimpleTest.waitForExplicitFinish();
|
||||
|
||||
// A postMessage handler that is used by sandboxed iframes without
|
||||
@ -39,8 +42,8 @@ function ok_wrapper(result, desc) {
|
||||
passedTests++;
|
||||
}
|
||||
|
||||
if (completedTests == 6) {
|
||||
is(passedTests, 6, "there should be 6 passed inheritance tests");
|
||||
if (completedTests == 13) {
|
||||
is(passedTests, completedTests, "there should be " + completedTests + " passed inheritance tests");
|
||||
SimpleTest.finish();
|
||||
}
|
||||
}
|
||||
@ -102,6 +105,48 @@ function doTest() {
|
||||
// 11) a <frame> inside a <frame> inside an <iframe> sandboxed with 'allow-scripts' should not be same
|
||||
// origin with its parent frame or this document
|
||||
// done by file_iframe_sandbox_a_if12.html which is contained within file_iframe_sandbox_a_if11.html
|
||||
|
||||
// passes if good, fails if bad
|
||||
// 12) An <object> inside an <iframe> sandboxed with 'allow-scripts' should not be same
|
||||
// origin with this document
|
||||
// Done by file_iframe_sandbox_a_if14.html which is contained within file_iframe_sandbox_a_if13.html
|
||||
|
||||
// passes if good, fails if bad
|
||||
// 13) An <object> inside an <object> inside an <iframe> sandboxed with 'allow-scripts' should not be same
|
||||
// origin with its parent frame or this document
|
||||
// Done by file_iframe_sandbox_a_if15.html which is contained within file_iframe_sandbox_a_if14.html
|
||||
|
||||
// passes if good, fails if bad
|
||||
// 14) An <object> inside a <frame> inside an <iframe> sandboxed with 'allow-scripts' should not be same
|
||||
// origin with its parent frame or this document
|
||||
// Done by file_iframe_sandbox_a_if15.html which is contained within file_iframe_sandbox_a_if16.html
|
||||
// which is contained within file_iframe_sandbox_a_if10.html
|
||||
|
||||
// passes if good
|
||||
// 15) An <object> inside an <object> inside an <iframe> sandboxed with 'allow-scripts allow-forms'
|
||||
// should be able to submit forms.
|
||||
// Done by file_iframe_sandbox_a_if15.html which is contained within file_iframe_sandbox_a_if14.html
|
||||
|
||||
// passes if good
|
||||
// 16) An <object> inside a <frame> inside an <iframe> sandboxed with 'allow-scripts allow-forms'
|
||||
// should be able to submit forms.
|
||||
// Done by file_iframe_sandbox_a_if15.html which is contained within file_iframe_sandbox_a_if16.html
|
||||
// which is contained within file_iframe_sandbox_a_if10.html
|
||||
|
||||
// fails if bad
|
||||
// 17) An <object> inside an <iframe> sandboxed with 'allow-same-origin'
|
||||
// should not be able to run scripts.
|
||||
// Done by iframe "if_no_scripts" using a data: load.
|
||||
|
||||
// passes if good
|
||||
// 18) An <object> inside an <iframe> sandboxed with 'allow-scripts allow-same-origin'
|
||||
// should be able to run scripts and be same origin with this document.
|
||||
// Done by iframe "if_scripts" using a data: load.
|
||||
|
||||
// passes if good, fails if bad
|
||||
// 19) Make sure that the parent's document's sandboxing flags are copied when
|
||||
// changing the sandbox flags on an iframe inside an iframe.
|
||||
// Done in file_iframe_sandbox_a_if17.html and file_iframe_sandbox_a_if18.html
|
||||
}
|
||||
|
||||
addLoadEvent(doTest);
|
||||
@ -114,5 +159,10 @@ addLoadEvent(doTest);
|
||||
<iframe sandbox="allow-scripts" id="if_3" src="file_iframe_sandbox_a_if3.html" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts allow-same-origin" id="if_5" src="file_iframe_sandbox_a_if5.html" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts allow-same-origin" id="if_8" src="file_iframe_sandbox_a_if8.html" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts" id="if_10" src="file_iframe_sandbox_a_if10.html" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts allow-forms" id="if_10" src="file_iframe_sandbox_a_if10.html" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts allow-forms" id="if_13" src="file_iframe_sandbox_a_if13.html" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-same-origin" id="if_no_scripts" src="data:text/html,<object%20data='data:text/html,<script>parent.parent.ok_wrapper(false, "an object inside an iframe sandboxed with only allow-same-origin should not be able to run scripts")</script>'></object>" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts allow-same-origin" id="if_scripts" src="data:text/html,<object%20data='data:text/html,<script>parent.parent.ok_wrapper(true, "an object inside an iframe sandboxed with allow-scripts allow-same-origin should be able to run scripts and call functions in the parent of the iframe")</script>'></object>" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-same-origin" id="if_19" src="data:text/html,<iframe%20data='data:text/html,<script>parent.parent.ok_wrapper(true, "an object inside an iframe sandboxed with allow-scripts allow-same-origin should be able to run scripts and call functions in the parent of the iframe")</script>'></object>" height="10" width="10"></iframe>
|
||||
<iframe sandbox="allow-scripts" id="if_17" src="file_iframe_sandbox_a_if17.html" height="10" width="10"></iframe>
|
||||
</div>
|
||||
|
content/media/AudioNodeExternalInputStream.cpp (new file, +492)
@ -0,0 +1,492 @@
|
||||
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "MediaStreamGraphImpl.h"
|
||||
#include "AudioNodeEngine.h"
|
||||
#include "AudioNodeExternalInputStream.h"
|
||||
#include "speex/speex_resampler.h"
|
||||
|
||||
using namespace mozilla::dom;
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
AudioNodeExternalInputStream::AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
|
||||
: AudioNodeStream(aEngine, MediaStreamGraph::INTERNAL_STREAM, aSampleRate)
|
||||
, mCurrentOutputPosition(0)
|
||||
{
|
||||
MOZ_COUNT_CTOR(AudioNodeExternalInputStream);
|
||||
}
|
||||
|
||||
AudioNodeExternalInputStream::~AudioNodeExternalInputStream()
|
||||
{
|
||||
MOZ_COUNT_DTOR(AudioNodeExternalInputStream);
|
||||
}
|
||||
|
||||
AudioNodeExternalInputStream::TrackMapEntry::~TrackMapEntry()
|
||||
{
|
||||
if (mResampler) {
|
||||
speex_resampler_destroy(mResampler);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t
|
||||
AudioNodeExternalInputStream::GetTrackMapEntry(const StreamBuffer::Track& aTrack,
|
||||
GraphTime aFrom)
|
||||
{
|
||||
AudioSegment* segment = aTrack.Get<AudioSegment>();
|
||||
|
||||
// Check the map for an existing entry corresponding to the input track.
|
||||
for (uint32_t i = 0; i < mTrackMap.Length(); ++i) {
|
||||
TrackMapEntry* map = &mTrackMap[i];
|
||||
if (map->mTrackID == aTrack.GetID()) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
|
||||
// Determine channel count by finding the first entry with non-silent data.
|
||||
AudioSegment::ChunkIterator ci(*segment);
|
||||
while (!ci.IsEnded() && ci->IsNull()) {
|
||||
ci.Next();
|
||||
}
|
||||
if (ci.IsEnded()) {
|
||||
// The track is entirely silence so far, we can ignore it for now.
|
||||
return nsTArray<TrackMapEntry>::NoIndex;
|
||||
}
|
||||
|
||||
// Create a speex resampler with the same sample rate and number of channels
|
||||
// as the track.
|
||||
SpeexResamplerState* resampler = nullptr;
|
||||
uint32_t channelCount = (*ci).mChannelData.Length();
|
||||
if (aTrack.GetRate() != mSampleRate) {
|
||||
resampler = speex_resampler_init(channelCount,
|
||||
aTrack.GetRate(), mSampleRate, SPEEX_RESAMPLER_QUALITY_DEFAULT, nullptr);
|
||||
speex_resampler_skip_zeros(resampler);
|
||||
}
|
||||
|
||||
TrackMapEntry* map = mTrackMap.AppendElement();
|
||||
map->mEndOfConsumedInputTicks = 0;
|
||||
map->mEndOfLastInputIntervalInInputStream = -1;
|
||||
map->mEndOfLastInputIntervalInOutputStream = -1;
|
||||
map->mSamplesPassedToResampler =
|
||||
TimeToTicksRoundUp(aTrack.GetRate(), GraphTimeToStreamTime(aFrom));
|
||||
map->mResampler = resampler;
|
||||
map->mResamplerChannelCount = channelCount;
|
||||
map->mTrackID = aTrack.GetID();
|
||||
return mTrackMap.Length() - 1;
|
||||
}
|
||||
|
||||
static const uint32_t SPEEX_RESAMPLER_PROCESS_MAX_OUTPUT = 1000;
|
||||
|
||||
template <typename T> static int
|
||||
SpeexResamplerProcess(SpeexResamplerState* aResampler,
|
||||
uint32_t aChannel,
|
||||
const T* aInput, uint32_t* aIn,
|
||||
float* aOutput, uint32_t* aOut);
|
||||
|
||||
template <> int
|
||||
SpeexResamplerProcess<float>(SpeexResamplerState* aResampler,
|
||||
uint32_t aChannel,
|
||||
const float* aInput, uint32_t* aIn,
|
||||
float* aOutput, uint32_t* aOut)
|
||||
{
|
||||
NS_ASSERTION(*aOut <= SPEEX_RESAMPLER_PROCESS_MAX_OUTPUT, "Bad aOut");
|
||||
return speex_resampler_process_float(aResampler, aChannel, aInput, aIn, aOutput, aOut);
|
||||
}
|
||||
|
||||
template <> int
|
||||
SpeexResamplerProcess<int16_t>(SpeexResamplerState* aResampler,
|
||||
uint32_t aChannel,
|
||||
const int16_t* aInput, uint32_t* aIn,
|
||||
float* aOutput, uint32_t* aOut)
|
||||
{
|
||||
NS_ASSERTION(*aOut <= SPEEX_RESAMPLER_PROCESS_MAX_OUTPUT, "Bad aOut");
|
||||
int16_t tmp[SPEEX_RESAMPLER_PROCESS_MAX_OUTPUT];
|
||||
int result = speex_resampler_process_int(aResampler, aChannel, aInput, aIn, tmp, aOut);
|
||||
if (result == RESAMPLER_ERR_SUCCESS) {
|
||||
for (uint32_t i = 0; i < *aOut; ++i) {
|
||||
aOutput[i] = AudioSampleToFloat(tmp[i]);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
template <typename T> static void
|
||||
ResampleChannelBuffer(SpeexResamplerState* aResampler, uint32_t aChannel,
|
||||
const T* aInput, uint32_t aInputDuration,
|
||||
nsTArray<float>* aOutput)
|
||||
{
|
||||
if (!aResampler) {
|
||||
float* out = aOutput->AppendElements(aInputDuration);
|
||||
for (uint32_t i = 0; i < aInputDuration; ++i) {
|
||||
out[i] = AudioSampleToFloat(aInput[i]);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
uint32_t processed = 0;
|
||||
while (processed < aInputDuration) {
|
||||
uint32_t prevLength = aOutput->Length();
|
||||
float* output = aOutput->AppendElements(SPEEX_RESAMPLER_PROCESS_MAX_OUTPUT);
|
||||
uint32_t in = aInputDuration - processed;
|
||||
uint32_t out = aOutput->Length() - prevLength;
|
||||
SpeexResamplerProcess(aResampler, aChannel,
|
||||
aInput + processed, &in,
|
||||
output, &out);
|
||||
processed += in;
|
||||
aOutput->SetLength(prevLength + out);
|
||||
}
|
||||
}
|
||||
|
||||
class SharedChannelArrayBuffer : public ThreadSharedObject {
|
||||
public:
|
||||
SharedChannelArrayBuffer(nsTArray<nsTArray<float> >* aBuffers)
|
||||
{
|
||||
mBuffers.SwapElements(*aBuffers);
|
||||
}
|
||||
nsTArray<nsTArray<float> > mBuffers;
|
||||
};
|
||||
|
||||
void
|
||||
AudioNodeExternalInputStream::TrackMapEntry::ResampleChannels(const nsTArray<const void*>& aBuffers,
|
||||
uint32_t aInputDuration,
|
||||
AudioSampleFormat aFormat,
|
||||
float aVolume)
|
||||
{
|
||||
NS_ASSERTION(aBuffers.Length() == mResamplerChannelCount,
|
||||
"Channel count must be correct here");
|
||||
|
||||
nsAutoTArray<nsTArray<float>,2> resampledBuffers;
|
||||
resampledBuffers.SetLength(aBuffers.Length());
|
||||
nsTArray<float> samplesAdjustedForVolume;
|
||||
nsAutoTArray<const float*,2> bufferPtrs;
|
||||
bufferPtrs.SetLength(aBuffers.Length());
|
||||
|
||||
for (uint32_t i = 0; i < aBuffers.Length(); ++i) {
|
||||
AudioSampleFormat format = aFormat;
|
||||
const void* buffer = aBuffers[i];
|
||||
|
||||
if (aVolume != 1.0f) {
|
||||
format = AUDIO_FORMAT_FLOAT32;
|
||||
samplesAdjustedForVolume.SetLength(aInputDuration);
|
||||
switch (aFormat) {
|
||||
case AUDIO_FORMAT_FLOAT32:
|
||||
ConvertAudioSamplesWithScale(static_cast<const float*>(buffer),
|
||||
samplesAdjustedForVolume.Elements(),
|
||||
aInputDuration, aVolume);
|
||||
break;
|
||||
case AUDIO_FORMAT_S16:
|
||||
ConvertAudioSamplesWithScale(static_cast<const int16_t*>(buffer),
|
||||
samplesAdjustedForVolume.Elements(),
|
||||
aInputDuration, aVolume);
|
||||
break;
|
||||
default:
|
||||
MOZ_ASSERT(false);
|
||||
return;
|
||||
}
|
||||
buffer = samplesAdjustedForVolume.Elements();
|
||||
}
|
||||
|
||||
switch (format) {
|
||||
case AUDIO_FORMAT_FLOAT32:
|
||||
ResampleChannelBuffer(mResampler, i,
|
||||
static_cast<const float*>(buffer),
|
||||
aInputDuration, &resampledBuffers[i]);
|
||||
break;
|
||||
case AUDIO_FORMAT_S16:
|
||||
ResampleChannelBuffer(mResampler, i,
|
||||
static_cast<const int16_t*>(buffer),
|
||||
aInputDuration, &resampledBuffers[i]);
|
||||
break;
|
||||
default:
|
||||
MOZ_ASSERT(false);
|
||||
return;
|
||||
}
|
||||
bufferPtrs[i] = resampledBuffers[i].Elements();
|
||||
NS_ASSERTION(i == 0 ||
|
||||
resampledBuffers[i].Length() == resampledBuffers[0].Length(),
|
||||
"Resampler made different decisions for different channels!");
|
||||
}
|
||||
|
||||
uint32_t length = resampledBuffers[0].Length();
|
||||
nsRefPtr<ThreadSharedObject> buf = new SharedChannelArrayBuffer(&resampledBuffers);
|
||||
mResampledData.AppendFrames(buf.forget(), bufferPtrs, length);
|
||||
}
|
||||
|
||||
void
|
||||
AudioNodeExternalInputStream::TrackMapEntry::ResampleInputData(AudioSegment* aSegment)
|
||||
{
|
||||
AudioSegment::ChunkIterator ci(*aSegment);
|
||||
while (!ci.IsEnded()) {
|
||||
const AudioChunk& chunk = *ci;
|
||||
nsAutoTArray<const void*,2> channels;
|
||||
if (chunk.GetDuration() > UINT32_MAX) {
|
||||
// This will cause us to OOM or overflow below. So let's just bail.
|
||||
NS_ERROR("Chunk duration out of bounds");
|
||||
return;
|
||||
}
|
||||
uint32_t duration = uint32_t(chunk.GetDuration());
|
||||
|
||||
if (chunk.IsNull()) {
|
||||
nsAutoTArray<AudioDataValue,1024> silence;
|
||||
silence.SetLength(duration);
|
||||
PodZero(silence.Elements(), silence.Length());
|
||||
channels.SetLength(mResamplerChannelCount);
|
||||
for (uint32_t i = 0; i < channels.Length(); ++i) {
|
||||
channels[i] = silence.Elements();
|
||||
}
|
||||
ResampleChannels(channels, duration, AUDIO_OUTPUT_FORMAT, 0.0f);
|
||||
} else if (chunk.mChannelData.Length() == mResamplerChannelCount) {
|
||||
// Common case, since mResamplerChannelCount is set to the first chunk's
|
||||
// number of channels.
|
||||
channels.AppendElements(chunk.mChannelData);
|
||||
ResampleChannels(channels, duration, chunk.mBufferFormat, chunk.mVolume);
|
||||
} else {
|
||||
// Uncommon case. Since downmixing requires channels to be floats,
|
||||
// convert everything to floats now.
|
||||
uint32_t upChannels = GetAudioChannelsSuperset(chunk.mChannelData.Length(), mResamplerChannelCount);
|
||||
nsTArray<float> buffer;
|
||||
if (chunk.mBufferFormat == AUDIO_FORMAT_FLOAT32) {
|
||||
channels.AppendElements(chunk.mChannelData);
|
||||
} else {
|
||||
NS_ASSERTION(chunk.mBufferFormat == AUDIO_FORMAT_S16, "Unknown format");
|
||||
if (duration > UINT32_MAX/chunk.mChannelData.Length()) {
|
||||
NS_ERROR("Chunk duration out of bounds");
|
||||
return;
|
||||
}
|
||||
buffer.SetLength(chunk.mChannelData.Length()*duration);
|
||||
for (uint32_t i = 0; i < chunk.mChannelData.Length(); ++i) {
|
||||
const int16_t* samples = static_cast<const int16_t*>(chunk.mChannelData[i]);
|
||||
float* converted = &buffer[i*duration];
|
||||
for (uint32_t j = 0; j < duration; ++j) {
|
||||
converted[j] = AudioSampleToFloat(samples[j]);
|
||||
}
|
||||
channels.AppendElement(converted);
|
||||
}
|
||||
}
|
||||
nsTArray<float> zeroes;
|
||||
if (channels.Length() < upChannels) {
|
||||
zeroes.SetLength(duration);
|
||||
PodZero(zeroes.Elements(), zeroes.Length());
|
||||
AudioChannelsUpMix(&channels, upChannels, zeroes.Elements());
|
||||
}
|
||||
if (channels.Length() == mResamplerChannelCount) {
|
||||
ResampleChannels(channels, duration, AUDIO_FORMAT_FLOAT32, chunk.mVolume);
|
||||
} else {
|
||||
nsTArray<float> output;
|
||||
if (duration > UINT32_MAX/mResamplerChannelCount) {
|
||||
NS_ERROR("Chunk duration out of bounds");
|
||||
return;
|
||||
}
|
||||
output.SetLength(duration*mResamplerChannelCount);
|
||||
nsAutoTArray<float*,2> outputPtrs;
|
||||
nsAutoTArray<const void*,2> outputPtrsConst;
|
||||
for (uint32_t i = 0; i < mResamplerChannelCount; ++i) {
|
||||
outputPtrs.AppendElement(output.Elements() + i*duration);
|
||||
outputPtrsConst.AppendElement(outputPtrs[i]);
|
||||
}
|
||||
AudioChannelsDownMix(channels, outputPtrs.Elements(), outputPtrs.Length(), duration);
|
||||
ResampleChannels(outputPtrsConst, duration, AUDIO_FORMAT_FLOAT32, chunk.mVolume);
|
||||
}
|
||||
}
|
||||
ci.Next();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies the data in aInput to aOffsetInBlock within aBlock. All samples must
|
||||
* be float. Both chunks must have the same number of channels (or else
|
||||
* aInput is null). aBlock must have been allocated with AllocateInputBlock.
|
||||
*/
|
||||
static void
|
||||
CopyChunkToBlock(const AudioChunk& aInput, AudioChunk *aBlock, uint32_t aOffsetInBlock)
{
  uint32_t d = aInput.GetDuration();
  for (uint32_t i = 0; i < aBlock->mChannelData.Length(); ++i) {
    float* out = static_cast<float*>(const_cast<void*>(aBlock->mChannelData[i])) +
                 aOffsetInBlock;
    if (aInput.IsNull()) {
      PodZero(out, d);
    } else {
      const float* in = static_cast<const float*>(aInput.mChannelData[i]);
      ConvertAudioSamplesWithScale(in, out, d, aInput.mVolume);
    }
  }
}

/**
 * Converts the data in aSegment to a single chunk aChunk. Every chunk in
 * aSegment must have the same number of channels (or be null). aSegment must have
 * duration WEBAUDIO_BLOCK_SIZE. Every chunk in aSegment must be in float format.
 */
static void
ConvertSegmentToAudioBlock(AudioSegment* aSegment, AudioChunk* aBlock)
{
  NS_ASSERTION(aSegment->GetDuration() == WEBAUDIO_BLOCK_SIZE, "Bad segment duration");

  {
    AudioSegment::ChunkIterator ci(*aSegment);
    NS_ASSERTION(!ci.IsEnded(), "Segment must have at least one chunk");
    AudioChunk& firstChunk = *ci;
    ci.Next();
    if (ci.IsEnded()) {
      *aBlock = firstChunk;
      return;
    }

    while (ci->IsNull() && !ci.IsEnded()) {
      ci.Next();
    }
    if (ci.IsEnded()) {
      // All null.
      aBlock->SetNull(WEBAUDIO_BLOCK_SIZE);
      return;
    }

    AllocateAudioBlock(ci->mChannelData.Length(), aBlock);
  }

  AudioSegment::ChunkIterator ci(*aSegment);
  uint32_t duration = 0;
  while (!ci.IsEnded()) {
    CopyChunkToBlock(*ci, aBlock, duration);
    duration += ci->GetDuration();
    ci.Next();
  }
}

void
AudioNodeExternalInputStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
{
  // According to spec, number of outputs is always 1.
  mLastChunks.SetLength(1);

  // GC stuff can result in our input stream being destroyed before this stream.
  // Handle that.
  if (mInputs.IsEmpty()) {
    mLastChunks[0].SetNull(WEBAUDIO_BLOCK_SIZE);
    AdvanceOutputSegment();
    return;
  }

  MOZ_ASSERT(mInputs.Length() == 1);

  MediaStream* source = mInputs[0]->GetSource();
  nsAutoTArray<AudioSegment,1> audioSegments;
  nsAutoTArray<bool,1> trackMapEntriesUsed;
  uint32_t inputChannels = 0;
  for (StreamBuffer::TrackIter tracks(source->mBuffer, MediaSegment::AUDIO);
       !tracks.IsEnded(); tracks.Next()) {
    const StreamBuffer::Track& inputTrack = *tracks;
    // Create a TrackMapEntry if necessary.
    uint32_t trackMapIndex = GetTrackMapEntry(inputTrack, aFrom);
    // Maybe there's nothing in this track yet. If so, ignore it. (While the
    // track is only playing silence, we may not be able to determine the
    // correct number of channels to start resampling.)
    if (trackMapIndex == nsTArray<TrackMapEntry>::NoIndex) {
      continue;
    }

    while (trackMapEntriesUsed.Length() <= trackMapIndex) {
      trackMapEntriesUsed.AppendElement(false);
    }
    trackMapEntriesUsed[trackMapIndex] = true;

    TrackMapEntry* trackMap = &mTrackMap[trackMapIndex];
    AudioSegment segment;
    GraphTime next;
    TrackRate inputTrackRate = inputTrack.GetRate();
    for (GraphTime t = aFrom; t < aTo; t = next) {
      MediaInputPort::InputInterval interval = mInputs[0]->GetNextInputInterval(t);
      interval.mEnd = std::min(interval.mEnd, aTo);
      if (interval.mStart >= interval.mEnd)
        break;
      next = interval.mEnd;

      // Ticks >= startTicks and < endTicks are in the interval
      StreamTime outputEnd = GraphTimeToStreamTime(interval.mEnd);
      TrackTicks startTicks = trackMap->mSamplesPassedToResampler + segment.GetDuration();
      StreamTime outputStart = GraphTimeToStreamTime(interval.mStart);
      NS_ASSERTION(startTicks == TimeToTicksRoundUp(inputTrackRate, outputStart),
                   "Samples missing");
      TrackTicks endTicks = TimeToTicksRoundUp(inputTrackRate, outputEnd);
      TrackTicks ticks = endTicks - startTicks;

      if (interval.mInputIsBlocked) {
        segment.AppendNullData(ticks);
      } else {
        // See comments in TrackUnionStream::CopyTrackData
        StreamTime inputStart = source->GraphTimeToStreamTime(interval.mStart);
        StreamTime inputEnd = source->GraphTimeToStreamTime(interval.mEnd);
        TrackTicks inputTrackEndPoint =
          inputTrack.IsEnded() ? inputTrack.GetEnd() : TRACK_TICKS_MAX;

        if (trackMap->mEndOfLastInputIntervalInInputStream != inputStart ||
            trackMap->mEndOfLastInputIntervalInOutputStream != outputStart) {
          // Start of a new series of intervals where neither stream is blocked.
          trackMap->mEndOfConsumedInputTicks = TimeToTicksRoundDown(inputTrackRate, inputStart) - 1;
        }
        TrackTicks inputStartTicks = trackMap->mEndOfConsumedInputTicks;
        TrackTicks inputEndTicks = inputStartTicks + ticks;
        trackMap->mEndOfConsumedInputTicks = inputEndTicks;
        trackMap->mEndOfLastInputIntervalInInputStream = inputEnd;
        trackMap->mEndOfLastInputIntervalInOutputStream = outputEnd;

        if (inputStartTicks < 0) {
          // Data before the start of the track is just null.
          segment.AppendNullData(-inputStartTicks);
          inputStartTicks = 0;
        }
        if (inputEndTicks > inputStartTicks) {
          segment.AppendSlice(*inputTrack.GetSegment(),
                              std::min(inputTrackEndPoint, inputStartTicks),
                              std::min(inputTrackEndPoint, inputEndTicks));
        }
        // Pad if we're looking past the end of the track
        segment.AppendNullData(std::max<TrackTicks>(0, inputEndTicks - inputTrackEndPoint));
      }
    }

    trackMap->mSamplesPassedToResampler += segment.GetDuration();
    trackMap->ResampleInputData(&segment);

    if (trackMap->mResampledData.GetDuration() < mCurrentOutputPosition + WEBAUDIO_BLOCK_SIZE) {
      // We don't have enough data. Delay it.
      trackMap->mResampledData.InsertNullDataAtStart(
        mCurrentOutputPosition + WEBAUDIO_BLOCK_SIZE - trackMap->mResampledData.GetDuration());
    }
    audioSegments.AppendElement()->AppendSlice(trackMap->mResampledData,
      mCurrentOutputPosition, mCurrentOutputPosition + WEBAUDIO_BLOCK_SIZE);
    trackMap->mResampledData.ForgetUpTo(mCurrentOutputPosition + WEBAUDIO_BLOCK_SIZE);
    inputChannels = GetAudioChannelsSuperset(inputChannels, trackMap->mResamplerChannelCount);
  }

  for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
    if (i >= int32_t(trackMapEntriesUsed.Length()) || !trackMapEntriesUsed[i]) {
      mTrackMap.RemoveElementAt(i);
    }
  }

  uint32_t outputChannels = ComputeFinalOuputChannelCount(inputChannels);

  if (outputChannels) {
    AllocateAudioBlock(outputChannels, &mLastChunks[0]);
    nsAutoTArray<float,GUESS_AUDIO_CHANNELS*WEBAUDIO_BLOCK_SIZE> downmixBuffer;
    for (uint32_t i = 0; i < audioSegments.Length(); ++i) {
      AudioChunk tmpChunk;
      ConvertSegmentToAudioBlock(&audioSegments[i], &tmpChunk);
      if (!tmpChunk.IsNull()) {
        AccumulateInputChunk(i, tmpChunk, &mLastChunks[0], &downmixBuffer);
      }
    }
  } else {
    mLastChunks[0].SetNull(WEBAUDIO_BLOCK_SIZE);
  }
  mCurrentOutputPosition += WEBAUDIO_BLOCK_SIZE;

  // Using AudioNodeStream's AdvanceOutputSegment to push the media stream graph along with null data.
  AdvanceOutputSegment();
}

}
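A minimal standalone sketch of the accumulation that CopyChunkToBlock and ConvertSegmentToAudioBlock perform above: variable-length chunks are copied into one fixed-size block at a running offset, with the per-chunk volume applied. This assumes a 128-frame block and plain single-channel float buffers in place of Gecko's AudioChunk; it is an illustration, not the patched code.

// Standalone sketch (not Gecko code): copy variable-length chunks into one
// 128-frame block, applying per-chunk volume, as CopyChunkToBlock does.
#include <algorithm>
#include <cstring>
#include <vector>

static const unsigned kBlockSize = 128; // assumed WEBAUDIO_BLOCK_SIZE

struct Chunk { const float* data; unsigned frames; float volume; }; // data == nullptr means silence

void AssembleBlock(const std::vector<Chunk>& aChunks, float* aBlock /* kBlockSize frames */)
{
  unsigned offset = 0;
  for (const Chunk& c : aChunks) {
    unsigned n = std::min(c.frames, kBlockSize - offset);
    if (!c.data) {
      std::memset(aBlock + offset, 0, n * sizeof(float)); // null chunk becomes zeros (PodZero)
    } else {
      for (unsigned i = 0; i < n; ++i) {
        aBlock[offset + i] = c.data[i] * c.volume;        // ConvertAudioSamplesWithScale
      }
    }
    offset += n;
  }
}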
content/media/AudioNodeExternalInputStream.h (new file, 111 lines)
@ -0,0 +1,111 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef MOZILLA_AUDIONODEEXTERNALINPUTSTREAM_H_
#define MOZILLA_AUDIONODEEXTERNALINPUTSTREAM_H_

#include "MediaStreamGraph.h"
#include "AudioChannelFormat.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "mozilla/dom/AudioParam.h"
#include <deque>

#ifdef PR_LOGGING
#define LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
#else
#define LOG(type, msg)
#endif

// Forward declaration for mResamplerMap
typedef struct SpeexResamplerState_ SpeexResamplerState;

namespace mozilla {

/**
 * This is a MediaStream implementation that acts for a Web Audio node but
 * unlike other AudioNodeStreams, supports any kind of MediaStream as an
 * input --- handling any number of audio tracks, resampling them from whatever
 * sample rate they're using, and handling blocking of the input MediaStream.
 */
class AudioNodeExternalInputStream : public AudioNodeStream {
public:
  AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate);
  ~AudioNodeExternalInputStream();

  virtual void ProduceOutput(GraphTime aFrom, GraphTime aTo) MOZ_OVERRIDE;

private:
  // For storing pointers and data about input tracks, like the last TrackTick which
  // was read, and the associated speex resampler.
  struct TrackMapEntry {
    ~TrackMapEntry();

    /**
     * Resamples data from all chunks in aIterator and following, using mResampler,
     * adding the results to mResampledData.
     */
    void ResampleInputData(AudioSegment* aSegment);
    /**
     * Resamples a set of channel buffers using mResampler, adding the results
     * to mResampledData.
     */
    void ResampleChannels(const nsTArray<const void*>& aBuffers,
                          uint32_t aInputDuration,
                          AudioSampleFormat aFormat,
                          float aVolume);

    // mEndOfConsumedInputTicks is the end of the input ticks that we've consumed.
    // 0 if we haven't consumed any yet.
    TrackTicks mEndOfConsumedInputTicks;
    // mEndOfLastInputIntervalInInputStream is the timestamp for the end of the
    // previous interval which was unblocked for both the input and output
    // stream, in the input stream's timeline, or -1 if there wasn't one.
    StreamTime mEndOfLastInputIntervalInInputStream;
    // mEndOfLastInputIntervalInOutputStream is the timestamp for the end of the
    // previous interval which was unblocked for both the input and output
    // stream, in the output stream's timeline, or -1 if there wasn't one.
    StreamTime mEndOfLastInputIntervalInOutputStream;
    /**
     * Number of samples passed to the resampler so far.
     */
    TrackTicks mSamplesPassedToResampler;
    /**
     * Resampler being applied to this track.
     */
    SpeexResamplerState* mResampler;
    /**
     * The track data that has been resampled to the rate of the
     * AudioNodeExternalInputStream. All data in these chunks is in floats (or null),
     * and has the number of channels given in mResamplerChannelCount.
     * mResampledData starts at zero in the stream's output track (so generally
     * it will consist of null data followed by actual data).
     */
    AudioSegment mResampledData;
    /**
     * Number of channels used to create mResampler.
     */
    uint32_t mResamplerChannelCount;
    /**
     * The ID for the track of the input stream this entry is for.
     */
    TrackID mTrackID;
  };

  nsTArray<TrackMapEntry> mTrackMap;
  // Amount of track data produced so far. A multiple of WEBAUDIO_BLOCK_SIZE.
  TrackTicks mCurrentOutputPosition;

  /**
   * Creates a TrackMapEntry for the track, if needed. Returns the index
   * of the TrackMapEntry or NoIndex if no entry is needed yet.
   */
  uint32_t GetTrackMapEntry(const StreamBuffer::Track& aTrack,
                            GraphTime aFrom);
};

}

#endif /* MOZILLA_AUDIONODESTREAM_H_ */
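The TrackMapEntry members declared above track how much input has been fed to the per-track resampler and what has come out of it. A standalone sketch of that bookkeeping follows, with a trivial linear-interpolation resampler standing in for the Speex resampler the real code drives through mResampler; names and the interpolation scheme are chosen here purely for illustration.

// Standalone sketch (not Gecko code): per-track resampler state as modelled
// by TrackMapEntry, with linear interpolation as a stand-in resampler.
#include <cstdint>
#include <vector>

struct ToyTrackMap {
  int64_t mSamplesPassedToResampler = 0;  // input frames handed to the resampler
  std::vector<float> mResampledData;      // output-rate samples produced so far
  double mRatio;                          // outputRate / inputRate

  ToyTrackMap(double aInputRate, double aOutputRate)
    : mRatio(aOutputRate / aInputRate) {}

  void ResampleInputData(const std::vector<float>& aInput)
  {
    // Linear interpolation over the new input; the real code streams the
    // data through the Speex resampler instead.
    for (double pos = 0.0; pos + 1.0 < aInput.size(); pos += 1.0 / mRatio) {
      size_t i = static_cast<size_t>(pos);
      double frac = pos - i;
      mResampledData.push_back(
        static_cast<float>(aInput[i] * (1.0 - frac) + aInput[i + 1] * frac));
    }
    mSamplesPassedToResampler += static_cast<int64_t>(aInput.size());
  }
};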
@ -248,6 +248,24 @@ AudioNodeStream::AllInputsFinished() const
  return !!inputCount;
}

uint32_t
AudioNodeStream::ComputeFinalOuputChannelCount(uint32_t aInputChannelCount)
{
  switch (mChannelCountMode) {
  case ChannelCountMode::Explicit:
    // Disregard the channel count we've calculated from inputs, and just use
    // mNumberOfInputChannels.
    return mNumberOfInputChannels;
  case ChannelCountMode::Clamped_max:
    // Clamp the computed output channel count to mNumberOfInputChannels.
    return std::min(aInputChannelCount, mNumberOfInputChannels);
  default:
  case ChannelCountMode::Max:
    // Nothing to do here, just shut up the compiler warning.
    return aInputChannelCount;
  }
}

void
AudioNodeStream::ObtainInputBlock(AudioChunk& aTmpChunk, uint32_t aPortIndex)
{
@ -277,20 +295,7 @@ AudioNodeStream::ObtainInputBlock(AudioChunk& aTmpChunk, uint32_t aPortIndex)
      GetAudioChannelsSuperset(outputChannelCount, chunk->mChannelData.Length());
  }

  switch (mChannelCountMode) {
  case ChannelCountMode::Explicit:
    // Disregard the output channel count that we've calculated, and just use
    // mNumberOfInputChannels.
    outputChannelCount = mNumberOfInputChannels;
    break;
  case ChannelCountMode::Clamped_max:
    // Clamp the computed output channel count to mNumberOfInputChannels.
    outputChannelCount = std::min(outputChannelCount, mNumberOfInputChannels);
    break;
  case ChannelCountMode::Max:
    // Nothing to do here, just shut up the compiler warning.
    break;
  }
  outputChannelCount = ComputeFinalOuputChannelCount(outputChannelCount);

  uint32_t inputChunkCount = inputChunks.Length();
  if (inputChunkCount == 0 ||
@ -311,63 +316,80 @@ AudioNodeStream::ObtainInputBlock(AudioChunk& aTmpChunk, uint32_t aPortIndex)
  }

  AllocateAudioBlock(outputChannelCount, &aTmpChunk);
  float silenceChannel[WEBAUDIO_BLOCK_SIZE] = {0.f};
  // The static storage here should be 1KB, so it's fine
  nsAutoTArray<float, GUESS_AUDIO_CHANNELS*WEBAUDIO_BLOCK_SIZE> downmixBuffer;

  for (uint32_t i = 0; i < inputChunkCount; ++i) {
    AudioChunk* chunk = inputChunks[i];
    nsAutoTArray<const void*,GUESS_AUDIO_CHANNELS> channels;
    channels.AppendElements(chunk->mChannelData);
    if (channels.Length() < outputChannelCount) {
      if (mChannelInterpretation == ChannelInterpretation::Speakers) {
        AudioChannelsUpMix(&channels, outputChannelCount, nullptr);
        NS_ASSERTION(outputChannelCount == channels.Length(),
                     "We called GetAudioChannelsSuperset to avoid this");
    AccumulateInputChunk(i, *inputChunks[i], &aTmpChunk, &downmixBuffer);
  }
}

void
AudioNodeStream::AccumulateInputChunk(uint32_t aInputIndex, const AudioChunk& aChunk,
                                      AudioChunk* aBlock,
                                      nsTArray<float>* aDownmixBuffer)
{
  nsAutoTArray<const void*,GUESS_AUDIO_CHANNELS> channels;
  UpMixDownMixChunk(&aChunk, aBlock->mChannelData.Length(), channels, *aDownmixBuffer);

  for (uint32_t c = 0; c < channels.Length(); ++c) {
    const float* inputData = static_cast<const float*>(channels[c]);
    float* outputData = static_cast<float*>(const_cast<void*>(aBlock->mChannelData[c]));
    if (inputData) {
      if (aInputIndex == 0) {
        AudioBlockCopyChannelWithScale(inputData, aChunk.mVolume, outputData);
      } else {
        // Fill up the remaining channels by zeros
        for (uint32_t j = channels.Length(); j < outputChannelCount; ++j) {
          channels.AppendElement(silenceChannel);
        }
        AudioBlockAddChannelWithScale(inputData, aChunk.mVolume, outputData);
      }
    } else if (channels.Length() > outputChannelCount) {
      if (mChannelInterpretation == ChannelInterpretation::Speakers) {
        nsAutoTArray<float*,GUESS_AUDIO_CHANNELS> outputChannels;
        outputChannels.SetLength(outputChannelCount);
        downmixBuffer.SetLength(outputChannelCount * WEBAUDIO_BLOCK_SIZE);
        for (uint32_t j = 0; j < outputChannelCount; ++j) {
          outputChannels[j] = &downmixBuffer[j * WEBAUDIO_BLOCK_SIZE];
        }

        AudioChannelsDownMix(channels, outputChannels.Elements(),
                             outputChannelCount, WEBAUDIO_BLOCK_SIZE);

        channels.SetLength(outputChannelCount);
        for (uint32_t j = 0; j < channels.Length(); ++j) {
          channels[j] = outputChannels[j];
        }
      } else {
        // Drop the remaining channels
        channels.RemoveElementsAt(outputChannelCount,
                                  channels.Length() - outputChannelCount);
    } else {
      if (aInputIndex == 0) {
        PodZero(outputData, WEBAUDIO_BLOCK_SIZE);
      }
    }
  }
}

    for (uint32_t c = 0; c < channels.Length(); ++c) {
      const float* inputData = static_cast<const float*>(channels[c]);
      float* outputData = static_cast<float*>(const_cast<void*>(aTmpChunk.mChannelData[c]));
      if (inputData) {
        if (i == 0) {
          AudioBlockCopyChannelWithScale(inputData, chunk->mVolume, outputData);
        } else {
          AudioBlockAddChannelWithScale(inputData, chunk->mVolume, outputData);
        }
      } else {
        if (i == 0) {
          memset(outputData, 0, WEBAUDIO_BLOCK_SIZE*sizeof(float));
        }
void
AudioNodeStream::UpMixDownMixChunk(const AudioChunk* aChunk,
                                   uint32_t aOutputChannelCount,
                                   nsTArray<const void*>& aOutputChannels,
                                   nsTArray<float>& aDownmixBuffer)
{
  static const float silenceChannel[WEBAUDIO_BLOCK_SIZE] = {0.f};

  aOutputChannels.AppendElements(aChunk->mChannelData);
  if (aOutputChannels.Length() < aOutputChannelCount) {
    if (mChannelInterpretation == ChannelInterpretation::Speakers) {
      AudioChannelsUpMix(&aOutputChannels, aOutputChannelCount, nullptr);
      NS_ASSERTION(aOutputChannelCount == aOutputChannels.Length(),
                   "We called GetAudioChannelsSuperset to avoid this");
    } else {
      // Fill up the remaining aOutputChannels by zeros
      for (uint32_t j = aOutputChannels.Length(); j < aOutputChannelCount; ++j) {
        aOutputChannels.AppendElement(silenceChannel);
      }
    }
  } else if (aOutputChannels.Length() > aOutputChannelCount) {
    if (mChannelInterpretation == ChannelInterpretation::Speakers) {
      nsAutoTArray<float*,GUESS_AUDIO_CHANNELS> outputChannels;
      outputChannels.SetLength(aOutputChannelCount);
      aDownmixBuffer.SetLength(aOutputChannelCount * WEBAUDIO_BLOCK_SIZE);
      for (uint32_t j = 0; j < aOutputChannelCount; ++j) {
        outputChannels[j] = &aDownmixBuffer[j * WEBAUDIO_BLOCK_SIZE];
      }

      AudioChannelsDownMix(aOutputChannels, outputChannels.Elements(),
                           aOutputChannelCount, WEBAUDIO_BLOCK_SIZE);

      aOutputChannels.SetLength(aOutputChannelCount);
      for (uint32_t j = 0; j < aOutputChannels.Length(); ++j) {
        aOutputChannels[j] = outputChannels[j];
      }
    } else {
      // Drop the remaining aOutputChannels
      aOutputChannels.RemoveElementsAt(aOutputChannelCount,
                                       aOutputChannels.Length() - aOutputChannelCount);
    }
  }
}

@ -383,9 +405,7 @@ AudioNodeStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
    FinishOutput();
  }

  StreamBuffer::Track* track = EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate);

  AudioSegment* segment = track->Get<AudioSegment>();
  EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate);

  uint16_t outputCount = std::max(uint16_t(1), mEngine->OutputCount());
  mLastChunks.SetLength(outputCount);
@ -424,6 +444,15 @@ AudioNodeStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
    }
  }

  AdvanceOutputSegment();
}

void
AudioNodeStream::AdvanceOutputSegment()
{
  StreamBuffer::Track* track = EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate);
  AudioSegment* segment = track->Get<AudioSegment>();

  if (mKind == MediaStreamGraph::EXTERNAL_STREAM) {
    segment->AppendAndConsumeChunk(&mLastChunks[0]);
  } else {

@ -114,14 +114,26 @@ public:
    return (mKind == MediaStreamGraph::SOURCE_STREAM && mFinished) ||
        mKind == MediaStreamGraph::EXTERNAL_STREAM;
  }
  virtual bool IsIntrinsicallyConsumed() const MOZ_OVERRIDE
  {
    return true;
  }

  // Any thread
  AudioNodeEngine* Engine() { return mEngine; }
  TrackRate SampleRate() const { return mSampleRate; }

protected:
  void AdvanceOutputSegment();
  void FinishOutput();
  void AccumulateInputChunk(uint32_t aInputIndex, const AudioChunk& aChunk,
                            AudioChunk* aBlock,
                            nsTArray<float>* aDownmixBuffer);
  void UpMixDownMixChunk(const AudioChunk* aChunk, uint32_t aOutputChannelCount,
                         nsTArray<const void*>& aOutputChannels,
                         nsTArray<float>& aDownmixBuffer);

  uint32_t ComputeFinalOuputChannelCount(uint32_t aInputChannelCount);
  void ObtainInputBlock(AudioChunk& aTmpChunk, uint32_t aPortIndex);

  // The engine that will generate output for this node.
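ComputeFinalOuputChannelCount, added above, centralizes the ChannelCountMode handling that ObtainInputBlock previously open-coded, so the external-input stream can reuse it. A standalone sketch of the same decision, with illustrative names in place of the Gecko types:

// Standalone sketch (not Gecko code) of the channelCountMode decision that
// ComputeFinalOuputChannelCount factors out of ObtainInputBlock.
#include <algorithm>
#include <cstdint>

enum class ChannelCountMode { Max, ClampedMax, Explicit };

uint32_t FinalChannelCount(ChannelCountMode aMode,
                           uint32_t aComputedFromInputs,
                           uint32_t aNumberOfInputChannels)
{
  switch (aMode) {
    case ChannelCountMode::Explicit:
      // Ignore what the inputs suggest; use the node's configured count.
      return aNumberOfInputChannels;
    case ChannelCountMode::ClampedMax:
      // Never exceed the configured count.
      return std::min(aComputedFromInputs, aNumberOfInputChannels);
    case ChannelCountMode::Max:
    default:
      // Use the superset computed from the inputs unchanged.
      return aComputedFromInputs;
  }
}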
@ -31,11 +31,13 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(DOMMediaStream)
  tmp->Destroy();
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive)
  NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(DOMMediaStream)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCACHE(DOMMediaStream)
@ -288,6 +290,16 @@ DOMMediaStream::NotifyMediaStreamGraphShutdown()
  // to prevent leaks.
  mNotifiedOfMediaStreamGraphShutdown = true;
  mRunOnTracksAvailable.Clear();

  mConsumersToKeepAlive.Clear();
}

void
DOMMediaStream::NotifyStreamStateChanged()
{
  if (IsFinished()) {
    mConsumersToKeepAlive.Clear();
  }
}

void
@ -68,10 +68,12 @@ public:

  // WebIDL
  double CurrentTime();

  void GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks);
  void GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks);

  MediaStream* GetStream() { return mStream; }
  MediaStream* GetStream() const { return mStream; }

  bool IsFinished();
  /**
   * Returns a principal indicating who may access this stream. The stream contents
@ -93,6 +95,10 @@ public:
   * will only be called during a forced shutdown due to application exit.
   */
  void NotifyMediaStreamGraphShutdown();
  /**
   * Called when the main-thread state of the MediaStream changed.
   */
  void NotifyStreamStateChanged();

  // Indicate what track types we eventually expect to add to this stream
  enum {
@ -145,6 +151,17 @@ public:
  // Takes ownership of aCallback.
  void OnTracksAvailable(OnTracksAvailableCallback* aCallback);

  /**
   * Add an nsISupports object that this stream will keep alive as long as
   * the stream is not finished.
   */
  void AddConsumerToKeepAlive(nsISupports* aConsumer)
  {
    if (!IsFinished() && !mNotifiedOfMediaStreamGraphShutdown) {
      mConsumersToKeepAlive.AppendElement(aConsumer);
    }
  }

protected:
  void Destroy();
  void InitSourceStream(nsIDOMWindow* aWindow, TrackTypeHints aHintContents);
@ -173,6 +190,9 @@ protected:

  nsTArray<nsAutoPtr<OnTracksAvailableCallback> > mRunOnTracksAvailable;

  // Keep these alive until the stream finishes
  nsTArray<nsCOMPtr<nsISupports> > mConsumersToKeepAlive;

  // Indicate what track types we eventually expect to add to this stream
  uint8_t mHintContents;
  // Indicate what track types have been added to this stream
@ -368,6 +368,7 @@ MediaDecoder::MediaDecoder() :
  mDuration(-1),
  mTransportSeekable(true),
  mMediaSeekable(true),
  mSameOriginMedia(false),
  mReentrantMonitor("media.decoder"),
  mIsDormant(false),
  mPlayState(PLAY_STATE_PAUSED),
@ -445,8 +446,7 @@ MediaDecoder::~MediaDecoder()
  MOZ_COUNT_DTOR(MediaDecoder);
}

nsresult MediaDecoder::OpenResource(MediaResource* aResource,
                                    nsIStreamListener** aStreamListener)
nsresult MediaDecoder::OpenResource(nsIStreamListener** aStreamListener)
{
  MOZ_ASSERT(NS_IsMainThread());
  if (aStreamListener) {
@ -459,24 +459,21 @@ nsresult MediaDecoder::OpenResource(MediaResource* aResource,
    // should be grabbed before the cache lock
    ReentrantMonitorAutoEnter mon(GetReentrantMonitor());

    nsresult rv = aResource->Open(aStreamListener);
    nsresult rv = mResource->Open(aStreamListener);
    if (NS_FAILED(rv)) {
      LOG(PR_LOG_DEBUG, ("%p Failed to open stream!", this));
      return rv;
    }

    mResource = aResource;
  }
  return NS_OK;
}

nsresult MediaDecoder::Load(MediaResource* aResource,
                            nsIStreamListener** aStreamListener,
                            MediaDecoder* aCloneDonor)
nsresult MediaDecoder::Load(nsIStreamListener** aStreamListener,
                            MediaDecoder* aCloneDonor)
{
  MOZ_ASSERT(NS_IsMainThread());

  nsresult rv = OpenResource(aResource, aStreamListener);
  nsresult rv = OpenResource(aStreamListener);
  NS_ENSURE_SUCCESS(rv, rv);

  mDecoderStateMachine = CreateStateMachine();
@ -842,6 +839,18 @@ void MediaDecoder::DecodeError()
  Shutdown();
}

void MediaDecoder::UpdateSameOriginStatus(bool aSameOrigin)
{
  ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
  mSameOriginMedia = aSameOrigin;
}

bool MediaDecoder::IsSameOriginMedia()
{
  GetReentrantMonitor().AssertCurrentThreadIn();
  return mSameOriginMedia;
}

bool MediaDecoder::IsSeeking() const
{
  MOZ_ASSERT(NS_IsMainThread());
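After this change the resource is attached to the decoder before loading instead of being passed into Load. A standalone sketch of the resulting ownership flow, using stand-in types rather than the real MediaDecoder/MediaResource classes:

// Standalone sketch (not Gecko code): the resource is set on the decoder up
// front, and Load()/OpenResource() now operate on that member internally.
#include <memory>
#include <utility>

struct Resource { bool Open() { return true; } }; // stand-in for MediaResource

class Decoder {                                    // stand-in for MediaDecoder
public:
  void SetResource(std::unique_ptr<Resource> aResource) { mResource = std::move(aResource); }
  bool Load() { return OpenResource(); }           // was Load(resource, ...) before the patch
private:
  bool OpenResource() { return mResource && mResource->Open(); } // opens mResource internally
  std::unique_ptr<Resource> mResource;
};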
@ -277,16 +277,12 @@ public:

  // Start downloading the media. Decode the downloaded data up to the
  // point of the first frame of data.
  // aResource is the media stream to use. Ownership of aResource passes to
  // the decoder, even if Load returns an error.
  // This is called at most once per decoder, after Init().
  virtual nsresult Load(MediaResource* aResource,
                        nsIStreamListener** aListener,
  virtual nsresult Load(nsIStreamListener** aListener,
                        MediaDecoder* aCloneDonor);

  // Called in |Load| to open the media resource.
  nsresult OpenResource(MediaResource* aResource,
                        nsIStreamListener** aStreamListener);
  // Called in |Load| to open mResource.
  nsresult OpenResource(nsIStreamListener** aStreamListener);

  // Called when the video file has completed downloading.
  virtual void ResourceLoaded();
@ -306,6 +302,11 @@ public:
  {
    return mResource;
  }
  void SetResource(MediaResource* aResource)
  {
    NS_ASSERTION(NS_IsMainThread(), "Should only be called on main thread");
    mResource = aResource;
  }

  // Return the principal of the current URI being played or downloaded.
  virtual already_AddRefed<nsIPrincipal> GetCurrentPrincipal();
@ -640,6 +641,10 @@ public:
  // The actual playback rate computation. The monitor must be held.
  virtual double ComputePlaybackRate(bool* aReliable);

  // Return true when the media is same-origin with the element. The monitor
  // must be held.
  bool IsSameOriginMedia();

  // Returns true if we can play the entire media through without stopping
  // to buffer, given the current download and playback rates.
  bool CanPlayThrough();
@ -736,6 +741,9 @@ public:
  // Notifies the element that decoding has failed.
  virtual void DecodeError();

  // Indicate whether the media is same-origin with the element.
  void UpdateSameOriginStatus(bool aSameOrigin);

  MediaDecoderOwner* GetOwner() MOZ_OVERRIDE;

#ifdef MOZ_RAW
@ -958,6 +966,10 @@ public:
  // True if the media is seekable (i.e. supports random access).
  bool mMediaSeekable;

  // True if the media is same-origin with the element. Data can only be
  // passed to MediaStreams when this is true.
  bool mSameOriginMedia;

  /******
   * The following member variables can be accessed from any thread.
   ******/
@ -619,6 +619,10 @@ void MediaDecoderStateMachine::SendStreamData()
  if (mState == DECODER_STATE_DECODING_METADATA)
    return;

  if (!mDecoder->IsSameOriginMedia()) {
    return;
  }

  // If there's still an audio thread alive, then we can't send any stream
  // data yet since both SendStreamData and the audio thread want to be in
  // charge of popping the audio queue. We're waiting for the audio thread
@ -176,7 +176,7 @@ MediaRecorder::Start(const Optional<int32_t>& aTimeSlice, ErrorResult& aResult)

  // Create a TrackUnionStream to support Pause/Resume by using ChangeExplicitBlockerCount
  MediaStreamGraph* gm = mStream->GetStream()->Graph();
  mTrackUnionStream = gm->CreateTrackUnionStream(mStream);
  mTrackUnionStream = gm->CreateTrackUnionStream(nullptr);
  MOZ_ASSERT(mTrackUnionStream, "CreateTrackUnionStream failed");

  if (!CheckPrincipal()) {
@ -205,6 +205,16 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
    hc->GetResponseHeader(NS_LITERAL_CSTRING("Accept-Ranges"),
                          ranges);
    bool acceptsRanges = ranges.EqualsLiteral("bytes");
    // True if this channel will not return an unbounded amount of data
    bool dataIsBounded = false;

    int64_t contentLength = -1;
    hc->GetContentLength(&contentLength);
    if (contentLength >= 0 && responseStatus == HTTP_OK_CODE) {
      // "OK" status means Content-Length is for the whole resource.
      // Since that's bounded, we know we have a finite-length resource.
      dataIsBounded = true;
    }

    if (mOffset == 0) {
      // Look for duration headers from known Ogg content systems.
@ -225,20 +235,21 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
        rv = hc->GetResponseHeader(NS_LITERAL_CSTRING("X-Content-Duration"), durationText);
      }

      // If there is no Content-Duration header, or if the value for this header
      // is not valid, set the media as being infinite.
      // If there is a Content-Duration header with a valid value, record
      // the duration.
      if (NS_SUCCEEDED(rv)) {
        double duration = durationText.ToDouble(&ec);
        if (ec == NS_OK && duration >= 0) {
          mDecoder->SetDuration(duration);
        } else {
          mDecoder->SetInfinite(true);
          // We know the resource must be bounded.
          dataIsBounded = true;
        }
      } else {
        mDecoder->SetInfinite(true);
      }
    }

    // Assume Range requests have a bounded upper limit unless the
    // Content-Range header tells us otherwise.
    bool boundedSeekLimit = true;
    // Check response code for byte-range requests (seeking, chunk requests).
    if (!mByteRange.IsNull() && (responseStatus == HTTP_PARTIAL_RESPONSE_CODE)) {
      // Parse Content-Range header.
@ -267,10 +278,10 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
      // Notify media cache about the length and start offset of data received.
      // Note: If aRangeTotal == -1, then the total bytes is unknown at this stage.
      //       For now, tell the decoder that the stream is infinite.
      if (rangeTotal != -1) {
        mCacheStream.NotifyDataLength(rangeTotal);
      if (rangeTotal == -1) {
        boundedSeekLimit = false;
      } else {
        mDecoder->SetInfinite(true);
        mCacheStream.NotifyDataLength(rangeTotal);
      }
      mCacheStream.NotifyDataStarted(rangeStart);

@ -290,13 +301,8 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
    } else if (mOffset == 0 &&
               (responseStatus == HTTP_OK_CODE ||
                responseStatus == HTTP_PARTIAL_RESPONSE_CODE)) {
      // We weren't seeking and got a valid response status,
      // set the length of the content.
      int64_t cl = -1;
      hc->GetContentLength(&cl);

      if (cl >= 0) {
        mCacheStream.NotifyDataLength(cl);
      if (contentLength >= 0) {
        mCacheStream.NotifyDataLength(contentLength);
      }
    }
    // XXX we probably should examine the Content-Range header in case
@ -307,10 +313,13 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
    // support seeking.
    seekable =
      responseStatus == HTTP_PARTIAL_RESPONSE_CODE || acceptsRanges;

    if (seekable) {
      mDecoder->SetInfinite(false);
    if (seekable && boundedSeekLimit) {
      // If range requests are supported, and we did not see an unbounded
      // upper range limit, we assume the resource is bounded.
      dataIsBounded = true;
    }

    mDecoder->SetInfinite(!dataIsBounded);
  }
  mDecoder->SetTransportSeekable(seekable);
  mCacheStream.SetTransportSeekable(seekable);
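The OnStartRequest hunks above replace the scattered SetInfinite calls with a single decision: the decoder is marked infinite only if nothing proved the resource bounded. A standalone sketch of that decision, with plain values standing in for the Necko response state (the Content-Duration branch is omitted for brevity):

// Standalone sketch (not Gecko code) of the boundedness decision assembled
// across the hunks above; the result feeds SetInfinite(!dataIsBounded).
#include <cstdint>

bool IsInfinite(int64_t aContentLength, int aResponseStatus /* 200 or 206 */,
                bool aRangeTotalKnown, bool aSeekable)
{
  bool dataIsBounded = false;
  bool boundedSeekLimit = true;

  if (aContentLength >= 0 && aResponseStatus == 200) {
    dataIsBounded = true;           // whole-resource Content-Length is known
  }
  if (aResponseStatus == 206 && !aRangeTotalKnown) {
    boundedSeekLimit = false;       // Content-Range reported no total size
  }
  if (aSeekable && boundedSeekLimit) {
    dataIsBounded = true;           // range requests with a bounded upper limit
  }
  return !dataIsBounded;
}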
@ -22,6 +22,7 @@
#include "AudioChannelCommon.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioNodeExternalInputStream.h"
#include <algorithm>
#include "DOMMediaStream.h"
#include "GeckoProfiler.h"
@ -520,7 +521,7 @@ MediaStreamGraphImpl::UpdateStreamOrder()
  mozilla::LinkedList<MediaStream> stack;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    nsRefPtr<MediaStream>& s = mOldStreams[i];
    if (!s->mAudioOutputs.IsEmpty() || !s->mVideoOutputs.IsEmpty()) {
    if (s->IsIntrinsicallyConsumed()) {
      MarkConsumed(s);
    }
    if (!s->mHasBeenOrdered) {
@ -1231,6 +1232,9 @@ MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
  stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
  stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished;

  if (stream->mWrapper) {
    stream->mWrapper->NotifyStreamStateChanged();
  }
  for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) {
    stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged();
  }
@ -1543,6 +1547,30 @@ MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
  }
}

MediaStream::MediaStream(DOMMediaStream* aWrapper)
  : mBufferStartTime(0)
  , mExplicitBlockerCount(0)
  , mBlocked(false)
  , mGraphUpdateIndices(0)
  , mFinished(false)
  , mNotifiedFinished(false)
  , mNotifiedBlocked(false)
  , mHasCurrentData(false)
  , mNotifiedHasCurrentData(false)
  , mWrapper(aWrapper)
  , mMainThreadCurrentTime(0)
  , mMainThreadFinished(false)
  , mMainThreadDestroyed(false)
  , mGraph(nullptr)
{
  MOZ_COUNT_CTOR(MediaStream);
  // aWrapper should not already be connected to a MediaStream! It needs
  // to be hooked up to this stream, and since this stream is only just
  // being created now, aWrapper must not be connected to anything.
  NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
               "Wrapper already has another media stream hooked up to it!");
}

void
MediaStream::Init()
{
@ -2316,6 +2344,21 @@ MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
  return stream;
}

AudioNodeExternalInputStream*
MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
{
  MOZ_ASSERT(NS_IsMainThread());
  if (!aSampleRate) {
    aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
  }
  AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate);
  NS_ADDREF(stream);
  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  stream->SetGraphImpl(graph);
  graph->AppendMessage(new CreateMessage(stream));
  return stream;
}

AudioNodeStream*
MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine,
                                        AudioNodeStreamKind aKind,
@ -191,8 +191,9 @@ class MediaStreamGraphImpl;
class SourceMediaStream;
class ProcessedMediaStream;
class MediaInputPort;
class AudioNodeStream;
class AudioNodeEngine;
class AudioNodeExternalInputStream;
class AudioNodeStream;
struct AudioChunk;

/**
@ -262,24 +263,7 @@ class MediaStream : public mozilla::LinkedListElement<MediaStream> {
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)

  MediaStream(DOMMediaStream* aWrapper)
    : mBufferStartTime(0)
    , mExplicitBlockerCount(0)
    , mBlocked(false)
    , mGraphUpdateIndices(0)
    , mFinished(false)
    , mNotifiedFinished(false)
    , mNotifiedBlocked(false)
    , mHasCurrentData(false)
    , mNotifiedHasCurrentData(false)
    , mWrapper(aWrapper)
    , mMainThreadCurrentTime(0)
    , mMainThreadFinished(false)
    , mMainThreadDestroyed(false)
    , mGraph(nullptr)
  {
    MOZ_COUNT_CTOR(MediaStream);
  }
  MediaStream(DOMMediaStream* aWrapper);
  virtual ~MediaStream()
  {
    MOZ_COUNT_DTOR(MediaStream);
@ -360,6 +344,7 @@ public:

  friend class MediaStreamGraphImpl;
  friend class MediaInputPort;
  friend class AudioNodeExternalInputStream;

  virtual SourceMediaStream* AsSourceStream() { return nullptr; }
  virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; }
@ -400,6 +385,16 @@ public:
  void RemoveListenerImpl(MediaStreamListener* aListener);
  void RemoveAllListenersImpl();
  void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled);
  /**
   * Returns true when this stream requires the contents of its inputs even if
   * its own outputs are not being consumed. This is used to signal inputs to
   * this stream that they are being consumed; when they're not being consumed,
   * we make some optimizations.
   */
  virtual bool IsIntrinsicallyConsumed() const
  {
    return !mAudioOutputs.IsEmpty() || !mVideoOutputs.IsEmpty();
  }

  void AddConsumer(MediaInputPort* aPort)
  {
@ -764,10 +759,10 @@ public:
   * each other.
   */
  enum {
    // When set, blocking on the input stream forces blocking on the output
    // When set, blocking on the output stream forces blocking on the input
    // stream.
    FLAG_BLOCK_INPUT = 0x01,
    // When set, blocking on the output stream forces blocking on the input
    // When set, blocking on the input stream forces blocking on the output
    // stream.
    FLAG_BLOCK_OUTPUT = 0x02
  };
@ -958,6 +953,11 @@ public:
  AudioNodeStream* CreateAudioNodeStream(AudioNodeEngine* aEngine,
                                         AudioNodeStreamKind aKind,
                                         TrackRate aSampleRate = 0);

  AudioNodeExternalInputStream*
  CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine,
                                     TrackRate aSampleRate = 0);

  /**
   * Returns the number of graph updates sent. This can be used to track
   * whether a given update has been processed by the graph thread and reflected
@ -120,6 +120,17 @@ protected:

  // Only non-ended tracks are allowed to persist in this map.
  struct TrackMapEntry {
    // mEndOfConsumedInputTicks is the end of the input ticks that we've consumed.
    // 0 if we haven't consumed any yet.
    TrackTicks mEndOfConsumedInputTicks;
    // mEndOfLastInputIntervalInInputStream is the timestamp for the end of the
    // previous interval which was unblocked for both the input and output
    // stream, in the input stream's timeline, or -1 if there wasn't one.
    StreamTime mEndOfLastInputIntervalInInputStream;
    // mEndOfLastInputIntervalInOutputStream is the timestamp for the end of the
    // previous interval which was unblocked for both the input and output
    // stream, in the output stream's timeline, or -1 if there wasn't one.
    StreamTime mEndOfLastInputIntervalInOutputStream;
    MediaInputPort* mInputPort;
    // We keep track IDs instead of track pointers because
    // tracks can be removed without us being notified (e.g.
@ -161,6 +172,9 @@ protected:
                         (long long)outputStart));

    TrackMapEntry* map = mTrackMap.AppendElement();
    map->mEndOfConsumedInputTicks = 0;
    map->mEndOfLastInputIntervalInInputStream = -1;
    map->mEndOfLastInputIntervalInOutputStream = -1;
    map->mInputPort = aPort;
    map->mInputTrackID = aTrack->GetID();
    map->mOutputTrackID = track->GetID();
@ -208,14 +222,12 @@ protected:
      // Ticks >= startTicks and < endTicks are in the interval
      StreamTime outputEnd = GraphTimeToStreamTime(interval.mEnd);
      TrackTicks startTicks = outputTrack->GetEnd();
#ifdef DEBUG
      StreamTime outputStart = GraphTimeToStreamTime(interval.mStart);
#endif
      NS_ASSERTION(startTicks == TimeToTicksRoundUp(rate, outputStart),
                   "Samples missing");
      TrackTicks endTicks = TimeToTicksRoundUp(rate, outputEnd);
      TrackTicks ticks = endTicks - startTicks;
      // StreamTime inputStart = source->GraphTimeToStreamTime(interval.mStart);
      StreamTime inputStart = source->GraphTimeToStreamTime(interval.mStart);
      StreamTime inputEnd = source->GraphTimeToStreamTime(interval.mEnd);
      TrackTicks inputTrackEndPoint = TRACK_TICKS_MAX;

@ -239,12 +251,66 @@ protected:
        // that 'ticks' samples are gathered, even though a tick boundary may
        // occur between outputStart and outputEnd but not between inputStart
        // and inputEnd.
        // We'll take the latest samples we can.
        TrackTicks inputEndTicks = TimeToTicksRoundUp(rate, inputEnd);
        TrackTicks inputStartTicks = inputEndTicks - ticks;
        segment->AppendSlice(*aInputTrack->GetSegment(),
                             std::min(inputTrackEndPoint, inputStartTicks),
                             std::min(inputTrackEndPoint, inputEndTicks));
        // These are the properties we need to ensure:
        // 1) Exactly 'ticks' ticks of output are produced, i.e.
        // inputEndTicks - inputStartTicks = ticks.
        // 2) inputEndTicks <= aInputTrack->GetSegment()->GetDuration().
        // 3) In any sequence of intervals where neither stream is blocked,
        // the content of the input track we use is a contiguous sequence of
        // ticks with no gaps or overlaps.
        if (map->mEndOfLastInputIntervalInInputStream != inputStart ||
            map->mEndOfLastInputIntervalInOutputStream != outputStart) {
          // Start of a new series of intervals where neither stream is blocked.
          map->mEndOfConsumedInputTicks = TimeToTicksRoundDown(rate, inputStart) - 1;
        }
        TrackTicks inputStartTicks = map->mEndOfConsumedInputTicks;
        TrackTicks inputEndTicks = inputStartTicks + ticks;
        map->mEndOfConsumedInputTicks = inputEndTicks;
        map->mEndOfLastInputIntervalInInputStream = inputEnd;
        map->mEndOfLastInputIntervalInOutputStream = outputEnd;
        // Now we prove that the above properties hold:
        // Property #1: trivial by construction.
        // Property #3: trivial by construction. Between every two
        // intervals where both streams are not blocked, the above if condition
        // is false and mEndOfConsumedInputTicks advances exactly to match
        // the ticks that were consumed.
        // Property #2:
        // Let originalOutputStart be the value of outputStart and originalInputStart
        // be the value of inputStart when the body of the "if" block was last
        // executed.
        // Let allTicks be the sum of the values of 'ticks' computed since then.
        // The interval [originalInputStart/rate, inputEnd/rate) is the
        // same length as the interval [originalOutputStart/rate, outputEnd/rate),
        // so the latter interval can have at most one more integer in it. Thus
        // TimeToTicksRoundUp(rate, outputEnd) - TimeToTicksRoundUp(rate, originalOutputStart)
        //   <= TimeToTicksRoundDown(rate, inputEnd) - TimeToTicksRoundDown(rate, originalInputStart) + 1
        // Then
        // inputEndTicks = TimeToTicksRoundDown(rate, originalInputStart) - 1 + allTicks
        //   = TimeToTicksRoundDown(rate, originalInputStart) - 1 + TimeToTicksRoundUp(rate, outputEnd) - TimeToTicksRoundUp(rate, originalOutputStart)
        //   <= TimeToTicksRoundDown(rate, originalInputStart) - 1 + TimeToTicksRoundDown(rate, inputEnd) - TimeToTicksRoundDown(rate, originalInputStart) + 1
        //   = TimeToTicksRoundDown(rate, inputEnd)
        //   <= inputEnd/rate
        // (now using the fact that inputEnd <= track->GetEndTimeRoundDown() for a non-ended track)
        //   <= TicksToTimeRoundDown(rate, aInputTrack->GetSegment()->GetDuration())/rate
        //   <= rate*aInputTrack->GetSegment()->GetDuration()/rate
        //   = aInputTrack->GetSegment()->GetDuration()
        // as required.

        if (inputStartTicks < 0) {
          // Data before the start of the track is just null.
          // We have to add a small amount of delay to ensure that there is
          // always a sample available if we see an interval that contains a
          // tick boundary on the output stream's timeline but does not contain
          // a tick boundary on the input stream's timeline. 1 tick delay is
          // necessary and sufficient.
          segment->AppendNullData(-inputStartTicks);
          inputStartTicks = 0;
        }
        if (inputEndTicks > inputStartTicks) {
          segment->AppendSlice(*aInputTrack->GetSegment(),
                               std::min(inputTrackEndPoint, inputStartTicks),
                               std::min(inputTrackEndPoint, inputEndTicks));
        }
        LOG(PR_LOG_DEBUG+1, ("TrackUnionStream %p appending %lld ticks of input data to track %d",
                             this, (long long)(std::min(inputTrackEndPoint, inputEndTicks) - std::min(inputTrackEndPoint, inputStartTicks)),
                             outputTrack->GetID()));
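A standalone sketch of the consumed-ticks bookkeeping that the long comment above reasons about: within a run of intervals where neither stream is blocked, input ticks are consumed contiguously, and each new run restarts one tick behind the rounded-down input time. Times are treated as whole seconds here and ticks as time multiplied by rate, a simplification standing in for TimeToTicksRoundDown and the real StreamTime/TrackTicks types.

// Standalone sketch (not Gecko code) of the TrackMapEntry tick bookkeeping.
#include <cstdint>
#include <utility>

struct TickState {
  int64_t endOfConsumedInputTicks = 0;
  int64_t endOfLastInputIntervalInInputStream = -1;
  int64_t endOfLastInputIntervalInOutputStream = -1;
};

// Returns the [start, end) range of input ticks to copy for one unblocked interval.
std::pair<int64_t, int64_t>
ConsumeInterval(TickState& aState, int64_t aInputStart, int64_t aInputEnd,
                int64_t aOutputStart, int64_t aOutputEnd, int64_t aTicks,
                int64_t aRate)
{
  if (aState.endOfLastInputIntervalInInputStream != aInputStart ||
      aState.endOfLastInputIntervalInOutputStream != aOutputStart) {
    // New run of unblocked intervals: restart one tick behind the input time.
    aState.endOfConsumedInputTicks = aInputStart * aRate - 1; // stand-in for TimeToTicksRoundDown
  }
  int64_t start = aState.endOfConsumedInputTicks;
  int64_t end = start + aTicks;                 // exactly aTicks ticks are produced
  aState.endOfConsumedInputTicks = end;         // next interval continues contiguously
  aState.endOfLastInputIntervalInInputStream = aInputEnd;
  aState.endOfLastInputIntervalInOutputStream = aOutputEnd;
  return {start, end};
}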
@ -195,15 +195,14 @@ DASHDecoder::ReleaseStateMachine()
}

nsresult
DASHDecoder::Load(MediaResource* aResource,
                  nsIStreamListener** aStreamListener,
DASHDecoder::Load(nsIStreamListener** aStreamListener,
                  MediaDecoder* aCloneDonor)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

  mDASHReader = new DASHReader(this);

  nsresult rv = OpenResource(aResource, aStreamListener);
  nsresult rv = OpenResource(aStreamListener);
  NS_ENSURE_SUCCESS(rv, rv);

  mDecoderStateMachine = CreateStateMachine();

@ -57,9 +57,8 @@ public:

  // Loads the MPD from the network and subsequently loads the media streams.
  // Called from the main thread only.
  nsresult Load(MediaResource* aResource,
                nsIStreamListener** aListener,
                MediaDecoder* aCloneDonor);
  virtual nsresult Load(nsIStreamListener** aListener,
                        MediaDecoder* aCloneDonor) MOZ_OVERRIDE;

  // Notifies download of MPD file has ended.
  // Called on the main thread only.

@ -73,8 +73,7 @@ DASHRepDecoder::SetReader(WebMReader* aReader)
}

nsresult
DASHRepDecoder::Load(MediaResource* aResource,
                     nsIStreamListener** aListener,
DASHRepDecoder::Load(nsIStreamListener** aListener,
                     MediaDecoder* aCloneDonor)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

@ -73,9 +73,8 @@ public:
  void SetMPDRepresentation(Representation const * aRep);

  // Called from DASHDecoder on main thread; Starts media stream download.
  nsresult Load(MediaResource* aResource = nullptr,
                nsIStreamListener** aListener = nullptr,
                MediaDecoder* aCloneDonor = nullptr);
  virtual nsresult Load(nsIStreamListener** aListener = nullptr,
                        MediaDecoder* aCloneDonor = nullptr) MOZ_OVERRIDE;

  // Loads the next byte range (or first one on first call). Called on the main
  // thread only.
@ -53,6 +53,7 @@ EXPORTS += [
    'AudioChannelFormat.h',
    'AudioEventTimeline.h',
    'AudioNodeEngine.h',
    'AudioNodeExternalInputStream.h',
    'AudioNodeStream.h',
    'AudioSampleFormat.h',
    'AudioSegment.h',
@ -97,6 +98,7 @@ CPP_SOURCES += [
    'AudioAvailableEventManager.cpp',
    'AudioChannelFormat.cpp',
    'AudioNodeEngine.cpp',
    'AudioNodeExternalInputStream.cpp',
    'AudioNodeStream.cpp',
    'AudioSegment.cpp',
    'AudioStream.cpp',
@ -16,8 +16,10 @@ function handleRequest(request, response)
  bis.setInputStream(fis);
  var bytes = bis.readBytes(bis.available());
  response.setHeader("Content-Duration", "0.233", false);
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -16,8 +16,10 @@ function handleRequest(request, response)
  bis.setInputStream(fis);
  var bytes = bis.readBytes(bis.available());
  response.setHeader("x-amz-meta-content-duration", "0.233", false);
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -16,8 +16,10 @@ function handleRequest(request, response)
  bis.setInputStream(fis);
  var bytes = bis.readBytes(bis.available());
  response.setStatusLine(request.httpVersion, 200, "Content Follows");
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -17,8 +17,10 @@ function handleRequest(request, response)
  var bytes = bis.readBytes(bis.available());
  response.setStatusLine(request.httpVersion, 200, "Content Follows");
  response.setHeader("Content-Duration", "-5", false);
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -17,7 +17,10 @@ function handleRequest(request, response)
  var bytes = bis.readBytes(bis.available());
  response.setStatusLine(request.httpVersion, 200, "Content Follows");
  response.setHeader("Content-Duration", "-6", false);
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -17,8 +17,10 @@ function handleRequest(request, response)
  var bytes = bis.readBytes(bis.available());
  response.setStatusLine(request.httpVersion, 200, "Content Follows");
  response.setHeader("Content-Duration", "Invalid Float Value", false);
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -16,8 +16,10 @@ function handleRequest(request, response)
  bis.setInputStream(fis);
  var bytes = bis.readBytes(bis.available());
  response.setHeader("X-Content-Duration", "0.233", false);
  response.setHeader("Content-Length", ""+bytes.length, false);
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}

@ -19,5 +19,8 @@ function handleRequest(request, response)
  response.setStatusLine(request.httpVersion, 200, "Content Follows");
  response.setHeader("Content-Type", "video/ogg", false);
  response.write(bytes, bytes.length);
  // Make this request async to prevent a default Content-Length from being provided.
  response.processAsync();
  response.finish();
  bis.close();
}
@ -8,14 +8,17 @@
#include "nsContentUtils.h"
#include "nsPIDOMWindow.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/AnalyserNode.h"
#include "mozilla/dom/AudioContextBinding.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/dom/OfflineAudioContextBinding.h"
#include "MediaStreamGraph.h"
#include "mozilla/dom/AnalyserNode.h"
#include "AudioDestinationNode.h"
#include "AudioBufferSourceNode.h"
#include "AudioBuffer.h"
#include "GainNode.h"
#include "MediaElementAudioSourceNode.h"
#include "MediaStreamAudioSourceNode.h"
#include "DelayNode.h"
#include "PannerNode.h"
#include "AudioListener.h"
@ -253,6 +256,36 @@ AudioContext::CreateAnalyser()
  return analyserNode.forget();
}

already_AddRefed<MediaElementAudioSourceNode>
AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
                                       ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<DOMMediaStream> stream = aMediaElement.MozCaptureStream(aRv);
  if (aRv.Failed()) {
    return nullptr;
  }
  nsRefPtr<MediaElementAudioSourceNode> mediaElementAudioSourceNode =
    new MediaElementAudioSourceNode(this, stream);
  return mediaElementAudioSourceNode.forget();
}

already_AddRefed<MediaStreamAudioSourceNode>
AudioContext::CreateMediaStreamSource(DOMMediaStream& aMediaStream,
                                      ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<MediaStreamAudioSourceNode> mediaStreamAudioSourceNode =
    new MediaStreamAudioSourceNode(this, &aMediaStream);
  return mediaStreamAudioSourceNode.forget();
}

already_AddRefed<GainNode>
AudioContext::CreateGain()
{