Merge inbound to m-c. a=merge

Ryan VanderMeulen 2015-08-28 08:48:42 -04:00
commit 402fae4a15
79 changed files with 1111 additions and 1130 deletions

View File

@ -1831,7 +1831,8 @@ function gotoHistoryIndex(aEvent) {
}
// Modified click. Go there in a new tab/window.
duplicateTabIn(gBrowser.selectedTab, where, index - gBrowser.sessionHistory.index);
let historyindex = aEvent.target.getAttribute("historyindex");
duplicateTabIn(gBrowser.selectedTab, where, Number(historyindex));
return true;
}
@ -3753,66 +3754,108 @@ function FillHistoryMenu(aParent) {
}
// Remove old entries if any
var children = aParent.childNodes;
let children = aParent.childNodes;
for (var i = children.length - 1; i >= 0; --i) {
if (children[i].hasAttribute("index"))
aParent.removeChild(children[i]);
}
var webNav = gBrowser.webNavigation;
var sessionHistory = webNav.sessionHistory;
const MAX_HISTORY_MENU_ITEMS = 15;
var count = sessionHistory.count;
if (count <= 1) // don't display the popup for a single item
const tooltipBack = gNavigatorBundle.getString("tabHistory.goBack");
const tooltipCurrent = gNavigatorBundle.getString("tabHistory.current");
const tooltipForward = gNavigatorBundle.getString("tabHistory.goForward");
function updateSessionHistory(sessionHistory, initial)
{
let count = sessionHistory.entries.length;
if (!initial) {
if (count <= 1) {
// if there is only one entry now, close the popup.
aParent.hidePopup();
return;
} else if (!aParent.parentNode.open) {
// if the popup wasn't open before, but now needs to be, reopen the menu.
// It should trigger FillHistoryMenu again.
aParent.parentNode.open = true;
return;
}
}
let index = sessionHistory.index;
let half_length = Math.floor(MAX_HISTORY_MENU_ITEMS / 2);
let start = Math.max(index - half_length, 0);
let end = Math.min(start == 0 ? MAX_HISTORY_MENU_ITEMS : index + half_length + 1, count);
if (end == count) {
start = Math.max(count - MAX_HISTORY_MENU_ITEMS, 0);
}
let existingIndex = 0;
for (let j = end - 1; j >= start; j--) {
let entry = sessionHistory.entries[j];
let uri = entry.url;
let item = existingIndex < children.length ?
children[existingIndex] : document.createElement("menuitem");
let entryURI = BrowserUtils.makeURI(entry.url, entry.charset, null);
item.setAttribute("uri", uri);
item.setAttribute("label", entry.title || uri);
item.setAttribute("index", j);
// Cache this so that gotoHistoryIndex doesn't need the original index
item.setAttribute("historyindex", j - index);
if (j != index) {
PlacesUtils.favicons.getFaviconURLForPage(entryURI, function (aURI) {
if (aURI) {
let iconURL = PlacesUtils.favicons.getFaviconLinkForIcon(aURI).spec;
iconURL = PlacesUtils.getImageURLForResolution(window, iconURL);
item.style.listStyleImage = "url(" + iconURL + ")";
}
});
}
if (j < index) {
item.className = "unified-nav-back menuitem-iconic menuitem-with-favicon";
item.setAttribute("tooltiptext", tooltipBack);
} else if (j == index) {
item.setAttribute("type", "radio");
item.setAttribute("checked", "true");
item.className = "unified-nav-current";
item.setAttribute("tooltiptext", tooltipCurrent);
} else {
item.className = "unified-nav-forward menuitem-iconic menuitem-with-favicon";
item.setAttribute("tooltiptext", tooltipForward);
}
if (!item.parentNode) {
aParent.appendChild(item);
}
existingIndex++;
}
if (!initial) {
let existingLength = children.length;
while (existingIndex < existingLength) {
aParent.removeChild(aParent.lastChild);
existingIndex++;
}
}
}
let sessionHistory = SessionStore.getSessionHistory(gBrowser.selectedTab, updateSessionHistory);
if (!sessionHistory)
return false;
const MAX_HISTORY_MENU_ITEMS = 15;
var index = sessionHistory.index;
var half_length = Math.floor(MAX_HISTORY_MENU_ITEMS / 2);
var start = Math.max(index - half_length, 0);
var end = Math.min(start == 0 ? MAX_HISTORY_MENU_ITEMS : index + half_length + 1, count);
if (end == count)
start = Math.max(count - MAX_HISTORY_MENU_ITEMS, 0);
// don't display the popup for a single item
if (sessionHistory.entries.length <= 1)
return false;
var tooltipBack = gNavigatorBundle.getString("tabHistory.goBack");
var tooltipCurrent = gNavigatorBundle.getString("tabHistory.current");
var tooltipForward = gNavigatorBundle.getString("tabHistory.goForward");
for (var j = end - 1; j >= start; j--) {
let item = document.createElement("menuitem");
let entry = sessionHistory.getEntryAtIndex(j, false);
let uri = entry.URI.spec;
let entryURI = BrowserUtils.makeURIFromCPOW(entry.URI);
item.setAttribute("uri", uri);
item.setAttribute("label", entry.title || uri);
item.setAttribute("index", j);
if (j != index) {
PlacesUtils.favicons.getFaviconURLForPage(entryURI, function (aURI) {
if (aURI) {
let iconURL = PlacesUtils.favicons.getFaviconLinkForIcon(aURI).spec;
iconURL = PlacesUtils.getImageURLForResolution(window, iconURL);
item.style.listStyleImage = "url(" + iconURL + ")";
}
});
}
if (j < index) {
item.className = "unified-nav-back menuitem-iconic menuitem-with-favicon";
item.setAttribute("tooltiptext", tooltipBack);
} else if (j == index) {
item.setAttribute("type", "radio");
item.setAttribute("checked", "true");
item.className = "unified-nav-current";
item.setAttribute("tooltiptext", tooltipCurrent);
} else {
item.className = "unified-nav-forward menuitem-iconic menuitem-with-favicon";
item.setAttribute("tooltiptext", tooltipForward);
}
aParent.appendChild(item);
}
updateSessionHistory(sessionHistory, true);
return true;
}
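FillHistoryMenu now builds its items from a SessionStore snapshot rather than from the live nsISHistory, so each menuitem caches its offset from the current entry in the historyindex attribute, and gotoHistoryIndex (first hunk) consumes that offset. A minimal sketch of the consumer side; the handler name and structure are assumed, only the attribute and the duplicateTabIn() call come from the patch:

// Sketch only: "historyindex" is stored as (entryIndex - snapshotIndex),
// i.e. a delta from the entry that was current when the menu was filled.
function openHistoryEntryInNewTab(aEvent, where) {
  let delta = Number(aEvent.target.getAttribute("historyindex")); // e.g. -1 = one step back
  // duplicateTabIn() takes a relative history delta as its third argument,
  // so no live nsISHistory index is needed at click time.
  duplicateTabIn(gBrowser.selectedTab, where, delta);
}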

View File

@ -20,6 +20,21 @@ add_task(function* () {
ok(true, "history menu opened");
// Wait for the session data to be flushed before continuing the test
yield new Promise(resolve => SessionStore.getSessionHistory(gBrowser.selectedTab, resolve));
is(event.target.children.length, 2, "Two history items");
let node = event.target.firstChild;
is(node.getAttribute("uri"), "http://example.com/2.html", "first item uri");
is(node.getAttribute("index"), "1", "first item index");
is(node.getAttribute("historyindex"), "0", "first item historyindex");
node = event.target.lastChild;
is(node.getAttribute("uri"), "http://example.com/", "second item uri");
is(node.getAttribute("index"), "0", "second item index");
is(node.getAttribute("historyindex"), "-1", "second item historyindex");
event.target.hidePopup();
gBrowser.removeTab(gBrowser.selectedTab);
});

View File

@ -7,7 +7,7 @@
for var in ('MOZ_APP_NAME', 'MOZ_MACBUNDLE_NAME'):
DEFINES[var] = CONFIG[var]
if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk2', 'cocoa'):
if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk2', 'gtk3', 'cocoa'):
DEFINES['HAVE_SHELL_SERVICE'] = 1
JAR_MANIFESTS += ['jar.mn']

View File

@ -123,6 +123,8 @@ let SessionHistoryInternal = {
entry.subframe = true;
}
entry.charset = shEntry.URI.originCharset;
let cacheKey = shEntry.cacheKey;
if (cacheKey && cacheKey instanceof Ci.nsISupportsPRUint32 &&
cacheKey.data != 0) {
@ -289,7 +291,7 @@ let SessionHistoryInternal = {
var shEntry = Cc["@mozilla.org/browser/session-history-entry;1"].
createInstance(Ci.nsISHEntry);
shEntry.setURI(Utils.makeURI(entry.url));
shEntry.setURI(Utils.makeURI(entry.url, entry.charset));
shEntry.setTitle(entry.title || entry.url);
if (entry.subframe)
shEntry.setIsSubFrame(entry.subframe || false);
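The charset that is now serialized above is read back in two places in this commit: FillHistoryMenu rebuilds each entry's URI with BrowserUtils.makeURI(entry.url, entry.charset, null) for the favicon lookup, and the restore path passes it to Utils.makeURI. A rough sketch of that round trip; the import path is an assumption about the module layout of the time:

// Assumed import path for the sessionstore Utils helper.
Components.utils.import("resource:///modules/sessionstore/Utils.jsm");

// Collect: keep the origin charset next to the URL in the serialized entry.
function collectEntryCharset(shEntry) {
  return { url: shEntry.URI.spec, charset: shEntry.URI.originCharset };
}

// Restore: rebuild an nsIURI that carries the same charset.
function restoreEntryURI(entry) {
  return Utils.makeURI(entry.url, entry.charset);
}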

View File

@ -312,6 +312,10 @@ this.SessionStore = {
navigateAndRestore(tab, loadArguments, historyIndex) {
return SessionStoreInternal.navigateAndRestore(tab, loadArguments, historyIndex);
},
getSessionHistory(tab, updatedCallback) {
return SessionStoreInternal.getSessionHistory(tab, updatedCallback);
}
};
@ -2262,6 +2266,34 @@ let SessionStoreInternal = {
});
},
/**
* Retrieves the latest session history information for a tab. The cached data
* is returned immediately, but a callback may be provided that supplies
* up-to-date data when or if it is available. The callback is passed a single
* argument with data in the same format as the return value.
*
* @param tab tab to retrieve the session history for
* @param updatedCallback function to call with updated data as the single argument
* @returns an object containing 'index' specifying the current index, and an
* array 'entries' containing an object for each history item.
*/
getSessionHistory(tab, updatedCallback) {
if (updatedCallback) {
TabStateFlusher.flush(tab.linkedBrowser).then(() => {
let sessionHistory = this.getSessionHistory(tab);
if (sessionHistory) {
updatedCallback(sessionHistory);
}
});
}
// Don't continue if the tab was closed before TabStateFlusher.flush resolves.
if (tab.linkedBrowser) {
let tabState = TabState.collect(tab);
return { index: tabState.index - 1, entries: tabState.entries }
}
},
/**
* See if aWindow is usable for use when restoring a previous session via
* restoreLastSession. If usable, prepare it for use.
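The JSDoc above describes a two-phase API: a cached snapshot is returned synchronously, and the optional callback fires once the tab's state has been flushed from the content process. A short usage sketch for browser chrome code (the Promise form at the end is the pattern the updated history-menu test uses):

// Synchronous snapshot plus asynchronous refresh.
let cached = SessionStore.getSessionHistory(gBrowser.selectedTab, updated => {
  // Invoked after TabStateFlusher.flush() resolves, with fresh data.
  console.log("flushed:", updated.index, updated.entries.length);
});
if (cached) {
  console.log("cached:", cached.index, cached.entries.length);
}

// Callback-to-Promise form, as in the browser test:
new Promise(resolve => SessionStore.getSessionHistory(gBrowser.selectedTab, resolve))
  .then(history => {
    // history.entries[history.index] is the current entry.
  });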

View File

@ -29,8 +29,10 @@ const CHAR_CODE_0 = "0".charCodeAt(0);
const CHAR_CODE_9 = "9".charCodeAt(0);
const CHAR_CODE_LPAREN = "(".charCodeAt(0);
const CHAR_CODE_RPAREN = ")".charCodeAt(0);
const CHAR_CODE_COLON = ":".charCodeAt(0);
const CHAR_CODE_SLASH = "/".charCodeAt(0);
const CHAR_CODE_SPACE = " ".charCodeAt(0);
// The cache used in the `nsIURL` function.
const gNSURLStore = new Map();
@ -51,77 +53,95 @@ function parseLocation(location, fallbackLine, fallbackColumn) {
let line, column, url;
// These two indices are used to extract the resource substring, which is
// location[firstParenIndex + 1 .. lineAndColumnIndex].
// location[parenIndex + 1 .. lineAndColumnIndex].
//
// The resource substring is extracted iff a line number was found. There
// may be no parentheses, in which case the substring starts at 0.
// There are 3 variants of location strings in the profiler (with optional
// column numbers):
// 1) "name (resource:line)"
// 2) "resource:line"
// 3) "resource"
//
// For example, take "foo (bar.js:1)".
// ^ ^
// | -----+
// +-------+ |
// | |
// firstParenIndex will point to -+ |
// |
// lineAndColumnIndex will point to --+
// For example for (1), take "foo (bar.js:1)".
// ^ ^
// | |
// | |
// | |
// parenIndex will point to ------+ |
// |
// lineAndColumnIndex will point to -----+
//
// For an example without parentheses, take "bar.js:2".
// ^ ^
// | |
// firstParenIndex will point to -----------+ |
// parenIndex will point to ----------------+ |
// |
// lineAndColumnIndex will point to ----------------+
let firstParenIndex = -1;
//
// To parse, we look for the last occurrence of the string ' ('.
//
// For 1), all occurrences of space ' ' characters in the resource string
// are urlencoded, so the last occurrence of ' (' is the separator between
// the function name and the resource.
//
// For 2) and 3), there can be no occurrences of ' (' since ' ' characters
// are urlencoded in the resource string.
//
// XXX: Note that 3) is ambiguous with SPS marker locations like
// "EnterJIT". We can't distinguish the two, so we treat 3) like a function
// name.
let parenIndex = -1;
let lineAndColumnIndex = -1;
// Compute firstParenIndex and lineAndColumnIndex. If lineAndColumnIndex is
// found, also extract the line and column.
for (let i = 0; i < location.length; i++) {
let c = location.charCodeAt(i);
let lastCharCode = location.charCodeAt(location.length - 1);
let i;
if (lastCharCode === CHAR_CODE_RPAREN) {
// Case 1)
i = location.length - 2;
} else if (isNumeric(lastCharCode)) {
// Case 2)
i = location.length - 1;
} else {
// Case 3)
i = 0;
}
// The url and line information might be inside parentheses.
if (c === CHAR_CODE_LPAREN) {
if (firstParenIndex < 0) {
firstParenIndex = i;
}
continue;
if (i !== 0) {
// Look for a :number.
let end = i;
while (isNumeric(location.charCodeAt(i))) {
i--;
}
if (location.charCodeAt(i) === CHAR_CODE_COLON) {
column = location.substr(i + 1, end - i);
i--;
}
// Look for numbers after colons, twice. Firstly for the line, secondly
// for the column.
if (c === CHAR_CODE_COLON) {
if (isNumeric(location.charCodeAt(i + 1))) {
// If we found a line number, remember when it starts.
if (lineAndColumnIndex < 0) {
lineAndColumnIndex = i;
}
// Look for a preceding :number.
end = i;
while (isNumeric(location.charCodeAt(i))) {
i--;
}
let start = ++i;
let length = 1;
while (isNumeric(location.charCodeAt(++i))) {
length++;
}
// If two were found, the first is the line and the second is the
// column. If only a single :number was found, then it is the line number.
if (location.charCodeAt(i) === CHAR_CODE_COLON) {
line = location.substr(i + 1, end - i);
lineAndColumnIndex = i;
i--;
} else {
lineAndColumnIndex = i + 1;
line = column;
column = undefined;
}
}
// Discard port numbers
if (location.charCodeAt(i) === CHAR_CODE_SLASH) {
lineAndColumnIndex = -1;
--i;
continue;
}
if (!line) {
line = location.substr(start, length);
// Unwind a character due to the isNumeric loop above.
--i;
// There still might be a column number, continue looking.
continue;
}
column = location.substr(start, length);
// We've gotten both a line and a column, stop looking.
// Look for the last occurrence of ' (' in case 1).
if (lastCharCode === CHAR_CODE_RPAREN) {
for (; i >= 0; i--) {
if (location.charCodeAt(i) === CHAR_CODE_LPAREN &&
i > 0 &&
location.charCodeAt(i - 1) === CHAR_CODE_SPACE) {
parenIndex = i;
break;
}
}
@ -129,7 +149,7 @@ function parseLocation(location, fallbackLine, fallbackColumn) {
let uri;
if (lineAndColumnIndex > 0) {
let resource = location.substring(firstParenIndex + 1, lineAndColumnIndex);
let resource = location.substring(parenIndex + 1, lineAndColumnIndex);
url = resource.split(" -> ").pop();
if (url) {
uri = nsIURL(url);
@ -142,7 +162,7 @@ function parseLocation(location, fallbackLine, fallbackColumn) {
// If the URI dug out from the `location` is valid, this is a JS frame.
if (uri) {
functionName = location.substring(0, firstParenIndex - 1);
functionName = location.substring(0, parenIndex - 1);
fileName = uri.fileName || "/";
hostName = getHost(url, uri.host);
// nsIURL throws when accessing a piece of a URL that doesn't
@ -177,37 +197,48 @@ function computeIsContentAndCategory(frame) {
let location = frame.location;
// Locations in frames with function names look like:
// "functionName (foo://bar)".
// Look for the starting left parenthesis, then try to match a
// scheme name.
for (let i = 0; i < location.length; i++) {
if (location.charCodeAt(i) === CHAR_CODE_LPAREN) {
if (isContentScheme(location, i + 1)) {
frame.isContent = true;
return;
// There are 3 variants of location strings in the profiler (with optional
// column numbers):
// 1) "name (resource:line)"
// 2) "resource:line"
// 3) "resource"
let lastCharCode = location.charCodeAt(location.length - 1);
let schemeStartIndex = -1;
if (lastCharCode === CHAR_CODE_RPAREN) {
// Case 1)
//
// Need to search for the last occurrence of ' (' to find the start of the
// resource string.
for (let i = location.length - 2; i >= 0; i--) {
if (location.charCodeAt(i) === CHAR_CODE_LPAREN &&
i > 0 &&
location.charCodeAt(i - 1) === CHAR_CODE_SPACE) {
schemeStartIndex = i + 1;
break;
}
for (let j = i + 1; j < location.length; j++) {
if (location.charCodeAt(j) === CHAR_CODE_R &&
isChromeScheme(location, j) &&
(location.indexOf("resource://gre/modules/devtools") !== -1 ||
location.indexOf("resource:///modules/devtools") !== -1)) {
frame.category = global.CATEGORY_DEVTOOLS;
return;
}
}
break;
}
} else {
// Cases 2) and 3)
schemeStartIndex = 0;
}
// If there was no left parenthesis, try matching from the start.
if (isContentScheme(location, 0)) {
if (isContentScheme(location, schemeStartIndex)) {
frame.isContent = true;
return;
}
if (schemeStartIndex !== 0) {
for (let j = schemeStartIndex; j < location.length; j++) {
if (location.charCodeAt(j) === CHAR_CODE_R &&
isChromeScheme(location, j) &&
(location.indexOf("resource://gre/modules/devtools") !== -1 ||
location.indexOf("resource:///modules/devtools") !== -1)) {
frame.category = global.CATEGORY_DEVTOOLS;
return;
}
}
}
if (location === "EnterJIT") {
frame.category = global.CATEGORY_JIT;
return;
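Both rewritten helpers in this file classify a location string by its last character and then scan backwards, as the long comment above explains. A simplified, regex-based sketch of the same three shapes; it is illustrative only and skips the details the real parser handles (urlencoded spaces, ' -> ' chains, port numbers):

// Case 1: "name (resource:line[:column])", case 2: "resource:line[:column]",
// case 3: bare resource, ambiguous with SPS markers such as "EnterJIT".
function sketchParseLocation(location) {
  if (location.endsWith(")")) {
    // Greedy (.*) stops at the last " (", which is the separator because
    // spaces inside the resource are urlencoded.
    let m = /^(.*) \((.*?):(\d+)(?::(\d+))?\)$/.exec(location);
    if (m) {
      return { name: m[1], resource: m[2], line: +m[3], column: m[4] ? +m[4] : undefined };
    }
  }
  let m = /^(.*?):(\d+)(?::(\d+))?$/.exec(location);
  if (m) {
    return { resource: m[1], line: +m[2], column: m[3] ? +m[3] : undefined };
  }
  return { name: location }; // treated as a function name / marker
}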

View File

@ -22,8 +22,12 @@ const CONTENT_LOCATIONS = [
// Occurs when executing an inline script on a root html page with port
// (I've never seen it with a column number but check anyway) bug 1164131
"hello/<.world (http://localhost:8888/:1",
"hello/<.world (http://localhost:8888/:100:50",
"hello/<.world (http://localhost:8888/:1)",
"hello/<.world (http://localhost:8888/:100:50)",
// bug 1197636
"Native[\"arraycopy(blah)\"] (http://localhost:8888/profiler.html:4)",
"Native[\"arraycopy(blah)\"] (http://localhost:8888/profiler.html:4:5)",
].map(argify);
const CHROME_LOCATIONS = [
@ -70,6 +74,8 @@ add_task(function () {
["hello/<.world", "file.js", "localhost", "http://localhost:8888/file.js", 100, null, "localhost:8888", 8888],
["hello/<.world", "/", "localhost", "http://localhost:8888/", 1, null, "localhost:8888", 8888],
["hello/<.world", "/", "localhost", "http://localhost:8888/", 100, 50, "localhost:8888", 8888],
["Native[\"arraycopy(blah)\"]", "profiler.html", "localhost", "http://localhost:8888/profiler.html", 4, null, "localhost:8888", 8888],
["Native[\"arraycopy(blah)\"]", "profiler.html", "localhost", "http://localhost:8888/profiler.html", 4, 5, "localhost:8888", 8888],
];
for (let i = 0; i < PARSED_CONTENT.length; i++) {

View File

@ -47,9 +47,9 @@ function* performTest() {
let TEST_DATA = synthesizeProfileForTest([{
frames: [{
location: "A (http://path/to/file.js:10:5"
location: "A (http://path/to/file.js:10:5)"
}, {
location: "B (http://path/to/file.js:100:5"
location: "B (http://path/to/file.js:100:5)"
}],
time: 50,
}]);
@ -65,7 +65,7 @@ let EXPECTED_OUTPUT = [{
}, {
blocks: [{
startTime: 0,
frameKey: "A (http://path/to/file.js:10:5",
frameKey: "A (http://path/to/file.js:10:5)",
x: 0,
y: 0,
width: 50,
@ -95,7 +95,7 @@ let EXPECTED_OUTPUT = [{
}, {
blocks: [{
startTime: 0,
frameKey: "B (http://path/to/file.js:100:5",
frameKey: "B (http://path/to/file.js:100:5)",
x: 0,
y: 15,
width: 50,

View File

@ -42,6 +42,7 @@ support-files =
[test_basic.html]
[test_newapp.html]
skip-if = buildapp == 'b2g' || (os == "mac" && (os_version == "10.8" || os_version == "10.10") && debug) || (os == "win" && os_version == "10.0") # Bug 1135315, bug 1197053
[test_import.html]
[test_duplicate_import.html]
[test_runtime.html]

View File

@ -8,6 +8,7 @@ support-files =
../device_front_shared.js
[test_newapp.html]
skip-if = buildapp == 'b2g' || (os == "mac" && (os_version == "10.8" || os_version == "10.10") && debug) || (os == "win" && os_version == "10.0") # Bug 1135315, bug 1197053
[test_import.html]
[test_duplicate_import.html]
[test_runtime.html]

View File

@ -29,6 +29,7 @@
#include "nsIPresShell.h"
#include "nsIScriptError.h"
#include "nsIWindowMediator.h"
#include "nsIPrefService.h"
nsChromeRegistry* nsChromeRegistry::gChromeRegistry;
@ -657,6 +658,31 @@ nsChromeRegistry::MustLoadURLRemotely(nsIURI *aURI, bool *aResult)
return NS_OK;
}
bool
nsChromeRegistry::GetDirectionForLocale(const nsACString& aLocale)
{
// first check the intl.uidirection.<locale> preference, and if that is not
// set, check the same preference but with just the first two characters of
// the locale. If that isn't set, default to left-to-right.
nsAutoCString prefString = NS_LITERAL_CSTRING("intl.uidirection.") + aLocale;
nsCOMPtr<nsIPrefBranch> prefBranch (do_GetService(NS_PREFSERVICE_CONTRACTID));
if (!prefBranch) {
return false;
}
nsXPIDLCString dir;
prefBranch->GetCharPref(prefString.get(), getter_Copies(dir));
if (dir.IsEmpty()) {
int32_t hyphen = prefString.FindChar('-');
if (hyphen >= 1) {
nsAutoCString shortPref(Substring(prefString, 0, hyphen));
prefBranch->GetCharPref(shortPref.get(), getter_Copies(dir));
}
}
return dir.EqualsLiteral("rtl");
}
NS_IMETHODIMP_(bool)
nsChromeRegistry::WrappersEnabled(nsIURI *aURI)
{

View File

@ -99,6 +99,8 @@ protected:
static nsresult GetProviderAndPath(nsIURL* aChromeURL,
nsACString& aProvider, nsACString& aPath);
bool GetDirectionForLocale(const nsACString& aLocale);
public:
static already_AddRefed<nsChromeRegistry> GetSingleton();

View File

@ -226,24 +226,7 @@ nsChromeRegistryChrome::IsLocaleRTL(const nsACString& package, bool *aResult)
if (locale.Length() < 2)
return NS_OK;
// first check the intl.uidirection.<locale> preference, and if that is not
// set, check the same preference but with just the first two characters of
// the locale. If that isn't set, default to left-to-right.
nsAutoCString prefString = NS_LITERAL_CSTRING("intl.uidirection.") + locale;
nsCOMPtr<nsIPrefBranch> prefBranch (do_GetService(NS_PREFSERVICE_CONTRACTID));
if (!prefBranch)
return NS_OK;
nsXPIDLCString dir;
prefBranch->GetCharPref(prefString.get(), getter_Copies(dir));
if (dir.IsEmpty()) {
int32_t hyphen = prefString.FindChar('-');
if (hyphen >= 1) {
nsAutoCString shortPref(Substring(prefString, 0, hyphen));
prefBranch->GetCharPref(shortPref.get(), getter_Copies(dir));
}
}
*aResult = dir.EqualsLiteral("rtl");
*aResult = GetDirectionForLocale(locale);
return NS_OK;
}
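The preference fallback described in the comment (now shared as nsChromeRegistry::GetDirectionForLocale and reused by both the chrome and content registries) checks intl.uidirection.<locale> first and then the language-only prefix. A JS-side sketch of the same lookup order, assuming a chrome-privileged scope; the C++ method remains the implementation actually used:

Components.utils.import("resource://gre/modules/Services.jsm");

// Returns true for right-to-left. Mirrors only the pref fallback order.
function getDirectionForLocale(locale) {
  let read = name => {
    try { return Services.prefs.getCharPref(name); } catch (e) { return ""; }
  };
  let dir = read("intl.uidirection." + locale);        // e.g. intl.uidirection.ar-EG
  let hyphen = locale.indexOf("-");
  if (!dir && hyphen > 0) {
    dir = read("intl.uidirection." + locale.substring(0, hyphen)); // e.g. intl.uidirection.ar
  }
  return dir === "rtl";
}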

View File

@ -210,10 +210,15 @@ nsChromeRegistryContent::CheckForNewChrome()
}
NS_IMETHODIMP
nsChromeRegistryContent::IsLocaleRTL(const nsACString& package,
nsChromeRegistryContent::IsLocaleRTL(const nsACString& aPackage,
bool *aResult)
{
CONTENT_NOT_IMPLEMENTED();
if (aPackage != nsDependentCString("global")) {
NS_ERROR("Packages other than global unavailable");
return NS_ERROR_NOT_AVAILABLE;
}
*aResult = GetDirectionForLocale(mLocale);
return NS_OK;
}
NS_IMETHODIMP

View File

@ -634,6 +634,14 @@ child:
* they are 'compressed' by dumping the oldest one.
*/
RealMouseMoveEvent(WidgetMouseEvent event) compress;
/**
* Mouse move events with |reason == eSynthesized| are sent via a separate
* message because they do not generate DOM 'mousemove' events, and the
* 'compress' attribute on RealMouseMoveEvent() could result in a
* |reason == eReal| event being dropped in favour of an |eSynthesized|
* event, and thus a DOM 'mousemove' event being lost.
*/
SynthMouseMoveEvent(WidgetMouseEvent event);
RealMouseButtonEvent(WidgetMouseEvent event);
RealKeyEvent(WidgetKeyboardEvent event, MaybeNativeKeyBinding keyBinding);
MouseWheelEvent(WidgetWheelEvent event, ScrollableLayerGuid aGuid, uint64_t aInputBlockId);

View File

@ -1925,6 +1925,12 @@ TabChild::RecvRealMouseMoveEvent(const WidgetMouseEvent& event)
return RecvRealMouseButtonEvent(event);
}
bool
TabChild::RecvSynthMouseMoveEvent(const WidgetMouseEvent& event)
{
return RecvRealMouseButtonEvent(event);
}
bool
TabChild::RecvRealMouseButtonEvent(const WidgetMouseEvent& event)
{

View File

@ -334,6 +334,7 @@ public:
const int32_t& aModifiers,
const bool& aIgnoreRootScrollFrame) override;
virtual bool RecvRealMouseMoveEvent(const mozilla::WidgetMouseEvent& event) override;
virtual bool RecvSynthMouseMoveEvent(const mozilla::WidgetMouseEvent& event) override;
virtual bool RecvRealMouseButtonEvent(const mozilla::WidgetMouseEvent& event) override;
virtual bool RecvRealDragEvent(const WidgetDragEvent& aEvent,
const uint32_t& aDragAction,

View File

@ -1396,7 +1396,11 @@ bool TabParent::SendRealMouseEvent(WidgetMouseEvent& event)
}
if (NS_MOUSE_MOVE == event.mMessage) {
return SendRealMouseMoveEvent(event);
if (event.reason == WidgetMouseEvent::eSynthesized) {
return SendSynthMouseMoveEvent(event);
} else {
return SendRealMouseMoveEvent(event);
}
}
return SendRealMouseButtonEvent(event);
}

View File

@ -15,12 +15,8 @@
#include "mozilla/Snprintf.h"
#include <algorithm>
#include "mozilla/Telemetry.h"
#include "Latency.h"
#include "CubebUtils.h"
#include "nsPrintfCString.h"
#ifdef XP_MACOSX
#include <sys/sysctl.h>
#endif
namespace mozilla {
@ -130,18 +126,11 @@ AudioStream::AudioStream()
, mWritten(0)
, mAudioClock(this)
, mTimeStretcher(nullptr)
, mLatencyRequest(HighLatency)
, mReadPoint(0)
, mDumpFile(nullptr)
, mBytesPerFrame(0)
, mState(INITIALIZED)
, mNeedsStart(false)
, mShouldDropFrames(false)
, mPendingAudioInitTask(false)
, mLastGoodPosition(0)
{
// keep a ref in case we shut down later than nsLayoutStatics
mLatencyLog = AsyncLatencyLogger::Get(true);
}
AudioStream::~AudioStream()
@ -164,10 +153,8 @@ AudioStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
// Possibly add in the future:
// - mTimeStretcher
// - mLatencyLog
// - mCubebStream
amount += mInserts.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
return amount;
@ -319,12 +306,9 @@ WriteDumpFile(FILE* aDumpFile, AudioStream* aStream, uint32_t aFrames,
fflush(aDumpFile);
}
// NOTE: this must not block a LowLatency stream for any significant amount
// of time, or it will block the entirety of MSG
nsresult
AudioStream::Init(int32_t aNumChannels, int32_t aRate,
const dom::AudioChannel aAudioChannel,
LatencyRequest aLatencyRequest)
const dom::AudioChannel aAudioChannel)
{
mStartTime = TimeStamp::Now();
mIsFirst = CubebUtils::GetFirstStream();
@ -338,7 +322,6 @@ AudioStream::Init(int32_t aNumChannels, int32_t aRate,
mInRate = mOutRate = aRate;
mChannels = aNumChannels;
mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels;
mLatencyRequest = aLatencyRequest;
mDumpFile = OpenDumpFile(this);
@ -374,108 +357,13 @@ AudioStream::Init(int32_t aNumChannels, int32_t aRate,
MOZ_ASSERT(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames");
mBuffer.SetCapacity(bufferLimit);
if (aLatencyRequest == LowLatency) {
// Don't block this thread to initialize a cubeb stream.
// When this is done, it will start callbacks from Cubeb. Those will
// cause us to move from INITIALIZED to RUNNING. Until then, we
// can't access any cubeb functions.
// Use a RefPtr to avoid leaks if Dispatch fails
mPendingAudioInitTask = true;
RefPtr<AudioInitTask> init = new AudioInitTask(this, aLatencyRequest, params);
nsresult rv = init->Dispatch();
if (NS_FAILED(rv)) {
mPendingAudioInitTask = false;
}
return rv;
}
// High latency - open synchronously
nsresult rv = OpenCubeb(params, aLatencyRequest);
NS_ENSURE_SUCCESS(rv, rv);
// See if we need to start() the stream, since we must do that from this
// thread for now (cubeb API issue)
{
MonitorAutoLock mon(mMonitor);
CheckForStart();
}
return NS_OK;
}
// On certain MacBookPro, the microphone is located near the left speaker.
// We need to pan the sound output to the right speaker if we are using the mic
// and the built-in speaker, or we will have terrible echo.
void AudioStream::PanOutputIfNeeded(bool aMicrophoneActive)
{
#ifdef XP_MACOSX
cubeb_device* device;
int rv;
char name[128];
size_t length = sizeof(name);
bool panCenter = false;
rv = sysctlbyname("hw.model", name, &length, NULL, 0);
if (rv) {
return;
}
if (!strncmp(name, "MacBookPro", 10)) {
if (cubeb_stream_get_current_device(mCubebStream.get(), &device) == CUBEB_OK) {
// Check if we are currently outputing sound on external speakers.
if (!strcmp(device->output_name, "ispk")) {
// Pan everything to the right speaker.
if (aMicrophoneActive) {
LOG(("%p Panning audio output to the right.", this));
if (cubeb_stream_set_panning(mCubebStream.get(), 1.0) != CUBEB_OK) {
NS_WARNING("Could not pan audio output to the right.");
}
} else {
panCenter = true;
}
} else {
panCenter = true;
}
if (panCenter) {
LOG(("%p Panning audio output to the center.", this));
if (cubeb_stream_set_panning(mCubebStream.get(), 0.0) != CUBEB_OK) {
NS_WARNING("Could not pan audio output to the center.");
}
}
cubeb_stream_device_destroy(mCubebStream.get(), device);
}
}
#endif
}
void AudioStream::ResetStreamIfNeeded()
{
cubeb_device * device;
// Only reset a device if a mic is active, and this is a low latency stream.
if (!mMicrophoneActive || mLatencyRequest != LowLatency) {
return;
}
if (cubeb_stream_get_current_device(mCubebStream.get(), &device) == CUBEB_OK) {
// This a microphone that goes through the headphone plug, reset the
// output to prevent echo building up.
if (strcmp(device->input_name, "emic") == 0) {
LOG(("Resetting audio output"));
Reset();
}
cubeb_stream_device_destroy(mCubebStream.get(), device);
}
}
void AudioStream::DeviceChangedCallback()
{
MonitorAutoLock mon(mMonitor);
PanOutputIfNeeded(mMicrophoneActive);
mShouldDropFrames = true;
ResetStreamIfNeeded();
return OpenCubeb(params);
}
// This code used to live inside AudioStream::Init(), but on Mac (others?)
// it has been known to take 300-800 (or even 8500) ms to execute(!)
nsresult
AudioStream::OpenCubeb(cubeb_stream_params &aParams,
LatencyRequest aLatencyRequest)
AudioStream::OpenCubeb(cubeb_stream_params &aParams)
{
cubeb* cubebContext = CubebUtils::GetCubebContext();
if (!cubebContext) {
@ -488,14 +376,7 @@ AudioStream::OpenCubeb(cubeb_stream_params &aParams,
// If the latency pref is set, use it. Otherwise, if this stream is intended
// for low latency playback, try to get the lowest latency possible.
// Otherwise, for normal streams, use 100ms.
uint32_t latency;
if (aLatencyRequest == LowLatency && !CubebUtils::CubebLatencyPrefSet()) {
if (cubeb_get_min_latency(cubebContext, aParams, &latency) != CUBEB_OK) {
latency = CubebUtils::GetCubebLatency();
}
} else {
latency = CubebUtils::GetCubebLatency();
}
uint32_t latency = CubebUtils::GetCubebLatency();
{
cubeb_stream* stream;
@ -504,9 +385,6 @@ AudioStream::OpenCubeb(cubeb_stream_params &aParams,
MonitorAutoLock mon(mMonitor);
MOZ_ASSERT(mState != SHUTDOWN);
mCubebStream.reset(stream);
// We can't cubeb_stream_start() the thread from a transient thread due to
// cubeb API requirements (init can be called from another thread, but
// not start/stop/destroy/etc)
} else {
MonitorAutoLock mon(mMonitor);
mState = ERRORED;
@ -515,9 +393,6 @@ AudioStream::OpenCubeb(cubeb_stream_params &aParams,
}
}
cubeb_stream_register_device_changed_callback(mCubebStream.get(),
AudioStream::DeviceChangedCallback_s);
mState = INITIALIZED;
if (!mStartTime.IsNull()) {
@ -531,72 +406,12 @@ AudioStream::OpenCubeb(cubeb_stream_params &aParams,
return NS_OK;
}
void
AudioStream::AudioInitTaskFinished()
{
MonitorAutoLock mon(mMonitor);
mPendingAudioInitTask = false;
mon.NotifyAll();
}
void
AudioStream::CheckForStart()
{
mMonitor.AssertCurrentThreadOwns();
if (mState == INITIALIZED) {
// Start the stream right away when low latency has been requested. This means
// that the DataCallback will feed silence to cubeb, until the first frames
// are written to this AudioStream. Also start if a start has been queued.
if (mLatencyRequest == LowLatency || mNeedsStart) {
StartUnlocked(); // mState = STARTED or ERRORED
mNeedsStart = false;
MOZ_LOG(gAudioStreamLog, LogLevel::Warning,
("Started waiting %s-latency stream",
mLatencyRequest == LowLatency ? "low" : "high"));
} else {
// high latency, not full - OR Pause() was called before we got here
MOZ_LOG(gAudioStreamLog, LogLevel::Debug,
("Not starting waiting %s-latency stream",
mLatencyRequest == LowLatency ? "low" : "high"));
}
}
}
NS_IMETHODIMP
AudioInitTask::Run()
{
MOZ_ASSERT(mThread);
if (NS_IsMainThread()) {
mThread->Shutdown(); // can't Shutdown from the thread itself, darn
// Don't null out mThread!
// See bug 999104. We must hold a ref to the thread across Dispatch()
// since the internal mThread ref could be released while processing
// the Dispatch(), and Dispatch/PutEvent itself doesn't hold a ref; it
// assumes the caller does.
return NS_OK;
}
nsresult rv = mAudioStream->OpenCubeb(mParams, mLatencyRequest);
mAudioStream->AudioInitTaskFinished();
// and now kill this thread
NS_DispatchToMainThread(this);
return rv;
}
// aTime is the time in ms the samples were inserted into MediaStreamGraph
nsresult
AudioStream::Write(const AudioDataValue* aBuf, uint32_t aFrames, TimeStamp *aTime)
AudioStream::Write(const AudioDataValue* aBuf, uint32_t aFrames)
{
MonitorAutoLock mon(mMonitor);
// See if we need to start() the stream, since we must do that from this thread
CheckForStart();
if (mShouldDropFrames) {
mBuffer.ContractTo(0);
return NS_OK;
}
if (mState == ERRORED) {
return NS_ERROR_FAILURE;
}
@ -614,22 +429,6 @@ AudioStream::Write(const AudioDataValue* aBuf, uint32_t aFrames, TimeStamp *aTim
const uint8_t* src = reinterpret_cast<const uint8_t*>(aBuf);
uint32_t bytesToCopy = FramesToBytes(aFrames);
// XXX this will need to change if we want to enable this on-the-fly!
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
// Record the position and time this data was inserted
int64_t timeMs;
if (aTime && !aTime->IsNull()) {
if (mStartTime.IsNull()) {
AsyncLatencyLogger::Get(true)->GetStartTime(mStartTime);
}
timeMs = (*aTime - mStartTime).ToMilliseconds();
} else {
timeMs = 0;
}
struct Inserts insert = { timeMs, aFrames};
mInserts.AppendElement(insert);
}
while (bytesToCopy > 0) {
uint32_t available = std::min(bytesToCopy, mBuffer.Available());
MOZ_ASSERT(available % mBytesPerFrame == 0,
@ -640,33 +439,19 @@ AudioStream::Write(const AudioDataValue* aBuf, uint32_t aFrames, TimeStamp *aTim
bytesToCopy -= available;
if (bytesToCopy > 0) {
// Careful - the CubebInit thread may not have gotten to STARTED yet
if ((mState == INITIALIZED || mState == STARTED) && mLatencyRequest == LowLatency) {
// don't ever block MediaStreamGraph low-latency streams
uint32_t remains = 0; // we presume the buffer is full
if (mBuffer.Length() > bytesToCopy) {
remains = mBuffer.Length() - bytesToCopy; // Free up just enough space
}
// account for dropping samples
MOZ_LOG(gAudioStreamLog, LogLevel::Warning, ("Stream %p dropping %u bytes (%u frames)in Write()",
this, mBuffer.Length() - remains, BytesToFrames(mBuffer.Length() - remains)));
mReadPoint += BytesToFrames(mBuffer.Length() - remains);
mBuffer.ContractTo(remains);
} else { // RUNNING or high latency
// If we are not playing, but our buffer is full, start playing to make
// room for soon-to-be-decoded data.
if (mState != STARTED && mState != RUNNING) {
MOZ_LOG(gAudioStreamLog, LogLevel::Warning, ("Starting stream %p in Write (%u waiting)",
this, bytesToCopy));
StartUnlocked();
if (mState == ERRORED) {
return NS_ERROR_FAILURE;
}
}
MOZ_LOG(gAudioStreamLog, LogLevel::Warning, ("Stream %p waiting in Write() (%u waiting)",
this, bytesToCopy));
mon.Wait();
}
// If we are not playing, but our buffer is full, start playing to make
// room for soon-to-be-decoded data.
if (mState != STARTED && mState != RUNNING) {
MOZ_LOG(gAudioStreamLog, LogLevel::Warning, ("Starting stream %p in Write (%u waiting)",
this, bytesToCopy));
StartUnlocked();
if (mState == ERRORED) {
return NS_ERROR_FAILURE;
}
}
MOZ_LOG(gAudioStreamLog, LogLevel::Warning, ("Stream %p waiting in Write() (%u waiting)",
this, bytesToCopy));
mon.Wait();
}
}
@ -692,16 +477,6 @@ AudioStream::SetVolume(double aVolume)
}
}
void
AudioStream::SetMicrophoneActive(bool aActive)
{
MonitorAutoLock mon(mMonitor);
mMicrophoneActive = aActive;
PanOutputIfNeeded(mMicrophoneActive);
}
void
AudioStream::Cancel()
{
@ -737,7 +512,6 @@ AudioStream::StartUnlocked()
{
mMonitor.AssertCurrentThreadOwns();
if (!mCubebStream) {
mNeedsStart = true;
return;
}
@ -746,8 +520,6 @@ AudioStream::StartUnlocked()
{
MonitorAutoUnlock mon(mMonitor);
r = cubeb_stream_start(mCubebStream.get());
PanOutputIfNeeded(mMicrophoneActive);
}
mState = r == CUBEB_OK ? STARTED : ERRORED;
LOG(("AudioStream: started %p, state %s", this, mState == STARTED ? "STARTED" : "ERRORED"));
@ -764,7 +536,6 @@ AudioStream::Pause()
}
if (!mCubebStream || (mState != STARTED && mState != RUNNING)) {
mNeedsStart = false;
mState = STOPPED; // which also tells async OpenCubeb not to start, just init
return;
}
@ -803,10 +574,6 @@ AudioStream::Shutdown()
MonitorAutoLock mon(mMonitor);
LOG(("AudioStream: Shutdown %p, state %d", this, mState));
while (mPendingAudioInitTask) {
mon.Wait();
}
if (mCubebStream) {
MonitorAutoUnlock mon(mMonitor);
// Force stop to put the cubeb stream in a stable state before deletion.
@ -826,19 +593,12 @@ AudioStream::GetPosition()
return mAudioClock.GetPositionUnlocked();
}
// This function is miscompiled by PGO with MSVC 2010. See bug 768333.
#ifdef _MSC_VER
#pragma optimize("", off)
#endif
int64_t
AudioStream::GetPositionInFrames()
{
MonitorAutoLock mon(mMonitor);
return mAudioClock.GetPositionInFrames();
}
#ifdef _MSC_VER
#pragma optimize("", on)
#endif
int64_t
AudioStream::GetPositionInFramesUnlocked()
@ -865,17 +625,6 @@ AudioStream::GetPositionInFramesUnlocked()
return std::min<uint64_t>(mLastGoodPosition, INT64_MAX);
}
int64_t
AudioStream::GetLatencyInFrames()
{
uint32_t latency;
if (cubeb_stream_get_latency(mCubebStream.get(), &latency)) {
NS_WARNING("Could not get cubeb latency.");
return 0;
}
return static_cast<int64_t>(latency);
}
bool
AudioStream::IsPaused()
{
@ -883,26 +632,8 @@ AudioStream::IsPaused()
return mState == STOPPED;
}
void
AudioStream::GetBufferInsertTime(int64_t &aTimeMs)
{
mMonitor.AssertCurrentThreadOwns();
if (mInserts.Length() > 0) {
// Find the right block, but don't leave the array empty
while (mInserts.Length() > 1 && mReadPoint >= mInserts[0].mFrames) {
mReadPoint -= mInserts[0].mFrames;
mInserts.RemoveElementAt(0);
}
// offset for amount already read
// XXX Note: could misreport if we couldn't find a block in the right timeframe
aTimeMs = mInserts[0].mTimeMs + ((mReadPoint * 1000) / mOutRate);
} else {
aTimeMs = INT64_MAX;
}
}
long
AudioStream::GetUnprocessed(void* aBuffer, long aFrames, int64_t &aTimeMs)
AudioStream::GetUnprocessed(void* aBuffer, long aFrames)
{
mMonitor.AssertCurrentThreadOwns();
uint8_t* wpos = reinterpret_cast<uint8_t*>(aBuffer);
@ -924,42 +655,11 @@ AudioStream::GetUnprocessed(void* aBuffer, long aFrames, int64_t &aTimeMs)
wpos += input_size[0];
memcpy(wpos, input[1], input_size[1]);
// First time block now has our first returned sample
mReadPoint += BytesToFrames(available);
GetBufferInsertTime(aTimeMs);
return BytesToFrames(available) + flushedFrames;
}
// Get unprocessed samples, and pad the beginning of the buffer with silence if
// there is not enough data.
long
AudioStream::GetUnprocessedWithSilencePadding(void* aBuffer, long aFrames, int64_t& aTimeMs)
{
mMonitor.AssertCurrentThreadOwns();
uint32_t toPopBytes = FramesToBytes(aFrames);
uint32_t available = std::min(toPopBytes, mBuffer.Length());
uint32_t silenceOffset = toPopBytes - available;
uint8_t* wpos = reinterpret_cast<uint8_t*>(aBuffer);
memset(wpos, 0, silenceOffset);
wpos += silenceOffset;
void* input[2];
uint32_t input_size[2];
mBuffer.PopElements(available, &input[0], &input_size[0], &input[1], &input_size[1]);
memcpy(wpos, input[0], input_size[0]);
wpos += input_size[0];
memcpy(wpos, input[1], input_size[1]);
GetBufferInsertTime(aTimeMs);
return aFrames;
}
long
AudioStream::GetTimeStretched(void* aBuffer, long aFrames, int64_t &aTimeMs)
AudioStream::GetTimeStretched(void* aBuffer, long aFrames)
{
mMonitor.AssertCurrentThreadOwns();
long processedFrames = 0;
@ -985,7 +685,6 @@ AudioStream::GetTimeStretched(void* aBuffer, long aFrames, int64_t &aTimeMs)
}
mBuffer.PopElements(available, &input[0], &input_size[0],
&input[1], &input_size[1]);
mReadPoint += BytesToFrames(available);
for(uint32_t i = 0; i < 2; i++) {
mTimeStretcher->putSamples(reinterpret_cast<AudioDataValue*>(input[i]), BytesToFrames(input_size[i]));
}
@ -995,59 +694,9 @@ AudioStream::GetTimeStretched(void* aBuffer, long aFrames, int64_t &aTimeMs)
processedFrames += receivedFrames;
} while (processedFrames < aFrames && !lowOnBufferedData);
GetBufferInsertTime(aTimeMs);
return processedFrames;
}
void
AudioStream::Reset()
{
MOZ_ASSERT(mLatencyRequest == LowLatency, "We should only be reseting low latency streams");
mShouldDropFrames = true;
mNeedsStart = true;
cubeb_stream_params params;
params.rate = mInRate;
params.channels = mOutChannels;
#if defined(__ANDROID__)
#if defined(MOZ_B2G)
params.stream_type = CubebUtils::ConvertChannelToCubebType(mAudioChannel);
#else
params.stream_type = CUBEB_STREAM_TYPE_MUSIC;
#endif
if (params.stream_type == CUBEB_STREAM_TYPE_MAX) {
return;
}
#endif
if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) {
params.format = CUBEB_SAMPLE_S16NE;
} else {
params.format = CUBEB_SAMPLE_FLOAT32NE;
}
mBytesPerFrame = sizeof(AudioDataValue) * mOutChannels;
// Size mBuffer for one second of audio. This value is arbitrary, and was
// selected based on the observed behaviour of the existing AudioStream
// implementations.
uint32_t bufferLimit = FramesToBytes(mInRate);
MOZ_ASSERT(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames");
mBuffer.Reset();
mBuffer.SetCapacity(bufferLimit);
// Don't block this thread to initialize a cubeb stream.
// When this is done, it will start callbacks from Cubeb. Those will
// cause us to move from INITIALIZED to RUNNING. Until then, we
// can't access any cubeb functions.
// Use a RefPtr to avoid leaks if Dispatch fails
RefPtr<AudioInitTask> init = new AudioInitTask(this, mLatencyRequest, params);
init->Dispatch();
}
long
AudioStream::DataCallback(void* aBuffer, long aFrames)
{
@ -1058,63 +707,26 @@ AudioStream::DataCallback(void* aBuffer, long aFrames)
AudioDataValue* output = reinterpret_cast<AudioDataValue*>(aBuffer);
uint32_t underrunFrames = 0;
uint32_t servicedFrames = 0;
int64_t insertTime;
mShouldDropFrames = false;
// NOTE: wasapi (others?) can call us back *after* stop()/Shutdown() (mState == SHUTDOWN)
// Bug 996162
// callback tells us cubeb succeeded initializing
if (mState == STARTED) {
// For low-latency streams, we want to minimize any built-up data when
// we start getting callbacks.
// Simple version - contract on first callback only.
if (mLatencyRequest == LowLatency) {
uint32_t old_len = mBuffer.Length();
available = mBuffer.ContractTo(FramesToBytes(aFrames));
TimeStamp now = TimeStamp::Now();
if (!mStartTime.IsNull()) {
int64_t timeMs = (now - mStartTime).ToMilliseconds();
MOZ_LOG(gAudioStreamLog, LogLevel::Warning,
("Stream took %lldms to start after first Write() @ %u", timeMs, mOutRate));
} else {
MOZ_LOG(gAudioStreamLog, LogLevel::Warning,
("Stream started before Write() @ %u", mOutRate));
}
if (old_len != available) {
// Note that we may have dropped samples in Write() as well!
MOZ_LOG(gAudioStreamLog, LogLevel::Warning,
("AudioStream %p dropped %u + %u initial frames @ %u", this,
mReadPoint, BytesToFrames(old_len - available), mOutRate));
mReadPoint += BytesToFrames(old_len - available);
}
}
mState = RUNNING;
}
if (available) {
// When we are playing a low latency stream, and it is the first time we are
// getting data from the buffer, we prefer to add the silence for an
// underrun at the beginning of the buffer, so the first buffer is not cut
// in half by the silence inserted to compensate for the underrun.
if (mInRate == mOutRate) {
if (mLatencyRequest == LowLatency && !mWritten) {
servicedFrames = GetUnprocessedWithSilencePadding(output, aFrames, insertTime);
} else {
servicedFrames = GetUnprocessed(output, aFrames, insertTime);
}
servicedFrames = GetUnprocessed(output, aFrames);
} else {
servicedFrames = GetTimeStretched(output, aFrames, insertTime);
servicedFrames = GetTimeStretched(output, aFrames);
}
MOZ_ASSERT(mBuffer.Length() % mBytesPerFrame == 0, "Must copy complete frames");
// Notify any blocked Write() call that more space is available in mBuffer.
mon.NotifyAll();
} else {
GetBufferInsertTime(insertTime);
}
underrunFrames = aFrames - servicedFrames;
@ -1135,21 +747,6 @@ AudioStream::DataCallback(void* aBuffer, long aFrames)
}
WriteDumpFile(mDumpFile, this, aFrames, aBuffer);
// Don't log if we're not interested or if the stream is inactive
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug) &&
mState != SHUTDOWN &&
insertTime != INT64_MAX && servicedFrames > underrunFrames) {
uint32_t latency = UINT32_MAX;
if (cubeb_stream_get_latency(mCubebStream.get(), &latency)) {
NS_WARNING("Could not get latency from cubeb.");
}
TimeStamp now = TimeStamp::Now();
mLatencyLog->Log(AsyncLatencyLogger::AudioStream, reinterpret_cast<uint64_t>(this),
insertTime, now);
mLatencyLog->Log(AsyncLatencyLogger::Cubeb, reinterpret_cast<uint64_t>(mCubebStream.get()),
(latency * 1000) / mOutRate, now);
}
return servicedFrames;
}

View File

@ -10,9 +10,10 @@
#include "nsAutoPtr.h"
#include "nsCOMPtr.h"
#include "nsThreadUtils.h"
#include "Latency.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include "mozilla/Monitor.h"
#include "mozilla/RefPtr.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/UniquePtr.h"
#include "CubebUtils.h"
#include "soundtouch/SoundTouchFactory.h"
@ -71,7 +72,7 @@ private:
int mInRate;
// True if the we are timestretching, false if we are resampling.
bool mPreservesPitch;
// The history of frames sent to the audio engine in each Datacallback.
// The history of frames sent to the audio engine in each DataCallback.
const nsAutoPtr<FrameHistory> mFrameHistory;
};
@ -133,19 +134,6 @@ public:
mStart %= mCapacity;
}
// Throw away all but aSize bytes from the buffer. Returns new size, which
// may be less than aSize
uint32_t ContractTo(uint32_t aSize) {
MOZ_ASSERT(mBuffer && mCapacity, "Buffer not initialized.");
if (aSize >= mCount) {
return mCount;
}
mStart += (mCount - aSize);
mCount = aSize;
mStart %= mCapacity;
return mCount;
}
size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
size_t amount = 0;
@ -153,14 +141,6 @@ public:
return amount;
}
void Reset()
{
mBuffer = nullptr;
mCapacity = 0;
mStart = 0;
mCount = 0;
}
private:
nsAutoArrayPtr<uint8_t> mBuffer;
uint32_t mCapacity;
@ -168,8 +148,6 @@ private:
uint32_t mCount;
};
class AudioInitTask;
// Access to a single instance of this class must be synchronized by
// callers, or made from a single thread. One exception is that access to
// GetPosition, GetPositionInFrames, SetVolume, and Get{Rate,Channels},
@ -182,17 +160,11 @@ public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioStream)
AudioStream();
enum LatencyRequest {
HighLatency,
LowLatency
};
// Initialize the audio stream. aNumChannels is the number of audio
// channels (1 for mono, 2 for stereo, etc) and aRate is the sample rate
// (22050Hz, 44100Hz, etc).
nsresult Init(int32_t aNumChannels, int32_t aRate,
const dom::AudioChannel aAudioStreamChannel,
LatencyRequest aLatencyRequest);
const dom::AudioChannel aAudioStreamChannel);
// Closes the stream. All future use of the stream is an error.
void Shutdown();
@ -202,9 +174,8 @@ public:
// Write audio data to the audio hardware. aBuf is an array of AudioDataValues
// AudioDataValue of length aFrames*mChannels. If aFrames is larger
// than the result of Available(), the write will block until sufficient
// buffer space is available. aTime is the time in ms associated with the first sample
// for latency calculations
nsresult Write(const AudioDataValue* aBuf, uint32_t aFrames, TimeStamp* aTime = nullptr);
// buffer space is available.
nsresult Write(const AudioDataValue* aBuf, uint32_t aFrames);
// Return the number of audio frames that can be written without blocking.
uint32_t Available();
@ -213,12 +184,6 @@ public:
// 0 (meaning muted) to 1 (meaning full volume). Thread-safe.
void SetVolume(double aVolume);
// Informs the AudioStream that a microphone is being used by someone in the
// application.
void SetMicrophoneActive(bool aActive);
void PanOutputIfNeeded(bool aMicrophoneActive);
void ResetStreamIfNeeded();
// Block until buffered audio data has been consumed.
void Drain();
@ -271,14 +236,7 @@ protected:
int64_t GetPositionInFramesUnlocked();
private:
friend class AudioInitTask;
// So we can call it asynchronously from AudioInitTask
nsresult OpenCubeb(cubeb_stream_params &aParams,
LatencyRequest aLatencyRequest);
void AudioInitTaskFinished();
void CheckForStart();
nsresult OpenCubeb(cubeb_stream_params &aParams);
static long DataCallback_S(cubeb_stream*, void* aThis, void* aBuffer, long aFrames)
{
@ -291,23 +249,13 @@ private:
}
static void DeviceChangedCallback_s(void * aThis) {
static_cast<AudioStream*>(aThis)->DeviceChangedCallback();
}
long DataCallback(void* aBuffer, long aFrames);
void StateCallback(cubeb_state aState);
void DeviceChangedCallback();
nsresult EnsureTimeStretcherInitializedUnlocked();
// aTime is the time in ms the samples were inserted into MediaStreamGraph
long GetUnprocessed(void* aBuffer, long aFrames, int64_t &aTime);
long GetTimeStretched(void* aBuffer, long aFrames, int64_t &aTime);
long GetUnprocessedWithSilencePadding(void* aBuffer, long aFrames, int64_t &aTime);
int64_t GetLatencyInFrames();
void GetBufferInsertTime(int64_t &aTimeMs);
long GetUnprocessed(void* aBuffer, long aFrames);
long GetTimeStretched(void* aBuffer, long aFrames);
void StartUnlocked();
@ -330,22 +278,9 @@ private:
int64_t mWritten;
AudioClock mAudioClock;
soundtouch::SoundTouch* mTimeStretcher;
nsRefPtr<AsyncLatencyLogger> mLatencyLog;
// copy of Latency logger's starting time for offset calculations
// Stream start time for stream open delay telemetry.
TimeStamp mStartTime;
// Whether we are playing a low latency stream, or a normal stream.
LatencyRequest mLatencyRequest;
// Where in the current mInserts[0] block cubeb has read to
int64_t mReadPoint;
// Keep track of each inserted block of samples and the time it was inserted
// so we can estimate the clock time for a specific sample's insertion (for when
// we send data to cubeb). Blocks are aged out as needed.
struct Inserts {
int64_t mTimeMs;
int64_t mFrames;
};
nsAutoTArray<Inserts, 8> mInserts;
// Output file for dumping audio
FILE* mDumpFile;
@ -386,57 +321,12 @@ private:
};
StreamState mState;
bool mNeedsStart; // needed in case Start() is called before cubeb is open
bool mIsFirst;
// True if a microphone is active.
bool mMicrophoneActive;
// When we are in the process of changing the output device, and the callback
// is not going to be called for a little while, simply drop incoming frames.
// This is only on OSX for now, because other systems handle this gracefully.
bool mShouldDropFrames;
// True if there is a pending AudioInitTask. Shutdown() will wait until the
// pending AudioInitTask is finished.
bool mPendingAudioInitTask;
// The last good position returned by cubeb_stream_get_position(). Used to
// check if the cubeb position is going backward.
uint64_t mLastGoodPosition;
};
class AudioInitTask : public nsRunnable
{
public:
AudioInitTask(AudioStream *aStream,
AudioStream::LatencyRequest aLatencyRequest,
const cubeb_stream_params &aParams)
: mAudioStream(aStream)
, mLatencyRequest(aLatencyRequest)
, mParams(aParams)
{}
nsresult Dispatch()
{
// Can't add 'this' as the event to run, since mThread may not be set yet
nsresult rv = NS_NewNamedThread("CubebInit", getter_AddRefs(mThread));
if (NS_SUCCEEDED(rv)) {
// Note: event must not null out mThread!
rv = mThread->Dispatch(this, NS_DISPATCH_NORMAL);
}
return rv;
}
protected:
virtual ~AudioInitTask() {};
private:
NS_IMETHOD Run() override final;
RefPtr<AudioStream> mAudioStream;
AudioStream::LatencyRequest mLatencyRequest;
cubeb_stream_params mParams;
nsCOMPtr<nsIThread> mThread;
};
} // namespace mozilla
#endif

View File

@ -1321,6 +1321,14 @@ MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
ps->ProcessInput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0);
}
}
// Remove references to shared AudioChunk buffers from downstream nodes
// first so that upstream nodes can re-use next iteration.
for (uint32_t i = mStreams.Length(); i--; ) {
AudioNodeStream* ns = mStreams[i]->AsAudioNodeStream();
if (ns) {
ns->ReleaseSharedBuffers();
}
}
t = next;
}
NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries");

View File

@ -263,8 +263,7 @@ DecodedAudioDataSink::InitializeAudioStream()
// circumstances, so we take care to drop the decoder monitor while
// initializing.
RefPtr<AudioStream> audioStream(new AudioStream());
nsresult rv = audioStream->Init(mInfo.mChannels, mInfo.mRate,
mChannel, AudioStream::HighLatency);
nsresult rv = audioStream->Init(mInfo.mChannels, mInfo.mRate, mChannel);
if (NS_FAILED(rv)) {
audioStream->Shutdown();
return rv;

View File

@ -785,7 +785,7 @@ TrackBuffersManager::CreateDemuxerforMIMEType()
#ifdef MOZ_WEBM
if (mType.LowerCaseEqualsLiteral("video/webm") || mType.LowerCaseEqualsLiteral("audio/webm")) {
mInputDemuxer = new WebMDemuxer(mCurrentInputBuffer);
mInputDemuxer = new WebMDemuxer(mCurrentInputBuffer, true /* IsMediaSource*/ );
return;
}
#endif

View File

@ -44,6 +44,7 @@ AppleVDADecoder::AppleVDADecoder(const VideoInfo& aConfig,
, mIsShutDown(false)
, mUseSoftwareImages(false)
, mIs106(!nsCocoaFeatures::OnLionOrLater())
, mQueuedSamples(0)
, mMonitor("AppleVideoDecoder")
, mIsFlushing(false)
, mDecoder(nullptr)
@ -213,15 +214,13 @@ PlatformCallback(void* decompressionOutputRefCon,
// FIXME: Distinguish between errors and empty flushed frames.
if (status != noErr || !image) {
NS_WARNING("AppleVDADecoder decoder returned no data");
return;
}
MOZ_ASSERT(CFGetTypeID(image) == CVPixelBufferGetTypeID(),
"AppleVDADecoder returned an unexpected image type");
if (infoFlags & kVDADecodeInfo_FrameDropped)
{
image = nullptr;
} else if (infoFlags & kVDADecodeInfo_FrameDropped) {
NS_WARNING(" ...frame dropped...");
return;
image = nullptr;
} else {
MOZ_ASSERT(image || CFGetTypeID(image) == CVPixelBufferGetTypeID(),
"AppleVDADecoder returned an unexpected image type");
}
AppleVDADecoder* decoder =
@ -257,12 +256,7 @@ PlatformCallback(void* decompressionOutputRefCon,
byte_offset,
is_sync_point == 1);
// Forward the data back to an object method which can access
// the correct reader's callback.
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArgs<CFRefPtr<CVPixelBufferRef>, AppleVDADecoder::AppleFrameRef>(
decoder, &AppleVDADecoder::OutputFrame, image, frameRef);
decoder->DispatchOutputTask(task.forget());
decoder->OutputFrame(image, frameRef);
}
AppleVDADecoder::AppleFrameRef*
@ -275,28 +269,30 @@ AppleVDADecoder::CreateAppleFrameRef(const MediaRawData* aSample)
void
AppleVDADecoder::DrainReorderedFrames()
{
MonitorAutoLock mon(mMonitor);
while (!mReorderQueue.IsEmpty()) {
mCallback->Output(mReorderQueue.Pop().get());
}
mQueuedSamples = 0;
}
void
AppleVDADecoder::ClearReorderedFrames()
{
MonitorAutoLock mon(mMonitor);
while (!mReorderQueue.IsEmpty()) {
mReorderQueue.Pop();
}
mQueuedSamples = 0;
}
// Copy and return a decoded frame.
nsresult
AppleVDADecoder::OutputFrame(CFRefPtr<CVPixelBufferRef> aImage,
AppleVDADecoder::OutputFrame(CVPixelBufferRef aImage,
AppleVDADecoder::AppleFrameRef aFrameRef)
{
MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
if (mIsFlushing) {
// We are in the process of flushing; ignore frame.
if (mIsShutDown || mIsFlushing) {
// We are in the process of flushing or shutting down; ignore frame.
return NS_OK;
}
@ -308,6 +304,19 @@ AppleVDADecoder::OutputFrame(CFRefPtr<CVPixelBufferRef> aImage,
aFrameRef.is_sync_point ? " keyframe" : ""
);
if (mQueuedSamples > mMaxRefFrames) {
// We had stopped requesting more input because we had received too much at
// the time. We can ask for more once again.
mCallback->InputExhausted();
}
MOZ_ASSERT(mQueuedSamples);
mQueuedSamples--;
if (!aImage) {
// Image was dropped by decoder.
return NS_OK;
}
// Where our resulting image will end up.
nsRefPtr<VideoData> data;
// Bounds.
@ -404,6 +413,7 @@ AppleVDADecoder::OutputFrame(CFRefPtr<CVPixelBufferRef> aImage,
// Frames come out in DTS order but we need to output them
// in composition order.
MonitorAutoLock mon(mMonitor);
mReorderQueue.Push(data);
while (mReorderQueue.Length() > mMaxRefFrames) {
mCallback->Output(mReorderQueue.Pop().get());
@ -471,6 +481,8 @@ AppleVDADecoder::SubmitFrame(MediaRawData* aSample)
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
mQueuedSamples++;
OSStatus rv = VDADecoderDecode(mDecoder,
0,
block,
@ -494,7 +506,7 @@ AppleVDADecoder::SubmitFrame(MediaRawData* aSample)
}
// Ask for more data.
if (!mInputIncoming) {
if (!mInputIncoming && mQueuedSamples <= mMaxRefFrames) {
LOG("AppleVDADecoder task queue empty; requesting more data");
mCallback->InputExhausted();
}

View File

@ -81,16 +81,9 @@ public:
return true;
}
void DispatchOutputTask(already_AddRefed<nsIRunnable> aTask)
{
nsCOMPtr<nsIRunnable> task = aTask;
if (mIsShutDown || mIsFlushing) {
return;
}
mTaskQueue->Dispatch(task.forget(), AbstractThread::DontAssertDispatchSuccess);
}
nsresult OutputFrame(CFRefPtr<CVPixelBufferRef> aImage,
// Access from the taskqueue and the decoder's thread.
// OutputFrame is thread-safe.
nsresult OutputFrame(CVPixelBufferRef aImage,
AppleFrameRef aFrameRef);
protected:
@ -108,26 +101,33 @@ protected:
nsRefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
nsRefPtr<layers::ImageContainer> mImageContainer;
ReorderQueue mReorderQueue;
uint32_t mPictureWidth;
uint32_t mPictureHeight;
uint32_t mDisplayWidth;
uint32_t mDisplayHeight;
// Accessed on multiple threads, but only set in constructor.
uint32_t mMaxRefFrames;
// Increased when Input is called, and decreased when ProcessFrame runs.
// Reaching 0 indicates that there's no pending Input.
Atomic<uint32_t> mInputIncoming;
Atomic<bool> mIsShutDown;
bool mUseSoftwareImages;
bool mIs106;
const bool mUseSoftwareImages;
const bool mIs106;
// Number of times a sample was queued via Input(). Will be decreased upon
// the decoder's callback being invoked.
// This is used to calculate how many frames have been buffered by the decoder.
Atomic<uint32_t> mQueuedSamples;
// For wait on mIsFlushing during Shutdown() process.
// Protects mReorderQueue.
Monitor mMonitor;
// Set on reader/decode thread calling Flush() to indicate that output is
// not required and so input samples on mTaskQueue need not be processed.
// Cleared on mTaskQueue in ProcessDrain().
Atomic<bool> mIsFlushing;
ReorderQueue mReorderQueue;
private:
VDADecoder mDecoder;

View File

@ -167,18 +167,14 @@ PlatformCallback(void* decompressionOutputRefCon,
// Validate our arguments.
if (status != noErr || !image) {
NS_WARNING("VideoToolbox decoder returned no data");
return;
}
if (flags & kVTDecodeInfo_FrameDropped) {
image = nullptr;
} else if (flags & kVTDecodeInfo_FrameDropped) {
NS_WARNING(" ...frame tagged as dropped...");
} else {
MOZ_ASSERT(CFGetTypeID(image) == CVPixelBufferGetTypeID(),
"VideoToolbox returned an unexpected image type");
}
MOZ_ASSERT(CFGetTypeID(image) == CVPixelBufferGetTypeID(),
"VideoToolbox returned an unexpected image type");
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArgs<CFRefPtr<CVPixelBufferRef>, AppleVTDecoder::AppleFrameRef>(
decoder, &AppleVTDecoder::OutputFrame, image, *frameRef);
decoder->DispatchOutputTask(task.forget());
decoder->OutputFrame(image, *frameRef);
}
nsresult
@ -242,6 +238,8 @@ AppleVTDecoder::SubmitFrame(MediaRawData* aSample)
return NS_ERROR_FAILURE;
}
mQueuedSamples++;
VTDecodeFrameFlags decodeFlags =
kVTDecodeFrame_EnableAsynchronousDecompression;
rv = VTDecompressionSessionDecodeFrame(mSession,
@ -257,7 +255,7 @@ AppleVTDecoder::SubmitFrame(MediaRawData* aSample)
}
// Ask for more data.
if (!mInputIncoming) {
if (!mInputIncoming && mQueuedSamples <= mMaxRefFrames) {
LOG("AppleVTDecoder task queue empty; requesting more data");
mCallback->InputExhausted();
}

View File

@ -536,12 +536,6 @@ AudioNodeStream::ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags)
mEngine->ProcessBlocksOnPorts(this, mInputChunks, mLastChunks, &finished);
}
}
for (auto& chunk : mInputChunks) {
// If the buffer is shared then it won't be reused, so release the
// reference now. Keep the channel data array to save a free/alloc
// pair.
chunk.ReleaseBufferIfShared();
}
for (uint16_t i = 0; i < outputCount; ++i) {
NS_ASSERTION(mLastChunks[i].GetDuration() == WEBAUDIO_BLOCK_SIZE,
"Invalid WebAudio chunk size");
@ -617,6 +611,22 @@ AudioNodeStream::AdvanceOutputSegment()
}
}
void
AudioNodeStream::ReleaseSharedBuffers()
{
// A shared buffer can't be reused, so release the reference now. Keep
// the channel data arrays to save unnecessary free/alloc.
// Release shared output buffers first, as they may be shared with input
// buffers which can be re-used if there are no other references.
for (auto& chunk : mLastChunks) {
chunk.ReleaseBufferIfShared();
}
for (auto& chunk : mInputChunks) {
chunk.ReleaseBufferIfShared();
}
}
StreamTime
AudioNodeStream::GetCurrentPosition()
{

View File

@ -117,6 +117,14 @@ public:
* the output. This is used only for DelayNodeEngine in a feedback loop.
*/
void ProduceOutputBeforeInput(GraphTime aFrom);
/**
* Remove references to shared AudioChunk buffers. Called on downstream
* nodes first after an iteration has called ProcessInput() on the entire
* graph, so that upstream nodes can re-use their buffers on the next
* iteration.
*/
void ReleaseSharedBuffers();
StreamTime GetCurrentPosition();
bool IsAudioParamStream() const
{
@ -184,7 +192,7 @@ protected:
// The engine that will generate output for this node.
nsAutoPtr<AudioNodeEngine> mEngine;
// The mixed input blocks are kept from iteration to iteration to avoid
// reallocating channel data arrays.
// reallocating channel data arrays and any buffers for mixing.
OutputChunks mInputChunks;
// The last block produced by this node.
OutputChunks mLastChunks;

View File

@ -114,6 +114,9 @@ public:
// ProduceBlockBeforeInput() when in a cycle.
if (!mHaveProducedBeforeInput) {
UpdateOutputBlock(aOutput, 0.0);
// Not in cycle, so no need for additional buffer reference.
// See ProduceBlockBeforeInput().
mLastOutput.SetNull(0);
}
mHaveProducedBeforeInput = false;
mBuffer.NextBlock();
@ -156,7 +159,13 @@ public:
if (mLeftOverData <= 0) {
aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
} else {
UpdateOutputBlock(aOutput, WEBAUDIO_BLOCK_SIZE);
// AudioNodeStream::ReleaseSharedBuffers() is called on delay nodes in
// cycles first and so may release the buffer reference in aOutput
// because downstream nodes may still be sharing it at that point. Therefore
// keep a separate reference to the output buffer for re-use on the next
// iteration.
UpdateOutputBlock(&mLastOutput, WEBAUDIO_BLOCK_SIZE);
*aOutput = mLastOutput;
}
mHaveProducedBeforeInput = true;
}
@ -177,6 +186,7 @@ public:
return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}
AudioChunk mLastOutput; // Used only when in a cycle.
AudioNodeStream* mSource;
AudioNodeStream* mDestination;
AudioParamTimeline mDelay;

View File

@ -415,9 +415,16 @@ void WebMBufferedState::UpdateIndex(const nsTArray<MediaByteRange>& aRanges, Med
}
}
}
nsRefPtr<MediaByteBuffer> bytes = aResource->MediaReadAt(offset, length);
if(bytes) {
while (length > 0) {
static const uint32_t BLOCK_SIZE = 1048576;
uint32_t block = std::min(length, BLOCK_SIZE);
nsRefPtr<MediaByteBuffer> bytes = aResource->MediaReadAt(offset, block);
if (!bytes) {
break;
}
NotifyDataArrived(bytes->Elements(), bytes->Length(), offset);
length -= bytes->Length();
offset += bytes->Length();
}
}
}

View File

@ -42,23 +42,17 @@ static int webmdemux_read(void* aBuffer, size_t aLength, void* aUserData)
MOZ_ASSERT(aUserData);
MOZ_ASSERT(aLength < UINT32_MAX);
WebMDemuxer* demuxer = reinterpret_cast<WebMDemuxer*>(aUserData);
int64_t length = demuxer->GetEndDataOffset();
uint32_t count = aLength;
int64_t position = demuxer->GetResource()->Tell();
if (position >= length) {
// GetLastBlockOffset was calculated after we had read past it.
// This condition can only occur with plain webm, as with MSE,
// EnsureUpToDateIndex would have been called first.
// Continue reading to the end instead.
length = demuxer->GetResource()->GetLength();
if (demuxer->IsMediaSource()) {
int64_t length = demuxer->GetEndDataOffset();
int64_t position = demuxer->GetResource()->Tell();
MOZ_ASSERT(position <= demuxer->GetResource()->GetLength());
MOZ_ASSERT(position <= length);
if (length >= 0 && count + position > length) {
count = length - position;
}
MOZ_ASSERT(count <= aLength);
}
MOZ_ASSERT(position <= demuxer->GetResource()->GetLength());
MOZ_ASSERT(position <= length);
if (length >= 0 && count + position > length) {
count = length - position;
}
MOZ_ASSERT(count <= aLength);
uint32_t bytes = 0;
nsresult rv =
demuxer->GetResource()->Read(static_cast<char*>(aBuffer), count, &bytes);
@ -125,6 +119,11 @@ static void webmdemux_log(nestegg* aContext,
WebMDemuxer::WebMDemuxer(MediaResource* aResource)
: WebMDemuxer(aResource, false)
{
}
WebMDemuxer::WebMDemuxer(MediaResource* aResource, bool aIsMediaSource)
: mResource(aResource)
, mBufferedState(nullptr)
, mInitData(nullptr)
@ -140,7 +139,7 @@ WebMDemuxer::WebMDemuxer(MediaResource* aResource)
, mHasAudio(false)
, mNeedReIndex(true)
, mLastWebMBlockOffset(-1)
, mIsExpectingMoreData(true)
, mIsMediaSource(aIsMediaSource)
{
if (!gNesteggLog) {
gNesteggLog = PR_NewLogModule("Nestegg");
@ -454,10 +453,13 @@ WebMDemuxer::EnsureUpToDateIndex()
if (!mInitData && mBufferedState->GetInitEndOffset() != -1) {
mInitData = mResource.MediaReadAt(0, mBufferedState->GetInitEndOffset());
}
mLastWebMBlockOffset = mBufferedState->GetLastBlockOffset();
mIsExpectingMoreData = mResource.GetResource()->IsExpectingMoreData();
MOZ_ASSERT(mLastWebMBlockOffset <= mResource.GetLength());
mNeedReIndex = false;
if (!mIsMediaSource) {
return;
}
mLastWebMBlockOffset = mBufferedState->GetLastBlockOffset();
MOZ_ASSERT(mLastWebMBlockOffset <= mResource.GetLength());
}
void
@ -483,7 +485,9 @@ WebMDemuxer::GetCrypto()
bool
WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSamples)
{
EnsureUpToDateIndex();
if (mIsMediaSource) {
EnsureUpToDateIndex();
}
nsRefPtr<NesteggPacketHolder> holder(NextPacket(aType));

View File

@ -54,7 +54,10 @@ class WebMDemuxer : public MediaDataDemuxer
{
public:
explicit WebMDemuxer(MediaResource* aResource);
// Indicate if the WebMDemuxer is to be used with MediaSource, in which
// case the demuxer will stop reads at the last known complete block.
WebMDemuxer(MediaResource* aResource, bool aIsMediaSource);
nsRefPtr<InitPromise> Init() override;
already_AddRefed<MediaDataDemuxer> Clone() const override;
@ -91,11 +94,15 @@ public:
return &mResource;
}
int64_t GetEndDataOffset()
int64_t GetEndDataOffset() const
{
return mLastWebMBlockOffset < 0 || mIsExpectingMoreData
return (!mIsMediaSource || mLastWebMBlockOffset < 0)
? mResource.GetLength() : mLastWebMBlockOffset;
}
bool IsMediaSource() const
{
return mIsMediaSource;
}
private:
friend class WebMTrackDemuxer;
@ -169,7 +176,7 @@ private:
// For performance reasons we cache those values rather than retrieving them
// each time, as nestegg only performs 1-byte reads at a time.
int64_t mLastWebMBlockOffset;
bool mIsExpectingMoreData;
const bool mIsMediaSource;
};
class WebMTrackDemuxer : public MediaTrackDemuxer

View File

@ -609,6 +609,11 @@ nsCSPKeywordSrc::allows(enum CSPKeyword aKeyword, const nsAString& aHashOrNonce)
void
nsCSPKeywordSrc::toString(nsAString& outStr) const
{
if (mInvalidated) {
MOZ_ASSERT(mKeyword == CSP_UNSAFE_INLINE,
"can only ignore 'unsafe-inline' within toString()");
return;
}
outStr.AppendASCII(CSP_EnumToKeyword(mKeyword));
}
@ -616,8 +621,8 @@ void
nsCSPKeywordSrc::invalidate()
{
mInvalidated = true;
NS_ASSERTION(mInvalidated == CSP_UNSAFE_INLINE,
"invalidate 'unsafe-inline' only within script-src");
MOZ_ASSERT(mKeyword == CSP_UNSAFE_INLINE,
"invalidate 'unsafe-inline' only within script-src");
}
/* ===== nsCSPNonceSrc ==================== */
@ -1046,8 +1051,13 @@ nsCSPPolicy::allows(nsContentPolicyType aContentType,
}
}
// Only match {nonce,hash}-source on specific directives (not default-src)
// {nonce,hash}-source should not consult default-src:
// * return false if default-src is specified
// * but allow the load if default-src is *not* specified (Bug 1198422)
if (aKeyword == CSP_NONCE || aKeyword == CSP_HASH) {
if (!defaultDir) {
return true;
}
return false;
}
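The branch above makes nonce- and hash-sources independent of default-src: when no matching explicit directive exists in the policy, the load is blocked only if a default-src directive is present, and is allowed otherwise. A minimal sketch of that fallback decision, not part of the patch; hasDefaultSrc stands in for the defaultDir lookup performed earlier in this function.
// Hypothetical helper restating the {nonce,hash}-source fallback rule.
//   policy "default-src 'none'"      -> blocked  (default-src present)
//   policy "frame-ancestors 'self'"  -> allowed  (no default-src, Bug 1198422)
static bool
AllowsNonceOrHashFallback(bool hasDefaultSrc)
{
  return !hasDefaultSrc;
}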

View File

@ -0,0 +1,56 @@
// custom *.sjs file specifically for the needs of:
// * Bug 1004703 - ignore 'unsafe-inline' if nonce- or hash-source specified
// * Bug 1198422: should not block inline script if default-src is not specified
Components.utils.import("resource://gre/modules/NetUtil.jsm");
function loadHTMLFromFile(path) {
// Load the HTML to return in the response from file.
// Since it's relative to the cwd of the test runner, we start there and
// append to get to the actual path of the file.
var testHTMLFile =
Components.classes["@mozilla.org/file/directory_service;1"].
getService(Components.interfaces.nsIProperties).
get("CurWorkD", Components.interfaces.nsILocalFile);
var dirs = path.split("/");
for (var i = 0; i < dirs.length; i++) {
testHTMLFile.append(dirs[i]);
}
var testHTMLFileStream =
Components.classes["@mozilla.org/network/file-input-stream;1"].
createInstance(Components.interfaces.nsIFileInputStream);
testHTMLFileStream.init(testHTMLFile, -1, 0, 0);
var testHTML = NetUtil.readInputStreamToString(testHTMLFileStream, testHTMLFileStream.available());
return testHTML;
}
function handleRequest(request, response)
{
var query = {};
request.queryString.split('&').forEach(function (val) {
var [name, value] = val.split('=');
query[name] = unescape(value);
});
var csp1 = (query['csp1']) ? unescape(query['csp1']) : "";
var csp2 = (query['csp2']) ? unescape(query['csp2']) : "";
var file = unescape(query['file']);
// avoid confusing cache behaviors
response.setHeader("Cache-Control", "no-cache", false);
// deliver the CSP encoded in the URI
// please note that comma separation of two policies
// acts like sending *two* separate policies
var csp = csp1;
if (csp2 !== "") {
csp += ", " + csp2;
}
response.setHeader("Content-Security-Policy", csp, false);
// Send HTML to test allowed/blocked behaviors
response.setHeader("Content-Type", "text/html", false);
response.write(loadHTMLFromFile(file));
}

View File

@ -91,6 +91,7 @@ support-files =
file_scheme_relative_sources.js
file_scheme_relative_sources.sjs
file_ignore_unsafe_inline.html
file_ignore_unsafe_inline_multiple_policies_server.sjs
file_self_none_as_hostname_confusion.html
file_self_none_as_hostname_confusion.html^headers^
file_path_matching.html

View File

@ -21,43 +21,53 @@ SimpleTest.waitForExplicitFinish();
* The expected output of each test is a sequence of chars.
* E.g. the default char we expect is 'a', depending on what inline scripts
* are allowed to run we also expect 'b', 'c', 'd'.
*
* The test also covers the handling of multiple policies where the second
* policy makes use of a directive that should *not* fall back to
* default-src, see Bug 1198422.
*/
var POLICY_PREFIX = "default-src 'none'; script-src ";
const POLICY_PREFIX = "default-src 'none'; script-src ";
var tests = [
{
policy: POLICY_PREFIX + "'unsafe-inline'",
policy1: POLICY_PREFIX + "'unsafe-inline'",
policy2: "frame-ancestors 'self'",
description: "'unsafe-inline' allows all scripts to execute",
file: "file_ignore_unsafe_inline.html",
result: "abcd",
},
{
policy: POLICY_PREFIX + "'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI='",
policy1: POLICY_PREFIX + "'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI='",
policy2: "base-uri http://mochi.test",
description: "defining a hash should only allow one script to execute",
file: "file_ignore_unsafe_inline.html",
result: "ac",
},
{
policy: POLICY_PREFIX + "'unsafe-inline' 'nonce-FooNonce'",
policy1: POLICY_PREFIX + "'unsafe-inline' 'nonce-FooNonce'",
policy2: "form-action 'none'",
description: "defining a nonce should only allow one script to execute",
file: "file_ignore_unsafe_inline.html",
result: "ad",
},
{
policy: POLICY_PREFIX + "'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI=' 'nonce-FooNonce'",
policy1: POLICY_PREFIX + "'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI=' 'nonce-FooNonce'",
policy2: "upgrade-insecure-requests",
description: "defining hash and nonce should allow two scripts to execute",
file: "file_ignore_unsafe_inline.html",
result: "acd",
},
{
policy: POLICY_PREFIX + "'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI=' 'nonce-FooNonce' 'unsafe-inline'",
policy1: POLICY_PREFIX + "'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI=' 'nonce-FooNonce' 'unsafe-inline'",
policy2: "referrer origin",
description: "defining hash, nonce and 'unsafe-inline' twice should still only allow two scripts to execute",
file: "file_ignore_unsafe_inline.html",
result: "acd",
},
{
policy: "default-src 'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI=' 'nonce-FooNonce' ",
policy1: "default-src 'unsafe-inline' 'sha256-uJXAPKP5NZxnVMZMUkDofh6a9P3UMRc1CRTevVPS/rI=' 'nonce-FooNonce' ",
policy2: "sandbox allow-forms",
description: "unsafe-inline should *not* be ignored within default-src even if hash or nonce is specified",
file: "file_ignore_unsafe_inline.html",
result: "abcd",
@ -75,12 +85,14 @@ function loadNextTest() {
}
curTest = tests[counter++];
var src = "file_testserver.sjs?file=";
var src = "file_ignore_unsafe_inline_multiple_policies_server.sjs?file=";
// append the file that should be served
src += escape("tests/dom/security/test/csp/" + curTest.file);
// append the CSP that should be used to serve the file
src += "&csp=" + escape(curTest.policy);
// append the first CSP that should be used to serve the file
src += "&csp1=" + escape(curTest.policy1);
// append the second CSP that should be used to serve the file
src += "&csp2=" + escape(curTest.policy2);
document.getElementById("testframe").addEventListener("load", test, false);
document.getElementById("testframe").src = src;

View File

@ -1,6 +1,6 @@
en_US-mozilla Hunspell Dictionary
Generated from SCOWL Version 2015.05.18
Wed May 27 08:05:38 EDT 2015
Generated from SCOWL Version 2015.08.24
Thu Aug 27 23:04:57 EDT 2015
http://wordlist.sourceforge.net
@ -311,4 +311,4 @@ from the Ispell distribution they are under the Ispell copyright:
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Build Date: Wed May 27 08:05:38 EDT 2015
Build Date: Thu Aug 27 23:04:57 EDT 2015

View File

@ -12177,9 +12177,7 @@ colonoscope
colonoscope's
colonoscopes
commenters
composited
compositeness
compositing
concurrent's
concurrents
concuss's
@ -12211,6 +12209,7 @@ court-martial
court-martialed
court-martialing
court-martials
crappiness
crimeware
crimeware's
cryonic
@ -12892,6 +12891,7 @@ stents
substituent
substituent's
substituents
subsumptions
syllabi
synches
synesthesia

View File

@ -1,6 +1,6 @@
en_US Hunspell Dictionary
Generated from SCOWL Version 2015.05.18
Wed May 27 08:05:20 EDT 2015
Generated from SCOWL Version 2015.08.24
Thu Aug 27 23:04:43 EDT 2015
http://wordlist.sourceforge.net
@ -311,5 +311,5 @@ from the Ispell distribution they are under the Ispell copyright:
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Build Date: Wed May 27 08:05:20 EDT 2015
Build Date: Thu Aug 27 23:04:43 EDT 2015
Wordlist Command: mk-list --accents=strip en_US 60

View File

@ -1,4 +1,4 @@
48743
48756
0/nm
0th/pt
1/n1
@ -3237,6 +3237,7 @@ FSLIC
FTC
FUD/S
FWD
FWIW
FY
FYI
Faberge/M
@ -3421,6 +3422,7 @@ Franck/M
Franco/M
Francois/M
Francoise/M
Francophile
Franglais/M
Frank/SM
Frankel/M
@ -6339,6 +6341,7 @@ Mensa/M
Mentholatum/M
Menuhin/M
Menzies/M
Mephisto
Mephistopheles/M
Merak/M
Mercado/M
@ -9724,6 +9727,7 @@ USPS
USS
USSR/M
UT/M
UTC
UV/M
Ubangi/M
Ubuntu/M
@ -16527,7 +16531,8 @@ claustrophobic
clavichord/SM
clavicle/MS
clavier/MS
claw/SGMD
claw's
claw/CSGD
clay/M
clayey
clayier
@ -17245,7 +17250,7 @@ comportment/M
compose/AECGSD
composedly
composer/MS
composite/MYNXS
composite/MYGNXDS
composition/CM
compositor/SM
compost/SGMD
@ -20615,6 +20620,7 @@ drivel/SZGMDR
driveler/M
driven
driver/M
driveshaft/SM
driveway/MS
drizzle/MGDS
drizzly
@ -23205,6 +23211,7 @@ flask/SM
flat/MYPS
flatbed/SM
flatboat/SM
flatbread
flatcar/SM
flatfeet
flatfish/MS
@ -26484,6 +26491,7 @@ homiest
homiletic
homily/SM
hominid/SM
hominoid/S
hominy/M
homo/MS
homoerotic
@ -30983,6 +30991,7 @@ measureless
measurement/MS
meat/MS
meatball/MS
meathead/MS
meatiness/M
meatless
meatloaf/M
@ -35808,6 +35817,7 @@ piece/DSMG
piecemeal
piecework/MRZ
pieceworker/M
piecrust/SM
pieing
pier/M
pierce/JGDS
@ -36603,6 +36613,7 @@ potent/Y
potentate/MS
potential/MYS
potentiality/SM
potentiate/GDS
potful/SM
pothead/SM
pother/SMDG
@ -43419,6 +43430,7 @@ substrate/MS
substratum/M
substructure/SM
subsume/DSG
subsumption
subsurface/M
subsystem/SM
subteen/SM
@ -44378,6 +44390,7 @@ technician/SM
technicolor
technique/SM
techno
technobabble
technocracy/SM
technocrat/MS
technocratic

View File

@ -1,4 +1,4 @@
54838
54850
0/nm
0th/pt
1/n1
@ -5293,6 +5293,7 @@ FSLIC
FTC
FUD/S
FWD
FWIW
FY
FYI
Fabe/RM
@ -5637,6 +5638,7 @@ Francklyn/M
Franco/M
Francois/M
Francoise/M
Francophile
Francyne/M
Franglais/M
Frank/SM
@ -10421,6 +10423,7 @@ Mensa/M
Mentholatum/M
Menuhin/M
Menzies/M
Mephisto
Mephistopheles/M
Merak/M
Mercado/M
@ -15438,6 +15441,7 @@ USPS
USS
USSR/M
UT/M
UTC
UV/M
Ubangi/M
Ubiquity/M
@ -18451,9 +18455,9 @@ assay/ZGMDRS
assayer/M
assemblage/SM
assemble/AEGSD
assembler/MS
assembler/MSE
assemblies
assembly/AM
assembly/AME
assemblyman/M
assemblymen
assemblywoman/M
@ -22620,7 +22624,8 @@ claustrophobic
clavichord/SM
clavicle/MS
clavier/MS
claw/SGMD
claw's
claw/CSGD
clay/M
clayey
clayier
@ -24373,9 +24378,9 @@ crap/MS
crape/SM
crapped
crapper/S
crappie/RSMT
crappie/M
crapping
crappy/P
crappy/RSPT
craps/M
crapshooter/MS
crash/MDSG
@ -26701,6 +26706,7 @@ drivel/SZGMDR
driveler/M
driven
driver/M
driveshaft/SM
driveway/MS
drizzle/MGDS
drizzly
@ -29289,6 +29295,7 @@ flask/SM
flat/MYPS
flatbed/SM
flatboat/SM
flatbread
flatcar/SM
flatfeet
flatfish/MS
@ -32566,6 +32573,7 @@ homiest
homiletic
homily/SM
hominid/SM
hominoid/S
hominy/M
homo/MS
homoerotic
@ -37058,6 +37066,7 @@ measureless
measurement/MS
meat/MS
meatball/MS
meathead/MS
meatiness/M
meatless
meatloaf/M
@ -41903,6 +41912,7 @@ piece/DSMG
piecemeal
piecework/MRZ
pieceworker/M
piecrust/SM
pieing
pier/M
pierce/JGDS
@ -42697,6 +42707,7 @@ potent/Y
potentate/MS
potential/MYS
potentiality/SM
potentiate/GDS
potful/SM
pothead/SM
pother/SMDG
@ -50478,6 +50489,7 @@ technician/SM
technicolor
technique/SM
techno
technobabble
technocracy/SM
technocrat/MS
technocratic

View File

@ -235,6 +235,12 @@ static inline SkColor ColorToSkColor(const Color &color, Float aAlpha)
ColorFloatToByte(color.g), ColorFloatToByte(color.b));
}
static inline SkPoint
PointToSkPoint(const Point &aPoint)
{
return SkPoint::Make(SkFloatToScalar(aPoint.x), SkFloatToScalar(aPoint.y));
}
static inline SkRect
RectToSkRect(const Rect& aRect)
{

View File

@ -126,29 +126,54 @@ PathSkia::TransformedCopyToBuilder(const Matrix &aTransform, FillRule aFillRule)
return MakeAndAddRef<PathBuilderSkia>(aTransform, mPath, aFillRule);
}
bool
PathSkia::ContainsPoint(const Point &aPoint, const Matrix &aTransform) const
static bool
SkPathContainsPoint(const SkPath& aPath, const Point& aPoint, const Matrix& aTransform)
{
// Skia's SkPath::contains method does not support the inclusive boundary conditions
// required by canvas (bug 831259), so we employ the same workaround as used by Blink.
// First, we scale the path so its coordinates fit within the largest range that
// won't cause precision issues (2^15); this also scales down any path larger than that.
// Next, we make a clip region representing the point to be tested and convert the
// path to a region within this clip region.
// If the resulting region is non-empty, then the path should contain the point.
Matrix inverse = aTransform;
inverse.Invert();
Point transformed = inverse * aPoint;
SkPoint point = PointToSkPoint(inverse * aPoint);
Rect bounds = GetBounds(aTransform);
if (aPoint.x < bounds.x || aPoint.y < bounds.y ||
aPoint.x > bounds.XMost() || aPoint.y > bounds.YMost()) {
SkRect bounds = aPath.getBounds();
if (point.fX < bounds.fLeft || point.fY < bounds.fTop ||
point.fX > bounds.fRight || point.fY > bounds.fBottom) {
return false;
}
SkRegion pointRect;
pointRect.setRect(int32_t(SkFloatToScalar(transformed.x - 1.f)),
int32_t(SkFloatToScalar(transformed.y - 1.f)),
int32_t(SkFloatToScalar(transformed.x + 1.f)),
int32_t(SkFloatToScalar(transformed.y + 1.f)));
SkPoint scale = SkPoint::Make(SkMaxScalar(bounds.fRight, -bounds.fLeft),
SkMaxScalar(bounds.fBottom, -bounds.fTop));
if (SkScalarNearlyZero(scale.fX) || SkScalarNearlyZero(scale.fY)) {
return false;
}
scale.set(SkMaxScalar(scale.fX, SkScalarAbs(point.fX) + SK_Scalar1),
SkMaxScalar(scale.fY, SkScalarAbs(point.fY) + SK_Scalar1));
const SkScalar maxCoord = SkIntToScalar(1 << 15);
SkMatrix scaleMatrix;
scaleMatrix.setScale(maxCoord / scale.fX, maxCoord / scale.fY);
SkPath scaledPath(aPath);
scaledPath.transform(scaleMatrix, nullptr);
scaleMatrix.mapPoints(&point, 1);
SkRegion pointClip(SkIRect::MakeXYWH(SkScalarRoundToInt(point.fX) - 1,
SkScalarRoundToInt(point.fY) - 1,
2, 2));
SkRegion pathRegion;
return pathRegion.setPath(mPath, pointRect);
return pathRegion.setPath(scaledPath, pointClip);
}
bool
PathSkia::ContainsPoint(const Point &aPoint, const Matrix &aTransform) const
{
return SkPathContainsPoint(mPath, aPoint, aTransform);
}
bool
@ -156,32 +181,13 @@ PathSkia::StrokeContainsPoint(const StrokeOptions &aStrokeOptions,
const Point &aPoint,
const Matrix &aTransform) const
{
Matrix inverse = aTransform;
inverse.Invert();
Point transformed = inverse * aPoint;
SkPaint paint;
StrokeOptionsToPaint(paint, aStrokeOptions);
SkPath strokePath;
paint.getFillPath(mPath, &strokePath);
Rect bounds = aTransform.TransformBounds(SkRectToRect(strokePath.getBounds()));
if (aPoint.x < bounds.x || aPoint.y < bounds.y ||
aPoint.x > bounds.XMost() || aPoint.y > bounds.YMost()) {
return false;
}
SkRegion pointRect;
pointRect.setRect(int32_t(SkFloatToScalar(transformed.x - 1.f)),
int32_t(SkFloatToScalar(transformed.y - 1.f)),
int32_t(SkFloatToScalar(transformed.x + 1.f)),
int32_t(SkFloatToScalar(transformed.y + 1.f)));
SkRegion pathRegion;
return pathRegion.setPath(strokePath, pointRect);
return SkPathContainsPoint(strokePath, aPoint, aTransform);
}
Rect

View File

@ -824,9 +824,10 @@ Layer::ApplyPendingUpdatesForThisTransaction()
const float
Layer::GetLocalOpacity()
{
if (LayerComposite* shadow = AsLayerComposite())
return shadow->GetShadowOpacity();
return mOpacity;
float opacity = mOpacity;
if (LayerComposite* shadow = AsLayerComposite())
opacity = shadow->GetShadowOpacity();
return std::min(std::max(opacity, 0.0f), 1.0f);
}
float

View File

@ -40,14 +40,7 @@ window.onload = function() {
// Dropping the touch slop to 0 makes the tests easier to write because
// we can just do a one-pixel drag to get over the pan threshold rather
// than having to hard-code some larger value.
["apz.touch_start_tolerance", "0.0"],
// The B2G emulator is hella slow, and needs more than 300ms to run the
// main-thread code that deals with layerizing subframes and running
// touch listeners. In my local runs this needs to be at least 1000.
// On try this sometimes needs to be as long as 8 seconds (bug 1176798)
// so we make it 15 seconds just to be extra safe.
["apz.content_response_timeout", "15000"]
["apz.touch_start_tolerance", "0.0"]
]
}, testDone);
};

View File

@ -17,7 +17,9 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1151663
// inside an iframe which means we have no control over the root APZC.
var w = null;
window.onload = function() {
w = window.open("helper_bug1151663.html", "_blank");
SpecialPowers.pushPrefEnv({"set": [["apz.test.logging_enabled", true]]}, function() {
w = window.open("helper_bug1151663.html", "_blank");
});
};
function finishTest() {

View File

@ -17,7 +17,9 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=982141
// inside an iframe which means we have no control over the root APZC.
var w = null;
window.onload = function() {
w = window.open("helper_bug982141.html", "_blank");
SpecialPowers.pushPrefEnv({"set": [["apz.test.logging_enabled", true]]}, function() {
w = window.open("helper_bug982141.html", "_blank");
});
};
function finishTest() {

View File

@ -14,6 +14,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1173580
<style>
#container {
display: flex;
overflow: scroll;
height: 500px;
}
.outer-frame {
height: 500px;
@ -21,7 +23,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1173580
flex-basis: 100%;
background: repeating-linear-gradient(#CCC, #CCC 100px, #BBB 100px, #BBB 200px);
}
#page-content {
#container-content {
height: 200%;
}
</style>
@ -42,11 +44,11 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1173580
</div>
<iframe id="outer3" class="outer-frame" src="helper_iframe1.html"></iframe>
<iframe id="outer4" class="outer-frame" src="helper_iframe2.html"></iframe>
<!-- The container-content div ensures 'container' is scrollable, so the
optimization that layerizes the primary async-scrollable frame on page
load layerizes it rather than its child subframes. -->
<div id="container-content"></div>
</div>
<!-- The page-content div ensures the page is scrollable, so none of the
subframes are layerized by the optimization that layerizes the
primary async-scrollable frame on page load. -->
<div id="page-content"></div>
<pre id="test">
<script type="application/javascript;version=1.7">
@ -148,8 +150,18 @@ function startTest() {
}
SimpleTest.waitForExplicitFinish();
SimpleTest.waitForFocus(startTest, window);
SimpleTest.testInChaosMode();
// Disable smooth scrolling, because it results in long-running scroll
// animations that can cause a 'scroll' event triggered by an earlier wheel
// event to be mistaken as corresponding to a later wheel event.
// Also enable APZ test logging, since we use that data to determine whether
// a scroll frame was layerized.
SpecialPowers.pushPrefEnv({"set": [["general.smoothScroll", false],
["apz.test.logging_enabled", true]]},
function() {
SimpleTest.waitForFocus(startTest, window);
});
</script>
</pre>
</body>

View File

@ -231,9 +231,14 @@ ClientLayerManager::BeginTransactionWithTarget(gfxContext* aTarget)
}
// If this is a new paint, increment the paint sequence number.
if (!mIsRepeatTransaction && gfxPrefs::APZTestLoggingEnabled()) {
if (!mIsRepeatTransaction) {
// Increment the paint sequence number even if test logging isn't
// enabled in this process; it may be enabled in the parent process,
// and the parent process expects unique sequence numbers.
++mPaintSequenceNumber;
mApzTestData.StartNewPaint(mPaintSequenceNumber);
if (gfxPrefs::APZTestLoggingEnabled()) {
mApzTestData.StartNewPaint(mPaintSequenceNumber);
}
}
}

View File

@ -212,24 +212,26 @@ ContainerRenderVR(ContainerT* aContainer,
// from WebGL (and maybe depth video?)
compositor->SetRenderTarget(surface);
aContainer->ReplaceEffectiveTransform(origTransform);
// If this native-VR child layer's size does not match the eye resolution
// (that is, the size of the recommended render rect reported by the HMD
// device), then we need to scale it up/down.
nsIntRect layerBounds;
Rect layerBounds;
// XXX this is a hack! Canvas layers aren't reporting the
// proper bounds here (visible region bounds are 0,0,0,0)
// and I'm not sure if this is the bounds we want anyway.
if (layer->GetType() == Layer::TYPE_CANVAS) {
layerBounds = static_cast<CanvasLayer*>(layer)->GetBounds();
layerBounds = ToRect(static_cast<CanvasLayer*>(layer)->GetBounds());
} else {
layerBounds = layer->GetEffectiveVisibleRegion().GetBounds();
layerBounds = ToRect(layer->GetEffectiveVisibleRegion().GetBounds());
}
DUMP(" layer %p [type %d] bounds [%d %d %d %d] surfaceRect [%d %d %d %d]\n", layer, (int) layer->GetType(),
XYWH(layerBounds), XYWH(surfaceRect));
const gfx::Matrix4x4 childTransform = layer->GetEffectiveTransform();
layerBounds = childTransform.TransformBounds(layerBounds);
DUMP(" layer %p [type %d] bounds [%f %f %f %f] surfaceRect [%d %d %d %d]\n", layer, (int) layer->GetType(),
XYWH(layerBounds), XYWH(surfaceRect));
bool restoreTransform = false;
if ((layerBounds.width != 0 && layerBounds.height != 0) &&
(layerBounds.width != surfaceRect.width ||
@ -239,8 +241,8 @@ ContainerRenderVR(ContainerT* aContainer,
surfaceRect.width / float(layerBounds.width),
surfaceRect.height / float(layerBounds.height));
gfx::Matrix4x4 scaledChildTransform(childTransform);
scaledChildTransform.PreScale(surfaceRect.width / float(layerBounds.width),
surfaceRect.height / float(layerBounds.height),
scaledChildTransform.PreScale(surfaceRect.width / layerBounds.width,
surfaceRect.height / layerBounds.height,
1.0f);
layer->ReplaceEffectiveTransform(scaledChildTransform);

View File

@ -226,11 +226,7 @@ LayerManagerComposite::ApplyOcclusionCulling(Layer* aLayer, nsIntRegion& aOpaque
LayerComposite *composite = aLayer->AsLayerComposite();
if (!localOpaque.IsEmpty()) {
nsIntRegion visible = composite->GetShadowVisibleRegion();
nsIntRegion afterCulling;
afterCulling.Sub(visible, localOpaque);
// Intersect the original region with the bounds of the culled region so
// that we don't increase the region's complexity.
visible.AndWith(afterCulling.GetBounds());
visible.Sub(visible, localOpaque);
composite->SetShadowVisibleRegion(visible);
}

View File

@ -176,7 +176,7 @@ private:
DECL_GFX_PREF(Live, "apz.pan_repaint_interval", APZPanRepaintInterval, int32_t, 250);
DECL_GFX_PREF(Live, "apz.printtree", APZPrintTree, bool, false);
DECL_GFX_PREF(Live, "apz.smooth_scroll_repaint_interval", APZSmoothScrollRepaintInterval, int32_t, 75);
DECL_GFX_PREF(Once, "apz.test.logging_enabled", APZTestLoggingEnabled, bool, false);
DECL_GFX_PREF(Live, "apz.test.logging_enabled", APZTestLoggingEnabled, bool, false);
DECL_GFX_PREF(Live, "apz.touch_start_tolerance", APZTouchStartTolerance, float, 1.0f/4.5f);
DECL_GFX_PREF(Live, "apz.use_paint_duration", APZUsePaintDuration, bool, true);
DECL_GFX_PREF(Live, "apz.velocity_bias", APZVelocityBias, float, 1.0f);

View File

@ -1731,12 +1731,13 @@ CheckForAdapterMismatch(ID3D11Device *device)
}
}
void CheckIfRenderTargetViewNeedsRecreating(ID3D11Device *device)
bool DoesRenderTargetViewNeedsRecreating(ID3D11Device *device)
{
bool result = false;
// CreateTexture2D is known to crash on lower feature levels, see bugs
// 1170211 and 1089413.
if (device->GetFeatureLevel() < D3D_FEATURE_LEVEL_10_0) {
return;
return true;
}
nsRefPtr<ID3D11DeviceContext> deviceContext;
@ -1758,9 +1759,17 @@ void CheckIfRenderTargetViewNeedsRecreating(ID3D11Device *device)
offscreenTextureDesc.CPUAccessFlags = 0;
offscreenTextureDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
HRESULT result = device->CreateTexture2D(&offscreenTextureDesc, NULL, getter_AddRefs(offscreenTexture));
HRESULT hr = device->CreateTexture2D(&offscreenTextureDesc, NULL, getter_AddRefs(offscreenTexture));
if (FAILED(hr)) {
gfxCriticalNote << "DoesRecreatingCreateTexture2DFail";
return false;
}
result = offscreenTexture->QueryInterface(__uuidof(IDXGIKeyedMutex), (void**)getter_AddRefs(keyedMutex));
hr = offscreenTexture->QueryInterface(__uuidof(IDXGIKeyedMutex), (void**)getter_AddRefs(keyedMutex));
if (FAILED(hr)) {
gfxCriticalNote << "DoesRecreatingKeyedMutexFailed";
return false;
}
D3D11_RENDER_TARGET_VIEW_DESC offscreenRTVDesc;
offscreenRTVDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
@ -1768,7 +1777,11 @@ void CheckIfRenderTargetViewNeedsRecreating(ID3D11Device *device)
offscreenRTVDesc.Texture2D.MipSlice = 0;
nsRefPtr<ID3D11RenderTargetView> offscreenRTView;
result = device->CreateRenderTargetView(offscreenTexture, &offscreenRTVDesc, getter_AddRefs(offscreenRTView));
hr = device->CreateRenderTargetView(offscreenTexture, &offscreenRTVDesc, getter_AddRefs(offscreenRTView));
if (FAILED(hr)) {
gfxCriticalNote << "DoesRecreatingCreateRenderTargetViewFailed";
return false;
}
// Acquire and clear
keyedMutex->AcquireSync(0, INFINITE);
@ -1789,7 +1802,11 @@ void CheckIfRenderTargetViewNeedsRecreating(ID3D11Device *device)
desc.MiscFlags = 0;
desc.BindFlags = 0;
ID3D11Texture2D* cpuTexture;
device->CreateTexture2D(&desc, NULL, &cpuTexture);
hr = device->CreateTexture2D(&desc, NULL, &cpuTexture);
if (FAILED(hr)) {
gfxCriticalNote << "DoesRecreatingCreateCPUTextureFailed";
return false;
}
deviceContext->CopyResource(cpuTexture, offscreenTexture);
@ -1803,13 +1820,14 @@ void CheckIfRenderTargetViewNeedsRecreating(ID3D11Device *device)
// match the clear
if (resultColor != 0xffffff00) {
gfxCriticalNote << "RenderTargetViewNeedsRecreating";
gANGLESupportsD3D11 = false;
result = true;
}
keyedMutex->ReleaseSync(0);
// It seems like this may only happen when we're using the NVIDIA gpu
CheckForAdapterMismatch(device);
return result;
}
@ -2010,11 +2028,14 @@ gfxWindowsPlatform::AttemptD3D11DeviceCreation()
return;
}
CheckIfRenderTargetViewNeedsRecreating(mD3D11Device);
// Only test this when not using WARP since it can fail and cause
// GetDeviceRemovedReason to return weird values.
mCompositorD3D11TextureSharingWorks = ::DoesD3D11TextureSharingWork(mD3D11Device);
if (!mCompositorD3D11TextureSharingWorks || !DoesRenderTargetViewNeedsRecreating(mD3D11Device)) {
gANGLESupportsD3D11 = false;
}
mD3D11Device->SetExceptionMode(0);
mIsWARP = false;
}

View File

@ -437,6 +437,50 @@ bool ConstructSavedFrameStackSlow(JSContext* cx, JS::ubi::StackFrame& frame,
/*** ubi::Node ************************************************************************************/
// A concrete node specialization can claim its referent is a member of a
// particular "coarse type" which is less specific than the actual
// implementation type but generally more palatable for web developers. For
// example, JitCode can be considered to have a coarse type of "Script". This is
// used by some analyses for putting nodes into different buckets. The default,
// if a concrete specialization does not provide its own mapping to a CoarseType
// variant, is "Other".
//
// NB: the values associated with a particular enum variant must not change or
// be reused for new variants. Doing so will cause inspecting ubi::Nodes backed
// by an offline heap snapshot from an older SpiderMonkey/Firefox version to
// break. Consider this enum append only.
enum class CoarseType: uint32_t {
Other = 0,
Object = 1,
Script = 2,
String = 3,
FIRST = Other,
LAST = String
};
inline uint32_t
CoarseTypeToUint32(CoarseType type)
{
return static_cast<uint32_t>(type);
}
inline bool
Uint32IsValidCoarseType(uint32_t n)
{
auto first = static_cast<uint32_t>(CoarseType::FIRST);
auto last = static_cast<uint32_t>(CoarseType::LAST);
MOZ_ASSERT(first < last);
return first <= n && n <= last;
}
inline CoarseType
Uint32ToCoarseType(uint32_t n)
{
MOZ_ASSERT(Uint32IsValidCoarseType(n));
return static_cast<CoarseType>(n);
}
// The base class implemented by each ubi::Node referent type. Subclasses must
// not add data members to this class.
class Base {
@ -485,6 +529,9 @@ class Base {
// otherwise.
virtual bool isLive() const { return true; };
// Return the coarse-grained type-of-thing that this node represents.
virtual CoarseType coarseType() const { return CoarseType::Other; }
// Return a human-readable name for the referent's type. The result should
// be statically allocated. (You can use MOZ_UTF16("strings") for this.)
//
@ -652,10 +699,6 @@ class Node {
template<typename T>
static const char16_t* canonicalTypeName() { return Concrete<T>::concreteTypeName; }
// Get the canonical type name for the supplied string, if one
// exists. Otherwise nullptr is returned.
static const char16_t* getCanonicalTypeName(const char16_t* dupe, size_t length);
template<typename T>
bool is() const {
return base()->typeName() == canonicalTypeName<T>();
@ -680,6 +723,7 @@ class Node {
// not all!) JSObjects can be exposed.
JS::Value exposeToJS() const;
CoarseType coarseType() const { return base()->coarseType(); }
const char16_t* typeName() const { return base()->typeName(); }
JS::Zone* zone() const { return base()->zone(); }
JSCompartment* compartment() const { return base()->compartment(); }
@ -963,7 +1007,16 @@ class TracerConcreteWithCompartment : public TracerConcrete<Referent> {
// Define specializations for some commonly-used public JSAPI types.
// These can use the generic templates above.
template<> struct Concrete<JS::Symbol> : TracerConcrete<JS::Symbol> { };
template<> struct Concrete<JSScript> : TracerConcreteWithCompartment<JSScript> { };
template<> struct Concrete<JSScript> : TracerConcreteWithCompartment<JSScript> {
CoarseType coarseType() const final { return CoarseType::Script; }
protected:
explicit Concrete(JSScript *ptr) : TracerConcreteWithCompartment<JSScript>(ptr) { }
public:
static void construct(void *storage, JSScript *ptr) { new (storage) Concrete(ptr); }
};
// The JSObject specialization.
template<>
@ -976,6 +1029,8 @@ class Concrete<JSObject> : public TracerConcreteWithCompartment<JSObject> {
bool hasAllocationStack() const override;
StackFrame allocationStack() const override;
CoarseType coarseType() const final { return CoarseType::Object; }
protected:
explicit Concrete(JSObject* ptr) : TracerConcreteWithCompartment(ptr) { }
@ -989,6 +1044,8 @@ class Concrete<JSObject> : public TracerConcreteWithCompartment<JSObject> {
template<> struct Concrete<JSString> : TracerConcrete<JSString> {
Size size(mozilla::MallocSizeOf mallocSizeOf) const override;
CoarseType coarseType() const final { return CoarseType::String; }
protected:
explicit Concrete(JSString *ptr) : TracerConcrete<JSString>(ptr) { }
@ -1004,6 +1061,7 @@ class Concrete<void> : public Base {
UniquePtr<EdgeRange> edges(JSContext* cx, bool wantNames) const override;
JS::Zone* zone() const override;
JSCompartment* compartment() const override;
CoarseType coarseType() const final;
explicit Concrete(void* ptr) : Base(ptr) { }

View File

@ -7461,7 +7461,8 @@ BytecodeEmitter::emitClass(ParseNode* pn)
for (ParseNode* mn = classMethods->pn_head; mn; mn = mn->pn_next) {
ClassMethod& method = mn->as<ClassMethod>();
ParseNode& methodName = method.name();
if (methodName.isKind(PNK_OBJECT_PROPERTY_NAME) &&
if (!method.isStatic() &&
methodName.isKind(PNK_OBJECT_PROPERTY_NAME) &&
methodName.pn_atom == cx->names().constructor)
{
constructor = &method.method();

View File

@ -767,13 +767,12 @@ class GCRuntime
void setFoundBlackGrayEdges() { foundBlackGrayEdges = true; }
uint64_t gcNumber() const { return number; }
void incGcNumber() { ++number; }
uint64_t minorGCCount() const { return minorGCNumber; }
void incMinorGcNumber() { ++minorGCNumber; }
void incMinorGcNumber() { ++minorGCNumber; ++number; }
uint64_t majorGCCount() const { return majorGCNumber; }
void incMajorGcNumber() { ++majorGCNumber; }
void incMajorGcNumber() { ++majorGCNumber; ++number; }
int64_t defaultSliceBudget() const { return defaultTimeBudget_; }

View File

@ -381,7 +381,6 @@ js::TenuringTracer::TenuringTracer(JSRuntime* rt, Nursery* nursery)
, head(nullptr)
, tail(&head)
{
rt->gc.incGcNumber();
}
#define TIME_START(name) int64_t timestampStart_##name = enableProfiling_ ? PRMJ_Now() : 0

View File

@ -776,7 +776,17 @@ IsMarked(const jit::VMFunction*)
// instances with no associated compartment.
namespace JS {
namespace ubi {
template<> struct Concrete<js::jit::JitCode> : TracerConcrete<js::jit::JitCode> { };
template<>
struct Concrete<js::jit::JitCode> : TracerConcrete<js::jit::JitCode> {
CoarseType coarseType() const final { return CoarseType::Script; }
protected:
explicit Concrete(js::jit::JitCode *ptr) : TracerConcrete<js::jit::JitCode>(ptr) { }
public:
static void construct(void *storage, js::jit::JitCode *ptr) { new (storage) Concrete(ptr); }
};
} // namespace ubi
} // namespace JS

View File

@ -195,3 +195,31 @@ BEGIN_TEST(test_ubiStackFrame)
return true;
}
END_TEST(test_ubiStackFrame)
BEGIN_TEST(test_ubiCoarseType)
{
// Test that our explicit coarseType() overrides work as expected.
JSObject* obj = nullptr;
CHECK(JS::ubi::Node(obj).coarseType() == JS::ubi::CoarseType::Object);
JSScript* script = nullptr;
CHECK(JS::ubi::Node(script).coarseType() == JS::ubi::CoarseType::Script);
js::LazyScript* lazyScript = nullptr;
CHECK(JS::ubi::Node(lazyScript).coarseType() == JS::ubi::CoarseType::Script);
js::jit::JitCode* jitCode = nullptr;
CHECK(JS::ubi::Node(jitCode).coarseType() == JS::ubi::CoarseType::Script);
JSString* str = nullptr;
CHECK(JS::ubi::Node(str).coarseType() == JS::ubi::CoarseType::String);
// Test that the default when coarseType() is not overridden is Other.
JS::Symbol* sym = nullptr;
CHECK(JS::ubi::Node(sym).coarseType() == JS::ubi::CoarseType::Other);
return true;
}
END_TEST(test_ubiCoarseType)

View File

@ -2408,7 +2408,16 @@ CloneGlobalScript(JSContext* cx, Handle<ScopeObject*> enclosingScope, HandleScri
// with no associated compartment.
namespace JS {
namespace ubi {
template<> struct Concrete<js::LazyScript> : TracerConcrete<js::LazyScript> { };
template<>
struct Concrete<js::LazyScript> : TracerConcrete<js::LazyScript> {
CoarseType coarseType() const final { return CoarseType::Script; }
protected:
explicit Concrete(js::LazyScript *ptr) : TracerConcrete<js::LazyScript>(ptr) { }
public:
static void construct(void *storage, js::LazyScript *ptr) { new (storage) Concrete(ptr); }
};
} // namespace ubi
} // namespace JS

View File

@ -3,6 +3,21 @@ class test {
constructor() { }
static constructor() { }
}
class testWithExtends {
constructor() { };
static constructor() { };
}
class testOrder {
static constructor() { };
constructor() { };
}
class testOrderWithExtends extends null {
static constructor() { };
constructor() { };
}
`;
if (classesEnabled())

View File

@ -1025,17 +1025,19 @@ ArrayBufferObject::addView(JSContext* cx, JSObject* viewArg)
static size_t VIEW_LIST_MAX_LENGTH = 500;
bool
InnerViewTable::addView(JSContext* cx, ArrayBufferObject* obj, ArrayBufferViewObject* view)
InnerViewTable::addView(JSContext* cx, ArrayBufferObject* buffer, ArrayBufferViewObject* view)
{
// ArrayBufferObject entries are only added when there are multiple views.
MOZ_ASSERT(obj->firstView());
MOZ_ASSERT(buffer->firstView());
if (!map.initialized() && !map.init())
if (!map.initialized() && !map.init()) {
ReportOutOfMemory(cx);
return false;
}
Map::AddPtr p = map.lookupForAdd(obj);
Map::AddPtr p = map.lookupForAdd(buffer);
MOZ_ASSERT(!gc::IsInsideNursery(obj));
MOZ_ASSERT(!gc::IsInsideNursery(buffer));
bool addToNursery = nurseryKeysValid && gc::IsInsideNursery(view);
if (p) {
@ -1050,8 +1052,10 @@ InnerViewTable::addView(JSContext* cx, ArrayBufferObject* obj, ArrayBufferViewOb
nurseryKeysValid = false;
} else {
for (size_t i = 0; i < views.length(); i++) {
if (gc::IsInsideNursery(views[i]))
if (gc::IsInsideNursery(views[i])) {
addToNursery = false;
break;
}
}
}
}
@ -1061,33 +1065,35 @@ InnerViewTable::addView(JSContext* cx, ArrayBufferObject* obj, ArrayBufferViewOb
return false;
}
} else {
if (!map.add(p, obj, ViewVector()))
if (!map.add(p, buffer, ViewVector())) {
ReportOutOfMemory(cx);
return false;
JS_ALWAYS_TRUE(p->value().append(view));
}
MOZ_ALWAYS_TRUE(p->value().append(view));
}
if (addToNursery && !nurseryKeys.append(obj))
if (addToNursery && !nurseryKeys.append(buffer))
nurseryKeysValid = false;
return true;
}
InnerViewTable::ViewVector*
InnerViewTable::maybeViewsUnbarriered(ArrayBufferObject* obj)
InnerViewTable::maybeViewsUnbarriered(ArrayBufferObject* buffer)
{
if (!map.initialized())
return nullptr;
Map::Ptr p = map.lookup(obj);
Map::Ptr p = map.lookup(buffer);
if (p)
return &p->value();
return nullptr;
}
void
InnerViewTable::removeViews(ArrayBufferObject* obj)
InnerViewTable::removeViews(ArrayBufferObject* buffer)
{
Map::Ptr p = map.lookup(obj);
Map::Ptr p = map.lookup(buffer);
MOZ_ASSERT(p);
map.remove(p);

View File

@ -42,6 +42,7 @@ using JS::HandleValue;
using JS::Value;
using JS::ZoneSet;
using JS::ubi::AtomOrTwoByteChars;
using JS::ubi::CoarseType;
using JS::ubi::Concrete;
using JS::ubi::Edge;
using JS::ubi::EdgeRange;
@ -143,135 +144,8 @@ StackFrame::functionDisplayNameLength()
return functionDisplayName().match(m);
}
/* static */ const char16_t*
Node::getCanonicalTypeName(const char16_t* dupe, size_t length)
{
// This function is a hot mess of practicality.
//
// JS::ubi::Node::is<T> uses pointer identity to T's canonical type name
// string to determine type equivalence because (a) we don't have RTTI, and
// (b) a JS::ubi::Node instance backed by an offline heap snapshot wants
// is<JSScript>() to be true if that node represents a JSScript that was
// live when the heap snapshot was taken, but obviously there isn't a live
// JSScript backing it anymore. So, we need to be able to get a pointer to
// T's canonical type name string if we want is<T>() to work properly with
// nodes backed by an offline heap snapshot.
//
// Enter JS::ubi::Node::getCanonicalTypeName, stage right.
//
// Ideally, this function would return a pointer to the canonical
// Concrete<T>::concreteTypeName string that is structurally identical to
// the given in dupe string for all Concrete<T> specializations. In a
// perfect world, we could use static constructors in Firefox and
// SpiderMonkey and build up a registry of canonical type names at
// compile/link time. This is not the world we live in. We observe that
// while the set of concrete JS::ubi::Node specializations is open ended,
// there is a finite set of Ts passed to is<T>() in our various
// JS::ubi::Node analyses and we only really need is<T>() to work for
// them. That set is hard coded here.
//
// Uh... static_assert(grep for is<T>() calls and we got them all), right?
// Hold me and tell me it will be ok? Please?
//
// Oh and to top it off, this function is pretty hot: it's called for every
// single node we deserialize in a heap snapshot, and there tend to be a lot
// of those.
MOZ_ASSERT(8 == strlen("JSObject") &&
8 == strlen("JSScript") &&
8 == strlen("JSString"));
if (length == 8) {
if (dupe[0] == 'J' &&
dupe[1] == 'S' &&
dupe[2] == 'O' &&
dupe[3] == 'b' &&
dupe[4] == 'j' &&
dupe[5] == 'e' &&
dupe[6] == 'c' &&
dupe[7] == 't')
{
return JS::ubi::Node::canonicalTypeName<JSObject>();
}
if (dupe[0] == 'J' &&
dupe[1] == 'S' &&
dupe[2] == 'S' &&
dupe[3] == 'c' &&
dupe[4] == 'r' &&
dupe[5] == 'i' &&
dupe[6] == 'p' &&
dupe[7] == 't')
{
return JS::ubi::Node::canonicalTypeName<JSScript>();
}
if (dupe[0] == 'J' &&
dupe[1] == 'S' &&
dupe[2] == 'S' &&
dupe[3] == 't' &&
dupe[4] == 'r' &&
dupe[5] == 'i' &&
dupe[6] == 'n' &&
dupe[7] == 'g')
{
return JS::ubi::Node::canonicalTypeName<JSString>();
}
return nullptr;
}
MOZ_ASSERT(14 == strlen("js::LazyScript"));
if (length == 14) {
if (dupe[0] == 'j' &&
dupe[1] == 's' &&
dupe[2] == ':' &&
dupe[3] == ':' &&
dupe[4] == 'L' &&
dupe[5] == 'a' &&
dupe[6] == 'z' &&
dupe[7] == 'y' &&
dupe[8] == 'S' &&
dupe[9] == 'c' &&
dupe[10] == 'r' &&
dupe[11] == 'i' &&
dupe[12] == 'p' &&
dupe[13] == 't')
{
return JS::ubi::Node::canonicalTypeName<js::LazyScript>();
}
return nullptr;
}
MOZ_ASSERT(16 == strlen("js::jit::JitCode"));
if (length == 16) {
if (dupe[0] == 'j' &&
dupe[1] == 's' &&
dupe[2] == ':' &&
dupe[3] == ':' &&
dupe[4] == 'j' &&
dupe[5] == 'i' &&
dupe[6] == 't' &&
dupe[7] == ':' &&
dupe[8] == ':' &&
dupe[9] == 'J' &&
dupe[10] == 'i' &&
dupe[11] == 't' &&
dupe[12] == 'C' &&
dupe[13] == 'o' &&
dupe[14] == 'd' &&
dupe[15] == 'e')
{
return JS::ubi::Node::canonicalTypeName<js::jit::JitCode>();
}
return nullptr;
}
return nullptr;
}
// All operations on null ubi::Nodes crash.
CoarseType Concrete<void>::coarseType() const { MOZ_CRASH("null ubi::Node"); }
const char16_t* Concrete<void>::typeName() const { MOZ_CRASH("null ubi::Node"); }
JS::Zone* Concrete<void>::zone() const { MOZ_CRASH("null ubi::Node"); }
JSCompartment* Concrete<void>::compartment() const { MOZ_CRASH("null ubi::Node"); }

View File

@ -206,13 +206,19 @@ ByCoarseType::count(CountBase& countBase, const Node& node)
Count& count = static_cast<Count&>(countBase);
count.total_++;
if (node.is<JSObject>())
switch (node.coarseType()) {
case JS::ubi::CoarseType::Object:
return count.objects->count(node);
if (node.is<JSScript>() || node.is<LazyScript>() || node.is<jit::JitCode>())
case JS::ubi::CoarseType::Script:
return count.scripts->count(node);
if (node.is<JSString>())
case JS::ubi::CoarseType::String:
return count.strings->count(node);
return count.other->count(node);
case JS::ubi::CoarseType::Other:
return count.other->count(node);
default:
MOZ_CRASH("bad JS::ubi::CoarseType in JS::ubi::ByCoarseType::count");
return false;
}
}
bool

View File

@ -446,13 +446,8 @@ public:
*/
already_AddRefed<ImageContainer> GetContainerForImageLayer(nsDisplayListBuilder* aBuilder);
bool VisibleAboveRegionIntersects(const nsIntRect& aRect) const
{ return mVisibleAboveRegion.Intersects(aRect); }
bool VisibleAboveRegionIntersects(const nsIntRegion& aRegion) const
{ return !mVisibleAboveRegion.Intersect(aRegion).IsEmpty(); }
bool VisibleRegionIntersects(const nsIntRect& aRect) const
{ return mVisibleRegion.Intersects(aRect); }
bool VisibleRegionIntersects(const nsIntRegion& aRegion) const
{ return !mVisibleRegion.Intersect(aRegion).IsEmpty(); }
@ -1088,6 +1083,13 @@ public:
nsIFrame* GetContainerFrame() const { return mContainerFrame; }
nsDisplayListBuilder* Builder() const { return mBuilder; }
/**
* Check if we are currently inside an inactive layer.
*/
bool IsInInactiveLayer() const {
return mLayerBuilder->GetContainingPaintedLayerData();
}
/**
* Sets aOuterVisibleRegion as aLayer's visible region. aOuterVisibleRegion
* is in the coordinate space of the container reference frame.
@ -2584,12 +2586,22 @@ PaintedLayerDataNode::FindPaintedLayerFor(const nsIntRect& aVisibleRect,
} else {
PaintedLayerData* lowestUsableLayer = nullptr;
for (auto& data : Reversed(mPaintedLayerDataStack)) {
if (data.VisibleAboveRegionIntersects(aVisibleRect)) {
if (data.mVisibleAboveRegion.Intersects(aVisibleRect)) {
break;
}
MOZ_ASSERT(!data.mExclusiveToOneItem);
lowestUsableLayer = &data;
if (data.VisibleRegionIntersects(aVisibleRect)) {
nsIntRegion visibleRegion = data.mVisibleRegion;
// When checking whether the visible region intersects the given
// visible rect, also include the event-regions in the visible region,
// unless we're in an inactive layer, in which case the event-regions
// will be hoisted out into their own layer.
ContainerState& contState = mTree.ContState();
if (!contState.IsInInactiveLayer()) {
visibleRegion.OrWith(contState.ScaleRegionToOutsidePixels(data.mHitRegion));
visibleRegion.OrWith(contState.ScaleRegionToOutsidePixels(data.mMaybeHitRegion));
}
if (visibleRegion.Intersects(aVisibleRect)) {
break;
}
}

View File

@ -920,6 +920,12 @@ GetDisplayPortFromMarginsData(nsIContent* aContent,
ScreenRect screenRect = LayoutDeviceRect::FromAppUnits(base, auPerDevPixel)
* parentRes;
nsRect expandedScrollableRect =
nsLayoutUtils::CalculateExpandedScrollableRect(frame);
ScreenRect screenExpScrollableRect =
LayoutDeviceRect::FromAppUnits(expandedScrollableRect - scrollPos,
auPerDevPixel) * parentRes;
if (gfxPrefs::LayersTilesEnabled()) {
// Note on the correctness of applying the alignment in Screen space:
// The correct space to apply the alignment in would be Layer space, but
@ -943,6 +949,9 @@ GetDisplayPortFromMarginsData(nsIContent* aContent,
// up to tile boundaries.
screenRect.Inflate(1);
// Make sure the displayport remains within the scrollable rect.
screenRect = screenRect.ForceInside(screenExpScrollableRect);
// Avoid division by zero.
if (alignmentX == 0) {
alignmentX = 1;
@ -992,6 +1001,9 @@ GetDisplayPortFromMarginsData(nsIContent* aContent,
screenRect.x -= left;
screenRect.width += left + right;
}
// Make sure the displayport remains within the scrollable rect.
screenRect = screenRect.ForceInside(screenExpScrollableRect);
}
// Convert the aligned rect back into app units.
@ -1001,7 +1013,6 @@ GetDisplayPortFromMarginsData(nsIContent* aContent,
result = ApplyRectMultiplier(result, aMultiplier);
// Finally, clamp it to the expanded scrollable rect.
nsRect expandedScrollableRect = nsLayoutUtils::CalculateExpandedScrollableRect(frame);
result = expandedScrollableRect.Intersect(result + scrollPos) - scrollPos;
return result;

View File

@ -7,6 +7,7 @@
#include "mozilla/ArrayUtils.h"
#include "mozilla/Assertions.h"
#include "mozilla/SplayTree.h"
#include "mozilla/unused.h"
using mozilla::SplayTree;
using mozilla::SplayTreeNode;
@ -118,6 +119,8 @@ static SplayTree<SplayNoCopy, SplayNoCopy> testNoCopy;
int
main()
{
mozilla::unused << testNoCopy;
SplayTree<SplayInt, SplayInt> tree;
MOZ_RELEASE_ASSERT(tree.empty());

View File

@ -72,6 +72,12 @@ TinderBoxPrintRe = {
'fail_group': "FAILED",
'known_fail_group': None,
},
"mozmill_summary": {
'regex': re.compile(r'''INFO (Passed|Failed|Skipped): (\d+)'''),
'pass_group': "Passed",
'fail_group': "Failed",
'known_fail_group': "Skipped",
},
"webapprt_summary": _mochitest_summary,
"harness_error": {

View File

@ -255,6 +255,9 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
@PreScriptAction('create-virtualenv')
def _pre_create_virtualenv(self, action):
dirs = self.query_abs_dirs()
self.register_virtualenv_module(name='pip>=1.5')
self.register_virtualenv_module('psutil==3.1.1', method='pip')
self.register_virtualenv_module(name='mock')
self.register_virtualenv_module(name='simplejson')

View File

@ -252,8 +252,12 @@ user_pref("identity.fxaccounts.remote.signin.uri", "https://%(server)s/fxa-signi
user_pref("identity.fxaccounts.settings.uri", "https://%(server)s/fxa-settings");
user_pref('identity.fxaccounts.remote.webchannel.uri', 'https://%(server)s/');
// Enable logging of APZ test data (see bug 961289).
user_pref('apz.test.logging_enabled', true);
// Increase the APZ content response timeout in tests to 15 seconds.
// This is to accommodate the fact that test environments tend to be slower
// than production environments (with the b2g emulator being the slowest of them
// all), resulting in the production timeout value sometimes being exceeded
// and causing false-positive test failures. See bug 1176798, bug 1177018.
user_pref("apz.content_response_timeout", 15000);
// Make sure SSL Error reports don't hit the network
user_pref("security.ssl.errorReporting.url", "https://example.com/browser/browser/base/content/test/general/pinning_reports.sjs?succeed");

View File

@ -108,13 +108,14 @@ void protobuf_AssignDesc_CoreDump_2eproto() {
::google::protobuf::MessageFactory::generated_factory(),
sizeof(StackFrame_Data));
Node_descriptor_ = file->message_type(2);
static const int Node_offsets_[6] = {
static const int Node_offsets_[7] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, id_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, typename__),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, size_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, edges_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, allocationstack_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, jsobjectclassname_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Node, coarsetype_),
};
Node_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
@ -199,13 +200,13 @@ void protobuf_AddDesc_CoreDump_2eproto() {
"\022\014\n\004line\030\003 \001(\r\022\016\n\006column\030\004 \001(\r\022\016\n\006source"
"\030\005 \001(\014\022\033\n\023functionDisplayName\030\006 \001(\014\022\020\n\010i"
"sSystem\030\007 \001(\010\022\024\n\014isSelfHosted\030\010 \001(\010B\020\n\016S"
"tackFrameType\"\275\001\n\004Node\022\n\n\002id\030\001 \001(\004\022\020\n\010ty"
"tackFrameType\"\324\001\n\004Node\022\n\n\002id\030\001 \001(\004\022\020\n\010ty"
"peName\030\002 \001(\014\022\014\n\004size\030\003 \001(\004\022.\n\005edges\030\004 \003("
"\0132\037.mozilla.devtools.protobuf.Edge\022>\n\017al"
"locationStack\030\005 \001(\0132%.mozilla.devtools.p"
"rotobuf.StackFrame\022\031\n\021jsObjectClassName\030"
"\006 \001(\014\"&\n\004Edge\022\020\n\010referent\030\001 \001(\004\022\014\n\004name\030"
"\002 \001(\014", 605);
"\006 \001(\014\022\025\n\ncoarseType\030\007 \001(\r:\0010\"&\n\004Edge\022\020\n\010"
"referent\030\001 \001(\004\022\014\n\004name\030\002 \001(\014", 628);
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
"CoreDump.proto", &protobuf_RegisterTypes);
Metadata::default_instance_ = new Metadata();
@ -1279,6 +1280,7 @@ const int Node::kSizeFieldNumber;
const int Node::kEdgesFieldNumber;
const int Node::kAllocationStackFieldNumber;
const int Node::kJsObjectClassNameFieldNumber;
const int Node::kCoarseTypeFieldNumber;
#endif // !_MSC_VER
Node::Node()
@ -1306,6 +1308,7 @@ void Node::SharedCtor() {
size_ = GOOGLE_ULONGLONG(0);
allocationstack_ = NULL;
jsobjectclassname_ = const_cast< ::std::string*>(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
coarsetype_ = 0u;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
@ -1348,7 +1351,7 @@ Node* Node::New() const {
}
void Node::Clear() {
if (_has_bits_[0 / 32] & 55) {
if (_has_bits_[0 / 32] & 119) {
id_ = GOOGLE_ULONGLONG(0);
if (has_typename_()) {
if (typename__ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
@ -1364,6 +1367,7 @@ void Node::Clear() {
jsobjectclassname_->clear();
}
}
coarsetype_ = 0u;
}
edges_.Clear();
::memset(_has_bits_, 0, sizeof(_has_bits_));
@ -1458,6 +1462,21 @@ bool Node::MergePartialFromCodedStream(
} else {
goto handle_unusual;
}
if (input->ExpectTag(56)) goto parse_coarseType;
break;
}
// optional uint32 coarseType = 7 [default = 0];
case 7: {
if (tag == 56) {
parse_coarseType:
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::uint32, ::google::protobuf::internal::WireFormatLite::TYPE_UINT32>(
input, &coarsetype_)));
set_has_coarsetype();
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
@ -1521,6 +1540,11 @@ void Node::SerializeWithCachedSizes(
6, this->jsobjectclassname(), output);
}
// optional uint32 coarseType = 7 [default = 0];
if (has_coarsetype()) {
::google::protobuf::internal::WireFormatLite::WriteUInt32(7, this->coarsetype(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
@ -1569,6 +1593,11 @@ void Node::SerializeWithCachedSizes(
6, this->jsobjectclassname(), target);
}
// optional uint32 coarseType = 7 [default = 0];
if (has_coarsetype()) {
target = ::google::protobuf::internal::WireFormatLite::WriteUInt32ToArray(7, this->coarsetype(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
@ -1616,6 +1645,13 @@ int Node::ByteSize() const {
this->jsobjectclassname());
}
// optional uint32 coarseType = 7 [default = 0];
if (has_coarsetype()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::UInt32Size(
this->coarsetype());
}
}
// repeated .mozilla.devtools.protobuf.Edge edges = 4;
total_size += 1 * this->edges_size();
@ -1667,6 +1703,9 @@ void Node::MergeFrom(const Node& from) {
if (from.has_jsobjectclassname()) {
set_jsobjectclassname(from.jsobjectclassname());
}
if (from.has_coarsetype()) {
set_coarsetype(from.coarsetype());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
@ -1696,6 +1735,7 @@ void Node::Swap(Node* other) {
edges_.Swap(&other->edges_);
std::swap(allocationstack_, other->allocationstack_);
std::swap(jsobjectclassname_, other->jsobjectclassname_);
std::swap(coarsetype_, other->coarsetype_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);

View File

@ -501,6 +501,13 @@ class Node : public ::google::protobuf::Message {
inline ::std::string* release_jsobjectclassname();
inline void set_allocated_jsobjectclassname(::std::string* jsobjectclassname);
// optional uint32 coarseType = 7 [default = 0];
inline bool has_coarsetype() const;
inline void clear_coarsetype();
static const int kCoarseTypeFieldNumber = 7;
inline ::google::protobuf::uint32 coarsetype() const;
inline void set_coarsetype(::google::protobuf::uint32 value);
// @@protoc_insertion_point(class_scope:mozilla.devtools.protobuf.Node)
private:
inline void set_has_id();
@ -513,6 +520,8 @@ class Node : public ::google::protobuf::Message {
inline void clear_has_allocationstack();
inline void set_has_jsobjectclassname();
inline void clear_has_jsobjectclassname();
inline void set_has_coarsetype();
inline void clear_has_coarsetype();
::google::protobuf::UnknownFieldSet _unknown_fields_;
@ -524,6 +533,7 @@ class Node : public ::google::protobuf::Message {
::google::protobuf::RepeatedPtrField< ::mozilla::devtools::protobuf::Edge > edges_;
::mozilla::devtools::protobuf::StackFrame* allocationstack_;
::std::string* jsobjectclassname_;
::google::protobuf::uint32 coarsetype_;
friend void protobuf_AddDesc_CoreDump_2eproto();
friend void protobuf_AssignDesc_CoreDump_2eproto();
friend void protobuf_ShutdownFile_CoreDump_2eproto();
@ -1331,6 +1341,30 @@ inline void Node::set_allocated_jsobjectclassname(::std::string* jsobjectclassna
// @@protoc_insertion_point(field_set_allocated:mozilla.devtools.protobuf.Node.jsObjectClassName)
}
// optional uint32 coarseType = 7 [default = 0];
inline bool Node::has_coarsetype() const {
return (_has_bits_[0] & 0x00000040u) != 0;
}
inline void Node::set_has_coarsetype() {
_has_bits_[0] |= 0x00000040u;
}
inline void Node::clear_has_coarsetype() {
_has_bits_[0] &= ~0x00000040u;
}
inline void Node::clear_coarsetype() {
coarsetype_ = 0u;
clear_has_coarsetype();
}
inline ::google::protobuf::uint32 Node::coarsetype() const {
// @@protoc_insertion_point(field_get:mozilla.devtools.protobuf.Node.coarseType)
return coarsetype_;
}
inline void Node::set_coarsetype(::google::protobuf::uint32 value) {
set_has_coarsetype();
coarsetype_ = value;
// @@protoc_insertion_point(field_set:mozilla.devtools.protobuf.Node.coarseType)
}
// -------------------------------------------------------------------
// Edge
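
As a reference for the accessors added above, a minimal sketch of the usual protobuf optional-field behaviour they follow (the surrounding function and variable names are illustrative):

#include "CoreDump.pb.h"  // generated header shown in this patch

void SketchCoarseTypeAccessors() {
  mozilla::devtools::protobuf::Node node;

  bool hadIt = node.has_coarsetype();   // false on a fresh message
  uint32_t value = node.coarsetype();   // returns the declared default, 0

  node.set_coarsetype(2u);              // arbitrary example value; also sets the has-bit
  hadIt = node.has_coarsetype();        // now true
  value = node.coarsetype();            // 2u

  node.clear_coarsetype();              // back to the default with the has-bit cleared
}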

View File

@ -80,14 +80,16 @@ message StackFrame {
// A serialized version of `JS::ubi::Node` and its outgoing edges.
message Node {
optional uint64 id = 1;
// char16_t[]
optional bytes typeName = 2;
optional uint64 size = 3;
repeated Edge edges = 4;
optional StackFrame allocationStack = 5;
// char[]
optional bytes jsObjectClassName = 6;
// JS::ubi::CoarseType. Defaults to Other.
optional uint32 coarseType = 7 [default = 0];
}
// A serialized edge from the heap graph.
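
The uint32 encoding is meant to round-trip through the JS::ubi helpers this patch uses in HeapSnapshot.cpp (CoarseTypeToUint32, Uint32IsValidCoarseType, Uint32ToCoarseType); a minimal sketch of that round-trip, with error handling that is illustrative rather than copied from the tree:

#include "js/UbiNode.h"
#include "CoreDump.pb.h"

// Serialize a node's coarse type into the new field, then validate and decode
// it again, mirroring the writer and HeapSnapshot::saveNode in this patch.
static bool RoundTripCoarseType(const JS::ubi::Node& ubiNode,
                                mozilla::devtools::protobuf::Node& protobufNode,
                                JS::ubi::CoarseType* out) {
  protobufNode.set_coarsetype(JS::ubi::CoarseTypeToUint32(ubiNode.coarseType()));

  if (!JS::ubi::Uint32IsValidCoarseType(protobufNode.coarsetype()))
    return false;              // unknown value: reject rather than misclassify
  *out = JS::ubi::Uint32ToCoarseType(protobufNode.coarsetype());
  return true;
}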

View File

@ -49,32 +49,6 @@ DeserializedEdge::init(const protobuf::Edge& edge, HeapSnapshot& owner)
return true;
}
DeserializedNode::DeserializedNode(DeserializedNode&& rhs)
: id(rhs.id)
, typeName(rhs.typeName)
, size(rhs.size)
, edges(Move(rhs.edges))
, allocationStack(rhs.allocationStack)
, jsObjectClassName(Move(rhs.jsObjectClassName))
, owner(rhs.owner)
{ }
DeserializedNode& DeserializedNode::operator=(DeserializedNode&& rhs)
{
MOZ_ASSERT(&rhs != this);
this->~DeserializedNode();
new(this) DeserializedNode(Move(rhs));
return *this;
}
DeserializedNode::DeserializedNode(NodeId id, const char16_t* typeName, uint64_t size)
: id(id)
, typeName(typeName)
, size(size)
, edges()
, owner(nullptr)
{ }
JS::ubi::Node
DeserializedNode::getEdgeReferent(const DeserializedEdge& edge)
{

View File

@ -59,6 +59,7 @@ struct DeserializedNode {
using UniqueStringPtr = UniquePtr<char16_t[]>;
NodeId id;
JS::ubi::CoarseType coarseType;
// A borrowed reference to a string owned by this node's owning HeapSnapshot.
const char16_t* typeName;
uint64_t size;
@ -70,6 +71,7 @@ struct DeserializedNode {
HeapSnapshot* owner;
DeserializedNode(NodeId id,
JS::ubi::CoarseType coarseType,
const char16_t* typeName,
uint64_t size,
EdgeVector&& edges,
@ -77,6 +79,7 @@ struct DeserializedNode {
UniquePtr<char[]>&& className,
HeapSnapshot& owner)
: id(id)
, coarseType(coarseType)
, typeName(typeName)
, size(size)
, edges(Move(edges))
@ -86,8 +89,24 @@ struct DeserializedNode {
{ }
virtual ~DeserializedNode() { }
DeserializedNode(DeserializedNode&& rhs);
DeserializedNode& operator=(DeserializedNode&& rhs);
DeserializedNode(DeserializedNode&& rhs)
: id(rhs.id)
, coarseType(rhs.coarseType)
, typeName(rhs.typeName)
, size(rhs.size)
, edges(Move(rhs.edges))
, allocationStack(rhs.allocationStack)
, jsObjectClassName(Move(rhs.jsObjectClassName))
, owner(rhs.owner)
{ }
DeserializedNode& operator=(DeserializedNode&& rhs)
{
MOZ_ASSERT(&rhs != this);
this->~DeserializedNode();
new(this) DeserializedNode(Move(rhs));
return *this;
}
// Get a borrowed reference to the given edge's referent. This method is
// virtual to provide a hook for gmock and gtest.
@ -97,7 +116,16 @@ struct DeserializedNode {
protected:
// This is only for use with `MockDeserializedNode` in testing.
DeserializedNode(NodeId id, const char16_t* typeName, uint64_t size);
DeserializedNode(NodeId id, const char16_t* typeName, uint64_t size)
: id(id)
, coarseType(JS::ubi::CoarseType::Other)
, typeName(typeName)
, size(size)
, edges()
, allocationStack(Nothing())
, jsObjectClassName(nullptr)
, owner(nullptr)
{ }
private:
DeserializedNode(const DeserializedNode&) = delete;
@ -225,6 +253,7 @@ public:
new (storage) Concrete(ptr);
}
CoarseType coarseType() const final { return get().coarseType; }
Id identifier() const override { return get().id; }
bool isLive() const override { return false; }
const char16_t* typeName() const override;
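
The inlined move assignment above reuses the move constructor via the destroy-and-placement-new idiom; a self-contained sketch of the same pattern, with a made-up type name for illustration:

#include <new>
#include <utility>

struct Movable {
  int* data = nullptr;

  Movable() = default;
  Movable(Movable&& rhs) : data(rhs.data) { rhs.data = nullptr; }
  ~Movable() { delete data; }

  // Same shape as DeserializedNode::operator=: end the current object's
  // lifetime, then rebuild it in place from the moved-from value, so the
  // move constructor stays the single source of truth for member transfer.
  Movable& operator=(Movable&& rhs) {
    if (this != &rhs) {        // DeserializedNode asserts this precondition instead
      this->~Movable();
      new (this) Movable(std::move(rhs));
    }
    return *this;
  }

  Movable(const Movable&) = delete;
  Movable& operator=(const Movable&) = delete;
};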

View File

@ -130,25 +130,19 @@ HeapSnapshot::saveNode(const protobuf::Node& node)
if (nodes.has(id))
return false;
if (!JS::ubi::Uint32IsValidCoarseType(node.coarsetype()))
return false;
auto coarseType = JS::ubi::Uint32ToCoarseType(node.coarsetype());
if (!node.has_typename_())
return false;
// First, try and get the canonical type name in the case where the type name
// is one of the known concrete specializations that our analyses check for
// with is<T>(). If that fails, then just use a generic unique string, but all
// subsequent JS::ubi::Node::is<T>() checks will always return false for the
// node with this type name. That's fine though because we already check for
// all the Ts that are of interest to us in the first case.
auto duplicatedTypeName = reinterpret_cast<const char16_t*>(
node.typename_().data());
auto length = node.typename_().length() / sizeof(char16_t);
auto typeName = JS::ubi::Node::getCanonicalTypeName(duplicatedTypeName, length);
if (!typeName) {
typeName = borrowUniqueString(duplicatedTypeName, length);
if (!typeName)
return false;
}
MOZ_ASSERT(typeName);
auto typeName = borrowUniqueString(duplicatedTypeName, length);
if (!typeName)
return false;
if (!node.has_size())
return false;
@ -185,8 +179,8 @@ HeapSnapshot::saveNode(const protobuf::Node& node)
jsObjectClassName.get()[length] = '\0';
}
return nodes.putNew(id, DeserializedNode(id, typeName, size, Move(edges),
allocationStack,
return nodes.putNew(id, DeserializedNode(id, coarseType, typeName, size,
Move(edges), allocationStack,
Move(jsObjectClassName),
*this));
}
@ -662,6 +656,8 @@ public:
protobuf::Node protobufNode;
protobufNode.set_id(ubiNode.identifier());
protobufNode.set_coarsetype(JS::ubi::CoarseTypeToUint32(ubiNode.coarseType()));
const char16_t* typeName = ubiNode.typeName();
size_t length = NS_strlen(typeName) * sizeof(char16_t);
protobufNode.set_typename_(typeName, length);

View File

@ -45,6 +45,7 @@ DEF_TEST(DeserializedNodeUbiNodes, {
MockDeserializedNode mocked(id, typeName, size);
mocked.jsObjectClassName = mozilla::UniquePtr<char[]>(strdup(className));
ASSERT_TRUE(!!mocked.jsObjectClassName);
mocked.coarseType = JS::ubi::CoarseType::Script;
DeserializedNode& deserialized = mocked;
JS::ubi::Node ubi(&deserialized);
@ -53,6 +54,7 @@ DEF_TEST(DeserializedNodeUbiNodes, {
EXPECT_EQ(size, ubi.size(fakeMallocSizeOf));
EXPECT_EQ(typeName, ubi.typeName());
EXPECT_EQ(JS::ubi::CoarseType::Script, ubi.coarseType());
EXPECT_EQ(id, ubi.identifier());
EXPECT_FALSE(ubi.isLive());
EXPECT_EQ(strcmp(ubi.jsObjectClassName(), className), 0);

View File

@ -177,7 +177,7 @@ nsManifestCheck::Begin()
rv = NS_NewChannel(getter_AddRefs(mChannel),
mURI,
nsContentUtils::GetSystemPrincipal(),
nsILoadInfo::SEC_NORMAL,
nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL,
nsIContentPolicy::TYPE_OTHER,
nullptr, // loadGroup
nullptr, // aCallbacks
@ -195,10 +195,7 @@ nsManifestCheck::Begin()
false);
}
rv = mChannel->AsyncOpen(this, nullptr);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
return mChannel->AsyncOpen2(this);
}
//-----------------------------------------------------------------------------
@ -371,7 +368,7 @@ nsOfflineCacheUpdateItem::OpenChannel(nsOfflineCacheUpdate *aUpdate)
rv = NS_NewChannel(getter_AddRefs(mChannel),
mURI,
nsContentUtils::GetSystemPrincipal(),
nsILoadInfo::SEC_NORMAL,
nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL,
nsIContentPolicy::TYPE_OTHER,
nullptr, // aLoadGroup
this, // aCallbacks
@ -403,7 +400,7 @@ nsOfflineCacheUpdateItem::OpenChannel(nsOfflineCacheUpdate *aUpdate)
false);
}
rv = mChannel->AsyncOpen(this, nullptr);
rv = mChannel->AsyncOpen2(this);
NS_ENSURE_SUCCESS(rv, rv);
mUpdate = aUpdate;
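
The switch from AsyncOpen(listener, context) to AsyncOpen2(listener) hands the security check over to necko: the policy is declared through the nsILoadInfo flags passed to NS_NewChannel, and AsyncOpen2 enforces it before the request goes out. A minimal sketch of the resulting pattern, with a hypothetical wrapper function name:

#include "nsNetUtil.h"
#include "nsContentUtils.h"
#include "nsIContentPolicy.h"

// Hypothetical helper; the NS_NewChannel arguments mirror the calls in this patch.
static nsresult OpenCheckedChannel(nsIURI* aURI,
                                   nsIStreamListener* aListener,
                                   nsIInterfaceRequestor* aCallbacks) {
  nsCOMPtr<nsIChannel> channel;
  nsresult rv = NS_NewChannel(getter_AddRefs(channel),
                              aURI,
                              nsContentUtils::GetSystemPrincipal(),
                              nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL,
                              nsIContentPolicy::TYPE_OTHER,
                              nullptr,     // loadGroup
                              aCallbacks);
  NS_ENSURE_SUCCESS(rv, rv);

  // AsyncOpen2 consults the channel's loadInfo and aborts the open if the
  // declared policy is violated, so no separate listener wrapping is needed.
  return channel->AsyncOpen2(aListener);
}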

View File

@ -191,7 +191,7 @@ nsPrefetchNode::OpenChannel()
source,
source->NodePrincipal(),
nullptr, //aTriggeringPrincipal
nsILoadInfo::SEC_NORMAL,
nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_INHERITS,
nsIContentPolicy::TYPE_OTHER,
loadGroup, // aLoadGroup
this, // aCallbacks
@ -211,10 +211,7 @@ nsPrefetchNode::OpenChannel()
false);
}
rv = mChannel->AsyncOpen(this, nullptr);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
return mChannel->AsyncOpen2(this);
}
nsresult