Merge mozilla-central to inbound. a=merge CLOSED TREE

This commit is contained in:
Csoregi Natalia 2018-07-19 19:12:06 +03:00
commit 84b71b7c4e
31 changed files with 588 additions and 500 deletions

View File

@ -386,9 +386,7 @@ HyperTextAccessible::OffsetToDOMPoint(int32_t aOffset)
if (aOffset == 0) {
RefPtr<TextEditor> textEditor = GetEditor();
if (textEditor) {
bool isEmpty = false;
textEditor->GetDocumentIsEmpty(&isEmpty);
if (isEmpty) {
if (textEditor->IsEmpty()) {
return DOMPoint(textEditor->GetRoot(), 0);
}
}

View File

@ -2688,7 +2688,10 @@ function URLBarSetURI(aURI) {
var value = gBrowser.userTypedValue;
var valid = false;
if (value == null) {
// Explicitly check for nulled out value. We don't want to reset the URL
// bar if the user has deleted the URL and we'd just put the same URL
// back. See bug 304198.
if (value === null) {
let uri = aURI || gBrowser.currentURI;
// Strip off "wyciwyg://" and passwords for the location bar
try {

View File

@ -242,7 +242,9 @@
</html:div>
<box id="editBookmarkPanelImage"/>
#include ../../components/places/content/editBookmarkPanel.inc.xul
<vbox id="editBookmarkPanelBottomContent" flex="1">
<vbox id="editBookmarkPanelBottomContent"
flex="1"
style="min-width: &editBookmark.panel.width;;">
<checkbox id="editBookmarkPanel_showForNewBookmarks"
label="&editBookmark.showForNewBookmarks.label;"
accesskey="&editBookmark.showForNewBookmarks.accesskey;"

View File

@ -4726,10 +4726,13 @@ class TabProgressListener {
// loss of urlbar contents for invalid URI errors (see bug 867957).
// Another reason to clear the userTypedValue is if this was an anchor
// navigation initiated by the user.
// Finally, we do insert the URL if this is a same-document navigation
// and the user cleared the URL manually.
if (this.mBrowser.didStartLoadSinceLastUserTyping() ||
((aFlags & Ci.nsIWebProgressListener.LOCATION_CHANGE_ERROR_PAGE) &&
aLocation.spec != "about:blank") ||
(isSameDocument && this.mBrowser.inLoadURI)) {
(isSameDocument && this.mBrowser.inLoadURI) ||
(isSameDocument && !this.mBrowser.userTypedValue)) {
this.mBrowser.userTypedValue = null;
}

View File

@ -59,6 +59,7 @@ subsuite = clipboard
skip-if = os != "mac" # Mac only feature
[browser_pasteAndGo.js]
subsuite = clipboard
[browser_populateAfterPushState.js]
[browser_removeUnsafeProtocolsFromURLBarPaste.js]
subsuite = clipboard
[browser_search_favicon.js]

View File

@ -0,0 +1,23 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
/* When a user clears the URL bar, and then the page pushes state, we should
* re-fill the URL bar so it doesn't remain empty indefinitely. See bug 1441039.
* For normal loads, this happens automatically because a non-same-document state
* change takes place.
*/
add_task(async function() {
  const TEST_PATH = getRootDirectory(gTestPath).replace("chrome://mochitests/content", "http://example.com");
  await BrowserTestUtils.withNewTab(TEST_PATH + "dummy_page.html", async function(browser) {
    // Simulate the user manually clearing the URL bar.
    gURLBar.value = "";
    let locationChangePromise = BrowserTestUtils.waitForLocationChange(gBrowser, TEST_PATH + "dummy_page2.html");
    // Push state from inside the content process; this is a same-document
    // navigation, so no regular load will refill the URL bar for us.
    await ContentTask.spawn(browser, null, function() {
      content.history.pushState({}, "Page 2", "dummy_page2.html");
    });
    await locationChangePromise;
    // Use is() to actually compare the values. The previous ok() call treated
    // the expected URL as the assertion message and only checked truthiness,
    // so it could never fail on a wrong URL.
    is(gURLBar.value, TEST_PATH + "dummy_page2.html", "Should have updated the URL bar.");
  });
});

View File

@ -754,6 +754,10 @@ you can use these alternative items. Otherwise, their values should be empty. -
<!ENTITY spellAddDictionaries.label "Add Dictionaries…">
<!ENTITY spellAddDictionaries.accesskey "A">
<!-- LOCALIZATION NOTE (editBookmark.panel.width): width of the bookmark panel.
Should be large enough to fully display the Done and Cancel/
Remove Bookmark buttons. -->
<!ENTITY editBookmark.panel.width "23em">
<!ENTITY editBookmark.done.label "Done">
<!ENTITY editBookmark.showForNewBookmarks.label "Show editor when saving">
<!ENTITY editBookmark.showForNewBookmarks.accesskey "S">

View File

@ -40,10 +40,6 @@
-moz-appearance: button-arrow-down;
}
#editBookmarkPanelContent {
min-width: 23em;
}
#editBMPanel_folderTree {
margin-top: 2px;
margin-bottom: 2px;

View File

@ -43,10 +43,6 @@
-moz-appearance: -moz-mac-disclosure-button-closed;
}
#editBookmarkPanelContent {
min-width: 23em;
}
#editBMPanel_folderTree {
margin: 6px 4px 0 4px;
}

View File

@ -45,10 +45,6 @@
list-style-image: url("chrome://global/skin/icons/expand.png");
}
#editBookmarkPanelContent {
min-width: 23em;
}
#editBMPanel_folderTree {
margin-top: 2px;
margin-bottom: 2px;

View File

@ -1173,10 +1173,10 @@ class JSTerm extends Component {
}
if (this._autocompleteQuery && input.startsWith(this._autocompleteQuery)) {
let filterBy = input;
// Find the last non-alphanumeric other than _ or $ if it exists.
const lastNonAlpha = input.match(/[^a-zA-Z0-9_$][a-zA-Z0-9_$]*$/);
// Find the last non-alphanumeric other than "_", ":", or "$" if it exists.
const lastNonAlpha = input.match(/[^a-zA-Z0-9_$:][a-zA-Z0-9_$:]*$/);
// If input contains non-alphanumerics, use the part after the last one
// to filter the cache
// to filter the cache.
if (lastNonAlpha) {
filterBy = input.substring(input.lastIndexOf(lastNonAlpha) + 1);
}

View File

@ -185,6 +185,7 @@ skip-if = verify
[browser_jsterm_autocomplete_array_no_index.js]
[browser_jsterm_autocomplete_arrow_keys.js]
[browser_jsterm_autocomplete_cached_results.js]
[browser_jsterm_autocomplete_commands.js]
[browser_jsterm_autocomplete_crossdomain_iframe.js]
[browser_jsterm_autocomplete_escape_key.js]
[browser_jsterm_autocomplete_extraneous_closing_brackets.js]

View File

@ -0,0 +1,59 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Test that console commands are autocompleted.
const TEST_URI = `data:text/html;charset=utf-8,Test command autocomplete`;
// Run the command-autocomplete checks against both JsTerm implementations,
// since the input handling differs between them.
add_task(async function() {
// Run test with legacy JsTerm
await performTests();
// And then run it with the CodeMirror-powered one.
await pushPref("devtools.webconsole.jsterm.codeMirror", true);
await performTests();
});
// Exercises console-command (":"-prefixed) autocompletion in the jsterm:
// typing ":" must open a popup listing the valid commands, and Tab must
// accept the currently-suggested completion.
async function performTests() {
const { jsterm } = await openNewTabAndConsole(TEST_URI);
const { autocompletePopup } = jsterm;
// Listen for the popup BEFORE sending the keystroke so the event isn't missed.
const onPopUpOpen = autocompletePopup.once("popup-opened");
info(`Enter ":"`);
jsterm.focus();
EventUtils.sendString(":");
await onPopUpOpen;
// NOTE(review): assumes exactly these two commands are registered in this
// build; a newly added command would need to be appended here.
const expectedCommands = [":help", ":screenshot"];
is(getPopupItems(autocompletePopup).join("\n"), expectedCommands.join("\n"),
"popup contains expected commands");
// NOTE(review): variable name has a typo ("onAutocompleUpdated"); kept as-is
// since it is purely local.
let onAutocompleUpdated = jsterm.once("autocomplete-updated");
EventUtils.sendString("s");
await onAutocompleUpdated;
// After typing ":s", the ghost-completion node should hold the remainder of
// ":screenshot" (" creenshot" — padded to align after the typed prefix).
checkJsTermCompletionValue(jsterm, " creenshot",
"completion node has expected :screenshot value");
EventUtils.synthesizeKey("KEY_Tab");
is(jsterm.getInputValue(), ":screenshot", "Tab key correctly completed :screenshot");
ok(!autocompletePopup.isOpen, "popup is closed after Tab");
info("Test :hel completion");
jsterm.setInputValue(":he");
onAutocompleUpdated = jsterm.once("autocomplete-updated");
EventUtils.sendString("l");
await onAutocompleUpdated;
// Remainder of ":help" after typing ":hel" is " p" (aligned as above).
checkJsTermCompletionValue(jsterm, " p", "completion node has expected :help value");
EventUtils.synthesizeKey("KEY_Tab");
is(jsterm.getInputValue(), ":help", "Tab key correctly completes :help");
}
/**
 * Collect the visible labels of an autocomplete popup.
 *
 * @param {Object} popup
 *        The autocomplete popup; its `items` property is expected to be an
 *        array of objects carrying a `label` string.
 * @returns {Array<String>} The labels, in popup order.
 */
function getPopupItems(popup) {
  const labels = [];
  for (const entry of popup.items) {
    labels.push(entry.label);
  }
  return labels;
}

View File

@ -31,6 +31,7 @@ loader.lazyRequireGetter(this, "WebConsoleCommands", "devtools/server/actors/web
loader.lazyRequireGetter(this, "addWebConsoleCommands", "devtools/server/actors/webconsole/utils", true);
loader.lazyRequireGetter(this, "formatCommand", "devtools/server/actors/webconsole/commands", true);
loader.lazyRequireGetter(this, "isCommand", "devtools/server/actors/webconsole/commands", true);
loader.lazyRequireGetter(this, "validCommands", "devtools/server/actors/webconsole/commands", true);
loader.lazyRequireGetter(this, "CONSOLE_WORKER_IDS", "devtools/server/actors/webconsole/utils", true);
loader.lazyRequireGetter(this, "WebConsoleUtils", "devtools/server/actors/webconsole/utils", true);
loader.lazyRequireGetter(this, "EnvironmentActor", "devtools/server/actors/environment", true);
@ -1085,54 +1086,57 @@ WebConsoleActor.prototype =
let dbgObject = null;
let environment = null;
let hadDebuggee = false;
// This is the case of the paused debugger
if (frameActorId) {
const frameActor = this.conn.getActor(frameActorId);
try {
// Need to try/catch since accessing frame.environment
// can throw "Debugger.Frame is not live"
const frame = frameActor.frame;
environment = frame.environment;
} catch (e) {
DevToolsUtils.reportException("autocomplete",
Error("The frame actor was not found: " + frameActorId));
}
} else {
// This is the general case (non-paused debugger)
hadDebuggee = this.dbg.hasDebuggee(this.evalWindow);
dbgObject = this.dbg.addDebuggee(this.evalWindow);
}
const result = JSPropertyProvider(dbgObject, environment, request.text,
request.cursor, frameActorId) || {};
if (!hadDebuggee && dbgObject) {
this.dbg.removeDebuggee(this.evalWindow);
}
let matches = result.matches || [];
let matches = [];
let matchProp;
const reqText = request.text.substr(0, request.cursor);
// We consider '$' as alphanumeric because it is used in the names of some
// helper functions.
const lastNonAlphaIsDot = /[.][a-zA-Z0-9$]*$/.test(reqText);
if (!lastNonAlphaIsDot) {
if (!this._webConsoleCommandsCache) {
const helpers = {
sandbox: Object.create(null)
};
addWebConsoleCommands(helpers);
this._webConsoleCommandsCache =
Object.getOwnPropertyNames(helpers.sandbox);
if (isCommand(reqText)) {
const commandsCache = this._getWebConsoleCommandsCache();
matchProp = reqText;
matches = validCommands
.filter(c => `:${c}`.startsWith(reqText)
&& commandsCache.find(n => `:${n}`.startsWith(reqText))
)
.map(c => `:${c}`);
} else {
// This is the case of the paused debugger
if (frameActorId) {
const frameActor = this.conn.getActor(frameActorId);
try {
// Need to try/catch since accessing frame.environment
// can throw "Debugger.Frame is not live"
const frame = frameActor.frame;
environment = frame.environment;
} catch (e) {
DevToolsUtils.reportException("autocomplete",
Error("The frame actor was not found: " + frameActorId));
}
} else {
// This is the general case (non-paused debugger)
hadDebuggee = this.dbg.hasDebuggee(this.evalWindow);
dbgObject = this.dbg.addDebuggee(this.evalWindow);
}
matches = matches.concat(this._webConsoleCommandsCache
.filter(n =>
// filter out `screenshot` command as it is inaccessible without
// the `:` prefix
n !== "screenshot" && n.startsWith(result.matchProp)
));
const result = JSPropertyProvider(dbgObject, environment, request.text,
request.cursor, frameActorId) || {};
if (!hadDebuggee && dbgObject) {
this.dbg.removeDebuggee(this.evalWindow);
}
matches = result.matches || [];
matchProp = result.matchProp;
// We consider '$' as alphanumeric because it is used in the names of some
// helper functions.
const lastNonAlphaIsDot = /[.][a-zA-Z0-9$]*$/.test(reqText);
if (!lastNonAlphaIsDot) {
matches = matches.concat(this._getWebConsoleCommandsCache().filter(n =>
// filter out `screenshot` command as it is inaccessible without
// the `:` prefix
n !== "screenshot" && n.startsWith(result.matchProp)
));
}
}
// Make sure we return an array with unique items, since `matches` can hold twice
@ -1143,7 +1147,7 @@ WebConsoleActor.prototype =
return {
from: this.actorID,
matches,
matchProp: result.matchProp,
matchProp,
};
},
@ -1274,6 +1278,17 @@ WebConsoleActor.prototype =
return helpers;
},
/**
 * Lazily build and cache the list of web console command names.
 *
 * The names are discovered by letting addWebConsoleCommands() populate a
 * throwaway prototype-less sandbox object and reading back its own property
 * names. The result is memoized on this._webConsoleCommandsCache so the
 * sandbox is only constructed once per actor.
 *
 * @returns {Array<String>} The command names available in the console.
 */
_getWebConsoleCommandsCache: function() {
if (!this._webConsoleCommandsCache) {
const helpers = {
// Object.create(null): no inherited Object.prototype keys leak into the list.
sandbox: Object.create(null)
};
addWebConsoleCommands(helpers);
this._webConsoleCommandsCache = Object.getOwnPropertyNames(helpers.sandbox);
}
return this._webConsoleCommandsCache;
},
/**
* Evaluates a string using the debugger API.
*

View File

@ -236,3 +236,4 @@ function getTypedValue(value) {
exports.formatCommand = formatCommand;
exports.isCommand = isCommand;
exports.validCommands = validCommands;

View File

@ -2543,7 +2543,7 @@ nsTextEditorState::HasNonEmptyValue()
if (mTextEditor && mBoundFrame && mEditorInitialized &&
!mIsCommittingComposition) {
bool empty;
nsresult rv = mTextEditor->DocumentIsEmpty(&empty);
nsresult rv = mTextEditor->IsEmpty(&empty);
if (NS_SUCCEEDED(rv)) {
return !empty;
}

View File

@ -1000,13 +1000,7 @@ EditorBase::SetShouldTxnSetSelection(bool aShould)
NS_IMETHODIMP
EditorBase::GetDocumentIsEmpty(bool* aDocumentIsEmpty)
{
*aDocumentIsEmpty = true;
dom::Element* root = GetRoot();
NS_ENSURE_TRUE(root, NS_ERROR_NULL_POINTER);
*aDocumentIsEmpty = !root->HasChildren();
return NS_OK;
return NS_ERROR_NOT_IMPLEMENTED;
}
// XXX: The rule system should tell us which node to select all on (ie, the

View File

@ -839,7 +839,6 @@ SelectAllCommand::IsCommandEnabled(const char* aCommandName,
// You can always select all, unless the selection is editable,
// and the editable region is empty!
*aIsEnabled = true;
bool docIsEmpty;
nsCOMPtr<nsIEditor> editor = do_QueryInterface(aCommandRefCon);
if (!editor) {
@ -849,11 +848,12 @@ SelectAllCommand::IsCommandEnabled(const char* aCommandName,
// You can select all if there is an editor which is non-empty
TextEditor* textEditor = editor->AsTextEditor();
MOZ_ASSERT(textEditor);
rv = textEditor->GetDocumentIsEmpty(&docIsEmpty);
bool isEmpty = false;
rv = textEditor->IsEmpty(&isEmpty);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
*aIsEnabled = !docIsEmpty;
*aIsEnabled = !isEmpty;
return NS_OK;
}

View File

@ -397,7 +397,7 @@ bool
TextEditRules::DocumentIsEmpty()
{
bool retVal = false;
if (!mTextEditor || NS_FAILED(mTextEditor->DocumentIsEmpty(&retVal))) {
if (!mTextEditor || NS_FAILED(mTextEditor->IsEmpty(&retVal))) {
retVal = true;
}

View File

@ -1317,20 +1317,23 @@ TextEditor::GetInputEventTargetContent()
}
nsresult
TextEditor::DocumentIsEmpty(bool* aIsEmpty)
TextEditor::IsEmpty(bool* aIsEmpty) const
{
NS_ENSURE_TRUE(mRules, NS_ERROR_NOT_INITIALIZED);
if (NS_WARN_IF(!mRules)) {
return NS_ERROR_NOT_INITIALIZED;
}
*aIsEmpty = true;
if (mRules->HasBogusNode()) {
*aIsEmpty = true;
return NS_OK;
}
// Even if there is no bogus node, we should be detected as empty document
// Even if there is no bogus node, we should be detected as empty editor
// if all the children are text nodes and these have no content.
Element* rootElement = GetRoot();
if (!rootElement) {
*aIsEmpty = true;
// XXX Why don't we return an error in such case??
return NS_OK;
}
@ -1343,38 +1346,47 @@ TextEditor::DocumentIsEmpty(bool* aIsEmpty)
}
}
*aIsEmpty = true;
return NS_OK;
}
NS_IMETHODIMP
TextEditor::GetDocumentIsEmpty(bool* aDocumentIsEmpty)
{
return DocumentIsEmpty(aDocumentIsEmpty);
nsresult rv = IsEmpty(aDocumentIsEmpty);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
return NS_OK;
}
NS_IMETHODIMP
TextEditor::GetTextLength(int32_t* aCount)
{
NS_ASSERTION(aCount, "null pointer");
MOZ_ASSERT(aCount);
// initialize out params
*aCount = 0;
// special-case for empty document, to account for the bogus node
bool docEmpty;
nsresult rv = GetDocumentIsEmpty(&docEmpty);
NS_ENSURE_SUCCESS(rv, rv);
if (docEmpty) {
bool isEmpty = false;
nsresult rv = IsEmpty(&isEmpty);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
if (isEmpty) {
return NS_OK;
}
dom::Element *rootElement = GetRoot();
NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER);
Element* rootElement = GetRoot();
if (NS_WARN_IF(!rootElement)) {
return NS_ERROR_FAILURE;
}
nsCOMPtr<nsIContentIterator> iter =
do_CreateInstance("@mozilla.org/content/post-content-iterator;1", &rv);
NS_ENSURE_SUCCESS(rv, rv);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
uint32_t totalLength = 0;
iter->Init(rootElement);

View File

@ -98,7 +98,20 @@ public:
nsISelectionController* aSelCon, uint32_t aFlags,
const nsAString& aValue) override;
nsresult DocumentIsEmpty(bool* aIsEmpty);
/**
* IsEmpty() checks whether the editor is empty. If editor has only bogus
* node, returns true. If editor's root element has non-empty text nodes or
* other nodes like <br>, returns false.
*/
nsresult IsEmpty(bool* aIsEmpty) const;
bool IsEmpty() const
{
bool isEmpty = false;
nsresult rv = IsEmpty(&isEmpty);
NS_WARNING_ASSERTION(NS_SUCCEEDED(rv),
"Checking whether the editor is empty failed");
return NS_SUCCEEDED(rv) && isEmpty;
}
virtual nsresult HandleKeyPressEvent(
WidgetKeyboardEvent* aKeyboardEvent) override;

View File

@ -179,79 +179,6 @@ nsStyleUtil::AppendEscapedCSSIdent(const nsAString& aIdent, nsAString& aReturn)
}
}
// unquoted family names must be a sequence of idents
// so escape any parts that require escaping
static void
AppendUnquotedFamilyName(const nsAString& aFamilyName, nsAString& aResult)
{
const char16_t *p, *p_end;
aFamilyName.BeginReading(p);
aFamilyName.EndReading(p_end);
bool moreThanOne = false;
while (p < p_end) {
const char16_t* identStart = p;
while (++p != p_end && *p != ' ')
/* nothing */ ;
nsDependentSubstring ident(identStart, p);
if (!ident.IsEmpty()) {
if (moreThanOne) {
aResult.Append(' ');
}
nsStyleUtil::AppendEscapedCSSIdent(ident, aResult);
moreThanOne = true;
}
++p;
}
}
/* static */ void
nsStyleUtil::AppendEscapedCSSFontFamilyList(
const nsTArray<mozilla::FontFamilyName>& aNames,
nsAString& aResult)
{
size_t i, len = aNames.Length();
for (i = 0; i < len; i++) {
if (i != 0) {
aResult.AppendLiteral(", ");
}
const FontFamilyName& name = aNames[i];
switch (name.mType) {
case eFamily_named:
AppendUnquotedFamilyName(name.mName, aResult);
break;
case eFamily_named_quoted:
AppendEscapedCSSString(name.mName, aResult);
break;
default:
name.AppendToString(aResult);
}
}
}
/* static */ void
nsStyleUtil::AppendEscapedCSSFontFamilyList(
const mozilla::FontFamilyList& aFamilyList,
nsAString& aResult)
{
if (aFamilyList.IsEmpty()) {
FontFamilyType defaultGeneric = aFamilyList.GetDefaultFontType();
// If the font list is empty, then serialize the default generic.
// See also: gfxFontGroup::BuildFontList()
if (defaultGeneric != eFamily_none) {
FontFamilyName(defaultGeneric).AppendToString(aResult);
} else {
MOZ_ASSERT_UNREACHABLE("No fonts to serialize");
}
return;
}
AppendEscapedCSSFontFamilyList(aFamilyList.GetFontlist().get(), aResult);
}
/* static */ void
nsStyleUtil::AppendBitmaskCSSValue(const nsCSSKTableEntry aTable[],
int32_t aMaskedValue,
@ -350,260 +277,6 @@ nsStyleUtil::AppendPaintOrderValue(uint8_t aValue,
}
}
/* static */ void
nsStyleUtil::AppendFontTagAsString(uint32_t aTag, nsAString& aResult)
{
// A font tag (for feature/variation settings) is a 4-char code interpreted
// as a bigendian 32-bit value and stored/processed as a uint32_t.
// To serialize it, we put the four bytes (which are all guaranteed to be
// printable ASCII values) into a string, starting from the high byte of the
// value, then append that to the result with CSS escaping and quotes.
nsAutoString tagStr;
for (int shiftAmount = 24; shiftAmount >= 0; shiftAmount -= 8) {
char c = (aTag >> shiftAmount) & 0xff;
MOZ_ASSERT(isascii(c) && isprint(c),
"parser should have restricted tag to printable ASCII chars");
tagStr.Append(c);
}
AppendEscapedCSSString(tagStr, aResult);
}
/* static */ void
nsStyleUtil::AppendFontFeatureSettings(const nsTArray<gfxFontFeature>& aFeatures,
nsAString& aResult)
{
for (uint32_t i = 0, numFeat = aFeatures.Length(); i < numFeat; i++) {
const gfxFontFeature& feat = aFeatures[i];
if (i != 0) {
aResult.AppendLiteral(", ");
}
AppendFontTagAsString(feat.mTag, aResult);
// omit value if it's 1, implied by default
if (feat.mValue != 1) {
aResult.Append(' ');
aResult.AppendInt(feat.mValue);
}
}
}
/* static */ void
nsStyleUtil::AppendFontFeatureSettings(const nsCSSValue& aSrc,
nsAString& aResult)
{
nsCSSUnit unit = aSrc.GetUnit();
if (unit == eCSSUnit_Normal) {
aResult.AppendLiteral("normal");
return;
}
MOZ_ASSERT(unit == eCSSUnit_PairList || unit == eCSSUnit_PairListDep,
"improper value unit for font-feature-settings:");
nsTArray<gfxFontFeature> featureSettings;
nsLayoutUtils::ComputeFontFeatures(aSrc.GetPairListValue(), featureSettings);
AppendFontFeatureSettings(featureSettings, aResult);
}
/* static */ void
nsStyleUtil::AppendFontVariationSettings(const nsTArray<gfxFontVariation>& aVariations,
nsAString& aResult)
{
for (uint32_t i = 0, numVars = aVariations.Length(); i < numVars; i++) {
const gfxFontVariation& var = aVariations[i];
if (i != 0) {
aResult.AppendLiteral(", ");
}
// output tag
AppendFontTagAsString(var.mTag, aResult);
// output value
aResult.Append(' ');
aResult.AppendFloat(var.mValue);
}
}
/* static */ void
nsStyleUtil::AppendFontVariationSettings(const nsCSSValue& aSrc,
nsAString& aResult)
{
nsCSSUnit unit = aSrc.GetUnit();
if (unit == eCSSUnit_Normal) {
aResult.AppendLiteral("normal");
return;
}
MOZ_ASSERT(unit == eCSSUnit_PairList || unit == eCSSUnit_PairListDep,
"improper value unit for font-variation-settings:");
nsTArray<gfxFontVariation> variationSettings;
nsLayoutUtils::ComputeFontVariations(aSrc.GetPairListValue(),
variationSettings);
AppendFontVariationSettings(variationSettings, aResult);
}
/* static */ void
nsStyleUtil::GetFunctionalAlternatesName(int32_t aFeature,
nsAString& aFeatureName)
{
aFeatureName.Truncate();
nsCSSKeyword key =
nsCSSProps::ValueToKeywordEnum(aFeature,
nsCSSProps::kFontVariantAlternatesFuncsKTable);
NS_ASSERTION(key != eCSSKeyword_UNKNOWN, "bad alternate feature type");
AppendUTF8toUTF16(nsCSSKeywords::GetStringValue(key), aFeatureName);
}
/* static */ void
nsStyleUtil::SerializeFunctionalAlternates(
const nsTArray<gfxAlternateValue>& aAlternates,
nsAString& aResult)
{
nsAutoString funcName, funcParams;
uint32_t numValues = aAlternates.Length();
uint32_t feature = 0;
for (uint32_t i = 0; i < numValues; i++) {
const gfxAlternateValue& v = aAlternates.ElementAt(i);
if (feature != v.alternate) {
feature = v.alternate;
if (!funcName.IsEmpty() && !funcParams.IsEmpty()) {
if (!aResult.IsEmpty()) {
aResult.Append(char16_t(' '));
}
// append the previous functional value
aResult.Append(funcName);
aResult.Append(char16_t('('));
aResult.Append(funcParams);
aResult.Append(char16_t(')'));
}
// function name
GetFunctionalAlternatesName(v.alternate, funcName);
NS_ASSERTION(!funcName.IsEmpty(), "unknown property value name");
// function params
funcParams.Truncate();
AppendEscapedCSSIdent(v.value, funcParams);
} else {
if (!funcParams.IsEmpty()) {
funcParams.AppendLiteral(", ");
}
AppendEscapedCSSIdent(v.value, funcParams);
}
}
// append the previous functional value
if (!funcName.IsEmpty() && !funcParams.IsEmpty()) {
if (!aResult.IsEmpty()) {
aResult.Append(char16_t(' '));
}
aResult.Append(funcName);
aResult.Append(char16_t('('));
aResult.Append(funcParams);
aResult.Append(char16_t(')'));
}
}
/* static */ void
nsStyleUtil::ComputeFunctionalAlternates(const nsCSSValueList* aList,
nsTArray<gfxAlternateValue>& aAlternateValues)
{
gfxAlternateValue v;
aAlternateValues.Clear();
for (const nsCSSValueList* curr = aList; curr != nullptr; curr = curr->mNext) {
// list contains function units
if (curr->mValue.GetUnit() != eCSSUnit_Function) {
continue;
}
// element 0 is the propval in ident form
const nsCSSValue::Array *func = curr->mValue.GetArrayValue();
// lookup propval
nsCSSKeyword key = func->Item(0).GetKeywordValue();
NS_ASSERTION(key != eCSSKeyword_UNKNOWN, "unknown alternate property value");
int32_t alternate;
if (!nsCSSProps::FindKeyword(key,
nsCSSProps::kFontVariantAlternatesFuncsKTable,
alternate)) {
MOZ_ASSERT_UNREACHABLE("keyword not a font-variant-alternates value");
continue;
}
v.alternate = alternate;
// other elements are the idents associated with the propval
// append one alternate value for each one
uint32_t numElems = func->Count();
for (uint32_t i = 1; i < numElems; i++) {
const nsCSSValue& value = func->Item(i);
NS_ASSERTION(value.GetUnit() == eCSSUnit_Ident,
"weird unit found in variant alternate");
if (value.GetUnit() != eCSSUnit_Ident) {
continue;
}
value.GetStringValue(v.value);
aAlternateValues.AppendElement(v);
}
}
}
static void
AppendSerializedUnicodePoint(uint32_t aCode, nsACString& aBuf)
{
aBuf.Append(nsPrintfCString("%0X", aCode));
}
// A unicode-range: descriptor is represented as an array of integers,
// to be interpreted as a sequence of pairs: min max min max ...
// It is in source order. (Possibly it should be sorted and overlaps
// consolidated, but right now we don't do that.)
/* static */ void
nsStyleUtil::AppendUnicodeRange(const nsCSSValue& aValue, nsAString& aResult)
{
MOZ_ASSERT(aValue.GetUnit() == eCSSUnit_Null ||
aValue.GetUnit() == eCSSUnit_Array,
"improper value unit for unicode-range:");
aResult.Truncate();
if (aValue.GetUnit() != eCSSUnit_Array)
return;
nsCSSValue::Array const & sources = *aValue.GetArrayValue();
nsAutoCString buf;
MOZ_ASSERT(sources.Count() % 2 == 0,
"odd number of entries in a unicode-range: array");
for (uint32_t i = 0; i < sources.Count(); i += 2) {
uint32_t min = sources[i].GetIntValue();
uint32_t max = sources[i+1].GetIntValue();
// We don't try to replicate the U+XX?? notation.
buf.AppendLiteral("U+");
AppendSerializedUnicodePoint(min, buf);
if (min != max) {
buf.Append('-');
AppendSerializedUnicodePoint(max, buf);
}
buf.AppendLiteral(", ");
}
buf.Truncate(buf.Length() - 2); // remove the last comma-space
CopyASCIItoUTF16(buf, aResult);
}
/* static */ void
nsStyleUtil::AppendStepsTimingFunction(nsTimingFunction::Type aType,
uint32_t aSteps,

View File

@ -56,24 +56,9 @@ public:
static void AppendEscapedCSSIdent(const nsAString& aIdent,
nsAString& aResult);
static void
AppendEscapedCSSFontFamilyList(const mozilla::FontFamilyList& aFamilyList,
nsAString& aResult);
static void
AppendEscapedCSSFontFamilyList(mozilla::SharedFontList* aFontlist,
nsAString& aResult)
{
AppendEscapedCSSFontFamilyList(aFontlist->mNames, aResult);
}
static void
AppendFontSlantStyle(const mozilla::FontSlantStyle&, nsAString& aResult);
private:
static void
AppendEscapedCSSFontFamilyList(const nsTArray<mozilla::FontFamilyName>& aNames,
nsAString& aResult);
public:
// Append a bitmask-valued property's value(s) (space-separated) to aResult.
static void AppendBitmaskCSSValue(const nsCSSKTableEntry aTable[],
@ -86,22 +71,6 @@ public:
static void AppendPaintOrderValue(uint8_t aValue, nsAString& aResult);
static void AppendFontTagAsString(uint32_t aTag, nsAString& aResult);
static void AppendFontFeatureSettings(const nsTArray<gfxFontFeature>& aFeatures,
nsAString& aResult);
static void AppendFontFeatureSettings(const nsCSSValue& src,
nsAString& aResult);
static void AppendFontVariationSettings(const nsTArray<gfxFontVariation>& aVariations,
nsAString& aResult);
static void AppendFontVariationSettings(const nsCSSValue& src,
nsAString& aResult);
static void AppendUnicodeRange(const nsCSSValue& aValue, nsAString& aResult);
static void AppendCSSNumber(float aNumber, nsAString& aResult)
{
aResult.AppendFloat(aNumber);
@ -119,20 +88,6 @@ public:
nsTimingFunction::Type aType,
nsAString& aResult);
// convert bitmask value to keyword name for a functional alternate
static void GetFunctionalAlternatesName(int32_t aFeature,
nsAString& aFeatureName);
// Append functional font-variant-alternates values to string
static void
SerializeFunctionalAlternates(const nsTArray<gfxAlternateValue>& aAlternates,
nsAString& aResult);
// List of functional font-variant-alternates values to feature/value pairs
static void
ComputeFunctionalAlternates(const nsCSSValueList* aList,
nsTArray<gfxAlternateValue>& aAlternateValues);
/*
* Convert an author-provided floating point number to an integer (0
* ... 255) appropriate for use in the alpha component of a color.

View File

@ -4,7 +4,6 @@
const Cm = Components.manager;
ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
ChromeUtils.import("resource://gre/modules/Services.jsm");
function processTerminated() {

View File

@ -266,11 +266,7 @@ class TupBackend(CommonBackend):
self._gtests = '$(MOZ_OBJ_ROOT)/<gtest>'
self._default_group = '$(MOZ_OBJ_ROOT)/<default>'
# The two rust libraries in the tree share many prerequisites, so we need
# to prune common dependencies and therefore build all rust from the same
# Tupfile.
self._rust_outputs = set()
self._rust_backend_file = self._get_backend_file('toolkit/library/rust')
self._built_in_addons = set()
self._built_in_addons_file = 'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'
@ -806,8 +802,12 @@ class TupBackend(CommonBackend):
output_key = tuple(outputs)
if output_key not in self._rust_outputs:
# The two rust libraries in the tree share many prerequisites,
# so we need to prune common dependencies and therefore build
# all rust from the same Tupfile.
rust_backend_file = self._get_backend_file('toolkit/library/rust')
self._rust_outputs.add(output_key)
self._rust_backend_file.rule(
rust_backend_file.rule(
command,
inputs=sorted(inputs),
outputs=outputs,
@ -818,8 +818,7 @@ class TupBackend(CommonBackend):
for dst, link in invocation['links'].iteritems():
self._rust_outputs.add(output_key)
self._rust_backend_file.symlink_rule(link, dst,
self._rust_libs)
rust_backend_file.symlink_rule(link, dst, self._rust_libs)
for val in enumerate(invocations):
_process(*val)

View File

@ -12,7 +12,6 @@ ChromeUtils.import("resource://gre/modules/IndexedDB.jsm");
XPCOMUtils.defineLazyModuleGetters(this, {
ContextualIdentityService: "resource://gre/modules/ContextualIdentityService.jsm",
ExtensionStorage: "resource://gre/modules/ExtensionStorage.jsm",
ExtensionUtils: "resource://gre/modules/ExtensionUtils.jsm",
Services: "resource://gre/modules/Services.jsm",
OS: "resource://gre/modules/osfile.jsm",
});
@ -33,6 +32,134 @@ const IDB_MIGRATE_RESULT_HISTOGRAM = "WEBEXT_STORAGE_LOCAL_IDB_MIGRATE_RESULT_CO
const BACKEND_ENABLED_PREF = "extensions.webextensions.ExtensionStorageIDB.enabled";
const IDB_MIGRATED_PREF_BRANCH = "extensions.webextensions.ExtensionStorageIDB.migrated";
var DataMigrationTelemetry = {
initialized: false,
lazyInit() {
if (this.initialized) {
return;
}
this.initialized = true;
// Ensure that these telemetry events category is enabled.
Services.telemetry.setEventRecordingEnabled("extensions.data", true);
this.resultHistogram = Services.telemetry.getHistogramById(IDB_MIGRATE_RESULT_HISTOGRAM);
},
/**
* Get a trimmed version of the given string if it is longer than 80 chars.
*
* @param {string} str
* The original string content.
*
* @returns {string}
* The trimmed version of the string when longer than 80 chars, or the given string
* unmodified otherwise.
*/
getTrimmedString(str) {
if (str.length <= 80) {
return str;
}
const length = str.length;
// Trim the string to prevent a flood of warnings messages logged internally by recordEvent,
// the trimmed version is going to be composed by the first 40 chars and the last 37 and 3 dots
// that joins the two parts, to visually indicate that the string has been trimmed.
return `${str.slice(0, 40)}...${str.slice(length - 37, length)}`;
},
/**
* Get the DOMException error name for a given error object.
*
* @param {Error | undefined} error
* The Error object to convert into a string, or undefined if there was no error.
*
* @returns {string | undefined}
* The DOMException error name (sliced to a maximum of 80 chars),
* "OtherError" if the error object is not a DOMException instance,
* or `undefined` if there wasn't an error.
*/
getErrorName(error) {
if (!error) {
return undefined;
}
if (error instanceof DOMException) {
if (error.name.length > 80) {
return this.getTrimmedString(error.name);
}
return error.name;
}
return "OtherError";
},
/**
* Record telemetry related to a data migration result.
*
* @param {object} telemetryData
* @param {string} telemetryData.backend
* The backend selected ("JSONFile" or "IndexedDB").
* @param {boolean} telemetryData.dataMigrated
* Old extension data has been migrated successfully.
* @param {string} telemetryData.extensionId
* The id of the extension migrated.
* @param {Error | undefined} telemetryData.error
* The error raised during the data migration, if any.
* @param {boolean} telemetryData.hasJSONFile
* The extension has an existing JSONFile to migrate.
* @param {boolean} telemetryData.hasOldData
* The extension's JSONFile wasn't empty.
* @param {string} telemetryData.histogramCategory
* The histogram category for the result ("success" or "failure").
*/
recordResult(telemetryData) {
try {
const {
backend,
dataMigrated,
extensionId,
error,
hasJSONFile,
hasOldData,
histogramCategory,
} = telemetryData;
this.lazyInit();
this.resultHistogram.add(histogramCategory);
const extra = {backend};
if (dataMigrated != null) {
extra.data_migrated = dataMigrated ? "y" : "n";
}
if (hasJSONFile != null) {
extra.has_jsonfile = hasJSONFile ? "y" : "n";
}
if (hasOldData != null) {
extra.has_olddata = hasOldData ? "y" : "n";
}
if (error) {
extra.error_name = this.getErrorName(error);
}
Services.telemetry.recordEvent("extensions.data", "migrateResult", "storageLocal",
this.getTrimmedString(extensionId), extra);
} catch (err) {
// Report any telemetry error on the browser console, but
// we treat it as a non-fatal error and we don't re-throw
// it to the caller.
Cu.reportError(err);
}
},
};
class ExtensionStorageLocalIDB extends IndexedDB {
onupgradeneeded(event) {
if (event.oldVersion < 1) {
@ -92,10 +219,7 @@ class ExtensionStorageLocalIDB extends IndexedDB {
} catch (err) {
transaction.abort();
// Ensure that the error we throw is converted into an ExtensionError
// (e.g. DataCloneError instances raised from the internal IndexedDB
// operation have to be converted to be accessible to the extension code).
throw new ExtensionUtils.ExtensionError(String(err));
throw err;
}
}
@ -238,8 +362,9 @@ async function migrateJSONFileData(extension, storagePrincipal) {
let idbConn;
let jsonFile;
let hasEmptyIDB;
let histogram = Services.telemetry.getHistogramById(IDB_MIGRATE_RESULT_HISTOGRAM);
let nonFatalError;
let dataMigrateCompleted = false;
let hasOldData = false;
const isMigratedExtension = Services.prefs.getBoolPref(`${IDB_MIGRATED_PREF_BRANCH}.${extension.id}`, false);
if (isMigratedExtension) {
@ -262,7 +387,12 @@ async function migrateJSONFileData(extension, storagePrincipal) {
extension.logWarning(
`storage.local data migration cancelled, unable to open IDB connection: ${err.message}::${err.stack}`);
histogram.add("failure");
DataMigrationTelemetry.recordResult({
backend: "JSONFile",
extensionId: extension.id,
error: err,
histogramCategory: "failure",
});
throw err;
}
@ -288,6 +418,7 @@ async function migrateJSONFileData(extension, storagePrincipal) {
const data = {};
for (let [key, value] of jsonFile.data.entries()) {
data[key] = value;
hasOldData = true;
}
await idbConn.set(data);
@ -306,10 +437,23 @@ async function migrateJSONFileData(extension, storagePrincipal) {
// from being enabled for this session).
Services.qms.clearStoragesForPrincipal(storagePrincipal);
histogram.add("failure");
DataMigrationTelemetry.recordResult({
backend: "JSONFile",
dataMigrated: dataMigrateCompleted,
extensionId: extension.id,
error: err,
hasJSONFile: oldStorageExists,
hasOldData,
histogramCategory: "failure",
});
throw err;
}
// This error is not preventing the extension from switching to the IndexedDB backend,
// but we may still want to know that it has been triggered and include it into the
// telemetry data collected for the extension.
nonFatalError = err;
} finally {
// Clear the jsonFilePromise cached by the ExtensionStorage.
await ExtensionStorage.clearCachedFile(extension.id).catch(err => {
@ -317,8 +461,6 @@ async function migrateJSONFileData(extension, storagePrincipal) {
});
}
histogram.add("success");
// If the IDB backend has been enabled, rename the old storage.local data file, but
// do not prevent the extension from switching to the IndexedDB backend if it fails.
if (oldStorageExists && dataMigrateCompleted) {
@ -331,11 +473,22 @@ async function migrateJSONFileData(extension, storagePrincipal) {
await OS.File.move(oldStoragePath, openInfo.path);
}
} catch (err) {
nonFatalError = err;
extension.logWarning(err.message);
}
}
Services.prefs.setBoolPref(`${IDB_MIGRATED_PREF_BRANCH}.${extension.id}`, true);
DataMigrationTelemetry.recordResult({
backend: "IndexedDB",
dataMigrated: dataMigrateCompleted,
extensionId: extension.id,
error: nonFatalError,
hasJSONFile: oldStorageExists,
hasOldData,
histogramCategory: "success",
});
}
/**

View File

@ -182,15 +182,25 @@ this.storage = class extends ExtensionAPI {
const local = {};
for (let method of ["get", "set", "remove", "clear"]) {
local[method] = async function(...args) {
if (!promiseStorageLocalBackend) {
promiseStorageLocalBackend = getStorageLocalBackend();
try {
if (!promiseStorageLocalBackend) {
promiseStorageLocalBackend = getStorageLocalBackend();
}
const backend = await promiseStorageLocalBackend.catch(err => {
// Clear the cached promise if it has been rejected.
promiseStorageLocalBackend = null;
throw err;
});
// Let the outer try to catch rejections returned by the backend methods.
const result = await backend[method](...args);
return result;
} catch (err) {
// Ensure that the error we throw is converted into an ExtensionError
// (e.g. DataCloneError instances raised from the internal IndexedDB
// operation have to be converted to be accessible to the extension code).
throw new ExtensionUtils.ExtensionError(String(err));
}
const backend = await promiseStorageLocalBackend.catch(err => {
// Clear the cached promise if it has been rejected.
promiseStorageLocalBackend = null;
throw err;
});
return backend[method](...args);
};
}

View File

@ -52,6 +52,7 @@
"additional_backgrounds": {
"type": "array",
"items": { "$ref": "ImageDataOrExtensionURL" },
"maxItems": 15,
"optional": true
},
"headerURL": {
@ -550,6 +551,7 @@
"right bottom", "right center", "right top"
]
},
"maxItems": 15,
"optional": true
},
"additional_backgrounds_tiling": {
@ -558,6 +560,7 @@
"type": "string",
"enum": ["no-repeat", "repeat", "repeat-x", "repeat-y"]
},
"maxItems": 15,
"optional": true
}
},

View File

@ -7,7 +7,12 @@
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
ChromeUtils.import("resource://gre/modules/ExtensionStorage.jsm");
ChromeUtils.import("resource://gre/modules/ExtensionStorageIDB.jsm");
ChromeUtils.import("resource://gre/modules/TelemetryController.jsm");
const {
ExtensionStorageIDB,
DataMigrationTelemetry,
} = ChromeUtils.import("resource://gre/modules/ExtensionStorageIDB.jsm", {});
XPCOMUtils.defineLazyModuleGetters(this, {
OS: "resource://gre/modules/osfile.jsm",
@ -28,6 +33,11 @@ const {
IDB_MIGRATE_RESULT_HISTOGRAM,
} = ExtensionStorageIDB;
const CATEGORIES = ["success", "failure"];
const EVENT_CATEGORY = "extensions.data";
const EVENT_OBJECT = "storageLocal";
const EVENT_METHODS = ["migrateResult"];
const LEAVE_STORAGE_PREF = "extensions.webextensions.keepStorageOnUninstall";
const LEAVE_UUID_PREF = "extensions.webextensions.keepUuidOnUninstall";
async function createExtensionJSONFileWithData(extensionId, data) {
await ExtensionStorage.set(extensionId, data);
@ -53,11 +63,32 @@ function assertMigrationHistogramCount(category, expectedCount) {
`Got the expected count on category "${category}" for histogram ${IDB_MIGRATE_RESULT_HISTOGRAM}`);
}
// Assert that the "extensions.data"/"storageLocal" migration events recorded
// for the given extensionId match exactly the expected [{method, extra}] list.
function assertTelemetryEvents(extensionId, expectedEvents) {
  const snapshot = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, true);
  ok(snapshot.parent && snapshot.parent.length > 0, "Got parent telemetry events in the snapshot");
  // A raw event is [timestamp, category, method, object, value, extra].
  const migrateEvents = snapshot.parent
    .filter(event => {
      const [, category, method, object, value] = event;
      return category === EVENT_CATEGORY &&
             EVENT_METHODS.includes(method) &&
             object === EVENT_OBJECT &&
             value === extensionId;
    })
    .map(([, , method, , , extra]) => ({method, extra}));
  Assert.deepEqual(migrateEvents, expectedEvents, "Got the expected telemetry events");
}
add_task(async function setup() {
  // Enable the IndexedDB storage.local backend and make sure the fake
  // low-disk-space condition is off before the AddonManager starts.
  Services.prefs.setBoolPref(ExtensionStorageIDB.BACKEND_ENABLED_PREF, true);
  setLowDiskMode(false);
  await promiseStartupManager();
  // Telemetry test setup needed to ensure that the builtin events are defined
  // and they can be collected and verified.
  await TelemetryController.testSetup();
});
// Test that the old data is migrated successfully to the new storage backend
@ -65,6 +96,11 @@ add_task(async function setup() {
add_task(async function test_storage_local_data_migration() {
const EXTENSION_ID = "extension-to-be-migrated@mozilla.org";
// Keep the extension storage and the uuid on uninstall, to verify that no telemetry events
// are being sent for an already migrated extension.
Services.prefs.setBoolPref(LEAVE_STORAGE_PREF, true);
Services.prefs.setBoolPref(LEAVE_UUID_PREF, true);
const data = {
"test_key_string": "test_value1",
"test_key_number": 1000,
@ -91,7 +127,7 @@ add_task(async function test_storage_local_data_migration() {
clearMigrationHistogram();
let extension = ExtensionTestUtils.loadExtension({
let extensionDefinition = {
useAddonManager: "temporary",
manifest: {
permissions: ["storage"],
@ -102,7 +138,9 @@ add_task(async function test_storage_local_data_migration() {
},
},
background,
});
};
let extension = ExtensionTestUtils.loadExtension(extensionDefinition);
await extension.startup();
@ -127,6 +165,43 @@ add_task(async function test_storage_local_data_migration() {
assertMigrationHistogramCount("success", 1);
assertMigrationHistogramCount("failure", 0);
assertTelemetryEvents(EXTENSION_ID, [
{
method: "migrateResult",
extra: {
backend: "IndexedDB",
data_migrated: "y",
has_jsonfile: "y",
has_olddata: "y",
},
},
]);
await extension.unload();
equal(Services.prefs.getBoolPref(`${IDB_MIGRATED_PREF_BRANCH}.${EXTENSION_ID}`, false),
true, `${IDB_MIGRATED_PREF_BRANCH} should still be true on keepStorageOnUninstall=true`);
// Re-install the extension and check that no telemetry events are being sent
// for an already migrated extension.
extension = ExtensionTestUtils.loadExtension(extensionDefinition);
await extension.startup();
await extension.awaitMessage("storage-local-data-migrated");
// The histogram values are unmodified.
assertMigrationHistogramCount("success", 1);
assertMigrationHistogramCount("failure", 0);
// No new telemetry events recorded for the extension.
const snapshot = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, true);
ok(!snapshot.parent || snapshot.parent.length === 0,
"No telemetry events should be recorded for an already migrated extension");
Services.prefs.setBoolPref(LEAVE_STORAGE_PREF, false);
Services.prefs.setBoolPref(LEAVE_UUID_PREF, false);
await extension.unload();
equal(Services.prefs.getPrefType(`${IDB_MIGRATED_PREF_BRANCH}.${EXTENSION_ID}`),
@ -134,6 +209,61 @@ add_task(async function test_storage_local_data_migration() {
`Got the ${IDB_MIGRATED_PREF_BRANCH} preference has been cleared on addon uninstall`);
});
// Test that the extensionId included in the telemetry event is being trimmed down to 80 chars
// as expected.
// Verify that an extensionId longer than 80 chars is trimmed in the recorded
// telemetry event value, while the migration itself still succeeds.
add_task(async function test_extensionId_trimmed_in_telemetry_event() {
  // Generated extensionId in email-like format, longer than 80 chars.
  const EXTENSION_ID = `long.extension.id@${Array(80).fill("a").join("")}`;
  const data = {"test_key_string": "test_value"};
  // Store some fake data in the storage.local file backend before starting the extension.
  await createExtensionJSONFileWithData(EXTENSION_ID, data);
  // Runs inside the extension: confirms the migrated data is readable
  // through the storage.local API before signalling the test.
  async function background() {
    const storedData = await browser.storage.local.get("test_key_string");
    browser.test.assertEq("test_value", storedData.test_key_string,
                          "Got the expected data after the storage.local data migration");
    browser.test.sendMessage("storage-local-data-migrated");
  }
  let extension = ExtensionTestUtils.loadExtension({
    manifest: {
      permissions: ["storage"],
      applications: {
        gecko: {
          id: EXTENSION_ID,
        },
      },
    },
    background,
  });
  await extension.startup();
  await extension.awaitMessage("storage-local-data-migrated");
  // The telemetry event's value must carry the trimmed (80-char) id,
  // produced by the same helper used in production code.
  const expectedTrimmedExtensionId = DataMigrationTelemetry.getTrimmedString(EXTENSION_ID);
  equal(expectedTrimmedExtensionId.length, 80, "The trimmed version of the extensionId should be 80 chars long");
  assertTelemetryEvents(expectedTrimmedExtensionId, [
    {
      method: "migrateResult",
      extra: {
        backend: "IndexedDB",
        data_migrated: "y",
        has_jsonfile: "y",
        has_olddata: "y",
      },
    },
  ]);
  await extension.unload();
});
// Test that if the old JSONFile data file is corrupted and the old data
// can't be successfully migrated to the new storage backend, then:
// - the new storage backend for that extension is still initialized and enabled
@ -203,6 +333,18 @@ add_task(async function test_storage_local_corrupted_data_migration() {
assertMigrationHistogramCount("success", 1);
assertMigrationHistogramCount("failure", 0);
assertTelemetryEvents(EXTENSION_ID, [
{
method: "migrateResult",
extra: {
backend: "IndexedDB",
data_migrated: "y",
has_jsonfile: "y",
has_olddata: "n",
},
},
]);
await extension.unload();
});
@ -250,12 +392,25 @@ add_task(async function test_storage_local_data_migration_quota_exceeded_error()
await extension.unload();
assertTelemetryEvents(EXTENSION_ID, [
{
method: "migrateResult",
extra: {
backend: "JSONFile",
error_name: "QuotaExceededError",
},
},
]);
assertMigrationHistogramCount("success", 0);
assertMigrationHistogramCount("failure", 1);
});
// Final cleanup task: restore all prefs touched by this test file and shut
// down the AddonManager and Telemetry test harness.
add_task(async function test_storage_local_data_migration_clear_pref() {
  Services.prefs.clearUserPref(LEAVE_STORAGE_PREF);
  Services.prefs.clearUserPref(LEAVE_UUID_PREF);
  Services.prefs.clearUserPref(ExtensionStorageIDB.BACKEND_ENABLED_PREF);
  setLowDiskMode(false);
  await promiseShutdownManager();
  // Mirrors the TelemetryController.testSetup() call done in setup().
  await TelemetryController.testShutdown();
});

View File

@ -69,6 +69,28 @@ activity_stream:
page: about:home or about_newtab - the page where the event occurred
user_prefs: An integer representing a user's A-S settings.
extensions.data:
migrateResult:
objects: ["storageLocal"]
bug_numbers: [1470213]
notification_emails: ["addons-dev-internal@mozilla.com"]
expiry_version: "70"
record_in_processes: ["main"]
release_channel_collection: opt-out
extra_keys:
backend: The selected backend ("JSONFile" / "IndexedDB").
data_migrated: The old extension data has been migrated ("y" / "n").
error_name: >
A DOMException error name if any ("OtherError" for unknown errors).
The error has been fatal if the `backend` extra key is "JSONFile",
otherwise it is a non-fatal error which didn't prevent the
extension from switching to the IndexedDB backend.
has_jsonfile: The extension has a JSONFile ("y" / "n").
has_olddata: The extension had some data stored in the JSONFile ("y" / "n").
description: >
These events are sent when an extension is migrating its data to the new IndexedDB backend;
the value of this event is the addon id.
navigation:
search:
objects: ["about_home", "about_newtab", "contextmenu", "oneoff",

View File

@ -1,8 +1,10 @@
// Proxy file in order to define generic data types, to avoid binding with system headers
typedef __SIZE_TYPE__ size_t;
namespace std {
typedef unsigned long size_t;
typedef size_t size_t;
template <class T>
class vector {
@ -93,10 +95,10 @@ int abort() { return 0; }
if (!(x)) \
(void)abort()
std::size_t strlen(const char *s);
char *strncat(char *s1, const char *s2, std::size_t n);
size_t strlen(const char *s);
char *strncat(char *s1, const char *s2, size_t);
void free(void *ptr);
void *malloc(std::size_t size);
void *malloc(size_t size);
void *memset(void *b, int c, std::size_t len);
void *memset(void *b, int c, size_t len);