Merge m-c to b2g-inbound. a=merge

Ryan VanderMeulen 2014-08-15 17:06:19 -04:00
commit 8f9c0257b6
354 changed files with 4874 additions and 2044 deletions

View File

@ -657,9 +657,6 @@
; [Layout Engine Resources]
; Style Sheets, Graphics and other Resources used by the layout engine.
@BINPATH@/res/EditorOverride.css
@BINPATH@/res/caret_left.svg
@BINPATH@/res/caret_middle.svg
@BINPATH@/res/caret_right.svg
@BINPATH@/res/contenteditable.css
@BINPATH@/res/designmode.css
@BINPATH@/res/ImageDocument.css
@ -683,6 +680,21 @@
@BINPATH@/res/table-remove-row-active.gif
@BINPATH@/res/table-remove-row-hover.gif
@BINPATH@/res/table-remove-row.gif
@BINPATH@/res/text_caret.png
@BINPATH@/res/text_caret@1.5x.png
@BINPATH@/res/text_caret@2.25x.png
@BINPATH@/res/text_caret@2x.png
@BINPATH@/res/text_caret_tilt_left.png
@BINPATH@/res/text_caret_tilt_left@1.5x.png
@BINPATH@/res/text_caret_tilt_left@2.25x.png
@BINPATH@/res/text_caret_tilt_left@2x.png
@BINPATH@/res/text_caret_tilt_right.png
@BINPATH@/res/text_caret_tilt_right@1.5x.png
@BINPATH@/res/text_caret_tilt_right@2.25x.png
@BINPATH@/res/text_caret_tilt_right@2x.png
@BINPATH@/res/text_selection_handle.png
@BINPATH@/res/text_selection_handle@1.5.png
@BINPATH@/res/text_selection_handle@2.png
@BINPATH@/res/grabber.gif
#ifdef XP_MACOSX
@BINPATH@/res/cursors/*

View File

@ -1280,8 +1280,8 @@ pref("devtools.appmanager.enabled", true);
pref("devtools.appmanager.lastTab", "help");
pref("devtools.appmanager.manifestEditor.enabled", true);
// Disable devtools webide until bug 1007059
pref("devtools.webide.enabled", false);
// Enable DevTools WebIDE by default
pref("devtools.webide.enabled", true);
// Toolbox preferences
pref("devtools.toolbox.footer.height", 250);

View File

@ -2,7 +2,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
@import url("chrome://browser/skin/in-content/common.css");
@import url("chrome://global/skin/in-content/common.css");
body {
display: flex;

View File

@ -23,6 +23,10 @@ XPCOMUtils.defineLazyModuleGetter(this, "ShortcutUtils",
"resource://gre/modules/ShortcutUtils.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "GMPInstallManager",
"resource://gre/modules/GMPInstallManager.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "ContentSearch",
"resource:///modules/ContentSearch.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "AboutHome",
"resource:///modules/AboutHome.jsm");
XPCOMUtils.defineLazyServiceGetter(this, "gDNSService",
"@mozilla.org/network/dns-service;1",
"nsIDNSService");
@ -30,7 +34,6 @@ XPCOMUtils.defineLazyServiceGetter(this, "gDNSService",
const nsIWebNavigation = Ci.nsIWebNavigation;
var gLastBrowserCharset = null;
var gPrevCharset = null;
var gProxyFavIcon = null;
var gLastValidURLStr = "";
var gInPrintPreviewMode = false;
@ -242,7 +245,6 @@ XPCOMUtils.defineLazyGetter(this, "PageMenu", function() {
* one listener that calls all real handlers.
*/
function pageShowEventHandlers(persisted) {
charsetLoadListener();
XULBrowserWindow.asyncUpdateUI();
// The PluginClickToPlay events are not fired when navigating using the
@ -2588,7 +2590,7 @@ let BrowserOnClick = {
anchorTarget.classList.contains("newtab-link")) {
event.preventDefault();
let where = whereToOpenLink(event, false, false);
openUILinkIn(anchorTarget.href, where);
openLinkIn(anchorTarget.href, where, { charset: ownerDoc.characterSet });
}
},
@ -3095,8 +3097,17 @@ const BrowserSearch = {
}
#endif
let openSearchPageIfFieldIsNotActive = function(aSearchBar) {
if (!aSearchBar || document.activeElement != aSearchBar.textbox.inputField)
let doc = gBrowser.selectedBrowser.contentDocument;
let url = doc.documentURI.toLowerCase();
let mm = gBrowser.selectedBrowser.messageManager;
if (url === "about:home") {
AboutHome.focusInput(mm);
} else if (url === "about:newtab") {
ContentSearch.focusInput(mm);
} else if (!aSearchBar || document.activeElement != aSearchBar.textbox.inputField) {
openUILinkIn("about:home", "current");
}
};
let searchBar = this.searchBar;
@ -3511,9 +3522,7 @@ function updateCharacterEncodingMenuState()
// gBrowser is null on Mac when the menubar shows in the context of
// non-browser windows. The above elements may be null depending on
// what parts of the menubar are present. E.g. no app menu on Mac.
if (gBrowser &&
gBrowser.docShell &&
gBrowser.docShell.mayEnableCharacterEncodingMenu) {
if (gBrowser && gBrowser.selectedBrowser.mayEnableCharacterEncodingMenu) {
if (charsetMenu) {
charsetMenu.removeAttribute("disabled");
}
@ -5358,8 +5367,7 @@ function handleDroppedLink(event, url, name)
function BrowserSetForcedCharacterSet(aCharset)
{
if (aCharset) {
gBrowser.docShell.gatherCharsetMenuTelemetry();
gBrowser.docShell.charset = aCharset;
gBrowser.selectedBrowser.characterSet = aCharset;
// Save the forced character-set
if (!PrivateBrowsingUtils.isWindowPrivate(window))
PlacesUtils.setCharsetForURI(getWebNavigation().currentURI, aCharset);
@ -5372,35 +5380,11 @@ function BrowserCharsetReload()
BrowserReloadWithFlags(nsIWebNavigation.LOAD_FLAGS_CHARSET_CHANGE);
}
function charsetMenuGetElement(parent, charset) {
return parent.getElementsByAttribute("charset", charset)[0];
}
function UpdateCurrentCharset(target) {
// extract the charset from DOM
var wnd = document.commandDispatcher.focusedWindow;
if ((window == wnd) || (wnd == null)) wnd = window.content;
// Uncheck previous item
if (gPrevCharset) {
var pref_item = charsetMenuGetElement(target, gPrevCharset);
if (pref_item)
pref_item.setAttribute('checked', 'false');
}
var menuitem = charsetMenuGetElement(target, CharsetMenu.foldCharset(wnd.document.characterSet));
if (menuitem) {
menuitem.setAttribute('checked', 'true');
}
}
function charsetLoadListener() {
let currCharset = gBrowser.selectedBrowser.characterSet;
let charset = CharsetMenu.foldCharset(currCharset);
if (charset.length > 0 && (charset != gLastBrowserCharset)) {
gPrevCharset = gLastBrowserCharset;
gLastBrowserCharset = charset;
for (let menuItem of target.getElementsByTagName("menuitem")) {
let isSelected = menuItem.getAttribute("charset") ===
CharsetMenu.foldCharset(gBrowser.selectedBrowser.characterSet);
menuItem.setAttribute("checked", isSelected);
}
}
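For reference, a minimal sketch of the checked-state update this hunk moves onto gBrowser.selectedBrowser, assembled from the retained lines above (assumed, not a verbatim copy of the patched function; `target` is taken to be the menupopup handed in by the menu's popupshowing handler):

    // Sketch: walk every menuitem and check the one whose charset attribute
    // matches the folded characterSet of the selected browser.
    function UpdateCurrentCharset(target) {
      let current = CharsetMenu.foldCharset(gBrowser.selectedBrowser.characterSet);
      for (let menuItem of target.getElementsByTagName("menuitem")) {
        menuItem.setAttribute("checked", menuItem.getAttribute("charset") === current);
      }
    }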

View File

@ -106,6 +106,9 @@ let AboutHomeListener = {
case "AboutHome:Update":
this.onUpdate(aMessage.data);
break;
case "AboutHome:FocusInput":
this.onFocusInput();
break;
}
},
@ -138,6 +141,7 @@ let AboutHomeListener = {
doc.documentElement.setAttribute("hasBrowserHandlers", "true");
let self = this;
addMessageListener("AboutHome:Update", self);
addMessageListener("AboutHome:FocusInput", self);
addEventListener("click", this.onClick, true);
addEventListener("pagehide", function onPageHide(event) {
if (event.target.defaultView.frameElement)
@ -212,6 +216,10 @@ let AboutHomeListener = {
break;
}
},
onFocusInput: function () {
content.document.getElementById("searchText").focus();
},
};
AboutHomeListener.init(this);
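A minimal sketch of the Cmd/Ctrl+K round trip these two hunks set up, assuming AboutHome.focusInput(mm) does nothing more than send the "AboutHome:FocusInput" message over the supplied message manager (the message name and the content-side handler come from the diff; the chrome-side body is an assumption):

    // Chrome side (browser.js): route focus based on the visible page.
    let mm = gBrowser.selectedBrowser.messageManager;
    if (url === "about:home") {
      AboutHome.focusInput(mm);   // assumed: mm.sendAsyncMessage("AboutHome:FocusInput")
    }

    // Content side (content.js): the listener added in this patch reacts by
    // focusing the in-page search box.
    addMessageListener("AboutHome:FocusInput", AboutHomeListener);
    // ...which dispatches to:
    //   onFocusInput: function () {
    //     content.document.getElementById("searchText").focus();
    //   }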

View File

@ -76,6 +76,10 @@ let gSearch = {
}
},
onFocusInput: function () {
this._nodes.text.focus();
},
_nodeIDSuffixes: [
"form",
"logo",

View File

@ -419,6 +419,22 @@ let gTests = [
});
}
},
{
desc: "Cmd+k should focus the search bar element",
setup: function () {},
run: Task.async(function* () {
let doc = gBrowser.selectedTab.linkedBrowser.contentDocument;
let logo = doc.getElementById("brandLogo");
let searchInput = doc.getElementById("searchText");
EventUtils.synthesizeMouseAtCenter(logo, {});
isnot(searchInput, doc.activeElement, "Search input should not be the active element.");
EventUtils.synthesizeKey("k", { accelKey: true });
yield promiseWaitForCondition(() => doc.activeElement === searchInput);
is(searchInput, doc.activeElement, "Search input should be the active element.");
})
},
];

View File

@ -82,6 +82,12 @@ function waitForCondition(condition, nextTest, errorMsg) {
var moveOn = function() { clearInterval(interval); nextTest(); };
}
function promiseWaitForCondition(aConditionFn) {
let deferred = Promise.defer();
waitForCondition(aConditionFn, deferred.resolve, "Condition didn't pass.");
return deferred.promise;
}
function getTestPlugin(aName) {
var pluginName = aName || "Test Plug-in";
var ph = Cc["@mozilla.org/plugin/host;1"].getService(Ci.nsIPluginHost);

View File

@ -186,6 +186,16 @@ function runTests() {
EventUtils.synthesizeKey("VK_DELETE", {});
ok(table.hidden, "Search suggestion table hidden");
// Focus a different element than the search input.
let btn = getContentDocument().getElementById("newtab-customize-button");
yield promiseClick(btn).then(TestRunner.next);
isnot(input, getContentDocument().activeElement, "Search input should not be focused");
// Test that Ctrl/Cmd + K will focus the input field.
EventUtils.synthesizeKey("k", { accelKey: true });
yield promiseSearchEvents(["FocusInput"]).then(TestRunner.next);
is(input, getContentDocument().activeElement, "Search input should be focused");
// Done. Revert the current engine and remove the new engines.
Services.search.currentEngine = oldCurrentEngine;
yield promiseSearchEvents(["CurrentEngine"]).then(TestRunner.next);

View File

@ -309,6 +309,8 @@ function openLinkIn(url, where, params) {
// result in a new frontmost window (e.g. "javascript:window.open('');").
w.focus();
let newTab;
switch (where) {
case "current":
let flags = Ci.nsIWebNavigation.LOAD_FLAGS_NONE;
@ -332,22 +334,29 @@ function openLinkIn(url, where, params) {
// fall through
case "tab":
let browser = w.gBrowser;
browser.loadOneTab(url, {
referrerURI: aReferrerURI,
charset: aCharset,
postData: aPostData,
inBackground: loadInBackground,
allowThirdPartyFixup: aAllowThirdPartyFixup,
relatedToCurrent: aRelatedToCurrent,
skipAnimation: aSkipTabAnimation,
allowMixedContent: aAllowMixedContent });
newTab = browser.loadOneTab(url, {
referrerURI: aReferrerURI,
charset: aCharset,
postData: aPostData,
inBackground: loadInBackground,
allowThirdPartyFixup: aAllowThirdPartyFixup,
relatedToCurrent: aRelatedToCurrent,
skipAnimation: aSkipTabAnimation,
allowMixedContent: aAllowMixedContent });
break;
}
w.gBrowser.selectedBrowser.focus();
if (!loadInBackground && w.isBlankPageURL(url))
if (!loadInBackground && w.isBlankPageURL(url)) {
if (newTab) {
// Remote tab content does not focus synchronously, so we set the flag
// on this tab to skip focusing the content if we want to focus the URL
// bar instead.
newTab._urlbarFocused = true;
}
w.focusAndSelectUrlBar();
}
}
// Used as an onclick handler for UI elements with link-like behavior.

View File

@ -735,8 +735,7 @@ const CustomizableWidgets = [{
maybeDisableMenu: function(aDocument) {
let window = aDocument.defaultView;
return !(window.gBrowser &&
window.gBrowser.docShell &&
window.gBrowser.docShell.mayEnableCharacterEncodingMenu);
window.gBrowser.selectedBrowser.mayEnableCharacterEncodingMenu);
},
populateList: function(aDocument, aContainerId, aSection) {
let containerElem = aDocument.getElementById(aContainerId);
@ -756,8 +755,7 @@ const CustomizableWidgets = [{
}
},
updateCurrentCharset: function(aDocument) {
let content = aDocument.defaultView.content;
let currentCharset = content && content.document && content.document.characterSet;
let currentCharset = aDocument.defaultView.gBrowser.selectedBrowser.characterSet;
currentCharset = CharsetMenu.foldCharset(currentCharset);
let pinnedContainer = aDocument.getElementById("PanelUI-characterEncodingView-pinned");

View File

@ -160,6 +160,27 @@ function injectLoopAPI(targetWindow) {
}
},
/**
* Return any preference under "loop." that's coercible to a boolean
* preference.
*
* @param {String} prefName The name of the pref without the preceding
* "loop."
*
* Any errors thrown by the Mozilla pref API are logged to the console
* and cause null to be returned. This includes the case of the preference
* not being found.
*
* @return {Boolean} on success, null on error
*/
getLoopBoolPref: {
enumerable: true,
writable: true,
value: function(prefName) {
return MozLoopService.getLoopBoolPref(prefName);
}
},
/**
* Starts alerting the user about an incoming call
*/
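A usage sketch for the new getter, mirroring the mochitest further down (the "testBool" pref name comes from that test and "missingPref" is illustrative; neither is a shipped preference):

    // e.g. from a browser-chrome test with the API injected (gMozLoopAPI in
    // the test below), or via navigator.mozLoop inside Loop content:
    Services.prefs.setBoolPref("loop.testBool", true);
    gMozLoopAPI.getLoopBoolPref("testBool");    // => true
    gMozLoopAPI.getLoopBoolPref("missingPref"); // => null, error logged to console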

View File

@ -613,6 +613,29 @@ this.MozLoopService = {
}
},
/**
* Return any preference under "loop." that's coercible to a boolean
* preference.
*
* @param {String} prefName The name of the pref without the preceding
* "loop."
*
* Any errors thrown by the Mozilla pref API are logged to the console
* and cause null to be returned. This includes the case of the preference
* not being found.
*
* @return {Boolean} on success, null on error
*/
getLoopBoolPref: function(prefName) {
try {
return Services.prefs.getBoolPref("loop." + prefName);
} catch (ex) {
console.log("getLoopBoolPref had trouble getting " + prefName +
"; exception: " + ex);
return null;
}
},
/**
* Performs a hawk based request to the loop server.
*

View File

@ -32,13 +32,13 @@ loop.conversation = (function(OT, mozL10n) {
},
componentDidMount: function() {
window.addEventListener('click', this.clickHandler);
window.addEventListener('blur', this._hideDeclineMenu);
window.addEventListener("click", this.clickHandler);
window.addEventListener("blur", this._hideDeclineMenu);
},
componentWillUnmount: function() {
window.removeEventListener('click', this.clickHandler);
window.removeEventListener('blur', this._hideDeclineMenu);
window.removeEventListener("click", this.clickHandler);
window.removeEventListener("blur", this._hideDeclineMenu);
},
clickHandler: function(e) {
@ -48,8 +48,11 @@ loop.conversation = (function(OT, mozL10n) {
}
},
_handleAccept: function() {
this.props.model.trigger("accept");
_handleAccept: function(callType) {
return () => {
this.props.model.set("selectedCallType", callType);
this.props.model.trigger("accept");
};
},
_handleDecline: function() {
@ -74,15 +77,15 @@ loop.conversation = (function(OT, mozL10n) {
render: function() {
/* jshint ignore:start */
var btnClassAccept = "btn btn-success btn-accept";
var btnClassAccept = "btn btn-success btn-accept call-audio-video";
var btnClassBlock = "btn btn-error btn-block";
var btnClassDecline = "btn btn-error btn-decline";
var conversationPanelClass = "incoming-call " +
loop.shared.utils.getTargetPlatform();
var cx = React.addons.classSet;
var declineDropdownMenuClasses = cx({
var dropdownMenuClassesDecline = cx({
"native-dropdown-menu": true,
"decline-block-menu": true,
"conversation-window-dropdown": true,
"visually-hidden": !this.state.showDeclineMenu
});
return (
@ -92,22 +95,36 @@ loop.conversation = (function(OT, mozL10n) {
React.DOM.div({className: "button-chevron-menu-group"},
React.DOM.div({className: "button-group-chevron"},
React.DOM.div({className: "button-group"},
React.DOM.button({className: btnClassDecline, onClick: this._handleDecline},
React.DOM.button({className: btnClassDecline,
onClick: this._handleDecline},
__("incoming_call_decline_button")
),
React.DOM.div({className: "btn-chevron",
onClick: this._toggleDeclineMenu}
onClick: this._toggleDeclineMenu}
)
),
React.DOM.ul({className: declineDropdownMenuClasses},
React.DOM.ul({className: dropdownMenuClassesDecline},
React.DOM.li({className: "btn-block", onClick: this._handleDeclineBlock},
__("incoming_call_decline_and_block_button")
)
)
)
),
React.DOM.button({className: btnClassAccept, onClick: this._handleAccept},
__("incoming_call_answer_button")
React.DOM.div({className: "button-chevron-menu-group"},
React.DOM.div({className: "button-group"},
React.DOM.button({className: btnClassAccept,
onClick: this._handleAccept("audio-video")},
__("incoming_call_answer_button")
),
React.DOM.div({className: "call-audio-only",
onClick: this._handleAccept("audio"),
title: __("incoming_call_answer_audio_only_tooltip")}
)
)
)
)
)
@ -181,9 +198,10 @@ loop.conversation = (function(OT, mozL10n) {
// We'll probably really want to be getting this data from the
// background worker on the desktop client.
// Bug 1032700 should fix this.
this._conversation.setSessionData(sessionData[0]);
this._conversation.setIncomingSessionData(sessionData[0]);
this.loadReactComponent(loop.conversation.IncomingCallView({
model: this._conversation
model: this._conversation,
video: {enabled: this._conversation.hasVideoStream("incoming")}
}));
});
},
@ -213,7 +231,7 @@ loop.conversation = (function(OT, mozL10n) {
*/
declineAndBlock: function() {
navigator.mozLoop.stopAlerting();
var token = navigator.mozLoop.getLoopCharPref('loopToken');
var token = navigator.mozLoop.getLoopCharPref("loopToken");
this._client.deleteCallUrl(token, function(error) {
// XXX The conversation window will be closed when this cb is triggered
// figure out if there is a better way to report the error to the user
@ -235,10 +253,14 @@ loop.conversation = (function(OT, mozL10n) {
return;
}
var callType = this._conversation.get("selectedCallType");
var videoStream = callType === "audio" ? false : true;
/*jshint newcap:false*/
this.loadReactComponent(sharedViews.ConversationView({
sdk: OT,
model: this._conversation
model: this._conversation,
video: {enabled: videoStream}
}));
},

View File

@ -32,13 +32,13 @@ loop.conversation = (function(OT, mozL10n) {
},
componentDidMount: function() {
window.addEventListener('click', this.clickHandler);
window.addEventListener('blur', this._hideDeclineMenu);
window.addEventListener("click", this.clickHandler);
window.addEventListener("blur", this._hideDeclineMenu);
},
componentWillUnmount: function() {
window.removeEventListener('click', this.clickHandler);
window.removeEventListener('blur', this._hideDeclineMenu);
window.removeEventListener("click", this.clickHandler);
window.removeEventListener("blur", this._hideDeclineMenu);
},
clickHandler: function(e) {
@ -48,8 +48,11 @@ loop.conversation = (function(OT, mozL10n) {
}
},
_handleAccept: function() {
this.props.model.trigger("accept");
_handleAccept: function(callType) {
return () => {
this.props.model.set("selectedCallType", callType);
this.props.model.trigger("accept");
};
},
_handleDecline: function() {
@ -74,15 +77,15 @@ loop.conversation = (function(OT, mozL10n) {
render: function() {
/* jshint ignore:start */
var btnClassAccept = "btn btn-success btn-accept";
var btnClassAccept = "btn btn-success btn-accept call-audio-video";
var btnClassBlock = "btn btn-error btn-block";
var btnClassDecline = "btn btn-error btn-decline";
var conversationPanelClass = "incoming-call " +
loop.shared.utils.getTargetPlatform();
var cx = React.addons.classSet;
var declineDropdownMenuClasses = cx({
var dropdownMenuClassesDecline = cx({
"native-dropdown-menu": true,
"decline-block-menu": true,
"conversation-window-dropdown": true,
"visually-hidden": !this.state.showDeclineMenu
});
return (
@ -92,23 +95,37 @@ loop.conversation = (function(OT, mozL10n) {
<div className="button-chevron-menu-group">
<div className="button-group-chevron">
<div className="button-group">
<button className={btnClassDecline} onClick={this._handleDecline}>
<button className={btnClassDecline}
onClick={this._handleDecline}>
{__("incoming_call_decline_button")}
</button>
<div className="btn-chevron"
onClick={this._toggleDeclineMenu}>
onClick={this._toggleDeclineMenu}>
</div>
</div>
<ul className={declineDropdownMenuClasses}>
<ul className={dropdownMenuClassesDecline}>
<li className="btn-block" onClick={this._handleDeclineBlock}>
{__("incoming_call_decline_and_block_button")}
</li>
</ul>
</div>
</div>
<div className="button-chevron-menu-group">
<div className="button-group">
<button className={btnClassAccept}
onClick={this._handleAccept("audio-video")}>
{__("incoming_call_answer_button")}
</button>
<div className="call-audio-only"
onClick={this._handleAccept("audio")}
title={__("incoming_call_answer_audio_only_tooltip")} >
</div>
</div>
</div>
<button className={btnClassAccept} onClick={this._handleAccept}>
{__("incoming_call_answer_button")}
</button>
</div>
</div>
);
@ -181,9 +198,10 @@ loop.conversation = (function(OT, mozL10n) {
// We'll probably really want to be getting this data from the
// background worker on the desktop client.
// Bug 1032700 should fix this.
this._conversation.setSessionData(sessionData[0]);
this._conversation.setIncomingSessionData(sessionData[0]);
this.loadReactComponent(loop.conversation.IncomingCallView({
model: this._conversation
model: this._conversation,
video: {enabled: this._conversation.hasVideoStream("incoming")}
}));
});
},
@ -213,7 +231,7 @@ loop.conversation = (function(OT, mozL10n) {
*/
declineAndBlock: function() {
navigator.mozLoop.stopAlerting();
var token = navigator.mozLoop.getLoopCharPref('loopToken');
var token = navigator.mozLoop.getLoopCharPref("loopToken");
this._client.deleteCallUrl(token, function(error) {
// XXX The conversation window will be closed when this cb is triggered
// figure out if there is a better way to report the error to the user
@ -235,10 +253,14 @@ loop.conversation = (function(OT, mozL10n) {
return;
}
var callType = this._conversation.get("selectedCallType");
var videoStream = callType === "audio" ? false : true;
/*jshint newcap:false*/
this.loadReactComponent(sharedViews.ConversationView({
sdk: OT,
model: this._conversation
model: this._conversation,
video: {enabled: videoStream}
}));
},

View File

@ -109,6 +109,11 @@ h1, h2, h3 {
height: auto;
}
.btn-large + .btn-chevron {
padding: 1rem;
height: 100%; /* match full height of button */
}
/*
* Left / Right padding elements
* used to center components
@ -133,17 +138,20 @@ h1, h2, h3 {
border: 1px solid #006b9d;
}
.btn-success {
.btn-success,
.btn-success + .btn-chevron {
background-color: #74bf43;
border: 1px solid #74bf43;
}
.btn-success:hover {
.btn-success:hover,
.btn-success + .btn-chevron:hover {
background-color: #6cb23e;
border: 1px solid #6cb23e;
}
.btn-success:active {
.btn-success:active,
.btn-success + .btn-chevron:active {
background-color: #64a43a;
border: 1px solid #64a43a;
}
@ -234,6 +242,8 @@ h1, h2, h3 {
.button-group .btn {
flex: 1;
border-bottom-right-radius: 0;
border-top-right-radius: 0;
}
/* Alerts */
@ -292,24 +302,36 @@ h1, h2, h3 {
opacity: 0;
}
.btn-large .icon {
display: inline-block;
width: 20px;
height: 20px;
.icon,
.icon-small,
.icon-audio,
.icon-video {
background-size: 20px;
background-repeat: no-repeat;
vertical-align: top;
margin-left: 10px;
background-position: 80% center;
}
.icon-small {
background-size: 10px;
}
.icon-video {
background-image: url("../img/video-inverse-14x14.png");
}
.icon-audio {
background-image: url("../img/audio-default-16x16@1.5x.png");
}
@media (min-resolution: 2dppx) {
.icon-video {
background-image: url("../img/video-inverse-14x14@2x.png");
}
.icon-audio {
background-image: url("../img/audio-default-16x16@2x.png");
}
}
/*

View File

@ -194,6 +194,41 @@
font-weight: normal;
}
.call-audio-only {
width: 26px;
height: 26px;
border-left: 1px solid rgba(255,255,255,.4);
border-top-right-radius: 2px;
border-bottom-right-radius: 2px;
background-color: #74BF43;
background-image: url("../img/audio-inverse-14x14.png");
background-size: 1rem;
background-position: center;
background-repeat: no-repeat;
cursor: pointer;
}
.call-audio-only:hover {
background-color: #6cb23e;
}
.call-audio-video {
background-image: url("../img/video-inverse-14x14.png");
background-position: 96% center;
background-repeat: no-repeat;
background-size: 1rem;
}
@media (min-resolution: 2dppx) {
.call-audio-only {
background-image: url("../img/audio-inverse-14x14@2x.png");
}
.call-audio-video {
background-image: url("../img/video-inverse-14x14@2x.png");
}
}
/* Expired call url page */
.expired-url-info {
@ -212,9 +247,16 @@
font-weight: 300;
}
/* Block incoming call */
/*
* Dropdown menu hidden behind a chevron
*
* .native-dropdown-menu[-large-parent] Generic class, contains common styles
* .standalone-dropdown-menu Initiate call dropdown menu
* .conversation-window-dropdown Dropdown menu for answer/decline/block options
*/
.native-dropdown-menu {
.native-dropdown-menu,
.native-dropdown-large-parent {
/* Should match a native select menu */
padding: 0;
position: absolute; /* element can be wider than the parent */
@ -226,19 +268,35 @@
border-color: #aaa #111 #111 #aaa;
}
.decline-block-menu li {
padding: 0 10px 0 5px;
list-style: none;
font-size: .9em;
color: #000;
cursor: pointer;
}
.decline-block-menu li:hover {
color: #FFF;
background: #111;
/*
* If the component is smaller than the parent
* we need it to display block to occupy full width
* Same as above but overrides the appropriate styles
*/
.native-dropdown-large-parent {
position: relative;
display: block;
}
.native-dropdown-menu li,
.native-dropdown-large-parent li {
list-style: none;
cursor: pointer;
color: #000;
}
.native-dropdown-menu li:hover,
.native-dropdown-large-parent li:hover,
.native-dropdown-large-parent li:hover button {
color: #fff;
background-color: #111;
}
.conversation-window-dropdown li {
padding: 0 10px 0 5px;
font-size: .9em;
}
/* Expired call url page */
.expired-url-info {

Binary files not shown (two new image assets added: 424 B and 536 B).

View File

@ -14,17 +14,21 @@ loop.shared.models = (function() {
*/
var ConversationModel = Backbone.Model.extend({
defaults: {
connected: false, // Session connected flag
ongoing: false, // Ongoing call flag
callerId: undefined, // Loop caller id
loopToken: undefined, // Loop conversation token
loopVersion: undefined, // Loop version for /calls/ information. This
// is the version received from the push
// notification and is used by the server to
// determine the pending calls
sessionId: undefined, // OT session id
sessionToken: undefined, // OT session token
apiKey: undefined // OT api key
connected: false, // Session connected flag
ongoing: false, // Ongoing call flag
callerId: undefined, // Loop caller id
loopToken: undefined, // Loop conversation token
loopVersion: undefined, // Loop version for /calls/ information. This
// is the version received from the push
// notification and is used by the server to
// determine the pending calls
sessionId: undefined, // OT session id
sessionToken: undefined, // OT session token
apiKey: undefined, // OT api key
callType: undefined, // The type of incoming call selected by
// other peer ("audio" or "audio-video")
selectedCallType: undefined // The selected type for the call that was
// initiated ("audio" or "audio-video")
},
/**
@ -114,7 +118,7 @@ loop.shared.models = (function() {
this._pendingCallTimer = setTimeout(
handleOutgoingCallTimeout.bind(this), this.pendingCallTimeout);
this.setSessionData(sessionData);
this.setOutgoingSessionData(sessionData);
this.trigger("call:outgoing");
},
@ -129,10 +133,11 @@ loop.shared.models = (function() {
/**
* Sets session information.
* Session data received by creating an outgoing call.
*
* @param {Object} sessionData Conversation session information.
*/
setSessionData: function(sessionData) {
setOutgoingSessionData: function(sessionData) {
// Explicit property assignment to prevent later "surprises"
this.set({
sessionId: sessionData.sessionId,
@ -141,6 +146,21 @@ loop.shared.models = (function() {
});
},
/**
* Sets session information about the incoming call.
*
* @param {Object} sessionData Conversation session information.
*/
setIncomingSessionData: function(sessionData) {
// Explicit property assignment to prevent later "surprises"
this.set({
sessionId: sessionData.sessionId,
sessionToken: sessionData.sessionToken,
apiKey: sessionData.apiKey,
callType: sessionData.callType || "audio-video"
});
},
/**
* Starts a SDK session and subscribe to call events.
*/
@ -169,6 +189,22 @@ loop.shared.models = (function() {
.once("session:ended", this.stopListening, this);
},
/**
* Helper function to determine if video stream is available for the
* incoming or outgoing call
*
* @param {string} callType Incoming or outgoing call
*/
hasVideoStream: function(callType) {
if (callType === "incoming") {
return this.get("callType") === "audio-video";
}
if (callType === "outgoing") {
return this.get("selectedCallType") === "audio-video";
}
return undefined;
},
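A quick usage sketch for the new helper (constructor options mirror the unit tests below; the false result for plain "audio" follows from the strict "audio-video" comparison):

    var conversation = new loop.shared.models.ConversationModel({}, {
      sdk: OT,
      pendingCallTimeout: 1000
    });
    conversation.set("callType", "audio");               // peer asked for audio only
    conversation.hasVideoStream("incoming");             // => false
    conversation.set("selectedCallType", "audio-video");
    conversation.hasVideoStream("outgoing");             // => true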
/**
* Handle a loop-server error, which has an optional `errno` property which
* is server error identifier.

View File

@ -217,13 +217,24 @@ loop.shared.views = (function(_, OT, l10n) {
}
},
getInitialProps: function() {
return {
video: {enabled: true},
audio: {enabled: true}
};
},
getInitialState: function() {
return {
video: {enabled: false},
audio: {enabled: false}
video: this.props.video,
audio: this.props.audio
};
},
componentWillMount: function() {
this.publisherConfig.publishVideo = this.props.video.enabled;
},
componentDidMount: function() {
this.listenTo(this.props.model, "session:connected",
this.startPublishing);
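A mounting sketch showing how the new video prop reaches the SDK publisher config (option names from the diff; the router's loadReactComponent wiring is omitted):

    var view = sharedViews.ConversationView({
      sdk: OT,
      model: this._conversation,
      video: {enabled: this._conversation.hasVideoStream("outgoing")}
    });
    // componentWillMount then copies props.video.enabled into
    // publisherConfig.publishVideo before the publisher is created.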

View File

@ -217,13 +217,24 @@ loop.shared.views = (function(_, OT, l10n) {
}
},
getInitialProps: function() {
return {
video: {enabled: true},
audio: {enabled: true}
};
},
getInitialState: function() {
return {
video: {enabled: false},
audio: {enabled: false}
video: this.props.video,
audio: this.props.audio
};
},
componentWillMount: function() {
this.publisherConfig.publishVideo = this.props.video.enabled;
},
componentDidMount: function() {
this.listenTo(this.props.model, "session:connected",
this.startPublishing);

View File

@ -124,3 +124,39 @@ header {
font-weight: normal;
}
.start-audio-only-call,
.start-audio-video-call {
background-color: none;
background-image: url("../shared/img/audio-default-16x16@1.5x.png");
background-position: 80% center;
background-size: 10px;
background-repeat: no-repeat;
cursor: pointer;
}
.start-audio-only-call {
border: none;
width: 100%;
}
.start-audio-only-call:hover {
background-image: url("../shared/img/audio-inverse-14x14.png");
}
.start-audio-video-call {
background-size: 20px;
background-image: url("../shared/img/video-inverse-14x14.png");
}
@media (min-resolution: 2dppx) {
.start-audio-only-call {
background-image: url("../shared/img/audio-default-16x16@2x.png");
}
.start-audio-only-call:hover {
background-image: url("../shared/img/audio-inverse-14x14@2x.png");
}
.start-audio-video-call {
background-image: url("../shared/img/video-inverse-14x14@2x.png");
}
}

View File

@ -144,7 +144,8 @@ loop.webapp = (function($, _, OT, webL10n) {
getInitialState: function() {
return {
urlCreationDateString: '',
disableCallButton: false
disableCallButton: false,
showCallOptionsMenu: false
};
},
@ -157,6 +158,8 @@ loop.webapp = (function($, _, OT, webL10n) {
},
componentDidMount: function() {
// Listen for events & hide dropdown menu if user clicks away
window.addEventListener("click", this.clickHandler);
this.props.model.listenTo(this.props.model, "session:error",
this._onSessionError);
this.props.client.requestCallUrlInfo(this.props.model.get("loopToken"),
@ -173,10 +176,18 @@ loop.webapp = (function($, _, OT, webL10n) {
/**
* Initiates the call.
* Takes in a call type parameter "audio" or "audio-video" and returns
* a function that initiates the call. The React click handler requires a
* function to call when that event happens.
*
* @param {string} callType User call type choice, "audio" or "audio-video"
*/
_initiateOutgoingCall: function() {
this.setState({disableCallButton: true});
this.props.model.setupOutgoingCall();
_initiateOutgoingCall: function(callType) {
return function() {
this.props.model.set("selectedCallType", callType);
this.setState({disableCallButton: true});
this.props.model.setupOutgoingCall();
}.bind(this);
},
_setConversationTimestamp: function(err, callUrlInfo) {
@ -191,6 +202,22 @@ loop.webapp = (function($, _, OT, webL10n) {
}
},
componentWillUnmount: function() {
window.removeEventListener("click", this.clickHandler);
},
clickHandler: function(e) {
if (!e.target.classList.contains('btn-chevron') &&
this.state.showCallOptionsMenu) {
this._toggleCallOptionsMenu();
}
},
_toggleCallOptionsMenu: function() {
var state = this.state.showCallOptionsMenu;
this.setState({showCallOptionsMenu: !state});
},
render: function() {
var tos_link_name = __("terms_of_use_link_text");
var privacy_notice_name = __("privacy_notice_link_text");
@ -202,8 +229,14 @@ loop.webapp = (function($, _, OT, webL10n) {
"https://www.mozilla.org/privacy/'>" + privacy_notice_name + "</a>"
});
var callButtonClasses = "btn btn-success btn-large " +
var btnClassStartCall = "btn btn-large btn-success " +
"start-audio-video-call " +
loop.shared.utils.getTargetPlatform();
var dropdownMenuClasses = React.addons.classSet({
"native-dropdown-large-parent": true,
"standalone-dropdown-menu": true,
"visually-hidden": !this.state.showCallOptionsMenu
});
return (
/* jshint ignore:start */
@ -221,11 +254,37 @@ loop.webapp = (function($, _, OT, webL10n) {
React.DOM.div({className: "button-group"},
React.DOM.div({className: "flex-padding-1"}),
React.DOM.button({ref: "submitButton", onClick: this._initiateOutgoingCall,
className: callButtonClasses,
disabled: this.state.disableCallButton},
__("initiate_call_button"),
React.DOM.i({className: "icon icon-video"})
React.DOM.div({className: "button-chevron-menu-group"},
React.DOM.div({className: "button-group-chevron"},
React.DOM.div({className: "button-group"},
React.DOM.button({className: btnClassStartCall,
onClick: this._initiateOutgoingCall("audio-video"),
disabled: this.state.disableCallButton,
title: __("initiate_audio_video_call_tooltip")},
__("initiate_audio_video_call_button")
),
React.DOM.div({className: "btn-chevron",
onClick: this._toggleCallOptionsMenu}
)
),
React.DOM.ul({className: dropdownMenuClasses},
React.DOM.li(null,
/*
Button required for disabled state.
*/
React.DOM.button({className: "start-audio-only-call",
onClick: this._initiateOutgoingCall("audio"),
disabled: this.state.disableCallButton},
__("initiate_audio_call_button")
)
)
)
)
),
React.DOM.div({className: "flex-padding-1"})
),
@ -280,12 +339,12 @@ loop.webapp = (function($, _, OT, webL10n) {
this._notifier.errorL10n("missing_conversation_info");
this.navigate("home", {trigger: true});
} else {
var callType = this._conversation.get("selectedCallType");
this._conversation.once("call:outgoing", this.startCall, this);
// XXX For now, we assume both audio and video as there is no
// other option to select (bug 1048333)
this._client.requestCallInfo(this._conversation.get("loopToken"), "audio-video",
function(err, sessionData) {
this._client.requestCallInfo(this._conversation.get("loopToken"),
callType, function(err, sessionData) {
if (err) {
switch (err.errno) {
// loop-server sends 404 + INVALID_TOKEN (errno 105) whenever a token is
@ -389,7 +448,8 @@ loop.webapp = (function($, _, OT, webL10n) {
}
this.loadReactComponent(sharedViews.ConversationView({
sdk: OT,
model: this._conversation
model: this._conversation,
video: {enabled: this._conversation.hasVideoStream("outgoing")}
}));
}
});

View File

@ -144,7 +144,8 @@ loop.webapp = (function($, _, OT, webL10n) {
getInitialState: function() {
return {
urlCreationDateString: '',
disableCallButton: false
disableCallButton: false,
showCallOptionsMenu: false
};
},
@ -157,6 +158,8 @@ loop.webapp = (function($, _, OT, webL10n) {
},
componentDidMount: function() {
// Listen for events & hide dropdown menu if user clicks away
window.addEventListener("click", this.clickHandler);
this.props.model.listenTo(this.props.model, "session:error",
this._onSessionError);
this.props.client.requestCallUrlInfo(this.props.model.get("loopToken"),
@ -173,10 +176,18 @@ loop.webapp = (function($, _, OT, webL10n) {
/**
* Initiates the call.
* Takes in a call type parameter "audio" or "audio-video" and returns
* a function that initiates the call. The React click handler requires a
* function to call when that event happens.
*
* @param {string} callType User call type choice, "audio" or "audio-video"
*/
_initiateOutgoingCall: function() {
this.setState({disableCallButton: true});
this.props.model.setupOutgoingCall();
_initiateOutgoingCall: function(callType) {
return function() {
this.props.model.set("selectedCallType", callType);
this.setState({disableCallButton: true});
this.props.model.setupOutgoingCall();
}.bind(this);
},
_setConversationTimestamp: function(err, callUrlInfo) {
@ -191,6 +202,22 @@ loop.webapp = (function($, _, OT, webL10n) {
}
},
componentWillUnmount: function() {
window.removeEventListener("click", this.clickHandler);
},
clickHandler: function(e) {
if (!e.target.classList.contains('btn-chevron') &&
this.state.showCallOptionsMenu) {
this._toggleCallOptionsMenu();
}
},
_toggleCallOptionsMenu: function() {
var state = this.state.showCallOptionsMenu;
this.setState({showCallOptionsMenu: !state});
},
render: function() {
var tos_link_name = __("terms_of_use_link_text");
var privacy_notice_name = __("privacy_notice_link_text");
@ -202,8 +229,14 @@ loop.webapp = (function($, _, OT, webL10n) {
"https://www.mozilla.org/privacy/'>" + privacy_notice_name + "</a>"
});
var callButtonClasses = "btn btn-success btn-large " +
var btnClassStartCall = "btn btn-large btn-success " +
"start-audio-video-call " +
loop.shared.utils.getTargetPlatform();
var dropdownMenuClasses = React.addons.classSet({
"native-dropdown-large-parent": true,
"standalone-dropdown-menu": true,
"visually-hidden": !this.state.showCallOptionsMenu
});
return (
/* jshint ignore:start */
@ -221,12 +254,38 @@ loop.webapp = (function($, _, OT, webL10n) {
<div className="button-group">
<div className="flex-padding-1"></div>
<button ref="submitButton" onClick={this._initiateOutgoingCall}
className={callButtonClasses}
disabled={this.state.disableCallButton}>
{__("initiate_call_button")}
<i className="icon icon-video"></i>
</button>
<div className="button-chevron-menu-group">
<div className="button-group-chevron">
<div className="button-group">
<button className={btnClassStartCall}
onClick={this._initiateOutgoingCall("audio-video")}
disabled={this.state.disableCallButton}
title={__("initiate_audio_video_call_tooltip")} >
{__("initiate_audio_video_call_button")}
</button>
<div className="btn-chevron"
onClick={this._toggleCallOptionsMenu}>
</div>
</div>
<ul className={dropdownMenuClasses}>
<li>
{/*
Button required for disabled state.
*/}
<button className="start-audio-only-call"
onClick={this._initiateOutgoingCall("audio")}
disabled={this.state.disableCallButton} >
{__("initiate_audio_call_button")}
</button>
</li>
</ul>
</div>
</div>
<div className="flex-padding-1"></div>
</div>
@ -280,12 +339,12 @@ loop.webapp = (function($, _, OT, webL10n) {
this._notifier.errorL10n("missing_conversation_info");
this.navigate("home", {trigger: true});
} else {
var callType = this._conversation.get("selectedCallType");
this._conversation.once("call:outgoing", this.startCall, this);
// XXX For now, we assume both audio and video as there is no
// other option to select (bug 1048333)
this._client.requestCallInfo(this._conversation.get("loopToken"), "audio-video",
function(err, sessionData) {
this._client.requestCallInfo(this._conversation.get("loopToken"),
callType, function(err, sessionData) {
if (err) {
switch (err.errno) {
// loop-server sends 404 + INVALID_TOKEN (errno 105) whenever a token is
@ -389,7 +448,8 @@ loop.webapp = (function($, _, OT, webL10n) {
}
this.loadReactComponent(sharedViews.ConversationView({
sdk: OT,
model: this._conversation
model: this._conversation,
video: {enabled: this._conversation.hasVideoStream("outgoing")}
}));
}
});

View File

@ -25,7 +25,9 @@ promote_firefox_hello_heading=Download Firefox to make free audio and video call
get_firefox_button=Get Firefox
call_url_unavailable_notification=This URL is unavailable.
initiate_call_button_label=Click Call to start a video chat
initiate_call_button=Call
initiate_audio_video_call_button=Call
initiate_audio_video_call_tooltip=Start a video call
initiate_audio_call_button=Voice call
## LOCALIZATION NOTE (legal_text_and_links): In this item, don't translate the
## part between {{..}}
legal_text_and_links=By using this product you agree to the {{terms_of_use_url}} and {{privacy_notice_url}}

View File

@ -119,7 +119,8 @@ describe("loop.conversation", function() {
pendingCallTimeout: 1000,
});
sandbox.stub(client, "requestCallsInfo");
sandbox.stub(conversation, "setSessionData");
sandbox.stub(conversation, "setIncomingSessionData");
sandbox.stub(conversation, "setOutgoingSessionData");
});
describe("Routes", function() {
@ -192,7 +193,8 @@ describe("loop.conversation", function() {
fakeSessionData = {
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey"
apiKey: "apiKey",
callType: "callType"
};
client.requestCallsInfo.callsArgWith(1, null, [fakeSessionData]);
@ -201,17 +203,31 @@ describe("loop.conversation", function() {
it("should store the session data", function() {
router.incoming(42);
sinon.assert.calledOnce(conversation.setSessionData);
sinon.assert.calledWithExactly(conversation.setSessionData,
sinon.assert.calledOnce(conversation.setIncomingSessionData);
sinon.assert.calledWithExactly(conversation.setIncomingSessionData,
fakeSessionData);
});
it("should call the view with video.enabled=false", function() {
sandbox.stub(conversation, "get").withArgs("callType").returns("audio");
router.incoming("fakeVersion");
sinon.assert.calledOnce(conversation.get);
sinon.assert.calledOnce(loop.conversation.IncomingCallView);
sinon.assert.calledWithExactly(loop.conversation.IncomingCallView,
{model: conversation,
video: {enabled: false}});
});
it("should display the incoming call view", function() {
sandbox.stub(conversation, "get").withArgs("callType")
.returns("audio-video");
router.incoming("fakeVersion");
sinon.assert.calledOnce(loop.conversation.IncomingCallView);
sinon.assert.calledWithExactly(loop.conversation.IncomingCallView,
{model: conversation});
{model: conversation,
video: {enabled: true}});
sinon.assert.calledOnce(router.loadReactComponent);
sinon.assert.calledWith(router.loadReactComponent,
sinon.match(function(value) {
@ -439,9 +455,32 @@ describe("loop.conversation", function() {
TestUtils.Simulate.click(buttonAccept);
sinon.assert.calledOnce(model.trigger);
/* Setting a model property triggers 2 events */
sinon.assert.calledThrice(model.trigger);
sinon.assert.calledWith(model.trigger, "accept");
});
sinon.assert.calledWith(model.trigger, "change:selectedCallType");
sinon.assert.calledWith(model.trigger, "change");
});
it("should set selectedCallType to audio-video", function() {
var buttonAccept = view.getDOMNode().querySelector(".call-audio-video");
sandbox.stub(model, "set");
TestUtils.Simulate.click(buttonAccept);
sinon.assert.calledOnce(model.set);
sinon.assert.calledWithExactly(model.set, "selectedCallType", "audio-video");
});
it("should set selectedCallType to audio", function() {
var buttonAccept = view.getDOMNode().querySelector(".call-audio-only");
sandbox.stub(model, "set");
TestUtils.Simulate.click(buttonAccept);
sinon.assert.calledOnce(model.set);
sinon.assert.calledWithExactly(model.set, "selectedCallType", "audio");
});
});
describe("click event on .btn-decline", function() {

View File

@ -3,6 +3,6 @@ support-files =
head.js
[browser_mozLoop_appVersionInfo.js]
[browser_mozLoop_charPref.js]
[browser_mozLoop_prefs.js]
[browser_mozLoop_doNotDisturb.js]
skip-if = buildapp == 'mulet'

View File

@ -24,3 +24,17 @@ add_task(function* test_mozLoop_charPref() {
Assert.equal(gMozLoopAPI.getLoopCharPref("test"), "foo",
"should get loop pref value correctly");
});
add_task(function* test_mozLoop_boolPref() {
registerCleanupFunction(function () {
Services.prefs.clearUserPref("loop.testBool");
});
Assert.ok(gMozLoopAPI, "mozLoop should exist");
Services.prefs.setBoolPref("loop.testBool", true);
// Test getLoopBoolPref
Assert.equal(gMozLoopAPI.getLoopBoolPref("testBool"), true,
"should get loop pref value correctly");
});

View File

@ -24,7 +24,8 @@ describe("loop.shared.models", function() {
fakeSessionData = {
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey"
apiKey: "apiKey",
callType: "callType"
};
fakeSession = _.extend({
connect: function () {},
@ -101,13 +102,14 @@ describe("loop.shared.models", function() {
describe("#outgoing", function() {
beforeEach(function() {
sandbox.stub(conversation, "endSession");
sandbox.stub(conversation, "setSessionData");
sandbox.stub(conversation, "setOutgoingSessionData");
sandbox.stub(conversation, "setIncomingSessionData");
});
it("should save the sessionData", function() {
it("should save the outgoing sessionData", function() {
conversation.outgoing(fakeSessionData);
sinon.assert.calledOnce(conversation.setSessionData);
sinon.assert.calledOnce(conversation.setOutgoingSessionData);
});
it("should trigger a `call:outgoing` event", function(done) {
@ -139,13 +141,24 @@ describe("loop.shared.models", function() {
});
describe("#setSessionData", function() {
it("should update conversation session information", function() {
conversation.setSessionData(fakeSessionData);
it("should update outgoing conversation session information",
function() {
conversation.setOutgoingSessionData(fakeSessionData);
expect(conversation.get("sessionId")).eql("sessionId");
expect(conversation.get("sessionToken")).eql("sessionToken");
expect(conversation.get("apiKey")).eql("apiKey");
});
expect(conversation.get("sessionId")).eql("sessionId");
expect(conversation.get("sessionToken")).eql("sessionToken");
expect(conversation.get("apiKey")).eql("apiKey");
});
it("should update incoming conversation session information",
function() {
conversation.setIncomingSessionData(fakeSessionData);
expect(conversation.get("sessionId")).eql("sessionId");
expect(conversation.get("sessionToken")).eql("sessionToken");
expect(conversation.get("apiKey")).eql("apiKey");
expect(conversation.get("callType")).eql("callType");
});
});
describe("#startSession", function() {
@ -359,6 +372,30 @@ describe("loop.shared.models", function() {
sinon.assert.calledOnce(model.stopListening);
});
});
describe("#hasVideoStream", function() {
var model;
beforeEach(function() {
model = new sharedModels.ConversationModel(fakeSessionData, {
sdk: fakeSDK,
pendingCallTimeout: 1000
});
model.startSession();
});
it("should return true for incoming callType", function() {
model.set("callType", "audio-video");
expect(model.hasVideoStream("incoming")).to.eql(true);
});
it("should return true for outgoing callType", function() {
model.set("selectedCallType", "audio-video");
expect(model.hasVideoStream("outgoing")).to.eql(true);
});
});
});
});
});

View File

@ -212,17 +212,37 @@ describe("loop.shared.views", function() {
it("should start a session", function() {
sandbox.stub(model, "startSession");
mountTestComponent({sdk: fakeSDK, model: model});
mountTestComponent({
sdk: fakeSDK,
model: model,
video: {enabled: true}
});
sinon.assert.calledOnce(model.startSession);
});
it("should set the correct stream publish options", function() {
var component = mountTestComponent({
sdk: fakeSDK,
model: model,
video: {enabled: false}
});
expect(component.publisherConfig.publishVideo).to.eql(false);
});
});
describe("constructed", function() {
var comp;
beforeEach(function() {
comp = mountTestComponent({sdk: fakeSDK, model: model});
comp = mountTestComponent({
sdk: fakeSDK,
model: model,
video: {enabled: false}
});
});
describe("#hangup", function() {
@ -293,7 +313,11 @@ describe("loop.shared.views", function() {
var comp;
beforeEach(function() {
comp = mountTestComponent({sdk: fakeSDK, model: model});
comp = mountTestComponent({
sdk: fakeSDK,
model: model,
video: {enabled: false}
});
comp.startPublishing();
});

View File

@ -302,6 +302,7 @@ describe("loop.webapp", function() {
describe("Has loop token", function() {
beforeEach(function() {
conversation.set("loopToken", "fakeToken");
conversation.set("selectedCallType", "audio-video");
sandbox.stub(conversation, "outgoing");
});
@ -400,21 +401,59 @@ describe("loop.webapp", function() {
});
it("should start the conversation establishment process", function() {
var button = view.getDOMNode().querySelector("button");
var button = view.getDOMNode().querySelector(".start-audio-video-call");
React.addons.TestUtils.Simulate.click(button);
sinon.assert.calledOnce(setupOutgoingCall);
sinon.assert.calledWithExactly(setupOutgoingCall);
});
it("should disable current form once session is initiated", function() {
conversation.set("loopToken", "fake");
var button = view.getDOMNode().querySelector("button");
it("should start the conversation establishment process", function() {
var button = view.getDOMNode().querySelector(".start-audio-only-call");
React.addons.TestUtils.Simulate.click(button);
expect(button.disabled).to.eql(true);
sinon.assert.calledOnce(setupOutgoingCall);
sinon.assert.calledWithExactly(setupOutgoingCall);
});
it("should disable audio-video button once session is initiated",
function() {
conversation.set("loopToken", "fake");
var button = view.getDOMNode().querySelector(".start-audio-video-call");
React.addons.TestUtils.Simulate.click(button);
expect(button.disabled).to.eql(true);
});
it("should disable audio-only button once session is initiated",
function() {
conversation.set("loopToken", "fake");
var button = view.getDOMNode().querySelector(".start-audio-only-call");
React.addons.TestUtils.Simulate.click(button);
expect(button.disabled).to.eql(true);
});
it("should set selectedCallType to audio", function() {
conversation.set("loopToken", "fake");
var button = view.getDOMNode().querySelector(".start-audio-only-call");
React.addons.TestUtils.Simulate.click(button);
expect(conversation.get("selectedCallType")).to.eql("audio");
});
it("should set selectedCallType to audio-video", function() {
conversation.set("loopToken", "fake");
var button = view.getDOMNode().querySelector(".start-audio-video-call");
React.addons.TestUtils.Simulate.click(button);
expect(conversation.get("selectedCallType")).to.eql("audio-video");
});
it("should set state.urlCreationDateString to a locale date string",
function() {
// wrap in a jquery object because text is broken up

View File

@ -1,47 +0,0 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/*global XPCOMUtils, Services, Assert */
var fakePrefName = "color";
var fakePrefValue = "green";
function test_getLoopCharPref()
{
Services.prefs.setCharPref("loop." + fakePrefName, fakePrefValue);
var returnedPref = MozLoopService.getLoopCharPref(fakePrefName);
Assert.equal(returnedPref, fakePrefValue,
"Should return a char pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakePrefName);
}
function test_getLoopCharPref_not_found()
{
var returnedPref = MozLoopService.getLoopCharPref(fakePrefName);
Assert.equal(returnedPref, null,
"Should return null if a preference is not found");
}
function test_getLoopCharPref_non_coercible_type()
{
Services.prefs.setBoolPref("loop." + fakePrefName, false );
var returnedPref = MozLoopService.getLoopCharPref(fakePrefName);
Assert.equal(returnedPref, null,
"Should return null if the preference exists & is of a non-coercible type");
}
function run_test()
{
test_getLoopCharPref();
test_getLoopCharPref_not_found();
test_getLoopCharPref_non_coercible_type();
do_register_cleanup(function() {
Services.prefs.clearUserPref("loop." + fakePrefName);
});
}

View File

@ -0,0 +1,106 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/*global XPCOMUtils, Services, Assert */
var fakeCharPrefName = "color";
var fakeBoolPrefName = "boolean";
var fakePrefValue = "green";
function test_getLoopCharPref()
{
Services.prefs.setCharPref("loop." + fakeCharPrefName, fakePrefValue);
var returnedPref = MozLoopService.getLoopCharPref(fakeCharPrefName);
Assert.equal(returnedPref, fakePrefValue,
"Should return a char pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakeCharPrefName);
}
function test_getLoopCharPref_not_found()
{
var returnedPref = MozLoopService.getLoopCharPref(fakeCharPrefName);
Assert.equal(returnedPref, null,
"Should return null if a preference is not found");
}
function test_getLoopCharPref_non_coercible_type()
{
Services.prefs.setBoolPref("loop." + fakeCharPrefName, false);
var returnedPref = MozLoopService.getLoopCharPref(fakeCharPrefName);
Assert.equal(returnedPref, null,
"Should return null if the preference exists & is of a non-coercible type");
}
function test_setLoopCharPref()
{
Services.prefs.setCharPref("loop." + fakeCharPrefName, "red");
MozLoopService.setLoopCharPref(fakeCharPrefName, fakePrefValue);
var returnedPref = Services.prefs.getCharPref("loop." + fakeCharPrefName);
Assert.equal(returnedPref, fakePrefValue,
"Should set a char pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakeCharPrefName);
}
function test_setLoopCharPref_new()
{
Services.prefs.clearUserPref("loop." + fakeCharPrefName);
MozLoopService.setLoopCharPref(fakeCharPrefName, fakePrefValue);
var returnedPref = Services.prefs.getCharPref("loop." + fakeCharPrefName);
Assert.equal(returnedPref, fakePrefValue,
"Should set a new char pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakeCharPrefName);
}
function test_setLoopCharPref_non_coercible_type()
{
MozLoopService.setLoopCharPref(fakeCharPrefName, true);
ok(true, "Setting non-coercible type should not fail");
}
function test_getLoopBoolPref()
{
Services.prefs.setBoolPref("loop." + fakeBoolPrefName, true);
var returnedPref = MozLoopService.getLoopBoolPref(fakeBoolPrefName);
Assert.equal(returnedPref, true,
"Should return a bool pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakeBoolPrefName);
}
function test_getLoopBoolPref_not_found()
{
var returnedPref = MozLoopService.getLoopBoolPref(fakeBoolPrefName);
Assert.equal(returnedPref, null,
"Should return null if a preference is not found");
}
function run_test()
{
test_getLoopCharPref();
test_getLoopCharPref_not_found();
test_getLoopCharPref_non_coercible_type();
test_setLoopCharPref();
test_setLoopCharPref_new();
test_setLoopCharPref_non_coercible_type();
test_getLoopBoolPref();
test_getLoopBoolPref_not_found();
do_register_cleanup(function() {
Services.prefs.clearUserPref("loop." + fakeCharPrefName);
Services.prefs.clearUserPref("loop." + fakeBoolPrefName);
});
}

View File

@ -1,49 +0,0 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/*global XPCOMUtils, Services, Assert */
var fakePrefName = "color";
var fakePrefValue = "green";
function test_setLoopCharPref()
{
Services.prefs.setCharPref("loop." + fakePrefName, "red");
MozLoopService.setLoopCharPref(fakePrefName, fakePrefValue);
var returnedPref = Services.prefs.getCharPref("loop." + fakePrefName);
Assert.equal(returnedPref, fakePrefValue,
"Should set a char pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakePrefName);
}
function test_setLoopCharPref_new()
{
Services.prefs.clearUserPref("loop." + fakePrefName);
MozLoopService.setLoopCharPref(fakePrefName, fakePrefValue);
var returnedPref = Services.prefs.getCharPref("loop." + fakePrefName);
Assert.equal(returnedPref, fakePrefValue,
"Should set a new char pref under the loop. branch");
Services.prefs.clearUserPref("loop." + fakePrefName);
}
function test_setLoopCharPref_non_coercible_type()
{
MozLoopService.setLoopCharPref(fakePrefName, true);
ok(true, "Setting non-coercible type should not fail");
}
function run_test()
{
test_setLoopCharPref();
test_setLoopCharPref_new();
test_setLoopCharPref_non_coercible_type();
do_register_cleanup(function() {
Services.prefs.clearUserPref("loop." + fakePrefName);
});
}

View File

@ -6,8 +6,7 @@ firefox-appdir = browser
[test_looppush_initialize.js]
[test_loopservice_dnd.js]
[test_loopservice_expiry.js]
[test_loopservice_get_loop_char_pref.js]
[test_loopservice_set_loop_char_pref.js]
[test_loopservice_loop_prefs.js]
[test_loopservice_initialize.js]
[test_loopservice_locales.js]
[test_loopservice_registration.js]

View File

@ -8,7 +8,7 @@
<?xml-stylesheet href="chrome://mozapps/content/preferences/preferences.css"?>
<?xml-stylesheet href="chrome://browser/skin/preferences/preferences.css"?>
<?xml-stylesheet href="chrome://browser/skin/in-content/common.css"?>
<?xml-stylesheet href="chrome://global/skin/in-content/common.css"?>
<?xml-stylesheet
href="chrome://browser/skin/preferences/in-content/preferences.css"?>
<?xml-stylesheet

View File

@ -11,7 +11,7 @@ let gSubDialog = {
_box: null,
_injectedStyleSheets: ["chrome://mozapps/content/preferences/preferences.css",
"chrome://browser/skin/preferences/preferences.css",
"chrome://browser/skin/in-content/common.css",
"chrome://global/skin/in-content/common.css",
"chrome://browser/skin/preferences/in-content/preferences.css"],
init: function() {

View File

@ -5,7 +5,6 @@
let gItemsToTest = {
"menu_devToolbar": "devtools.toolbar.enabled",
"menu_devAppMgr": "devtools.appmanager.enabled",
"menu_browserToolbox": ["devtools.chrome.enabled", "devtools.debugger.remote-enabled", "devtools.debugger.chrome-enabled"],
"javascriptConsole": "devtools.errorconsole.enabled",
"menu_devtools_connect": "devtools.debugger.remote-enabled",

View File

@ -76,6 +76,7 @@ function Toolbox(target, selectedTool, hostType, hostOptions) {
this._highlighterHidden = this._highlighterHidden.bind(this);
this._prefChanged = this._prefChanged.bind(this);
this._saveSplitConsoleHeight = this._saveSplitConsoleHeight.bind(this);
this._onFocus = this._onFocus.bind(this);
this._target.on("close", this.destroy);
@ -253,7 +254,7 @@ Toolbox.prototype = {
this._applyCacheSettings();
this._addKeysToWindow();
this._addReloadKeys();
this._addToolSwitchingKeys();
this._addHostListeners();
this._addZoomKeys();
this._loadInitialZoom();
@ -263,16 +264,19 @@ Toolbox.prototype = {
this.webconsolePanel.addEventListener("resize",
this._saveSplitConsoleHeight);
let splitConsolePromise = promise.resolve();
if (Services.prefs.getBoolPref(SPLITCONSOLE_ENABLED_PREF)) {
splitConsolePromise = this.openSplitConsole();
}
let buttonsPromise = this._buildButtons();
this._telemetry.toolOpened("toolbox");
this.selectTool(this._defaultToolId).then(panel => {
// Wait until the original tool is selected so that the split
// console input will receive focus.
let splitConsolePromise = promise.resolve();
if (Services.prefs.getBoolPref(SPLITCONSOLE_ENABLED_PREF)) {
splitConsolePromise = this.openSplitConsole();
}
promise.all([
splitConsolePromise,
buttonsPromise
@ -345,7 +349,7 @@ Toolbox.prototype = {
});
},
_addToolSwitchingKeys: function() {
_addHostListeners: function() {
let nextKey = this.doc.getElementById("toolbox-next-tool-key");
nextKey.addEventListener("command", this.selectNextTool.bind(this), true);
let prevKey = this.doc.getElementById("toolbox-previous-tool-key");
@ -354,6 +358,8 @@ Toolbox.prototype = {
// Split console uses keypress instead of command so the event can be
// cancelled with stopPropagation on the keypress, and not preventDefault.
this.doc.addEventListener("keypress", this._splitConsoleOnKeypress, false);
this.doc.addEventListener("focus", this._onFocus, true);
},
_saveSplitConsoleHeight: function() {
@ -976,12 +982,29 @@ Toolbox.prototype = {
* Focus split console's input line
*/
focusConsoleInput: function() {
let hud = this.getPanel("webconsole").hud;
if (hud && hud.jsterm) {
hud.jsterm.inputNode.focus();
let consolePanel = this.getPanel("webconsole");
if (consolePanel) {
consolePanel.focusInput();
}
},
/**
* If the console is split and we are focusing an element outside
* of the console, then store the newly focused element, so that
* it can be restored once the split console closes.
*/
_onFocus: function({originalTarget}) {
// Ignore any non-element nodes, or any elements contained
// within the webconsole frame.
let webconsoleURL = gDevTools.getToolDefinition("webconsole").url;
if (originalTarget.nodeType !== 1 ||
originalTarget.baseURI === webconsoleURL) {
return;
}
this._lastFocusedElement = originalTarget;
},
/**
* Opens the split console.
*
@ -993,6 +1016,7 @@ Toolbox.prototype = {
Services.prefs.setBoolPref(SPLITCONSOLE_ENABLED_PREF, true);
this._refreshConsoleDisplay();
this.emit("split-console");
return this.loadTool("webconsole").then(() => {
this.focusConsoleInput();
});
@ -1009,6 +1033,10 @@ Toolbox.prototype = {
Services.prefs.setBoolPref(SPLITCONSOLE_ENABLED_PREF, false);
this._refreshConsoleDisplay();
this.emit("split-console");
if (this._lastFocusedElement) {
this._lastFocusedElement.focus();
}
return promise.resolve();
},
@ -1312,6 +1340,7 @@ Toolbox.prototype = {
destroyHost: function() {
this.doc.removeEventListener("keypress",
this._splitConsoleOnKeypress, false);
this.doc.removeEventListener("focus", this._onFocus, true);
return this._host.destroy();
},
@ -1334,6 +1363,7 @@ Toolbox.prototype = {
gDevTools.off("pref-changed", this._prefChanged);
this._lastFocusedElement = null;
this._saveSplitConsoleHeight();
this.webconsolePanel.removeEventListener("resize",
this._saveSplitConsoleHeight);

View File

@ -1614,8 +1614,11 @@ var Scratchpad = {
var lines = initialText.split("\n");
this.editor.on("change", this._onChanged);
let okstring = this.strings.GetStringFromName("selfxss.okstring");
let msg = this.strings.formatStringFromName("selfxss.msg", [okstring], 1);
this._onPaste = WebConsoleUtils.pasteHandlerGen(this.editor.container.contentDocument.body,
document.querySelector('#scratchpad-notificationbox'));
document.querySelector('#scratchpad-notificationbox'),
msg, okstring);
editorElement.addEventListener("paste", this._onPaste);
editorElement.addEventListener("drop", this._onPaste);
this.editor.on("save", () => this.saveFile());

View File

@ -58,7 +58,7 @@ function spawnTest() {
"Empty message hides even when loading node while open.");
ok(isVisible($("#web-audio-editor-tabs")),
"Switches to tab view when loading node while open.");
is($("#web-audio-inspector-title").value, "OscillatorNode (" + nodeIds[1] + ")",
is($("#web-audio-inspector-title").value, "Oscillator",
"Inspector title updates when loading node while open.");
yield teardown(panel);

View File

@ -43,7 +43,7 @@ function spawnTest() {
ok(isVisible($("#web-audio-editor-tabs")),
"InspectorView tabs view visible when node selected.");
is($("#web-audio-inspector-title").value, "OscillatorNode (" + nodeIds[1] + ")",
is($("#web-audio-inspector-title").value, "Oscillator",
"Inspector should have the node title when a node is selected.");
is($("#web-audio-editor-tabs").selectedIndex, 0,
@ -52,7 +52,7 @@ function spawnTest() {
click(panelWin, findGraphNode(panelWin, nodeIds[2]));
yield once(panelWin, EVENTS.UI_INSPECTOR_NODE_SET);
is($("#web-audio-inspector-title").value, "GainNode (" + nodeIds[2] + ")",
is($("#web-audio-inspector-title").value, "Gain",
"Inspector title updates when a new node is selected.");
yield teardown(panel);

View File

@ -156,7 +156,11 @@ let WebAudioGraphView = {
AudioNodes.forEach(node => {
// Add node to graph
graph.addNode(node.id, { label: node.type, id: node.id });
graph.addNode(node.id, {
type: node.type, // Just for storing type data
label: node.type.replace(/Node$/, ""), // Displayed in SVG node
id: node.id // Identification
});
// Add all of the connections from this node to the edge array to be added
// after all the nodes are added; otherwise edges would be created before their nodes exist
@ -177,7 +181,7 @@ let WebAudioGraphView = {
let svgNodes = oldDrawNodes(graph, root);
svgNodes.attr("class", (n) => {
let node = graph.node(n);
return "audionode type-" + node.label;
return "audionode type-" + node.type;
});
svgNodes.attr("data-id", (n) => {
let node = graph.node(n);
@ -452,7 +456,7 @@ let WebAudioInspectorView = {
*/
_setTitle: function () {
let node = this._currentNode;
let title = node.type + " (" + node.id + ")";
let title = node.type.replace(/Node$/, "");
$("#web-audio-inspector-title").setAttribute("value", title);
},

View File

@ -281,6 +281,7 @@ skip-if = buildapp == 'mulet'
[browser_webconsole_scratchpad_panel_link.js]
[browser_webconsole_split.js]
[browser_webconsole_split_escape_key.js]
[browser_webconsole_split_focus.js]
[browser_webconsole_split_persist.js]
[browser_webconsole_view_source.js]
[browser_webconsole_reflow.js]

View File

@ -0,0 +1,74 @@
/*
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/
*/
function test() {
info("Test that the split console state is persisted");
let toolbox;
let TEST_URI = "data:text/html;charset=utf-8,<p>Web Console test for splitting</p>";
Task.spawn(runner).then(finish);
function* runner() {
info("Opening a tab while there is no user setting on split console pref");
let {tab} = yield loadTab(TEST_URI);
let target = TargetFactory.forTab(tab);
toolbox = yield gDevTools.showToolbox(target, "inspector");
ok(!toolbox.splitConsole, "Split console is hidden by default");
info ("Focusing the search box before opening the split console");
let inspector = toolbox.getPanel("inspector");
inspector.searchBox.focus();
// Use the binding element since inspector.searchBox is a XUL element.
let activeElement = getActiveElement(inspector.panelDoc);
activeElement = activeElement.ownerDocument.getBindingParent(activeElement);
is (activeElement, inspector.searchBox, "Search box is focused");
yield toolbox.openSplitConsole();
ok(toolbox.splitConsole, "Split console is now visible");
// Use the binding element since jsterm.inputNode is a XUL textarea element.
activeElement = getActiveElement(toolbox.doc);
activeElement = activeElement.ownerDocument.getBindingParent(activeElement);
let inputNode = toolbox.getPanel("webconsole").hud.jsterm.inputNode;
is(activeElement, inputNode, "Split console input is focused by default");
yield toolbox.closeSplitConsole();
info ("Making sure that the search box is refocused after closing the split console");
// Use the binding element since inspector.searchBox is a XUL element.
activeElement = getActiveElement(inspector.panelDoc);
activeElement = activeElement.ownerDocument.getBindingParent(activeElement);
is (activeElement, inspector.searchBox, "Search box is focused");
yield toolbox.destroy();
}
function getActiveElement(doc) {
let activeElement = doc.activeElement;
while (activeElement && activeElement.contentDocument) {
activeElement = activeElement.contentDocument.activeElement;
}
return activeElement;
}
function toggleSplitConsoleWithEscape() {
let onceSplitConsole = toolbox.once("split-console");
let contentWindow = toolbox.frame.contentWindow;
contentWindow.focus();
EventUtils.sendKey("ESCAPE", contentWindow);
return onceSplitConsole;
}
function finish() {
toolbox = TEST_URI = null;
Services.prefs.clearUserPref("devtools.toolbox.splitconsoleEnabled");
Services.prefs.clearUserPref("devtools.toolbox.splitconsoleHeight");
finishTest();
}
}

View File

@ -35,6 +35,12 @@ function test() {
ok(toolbox.splitConsole, "Split console is visible by default.");
is(getHeightPrefValue(), 200, "Height is set based on panel height after closing");
// Use the binding element since jsterm.inputNode is a XUL textarea element.
let activeElement = getActiveElement(toolbox.doc);
activeElement = activeElement.ownerDocument.getBindingParent(activeElement);
let inputNode = toolbox.getPanel("webconsole").hud.jsterm.inputNode;
is(activeElement, inputNode, "Split console input is focused by default");
toolbox.webconsolePanel.height = 1;
ok (toolbox.webconsolePanel.clientHeight > 1,
"The actual height of the console is bound with a min height");
@ -63,6 +69,14 @@ function test() {
yield toolbox.destroy();
}
function getActiveElement(doc) {
let activeElement = doc.activeElement;
while (activeElement && activeElement.contentDocument) {
activeElement = activeElement.contentDocument.activeElement;
}
return activeElement;
}
function getVisiblePrefValue() {
return Services.prefs.getBoolPref("devtools.toolbox.splitconsoleEnabled");
}

View File

@ -3137,7 +3137,11 @@ JSTerm.prototype = {
inputContainer.style.display = "none";
}
else {
this._onPaste = WebConsoleUtils.pasteHandlerGen(this.inputNode, doc.getElementById("webconsole-notificationbox"));
let okstring = l10n.getStr("selfxss.okstring");
let msg = l10n.getFormatStr("selfxss.msg", [okstring]);
this._onPaste = WebConsoleUtils.pasteHandlerGen(this.inputNode,
doc.getElementById("webconsole-notificationbox"),
msg, okstring);
this.inputNode.addEventListener("keypress", this._keyPress, false);
this.inputNode.addEventListener("paste", this._onPaste);
this.inputNode.addEventListener("drop", this._onPaste);

View File

@ -64,6 +64,8 @@ const PREF_TELEMETRY_ENABLED = "enabled";
const URI_EXTENSION_STRINGS = "chrome://mozapps/locale/extensions/extensions.properties";
const STRING_TYPE_NAME = "type.%ID%.name";
const CACHE_WRITE_RETRY_DELAY_SEC = 60 * 3;
const TELEMETRY_LOG = {
// log(key, [kind, experimentId, details])
ACTIVATION_KEY: "EXPERIMENT_ACTIVATION",
@ -331,10 +333,19 @@ function AlreadyShutdownError(message="already shut down") {
this.message = message;
this.stack = error.stack;
}
AlreadyShutdownError.prototype = Object.create(Error.prototype);
AlreadyShutdownError.prototype.constructor = AlreadyShutdownError;
function CacheWriteError(message="Error writing cache file") {
Error.call(this, message);
let error = new Error();
this.name = "CacheWriteError";
this.message = message;
this.stack = error.stack;
}
CacheWriteError.prototype = Object.create(Error.prototype);
CacheWriteError.prototype.constructor = CacheWriteError;
/**
* Manages the experiments and provides an interface to control them.
*/
@ -690,6 +701,7 @@ Experiments.Experiments.prototype = {
throw new Error("Experiment not found");
}
e.branch = String(branchstr);
this._log.trace("setExperimentBranch(" + id + ", " + e.branch + ") _dirty=" + this._dirty);
this._dirty = true;
Services.obs.notifyObservers(null, EXPERIMENTS_CHANGED_TOPIC, null);
yield this._run();
@ -766,6 +778,8 @@ Experiments.Experiments.prototype = {
this._mainTask = Task.spawn(function*() {
try {
yield this._main();
} catch (e if e instanceof CacheWriteError) {
// In this case we want to reschedule
} catch (e) {
this._log.error("_main caught error: " + e);
return;
@ -801,7 +815,7 @@ Experiments.Experiments.prototype = {
// If somebody called .updateManifest() or disableExperiment()
// while we were running, go again right now.
}
while (this._refresh || this._terminateReason);
while (this._refresh || this._terminateReason || this._dirty);
},
_loadManifest: function*() {
@ -992,7 +1006,7 @@ Experiments.Experiments.prototype = {
// We failed to write the cache, it's still dirty.
this._dirty = true;
this._log.error("_saveToCache failed and caught error: " + e);
return;
throw new CacheWriteError();
}
this._log.debug("_saveToCache saved to " + path);
@ -1307,6 +1321,10 @@ Experiments.Experiments.prototype = {
let time = null;
let now = this._policy.now().getTime();
if (this._dirty) {
// If we failed to write the cache, we should try again periodically
time = now + 1000 * CACHE_WRITE_RETRY_DELAY_SEC;
}
for (let [id, experiment] of this._experiments) {
let scheduleTime = experiment.getScheduleTime();

View File

@ -103,3 +103,13 @@ scratchpad.panelLabel=Scratchpad Panel
# tooltip of the tab when the Scratchpad is displayed inside the developer tools
# window.
scratchpad.tooltip=Scratchpad
# LOCALIZATION NOTE (selfxss.msg): the text that is displayed when
# a new user of the developer tools pastes code into the console
# %S is the text of selfxss.okstring
selfxss.msg=Scam Warning: Take care when pasting things you don't understand. This could allow attackers to steal your identity or take control of your computer. Please type '%S' in the scratchpad below to allow pasting.
# LOCALIZATION NOTE (selfxss.okstring): the string to be typed
# in by a new user of the developer tools when they receive the selfxss.msg prompt.
# Please avoid using non-keyboard characters here
selfxss.okstring=allow pasting

View File

@ -17,6 +17,7 @@ unable_retrieve_url=Sorry, we were unable to retrieve a call url.
incoming_call_title=Incoming Call…
incoming_call=Incoming call
incoming_call_answer_button=Answer
incoming_call_answer_audio_only_tooltip=Answer with voice
incoming_call_decline_button=Decline
incoming_call_decline_and_block_button=Decline and Block
incoming_call_block_button=Block

View File

@ -251,4 +251,13 @@ let AboutHome = {
Cu.reportError("Error in AboutHome.sendAboutHomeData: " + x);
});
},
/**
* Focuses the search input in the page with the given message manager.
* @param messageManager
* The MessageManager object of the selected browser.
*/
focusInput: function (messageManager) {
messageManager.sendAsyncMessage("AboutHome:FocusInput");
}
};

View File

@ -94,6 +94,17 @@ this.ContentSearch = {
Services.obs.addObserver(this, "browser-search-engine-modified", false);
},
/**
* Focuses the search input in the page with the given message manager.
* @param messageManager
* The MessageManager object of the selected browser.
*/
focusInput: function (messageManager) {
messageManager.sendAsyncMessage(OUTBOUND_MESSAGE, {
type: "FocusInput"
});
},
receiveMessage: function (msg) {
// Add a temporary event handler that exists only while the message is in
// the event queue. If the message's source docshell changes browsers in

View File

@ -881,7 +881,6 @@ toolbarbutton[sdk-button="true"][cui-areatype="toolbar"] > .toolbarbutton-icon {
}
.urlbar-icon {
cursor: pointer;
padding: 0 3px;
}
@ -1524,7 +1523,6 @@ richlistitem[type~="action"][actiontype="switchtab"] > .ac-url-box > .ac-action-
#urlbar > toolbarbutton {
-moz-appearance: none;
padding: 0 2px;
cursor: pointer;
list-style-image: url("chrome://browser/skin/reload-stop-go.png");
}

View File

@ -35,17 +35,6 @@ browser.jar:
skin/classic/browser/identity-icons-https-mixed-active.png
skin/classic/browser/identity-icons-https-mixed-display.png
skin/classic/browser/Info.png
* skin/classic/browser/in-content/common.css (in-content/common.css)
skin/classic/browser/in-content/check.png (../shared/in-content/check.png)
skin/classic/browser/in-content/check@2x.png (../shared/in-content/check@2x.png)
skin/classic/browser/in-content/dropdown.png (../shared/in-content/dropdown.png)
skin/classic/browser/in-content/dropdown@2x.png (../shared/in-content/dropdown@2x.png)
skin/classic/browser/in-content/dropdown-disabled.png (../shared/in-content/dropdown-disabled.png)
skin/classic/browser/in-content/dropdown-disabled@2x.png (../shared/in-content/dropdown-disabled@2x.png)
skin/classic/browser/in-content/help-glyph.png (../shared/in-content/help-glyph.png)
skin/classic/browser/in-content/help-glyph@2x.png (../shared/in-content/help-glyph@2x.png)
skin/classic/browser/in-content/sorter.png (../shared/in-content/sorter.png)
skin/classic/browser/in-content/sorter@2x.png (../shared/in-content/sorter@2x.png)
skin/classic/browser/menuPanel.png
skin/classic/browser/menuPanel-customize.png
skin/classic/browser/menuPanel-exit.png

View File

@ -57,7 +57,6 @@
.search-go-button {
padding: 1px;
list-style-image: url(moz-icon://stock/gtk-find?size=menu);
cursor: pointer;
}
menuitem[cmd="cmd_clearhistory"] {

View File

@ -43,17 +43,6 @@ browser.jar:
skin/classic/browser/identity-icons-https-mixed-display.png
skin/classic/browser/identity-icons-https-mixed-display@2x.png
skin/classic/browser/Info.png
* skin/classic/browser/in-content/common.css (in-content/common.css)
skin/classic/browser/in-content/check.png (../shared/in-content/check.png)
skin/classic/browser/in-content/check@2x.png (../shared/in-content/check@2x.png)
skin/classic/browser/in-content/dropdown.png (../shared/in-content/dropdown.png)
skin/classic/browser/in-content/dropdown@2x.png (../shared/in-content/dropdown@2x.png)
skin/classic/browser/in-content/dropdown-disabled.png (../shared/in-content/dropdown-disabled.png)
skin/classic/browser/in-content/dropdown-disabled@2x.png (../shared/in-content/dropdown-disabled@2x.png)
skin/classic/browser/in-content/help-glyph.png (../shared/in-content/help-glyph.png)
skin/classic/browser/in-content/help-glyph@2x.png (../shared/in-content/help-glyph@2x.png)
skin/classic/browser/in-content/sorter.png (../shared/in-content/sorter.png)
skin/classic/browser/in-content/sorter@2x.png (../shared/in-content/sorter@2x.png)
skin/classic/browser/keyhole-circle.png
skin/classic/browser/keyhole-circle@2x.png
skin/classic/browser/KUI-background.png

View File

@ -897,14 +897,14 @@ toolbarbutton[panel-multiview-anchor="true"] > .toolbarbutton-menubutton-button
toolbarbutton[panel-multiview-anchor="true"] {
background-image: url(chrome://browser/skin/customizableui/subView-arrow-back-inverted.png),
linear-gradient(rgba(255,255,255,0.3), rgba(255,255,255,0));
background-position: right 5px center;
background-position: right calc(@menuPanelButtonWidth@ / 2 - @exitSubviewGutterWidth@ + 2px) center;
background-repeat: no-repeat, repeat;
}
toolbarbutton[panel-multiview-anchor="true"]:-moz-locale-dir(rtl) {
background-image: url(chrome://browser/skin/customizableui/subView-arrow-back-inverted-rtl.png),
linear-gradient(rgba(255,255,255,0.3), rgba(255,255,255,0));
background-position: left 5px center;
background-position: left calc(@menuPanelButtonWidth@ / 2 - @exitSubviewGutterWidth@ + 2px) center;
}
toolbarpaletteitem[place="palette"] > .toolbarbutton-1 > .toolbarbutton-menubutton-dropmarker,

View File

@ -99,6 +99,10 @@ text {
* Inspector Styles
*/
#web-audio-inspector-title {
margin: 6px;
}
.web-audio-inspector .error {
background-image: url(alerticon-warning.png);
background-size: 13px 12px;

View File

@ -167,7 +167,7 @@ prefpane {
#typeColumn > .treecol-sortdirection[sortDirection=descending],
#actionColumn > .treecol-sortdirection[sortDirection=descending] {
-moz-appearance: none;
list-style-image: url("chrome://browser/skin/in-content/sorter.png");
list-style-image: url("chrome://global/skin/in-content/sorter.png");
}
#typeColumn > .treecol-sortdirection[sortDirection=descending],
@ -182,7 +182,7 @@ prefpane {
#actionColumn > .treecol-sortdirection[sortDirection=descending] {
width: 12px;
height: 8px;
list-style-image: url("chrome://browser/skin/in-content/sorter@2x.png");
list-style-image: url("chrome://global/skin/in-content/sorter@2x.png");
}
}

View File

@ -37,17 +37,6 @@ browser.jar:
skin/classic/browser/identity-icons-https-ev.png
skin/classic/browser/identity-icons-https-mixed-active.png
skin/classic/browser/identity-icons-https-mixed-display.png
* skin/classic/browser/in-content/common.css (in-content/common.css)
skin/classic/browser/in-content/check.png (../shared/in-content/check.png)
skin/classic/browser/in-content/check@2x.png (../shared/in-content/check@2x.png)
skin/classic/browser/in-content/dropdown.png (../shared/in-content/dropdown.png)
skin/classic/browser/in-content/dropdown@2x.png (../shared/in-content/dropdown@2x.png)
skin/classic/browser/in-content/dropdown-disabled.png (../shared/in-content/dropdown-disabled.png)
skin/classic/browser/in-content/dropdown-disabled@2x.png (../shared/in-content/dropdown-disabled@2x.png)
skin/classic/browser/in-content/help-glyph.png (../shared/in-content/help-glyph.png)
skin/classic/browser/in-content/help-glyph@2x.png (../shared/in-content/help-glyph@2x.png)
skin/classic/browser/in-content/sorter.png (../shared/in-content/sorter.png)
skin/classic/browser/in-content/sorter@2x.png (../shared/in-content/sorter@2x.png)
skin/classic/browser/keyhole-forward-mask.svg
skin/classic/browser/KUI-background.png
skin/classic/browser/livemark-folder.png
@ -457,17 +446,6 @@ browser.jar:
skin/classic/aero/browser/identity-icons-https-ev.png
skin/classic/aero/browser/identity-icons-https-mixed-active.png
skin/classic/aero/browser/identity-icons-https-mixed-display.png
* skin/classic/aero/browser/in-content/common.css (in-content/common.css)
skin/classic/aero/browser/in-content/check.png (../shared/in-content/check.png)
skin/classic/aero/browser/in-content/check@2x.png (../shared/in-content/check@2x.png)
skin/classic/aero/browser/in-content/dropdown.png (../shared/in-content/dropdown.png)
skin/classic/aero/browser/in-content/dropdown@2x.png (../shared/in-content/dropdown@2x.png)
skin/classic/aero/browser/in-content/dropdown-disabled.png (../shared/in-content/dropdown-disabled.png)
skin/classic/aero/browser/in-content/dropdown-disabled@2x.png (../shared/in-content/dropdown-disabled@2x.png)
skin/classic/aero/browser/in-content/help-glyph.png (../shared/in-content/help-glyph.png)
skin/classic/aero/browser/in-content/help-glyph@2x.png (../shared/in-content/help-glyph@2x.png)
skin/classic/aero/browser/in-content/sorter.png (../shared/in-content/sorter.png)
skin/classic/aero/browser/in-content/sorter@2x.png (../shared/in-content/sorter@2x.png)
skin/classic/aero/browser/keyhole-forward-mask.svg
skin/classic/aero/browser/KUI-background.png
skin/classic/aero/browser/livemark-folder.png (livemark-folder-aero.png)

View File

@ -311,7 +311,7 @@ def print_command(out, args):
print >>out, "".join([" " + l for l in file.readlines()])
out.flush()
def main():
def main(args, proc_callback=None):
parser = OptionParser()
parser.add_option("--extract", action="store_true", dest="extract",
help="when a library has no descriptor file, extract it first, when possible")
@ -322,7 +322,7 @@ def main():
parser.add_option("--symbol-order", dest="symbol_order", metavar="FILE",
help="use the given list of symbols to order symbols in the resulting binary when using with a linker")
(options, args) = parser.parse_args()
(options, args) = parser.parse_args(args)
with ExpandArgsMore(args) as args:
if options.extract:
@ -336,6 +336,8 @@ def main():
print_command(sys.stderr, args)
try:
proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
if proc_callback:
proc_callback(proc)
except Exception, e:
print >>sys.stderr, 'error: Launching', args, ':', e
raise e
@ -345,7 +347,8 @@ def main():
sys.stderr.write(stdout)
sys.stderr.flush()
if proc.returncode:
exit(proc.returncode)
return proc.returncode
return 0
if __name__ == '__main__':
main()
exit(main(sys.argv[1:]))

View File

@ -2,8 +2,11 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import with_statement
import os, subprocess, sys, threading, time
import expandlibs_exec
import sys
import threading
import time
from win32 import procmem
def measure_vsize_threadfunc(proc, output_file):
@ -33,14 +36,17 @@ def measure_link_vsize(output_file, args):
Execute |args|, and measure the maximum virtual memory usage of the process,
printing it to stdout when finished.
"""
proc = subprocess.Popen(args)
t = threading.Thread(target=measure_vsize_threadfunc,
args=(proc, output_file))
t.start()
# Wait for the linker to finish.
exitcode = proc.wait()
# ...and then wait for the background thread to finish.
t.join()
# This needs to be a list in order for the callback to set the
# variable properly with Python 2's scoping rules.
t = [None]
def callback(proc):
t[0] = threading.Thread(target=measure_vsize_threadfunc,
args=(proc, output_file))
t[0].start()
exitcode = expandlibs_exec.main(args, proc_callback=callback)
# Wait for the background thread to finish.
t[0].join()
return exitcode
if __name__ == "__main__":
@ -50,4 +56,5 @@ if __name__ == "__main__":
if len(sys.argv) < 3:
print >>sys.stderr, "Usage: link.py <output filename> <commandline>"
sys.exit(1)
sys.exit(measure_link_vsize(sys.argv[1], sys.argv[2:]))
output_file = sys.argv.pop(1)
sys.exit(measure_link_vsize(output_file, sys.argv[1:]))

View File

@ -3973,11 +3973,15 @@ ArrayBufferBuilder::setCapacity(uint32_t aNewCap)
{
MOZ_ASSERT(!mMapPtr);
uint8_t *newdata = (uint8_t *) JS_ReallocateArrayBufferContents(nullptr, aNewCap, mDataPtr, mCapacity);
uint8_t *newdata = (uint8_t *) realloc(mDataPtr, aNewCap);
if (!newdata) {
return false;
}
if (aNewCap > mCapacity) {
memset(newdata + mCapacity, 0, aNewCap - mCapacity);
}
mDataPtr = newdata;
mCapacity = aNewCap;
if (mLength > aNewCap) {
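
For context, the point of this hunk is that plain realloc(), unlike the removed JS_ReallocateArrayBufferContents() call, does not zero newly allocated bytes, so the grown tail is cleared by hand. A minimal standalone C++ sketch of the same grow-and-zero pattern (GrowableBuffer is an illustrative name, not the Gecko class):

#include <cstdint>
#include <cstdlib>
#include <cstring>

// Illustrative sketch only: realloc() keeps the existing contents but leaves
// any newly added capacity uninitialized, so zero it explicitly.
struct GrowableBuffer {
  uint8_t* data = nullptr;
  uint32_t capacity = 0;

  bool SetCapacity(uint32_t aNewCap) {
    uint8_t* newData = static_cast<uint8_t*>(std::realloc(data, aNewCap));
    if (!newData && aNewCap != 0) {
      return false;  // realloc failed; the old block is still valid
    }
    if (aNewCap > capacity) {
      std::memset(newData + capacity, 0, aNewCap - capacity);  // zero the grown tail
    }
    data = newData;
    capacity = aNewCap;
    return true;
  }
};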

View File

@ -178,7 +178,7 @@ LOCAL_INCLUDES += [
'/dom/base',
'/dom/canvas',
'/dom/xbl',
'/editor/libeditor/base',
'/editor/libeditor',
'/editor/libeditor/text',
'/editor/txmgr',
'/layout/forms',

View File

@ -113,12 +113,14 @@ ImageListener::OnStartRequest(nsIRequest* request, nsISupports *ctxt)
return NS_OK;
}
nsCOMPtr<nsIImageLoadingContent> imageLoader = do_QueryInterface(imgDoc->mImageContent);
NS_ENSURE_TRUE(imageLoader, NS_ERROR_UNEXPECTED);
if (!imgDoc->mObservingImageLoader) {
nsCOMPtr<nsIImageLoadingContent> imageLoader = do_QueryInterface(imgDoc->mImageContent);
NS_ENSURE_TRUE(imageLoader, NS_ERROR_UNEXPECTED);
imageLoader->AddObserver(imgDoc);
imgDoc->mObservingImageLoader = true;
imageLoader->LoadImageWithChannel(channel, getter_AddRefs(mNextStream));
imageLoader->AddObserver(imgDoc);
imgDoc->mObservingImageLoader = true;
imageLoader->LoadImageWithChannel(channel, getter_AddRefs(mNextStream));
}
return MediaDocumentStreamListener::OnStartRequest(request, ctxt);
}
@ -384,12 +386,14 @@ ImageDocument::ScrollImageTo(int32_t aX, int32_t aY, bool restoreImage)
}
nsCOMPtr<nsIPresShell> shell = GetShell();
if (!shell)
if (!shell) {
return;
}
nsIScrollableFrame* sf = shell->GetRootScrollFrameAsScrollable();
if (!sf)
if (!sf) {
return;
}
nsRect portRect = sf->GetScrollPortRect();
sf->ScrollTo(nsPoint(nsPresContext::CSSPixelsToAppUnits(aX/ratio) - portRect.width/2,

View File

@ -21,6 +21,7 @@
#include "mozilla/Services.h"
#include "nsServiceManagerUtils.h"
#include "nsIPrincipal.h"
#include "nsIMultiPartChannel.h"
namespace mozilla {
namespace dom {
@ -70,9 +71,16 @@ MediaDocumentStreamListener::OnStopRequest(nsIRequest* request,
rv = mNextStream->OnStopRequest(request, ctxt, status);
}
// No more need for our document so clear our reference and prevent leaks
mDocument = nullptr;
// Don't release mDocument here if we're in the middle of a multipart response.
bool lastPart = true;
nsCOMPtr<nsIMultiPartChannel> mpchan(do_QueryInterface(request));
if (mpchan) {
mpchan->GetIsLastPart(&lastPart);
}
if (lastPart) {
mDocument = nullptr;
}
return rv;
}

View File

@ -758,6 +758,9 @@ public:
MediaInfo* aInfo,
MetadataTags* aTags);
int64_t GetSeekTime() { return mRequestedSeekTarget.mTime; }
void ResetSeekTime() { mRequestedSeekTarget.Reset(); }
/******
* The following methods must only be called on the main
* thread.

View File

@ -1518,17 +1518,16 @@ MediaDecoderStateMachine::EnqueueDecodeMetadataTask()
mDispatchedDecodeMetadataTask) {
return NS_OK;
}
mDispatchedDecodeMetadataTask = true;
RefPtr<nsIRunnable> task(
NS_NewRunnableMethod(this, &MediaDecoderStateMachine::CallDecodeMetadata));
nsresult rv = mDecodeTaskQueue->Dispatch(task);
if (NS_SUCCEEDED(rv)) {
mDispatchedDecodeMetadataTask = true;
} else {
if (NS_FAILED(rv)) {
NS_WARNING("Dispatch ReadMetadata task failed.");
return rv;
mDispatchedDecodeMetadataTask = false;
}
return NS_OK;
return rv;
}
void
@ -1842,7 +1841,6 @@ void
MediaDecoderStateMachine::CallDecodeMetadata()
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
AutoSetOnScopeExit<bool> unsetOnExit(mDispatchedDecodeMetadataTask, false);
if (mState != DECODER_STATE_DECODING_METADATA) {
return;
}
@ -1874,6 +1872,7 @@ nsresult MediaDecoderStateMachine::DecodeMetadata()
// change state to DECODER_STATE_WAIT_FOR_RESOURCES
StartWaitForResources();
// affect values only if ReadMetadata succeeds
mDispatchedDecodeMetadataTask = false;
return NS_OK;
}
}
@ -1996,6 +1995,7 @@ MediaDecoderStateMachine::FinishDecodeMetadata()
StartPlayback();
}
mDispatchedDecodeMetadataTask = false;
return NS_OK;
}
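
For context, the hunks above move the mDispatchedDecodeMetadataTask bookkeeping so the flag is set before the dispatch and cleared again if the dispatch fails or once the task has run. A minimal standalone C++ sketch of that schedule-at-most-once pattern (OnceScheduler and its callback signature are illustrative assumptions, not the state machine's API):

#include <atomic>
#include <functional>

// Illustrative sketch only: mark the task as pending *before* dispatching so a
// racing caller bails out early, and clear the mark if the dispatch fails or
// once the task has actually run.
class OnceScheduler {
 public:
  // `dispatch` hands a closure to some task queue and reports success.
  bool Schedule(const std::function<bool(std::function<void()>)>& dispatch,
                std::function<void()> task) {
    bool expected = false;
    if (!mPending.compare_exchange_strong(expected, true)) {
      return true;  // already scheduled; nothing to do
    }
    bool dispatched = dispatch([this, task] {
      task();
      mPending.store(false);  // allow the next Schedule() once we have run
    });
    if (!dispatched) {
      mPending.store(false);  // dispatch failed; undo the reservation
    }
    return dispatched;
  }

 private:
  std::atomic<bool> mPending{false};
};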
@ -2443,23 +2443,23 @@ MediaDecoderStateMachine::FlushDecoding()
"Should be on state machine or decode thread.");
mDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
// Put a task in the decode queue to abort any decoding operations.
// The reader is not supposed to put any tasks to deliver samples into
// the queue after we call this (unless we request another sample from it).
RefPtr<nsIRunnable> task;
task = NS_NewRunnableMethod(mReader, &MediaDecoderReader::ResetDecode);
mDecodeTaskQueue->Dispatch(task);
{
// Wait for the thread decoding to abort decoding operations and run
// any pending callbacks. This is important, as we don't want any
// pending tasks posted to the task queue by the reader to deliver
// any samples after we've posted the reader Shutdown() task below,
// as the sample-delivery tasks will keep video frames alive until
// after we've called Reader::Shutdown(), and shutdown on B2G will
// fail as there are outstanding video frames alive.
// Put a task in the decode queue to abort any decoding operations.
// The reader is not supposed to put any tasks to deliver samples into
// the queue after this runs (unless we request another sample from it).
RefPtr<nsIRunnable> task;
task = NS_NewRunnableMethod(mReader, &MediaDecoderReader::ResetDecode);
// Wait for the ResetDecode to run and for the decoder to abort
// decoding operations and run any pending callbacks. This is
// important, as we don't want any pending tasks posted to the task
// queue by the reader to deliver any samples after we've posted the
// reader Shutdown() task below, as the sample-delivery tasks will
// keep video frames alive until after we've called Reader::Shutdown(),
// and shutdown on B2G will fail as there are outstanding video frames
// alive.
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
mDecodeTaskQueue->Flush();
mDecodeTaskQueue->FlushAndDispatch(task);
}
// We must reset playback so that all references to frames queued

View File

@ -15,6 +15,7 @@ MediaTaskQueue::MediaTaskQueue(TemporaryRef<SharedThreadPool> aPool)
, mQueueMonitor("MediaTaskQueue::Queue")
, mIsRunning(false)
, mIsShutdown(false)
, mIsFlushing(false)
{
MOZ_COUNT_CTOR(MediaTaskQueue);
}
@ -30,6 +31,17 @@ nsresult
MediaTaskQueue::Dispatch(TemporaryRef<nsIRunnable> aRunnable)
{
MonitorAutoLock mon(mQueueMonitor);
return DispatchLocked(aRunnable, AbortIfFlushing);
}
nsresult
MediaTaskQueue::DispatchLocked(TemporaryRef<nsIRunnable> aRunnable,
DispatchMode aMode)
{
mQueueMonitor.AssertCurrentThreadOwns();
if (mIsFlushing && aMode == AbortIfFlushing) {
return NS_ERROR_ABORT;
}
if (mIsShutdown) {
return NS_ERROR_FAILURE;
}
@ -113,10 +125,25 @@ MediaTaskQueue::Shutdown()
AwaitIdleLocked();
}
nsresult
MediaTaskQueue::FlushAndDispatch(TemporaryRef<nsIRunnable> aRunnable)
{
MonitorAutoLock mon(mQueueMonitor);
AutoSetFlushing autoFlush(this);
while (!mTasks.empty()) {
mTasks.pop();
}
nsresult rv = DispatchLocked(aRunnable, IgnoreFlushing);
NS_ENSURE_SUCCESS(rv, rv);
AwaitIdleLocked();
return NS_OK;
}
void
MediaTaskQueue::Flush()
{
MonitorAutoLock mon(mQueueMonitor);
AutoSetFlushing autoFlush(this);
while (!mTasks.empty()) {
mTasks.pop();
}

View File

@ -35,6 +35,8 @@ public:
nsresult SyncDispatch(TemporaryRef<nsIRunnable> aRunnable);
nsresult FlushAndDispatch(TemporaryRef<nsIRunnable> aRunnable);
// Removes all pending tasks from the task queue, and blocks until
// the currently running task (if any) finishes.
void Flush();
@ -59,6 +61,11 @@ private:
// mQueueMonitor must be held.
void AwaitIdleLocked();
enum DispatchMode { AbortIfFlushing, IgnoreFlushing };
nsresult DispatchLocked(TemporaryRef<nsIRunnable> aRunnable,
DispatchMode aMode);
RefPtr<SharedThreadPool> mPool;
// Monitor that protects the queue and mIsRunning;
@ -79,6 +86,27 @@ private:
// True if we've started our shutdown process.
bool mIsShutdown;
class MOZ_STACK_CLASS AutoSetFlushing
{
public:
AutoSetFlushing(MediaTaskQueue* aTaskQueue) : mTaskQueue(aTaskQueue)
{
mTaskQueue->mQueueMonitor.AssertCurrentThreadOwns();
mTaskQueue->mIsFlushing = true;
}
~AutoSetFlushing()
{
mTaskQueue->mQueueMonitor.AssertCurrentThreadOwns();
mTaskQueue->mIsFlushing = false;
}
private:
MediaTaskQueue* mTaskQueue;
};
// True if we're flushing; we reject new tasks if we're flushing.
bool mIsFlushing;
class Runner : public nsRunnable {
public:
Runner(MediaTaskQueue* aQueue)
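
For context, the new FlushAndDispatch() pairs the queue flush with a single task that must still run, while the mIsFlushing flag (managed by the RAII AutoSetFlushing helper) makes ordinary dispatches fail during the flush. A minimal standalone C++ sketch of that idea (FlushableQueue is an illustrative stand-in; it omits the blocking AwaitIdle behaviour and uses std::mutex instead of the Monitor class):

#include <functional>
#include <mutex>
#include <queue>

// Illustrative sketch only: while a flush is in progress, ordinary Dispatch()
// calls are rejected, but the single task handed to FlushAndDispatch()
// bypasses that check so it runs after the pending work has been dropped.
class FlushableQueue {
 public:
  bool Dispatch(std::function<void()> aTask) {
    std::lock_guard<std::mutex> lock(mMutex);
    return DispatchLocked(std::move(aTask), AbortIfFlushing);
  }

  bool FlushAndDispatch(std::function<void()> aTask) {
    std::lock_guard<std::mutex> lock(mMutex);
    FlushingGuard guard(*this);                         // RAII flushing flag
    std::queue<std::function<void()>>().swap(mTasks);   // drop pending tasks
    return DispatchLocked(std::move(aTask), IgnoreFlushing);
  }

  // A consumer thread would call this in a loop to drain the queue.
  bool RunNext() {
    std::function<void()> task;
    {
      std::lock_guard<std::mutex> lock(mMutex);
      if (mTasks.empty()) {
        return false;
      }
      task = std::move(mTasks.front());
      mTasks.pop();
    }
    task();
    return true;
  }

 private:
  enum DispatchMode { AbortIfFlushing, IgnoreFlushing };

  struct FlushingGuard {
    explicit FlushingGuard(FlushableQueue& aQueue) : mQueue(aQueue) { mQueue.mFlushing = true; }
    ~FlushingGuard() { mQueue.mFlushing = false; }
    FlushableQueue& mQueue;
  };

  bool DispatchLocked(std::function<void()> aTask, DispatchMode aMode) {
    if (mFlushing && aMode == AbortIfFlushing) {
      return false;  // reject new work while a flush is underway
    }
    mTasks.push(std::move(aTask));
    return true;
  }

  std::mutex mMutex;
  std::queue<std::function<void()>> mTasks;
  bool mFlushing = false;
};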

View File

@ -54,10 +54,10 @@ AppleATDecoder::~AppleATDecoder()
}
static void
_MetadataCallback(void *aDecoder,
_MetadataCallback(void* aDecoder,
AudioFileStreamID aStream,
AudioFileStreamPropertyID aProperty,
UInt32 *aFlags)
UInt32* aFlags)
{
LOG("AppleATDecoder metadata callback");
AppleATDecoder* decoder = static_cast<AppleATDecoder*>(aDecoder);
@ -65,10 +65,11 @@ _MetadataCallback(void *aDecoder,
}
static void
_SampleCallback(void *aDecoder,
UInt32 aNumBytes, UInt32 aNumPackets,
const void *aData,
AudioStreamPacketDescription *aPackets)
_SampleCallback(void* aDecoder,
UInt32 aNumBytes,
UInt32 aNumPackets,
const void* aData,
AudioStreamPacketDescription* aPackets)
{
LOG("AppleATDecoder sample callback %u bytes %u packets",
aNumBytes, aNumPackets);
@ -170,8 +171,8 @@ struct PassthroughUserData {
AppleATDecoder* mDecoder;
UInt32 mNumPackets;
UInt32 mDataSize;
const void *mData;
AudioStreamPacketDescription *mPacketDesc;
const void* mData;
AudioStreamPacketDescription* mPacketDesc;
bool mDone;
};
@ -181,12 +182,12 @@ const uint32_t kNeedMoreData = 'MOAR';
static OSStatus
_PassthroughInputDataCallback(AudioConverterRef aAudioConverter,
UInt32 *aNumDataPackets /* in/out */,
AudioBufferList *aData /* in/out */,
AudioStreamPacketDescription **aPacketDesc,
void *aUserData)
UInt32* aNumDataPackets /* in/out */,
AudioBufferList* aData /* in/out */,
AudioStreamPacketDescription** aPacketDesc,
void* aUserData)
{
PassthroughUserData *userData = (PassthroughUserData *)aUserData;
PassthroughUserData* userData = (PassthroughUserData*)aUserData;
if (userData->mDone) {
// We make sure this callback is run _once_, with all the data we received
// from |AudioFileStreamParseBytes|. When we return an error, the decoder
@ -207,7 +208,7 @@ _PassthroughInputDataCallback(AudioConverterRef aAudioConverter,
aData->mBuffers[0].mNumberChannels = userData->mDecoder->mConfig.channel_count;
aData->mBuffers[0].mDataByteSize = userData->mDataSize;
aData->mBuffers[0].mData = const_cast<void *>(userData->mData);
aData->mBuffers[0].mData = const_cast<void*>(userData->mData);
return noErr;
}
@ -280,9 +281,9 @@ AppleATDecoder::SampleCallback(uint32_t aNumBytes,
LOG("pushed audio at time %lfs; duration %lfs\n",
(double)time / USECS_PER_S, (double)duration / USECS_PER_S);
AudioData *audio = new AudioData(mSamplePosition,
AudioData* audio = new AudioData(mSamplePosition,
time, duration, numFrames,
reinterpret_cast<AudioDataValue *>(decoded.forget()),
reinterpret_cast<AudioDataValue*>(decoded.forget()),
channels, rate);
mCallback->Output(audio);
mHaveOutput = true;
@ -302,9 +303,14 @@ void
AppleATDecoder::SetupDecoder()
{
AudioStreamBasicDescription inputFormat;
// Fill in the input format description from the stream.
AppleUtils::GetProperty(mStream,
kAudioFileStreamProperty_DataFormat, &inputFormat);
mHaveOutput = false;
nsresult rv = AppleUtils::GetRichestDecodableFormat(mStream, inputFormat);
if (NS_FAILED(rv)) {
mCallback->Error();
return;
}
// Fill in the output format manually.
PodZero(&mOutputFormat);
@ -324,13 +330,13 @@ AppleATDecoder::SetupDecoder()
mOutputFormat.mBytesPerPacket = mOutputFormat.mBytesPerFrame
= mOutputFormat.mChannelsPerFrame * mOutputFormat.mBitsPerChannel / 8;
OSStatus rv = AudioConverterNew(&inputFormat, &mOutputFormat, &mConverter);
if (rv) {
OSStatus status =
AudioConverterNew(&inputFormat, &mOutputFormat, &mConverter);
if (status) {
LOG("Error %d constructing AudioConverter", rv);
mConverter = nullptr;
mCallback->Error();
}
mHaveOutput = false;
}
void

View File

@ -4,9 +4,9 @@
// Utility functions to help with Apple API calls.
#include <AudioToolbox/AudioToolbox.h>
#include "AppleUtils.h"
#include "prlog.h"
#include "nsAutoPtr.h"
#ifdef PR_LOGGING
PRLogModuleInfo* GetDemuxerLog();
@ -26,12 +26,12 @@ namespace mozilla {
nsresult
AppleUtils::GetProperty(AudioFileStreamID aAudioFileStream,
AudioFileStreamPropertyID aPropertyID,
void *aData)
void* aData)
{
UInt32 size;
Boolean writeable;
OSStatus rv = AudioFileStreamGetPropertyInfo(aAudioFileStream, aPropertyID,
&size, &writeable);
&size, &writeable);
if (rv) {
WARN("Couldn't get property " PROPERTY_ID_FORMAT "\n",
@ -80,5 +80,52 @@ AppleUtils::SetCFDict(CFMutableDictionaryRef dict,
CFDictionarySetValue(dict, keyRef, value ? kCFBooleanTrue : kCFBooleanFalse);
}
nsresult
AppleUtils::GetRichestDecodableFormat(AudioFileStreamID aAudioFileStream,
AudioStreamBasicDescription& aFormat)
{
// Fill in the default format description from the stream.
nsresult rv = GetProperty(aAudioFileStream,
kAudioFileStreamProperty_DataFormat, &aFormat);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
UInt32 propertySize;
OSStatus status = AudioFileStreamGetPropertyInfo(
aAudioFileStream, kAudioFileStreamProperty_FormatList, &propertySize, NULL);
if (NS_WARN_IF(status)) {
// Return the default format description.
return NS_OK;
}
MOZ_ASSERT(propertySize % sizeof(AudioFormatListItem) == 0);
uint32_t sizeList = propertySize / sizeof(AudioFormatListItem);
nsAutoArrayPtr<AudioFormatListItem> formatListPtr(
new AudioFormatListItem[sizeList]);
rv = GetProperty(aAudioFileStream, kAudioFileStreamProperty_FormatList,
formatListPtr);
if (NS_WARN_IF(NS_FAILED(rv))) {
// Return the default format description.
return NS_OK;
}
// Get the index number of the first playable format.
// This index number will be for the highest quality layer the platform
// is capable of playing.
UInt32 itemIndex;
UInt32 indexSize = sizeof(itemIndex);
status =
AudioFormatGetProperty(kAudioFormatProperty_FirstPlayableFormatFromList,
propertySize, formatListPtr, &indexSize, &itemIndex);
if (NS_WARN_IF(status)) {
// Return the default format description.
return NS_OK;
}
aFormat = formatListPtr[itemIndex].mASBD;
return NS_OK;
}
} // namespace mozilla

View File

@ -16,7 +16,7 @@ struct AppleUtils {
// Helper to retrieve properties from AudioFileStream objects.
static nsresult GetProperty(AudioFileStreamID aAudioFileStream,
AudioFileStreamPropertyID aPropertyID,
void *aData);
void* aData);
// Helper to set a string, string pair on a CFMutableDictionaryRef.
static void SetCFDict(CFMutableDictionaryRef dict,
@ -30,6 +30,12 @@ struct AppleUtils {
static void SetCFDict(CFMutableDictionaryRef dict,
const char* key,
bool value);
// Helper to retrieve the best audio format available in the given
// audio stream.
// The basic format will be returned by default should an error occur.
static nsresult GetRichestDecodableFormat(
AudioFileStreamID aAudioFileStream, AudioStreamBasicDescription& aFormat);
};
// Wrapper class to call CFRelease on reference types

View File

@ -12,6 +12,7 @@
#include "nsAutoRef.h"
#include "GMPParent.h"
#include "mozilla/gmp/GMPTypes.h"
#include "nsThread.h"
#include "nsThreadUtils.h"
#include "runnable_utils.h"
@ -72,7 +73,9 @@ GMPVideoEncoderParent::GMPVideoEncoderParent(GMPParent *aPlugin)
GMPVideoEncoderParent::~GMPVideoEncoderParent()
{
mEncodedThread->Shutdown();
if (mEncodedThread) {
mEncodedThread->Shutdown();
}
}
GMPVideoHostImpl&
@ -238,6 +241,12 @@ GMPVideoEncoderParent::Shutdown()
}
}
static void
ShutdownEncodedThread(nsCOMPtr<nsIThread>& aThread)
{
aThread->Shutdown();
}
// Note: Keep this sync'd up with Shutdown
void
GMPVideoEncoderParent::ActorDestroy(ActorDestroyReason aWhy)
@ -249,6 +258,15 @@ GMPVideoEncoderParent::ActorDestroy(ActorDestroyReason aWhy)
mCallback->Terminated();
mCallback = nullptr;
}
// Must be shut down before VideoEncoderDestroyed(), since this can recurse
// the GMPThread event loop. See bug 1049501
if (mEncodedThread) {
// WrapRunnable can't be used with an nsCOMPtr<nsIThread> here, so use WrapRunnableNM instead.
NS_DispatchToMainThread(
WrapRunnableNM<decltype(&ShutdownEncodedThread),
nsCOMPtr<nsIThread> >(&ShutdownEncodedThread, mEncodedThread));
mEncodedThread = nullptr;
}
if (mPlugin) {
// Ignore any return code. It is OK for this to fail without killing the process.
mPlugin->VideoEncoderDestroyed(this);
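
For context, the comment above explains why the encoded-frame thread is no longer shut down inline: joining it from ActorDestroy can recurse the GMP thread's event loop, so the blocking shutdown is bounced to the main thread instead. A generic standalone C++ sketch of deferring a blocking join off the current loop (Dispatcher and Worker are made-up names, not Gecko APIs):

#include <functional>
#include <memory>
#include <thread>

// Illustrative sketch only: a hypothetical dispatcher that queues work onto
// another event loop or thread.
using Dispatcher = std::function<void(std::function<void()>)>;

struct Worker {
  std::thread thread;
  void BlockingShutdown() {
    if (thread.joinable()) {
      thread.join();  // may take a while
    }
  }
};

// Instead of joining the worker inline (which blocks, and here could re-enter
// the current event loop), hand the blocking join to another loop and drop
// the local reference immediately.
void TearDown(std::unique_ptr<Worker> aWorker, const Dispatcher& aDispatchElsewhere) {
  Worker* raw = aWorker.release();
  aDispatchElsewhere([raw] {
    raw->BlockingShutdown();
    delete raw;
  });
}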

content/media/omx/AudioOffloadPlayer.cpp Executable file → Normal file
View File

@ -21,6 +21,7 @@
#include "nsComponentManagerUtils.h"
#include "nsITimer.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "VideoUtils.h"
#include <binder/IPCThreadState.h>
#include <stagefright/foundation/ADebug.h>
@ -51,7 +52,7 @@ PRLogModuleInfo* gAudioOffloadPlayerLog;
// When elapsed, the AudioSink is destroyed to allow the audio DSP to power down.
static const uint64_t OFFLOAD_PAUSE_MAX_MSECS = 60000ll;
AudioOffloadPlayer::AudioOffloadPlayer(MediaOmxDecoder* aObserver) :
AudioOffloadPlayer::AudioOffloadPlayer(MediaOmxCommonDecoder* aObserver) :
mObserver(aObserver),
mInputBuffer(nullptr),
mSampleRate(0),
@ -197,7 +198,8 @@ status_t AudioOffloadPlayer::ChangeState(MediaDecoder::PlayState aState)
case MediaDecoder::PLAY_STATE_PAUSED:
case MediaDecoder::PLAY_STATE_SHUTDOWN:
// Just pause here during play state shutdown as well to stop playing
// offload track immediately. Resources will be freed by MediaOmxDecoder
// offload track immediately. Resources will be freed by
// MediaOmxCommonDecoder
Pause();
break;
@ -421,14 +423,14 @@ void AudioOffloadPlayer::NotifyAudioEOS()
void AudioOffloadPlayer::NotifyPositionChanged()
{
nsCOMPtr<nsIRunnable> nsEvent = NS_NewRunnableMethod(mObserver,
&MediaOmxDecoder::PlaybackPositionChanged);
&MediaOmxCommonDecoder::PlaybackPositionChanged);
NS_DispatchToMainThread(nsEvent);
}
void AudioOffloadPlayer::NotifyAudioTearDown()
{
nsCOMPtr<nsIRunnable> nsEvent = NS_NewRunnableMethod(mObserver,
&MediaOmxDecoder::AudioOffloadTearDown);
&MediaOmxCommonDecoder::AudioOffloadTearDown);
NS_DispatchToMainThread(nsEvent);
}

content/media/omx/AudioOffloadPlayer.h Executable file → Normal file
View File

@ -27,14 +27,12 @@
#include <utils/RefBase.h>
#include "AudioOutput.h"
#include "AudioOffloadPlayerBase.h"
#include "MediaDecoderOwner.h"
#include "MediaOmxDecoder.h"
#include "MediaOmxCommonDecoder.h"
namespace mozilla {
class MediaOmxDecoder;
/**
* AudioOffloadPlayer adds support for audio tunneling to a digital signal
* processor (DSP) in the device chipset. With tunneling, audio decoding is
@ -47,11 +45,12 @@ class MediaOmxDecoder;
* data, FillBuffer() will read data from compressed audio source and provide
* it to the sink
*
* Also this class passes state changes (play/pause/seek) from MediaOmxDecoder
* to AudioSink as well as provide AudioSink status (position changed,
* playback ended, seek complete, audio tear down) back to MediaOmxDecoder
* Also this class passes state changes (play/pause/seek) from
* MediaOmxCommonDecoder to AudioSink, and provides AudioSink status
* (position changed, playback ended, seek complete, audio tear down) back to
* MediaOmxCommonDecoder
*
* It acts as a bridge between MediaOmxDecoder and AudioSink during
* It acts as a bridge between MediaOmxCommonDecoder and AudioSink during
* offload playback
*/
@ -70,7 +69,7 @@ public:
SEEK_COMPLETE
};
AudioOffloadPlayer(MediaOmxDecoder* aDecoder = nullptr);
AudioOffloadPlayer(MediaOmxCommonDecoder* aDecoder = nullptr);
~AudioOffloadPlayer();
@ -146,7 +145,8 @@ private:
// relative to the seeked position. And seeked position may be slightly
// different than given mSeekTimeUs, if audio source cannot find a frame at
// that position. Store seeked position in mStartPosUs and provide
// mStartPosUs + GetPosition() (i.e. absolute position) to MediaOmxDecoder
// mStartPosUs + GetPosition() (i.e. absolute position) to
// MediaOmxCommonDecoder
// Used in main thread and offload callback thread, protected by Mutex
// mLock
int64_t mStartPosUs;
@ -161,7 +161,7 @@ private:
// mLock
int64_t mPositionTimeMediaUs;
// State obtained from MediaOmxDecoder. Used only in main thread
// State obtained from MediaOmxCommonDecoder. Used only in main thread
MediaDecoder::PlayState mPlayState;
// Protect accessing audio position related variables between main thread and
@ -180,8 +180,8 @@ private:
// Buffer used to get date from audio source. Used in offload callback thread
MediaBuffer* mInputBuffer;
// MediaOmxDecoder object used mainly to notify the audio sink status
MediaOmxDecoder* mObserver;
// MediaOmxCommonDecoder object used mainly to notify the audio sink status
MediaOmxCommonDecoder* mObserver;
TimeStamp mLastFireUpdateTime;

View File

@ -20,13 +20,11 @@
#ifndef AUDIO_OFFLOAD_PLAYER_BASE_H_
#define AUDIO_OFFLOAD_PLAYER_BASE_H_
#include "MediaDecoder.h"
#include "MediaDecoderOwner.h"
#include "MediaOmxDecoder.h"
namespace mozilla {
class MediaOmxDecoder;
/**
* AudioOffloadPlayer interface class which has functions used by MediaOmxDecoder
* This is to reduce the dependency of AudioOffloadPlayer in MediaOmxDecoder

View File

@ -5,6 +5,9 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaCodecDecoder.h"
#include <stagefright/MediaSource.h>
#include "MediaCodecReader.h"
#include "MediaDecoderStateMachine.h"
@ -16,10 +19,16 @@ MediaCodecDecoder::Clone()
return new MediaCodecDecoder();
}
MediaDecoderStateMachine*
MediaCodecDecoder::CreateStateMachine()
MediaOmxCommonReader*
MediaCodecDecoder::CreateReader()
{
return new MediaDecoderStateMachine(this, new MediaCodecReader(this));
return new MediaCodecReader(this);
}
MediaDecoderStateMachine*
MediaCodecDecoder::CreateStateMachine(MediaOmxCommonReader* aReader)
{
return new MediaDecoderStateMachine(this, aReader);
}
} // namespace mozilla

View File

@ -7,18 +7,20 @@
#ifndef MEDIA_CODEC_DECODER_H
#define MEDIA_CODEC_DECODER_H
#include "MediaDecoder.h"
#include "MediaOmxCommonDecoder.h"
namespace mozilla {
// MediaDecoder that uses MediaCodecReader.
class MediaCodecDecoder : public MediaDecoder
class MediaCodecDecoder : public MediaOmxCommonDecoder
{
public:
virtual MediaDecoder* Clone();
virtual MediaDecoderStateMachine* CreateStateMachine();
virtual MediaOmxCommonReader* CreateReader();
virtual MediaDecoderStateMachine* CreateStateMachine(MediaOmxCommonReader* aReader);
};
} // namespace mozilla

View File

@ -106,7 +106,8 @@ MediaCodecReader::VideoResourceListener::codecCanceled()
}
}
bool MediaCodecReader::TrackInputCopier::Copy(MediaBuffer* aSourceBuffer, sp<ABuffer> aCodecBuffer)
bool
MediaCodecReader::TrackInputCopier::Copy(MediaBuffer* aSourceBuffer, sp<ABuffer> aCodecBuffer)
{
if (aSourceBuffer == nullptr ||
aCodecBuffer == nullptr ||
@ -132,7 +133,8 @@ MediaCodecReader::Track::Track()
// Append the value of |kKeyValidSamples| to the end of each vorbis buffer.
// https://github.com/mozilla-b2g/platform_frameworks_av/blob/master/media/libstagefright/OMXCodec.cpp#L3128
// https://github.com/mozilla-b2g/platform_frameworks_av/blob/master/media/libstagefright/NuMediaExtractor.cpp#L472
bool MediaCodecReader::VorbisInputCopier::Copy(MediaBuffer* aSourceBuffer, sp<ABuffer> aCodecBuffer)
bool
MediaCodecReader::VorbisInputCopier::Copy(MediaBuffer* aSourceBuffer, sp<ABuffer> aCodecBuffer)
{
if (aSourceBuffer == nullptr ||
aCodecBuffer == nullptr ||
@ -176,7 +178,7 @@ MediaCodecReader::CodecBufferInfo::CodecBufferInfo()
}
MediaCodecReader::MediaCodecReader(AbstractMediaDecoder* aDecoder)
: MediaDecoderReader(aDecoder)
: MediaOmxCommonReader(aDecoder)
, mColorConverterBufferSize(0)
{
mHandler = new MessageHandler(this);
@ -427,6 +429,10 @@ MediaCodecReader::ReadMetadata(MediaInfo* aInfo,
return NS_ERROR_FAILURE;
}
#ifdef MOZ_AUDIO_OFFLOAD
CheckAudioOffload();
#endif
if (IsWaitingMediaResources()) {
return NS_OK;
}
@ -524,6 +530,12 @@ MediaCodecReader::IsMediaSeekable()
return (mExtractor != nullptr) && (mExtractor->flags() & MediaExtractor::CAN_SEEK);
}
android::sp<android::MediaSource>
MediaCodecReader::GetAudioOffloadTrack()
{
return mAudioOffloadTrack.mSource;
}
bool
MediaCodecReader::ReallocateResources()
{
@ -675,6 +687,8 @@ MediaCodecReader::CreateMediaSources()
if (audioSource != nullptr && audioSource->start() == OK) {
mAudioTrack.mSource = audioSource;
}
// Get another track instance for audio offload playback.
mAudioOffloadTrack.mSource = mExtractor->getTrack(audioTrackIndex);
}
if (videoTrackIndex != invalidTrackIndex && mVideoTrack.mSource == nullptr) {
@ -694,6 +708,7 @@ MediaCodecReader::DestroyMediaSources()
{
mAudioTrack.mSource = nullptr;
mVideoTrack.mSource = nullptr;
mAudioOffloadTrack.mSource = nullptr;
}
bool
@ -1227,8 +1242,6 @@ MediaCodecReader::onMessageReceived(const sp<AMessage> &aMessage)
break;
}
// TODO
default:
TRESPASS();
break;

View File

@ -15,7 +15,7 @@
#include "I420ColorConverterHelper.h"
#include "MediaCodecProxy.h"
#include "MediaDecoderReader.h"
#include "MediaOmxCommonReader.h"
namespace android {
struct ALooper;
@ -29,7 +29,7 @@ struct MediaCodec;
namespace mozilla {
class MediaCodecReader : public MediaDecoderReader
class MediaCodecReader : public MediaOmxCommonReader
{
public:
MediaCodecReader(AbstractMediaDecoder* aDecoder);
@ -85,6 +85,8 @@ public:
virtual bool IsMediaSeekable() MOZ_OVERRIDE;
virtual android::sp<android::MediaSource> GetAudioOffloadTrack();
protected:
struct TrackInputCopier
{
@ -249,9 +251,10 @@ private:
android::sp<android::ALooper> mLooper;
android::sp<android::MediaExtractor> mExtractor;
// media elements
// media tracks
AudioTrack mAudioTrack;
VideoTrack mVideoTrack;
AudioTrack mAudioOffloadTrack; // only Track::mSource is valid
// color converter
android::I420ColorConverterHelper mColorConverter;

View File

@ -0,0 +1,264 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaOmxCommonDecoder.h"
#include <stagefright/MediaSource.h>
#include "AudioOffloadPlayerBase.h"
#include "MediaDecoderStateMachine.h"
#include "MediaOmxCommonReader.h"
#ifdef MOZ_AUDIO_OFFLOAD
#include "AudioOffloadPlayer.h"
#endif
using namespace android;
namespace mozilla {
#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
#define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
#else
#define DECODER_LOG(type, msg)
#endif
MediaOmxCommonDecoder::MediaOmxCommonDecoder()
: MediaDecoder()
, mReader(nullptr)
, mCanOffloadAudio(false)
, mFallbackToStateMachine(false)
{
#ifdef PR_LOGGING
if (!gMediaDecoderLog) {
gMediaDecoderLog = PR_NewLogModule("MediaDecoder");
}
#endif
}
void
MediaOmxCommonDecoder::SetCanOffloadAudio(bool aCanOffloadAudio)
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mCanOffloadAudio = aCanOffloadAudio;
}
void
MediaOmxCommonDecoder::MetadataLoaded(MediaInfo* aInfo,
MetadataTags* aTags)
{
MOZ_ASSERT(NS_IsMainThread());
MediaDecoder::MetadataLoaded(aInfo, aTags);
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
if (!mCanOffloadAudio || mFallbackToStateMachine || mOutputStreams.Length() ||
mInitialPlaybackRate != 1.0) {
DECODER_LOG(PR_LOG_DEBUG, ("In %s Offload Audio check failed",
__PRETTY_FUNCTION__));
return;
}
#ifdef MOZ_AUDIO_OFFLOAD
mAudioOffloadPlayer = new AudioOffloadPlayer(this);
#endif
if (!mAudioOffloadPlayer) {
return;
}
mAudioOffloadPlayer->SetSource(mReader->GetAudioOffloadTrack());
status_t err = mAudioOffloadPlayer->Start(false);
if (err == OK) {
PauseStateMachine();
// Call ChangeState() to run AudioOffloadPlayer since offload state enabled
ChangeState(mPlayState);
return;
}
mAudioOffloadPlayer = nullptr;
DECODER_LOG(PR_LOG_DEBUG, ("In %s Unable to start offload audio %d."
"Switching to normal mode", __PRETTY_FUNCTION__, err));
}
void
MediaOmxCommonDecoder::PauseStateMachine()
{
MOZ_ASSERT(NS_IsMainThread());
GetReentrantMonitor().AssertCurrentThreadIn();
DECODER_LOG(PR_LOG_DEBUG, ("%s", __PRETTY_FUNCTION__));
if (!mDecoderStateMachine) {
return;
}
StopProgress();
mDecoderStateMachine->SetDormant(true);
}
void
MediaOmxCommonDecoder::ResumeStateMachine()
{
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
DECODER_LOG(PR_LOG_DEBUG, ("%s current time %f", __PRETTY_FUNCTION__,
mCurrentTime));
if (!mDecoderStateMachine) {
return;
}
mFallbackToStateMachine = true;
mAudioOffloadPlayer = nullptr;
int64_t timeUsecs = 0;
SecondsToUsecs(mCurrentTime, timeUsecs);
mRequestedSeekTarget = SeekTarget(timeUsecs, SeekTarget::Accurate);
mNextState = mPlayState;
ChangeState(PLAY_STATE_LOADING);
mDecoderStateMachine->SetDormant(false);
}
void
MediaOmxCommonDecoder::AudioOffloadTearDown()
{
MOZ_ASSERT(NS_IsMainThread());
DECODER_LOG(PR_LOG_DEBUG, ("%s", __PRETTY_FUNCTION__));
// mAudioOffloadPlayer can be null here if ResumeStateMachine was called
// just before because of some other error.
if (mAudioOffloadPlayer) {
// Audio offload player sent tear down event. Fallback to state machine
PlaybackPositionChanged();
ResumeStateMachine();
}
}
void
MediaOmxCommonDecoder::AddOutputStream(ProcessedMediaStream* aStream,
bool aFinishWhenEnded)
{
MOZ_ASSERT(NS_IsMainThread());
if (mAudioOffloadPlayer) {
// Offload player cannot handle MediaStream. Fallback
PlaybackPositionChanged();
ResumeStateMachine();
}
MediaDecoder::AddOutputStream(aStream, aFinishWhenEnded);
}
void
MediaOmxCommonDecoder::SetPlaybackRate(double aPlaybackRate)
{
MOZ_ASSERT(NS_IsMainThread());
if (mAudioOffloadPlayer &&
((aPlaybackRate != 0.0) && (aPlaybackRate != 1.0))) {
// Offload player cannot handle playback rate other than 1/0. Fallback
PlaybackPositionChanged();
ResumeStateMachine();
}
MediaDecoder::SetPlaybackRate(aPlaybackRate);
}
void
MediaOmxCommonDecoder::ChangeState(PlayState aState)
{
MOZ_ASSERT(NS_IsMainThread());
// Keep MediaDecoder state in sync with MediaElement irrespective of offload
// playback so it will continue to work in normal mode when offloading fails
// in between
MediaDecoder::ChangeState(aState);
if (mAudioOffloadPlayer) {
status_t err = mAudioOffloadPlayer->ChangeState(aState);
if (err != OK) {
ResumeStateMachine();
}
}
}
void
MediaOmxCommonDecoder::ApplyStateToStateMachine(PlayState aState)
{
MOZ_ASSERT(NS_IsMainThread());
// During offload playback, state machine should be in dormant state.
// ApplyStateToStateMachine() can change state machine state to
// something else or reset the seek time. So don't call this when audio is
// offloaded
if (!mAudioOffloadPlayer) {
MediaDecoder::ApplyStateToStateMachine(aState);
}
}
void
MediaOmxCommonDecoder::PlaybackPositionChanged()
{
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioOffloadPlayer) {
MediaDecoder::PlaybackPositionChanged();
return;
}
if (!mOwner || mShuttingDown) {
return;
}
double lastTime = mCurrentTime;
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mCurrentTime = mAudioOffloadPlayer->GetMediaTimeSecs();
}
if (mOwner && lastTime != mCurrentTime) {
FireTimeUpdate();
}
}
void
MediaOmxCommonDecoder::SetElementVisibility(bool aIsVisible)
{
MOZ_ASSERT(NS_IsMainThread());
if (mAudioOffloadPlayer) {
mAudioOffloadPlayer->SetElementVisibility(aIsVisible);
}
}
void
MediaOmxCommonDecoder::UpdateReadyStateForData()
{
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioOffloadPlayer) {
MediaDecoder::UpdateReadyStateForData();
return;
}
if (!mOwner || mShuttingDown) {
return;
}
mOwner->UpdateReadyStateForData(mAudioOffloadPlayer->GetNextFrameStatus());
}
void
MediaOmxCommonDecoder::SetVolume(double aVolume)
{
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioOffloadPlayer) {
MediaDecoder::SetVolume(aVolume);
return;
}
mAudioOffloadPlayer->SetVolume(aVolume);
}
MediaDecoderStateMachine*
MediaOmxCommonDecoder::CreateStateMachine()
{
mReader = CreateReader();
if (mReader != nullptr) {
mReader->SetAudioChannel(GetAudioChannel());
}
return CreateStateMachine(mReader);
}
} // namespace mozilla

View File

@ -0,0 +1,67 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MEDIA_OMX_COMMON_DECODER_H
#define MEDIA_OMX_COMMON_DECODER_H
#include "MediaDecoder.h"
namespace android {
struct MOZ_EXPORT MediaSource;
} // namespace android
namespace mozilla {
class AudioOffloadPlayerBase;
class MediaOmxCommonReader;
class MediaOmxCommonDecoder : public MediaDecoder
{
public:
MediaOmxCommonDecoder();
virtual void MetadataLoaded(MediaInfo* aInfo,
MetadataTags* aTags);
virtual void ChangeState(PlayState aState);
virtual void ApplyStateToStateMachine(PlayState aState);
virtual void SetVolume(double aVolume);
virtual void PlaybackPositionChanged();
virtual void UpdateReadyStateForData();
virtual void SetElementVisibility(bool aIsVisible);
virtual void SetCanOffloadAudio(bool aCanOffloadAudio);
virtual void AddOutputStream(ProcessedMediaStream* aStream,
bool aFinishWhenEnded);
virtual void SetPlaybackRate(double aPlaybackRate);
void AudioOffloadTearDown();
virtual MediaDecoderStateMachine* CreateStateMachine();
virtual MediaOmxCommonReader* CreateReader() = 0;
virtual MediaDecoderStateMachine* CreateStateMachine(MediaOmxCommonReader* aReader) = 0;
protected:
void PauseStateMachine();
void ResumeStateMachine();
MediaOmxCommonReader* mReader;
// Offloaded audio track
android::sp<android::MediaSource> mAudioTrack;
nsAutoPtr<AudioOffloadPlayerBase> mAudioOffloadPlayer;
// Set by the Media*Reader to indicate that the current track can be offloaded
bool mCanOffloadAudio;
// Set when offload playback of the current track fails partway through and we
// need to fall back to the state machine
bool mFallbackToStateMachine;
};
} // namespace mozilla
#endif // MEDIA_OMX_COMMON_DECODER_H
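The two pure-virtual factory methods above are the only hooks a concrete decoder has to provide: MediaOmxCommonDecoder::CreateStateMachine() first calls CreateReader() and then hands the reader to CreateStateMachine(aReader). The sketch below is not part of this commit; it only illustrates that pattern with placeholder names (MyOmxDecoder, MyOmxReader), mirroring what the MediaOmxDecoder changes further down in this diff actually do.
// Hypothetical subclass, for illustration only. MyOmxReader is assumed to be
// a MediaOmxCommonReader subclass that implements GetAudioOffloadTrack().
class MyOmxDecoder : public MediaOmxCommonDecoder
{
public:
  virtual MediaDecoder* Clone() { return new MyOmxDecoder(); }
  // Invoked by MediaOmxCommonDecoder::CreateStateMachine() to build the
  // platform-specific reader.
  virtual MediaOmxCommonReader* CreateReader()
  {
    return new MyOmxReader(this);
  }
  // Wraps the reader in the ordinary state machine used whenever audio is
  // not (or no longer) offloaded.
  virtual MediaDecoderStateMachine* CreateStateMachine(MediaOmxCommonReader* aReader)
  {
    return new MediaDecoderStateMachine(this, aReader);
  }
};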

View File

@ -0,0 +1,81 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaOmxCommonReader.h"
#include <stagefright/MediaSource.h>
#include "AbstractMediaDecoder.h"
#include "AudioChannelService.h"
#include "MediaStreamSource.h"
#ifdef MOZ_AUDIO_OFFLOAD
#include <stagefright/Utils.h>
#include <cutils/properties.h>
#include <stagefright/MetaData.h>
#endif
using namespace android;
namespace mozilla {
#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
#define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
#else
#define DECODER_LOG(type, msg)
#endif
MediaOmxCommonReader::MediaOmxCommonReader(AbstractMediaDecoder *aDecoder)
: MediaDecoderReader(aDecoder)
{
#ifdef PR_LOGGING
if (!gMediaDecoderLog) {
gMediaDecoderLog = PR_NewLogModule("MediaDecoder");
}
#endif
mAudioChannel = dom::AudioChannelService::GetDefaultAudioChannel();
}
#ifdef MOZ_AUDIO_OFFLOAD
void MediaOmxCommonReader::CheckAudioOffload()
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
char offloadProp[128];
property_get("audio.offload.disable", offloadProp, "0");
bool offloadDisable = atoi(offloadProp) != 0;
if (offloadDisable) {
return;
}
sp<MediaSource> audioOffloadTrack = GetAudioOffloadTrack();
sp<MetaData> meta = audioOffloadTrack.get()
? audioOffloadTrack->getFormat() : nullptr;
// Support audio offload only when there is no video and no streaming
bool hasNoVideo = !HasVideo();
bool isNotStreaming
= mDecoder->GetResource()->IsDataCachedToEndOfResource(0);
// There is not much benefit in trying to offload other channel types. Most of
// them aren't supported, and their duration is typically less than a minute
bool isTypeMusic = mAudioChannel == dom::AudioChannel::Content;
DECODER_LOG(PR_LOG_DEBUG, ("%s meta %p, no video %d, no streaming %d,"
" channel type %d", __FUNCTION__, meta.get(), hasNoVideo,
isNotStreaming, mAudioChannel));
if ((meta.get()) && hasNoVideo && isNotStreaming && isTypeMusic &&
canOffloadStream(meta, false, false, AUDIO_STREAM_MUSIC)) {
DECODER_LOG(PR_LOG_DEBUG, ("Can offload this audio stream"));
mDecoder->SetCanOffloadAudio(true);
}
}
#endif
} // namespace mozilla
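CheckAudioOffload() only records the decision by calling mDecoder->SetCanOffloadAudio(true); the decoder's MetadataLoaded() (at the top of this diff) is what actually creates and starts the AudioOffloadPlayer. The fragment below is not part of this commit and only sketches where a concrete reader would be expected to run the check; the ReadMetadata() signature and the HypotheticalReader name are assumptions made for illustration.
// Illustration only: run the offload check on the decode thread once the
// tracks are known, so SetCanOffloadAudio() has been called before
// MetadataLoaded() fires on the main thread.
nsresult HypotheticalReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
{
  // ... demux the container and fill *aInfo ...
#ifdef MOZ_AUDIO_OFFLOAD
  CheckAudioOffload();
#endif
  return NS_OK;
}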

View File

@ -0,0 +1,48 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MEDIA_OMX_COMMON_READER_H
#define MEDIA_OMX_COMMON_READER_H
#include "MediaDecoderReader.h"
#include <utils/RefBase.h>
#include "mozilla/dom/AudioChannelBinding.h"
namespace android {
struct MOZ_EXPORT MediaSource;
} // namespace android
namespace mozilla {
class AbstractMediaDecoder;
class MediaOmxCommonReader : public MediaDecoderReader
{
public:
MediaOmxCommonReader(AbstractMediaDecoder* aDecoder);
void SetAudioChannel(dom::AudioChannel aAudioChannel) {
mAudioChannel = aAudioChannel;
}
virtual android::sp<android::MediaSource> GetAudioOffloadTrack() = 0;
#ifdef MOZ_AUDIO_OFFLOAD
// Check whether it is possible to offload the current audio track. This uses
// canOffloadStream() from libstagefright's Utils.cpp, which is not available
// when ANDROID_VERSION < 19
void CheckAudioOffload();
#endif
protected:
dom::AudioChannel mAudioChannel;
};
} // namespace mozilla
#endif // MEDIA_OMX_COMMON_READER_H

content/media/omx/MediaOmxDecoder.cpp Executable file → Normal file
View File

@ -7,241 +7,27 @@
#include "MediaOmxDecoder.h"
#include "MediaOmxReader.h"
#include "MediaDecoderStateMachine.h"
#include "VideoUtils.h"
#include "OmxDecoder.h"
#ifdef MOZ_AUDIO_OFFLOAD
#include "AudioOffloadPlayer.h"
#endif
using namespace android;
namespace mozilla {
#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
#define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
#else
#define DECODER_LOG(type, msg)
#endif
MediaOmxDecoder::MediaOmxDecoder() :
MediaDecoder(),
mCanOffloadAudio(false),
mFallbackToStateMachine(false)
{
#ifdef PR_LOGGING
if (!gMediaDecoderLog) {
gMediaDecoderLog = PR_NewLogModule("MediaDecoder");
}
#endif
}
MediaDecoder* MediaOmxDecoder::Clone()
MediaDecoder*
MediaOmxDecoder::Clone()
{
return new MediaOmxDecoder();
}
MediaDecoderStateMachine* MediaOmxDecoder::CreateStateMachine()
MediaOmxCommonReader*
MediaOmxDecoder::CreateReader()
{
mReader = new MediaOmxReader(this);
mReader->SetAudioChannel(GetAudioChannel());
return new MediaDecoderStateMachine(this, mReader);
return new MediaOmxReader(this);
}
void MediaOmxDecoder::SetCanOffloadAudio(bool aCanOffloadAudio)
MediaDecoderStateMachine*
MediaOmxDecoder::CreateStateMachine(MediaOmxCommonReader* aReader)
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mCanOffloadAudio = aCanOffloadAudio;
}
void MediaOmxDecoder::MetadataLoaded(MediaInfo* aInfo,
MetadataTags* aTags)
{
MOZ_ASSERT(NS_IsMainThread());
MediaDecoder::MetadataLoaded(aInfo, aTags);
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
if (!mCanOffloadAudio || mFallbackToStateMachine || mOutputStreams.Length() ||
mInitialPlaybackRate != 1.0) {
DECODER_LOG(PR_LOG_DEBUG, ("In %s Offload Audio check failed",
__PRETTY_FUNCTION__));
return;
}
#ifdef MOZ_AUDIO_OFFLOAD
mAudioOffloadPlayer = new AudioOffloadPlayer(this);
#endif
mAudioOffloadPlayer->SetSource(mReader->GetAudioOffloadTrack());
status_t err = mAudioOffloadPlayer->Start(false);
if (err == OK) {
PauseStateMachine();
// Call ChangeState() to run AudioOffloadPlayer since offload state enabled
ChangeState(mPlayState);
return;
}
mAudioOffloadPlayer = nullptr;
DECODER_LOG(PR_LOG_DEBUG, ("In %s Unable to start offload audio %d."
"Switching to normal mode", __PRETTY_FUNCTION__, err));
}
void MediaOmxDecoder::PauseStateMachine()
{
MOZ_ASSERT(NS_IsMainThread());
GetReentrantMonitor().AssertCurrentThreadIn();
DECODER_LOG(PR_LOG_DEBUG, ("%s", __PRETTY_FUNCTION__));
if (!mDecoderStateMachine) {
return;
}
StopProgress();
mDecoderStateMachine->SetDormant(true);
}
void MediaOmxDecoder::ResumeStateMachine()
{
MOZ_ASSERT(NS_IsMainThread());
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
DECODER_LOG(PR_LOG_DEBUG, ("%s current time %f", __PRETTY_FUNCTION__,
mCurrentTime));
if (!mDecoderStateMachine) {
return;
}
mFallbackToStateMachine = true;
mAudioOffloadPlayer = nullptr;
int64_t timeUsecs = 0;
SecondsToUsecs(mCurrentTime, timeUsecs);
mRequestedSeekTarget = SeekTarget(timeUsecs, SeekTarget::Accurate);
mNextState = mPlayState;
ChangeState(PLAY_STATE_LOADING);
mDecoderStateMachine->SetDormant(false);
}
void MediaOmxDecoder::AudioOffloadTearDown()
{
MOZ_ASSERT(NS_IsMainThread());
DECODER_LOG(PR_LOG_DEBUG, ("%s", __PRETTY_FUNCTION__));
// mAudioOffloadPlayer can be null here if ResumeStateMachine was called
// just before because of some other error.
if (mAudioOffloadPlayer) {
// Audio offload player sent tear down event. Fallback to state machine
PlaybackPositionChanged();
ResumeStateMachine();
}
}
void MediaOmxDecoder::AddOutputStream(ProcessedMediaStream* aStream,
bool aFinishWhenEnded)
{
MOZ_ASSERT(NS_IsMainThread());
if (mAudioOffloadPlayer) {
// Offload player cannot handle MediaStream. Fallback
PlaybackPositionChanged();
ResumeStateMachine();
}
MediaDecoder::AddOutputStream(aStream, aFinishWhenEnded);
}
void MediaOmxDecoder::SetPlaybackRate(double aPlaybackRate)
{
MOZ_ASSERT(NS_IsMainThread());
if (mAudioOffloadPlayer &&
((aPlaybackRate != 0.0) || (aPlaybackRate != 1.0))) {
// Offload player cannot handle playback rate other than 1/0. Fallback
PlaybackPositionChanged();
ResumeStateMachine();
}
MediaDecoder::SetPlaybackRate(aPlaybackRate);
}
void MediaOmxDecoder::ChangeState(PlayState aState)
{
MOZ_ASSERT(NS_IsMainThread());
// Keep MediaDecoder state in sync with MediaElement irrespective of offload
// playback so it will continue to work in normal mode when offloading fails
// in between
MediaDecoder::ChangeState(aState);
if (mAudioOffloadPlayer) {
status_t err = mAudioOffloadPlayer->ChangeState(aState);
if (err != OK) {
ResumeStateMachine();
}
}
}
void MediaOmxDecoder::ApplyStateToStateMachine(PlayState aState)
{
MOZ_ASSERT(NS_IsMainThread());
// During offload playback, state machine should be in dormant state.
// ApplyStateToStateMachine() can change state machine state to
// something else or reset the seek time. So don't call this when audio is
// offloaded
if (!mAudioOffloadPlayer) {
MediaDecoder::ApplyStateToStateMachine(aState);
}
}
void MediaOmxDecoder::PlaybackPositionChanged()
{
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioOffloadPlayer) {
MediaDecoder::PlaybackPositionChanged();
return;
}
if (!mOwner || mShuttingDown) {
return;
}
double lastTime = mCurrentTime;
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mCurrentTime = mAudioOffloadPlayer->GetMediaTimeSecs();
}
if (mOwner && lastTime != mCurrentTime) {
FireTimeUpdate();
}
}
void MediaOmxDecoder::SetElementVisibility(bool aIsVisible)
{
MOZ_ASSERT(NS_IsMainThread());
if (mAudioOffloadPlayer) {
mAudioOffloadPlayer->SetElementVisibility(aIsVisible);
}
}
void MediaOmxDecoder::UpdateReadyStateForData()
{
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioOffloadPlayer) {
MediaDecoder::UpdateReadyStateForData();
return;
}
if (!mOwner || mShuttingDown)
return;
mOwner->UpdateReadyStateForData(mAudioOffloadPlayer->GetNextFrameStatus());
}
void MediaOmxDecoder::SetVolume(double aVolume)
{
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioOffloadPlayer) {
MediaDecoder::SetVolume(aVolume);
return;
}
mAudioOffloadPlayer->SetVolume(aVolume);
return new MediaDecoderStateMachine(this, aReader);
}
} // namespace mozilla

content/media/omx/MediaOmxDecoder.h Executable file → Normal file
View File

@ -6,55 +6,16 @@
#if !defined(MediaOmxDecoder_h_)
#define MediaOmxDecoder_h_
#include "base/basictypes.h"
#include "MediaDecoder.h"
#include "MediaOmxReader.h"
#include "AudioOffloadPlayerBase.h"
#include "MediaOmxCommonDecoder.h"
namespace mozilla {
class MediaOmxDecoder : public MediaDecoder
class MediaOmxDecoder : public MediaOmxCommonDecoder
{
typedef android::MediaSource MediaSource;
public:
MediaOmxDecoder();
virtual MediaDecoder* Clone();
virtual MediaDecoderStateMachine* CreateStateMachine();
virtual void MetadataLoaded(MediaInfo* aInfo,
MetadataTags* aTags);
virtual void ChangeState(PlayState aState);
virtual void ApplyStateToStateMachine(PlayState aState);
virtual void SetVolume(double aVolume);
virtual void PlaybackPositionChanged();
virtual void UpdateReadyStateForData();
virtual void SetElementVisibility(bool aIsVisible);
virtual void SetCanOffloadAudio(bool aCanOffloadAudio);
virtual void AddOutputStream(ProcessedMediaStream* aStream,
bool aFinishWhenEnded);
virtual void SetPlaybackRate(double aPlaybackRate);
void AudioOffloadTearDown();
int64_t GetSeekTime() { return mRequestedSeekTarget.mTime; }
void ResetSeekTime() { mRequestedSeekTarget.Reset(); }
private:
void PauseStateMachine();
void ResumeStateMachine();
MediaOmxReader* mReader;
// Offloaded audio track
android::sp<MediaSource> mAudioTrack;
nsAutoPtr<AudioOffloadPlayerBase> mAudioOffloadPlayer;
// Set by MediaOmxReader to denote current track can be offloaded
bool mCanOffloadAudio;
// Set when offload playback of current track fails in the middle and need to
// fallback to state machine
bool mFallbackToStateMachine;
virtual MediaOmxCommonReader* CreateReader();
virtual MediaDecoderStateMachine* CreateStateMachine(MediaOmxCommonReader* aReader);
};
} // namespace mozilla

View File

@ -19,12 +19,6 @@
#include "gfx2DGlue.h"
#include "MediaStreamSource.h"
#ifdef MOZ_AUDIO_OFFLOAD
#include <stagefright/Utils.h>
#include <cutils/properties.h>
#include <stagefright/MetaData.h>
#endif
#define MAX_DROPPED_FRAMES 25
// Try not to spend more than this much time in a single call to DecodeVideoFrame.
#define MAX_VIDEO_DECODE_SECONDS 0.1
@ -42,7 +36,7 @@ extern PRLogModuleInfo* gMediaDecoderLog;
#endif
MediaOmxReader::MediaOmxReader(AbstractMediaDecoder *aDecoder)
: MediaDecoderReader(aDecoder)
: MediaOmxCommonReader(aDecoder)
, mHasVideo(false)
, mHasAudio(false)
, mVideoSeekTimeUs(-1)
@ -425,41 +419,12 @@ void MediaOmxReader::EnsureActive() {
NS_ASSERTION(result == NS_OK, "OmxDecoder should be in play state to continue decoding");
}
#ifdef MOZ_AUDIO_OFFLOAD
void MediaOmxReader::CheckAudioOffload()
android::sp<android::MediaSource> MediaOmxReader::GetAudioOffloadTrack()
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
char offloadProp[128];
property_get("audio.offload.disable", offloadProp, "0");
bool offloadDisable = atoi(offloadProp) != 0;
if (offloadDisable) {
return;
}
mAudioOffloadTrack = mOmxDecoder->GetAudioOffloadTrack();
sp<MetaData> meta = (mAudioOffloadTrack.get()) ?
mAudioOffloadTrack->getFormat() : nullptr;
// Supporting audio offload only when there is no video, no streaming
bool hasNoVideo = !mOmxDecoder->HasVideo();
bool isNotStreaming
= mDecoder->GetResource()->IsDataCachedToEndOfResource(0);
// Not much benefit in trying to offload other channel types. Most of them
// aren't supported and also duration would be less than a minute
bool isTypeMusic = mAudioChannel == dom::AudioChannel::Content;
DECODER_LOG(PR_LOG_DEBUG, ("%s meta %p, no video %d, no streaming %d,"
" channel type %d", __FUNCTION__, meta.get(), hasNoVideo,
isNotStreaming, mAudioChannel));
if ((meta.get()) && hasNoVideo && isNotStreaming && isTypeMusic &&
canOffloadStream(meta, false, false, AUDIO_STREAM_MUSIC)) {
DECODER_LOG(PR_LOG_DEBUG, ("Can offload this audio stream"));
mDecoder->SetCanOffloadAudio(true);
if (!mOmxDecoder.get()) {
return nullptr;
}
return mOmxDecoder->GetAudioOffloadTrack();
}
#endif
} // namespace mozilla

View File

@ -6,10 +6,10 @@
#if !defined(MediaOmxReader_h_)
#define MediaOmxReader_h_
#include "MediaOmxCommonReader.h"
#include "MediaResource.h"
#include "MediaDecoderReader.h"
#include "nsRect.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include <ui/GraphicBuffer.h>
#include <stagefright/MediaSource.h>
@ -26,7 +26,7 @@ namespace dom {
class AbstractMediaDecoder;
class MediaOmxReader : public MediaDecoderReader
class MediaOmxReader : public MediaOmxCommonReader
{
nsCString mType;
bool mHasVideo;
@ -36,8 +36,6 @@ class MediaOmxReader : public MediaDecoderReader
int64_t mVideoSeekTimeUs;
int64_t mAudioSeekTimeUs;
int32_t mSkipCount;
dom::AudioChannel mAudioChannel;
android::sp<android::MediaSource> mAudioOffloadTrack;
protected:
android::sp<android::OmxDecoder> mOmxDecoder;
@ -90,22 +88,9 @@ public:
virtual void Shutdown() MOZ_OVERRIDE;
void SetAudioChannel(dom::AudioChannel aAudioChannel) {
mAudioChannel = aAudioChannel;
}
android::sp<android::MediaSource> GetAudioOffloadTrack() {
return mAudioOffloadTrack;
}
#ifdef MOZ_AUDIO_OFFLOAD
// Check whether it is possible to offload current audio track. This access
// canOffloadStream() from libStageFright Utils.cpp, which is not there in
// ANDROID_VERSION < 19
void CheckAudioOffload();
#endif
void ReleaseDecoder();
android::sp<android::MediaSource> GetAudioOffloadTrack();
};
} // namespace mozilla

View File

@ -6,11 +6,15 @@
EXPORTS += [
'AudioOffloadPlayerBase.h',
'MediaOmxCommonDecoder.h',
'MediaOmxCommonReader.h',
'MediaOmxDecoder.h',
'MediaOmxReader.h',
]
SOURCES += [
'MediaOmxCommonDecoder.cpp',
'MediaOmxCommonReader.cpp',
'MediaOmxDecoder.cpp',
'MediaOmxReader.cpp',
'MediaStreamSource.cpp',

View File

@ -20,6 +20,7 @@
#include "nsIConsoleService.h"
#include "nsIScriptError.h"
#include "nsDocShellLoadTypes.h"
#include "nsIMultiPartChannel.h"
using namespace mozilla;
@ -28,8 +29,9 @@ using namespace mozilla;
//*****************************************************************************
nsDSURIContentListener::nsDSURIContentListener(nsDocShell* aDocShell)
: mDocShell(aDocShell),
mParentContentListener(nullptr)
: mDocShell(aDocShell)
, mExistingJPEGRequest(nullptr)
, mParentContentListener(nullptr)
{
}
@ -119,7 +121,32 @@ nsDSURIContentListener::DoContent(const char* aContentType,
mDocShell->SetLoadType(aIsContentPreferred ? LOAD_LINK : LOAD_NORMAL);
}
rv = mDocShell->CreateContentViewer(aContentType, request, aContentHandler);
// For a multipart JPEG request (mjpeg) we don't really want to create a
// new viewer, since the one we already have is capable of rendering
// multipart JPEG correctly (see bug 625012)
nsCOMPtr<nsIChannel> baseChannel;
if (nsCOMPtr<nsIMultiPartChannel> mpchan = do_QueryInterface(request)) {
mpchan->GetBaseChannel(getter_AddRefs(baseChannel));
}
bool reuseCV = baseChannel
&& baseChannel == mExistingJPEGRequest
&& nsDependentCString(aContentType).EqualsLiteral("image/jpeg");
if (mExistingJPEGStreamListener && reuseCV) {
nsRefPtr<nsIStreamListener> copy(mExistingJPEGStreamListener);
copy.forget(aContentHandler);
rv = NS_OK;
} else {
rv = mDocShell->CreateContentViewer(aContentType, request, aContentHandler);
if (NS_SUCCEEDED(rv) && reuseCV) {
mExistingJPEGStreamListener = *aContentHandler;
} else {
mExistingJPEGStreamListener = nullptr;
}
mExistingJPEGRequest = baseChannel;
}
if (rv == NS_ERROR_REMOTE_XUL) {
request->Cancel(rv);

View File

@ -34,6 +34,8 @@ protected:
void DropDocShellreference() {
mDocShell = nullptr;
mExistingJPEGRequest = nullptr;
mExistingJPEGStreamListener = nullptr;
}
// Determine if X-Frame-Options allows content to be framed
@ -53,6 +55,9 @@ protected:
XFOHeader aHeader);
protected:
nsDocShell* mDocShell;
// Hack to handle multipart images without creating a new viewer
nsCOMPtr<nsIStreamListener> mExistingJPEGStreamListener;
nsCOMPtr<nsIChannel> mExistingJPEGRequest;
// Store the parent listener in either of these depending on
// if supports weak references or not. Proper weak refs are

View File

@ -58,7 +58,7 @@ GonkCameraHardware::OnNewFrame()
}
RefPtr<TextureClient> buffer = mNativeWindow->getCurrentBuffer();
if (!buffer) {
DOM_CAMERA_LOGW("received null frame");
DOM_CAMERA_LOGE("received null frame");
return;
}
OnNewPreviewFrame(mTarget, buffer);
@ -185,9 +185,13 @@ GonkCameraHardware::Init()
#if ANDROID_VERSION >= 19
mNativeWindow = new GonkNativeWindow(GonkCameraHardware::MIN_UNDEQUEUED_BUFFERS);
sp<GonkBufferQueue> bq = mNativeWindow->getBufferQueue();
bq->setSynchronousMode(false);
mCamera->setPreviewTarget(mNativeWindow->getBufferQueue());
#elif ANDROID_VERSION >= 17
mNativeWindow = new GonkNativeWindow(GonkCameraHardware::MIN_UNDEQUEUED_BUFFERS);
sp<GonkBufferQueue> bq = mNativeWindow->getBufferQueue();
bq->setSynchronousMode(false);
mCamera->setPreviewTexture(mNativeWindow->getBufferQueue());
#else
mNativeWindow = new GonkNativeWindow();

View File

@ -5,7 +5,7 @@
#include "nsISupports.idl"
[scriptable, uuid(0e56f04d-cda4-4a55-ab83-e5e29ddd370e)]
[scriptable, uuid(231df043-3a32-43c4-aaac-7ad2da81e84f)]
interface nsIPluginTag : nsISupports
{
// enabledState is stored as one of the following as an integer in prefs,
@ -25,6 +25,11 @@ interface nsIPluginTag : nsISupports
*/
readonly attribute boolean blocklisted;
/**
* true if the state is non-default and locked, false otherwise.
*/
readonly attribute boolean isEnabledStateLocked;
readonly attribute boolean disabled;
readonly attribute boolean clicktoplay;
attribute unsigned long enabledState;

View File

@ -16,6 +16,7 @@
#include "nsPluginLogging.h"
#include "nsNPAPIPlugin.h"
#include "mozilla/Preferences.h"
#include "mozilla/unused.h"
#include <cctype>
#include "mozilla/dom/EncodingUtils.h"
@ -340,6 +341,22 @@ nsPluginTag::GetBlocklisted(bool* aBlocklisted)
return NS_OK;
}
NS_IMETHODIMP
nsPluginTag::GetIsEnabledStateLocked(bool* aIsEnabledStateLocked)
{
*aIsEnabledStateLocked = false;
nsCOMPtr<nsIPrefBranch> prefs(do_GetService(NS_PREFSERVICE_CONTRACTID));
if (NS_WARN_IF(!prefs)) {
return NS_ERROR_FAILURE;
}
unused << prefs->PrefIsLocked(GetStatePrefNameForPlugin(this).get(),
aIsEnabledStateLocked);
return NS_OK;
}
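The new isEnabledStateLocked attribute lets callers detect that a plugin's enabledState preference has been locked (for example by an administrator), in which case attempts to change the state should be suppressed. A minimal, hypothetical C++ consumer is sketched below; it is not part of this commit and relies only on the generated GetIsEnabledStateLocked() getter implemented above.
// Illustration only: treat a plugin's enabled state as read-only when the
// underlying preference is locked.
static bool
IsPluginStateLocked(nsIPluginTag* aTag)
{
  bool locked = false;
  if (aTag) {
    aTag->GetIsEnabledStateLocked(&locked);
  }
  return locked;
}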
bool
nsPluginTag::IsClicktoplay()
{

View File

@ -27,9 +27,6 @@ FAIL_ON_WARNINGS = True
FINAL_LIBRARY = 'xul'
RESOURCE_FILES += [
'res/caret_left.svg',
'res/caret_middle.svg',
'res/caret_right.svg',
'res/EditorOverride.css',
'res/grabber.gif',
'res/table-add-column-after-active.gif',
@ -50,4 +47,19 @@ RESOURCE_FILES += [
'res/table-remove-row-active.gif',
'res/table-remove-row-hover.gif',
'res/table-remove-row.gif',
'res/text_caret.png',
'res/text_caret@1.5x.png',
'res/text_caret@2.25x.png',
'res/text_caret@2x.png',
'res/text_caret_tilt_left.png',
'res/text_caret_tilt_left@1.5x.png',
'res/text_caret_tilt_left@2.25x.png',
'res/text_caret_tilt_left@2x.png',
'res/text_caret_tilt_right.png',
'res/text_caret_tilt_right@1.5x.png',
'res/text_caret_tilt_right@2.25x.png',
'res/text_caret_tilt_right@2x.png',
'res/text_selection_handle.png',
'res/text_selection_handle@1.5.png',
'res/text_selection_handle@2.png',
]

View File

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="29px" height="31px" viewBox="0 0 29 31" style="enable-background:new 0 0 29 31;" xml:space="preserve">
<!-- TODO: Enable shadow after bug 1015575 is resolved.
<defs>
<filter id="caretFilter">
<feOffset result="offsetOut" in="SourceAlpha" dx="1" dy="1" />
<feGaussianBlur result="blurOut" in="offsetOut" stdDeviation="0.5" />
<feBlend in="SourceGraphic" in2="blurOut" mode="normal" />
</filter>
</defs>
<g fill="#2da9e3" filter="url(#caretFilter)">
-->
<g fill="#2da9e3">
<path d="M25.368,2.674c-0.049,0.104-0.09,0.209-0.134,0.314C25.304,2.893,25.347,2.786,25.368,2.674z"/>
<path d="M24.27,1.734c0.003-0.001,0.008-0.003,0.013-0.004C24.277,1.73,24.272,1.733,24.27,1.734z"/>
<path d="M24.583,8.574C24.25,6.7,24.478,4.755,25.234,2.989c0.044-0.105,0.085-0.21,0.134-0.314
c0.053-0.254-0.016-0.528-0.204-0.73c-0.232-0.249-0.581-0.322-0.882-0.215c-0.005,0.001-0.01,0.003-0.013,0.004
c-1.915,0.71-4.001,0.798-5.954,0.277C15.015,0.898,11.222,1.587,8.5,4.134c-3.947,3.691-4.155,9.882-0.464,13.828
c3.691,3.947,9.881,4.154,13.828,0.462C24.64,15.828,25.562,11.994,24.583,8.574z"/>
</g>
</svg>


View File

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="29px" height="31px" style="enable-background:new 0 0 29 31;" xml:space="preserve">
<!-- TODO: Enable shadow after bug 1015575 is resolved.
<defs>
<filter id="caretFilter">
<feOffset result="offsetOut" in="SourceAlpha" dx="1" dy="1" />
<feGaussianBlur result="blurOut" in="offsetOut" stdDeviation="0.5" />
<feBlend in="SourceGraphic" in2="blurOut" mode="normal" />
</filter>
</defs>
<g fill="#2da9e3" filter="url(#caretFilter)">
-->
<g fill="#2da9e3">
<path d="M15.174,1.374c0.042,0.106,0.091,0.208,0.138,0.312C15.288,1.57,15.239,1.466,15.174,1.374z"/>
<path d="M13.735,1.534c0.002-0.003,0.004-0.009,0.006-0.013C13.739,1.525,13.737,1.531,13.735,1.534z"/>
<path d="M18.945,5.978c-1.596-1.038-2.861-2.532-3.634-4.292c-0.047-0.104-0.096-0.206-0.138-0.312
c-0.15-0.212-0.396-0.349-0.674-0.349c-0.34,0-0.631,0.204-0.759,0.497c-0.002,0.004-0.004,0.009-0.006,0.013
c-0.789,1.883-2.149,3.467-3.864,4.538c-3.068,1.651-5.155,4.892-5.155,8.62c0,5.404,4.379,9.784,9.783,9.784
c5.403,0,9.783-4.38,9.783-9.784C24.283,10.891,22.113,7.598,18.945,5.978z"/>
</g>
</svg>


View File

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="29px" height="31px" viewBox="0 0 29 31" style="enable-background:new 0 0 29 31;" xml:space="preserve">
<!-- TODO: Enable shadow after bug 1015575 is resolved.
<defs>
<filter id="caretFilter">
<feOffset result="offsetOut" in="SourceAlpha" dx="1" dy="1" />
<feGaussianBlur result="blurOut" in="offsetOut" stdDeviation="0.5" />
<feBlend in="SourceGraphic" in2="blurOut" mode="normal" />
</filter>
</defs>
<g fill="#2da9e3" filter="url(#caretFilter)">
-->
<g fill="#2da9e3">
<path fill="#2da9e3" d="M27.296,2.674c-0.049,0.104-0.09,0.209-0.134,0.314C27.231,2.893,27.274,2.786,27.296,2.674z"/>
<path fill="#2da9e3" d="M26.197,1.734C26.2,1.733,26.205,1.73,26.21,1.729C26.205,1.73,26.2,1.733,26.197,1.734z"/>
<path fill="#2da9e3" d="M4.299,8.574C4.632,6.7,4.404,4.755,3.647,2.989c-0.044-0.105-0.085-0.21-0.134-0.314C3.461,2.42,3.529,2.146,3.718,1.944
C3.95,1.696,4.299,1.623,4.6,1.729c0.005,0.001,0.01,0.003,0.013,0.004c1.915,0.71,4.001,0.798,5.954,0.277
c3.301-1.113,7.094-0.423,9.815,2.123c3.947,3.691,4.156,9.882,0.465,13.828c-3.691,3.947-9.881,4.154-13.828,0.462
C4.242,15.828,3.319,11.994,4.299,8.574z"/>
</g>
</svg>


Some files were not shown because too many files have changed in this diff.