Merge m-c to inbound on a CLOSED TREE.

Ryan VanderMeulen 2014-04-10 22:16:50 -04:00
commit 7ba7f0e455
78 changed files with 2395 additions and 322 deletions

View File

@ -77,13 +77,15 @@ SimulatorScreen.prototype = {
classID: Components.ID('{c83c02c0-5d43-4e3e-987f-9173b313e880}'),
QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver,
Ci.nsISupportsWeakReference]),
_windows: new Set(),
_windows: new Map(),
observe: function (subject, topic, data) {
let windows = this._windows;
switch (topic) {
case 'profile-after-change':
Services.obs.addObserver(this, 'document-element-inserted', false);
Services.obs.addObserver(this, 'simulator-orientation-change', false);
Services.obs.addObserver(this, 'inner-window-destroyed', false);
break;
case 'document-element-inserted':
@ -94,16 +96,19 @@ SimulatorScreen.prototype = {
hookScreen(window);
let windows = this._windows;
window.addEventListener('unload', function unload() {
window.removeEventListener('unload', unload);
windows.delete(window);
});
windows.add(window);
var id = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils)
.currentInnerWindowID;
windows.set(id, window);
break;
case 'inner-window-destroyed':
var id = subject.QueryInterface(Ci.nsISupportsPRUint64).data;
windows.delete(id);
break;
case 'simulator-orientation-change':
this._windows.forEach(fireOrientationEvent);
windows.forEach(fireOrientationEvent);
break;
}
}

View File

@ -19,7 +19,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="55bcc2d7e44dc805c24b57d1e783fc26e8a2ee86"/>

View File

@ -17,7 +17,7 @@
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="8d6c36d74ba9aefbc8c3618fc93dd4907a0dbf5e"/>

View File

@ -15,7 +15,7 @@
<project name="platform_build" path="build" remote="b2g" revision="52c909ccead537f8f9dbf634f3e6639078a8b0bd">
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="gaia" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>

View File

@ -19,7 +19,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="55bcc2d7e44dc805c24b57d1e783fc26e8a2ee86"/>

View File

@ -18,7 +18,7 @@
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="8d6c36d74ba9aefbc8c3618fc93dd4907a0dbf5e"/>

View File

@ -4,6 +4,6 @@
"remote": "",
"branch": ""
},
"revision": "343689e6afb7479b23dc33dd8c7232880c2288ed",
"revision": "3cd8d6565396a988e0b84044c79ddb48bab527c9",
"repo_path": "/integration/gaia-central"
}

View File

@ -17,7 +17,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>
@ -100,7 +100,7 @@
<project name="quic/lf/b2g/external/jsmin" path="external/jsmin" revision="cec896f0affaa0226c02605ad28d42df1bc0e393"/>
<project name="device/qcom/common" path="device/qcom/common" revision="d13aaf080177b7c48f243d51827db5c7a7873cd0"/>
<project name="platform/vendor/qcom/msm7627a" path="device/qcom/msm7627a" revision="f06bcacc6f13cec895dc5d4c2385c076396194ec"/>
<project name="android-device-hamachi" path="device/qcom/hamachi" remote="b2g" revision="a9b1bbfb55c2b28e884a367f89855e31d86a328a"/>
<project name="android-device-hamachi" path="device/qcom/hamachi" remote="b2g" revision="7a67afcef2846ad7938059111e1d0bc4de9f0445"/>
<project name="kernel/msm" path="kernel" revision="a6578b9cacf9079f2dcf5bfe77c31b1be18809e3"/>
<project name="platform/hardware/qcom/camera" path="hardware/qcom/camera" revision="19933e5d182a4799c6217b19a18562193a419298"/>
<project name="platform/hardware/qcom/display" path="hardware/qcom/display" revision="5a58382180c70d0c446badc9c9837918ab69ec60"/>

View File

@ -15,7 +15,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View File

@ -19,7 +19,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View File

@ -17,7 +17,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View File

@ -17,7 +17,7 @@
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="8d6c36d74ba9aefbc8c3618fc93dd4907a0dbf5e"/>

View File

@ -17,7 +17,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="4bc61f9faa7fe76c911b4a7f3f89424cd38400bf"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="1368d716072adf308e1b435ac828f97545a045f1"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View File

@ -784,6 +784,10 @@ bin/components/@DLL_PREFIX@nkgnomevfs@DLL_SUFFIX@
@BINPATH@/components/HelperAppDialog.js
@BINPATH@/components/DownloadsUI.js
#ifndef MOZ_WIDGET_GONK
@BINPATH@/components/SimulatorScreen.js
#endif
#ifdef MOZ_SERVICES_FXACCOUNTS
@BINPATH@/components/FxAccountsUIGlue.js
@BINPATH@/components/services_fxaccounts.xpt

View File

@ -6,7 +6,7 @@
const { Cc, Ci, Cu } = require("chrome");
const { SimulatorProcess } = require("./simulator-process");
const { Promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
const { Promise: promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
const Self = require("sdk/self");
const System = require("sdk/system");
const { Simulator } = Cu.import("resource://gre/modules/devtools/Simulator.jsm");
@ -23,12 +23,12 @@ function launch({ port }) {
process.remoteDebuggerPort = port;
process.run();
return Promise.resolve();
return promise.resolve();
}
function close() {
if (!process) {
return Promise.resolve();
return promise.resolve();
}
let p = process;
process = null;

View File

@ -17,7 +17,7 @@ const Runtime = require("sdk/system/runtime");
const Self = require("sdk/self");
const URL = require("sdk/url");
const Subprocess = require("subprocess");
const { Promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
const { Promise: promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
const { rootURI: ROOT_URI } = require('@loader/options');
const PROFILE_URL = ROOT_URI + "profile/";
@ -112,7 +112,7 @@ exports.SimulatorProcess = Class({
// request a b2g instance kill
kill: function() {
let deferred = Promise.defer();
let deferred = promise.defer();
if (this.process) {
this.once("exit", (exitCode) => {
this.shuttingDown = false;
@ -125,7 +125,7 @@ exports.SimulatorProcess = Class({
}
return deferred.promise;
} else {
return Promise.resolve(undefined);
return promise.resolve(undefined);
}
},

View File

@ -234,7 +234,7 @@ CustomizeMode.prototype = {
this._showPanelCustomizationPlaceholders();
yield this._wrapToolbarItems();
yield this.populatePalette();
this.populatePalette();
this.visiblePalette.addEventListener("dragstart", this, true);
this.visiblePalette.addEventListener("dragover", this, true);
@ -647,7 +647,7 @@ CustomizeMode.prototype = {
let fragment = this.document.createDocumentFragment();
let toolboxPalette = this.window.gNavToolbox.palette;
return Task.spawn(function() {
try {
let unusedWidgets = CustomizableUI.getUnusedWidgets(toolboxPalette);
for (let widget of unusedWidgets) {
let paletteItem = this.makePaletteItem(widget, "palette");
@ -657,7 +657,9 @@ CustomizeMode.prototype = {
this.visiblePalette.appendChild(fragment);
this._stowedPalette = this.window.gNavToolbox.palette;
this.window.gNavToolbox.palette = this.visiblePalette;
}.bind(this)).then(null, ERROR);
} catch (ex) {
ERROR(ex);
}
},
//XXXunf Maybe this should use -moz-element instead of wrapping the node?
@ -985,7 +987,7 @@ CustomizeMode.prototype = {
CustomizableUI.reset();
yield this._wrapToolbarItems();
yield this.populatePalette();
this.populatePalette();
this.persistCurrentSets(true);
@ -1011,7 +1013,7 @@ CustomizeMode.prototype = {
CustomizableUI.undoReset();
yield this._wrapToolbarItems();
yield this.populatePalette();
this.populatePalette();
this.persistCurrentSets(true);

View File

@ -8,11 +8,12 @@
let test = asyncTest(function*() {
yield addTab("data:text/html;charset=utf-8,browser_ruleview_update.js");
let {toolbox, inspector, view} = yield openRuleView();
content.document.body.innerHTML = '<div id="testid" class="testclass">Styled Node</div>';
let testElement = getNode("#testid");
testElement.setAttribute("style", "margin-top: 1px; padding-top: 5px;");
let {toolbox, inspector, view} = yield openRuleView();
yield selectNode(testElement, inspector);
yield testPropertyChanges(inspector, view, testElement);

View File

@ -9,8 +9,10 @@
%define forwardTransitionLength 150ms
%define conditionalForwardWithUrlbar window:not([chromehidden~="toolbar"]) #urlbar-container
%define conditionalForwardWithUrlbarWidth 30
%define nativeTitlebarHeight 22px
%define spaceAboveTabbar 9px
%define toolbarButtonPressed :hover:active:not([disabled="true"]):not([cui-areatype="menu-panel"])
%define windowButtonMarginTop 11px
@namespace url("http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul");
@namespace html url("http://www.w3.org/1999/xhtml");
@ -42,6 +44,8 @@
background-color: #eeeeee;
}
/** Begin titlebar **/
#titlebar-buttonbox > .titlebar-button {
display: none;
}
@ -60,14 +64,44 @@
}
}
#main-window[chromehidden~="toolbar"]:not(:-moz-lwtheme) > #titlebar {
padding-top: 22px;
}
#main-window:not(:-moz-lwtheme):not([privatebrowsingmode=temporary]) > #titlebar {
-moz-appearance: -moz-window-titlebar;
}
#main-window:not([tabsintitlebar]) > #titlebar {
min-height: @nativeTitlebarHeight@;
}
/**
* When we hide #titlebar-content, we display the native window buttons in
* their default locations.
*/
#main-window:not([tabsintitlebar]) > #titlebar > #titlebar-content {
display: none;
}
/**
* For tabs in titlebar on OS X, we stretch the titlebar down so that the
* tabstrip can overlap it.
*/
#main-window[tabsintitlebar] > #titlebar {
min-height: calc(@tabMinHeight@ + @spaceAboveTabbar@ - @tabToolbarNavbarOverlap@);
}
/**
* We also vertically center the window buttons.
*/
#main-window[tabsintitlebar] > #titlebar > #titlebar-content > #titlebar-buttonbox-container,
#main-window[tabsintitlebar] > #titlebar > #titlebar-content > #titlebar-fullscreen-button {
margin-top: @windowButtonMarginTop@;
}
#main-window[tabsintitlebar][customize-entered] > #titlebar {
-moz-appearance: none;
}
/** End titlebar **/
#main-window[chromehidden~="toolbar"][chromehidden~="location"][chromehidden~="directories"] {
border-top: 1px solid rgba(0,0,0,0.65);
}
@ -2757,11 +2791,6 @@ toolbarbutton.chevron > .toolbarbutton-menu-dropmarker {
box-shadow: @focusRingShadow@;
}
#main-window:not(:-moz-lwtheme) > #titlebar {
padding-top: @spaceAboveTabbar@;
min-height: @tabHeight@;
}
/* We want the titlebar to be unified, but we still want to be able
* to give #TabsToolbar a background. So we can't set -moz-appearance:
* toolbar on #TabsToolbar itself. Instead, we set it on a box of the
@ -2774,8 +2803,8 @@ toolbarbutton.chevron > .toolbarbutton-menu-dropmarker {
content: '';
display: block;
-moz-appearance: toolbar;
height: @tabHeight@;
margin-bottom: -@tabHeight@;
height: @tabMinHeight@;
margin-bottom: -@tabMinHeight@;
visibility: hidden;
}
@ -3943,7 +3972,7 @@ toolbarbutton.chevron > .toolbarbutton-menu-dropmarker {
* not displayed in native full-screen.
* Also add the height of the tabs, since we're calculating the
* total height of this pseudo-element, not just the top-padding. */
height: calc(@tabHeight@ + @spaceAboveTabbar@) !important;
height: calc(@tabMinHeight@ + @spaceAboveTabbar@) !important;
}
#main-window[inFullscreen][privatebrowsingmode=temporary] {
/* Adjust by the full element height of #titlebar, since that element is
@ -4274,36 +4303,11 @@ window > chatbox {
%include ../shared/customizableui/customizeMode.inc.css
#main-window[customize-entered] > #titlebar {
padding-top: 0;
}
#main-window[tabsintitlebar]:not([customizing]):not(:-moz-lwtheme) > #titlebar > #titlebar-content,
#main-window[tabsintitlebar][customize-entering] > #titlebar > #titlebar-content,
#main-window[tabsintitlebar][customize-exiting] > #titlebar > #titlebar-content {
margin-top: 2px;
margin-bottom: 11px;
}
#main-window[tabsintitlebar][customize-entered] > #titlebar > #titlebar-content,
#main-window:not([tabsintitlebar]):not(:-moz-lwtheme) > #titlebar > #titlebar-content {
margin-top: 11px;
margin-bottom: 0px;
}
#main-window:not([tabsintitlebar]):-moz-lwtheme > #titlebar {
margin-bottom: 5px;
}
#main-window[tabsintitlebar]:-moz-lwtheme > #titlebar > #titlebar-content {
margin-top: 11px;
margin-bottom: 11px;
}
#main-window[customizing] {
background-color: rgb(178,178,178);
}
#main-window[tabsintitlebar][customize-entered] > #titlebar,
#main-window[customize-entered] > #tab-view-deck {
background-image: url("chrome://browser/skin/customizableui/customizeMode-gridTexture.png"),
url("chrome://browser/skin/customizableui/background-noise-toolbar.png"),
@ -4328,10 +4332,6 @@ window > chatbox {
border-bottom-width: 0;
}
#main-window[tabsintitlebar][customize-entered] #TabsToolbar {
margin-top: 8px;
}
#main-window[customize-entered] #TabsToolbar {
background-clip: padding-box;
border-right: 3px solid transparent;

View File

@ -12,3 +12,4 @@
%define inAnyPanel :-moz-any(:not([cui-areatype="toolbar"]), [overflowedItem=true])
%define tabToolbarNavbarOverlap 1px
%define tabMinHeight 31px

View File

@ -4,7 +4,6 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
%endif
%define tabHeight 31px
%define tabCurveWidth 30px
%define tabCurveHalfWidth 15px
@ -21,7 +20,7 @@
}
#tabbrowser-tabs {
min-height: @tabHeight@;
min-height: @tabMinHeight@;
}
.tabbrowser-tab,
@ -119,7 +118,7 @@
.tab-background-end,
.tab-background-end[selected=true]::after,
.tab-background-end[selected=true]::before {
min-height: @tabHeight@;
min-height: @tabMinHeight@;
width: @tabCurveWidth@;
}

View File

@ -581,7 +581,7 @@ MediaEngineWebRTCVideoSource::StartImpl(webrtc::CaptureCapability aCapability) {
config.mPreviewSize.width = aCapability.width;
config.mPreviewSize.height = aCapability.height;
mCameraControl->Start(&config);
mCameraControl->Set(CAMERA_PARAM_PICTURESIZE, config.mPreviewSize);
mCameraControl->Set(CAMERA_PARAM_PICTURE_SIZE, config.mPreviewSize);
hal::RegisterScreenConfigurationObserver(this);
}

View File

@ -185,11 +185,21 @@ DOMInterfaces = {
}
},
'CameraDetectedFace': {
'nativeType': 'mozilla::dom::DOMCameraDetectedFace',
'headerFile': 'DOMCameraDetectedFace.h'
},
'CameraManager': {
'nativeType': 'nsDOMCameraManager',
'headerFile': 'DOMCameraManager.h'
},
'CameraPoint': {
'nativeType': 'mozilla::dom::DOMCameraPoint',
'headerFile': 'DOMCameraDetectedFace.h'
},
'CanvasRenderingContext2D': {
'implicitJSContext': [
'createImageData', 'getImageData'

View File

@ -38,6 +38,17 @@ CameraControlImpl::CameraControlImpl(uint32_t aCameraId)
sCameraThread = do_GetWeakReference(mCameraThread);
}
// Care must be taken with the mListenerLock read-write lock to prevent
// deadlocks. Currently this is handled by ensuring that any attempts to
// acquire the lock for writing (as in Add/RemoveListener()) happen in a
// runnable dispatched to the Camera Thread--even if the method is being
// called from that thread. This ensures that if a registered listener
// (which is invoked with a read-lock) tries to call Add/RemoveListener(),
// the lock-for-writing attempt won't happen until the listener has
// completed.
//
// Multiple parallel listeners being invoked are not a problem because
// the read-write lock allows multiple simultaneous read-locks.
mListenerLock = PR_NewRWLock(PR_RWLOCK_RANK_NONE, "CameraControlImpl.Listeners.Lock");
}
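
The comment above captures a small concurrency protocol: listeners are invoked while holding the read side of mListenerLock, and any attempt to take the write side (Add/RemoveListener()) is packaged as a runnable and dispatched to the Camera Thread, so a listener can never re-enter the lock for writing on its own stack. A minimal standalone C++17 sketch of that discipline follows; it is not the Gecko implementation, and CameraThread, ListenerRegistry and the std::shared_mutex/task-queue pairing are illustrative stand-ins for the patch's nsIThread dispatch and NSPR read-write lock.

// Sketch only: models the "writers go through the camera thread" rule from
// the comment above, not the actual CameraControlImpl code.
#include <functional>
#include <iostream>
#include <mutex>
#include <queue>
#include <shared_mutex>
#include <vector>

class CameraThread {            // stand-in for the real Camera Thread
public:
  void Dispatch(std::function<void()> aTask) { mQueue.push(std::move(aTask)); }
  void DrainForTest() {         // run queued tasks; the real thread loops forever
    while (!mQueue.empty()) {
      auto task = std::move(mQueue.front());
      mQueue.pop();
      task();
    }
  }
private:
  std::queue<std::function<void()>> mQueue;
};

class ListenerRegistry {
public:
  explicit ListenerRegistry(CameraThread& aThread) : mThread(aThread) {}

  // Writers never take the lock inline; the mutation is queued on the camera
  // thread, so a listener invoked under the read lock can call AddListener()
  // without trying to acquire the write lock on its own stack.
  void AddListener(std::function<void(bool)> aListener) {
    mThread.Dispatch([this, aListener] {
      std::unique_lock<std::shared_mutex> lock(mLock);
      mListeners.push_back(aListener);
    });
  }

  // Notifications may run on any thread; multiple simultaneous read locks
  // are allowed, mirroring the parallel-listener note in the comment.
  void NotifyAutoFocusMoving(bool aIsMoving) {
    std::shared_lock<std::shared_mutex> lock(mLock);
    for (auto& listener : mListeners) {
      listener(aIsMoving);
    }
  }

private:
  CameraThread& mThread;
  std::shared_mutex mLock;
  std::vector<std::function<void(bool)>> mListeners;
};

int main() {
  CameraThread thread;
  ListenerRegistry registry(thread);
  registry.AddListener([](bool aMoving) {
    std::cout << "autofocus moving: " << aMoving << '\n';
  });
  thread.DrainForTest();               // the deferred AddListener runs here
  registry.NotifyAutoFocusMoving(true);
}
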
@ -120,6 +131,31 @@ CameraControlImpl::OnAutoFocusComplete(bool aAutoFocusSucceeded)
}
}
void
CameraControlImpl::OnAutoFocusMoving(bool aIsMoving)
{
RwLockAutoEnterRead lock(mListenerLock);
for (uint32_t i = 0; i < mListeners.Length(); ++i) {
CameraControlListener* l = mListeners[i];
l->OnAutoFocusMoving(aIsMoving);
}
}
void
CameraControlImpl::OnFacesDetected(const nsTArray<Face>& aFaces)
{
// This callback can run on threads other than the Main Thread and
// the Camera Thread. On Gonk, it is called from the camera
// library's face detection thread.
RwLockAutoEnterRead lock(mListenerLock);
for (uint32_t i = 0; i < mListeners.Length(); ++i) {
CameraControlListener* l = mListeners[i];
l->OnFacesDetected(aFaces);
}
}
void
CameraControlImpl::OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType)
{
@ -404,6 +440,50 @@ CameraControlImpl::AutoFocus(bool aCancelExistingCall)
new Message(this, CameraControlListener::kInAutoFocus, aCancelExistingCall), NS_DISPATCH_NORMAL);
}
nsresult
CameraControlImpl::StartFaceDetection()
{
class Message : public ControlMessage
{
public:
Message(CameraControlImpl* aCameraControl,
CameraControlListener::CameraErrorContext aContext)
: ControlMessage(aCameraControl, aContext)
{ }
nsresult
RunImpl() MOZ_OVERRIDE
{
return mCameraControl->StartFaceDetectionImpl();
}
};
return mCameraThread->Dispatch(
new Message(this, CameraControlListener::kInStartFaceDetection), NS_DISPATCH_NORMAL);
}
nsresult
CameraControlImpl::StopFaceDetection()
{
class Message : public ControlMessage
{
public:
Message(CameraControlImpl* aCameraControl,
CameraControlListener::CameraErrorContext aContext)
: ControlMessage(aCameraControl, aContext)
{ }
nsresult
RunImpl() MOZ_OVERRIDE
{
return mCameraControl->StopFaceDetectionImpl();
}
};
return mCameraThread->Dispatch(
new Message(this, CameraControlListener::kInStopFaceDetection), NS_DISPATCH_NORMAL);
}
nsresult
CameraControlImpl::TakePicture()
{

View File

@ -43,6 +43,8 @@ public:
virtual nsresult StartPreview() MOZ_OVERRIDE;
virtual nsresult StopPreview() MOZ_OVERRIDE;
virtual nsresult AutoFocus(bool aCancelExistingCall) MOZ_OVERRIDE;
virtual nsresult StartFaceDetection() MOZ_OVERRIDE;
virtual nsresult StopFaceDetection() MOZ_OVERRIDE;
virtual nsresult TakePicture() MOZ_OVERRIDE;
virtual nsresult StartRecording(DeviceStorageFileDescriptor* aFileDescriptor,
const StartRecordingOptions* aOptions) MOZ_OVERRIDE;
@ -53,14 +55,17 @@ public:
virtual void Shutdown() MOZ_OVERRIDE;
// Event handlers called directly from outside this class.
void OnShutter();
void OnClosed();
void OnError(CameraControlListener::CameraErrorContext aContext,
CameraControlListener::CameraError aError);
void OnAutoFocusMoving(bool aIsMoving);
protected:
// Event handlers.
void OnAutoFocusComplete(bool aAutoFocusSucceeded);
void OnFacesDetected(const nsTArray<Face>& aFaces);
void OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType);
bool OnNewPreviewFrame(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight);
@ -98,6 +103,8 @@ protected:
virtual nsresult StartPreviewImpl() = 0;
virtual nsresult StopPreviewImpl() = 0;
virtual nsresult AutoFocusImpl(bool aCancelExistingCall) = 0;
virtual nsresult StartFaceDetectionImpl() = 0;
virtual nsresult StopFaceDetectionImpl() = 0;
virtual nsresult TakePictureImpl() = 0;
virtual nsresult StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
const StartRecordingOptions* aOptions) = 0;

View File

@ -78,13 +78,17 @@ public:
virtual void OnConfigurationChange(const CameraListenerConfiguration& aConfiguration) { }
virtual void OnAutoFocusComplete(bool aAutoFocusSucceeded) { }
virtual void OnAutoFocusMoving(bool aIsMoving) { }
virtual void OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType) { }
virtual void OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces) { }
enum CameraErrorContext
{
kInStartCamera,
kInStopCamera,
kInAutoFocus,
kInStartFaceDetection,
kInStopFaceDetection,
kInTakePicture,
kInStartRecording,
kInStopRecording,

View File

@ -9,6 +9,7 @@
#include "nsContentUtils.h"
#include "mozilla/dom/CameraManagerBinding.h"
#include "mozilla/dom/CameraCapabilitiesBinding.h"
#include "Navigator.h"
#include "CameraCommon.h"
#include "ICameraControl.h"
#include "CameraRecorderProfiles.h"
@ -42,6 +43,13 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(CameraCapabilities)
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
/* static */
bool
CameraCapabilities::HasSupport(JSContext* aCx, JSObject* aGlobal)
{
return Navigator::HasCameraSupport(aCx, aGlobal);
}
CameraCapabilities::CameraCapabilities(nsPIDOMWindow* aWindow)
: mRecorderProfiles(JS::UndefinedValue())
, mWindow(aWindow)
@ -149,6 +157,11 @@ CameraCapabilities::Populate(ICameraControl* aCameraControl)
LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MAXMETERINGAREAS);
mMaxMeteringAreas = areas < 0 ? 0 : areas;
int32_t faces;
rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES, faces);
LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES);
mMaxDetectedFaces = faces < 0 ? 0 : faces;
rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MINEXPOSURECOMPENSATION, mMinExposureCompensation);
LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MINEXPOSURECOMPENSATION);
@ -257,6 +270,12 @@ CameraCapabilities::MaxMeteringAreas() const
return mMaxMeteringAreas;
}
uint32_t
CameraCapabilities::MaxDetectedFaces() const
{
return mMaxDetectedFaces;
}
double
CameraCapabilities::MinExposureCompensation() const
{

View File

@ -33,6 +33,13 @@ public:
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(CameraCapabilities)
// Because this header's filename doesn't match its C++ or DOM-facing
// classname, we can't rely on the [Func="..."] WebIDL tag to implicitly
// include the right header for us; instead we must explicitly include a
// HasSupport() method in each header. We can get rid of these with the
// Great Renaming proposed in bug 983177.
static bool HasSupport(JSContext* aCx, JSObject* aGlobal);
CameraCapabilities(nsPIDOMWindow* aWindow);
~CameraCapabilities();
@ -55,6 +62,7 @@ public:
void GetZoomRatios(nsTArray<double>& aRetVal) const;
uint32_t MaxFocusAreas() const;
uint32_t MaxMeteringAreas() const;
uint32_t MaxDetectedFaces() const;
double MinExposureCompensation() const;
double MaxExposureCompensation() const;
double ExposureCompensationStep() const;
@ -82,6 +90,7 @@ protected:
uint32_t mMaxFocusAreas;
uint32_t mMaxMeteringAreas;
uint32_t mMaxDetectedFaces;
double mMinExposureCompensation;
double mMaxExposureCompensation;

View File

@ -20,6 +20,7 @@
#include "nsIDOMDeviceStorage.h"
#include "nsIDOMEventListener.h"
#include "nsIScriptSecurityManager.h"
#include "Navigator.h"
#include "nsXULAppAPI.h"
#include "DOMCameraManager.h"
#include "DOMCameraCapabilities.h"
@ -29,6 +30,7 @@
#include "mozilla/dom/CameraControlBinding.h"
#include "mozilla/dom/CameraManagerBinding.h"
#include "mozilla/dom/CameraCapabilitiesBinding.h"
#include "DOMCameraDetectedFace.h"
#include "mozilla/dom/BindingUtils.h"
using namespace mozilla;
@ -43,7 +45,7 @@ NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
NS_IMPL_ADDREF_INHERITED(nsDOMCameraControl, DOMMediaStream)
NS_IMPL_RELEASE_INHERITED(nsDOMCameraControl, DOMMediaStream)
NS_IMPL_CYCLE_COLLECTION_INHERITED_18(nsDOMCameraControl, DOMMediaStream,
NS_IMPL_CYCLE_COLLECTION_INHERITED_20(nsDOMCameraControl, DOMMediaStream,
mCapabilities,
mWindow,
mGetCameraOnSuccessCb,
@ -61,7 +63,16 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED_18(nsDOMCameraControl, DOMMediaStream,
mOnShutterCb,
mOnClosedCb,
mOnRecorderStateChangeCb,
mOnPreviewStateChangeCb)
mOnPreviewStateChangeCb,
mOnAutoFocusMovingCb,
mOnFacesDetectedCb)
/* static */
bool
nsDOMCameraControl::HasSupport(JSContext* aCx, JSObject* aGlobal)
{
return Navigator::HasCameraSupport(aCx, aGlobal);
}
class mozilla::StartRecordingHelper : public nsIDOMEventListener
{
@ -142,6 +153,8 @@ nsDOMCameraControl::nsDOMCameraControl(uint32_t aCameraId,
, mOnClosedCb(nullptr)
, mOnRecorderStateChangeCb(nullptr)
, mOnPreviewStateChangeCb(nullptr)
, mOnAutoFocusMovingCb(nullptr)
, mOnFacesDetectedCb(nullptr)
, mWindow(aWindow)
{
DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
@ -487,7 +500,7 @@ nsDOMCameraControl::GetPictureSize(JSContext* cx, ErrorResult& aRv)
JS::Rooted<JS::Value> value(cx);
ICameraControl::Size size;
aRv = mCameraControl->Get(CAMERA_PARAM_PICTURESIZE, size);
aRv = mCameraControl->Get(CAMERA_PARAM_PICTURE_SIZE, size);
if (aRv.Failed()) {
return value;
}
@ -505,7 +518,7 @@ nsDOMCameraControl::SetPictureSize(JSContext* aCx, JS::Handle<JS::Value> aSize,
}
ICameraControl::Size s = { size.mWidth, size.mHeight };
aRv = mCameraControl->Set(CAMERA_PARAM_PICTURESIZE, s);
aRv = mCameraControl->Set(CAMERA_PARAM_PICTURE_SIZE, s);
}
/* attribute any thumbnailSize */
@ -610,52 +623,45 @@ nsDOMCameraControl::SensorAngle()
return angle;
}
already_AddRefed<CameraShutterCallback>
// Callback attributes
CameraShutterCallback*
nsDOMCameraControl::GetOnShutter()
{
nsRefPtr<CameraShutterCallback> cb = mOnShutterCb;
return cb.forget();
return mOnShutterCb;
}
void
nsDOMCameraControl::SetOnShutter(CameraShutterCallback* aCb)
{
mOnShutterCb = aCb;
}
/* attribute CameraClosedCallback onClosed; */
already_AddRefed<CameraClosedCallback>
CameraClosedCallback*
nsDOMCameraControl::GetOnClosed()
{
nsRefPtr<CameraClosedCallback> onClosed = mOnClosedCb;
return onClosed.forget();
return mOnClosedCb;
}
void
nsDOMCameraControl::SetOnClosed(CameraClosedCallback* aCb)
{
mOnClosedCb = aCb;
}
already_AddRefed<CameraRecorderStateChange>
CameraRecorderStateChange*
nsDOMCameraControl::GetOnRecorderStateChange()
{
nsRefPtr<CameraRecorderStateChange> cb = mOnRecorderStateChangeCb;
return cb.forget();
return mOnRecorderStateChangeCb;
}
void
nsDOMCameraControl::SetOnRecorderStateChange(CameraRecorderStateChange* aCb)
{
mOnRecorderStateChangeCb = aCb;
}
/* attribute CameraPreviewStateChange onPreviewStateChange; */
already_AddRefed<CameraPreviewStateChange>
CameraPreviewStateChange*
nsDOMCameraControl::GetOnPreviewStateChange()
{
nsRefPtr<CameraPreviewStateChange> cb = mOnPreviewStateChangeCb;
return cb.forget();
return mOnPreviewStateChangeCb;
}
void
nsDOMCameraControl::SetOnPreviewStateChange(CameraPreviewStateChange* aCb)
@ -663,6 +669,28 @@ nsDOMCameraControl::SetOnPreviewStateChange(CameraPreviewStateChange* aCb)
mOnPreviewStateChangeCb = aCb;
}
CameraAutoFocusMovingCallback*
nsDOMCameraControl::GetOnAutoFocusMoving()
{
return mOnAutoFocusMovingCb;
}
void
nsDOMCameraControl::SetOnAutoFocusMoving(CameraAutoFocusMovingCallback* aCb)
{
mOnAutoFocusMovingCb = aCb;
}
CameraFaceDetectionCallback*
nsDOMCameraControl::GetOnFacesDetected()
{
return mOnFacesDetectedCb;
}
void
nsDOMCameraControl::SetOnFacesDetected(CameraFaceDetectionCallback* aCb)
{
mOnFacesDetectedCb = aCb;
}
already_AddRefed<dom::CameraCapabilities>
nsDOMCameraControl::Capabilities()
{
@ -736,6 +764,7 @@ nsDOMCameraControl::OnCreatedFileDescriptor(bool aSucceeded)
o.rotation = mOptions.mRotation;
o.maxFileSizeBytes = mOptions.mMaxFileSizeBytes;
o.maxVideoLengthMs = mOptions.mMaxVideoLengthMs;
o.autoEnableLowLightTorch = mOptions.mAutoEnableLowLightTorch;
nsresult rv = mCameraControl->StartRecording(mDSFileDescriptor.get(), &o);
if (NS_SUCCEEDED(rv)) {
return;
@ -844,6 +873,20 @@ nsDOMCameraControl::AutoFocus(CameraAutoFocusCallback& aOnSuccess,
aRv = mCameraControl->AutoFocus(cancel);
}
void
nsDOMCameraControl::StartFaceDetection(ErrorResult& aRv)
{
MOZ_ASSERT(mCameraControl);
aRv = mCameraControl->StartFaceDetection();
}
void
nsDOMCameraControl::StopFaceDetection(ErrorResult& aRv)
{
MOZ_ASSERT(mCameraControl);
aRv = mCameraControl->StopFaceDetection();
}
void
nsDOMCameraControl::TakePicture(const CameraPictureOptions& aOptions,
CameraTakePictureCallback& aOnSuccess,
@ -942,6 +985,8 @@ nsDOMCameraControl::Shutdown()
mOnClosedCb = nullptr;
mOnRecorderStateChangeCb = nullptr;
mOnPreviewStateChangeCb = nullptr;
mOnAutoFocusMovingCb = nullptr;
mOnFacesDetectedCb = nullptr;
mCameraControl->Shutdown();
}
@ -1138,6 +1183,44 @@ nsDOMCameraControl::OnAutoFocusComplete(bool aAutoFocusSucceeded)
}
}
void
nsDOMCameraControl::OnAutoFocusMoving(bool aIsMoving)
{
MOZ_ASSERT(NS_IsMainThread());
nsRefPtr<CameraAutoFocusMovingCallback> cb = mOnAutoFocusMovingCb;
if (cb) {
ErrorResult ignored;
cb->Call(aIsMoving, ignored);
}
}
void
nsDOMCameraControl::OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces)
{
DOM_CAMERA_LOGI("DOM OnFacesDetected %u face(s)\n", aFaces.Length());
MOZ_ASSERT(NS_IsMainThread());
nsRefPtr<CameraFaceDetectionCallback> cb = mOnFacesDetectedCb;
if (!cb) {
return;
}
Sequence<OwningNonNull<DOMCameraDetectedFace> > faces;
uint32_t len = aFaces.Length();
if (faces.SetCapacity(len)) {
nsRefPtr<DOMCameraDetectedFace> f;
for (uint32_t i = 0; i < len; ++i) {
f = new DOMCameraDetectedFace(this, aFaces[i]);
*faces.AppendElement() = f.forget().take();
}
}
ErrorResult ignored;
cb->Call(faces, ignored);
}
void
nsDOMCameraControl::OnTakePictureComplete(nsIDOMBlob* aPicture)
{

View File

@ -40,6 +40,13 @@ public:
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(nsDOMCameraControl, DOMMediaStream)
NS_DECL_ISUPPORTS_INHERITED
// Because this header's filename doesn't match its C++ or DOM-facing
// classname, we can't rely on the [Func="..."] WebIDL tag to implicitly
// include the right header for us; instead we must explicitly include a
// HasSupport() method in each header. We can get rid of these with the
// Great Renaming proposed in bug 983177.
static bool HasSupport(JSContext* aCx, JSObject* aGlobal);
nsDOMCameraControl(uint32_t aCameraId,
const dom::CameraConfiguration& aInitialConfig,
dom::GetCameraCallback* aOnSuccess,
@ -83,14 +90,18 @@ public:
void SetIsoMode(const nsAString& aMode, ErrorResult& aRv);
// Unsolicited event handlers.
already_AddRefed<dom::CameraShutterCallback> GetOnShutter();
dom::CameraShutterCallback* GetOnShutter();
void SetOnShutter(dom::CameraShutterCallback* aCb);
already_AddRefed<dom::CameraClosedCallback> GetOnClosed();
dom::CameraClosedCallback* GetOnClosed();
void SetOnClosed(dom::CameraClosedCallback* aCb);
already_AddRefed<dom::CameraRecorderStateChange> GetOnRecorderStateChange();
dom::CameraRecorderStateChange* GetOnRecorderStateChange();
void SetOnRecorderStateChange(dom::CameraRecorderStateChange* aCb);
already_AddRefed<dom::CameraPreviewStateChange> GetOnPreviewStateChange();
dom::CameraPreviewStateChange* GetOnPreviewStateChange();
void SetOnPreviewStateChange(dom::CameraPreviewStateChange* aCb);
dom::CameraAutoFocusMovingCallback* GetOnAutoFocusMoving();
void SetOnAutoFocusMoving(dom::CameraAutoFocusMovingCallback* aCb);
dom::CameraFaceDetectionCallback* GetOnFacesDetected();
void SetOnFacesDetected(dom::CameraFaceDetectionCallback* aCb);
// Methods.
void SetConfiguration(const dom::CameraConfiguration& aConfiguration,
@ -100,6 +111,8 @@ public:
void AutoFocus(dom::CameraAutoFocusCallback& aOnSuccess,
const dom::Optional<dom::OwningNonNull<dom::CameraErrorCallback> >& aOnError,
ErrorResult& aRv);
void StartFaceDetection(ErrorResult& aRv);
void StopFaceDetection(ErrorResult& aRv);
void TakePicture(const dom::CameraPictureOptions& aOptions,
dom::CameraTakePictureCallback& aOnSuccess,
const dom::Optional<dom::OwningNonNull<dom::CameraErrorCallback> >& aOnError,
@ -144,7 +157,9 @@ protected:
void OnCreatedFileDescriptor(bool aSucceeded);
void OnAutoFocusComplete(bool aAutoFocusSucceeded);
void OnAutoFocusMoving(bool aIsMoving);
void OnTakePictureComplete(nsIDOMBlob* aPicture);
void OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces);
void OnHardwareStateChange(DOMCameraControlListener::HardwareState aState);
void OnPreviewStateChange(DOMCameraControlListener::PreviewState aState);
@ -187,6 +202,8 @@ protected:
nsRefPtr<dom::CameraClosedCallback> mOnClosedCb;
nsRefPtr<dom::CameraRecorderStateChange> mOnRecorderStateChangeCb;
nsRefPtr<dom::CameraPreviewStateChange> mOnPreviewStateChangeCb;
nsRefPtr<dom::CameraAutoFocusMovingCallback> mOnAutoFocusMovingCb;
nsRefPtr<dom::CameraFaceDetectionCallback> mOnFacesDetectedCb;
// Camera event listener; we only need this weak reference so that
// we can remove the listener from the camera when we're done

View File

@ -218,6 +218,55 @@ DOMCameraControlListener::OnConfigurationChange(const CameraListenerConfiguratio
NS_DispatchToMainThread(new Callback(mDOMCameraControl, aConfiguration));
}
void
DOMCameraControlListener::OnAutoFocusMoving(bool aIsMoving)
{
class Callback : public DOMCallback
{
public:
Callback(nsMainThreadPtrHandle<nsDOMCameraControl> aDOMCameraControl, bool aIsMoving)
: DOMCallback(aDOMCameraControl)
, mIsMoving(aIsMoving)
{ }
void
RunCallback(nsDOMCameraControl* aDOMCameraControl) MOZ_OVERRIDE
{
aDOMCameraControl->OnAutoFocusMoving(mIsMoving);
}
protected:
bool mIsMoving;
};
NS_DispatchToMainThread(new Callback(mDOMCameraControl, aIsMoving));
}
void
DOMCameraControlListener::OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces)
{
class Callback : public DOMCallback
{
public:
Callback(nsMainThreadPtrHandle<nsDOMCameraControl> aDOMCameraControl,
const nsTArray<ICameraControl::Face>& aFaces)
: DOMCallback(aDOMCameraControl)
, mFaces(aFaces)
{ }
void
RunCallback(nsDOMCameraControl* aDOMCameraControl) MOZ_OVERRIDE
{
aDOMCameraControl->OnFacesDetected(mFaces);
}
protected:
const nsTArray<ICameraControl::Face> mFaces;
};
NS_DispatchToMainThread(new Callback(mDOMCameraControl, aFaces));
}
void
DOMCameraControlListener::OnShutter()
{

View File

@ -19,6 +19,8 @@ public:
DOMCameraControlListener(nsDOMCameraControl* aDOMCameraControl, CameraPreviewMediaStream* aStream);
virtual void OnAutoFocusComplete(bool aAutoFocusSucceeded) MOZ_OVERRIDE;
virtual void OnAutoFocusMoving(bool aIsMoving) MOZ_OVERRIDE;
virtual void OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces) MOZ_OVERRIDE;
virtual void OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType) MOZ_OVERRIDE;
virtual void OnHardwareStateChange(HardwareState aState) MOZ_OVERRIDE;

View File

@ -0,0 +1,79 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "DOMCameraDetectedFace.h"
#include "Navigator.h"
using namespace mozilla;
using namespace mozilla::dom;
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_1(DOMCameraPoint, mParent)
NS_IMPL_CYCLE_COLLECTING_ADDREF(DOMCameraPoint)
NS_IMPL_CYCLE_COLLECTING_RELEASE(DOMCameraPoint)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMCameraPoint)
NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_5(DOMCameraDetectedFace, mParent,
mBounds, mLeftEye, mRightEye, mMouth)
NS_IMPL_CYCLE_COLLECTING_ADDREF(DOMCameraDetectedFace)
NS_IMPL_CYCLE_COLLECTING_RELEASE(DOMCameraDetectedFace)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMCameraDetectedFace)
NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
/* static */
bool
DOMCameraPoint::HasSupport(JSContext* aCx, JSObject* aGlobal)
{
return Navigator::HasCameraSupport(aCx, aGlobal);
}
/* static */
bool
DOMCameraDetectedFace::HasSupport(JSContext* aCx, JSObject* aGlobal)
{
return Navigator::HasCameraSupport(aCx, aGlobal);
}
JSObject*
DOMCameraPoint::WrapObject(JSContext* aCx)
{
return CameraPointBinding::Wrap(aCx, this);
}
JSObject*
DOMCameraDetectedFace::WrapObject(JSContext* aCx)
{
return CameraDetectedFaceBinding::Wrap(aCx, this);
}
DOMCameraDetectedFace::DOMCameraDetectedFace(nsISupports* aParent,
const ICameraControl::Face& aFace)
: mParent(aParent)
, mId(aFace.id)
, mScore(aFace.score)
, mBounds(new DOMRect(MOZ_THIS_IN_INITIALIZER_LIST()))
{
mBounds->SetRect(aFace.bound.left,
aFace.bound.top,
aFace.bound.right - aFace.bound.left,
aFace.bound.bottom - aFace.bound.top);
if (aFace.hasLeftEye) {
mLeftEye = new DOMCameraPoint(this, aFace.leftEye);
}
if (aFace.hasRightEye) {
mRightEye = new DOMCameraPoint(this, aFace.rightEye);
}
if (aFace.hasMouth) {
mMouth = new DOMCameraPoint(this, aFace.mouth);
}
SetIsDOMBinding();
}

View File

@ -0,0 +1,126 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DOM_CAMERA_DOMCAMERADETECTEDFACE_H
#define DOM_CAMERA_DOMCAMERADETECTEDFACE_H
#include "mozilla/dom/CameraControlBinding.h"
#include "nsCycleCollectionParticipant.h"
#include "nsWrapperCache.h"
#include "mozilla/dom/DOMRect.h"
#include "ICameraControl.h"
namespace mozilla {
namespace dom {
class DOMCameraPoint MOZ_FINAL : public nsISupports
, public nsWrapperCache
{
public:
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(DOMCameraPoint)
// Because this header's filename doesn't match its C++ or DOM-facing
// classname, we can't rely on the [Func="..."] WebIDL tag to implicitly
// include the right header for us; instead we must explicitly include a
// HasSupport() method in each header. We can get rid of these with the
// Great Renaming proposed in bug 983177.
static bool HasSupport(JSContext* aCx, JSObject* aGlobal);
DOMCameraPoint(nsISupports* aParent, const ICameraControl::Point& aPoint)
: mParent(aParent)
, mX(aPoint.x)
, mY(aPoint.y)
{
SetIsDOMBinding();
}
void
SetPoint(int32_t aX, int32_t aY)
{
mX = aX;
mY = aY;
}
int32_t X() { return mX; }
int32_t Y() { return mY; }
void SetX(int32_t aX) { mX = aX; }
void SetY(int32_t aY) { mY = aY; }
nsISupports*
GetParentObject() const
{
MOZ_ASSERT(mParent);
return mParent;
}
virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
protected:
virtual ~DOMCameraPoint() { }
nsCOMPtr<nsISupports> mParent;
int32_t mX;
int32_t mY;
};
class DOMCameraDetectedFace MOZ_FINAL : public nsISupports
, public nsWrapperCache
{
public:
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(DOMCameraDetectedFace)
// Because this header's filename doesn't match its C++ or DOM-facing
// classname, we can't rely on the [Func="..."] WebIDL tag to implicitly
// include the right header for us; instead we must explicitly include a
// HasSupport() method in each header. We can get rid of these with the
// Great Renaming proposed in bug 983177.
static bool HasSupport(JSContext* aCx, JSObject* aGlobal);
DOMCameraDetectedFace(nsISupports* aParent, const ICameraControl::Face& aFace);
uint32_t Id() { return mId; }
uint32_t Score() { return mScore; }
bool HasLeftEye() { return mLeftEye; }
bool HasRightEye() { return mRightEye; }
bool HasMouth() { return mMouth; }
dom::DOMRect* Bounds() { return mBounds; }
DOMCameraPoint* GetLeftEye() { return mLeftEye; }
DOMCameraPoint* GetRightEye() { return mRightEye; }
DOMCameraPoint* GetMouth() { return mMouth; }
nsISupports*
GetParentObject() const
{
MOZ_ASSERT(mParent);
return mParent;
}
virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
protected:
virtual ~DOMCameraDetectedFace() { }
nsCOMPtr<nsISupports> mParent;
uint32_t mId;
uint32_t mScore;
nsRefPtr<dom::DOMRect> mBounds;
nsRefPtr<DOMCameraPoint> mLeftEye;
nsRefPtr<DOMCameraPoint> mRightEye;
nsRefPtr<DOMCameraPoint> mMouth;
};
} // namespace dom
} // namespace mozilla
#endif // DOM_CAMERA_DOMCAMERADETECTEDFACE_H

View File

@ -5,6 +5,7 @@
#include "DOMCameraManager.h"
#include "nsDebug.h"
#include "jsapi.h"
#include "Navigator.h"
#include "nsPIDOMWindow.h"
#include "mozilla/Services.h"
#include "nsContentPermissionHelper.h"
@ -70,12 +71,21 @@ nsDOMCameraManager::~nsDOMCameraManager()
DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
}
/* static */
void
nsDOMCameraManager::GetListOfCameras(nsTArray<nsString>& aList, ErrorResult& aRv)
{
aRv = ICameraControl::GetListOfCameras(aList);
}
/* static */
bool
nsDOMCameraManager::HasSupport(JSContext* aCx, JSObject* aGlobal)
{
return Navigator::HasCameraSupport(aCx, aGlobal);
}
/* static */
bool
nsDOMCameraManager::CheckPermission(nsPIDOMWindow* aWindow)
{
@ -93,7 +103,7 @@ nsDOMCameraManager::CheckPermission(nsPIDOMWindow* aWindow)
return true;
}
// static creator
/* static */
already_AddRefed<nsDOMCameraManager>
nsDOMCameraManager::CreateInstance(nsPIDOMWindow* aWindow)
{

View File

@ -46,6 +46,13 @@ public:
nsIObserver)
NS_DECL_NSIOBSERVER
// Because this header's filename doesn't match its C++ or DOM-facing
// classname, we can't rely on the [Func="..."] WebIDL tag to implicitly
// include the right header for us; instead we must explicitly include a
// HasSupport() method in each header. We can get rid of these with the
// Great Renaming proposed in bug 983177.
static bool HasSupport(JSContext* aCx, JSObject* aGlobal);
static bool CheckPermission(nsPIDOMWindow* aWindow);
static already_AddRefed<nsDOMCameraManager>
CreateInstance(nsPIDOMWindow* aWindow);

View File

@ -110,6 +110,13 @@ DOMCameraCapabilities::GetMaxMeteringAreas(JSContext* cx, int32_t* aMaxMeteringA
return NS_ERROR_NOT_IMPLEMENTED;
}
/* [implicit_jscontext] readonly attribute long maxDetectedFaces; */
NS_IMETHODIMP
DOMCameraCapabilities::GetMaxDetectedFaces(JSContext* cx, int32_t* aMaxDetectedFaces)
{
return NS_ERROR_NOT_IMPLEMENTED;
}
/* [implicit_jscontext] readonly attribute jsval zoomRatios; */
NS_IMETHODIMP
DOMCameraCapabilities::GetZoomRatios(JSContext* cx, JS::Value* aZoomRatios)

View File

@ -25,6 +25,7 @@ public:
FallbackCameraControl(uint32_t aCameraId) : CameraControlImpl(aCameraId) { }
void OnAutoFocusComplete(bool aSuccess);
void OnAutoFocusMoving(bool aIsMoving) { }
void OnTakePictureComplete(uint8_t* aData, uint32_t aLength) { }
void OnTakePictureError() { }
void OnNewPreviewFrame(layers::GraphicBufferLocked* aBuffer) { }
@ -60,6 +61,8 @@ protected:
virtual nsresult StartPreviewImpl() { return NS_ERROR_FAILURE; }
virtual nsresult StopPreviewImpl() { return NS_ERROR_FAILURE; }
virtual nsresult AutoFocusImpl(bool aCancelExistingCall) { return NS_ERROR_FAILURE; }
virtual nsresult StartFaceDetectionImpl() { return NS_ERROR_FAILURE; }
virtual nsresult StopFaceDetectionImpl() { return NS_ERROR_FAILURE; }
virtual nsresult TakePictureImpl() { return NS_ERROR_FAILURE; }
virtual nsresult StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
const StartRecordingOptions* aOptions = nullptr)

View File

@ -65,6 +65,9 @@ nsGonkCameraControl::nsGonkCameraControl(uint32_t aCameraId)
, mLastThumbnailSize({0, 0})
, mPreviewFps(30)
, mResumePreviewAfterTakingPicture(false) // XXXmikeh - see bug 950102
, mFlashSupported(false)
, mLuminanceSupported(false)
, mAutoFlashModeOverridden(false)
, mDeferConfigUpdate(0)
, mMediaProfiles(nullptr)
, mRecorder(nullptr)
@ -153,6 +156,14 @@ nsGonkCameraControl::Initialize()
mParams.Get(CAMERA_PARAM_PREVIEWSIZE, mCurrentConfiguration.mPreviewSize);
mParams.Get(CAMERA_PARAM_VIDEOSIZE, mLastRecorderSize);
nsString luminance; // check for support
mParams.Get(CAMERA_PARAM_LUMINANCE, luminance);
mLuminanceSupported = !luminance.IsEmpty();
nsString flashMode;
mParams.Get(CAMERA_PARAM_FLASHMODE, flashMode);
mFlashSupported = !flashMode.IsEmpty();
DOM_CAMERA_LOGI(" - maximum metering areas: %u\n", mCurrentConfiguration.mMaxMeteringAreas);
DOM_CAMERA_LOGI(" - maximum focus areas: %u\n", mCurrentConfiguration.mMaxFocusAreas);
DOM_CAMERA_LOGI(" - default picture size: %u x %u\n",
@ -165,6 +176,14 @@ nsGonkCameraControl::Initialize()
mLastRecorderSize.width, mLastRecorderSize.height);
DOM_CAMERA_LOGI(" - default picture file format: %s\n",
NS_ConvertUTF16toUTF8(mFileFormat).get());
DOM_CAMERA_LOGI(" - luminance reporting: %ssupported\n",
mLuminanceSupported ? "" : "NOT ");
if (mFlashSupported) {
DOM_CAMERA_LOGI(" - flash: supported, default mode '%s'\n",
NS_ConvertUTF16toUTF8(flashMode).get());
} else {
DOM_CAMERA_LOGI(" - flash: NOT supported\n");
}
return NS_OK;
}
@ -382,9 +401,16 @@ nsGonkCameraControl::Set(uint32_t aKey, const nsAString& aValue)
return rv;
}
if (aKey == CAMERA_PARAM_PICTURE_FILEFORMAT) {
// Picture format -- need to keep it for the TakePicture() callback.
mFileFormat = aValue;
switch (aKey) {
case CAMERA_PARAM_PICTURE_FILEFORMAT:
// Picture format -- need to keep it for the TakePicture() callback.
mFileFormat = aValue;
break;
case CAMERA_PARAM_FLASHMODE:
// Explicit flash mode changes always win and stick.
mAutoFlashModeOverridden = false;
break;
}
return PushParameters();
@ -440,7 +466,7 @@ nsresult
nsGonkCameraControl::Set(uint32_t aKey, const Size& aSize)
{
switch (aKey) {
case CAMERA_PARAM_PICTURESIZE:
case CAMERA_PARAM_PICTURE_SIZE:
DOM_CAMERA_LOGI("setting picture size to %ux%u\n", aSize.width, aSize.height);
return SetPictureSize(aSize);
@ -554,6 +580,34 @@ nsGonkCameraControl::AutoFocusImpl(bool aCancelExistingCall)
return NS_OK;
}
nsresult
nsGonkCameraControl::StartFaceDetectionImpl()
{
MOZ_ASSERT(NS_GetCurrentThread() == mCameraThread);
RETURN_IF_NO_CAMERA_HW();
DOM_CAMERA_LOGI("Starting face detection\n");
if (mCameraHw->StartFaceDetection() != OK) {
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult
nsGonkCameraControl::StopFaceDetectionImpl()
{
MOZ_ASSERT(NS_GetCurrentThread() == mCameraThread);
RETURN_IF_NO_CAMERA_HW();
DOM_CAMERA_LOGI("Stopping face detection\n");
if (mCameraHw->StopFaceDetection() != OK) {
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult
nsGonkCameraControl::SetThumbnailSizeImpl(const Size& aSize)
{
@ -724,7 +778,7 @@ nsGonkCameraControl::SetPictureSizeImpl(const Size& aSize)
return NS_ERROR_FAILURE;
}
nsresult rv = mParams.Set(CAMERA_PARAM_PICTURESIZE, size);
nsresult rv = mParams.Set(CAMERA_PARAM_PICTURE_SIZE, size);
if (NS_FAILED(rv)) {
return rv;
}
@ -837,10 +891,56 @@ nsGonkCameraControl::PullParametersImpl()
return mCameraHw->PullParameters(mParams);
}
nsresult
nsGonkCameraControl::SetupRecordingFlash(bool aAutoEnableLowLightTorch)
{
mAutoFlashModeOverridden = false;
if (!aAutoEnableLowLightTorch || !mLuminanceSupported || !mFlashSupported) {
return NS_OK;
}
DOM_CAMERA_LOGI("Luminance reporting and flash supported\n");
nsresult rv = PullParametersImpl();
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
nsString luminance;
rv = mParams.Get(CAMERA_PARAM_LUMINANCE, luminance);
if (NS_WARN_IF(NS_FAILED(rv))) {
// If we failed to get the luminance, assume it's "high"
return NS_OK;
}
nsString flashMode;
rv = mParams.Get(CAMERA_PARAM_FLASHMODE, flashMode);
if (NS_WARN_IF(NS_FAILED(rv))) {
// If we failed to get the current flash mode, swallow the error
return NS_OK;
}
if (luminance.EqualsASCII("low") && flashMode.EqualsASCII("auto")) {
DOM_CAMERA_LOGI("Low luminance detected, turning on flash\n");
rv = SetAndPush(CAMERA_PARAM_FLASHMODE, NS_LITERAL_STRING("torch"));
if (NS_WARN_IF(NS_FAILED(rv))) {
// If we failed to turn on the flash, swallow the error
return NS_OK;
}
mAutoFlashModeOverridden = true;
}
return NS_OK;
}
nsresult
nsGonkCameraControl::StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
const StartRecordingOptions* aOptions)
{
MOZ_ASSERT(NS_GetCurrentThread() == mCameraThread);
NS_ENSURE_TRUE(mRecorderProfile, NS_ERROR_NOT_INITIALIZED);
NS_ENSURE_FALSE(mRecorder, NS_ERROR_FAILURE);
@ -877,15 +977,24 @@ nsGonkCameraControl::StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescri
if (aOptions) {
rv = SetupRecording(fd, aOptions->rotation, aOptions->maxFileSizeBytes,
aOptions->maxVideoLengthMs);
if (NS_SUCCEEDED(rv)) {
rv = SetupRecordingFlash(aOptions->autoEnableLowLightTorch);
}
} else {
rv = SetupRecording(fd, 0, 0, 0);
}
NS_ENSURE_SUCCESS(rv, rv);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
if (mRecorder->start() != OK) {
DOM_CAMERA_LOGE("mRecorder->start() failed\n");
// important: we MUST destroy the recorder if start() fails!
mRecorder = nullptr;
// put the flash back to the 'auto' state
if (mAutoFlashModeOverridden) {
SetAndPush(CAMERA_PARAM_FLASHMODE, NS_LITERAL_STRING("auto"));
}
return NS_ERROR_FAILURE;
}
@ -928,6 +1037,10 @@ nsGonkCameraControl::StopRecordingImpl()
mRecorder = nullptr;
OnRecorderStateChange(CameraControlListener::kRecorderStopped);
if (mAutoFlashModeOverridden) {
SetAndPush(CAMERA_PARAM_FLASHMODE, NS_LITERAL_STRING("auto"));
}
// notify DeviceStorage that the new video file is closed and ready
return NS_DispatchToMainThread(new RecordingComplete(mVideoFile), NS_DISPATCH_NORMAL);
}
@ -972,6 +1085,83 @@ nsGonkCameraControl::OnAutoFocusComplete(bool aSuccess)
mCameraThread->Dispatch(new AutoFocusComplete(this, aSuccess), NS_DISPATCH_NORMAL);
}
bool
FeatureDetected(int32_t feature[])
{
/**
* For information on what constitutes a valid feature, see:
* http://androidxref.com/4.0.4/xref/system/core/include/system/camera.h#202
*
* Although the comments explicitly state that undetected features are
* indicated using the value -2000, we conservatively include anything
* outside the explicitly valid range of [-1000, 1000] as undetected
* as well.
*/
const int32_t kLowerFeatureBound = -1000;
const int32_t kUpperFeatureBound = 1000;
return (feature[0] >= kLowerFeatureBound && feature[0] <= kUpperFeatureBound) ||
(feature[1] >= kLowerFeatureBound && feature[1] <= kUpperFeatureBound);
}
void
nsGonkCameraControl::OnFacesDetected(camera_frame_metadata_t* aMetaData)
{
NS_ENSURE_TRUE_VOID(aMetaData);
nsTArray<Face> faces;
uint32_t numFaces = aMetaData->number_of_faces;
DOM_CAMERA_LOGI("Camera detected %d face(s)", numFaces);
faces.SetCapacity(numFaces);
for (uint32_t i = 0; i < numFaces; ++i) {
Face* f = faces.AppendElement();
f->id = aMetaData->faces[i].id;
f->score = aMetaData->faces[i].score;
if (f->score > 100) {
f->score = 100;
}
f->bound.left = aMetaData->faces[i].rect[0];
f->bound.top = aMetaData->faces[i].rect[1];
f->bound.right = aMetaData->faces[i].rect[2];
f->bound.bottom = aMetaData->faces[i].rect[3];
DOM_CAMERA_LOGI("Camera face[%u] appended: id=%d, score=%d, bound=(%d, %d)-(%d, %d)\n",
i, f->id, f->score, f->bound.left, f->bound.top, f->bound.right, f->bound.bottom);
f->hasLeftEye = FeatureDetected(aMetaData->faces[i].left_eye);
if (f->hasLeftEye) {
f->leftEye.x = aMetaData->faces[i].left_eye[0];
f->leftEye.y = aMetaData->faces[i].left_eye[1];
DOM_CAMERA_LOGI(" Left eye detected at (%d, %d)\n",
f->leftEye.x, f->leftEye.y);
} else {
DOM_CAMERA_LOGI(" No left eye detected\n");
}
f->hasRightEye = FeatureDetected(aMetaData->faces[i].right_eye);
if (f->hasRightEye) {
f->rightEye.x = aMetaData->faces[i].right_eye[0];
f->rightEye.y = aMetaData->faces[i].right_eye[1];
DOM_CAMERA_LOGI(" Right eye detected at (%d, %d)\n",
f->rightEye.x, f->rightEye.y);
} else {
DOM_CAMERA_LOGI(" No right eye detected\n");
}
f->hasMouth = FeatureDetected(aMetaData->faces[i].mouth);
if (f->hasMouth) {
f->mouth.x = aMetaData->faces[i].mouth[0];
f->mouth.y = aMetaData->faces[i].mouth[1];
DOM_CAMERA_LOGI(" Mouth detected at (%d, %d)\n", f->mouth.x, f->mouth.y);
} else {
DOM_CAMERA_LOGI(" No mouth detected\n");
}
}
CameraControlImpl::OnFacesDetected(faces);
}
void
nsGonkCameraControl::OnTakePictureComplete(uint8_t* aData, uint32_t aLength)
{
@ -1325,7 +1515,9 @@ nsGonkCameraControl::OnRecorderEvent(int msg, int ext1, int ext2)
}
nsresult
nsGonkCameraControl::SetupRecording(int aFd, int aRotation, int64_t aMaxFileSizeBytes, int64_t aMaxVideoLengthMs)
nsGonkCameraControl::SetupRecording(int aFd, int aRotation,
int64_t aMaxFileSizeBytes,
int64_t aMaxVideoLengthMs)
{
RETURN_IF_NO_CAMERA_HW();
@ -1368,6 +1560,7 @@ nsGonkCameraControl::SetupRecording(int aFd, int aRotation, int64_t aMaxFileSize
// recording API needs file descriptor of output file
CHECK_SETARG(mRecorder->setOutputFile(aFd, 0, 0));
CHECK_SETARG(mRecorder->prepare());
return NS_OK;
}
@ -1377,12 +1570,7 @@ nsGonkCameraControl::StopImpl()
DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
// if we're recording, stop recording
if (mRecorder) {
DOM_CAMERA_LOGI("Stopping existing video recorder\n");
mRecorder->stop();
mRecorder = nullptr;
OnRecorderStateChange(CameraControlListener::kRecorderStopped);
}
StopRecordingImpl();
// stop the preview
StopPreviewImpl();
@ -1470,6 +1658,18 @@ OnAutoFocusComplete(nsGonkCameraControl* gc, bool aSuccess)
gc->OnAutoFocusComplete(aSuccess);
}
void
OnAutoFocusMoving(nsGonkCameraControl* gc, bool aIsMoving)
{
gc->OnAutoFocusMoving(aIsMoving);
}
void
OnFacesDetected(nsGonkCameraControl* gc, camera_frame_metadata_t* aMetaData)
{
gc->OnFacesDetected(aMetaData);
}
void
OnNewPreviewFrame(nsGonkCameraControl* gc, layers::TextureClient* aBuffer)
{

View File

@ -49,6 +49,7 @@ public:
nsGonkCameraControl(uint32_t aCameraId);
void OnAutoFocusComplete(bool aSuccess);
void OnFacesDetected(camera_frame_metadata_t* aMetaData);
void OnTakePictureComplete(uint8_t* aData, uint32_t aLength);
void OnTakePictureError();
void OnNewPreviewFrame(layers::TextureClient* aBuffer);
@ -83,6 +84,7 @@ protected:
using CameraControlImpl::OnNewPreviewFrame;
using CameraControlImpl::OnAutoFocusComplete;
using CameraControlImpl::OnFacesDetected;
using CameraControlImpl::OnTakePictureComplete;
using CameraControlImpl::OnConfigurationChange;
using CameraControlImpl::OnError;
@ -104,6 +106,8 @@ protected:
virtual nsresult StartPreviewImpl() MOZ_OVERRIDE;
virtual nsresult StopPreviewImpl() MOZ_OVERRIDE;
virtual nsresult AutoFocusImpl(bool aCancelExistingCall) MOZ_OVERRIDE;
virtual nsresult StartFaceDetectionImpl() MOZ_OVERRIDE;
virtual nsresult StopFaceDetectionImpl() MOZ_OVERRIDE;
virtual nsresult TakePictureImpl() MOZ_OVERRIDE;
virtual nsresult StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
const StartRecordingOptions* aOptions = nullptr) MOZ_OVERRIDE;
@ -113,7 +117,9 @@ protected:
virtual already_AddRefed<RecorderProfileManager> GetRecorderProfileManagerImpl() MOZ_OVERRIDE;
already_AddRefed<GonkRecorderProfileManager> GetGonkRecorderProfileManager();
nsresult SetupRecording(int aFd, int aRotation, int64_t aMaxFileSizeBytes, int64_t aMaxVideoLengthMs);
nsresult SetupRecording(int aFd, int aRotation, int64_t aMaxFileSizeBytes,
int64_t aMaxVideoLengthMs);
nsresult SetupRecordingFlash(bool aAutoEnableLowLightTorch);
nsresult SetupVideoMode(const nsAString& aProfile);
nsresult SetPreviewSize(const Size& aSize);
nsresult SetVideoSize(const Size& aSize);
@ -137,6 +143,9 @@ protected:
Size mLastRecorderSize;
uint32_t mPreviewFps;
bool mResumePreviewAfterTakingPicture;
bool mFlashSupported;
bool mLuminanceSupported;
bool mAutoFlashModeOverridden;
Atomic<uint32_t> mDeferConfigUpdate;
GonkCameraParameters mParams;
@ -165,6 +174,8 @@ private:
void OnTakePictureComplete(nsGonkCameraControl* gc, uint8_t* aData, uint32_t aLength);
void OnTakePictureError(nsGonkCameraControl* gc);
void OnAutoFocusComplete(nsGonkCameraControl* gc, bool aSuccess);
void OnAutoFocusMoving(nsGonkCameraControl* gc, bool aIsMoving);
void OnFacesDetected(nsGonkCameraControl* gc, camera_frame_metadata_t* aMetaData);
void OnNewPreviewFrame(nsGonkCameraControl* gc, layers::TextureClient* aBuffer);
void OnShutter(nsGonkCameraControl* gc);
void OnClosed(nsGonkCameraControl* gc);

View File

@ -79,6 +79,10 @@ GonkCameraHardware::postData(int32_t aMsgType, const sp<IMemory>& aDataPtr, came
}
break;
case CAMERA_MSG_PREVIEW_METADATA:
OnFacesDetected(mTarget, metadata);
break;
default:
DOM_CAMERA_LOGE("Unhandled data callback event %d\n", aMsgType);
break;
@ -98,6 +102,12 @@ GonkCameraHardware::notify(int32_t aMsgType, int32_t ext1, int32_t ext2)
OnAutoFocusComplete(mTarget, !!ext1);
break;
#if ANDROID_VERSION >= 16
case CAMERA_MSG_FOCUS_MOVE:
OnAutoFocusMoving(mTarget, !!ext1);
break;
#endif
case CAMERA_MSG_SHUTTER:
OnShutter(mTarget);
break;
@ -176,6 +186,13 @@ GonkCameraHardware::Init()
mCamera->setPreviewTexture(mNativeWindow);
#endif
#if ANDROID_VERSION >= 16
rv = mCamera->sendCommand(CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG, 1, 0);
if (rv != OK) {
NS_WARNING("Failed to send command CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG");
}
#endif
#endif
return NS_OK;
@ -282,6 +299,38 @@ GonkCameraHardware::CancelAutoFocus()
mCamera->cancelAutoFocus();
}
int
GonkCameraHardware::StartFaceDetection()
{
DOM_CAMERA_LOGI("%s\n", __func__);
int rv = INVALID_OPERATION;
#if ANDROID_VERSION >= 15
rv = mCamera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, CAMERA_FACE_DETECTION_HW, 0);
#endif
if (rv != OK) {
DOM_CAMERA_LOGE("Start face detection failed with status %d", rv);
}
return rv;
}
int
GonkCameraHardware::StopFaceDetection()
{
DOM_CAMERA_LOGI("%s\n", __func__);
int rv = INVALID_OPERATION;
#if ANDROID_VERSION >= 15
rv = mCamera->sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
#endif
if (rv != OK) {
DOM_CAMERA_LOGE("Stop face detection failed with status %d", rv);
}
return rv;
}
int
GonkCameraHardware::TakePicture()
{

View File

@ -79,6 +79,8 @@ public:
virtual int AutoFocus();
virtual void CancelAutoFocus();
virtual int StartFaceDetection();
virtual int StopFaceDetection();
virtual int TakePicture();
virtual void CancelTakePicture();
virtual int StartPreview();

View File

@ -14,11 +14,12 @@
* limitations under the License.
*/
#include "ICameraControl.h"
#include <camera/Camera.h>
#include "CameraCommon.h"
#include "GonkCameraControl.h"
#include "ICameraControl.h"
using namespace mozilla;

View File

@ -58,8 +58,6 @@ GonkCameraParameters::Parameters::GetTextKey(uint32_t aKey)
return KEY_FOCUS_DISTANCES;
case CAMERA_PARAM_EXPOSURECOMPENSATION:
return KEY_EXPOSURE_COMPENSATION;
case CAMERA_PARAM_PICTURESIZE:
return KEY_PICTURE_SIZE;
case CAMERA_PARAM_THUMBNAILQUALITY:
return KEY_JPEG_THUMBNAIL_QUALITY;
case CAMERA_PARAM_PICTURE_SIZE:
@ -81,6 +79,8 @@ GonkCameraParameters::Parameters::GetTextKey(uint32_t aKey)
// Not every platform defines KEY_ISO_MODE;
// for those that don't, we use the raw string key.
return "iso";
case CAMERA_PARAM_LUMINANCE:
return "luminance-condition";
case CAMERA_PARAM_SUPPORTED_PREVIEWSIZES:
return KEY_SUPPORTED_PREVIEW_SIZES;
@ -114,6 +114,8 @@ GonkCameraParameters::Parameters::GetTextKey(uint32_t aKey)
return KEY_ZOOM_SUPPORTED;
case CAMERA_PARAM_SUPPORTED_ZOOMRATIOS:
return KEY_ZOOM_RATIOS;
case CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES:
return KEY_MAX_NUM_DETECTED_FACES_HW;
case CAMERA_PARAM_SUPPORTED_JPEG_THUMBNAIL_SIZES:
return KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES;
case CAMERA_PARAM_SUPPORTED_ISOMODES:
@ -264,8 +266,10 @@ GonkCameraParameters::GetTranslated(uint32_t aKey, nsAString& aValue)
}
if (aKey == CAMERA_PARAM_ISOMODE) {
rv = MapIsoFromGonk(val, aValue);
} else {
} else if (val) {
aValue.AssignASCII(val);
} else {
aValue.Truncate(0);
}
return rv;
}
@ -545,25 +549,23 @@ GonkCameraParameters::SetTranslated(uint32_t aKey, const double& aValue)
// mZoomRatios is sorted, so we can binary search it
int bottom = 0;
int top = mZoomRatios.Length() - 1;
int middle;
while (top >= bottom) {
middle = (top + bottom) / 2;
if (value == mZoomRatios[middle]) {
index = (top + bottom) / 2;
if (value == mZoomRatios[index]) {
// exact match
break;
}
if (value > mZoomRatios[middle] && value < mZoomRatios[middle + 1]) {
if (value > mZoomRatios[index] && value < mZoomRatios[index + 1]) {
// the specified zoom value lies in this interval
break;
}
if (value > mZoomRatios[middle]) {
bottom = middle + 1;
if (value > mZoomRatios[index]) {
bottom = index + 1;
} else {
top = middle - 1;
top = index - 1;
}
}
index = middle;
}
DOM_CAMERA_LOGI("Zoom = %fx --> index = %d\n", aValue, index);
}

View File

@ -46,11 +46,11 @@ enum {
CAMERA_PARAM_FOCUSDISTANCEOPTIMUM,
CAMERA_PARAM_FOCUSDISTANCEFAR,
CAMERA_PARAM_EXPOSURECOMPENSATION,
CAMERA_PARAM_PICTURESIZE,
CAMERA_PARAM_THUMBNAILSIZE,
CAMERA_PARAM_THUMBNAILQUALITY,
CAMERA_PARAM_SENSORANGLE,
CAMERA_PARAM_ISOMODE,
CAMERA_PARAM_LUMINANCE,
// supported features
CAMERA_PARAM_SUPPORTED_PREVIEWSIZES,
@ -69,6 +69,7 @@ enum {
CAMERA_PARAM_SUPPORTED_EXPOSURECOMPENSATIONSTEP,
CAMERA_PARAM_SUPPORTED_ZOOM,
CAMERA_PARAM_SUPPORTED_ZOOMRATIOS,
CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES,
CAMERA_PARAM_SUPPORTED_JPEG_THUMBNAIL_SIZES,
CAMERA_PARAM_SUPPORTED_ISOMODES
};
@ -102,16 +103,17 @@ public:
};
struct Position {
double latitude;
double longitude;
double altitude;
double timestamp;
double latitude;
double longitude;
double altitude;
double timestamp;
};
struct StartRecordingOptions {
uint32_t rotation;
uint32_t maxFileSizeBytes;
uint32_t maxVideoLengthMs;
bool autoEnableLowLightTorch;
};
struct Configuration {
@ -119,6 +121,25 @@ public:
Size mPreviewSize;
nsString mRecorderProfile;
};
struct Point
{
int32_t x;
int32_t y;
};
struct Face {
uint32_t id;
uint32_t score;
Region bound;
bool hasLeftEye;
Point leftEye;
bool hasRightEye;
Point rightEye;
bool hasMouth;
Point mouth;
};
static already_AddRefed<ICameraControl> Create(uint32_t aCameraId);
virtual nsresult Start(const Configuration* aInitialConfig = nullptr) = 0;
@ -136,6 +157,8 @@ public:
virtual nsresult StartRecording(DeviceStorageFileDescriptor *aFileDescriptor,
const StartRecordingOptions* aOptions = nullptr) = 0;
virtual nsresult StopRecording() = 0;
virtual nsresult StartFaceDetection() = 0;
virtual nsresult StopFaceDetection() = 0;
virtual nsresult Set(uint32_t aKey, const nsAString& aValue) = 0;
virtual nsresult Get(uint32_t aKey, nsAString& aValue) = 0;

View File

@ -27,7 +27,7 @@ TestGonkCameraHardware::TestGonkCameraHardware(nsGonkCameraControl* aTarget,
const sp<Camera>& aCamera)
: GonkCameraHardware(aTarget, aCameraId, aCamera)
{
DOM_CAMERA_LOGA("+===== Created TestGonkCameraHardware =====+\n");
DOM_CAMERA_LOGA("v===== Created TestGonkCameraHardware =====v\n");
DOM_CAMERA_LOGT("%s:%d : this=%p (aTarget=%p)\n",
__func__, __LINE__, this, aTarget);
MOZ_COUNT_CTOR(TestGonkCameraHardware);
@ -36,7 +36,7 @@ TestGonkCameraHardware::TestGonkCameraHardware(nsGonkCameraControl* aTarget,
TestGonkCameraHardware::~TestGonkCameraHardware()
{
MOZ_COUNT_DTOR(TestGonkCameraHardware);
DOM_CAMERA_LOGA("+===== Destroyed TestGonkCameraHardware =====+\n");
DOM_CAMERA_LOGA("^===== Destroyed TestGonkCameraHardware =====^\n");
}
nsresult
@ -138,6 +138,213 @@ TestGonkCameraHardware::AutoFocus()
return GonkCameraHardware::AutoFocus();
}
// These classes have to be external to StartFaceDetection(), at least
// until we pick up gcc 4.5, which supports local classes as template
// arguments.
class FaceDetected : public nsRunnable
{
public:
FaceDetected(nsGonkCameraControl* aTarget)
: mTarget(aTarget)
{ }
~FaceDetected()
{
ReleaseFacesArray();
}
NS_IMETHODIMP
Run()
{
InitMetaData();
OnFacesDetected(mTarget, &mMetaData);
return NS_OK;
}
protected:
virtual nsresult InitMetaData() = 0;
nsresult
AllocateFacesArray(uint32_t num)
{
mMetaData.faces = new camera_face_t[num];
return NS_OK;
}
nsresult
ReleaseFacesArray()
{
delete [] mMetaData.faces;
mMetaData.faces = nullptr;
return NS_OK;
}
nsRefPtr<nsGonkCameraControl> mTarget;
camera_frame_metadata_t mMetaData;
};
class OneFaceDetected : public FaceDetected
{
public:
OneFaceDetected(nsGonkCameraControl* aTarget)
: FaceDetected(aTarget)
{ }
nsresult
InitMetaData() MOZ_OVERRIDE
{
mMetaData.number_of_faces = 1;
AllocateFacesArray(1);
mMetaData.faces[0].id = 1;
mMetaData.faces[0].score = 2;
mMetaData.faces[0].rect[0] = 3;
mMetaData.faces[0].rect[1] = 4;
mMetaData.faces[0].rect[2] = 5;
mMetaData.faces[0].rect[3] = 6;
mMetaData.faces[0].left_eye[0] = 7;
mMetaData.faces[0].left_eye[1] = 8;
mMetaData.faces[0].right_eye[0] = 9;
mMetaData.faces[0].right_eye[1] = 10;
mMetaData.faces[0].mouth[0] = 11;
mMetaData.faces[0].mouth[1] = 12;
return NS_OK;
}
};
class TwoFacesDetected : public FaceDetected
{
public:
TwoFacesDetected(nsGonkCameraControl* aTarget)
: FaceDetected(aTarget)
{ }
nsresult
InitMetaData() MOZ_OVERRIDE
{
mMetaData.number_of_faces = 2;
AllocateFacesArray(2);
mMetaData.faces[0].id = 1;
mMetaData.faces[0].score = 2;
mMetaData.faces[0].rect[0] = 3;
mMetaData.faces[0].rect[1] = 4;
mMetaData.faces[0].rect[2] = 5;
mMetaData.faces[0].rect[3] = 6;
mMetaData.faces[0].left_eye[0] = 7;
mMetaData.faces[0].left_eye[1] = 8;
mMetaData.faces[0].right_eye[0] = 9;
mMetaData.faces[0].right_eye[1] = 10;
mMetaData.faces[0].mouth[0] = 11;
mMetaData.faces[0].mouth[1] = 12;
mMetaData.faces[1].id = 13;
mMetaData.faces[1].score = 14;
mMetaData.faces[1].rect[0] = 15;
mMetaData.faces[1].rect[1] = 16;
mMetaData.faces[1].rect[2] = 17;
mMetaData.faces[1].rect[3] = 18;
mMetaData.faces[1].left_eye[0] = 19;
mMetaData.faces[1].left_eye[1] = 20;
mMetaData.faces[1].right_eye[0] = 21;
mMetaData.faces[1].right_eye[1] = 22;
mMetaData.faces[1].mouth[0] = 23;
mMetaData.faces[1].mouth[1] = 24;
return NS_OK;
}
};
class OneFaceNoFeaturesDetected : public FaceDetected
{
public:
OneFaceNoFeaturesDetected(nsGonkCameraControl* aTarget)
: FaceDetected(aTarget)
{ }
nsresult
InitMetaData() MOZ_OVERRIDE
{
mMetaData.number_of_faces = 1;
AllocateFacesArray(1);
mMetaData.faces[0].id = 1;
// Test clamping 'score' to 100.
mMetaData.faces[0].score = 1000;
mMetaData.faces[0].rect[0] = 3;
mMetaData.faces[0].rect[1] = 4;
mMetaData.faces[0].rect[2] = 5;
mMetaData.faces[0].rect[3] = 6;
// Nullable values set to 'not-supported' specific values
mMetaData.faces[0].left_eye[0] = -2000;
mMetaData.faces[0].left_eye[1] = -2000;
// Test other 'not-supported' values as well. We treat
// anything outside the range [-1000, 1000] as invalid.
mMetaData.faces[0].right_eye[0] = 1001;
mMetaData.faces[0].right_eye[1] = -1001;
mMetaData.faces[0].mouth[0] = -2000;
mMetaData.faces[0].mouth[1] = 2000;
return NS_OK;
}
};
class NoFacesDetected : public FaceDetected
{
public:
NoFacesDetected(nsGonkCameraControl* aTarget)
: FaceDetected(aTarget)
{ }
nsresult
InitMetaData() MOZ_OVERRIDE
{
mMetaData.number_of_faces = 0;
mMetaData.faces = nullptr;
return NS_OK;
}
};
int
TestGonkCameraHardware::StartFaceDetection()
{
nsRefPtr<FaceDetected> faceDetected;
if (IsTestCase("face-detection-detected-one-face")) {
faceDetected = new OneFaceDetected(mTarget);
} else if (IsTestCase("face-detection-detected-two-faces")) {
faceDetected = new TwoFacesDetected(mTarget);
} else if (IsTestCase("face-detection-detected-one-face-no-features")) {
faceDetected = new OneFaceNoFeaturesDetected(mTarget);
} else if (IsTestCase("face-detection-no-faces-detected")) {
faceDetected = new NoFacesDetected(mTarget);
}
if (!faceDetected) {
return GonkCameraHardware::StartFaceDetection();
}
nsresult rv = NS_DispatchToCurrentThread(faceDetected);
if (NS_FAILED(rv)) {
DOM_CAMERA_LOGE("Failed to dispatch FaceDetected runnable (0x%08x)\n", rv);
return UNKNOWN_ERROR;
}
return OK;
}
int
TestGonkCameraHardware::StopFaceDetection()
{
if (IsTestCase("face-detection-detected-one-face") ||
IsTestCase("face-detection-detected-two-faces") ||
IsTestCase("face-detection-detected-one-face-no-features") ||
IsTestCase("face-detection-no-faces-detected"))
{
return OK;
}
return GonkCameraHardware::StopFaceDetection();
}
int
TestGonkCameraHardware::TakePicture()
{
@ -184,6 +391,37 @@ TestGonkCameraHardware::StartPreview()
return GonkCameraHardware::StartPreview();
}
int
TestGonkCameraHardware::StartAutoFocusMoving(bool aIsMoving)
{
class AutoFocusMoving : public nsRunnable
{
public:
AutoFocusMoving(nsGonkCameraControl* aTarget, bool aIsMoving)
: mTarget(aTarget)
, mIsMoving(aIsMoving)
{ }
NS_IMETHODIMP
Run()
{
OnAutoFocusMoving(mTarget, mIsMoving);
return NS_OK;
}
protected:
nsGonkCameraControl* mTarget;
bool mIsMoving;
};
nsresult rv = NS_DispatchToCurrentThread(new AutoFocusMoving(mTarget, aIsMoving));
if (NS_SUCCEEDED(rv)) {
return OK;
}
DOM_CAMERA_LOGE("Failed to dispatch AutoFocusMoving runnable (0x%08x)\n", rv);
return UNKNOWN_ERROR;
}
int
TestGonkCameraHardware::PushParameters(const GonkCameraParameters& aParams)
{
@ -191,6 +429,19 @@ TestGonkCameraHardware::PushParameters(const GonkCameraParameters& aParams)
return TestCaseError(UNKNOWN_ERROR);
}
nsString focusMode;
GonkCameraParameters& params = const_cast<GonkCameraParameters&>(aParams);
params.Get(CAMERA_PARAM_FOCUSMODE, focusMode);
if (focusMode.EqualsASCII("continuous-picture") ||
focusMode.EqualsASCII("continuous-video"))
{
if (IsTestCase("autofocus-moving-true")) {
return StartAutoFocusMoving(true);
} else if (IsTestCase("autofocus-moving-false")) {
return StartAutoFocusMoving(false);
}
}
return GonkCameraHardware::PushParameters(aParams);
}

View File

@ -25,6 +25,8 @@ class TestGonkCameraHardware : public android::GonkCameraHardware
{
public:
virtual int AutoFocus() MOZ_OVERRIDE;
virtual int StartFaceDetection() MOZ_OVERRIDE;
virtual int StopFaceDetection() MOZ_OVERRIDE;
virtual int TakePicture() MOZ_OVERRIDE;
virtual int StartPreview() MOZ_OVERRIDE;
virtual int PushParameters(const mozilla::GonkCameraParameters& aParams) MOZ_OVERRIDE;
@ -59,6 +61,8 @@ protected:
bool IsTestCaseInternal(const char* aTest, const char* aFile, int aLine);
int TestCaseError(int aDefaultError);
int StartAutoFocusMoving(bool aIsMoving);
private:
TestGonkCameraHardware(const TestGonkCameraHardware&) MOZ_DELETE;
TestGonkCameraHardware& operator=(const TestGonkCameraHardware&) MOZ_DELETE;

View File

@ -21,6 +21,7 @@ SOURCES += [
'DOMCameraCapabilities.cpp',
'DOMCameraControl.cpp',
'DOMCameraControlListener.cpp',
'DOMCameraDetectedFace.cpp',
'DOMCameraManager.cpp',
]

View File

@ -8,3 +8,5 @@ support-files = camera_common.js
[test_camera_hardware_failures.html]
[test_bug975472.html]
[test_camera_fake_parameters.html]
[test_camera_hardware_face_detection.html]
[test_camera_hardware_auto_focus_moving_cb.html]

View File

@ -0,0 +1,131 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=965421
-->
<head>
<title>Bug 965421 - Test camera hardware API for the auto focus moving callback</title>
<script type="text/javascript" src="/MochiKit/MochiKit.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="camera_common.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=965421">Mozilla Bug 965421</a>
<video id="viewfinder" width = "200" height = "200" autoplay></video>
<img src="#" alt="This image is going to load" id="testimage"/>
<script class="testbody" type="text/javascript;version=1.7">
var whichCamera = navigator.mozCameras.getListOfCameras()[0];
var initialConfig = {
mode: 'picture',
recorderProfile: 'cif',
previewSize: {
width: 352,
height: 288
}
};
const PREF_AUTOFOCUSCALLBACK_ENABLED = "camera.control.autofocus_moving_callback.enabled";
var cameraObj;
var oldPref;
// Shorthand functions
function end() {
function reallyEnd() {
CameraTest.end();
}
if (oldPref) {
SpecialPowers.pushPrefEnv(
{'set': [[PREF_AUTOFOCUSCALLBACK_ENABLED, oldPref]]}, reallyEnd);
} else {
SpecialPowers.pushPrefEnv(
{'clear': [[PREF_AUTOFOCUSCALLBACK_ENABLED]]}, reallyEnd);
}
}
function next() {
CameraTest.next();
}
var tests = [
{
key: "autofocus-moving-true",
func: function testAutoFocusMovingIsTrue(camera) {
camera.onAutoFocusMoving = function(aIsMoving) {
ok(aIsMoving == true, "onAutoFocusMoving callback received true correctly");
camera.focusMode = 'auto';
next();
}
camera.focusMode = 'continuous-picture';
}
},
{
key: "autofocus-moving-false",
func: function testAutoFocusMovingIsFalse(camera) {
camera.onAutoFocusMoving = function(aIsMoving) {
ok(aIsMoving == false, "onAutoFocusMoving callback received false correctly");
camera.focusMode = 'auto';
end();
}
camera.focusMode = 'continuous-video';
}
},
];
var testGenerator = function() {
for (var i = 0; i < tests.length; ++i ) {
yield tests[i];
}
}();
window.addEventListener('beforeunload', function() {
document.getElementById('viewfinder').mozSrcObject = null;
cameraObj.release();
cameraObj = null;
});
// Must call CameraTest.begin() before any other async methods.
CameraTest.begin("hardware", function(test) {
// If the pref doesn't exist, this get will fail; catch it and continue.
try {
oldPref = SpecialPowers.getBoolPref(PREF_AUTOFOCUSCALLBACK_ENABLED);
} catch(e) { }
SpecialPowers.pushPrefEnv({'set': [[PREF_AUTOFOCUSCALLBACK_ENABLED, true]]}, function() {
var enabled;
try {
enabled = SpecialPowers.getBoolPref(PREF_AUTOFOCUSCALLBACK_ENABLED);
} catch(e) { }
ok(enabled, PREF_AUTOFOCUSCALLBACK_ENABLED + " is " + enabled);
function onSuccess(camera, config) {
document.getElementById('viewfinder').mozSrcObject = camera;
cameraObj = camera;
CameraTest.next = function() {
try {
var t = testGenerator.next();
test.set(t.key, t.func.bind(undefined, camera));
} catch(e) {
if (e instanceof StopIteration) {
end();
} else {
throw e;
}
}
};
next();
}
function onError(error) {
ok(false, "getCamera() failed with: " + error);
end();
}
navigator.mozCameras.getCamera(whichCamera, initialConfig, onSuccess, onError);
})
});
</script>
</body>
</html>

View File

@ -0,0 +1,320 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=965420
-->
<head>
<title>Bug 965420 - Test camera hardware API for face detection</title>
<script type="text/javascript" src="/MochiKit/MochiKit.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="camera_common.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=965420">Mozilla Bug 965420</a>
<video id="viewfinder" width = "200" height = "200" autoplay></video>
<img src="#" alt="This image is going to load" id="testimage"/>
<script class="testbody" type="text/javascript;version=1.7">
var whichCamera = navigator.mozCameras.getListOfCameras()[0];
var initialConfig = {
mode: 'picture',
recorderProfile: 'cif',
previewSize: {
width: 352,
height: 288
}
};
const PREF_FACEDETECTION_ENABLED = "camera.control.face_detection.enabled";
var cameraObj;
var oldPref;
// Shorthand functions
function end() {
function reallyEnd() {
CameraTest.end();
}
if (oldPref) {
SpecialPowers.pushPrefEnv(
{'set': [[PREF_FACEDETECTION_ENABLED, oldPref]]}, reallyEnd);
} else {
SpecialPowers.pushPrefEnv(
{'clear': [[PREF_FACEDETECTION_ENABLED]]}, reallyEnd);
}
}
function next() {
CameraTest.next();
}
function compareFaces(aFaces, expected)
{
ok(aFaces, "have detected faces object");
ok(aFaces.length == expected.faces.length,
"expected=" + expected.faces.length + ", got=" + aFaces.length);
// Track the overall result explicitly; a 'return' inside the forEach()
// callback does not propagate out of compareFaces().
let facesOk = aFaces.length == expected.faces.length;
aFaces.forEach(function (face, index) {
let result = compareFace(face, expected.faces[index]);
ok(result === "ok", "face check: " + result);
if (result !== "ok") {
facesOk = false;
}
});
return facesOk;
}
function compareFace(aFace, expected)
{
if (aFace.id != expected.id) {
return "expected face.id=" + expected.id + ", got=" + aFace.id;
}
if (aFace.score != expected.score) {
return "expected face.score=" + expected.score + ", got=" + aFace.score;
}
if (!aFace.bounds) {
return "face.bounds is missing";
}
if (aFace.bounds.left != expected.bounds.left ||
aFace.bounds.top != expected.bounds.top ||
aFace.bounds.right != expected.bounds.right ||
aFace.bounds.bottom != expected.bounds.bottom) {
return "expected face.bounds=" + expected.bounds.toSource() +
", got=({left:" + aFace.bounds.left + ", top:" + aFace.bounds.top + ", right:" + aFace.bounds.right + ", bottom:" + aFace.bounds.bottom + "})";
}
if (aFace.leftEye && !expected.leftEye) {
return "expected null face.leftEye, got=({x:" + aFace.leftEye.x + ", y:" + aFace.leftEye.y + "})";
}
if (!aFace.leftEye && expected.leftEye) {
return "expected face.leftEye=" + expected.leftEye.toSource() + ", got null leftEye";
}
if (aFace.leftEye && expected.leftEye &&
(aFace.leftEye.x != expected.leftEye.x || aFace.leftEye.y != expected.leftEye.y)) {
return "expected face.leftEye=" + expected.leftEye.toSource() +
", got=({x:" + aFace.leftEye.x + ", y:" + aFace.leftEye.y + "})";
}
if (aFace.rightEye && !expected.rightEye) {
return "expected null face.rightEye, got=({x:" + aFace.rightEye.x + ", y:" + aFace.rightEye.y + "})";
}
if (!aFace.rightEye && expected.rightEye) {
return "expected face.rightEye=" + expected.rightEye.toSource() + ", got null rightEye";
}
if (aFace.rightEye && expected.rightEye &&
(aFace.rightEye.x != expected.rightEye.x || aFace.rightEye.y != expected.rightEye.y)) {
return "expected face.rightEye=" + expected.rightEye.toSource() +
", got=({x:" + aFace.rightEye.x + ", y:" + aFace.rightEye.y + "})";
}
if (aFace.mouth && !expected.mouth) {
return "expected null face.mouth, got=({x:" + aFace.mouth.x + ", y:" + aFace.mouth.y + "})";
}
if (!aFace.mouth && expected.mouth) {
return "expected face.mouth=" + expected.mouth.toSource() + ", got null mouth";
}
if (aFace.mouth && expected.mouth &&
(aFace.mouth.x != expected.mouth.x || aFace.mouth.y != expected.mouth.y)) {
return "expected face.mouth=" + expected.mouth.toSource() +
", got=({x:" + aFace.mouth.x + ", y:" + aFace.mouth.y + "})";
}
return "ok";
}
var tests = [
{
key: "face-detection-detected-one-face",
func: function testFaceDetectionFoundOneFace(camera) {
var expected = {
faces: [ {
id: 1,
score: 2,
bounds: {
left: 3,
top: 4,
right: 5,
bottom: 6
},
leftEye: {
x: 7,
y: 8
},
rightEye: {
x: 9,
y: 10
},
mouth: {
x: 11,
y: 12
}
} ]
};
camera.onFacesDetected = function(aFaces) {
ok(compareFaces(aFaces, expected),
"onFaceDetected received the detected faces correctly");
camera.stopFaceDetection();
next();
}
camera.startFaceDetection();
}
},
{
key: "face-detection-detected-two-faces",
func: function testFaceDetectionFoundTwoFaces(camera) {
var expected = {
faces: [ {
id: 1,
score: 2,
bounds: {
left: 3,
top: 4,
right: 5,
bottom: 6
},
leftEye: {
x: 7,
y: 8
},
rightEye: {
x: 9,
y: 10
},
mouth: {
x: 11,
y: 12
}
},
{
id: 13,
score: 14,
bounds: {
left: 15,
top: 16,
right: 17,
bottom: 18
},
leftEye: {
x: 19,
y: 20
},
rightEye: {
x: 21,
y: 22
},
mouth: {
x: 23,
y: 24
}
} ]
};
camera.onFacesDetected = function(aFaces) {
ok(compareFaces(aFaces, expected),
"onFaceDetected received the detected faces correctly");
camera.stopFaceDetection();
next();
}
camera.startFaceDetection();
}
},
{
key: "face-detection-detected-one-face-no-features",
func: function (camera) {
var expected = {
faces: [ {
id: 1,
score: 100,
bounds: {
left: 3,
top: 4,
right: 5,
bottom: 6
},
leftEye: null,
rightEye: null,
mouth: null
} ]
};
camera.onFacesDetected = function(aFaces) {
ok(compareFaces(aFaces, expected),
"onFaceDetected received the detected faces correctly");
camera.stopFaceDetection();
next();
}
camera.startFaceDetection();
}
},
{
key: "face-detection-no-faces-detected",
func: function (camera) {
var expected = {
faces: []
};
camera.onFacesDetected = function(aFaces) {
ok(compareFaces(aFaces, expected),
"onFaceDetected received the detected faces correctly");
camera.stopFaceDetection();
next();
}
camera.startFaceDetection();
}
},
];
var testGenerator = function() {
for (var i = 0; i < tests.length; ++i ) {
yield tests[i];
}
}();
window.addEventListener('beforeunload', function() {
document.getElementById('viewfinder').mozSrcObject = null;
if (cameraObj) {
cameraObj.release();
cameraObj = null;
}
});
// Must call CameraTest.begin() before any other async methods.
CameraTest.begin("hardware", function(test) {
// If the pref doesn't exist, this get will fail; catch it and continue.
try {
oldPref = SpecialPowers.getBoolPref(PREF_FACEDETECTION_ENABLED);
} catch(e) { }
SpecialPowers.pushPrefEnv({'set': [[PREF_FACEDETECTION_ENABLED, true]]}, function() {
var enabled;
try {
enabled = SpecialPowers.getBoolPref(PREF_FACEDETECTION_ENABLED);
} catch(e) { }
ok(enabled, PREF_FACEDETECTION_ENABLED + " is " + enabled);
function onSuccess(camera, config) {
document.getElementById('viewfinder').mozSrcObject = camera;
cameraObj = camera;
CameraTest.next = function() {
try {
var t = testGenerator.next();
test.set(t.key, t.func.bind(undefined, camera));
} catch(e) {
if (e instanceof StopIteration) {
end();
} else {
throw e;
}
}
};
next();
}
function onError(error) {
ok(false, "getCamera() failed with: " + error);
end();
}
navigator.mozCameras.getCamera(whichCamera, initialConfig, onSuccess, onError);
})
});
</script>
</body>
</html>

View File

@ -7,6 +7,7 @@
const Cc = Components.classes;
const Ci = Components.interfaces;
const Cu = Components.utils;
const Cr = Components.results;
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
@ -319,8 +320,46 @@ DOMDownloadImpl.prototype = {
});
if (aDownload.error) {
//
// When we get a generic error failure back from the JS downloads API,
// we check the status of device storage to see whether we can provide
// a more specific error result value.
//
// XXX If these checks expand further, consider moving them into their
// own function.
//
let result = aDownload.error.result;
let storage = this._window.navigator.getDeviceStorage("sdcard");
// If we don't have access to device storage we'll opt out of these
// extra checks as they are all dependent on the state of the storage.
if (result == Cr.NS_ERROR_FAILURE && storage) {
// We will delay sending the notification until we've inferred which
// error is really happening.
changed = false;
debug("Attempting to infer error via device storage sanity checks.");
// Get device storage and request availability status.
let available = storage.available();
available.onsuccess = (function() {
debug("Storage Status = '" + available.result + "'");
let inferredError = result;
switch (available.result) {
case "unavailable":
inferredError = Cr.NS_ERROR_FILE_NOT_FOUND;
break;
case "shared":
inferredError = Cr.NS_ERROR_FILE_ACCESS_DENIED;
break;
}
this._updateWithError(aDownload, inferredError);
}).bind(this);
available.onerror = (function() {
this._updateWithError(aDownload, result);
}).bind(this);
}
this.error =
new this._window.DOMError("DownloadError", aDownload.error.result);
new this._window.DOMError("DownloadError", result);
} else {
this.error = null;
}
@ -330,6 +369,16 @@ DOMDownloadImpl.prototype = {
return;
}
this._sendStateChange();
},
_updateWithError: function(aDownload, aError) {
this.error =
new this._window.DOMError("DownloadError", aError);
this._sendStateChange();
},
_sendStateChange: function() {
// __DOM_IMPL__ may not be available at first update.
if (this.__DOM_IMPL__) {
let event = new this._window.DownloadEvent("statechange", {

View File

@ -141,7 +141,7 @@ this.DownloadsIPC = {
case "Downloads:Resume:Return":
if (this.downloadPromises[download.promiseId]) {
if (!download.error) {
this.downloadPromises[download.promiseId].resolve(download);
this.downloadPromises[download.promiseId].resolve(download);
} else {
this.downloadPromises[download.promiseId].reject(download);
}

View File

@ -714,6 +714,8 @@ TelephonyProvider.prototype = {
new Date().getTime() - aCall.started : 0;
let data = {
number: aCall.number,
serviceId: aClientId,
emergency: aCall.isEmergency,
duration: duration,
direction: aCall.isOutgoing ? "outgoing" : "incoming"
};

View File

@ -28,7 +28,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=766694
// are what we intend. Each entry in the arrays below can either be a
// simple string with the interface name, or an object with a 'name'
// property giving the interface name as a string, and additional
// properties which quality the exposure of that interface. For example:
// properties which qualify the exposure of that interface. For example:
//
// [
// "AGlobalInterface",
@ -168,11 +168,15 @@ var interfaceNamesInGlobalScope =
// IMPORTANT: Do not change this list without review from a DOM peer!
{name: "CallGroupErrorEvent", b2g: true, pref: "dom.telephony.enabled"},
// IMPORTANT: Do not change this list without review from a DOM peer!
"CameraCapabilities",
{name: "CameraCapabilities", b2g: true},
// IMPORTANT: Do not change this list without review from a DOM peer!
"CameraControl",
{name: "CameraControl", b2g: true},
// IMPORTANT: Do not change this list without review from a DOM peer!
"CameraManager",
{name: "CameraDetectedFace", b2g: true, pref: "camera.control.face_detection.enabled"},
// IMPORTANT: Do not change this list without review from a DOM peer!
{name: "CameraManager", b2g: true},
// IMPORTANT: Do not change this list without review from a DOM peer!
{name: "CameraPoint", b2g: true, pref: "camera.control.face_detection.enabled"},
// IMPORTANT: Do not change this list without review from a DOM peer!
"CanvasGradient",
// IMPORTANT: Do not change this list without review from a DOM peer!

View File

@ -5,6 +5,10 @@
* You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/* The capabilities of a CameraControl instance. These are guaranteed
not to change over the lifetime of that particular instance.
*/
[Func="CameraCapabilities::HasSupport"]
interface CameraCapabilities
{
[Constant, Cached] readonly attribute sequence<CameraSize> previewSizes;
@ -24,6 +28,7 @@ interface CameraCapabilities
[Constant, Cached] readonly attribute unsigned long maxFocusAreas;
[Constant, Cached] readonly attribute unsigned long maxMeteringAreas;
[Constant, Cached] readonly attribute unsigned long maxDetectedFaces;
[Constant, Cached] readonly attribute double minExposureCompensation;
[Constant, Cached] readonly attribute double maxExposureCompensation;

View File

@ -72,26 +72,6 @@ dictionary CameraPictureOptions
long long dateTime = 0;
};
/* These properties affect the video recording preview, e.g.
{
profile: "1080p",
rotation: 0
}
'profile' is one of the profiles returned by
CameraCapabilities.recorderProfiles'; if this profile is missing,
an arbitrary profile will be chosen.
'rotation' is the degrees clockwise to rotate the preview; if
this option is not supported, it will be ignored; if this option
is missing, the default is 0.
*/
dictionary CameraRecorderOptions
{
DOMString profile;
long rotation;
};
/* These properties affect the actual video recording, e.g.
{
rotation: 0,
@ -117,6 +97,15 @@ dictionary CameraStartRecordingOptions
long rotation = 0;
long long maxFileSizeBytes = 0;
long long maxVideoLengthMs = 0;
/* If startRecording() is called with flashMode set to "auto" and the
camera has determined that the scene is poorly lit, the flash mode
will be automatically changed to "torch" until stopRecording() is
called. During this time, flashMode will reflect the new setting. If
flashMode is changed while recording is in progress, the new setting
will be left as-is on stopRecording(). If the camera does not
support this setting, it will be ignored. */
boolean autoEnableLowLightTorch = false;
};
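/* Illustrative only (not part of this interface file): a minimal JS sketch of
   opting into the low-light torch behaviour described above. It assumes the
   existing callback-based startRecording(options, storageArea, filename,
   onSuccess, onError) entry point; `camera`, `storage`, and the file name are
   placeholder values.

     camera.flashMode = 'auto';
     camera.startRecording(
       { rotation: 0,
         maxVideoLengthMs: 30000,
         autoEnableLowLightTorch: true },
       storage, 'video.3gp',
       function onSuccess() {
         // In a poorly lit scene, camera.flashMode may now read 'torch'.
       },
       function onError(error) {
         console.error('startRecording failed: ' + error);
       });
     // flashMode reverts to 'auto' once stopRecording() is called.
*/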
callback CameraSetConfigurationCallback = void (CameraConfiguration configuration);
@ -128,11 +117,13 @@ callback CameraClosedCallback = void ();
callback CameraReleaseCallback = void ();
callback CameraRecorderStateChange = void (DOMString newState);
callback CameraPreviewStateChange = void (DOMString newState);
callback CameraAutoFocusMovingCallback = void (boolean isMoving);
/*
attributes here affect the preview, any pictures taken, and/or
any video recorded by the camera.
*/
[Func="nsDOMCameraControl::HasSupport"]
interface CameraControl : MediaStream
{
[Constant, Cached]
@ -280,6 +271,15 @@ interface CameraControl : MediaStream
[Throws]
void autoFocus(CameraAutoFocusCallback onSuccess, optional CameraErrorCallback onError);
/* if continuous autofocus is supported and focusMode is set to enable it,
then this function is called whenever the camera decides to start and
stop moving the focus position; it can be used to update a UI element to
indicate that the camera is still trying to focus, or has finished. Some
platforms do not support this event, in which case the callback is never
invoked. */
[Pref="camera.control.autofocus_moving_callback.enabled"]
attribute CameraAutoFocusMovingCallback? onAutoFocusMoving;
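/* Illustrative only (not part of this interface file): a minimal JS sketch of
   the callback described above, assuming the device supports a continuous
   focus mode and the camera.control.autofocus_moving_callback.enabled pref is
   set; `camera` and `focusIndicator` are placeholder names.

     camera.focusMode = 'continuous-picture';
     camera.onAutoFocusMoving = function(isMoving) {
       // Show a focusing indicator while the lens is moving and hide it
       // once the camera settles.
       focusIndicator.hidden = !isMoving;
     };
*/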
/* capture an image and return it as a blob to the 'onSuccess' callback;
if the camera supports it, this may be invoked while the camera is
already recording video.
@ -335,3 +335,100 @@ interface CameraControl : MediaStream
optional CameraSetConfigurationCallback onSuccess,
optional CameraErrorCallback onError);
};
/* The coordinates of a point, relative to the camera sensor, of the center of
detected facial features. As with CameraRegions:
{ x: -1000, y: -1000 } is the top-left corner
{ x: 1000, y: 1000 } is the bottom-right corner
x and y can range from -1000 to 1000.
*/
[Pref="camera.control.face_detection.enabled", Func="DOMCameraPoint::HasSupport"]
interface CameraPoint
{
attribute long x;
attribute long y;
};
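/* Illustrative only (not part of this interface file): a JS sketch of mapping
   the sensor-relative coordinate space above onto viewfinder pixels, ignoring
   any preview rotation or mirroring; pointToPixels, previewWidth and
   previewHeight are placeholder names.

     function pointToPixels(point, previewWidth, previewHeight) {
       // [-1000, 1000] maps linearly onto [0, previewWidth) x [0, previewHeight)
       return {
         x: (point.x + 1000) * previewWidth / 2000,
         y: (point.y + 1000) * previewHeight / 2000
       };
     }
*/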
/* The information for each face detected by a camera device, e.g.
{
id: 1,
score: 80,
bounds: { left: -203,
top: -400,
right: 300,
bottom: 250 },
leftEye: { x: -100,
y: -200 },
rightEye: { x: 100,
y: 100 },
mouth: { x: 150,
y: 150 } }
'id' is a unique value per face while the face is visible to the tracker.
If the face leaves the viewfinder and then returns, it will be assigned
a new value.
'score' is the confidence level for the detection of the face.
This range is 1 to 100, where 100 is the highest confidence.
'bounds' is the bounds of the face. It is guaranteed left < right and
top < bottom. The coordinates can be smaller than -1000 or bigger than 1000.
But at least one vertex will be within (-1000, -1000) and (1000, 1000).
'leftEye' is the coordinates of the centre of the left eye. The coordinates
are in the same space as the ones for 'bounds'. This is an optional field
and may not be supported on all devices. If it is not supported or detected,
the value will be set to null.
'rightEye' is the coordinates of the detected right eye; null if not
supported or detected.
'mouth' is the coordinates of the detected mouth; null if not supported or
detected.
*/
[Pref="camera.control.face_detection.enabled", Func="DOMCameraDetectedFace::HasSupport"]
interface CameraDetectedFace
{
readonly attribute unsigned long id;
readonly attribute unsigned long score;
readonly attribute DOMRect bounds;
readonly attribute boolean hasLeftEye;
readonly attribute CameraPoint? leftEye;
readonly attribute boolean hasRightEye;
readonly attribute CameraPoint? rightEye;
readonly attribute boolean hasMouth;
readonly attribute CameraPoint? mouth;
};
callback CameraFaceDetectionCallback = void (sequence<CameraDetectedFace> faces);
partial interface CameraControl
{
/* Starts the face detection. This should be called after the preview is
started. The camera will periodically call 'onFacesDetected' with a
sequence of zero or more detected faces in the preview frame.
How often the callback is invoked is implementation dependent.
This method throws an exception if face detection fails to start.
*/
[Throws, Pref="camera.control.face_detection.enabled"]
void startFaceDetection();
/* Stops the face detection.
This method throws an exception if face detection can't be stopped.
*/
[Throws, Pref="camera.control.face_detection.enabled"]
void stopFaceDetection();
/* Callback for faces detected in the preview frame. If no faces are
detected, the callback is invoked with an empty sequence. */
[Pref="camera.control.face_detection.enabled"]
attribute CameraFaceDetectionCallback? onFacesDetected;
};
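/* Illustrative only (not part of this interface file): a minimal JS sketch of
   driving the face detection API declared above, assuming the
   camera.control.face_detection.enabled pref is set; `camera` and
   `drawFaceRect` are placeholder names.

     camera.onFacesDetected = function(faces) {
       faces.forEach(function(face) {
         // face.bounds is a DOMRect in the same space as CameraPoint
         drawFaceRect(face.id, face.score, face.bounds);
         if (face.hasLeftEye) {
           // face.leftEye is null whenever hasLeftEye is false
           console.log('left eye at ' + face.leftEye.x + ', ' + face.leftEye.y);
         }
       });
     };
     camera.startFaceDetection();  // throws if detection cannot be started
     // ...
     camera.stopFaceDetection();
*/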

View File

@ -29,6 +29,7 @@ callback CameraErrorCallback = void (DOMString error);
callback GetCameraCallback = void (CameraControl camera,
CameraConfiguration configuration);
[Func="nsDOMCameraManager::HasSupport"]
interface CameraManager
{
/* get a camera instance; 'camera' is one of the camera

View File

@ -24,16 +24,10 @@ public class AppConstants {
public static final String ANDROID_PACKAGE_NAME = "@ANDROID_PACKAGE_NAME@";
public static final String MANGLED_ANDROID_PACKAGE_NAME = "@MANGLED_ANDROID_PACKAGE_NAME@";
/**
* The Java Class instance that launches the browser.
* <p>
* This should always agree with <code>BROWSER_INTENT_CLASS_NAME</code>.
*/
public static final Class<? extends Activity> BROWSER_INTENT_CLASS = @ANDROID_PACKAGE_NAME@.App.class;
/**
* The name of the Java class that launches the browser.
*/
public static final String BROWSER_INTENT_CLASS_NAME = BROWSER_INTENT_CLASS.getName();
public static final String BROWSER_INTENT_CLASS_NAME = ANDROID_PACKAGE_NAME + ".App";
public static final String GRE_MILESTONE = "@GRE_MILESTONE@";

View File

@ -13,6 +13,7 @@ import org.mozilla.gecko.background.fxa.FxAccountClient;
import org.mozilla.gecko.background.fxa.FxAccountClient10.RequestDelegate;
import org.mozilla.gecko.background.fxa.FxAccountClient20;
import org.mozilla.gecko.background.fxa.FxAccountClientException.FxAccountClientRemoteException;
import org.mozilla.gecko.fxa.FirefoxAccounts;
import org.mozilla.gecko.fxa.authenticator.AndroidFxAccount;
import org.mozilla.gecko.fxa.login.Engaged;
import org.mozilla.gecko.fxa.login.State;
@ -87,12 +88,18 @@ public class FxAccountConfirmAccountActivity extends FxAccountAbstractActivity i
FxAccountSyncStatusHelper.getInstance().startObserving(syncStatusDelegate);
refresh();
fxAccount.requestSync(FirefoxAccounts.NOW);
}
@Override
public void onPause() {
super.onPause();
FxAccountSyncStatusHelper.getInstance().stopObserving(syncStatusDelegate);
if (fxAccount != null) {
fxAccount.requestSync(FirefoxAccounts.SOON);
}
}
protected class SyncStatusDelegate implements FxAccountSyncStatusHelper.Delegate {

View File

@ -24,6 +24,7 @@ import android.content.ContentResolver;
import android.content.Context;
import android.content.res.Configuration;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.LoaderManager;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
@ -298,10 +299,6 @@ public class DynamicPanel extends HomeFragment
}
}
private static int generateLoaderId(String datasetId) {
return datasetId.hashCode();
}
/**
* Handles a dataset refresh request from Gecko. This is usually
* triggered by a HomeStorage.save() call in an add-on.
@ -315,38 +312,19 @@ public class DynamicPanel extends HomeFragment
return;
}
final Activity activity = getActivity();
if (activity == null) {
return;
}
Log.d(LOGTAG, "Refresh request for dataset: " + datasetId);
final int loaderId = generateLoaderId(datasetId);
final LoaderManager lm = getLoaderManager();
final Loader<?> loader = (Loader<?>) lm.getLoader(loaderId);
// Only restart a loader if there's already an active one
// for the given dataset ID. Do nothing otherwise.
if (loader != null) {
final PanelDatasetLoader datasetLoader = (PanelDatasetLoader) loader;
final DatasetRequest request = datasetLoader.getRequest();
// Ensure the refresh request doesn't affect the view's filter
// stack (i.e. use DATASET_LOAD type) but keep the current
// dataset ID and filter.
final DatasetRequest newRequest =
new DatasetRequest(DatasetRequest.Type.DATASET_LOAD,
request.getDatasetId(),
request.getFilterDetail());
restartDatasetLoader(newRequest);
}
final ContentResolver cr = activity.getContentResolver();
cr.notifyChange(getDatasetNotificationUri(datasetId), null);
}
private void restartDatasetLoader(DatasetRequest request) {
final Bundle bundle = new Bundle();
bundle.putParcelable(DATASET_REQUEST, request);
// Ensure one loader per dataset
final int loaderId = generateLoaderId(request.getDatasetId());
getLoaderManager().restartLoader(loaderId, bundle, mLoaderCallbacks);
private static Uri getDatasetNotificationUri(String datasetId) {
return Uri.withAppendedPath(HomeItems.CONTENT_URI, datasetId);
}
/**
@ -364,19 +342,22 @@ public class DynamicPanel extends HomeFragment
return;
}
restartDatasetLoader(request);
final Bundle bundle = new Bundle();
bundle.putParcelable(DATASET_REQUEST, request);
getLoaderManager().restartLoader(request.getViewIndex(),
bundle, mLoaderCallbacks);
}
@Override
public void resetDataset(String datasetId) {
Log.d(LOGTAG, "Resetting dataset: " + datasetId);
public void resetDataset(int viewIndex) {
Log.d(LOGTAG, "Resetting dataset: " + viewIndex);
final LoaderManager lm = getLoaderManager();
final int loaderId = generateLoaderId(datasetId);
// Release any resources associated with the dataset if
// it's currently loaded in memory.
final Loader<?> datasetLoader = lm.getLoader(loaderId);
final Loader<?> datasetLoader = lm.getLoader(viewIndex);
if (datasetLoader != null) {
datasetLoader.reset();
}
@ -387,7 +368,7 @@ public class DynamicPanel extends HomeFragment
* Cursor loader for the panel datasets.
*/
private static class PanelDatasetLoader extends SimpleCursorLoader {
private final DatasetRequest mRequest;
private DatasetRequest mRequest;
public PanelDatasetLoader(Context context, DatasetRequest request) {
super(context);
@ -398,6 +379,21 @@ public class DynamicPanel extends HomeFragment
return mRequest;
}
@Override
public void onContentChanged() {
// Ensure the refresh request doesn't affect the view's filter
// stack (i.e. use DATASET_LOAD type) but keep the current
// dataset ID and filter.
final DatasetRequest newRequest =
new DatasetRequest(mRequest.getViewIndex(),
DatasetRequest.Type.DATASET_LOAD,
mRequest.getDatasetId(),
mRequest.getFilterDetail());
mRequest = newRequest;
super.onContentChanged();
}
@Override
public Cursor loadCursor() {
final ContentResolver cr = getContext().getContentResolver();
@ -415,7 +411,12 @@ public class DynamicPanel extends HomeFragment
}
// XXX: You can use CONTENT_FAKE_URI for development to pull items from fake_home_items.json.
return cr.query(HomeItems.CONTENT_URI, null, selection, selectionArgs, null);
final Cursor c = cr.query(HomeItems.CONTENT_URI, null, selection, selectionArgs, null);
final Uri notificationUri = getDatasetNotificationUri(mRequest.getDatasetId());
c.setNotificationUri(cr, notificationUri);
return c;
}
}
@ -434,8 +435,8 @@ public class DynamicPanel extends HomeFragment
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
final DatasetRequest request = getRequestFromLoader(loader);
Log.d(LOGTAG, "Finished loader for request: " + request);
if (mPanelLayout != null) {
mPanelLayout.deliverDataset(request, cursor);
}
@ -445,8 +446,9 @@ public class DynamicPanel extends HomeFragment
public void onLoaderReset(Loader<Cursor> loader) {
final DatasetRequest request = getRequestFromLoader(loader);
Log.d(LOGTAG, "Resetting loader for request: " + request);
if (mPanelLayout != null) {
mPanelLayout.releaseDataset(request.getDatasetId());
mPanelLayout.releaseDataset(request.getViewIndex());
}
}

View File

@ -39,7 +39,11 @@ class FramePanelLayout extends PanelLayout {
if (mChildView instanceof DatasetBacked) {
final FilterDetail filter = new FilterDetail(mChildConfig.getFilter(), null);
final DatasetRequest request = new DatasetRequest(mChildConfig.getDatasetId(), filter);
final DatasetRequest request = new DatasetRequest(mChildConfig.getIndex(),
mChildConfig.getDatasetId(),
filter);
Log.d(LOGTAG, "Requesting child request: " + request);
requestDataset(request);
}

View File

@ -144,7 +144,7 @@ public final class HomeConfig {
final int viewCount = jsonViews.length();
for (int i = 0; i < viewCount; i++) {
final JSONObject jsonViewConfig = (JSONObject) jsonViews.get(i);
final ViewConfig viewConfig = new ViewConfig(jsonViewConfig);
final ViewConfig viewConfig = new ViewConfig(i, jsonViewConfig);
mViews.add(viewConfig);
}
} else {
@ -605,6 +605,7 @@ public final class HomeConfig {
}
public static class ViewConfig implements Parcelable {
private final int mIndex;
private final ViewType mType;
private final String mDatasetId;
private final ItemType mItemType;
@ -619,7 +620,8 @@ public final class HomeConfig {
private static final String JSON_KEY_BACK_IMAGE_URL = "backImageUrl";
private static final String JSON_KEY_FILTER = "filter";
public ViewConfig(JSONObject json) throws JSONException, IllegalArgumentException {
public ViewConfig(int index, JSONObject json) throws JSONException, IllegalArgumentException {
mIndex = index;
mType = ViewType.fromId(json.getString(JSON_KEY_TYPE));
mDatasetId = json.getString(JSON_KEY_DATASET);
mItemType = ItemType.fromId(json.getString(JSON_KEY_ITEM_TYPE));
@ -632,6 +634,7 @@ public final class HomeConfig {
@SuppressWarnings("unchecked")
public ViewConfig(Parcel in) {
mIndex = in.readInt();
mType = (ViewType) in.readParcelable(getClass().getClassLoader());
mDatasetId = in.readString();
mItemType = (ItemType) in.readParcelable(getClass().getClassLoader());
@ -643,6 +646,7 @@ public final class HomeConfig {
}
public ViewConfig(ViewConfig viewConfig) {
mIndex = viewConfig.mIndex;
mType = viewConfig.mType;
mDatasetId = viewConfig.mDatasetId;
mItemType = viewConfig.mItemType;
@ -653,8 +657,9 @@ public final class HomeConfig {
validate();
}
public ViewConfig(ViewType type, String datasetId, ItemType itemType,
public ViewConfig(int index, ViewType type, String datasetId, ItemType itemType,
ItemHandler itemHandler, String backImageUrl, String filter) {
mIndex = index;
mType = type;
mDatasetId = datasetId;
mItemType = itemType;
@ -683,6 +688,10 @@ public final class HomeConfig {
}
}
public int getIndex() {
return mIndex;
}
public ViewType getType() {
return mType;
}
@ -733,6 +742,7 @@ public final class HomeConfig {
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mIndex);
dest.writeParcelable(mType, 0);
dest.writeString(mDatasetId);
dest.writeParcelable(mItemType, 0);

View File

@ -18,10 +18,12 @@ import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.Log;
import android.util.SparseArray;
import android.view.KeyEvent;
import android.view.View;
import android.widget.FrameLayout;
import java.lang.ref.SoftReference;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.Map;
@ -65,7 +67,7 @@ import java.util.WeakHashMap;
abstract class PanelLayout extends FrameLayout {
private static final String LOGTAG = "GeckoPanelLayout";
protected final Map<View, ViewState> mViewStateMap;
protected final SparseArray<ViewState> mViewStates;
private final PanelConfig mPanelConfig;
private final DatasetHandler mDatasetHandler;
private final OnUrlOpenListener mUrlOpenListener;
@ -112,26 +114,33 @@ abstract class PanelLayout extends FrameLayout {
};
}
private final int mViewIndex;
private final Type mType;
private final String mDatasetId;
private final FilterDetail mFilterDetail;
private DatasetRequest(Parcel in) {
this.mViewIndex = in.readInt();
this.mType = (Type) in.readParcelable(getClass().getClassLoader());
this.mDatasetId = in.readString();
this.mFilterDetail = (FilterDetail) in.readParcelable(getClass().getClassLoader());
}
public DatasetRequest(String datasetId, FilterDetail filterDetail) {
this(Type.DATASET_LOAD, datasetId, filterDetail);
public DatasetRequest(int index, String datasetId, FilterDetail filterDetail) {
this(index, Type.DATASET_LOAD, datasetId, filterDetail);
}
public DatasetRequest(Type type, String datasetId, FilterDetail filterDetail) {
public DatasetRequest(int index, Type type, String datasetId, FilterDetail filterDetail) {
this.mViewIndex = index;
this.mType = type;
this.mDatasetId = datasetId;
this.mFilterDetail = filterDetail;
}
public int getViewIndex() {
return mViewIndex;
}
public Type getType() {
return mType;
}
@ -155,13 +164,18 @@ abstract class PanelLayout extends FrameLayout {
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mViewIndex);
dest.writeParcelable(mType, 0);
dest.writeString(mDatasetId);
dest.writeParcelable(mFilterDetail, 0);
}
public String toString() {
return "{type: " + mType + " dataset: " + mDatasetId + ", filter: " + mFilterDetail + "}";
return "{ index: " + mViewIndex +
", type: " + mType +
", dataset: " + mDatasetId +
", filter: " + mFilterDetail +
" }";
}
public static final Creator<DatasetRequest> CREATOR = new Creator<DatasetRequest>() {
@ -187,11 +201,11 @@ abstract class PanelLayout extends FrameLayout {
public void requestDataset(DatasetRequest request);
/**
* Releases any resources associated with a previously loaded
* dataset. It will do nothing if the dataset with the given ID
* hasn't been loaded before.
* Releases any resources associated with a panel view. It will
* do nothing if the view with the given index hasn't been
* created before.
*/
public void resetDataset(String datasetId);
public void resetDataset(int viewIndex);
}
public interface PanelView {
@ -207,29 +221,70 @@ abstract class PanelLayout extends FrameLayout {
public PanelLayout(Context context, PanelConfig panelConfig, DatasetHandler datasetHandler, OnUrlOpenListener urlOpenListener) {
super(context);
mViewStateMap = new WeakHashMap<View, ViewState>();
mViewStates = new SparseArray<ViewState>();
mPanelConfig = panelConfig;
mDatasetHandler = datasetHandler;
mUrlOpenListener = urlOpenListener;
}
@Override
public void onDetachedFromWindow() {
super.onDetachedFromWindow();
final int count = mViewStates.size();
for (int i = 0; i < count; i++) {
final ViewState viewState = mViewStates.valueAt(i);
final View view = viewState.getView();
if (view != null) {
maybeSetDataset(view, null);
}
}
mViewStates.clear();
}
/**
* Delivers the dataset as a {@code Cursor} to be bound to the
* panel views backed by it. This is used by the {@code DatasetHandler}
* panel view backed by it. This is used by the {@code DatasetHandler}
* in response to a dataset request.
*/
public final void deliverDataset(DatasetRequest request, Cursor cursor) {
Log.d(LOGTAG, "Delivering request: " + request);
updateViewsFromRequest(request, cursor);
final ViewState viewState = mViewStates.get(request.getViewIndex());
if (viewState == null) {
return;
}
switch (request.getType()) {
case FILTER_PUSH:
viewState.pushFilter(request.getFilterDetail());
break;
case FILTER_POP:
viewState.popFilter();
break;
}
final View view = viewState.getView();
if (view != null) {
maybeSetDataset(view, cursor);
}
}
/**
* Releases any references to the given dataset from all
* existing panel views.
*/
public final void releaseDataset(String datasetId) {
Log.d(LOGTAG, "Releasing dataset: " + datasetId);
releaseViewsWithDataset(datasetId);
public final void releaseDataset(int viewIndex) {
Log.d(LOGTAG, "Releasing dataset: " + viewIndex);
final ViewState viewState = mViewStates.get(viewIndex);
if (viewState == null) {
return;
}
final View view = viewState.getView();
if (view != null) {
maybeSetDataset(view, null);
}
}
/**
@ -238,15 +293,24 @@ abstract class PanelLayout extends FrameLayout {
*/
protected final void requestDataset(DatasetRequest request) {
Log.d(LOGTAG, "Requesting request: " + request);
if (mViewStates.get(request.getViewIndex()) == null) {
return;
}
mDatasetHandler.requestDataset(request);
}
/**
* Releases any resources associated with a previously
* loaded dataset e.g. close any associated {@code Cursor}.
* Releases any resources associated with a panel view,
* e.g. closing any associated {@code Cursor}.
*/
protected final void resetDataset(String datasetId) {
mDatasetHandler.resetDataset(datasetId);
protected final void resetDataset(int viewIndex) {
Log.d(LOGTAG, "Resetting view with index: " + viewIndex);
if (mViewStates.get(viewIndex) == null) {
return;
}
mDatasetHandler.resetDataset(viewIndex);
}
/**
@ -256,34 +320,39 @@ abstract class PanelLayout extends FrameLayout {
* keep track of panel views and their associated datasets.
*/
protected final View createPanelView(ViewConfig viewConfig) {
-       final View view;
-
-       switch(viewConfig.getType()) {
-           case LIST:
-               view = new PanelListView(getContext(), viewConfig);
-               break;
-
-           case GRID:
-               view = new PanelGridView(getContext(), viewConfig);
-               break;
-
-           default:
-               throw new IllegalStateException("Unrecognized view type in " + getClass().getSimpleName());
+       Log.d(LOGTAG, "Creating panel view: " + viewConfig.getType());
+
+       ViewState viewState = mViewStates.get(viewConfig.getIndex());
+       if (viewState == null) {
+           viewState = new ViewState(viewConfig);
+           mViewStates.put(viewConfig.getIndex(), viewState);
        }

-       final ViewState state = new ViewState(viewConfig);
-       // TODO: Push initial filter here onto ViewState
-       mViewStateMap.put(view, state);
-
-       PanelView panelView = (PanelView) view;
-       panelView.setOnItemOpenListener(new PanelOnItemOpenListener(state));
-       panelView.setOnKeyListener(new PanelKeyListener(state));
-
-       if (view instanceof DatasetBacked) {
-           DatasetBacked datasetBacked = (DatasetBacked) view;
-           datasetBacked.setFilterManager(new PanelFilterManager(state));
+       View view = viewState.getView();
+       if (view == null) {
+           switch(viewConfig.getType()) {
+               case LIST:
+                   view = new PanelListView(getContext(), viewConfig);
+                   break;
+
+               case GRID:
+                   view = new PanelGridView(getContext(), viewConfig);
+                   break;
+
+               default:
+                   throw new IllegalStateException("Unrecognized view type in " + getClass().getSimpleName());
+           }
+
+           PanelView panelView = (PanelView) view;
+           panelView.setOnItemOpenListener(new PanelOnItemOpenListener(viewState));
+           panelView.setOnKeyListener(new PanelKeyListener(viewState));
+
+           if (view instanceof DatasetBacked) {
+               DatasetBacked datasetBacked = (DatasetBacked) view;
+               datasetBacked.setFilterManager(new PanelFilterManager(viewState));
+           }
+
+           viewState.setView(view);
        }

        return view;
@ -295,45 +364,14 @@ abstract class PanelLayout extends FrameLayout {
*/
protected final void disposePanelView(View view) {
Log.d(LOGTAG, "Disposing panel view");
-       if (mViewStateMap.containsKey(view)) {
-           // Release any Cursor references from the view
-           // if it's backed by a dataset.
-           maybeSetDataset(view, null);
-
-           // Remove the view entry from the map
-           mViewStateMap.remove(view);
-       }
-   }
-
-   private void updateViewsFromRequest(DatasetRequest request, Cursor cursor) {
-       for (Map.Entry<View, ViewState> entry : mViewStateMap.entrySet()) {
-           final ViewState detail = entry.getValue();
-
-           // Update any views associated with the given dataset ID
-           if (TextUtils.equals(detail.getDatasetId(), request.getDatasetId())) {
-               switch (request.getType()) {
-                   case FILTER_PUSH:
-                       detail.pushFilter(request.getFilterDetail());
-                       break;
-                   case FILTER_POP:
-                       detail.popFilter();
-                       break;
-               }
-
-               final View view = entry.getKey();
-               maybeSetDataset(view, cursor);
-           }
-       }
-   }
-
-   private void releaseViewsWithDataset(String datasetId) {
-       for (Map.Entry<View, ViewState> entry : mViewStateMap.entrySet()) {
-           final ViewState detail = entry.getValue();
-
-           // Release the cursor on views associated with the given dataset ID
-           if (TextUtils.equals(detail.getDatasetId(), datasetId)) {
-               final View view = entry.getKey();
+       final int count = mViewStates.size();
+       for (int i = 0; i < count; i++) {
+           final ViewState viewState = mViewStates.valueAt(i);
+           if (viewState.getView() == view) {
                maybeSetDataset(view, null);
+               mViewStates.remove(viewState.getIndex());
+               break;
            }
        }
    }
@ -358,10 +396,24 @@ abstract class PanelLayout extends FrameLayout {
*/
protected class ViewState {
private final ViewConfig mViewConfig;
private SoftReference<View> mView;
private LinkedList<FilterDetail> mFilterStack;
public ViewState(ViewConfig viewConfig) {
mViewConfig = viewConfig;
mView = new SoftReference<View>(null);
}
public int getIndex() {
return mViewConfig.getIndex();
}
public View getView() {
return mView.get();
}
public void setView(View view) {
mView = new SoftReference<View>(view);
}
public String getDatasetId() {
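ViewState now holds its panel view behind a SoftReference, so the view can be garbage-collected under memory pressure and rebuilt on demand (createPanelView() checks getView() for null before constructing a new one). A generic, self-contained sketch of that cache-or-rebuild pattern; WidgetCache and buildWidget() are placeholder names, not part of the patch:
import java.lang.ref.SoftReference;
// Sketch only: cache an expensive object behind a SoftReference and rebuild it
// lazily if the collector has reclaimed it.
class WidgetCache {
    private SoftReference<Object> mWidget = new SoftReference<Object>(null);
    Object getOrCreate() {
        Object widget = mWidget.get();
        if (widget == null) {
            // Either never built or collected under memory pressure: rebuild and re-cache.
            widget = buildWidget();
            mWidget = new SoftReference<Object>(widget);
        }
        return widget;
    }
    private Object buildWidget() {
        return new Object();   // placeholder for an expensive construction
    }
}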
@ -468,9 +520,13 @@ abstract class PanelLayout extends FrameLayout {
* Pushes filter to {@code ViewState}'s stack and makes request for new filter value.
*/
private void pushFilterOnView(ViewState viewState, FilterDetail filterDetail) {
final int index = viewState.getIndex();
final String datasetId = viewState.getDatasetId();
mDatasetHandler.requestDataset(
new DatasetRequest(DatasetRequest.Type.FILTER_PUSH, datasetId, filterDetail));
mDatasetHandler.requestDataset(new DatasetRequest(index,
DatasetRequest.Type.FILTER_PUSH,
datasetId,
filterDetail));
}
/**
@ -480,10 +536,15 @@ abstract class PanelLayout extends FrameLayout {
*/
private boolean popFilterOnView(ViewState viewState) {
if (viewState.canPopFilter()) {
final FilterDetail filterDetail = viewState.getPreviousFilter();
final int index = viewState.getIndex();
final String datasetId = viewState.getDatasetId();
mDatasetHandler.requestDataset(
new DatasetRequest(DatasetRequest.Type.FILTER_POP, datasetId, filterDetail));
final FilterDetail filterDetail = viewState.getPreviousFilter();
mDatasetHandler.requestDataset(new DatasetRequest(index,
DatasetRequest.Type.FILTER_POP,
datasetId,
filterDetail));
return true;
} else {
return false;

View File

@ -6,7 +6,6 @@ package org.mozilla.gecko.tests;
import java.util.Map;
import org.mozilla.gecko.AppConstants;
import org.mozilla.gecko.Assert;
import org.mozilla.gecko.FennecInstrumentationTestRunner;
import org.mozilla.gecko.FennecMochitestAssert;
@ -39,7 +38,7 @@ public abstract class BaseRobocopTest extends ActivityInstrumentationTestCase2<A
*/
@SuppressWarnings("unchecked")
public BaseRobocopTest() {
this((Class<Activity>) AppConstants.BROWSER_INTENT_CLASS);
this((Class<Activity>) TestConstants.BROWSER_INTENT_CLASS);
}
/**

View File

@ -2,6 +2,7 @@ package org.mozilla.gecko.tests;
import java.util.ArrayList;
import org.mozilla.gecko.AppConstants;
import org.mozilla.gecko.Assert;
import org.mozilla.gecko.GeckoProfile;
import org.mozilla.gecko.db.BrowserDB;
@ -31,7 +32,7 @@ class DatabaseHelper {
protected Uri buildUri(BrowserDataType dataType) {
Uri uri = null;
if (dataType == BrowserDataType.BOOKMARKS || dataType == BrowserDataType.HISTORY) {
uri = Uri.parse("content://" + TestConstants.ANDROID_PACKAGE_NAME + ".db.browser/" + dataType.toString().toLowerCase());
uri = Uri.parse("content://" + AppConstants.ANDROID_PACKAGE_NAME + ".db.browser/" + dataType.toString().toLowerCase());
} else {
mAsserter.ok(false, "The wrong data type has been provided = " + dataType.toString(), "Please provide the correct data type");
}
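The helper now derives the browser provider authority from AppConstants.ANDROID_PACKAGE_NAME, so the URI always matches whichever package the test harness is driving. A plain-Java sketch of that authority-plus-table composition (the package name below is a placeholder, not a real authority):
// Sketch only: compose a provider URI string from a package name and a table
// name; "org.mozilla.fennec_example" is a placeholder.
class ProviderUris {
    static String browserTableUri(String packageName, String table) {
        return "content://" + packageName + ".db.browser/" + table.toLowerCase();
    }
    public static void main(String[] args) {
        System.out.println(browserTableUri("org.mozilla.fennec_example", "BOOKMARKS"));
        // prints: content://org.mozilla.fennec_example.db.browser/bookmarks
    }
}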

View File

@ -6,6 +6,13 @@
package org.mozilla.gecko.tests;
import android.app.Activity;
public class TestConstants {
public static final String ANDROID_PACKAGE_NAME = "@ANDROID_PACKAGE_NAME@";
/**
* The Java Class instance that launches the browser.
* <p>
* This should always agree with {@link AppConstants#BROWSER_INTENT_CLASS_NAME}.
*/
public static final Class<? extends Activity> BROWSER_INTENT_CLASS = @ANDROID_PACKAGE_NAME@.App.class;
}

View File

@ -3,6 +3,7 @@ package org.mozilla.gecko.tests;
import java.util.ArrayList;
import org.mozilla.gecko.Actions;
import org.mozilla.gecko.AppConstants;
import org.mozilla.gecko.GeckoProfile;
import android.content.ContentResolver;
@ -112,7 +113,7 @@ public class testImportFromAndroid extends AboutHomeTest {
// Add a few history items in Firefox Mobile
ContentResolver resolver = getActivity().getContentResolver();
Uri uri = Uri.parse("content://" + TestConstants.ANDROID_PACKAGE_NAME + ".db.browser/history");
Uri uri = Uri.parse("content://" + AppConstants.ANDROID_PACKAGE_NAME + ".db.browser/history");
uri = uri.buildUpon().appendQueryParameter("profile", GeckoProfile.DEFAULT_PROFILE)
.appendQueryParameter("sync", "true").build();
for (String url:androidData) {

View File

@ -81,10 +81,10 @@ var ZoomHelper = {
*/
zoomToElement: function(aElement, aClickY = -1, aCanZoomOut = true, aCanScrollHorizontally = true) {
let rect = ElementTouchHelper.getBoundingContentRect(aElement);
ZoomHelper.zoomToRect(rect, aClickY, aCanZoomOut, aCanScrollHorizontally);
ZoomHelper.zoomToRect(rect, aClickY, aCanZoomOut, aCanScrollHorizontally, aElement);
},
zoomToRect: function(aRect, aClickY = -1, aCanZoomOut = true, aCanScrollHorizontally = true) {
zoomToRect: function(aRect, aClickY = -1, aCanZoomOut = true, aCanScrollHorizontally = true, aElement) {
const margin = 15;
if(!aRect.h || !aRect.w) {
@ -102,22 +102,24 @@ var ZoomHelper = {
// if the rect is already taking up most of the visible area and is stretching the
// width of the page, then we want to zoom out instead.
-    if (BrowserEventHandler.mReflozPref) {
-      let zoomFactor = BrowserApp.selectedTab.getZoomToMinFontSize(aElement);
-
-      bRect.width = zoomFactor <= 1.0 ? bRect.width : gScreenWidth / zoomFactor;
-      bRect.height = zoomFactor <= 1.0 ? bRect.height : bRect.height / zoomFactor;
-      if (zoomFactor == 1.0 || ZoomHelper.isRectZoomedIn(bRect, viewport)) {
+    if (aElement) {
+      if (BrowserEventHandler.mReflozPref) {
+        let zoomFactor = BrowserApp.selectedTab.getZoomToMinFontSize(aElement);
+
+        bRect.width = zoomFactor <= 1.0 ? bRect.width : gScreenWidth / zoomFactor;
+        bRect.height = zoomFactor <= 1.0 ? bRect.height : bRect.height / zoomFactor;
+        if (zoomFactor == 1.0 || ZoomHelper.isRectZoomedIn(bRect, viewport)) {
+          if (aCanZoomOut) {
+            ZoomHelper.zoomOut();
+          }
+          return;
+        }
+      } else if (ZoomHelper.isRectZoomedIn(bRect, viewport)) {
        if (aCanZoomOut) {
          ZoomHelper.zoomOut();
        }
        return;
      }
-    } else if (ZoomHelper.isRectZoomedIn(bRect, viewport)) {
-      if (aCanZoomOut) {
-        ZoomHelper.zoomOut();
-      }
-      return;
    }
let rect = {};
@ -145,4 +147,4 @@ var ZoomHelper = {
sendMessageToJava(rect);
},
};
};

View File

@ -12,13 +12,34 @@ import android.test.ActivityInstrumentationTestCase2;
/**
* BrowserTestCase provides helper methods for testing.
*/
@SuppressWarnings("unchecked")
public class BrowserTestCase extends ActivityInstrumentationTestCase2<Activity> {
@SuppressWarnings("unused")
private static String LOG_TAG = "BrowserTestCase";
@SuppressWarnings("unchecked")
/**
* The Java Class instance that launches the browser.
* <p>
* This should always agree with {@link AppConstants#BROWSER_INTENT_CLASS_NAME}.
*/
public static final Class<? extends Activity> BROWSER_INTENT_CLASS;
// Use reflection here so we don't have to either (a) preprocess this
// file, or (b) get access to Robocop's TestConstants class from these
// instrumentation tests.
static {
Class<? extends Activity> cl;
try {
cl = (Class<? extends Activity>) Class.forName(AppConstants.BROWSER_INTENT_CLASS_NAME);
} catch (ClassNotFoundException e) {
// Oh well.
cl = Activity.class;
}
BROWSER_INTENT_CLASS = cl;
}
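The comment above explains why the class is resolved reflectively rather than preprocessed. For reference, the same lookup-with-fallback idiom factored into a small reusable helper (the helper and its names are ours, not part of the test harness):
// Sketch only: resolve a class by name, or fall back to a default when it is
// missing or of the wrong type. ClassLookup/resolveOr are illustrative names.
final class ClassLookup {
    static <T> Class<? extends T> resolveOr(String className,
                                            Class<T> base,
                                            Class<? extends T> fallback) {
        try {
            return Class.forName(className).asSubclass(base);
        } catch (ClassNotFoundException e) {
            return fallback;
        } catch (ClassCastException e) {
            return fallback;
        }
    }
}
With this, the static block above would reduce to something like BROWSER_INTENT_CLASS = ClassLookup.resolveOr(AppConstants.BROWSER_INTENT_CLASS_NAME, Activity.class, Activity.class);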
public BrowserTestCase() {
super((Class<Activity>) AppConstants.BROWSER_INTENT_CLASS);
super((Class<Activity>) BROWSER_INTENT_CLASS);
}
public Context getApplicationContext() {

View File

@ -4430,3 +4430,7 @@ pref("image.mozsamplesize.enabled", false);
#ifndef MOZ_WIDGET_GONK
pref("beacon.enabled", true);
#endif
// Camera prefs
pref("camera.control.autofocus_moving_callback.enabled", false);
pref("camera.control.face_detection.enabled", false);

View File

@ -5,10 +5,14 @@
"use strict";
/* General utilities used throughout devtools. */
const { Ci, Cu } = require("chrome");
let { Services } = Cu.import("resource://gre/modules/Services.jsm", {});
let { setTimeout, clearTimeout } = Cu.import("resource://gre/modules/Timer.jsm", {});
// hasChrome is provided as a global by the loader. It is true if we are running
// on the main thread, and false if we are running on a worker thread.
if (hasChrome) {
var { Ci, Cu } = require("chrome");
var Services = require("Services");
var setTimeout = Cu.import("resource://gre/modules/Timer.jsm", {}).setTimeout;
}
/**
* Turn the error |aError| into a string, without fail.

View File

@ -34,6 +34,7 @@ this.EXPORTED_SYMBOLS = ["DevToolsLoader", "devtools", "BuiltinProvider",
let loaderGlobals = {
btoa: btoa,
console: console,
hasChrome: true,
promise: promise,
_Iterator: Iterator,
ChromeWorker: ChromeWorker,

View File

@ -51,6 +51,7 @@ SOURCES += [
'HwcComposer2D.cpp',
'HwcUtils.cpp',
'nsAppShell.cpp',
'nsClipboard.cpp',
'nsIdleServiceGonk.cpp',
'nsLookAndFeel.cpp',
'nsWidgetFactory.cpp',

widget/gonk/nsClipboard.cpp Normal file
View File

@ -0,0 +1,148 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/dom/ContentChild.h"
#include "nsClipboard.h"
#include "nsISupportsPrimitives.h"
#include "nsCOMPtr.h"
#include "nsComponentManagerUtils.h"
#include "nsXULAppAPI.h"
using namespace mozilla;
using mozilla::dom::ContentChild;
#define LOG_TAG "Clipboard"
#define LOGI(args...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, ## args)
#define LOGE(args...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, ## args)
NS_IMPL_ISUPPORTS1(nsClipboard, nsIClipboard)
nsClipboard::nsClipboard()
{
}
NS_IMETHODIMP
nsClipboard::SetData(nsITransferable *aTransferable,
nsIClipboardOwner *anOwner, int32_t aWhichClipboard)
{
if (aWhichClipboard != kGlobalClipboard) {
return NS_ERROR_NOT_IMPLEMENTED;
}
nsCOMPtr<nsISupports> tmp;
uint32_t len;
nsresult rv = aTransferable->GetTransferData(kUnicodeMime, getter_AddRefs(tmp),
&len);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
nsCOMPtr<nsISupportsString> supportsString = do_QueryInterface(tmp);
// No support for non-text data
if (NS_WARN_IF(!supportsString)) {
LOGE("No support for non-text data. See bug 952456.");
return NS_ERROR_NOT_IMPLEMENTED;
}
nsAutoString buffer;
supportsString->GetData(buffer);
if (XRE_GetProcessType() == GeckoProcessType_Default) {
mClipboard = buffer;
} else {
bool isPrivateData = false;
aTransferable->GetIsPrivateData(&isPrivateData);
ContentChild::GetSingleton()->SendSetClipboardText(buffer, isPrivateData,
aWhichClipboard);
}
return NS_OK;
}
NS_IMETHODIMP
nsClipboard::GetData(nsITransferable *aTransferable, int32_t aWhichClipboard)
{
if (aWhichClipboard != kGlobalClipboard) {
return NS_ERROR_NOT_IMPLEMENTED;
}
nsAutoString buffer;
if (XRE_GetProcessType() == GeckoProcessType_Default) {
buffer = mClipboard;
} else {
ContentChild::GetSingleton()->SendGetClipboardText(aWhichClipboard, &buffer);
}
nsresult rv;
nsCOMPtr<nsISupportsString> dataWrapper =
do_CreateInstance(NS_SUPPORTS_STRING_CONTRACTID, &rv);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
rv = dataWrapper->SetData(buffer);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
// If our data flavor has already been added, this will fail. But we don't care
aTransferable->AddDataFlavor(kUnicodeMime);
nsCOMPtr<nsISupports> nsisupportsDataWrapper =
do_QueryInterface(dataWrapper);
rv = aTransferable->SetTransferData(kUnicodeMime, nsisupportsDataWrapper,
buffer.Length() * sizeof(PRUnichar));
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
return NS_OK;
}
NS_IMETHODIMP
nsClipboard::EmptyClipboard(int32_t aWhichClipboard)
{
if (aWhichClipboard != kGlobalClipboard) {
return NS_ERROR_NOT_IMPLEMENTED;
}
if (XRE_GetProcessType() == GeckoProcessType_Default) {
mClipboard.Truncate(0);
} else {
ContentChild::GetSingleton()->SendEmptyClipboard(aWhichClipboard);
}
return NS_OK;
}
NS_IMETHODIMP
nsClipboard::HasDataMatchingFlavors(const char **aFlavorList,
uint32_t aLength, int32_t aWhichClipboard,
bool *aHasText)
{
*aHasText = false;
if (aWhichClipboard != kGlobalClipboard) {
return NS_ERROR_NOT_IMPLEMENTED;
}
if (XRE_GetProcessType() == GeckoProcessType_Default) {
*aHasText = !mClipboard.IsEmpty();
} else {
ContentChild::GetSingleton()->SendClipboardHasText(aWhichClipboard, aHasText);
}
return NS_OK;
}
NS_IMETHODIMP
nsClipboard::SupportsSelectionClipboard(bool *aIsSupported)
{
*aIsSupported = false;
return NS_OK;
}
NS_IMETHODIMP
nsClipboard::SupportsFindClipboard(bool* _retval)
{
NS_ENSURE_ARG_POINTER(_retval);
*_retval = false;
return NS_OK;
}

widget/gonk/nsClipboard.h Normal file
View File

@ -0,0 +1,21 @@
/* -*- Mode: c++; c-basic-offset: 4; tab-width: 20; indent-tabs-mode: nil; -*-
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef nsClipboard_h__
#define nsClipboard_h__
#include "nsIClipboard.h"
class nsClipboard MOZ_FINAL : public nsIClipboard
{
nsAutoString mClipboard;
public:
NS_DECL_ISUPPORTS
NS_DECL_NSICLIPBOARD
nsClipboard();
};
#endif

View File

@ -28,6 +28,8 @@
#include "nsScreenManagerGonk.h"
#include "nsIdleServiceGonk.h"
#include "nsTransferable.h"
#include "nsClipboard.h"
#include "nsClipboardHelper.h"
#include "nsHTMLFormatConverter.h"
#include "nsXULAppAPI.h"
@ -52,6 +54,8 @@ NS_GENERIC_FACTORY_CONSTRUCTOR(PuppetScreenManager)
NS_GENERIC_FACTORY_CONSTRUCTOR(nsHTMLFormatConverter)
NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(nsIdleServiceGonk, nsIdleServiceGonk::GetInstance)
NS_GENERIC_FACTORY_CONSTRUCTOR(nsTransferable)
NS_GENERIC_FACTORY_CONSTRUCTOR(nsClipboard)
NS_GENERIC_FACTORY_CONSTRUCTOR(nsClipboardHelper)
NS_DEFINE_NAMED_CID(NS_APPSHELL_CID);
NS_DEFINE_NAMED_CID(NS_WINDOW_CID);
@ -61,6 +65,8 @@ NS_DEFINE_NAMED_CID(NS_HTMLFORMATCONVERTER_CID);
NS_DEFINE_NAMED_CID(NS_IDLE_SERVICE_CID);
NS_DEFINE_NAMED_CID(NS_TRANSFERABLE_CID);
NS_DEFINE_NAMED_CID(NS_GFXINFO_CID);
NS_DEFINE_NAMED_CID(NS_CLIPBOARD_CID);
NS_DEFINE_NAMED_CID(NS_CLIPBOARDHELPER_CID);
static nsresult
ScreenManagerConstructor(nsISupports *aOuter, REFNSIID aIID, void **aResult)
@ -79,6 +85,8 @@ static const mozilla::Module::CIDEntry kWidgetCIDs[] = {
{ &kNS_IDLE_SERVICE_CID, false, nullptr, nsIdleServiceGonkConstructor },
{ &kNS_TRANSFERABLE_CID, false, nullptr, nsTransferableConstructor },
{ &kNS_GFXINFO_CID, false, nullptr, mozilla::widget::GfxInfoConstructor },
{ &kNS_CLIPBOARD_CID, false, nullptr, nsClipboardConstructor },
{ &kNS_CLIPBOARDHELPER_CID, false, nullptr, nsClipboardHelperConstructor },
{ nullptr }
};
@ -91,6 +99,8 @@ static const mozilla::Module::ContractIDEntry kWidgetContracts[] = {
{ "@mozilla.org/widget/idleservice;1", &kNS_IDLE_SERVICE_CID },
{ "@mozilla.org/widget/transferable;1", &kNS_TRANSFERABLE_CID },
{ "@mozilla.org/gfx/info;1", &kNS_GFXINFO_CID },
{ "@mozilla.org/widget/clipboard;1", &kNS_CLIPBOARD_CID },
{ "@mozilla.org/widget/clipboardhelper;1", &kNS_CLIPBOARDHELPER_CID },
{ nullptr }
};

View File

@ -1462,6 +1462,102 @@ static NS_CYCLE_COLLECTION_INNERCLASS NS_CYCLE_COLLECTION_INNERNAME;
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f18) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
#define NS_IMPL_CYCLE_COLLECTION_INHERITED_19(_class, _base, _f1, _f2, _f3, _f4, _f5, \
_f6, _f7, _f8, _f9, _f10, _f11, _f12, _f13, _f14, \
_f15, _f16, _f17, _f18, _f19) \
NS_IMPL_CYCLE_COLLECTION_CLASS(_class) \
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(_class, _base) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f1) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f2) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f3) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f4) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f5) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f6) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f7) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f8) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f9) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f10) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f11) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f12) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f13) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f14) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f15) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f16) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f17) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f18) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f19) \
NS_IMPL_CYCLE_COLLECTION_UNLINK_END \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(_class, _base) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f1) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f2) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f3) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f4) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f5) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f6) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f7) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f8) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f9) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f10) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f11) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f12) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f13) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f14) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f15) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f16) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f17) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f18) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f19) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
#define NS_IMPL_CYCLE_COLLECTION_INHERITED_20(_class, _base, _f1, _f2, _f3, _f4, _f5, \
_f6, _f7, _f8, _f9, _f10, _f11, _f12, _f13, _f14, \
_f15, _f16, _f17, _f18, _f19, _f20) \
NS_IMPL_CYCLE_COLLECTION_CLASS(_class) \
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(_class, _base) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f1) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f2) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f3) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f4) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f5) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f6) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f7) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f8) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f9) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f10) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f11) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f12) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f13) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f14) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f15) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f16) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f17) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f18) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f19) \
NS_IMPL_CYCLE_COLLECTION_UNLINK(_f20) \
NS_IMPL_CYCLE_COLLECTION_UNLINK_END \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(_class, _base) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f1) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f2) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f3) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f4) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f5) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f6) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f7) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f8) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f9) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f10) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f11) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f12) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f13) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f14) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f15) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f16) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f17) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f18) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f19) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f20) \
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
#define NS_CYCLE_COLLECTION_NOTE_EDGE_NAME CycleCollectionNoteEdgeName
#endif // nsCycleCollectionParticipant_h__