Merge inbound to mozilla-central r=merge a=merge

This commit is contained in:
Dorel Luca 2017-11-29 12:26:15 +02:00
commit be78e6ea9b
264 changed files with 10096 additions and 9461 deletions

View File

@ -17,6 +17,7 @@
#include "HandlerData_i.c"
#include "mozilla/Assertions.h"
#include "mozilla/a11y/AccessibleWrap.h"
#include "mozilla/a11y/HandlerDataCleanup.h"
#include "mozilla/dom/ContentChild.h"
#include "mozilla/Move.h"
#include "mozilla/mscom/AgileReference.h"
@ -132,7 +133,8 @@ HandlerProvider::GetAndSerializePayload(const MutexAutoLock&,
// Now that we have serialized payload, we should clean up any
// BSTRs, interfaces, etc. fetched in BuildInitialIA2Data.
CleanupStaticIA2Data(payload.mStaticData);
CleanupDynamicIA2Data(payload.mDynamicData);
// No need to zero memory, since payload is going out of scope.
CleanupDynamicIA2Data(payload.mDynamicData, false);
}
HRESULT
@ -397,37 +399,10 @@ HandlerProvider::CleanupStaticIA2Data(StaticIA2Data& aData)
{
// When CoMarshalInterface writes interfaces out to a stream, it AddRefs.
// Therefore, we must release our references after this.
if (aData.mIA2) {
aData.mIA2->Release();
}
if (aData.mIEnumVARIANT) {
aData.mIEnumVARIANT->Release();
}
if (aData.mIAHypertext) {
aData.mIAHypertext->Release();
}
if (aData.mIAHyperlink) {
aData.mIAHyperlink->Release();
}
if (aData.mIATable) {
aData.mIATable->Release();
}
if (aData.mIATable2) {
aData.mIATable2->Release();
}
if (aData.mIATableCell) {
aData.mIATableCell->Release();
}
ReleaseStaticIA2DataInterfaces(aData);
ZeroMemory(&aData, sizeof(StaticIA2Data));
}
void
HandlerProvider::CleanupDynamicIA2Data(DynamicIA2Data& aData)
{
::VariantClear(&aData.mRole);
ZeroMemory(&aData, sizeof(DynamicIA2Data));
}
void
HandlerProvider::BuildInitialIA2Data(
NotNull<mscom::IInterceptor*> aInterceptor,

View File

@ -75,7 +75,6 @@ private:
StaticIA2Data* aOutStaticData,
DynamicIA2Data* aOutDynamicData);
static void CleanupStaticIA2Data(StaticIA2Data& aData);
static void CleanupDynamicIA2Data(DynamicIA2Data& aData);
bool IsTargetInterfaceCacheable();
// Replace a raw object from the main thread with a wrapped, intercepted
// object suitable for calling from the MTA.

View File

@ -16,6 +16,7 @@
#include "Factory.h"
#include "HandlerData.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/a11y/HandlerDataCleanup.h"
#include "mozilla/mscom/Registration.h"
#include "mozilla/UniquePtr.h"
@ -92,6 +93,8 @@ AccessibleHandler::AccessibleHandler(IUnknown* aOuter, HRESULT* aResult)
AccessibleHandler::~AccessibleHandler()
{
// No need to zero memory, since we're being destroyed anyway.
CleanupDynamicIA2Data(mCachedData.mDynamicData, false);
if (mCachedData.mGeckoBackChannel) {
mCachedData.mGeckoBackChannel->Release();
}
@ -393,6 +396,9 @@ AccessibleHandler::ReadHandlerPayload(IStream* aStream, REFIID aIid)
if (!deserializer.Read(&newData, &IA2Payload_Decode)) {
return E_FAIL;
}
// Clean up the old data.
// No need to zero memory, since we're about to completely replace this.
CleanupDynamicIA2Data(mCachedData.mDynamicData, false);
mCachedData = newData;
// These interfaces have been aggregated into the proxy manager.
@ -404,27 +410,7 @@ AccessibleHandler::ReadHandlerPayload(IStream* aStream, REFIID aIid)
// Note that if pointers to other objects (in contrast to
// interfaces of *this* object) are added in future, we should not release
// those pointers.
if (mCachedData.mStaticData.mIA2) {
mCachedData.mStaticData.mIA2->Release();
}
if (mCachedData.mStaticData.mIEnumVARIANT) {
mCachedData.mStaticData.mIEnumVARIANT->Release();
}
if (mCachedData.mStaticData.mIAHypertext) {
mCachedData.mStaticData.mIAHypertext->Release();
}
if (mCachedData.mStaticData.mIAHyperlink) {
mCachedData.mStaticData.mIAHyperlink->Release();
}
if (mCachedData.mStaticData.mIATable) {
mCachedData.mStaticData.mIATable->Release();
}
if (mCachedData.mStaticData.mIATable2) {
mCachedData.mStaticData.mIATable2->Release();
}
if (mCachedData.mStaticData.mIATableCell) {
mCachedData.mStaticData.mIATableCell->Release();
}
ReleaseStaticIA2DataInterfaces(mCachedData.mStaticData);
if (!mCachedData.mGeckoBackChannel) {
return S_OK;

View File

@ -0,0 +1,85 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_a11y_HandlerDataCleanup_h
#define mozilla_a11y_HandlerDataCleanup_h
#include <OleAuto.h>
#include "HandlerData.h"
namespace mozilla {
namespace a11y {
/**
 * Release the handler's references to the COM interfaces cached in aData.
 *
 * Only interfaces of the proxied object wrapped by this handler should be
 * released here, never other objects! For example, if StaticIA2Data were to
 * include accParent in future, that must not be released here.
 */
inline void
ReleaseStaticIA2DataInterfaces(StaticIA2Data& aData)
{
  // Any of the cached interface pointers may legitimately be null
  // (not every object implements every interface), so only Release
  // the ones that were actually fetched.
  auto releaseIfHeld = [](auto* aIface) {
    if (aIface) {
      aIface->Release();
    }
  };
  releaseIfHeld(aData.mIA2);
  releaseIfHeld(aData.mIEnumVARIANT);
  releaseIfHeld(aData.mIAHypertext);
  releaseIfHeld(aData.mIAHyperlink);
  releaseIfHeld(aData.mIATable);
  releaseIfHeld(aData.mIATable2);
  releaseIfHeld(aData.mIATableCell);
}
/**
 * Free all BSTRs and VARIANTs held by a DynamicIA2Data payload.
 *
 * @param aData The dynamic IA2 payload to clean up.
 * @param aZero If true (the default), zero aData afterward so that stale
 *              pointers cannot be double-freed by a subsequent cleanup.
 *              Pass false only when aData is about to be destroyed or
 *              completely overwritten anyway.
 */
inline void
CleanupDynamicIA2Data(DynamicIA2Data& aData, bool aZero=true)
{
  ::VariantClear(&aData.mRole);
  // ::SysFreeString is documented to simply return when passed NULL, so no
  // null checks are needed before freeing the BSTR members.
  ::SysFreeString(aData.mKeyboardShortcut);
  ::SysFreeString(aData.mName);
  ::SysFreeString(aData.mDescription);
  ::SysFreeString(aData.mDefaultAction);
  ::SysFreeString(aData.mValue);
  ::SysFreeString(aData.mAttributes);
  ::SysFreeString(aData.mIA2Locale.language);
  ::SysFreeString(aData.mIA2Locale.country);
  ::SysFreeString(aData.mIA2Locale.variant);
  if (aZero) {
    ZeroMemory(&aData, sizeof(DynamicIA2Data));
  }
}
} // namespace a11y
} // namespace mozilla
#endif // mozilla_a11y_HandlerDataCleanup_h

View File

@ -6,7 +6,10 @@
SharedLibrary('AccessibleHandler')
EXPORTS.mozilla.a11y += ['AccessibleHandler.h']
EXPORTS.mozilla.a11y += [
'AccessibleHandler.h',
'HandlerDataCleanup.h',
]
LOCAL_INCLUDES += [
'/accessible/interfaces/ia2',

View File

@ -123,43 +123,6 @@
]
};
// XPFE and Toolkit autocomplete widgets differ.
var ac1h = document.getElementById("autocomplete");
if ("clearResults" in ac1h) {
SimpleTest.ok(true, "Testing (Old) XPFE autocomplete widget. (ac1h)");
// Popup is always created.
accTree.children.push(
{
// xul:panel
role: ROLE_COMBOBOX_LIST,
children: [
{
// xul:tree
role: ROLE_TABLE,
children: [
{
// xul:treecols
role: ROLE_LIST,
children: [
{
// xul:treecol
role: ROLE_COLUMNHEADER,
children: []
}
]
}
]
}
]
}
);
} else {
SimpleTest.ok(true, "Testing (New) Toolkit autocomplete widget. (ac1h)");
// Popup is lazily created, so not present in this case.
}
testAccessibleTree("autocomplete", accTree);
//////////////////////////////////////////////////////////////////////////
@ -195,43 +158,6 @@
]
};
// XPFE and Toolkit autocomplete widgets differ.
var ac2cmp = document.getElementById("autocomplete2");
if ("clearResults" in ac2cmp) {
SimpleTest.ok(true, "Testing (Old) XPFE autocomplete widget. (ac2mp)");
// Popup is always created.
accTree.children.push(
{
// xul:panel
role: ROLE_COMBOBOX_LIST,
children: [
{
// xul:tree
role: ROLE_TABLE,
children: [
{
// xul:treecols
role: ROLE_LIST,
children: [
{
// xul:treecol
role: ROLE_COLUMNHEADER,
children: []
}
]
}
]
}
]
}
);
} else {
SimpleTest.ok(true, "Testing (New) Toolkit autocomplete widget. (ac2mp)");
// Popup is lazily created, so not present in this case.
}
testAccessibleTree("autocomplete2", accTree);
SimpleTest.finish()

View File

@ -125,65 +125,30 @@
SimpleTest.finish();
}
// XPFE and Toolkit autocomplete widgets differ.
var txc = document.getElementById("txc_autocomplete");
if ("clearResults" in txc) {
SimpleTest.ok(true, "Testing (Old) XPFE autocomplete widget.");
SimpleTest.ok(txc, "Testing (New) Toolkit autocomplete widget.");
// Popup is always created. (See code below.)
// Dumb access to trigger popup lazy creation.
dump("Trigget popup lazy creation");
waitForEvent(EVENT_REORDER, txc, test_AutocompleteControl);
txc.popup;
accTree.children.push(
{
// xul:panel
role: ROLE_COMBOBOX_LIST,
children: [
{
// xul:tree
role: ROLE_TABLE,
children: [
{
// xul:treecols
role: ROLE_LIST,
children: [
{
// xul:treecol
role: ROLE_COLUMNHEADER,
children: []
}
]
}
]
}
]
}
);
test_AutocompleteControl();
} else {
SimpleTest.ok(true, "Testing (New) Toolkit autocomplete widget.");
// Dumb access to trigger popup lazy creation.
dump("Trigget popup lazy creation");
waitForEvent(EVENT_REORDER, txc, test_AutocompleteControl);
txc.popup;
accTree.children.push(
{
role: ROLE_LIST,
children: [
{
role: ROLE_LIST,
children: [
{
role: ROLE_COLUMNHEADER,
children: []
}
]
}
]
}
);
}
accTree.children.push(
{
role: ROLE_LIST,
children: [
{
role: ROLE_LIST,
children: [
{
role: ROLE_COLUMNHEADER,
children: []
}
]
}
]
}
);
}
SimpleTest.waitForExplicitFinish();

View File

@ -872,8 +872,8 @@ var BrowserPageActionFeedback = {
return this.feedbackLabel = document.getElementById("pageActionFeedbackMessage");
},
show(action, event) {
this.feedbackLabel.textContent = this.panelNode.getAttribute(action.id + "Feedback");
show(action, event, textContentOverride) {
this.feedbackLabel.textContent = this.panelNode.getAttribute((textContentOverride || action.id) + "Feedback");
this.panelNode.hidden = false;
let anchor = BrowserPageActions.panelAnchorNodeForAction(action, event);
@ -994,7 +994,8 @@ BrowserPageActions.sendToDevice = {
// in", "Learn about Sync", etc. Device items will be .sendtab-target.
if (event.target.classList.contains("sendtab-target")) {
let action = PageActions.actionForID("sendToDevice");
BrowserPageActionFeedback.show(action, event);
let textOverride = gSync.offline && "sendToDeviceOffline";
BrowserPageActionFeedback.show(action, event, textOverride);
}
});
return item;

View File

@ -65,6 +65,10 @@ var gSync = {
.sort((a, b) => a.name.localeCompare(b.name));
},
get offline() {
return Weave.Service.scheduler.offline;
},
_generateNodeGetters() {
for (let k of ["Status", "Avatar", "Label", "Container"]) {
let prop = "appMenu" + k;

View File

@ -436,7 +436,8 @@
tabspecific="true"
noautofocus="true"
copyURLFeedback="&copyURLFeedback.label;"
sendToDeviceFeedback="&sendToDeviceFeedback.label;">
sendToDeviceFeedback="&sendToDeviceFeedback.label;"
sendToDeviceOfflineFeedback="&sendToDeviceOfflineFeedback.label;">
<hbox id="pageActionFeedbackAnimatableBox">
<image id="pageActionFeedbackAnimatableImage"/>
</hbox>

View File

@ -434,7 +434,7 @@
<hbox id="PanelUI-remotetabs-tabsdisabledpane" pack="center" flex="1">
<vbox class="PanelUI-remotetabs-instruction-box" align="center">
<hbox pack="center">
<image class="fxaSyncIllustration"/>
<image class="fxaSyncIllustrationIssue"/>
</hbox>
<label class="PanelUI-remotetabs-instruction-label">&appMenuRemoteTabs.tabsnotsyncing.label;</label>
<hbox pack="center">
@ -452,9 +452,8 @@
<hbox id="PanelUI-remotetabs-nodevicespane" pack="center" flex="1">
<vbox class="PanelUI-remotetabs-instruction-box">
<hbox pack="center">
<image class="fxaSyncIllustration"/>
<image class="fxaSyncIllustrationIssue"/>
</hbox>
<label class="PanelUI-remotetabs-instruction-title">&appMenuRemoteTabs.noclients.title;</label>
<label class="PanelUI-remotetabs-instruction-label">&appMenuRemoteTabs.noclients.subtitle;</label>
<!-- The inner HTML for PanelUI-remotetabs-mobile-promo is built at runtime -->
<label id="PanelUI-remotetabs-mobile-promo" fxAccountsBrand="&syncBrand.fxAccount.label;"/>
@ -484,7 +483,7 @@
align="center"
class="PanelUI-remotetabs-instruction-box"
observes="sync-reauth-state">
<image class="fxaSyncIllustration"/>
<image class="fxaSyncIllustrationIssue"/>
<label class="PanelUI-remotetabs-instruction-label">&appMenuRemoteTabs.notsignedin.label;</label>
<toolbarbutton class="PanelUI-remotetabs-prefs-button"
label="&appMenuRemoteTabs.signin.label;"

View File

@ -116,13 +116,13 @@ SyncedTabsDeckComponent.prototype = {
// There's no good way to mock fxAccounts in browser tests where it's already
// been instantiated, so we have this method for stubbing.
_accountStatus() {
return this._fxAccounts.accountStatus();
_getSignedInUser() {
return this._fxAccounts.getSignedInUser();
},
getPanelStatus() {
return this._accountStatus().then(exists => {
if (!exists || this._SyncedTabs.loginFailed) {
return this._getSignedInUser().then(user => {
if (!user || !user.verified || this._SyncedTabs.loginFailed) {
return this.PANELS.NOT_AUTHED_INFO;
}
if (!this._SyncedTabs.isConfiguredToSyncTabs) {

View File

@ -81,13 +81,14 @@
<button class="button sync-prefs">&fxaSignIn.label;</button>
</div>
<div class="singleDeviceInfo sync-state">
<p>&syncedTabs.sidebar.noclients.title;</p>
<p>&syncedTabs.sidebar.noclients.subtitle;</p>
<p class="device-promo" fxAccountsBrand="&syncBrand.fxAccount.label;"></p>
<div class="syncIllustrationIssue"></div>
<p class="instructions">&syncedTabs.sidebar.noclients.subtitle;</p>
<p class="instructions device-promo" fxAccountsBrand="&syncBrand.fxAccount.label;"></p>
</div>
<div class="tabs-disabled sync-state">
<p>&syncedTabs.sidebar.tabsnotsyncing.label;</p>
<p><a href="#" class="sync-prefs text-link">&syncedTabs.sidebar.openprefs.label;</a></p>
<div class="syncIllustrationIssue"></div>
<p class="instructions">&syncedTabs.sidebar.tabsnotsyncing.label;</p>
<button class="button sync-prefs">&syncedTabs.sidebar.openprefs.label;</button>
</div>
</div>
</template>

View File

@ -67,7 +67,7 @@ let originalSyncedTabsInternal = null;
async function testClean() {
let syncedTabsDeckComponent = window.SidebarUI.browser.contentWindow.syncedTabsDeckComponent;
let SyncedTabs = window.SidebarUI.browser.contentWindow.SyncedTabs;
syncedTabsDeckComponent._accountStatus.restore();
syncedTabsDeckComponent._getSignedInUser.restore();
SyncedTabs._internal.getTabClients.restore();
SyncedTabs._internal = originalSyncedTabsInternal;
@ -97,7 +97,7 @@ add_task(async function testSyncedTabsSidebarList() {
syncTabs() { return Promise.resolve(); },
};
sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(true));
sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve({verified: true}));
sinon.stub(SyncedTabs._internal, "getTabClients", () => Promise.resolve(Cu.cloneInto(FIXTURE, {})));
await syncedTabsDeckComponent.updatePanel();
@ -150,7 +150,7 @@ add_task(async function testSyncedTabsSidebarFilteredList() {
syncTabs() { return Promise.resolve(); },
};
sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(true));
sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve({verified: true}));
sinon.stub(SyncedTabs._internal, "getTabClients", () => Promise.resolve(Cu.cloneInto(FIXTURE, {})));
await syncedTabsDeckComponent.updatePanel();
@ -195,7 +195,7 @@ add_task(async function testSyncedTabsSidebarFilteredList() {
add_task(testClean);
add_task(async function testSyncedTabsSidebarStatus() {
let accountExists = false;
let account = null;
await SidebarUI.show("viewTabsSidebar");
let syncedTabsDeckComponent = window.SidebarUI.browser.contentWindow.syncedTabsDeckComponent;
@ -214,21 +214,21 @@ add_task(async function testSyncedTabsSidebarStatus() {
sinon.spy(syncedTabsDeckComponent, "updatePanel");
sinon.spy(syncedTabsDeckComponent, "observe");
sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.reject("Test error"));
sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.reject("Test error"));
await syncedTabsDeckComponent.updatePanel();
let selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
Assert.ok(selectedPanel.classList.contains("notAuthedInfo"),
"not-authed panel is selected on auth error");
syncedTabsDeckComponent._accountStatus.restore();
sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(accountExists));
syncedTabsDeckComponent._getSignedInUser.restore();
sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve(account));
await syncedTabsDeckComponent.updatePanel();
selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
Assert.ok(selectedPanel.classList.contains("notAuthedInfo"),
"not-authed panel is selected");
accountExists = true;
account = {verified: true};
await syncedTabsDeckComponent.updatePanel();
selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
Assert.ok(selectedPanel.classList.contains("tabs-disabled"),
@ -272,7 +272,7 @@ add_task(async function testSyncedTabsSidebarContextMenu() {
syncTabs() { return Promise.resolve(); },
};
sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(true));
sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve({verified: true}));
sinon.stub(SyncedTabs._internal, "getTabClients", () => Promise.resolve(Cu.cloneInto(FIXTURE, {})));
await syncedTabsDeckComponent.updatePanel();

View File

@ -137,7 +137,7 @@ add_task(async function testPanelStatus() {
let listStore = new SyncedTabsListStore();
let listComponent = {};
let fxAccounts = {
accountStatus() {}
getSignedInUser() {}
};
let SyncedTabsMock = {
getTabClients() {}
@ -153,12 +153,17 @@ add_task(async function testPanelStatus() {
SyncedTabs: SyncedTabsMock
});
let isAuthed = false;
sinon.stub(fxAccounts, "accountStatus", () => Promise.resolve(isAuthed));
let account = null;
sinon.stub(fxAccounts, "getSignedInUser", () => Promise.resolve(account));
let result = await component.getPanelStatus();
Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);
isAuthed = true;
account = {verified: false};
result = await component.getPanelStatus();
Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);
account = {verified: true};
SyncedTabsMock.loginFailed = true;
result = await component.getPanelStatus();
@ -186,8 +191,8 @@ add_task(async function testPanelStatus() {
result = await component.getPanelStatus();
Assert.equal(result, component.PANELS.TABS_CONTAINER);
fxAccounts.accountStatus.restore();
sinon.stub(fxAccounts, "accountStatus", () => Promise.reject("err"));
fxAccounts.getSignedInUser.restore();
sinon.stub(fxAccounts, "getSignedInUser", () => Promise.reject("err"));
result = await component.getPanelStatus();
Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);

View File

@ -68,9 +68,6 @@
#ifndef MOZ_STATIC_JS
@BINPATH@/@DLL_PREFIX@mozjs@DLL_SUFFIX@
#endif
#ifdef MOZ_DMD
@BINPATH@/@DLL_PREFIX@dmd@DLL_SUFFIX@
#endif
#ifndef MOZ_SYSTEM_NSPR
#ifndef MOZ_FOLD_LIBS
@BINPATH@/@DLL_PREFIX@nspr4@DLL_SUFFIX@

View File

@ -51,6 +51,7 @@ can reach it easily. -->
<!ENTITY sendLinkToDevice.label "Send Link to Device">
<!ENTITY sendLinkToDevice.accesskey "n">
<!ENTITY sendToDeviceFeedback.label "Sent!">
<!ENTITY sendToDeviceOfflineFeedback.label "Queued (offline)">
<!ENTITY moveToNewWindow.label "Move to New Window">
<!ENTITY moveToNewWindow.accesskey "W">
<!ENTITY bookmarkAllTabs.label "Bookmark All Tabs…">
@ -377,7 +378,6 @@ These should match what Safari and other Apple applications use on OS X Lion. --
<!-- LOCALIZATION NOTE (appMenuRemoteTabs.noclients.label): This is shown
when Sync is configured but this appears to be the only device attached to
the account. We also show links to download Firefox for android/ios. -->
<!ENTITY appMenuRemoteTabs.noclients.title "No synced tabs… yet!">
<!ENTITY appMenuRemoteTabs.noclients.subtitle "Want to see your tabs from other devices here?">
<!ENTITY appMenuRemoteTabs.openprefs.label "Sync Preferences">
<!ENTITY appMenuRemoteTabs.notsignedin.label "Sign in to view a list of tabs from your other devices.">
@ -791,7 +791,6 @@ you can use these alternative items. Otherwise, their values should be empty. -
<!ENTITY syncedTabs.sidebar.label "Synced Tabs">
<!ENTITY syncedTabs.sidebar.noclients.label "Sign in to Firefox from your other devices to view their tabs here.">
<!ENTITY syncedTabs.sidebar.noclients.title "No synced tabs… yet!">
<!ENTITY syncedTabs.sidebar.noclients.subtitle "Want to see your tabs from other devices here?">
<!ENTITY syncedTabs.sidebar.notsignedin.label "Sign in to view a list of tabs from your other devices.">
<!ENTITY syncedTabs.sidebar.notabs.label "No open tabs">

View File

@ -92,7 +92,7 @@ add_task(async function test_sidebar() {
let syncedTabsDeckComponent = SidebarUI.browser.contentWindow.syncedTabsDeckComponent;
syncedTabsDeckComponent._accountStatus = () => Promise.resolve(true);
syncedTabsDeckComponent._getSignedInUser = () => Promise.resolve({verified: true});
// Once the tabs container has been selected (which here means "'selected'
// added to the class list") we are ready to test.

View File

@ -687,14 +687,22 @@ toolbarbutton[constrain-size="true"][cui-areatype="menu-panel"] > .toolbarbutton
margin-left: 32px;
}
.fxaSyncIllustration {
.fxaSyncIllustration,
.fxaSyncIllustrationIssue {
width: 180px;
height: var(--panel-ui-sync-illustration-height);
list-style-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
-moz-context-properties: fill;
fill: #cdcdcd;
}
.fxaSyncIllustration {
list-style-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
}
.fxaSyncIllustrationIssue {
list-style-image: url(chrome://browser/skin/fxa/sync-illustration-issue.svg);
}
.PanelUI-remotetabs-prefs-button > .toolbarbutton-text {
/* !important to override ".cui-widget-panel toolbarbutton > .toolbarbutton-text" above. */
text-align: center !important;

View File

@ -0,0 +1,63 @@
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 173.9 156.5">
<style>
.st0{opacity:0.1;fill:#0C0C0D;enable-background:new ;} .st1{fill:#FFFFFF;} .st2{fill:url(#SVGID_1_);} .st3{fill:#F9F9FA;} .st4{fill:url(#SVGID_2_);} .st5{fill:url(#SVGID_3_);} .st6{fill:url(#SVGID_4_);} .st7{fill:url(#SVGID_5_);} .st8{fill:url(#SVGID_6_);} .st9{fill:url(#SVGID_7_);}
</style>
<path class="st0" d="M140.9 152h-69c-.6 0-1-.4-1-1s.4-1 1-1H141c.6 0 1 .4 1 1s-.5 1-1.1 1zm-9.3-5.1h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-15.7 9.6h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-20 0h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zm-7 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5zm-10 0h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-20 0h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zm-7 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5zm-10 0h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-20 0h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zm-7 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5z"/>
<path class="st1" d="M85 20.4h21.3s-6.7-14.9 7.5-16.8c12.6-1.7 17.6 11.3 17.6 11.3s1.5-7.5 9-6.1 12.9 13.3 12.9 13.3h18.6"/>
<path class="st0" d="M172.2 18.6h-4c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h4c.3 0 .5.2.5.5s-.2.5-.5.5zm-13 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5zm-5 0h-.8c-.1-.1-.2-.1-.2-.2-.1-.2-.5-1-1.2-2.1-.1-.2-.1-.5.2-.7.2-.1.5-.1.7.2.5.8.9 1.5 1.1 1.9h.2c.3 0 .5.2.5.5s-.2.4-.5.4zm-47.5-.6h-1.3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h.6c-.1-.2-.2-.6-.3-.9-.1-.3.1-.6.3-.7.3-.1.6.1.7.3.3.9.6 1.5.6 1.5.1.3 0 .5-.3.7-.1.1-.2.1-.3.1zm-9.3 0h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.3.5-.5.5zm7.8-5.5c-.3 0-.5-.2-.5-.4 0-.3-.1-.7-.1-1s.2-.5.5-.5.5.2.5.5 0 .6.1 1c.1.2-.1.4-.5.4.1.1.1.1 0 0zm26.2-1c-.2 0-.4-.1-.4-.3-.1-.2-.3-.5-.4-.9-.1-.2 0-.5.2-.7.2-.1.5 0 .7.2.2.3.3.6.5.9.1.2 0 .5-.2.7-.3.1-.3.1-.4.1zm16.1-1.3c-.1 0-.3 0-.4-.1-1.7-1.8-4-3.1-6.4-3.7-1.3-.3-2.6-.2-3.9.2-.3.1-.5-.1-.6-.3-.1-.3.1-.5.3-.6 1.4-.4 2.9-.5 4.4-.2 2.6.6 5.1 2 6.9 4 .2.2.2.5 0 .7-.1-.1-.2 0-.3 0zm-18.8-3c-.2 0-.3-.1-.4-.2-.6-.8-1.3-1.5-2-2.1-.2-.2-.1-.5.1-.7.2-.1.4-.1.6 0 .8.7 1.5 1.4 2.1 2.2.2.2.1.5-.1.7 0 .1-.2.1-.3.1zm-20.5-3.8c-.3 0-.5-.2-.5-.5 0-.2.1-.3.2-.4 1.8-1.3 4-2.2 6.2-2.4 1.9-.3 3.8-.2 5.7.2.3.1.5.3.4.6s-.3.5-.6.4c-1.7-.4-3.5-.4-5.3-.2-2.1.2-4.1.9-5.7 2.2-.2.1-.3.1-.4.1z"/>
<path class="st1" d="M172.9 22.4H85c-.6 0-1-.4-1-1s.4-1 1-1h87.9c.6 0 1 .4 1 1s-.5 1-1 1zM.8 37.7h11.9s-3.7-8.3 4.2-9.4c7-1 9.8 6.3 9.8 6.3s.8-4.2 5-3.4 7.2 7.4 7.2 7.4h10.3"/>
<path class="st0" d="M13 36.4H1.1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h11.5c.2-.2.5-.2.7 0l.1.1v.1c.1.3 0 .5-.3.7 0 .1-.1.1-.1.1zm32.9-.2h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zM27 33h-.1c-.3-.1-.4-.4-.3-.6.5-2 2.3-3.5 4.4-3.5.4 0 .7 0 1.1.1 1.9.5 3.6 1.5 4.9 3 .2.2.2.5 0 .7s-.5.2-.7 0c-1.1-1.3-2.6-2.3-4.3-2.7-.3-.1-.6-.1-.9-.1-1.7 0-3.1 1.2-3.4 2.8-.3.2-.5.3-.7.3zm-13.6-4.3c-.3 0-.5-.2-.5-.5 0-.1.1-.3.1-.4.8-.8 1.8-1.3 2.8-1.6.3-.1.6.1.6.4s-.1.6-.4.6c-.9.2-1.7.7-2.4 1.3.1.2 0 .2-.2.2zm7.5-1.3h-.1c-.3-.1-.6-.2-.9-.2-.3-.1-.5-.3-.4-.6.1-.3.3-.5.6-.4.3.1.7.2 1 .3.3 0 .5.3.4.6 0 .1-.3.3-.6.3z"/>
<path class="st1" d="M49.9 39.7H1c-.6 0-1-.4-1-1s.4-1 1-1h48.8c.6 0 1 .4 1 1s-.4 1-.9 1zm85.5 37.5h-15.3V60.3c0-4.2-3.4-7.5-7.6-7.5H51.1c-4.2 0-7.5 3.4-7.5 7.5V101c0 1.3.4 2.6 1 3.7-.4.5-.8 1-1 1.6l-6.9 16.1c-.3.7-.5 1.5-.5 2.3v1.1c.1 3.4 2.8 6.1 6.2 6h60v3.2c0 4.1 3.3 7.4 7.4 7.4h25.6c4.1 0 7.4-3.3 7.4-7.4V84.7c.1-4.2-3.2-7.5-7.4-7.5z"/>
<path class="st1" d="M50.8 56.5h61.4c2 0 3.6 1.6 3.6 3.5v40.7c0 2-1.6 3.6-3.6 3.6H50.8c-2 0-3.5-1.6-3.5-3.6V60.1c0-2 1.6-3.6 3.5-3.6z"/>
<path class="st1" d="M52.7 62.5h57.7c1.2 0 2.1.9 2.1 2.1V99c0 1.2-.9 2.1-2.1 2.1H52.7c-1.2 0-2.1-.9-2.1-2.1V64.6c0-1.1 1-2.1 2.1-2.1z"/>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="55.4468" y1="665.5432" x2="128.2768" y2="738.3832" gradientTransform="translate(.02 -609.83)">
<stop offset="0" stop-color="#CCFBFF"/>
<stop offset="1" stop-color="#C9E4FF"/>
</linearGradient>
<path class="st2" d="M110.4 63.5c.6 0 1.1.5 1.1 1.1V99c0 .6-.5 1.1-1.1 1.1H52.7c-.6 0-1.1-.5-1.1-1.1V64.6c0-.6.5-1.1 1.1-1.1h57.7"/>
<path class="st3" d="M115.7 107.6c-.4-.8-1.2-1.3-2.1-1.2H49c-.9 0-1.7.5-2.1 1.3L40 123.8c-.1.2-.1.5-.1.7v1.1c.1 1.2 1 2.1 2.2 2H121c1.2.1 2.2-.8 2.2-2v-1c0-.3-.1-.5-.2-.7l-7.3-16.3z"/>
<linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="-4.5021" y1="627.6644" x2="182.4979" y2="797.1644" gradientTransform="translate(.02 -609.83)">
<stop offset="0" stop-color="#00C8D7"/>
<stop offset="1" stop-color="#0A84FF"/>
</linearGradient>
<path class="st4" d="M124.9 122.9l-7.3-16.1c-.3-.8-.9-1.4-1.6-1.8 1.2-1.1 1.9-2.6 1.9-4.2V60.1c0-3.1-2.5-5.5-5.6-5.5H50.9c-3.1 0-5.5 2.5-5.5 5.5v40.7c0 1.5.6 3 1.7 4-.9.4-1.6 1.1-2 2L38.2 123c-.2.5-.3 1-.3 1.5v1.1c.1 2.3 1.9 4.1 4.2 4H121c2.3.1 4.2-1.7 4.2-4v-1c0-.6-.1-1.2-.3-1.7zm-1.7 2.6c-.1 1.2-1 2.1-2.2 2H42.1c-1.2.1-2.2-.8-2.2-2v-1.1c0-.2 0-.5.1-.7l6.9-16.1c.4-.8 1.2-1.3 2.1-1.3h64.7c.9 0 1.7.5 2.1 1.2l7.3 16.1c.1.2.2.5.2.7l-.1 1.2zm-75.9-24.7V60.1c0-2 1.6-3.5 3.5-3.5h61.4c2 0 3.6 1.6 3.6 3.5v40.7c0 2-1.6 3.6-3.5 3.6H50.9c-2 0-3.6-1.6-3.6-3.6z"/>
<linearGradient id="SVGID_3_" gradientUnits="userSpaceOnUse" x1="-51.3994" y1="590.94" x2="201.6006" y2="840.94" gradientTransform="translate(.02 -609.83)">
<stop offset="0" stop-color="#00C8D7"/>
<stop offset="1" stop-color="#0A84FF"/>
</linearGradient>
<path class="st5" d="M94.7 121.8H68.4c-.4 0-.8-.2-.8-.5.3-1.7.5-2.6.8-4.4 0-.2.3-.4.7-.4l25.3-.2c.4 0 .7.2.7.4.2 1.5.4 3 .4 4.5.1.4-.3.6-.8.6zM64 112.4h-3.9c-.3 0-.6.2-.7.4-.1.5-.2.7-.3 1.2s.4.7.9.7h3.8c.3 0 .6-.1.7-.3.1-.5.2-.7.3-1.2s-.3-.8-.8-.8zm9.9 0h-4.1c-.4 0-.7.2-.7.4-.1.5-.1.7-.2 1.2-.1.3.4.6.9.6h3.9c.3 0 .6-.1.7-.4.1-.5.2-.7.3-1.2.1-.4-.3-.7-.8-.6zm20.5 1.4c-.1-.5-.1-.7-.2-1.2 0-.2-.3-.4-.7-.4h-4.1c-.5 0-.9.3-.9.6l.3 1.2c0 .2.3.4.7.4h3.9c.6 0 1.1-.3 1-.6zm-9.9.1l-.2-1.2c0-.2-.4-.4-.8-.4h-3.7c-.4 0-.8.2-.8.4-.1.5-.2.7-.2 1.2-.1.3.3.6.8.6h4.1c.4-.1.8-.3.8-.6zm19.6-.2l-.2-1.2c0-.2-.3-.3-.6-.3h-3.9c-.5 0-1 .3-.9.7l.3 1.2c.1.2.3.3.7.3h3.8c.4-.1.9-.4.8-.7zm-39.1-5h-3.7c-.3 0-.5.1-.6.3-.1.4-.2.7-.3 1.1s.3.6.8.6h3.6c.3 0 .5-.1.7-.3.2-.4.2-.7.4-1.1s-.4-.6-.9-.6zm9.4 0h-3.9c-.4 0-.7.2-.7.4-.1.4-.2.7-.3 1.1s.3.6.8.6h3.8c.3 0 .6-.1.7-.4.1-.4.2-.7.3-1.1s-.3-.6-.7-.6zm19.6 1.4l-.2-1.1c0-.2-.3-.4-.7-.4h-3.9c-.5 0-.9.3-.8.6l.2 1.1c0 .2.3.4.7.4h3.8c.5-.1.9-.3.9-.6zm-9.5.1l-.2-1.1c0-.2-.3-.4-.7-.4H80c-.4 0-.7.2-.8.4-.1.4-.2.7-.3 1.1-.1.3.3.5.8.5h3.9c.5 0 .9-.3.9-.5zm18.7-.1l-.2-1.1c0-.2-.3-.3-.6-.3h-3.7c-.5 0-1 .3-.9.6l.3 1.1c.1.2.3.3.6.3h3.6c.5-.1.9-.4.9-.6zm-48.1 2.4h-3.8c-.3 0-.6.1-.7.4-.2.4-.3.8-.4 1.2-.1.3.4.7.9.7h3.7c.3 0 .6-.2.6-.4.1-.4.2-.8.4-1.2.2-.5-.2-.8-.7-.7zm1.3-3.7h-3.5c-.2 0-.5.1-.6.3-.1.4-.2.7-.4 1.1-.1.3.2.6.7.6h3.5c.3 0 .5-.1.7-.3.2-.4.3-.7.5-1.1s-.4-.7-.9-.6zm50.9 4.1c.1.4.3.8.3 1.1 0 .2.3.3.6.3h3.7c.5 0 1-.3.9-.6-.1-.4-.2-.8-.3-1.1-.1-.2-.4-.4-.7-.3h-3.7c-.5-.1-.9.2-.8.6zm-1.1-3.7c.1.4.2.7.4 1.1.1.2.4.3.6.3h3.4c.5 0 .8-.3.8-.6-.1-.4-.2-.7-.3-1.1 0-.2-.2-.3-.6-.3H107c-.4 0-.9.3-.8.6z"/>
<g>
<linearGradient id="SVGID_4_" gradientUnits="userSpaceOnUse" x1="-22.6206" y1="563.3309" x2="225.3794" y2="813.3309" gradientTransform="translate(0 -610)">
<stop offset="0" stop-color="#00C8D7"/>
<stop offset="1" stop-color="#0A84FF"/>
</linearGradient>
<circle class="st6" cx="82.6" cy="59.4" r="1.2"/>
</g>
<path class="st1" d="M109.6 80h25.6c2.5 0 4.4 2 4.4 4.4v50.3c0 2.5-2 4.4-4.4 4.4h-25.6c-2.5 0-4.4-2-4.4-4.4V84.4c-.1-2.4 1.9-4.4 4.4-4.4z"/>
<linearGradient id="SVGID_5_" gradientUnits="userSpaceOnUse" x1="-19.2553" y1="590.7758" x2="221.2447" y2="809.2758" gradientTransform="translate(.02 -609.83)">
<stop offset="0" stop-color="#00C8D7"/>
<stop offset="1" stop-color="#0A84FF"/>
</linearGradient>
<path class="st7" d="M135.1 81c1.9 0 3.4 1.5 3.4 3.4v50.3c0 1.9-1.5 3.4-3.4 3.4h-25.6c-1.9 0-3.4-1.5-3.4-3.4V84.4c0-1.9 1.5-3.4 3.4-3.4h25.6m0-2h-25.6c-3 0-5.4 2.4-5.4 5.4v50.3c0 3 2.4 5.4 5.4 5.4h25.6c3 0 5.4-2.4 5.4-5.4V84.4c.1-3-2.4-5.4-5.4-5.4z"/>
<g>
<path class="st1" d="M111.1 84.8h22.4c.9 0 1.7.8 1.7 1.7v41.9c0 .9-.8 1.7-1.7 1.7h-22.4c-.9 0-1.7-.8-1.7-1.7V86.5c0-.9.8-1.7 1.7-1.7z"/>
<linearGradient id="SVGID_6_" gradientUnits="userSpaceOnUse" x1="62.995" y1="657.995" x2="135.835" y2="730.835" gradientTransform="translate(.02 -609.83)">
<stop offset="0" stop-color="#CCFBFF"/>
<stop offset="1" stop-color="#C9E4FF"/>
</linearGradient>
<path class="st8" d="M133.5 85.8c.4 0 .7.3.7.7v41.9c0 .4-.3.7-.7.7h-22.4c-.4 0-.7-.3-.7-.7V86.5c0-.4.3-.7.7-.7h22.4"/>
</g>
<linearGradient id="SVGID_7_" gradientUnits="userSpaceOnUse" x1="-73.41" y1="701.6741" x2="262.92" y2="701.6741" gradientTransform="translate(.02 -609.83)">
<stop offset="0" stop-color="#00C8D7"/>
<stop offset="1" stop-color="#0A84FF"/>
</linearGradient>
<path class="st9" d="M82.9 97.8c-.6 0-1.1-.2-1.6-.6l-15-12.1c-1.5-1-2.7-2.2-3.7-3.7-3.3-5.1-2.6-11.7 1.7-16 5-5 13-5 17.9 0 .1.1.3.2.5.2s.4-.1.5-.2c5.1-4.8 13.1-4.6 18 .5s4.6 13.1-.5 18c-.5.4-1 .9-1.5 1.2L84.4 97.3c-.4.3-1 .5-1.5.5zm41 23.7l11-9c4.4-3 5.6-9 2.7-13.4-3-4.4-9-5.6-13.4-2.7-.5.3-1 .7-1.4 1.2 0 0-.1.1-.2.1s-.1 0-.2-.1c-3.8-3.8-9.9-3.8-13.7 0-3.2 3.2-3.8 8.3-1.3 12.2.7 1.1 1.7 2.1 2.8 2.8l11.1 9c.7.6 1.8.6 2.6-.1z"/>
<path class="st1" d="M73.3 62.8c3.1 0 6.1 1.2 8.3 3.4.3.3.7.5 1.2.5.4 0 .9-.2 1.2-.5 4.6-4.5 12-4.5 16.6.1 4.5 4.6 4.5 12-.1 16.6-.5.5-1.1 1-1.7 1.4l-15 12.2c-.3.2-.6.3-.9.3s-.7-.1-.9-.3L67 84.3c-1.4-.9-2.5-2-3.4-3.4-3-4.6-2.4-10.7 1.5-14.7 2.1-2.1 5.1-3.4 8.2-3.4m0-2c-3.6 0-7.1 1.4-9.7 4-4.6 4.6-5.3 11.8-1.8 17.2 1 1.6 2.3 2.9 3.9 3.9l15 12.1c1.3 1 3.1 1 4.3 0l14.9-12.1c6.3-4.2 8-12.7 3.8-19s-12.7-8-19-3.8c-.7.5-1.3 1-1.9 1.5-2.6-2.4-6-3.8-9.5-3.8z"/>
<path class="st3" d="M66.3 76.2h-.2c-1.1-.1-1.9-1-1.8-2.1.3-3.8 3.1-7 6.8-7.8 1-.4 2.2 0 2.6 1s0 2.2-1 2.6c-.2.1-.5.2-.8.2-2.1.5-3.6 2.2-3.8 4.3 0 1-.8 1.7-1.8 1.8z"/>
<path class="st1" d="M115.4 95.8c2.3 0 4.5.9 6.1 2.6.2.2.5.4.9.4.3 0 .6-.1.9-.4 3.4-3.4 8.9-3.4 12.3 0 3.4 3.4 3.4 8.9 0 12.3-.4.4-.8.8-1.3 1.1l-11.1 9c-.2.2-.4.2-.7.2-.2 0-.5-.1-.7-.2l-11.1-9c-1-.7-1.9-1.5-2.5-2.5-2.2-3.4-1.7-8 1.1-10.9 1.6-1.7 3.8-2.6 6.1-2.6m0-2c-2.8 0-5.5 1.1-7.5 3.1-3.6 3.6-4.1 9.2-1.4 13.4.8 1.2 1.8 2.2 3 3l11 8.9c1.1.9 2.7.9 3.9 0l11-9c4.9-3.3 6.1-10 2.8-14.8-3.3-4.9-10-6.1-14.8-2.8-.3.2-.7.5-1 .7-2-1.6-4.5-2.6-7-2.5z"/>
<path class="st3" d="M110.3 105.7c-.9-.1-1.5-.8-1.4-1.6.2-2.8 2.2-5.2 5-5.8.8-.2 1.6.3 1.8 1.1.2.8-.3 1.6-1.1 1.8h-.1c-1.5.3-2.7 1.6-2.8 3.2-.1.7-.7 1.3-1.4 1.3z"/>
<path class="st1" d="M82.7 98.2c-.7 0-1.2-.6-1.2-1.2v-6.2c0-.3.1-.6.4-.9l1.7-1.7-4-4c-.5-.5-.5-1.3 0-1.8l6.3-6.3-3.9-3.9c-.2-.2-.4-.6-.4-.9v-4.6c0-.7.6-1.2 1.2-1.2s1.2.6 1.2 1.2v4.1l4.4 4.4c.5.5.5 1.3 0 1.8l-6.3 6.3 4 4c.5.5.5 1.3 0 1.8L84 91.2v5.7c0 .7-.6 1.3-1.3 1.3zm39.7 23.8c-.7 0-1.2-.6-1.2-1.2V116c0-.3.1-.6.4-.9l1.1-1.1-2.9-2.9c-.2-.2-.4-.6-.4-.9s.1-.6.4-.9l4.6-4.6-2.7-2.7c-.2-.2-.4-.6-.4-.9v-3.5c0-.7.6-1.2 1.2-1.2s1.2.6 1.2 1.2v3l3.2 3.2c.2.2.4.6.4.9s-.1.6-.4.9l-4.6 4.6 2.9 2.9c.5.5.5 1.3 0 1.8l-1.6 1.6v4.2c.1.8-.5 1.3-1.2 1.3z"/>
</svg>

After

Width:  |  Height:  |  Size: 11 KiB

View File

@ -108,6 +108,7 @@
* skin/classic/browser/preferences/containers.css (../shared/preferences/containers.css)
skin/classic/browser/fxa/default-avatar.svg (../shared/fxa/default-avatar.svg)
skin/classic/browser/fxa/sync-illustration.svg (../shared/fxa/sync-illustration.svg)
skin/classic/browser/fxa/sync-illustration-issue.svg (../shared/fxa/sync-illustration-issue.svg)
skin/classic/browser/accessibility.svg (../shared/icons/accessibility.svg)

View File

@ -213,14 +213,22 @@ body {
opacity: 100;
}
.deck .syncIllustration {
.deck .syncIllustration,
.deck .syncIllustrationIssue {
height: 150px;
margin-top: 20px;
background-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
background-position: center;
background-repeat: no-repeat;
}
.deck .syncIllustration {
background-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
}
.deck .syncIllustrationIssue {
background-image: url(chrome://browser/skin/fxa/sync-illustration-issue.svg);
}
.deck .instructions {
text-align: center;
padding: 0 11px;

View File

@ -8,7 +8,7 @@ add_task(async function() {
const scale = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDocShell).QueryInterface(Ci.nsIBaseWindow)
.devicePixelsPerDesktopPixel;
let rect = TestRunner._findBoundingBox(["#tabbrowser-tabs"]);
let {bounds, rects} = TestRunner._findBoundingBox(["#tabbrowser-tabs"]);
let element = document.querySelector("#tabbrowser-tabs");
let tabBar = element.ownerDocument.getBoxObjectFor(element);
@ -30,13 +30,24 @@ add_task(async function() {
expectedRight = Math.min(expectedRight, windowRight);
expectedBottom = Math.min(expectedBottom, windowBottom);
// Check width calculation on simple example
is(rect.width, expectedRight - expectedLeft,
is(bounds.width, expectedRight - expectedLeft,
"Checking _findBoundingBox width calculation");
// Check height calculation on simple example
is(rect.height, expectedBottom - expectedTop,
is(bounds.height, expectedBottom - expectedTop,
"Checking _findBoundingBox height caclulation");
is(bounds.left, rects[0].left,
"Checking _findBoundingBox union.left and rect.left is the same for a single selector");
is(bounds.right, rects[0].right,
"Checking _findBoundingBox union.right and rect.right is the same for a single selector");
is(bounds.top, rects[0].top,
"Checking _findBoundingBox union.top and rect.top is the same for a single selector");
is(bounds.bottom, rects[0].bottom,
"Checking _findBoundingBox union.bottom and rect.bottom is the same for a single selector");
let result = TestRunner._findBoundingBox(["#forward-button", "#TabsToolbar"]);
bounds = result.bounds;
rects = result.rects;
rect = TestRunner._findBoundingBox(["#forward-button", "#TabsToolbar"]);
element = document.querySelector("#TabsToolbar");
let tabToolbar = element.ownerDocument.getBoxObjectFor(element);
element = document.querySelector("#forward-button");
@ -61,11 +72,23 @@ add_task(async function() {
expectedBottom = Math.min(expectedBottom, windowBottom);
// Check width calculation on union
is(rect.width, expectedRight - expectedLeft,
is(bounds.width, expectedRight - expectedLeft,
"Checking _findBoundingBox union width calculation");
// Check height calculation on union
is(rect.height, expectedBottom - expectedTop,
is(bounds.height, expectedBottom - expectedTop,
"Checking _findBoundingBox union height calculation");
// Check single selector's left position
is(rects[0].left, Math.max(scale * (fButton.screenX - TestRunner.croppingPadding), windowLeft),
"Checking single selector's left position when _findBoundingBox has multiple selectors");
// Check single selector's right position
is(rects[0].right, Math.min(scale * (fButton.width + fButton.screenX + TestRunner.croppingPadding), windowRight),
"Checking single selector's right position when _findBoundingBox has multiple selectors");
// Check single selector's top position
is(rects[0].top, Math.max(scale * (fButton.screenY - TestRunner.croppingPadding), windowTop),
"Checking single selector's top position when _findBoundingBox has multiple selectors");
// Check single selector's bottom position
is(rects[0].bottom, Math.min(scale * (fButton.height + fButton.screenY + TestRunner.croppingPadding), windowBottom),
"Checking single selector's bottom position when _findBoundingBox has multiple selectors");
// Check that nonexistent selectors throws an exception
Assert.throws(() => {
@ -74,7 +97,6 @@ add_task(async function() {
// Check that no selectors throws an exception
Assert.throws(() => {
rect = TestRunner._findBoundingBox([]);
TestRunner._findBoundingBox([]);
}, "No selectors specified.", "Checking that no selectors throws an exception");
});

View File

@ -46,8 +46,8 @@ async function compareImages(window, expected, test) {
return nsIDOMWindowUtils.compareCanvases(expectedCanvas, testCanvas, {});
}
async function cropAndCompare(window, src, expected, test, region) {
await TestRunner._cropImage(window, src, region, test);
async function cropAndCompare(window, src, expected, test, region, subregions) {
await TestRunner._cropImage(window, src, region, subregions, test);
return compareImages(window, expected, OS.Path.toFileURI(test));
}
@ -61,7 +61,8 @@ add_task(async function crop() {
"chrome://mozscreenshots/content/lib/robot.png",
"chrome://mozscreenshots/content/lib/robot_upperleft.png",
OS.Path.join(tmp, "test_cropped_upperleft.png"),
new Rect(0, 0, 32, 32)
new Rect(0, 0, 32, 32),
[new Rect(0, 0, 32, 32)]
), 0, "The image should be cropped to the upper left quadrant");
is(await cropAndCompare(
@ -69,7 +70,8 @@ add_task(async function crop() {
"chrome://mozscreenshots/content/lib/robot.png",
"chrome://mozscreenshots/content/lib/robot_center.png",
OS.Path.join(tmp, "test_cropped_center.png"),
new Rect(16, 16, 32, 32)
new Rect(16, 16, 32, 32),
[new Rect(16, 16, 32, 32)]
), 0, "The image should be cropped to the center of the image");
is(await cropAndCompare(
@ -77,6 +79,33 @@ add_task(async function crop() {
"chrome://mozscreenshots/content/lib/robot.png",
"chrome://mozscreenshots/content/lib/robot_uncropped.png",
OS.Path.join(tmp, "test_uncropped.png"),
new Rect(-8, -9, 80, 80)
new Rect(-8, -9, 80, 80),
[new Rect(-8, -9, 80, 80)]
), 0, "The image should be not be cropped, and the cropping region should be clipped to the size of the image");
is(await cropAndCompare(
window,
"chrome://mozscreenshots/content/lib/robot.png",
"chrome://mozscreenshots/content/lib/robot_diagonal.png",
OS.Path.join(tmp, "test_diagonal.png"),
new Rect(0, 0, 64, 64),
[
new Rect(0, 0, 16, 16),
new Rect(16, 16, 16, 16),
new Rect(32, 32, 16, 16),
new Rect(48, 48, 16, 16)
]
), 0, "The image should be contain squares across the diagonal");
is(await cropAndCompare(
window,
"chrome://mozscreenshots/content/lib/robot.png",
"chrome://mozscreenshots/content/lib/robot_cropped_diagonal.png",
OS.Path.join(tmp, "test_cropped_diagonal.png"),
new Rect(16, 16, 48, 48),
[
new Rect(16, 16, 16, 16),
new Rect(32, 32, 16, 16),
]
), 0, "The image should be cropped with squares across the diagonal");
});

View File

@ -209,7 +209,13 @@ this.TestRunner = {
.getInterface(Ci.nsIDocShell).QueryInterface(Ci.nsIBaseWindow)
.devicePixelsPerDesktopPixel;
let finalRect = undefined;
const windowLeft = browserWindow.screenX * scale;
const windowTop = browserWindow.screenY * scale;
const windowWidth = browserWindow.outerWidth * scale;
const windowHeight = browserWindow.outerHeight * scale;
let bounds;
const rects = [];
// Grab bounding boxes and find the union
for (let selector of selectors) {
let element;
@ -226,31 +232,23 @@ this.TestRunner = {
// Calculate box region, convert to Rect
let box = element.ownerDocument.getBoxObjectFor(element);
let newRect = new Rect(box.screenX * scale, box.screenY * scale,
let rect = new Rect(box.screenX * scale, box.screenY * scale,
box.width * scale, box.height * scale);
rect.inflateFixed(this.croppingPadding * scale);
rect.left = Math.max(rect.left, windowLeft);
rect.top = Math.max(rect.top, windowTop);
rect.right = Math.min(rect.right, windowLeft + windowWidth);
rect.bottom = Math.min(rect.bottom, windowTop + windowHeight);
rects.push(rect);
if (!finalRect) {
finalRect = newRect;
if (!bounds) {
bounds = rect;
} else {
finalRect = finalRect.union(newRect);
bounds = bounds.union(rect);
}
}
// Add fixed padding
finalRect = finalRect.inflateFixed(this.croppingPadding * scale);
let windowLeft = browserWindow.screenX * scale;
let windowTop = browserWindow.screenY * scale;
let windowWidth = browserWindow.outerWidth * scale;
let windowHeight = browserWindow.outerHeight * scale;
// Clip dimensions to window only
finalRect.left = Math.max(finalRect.left, windowLeft);
finalRect.top = Math.max(finalRect.top, windowTop);
finalRect.right = Math.min(finalRect.right, windowLeft + windowWidth);
finalRect.bottom = Math.min(finalRect.bottom, windowTop + windowHeight);
return finalRect;
return {bounds, rects};
},
async _performCombo(combo) {
@ -332,21 +330,21 @@ this.TestRunner = {
}
}
const rect = this._findBoundingBox(finalSelectors, windowType);
this.mochitestScope.ok(rect, "A valid bounding box was found");
if (!rect) {
const {bounds, rects} = this._findBoundingBox(finalSelectors, windowType);
this.mochitestScope.ok(bounds, "A valid bounding box was found");
if (!bounds) {
return;
}
await this._onConfigurationReady(combo, rect);
await this._onConfigurationReady(combo, bounds, rects);
},
async _onConfigurationReady(combo, rect) {
async _onConfigurationReady(combo, bounds, rects) {
let filename = padLeft(this.currentComboIndex + 1,
String(this.combos.length).length) + this._comboName(combo);
const imagePath = await Screenshot.captureExternal(filename);
let browserWindow = Services.wm.getMostRecentWindow("navigator:browser");
await this._cropImage(browserWindow, OS.Path.toFileURI(imagePath), rect, imagePath).catch((msg) => {
await this._cropImage(browserWindow, OS.Path.toFileURI(imagePath), bounds, rects, imagePath).catch((msg) => {
throw `Cropping combo [${combo.map((e) => e.name).join(", ")}] failed: ${msg}`;
});
this.completedCombos++;
@ -359,33 +357,50 @@ this.TestRunner = {
}, "");
},
async _cropImage(window, srcPath, rect, targetPath) {
async _cropImage(window, srcPath, bounds, rects, targetPath) {
const { document, Image } = window;
const promise = new Promise((resolve, reject) => {
const img = new Image();
img.onload = function() {
img.onload = () => {
// Clip the cropping region to the size of the screenshot
// This is necessary mostly to deal with offscreen windows, since we
// are capturing an image of the operating system's desktop.
rect.left = Math.max(0, rect.left);
rect.right = Math.min(img.naturalWidth, rect.right);
rect.top = Math.max(0, rect.top);
rect.bottom = Math.min(img.naturalHeight, rect.bottom);
bounds.left = Math.max(0, bounds.left);
bounds.right = Math.min(img.naturalWidth, bounds.right);
bounds.top = Math.max(0, bounds.top);
bounds.bottom = Math.min(img.naturalHeight, bounds.bottom);
// Create a new offscreen canvas with the width and height given by the
// size of the region we want to crop to
const canvas = document.createElementNS("http://www.w3.org/1999/xhtml", "canvas");
canvas.width = rect.width;
canvas.height = rect.height;
canvas.width = bounds.width;
canvas.height = bounds.height;
const ctx = canvas.getContext("2d");
// By drawing the image with the negative offset, the unwanted regions
// are drawn off canvas, and are not captured when the canvas is saved.
ctx.drawImage(img, -rect.x, -rect.y);
for (const rect of rects) {
rect.left = Math.max(0, rect.left);
rect.right = Math.min(img.naturalWidth, rect.right);
rect.top = Math.max(0, rect.top);
rect.bottom = Math.min(img.naturalHeight, rect.bottom);
const width = rect.width;
const height = rect.height;
const screenX = rect.left;
const screenY = rect.top;
const imageX = screenX - bounds.left;
const imageY = screenY - bounds.top;
ctx.drawImage(img,
screenX, screenY, width, height,
imageX, imageY, width, height);
}
// Converts the canvas to a binary blob, which can be saved to a png
canvas.toBlob((blob) => {
// Use a filereader to convert the raw binary blob into a writable buffer
const fr = new FileReader();
fr.onload = function(e) {
fr.onload = (e) => {
const buffer = new Uint8Array(e.target.result);
// Save the file and complete the promise
OS.File.writeAtomic(targetPath, buffer, {}).then(resolve);

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.0 KiB

View File

@ -183,33 +183,22 @@ class Automation(object):
else:
os.kill(self.pid, signal.SIGKILL)
def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, dmdPath=None, lsanPath=None, ubsanPath=None):
def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, lsanPath=None, ubsanPath=None):
if xrePath == None:
xrePath = self.DIST_BIN
if env == None:
env = dict(os.environ)
ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
dmdLibrary = None
preloadEnvVar = None
if self.UNIXISH or self.IS_MAC:
envVar = "LD_LIBRARY_PATH"
preloadEnvVar = "LD_PRELOAD"
if self.IS_MAC:
envVar = "DYLD_LIBRARY_PATH"
dmdLibrary = "libdmd.dylib"
else: # unixish
dmdLibrary = "libdmd.so"
if envVar in env:
ldLibraryPath = ldLibraryPath + ":" + env[envVar]
env[envVar] = ldLibraryPath
elif self.IS_WIN32:
env["PATH"] = env["PATH"] + ";" + str(ldLibraryPath)
dmdLibrary = "dmd.dll"
preloadEnvVar = "MOZ_REPLACE_MALLOC_LIB"
if dmdPath and dmdLibrary and preloadEnvVar:
env[preloadEnvVar] = os.path.join(dmdPath, dmdLibrary)
if crashreporter and not debugger:
env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'

View File

@ -60,15 +60,12 @@ class RemoteAutomation(Automation):
self._remoteLog = logfile
# Set up what we need for the remote environment
def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, dmdPath=None, lsanPath=None, ubsanPath=None):
def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, lsanPath=None, ubsanPath=None):
# Because we are running remote, we don't want to mimic the local env
# so no copying of os.environ
if env is None:
env = {}
if dmdPath:
env['MOZ_REPLACE_MALLOC_LIB'] = os.path.join(dmdPath, 'libdmd.so')
# Except for the mochitest results table hiding option, which isn't
# passed to runtestsremote.py as an actual option, but through the
# MOZ_HIDE_RESULTS_TABLE environment variable.

View File

@ -62,3 +62,14 @@ def replace_malloc(value, jemalloc, milestone, build_project):
set_config('MOZ_REPLACE_MALLOC', replace_malloc)
set_define('MOZ_REPLACE_MALLOC', replace_malloc)
add_old_configure_assignment('MOZ_REPLACE_MALLOC', replace_malloc)
@depends(replace_malloc, build_project)
def replace_malloc_static(replace_malloc, build_project):
# Default to statically linking replace-malloc libraries that can be
# statically linked, except when building with --enable-project=memory.
if replace_malloc and build_project != 'memory':
return True
set_config('MOZ_REPLACE_MALLOC_STATIC', replace_malloc_static)

View File

@ -0,0 +1,45 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
define(function (require, exports, module) {
const { Component } = require("devtools/client/shared/vendor/react");
const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
const { findDOMNode } = require("devtools/client/shared/vendor/react-dom");
const { pre } = require("devtools/client/shared/vendor/react-dom-factories");
/**
* This object represents a live DOM text node in a <pre>.
*/
class LiveText extends Component {
static get propTypes() {
return {
data: PropTypes.instanceOf(Text),
};
}
componentDidMount() {
this.componentDidUpdate();
}
componentDidUpdate() {
let el = findDOMNode(this);
if (el.firstChild === this.props.data) {
return;
}
el.textContent = "";
el.append(this.props.data);
}
render() {
return pre({className: "data"});
}
}
// Exports from this module
exports.LiveText = LiveText;
});

View File

@ -22,7 +22,7 @@ define(function (require, exports, module) {
class MainTabbedArea extends Component {
static get propTypes() {
return {
jsonText: PropTypes.string,
jsonText: PropTypes.instanceOf(Text),
tabActive: PropTypes.number,
actions: PropTypes.object,
headers: PropTypes.object,
@ -42,8 +42,8 @@ define(function (require, exports, module) {
super(props);
this.state = {
json: {},
headers: {},
json: props.json,
expandedNodes: props.expandedNodes,
jsonText: props.jsonText,
tabActive: props.tabActive
};
@ -64,7 +64,7 @@ define(function (require, exports, module) {
className: "json",
title: JSONView.Locale.$STR("jsonViewer.tab.JSON")},
JsonPanel({
data: this.props.json,
data: this.state.json,
expandedNodes: this.props.expandedNodes,
actions: this.props.actions,
searchFilter: this.state.searchFilter
@ -74,7 +74,8 @@ define(function (require, exports, module) {
className: "rawdata",
title: JSONView.Locale.$STR("jsonViewer.tab.RawData")},
TextPanel({
isValidJson: !(this.props.json instanceof Error),
isValidJson: !(this.state.json instanceof Error) &&
document.readyState != "loading",
data: this.state.jsonText,
actions: this.props.actions
})

View File

@ -12,8 +12,8 @@ define(function (require, exports, module) {
const dom = require("devtools/client/shared/vendor/react-dom-factories");
const { createFactories } = require("devtools/client/shared/react-utils");
const { TextToolbar } = createFactories(require("./TextToolbar"));
const { div, pre } = dom;
const { LiveText } = createFactories(require("./LiveText"));
const { div } = dom;
/**
* This template represents the 'Raw Data' panel displaying
@ -24,7 +24,7 @@ define(function (require, exports, module) {
return {
isValidJson: PropTypes.bool,
actions: PropTypes.object,
data: PropTypes.string
data: PropTypes.instanceOf(Text),
};
}
@ -41,9 +41,7 @@ define(function (require, exports, module) {
isValidJson: this.props.isValidJson
}),
div({className: "panelContent"},
pre({className: "data"},
this.props.data
)
LiveText({data: this.props.data})
)
)
);

View File

@ -14,6 +14,7 @@ DevToolsModules(
'HeadersToolbar.js',
'JsonPanel.js',
'JsonToolbar.js',
'LiveText.js',
'MainTabbedArea.js',
'SearchBox.js',
'TextPanel.js',

View File

@ -168,6 +168,8 @@ function exportData(win, request) {
data.json = new win.Text();
data.readyState = "uninitialized";
let Locale = {
$STR: key => {
try {
@ -244,7 +246,6 @@ function initialHTML(doc) {
element("script", {
src: baseURI + "lib/require.js",
"data-main": baseURI + "viewer-config.js",
defer: true,
})
]),
element("body", {}, [

View File

@ -19,41 +19,26 @@ define(function (require, exports, module) {
// Application state object.
let input = {
jsonText: JSONView.json.textContent,
jsonText: JSONView.json,
jsonPretty: null,
headers: JSONView.headers,
tabActive: 0,
prettified: false
};
try {
input.json = JSON.parse(input.jsonText);
} catch (err) {
input.json = err;
}
// Expand the document by default if its size isn't bigger than 100KB.
if (!(input.json instanceof Error) && input.jsonText.length <= AUTO_EXPAND_MAX_SIZE) {
input.expandedNodes = TreeViewClass.getExpandedNodes(
input.json,
{maxLevel: AUTO_EXPAND_MAX_LEVEL}
);
} else {
input.expandedNodes = new Set();
}
/**
* Application actions/commands. This list implements all commands
* available for the JSON viewer.
*/
input.actions = {
onCopyJson: function () {
copyString(input.prettified ? input.jsonPretty : input.jsonText);
let text = input.prettified ? input.jsonPretty : input.jsonText;
copyString(text.textContent);
},
onSaveJson: function () {
if (input.prettified && !prettyURL) {
prettyURL = URL.createObjectURL(new window.Blob([input.jsonPretty]));
prettyURL = URL.createObjectURL(new window.Blob([input.jsonPretty.textContent]));
}
dispatchEvent("save", input.prettified ? prettyURL : null);
},
@ -93,7 +78,7 @@ define(function (require, exports, module) {
theApp.setState({jsonText: input.jsonText});
} else {
if (!input.jsonPretty) {
input.jsonPretty = JSON.stringify(input.json, null, " ");
input.jsonPretty = new Text(JSON.stringify(input.json, null, " "));
}
theApp.setState({jsonText: input.jsonPretty});
}
@ -139,11 +124,52 @@ define(function (require, exports, module) {
* at the top of the window. This component also represents ReacJS root.
*/
let content = document.getElementById("content");
let promise = (async function parseJSON() {
if (document.readyState == "loading") {
// If the JSON has not been loaded yet, render the Raw Data tab first.
input.json = {};
input.expandedNodes = new Set();
input.tabActive = 1;
return new Promise(resolve => {
document.addEventListener("DOMContentLoaded", resolve, {once: true});
}).then(parseJSON).then(() => {
// Now update the state and switch to the JSON tab.
theApp.setState({
tabActive: 0,
json: input.json,
expandedNodes: input.expandedNodes,
});
});
}
// If the JSON has been loaded, parse it immediately before loading the app.
let jsonString = input.jsonText.textContent;
try {
input.json = JSON.parse(jsonString);
} catch (err) {
input.json = err;
}
// Expand the document by default if its size isn't bigger than 100KB.
if (!(input.json instanceof Error) && jsonString.length <= AUTO_EXPAND_MAX_SIZE) {
input.expandedNodes = TreeViewClass.getExpandedNodes(
input.json,
{maxLevel: AUTO_EXPAND_MAX_LEVEL}
);
}
return undefined;
})();
let theApp = render(MainTabbedArea(input), content);
// Send notification event to the window. Can be useful for
// Send readyState change notification event to the window. Can be useful for
// tests as well as extensions.
let event = new CustomEvent("JSONViewInitialized", {});
JSONView.initialized = true;
window.dispatchEvent(event);
JSONView.readyState = "interactive";
window.dispatchEvent(new CustomEvent("AppReadyStateChange"));
promise.then(() => {
// Another readyState change notification event.
JSONView.readyState = "complete";
window.dispatchEvent(new CustomEvent("AppReadyStateChange"));
});
});

View File

@ -22,8 +22,11 @@ support-files =
!/devtools/client/framework/test/head.js
!/devtools/client/framework/test/shared-head.js
[browser_json_refresh.js]
[browser_jsonview_bug_1380828.js]
[browser_jsonview_ignore_charset.js]
[browser_jsonview_chunked_json.js]
support-files =
chunked_json.sjs
[browser_jsonview_content_type.js]
[browser_jsonview_copy_headers.js]
subsuite = clipboard
@ -38,6 +41,7 @@ skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32
[browser_jsonview_empty_object.js]
[browser_jsonview_encoding.js]
[browser_jsonview_filter.js]
[browser_jsonview_ignore_charset.js]
[browser_jsonview_invalid_json.js]
[browser_jsonview_manifest.js]
[browser_jsonview_nojs.js]
@ -47,8 +51,7 @@ skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32
[browser_jsonview_save_json.js]
support-files =
!/toolkit/content/tests/browser/common/mockTransfer.js
[browser_jsonview_theme.js]
[browser_jsonview_slash.js]
[browser_jsonview_valid_json.js]
[browser_json_refresh.js]
[browser_jsonview_serviceworker.js]
[browser_jsonview_slash.js]
[browser_jsonview_theme.js]
[browser_jsonview_valid_json.js]

View File

@ -0,0 +1,80 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
const TEST_JSON_URL = URL_ROOT + "chunked_json.sjs";
add_task(async function () {
info("Test chunked JSON started");
await addJsonViewTab(TEST_JSON_URL, {
appReadyState: "interactive",
docReadyState: "loading",
});
is(await getElementCount(".rawdata.is-active"), 1,
"The Raw Data tab is selected.");
// Write some text and check that it is displayed.
await write("[");
await checkText();
// Repeat just in case.
await write("1,");
await checkText();
is(await getElementCount("button.prettyprint"), 0,
"There is no pretty print button during load");
await selectJsonViewContentTab("json");
is(await getElementText(".jsonPanelBox > .panelContent"), "", "There is no JSON tree");
await selectJsonViewContentTab("headers");
ok(await getElementText(".headersPanelBox .netInfoHeadersTable"),
"The headers table has been filled.");
// Write some text without being in Raw Data, then switch tab and check.
await write("2");
await selectJsonViewContentTab("rawdata");
await checkText();
// Another text check.
await write("]");
await checkText();
// Close the connection.
await server("close");
is(await getElementCount(".json.is-active"), 1, "The JSON tab is selected.");
is(await getElementCount(".jsonPanelBox .treeTable .treeRow"), 2,
"There is a tree with 2 rows.");
await selectJsonViewContentTab("rawdata");
await checkText();
is(await getElementCount("button.prettyprint"), 1, "There is a pretty print button.");
await clickJsonNode("button.prettyprint");
await checkText(JSON.stringify(JSON.parse(data), null, 2));
});
let data = " ";
async function write(text) {
data += text;
await server("write", text);
}
async function checkText(text = data) {
is(await getElementText(".textPanelBox .data"), text, "Got the right text.");
}
function server(action, value) {
return new Promise(resolve => {
let xhr = new XMLHttpRequest();
xhr.open("GET", TEST_JSON_URL + "?" + action + "=" + value);
xhr.addEventListener("load", resolve, {once: true});
xhr.send();
});
}

View File

@ -5,21 +5,21 @@
"use strict";
add_task(function* () {
add_task(async function () {
info("Test JSON without JavaScript started.");
let oldPref = SpecialPowers.getBoolPref("javascript.enabled");
SpecialPowers.setBoolPref("javascript.enabled", false);
const TEST_JSON_URL = "data:application/json,[1,2,3]";
yield addJsonViewTab(TEST_JSON_URL, 0).catch(() => {
info("JSON Viewer did not load");
return executeInContent("Test:JsonView:GetElementVisibleText", {selector: "html"})
.then(result => {
info("Checking visible text contents.");
is(result.text, "[1,2,3]", "The raw source should be visible.");
});
});
// "uninitialized" will be the last app readyState because JS is disabled.
await addJsonViewTab(TEST_JSON_URL, {appReadyState: "uninitialized"});
info("Checking visible text contents.");
let {text} = await executeInContent("Test:JsonView:GetElementVisibleText",
{selector: "html"});
is(text, "[1,2,3]", "The raw source should be visible.");
SpecialPowers.setBoolPref("javascript.enabled", oldPref);
});

View File

@ -0,0 +1,38 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
const key = "json-viewer-chunked-response";
function setResponse(response) {
setObjectState(key, response);
}
function getResponse() {
let response;
getObjectState(key, v => { response = v });
return response;
}
function handleRequest(request, response) {
let {queryString} = request;
if (!queryString) {
response.processAsync();
setResponse(response);
response.setHeader("Content-Type", "application/json");
// Write something so that the JSON viewer app starts loading.
response.write(" ");
return;
}
let [command, value] = queryString.split('=');
switch (command) {
case "write":
getResponse().write(value);
break;
case "close":
getResponse().finish();
setResponse(null);
break;
}
response.setHeader("Content-Type", "text/plain");
response.write("ok");
}

View File

@ -25,17 +25,19 @@ Services.scriptloader.loadSubScript(
"chrome://mochikit/content/tests/SimpleTest/EventUtils.js", EventUtils);
/**
* When the JSON View is done rendering it triggers custom event
* "JSONViewInitialized", then the Test:TestPageProcessingDone message
* will be sent to the parent process for tests to wait for this event
* if needed.
* When the ready state of the JSON View app changes, it triggers custom event
* "AppReadyStateChange", then the "Test:JsonView:AppReadyStateChange" message
* will be sent to the parent process for tests to wait for this event if needed.
*/
content.addEventListener("JSONViewInitialized", () => {
sendAsyncMessage("Test:JsonView:JSONViewInitialized");
content.addEventListener("AppReadyStateChange", () => {
sendAsyncMessage("Test:JsonView:AppReadyStateChange");
});
content.addEventListener("load", () => {
sendAsyncMessage("Test:JsonView:load");
/**
* Analogous for the standard "readystatechange" event of the document.
*/
content.document.addEventListener("readystatechange", () => {
sendAsyncMessage("Test:JsonView:DocReadyStateChange");
});
addMessageListener("Test:JsonView:GetElementCount", function (msg) {

View File

@ -26,17 +26,52 @@ registerCleanupFunction(() => {
* Add a new test tab in the browser and load the given url.
* @param {String} url
* The url to be loaded in the new tab.
* @param {Number} timeout [optional]
* The maximum number of milliseconds allowed before the initialization of the
* JSON Viewer once the tab has been loaded. If exceeded, the initialization
* will be considered to have failed, and the returned promise will be rejected.
* If this parameter is not passed or is negative, it will be ignored.
*
* @param {Object} [optional]
* An object with the following optional properties:
* - appReadyState: The readyState of the JSON Viewer app that you want to
* wait for. Its value can be one of:
* - "uninitialized": The converter has started the request.
* If JavaScript is disabled, there will be no more readyState changes.
* - "loading": RequireJS started loading the scripts for the JSON Viewer.
* If the load timeouts, there will be no more readyState changes.
* - "interactive": The JSON Viewer app loaded, but possibly not all the JSON
* data has been received.
* - "complete" (default): The app is fully loaded with all the JSON.
* - docReadyState: The standard readyState of the document that you want to
* wait for. Its value can be one of:
* - "loading": The JSON data has not been completely loaded (but the app might).
* - "interactive": All the JSON data has been received.
* - "complete" (default): Since there aren't sub-resources like images,
* behaves as "interactive". Note the app might not be loaded yet.
*/
async function addJsonViewTab(url, timeout = -1) {
info("Adding a new JSON tab with URL: '" + url + "'");
async function addJsonViewTab(url, {
appReadyState = "complete",
docReadyState = "complete",
} = {}) {
let docReadyStates = ["loading", "interactive", "complete"];
let docReadyIndex = docReadyStates.indexOf(docReadyState);
let appReadyStates = ["uninitialized", ...docReadyStates];
let appReadyIndex = appReadyStates.indexOf(appReadyState);
if (docReadyIndex < 0 || appReadyIndex < 0) {
throw new Error("Invalid app or doc readyState parameter.");
}
let tab = await addTab(url);
info("Adding a new JSON tab with URL: '" + url + "'");
let tabLoaded = addTab(url);
let tab = gBrowser.selectedTab;
let browser = tab.linkedBrowser;
await Promise.race([tabLoaded, new Promise(resolve => {
browser.webProgress.addProgressListener({
QueryInterface: XPCOMUtils.generateQI(["nsIWebProgressListener",
"nsISupportsWeakReference"]),
onLocationChange(webProgress) {
// Fires when the tab is ready but before completely loaded.
webProgress.removeProgressListener(this);
resolve();
},
}, Ci.nsIWebProgress.NOTIFY_LOCATION);
})]);
// Load devtools/shared/frame-script-utils.js
getFrameScript();
@ -47,32 +82,23 @@ async function addJsonViewTab(url, timeout = -1) {
browser.messageManager.loadFrameScript(frameScriptUrl, false);
// Check if there is a JSONView object.
if (!content.window.wrappedJSObject.JSONView) {
throw new Error("JSON Viewer did not load.");
let JSONView = content.window.wrappedJSObject.JSONView;
if (!JSONView) {
throw new Error("The JSON Viewer did not load.");
}
// Resolve if the JSONView is fully loaded.
if (content.window.wrappedJSObject.JSONView.initialized) {
return tab;
// Wait until the document readyState suffices.
let {document} = content.window;
while (docReadyStates.indexOf(document.readyState) < docReadyIndex) {
await waitForContentMessage("Test:JsonView:DocReadyStateChange");
}
// Otherwise wait for an initialization event, possibly with a time limit.
const onJSONViewInitialized =
waitForContentMessage("Test:JsonView:JSONViewInitialized")
.then(() => tab);
if (!(timeout >= 0)) {
return onJSONViewInitialized;
// Wait until the app readyState suffices.
while (appReadyStates.indexOf(JSONView.readyState) < appReadyIndex) {
await waitForContentMessage("Test:JsonView:AppReadyStateChange");
}
if (content.window.document.readyState !== "complete") {
await waitForContentMessage("Test:JsonView:load");
}
let onTimeout = new Promise((_, reject) =>
setTimeout(() => reject(new Error("JSON Viewer did not load.")), timeout));
return Promise.race([onJSONViewInitialized, onTimeout]);
return tab;
}
/**

View File

@ -7,6 +7,10 @@
"use strict";
// Send readyState change notification event to the window. It's useful for tests.
JSONView.readyState = "loading";
window.dispatchEvent(new CustomEvent("AppReadyStateChange"));
/**
* RequireJS configuration for JSON Viewer.
*

View File

@ -132,9 +132,6 @@ function* getSystemInfo() {
// The application's build ID/date, for example "2004051604".
appbuildid: appInfo.appBuildID,
// The application's changeset.
changeset: exports.getAppIniString("App", "SourceStamp"),
// The build ID/date of Gecko and the XULRunner platform.
platformbuildid: appInfo.platformBuildID,
geckobuildid: appInfo.platformBuildID,
@ -217,28 +214,6 @@ function getProfileLocation() {
}
}
function getAppIniString(section, key) {
let inifile = Services.dirsvc.get("GreD", Ci.nsIFile);
inifile.append("application.ini");
if (!inifile.exists()) {
inifile = Services.dirsvc.get("CurProcD", Ci.nsIFile);
inifile.append("application.ini");
}
if (!inifile.exists()) {
return undefined;
}
let iniParser = Cc["@mozilla.org/xpcom/ini-parser-factory;1"]
.getService(Ci.nsIINIParserFactory).createINIParser(inifile);
try {
return iniParser.getString(section, key);
} catch (e) {
return undefined;
}
}
/**
* Function for fetching screen dimensions and returning
* an enum for Telemetry.
@ -350,7 +325,6 @@ function getSetting(name) {
}
exports.getSystemInfo = Task.async(getSystemInfo);
exports.getAppIniString = getAppIniString;
exports.getSetting = getSetting;
exports.getScreenDimensions = getScreenDimensions;
exports.getOSCPU = getOSCPU;

View File

@ -924,17 +924,17 @@ nsDefaultURIFixup::KeywordURIFixup(const nsACString& aURIString,
}
nsAutoCString asciiHost;
nsAutoCString host;
nsAutoCString displayHost;
bool isValidAsciiHost =
bool isValidHost =
aFixupInfo->mFixedURI &&
NS_SUCCEEDED(aFixupInfo->mFixedURI->GetAsciiHost(asciiHost)) &&
!asciiHost.IsEmpty();
bool isValidHost =
bool isValidDisplayHost =
aFixupInfo->mFixedURI &&
NS_SUCCEEDED(aFixupInfo->mFixedURI->GetHost(host)) &&
!host.IsEmpty();
NS_SUCCEEDED(aFixupInfo->mFixedURI->GetDisplayHost(displayHost)) &&
!displayHost.IsEmpty();
nsresult rv = NS_OK;
// We do keyword lookups if a space or quote preceded the dot, colon
@ -946,10 +946,10 @@ nsDefaultURIFixup::KeywordURIFixup(const nsACString& aURIString,
firstQMarkLoc == 0) {
rv = TryKeywordFixupForURIInfo(aFixupInfo->mOriginalInput, aFixupInfo,
aPostData);
// ... or when the host is the same as asciiHost and there are no
// ... or when the asciiHost is the same as displayHost and there are no
// characters from [a-z][A-Z]
} else if (isValidAsciiHost && isValidHost && !hasAsciiAlpha &&
host.EqualsIgnoreCase(asciiHost.get())) {
} else if (isValidHost && isValidDisplayHost && !hasAsciiAlpha &&
asciiHost.EqualsIgnoreCase(displayHost.get())) {
if (!sDNSFirstForSingleWords) {
rv = TryKeywordFixupForURIInfo(aFixupInfo->mOriginalInput, aFixupInfo,
aPostData);
@ -962,7 +962,7 @@ nsDefaultURIFixup::KeywordURIFixup(const nsACString& aURIString,
firstDotLoc == aURIString.Length() - 1))) &&
firstColonLoc == uint32_t(kNotFound) &&
firstQMarkLoc == uint32_t(kNotFound)) {
if (isValidAsciiHost && IsDomainWhitelisted(asciiHost, firstDotLoc)) {
if (isValidHost && IsDomainWhitelisted(asciiHost, firstDotLoc)) {
return NS_OK;
}
@ -970,7 +970,7 @@ nsDefaultURIFixup::KeywordURIFixup(const nsACString& aURIString,
// this is a valid host:
if (firstDotLoc == uint32_t(kNotFound) &&
lastSlashLoc != uint32_t(kNotFound) &&
hasAsciiAlpha && isValidAsciiHost) {
hasAsciiAlpha && isValidHost) {
return NS_OK;
}

View File

@ -52,8 +52,10 @@ add_task(async function testExpiredCache() {
await BrowserTestUtils.browserLoaded(browser);
// Wait for 3 times of expiration timeout, hopefully it's evicted...
await new Promise(resolve => {
setTimeout(resolve, 3000);
await ContentTask.spawn(browser, null, () => {
return new Promise(resolve => {
content.setTimeout(resolve, 3000);
});
});
// Go back and verify text content.

View File

@ -482,7 +482,12 @@ var testcases = [ {
input: "plonk:8080",
fixedURI: "http://plonk:8080/",
protocolChange: true,
}
}, {
input: "\u10E0\u10D4\u10D2\u10D8\u10E1\u10E2\u10E0\u10D0\u10EA\u10D8\u10D0.\u10D2\u10D4",
fixedURI: "http://xn--lodaehvb5cdik4g.xn--node/",
alternateURI: "http://www.xn--lodaehvb5cdik4g.xn--node/",
protocolChange: true,
},
];
if (Services.appinfo.OS.toLowerCase().startsWith("win")) {

View File

@ -683,6 +683,12 @@ DOMInterfaces = {
'wrapperCache': False
},
'TransceiverImpl': {
'nativeType': 'mozilla::TransceiverImpl',
'headerFile': 'TransceiverImpl.h',
'wrapperCache': False
},
'Plugin': {
'headerFile' : 'nsPluginArray.h',
'nativeType': 'nsPluginElement',

View File

@ -64,7 +64,7 @@ public:
// Resume any downloads that have been suspended.
virtual void Resume() = 0;
// The mode is initially MODE_PLAYBACK.
// The mode is initially MODE_METADATA.
virtual void SetReadMode(MediaCacheStream::ReadMode aMode) = 0;
// Returns true if the resource can be seeked to unbuffered ranges, i.e.

View File

@ -255,9 +255,6 @@ ChannelMediaDecoder::Load(nsIChannel* aChannel,
rv = mResource->Open(aStreamListener);
NS_ENSURE_SUCCESS(rv, rv);
// Set mode to METADATA since we are about to read metadata.
mResource->SetReadMode(MediaCacheStream::MODE_METADATA);
SetStateMachine(CreateStateMachine());
NS_ENSURE_TRUE(GetStateMachine(), NS_ERROR_FAILURE);

View File

@ -427,6 +427,8 @@ protected:
// end
void Truncate();
void FlushInternal(AutoLock&);
// There is at most one file-backed media cache.
// It is owned by all MediaCacheStreams that use it.
// This is a raw pointer set by GetMediaCache(), and reset by ~MediaCache(),
@ -505,7 +507,6 @@ MediaCacheStream::MediaCacheStream(ChannelMediaResource* aClient,
, mStreamOffset(0)
, mPlaybackBytesPerSecond(10000)
, mPinCount(0)
, mCurrentMode(MODE_PLAYBACK)
, mMetadataInPartialBlockBuffer(false)
, mIsPrivateBrowsing(aIsPrivateBrowsing)
{
@ -695,13 +696,10 @@ MediaCacheStream::BlockList::NotifyBlockSwapped(int32_t aBlockIndex1,
}
void
MediaCache::Flush()
MediaCache::FlushInternal(AutoLock& aLock)
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
AutoLock lock(mMonitor);
for (uint32_t blockIndex = 0; blockIndex < mIndex.Length(); ++blockIndex) {
FreeBlock(lock, blockIndex);
FreeBlock(aLock, blockIndex);
}
// Truncate index array.
@ -711,6 +709,18 @@ MediaCache::Flush()
mBlockCache->Flush();
}
void
MediaCache::Flush()
{
MOZ_ASSERT(NS_IsMainThread());
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
"MediaCache::Flush", [self = RefPtr<MediaCache>(this)]() {
AutoLock lock(self->mMonitor);
self->FlushInternal(lock);
});
sThread->Dispatch(r.forget());
}
void
MediaCache::CloseStreamsForPrivateBrowsing()
{
@ -2500,12 +2510,16 @@ MediaCacheStream::GetNextCachedDataInternal(AutoLock&, int64_t aOffset)
void
MediaCacheStream::SetReadMode(ReadMode aMode)
{
// TODO: Assert non-main thread.
AutoLock lock(mMediaCache->Monitor());
if (aMode == mCurrentMode)
return;
mCurrentMode = aMode;
mMediaCache->QueueUpdate(lock);
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
"MediaCacheStream::SetReadMode",
[ this, client = RefPtr<ChannelMediaResource>(mClient), aMode ]() {
AutoLock lock(mMediaCache->Monitor());
if (!mClosed && mCurrentMode != aMode) {
mCurrentMode = aMode;
mMediaCache->QueueUpdate(lock);
}
});
OwnerThread()->Dispatch(r.forget());
}
void

View File

@ -523,7 +523,7 @@ private:
// when mDidNotifyDataEnded is true.
nsresult mNotifyDataEndedStatus;
// The last reported read mode
ReadMode mCurrentMode;
ReadMode mCurrentMode = MODE_METADATA;
// True if some data in mPartialBlockBuffer has been read as metadata
bool mMetadataInPartialBlockBuffer;
// The load ID of the current channel. Used to check whether the data is

View File

@ -25,6 +25,7 @@ const PC_STATS_CONTRACT = "@mozilla.org/dom/rtcstatsreport;1";
const PC_STATIC_CONTRACT = "@mozilla.org/dom/peerconnectionstatic;1";
const PC_SENDER_CONTRACT = "@mozilla.org/dom/rtpsender;1";
const PC_RECEIVER_CONTRACT = "@mozilla.org/dom/rtpreceiver;1";
const PC_TRANSCEIVER_CONTRACT = "@mozilla.org/dom/rtptransceiver;1";
const PC_COREQUEST_CONTRACT = "@mozilla.org/dom/createofferrequest;1";
const PC_DTMF_SENDER_CONTRACT = "@mozilla.org/dom/rtcdtmfsender;1";
@ -37,6 +38,7 @@ const PC_STATS_CID = Components.ID("{7fe6e18b-0da3-4056-bf3b-440ef3809e06}");
const PC_STATIC_CID = Components.ID("{0fb47c47-a205-4583-a9fc-cbadf8c95880}");
const PC_SENDER_CID = Components.ID("{4fff5d46-d827-4cd4-a970-8fd53977440e}");
const PC_RECEIVER_CID = Components.ID("{d974b814-8fde-411c-8c45-b86791b81030}");
const PC_TRANSCEIVER_CID = Components.ID("{09475754-103a-41f5-a2d0-e1f27eb0b537}");
const PC_COREQUEST_CID = Components.ID("{74b2122d-65a8-4824-aa9e-3d664cb75dc2}");
const PC_DTMF_SENDER_CID = Components.ID("{3610C242-654E-11E6-8EC0-6D1BE389A607}");
@ -148,9 +150,8 @@ class GlobalPCList {
let cleanupPcRef = function(pcref) {
let pc = pcref.get();
if (pc) {
pc._pc.close();
delete pc._observer;
pc._pc = null;
pc._suppressEvents = true;
pc.close();
}
};
@ -347,8 +348,8 @@ setupPrototype(RTCStatsReport, {
class RTCPeerConnection {
constructor() {
this._senders = [];
this._receivers = [];
this._receiveStreams = new Map();
this._transceivers = [];
this._pc = null;
this._closed = false;
@ -589,6 +590,18 @@ class RTCPeerConnection {
}
}
// This implements the fairly common "Queue a task" logic
async _queueTaskWithClosedCheck(func) {
return new this._win.Promise(resolve => {
Services.tm.dispatchToMainThread({ run() {
if (!this._closed) {
func();
resolve();
}
}});
});
}
/**
* An RTCConfiguration may look like this:
*
@ -690,7 +703,7 @@ class RTCPeerConnection {
dispatchEvent(event) {
// PC can close while events are firing if there is an async dispatch
// in c++ land. But let through "closed" signaling and ice connection events.
if (!this._closed || this._inClose) {
if (!this._suppressEvents) {
this.__DOM_IMPL__.dispatchEvent(event);
}
}
@ -757,11 +770,60 @@ class RTCPeerConnection {
}
createOffer(optionsOrOnSucc, onErr, options) {
// This entry-point handles both new and legacy call sig. Decipher which one
let onSuccess = null;
if (typeof optionsOrOnSucc == "function") {
return this._legacy(optionsOrOnSucc, onErr, () => this._createOffer(options));
onSuccess = optionsOrOnSucc;
} else {
options = optionsOrOnSucc;
}
// Spec language implies that this needs to happen as if it were called
// before createOffer, so we do this as early as possible.
this._ensureTransceiversForOfferToReceive(options);
// This entry-point handles both new and legacy call sig. Decipher which one
if (onSuccess) {
return this._legacy(onSuccess, onErr, () => this._createOffer(options));
}
return this._async(() => this._createOffer(options));
}
// Ensures that we have at least one transceiver of |kind| that is
// configured to receive. It will create one if necessary.
_ensureOfferToReceive(kind) {
let hasRecv = this._transceivers.some(
transceiver =>
transceiver.getKind() == kind &&
(transceiver.direction == "sendrecv" || transceiver.direction == "recvonly") &&
!transceiver.stopped);
if (!hasRecv) {
this._addTransceiverNoEvents(kind, {direction: "recvonly"});
}
}
// Handles offerToReceiveAudio/Video
_ensureTransceiversForOfferToReceive(options) {
if (options.offerToReceiveVideo) {
this._ensureOfferToReceive("video");
}
if (options.offerToReceiveVideo === false) {
this.logWarning("offerToReceiveVideo: false is ignored now. If you " +
"want to disallow a recv track, use " +
"RTCRtpTransceiver.direction");
}
if (options.offerToReceiveAudio) {
this._ensureOfferToReceive("audio");
}
if (options.offerToReceiveAudio === false) {
this.logWarning("offerToReceiveAudio: false is ignored now. If you " +
"want to disallow a recv track, use " +
"RTCRtpTransceiver.direction");
}
return this._async(() => this._createOffer(optionsOrOnSucc));
}
async _createOffer(options) {
@ -1066,77 +1128,161 @@ class RTCPeerConnection {
throw new this._win.DOMException("invalid stream.", "InvalidParameterError");
}
this._checkClosed();
this._senders.forEach(sender => {
if (sender.track == track) {
throw new this._win.DOMException("already added.",
"InvalidParameterError");
}
if (this._transceivers.some(
transceiver => transceiver.sender.track == track)) {
throw new this._win.DOMException("This track is already set on a sender.",
"InvalidAccessError");
}
let transceiver = this._transceivers.find(transceiver => {
return transceiver.sender.track == null &&
transceiver.getKind() == track.kind &&
!transceiver.stopped &&
!transceiver.hasBeenUsedToSend();
});
this._impl.addTrack(track, stream);
let sender = this._win.RTCRtpSender._create(this._win,
new RTCRtpSender(this, track,
stream));
this._senders.push(sender);
return sender;
if (transceiver) {
transceiver.sender.setTrack(track);
transceiver.sender.setStreams([stream]);
if (transceiver.direction == "recvonly") {
transceiver.setDirectionInternal("sendrecv");
} else if (transceiver.direction == "inactive") {
transceiver.setDirectionInternal("sendonly");
}
} else {
transceiver = this._addTransceiverNoEvents(track, {
streams: [stream],
direction: "sendrecv"
});
}
transceiver.setAddTrackMagic();
transceiver.sync();
this.updateNegotiationNeeded();
return transceiver.sender;
}
removeTrack(sender) {
this._checkClosed();
var i = this._senders.indexOf(sender);
if (i >= 0) {
this._senders.splice(i, 1);
this._impl.removeTrack(sender.track); // fires negotiation needed
sender.checkWasCreatedByPc(this.__DOM_IMPL__);
let transceiver =
this._transceivers.find(transceiver => transceiver.sender == sender);
// If the transceiver was removed due to rollback, let it slide.
if (!transceiver || !sender.track) {
return;
}
// TODO(bug 1401983): Move to TransceiverImpl?
this._impl.removeTrack(sender.track);
sender.setTrack(null);
if (transceiver.direction == "sendrecv") {
transceiver.setDirectionInternal("recvonly");
} else if (transceiver.direction == "sendonly") {
transceiver.setDirectionInternal("inactive");
}
transceiver.sync();
this.updateNegotiationNeeded();
}
_insertDTMF(sender, tones, duration, interToneGap) {
return this._impl.insertDTMF(sender.__DOM_IMPL__, tones, duration, interToneGap);
_addTransceiverNoEvents(sendTrackOrKind, init) {
let sendTrack = null;
let kind;
if (typeof(sendTrackOrKind) == "string") {
kind = sendTrackOrKind;
switch (kind) {
case "audio":
case "video":
break;
default:
throw new this._win.TypeError("Invalid media kind");
}
} else {
sendTrack = sendTrackOrKind;
kind = sendTrack.kind;
}
let transceiverImpl = this._impl.createTransceiverImpl(kind, sendTrack);
let transceiver = this._win.RTCRtpTransceiver._create(
this._win,
new RTCRtpTransceiver(this, transceiverImpl, init, kind, sendTrack));
transceiver.sync();
this._transceivers.push(transceiver);
return transceiver;
}
_onTransceiverNeeded(kind, transceiverImpl) {
let init = {direction: "recvonly"};
let transceiver = this._win.RTCRtpTransceiver._create(
this._win,
new RTCRtpTransceiver(this, transceiverImpl, init, kind, null));
transceiver.sync();
this._transceivers.push(transceiver);
}
addTransceiver(sendTrackOrKind, init) {
let transceiver = this._addTransceiverNoEvents(sendTrackOrKind, init);
this.updateNegotiationNeeded();
return transceiver;
}
_syncTransceivers() {
this._transceivers.forEach(transceiver => transceiver.sync());
}
updateNegotiationNeeded() {
if (this._closed || this.signalingState != "stable") {
return;
}
let negotiationNeeded = this._impl.checkNegotiationNeeded();
if (!negotiationNeeded) {
this._negotiationNeeded = false;
return;
}
if (this._negotiationNeeded) {
return;
}
this._negotiationNeeded = true;
this._queueTaskWithClosedCheck(() => {
if (this._negotiationNeeded) {
this.dispatchEvent(new this._win.Event("negotiationneeded"));
}
});
}
_getOrCreateStream(id) {
if (!this._receiveStreams.has(id)) {
let stream = new this._win.MediaStream();
stream.assignId(id);
// Legacy event, remove eventually
let ev = new this._win.MediaStreamEvent("addstream", { stream });
this.dispatchEvent(ev);
this._receiveStreams.set(id, stream);
}
return this._receiveStreams.get(id);
}
_insertDTMF(transceiverImpl, tones, duration, interToneGap) {
return this._impl.insertDTMF(transceiverImpl, tones, duration, interToneGap);
}
_getDTMFToneBuffer(sender) {
return this._impl.getDTMFToneBuffer(sender.__DOM_IMPL__);
}
async _replaceTrack(sender, withTrack) {
_replaceTrack(transceiverImpl, withTrack) {
this._checkClosed();
return this._chain(() => new Promise((resolve, reject) => {
this._onReplaceTrackSender = sender;
this._onReplaceTrackWithTrack = withTrack;
this._onReplaceTrackSuccess = resolve;
this._onReplaceTrackFailure = reject;
this._impl.replaceTrack(sender.track, withTrack);
}));
}
_setParameters({ track }, parameters) {
if (!Services.prefs.getBoolPref("media.peerconnection.simulcast")) {
return;
}
// validate parameters input
var encodings = parameters.encodings || [];
encodings.reduce((uniqueRids, { rid, scaleResolutionDownBy }) => {
if (scaleResolutionDownBy < 1.0) {
throw new this._win.RangeError("scaleResolutionDownBy must be >= 1.0");
}
if (!rid && encodings.length > 1) {
throw new this._win.DOMException("Missing rid", "TypeError");
}
if (uniqueRids[rid]) {
throw new this._win.DOMException("Duplicate rid", "TypeError");
}
uniqueRids[rid] = true;
return uniqueRids;
}, {});
this._impl.setParameters(track, parameters);
}
_getParameters({ track }) {
if (!Services.prefs.getBoolPref("media.peerconnection.simulcast")) {
return null;
}
return this._impl.getParameters(track);
this._impl.replaceTrackNoRenegotiation(transceiverImpl, withTrack);
}
close() {
@ -1144,30 +1290,37 @@ class RTCPeerConnection {
return;
}
this._closed = true;
this._inClose = true;
this.changeIceConnectionState("closed");
this._localIdp.close();
this._remoteIdp.close();
this._impl.close();
this._inClose = false;
this._suppressEvents = true;
delete this._pc;
delete this._observer;
}
getLocalStreams() {
this._checkClosed();
return this._impl.getLocalStreams();
let localStreams = new Set();
this._transceivers.forEach(transceiver => {
transceiver.sender.getStreams().forEach(stream => {
localStreams.add(stream);
});
});
return [...localStreams.values()];
}
getRemoteStreams() {
this._checkClosed();
return this._impl.getRemoteStreams();
return [...this._receiveStreams.values()];
}
getSenders() {
return this._senders;
return this.getTransceivers().map(transceiver => transceiver.sender);
}
getReceivers() {
return this._receivers;
return this.getTransceivers().map(transceiver => transceiver.receiver);
}
mozAddRIDExtension(receiver, extensionId) {
@ -1190,6 +1343,10 @@ class RTCPeerConnection {
this._impl.disablePacketDump(level, type, sending);
}
getTransceivers() {
return this._transceivers;
}
get localDescription() {
this._checkClosed();
let sdp = this._impl.localDescription;
@ -1327,9 +1484,17 @@ class RTCPeerConnection {
type = Ci.IPeerConnection.kDataChannelReliable;
}
// Synchronous since it doesn't block.
return this._impl.createDataChannel(label, protocol, type, ordered,
maxPacketLifeTime, maxRetransmits,
negotiated, id);
let dataChannel =
this._impl.createDataChannel(label, protocol, type, ordered,
maxPacketLifeTime, maxRetransmits,
negotiated, id);
// Spec says to only do this if this is the first DataChannel created,
// but the c++ code that does the "is negotiation needed" checking will
// only ever return true on the first one.
this.updateNegotiationNeeded();
return dataChannel;
}
}
setupPrototype(RTCPeerConnection, {
@ -1395,10 +1560,16 @@ class PeerConnectionObserver {
}
onSetLocalDescriptionSuccess() {
this._dompc._syncTransceivers();
this._negotiationNeeded = false;
this._dompc.updateNegotiationNeeded();
this._dompc._onSetLocalDescriptionSuccess();
}
onSetRemoteDescriptionSuccess() {
this._dompc._syncTransceivers();
this._negotiationNeeded = false;
this._dompc.updateNegotiationNeeded();
this._dompc._onSetRemoteDescriptionSuccess();
}
@ -1435,10 +1606,6 @@ class PeerConnectionObserver {
{ candidate }));
}
onNegotiationNeeded() {
this.dispatchEvent(new this._win.Event("negotiationneeded"));
}
// This method is primarily responsible for updating iceConnectionState.
// This state is defined in the WebRTC specification as follows:
//
@ -1558,51 +1725,51 @@ class PeerConnectionObserver {
this._dompc._onGetStatsFailure(this.newError(message, code));
}
onAddStream(stream) {
let ev = new this._dompc._win.MediaStreamEvent("addstream", { stream });
this.dispatchEvent(ev);
}
onRemoveStream(stream) {
this.dispatchEvent(new this._dompc._win.MediaStreamEvent("removestream",
{ stream }));
}
onAddTrack(track, streams) {
_getTransceiverWithRecvTrack(webrtcTrackId) {
return this._dompc.getTransceivers().find(
transceiver => transceiver.remoteTrackIdIs(webrtcTrackId));
}
onTrack(webrtcTrackId, streamIds) {
let pc = this._dompc;
let receiver = pc._win.RTCRtpReceiver._create(pc._win,
new RTCRtpReceiver(pc,
track));
pc._receivers.push(receiver);
let ev = new pc._win.RTCTrackEvent("track", { receiver, track, streams });
let matchingTransceiver = this._getTransceiverWithRecvTrack(webrtcTrackId);
// Get or create MediaStreams, and add the new track to them.
let streams = streamIds.map(id => this._dompc._getOrCreateStream(id));
streams.forEach(stream => {
stream.addTrack(matchingTransceiver.receiver.track);
// Adding tracks from JS does not result in the stream getting
// onaddtrack, so we need to do that here. The mediacapture spec says
// this needs to be queued, also.
pc._queueTaskWithClosedCheck(() => {
stream.dispatchEvent(
new pc._win.MediaStreamTrackEvent(
"addtrack", { track: matchingTransceiver.receiver.track }));
});
});
let ev = new pc._win.RTCTrackEvent("track", {
receiver: matchingTransceiver.receiver,
track: matchingTransceiver.receiver.track,
streams,
transceiver: matchingTransceiver });
this.dispatchEvent(ev);
// Fire legacy event as well for a little bit.
ev = new pc._win.MediaStreamTrackEvent("addtrack", { track });
ev = new pc._win.MediaStreamTrackEvent("addtrack",
{ track: matchingTransceiver.receiver.track });
this.dispatchEvent(ev);
}
onRemoveTrack(track) {
let pc = this._dompc;
let i = pc._receivers.findIndex(receiver => receiver.track == track);
if (i >= 0) {
pc._receivers.splice(i, 1);
}
}
onReplaceTrackSuccess() {
var pc = this._dompc;
pc._onReplaceTrackSender.track = pc._onReplaceTrackWithTrack;
pc._onReplaceTrackWithTrack = null;
pc._onReplaceTrackSender = null;
pc._onReplaceTrackSuccess();
}
onReplaceTrackError(code, message) {
var pc = this._dompc;
pc._onReplaceTrackWithTrack = null;
pc._onReplaceTrackSender = null;
pc._onReplaceTrackFailure(this.newError(message, code));
onTransceiverNeeded(kind, transceiverImpl) {
this._dompc._onTransceiverNeeded(kind, transceiverImpl);
}
notifyDataChannel(channel) {
@ -1610,9 +1777,9 @@ class PeerConnectionObserver {
{ channel }));
}
onDTMFToneChange(trackId, tone) {
onDTMFToneChange(track, tone) {
var pc = this._dompc;
var sender = pc._senders.find(({track}) => track.id == trackId);
var sender = pc.getSenders().find(sender => sender.track == track);
sender.dtmf.dispatchEvent(new pc._win.RTCDTMFToneChangeEvent("tonechange",
{ tone }));
}
@ -1623,6 +1790,10 @@ class PeerConnectionObserver {
pc._onPacket(level, type, sending, packet);
}
}
syncTransceivers() {
this._dompc._syncTransceivers();
}
}
setupPrototype(PeerConnectionObserver, {
classID: PC_OBS_CID,
@ -1667,23 +1838,7 @@ class RTCDTMFSender {
insertDTMF(tones, duration, interToneGap) {
this._sender._pc._checkClosed();
if (this._sender._pc._senders.indexOf(this._sender.__DOM_IMPL__) == -1) {
throw new this._sender._pc._win.DOMException("RTCRtpSender is stopped",
"InvalidStateError");
}
duration = Math.max(40, Math.min(duration, 6000));
if (interToneGap < 30) interToneGap = 30;
tones = tones.toUpperCase();
if (tones.match(/[^0-9A-D#*,]/)) {
throw new this._sender._pc._win.DOMException("Invalid DTMF characters",
"InvalidCharacterError");
}
this._sender._pc._insertDTMF(this._sender, tones, duration, interToneGap);
this._sender._transceiver.insertDTMF(tones, duration, interToneGap);
}
}
setupPrototype(RTCDTMFSender, {
@ -1693,28 +1848,132 @@ setupPrototype(RTCDTMFSender, {
});
class RTCRtpSender {
constructor(pc, track, stream) {
let dtmf = pc._win.RTCDTMFSender._create(pc._win, new RTCDTMFSender(this));
Object.assign(this, { _pc: pc, track, _stream: stream, dtmf });
constructor(pc, transceiverImpl, transceiver, track, streams) {
let dtmf = pc._win.RTCDTMFSender._create(
pc._win, new RTCDTMFSender(this));
Object.assign(this, {
_pc: pc,
_transceiverImpl: transceiverImpl,
_transceiver: transceiver,
track,
_streams: streams,
dtmf });
}
replaceTrack(withTrack) {
return this._pc._async(() => this._pc._replaceTrack(this, withTrack));
// async functions in here return a chrome promise, which is not something
// content can use. This wraps that promise in something content can use.
return this._pc._win.Promise.resolve(this._replaceTrack(withTrack));
}
async _replaceTrack(withTrack) {
this._pc._checkClosed();
if (this._transceiver.stopped) {
throw new this._pc._win.DOMException(
"Cannot call replaceTrack when transceiver is stopped",
"InvalidStateError");
}
if (withTrack && (withTrack.kind != this._transceiver.getKind())) {
throw new this._pc._win.DOMException(
"Cannot replaceTrack with a different kind!",
"TypeError");
}
// Updates the track on the MediaPipeline; this is needed whether or not
// we've associated this transceiver, the spec language notwithstanding.
// Synchronous, and will throw on failure.
this._pc._replaceTrack(this._transceiverImpl, withTrack);
let setTrack = () => {
this.track = withTrack;
this._transceiver.sync();
};
// Spec is a little weird here; we only queue if the transceiver was
// associated, otherwise we update the track synchronously.
if (this._transceiver.mid == null) {
setTrack();
} else {
// We're supposed to queue a task if the transceiver is associated
await this._pc._queueTaskWithClosedCheck(setTrack);
}
}
setParameters(parameters) {
return this._pc._win.Promise.resolve()
.then(() => this._pc._setParameters(this, parameters));
return this._pc._win.Promise.resolve(this._setParameters(parameters));
}
async _setParameters(parameters) {
this._pc._checkClosed();
if (this._transceiver.stopped) {
throw new this._pc._win.DOMException(
"This sender's transceiver is stopped", "InvalidStateError");
}
if (!Services.prefs.getBoolPref("media.peerconnection.simulcast")) {
return;
}
parameters.encodings = parameters.encodings || [];
parameters.encodings.reduce((uniqueRids, { rid, scaleResolutionDownBy }) => {
if (scaleResolutionDownBy < 1.0) {
throw new this._pc._win.RangeError("scaleResolutionDownBy must be >= 1.0");
}
if (!rid && parameters.encodings.length > 1) {
throw new this._pc._win.DOMException("Missing rid", "TypeError");
}
if (uniqueRids[rid]) {
throw new this._pc._win.DOMException("Duplicate rid", "TypeError");
}
uniqueRids[rid] = true;
return uniqueRids;
}, {});
// TODO(bug 1401592): transaction ids, timing changes
await this._pc._queueTaskWithClosedCheck(() => {
this.parameters = parameters;
this._transceiver.sync();
});
}
getParameters() {
return this._pc._getParameters(this);
// TODO(bug 1401592): transaction ids
// All the other stuff that the spec says to update is handled when
// transceivers are synced.
return this.parameters;
}
setStreams(streams) {
this._streams = streams;
}
getStreams() {
return this._streams;
}
setTrack(track) {
this.track = track;
}
getStats() {
return this._pc._async(
async () => this._pc._getStats(this.track));
}
checkWasCreatedByPc(pc) {
if (pc != this._pc.__DOM_IMPL__) {
throw new this._pc._win.DOMException(
"This sender was not created by this PeerConnection",
"InvalidAccessError");
}
}
}
setupPrototype(RTCRtpSender, {
classID: PC_SENDER_CID,
@ -1723,10 +1982,18 @@ setupPrototype(RTCRtpSender, {
});
class RTCRtpReceiver {
constructor(pc, track) {
Object.assign(this, { _pc: pc, track });
constructor(pc, transceiverImpl) {
// We do not set the track here; that is done when _transceiverImpl is set
Object.assign(this,
{
_pc: pc,
_transceiverImpl: transceiverImpl,
track: transceiverImpl.getReceiveTrack()
});
}
// TODO(bug 1401983): Create a getStats binding on TransceiverImpl, and use
// that here.
getStats() {
return this._pc._async(
async () => this._pc.getStats(this.track));
@ -1738,6 +2005,175 @@ setupPrototype(RTCRtpReceiver, {
QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports])
});
class RTCRtpTransceiver {
constructor(pc, transceiverImpl, init, kind, sendTrack) {
let receiver = pc._win.RTCRtpReceiver._create(
pc._win, new RTCRtpReceiver(pc, transceiverImpl, kind));
let streams = (init && init.streams) || [];
let sender = pc._win.RTCRtpSender._create(
pc._win, new RTCRtpSender(pc, transceiverImpl, this, sendTrack, streams));
let direction = (init && init.direction) || "sendrecv";
Object.assign(this,
{
_pc: pc,
mid: null,
sender,
receiver,
stopped: false,
_direction: direction,
currentDirection: null,
_remoteTrackId: null,
addTrackMagic: false,
_hasBeenUsedToSend: false,
// the receiver starts out without a track, so record this here
_kind: kind,
_transceiverImpl: transceiverImpl
});
}
set direction(direction) {
this._pc._checkClosed();
if (this.stopped) {
throw new this._pc._win.DOMException("Transceiver is stopped!",
"InvalidStateError");
}
if (this._direction == direction) {
return;
}
this._direction = direction;
this.sync();
this._pc.updateNegotiationNeeded();
}
get direction() {
return this._direction;
}
setDirectionInternal(direction) {
this._direction = direction;
}
stop() {
if (this.stopped) {
return;
}
this._pc._checkClosed();
this.setStopped();
this.sync();
this._pc.updateNegotiationNeeded();
}
setStopped() {
this.stopped = true;
this.currentDirection = null;
}
remove() {
var index = this._pc._transceivers.indexOf(this.__DOM_IMPL__);
if (index != -1) {
this._pc._transceivers.splice(index, 1);
}
}
getKind() {
return this._kind;
}
hasBeenUsedToSend() {
return this._hasBeenUsedToSend;
}
setRemoteTrackId(webrtcTrackId) {
this._remoteTrackId = webrtcTrackId;
}
remoteTrackIdIs(webrtcTrackId) {
return this._remoteTrackId == webrtcTrackId;
}
getRemoteTrackId() {
return this._remoteTrackId;
}
setAddTrackMagic() {
this.addTrackMagic = true;
}
sync() {
if (this._syncing) {
throw new DOMException("Reentrant sync! This is a bug!", "InternalError");
}
this._syncing = true;
this._transceiverImpl.syncWithJS(this.__DOM_IMPL__);
this._syncing = false;
}
// Used by _transceiverImpl.syncWithJS, don't call sync again!
setCurrentDirection(direction) {
if (this.stopped) {
return;
}
switch (direction) {
case "sendrecv":
case "sendonly":
this._hasBeenUsedToSend = true;
break;
default:
}
this.currentDirection = direction;
}
// Used by _transceiverImpl.syncWithJS, don't call sync again!
setMid(mid) {
this.mid = mid;
}
// Used by _transceiverImpl.syncWithJS, don't call sync again!
unsetMid() {
this.mid = null;
}
insertDTMF(tones, duration, interToneGap) {
if (this.stopped) {
throw new this._pc._win.DOMException("Transceiver is stopped!",
"InvalidStateError");
}
if (!this.sender.track) {
throw new this._pc._win.DOMException("RTCRtpSender has no track",
"InvalidStateError");
}
duration = Math.max(40, Math.min(duration, 6000));
if (interToneGap < 30) interToneGap = 30;
tones = tones.toUpperCase();
if (tones.match(/[^0-9A-D#*,]/)) {
throw new this._pc._win.DOMException("Invalid DTMF characters",
"InvalidCharacterError");
}
// TODO (bug 1401983): Move this API to TransceiverImpl so we don't need the
// extra hops through RTCPeerConnection and PeerConnectionImpl
this._pc._insertDTMF(this._transceiverImpl, tones, duration, interToneGap);
}
}
// XPCOM registration boilerplate for RTCRtpTransceiver (class ID,
// contract ID, QueryInterface); setupPrototype is defined elsewhere in
// this file and is called the same way for the other PC classes.
setupPrototype(RTCRtpTransceiver, {
classID: PC_TRANSCEIVER_CID,
contractID: PC_TRANSCEIVER_CONTRACT,
QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports])
});
class CreateOfferRequest {
constructor(windowID, innerWindowID, callID, isSecure) {
Object.assign(this, { windowID, innerWindowID, callID, isSecure });
@ -1758,6 +2194,7 @@ this.NSGetFactory = XPCOMUtils.generateNSGetFactory(
RTCPeerConnectionStatic,
RTCRtpReceiver,
RTCRtpSender,
RTCRtpTransceiver,
RTCStatsReport,
PeerConnectionObserver,
CreateOfferRequest]

View File

@ -541,7 +541,7 @@ MP3TrackDemuxer::FindNextFrame()
BufferReader reader(buffer, read);
uint32_t bytesToSkip = 0;
auto res = mParser.Parse(&reader, &bytesToSkip);
foundFrame = res.isOk() ? res.unwrap() : false;
foundFrame = res.unwrapOr(false);
frameHeaderOffset =
mOffset + reader.Offset() - FrameParser::FrameHeader::SIZE;

View File

@ -811,10 +811,10 @@ H264::GetFrameType(const mozilla::MediaRawData* aSample)
while (reader.Remaining() >= nalLenSize) {
uint32_t nalLen = 0;
switch (nalLenSize) {
case 1: Unused << reader.ReadU8().map([&] (uint8_t x) mutable { return nalLen = x; }); break;
case 2: Unused << reader.ReadU16().map([&] (uint16_t x) mutable { return nalLen = x; }); break;
case 3: Unused << reader.ReadU24().map([&] (uint32_t x) mutable { return nalLen = x; }); break;
case 4: Unused << reader.ReadU32().map([&] (uint32_t x) mutable { return nalLen = x; }); break;
case 1: nalLen = reader.ReadU8().unwrapOr(0); break;
case 2: nalLen = reader.ReadU16().unwrapOr(0); break;
case 3: nalLen = reader.ReadU24().unwrapOr(0); break;
case 4: nalLen = reader.ReadU32().unwrapOr(0); break;
}
if (!nalLen) {
continue;

View File

@ -319,6 +319,7 @@ function setupEnvironment() {
['media.peerconnection.identity.timeout', 120000],
['media.peerconnection.ice.stun_client_maximum_transmits', 14],
['media.peerconnection.ice.trickle_grace_period', 30000],
['media.peerconnection.remoteTrackId.enabled', true],
['media.navigator.permission.disabled', true],
['media.navigator.streams.fake', FAKE_ENABLED],
['media.getusermedia.screensharing.enabled', true],

View File

@ -98,6 +98,8 @@ skip-if = toolkit == 'android' # no screenshare or windowshare on android
skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_audioCodecs.html]
skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_transceivers.html]
skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_basicAudio.html]
skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_checkPacketDumpHook.html]
@ -128,6 +130,8 @@ skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emula
skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_basicAudioVideoNoRtcpMux.html]
skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_basicAudioVideoTransceivers.html]
skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_basicVideo.html]
skip-if = (android_version == '18' && debug) # android(Bug 1189784, timeouts on 4.3 emulator)
[test_peerConnection_basicVideoVerifyRtpHeaderExtensions.html]

View File

@ -150,7 +150,7 @@ PeerConnectionTest.prototype.closePC = function() {
return haveEvent(receiver.track, "ended", wait(50000))
.then(event => {
is(event.target, receiver.track, "Event target should be the correct track");
info("ended fired for track " + receiver.track.id);
info(pc + " ended fired for track " + receiver.track.id);
}, e => e ? Promise.reject(e)
: ok(false, "ended never fired for track " +
receiver.track.id));
@ -761,8 +761,10 @@ function PeerConnectionWrapper(label, configuration) {
this.remoteMediaElements = [];
this.audioElementsOnly = false;
this._sendStreams = [];
this.expectedLocalTrackInfoById = {};
this.expectedRemoteTrackInfoById = {};
this.expectedSignalledTrackInfoById = {};
this.observedRemoteTrackInfoById = {};
this.disableRtpCountChecking = false;
@ -875,12 +877,28 @@ PeerConnectionWrapper.prototype = {
this._pc.setIdentityProvider(provider, protocol, identity);
},
ensureMediaElement : function(track, direction) {
const idPrefix = [this.label, direction].join('_');
var element = getMediaElementForTrack(track, idPrefix);
elementPrefix : direction =>
{
return [this.label, direction].join('_');
},
getMediaElementForTrack : function (track, direction)
{
var prefix = this.elementPrefix(direction);
return getMediaElementForTrack(track, prefix);
},
createMediaElementForTrack : function(track, direction)
{
var prefix = this.elementPrefix(direction);
return createMediaElementForTrack(track, prefix);
},
ensureMediaElement : function(track, direction) {
var prefix = this.elementPrefix(direction);
var element = this.getMediaElementForTrack(track, direction);
if (!element) {
element = createMediaElementForTrack(track, idPrefix);
element = this.createMediaElementForTrack(track, direction);
if (direction == "local") {
this.localMediaElements.push(element);
} else if (direction == "remote") {
@ -895,6 +913,24 @@ PeerConnectionWrapper.prototype = {
element.play();
},
addSendStream : function(stream)
{
// The PeerConnection will not necessarily know about this stream
// automatically, because replaceTrack is not told about any streams the
// new track might be associated with. Only content really knows.
this._sendStreams.push(stream);
},
getStreamForSendTrack : function(track)
{
return this._sendStreams.find(str => str.getTrackById(track.id));
},
getStreamForRecvTrack : function(track)
{
return this._pc.getRemoteStreams().find(s => !!s.getTrackById(track.id));
},
/**
* Attaches a local track to this RTCPeerConnection using
* RTCPeerConnection.addTrack().
@ -921,6 +957,10 @@ PeerConnectionWrapper.prototype = {
type: track.kind,
streamId: stream.id,
};
this.expectedSignalledTrackInfoById[track.id] =
this.expectedLocalTrackInfoById[track.id];
this.addSendStream(stream);
// This will create one media element per track, which might not be how
// we set up things with the RTCPeerConnection. It's the only way
@ -937,13 +977,20 @@ PeerConnectionWrapper.prototype = {
* @param {MediaStream} stream
* Media stream to handle
*/
attachLocalStream : function(stream) {
attachLocalStream : function(stream, useAddTransceiver) {
info("Got local media stream: (" + stream.id + ")");
this.expectNegotiationNeeded();
if (useAddTransceiver) {
info("Using addTransceiver (on PC).");
stream.getTracks().forEach(track => {
var transceiver = this._pc.addTransceiver(track, {streams: [stream]});
is(transceiver.sender.track, track, "addTransceiver returns sender");
});
}
// In order to test both the addStream and addTrack APIs, we do half one
// way, half the other, at random.
if (Math.random() < 0.5) {
else if (Math.random() < 0.5) {
info("Using addStream.");
this._pc.addStream(stream);
ok(this._pc.getSenders().find(sender => sender.track == stream.getTracks()[0]),
@ -956,6 +1003,8 @@ PeerConnectionWrapper.prototype = {
});
}
this.addSendStream(stream);
stream.getTracks().forEach(track => {
ok(track.id, "track has id");
ok(track.kind, "track has kind");
@ -963,8 +1012,12 @@ PeerConnectionWrapper.prototype = {
type: track.kind,
streamId: stream.id
};
this.expectedSignalledTrackInfoById[track.id] =
this.expectedLocalTrackInfoById[track.id];
this.ensureMediaElement(track, "local");
});
return this.observedNegotiationNeeded;
},
removeSender : function(index) {
@ -975,16 +1028,34 @@ PeerConnectionWrapper.prototype = {
return this.observedNegotiationNeeded;
},
senderReplaceTrack : function(index, withTrack, withStreamId) {
var sender = this._pc.getSenders()[index];
senderReplaceTrack : function(sender, withTrack, stream) {
delete this.expectedLocalTrackInfoById[sender.track.id];
this.expectedLocalTrackInfoById[withTrack.id] = {
type: withTrack.kind,
streamId: withStreamId
streamId: stream.id
};
this.addSendStream(stream);
this.ensureMediaElement(withTrack, 'local');
return sender.replaceTrack(withTrack);
},
getUserMedia : async function(constraints) {
var stream = await getUserMedia(constraints);
if (constraints.audio) {
stream.getAudioTracks().forEach(track => {
info(this + " gUM local stream " + stream.id +
" with audio track " + track.id);
});
}
if (constraints.video) {
stream.getVideoTracks().forEach(track => {
info(this + " gUM local stream " + stream.id +
" with video track " + track.id);
});
}
return stream;
},
/**
* Requests all the media streams as specified in the constrains property.
*
@ -998,23 +1069,25 @@ PeerConnectionWrapper.prototype = {
}
info("Get " + constraintsList.length + " local streams");
return Promise.all(constraintsList.map(constraints => {
return getUserMedia(constraints).then(stream => {
if (constraints.audio) {
stream.getAudioTracks().forEach(track => {
info(this + " gUM local stream " + stream.id +
" with audio track " + track.id);
});
}
if (constraints.video) {
stream.getVideoTracks().forEach(track => {
info(this + " gUM local stream " + stream.id +
" with video track " + track.id);
});
}
return this.attachLocalStream(stream);
});
}));
return Promise.all(
constraintsList.map(constraints => this.getUserMedia(constraints))
);
},
getAllUserMediaAndAddStreams : async function(constraintsList) {
var streams = await this.getAllUserMedia(constraintsList);
if (!streams) {
return;
}
return Promise.all(streams.map(stream => this.attachLocalStream(stream)));
},
getAllUserMediaAndAddTransceivers : async function(constraintsList) {
var streams = await this.getAllUserMedia(constraintsList);
if (!streams) {
return;
}
return Promise.all(streams.map(stream => this.attachLocalStream(stream, true)));
},
/**
@ -1164,34 +1237,55 @@ PeerConnectionWrapper.prototype = {
* is of the correct type. Then, moves the track from
* |expectedTrackInfoById| to |observedTrackInfoById|.
*/
checkTrackIsExpected : function(track,
checkTrackIsExpected : function(trackId,
kind,
expectedTrackInfoById,
observedTrackInfoById) {
ok(expectedTrackInfoById[track.id], "track id " + track.id + " was expected");
ok(!observedTrackInfoById[track.id], "track id " + track.id + " was not yet observed");
var observedKind = track.kind;
var expectedKind = expectedTrackInfoById[track.id].type;
ok(expectedTrackInfoById[trackId], "track id " + trackId + " was expected");
ok(!observedTrackInfoById[trackId], "track id " + trackId + " was not yet observed");
var observedKind = kind;
var expectedKind = expectedTrackInfoById[trackId].type;
is(observedKind, expectedKind,
"track id " + track.id + " was of kind " +
"track id " + trackId + " was of kind " +
observedKind + ", which matches " + expectedKind);
observedTrackInfoById[track.id] = expectedTrackInfoById[track.id];
observedTrackInfoById[trackId] = expectedTrackInfoById[trackId];
},
isTrackOnPC: function(track) {
return this._pc.getRemoteStreams().some(s => !!s.getTrackById(track.id));
return !!this.getStreamForRecvTrack(track);
},
allExpectedTracksAreObserved: function(expected, observed) {
return Object.keys(expected).every(trackId => observed[trackId]);
},
getWebrtcTrackId: function(receiveTrack) {
let matchingTransceiver = this._pc.getTransceivers().find(
transceiver => transceiver.receiver.track == receiveTrack);
if (!matchingTransceiver) {
return null;
}
return matchingTransceiver.getRemoteTrackId();
},
setupTrackEventHandler: function() {
this._pc.addEventListener('track', event => {
info(this + ": 'ontrack' event fired for " + JSON.stringify(event.track));
info(this + ": 'ontrack' event fired for " + event.track.id +
"(SDP msid is " + this.getWebrtcTrackId(event.track) +
")");
this.checkTrackIsExpected(event.track,
this.expectedRemoteTrackInfoById,
this.observedRemoteTrackInfoById);
// TODO(bug 1403238): Checking for remote tracks needs to be completely
// reworked, because with the latest spec the identifiers aren't the same
// as they are on the other end. Ultimately, what we need to check is
// whether the _transceivers_ are in line with what is expected, and
// whether the callbacks are consistent with the transceivers.
let trackId = this.getWebrtcTrackId(event.track);
ok(!this.observedRemoteTrackInfoById[trackId],
"track id " + trackId + " was not yet observed");
this.observedRemoteTrackInfoById[trackId] = {
type: event.track.kind
};
ok(this.isTrackOnPC(event.track), "Found track " + event.track.id);
this.ensureMediaElement(event.track, 'remote');
@ -1324,7 +1418,12 @@ PeerConnectionWrapper.prototype = {
var observed = {};
info(this + " Checking local tracks " + JSON.stringify(this.expectedLocalTrackInfoById));
this._pc.getSenders().forEach(sender => {
this.checkTrackIsExpected(sender.track, this.expectedLocalTrackInfoById, observed);
if (sender.track) {
this.checkTrackIsExpected(sender.track.id,
sender.track.kind,
this.expectedLocalTrackInfoById,
observed);
}
});
Object.keys(this.expectedLocalTrackInfoById).forEach(
@ -1336,15 +1435,6 @@ PeerConnectionWrapper.prototype = {
*/
checkMediaTracks : function() {
this.checkLocalMediaTracks();
info(this + " Checking remote tracks " +
JSON.stringify(this.expectedRemoteTrackInfoById));
ok(this.allExpectedTracksAreObserved(this.expectedRemoteTrackInfoById,
this.observedRemoteTrackInfoById),
"All expected tracks have been observed"
+ "\nexpected: " + JSON.stringify(this.expectedRemoteTrackInfoById)
+ "\nobserved: " + JSON.stringify(this.observedRemoteTrackInfoById));
},
checkMsids: function() {
@ -1357,10 +1447,8 @@ PeerConnectionWrapper.prototype = {
});
};
checkSdpForMsids(this.localDescription, this.expectedLocalTrackInfoById,
checkSdpForMsids(this.localDescription, this.expectedSignalledTrackInfoById,
"local");
checkSdpForMsids(this.remoteDescription, this.expectedRemoteTrackInfoById,
"remote");
},
markRemoteTracksAsNegotiated: function() {
@ -1461,6 +1549,39 @@ PeerConnectionWrapper.prototype = {
+ " after at least" + timeout + "ms");
},
getExpectedActiveReceiveTracks : function() {
return this._pc.getTransceivers()
.filter(t => {
return !t.stopped &&
t.currentDirection &&
(t.currentDirection != "inactive") &&
(t.currentDirection != "sendonly");
})
.map(t => {
info("Found transceiver that should be receiving RTP: mid=" + t.mid +
" currentDirection=" + t.currentDirection + " kind=" +
t.receiver.track.kind + " track-id=" + t.receiver.track.id);
return t.receiver.track;
});
},
getExpectedSendTracks : function() {
return Object.keys(this.expectedLocalTrackInfoById)
.map(id => this.findSendTrackByWebrtcId(id));
},
findReceiveTrackByWebrtcId : function(webrtcId) {
return this._pc.getReceivers().map(receiver => receiver.track)
.find(track => this.getWebrtcTrackId(track) == webrtcId);
},
// Send tracks use the same identifiers that go in the signaling
findSendTrackByWebrtcId : function(webrtcId) {
return this._pc.getSenders().map(sender => sender.track)
.filter(track => track) // strip out null
.find(track => track.id == webrtcId);
},
/**
* Wait for presence of video flow on all media elements and rtp flow on
* all sending and receiving track involved in this test.
@ -1471,12 +1592,13 @@ PeerConnectionWrapper.prototype = {
waitForMediaFlow : function() {
return Promise.all([].concat(
this.localMediaElements.map(element => this.waitForMediaElementFlow(element)),
Object.keys(this.expectedRemoteTrackInfoById)
.map(id => this.remoteMediaElements
.find(e => e.srcObject.getTracks().some(t => t.id == id)))
.map(e => this.waitForMediaElementFlow(e)),
this._pc.getSenders().map(sender => this.waitForRtpFlow(sender.track)),
this._pc.getReceivers().map(receiver => this.waitForRtpFlow(receiver.track))));
this.remoteMediaElements.filter(elem =>
this.getExpectedActiveReceiveTracks()
.some(track => elem.srcObject.getTracks().some(t => t == track))
)
.map(elem => this.waitForMediaElementFlow(elem)),
this.getExpectedActiveReceiveTracks().map(track => this.waitForRtpFlow(track)),
this.getExpectedSendTracks().map(track => this.waitForRtpFlow(track))));
},
async waitForSyncedRtcp() {
@ -1522,59 +1644,90 @@ PeerConnectionWrapper.prototype = {
/**
* Check that correct audio (typically a flat tone) is flowing to this
* PeerConnection. Uses WebAudio AnalyserNodes to compare input and output
* audio data in the frequency domain.
* PeerConnection for each transceiver that should be receiving. Uses
* WebAudio AnalyserNodes to compare input and output audio data in the
* frequency domain.
*
* @param {object} from
* A PeerConnectionWrapper whose audio RTPSender we use as source for
* the audio flow check.
* @returns {Promise}
* A promise that resolves when we're receiving the tone from |from|.
* A promise that resolves when we're receiving the tone/s from |from|.
*/
checkReceivingToneFrom : async function(audiocontext, from,
cancel = wait(60000, new Error("Tone not detected"))) {
let inputElem = from.localMediaElements[0];
let localTransceivers = this._pc.getTransceivers()
.filter(t => t.mid)
.filter(t => t.receiver.track.kind == "audio")
.sort((t1, t2) => t1.mid < t2.mid);
let remoteTransceivers = from._pc.getTransceivers()
.filter(t => t.mid)
.filter(t => t.receiver.track.kind == "audio")
.sort((t1, t2) => t1.mid < t2.mid);
// As input we use the stream of |from|'s first available audio sender.
let inputSenderTracks = from._pc.getSenders().map(sn => sn.track);
let inputAudioStream = from._pc.getLocalStreams()
.find(s => inputSenderTracks.some(t => t.kind == "audio" && s.getTrackById(t.id)));
let inputAnalyser = new AudioStreamAnalyser(audiocontext, inputAudioStream);
is(localTransceivers.length, remoteTransceivers.length,
"Same number of associated audio transceivers on remote and local.");
// It would have been nice to have a working getReceivers() here, but until
// we do, let's use what remote streams we have.
let outputAudioStream = this._pc.getRemoteStreams()
.find(s => s.getAudioTracks().length > 0);
let outputAnalyser = new AudioStreamAnalyser(audiocontext, outputAudioStream);
for (let i = 0; i < localTransceivers.length; i++) {
is(localTransceivers[i].mid, remoteTransceivers[i].mid,
"Transceivers at index " + i + " have the same mid.");
let error = null;
cancel.then(e => error = e);
let indexOfMax = data =>
data.reduce((max, val, i) => (val >= data[max]) ? i : max, 0);
await outputAnalyser.waitForAnalysisSuccess(() => {
if (error) {
throw error;
if (!remoteTransceivers[i].sender.track) {
continue;
}
let inputData = inputAnalyser.getByteFrequencyData();
let outputData = outputAnalyser.getByteFrequencyData();
let inputMax = indexOfMax(inputData);
let outputMax = indexOfMax(outputData);
info(`Comparing maxima; input[${inputMax}] = ${inputData[inputMax]},`
+ ` output[${outputMax}] = ${outputData[outputMax]}`);
if (!inputData[inputMax] || !outputData[outputMax]) {
return false;
if (remoteTransceivers[i].currentDirection == "recvonly" ||
remoteTransceivers[i].currentDirection == "inactive") {
continue;
}
// When the input and output maxima are within reasonable distance (2% of
// total length, which means ~10 for length 512) from each other, we can
// be sure that the input tone has made it through the peer connection.
info(`input data length: ${inputData.length}`);
return Math.abs(inputMax - outputMax) < (inputData.length * 0.02);
});
let sendTrack = remoteTransceivers[i].sender.track;
let inputElem = from.getMediaElementForTrack(sendTrack, "local");
ok(inputElem,
"Remote wrapper should have a media element for track id " +
sendTrack.id);
let inputAudioStream = from.getStreamForSendTrack(sendTrack);
ok(inputAudioStream,
"Remote wrapper should have a stream for track id " + sendTrack.id);
let inputAnalyser =
new AudioStreamAnalyser(audiocontext, inputAudioStream);
let recvTrack = localTransceivers[i].receiver.track;
let outputAudioStream = this.getStreamForRecvTrack(recvTrack);
ok(outputAudioStream,
"Local wrapper should have a stream for track id " + recvTrack.id);
let outputAnalyser =
new AudioStreamAnalyser(audiocontext, outputAudioStream);
let error = null;
cancel.then(e => error = e);
let indexOfMax = data =>
data.reduce((max, val, i) => (val >= data[max]) ? i : max, 0);
await outputAnalyser.waitForAnalysisSuccess(() => {
if (error) {
throw error;
}
let inputData = inputAnalyser.getByteFrequencyData();
let outputData = outputAnalyser.getByteFrequencyData();
let inputMax = indexOfMax(inputData);
let outputMax = indexOfMax(outputData);
info(`Comparing maxima; input[${inputMax}] = ${inputData[inputMax]},`
+ ` output[${outputMax}] = ${outputData[outputMax]}`);
if (!inputData[inputMax] || !outputData[outputMax]) {
return false;
}
// When the input and output maxima are within reasonable distance (2% of
// total length, which means ~10 for length 512) from each other, we can
// be sure that the input tone has made it through the peer connection.
info(`input data length: ${inputData.length}`);
return Math.abs(inputMax - outputMax) < (inputData.length * 0.02);
});
}
},
/**
@ -1622,6 +1775,7 @@ PeerConnectionWrapper.prototype = {
// Use spec way of enumerating stats
var counters = {};
for (let [key, res] of stats) {
info("Checking stats for " + key + " : " + res);
// validate stats
ok(res.id == key, "Coherent stats id");
var nowish = Date.now() + 1000; // TODO: clock drift observed
@ -1655,11 +1809,17 @@ PeerConnectionWrapper.prototype = {
switch (res.type) {
case "inbound-rtp":
case "outbound-rtp": {
// ssrc is a 32 bit number returned as a string by spec
ok(res.ssrc.length > 0, "Ssrc has length");
ok(res.ssrc.length < 11, "Ssrc not lengthy");
ok(!/[^0-9]/.test(res.ssrc), "Ssrc numeric");
ok(parseInt(res.ssrc) < Math.pow(2,32), "Ssrc within limits");
// Inbound tracks won't have an ssrc if RTP is not flowing.
// (eg; negotiated inactive)
ok(res.ssrc || res.type == "inbound-rtp", "Outbound RTP stats has an ssrc.");
if (res.ssrc) {
// ssrc is a 32 bit number returned as a string by spec
ok(res.ssrc.length > 0, "Ssrc has length");
ok(res.ssrc.length < 11, "Ssrc not lengthy");
ok(!/[^0-9]/.test(res.ssrc), "Ssrc numeric");
ok(parseInt(res.ssrc) < Math.pow(2,32), "Ssrc within limits");
}
if (res.type == "outbound-rtp") {
ok(res.packetsSent !== undefined, "Rtp packetsSent");
@ -1734,7 +1894,12 @@ PeerConnectionWrapper.prototype = {
}
is(JSON.stringify(counters), JSON.stringify(counters2),
"Spec and legacy variant of RTCStatsReport enumeration agree");
var nin = Object.keys(this.expectedRemoteTrackInfoById).length;
var nin = this._pc.getTransceivers()
.filter(t => {
return !t.stopped &&
(t.currentDirection != "inactive") &&
(t.currentDirection != "sendonly");
}).length;
var nout = Object.keys(this.expectedLocalTrackInfoById).length;
var ndata = this.dataChannels.length;
@ -1810,13 +1975,10 @@ PeerConnectionWrapper.prototype = {
*
* @param {object} stats
* The stats to check for ICE candidate pairs
* @param {object} counters
* The counters for media and data tracks based on constraints
* @param {object} testOptions
* The test options object from the PeerConnectionTest
*/
checkStatsIceConnections : function(stats,
offerConstraintsList, offerOptions, testOptions) {
checkStatsIceConnections : function(stats, testOptions) {
var numIceConnections = 0;
stats.forEach(stat => {
if ((stat.type === "candidate-pair") && stat.selected) {
@ -1832,17 +1994,17 @@ PeerConnectionWrapper.prototype = {
is(numIceConnections, 2, "stats report exactly 2 ICE connections for media and RTCP");
}
} else {
// This code assumes that no media sections have been rejected due to
// codec mismatch or other unrecoverable negotiation failures.
var numAudioTracks =
sdputils.countTracksInConstraint('audio', offerConstraintsList) ||
((offerOptions && offerOptions.offerToReceiveAudio) ? 1 : 0);
var numAudioTransceivers =
this._pc.getTransceivers().filter((transceiver) => {
return (!transceiver.stopped) && transceiver.receiver.track.kind == "audio";
}).length;
var numVideoTracks =
sdputils.countTracksInConstraint('video', offerConstraintsList) ||
((offerOptions && offerOptions.offerToReceiveVideo) ? 1 : 0);
var numVideoTransceivers =
this._pc.getTransceivers().filter((transceiver) => {
return (!transceiver.stopped) && transceiver.receiver.track.kind == "video";
}).length;
var numExpectedTransports = numAudioTracks + numVideoTracks;
var numExpectedTransports = numAudioTransceivers + numVideoTransceivers;
if (!testOptions.rtcpmux) {
numExpectedTransports *= 2;
}

View File

@ -83,8 +83,7 @@ function waitForAnIceCandidate(pc) {
});
}
function checkTrackStats(pc, rtpSenderOrReceiver, outbound) {
var track = rtpSenderOrReceiver.track;
function checkTrackStats(pc, track, outbound) {
var audio = (track.kind == "audio");
var msg = pc + " stats " + (outbound ? "outbound " : "inbound ") +
(audio ? "audio" : "video") + " rtp track id " + track.id;
@ -106,8 +105,8 @@ function checkTrackStats(pc, rtpSenderOrReceiver, outbound) {
var checkAllTrackStats = pc => {
return Promise.all([].concat(
pc._pc.getSenders().map(sender => checkTrackStats(pc, sender, true)),
pc._pc.getReceivers().map(receiver => checkTrackStats(pc, receiver, false))));
pc.getExpectedActiveReceiveTracks().map(track => checkTrackStats(pc, track, false)),
pc.getExpectedSendTracks().map(track => checkTrackStats(pc, track, true))));
}
// Commands run once at the beginning of each test, even when performing a
@ -183,11 +182,11 @@ var commandsPeerConnectionInitial = [
var commandsGetUserMedia = [
function PC_LOCAL_GUM(test) {
return test.pcLocal.getAllUserMedia(test.pcLocal.constraints);
return test.pcLocal.getAllUserMediaAndAddStreams(test.pcLocal.constraints);
},
function PC_REMOTE_GUM(test) {
return test.pcRemote.getAllUserMedia(test.pcRemote.constraints);
return test.pcRemote.getAllUserMediaAndAddStreams(test.pcRemote.constraints);
},
];
@ -214,32 +213,6 @@ var commandsPeerConnectionOfferAnswer = [
}
},
function PC_LOCAL_GET_EXPECTED_REMOTE_TRACKS(test) {
if (test.testOptions.steeplechase) {
return test.getSignalingMessage("remote_expected_tracks").then(
message => {
test.pcLocal.expectedRemoteTrackInfoById = message.expected_tracks;
});
}
// Deep copy, as similar to steeplechase as possible
test.pcLocal.expectedRemoteTrackInfoById =
JSON.parse(JSON.stringify(test.pcRemote.expectedLocalTrackInfoById));
},
function PC_REMOTE_GET_EXPECTED_REMOTE_TRACKS(test) {
if (test.testOptions.steeplechase) {
return test.getSignalingMessage("local_expected_tracks").then(
message => {
test.pcRemote.expectedRemoteTrackInfoById = message.expected_tracks;
});
}
// Deep copy, as similar to steeplechase as possible
test.pcRemote.expectedRemoteTrackInfoById =
JSON.parse(JSON.stringify(test.pcLocal.expectedLocalTrackInfoById));
},
function PC_LOCAL_CREATE_OFFER(test) {
return test.createOffer(test.pcLocal).then(offer => {
is(test.pcLocal.signalingState, STABLE,
@ -435,19 +408,13 @@ var commandsPeerConnectionOfferAnswer = [
function PC_LOCAL_CHECK_ICE_CONNECTIONS(test) {
return test.pcLocal.getStats().then(stats => {
test.pcLocal.checkStatsIceConnections(stats,
test._offer_constraints,
test._offer_options,
test.testOptions);
test.pcLocal.checkStatsIceConnections(stats, test.testOptions);
});
},
function PC_REMOTE_CHECK_ICE_CONNECTIONS(test) {
return test.pcRemote.getStats().then(stats => {
test.pcRemote.checkStatsIceConnections(stats,
test._offer_constraints,
test._offer_options,
test.testOptions);
test.pcRemote.checkStatsIceConnections(stats, test.testOptions);
});
},

View File

@ -18,7 +18,7 @@
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
],
[

View File

@ -22,7 +22,7 @@
// Since this is a NoBundle variant, adding a track will cause us to
// go back to checking.
test.pcLocal.expectIceChecking();
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.expectIceChecking();

View File

@ -21,7 +21,7 @@
[{video: true}]);
// Use fake:true here since the native fake device on linux doesn't
// change color as needed by checkVideoPlaying() below.
return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
},
],
[

View File

@ -25,7 +25,7 @@
test.pcLocal.expectIceChecking();
// Use fake:true here since the native fake device on linux doesn't
// change color as needed by checkVideoPlaying() below.
return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.expectIceChecking();

View File

@ -20,6 +20,12 @@ runNetworkTest(function (options) {
[
function PC_LOCAL_SWAP_VIDEO_TRACKS(test) {
return getUserMedia({video: true}).then(stream => {
var videoTransceiver = test.pcLocal._pc.getTransceivers()[1];
is(videoTransceiver.currentDirection, "sendonly",
"Video transceiver's current direction is sendonly");
is(videoTransceiver.direction, "sendrecv",
"Video transceiver's desired direction is sendrecv");
const localStream = test.pcLocal._pc.getLocalStreams()[0];
ok(localStream, "Should have local stream");
@ -33,30 +39,25 @@ runNetworkTest(function (options) {
isnot(videoSenderIndex, -1, "Should have video sender");
test.pcLocal.removeSender(videoSenderIndex);
is(videoTransceiver.direction, "recvonly",
"Video transceiver should be recvonly after removeTrack");
test.pcLocal.attachLocalTrack(stream.getTracks()[0], localStream);
is(videoTransceiver.direction, "recvonly",
"Video transceiver should be recvonly after addTrack");
const addTrackPromise = haveEvent(remoteStream, "addtrack",
wait(50000, new Error("No addtrack event")))
eventsPromise = haveEvent(remoteStream, "addtrack",
wait(50000, new Error("No addtrack event for " + newTrack.id)))
.then(trackEvent => {
ok(trackEvent instanceof MediaStreamTrackEvent,
"Expected event to be instance of MediaStreamTrackEvent");
is(trackEvent.type, "addtrack",
"Expected addtrack event type");
is(trackEvent.track.id, newTrack.id, "Expected track in event");
is(test.pcRemote.getWebrtcTrackId(trackEvent.track), newTrack.id, "Expected track in event");
is(trackEvent.track.readyState, "live",
"added track should be live");
})
.then(() => haveNoEvent(remoteStream, "addtrack"));
const remoteTrack = test.pcRemote._pc.getReceivers()
.map(r => r.track)
.find(t => t.kind == "video");
ok(remoteTrack, "Should have received remote track");
const endedPromise = haveEvent(remoteTrack, "ended",
wait(50000, new Error("No ended event")));
eventsPromise = Promise.all([addTrackPromise, endedPromise]);
remoteStream.addEventListener("removetrack",
function onRemovetrack(trackEvent) {
ok(false, "UA shouldn't raise 'removetrack' when receiving peer connection");

View File

@ -19,7 +19,7 @@
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
]
);

View File

@ -0,0 +1,31 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1290948",
title: "Basic audio/video with addTransceiver"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
test.setMediaConstraints([{audio: true}, {video: true}],
[{audio: true}, {video: true}]);
test.chain.replace("PC_LOCAL_GUM",
[
function PC_LOCAL_GUM_TRANSCEIVERS(test) {
return test.pcLocal.getAllUserMediaAndAddTransceivers(test.pcLocal.constraints);
}
]);
test.run();
});
</script>
</pre>
</body>
</html>

View File

@ -16,7 +16,7 @@
var options = { mandatory: { OfferToReceiveVideo: true } }; // obsolete
pc.createOffer(options).then(() => ok(false, "createOffer must fail"),
e => is(e.name, "InternalError",
e => is(e.name, "InvalidStateError",
"createOffer must fail"))
.catch(e => ok(false, e.message))
.then(() => {

View File

@ -50,7 +50,7 @@ runNetworkTest(() => {
is(receivedStream.getTracks().length, sentTracks.length,
"Should receive same number of tracks as were sent");
sentTracks.forEach(t =>
ok(receivedStream.getTracks().find(t2 => t.id == t2.id),
ok(receivedStream.getTracks().find(t2 => t.id == test.pcRemote.getWebrtcTrackId(t2)),
"The sent track (" + t.id + ") should exist on the receive side"));
};

View File

@ -18,7 +18,7 @@
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
function PC_REMOTE_SETUP_ICE_HANDLER(test) {
@ -37,6 +37,9 @@
},
function PC_REMOTE_ROLLBACK(test) {
// the negotiationNeeded slot should have been true both before and
// after this SLD, so the event should fire again.
test.pcRemote.expectNegotiationNeeded();
return test.setLocalDescription(test.pcRemote,
{ type: "rollback", sdp: "" },
STABLE);

View File

@ -23,6 +23,9 @@
},
function PC_REMOTE_ROLLBACK(test) {
// the negotiationNeeded slot should have been true both before and
// after this SLD, so the event should fire again.
test.pcRemote.expectNegotiationNeeded();
return test.setLocalDescription(test.pcRemote,
{ type: "rollback", sdp: "" },
STABLE);

View File

@ -19,7 +19,7 @@
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
]
);

View File

@ -37,10 +37,11 @@
],
[
function PC_REMOTE_CHECK_FLOW_STOPPED(test) {
is(test.pcRemote._pc.getReceivers().length, 0,
"pcRemote should have no more receivers");
is(receivedTrack.readyState, "ended",
"The received track should have ended");
// Simply removing a track is not enough to cause it to be
// signaled as ended. Spec may change though.
// TODO: One last check of the spec is in order
is(receivedTrack.readyState, "live",
"The received track should not have ended");
return analyser.waitForAnalysisSuccess(arr => arr[freq] < 50);
},

View File

@ -29,21 +29,30 @@
// 0, but the remote side will keep its old pipeline and packet
// count.
test.pcLocal.disableRtpCountChecking = true;
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
],
[
function PC_REMOTE_CHECK_ADDED_TRACK(test) {
is(test.pcRemote._pc.getReceivers().length, 1,
"pcRemote should still have one receiver");
const track = test.pcRemote._pc.getReceivers()[0].track;
isnot(originalTrack.id, track.id, "Receiver should have changed");
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
const analyser = new AudioStreamAnalyser(
new AudioContext(), new MediaStream([track]));
const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
return analyser.waitForAnalysisSuccess(arr => arr[freq] > 200);
},
function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
const analyser = new AudioStreamAnalyser(
new AudioContext(), new MediaStream([track]));
const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
return analyser.waitForAnalysisSuccess(arr => arr[freq] < 50);
}
]
);

View File

@ -12,6 +12,8 @@
});
runNetworkTest(function (options) {
options = options || { };
options.bundle = false;
const test = new PeerConnectionTest(options);
let originalTrack;
addRenegotiation(test.chain,
@ -29,21 +31,36 @@
return test.pcLocal.removeSender(0);
},
function PC_LOCAL_ADD_AUDIO_TRACK(test) {
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
function PC_LOCAL_EXPECT_ICE_CHECKING(test) {
test.pcLocal.expectIceChecking();
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.expectIceChecking();
},
],
[
function PC_REMOTE_CHECK_ADDED_TRACK(test) {
is(test.pcRemote._pc.getReceivers().length, 1,
"pcRemote should still have one receiver");
const track = test.pcRemote._pc.getReceivers()[0].track;
isnot(originalTrack.id, track.id, "Receiver should have changed");
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
const analyser = new AudioStreamAnalyser(
new AudioContext(), new MediaStream([track]));
const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
return analyser.waitForAnalysisSuccess(arr => arr[freq] > 200);
},
function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
const analyser = new AudioStreamAnalyser(
new AudioContext(), new MediaStream([track]));
const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
return analyser.waitForAnalysisSuccess(arr => arr[freq] < 50);
}
]
);

View File

@ -33,23 +33,28 @@
function PC_LOCAL_ADD_VIDEO_TRACK(test) {
// Use fake:true here since the native fake device on linux doesn't
// change color as needed by checkVideoPlaying() below.
return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
},
],
[
function PC_REMOTE_CHECK_ADDED_TRACK(test) {
is(test.pcRemote._pc.getReceivers().length, 1,
"pcRemote should still have one receiver");
const track = test.pcRemote._pc.getReceivers()[0].track;
isnot(originalTrack.id, track.id, "Receiver should have changed");
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
const vOriginal = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(originalTrack.id));
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
ok(vOriginal.ended, "Original video element should have ended");
return helper.checkVideoPlaying(vAdded);
},
function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
return helper.checkVideoPaused(vAdded, 10, 10, 16, 5000);
}
]
);

View File

@ -13,6 +13,8 @@
});
runNetworkTest(function (options) {
options = options || { };
options.bundle = false;
const test = new PeerConnectionTest(options);
const helper = new VideoStreamHelper();
var originalTrack;
@ -33,23 +35,34 @@
function PC_LOCAL_ADD_VIDEO_TRACK(test) {
// Use fake:true here since the native fake device on linux doesn't
// change color as needed by checkVideoPlaying() below.
return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
},
function PC_LOCAL_EXPECT_ICE_CHECKING(test) {
test.pcLocal.expectIceChecking();
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.expectIceChecking();
},
],
[
function PC_REMOTE_CHECK_ADDED_TRACK(test) {
is(test.pcRemote._pc.getReceivers().length, 1,
"pcRemote should still have one receiver");
const track = test.pcRemote._pc.getReceivers()[0].track;
isnot(originalTrack.id, track.id, "Receiver should have changed");
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
const vOriginal = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(originalTrack.id));
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
ok(vOriginal.ended, "Original video element should have ended");
return helper.checkVideoPlaying(vAdded);
},
function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
is(test.pcRemote._pc.getTransceivers().length, 2,
"pcRemote should have two transceivers");
const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
return helper.checkVideoPaused(vAdded, 10, 10, 16, 5000);
},
]
);

View File

@ -2,6 +2,7 @@
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
<script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
</head>
<body>
<pre id="test">
@ -36,12 +37,14 @@
],
[
function PC_REMOTE_CHECK_FLOW_STOPPED(test) {
is(test.pcRemote._pc.getReceivers().length, 0,
"pcRemote should have no more receivers");
is(receivedTrack.readyState, "ended",
"The received track should have ended");
is(element.ended, true,
"Element playing the removed track should have ended");
is(test.pcRemote._pc.getTransceivers().length, 1,
"pcRemote should have one transceiver");
const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
const helper = new VideoStreamHelper();
return helper.checkVideoPaused(vAdded, 10, 10, 16, 5000);
},
]
);

View File

@ -47,19 +47,21 @@
isnot(newTrack, sender.track, "replacing with a different track");
ok(!pc.getLocalStreams().some(s => s == newStream),
"from a different stream");
return sender.replaceTrack(newTrack);
// Use wrapper function, since it updates expected tracks
return wrapper.senderReplaceTrack(sender, newTrack, newStream);
})
.then(() => {
is(pc.getSenders().length, oldSenderCount, "same sender count");
is(sender.track, newTrack, "sender.track has been replaced");
ok(!pc.getSenders().map(sn => sn.track).some(t => t == oldTrack),
"old track not among senders");
ok(pc.getLocalStreams().some(s => s.getTracks()
// Spec does not say we add this new track to any stream
ok(!pc.getLocalStreams().some(s => s.getTracks()
.some(t => t == sender.track)),
"track exists among pc's local streams");
"track does not exist among pc's local streams");
return sender.replaceTrack(audiotrack)
.then(() => ok(false, "replacing with different kind should fail"),
e => is(e.name, "IncompatibleMediaStreamTrackError",
e => is(e.name, "TypeError",
"replacing with different kind should fail"));
});
}
@ -130,15 +132,18 @@
sourceNode.connect(destNode);
var newTrack = destNode.stream.getAudioTracks()[0];
return sender.replaceTrack(newTrack)
return test.pcLocal.senderReplaceTrack(
sender, newTrack, destNode.stream)
.then(() => {
is(pc.getSenders().length, oldSenderCount, "same sender count");
ok(!pc.getSenders().some(sn => sn.track == oldTrack),
"Replaced track should be removed from senders");
ok(allLocalStreamsHaveSender(pc),
"Shouldn't have any streams without a corresponding sender");
// TODO: Should PC remove local streams when there are no senders
// associated with it? getLocalStreams() isn't in the spec anymore,
// so I guess it is pretty arbitrary?
is(sender.track, newTrack, "sender.track has been replaced");
ok(pc.getLocalStreams().some(s => s.getTracks()
// Spec does not say we add this new track to any stream
ok(!pc.getLocalStreams().some(s => s.getTracks()
.some(t => t == sender.track)),
"track exists among pc's local streams");
});
@ -151,22 +156,27 @@
]);
test.chain.append([
function PC_LOCAL_INVALID_ADD_VIDEOTRACKS(test) {
var stream = test.pcLocal._pc.getLocalStreams()[0];
var track = stream.getVideoTracks()[0];
try {
test.pcLocal._pc.addTrack(track, stream);
ok(false, "addTrack existing track should fail");
} catch (e) {
is(e.name, "InvalidParameterError",
"addTrack existing track should fail");
}
try {
test.pcLocal._pc.addTrack(track, stream);
ok(false, "addTrack existing track should fail");
} catch (e) {
is(e.name, "InvalidParameterError",
"addTrack existing track should fail");
}
let videoTransceivers = test.pcLocal._pc.getTransceivers()
.filter(transceiver => {
return !transceiver.stopped &&
transceiver.receiver.track.kind == "video" &&
transceiver.sender.track;
});
ok(videoTransceivers.length,
"There is at least one non-stopped video transceiver with a track.");
videoTransceivers.forEach(transceiver => {
var stream = test.pcLocal._pc.getLocalStreams()[0];;
var track = transceiver.sender.track;
try {
test.pcLocal._pc.addTrack(track, stream);
ok(false, "addTrack existing track should fail");
} catch (e) {
is(e.name, "InvalidAccessError",
"addTrack existing track should fail");
}
});
}
]);
test.run();

View File

@ -36,50 +36,28 @@
emitter2.start();
const newstream = emitter2.stream();
const newtrack = newstream.getVideoTracks()[0];
return test.pcLocal.senderReplaceTrack(0, newtrack, newstream.id)
var sender = test.pcLocal._pc.getSenders()[0];
return test.pcLocal.senderReplaceTrack(sender, newtrack, newstream)
.then(() => {
test.setMediaConstraints([{video: true}, {video: true}],
[{video: true}]);
// Use fake:true here since the native fake device on linux
// doesn't change color as needed by checkVideoPlaying() below.
return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
});
},
],
[
function PC_REMOTE_CHECK_ORIGINAL_TRACK_ENDED(test) {
function PC_REMOTE_CHECK_ORIGINAL_TRACK_NOT_ENDED(test) {
is(test.pcRemote._pc.getTransceivers().length, 1,
"pcRemote should have one transceiver");
const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
const vremote = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(emitter1.stream().getTracks()[0].id));
if (!vremote) {
return Promise.reject(new Error("Couldn't find video element"));
}
ok(vremote.ended, "Original track should have ended after renegotiation");
},
function PC_REMOTE_CHECK_REPLACED_TRACK_FLOW(test) {
const vremote = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(test.pcLocal._pc.getSenders()[0].track.id));
if (!vremote) {
return Promise.reject(new Error("Couldn't find video element"));
}
return addFinallyToPromise(helper.checkVideoPlaying(vremote))
.finally(() => emitter2.stop())
.then(() => {
const px = helper._helper.getPixel(vremote, 10, 10);
const isBlue = helper._helper.isPixel(
px, CaptureStreamTestHelper.prototype.blue, 5);
const isGrey = helper._helper.isPixel(
px, CaptureStreamTestHelper.prototype.grey, 5);
ok(isBlue || isGrey, "replaced track should be blue or grey");
});
},
function PC_REMOTE_CHECK_ADDED_TRACK_FLOW(test) {
const vremote = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(test.pcLocal._pc.getSenders()[1].track.id));
elem => elem.id.includes(track.id));
if (!vremote) {
return Promise.reject(new Error("Couldn't find video element"));
}
ok(!vremote.ended, "Original track should not have ended after renegotiation (replaceTrack is not signalled!)");
return helper.checkVideoPlaying(vremote);
},
}
]
);

View File

@ -18,7 +18,7 @@
f().then(() => ok(false, msg),
e => is(e.name, reason, msg));
function testScale(codec) {
async function testScale(codec) {
var pc1 = new RTCPeerConnection();
var pc2 = new RTCPeerConnection();
@ -28,53 +28,53 @@
info("testing scaling with " + codec);
pc1.onnegotiationneeded = e =>
pc1.createOffer()
.then(d => pc1.setLocalDescription(codec == "VP8"
? d
: (d.sdp = sdputils.removeAllButPayloadType(d.sdp, 126), d)))
.then(() => pc2.setRemoteDescription(pc1.localDescription))
.then(() => pc2.createAnswer()).then(d => pc2.setLocalDescription(d))
.then(() => pc1.setRemoteDescription(pc2.localDescription))
.catch(generateErrorCallback());
let stream = await navigator.mediaDevices.getUserMedia({ video: true });
return navigator.mediaDevices.getUserMedia({ video: true })
.then(stream => {
var v1 = createMediaElement('video', 'v1');
var v2 = createMediaElement('video', 'v2');
var v1 = createMediaElement('video', 'v1');
var v2 = createMediaElement('video', 'v2');
is(v2.currentTime, 0, "v2.currentTime is zero at outset");
var ontrackfired = new Promise(resolve => pc2.ontrack = e => resolve(e));
var v2loadedmetadata = new Promise(resolve => v2.onloadedmetadata = resolve);
v1.srcObject = stream;
var sender = pc1.addTrack(stream.getVideoTracks()[0], stream);
is(v2.currentTime, 0, "v2.currentTime is zero at outset");
return mustRejectWith("Invalid scaleResolutionDownBy must reject", "RangeError",
() => sender.setParameters({ encodings:
[{ scaleResolutionDownBy: 0.5 } ] }))
.then(() => sender.setParameters({ encodings: [{ maxBitrate: 60000,
scaleResolutionDownBy: 2 }] }))
.then(() => new Promise(resolve => pc2.ontrack = e => resolve(e)))
.then(e => v2.srcObject = e.streams[0])
.then(() => new Promise(resolve => v2.onloadedmetadata = resolve))
.then(() => waitUntil(() => v2.currentTime > 0 && v2.srcObject.currentTime > 0))
.then(() => ok(v2.currentTime > 0, "v2.currentTime is moving (" + v2.currentTime + ")"))
.then(() => wait(3000)) // TODO: Bug 1248154
.then(() => {
ok(v1.videoWidth > 0, "source width is positive");
ok(v1.videoHeight > 0, "source height is positive");
if (v2.videoWidth == 640 && v2.videoHeight == 480) { // TODO: Bug 1248154
info("Skipping test due to Bug 1248154");
} else {
is(v2.videoWidth, v1.videoWidth / 2, "sink is half the width of source");
is(v2.videoHeight, v1.videoHeight / 2, "sink is half the height of source");
}
})
.then(() => {
stream.getTracks().forEach(track => track.stop());
v1.srcObject = v2.srcObject = null;
})
})
.catch(generateErrorCallback());
v1.srcObject = stream;
var sender = pc1.addTrack(stream.getVideoTracks()[0], stream);
await mustRejectWith(
"Invalid scaleResolutionDownBy must reject", "RangeError",
() => sender.setParameters(
{ encodings:[{ scaleResolutionDownBy: 0.5 } ] })
);
await sender.setParameters({ encodings: [{ maxBitrate: 60000,
scaleResolutionDownBy: 2 }] });
let offer = await pc1.createOffer();
if (codec == "VP8") {
offer.sdp = sdputils.removeAllButPayloadType(offer.sdp, 126);
}
await pc1.setLocalDescription(offer);
await pc2.setRemoteDescription(pc1.localDescription);
let answer = await pc2.createAnswer();
await pc2.setLocalDescription(answer);
await pc1.setRemoteDescription(pc2.localDescription);
let trackevent = await ontrackfired;
v2.srcObject = trackevent.streams[0];
await v2loadedmetadata;
await waitUntil(() => v2.currentTime > 0 && v2.srcObject.currentTime > 0);
ok(v2.currentTime > 0, "v2.currentTime is moving (" + v2.currentTime + ")");
ok(v1.videoWidth > 0, "source width is positive");
ok(v1.videoHeight > 0, "source height is positive");
is(v2.videoWidth, v1.videoWidth / 2, "sink is half the width of source");
is(v2.videoHeight, v1.videoHeight / 2, "sink is half the height of source");
stream.getTracks().forEach(track => track.stop());
v1.srcObject = v2.srcObject = null;
}
pushPrefs(['media.peerconnection.video.lock_scaling', true]).then(() => {

View File

@ -17,10 +17,11 @@ function parameterstest(pc) {
var sender = pc.getSenders()[0];
var testParameters = (params, errorName, errorMsg) => {
info("Trying to set " + JSON.stringify(params));
var validateParameters = (a, b) => {
var validateEncoding = (a, b) => {
is(a.rid, b.rid || "", "same rid");
is(a.rid, b.rid, "same rid");
is(a.maxBitrate, b.maxBitrate, "same maxBitrate");
is(a.scaleResolutionDownBy, b.scaleResolutionDownBy,
"same scaleResolutionDownBy");

File diff suppressed because it is too large Load Diff

View File

@ -19,12 +19,6 @@
test._local_offer.sdp = test._local_offer.sdp.replace(
/a=msid:[^\s]*/g,
"a=msid:foo");
},
function PC_REMOTE_OVERRIDE_EXPECTED_STREAM_IDS(test) {
Object.keys(
test.pcRemote.expectedRemoteTrackInfoById).forEach(trackId => {
test.pcRemote.expectedRemoteTrackInfoById[trackId].streamId = "foo";
});
}
]);
test.chain.insertAfter("PC_LOCAL_GET_ANSWER", [
@ -32,12 +26,6 @@
test._remote_answer.sdp = test._remote_answer.sdp.replace(
/a=msid:[^\s]*/g,
"a=msid:foo");
},
function PC_LOCAL_OVERRIDE_EXPECTED_STREAM_IDS(test) {
Object.keys(
test.pcLocal.expectedRemoteTrackInfoById).forEach(trackId => {
test.pcLocal.expectedRemoteTrackInfoById[trackId].streamId = "foo";
});
}
]);
test.setMediaConstraints([{audio: true}, {audio: true}],

View File

@ -19,12 +19,6 @@
test._local_offer.sdp = test._local_offer.sdp.replace(
/a=msid:[^\s]*/g,
"a=msid:foo");
},
function PC_REMOTE_OVERRIDE_EXPECTED_STREAM_IDS(test) {
Object.keys(
test.pcRemote.expectedRemoteTrackInfoById).forEach(trackId => {
test.pcRemote.expectedRemoteTrackInfoById[trackId].streamId = "foo";
});
}
]);
test.chain.insertAfter("PC_LOCAL_GET_ANSWER", [
@ -32,12 +26,6 @@
test._remote_answer.sdp = test._remote_answer.sdp.replace(
/a=msid:[^\s]*/g,
"a=msid:foo");
},
function PC_LOCAL_OVERRIDE_EXPECTED_STREAM_IDS(test) {
Object.keys(
test.pcLocal.expectedRemoteTrackInfoById).forEach(trackId => {
test.pcLocal.expectedRemoteTrackInfoById[trackId].streamId = "foo";
});
}
]);
test.setMediaConstraints([{video: true}, {video: true}],

View File

@ -48,7 +48,7 @@
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}],
[]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
},
]
);

View File

@ -78,7 +78,7 @@ runNetworkTest(() => {
h2.drawColor(canvas2, h2.blue);
stream2 = canvas2.captureStream(0);
// can't use test.pcLocal.getAllUserMedia([{video: true}]);
// can't use test.pcLocal.getAllUserMediaAndAddStreams([{video: true}]);
// because it doesn't let us substitute the capture stream
test.pcLocal.attachLocalStream(stream2);
}

View File

@ -814,6 +814,8 @@ var interfaceNamesInGlobalScope =
"RTCRtpReceiver",
// IMPORTANT: Do not change this list without review from a DOM peer!
"RTCRtpSender",
// IMPORTANT: Do not change this list without review from a DOM peer!
"RTCRtpTransceiver",
// IMPORTANT: Do not change this list without review from a DOM peer!
"RTCSessionDescription",
// IMPORTANT: Do not change this list without review from a DOM peer!

View File

@ -43,4 +43,9 @@ interface MediaStream : EventTarget {
[ChromeOnly, Throws]
static Promise<long> countUnderlyingStreams();
// Webrtc allows the remote side to name a stream whatever it wants, and we
// need to surface this to content.
[ChromeOnly]
void assignId(DOMString id);
};

View File

@ -1,11 +0,0 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*/
[ChromeOnly]
interface MediaStreamList {
getter MediaStream? (unsigned long index);
readonly attribute unsigned long length;
};

View File

@ -41,28 +41,24 @@ interface PeerConnectionImpl {
/* Adds the tracks created by GetUserMedia */
[Throws]
void addTrack(MediaStreamTrack track, MediaStream... streams);
[Throws]
void removeTrack(MediaStreamTrack track);
[Throws]
void insertDTMF(RTCRtpSender sender, DOMString tones,
TransceiverImpl createTransceiverImpl(DOMString kind,
MediaStreamTrack? track);
[Throws]
boolean checkNegotiationNeeded();
[Throws]
void insertDTMF(TransceiverImpl transceiver, DOMString tones,
optional unsigned long duration = 100,
optional unsigned long interToneGap = 70);
[Throws]
DOMString getDTMFToneBuffer(RTCRtpSender sender);
[Throws]
void replaceTrack(MediaStreamTrack thisTrack, MediaStreamTrack withTrack);
[Throws]
void setParameters(MediaStreamTrack track,
optional RTCRtpParameters parameters);
[Throws]
RTCRtpParameters getParameters(MediaStreamTrack track);
void replaceTrackNoRenegotiation(TransceiverImpl transceiverImpl,
MediaStreamTrack? withTrack);
[Throws]
void closeStreams();
sequence<MediaStream> getLocalStreams();
sequence<MediaStream> getRemoteStreams();
void addRIDExtension(MediaStreamTrack recvTrack, unsigned short extensionId);
void addRIDFilter(MediaStreamTrack recvTrack, DOMString rid);

View File

@ -23,16 +23,11 @@ interface PeerConnectionObserver
void onAddIceCandidateSuccess();
void onAddIceCandidateError(unsigned long name, DOMString message);
void onIceCandidate(unsigned short level, DOMString mid, DOMString candidate);
void onNegotiationNeeded();
/* Stats callbacks */
void onGetStatsSuccess(optional RTCStatsReportInternal report);
void onGetStatsError(unsigned long name, DOMString message);
/* replaceTrack callbacks */
void onReplaceTrackSuccess();
void onReplaceTrackError(unsigned long name, DOMString message);
/* Data channel callbacks */
void notifyDataChannel(DataChannel channel);
@ -40,15 +35,20 @@ interface PeerConnectionObserver
void onStateChange(PCObserverStateType state);
/* Changes to MediaStreamTracks */
void onAddStream(MediaStream stream);
void onRemoveStream(MediaStream stream);
void onAddTrack(MediaStreamTrack track, sequence<MediaStream> streams);
void onRemoveTrack(MediaStreamTrack track);
void onTrack(DOMString webrtcTrackId, sequence<DOMString> streamIds);
/* Transceiver management; called when setRemoteDescription causes a
transceiver to be created on the C++ side */
void onTransceiverNeeded(DOMString kind, TransceiverImpl transceiverImpl);
/* DTMF callback */
void onDTMFToneChange(DOMString trackId, DOMString tone);
void onDTMFToneChange(MediaStreamTrack track, DOMString tone);
/* Packet dump callback */
void onPacket(unsigned long level, mozPacketDumpType type, boolean sending,
ArrayBuffer packet);
/* Transceiver sync */
void syncTransceivers();
};

View File

@ -128,8 +128,12 @@ interface RTCPeerConnection : EventTarget {
MediaStream... moreStreams);
void removeTrack(RTCRtpSender sender);
RTCRtpTransceiver addTransceiver((MediaStreamTrack or DOMString) trackOrKind,
optional RTCRtpTransceiverInit init);
sequence<RTCRtpSender> getSenders();
sequence<RTCRtpReceiver> getReceivers();
sequence<RTCRtpTransceiver> getTransceivers();
[ChromeOnly]
void mozAddRIDExtension(RTCRtpReceiver receiver, unsigned short extensionId);

View File

@ -69,11 +69,20 @@ dictionary RTCRtpParameters {
[Pref="media.peerconnection.enabled",
JSImplementation="@mozilla.org/dom/rtpsender;1"]
interface RTCRtpSender {
readonly attribute MediaStreamTrack track;
readonly attribute MediaStreamTrack? track;
Promise<void> setParameters (optional RTCRtpParameters parameters);
RTCRtpParameters getParameters();
Promise<void> replaceTrack(MediaStreamTrack track);
Promise<void> replaceTrack(MediaStreamTrack? withTrack);
Promise<RTCStatsReport> getStats();
[Pref="media.peerconnection.dtmf.enabled"]
readonly attribute RTCDTMFSender? dtmf;
// Ugh, can't use a ChromeOnly attibute sequence<MediaStream>...
[ChromeOnly]
sequence<MediaStream> getStreams();
[ChromeOnly]
void setStreams(sequence<MediaStream> streams);
[ChromeOnly]
void setTrack(MediaStreamTrack? track);
[ChromeOnly]
void checkWasCreatedByPc(RTCPeerConnection pc);
};

View File

@ -0,0 +1,78 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*
* The origin of this IDL file is
* http://w3c.github.io/webrtc-pc/#rtcrtptransceiver-interface
*/
enum RTCRtpTransceiverDirection {
"sendrecv",
"sendonly",
"recvonly",
"inactive"
};
dictionary RTCRtpTransceiverInit {
RTCRtpTransceiverDirection direction = "sendrecv";
sequence<MediaStream> streams = [];
// TODO: bug 1396918
// sequence<RTCRtpEncodingParameters> sendEncodings;
};
[Pref="media.peerconnection.enabled",
JSImplementation="@mozilla.org/dom/rtptransceiver;1"]
interface RTCRtpTransceiver {
readonly attribute DOMString? mid;
[SameObject]
readonly attribute RTCRtpSender sender;
[SameObject]
readonly attribute RTCRtpReceiver receiver;
readonly attribute boolean stopped;
attribute RTCRtpTransceiverDirection direction;
readonly attribute RTCRtpTransceiverDirection? currentDirection;
void stop();
// TODO: bug 1396922
// void setCodecPreferences(sequence<RTCRtpCodecCapability> codecs);
[ChromeOnly]
void setRemoteTrackId(DOMString trackId);
[ChromeOnly]
boolean remoteTrackIdIs(DOMString trackId);
// Mostly for testing
[Pref="media.peerconnection.remoteTrackId.enabled"]
DOMString getRemoteTrackId();
[ChromeOnly]
void setAddTrackMagic();
[ChromeOnly]
readonly attribute boolean addTrackMagic;
[ChromeOnly]
void setCurrentDirection(RTCRtpTransceiverDirection direction);
[ChromeOnly]
void setDirectionInternal(RTCRtpTransceiverDirection direction);
[ChromeOnly]
void setMid(DOMString mid);
[ChromeOnly]
void unsetMid();
[ChromeOnly]
void setStopped();
[ChromeOnly]
void remove();
[ChromeOnly]
DOMString getKind();
[ChromeOnly]
boolean hasBeenUsedToSend();
[ChromeOnly]
void sync();
[ChromeOnly]
void insertDTMF(DOMString tones,
optional unsigned long duration = 100,
optional unsigned long interToneGap = 70);
};

View File

@ -11,6 +11,7 @@ dictionary RTCTrackEventInit : EventInit {
required RTCRtpReceiver receiver;
required MediaStreamTrack track;
sequence<MediaStream> streams = [];
required RTCRtpTransceiver transceiver;
};
[Pref="media.peerconnection.enabled",
@ -24,4 +25,5 @@ interface RTCTrackEvent : Event {
[Frozen, Cached, Pure]
readonly attribute sequence<MediaStream> streams; // workaround
readonly attribute RTCRtpTransceiver transceiver;
};

View File

@ -0,0 +1,23 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*
* PeerConnection.js' interface to the C++ TransceiverImpl.
*
* Do not confuse with RTCRtpTransceiver. This interface is purely for
* communication between the PeerConnection JS DOM binding and the C++
* implementation.
*
* See media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
*
*/
// Constructed by PeerConnectionImpl::CreateTransceiverImpl.
[ChromeOnly]
interface TransceiverImpl {
MediaStreamTrack getReceiveTrack();
[Throws]
void syncWithJS(RTCRtpTransceiver transceiver);
};

View File

@ -202,9 +202,6 @@ with Files("MediaKey*"):
with Files("Media*List*"):
BUG_COMPONENT = ("Core", "CSS Parsing and Computation")
with Files("MediaStreamList.webidl"):
BUG_COMPONENT = ("Core", "Web Audio")
with Files("*Record*"):
BUG_COMPONENT = ("Core", "Audio/Video: Recording")
@ -985,7 +982,6 @@ WEBIDL_FILES = [
if CONFIG['MOZ_WEBRTC']:
WEBIDL_FILES += [
'DataChannel.webidl',
'MediaStreamList.webidl',
'PeerConnectionImpl.webidl',
'PeerConnectionImplEnums.webidl',
'PeerConnectionObserver.webidl',
@ -1000,7 +996,9 @@ if CONFIG['MOZ_WEBRTC']:
'RTCPeerConnectionStatic.webidl',
'RTCRtpReceiver.webidl',
'RTCRtpSender.webidl',
'RTCRtpTransceiver.webidl',
'RTCSessionDescription.webidl',
'TransceiverImpl.webidl',
'WebrtcDeprecated.webidl',
'WebrtcGlobalInformation.webidl',
]

View File

@ -596,6 +596,16 @@ impl SourceTextureResolver {
}
}
fn begin_frame(&self) {
assert!(self.cache_rgba8_texture.is_none());
assert!(self.cache_a8_texture.is_none());
}
fn end_frame(&mut self, pool: &mut Vec<Texture>) {
// return the cached targets to the pool
self.end_pass(None, None, pool)
}
fn end_pass(
&mut self,
a8_texture: Option<Texture>,
@ -3697,6 +3707,7 @@ impl Renderer {
self.device.set_blend(false);
self.bind_frame_data(frame);
self.texture_resolver.begin_frame();
for (pass_index, pass) in frame.passes.iter_mut().enumerate() {
self.gpu_profile.place_marker(&format!("pass {}", pass_index));
@ -3813,6 +3824,7 @@ impl Renderer {
}
}
self.texture_resolver.end_frame(&mut self.render_target_pool);
self.draw_render_target_debug(framebuffer_size);
self.draw_texture_cache_debug(framebuffer_size);

View File

@ -180,7 +180,7 @@ URLPreloader::GetCacheFile(const nsAString& suffix)
return Move(cacheFile);
}
static const uint8_t URL_MAGIC[] = "mozURLcachev001";
static const uint8_t URL_MAGIC[] = "mozURLcachev002";
Result<nsCOMPtr<nsIFile>, nsresult>
URLPreloader::FindCacheFile()
@ -515,13 +515,6 @@ URLPreloader::ReadFile(nsIFile* file, ReadType readType)
return Read(CacheKey(file), readType);
}
/* static */ Result<const nsCString, nsresult>
URLPreloader::ReadFile(const nsACString& path, ReadType readType)
{
CacheKey key(CacheKey::TypeFile, path);
return Read(key, readType);
}
/* static */ Result<const nsCString, nsresult>
URLPreloader::Read(FileLocation& location, ReadType readType)
{
@ -604,10 +597,10 @@ URLPreloader::ResolveURI(nsIURI* uri)
nsCOMPtr<nsIFile> file;
MOZ_TRY(fileURL->GetFile(getter_AddRefs(file)));
nsCString path;
MOZ_TRY(file->GetNativePath(path));
nsString path;
MOZ_TRY(file->GetPath(path));
return CacheKey(CacheKey::TypeFile, path);
return CacheKey(CacheKey::TypeFile, NS_ConvertUTF16toUTF8(path));
}
// Not a file or Omnijar URI, so currently unsupported.
@ -628,7 +621,8 @@ URLPreloader::CacheKey::ToFileLocation()
{
if (mType == TypeFile) {
nsCOMPtr<nsIFile> file;
MOZ_TRY(NS_NewNativeLocalFile(mPath, false, getter_AddRefs(file)));
MOZ_TRY(NS_NewLocalFile(NS_ConvertUTF8toUTF16(mPath), false,
getter_AddRefs(file)));
return Move(FileLocation(file));
}

View File

@ -77,8 +77,6 @@ public:
static Result<const nsCString, nsresult> ReadFile(nsIFile* file, ReadType readType = Forget);
static Result<const nsCString, nsresult> ReadFile(const nsACString& path, ReadType readType = Forget);
static Result<const nsCString, nsresult> ReadZip(nsZipArchive* archive,
const nsACString& path,
ReadType readType = Forget);
@ -159,7 +157,9 @@ private:
explicit CacheKey(nsIFile* file)
: mType(TypeFile)
{
MOZ_ALWAYS_SUCCEEDS(file->GetNativePath(mPath));
nsString path;
MOZ_ALWAYS_SUCCEEDS(file->GetPath(path));
CopyUTF16toUTF8(path, mPath);
}
explicit inline CacheKey(InputBuffer& buffer);

Some files were not shown because too many files have changed in this diff Show More