Bug 1208316 - Test media flow per track instead of per stream. r=drno

The existing media elements may have ended and new tracks been created in the
stream it used as source, since we now end when a stream goes inactive.

We test the tracks individually in media elements instead. They'll either be
live or ended forever.

MozReview-Commit-ID: DxBk2XjSVCH

--HG--
extra : rebase_source : cd3884f4d1b90cde5095e3dc3eb5a43363cf7195
This commit is contained in:
Andreas Pehrson 2016-09-14 17:15:24 +02:00
parent bffd28794f
commit 0b82737563
7 changed files with 97 additions and 85 deletions

View File

@ -233,37 +233,17 @@ function realCreateHTML(meta) {
document.body.appendChild(content);
}
function getMediaElement(label, direction, streamId) {
var id = label + '_' + direction + '_' + streamId;
return document.getElementById(id);
}
/**
* Create the HTML element if it doesn't exist yet and attach
* it to the content node.
* Creates an element of the given type, assigns the given id, sets the controls
* and autoplay attributes and adds it to the content node.
*
* @param {string} label
* Prefix to use for the element
* @param {direction} "local" or "remote"
* @param {stream} A MediaStream id.
* @param {audioOnly} Use <audio> element instead of <video>
* @return {HTMLMediaElement} The created HTML media element
* @param {string} type
* Defining if we should create an "audio" or "video" element
* @param {string} id
* A string to use as the element id.
*/
function createMediaElement(label, direction, streamId, audioOnly) {
var id = label + '_' + direction + '_' + streamId;
var element = document.getElementById(id);
// Sanity check that we haven't created the element already
if (element) {
return element;
}
if (!audioOnly) {
// Even if this is just audio now, we might add video later.
element = document.createElement('video');
} else {
element = document.createElement('audio');
}
function createMediaElement(type, id) {
const element = document.createElement(type);
element.setAttribute('id', id);
element.setAttribute('height', 100);
element.setAttribute('width', 150);
@ -274,6 +254,37 @@ function createMediaElement(label, direction, streamId, audioOnly) {
return element;
}
/**
* Returns an existing element for the given track with the given idPrefix,
* as it was added by createMediaElementForTrack().
*
* @param {MediaStreamTrack} track
* Track used as the element's source.
* @param {string} idPrefix
* A string to use as the element id. The track id will also be appended.
*/
function getMediaElementForTrack(track, idPrefix) {
return document.getElementById(idPrefix + '_' + track.id);
}
/**
* Create a media element with a track as source and attach it to the content
* node.
*
* @param {MediaStreamTrack} track
* Track for use as source.
* @param {string} idPrefix
* A string to use as the element id. The track id will also be appended.
* @return {HTMLMediaElement} The created HTML media element
*/
function createMediaElementForTrack(track, idPrefix) {
const id = idPrefix + '_' + track.id;
const element = createMediaElement(track.kind, id);
element.srcObject = new MediaStream([track]);
return element;
}
/**
* Wrapper function for mediaDevices.getUserMedia used by some tests. Whether

View File

@ -862,12 +862,12 @@ PeerConnectionWrapper.prototype = {
this._pc.setIdentityProvider(provider, protocol, identity);
},
ensureMediaElement : function(track, stream, direction) {
var element = getMediaElement(this.label, direction, stream.id);
ensureMediaElement : function(track, direction) {
const idPrefix = [this.label, direction].join('_');
var element = getMediaElementForTrack(track, idPrefix);
if (!element) {
element = createMediaElement(this.label, direction, stream.id,
this.audioElementsOnly);
element = createMediaElementForTrack(track, idPrefix);
if (direction == "local") {
this.localMediaElements.push(element);
} else if (direction == "remote") {
@ -878,7 +878,7 @@ PeerConnectionWrapper.prototype = {
// We do this regardless, because sometimes we end up with a new stream with
// an old id (ie; the rollback tests cause the same stream to be added
// twice)
element.srcObject = stream;
element.srcObject = new MediaStream([track]);
element.play();
},
@ -912,14 +912,14 @@ PeerConnectionWrapper.prototype = {
// This will create one media element per track, which might not be how
// we set up things with the RTCPeerConnection. It's the only way
// we can ensure all sent tracks are flowing however.
this.ensureMediaElement(track, new MediaStream([track]), "local");
this.ensureMediaElement(track, "local");
return this.observedNegotiationNeeded;
},
/**
* Callback when we get local media. Also an appropriate HTML media element
* will be created, which may be obtained later with |getMediaElement|.
* will be created and added to the content node.
*
* @param {MediaStream} stream
* Media stream to handle
@ -950,7 +950,7 @@ PeerConnectionWrapper.prototype = {
type: track.kind,
streamId: stream.id
};
this.ensureMediaElement(track, stream, "local");
this.ensureMediaElement(track, "local");
});
},
@ -1181,7 +1181,7 @@ PeerConnectionWrapper.prototype = {
this.observedRemoteTrackInfoById);
ok(this.isTrackOnPC(event.track), "Found track " + event.track.id);
this.ensureMediaElement(event.track, event.streams[0], 'remote');
this.ensureMediaElement(event.track, 'remote');
});
},
@ -1368,45 +1368,42 @@ PeerConnectionWrapper.prototype = {
},
/**
* Check that media flow is present on the given media element by waiting for
* it to reach ready state HAVE_ENOUGH_DATA and progress time further than
* the start of the check.
* Check that media flow is present for the given media element by checking
* that it reaches ready state HAVE_ENOUGH_DATA and progresses time further
* than the start of the check.
*
* This ensures, that the stream being played is producing
* data and that at least one video frame has been displayed.
* data and, in case it contains a video track, that at least one video frame
* has been displayed.
*
* @param {object} element
* A media element to wait for data flow on.
 * @param {HTMLMediaElement} element
 * The media element to check
* @returns {Promise}
* A promise that resolves when media is flowing.
* A promise that resolves when media data is flowing.
*/
waitForMediaElementFlow : function(element) {
return new Promise(resolve => {
info("Checking data flow to element: " + element.id);
if (element.ended && element.readyState >= element.HAVE_CURRENT_DATA) {
resolve();
return;
}
var haveEnoughData = false;
var oncanplay = () => {
info("Element " + element.id + " saw 'canplay', " +
"meaning HAVE_ENOUGH_DATA was just reached.");
haveEnoughData = true;
element.removeEventListener("canplay", oncanplay);
};
var ontimeupdate = () => {
info("Element " + element.id + " saw 'timeupdate'" +
", currentTime=" + element.currentTime +
"s, readyState=" + element.readyState);
if (haveEnoughData || element.readyState == element.HAVE_ENOUGH_DATA) {
element.removeEventListener("timeupdate", ontimeupdate);
ok(true, "Media flowing for element: " + element.id);
resolve();
}
};
element.addEventListener("canplay", oncanplay);
element.addEventListener("timeupdate", ontimeupdate);
});
info("Checking data flow for element: " + element.id);
is(element.ended, !element.srcObject.active,
"Element ended should be the inverse of the MediaStream's active state");
if (element.ended) {
is(element.readyState, element.HAVE_CURRENT_DATA,
"Element " + element.id + " is ended and should have had data");
return Promise.resolve();
}
const haveEnoughData = (element.readyState == element.HAVE_ENOUGH_DATA ?
Promise.resolve() :
haveEvent(element, "canplay", wait(60000,
new Error("Timeout for element " + element.id))))
.then(_ => info("Element " + element.id + " has enough data."));
const startTime = element.currentTime;
const timeProgressed = timeout(
listenUntil(element, "timeupdate", _ => element.currentTime > startTime),
60000, "Element " + element.id + " should progress currentTime")
.then();
return Promise.all([haveEnoughData, timeProgressed]);
},
/**

View File

@ -19,7 +19,7 @@ var gUMAudioElement;
var analyser;
runTest(() => getUserMedia({audio: true})
.then(stream => {
gUMAudioElement = createMediaElement("gUMAudio", "local", "gUMAudio", true);
gUMAudioElement = createMediaElement("audio", "gUMAudio");
gUMAudioElement.srcObject = stream;
audioContext = new AudioContext();

View File

@ -26,7 +26,7 @@ runTest(() => getUserMedia({audio: true, video: true})
// We need to test with multiple tracks. We add an extra of each kind.
stream.getTracks().forEach(t => stream.addTrack(t.clone()));
audioElement = createMediaElement("gUMAudio", "local", "gUMAudio", true);
audioElement = createMediaElement("audio", "gUMAudio");
audioElement.srcObject = stream;
return haveEvent(audioElement, "loadedmetadata", wait(50000, new Error("Timeout")));
@ -43,7 +43,7 @@ runTest(() => getUserMedia({audio: true, video: true})
return haveNoEvent(audioCaptureStream, "addtrack");
})
.then(() => {
videoElement = createMediaElement("gUMVideo", "local", "gUMVideo", false);
videoElement = createMediaElement("video", "gUMVideo");
info("Capturing video element (captureStream -> loadedmetadata)");
videoCaptureStream = videoElement.mozCaptureStream();
@ -143,7 +143,7 @@ runTest(() => getUserMedia({audio: true, video: true})
info("Testing CaptureStreamUntilEnded");
untilEndedElement =
createMediaElement("gUMVideoUntilEnded", "local", "gUMVideoUntilEnded", false);
createMediaElement("video", "gUMVideoUntilEnded");
untilEndedElement.srcObject = audioElement.srcObject;
return haveEvent(untilEndedElement, "loadedmetadata",

View File

@ -62,13 +62,13 @@ var checkVideoPaused = video => checkHasFrame(video)
runTest(() => getUserMedia({video: true, fake: true})
.then(stream => {
gUMVideoElement =
createMediaElement("gUMVideo", "local", "gUMVideo", false);
createMediaElement("video", "gUMVideo");
gUMVideoElement.srcObject = stream;
gUMVideoElement.play();
info("Capturing");
captureStreamElement =
createMediaElement("captureStream", "local", "captureStream", false);
createMediaElement("video", "captureStream");
captureStreamElement.srcObject = gUMVideoElement.mozCaptureStream();
captureStreamElement.play();

View File

@ -20,9 +20,9 @@ runNetworkTest(() => {
test.setMediaConstraints([{audio: true, video: true, fake: true}], []);
test.chain.append([
function CHECK_ASSUMPTIONS() {
is(test.pcLocal.localMediaElements.length, 1,
is(test.pcLocal.localMediaElements.length, 2,
"pcLocal should have two media elements");
is(test.pcRemote.remoteMediaElements.length, 1,
is(test.pcRemote.remoteMediaElements.length, 2,
"pcRemote should have two media elements");
is(test.pcLocal._pc.getLocalStreams().length, 1,
"pcLocal should have one stream");
@ -31,8 +31,10 @@ runNetworkTest(() => {
},
function CHECK_VIDEO() {
var h = new CaptureStreamTestHelper2D();
var localVideo = test.pcLocal.localMediaElements[0];
var remoteVideo = test.pcRemote.remoteMediaElements[0];
var localVideo = test.pcLocal.localMediaElements
.find(e => e instanceof HTMLVideoElement);
var remoteVideo = test.pcRemote.remoteMediaElements
.find(e => e instanceof HTMLVideoElement);
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 50;

View File

@ -26,7 +26,7 @@ runNetworkTest(() => {
return getUserMedia(test.pcLocal.constraints[0]).then(stream => {
originalStream = stream;
localVideoOriginal =
createMediaElement("audiovideo", "local-original");
createMediaElement("video", "local-original");
localVideoOriginal.srcObject = stream;
test.pcLocal.attachLocalStream(originalStream.clone());
});
@ -34,9 +34,9 @@ runNetworkTest(() => {
]);
test.chain.append([
function CHECK_ASSUMPTIONS() {
is(test.pcLocal.localMediaElements.length, 1,
is(test.pcLocal.localMediaElements.length, 2,
"pcLocal should have two media elements");
is(test.pcRemote.remoteMediaElements.length, 1,
is(test.pcRemote.remoteMediaElements.length, 2,
"pcRemote should have two media elements");
is(test.pcLocal._pc.getLocalStreams().length, 1,
"pcLocal should have one stream");
@ -46,8 +46,10 @@ runNetworkTest(() => {
function CHECK_VIDEO() {
info("Checking video");
var h = new CaptureStreamTestHelper2D();
var localVideoClone = test.pcLocal.localMediaElements[0];
var remoteVideoClone = test.pcRemote.remoteMediaElements[0];
var localVideoClone = test.pcLocal.localMediaElements
.find(e => e instanceof HTMLVideoElement);
var remoteVideoClone = test.pcRemote.remoteMediaElements
.find(e => e instanceof HTMLVideoElement);
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.