Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-12-01 17:23:59 +00:00)

Bug 1296531 - Let waitForPixel and friends take a cancelPromise. r=jib

MozReview-Commit-ID: 42hT181wkvq
--HG--
extra : rebase_source : 430d7ea6156bddf98893c4b8b3d0db70c3c54831

This commit is contained in: parent b99abdab75, commit 78a302848f
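The headline change: waitForPixel and the helpers built on it now take an options object instead of a trailing argument list, and the high-level helpers are renamed (waitForPixelColor becomes pixelMustBecome, waitForPixelColorTimeout becomes pixelMustNotBecome). A minimal usage sketch under assumptions not in the patch (the helper instance, video element and teardown wiring are illustrative):

// Illustrative sketch, not part of the patch.
// Assumed: `h` is a CaptureStreamTestHelper2D and `video` plays a captured stream.
let cancelWaits;
const cancel = new Promise(resolve => (cancelWaits = resolve));

// Fuzzily wait for the top-left pixel to become red, unless cancelled.
await h.pixelMustBecome(video, h.red, {
  threshold: 128,
  infoString: "video should become red",
  cancel,
});

// Fail the test if the pixel becomes red within 1000 ms of playback.
await h.pixelMustNotBecome(video, h.red, {
  threshold: 128,
  time: 1000,
  infoString: "video should not become red",
});

// On teardown, resolving `cancel` makes any pending wait reject with this value.
cancelWaits(new Error("test ended"));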
@@ -62,12 +62,7 @@ CaptureStreamTestHelper.prototype = {
* optional scaling of the drawImage() call (so that a 1x1 black image
* won't just draw 1 pixel in the corner)
*/
getPixel: function (video, offsetX, offsetY, width, height) {
offsetX = offsetX || 0; // Set to 0 if not passed in.
offsetY = offsetY || 0; // Set to 0 if not passed in.
width = width || 0; // Set to 0 if not passed in.
height = height || 0; // Set to 0 if not passed in.

getPixel: function (video, offsetX = 0, offsetY = 0, width = 0, height = 0) {
// Avoids old values in case of a transparent image.
CaptureStreamTestHelper2D.prototype.clear.call(this, this.cout);

@@ -84,9 +79,10 @@ CaptureStreamTestHelper.prototype = {
* Returns true if px lies within the per-channel |threshold| of the
* referenced color for all channels. px is on the form of an array of color
* channels, [R,G,B,A]. Each channel is in the range [0, 255].
*
* Threshold defaults to 0 which is an exact match.
*/
isPixel: function (px, refColor, threshold) {
threshold = threshold || 0; // Default to 0 (exact match) if not passed in.
isPixel: function (px, refColor, threshold = 0) {
return px.every((ch, i) => Math.abs(ch - refColor.data[i]) <= threshold);
},

@@ -94,12 +90,10 @@ CaptureStreamTestHelper.prototype = {
* Returns true if px lies further away than |threshold| of the
* referenced color for any channel. px is on the form of an array of color
* channels, [R,G,B,A]. Each channel is in the range [0, 255].
*
* Threshold defaults to 127 which should be far enough for most cases.
*/
isPixelNot: function (px, refColor, threshold) {
if (threshold === undefined) {
// Default to 127 (should be sufficiently far away) if not passed in.
threshold = 127;
}
isPixelNot: function (px, refColor, threshold = 127) {
return px.some((ch, i) => Math.abs(ch - refColor.data[i]) > threshold);
},

@@ -113,65 +107,84 @@ CaptureStreamTestHelper.prototype = {

/*
* Returns a promise that resolves when the provided function |test|
* returns true.
* returns true, or rejects when the optional `cancel` promise resolves.
*/
waitForPixel: function (video, offsetX, offsetY, test, timeout, width, height) {
return new Promise(resolve => {
const startTime = video.currentTime;
var ontimeupdate = () => {
var pixelMatch = false;
try {
pixelMatch = test(this.getPixel(video, offsetX, offsetY, width, height));
} catch (e) {
info("Waiting for pixel but no video available: " + e + "\n" + e.stack);
}
if (!pixelMatch &&
(!timeout || video.currentTime < startTime + (timeout / 1000.0))) {
// No match yet and,
// No timeout (waiting indefinitely) or |timeout| has not passed yet.
waitForPixel: async function (video, test, {
offsetX = 0, offsetY = 0,
width = 0, height = 0,
cancel = new Promise(() => {}),
} = {}) {
let aborted = false;
cancel.then(e => aborted = true);

while (true) {
await Promise.race([
new Promise(resolve => video.addEventListener("timeupdate", resolve, { once: true })),
cancel,
]);
if (aborted) {
throw await cancel;
}
try {
if (test(this.getPixel(video, offsetX, offsetY, width, height))) {
return;
}
video.removeEventListener("timeupdate", ontimeupdate);
resolve(pixelMatch);
};
video.addEventListener("timeupdate", ontimeupdate);
});
} catch (e) {
info("Waiting for pixel but no video available: " + e + "\n" + e.stack);
}
}
},
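The rewritten waitForPixel loops, racing a one-shot "timeupdate" listener against the cancel promise, and rethrows whatever the cancel promise resolved with if it won the race. The same pattern works for any event; a reduced sketch (the nextEventOrCancel name is made up for illustration):

// Illustrative sketch of the event-or-cancel race used above; not part of the patch.
async function nextEventOrCancel(target, name, cancel = new Promise(() => {})) {
  let cancelled = false;
  cancel.then(() => (cancelled = true));
  await Promise.race([
    new Promise(resolve => target.addEventListener(name, resolve, { once: true })),
    cancel,
  ]);
  if (cancelled) {
    throw await cancel;  // surface the cancel value as the rejection reason
  }
}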

/*
* Returns a promise that resolves when the top left pixel of |video| matches
* on all channels. Use |threshold| for fuzzy matching the color on each
* channel, in the range [0,255].
* channel, in the range [0,255]. 0 means exact match, 255 accepts anything.
*/
waitForPixelColor: function (video, refColor, threshold, infoString) {
pixelMustBecome: async function (video, refColor, {
threshold = 0, infoString = "n/a",
cancel = new Promise(() => {}),
} = {}) {
info("Waiting for video " + video.id + " to match [" +
refColor.data.join(',') + "] - " + refColor.name +
" (" + infoString + ")");
var paintedFrames = video.mozPaintedFrames-1;
return this.waitForPixel(video, 0, 0,
px => { if (paintedFrames != video.mozPaintedFrames) {
info("Frame: " + video.mozPaintedFrames +
" IsPixel ref=" + refColor.data +
" threshold=" + threshold +
" value=" + px);
paintedFrames = video.mozPaintedFrames;
}
return this.isPixel(px, refColor, threshold); })
.then(() => ok(true, video.id + " " + infoString));
await this.waitForPixel(video, px => {
if (paintedFrames != video.mozPaintedFrames) {
info("Frame: " + video.mozPaintedFrames +
" IsPixel ref=" + refColor.data +
" threshold=" + threshold +
" value=" + px);
paintedFrames = video.mozPaintedFrames;
}
return this.isPixel(px, refColor, threshold);
}, {
offsetX: 0, offsetY: 0,
width: 0, height: 0,
cancel,
});
ok(true, video.id + " " + infoString);
},

/*
* Returns a promise that resolves after |timeout| ms of playback or when the
* Returns a promise that resolves after |time| ms of playback or when the
* top left pixel of |video| becomes |refColor|. The test is failed if the
* timeout is not reached.
* time is not reached, or if the cancel promise resolves.
*/
waitForPixelColorTimeout: function (video, refColor, threshold, timeout, infoString) {
info("Waiting for " + video.id + " to time out after " + timeout +
pixelMustNotBecome: async function (video, refColor, {
threshold = 0, time = 5000,
infoString = "n/a",
} = {}) {
info("Waiting for " + video.id + " to time out after " + time +
"ms against [" + refColor.data.join(',') + "] - " + refColor.name);
return this.waitForPixel(video, 0, 0,
px => this.isPixel(px, refColor, threshold),
timeout)
.then(result => ok(!result, video.id + " " + infoString));
let timeout = new Promise(resolve => setTimeout(resolve, time));
let analysis = async () => {
await this.waitForPixel(video, px => this.isPixel(px, refColor, threshold), {
offsetX: 0, offsetY: 0, width: 0, height: 0,
});
throw new Error("Got color " + refColor.name + ". " + infoString);
};
await Promise.race([timeout, analysis()]);
ok(true, video.id + " " + infoString);
},

/* Create an element of type |type| with id |id| and append it to the body. */
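pixelMustNotBecome inverts the wait: the pixel analysis throws on a match and is raced against a plain timer, so reaching the timeout is the passing path. The shape of that inversion, sketched on its own (names here are illustrative):

// Illustrative sketch, not part of the patch: pass if `analyse()` finds no match
// within `timeMs`, fail if it does.
async function mustNotHappen(analyse, timeMs) {
  const timer = new Promise(resolve => setTimeout(resolve, timeMs));
  const analysis = (async () => {
    await analyse();                      // resolves only when a match is seen
    throw new Error("Unexpected match");  // a match fails the check
  })();
  await Promise.race([timer, analysis]);  // the timer winning means success
}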

@@ -31,13 +31,15 @@ function checkDrawColorInitialRed() {
"vmanual should not be drawn to before stable state");

return Promise.resolve()
.then(() => h.waitForPixelColor(vauto, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vrate, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red when we get" +
" to stable state (first frame)"));
.then(() => h.pixelMustBecome(vauto, h.red, {
infoString: "should become red automatically",
}))
.then(() => h.pixelMustBecome(vrate, h.red, {
infoString: "should become red automatically",
}))
.then(() => h.pixelMustBecome(vmanual, h.red, {
infoString: "should become red when we get to stable state (first frame)",
}));
}

function checkDrawColorGreen() {
@@ -46,15 +48,19 @@ function checkDrawColorGreen() {
var drawing = h.startDrawing(() => h.drawColor(c, h.green));

return Promise.resolve()
.then(() => h.waitForPixelColor(vauto, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vrate, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should still be red"))
.then(() => h.pixelMustBecome(vauto, h.green, {
infoString: "should become green automatically",
}))
.then(() => h.pixelMustBecome(vrate, h.green, {
infoString: "should become green automatically",
}))
.then(() => h.pixelMustBecome(vmanual, h.red, {
infoString: "should still be red",
}))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should become green after requstFrame()"))
.then(() => h.pixelMustBecome(vmanual, h.green, {
infoString: "should become green after requstFrame()",
}))
.catch(err => ok(false, "checkDrawColorGreen failed: ", err))
.then(() => drawing.stop());
}
@@ -64,12 +70,14 @@ function checkRequestFrameOrderGuarantee() {
"call results in the expected frame seen in the stream.");

return Promise.resolve()
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should still be green"))
.then(() => h.pixelMustBecome(vmanual, h.green, {
infoString: "should still be green",
}))
.then(() => h.drawColor(c, h.red)) // 1. Draw canvas red
.then(() => h.requestFrame(vmanual)) // 2. Immediately request a frame
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red after call order test"))
.then(() => h.pixelMustBecome(vmanual, h.red, {
infoString: "should become red after call order test",
}));
}

function checkDrawImageNotCleanRed() {
@@ -86,14 +94,20 @@ function checkDrawImageNotCleanRed() {
})
.then(() => drawing = h.startDrawing(() => ctx.drawImage(notCleanRed, 0, 0, c.width, c.height)))
.then(() => h.testNotClean(c))
.then(() => h.waitForPixelColorTimeout(vauto, h.red, 0, 1000,
"should not become red"))
.then(() => h.isPixelNot(h.getPixel(vrate), h.red, 250,
"should not have become red"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0, "should still be green"))
.then(() => h.pixelMustNotBecome(vauto, h.red, {
time: 1000,
infoString: "should not become red",
}))
.then(() => ok(h.isPixelNot(h.getPixel(vrate), h.red, 250),
"should not have become red"))
.then(() => h.pixelMustBecome(vmanual, h.green, {
infoString: "should still be green",
}))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixelColorTimeout(vmanual, h.red, 0, 1000,
"should not become red"))
.then(() => h.pixelMustNotBecome(vmanual, h.red, {
time: 1000,
infoString: "should not become red",
}))
.catch(err => ok(false, "checkDrawImageNotCleanRed failed: ", err))
.then(() => drawing.stop());
}
@@ -117,6 +131,7 @@ function finish() {
}

function beginTest() {
SimpleTest.requestFlakyTimeout("Ensuring nothing happens until timing out with good margin");
h = new CaptureStreamTestHelper2D();

c = h.createAndAppendElement('canvas', 'c');

@@ -54,21 +54,24 @@ function checkClearColorInitialRed() {
vmanual.srcObject = c.captureStream(0);
vrate.srcObject = c.captureStream(10);

ok(h.isPixel(h.getPixel(vauto), h.blackTransparent, 0,
"vauto should not be drawn to before stable state"));
ok(h.isPixel(h.getPixel(vrate), h.blackTransparent, 0,
"vrate should not be drawn to before stable state"));
ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent, 0,
"vmanual should not be drawn to before stable state"));
ok(h.isPixel(h.getPixel(vauto), h.blackTransparent),
"vauto should not be drawn to before stable state");
ok(h.isPixel(h.getPixel(vrate), h.blackTransparent),
"vrate should not be drawn to before stable state");
ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent),
"vmanual should not be drawn to before stable state");

return Promise.resolve()
.then(() => h.waitForPixelColor(vauto, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vrate, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red when we get to stable " +
"state (first frame)"))
.then(() => h.pixelMustBecome(vauto, h.red, {
infoString: "should become red automatically",
}))
.then(() => h.pixelMustBecome(vrate, h.red, {
infoString: "should become red automatically",
}))
.then(() => h.pixelMustBecome(vmanual, h.red, {
infoString: "should become red when we get to stable "
+ "state (first frame)",
}))
}

function checkDrawColorGreen() {
@@ -76,15 +79,19 @@ function checkDrawColorGreen() {
var drawing = h.startDrawing(h.drawColor.bind(h, c, h.green));
checkGLError('after DrawColor');
return Promise.resolve()
.then(() => h.waitForPixelColor(vauto, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vrate, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should still be red"))
.then(() => h.pixelMustBecome(vauto, h.green, {
infoString: "should become green automatically",
}))
.then(() => h.pixelMustBecome(vrate, h.green, {
infoString: "should become green automatically",
}))
.then(() => h.pixelMustBecome(vmanual, h.red, {
infoString: "should still be red",
}))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should become green after requstFrame()"))
.then(() => h.pixelMustBecome(vmanual, h.green, {
infoString: "should become green after requstFrame()",
}))
.then(() => drawing.stop());
}

@@ -92,15 +99,19 @@ function checkClearColorRed() {
info("Checking that clearing to red works.");
var drawing = h.startDrawing(h.clearColor.bind(h, c, h.red));
return Promise.resolve()
.then(() => h.waitForPixelColor(vauto, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vrate, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should still be green"))
.then(() => h.pixelMustBecome(vauto, h.red, {
infoString: "should become red automatically",
}))
.then(() => h.pixelMustBecome(vrate, h.red, {
infoString: "should become red automatically",
}))
.then(() => h.pixelMustBecome(vmanual, h.green, {
infoString: "should still be green",
}))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red after requestFrame()"))
.then(() => h.pixelMustBecome(vmanual, h.red, {
infoString: "should become red after requestFrame()",
}))
.then(() => drawing.stop());
}

@@ -108,11 +119,12 @@ function checkRequestFrameOrderGuarantee() {
info("Checking that requestFrame() immediately after a draw " +
"call results in the expected frame seen in the stream.");
return Promise.resolve()
.then(() => h.waitForPixelColor(vmanual, h.red, 0, "should still be red"))
.then(() => h.pixelMustBecome(vmanual, h.red, { infoString: "should still be red" }))
.then(() => h.drawColor(c, h.green)) // 1. Draw canvas green
.then(() => h.requestFrame(vmanual)) // 2. Immediately request a frame
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should become green after call order test"))
.then(() => h.pixelMustBecome(vmanual, h.green, {
infoString: "should become green after call order test",
}))
}

function checkEndedOnStop() {
@@ -57,8 +57,10 @@ function startTest() {
SimpleTest.finish();
};
document.getElementById("content").appendChild(video);
helper.waitForPixelColor(video, helper.red, 128, "Should become red")
.then(SimpleTest.finish);
helper.pixelMustBecome(video, helper.red, {
threshold: 128,
infoString: "Should become red"
}).then(SimpleTest.finish);
};
xhr.onerror = () => {
ok(false, "XHR error");

@@ -126,9 +126,12 @@ runTestWhenReady(async () => {
return lowerAmp < 50 && freqAmp > 200 && upperAmp < 50;
}, endedNoError.then(() => new Error("Audio check failed")));

const videoReady = helper.waitForPixelColor(
video, helper.red, 128, "Should become red",
endedNoError.then(() => new Error("Video check failed")));
const videoReady = helper.pixelMustBecome(
video, helper.red, {
threshold: 128,
infoString: "Should become red",
cancel: endedNoError.then(() => new Error("Video check failed")),
});

video.play();

@@ -56,8 +56,10 @@ function startTest() {
SimpleTest.finish();
};
document.getElementById("content").appendChild(video);
helper.waitForPixelColor(video, helper.red, 128, "Should become red")
.then(SimpleTest.finish);
helper.pixelMustBecome(video, helper.red, {
threshold: 128,
infoString: "Should become red"
}).then(SimpleTest.finish);
};

mediaRecorder.start();

@@ -90,12 +90,14 @@ function startTest() {
video.play();

// Check last color
helper.waitForPixelColor(video, helper.red, 128, "Should become red")
.then(() => {
video.onresize = {};
video.onended = {};
SimpleTest.finish();
});
helper.pixelMustBecome(video, helper.red, {
threshold: 128,
infoString: "Should become red",
}).then(() => {
video.onresize = {};
video.onended = {};
SimpleTest.finish();
});
};

// Start here by stream recorder.

@@ -90,12 +90,14 @@ function startTest() {
video.play();

// Check last color
helper.waitForPixelColor(video, helper.red, 128, "Should become red")
.then(() => {
video.onresize = {};
video.onended = {};
SimpleTest.finish();
});
helper.pixelMustBecome(video, helper.red, {
threshold: 128,
infoString: "Should become red",
}).then(() => {
video.onresize = {};
video.onended = {};
SimpleTest.finish();
});
};

// Start here by stream recorder.

@@ -56,8 +56,10 @@ function startTest() {
SimpleTest.finish();
};
document.getElementById("content").appendChild(video);
helper.waitForPixelColor(video, helper.red, 128, "Should become red")
.then(SimpleTest.finish);
helper.pixelMustBecome(video, helper.red, {
threshold: 128,
infoString: "Should become red",
}).then(SimpleTest.finish);
};

mediaRecorder.start();

@@ -138,7 +138,7 @@ AudioStreamAnalyser.prototype = {
do {
await new Promise(resolve => requestAnimationFrame(resolve));
if (aborted) {
throw error;
throw await cancel;
}
}
while (!analysisFunction(this.getByteFrequencyData()));
@@ -634,7 +634,7 @@ function createOneShotEventWrapper(wrapper, obj, event) {
/**
* Returns a promise that resolves when `target` has raised an event with the
* given name the given number of times. Cancel the returned promise by passing
* in a `cancelPromise` and resolve it.
* in a `cancel` promise and resolving it.
*
* @param {object} target
* The target on which the event should occur.
@@ -642,16 +642,16 @@ function createOneShotEventWrapper(wrapper, obj, event) {
* The name of the event that should occur.
* @param {integer} count
* Optional number of times the event should be raised before resolving.
* @param {promise} cancelPromise
* @param {promise} cancel
* Optional promise that on resolving rejects the returned promise,
* so we can avoid logging results after a test has finished.
* @returns {promise} A promise that resolves to the last of the seen events.
*/
function haveEvents(target, name, count, cancelPromise) {
function haveEvents(target, name, count, cancel) {
var listener;
var counter = count || 1;
return Promise.race([
(cancelPromise || new Promise(() => {})).then(e => Promise.reject(e)),
(cancel || new Promise(() => {})).then(e => Promise.reject(e)),
new Promise(resolve =>
target.addEventListener(name, listener = e => (--counter < 1 && resolve(e))))
])
@@ -660,20 +660,20 @@ function haveEvents(target, name, count, cancelPromise) {

/**
* Returns a promise that resolves when `target` has raised an event with the
* given name. Cancel the returned promise by passing in a `cancelPromise` and
* resolve it.
* given name. Cancel the returned promise by passing in a `cancel` promise and
* resolving it.
*
* @param {object} target
* The target on which the event should occur.
* @param {string} name
* The name of the event that should occur.
* @param {promise} cancelPromise
* @param {promise} cancel
* Optional promise that on resolving rejects the returned promise,
* so we can avoid logging results after a test has finished.
* @returns {promise} A promise that resolves to the seen event.
*/
function haveEvent(target, name, cancelPromise) {
return haveEvents(target, name, 1, cancelPromise);
function haveEvent(target, name, cancel) {
return haveEvents(target, name, 1, cancel);
};

/**
@@ -706,13 +706,13 @@ function haveNoEvent(target, name, timeoutPromise) {
* The name of the event that should occur.
* @param {integer} count
* Optional number of times the event should be raised before resolving.
* @param {promise} cancelPromise
* @param {promise} cancel
* Optional promise that on resolving rejects the returned promise,
* so we can avoid logging results after a test has finished.
* @returns {promise} A promise that resolves to the last of the seen events.
*/
function haveEventsButNoMore(target, name, count, cancelPromise) {
return haveEvents(target, name, count, cancelPromise)
function haveEventsButNoMore(target, name, count, cancel) {
return haveEvents(target, name, count, cancel)
.then(e => haveNoEvent(target, name).then(() => e));
};
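With the parameter renamed to `cancel`, callers abort a pending haveEvent/haveEvents wait by resolving that promise; its value becomes the rejection reason. An illustrative call (the peer connection and teardown promise are assumptions, not from the patch):

// Illustrative sketch, not part of the patch.
// Assumed: `pc` is an RTCPeerConnection and `testDone` resolves at teardown.
const cancel = testDone.then(() => new Error("Test finished before the track event"));
try {
  const ev = await haveEvent(pc, "track", cancel);
  info("Got track: " + ev.track.id);
} catch (e) {
  info("Wait for track was cancelled: " + e);
}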

@@ -975,47 +975,57 @@ class VideoStreamHelper {
this._helper = new CaptureStreamTestHelper2D(50,50);
}

checkHasFrame(video, offsetX, offsetY, threshold) {
async checkHasFrame(video, { offsetX, offsetY, threshold } = {}) {
const h = this._helper;
return h.waitForPixel(video, offsetX, offsetY, px => {
await h.waitForPixel(video, px => {
let result = h.isOpaquePixelNot(px, h.black, threshold);
info("Checking that we have a frame, got [" +
Array.slice(px) + "]. Ref=[" +
Array.slice(h.black.data) + "]. Threshold=" + threshold +
". Pass=" + result);
return result;
});
}, { offsetX, offsetY });
}

async checkVideoPlaying(video, offsetX, offsetY, threshold) {
async checkVideoPlaying(video, { offsetX = 10, offsetY = 10,
threshold = 16,
} = {}) {
const h = this._helper;
await this.checkHasFrame(video, offsetX, offsetY, threshold);
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
return h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isPixelNot(px, startPixel, threshold)
await this.checkHasFrame(video, { offsetX, offsetY, threshold });
let startPixel = {
data: h.getPixel(video, offsetX, offsetY),
name: "startcolor",
};
await h.waitForPixel(video, px => {
let result = h.isPixelNot(px, startPixel, threshold);
info("Checking playing, [" +
Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
"]. Threshold=" + threshold + " Pass=" + result);
return result;
});
}, { offsetX, offsetY });
}

async checkVideoPaused(video, offsetX, offsetY, threshold, timeout) {
async checkVideoPaused(video, { offsetX = 10, offsetY = 10,
threshold = 16, time = 5000,
}={}) {
const h = this._helper;
await this.checkHasFrame(video, offsetX, offsetY, threshold);
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
const changed = await h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, startPixel, threshold);
info("Checking paused, [" +
Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
"]. Threshold=" + threshold + " Pass=" + result);
return result;
}, timeout);
ok(!changed, "Frame shouldn't change within " + timeout / 1000 + " seconds.");
await this.checkHasFrame(video, { offsetX, offsetY, threshold });
let startPixel = {
data: h.getPixel(video, offsetX, offsetY),
name: "startcolor",
};
try {
await h.waitForPixel(video, px => {
let result = h.isOpaquePixelNot(px, startPixel, threshold);
info("Checking paused, [" +
Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
"]. Threshold=" + threshold + " Pass=" + result);
return result;
}, { offsetX, offsetY, cancel: wait(time, "timeout") });
ok(false, "Frame changed within " + time/1000 + " seconds");
} catch (e) {
is(e, "timeout", "Frame shouldn't change for " + time/1000 + " seconds");
}
}
}
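checkVideoPaused now uses the cancel promise as its timeout: a timer that resolves with "timeout" cancels the pixel wait, and that rejection is the passing path (the frame never changed). A reduced sketch of the inversion, assuming a wait(ms, value) helper like the one the patch appears to rely on:

// Illustrative sketch, not part of the patch.
const wait = (ms, value) => new Promise(resolve => setTimeout(() => resolve(value), ms));

async function expectNoPixelChange(h, video, startPixel, threshold, timeMs) {
  try {
    await h.waitForPixel(video,
                         px => h.isOpaquePixelNot(px, startPixel, threshold),
                         { cancel: wait(timeMs, "timeout") });
    ok(false, "Frame changed within " + timeMs / 1000 + " seconds");
  } catch (e) {
    is(e, "timeout", "Frame shouldn't change for " + timeMs / 1000 + " seconds");
  }
}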

@@ -40,8 +40,10 @@
};

return listenUntil(video, "loadeddata", () => true)
.then(() => h.waitForPixelColor(video, h.grey, 5,
"The canvas track should be rendered by the media element"))
.then(() => h.pixelMustBecome(video, h.grey, {
threshold: 5,
infoString: "The canvas track should be rendered by the media element",
}))
.then(() => {
[removedTrack, ...stream.getAudioTracks()].forEach(t => t.stop());
});

@@ -17,11 +17,6 @@ createHTML({
var gUMVideoElement;
var captureStreamElement;

// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 20;
const offsetY = 20;
const threshold = 16;
const pausedTimeout = 1000;
let h;

@@ -47,45 +42,45 @@ runTest(() => getUserMedia({video: true, fake: true})
let osc = createOscillatorStream(new AudioContext(), 1000);
captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);

return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
return h.checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Pausing.");
gUMVideoElement.pause();

return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout);
return h.checkVideoPaused(captureStreamElement, { time: pausedTimeout });
})
.then(() => {
info("Video stopped flowing. Playing.");
gUMVideoElement.play();

return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
return h.checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Removing source.");
var stream = gUMVideoElement.srcObject;
gUMVideoElement.srcObject = null;

return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
return h.checkVideoPaused(captureStreamElement, { time: pausedTimeout })
.then(() => stream);
})
.then(stream => {
info("Video stopped flowing. Setting source.");
gUMVideoElement.srcObject = stream;
return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
return h.checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Changing source by track manipulation. Remove first.");
var track = gUMVideoElement.srcObject.getTracks()[0];
gUMVideoElement.srcObject.removeTrack(track);
return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
return h.checkVideoPaused(captureStreamElement, { time: pausedTimeout })
.then(() => track);
})
.then(track => {
info("Video paused. Changing source by track manipulation. Add first.");
gUMVideoElement.srcObject.addTrack(track);
gUMVideoElement.play();
return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
return h.checkVideoPlaying(captureStreamElement);
})
.then(() => {
gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());

@@ -30,7 +30,7 @@
is(test.pcRemote.remoteMediaElements.length, 2,
"Should have two remote media elements after renegotiation");
return Promise.all(test.pcRemote.remoteMediaElements.map(video =>
h.checkVideoPlaying(video, 10, 10, 16)));
h.checkVideoPlaying(video)));
},
]
);

@@ -37,7 +37,7 @@
is(test.pcRemote.remoteMediaElements.length, 2,
"Should have two remote media elements after renegotiation");
return Promise.all(test.pcRemote.remoteMediaElements.map(video =>
h.checkVideoPlaying(video, 10, 10, 16)));
h.checkVideoPlaying(video)));
},
]
);

@@ -48,8 +48,10 @@ runNetworkTest(() => {
function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
mediaElement = test.pcRemote.remoteMediaElements[0];
ok(!!mediaElement, "Should have remote video element for pcRemote");
return h.waitForPixelColor(mediaElement, h.green, 128,
"pcRemote's remote should become green");
return h.pixelMustBecome(mediaElement, h.green, {
threshold: 128,
infoString: "pcRemote's remote should become green",
});
},
function PC_LOCAL_DRAW_LOCAL_RED() {
// After requesting a frame it will be captured at the time of next render.
@@ -59,8 +61,10 @@ runNetworkTest(() => {
h.drawColor(canvas, h.red);
},
function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
return h.waitForPixelColor(mediaElement, h.red, 128,
"pcRemote's remote should become red");
return h.pixelMustBecome(mediaElement, h.red, {
threshold: 128,
infoString: "pcRemote's remote should become red",
});
}
]);
test.run();

@@ -50,8 +50,10 @@ runNetworkTest((options) => {
function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
mediaElement = test.pcRemote.remoteMediaElements[0];
ok(!!mediaElement, "Should have remote video element for pcRemote");
return h.waitForPixelColor(mediaElement, h.green, 128,
"pcRemote's remote should become green");
return h.pixelMustBecome(mediaElement, h.green, {
threshold: 128,
infoString: "pcRemote's remote should become green",
});
},
function PC_LOCAL_DRAW_LOCAL_RED() {
// After requesting a frame it will be captured at the time of next render.
@@ -61,8 +63,10 @@ runNetworkTest((options) => {
h.drawColor(canvas, h.red);
},
function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
return h.waitForPixelColor(mediaElement, h.red, 128,
"pcRemote's remote should become red");
return h.pixelMustBecome(mediaElement, h.red, {
threshold: 128,
infoString: "pcRemote's remote should become red",
});
}
]);
test.run();

@@ -99,8 +99,10 @@ runNetworkTest(() => {
ok(!!vremote, "Should have remote video element for pcRemote");
},
function WAIT_FOR_REMOTE_GREEN() {
return h.waitForPixelColor(vremote, h.green, 128,
"pcRemote's remote should become green");
return h.pixelMustBecome(vremote, h.green, {
threshold: 128,
infoString: "pcRemote's remote should become green",
});
},
function REQUEST_FRAME(test) {
// After requesting a frame it will be captured at the time of next render.
@@ -112,8 +114,10 @@ runNetworkTest(() => {
h.drawColor(canvas, h.red);
},
function WAIT_FOR_REMOTE_RED() {
return h.waitForPixelColor(vremote, h.red, 128,
"pcRemote's remote should become red");
return h.pixelMustBecome(vremote, h.red, {
threshold: 128,
infoString: "pcRemote's remote should become red",
});
}
]);
test.run();

@@ -28,6 +28,8 @@ runNetworkTest(() => {
var stream2;
var canvas2 = h.createAndAppendElement('canvas', 'source_canvas2');

const threshold = 128;

test.setMediaConstraints([{video: true}, {video: true}], []);
test.chain.replace("PC_LOCAL_GUM", [
function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
@@ -65,25 +67,37 @@ runNetworkTest(() => {
// green and one is blue
return Promise.race([
Promise.all([
h.waitForPixelColor(vremote1, h.red, 128,
"pcRemote's remote1 should become red"),
h.waitForPixelColor(vremote2, h.blue, 128,
"pcRemote's remote2 should become blue")
h.pixelMustBecome(vremote1, h.red, {
threshold,
infoString: "pcRemote's remote1 should become red",
}),
h.pixelMustBecome(vremote2, h.blue, {
threshold,
infoString: "pcRemote's remote2 should become blue",
}),
]),
Promise.all([
h.waitForPixelColor(vremote2, h.red, 128,
"pcRemote's remote2 should become red"),
h.waitForPixelColor(vremote1, h.blue, 128,
"pcRemote's remote1 should become blue")
h.pixelMustBecome(vremote2, h.red, {
threshold,
infoString: "pcRemote's remote2 should become red",
}),
h.pixelMustBecome(vremote1, h.blue, {
threshold,
infoString: "pcRemote's remote1 should become blue",
}),
])
]);
},
function WAIT_FOR_REMOTE_BOTH_GREEN() {
return Promise.all([
h.waitForPixelColor(vremote1, h.green, 128,
"pcRemote's remote1 should become green"),
h.waitForPixelColor(vremote2, h.green, 128,
"pcRemote's remote2 should become green")
h.pixelMustBecome(vremote1, h.green, {
threshold,
infoString: "pcRemote's remote1 should become green",
}),
h.pixelMustBecome(vremote2, h.green, {
threshold,
infoString: "pcRemote's remote2 should become green",
}),
])
},
]);

@@ -48,7 +48,7 @@
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
ok(vOriginal.ended, "Original video element should have ended");
return helper.checkVideoPlaying(vAdded, 10, 10, 16);
return helper.checkVideoPlaying(vAdded);
},
]
);

@@ -48,7 +48,7 @@
const vAdded = test.pcRemote.remoteMediaElements.find(
elem => elem.id.includes(track.id));
ok(vOriginal.ended, "Original video element should have ended");
return helper.checkVideoPlaying(vAdded, 10, 10, 16);
return helper.checkVideoPlaying(vAdded);
},
]
);

@@ -64,16 +64,19 @@

// now verify that actually something gets rendered into the remote video
// element.
.then(() => h.waitForPixelColor(v2, h.blue, 128,
"pcRemote's video should become blue"))
.then(() => h.pixelMustBecome(v2, h.blue, {
threshold: 128,
infoString: "pcRemote's video should become blue",
}))
// This will verify that new changes to the canvas propagate through
// the peerconnection
.then(() => {
emitter.colors(h.red, h.green)
})
.then(() => h.waitForPixelColor(v2, h.red, 128,
"pcRemote's video should become red"))

})
.then(() => h.pixelMustBecome(v2, h.red, {
threshold: 128,
infoString: "pcRemote's video should become red",
}))
.catch(reason => ok(false, "unexpected failure: " + reason))
.then(networkTestFinished);
});

@@ -61,7 +61,7 @@
if (!vremote) {
return Promise.reject(new Error("Couldn't find video element"));
}
return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
return addFinallyToPromise(helper.checkVideoPlaying(vremote))
.finally(() => emitter2.stop())
.then(() => {
const px = helper._helper.getPixel(vremote, 10, 10);
@@ -78,7 +78,7 @@
if (!vremote) {
return Promise.reject(new Error("Couldn't find video element"));
}
return helper.checkVideoPlaying(vremote, 10, 10, 16);
return helper.checkVideoPlaying(vremote);
},
]
);

@@ -104,7 +104,7 @@
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
function PC_LOCAL_CHECK_SIZE_1() {
@@ -129,7 +129,7 @@
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
@@ -138,7 +138,7 @@
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
function PC_LOCAL_CHECK_SIZE_2() {

@@ -96,7 +96,7 @@
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_1() {
@@ -121,7 +121,7 @@
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
@@ -130,7 +130,7 @@
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_2() {

@@ -43,12 +43,14 @@ runNetworkTest(() => {

// We're regarding black as disabled here, and we're setting the alpha
// channel of the pixel to 255 to disregard alpha when testing.
var checkVideoEnabled = video =>
h.waitForPixel(video, offsetX, offsetY,
px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)));
var checkVideoDisabled = video =>
h.waitForPixel(video, offsetX, offsetY,
px => (px[3] = 255, h.isPixel(px, h.black, threshold, offsetX*2, offsetY*2)));
var checkVideoEnabled = video => h.waitForPixel(video,
px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)),
{ offsetX, offsetY }
);
var checkVideoDisabled = video => h.waitForPixel(video,
px => (px[3] = 255, h.isPixel(px, h.black, threshold)),
{ offsetX, offsetY }
);
return Promise.resolve()
.then(() => info("Checking local video enabled"))
.then(() => checkVideoEnabled(localVideo))
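Both call sites force the alpha channel to 255 before comparing, so a fully transparent pixel still counts as black (disabled). The comma expression does that in place; spelled out more explicitly, and under the same assumptions as the surrounding test:

// Illustrative restatement of the comma expression above; not part of the patch.
const checkVideoDisabled = video => h.waitForPixel(video, px => {
  px[3] = 255;                               // treat the pixel as fully opaque
  return h.isPixel(px, h.black, threshold);  // black within threshold == disabled
}, { offsetX, offsetY });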

@@ -60,12 +60,14 @@ runNetworkTest(() => {

// We're regarding black as disabled here, and we're setting the alpha
// channel of the pixel to 255 to disregard alpha when testing.
var checkVideoEnabled = video =>
h.waitForPixel(video, offsetX, offsetY,
px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)));
var checkVideoDisabled = video =>
h.waitForPixel(video, offsetX, offsetY,
px => (px[3] = 255, h.isPixel(px, h.black, threshold)));
var checkVideoEnabled = video => h.waitForPixel(video,
px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)),
{ offsetX, offsetY }
);
var checkVideoDisabled = video => h.waitForPixel(video,
px => (px[3] = 255, h.isPixel(px, h.black, threshold)),
{ offsetX, offsetY }
);

return Promise.resolve()
.then(() => info("Checking local original enabled"))

@@ -58,12 +58,16 @@ runNetworkTest(() => {
ok(!!vremote1, "Should have remote video element for pcRemote");
},
function WAIT_FOR_REMOTE_GREEN() {
return h1.waitForPixelColor(vremote1, h1.green, 128,
"pcRemote's remote should become green");
return h1.pixelMustBecome(vremote1, h1.green, {
threshold: 128,
infoString: "pcRemote's remote should become green",
});
},
function WAIT_FOR_REMOTE_RED() {
return h1.waitForPixelColor(vremote1, h1.red, 128,
"pcRemote's remote should become red");
return h1.pixelMustBecome(vremote1, h1.red, {
threshold: 128,
infoString: "pcRemote's remote should become red",
});
}
]);

@@ -87,16 +91,20 @@ runNetworkTest(() => {
ok(!!vremote2, "Should have remote2 video element for pcRemote");
},
function WAIT_FOR_REMOTE2_BLUE() {
return h2.waitForPixelColor(vremote2, h2.blue, 128,
"pcRemote's remote2 should become blue");
return h2.pixelMustBecome(vremote2, h2.blue, {
threshold: 128,
infoString: "pcRemote's remote2 should become blue",
});
},
function DRAW_NEW_LOCAL_GREEN(test) {
stream1.requestFrame();
h1.drawColor(canvas1, h1.green);
},
function WAIT_FOR_REMOTE1_GREEN() {
return h1.waitForPixelColor(vremote1, h1.green, 128,
"pcRemote's remote1 should become green");
return h1.pixelMustBecome(vremote1, h1.green, {
threshold: 128,
infoString: "pcRemote's remote1 should become green",
});
}
]);

@@ -31,7 +31,7 @@
function PC_REMOTE_WAIT_FOR_FRAMES() {
var vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
return addFinallyToPromise(helper.checkVideoPlaying(vremote))
.finally(() => emitter.stop());
}
]);
@@ -50,7 +50,7 @@
var vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
return addFinallyToPromise(helper.checkVideoPaused(vremote, 10, 10, 16, 5000))
return addFinallyToPromise(helper.checkVideoPaused(vremote))
.finally(() => emitter.stop());
},
]);
@@ -65,7 +65,7 @@
var vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
return addFinallyToPromise(helper.checkVideoPlaying(vremote))
.finally(() => emitter.stop());
}
]);