Bug 1132064 - Local video is sometimes displayed in the wrong location on the standalone Loop UI. r=mikedeboer

Mark Banner 2015-02-11 20:28:11 +00:00
parent c5f69cfdf1
commit 85c91d2e02
2 changed files with 159 additions and 32 deletions


@@ -295,15 +295,37 @@ loop.shared.mixins = (function() {
width: width,
height: node.offsetHeight
};
var ratio = this._videoDimensionsCache.remote[videoType].aspectRatio;
// Leading axis is the side that has the smallest ratio.
var leadingAxis = Math.min(ratio.width, ratio.height) === ratio.width ?
"width" : "height";
var slaveSize = remoteVideoDimensions[leadingAxis] +
(remoteVideoDimensions[leadingAxis] * (1 - ratio[leadingAxis]));
remoteVideoDimensions.streamWidth = leadingAxis === "width" ?
remoteVideoDimensions.width : slaveSize;
remoteVideoDimensions.streamHeight = leadingAxis === "height" ?
remoteVideoDimensions.height : slaveSize;
var slaveAxis = leadingAxis === "height" ? "width" : "height";
// We need to work out if the leading axis of the video is full, by
// calculating the expected length of the leading axis based on the
// length of the slave axis and aspect ratio.
var leadingAxisFull = remoteVideoDimensions[slaveAxis] * ratio[leadingAxis] >
remoteVideoDimensions[leadingAxis];
if (leadingAxisFull) {
// If the leading axis is "full" then we need to adjust the slave axis.
var slaveAxisSize = remoteVideoDimensions[leadingAxis] / ratio[leadingAxis];
remoteVideoDimensions.streamWidth = leadingAxis === "width" ?
remoteVideoDimensions.width : slaveAxisSize;
remoteVideoDimensions.streamHeight = leadingAxis === "height" ?
remoteVideoDimensions.height : slaveAxisSize;
} else {
// If the leading axis is not "full" then we need to adjust it, based
// on the length of the slave axis and the aspect ratio.
var leadingAxisSize = remoteVideoDimensions[slaveAxis] * ratio[leadingAxis];
remoteVideoDimensions.streamWidth = leadingAxis === "height" ?
remoteVideoDimensions.width : leadingAxisSize;
remoteVideoDimensions.streamHeight = leadingAxis === "width" ?
remoteVideoDimensions.height : leadingAxisSize;
}
}
}, this);
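
For reference, here is a minimal standalone sketch (not part of the patch; the helper name computeStreamLayout is hypothetical) of the letterbox/pillarbox arithmetic the new branches implement. It uses the aspect-ratio convention the tests imply: each side of the video is divided by the larger side, so the leading axis is the one with the smaller ratio.

// Illustrative sketch only; computeStreamLayout is a hypothetical helper,
// not an API added by this patch.
function computeStreamLayout(elementWidth, elementHeight, videoWidth, videoHeight) {
  var max = Math.max(videoWidth, videoHeight);
  // Normalise the aspect ratio so the larger side is 1; the leading axis
  // is the side with the smaller ratio.
  var ratio = { width: videoWidth / max, height: videoHeight / max };
  var element = { width: elementWidth, height: elementHeight };
  var leadingAxis = ratio.width <= ratio.height ? "width" : "height";
  var slaveAxis = leadingAxis === "height" ? "width" : "height";

  var stream = {};
  // Is the leading axis of the element fully covered by the stream?
  if (element[slaveAxis] * ratio[leadingAxis] > element[leadingAxis]) {
    // Leading axis full: scale the slave axis from it.
    stream[leadingAxis] = element[leadingAxis];
    stream[slaveAxis] = element[leadingAxis] / ratio[leadingAxis];
  } else {
    // Slave axis full: scale the leading axis from it.
    stream[slaveAxis] = element[slaveAxis];
    stream[leadingAxis] = element[slaveAxis] * ratio[leadingAxis];
  }

  return {
    streamWidth: stream.width,
    streamHeight: stream.height,
    // Centre the stream inside the element, as the tests below expect.
    offsetX: (element.width - stream.width) / 2,
    offsetY: (element.height - stream.height) / 2
  };
}

// Matches the first new test case below: a 240x320 video in a 480x700 element
// -> { streamWidth: 480, streamHeight: 640, offsetX: 0, offsetY: 30 }
console.log(computeStreamLayout(480, 700, 240, 320));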


@@ -195,6 +195,7 @@ describe("loop.shared.mixins", function() {
describe("loop.shared.mixins.MediaSetupMixin", function() {
var view, TestComp, rootObject;
var localElement, remoteElement, screenShareElement;
beforeEach(function() {
TestComp = React.createClass({
@@ -225,6 +226,23 @@ describe("loop.shared.mixins", function() {
sharedMixins.setRootObject(rootObject);
view = TestUtils.renderIntoDocument(React.createElement(TestComp));
sandbox.stub(view, "getDOMNode").returns({
querySelector: function(classSelector) {
if (classSelector.contains("local")) {
return localElement;
} else if (classSelector.contains("screen")) {
return screenShareElement;
}
return remoteElement;
}
});
});
afterEach(function() {
localElement = null;
remoteElement = null;
screenShareElement = null;
});
describe("#getDefaultPublisherConfig", function() {
@@ -235,22 +253,124 @@ describe("loop.shared.mixins", function() {
});
});
describe("Events", function() {
var localElement, remoteElement, screenShareElement;
describe("#getRemoteVideoDimensions", function() {
var localVideoDimensions, remoteVideoDimensions;
beforeEach(function() {
sandbox.stub(view, "getDOMNode").returns({
querySelector: function(classSelector) {
if (classSelector.contains("local")) {
return localElement;
} else if (classSelector.contains("screen")) {
return screenShareElement;
}
return remoteElement;
localVideoDimensions = {
camera: {
width: 640,
height: 480
}
});
};
});
it("should fetch the correct stream sizes for leading axis width and full",
function() {
remoteVideoDimensions = {
camera: {
width: 240,
height: 320
}
};
remoteElement = {
offsetWidth: 480,
offsetHeight: 700
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions();
expect(result.width).eql(remoteElement.offsetWidth);
expect(result.height).eql(remoteElement.offsetHeight);
expect(result.streamWidth).eql(remoteElement.offsetWidth);
// The real height of the stream accounting for the aspect ratio.
expect(result.streamHeight).eql(640);
expect(result.offsetX).eql(0);
// The remote element height (700) minus the stream height (640) split in 2.
expect(result.offsetY).eql(30);
});
it("should fetch the correct stream sizes for leading axis width and not full",
function() {
remoteVideoDimensions = {
camera: {
width: 240,
height: 320
}
};
remoteElement = {
offsetWidth: 640,
offsetHeight: 480
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions();
expect(result.width).eql(remoteElement.offsetWidth);
expect(result.height).eql(remoteElement.offsetHeight);
// The stream width is derived from the element height via the aspect ratio.
expect(result.streamWidth).eql(360);
expect(result.streamHeight).eql(remoteElement.offsetHeight);
// The remote element width (640) minus the stream width (360) split in 2.
expect(result.offsetX).eql(140);
expect(result.offsetY).eql(0);
});
it("should fetch the correct stream sizes for leading axis height and full",
function() {
remoteVideoDimensions = {
camera: {
width: 320,
height: 240
}
};
remoteElement = {
offsetWidth: 700,
offsetHeight: 480
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions();
expect(result.width).eql(remoteElement.offsetWidth);
expect(result.height).eql(remoteElement.offsetHeight);
// The real width of the stream accounting for the aspect ratio.
expect(result.streamWidth).eql(640);
expect(result.streamHeight).eql(remoteElement.offsetHeight);
// The remote element width (700) minus the stream width (640) split in 2.
expect(result.offsetX).eql(30);
expect(result.offsetY).eql(0);
});
it("should fetch the correct stream sizes for leading axis height and not full",
function() {
remoteVideoDimensions = {
camera: {
width: 320,
height: 240
}
};
remoteElement = {
offsetWidth: 480,
offsetHeight: 640
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions();
expect(result.width).eql(remoteElement.offsetWidth);
expect(result.height).eql(remoteElement.offsetHeight);
expect(result.streamWidth).eql(remoteElement.offsetWidth);
// The stream height is derived from the element width via the aspect ratio.
expect(result.streamHeight).eql(360);
expect(result.offsetX).eql(0);
// The remote element height (640) minus the stream height (360) split in 2.
expect(result.offsetY).eql(140);
});
});
describe("Events", function() {
describe("resize", function() {
it("should update the width on the local stream element", function() {
localElement = {
@@ -367,21 +487,6 @@ describe("loop.shared.mixins", function() {
expect(view._videoDimensionsCache.remote.camera.aspectRatio.height)
.eql(0.32857142857142857);
});
it("should fetch remote video stream dimensions correctly", function() {
remoteElement = {
offsetWidth: 600,
offsetHeight: 320
};
var remoteVideoDimensions = view.getRemoteVideoDimensions();
expect(remoteVideoDimensions.width).eql(remoteElement.offsetWidth);
expect(remoteVideoDimensions.height).eql(remoteElement.offsetHeight);
expect(remoteVideoDimensions.streamWidth).eql(534.8571428571429);
expect(remoteVideoDimensions.streamHeight).eql(remoteElement.offsetHeight);
expect(remoteVideoDimensions.offsetX).eql(32.571428571428555);
expect(remoteVideoDimensions.offsetY).eql(0);
});
});
});
});
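
The bug title concerns where the local video ends up; presumably the standalone UI positions the local stream relative to the area the remote stream actually occupies, so incorrect streamWidth/streamHeight values push it off target. As a hedged illustration only (the helper name, the 10px margin and the style properties below are assumptions, not taken from this patch), the corrected dimensions could be turned into a position like this:

// Illustrative only: positionLocalVideo, the 10px margin and the use of
// right/bottom styles are assumptions, not code from this patch.
function positionLocalVideo(localVideo, remoteVideoDimensions) {
  var margin = 10; // hypothetical gap from the corner of the remote stream
  // Anchor the local video to the bottom-right corner of the rendered
  // stream rather than of the containing element, so that letterbox or
  // pillarbox bars (offsetX/offsetY) are excluded.
  localVideo.style.position = "absolute";
  localVideo.style.right = (remoteVideoDimensions.offsetX + margin) + "px";
  localVideo.style.bottom = (remoteVideoDimensions.offsetY + margin) + "px";
}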