Bug 1192738 - Drop the old aspect ratio calculation code now that Loop's media layout refactors have been completed. r=andreio

Mark Banner 2015-08-11 14:17:25 +01:00
parent 947544ce87
commit 70ab21d643
2 changed files with 11 additions and 491 deletions


@@ -283,277 +283,9 @@ loop.shared.mixins = (function() {
/**
* Media setup mixin. Provides a common location for settings for the media
* elements and handling updates of the media containers.
* elements.
*/
var MediaSetupMixin = {
componentDidMount: function() {
this.resetDimensionsCache();
},
/**
* Resets the dimensions cache, e.g. for when the session is ended, and
* before a new session, so that we always ensure we see an update when a
* new session is started.
*/
resetDimensionsCache: function() {
this._videoDimensionsCache = {
local: {},
remote: {}
};
},
/**
* Called by `updateVideoDimensions` whenever the dimensions of a video stream
* change. Stores the new values and reports back to the caller whether they
* differ from the currently cached values.
*
* @param {String} which Type of video stream. May be 'local' or 'remote'
* @param {Object} newDimensions Object containing 'width' and 'height' properties
* @return {Boolean} `true` when the dimensions have changed,
* `false` if not
*/
_updateDimensionsCache: function(which, newDimensions) {
var cache = this._videoDimensionsCache[which];
var cacheKeys = Object.keys(cache);
var changed = false;
Object.keys(newDimensions).forEach(function(videoType) {
if (cacheKeys.indexOf(videoType) === -1) {
cache[videoType] = newDimensions[videoType];
cache[videoType].aspectRatio = this.getAspectRatio(cache[videoType]);
changed = true;
return;
}
if (cache[videoType].width !== newDimensions[videoType].width) {
cache[videoType].width = newDimensions[videoType].width;
changed = true;
}
if (cache[videoType].height !== newDimensions[videoType].height) {
cache[videoType].height = newDimensions[videoType].height;
changed = true;
}
if (changed) {
cache[videoType].aspectRatio = this.getAspectRatio(cache[videoType]);
}
}, this);
// Remove any streams that are no longer being published.
cacheKeys.forEach(function(videoType) {
if (!(videoType in newDimensions)) {
delete cache[videoType];
changed = true;
}
});
return changed;
},
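// A standalone illustrative sketch (hypothetical helper, not from the Loop
// codebase) of the cache diffing above, without the aspect-ratio caching.
// It reports a change whenever a stream is new, has been resized, or is no
// longer being published.
function diffDimensionsSketch(cache, newDimensions) {
  var changed = false;
  Object.keys(newDimensions).forEach(function(videoType) {
    var entry = cache[videoType];
    if (!entry || entry.width !== newDimensions[videoType].width ||
        entry.height !== newDimensions[videoType].height) {
      cache[videoType] = newDimensions[videoType];
      changed = true;
    }
  });
  Object.keys(cache).forEach(function(videoType) {
    if (!(videoType in newDimensions)) {
      delete cache[videoType];
      changed = true;
    }
  });
  return changed;
}
// var cache = {};
// diffDimensionsSketch(cache, {camera: {width: 640, height: 480}});  // true
// diffDimensionsSketch(cache, {camera: {width: 640, height: 480}});  // false
// diffDimensionsSketch(cache, {});                                   // true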
/**
* Whenever the dimensions change of a video stream, this function is called
* to process these changes and possibly trigger an update to the video
* container elements.
*
* @param {Object} localVideoDimensions Object containing 'width' and 'height'
* properties grouped by stream name
* @param {Object} remoteVideoDimensions Object containing 'width' and 'height'
* properties grouped by stream name
*/
updateVideoDimensions: function(localVideoDimensions, remoteVideoDimensions) {
var localChanged = this._updateDimensionsCache("local", localVideoDimensions);
var remoteChanged = this._updateDimensionsCache("remote", remoteVideoDimensions);
if (localChanged || remoteChanged) {
this.updateVideoContainer();
}
},
/**
* Get the aspect ratio of a width/height pair, which should be the dimensions
* of a stream. The returned object is the aspect ratio normalised to 1: the
* leading axis has a value smaller than 1 and the slave axis has a value of
* exactly 1. This carries the same information as notations like 4:3 and
* 16:9, which are merely human-readable forms of their fractional
* counterparts: 4:3 === 1:0.75 and 16:9 === 1:0.5625.
* We keep the aspect ratios in this fractional form because it is easier to
* do calculations with.
*
* Example:
* A stream with dimensions `{ width: 640, height: 480 }` yields an indexed
* aspect ratio of `{ width: 1, height: 0.75 }`. This means that the 'height'
* will determine the value of 'width' when the stream is stretched or shrunk
* to fit inside its container element at the maximum size.
*
* @param {Object} dimensions Object containing 'width' and 'height' properties
* @return {Object} Contains the indexed aspect ratio for 'width'
* and 'height' assigned to the corresponding
* properties.
*/
getAspectRatio: function(dimensions) {
if (dimensions.width === dimensions.height) {
return {width: 1, height: 1};
}
var denominator = Math.max(dimensions.width, dimensions.height);
return {
width: dimensions.width / denominator,
height: dimensions.height / denominator
};
},
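// A standalone illustrative sketch (hypothetical helper, not from the Loop
// codebase) showing the normalisation numerically:
function aspectRatioSketch(width, height) {
  var denominator = Math.max(width, height);
  return { width: width / denominator, height: height / denominator };
}
console.log(aspectRatioSketch(640, 480));   // { width: 1, height: 0.75 }   i.e. 4:3
console.log(aspectRatioSketch(1280, 720));  // { width: 1, height: 0.5625 } i.e. 16:9
console.log(aspectRatioSketch(240, 320));   // { width: 0.75, height: 1 }   portrait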
/**
* Retrieve the dimensions of the active remote video stream. This assumes
* that if screens are being shared, the remote camera stream is hidden.
* Example output:
* {
* width: 680,
* height: 480,
* streamWidth: 640,
* streamHeight: 480,
* offsetX: 20,
* offsetY: 0
* }
*
* Note: This expects a class on the element that has the name "remote" or the
* same name as the possible video types (currently only "screen").
* Note: Once we support multiple remote video streams, this function will
* need to be updated.
*
* @param {string} videoType The video type according to the sdk, e.g. "camera" or
* "screen".
* @return {Object} contains the remote stream dimension properties of its
* container node, the stream itself and offset of the stream
* relative to its container node in pixels.
*/
getRemoteVideoDimensions: function(videoType) {
var remoteVideoDimensions;
if (videoType in this._videoDimensionsCache.remote) {
var node = this._getElement("." + (videoType === "camera" ? "remote" : videoType));
var width = node.offsetWidth;
// If the width > 0 then we record its real size by taking its aspect
// ratio into account. Due to the 'contain' fit-mode, the stream will be
// centered inside the video element.
// We'll need to deal with more than one remote video stream whenever
// that becomes something we need to support.
if (width) {
remoteVideoDimensions = {
width: width,
height: node.offsetHeight
};
var ratio = this._videoDimensionsCache.remote[videoType].aspectRatio;
// Leading axis is the side that has the smallest ratio.
var leadingAxis = Math.min(ratio.width, ratio.height) === ratio.width ?
"width" : "height";
var slaveAxis = leadingAxis === "height" ? "width" : "height";
// We need to work out if the leading axis of the video is full, by
// calculating the expected length of the leading axis based on the
// length of the slave axis and aspect ratio.
var leadingAxisFull = remoteVideoDimensions[slaveAxis] * ratio[leadingAxis] >
remoteVideoDimensions[leadingAxis];
if (leadingAxisFull) {
// If the leading axis is "full" then we need to adjust the slave axis.
var slaveAxisSize = remoteVideoDimensions[leadingAxis] / ratio[leadingAxis];
remoteVideoDimensions.streamWidth = leadingAxis === "width" ?
remoteVideoDimensions.width : slaveAxisSize;
remoteVideoDimensions.streamHeight = leadingAxis === "height" ?
remoteVideoDimensions.height : slaveAxisSize;
} else {
// If the leading axis is not "full" then we need to adjust it, based
// on the length of the leading axis.
var leadingAxisSize = remoteVideoDimensions[slaveAxis] * ratio[leadingAxis];
remoteVideoDimensions.streamWidth = leadingAxis === "height" ?
remoteVideoDimensions.width : leadingAxisSize;
remoteVideoDimensions.streamHeight = leadingAxis === "width" ?
remoteVideoDimensions.height : leadingAxisSize;
}
}
}
// Supply some sensible defaults for the remoteVideoDimensions if no remote
// stream is connected (yet).
if (!remoteVideoDimensions) {
node = this._getElement(".remote");
width = node.offsetWidth;
var height = node.offsetHeight;
remoteVideoDimensions = {
width: width,
height: height,
streamWidth: width,
streamHeight: height
};
}
// Calculate the size of each individual letter- or pillarbox for convenience.
remoteVideoDimensions.offsetX = remoteVideoDimensions.width -
remoteVideoDimensions.streamWidth;
if (remoteVideoDimensions.offsetX > 0) {
remoteVideoDimensions.offsetX /= 2;
}
remoteVideoDimensions.offsetY = remoteVideoDimensions.height -
remoteVideoDimensions.streamHeight;
if (remoteVideoDimensions.offsetY > 0) {
remoteVideoDimensions.offsetY /= 2;
}
return remoteVideoDimensions;
},
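// A standalone illustrative sketch (hypothetical helper, not from the Loop
// codebase) of the 'contain' fit arithmetic used above: given a container
// size and the normalised aspect ratio of a stream, it returns the rendered
// stream size plus the letter-/pillarbox offsets.
function containFitSketch(containerWidth, containerHeight, ratio) {
  // The leading axis is the one with the smaller ratio.
  var leadingAxis = Math.min(ratio.width, ratio.height) === ratio.width ?
    "width" : "height";
  var slaveAxis = leadingAxis === "height" ? "width" : "height";
  var box = { width: containerWidth, height: containerHeight };
  var result = { width: box.width, height: box.height };
  var leadingAxisFull = box[slaveAxis] * ratio[leadingAxis] > box[leadingAxis];
  if (leadingAxisFull) {
    // Leading axis is full: scale the slave axis from it.
    var slaveAxisSize = box[leadingAxis] / ratio[leadingAxis];
    result.streamWidth = leadingAxis === "width" ? box.width : slaveAxisSize;
    result.streamHeight = leadingAxis === "height" ? box.height : slaveAxisSize;
  } else {
    // Slave axis is full: scale the leading axis from it.
    var leadingAxisSize = box[slaveAxis] * ratio[leadingAxis];
    result.streamWidth = leadingAxis === "height" ? box.width : leadingAxisSize;
    result.streamHeight = leadingAxis === "width" ? box.height : leadingAxisSize;
  }
  // Half of the total letter-/pillarbox size sits on each side.
  result.offsetX = (result.width - result.streamWidth) / 2;
  result.offsetY = (result.height - result.streamHeight) / 2;
  return result;
}
// Reproduces the example in the doc comment above: a 4:3 stream shown inside
// a 680x480 container is pillarboxed by 20px on either side.
console.log(containFitSketch(680, 480, { width: 1, height: 0.75 }));
// -> { width: 680, height: 480, streamWidth: 640, streamHeight: 480,
//      offsetX: 20, offsetY: 0 }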
/**
* Used to update the video container whenever the orientation or size of the
* display area changes.
*
* Calls to this function are buffered so that we don't pile up update calls
* when many 'resize' events are fired, which would block the event loop.
*/
updateVideoContainer: function() {
if (this._bufferedUpdateVideo) {
rootObject.clearTimeout(this._bufferedUpdateVideo);
this._bufferedUpdateVideo = null;
}
this._bufferedUpdateVideo = rootObject.setTimeout(function() {
// Since this is being called from setTimeout, any exceptions thrown
// will propagate upwards into nothingness, unless we go out of our
// way to catch and log them explicitly, so...
try {
this._bufferedUpdateVideo = null;
var localStreamParent = this._getElement(".local .OT_publisher");
var remoteStreamParent = this._getElement(".remote .OT_subscriber");
var screenShareStreamParent = this._getElement(".screen .OT_subscriber");
if (localStreamParent) {
localStreamParent.style.width = "100%";
}
if (remoteStreamParent) {
remoteStreamParent.style.height = "100%";
}
if (screenShareStreamParent) {
screenShareStreamParent.style.height = "100%";
}
// Update the position and dimensions of the containers of local and
// remote video streams, if necessary. The consumer of this mixin
// should implement the actual updating mechanism.
Object.keys(this._videoDimensionsCache.local).forEach(
function (videoType) {
var ratio = this._videoDimensionsCache.local[videoType].aspectRatio;
if (videoType == "camera" && this.updateLocalCameraPosition) {
this.updateLocalCameraPosition(ratio);
}
}, this);
Object.keys(this._videoDimensionsCache.remote).forEach(
function (videoType) {
var ratio = this._videoDimensionsCache.remote[videoType].aspectRatio;
if (videoType == "camera" && this.updateRemoteCameraPosition) {
this.updateRemoteCameraPosition(ratio);
}
}, this);
} catch (ex) {
console.error("updateVideoContainer: _bufferedVideoUpdate exception:", ex);
}
}.bind(this), 0);
},
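// A standalone illustrative sketch (hypothetical helper, not from the Loop
// codebase) of the buffering pattern used above: re-scheduling through
// setTimeout(..., 0) collapses a burst of resize-driven calls into a single
// update per tick, and the try/catch keeps exceptions from vanishing silently.
var pendingUpdate = null;
function bufferedUpdateSketch(doUpdate) {
  if (pendingUpdate) {
    clearTimeout(pendingUpdate);
    pendingUpdate = null;
  }
  pendingUpdate = setTimeout(function() {
    pendingUpdate = null;
    try {
      doUpdate();
    } catch (ex) {
      console.error("bufferedUpdateSketch exception:", ex);
    }
  }, 0);
}
// Calling it many times in one tick runs doUpdate only once:
// window.addEventListener("resize", function() {
//   bufferedUpdateSketch(function() { console.log("layout updated"); });
// });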
/**
* Returns the default configuration for publishing media on the sdk.
*
@@ -576,15 +308,6 @@ loop.shared.mixins = (function() {
publishVideo: options.publishVideo,
showControls: false
};
},
/**
* Returns the required DOM node matching the given class name.
*
* @param {String} className The name of the class to get the element for.
*/
_getElement: function(className) {
return this.getDOMNode().querySelector(className);
}
};
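// A hypothetical sketch (not from the Loop codebase) of how a component could
// consume the pre-refactor mixin: updateLocalCameraPosition and
// updateRemoteCameraPosition are optional hooks that updateVideoContainer
// invoked with the cached camera aspect ratios when they were defined.
var ExampleMediaView = React.createClass({
  mixins: [loop.shared.mixins.MediaSetupMixin],
  updateLocalCameraPosition: function(ratio) {
    // e.g. size the local preview from the normalised aspect ratio.
    console.log("local camera ratio:", ratio.width, ratio.height);
  },
  updateRemoteCameraPosition: function(ratio) {
    console.log("remote camera ratio:", ratio.width, ratio.height);
  },
  render: function() {
    return React.DOM.div(null,
      React.DOM.div({className: "local"}),
      React.DOM.div({className: "remote"}));
  }
});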


@@ -192,233 +192,30 @@ describe("loop.shared.mixins", function() {
});
describe("loop.shared.mixins.MediaSetupMixin", function() {
var view, TestComp, rootObject;
var localElement, remoteElement, screenShareElement;
var view;
beforeEach(function() {
TestComp = React.createClass({
var TestComp = React.createClass({
mixins: [loop.shared.mixins.MediaSetupMixin],
render: function() {
return React.DOM.div();
}
});
sandbox.useFakeTimers();
rootObject = {
events: {},
setTimeout: function(func, timeout) {
return setTimeout(func, timeout);
},
clearTimeout: function(timer) {
return clearTimeout(timer);
},
addEventListener: function(eventName, listener) {
this.events[eventName] = listener;
},
removeEventListener: function(eventName) {
delete this.events[eventName];
}
};
sharedMixins.setRootObject(rootObject);
view = TestUtils.renderIntoDocument(React.createElement(TestComp));
sandbox.stub(view, "getDOMNode").returns({
querySelector: function(classSelector) {
if (classSelector.indexOf("local") > -1) {
return localElement;
} else if (classSelector.indexOf("screen") > -1) {
return screenShareElement;
}
return remoteElement;
}
});
});
afterEach(function() {
localElement = null;
remoteElement = null;
screenShareElement = null;
});
describe("#getDefaultPublisherConfig", function() {
it("should provide a default publisher configuration", function() {
var defaultConfig = view.getDefaultPublisherConfig({publishVideo: true});
expect(defaultConfig.publishVideo).eql(true);
});
});
describe("#getRemoteVideoDimensions", function() {
var localVideoDimensions, remoteVideoDimensions;
beforeEach(function() {
localVideoDimensions = {
camera: {
width: 640,
height: 480
}
};
it("should throw if publishVideo is not given", function() {
expect(function() {
view.getDefaultPublisherConfig();
}).to.throw(/missing/);
});
it("should fetch the correct stream sizes for leading axis width and full",
function() {
remoteVideoDimensions = {
screen: {
width: 240,
height: 320
}
};
screenShareElement = {
offsetWidth: 480,
offsetHeight: 700
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions("screen");
expect(result.width).eql(screenShareElement.offsetWidth);
expect(result.height).eql(screenShareElement.offsetHeight);
expect(result.streamWidth).eql(screenShareElement.offsetWidth);
// The real height of the stream accounting for the aspect ratio.
expect(result.streamHeight).eql(640);
expect(result.offsetX).eql(0);
// The remote element height (700) minus the stream height (640) split in 2.
expect(result.offsetY).eql(30);
});
it("should fetch the correct stream sizes for leading axis width and not full",
function() {
remoteVideoDimensions = {
camera: {
width: 240,
height: 320
}
};
remoteElement = {
offsetWidth: 640,
offsetHeight: 480
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions("camera");
expect(result.width).eql(remoteElement.offsetWidth);
expect(result.height).eql(remoteElement.offsetHeight);
// The stream width is derived from the element height via the aspect ratio.
expect(result.streamWidth).eql(360);
expect(result.streamHeight).eql(remoteElement.offsetHeight);
// The remote element width (640) minus the stream width (360) split in 2.
expect(result.offsetX).eql(140);
expect(result.offsetY).eql(0);
});
it("should fetch the correct stream sizes for leading axis height and full",
function() {
remoteVideoDimensions = {
screen: {
width: 320,
height: 240
}
};
screenShareElement = {
offsetWidth: 700,
offsetHeight: 480
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions("screen");
expect(result.width).eql(screenShareElement.offsetWidth);
expect(result.height).eql(screenShareElement.offsetHeight);
// The real width of the stream accounting for the aspect ratio.
expect(result.streamWidth).eql(640);
expect(result.streamHeight).eql(screenShareElement.offsetHeight);
// The remote element width (700) minus the stream width (640) split in 2.
expect(result.offsetX).eql(30);
expect(result.offsetY).eql(0);
});
it("should fetch the correct stream sizes for leading axis height and not full",
function() {
remoteVideoDimensions = {
camera: {
width: 320,
height: 240
}
};
remoteElement = {
offsetWidth: 480,
offsetHeight: 640
};
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var result = view.getRemoteVideoDimensions("camera");
expect(result.width).eql(remoteElement.offsetWidth);
expect(result.height).eql(remoteElement.offsetHeight);
expect(result.streamWidth).eql(remoteElement.offsetWidth);
// The stream height is derived from the element width via the aspect ratio.
expect(result.streamHeight).eql(360);
expect(result.offsetX).eql(0);
// The remote element height (640) minus the stream height (360) split in 2.
expect(result.offsetY).eql(140);
});
});
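// An illustrative check (hypothetical, not from the test suite) of the
// "leading axis width and not full" numbers above: a 240x320 stream
// (ratio 0.75:1) rendered into a 640x480 element is pillarboxed to 360x480.
var ratio = 240 / 320;                 // -> 0.75
var streamWidth = 480 * ratio;         // -> 360
var offsetX = (640 - streamWidth) / 2;
console.log(streamWidth, offsetX);     // 360 140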
describe("Events", function() {
describe("Video stream dimensions", function() {
var localVideoDimensions = {
camera: {
width: 640,
height: 480
}
};
var remoteVideoDimensions = {
camera: {
width: 420,
height: 138
}
};
it("should register video dimension updates correctly", function() {
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
expect(view._videoDimensionsCache.local.camera.width)
.eql(localVideoDimensions.camera.width);
expect(view._videoDimensionsCache.local.camera.height)
.eql(localVideoDimensions.camera.height);
expect(view._videoDimensionsCache.local.camera.aspectRatio.width).eql(1);
expect(view._videoDimensionsCache.local.camera.aspectRatio.height).eql(0.75);
expect(view._videoDimensionsCache.remote.camera.width)
.eql(remoteVideoDimensions.camera.width);
expect(view._videoDimensionsCache.remote.camera.height)
.eql(remoteVideoDimensions.camera.height);
expect(view._videoDimensionsCache.remote.camera.aspectRatio.width).eql(1);
expect(view._videoDimensionsCache.remote.camera.aspectRatio.height)
.eql(0.32857142857142857);
});
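// An illustrative check (hypothetical, not from the test suite) of the
// expected remote aspect ratio above: the height ratio is simply
// height / width, i.e. 138 / 420 ≈ 0.32857142857142857.
var remoteHeightRatio = 138 / 420;
console.log(remoteHeightRatio);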
it("should unregister video dimension updates correctly", function() {
view.updateVideoDimensions(localVideoDimensions, {});
expect("camera" in view._videoDimensionsCache.local).eql(true);
expect("camera" in view._videoDimensionsCache.remote).eql(false);
});
it("should not populate the cache on another component instance", function() {
view.updateVideoDimensions(localVideoDimensions, remoteVideoDimensions);
var view2 =
TestUtils.renderIntoDocument(React.createElement(TestComp));
expect(view2._videoDimensionsCache.local).to.be.empty;
expect(view2._videoDimensionsCache.remote).to.be.empty;
});
it("should return a set of defaults based on the options", function() {
expect(view.getDefaultPublisherConfig({
publishVideo: true
}).publishVideo).eql(true);
});
});
});