merge mozilla-inbound to mozilla-central. r=merge a=merge

MozReview-Commit-ID: 9adaYBJ4tlo
Sebastian Hengst 2017-10-13 23:37:41 +02:00
commit 373bdd8602
101 changed files with 2453 additions and 1744 deletions

View File

@ -1,26 +0,0 @@
[
{
"digest": "2e56a3cf84764b8e63720e5f961cff7ba8ba5cf2f353dac55c69486489bcd89f53a757e09469a07700b80cd09f09666c2db4ce375b67060ac3be967714597231",
"unpack": true,
"algorithm": "sha512",
"filename": "sixgill.tar.xz",
"size": 2629600,
"hg_id": "221d0d2eead9"
},
{
"digest": "3915f8ec396c56a8a92e6f9695b70f09ce9d1582359d1258e37e3fd43a143bc974410e4cfc27f500e095f34a8956206e0ebf799b7287f0f38def0d5e34ed71c9",
"unpack": true,
"setup": "setup.sh",
"algorithm": "sha512",
"filename": "gtk3.tar.xz",
"size": 12072532
},
{
"version": "rustc 1.19.0 (0ade33941 2017-07-17) repack",
"size": 161014632,
"digest": "65bebcf94fc66ea618c58c9ac33f0f206095ecfe3931cc6edb301f4b40480e3b44b0f39aea7a25fed8eef47e63523e7e670082947a3662cdc04c68ebbe5dfc89",
"algorithm": "sha512",
"filename": "rustc.tar.xz",
"unpack": true
}
]

View File

@ -1,5 +1,5 @@
This is the PDF.js project output, https://github.com/mozilla/pdf.js
Current extension version is: 1.9.630
Current extension version is: 1.9.640
Taken from upstream commit: ec469673
Taken from upstream commit: 853db85b

View File

@ -1990,7 +1990,7 @@ function _fetchDocument(worker, source, pdfDataRangeTransport, docId) {
if (worker.destroyed) {
return Promise.reject(new Error('Worker was destroyed'));
}
let apiVersion = '1.9.630';
let apiVersion = '1.9.640';
source.disableAutoFetch = (0, _dom_utils.getDefaultSetting)('disableAutoFetch');
source.disableStream = (0, _dom_utils.getDefaultSetting)('disableStream');
source.chunkedViewerLoading = !!pdfDataRangeTransport;
@ -2884,7 +2884,7 @@ var WorkerTransport = function WorkerTransportClosure() {
};
}
var font = new _font_loader.FontFaceObject(exportedData, {
isEvalSuported: (0, _dom_utils.getDefaultSetting)('isEvalSupported'),
isEvalSupported: (0, _dom_utils.getDefaultSetting)('isEvalSupported'),
disableFontFace: (0, _dom_utils.getDefaultSetting)('disableFontFace'),
fontRegistry
});
@ -3311,8 +3311,8 @@ var _UnsupportedManager = function UnsupportedManagerClosure() {
}();
var version, build;
{
exports.version = version = '1.9.630';
exports.build = build = 'ec469673';
exports.version = version = '1.9.640';
exports.build = build = '853db85b';
}
exports.getDocument = getDocument;
exports.LoopbackPort = LoopbackPort;
@ -5051,8 +5051,8 @@ exports.SVGGraphics = SVGGraphics;
"use strict";
var pdfjsVersion = '1.9.630';
var pdfjsBuild = 'ec469673';
var pdfjsVersion = '1.9.640';
var pdfjsBuild = '853db85b';
var pdfjsSharedUtil = __w_pdfjs_require__(0);
var pdfjsDisplayGlobal = __w_pdfjs_require__(13);
var pdfjsDisplayAPI = __w_pdfjs_require__(3);
@ -8177,8 +8177,8 @@ if (!_global_scope2.default.PDFJS) {
}
var PDFJS = _global_scope2.default.PDFJS;
{
PDFJS.version = '1.9.630';
PDFJS.build = 'ec469673';
PDFJS.version = '1.9.640';
PDFJS.build = '853db85b';
}
PDFJS.pdfBug = false;
if (PDFJS.verbosity !== undefined) {

View File

@ -16419,42 +16419,52 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
switch (fn | 0) {
case _util.OPS.paintXObject:
var name = args[0].name;
if (!name) {
(0, _util.warn)('XObject must be referred to by name.');
continue;
}
if (imageCache[name] !== undefined) {
if (name && imageCache[name] !== undefined) {
operatorList.addOp(imageCache[name].fn, imageCache[name].args);
args = null;
continue;
}
var xobj = xobjs.get(name);
if (xobj) {
next(new Promise(function (resolveXObject, rejectXObject) {
if (!name) {
throw new _util.FormatError('XObject must be referred to by name.');
}
let xobj = xobjs.get(name);
if (!xobj) {
operatorList.addOp(fn, args);
resolveXObject();
return;
}
if (!(0, _primitives.isStream)(xobj)) {
throw new _util.FormatError('XObject should be a stream');
}
var type = xobj.dict.get('Subtype');
let type = xobj.dict.get('Subtype');
if (!(0, _primitives.isName)(type)) {
throw new _util.FormatError('XObject should have a Name subtype');
}
if (type.name === 'Form') {
stateManager.save();
next(self.buildFormXObject(resources, xobj, null, operatorList, task, stateManager.state.clone()).then(function () {
self.buildFormXObject(resources, xobj, null, operatorList, task, stateManager.state.clone()).then(function () {
stateManager.restore();
}));
resolveXObject();
}, rejectXObject);
return;
} else if (type.name === 'Image') {
self.buildPaintImageXObject(resources, xobj, false, operatorList, name, imageCache);
args = null;
continue;
} else if (type.name === 'PS') {
(0, _util.info)('Ignored XObject subtype PS');
continue;
} else {
throw new _util.FormatError(`Unhandled XObject subtype ${type.name}`);
}
}
break;
resolveXObject();
}).catch(function (reason) {
if (self.options.ignoreErrors) {
self.handler.send('UnsupportedFeature', { featureId: _util.UNSUPPORTED_FEATURES.unknown });
(0, _util.warn)(`getOperatorList - ignoring XObject: "${reason}".`);
return;
}
throw reason;
}));
return;
case _util.OPS.setFont:
var fontSize = args[1];
next(self.handleSetFont(resources, args, null, operatorList, task, stateManager.state).then(function (loadedName) {
@ -17003,57 +17013,74 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
xobjs = resources.get('XObject') || _primitives.Dict.empty;
}
var name = args[0].name;
if (name in skipEmptyXObjs) {
if (name && skipEmptyXObjs[name] !== undefined) {
break;
}
var xobj = xobjs.get(name);
if (!xobj) {
break;
}
if (!(0, _primitives.isStream)(xobj)) {
throw new _util.FormatError('XObject should be a stream');
}
var type = xobj.dict.get('Subtype');
if (!(0, _primitives.isName)(type)) {
throw new _util.FormatError('XObject should have a Name subtype');
}
if (type.name !== 'Form') {
skipEmptyXObjs[name] = true;
break;
}
var currentState = stateManager.state.clone();
var xObjStateManager = new StateManager(currentState);
var matrix = xobj.dict.getArray('Matrix');
if (Array.isArray(matrix) && matrix.length === 6) {
xObjStateManager.transform(matrix);
}
enqueueChunk();
let sinkWrapper = {
enqueueInvoked: false,
enqueue(chunk, size) {
this.enqueueInvoked = true;
sink.enqueue(chunk, size);
},
get desiredSize() {
return sink.desiredSize;
},
get ready() {
return sink.ready;
next(new Promise(function (resolveXObject, rejectXObject) {
if (!name) {
throw new _util.FormatError('XObject must be referred to by name.');
}
};
next(self.getTextContent({
stream: xobj,
task,
resources: xobj.dict.get('Resources') || resources,
stateManager: xObjStateManager,
normalizeWhitespace,
combineTextItems,
sink: sinkWrapper,
seenStyles
}).then(function () {
if (!sinkWrapper.enqueueInvoked) {
let xobj = xobjs.get(name);
if (!xobj) {
resolveXObject();
return;
}
if (!(0, _primitives.isStream)(xobj)) {
throw new _util.FormatError('XObject should be a stream');
}
let type = xobj.dict.get('Subtype');
if (!(0, _primitives.isName)(type)) {
throw new _util.FormatError('XObject should have a Name subtype');
}
if (type.name !== 'Form') {
skipEmptyXObjs[name] = true;
resolveXObject();
return;
}
let currentState = stateManager.state.clone();
let xObjStateManager = new StateManager(currentState);
let matrix = xobj.dict.getArray('Matrix');
if (Array.isArray(matrix) && matrix.length === 6) {
xObjStateManager.transform(matrix);
}
enqueueChunk();
let sinkWrapper = {
enqueueInvoked: false,
enqueue(chunk, size) {
this.enqueueInvoked = true;
sink.enqueue(chunk, size);
},
get desiredSize() {
return sink.desiredSize;
},
get ready() {
return sink.ready;
}
};
self.getTextContent({
stream: xobj,
task,
resources: xobj.dict.get('Resources') || resources,
stateManager: xObjStateManager,
normalizeWhitespace,
combineTextItems,
sink: sinkWrapper,
seenStyles
}).then(function () {
if (!sinkWrapper.enqueueInvoked) {
skipEmptyXObjs[name] = true;
}
resolveXObject();
}, rejectXObject);
}).catch(function (reason) {
if (reason instanceof _util.AbortException) {
return;
}
if (self.options.ignoreErrors) {
(0, _util.warn)(`getTextContent - ignoring XObject: "${reason}".`);
return;
}
throw reason;
}));
return;
case _util.OPS.setGState:
@ -23823,8 +23850,8 @@ exports.PostScriptCompiler = PostScriptCompiler;
"use strict";
var pdfjsVersion = '1.9.630';
var pdfjsBuild = 'ec469673';
var pdfjsVersion = '1.9.640';
var pdfjsBuild = '853db85b';
var pdfjsCoreWorker = __w_pdfjs_require__(18);
exports.WorkerMessageHandler = pdfjsCoreWorker.WorkerMessageHandler;
@ -24019,7 +24046,7 @@ var WorkerMessageHandler = {
var cancelXHRs = null;
var WorkerTasks = [];
let apiVersion = docParams.apiVersion;
let workerVersion = '1.9.630';
let workerVersion = '1.9.640';
if (apiVersion !== null && apiVersion !== workerVersion) {
throw new Error(`The API version "${apiVersion}" does not match ` + `the Worker version "${workerVersion}".`);
}

View File

@ -1503,10 +1503,17 @@ let PDFViewerApplication = {
return;
}
pdfDocument.getJavaScript().then(javaScript => {
if (javaScript.length) {
if (javaScript.length === 0) {
return;
}
javaScript.some(js => {
if (!js) {
return false;
}
console.warn('Warning: JavaScript is not supported');
this.fallback(_pdfjsLib.UNSUPPORTED_FEATURES.javaScript);
}
return true;
});
let regex = /\bprint\s*\(/;
for (let i = 0, ii = javaScript.length; i < ii; i++) {
let js = javaScript[i];
@ -4381,7 +4388,7 @@ exports.PDFFindBar = PDFFindBar;
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isDestsEqual = exports.PDFHistory = undefined;
exports.isDestArraysEqual = exports.isDestHashesEqual = exports.PDFHistory = undefined;
var _ui_utils = __webpack_require__(0);
@ -4484,7 +4491,7 @@ class PDFHistory {
return;
}
let forceReplace = false;
if (this._destination && (this._destination.hash === hash || isDestsEqual(this._destination.dest, explicitDest))) {
if (this._destination && (isDestHashesEqual(this._destination.hash, hash) || isDestArraysEqual(this._destination.dest, explicitDest))) {
if (this._destination.page) {
return;
}
@ -4691,7 +4698,20 @@ class PDFHistory {
window.addEventListener('pagehide', _boundEvents.pageHide);
}
}
function isDestsEqual(firstDest, secondDest) {
function isDestHashesEqual(destHash, pushHash) {
if (typeof destHash !== 'string' || typeof pushHash !== 'string') {
return false;
}
if (destHash === pushHash) {
return true;
}
let { nameddest } = (0, _ui_utils.parseQueryString)(destHash);
if (nameddest === pushHash) {
return true;
}
return false;
}
function isDestArraysEqual(firstDest, secondDest) {
function isEntryEqual(first, second) {
if (typeof first !== typeof second) {
return false;
@ -4726,7 +4746,8 @@ function isDestsEqual(firstDest, secondDest) {
return true;
}
exports.PDFHistory = PDFHistory;
exports.isDestsEqual = isDestsEqual;
exports.isDestHashesEqual = isDestHashesEqual;
exports.isDestArraysEqual = isDestArraysEqual;
/***/ }),
/* 17 */
@ -6443,9 +6464,13 @@ class AnnotationLayerBuilder {
this.renderInteractiveForms = renderInteractiveForms;
this.l10n = l10n;
this.div = null;
this._cancelled = false;
}
render(viewport, intent = 'display') {
this.pdfPage.getAnnotations({ intent }).then(annotations => {
if (this._cancelled) {
return;
}
let parameters = {
viewport: viewport.clone({ dontFlip: true }),
div: this.div,
@ -6470,6 +6495,9 @@ class AnnotationLayerBuilder {
}
});
}
cancel() {
this._cancelled = true;
}
hide() {
if (!this.div) {
return;
@ -6578,7 +6606,7 @@ class PDFPageView {
this.zoomLayer = null;
}
reset(keepZoomLayer = false, keepAnnotations = false) {
this.cancelRendering();
this.cancelRendering(keepAnnotations);
let div = this.div;
div.style.width = Math.floor(this.viewport.width) + 'px';
div.style.height = Math.floor(this.viewport.height) + 'px';
@ -6595,7 +6623,8 @@ class PDFPageView {
div.removeAttribute('data-loaded');
if (currentAnnotationNode) {
this.annotationLayer.hide();
} else {
} else if (this.annotationLayer) {
this.annotationLayer.cancel();
this.annotationLayer = null;
}
if (!currentZoomLayerNode) {
@ -6661,7 +6690,7 @@ class PDFPageView {
}
this.reset(true, true);
}
cancelRendering() {
cancelRendering(keepAnnotations = false) {
if (this.paintTask) {
this.paintTask.cancel();
this.paintTask = null;
@ -6672,6 +6701,10 @@ class PDFPageView {
this.textLayer.cancel();
this.textLayer = null;
}
if (!keepAnnotations && this.annotationLayer) {
this.annotationLayer.cancel();
this.annotationLayer = null;
}
}
cssTransform(target, redrawAnnotations = false) {
let width = this.viewport.width;

View File

@ -1,11 +1,11 @@
This is the debugger.html project output.
See https://github.com/devtools-html/debugger.html
Taken from upstream commit: 6d850d386e2dfef08077602191808fd5c3d8b1ee
Taken from upstream commit: 455e7e3f2de29113e37de8c03052de110f5fb106
Packages:
- babel-plugin-transform-es2015-modules-commonjs @6.26.0
- babel-preset-react @6.24.1
- react @15.6.2
- react-dom @15.6.2
- webpack @3.6.0
- webpack @3.7.1

View File

@ -36,7 +36,7 @@
padding-bottom: var(--base-spacing);
}
.landing-page .panel header input[type=search] {
.landing-page .panel header input[type="search"] {
flex: 1;
background-color: var(--theme-tab-toolbar-background);
color: var(--theme-comment);
@ -115,11 +115,9 @@
color: var(--theme-comment);
font-size: calc(var(--ui-element-font-size) / 1);
margin: var(--base-spacing) auto;
line-height: 1.4em;
}
.landing-page .panel .under-construction .under-construction-message {
max-width: 350px;
line-height: 1.4em;
}
.landing-page .panel .under-construction .github-link {
@ -255,24 +253,27 @@
padding-left: var(--base-spacing);
}
.landing-page .sidebar .title-wrapper .launchpad-container .launchpad-container-icon {
.landing-page
.sidebar
.title-wrapper
.launchpad-container
.launchpad-container-icon {
display: inline-block;
}
.landing-page .sidebar .title-wrapper .launchpad-container svg {
width: 24px;
height: 24px;
.landing-page .sidebar .title-wrapper .launchpad-container .rocket svg {
width: 18px;
height: 18px;
}
.landing-page .sidebar .title-wrapper .launchpad-container svg path {
width: 24px;
height: 24px;
fill: var(--theme-body-color);
}
.landing-page .sidebar .title-wrapper .launchpad-container .launchpad-container-title {
.landing-page
.sidebar
.title-wrapper
.launchpad-container
.launchpad-container-title {
display: inline;
padding-left: 3px;
font-weight: normal;
}
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
@ -501,6 +502,12 @@ body {
justify-content: space-between;
border: 1px solid transparent;
}
@media (max-width: 640px) {
.shortcuts-section {
width: 100%;
}
}
:root {
--arrow-width: 10px;
}
@ -942,11 +949,18 @@ menuseparator {
margin-inline-end: 5px;
}
.arrow svg {
fill: var(--theme-splitter-color);
img.arrow {
mask: url("chrome://devtools/skin/images/debugger/arrow.svg");
margin-top: 3px;
transition: transform 0.25s ease;
width: 10px;
width: 9px;
height: 9px;
padding-top: 9px;
background: var(--theme-splitter-color);
mask-size: 100%;
display: inline-block;
margin-bottom: 1px;
transform: rotate(-90deg);
transition: transform 0.18s ease;
}
html:not([dir="rtl"]) .arrow svg {
@ -1441,12 +1455,20 @@ html[dir="rtl"] .managed-tree .tree .node > div {
padding: 0px 0px 0px 3px;
}
.sources-list .tree .arrow svg {
margin-top: 0px;
.sources-list .tree img.arrow {
margin-right: 5px;
}
.theme-dark .sources-list .tree .node:not(.focused) svg {
fill: var(--theme-content-color3);
.sources-list .tree .focused img.arrow {
background-color: white;
}
.sources-list .tree img.arrow.expanded {
transform: rotate(0deg);
}
.theme-dark .sources-list .tree .node:not(.focused) img.arrow {
background: var(--theme-content-color3);
}
.theme-dark .source-list .tree .node.focused {
@ -1468,12 +1490,12 @@ html[dir="rtl"] .managed-tree .tree .node > div {
}
.no-sources-message {
width: 100%;
font-style: italic;
text-align: center;
padding: 0.5em;
font-size: 12px;
color: var(--theme-comment-alt);
font-weight: lighter;
padding-top: 5px;
flex-grow: 1;
display: flex;
user-select: none;
justify-content: center;
align-items: center;
}

File diff suppressed because one or more lines are too long

View File

@ -28726,55 +28726,53 @@ module.exports = isEmpty;
/* 1021 */,
/* 1022 */,
/* 1023 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "extractScriptTags", function() { return extractScriptTags; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "generateWhitespace", function() { return generateWhitespace; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getCandidateScriptLocations", function() { return getCandidateScriptLocations; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseScript", function() { return parseScript; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseScripts", function() { return parseScripts; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "parseScriptTags", function() { return parseScriptTags; });
const babylon = __webpack_require__(435);
const types = __webpack_require__(493);
const startScript = /<script[^>]*>/im;
const endScript = /<\/script\s*>/im;
Object.defineProperty(exports, "__esModule", {
value: true
});
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
var babylon = __webpack_require__(435);
var types = __webpack_require__(493);
var startScript = /<script[^>]*>/im;
var endScript = /<\/script\s*>/im;
// https://stackoverflow.com/questions/5034781/js-regex-to-split-by-line#comment5633979_5035005
const newLines = /\r\n|[\n\v\f\r\x85\u2028\u2029]/;
var newLines = /\r\n|[\n\v\f\r\x85\u2028\u2029]/;
function getCandidateScriptLocations(source, index) {
const i = index || 0;
const str = source.substring(i);
var i = index || 0;
var str = source.substring(i);
const startMatch = startScript.exec(str);
var startMatch = startScript.exec(str);
if (startMatch) {
const startsAt = startMatch.index + startMatch[0].length;
const afterStart = str.substring(startsAt);
const endMatch = endScript.exec(afterStart);
var startsAt = startMatch.index + startMatch[0].length;
var afterStart = str.substring(startsAt);
var endMatch = endScript.exec(afterStart);
if (endMatch) {
const locLength = endMatch.index;
const locIndex = i + startsAt;
var locLength = endMatch.index;
var locIndex = i + startsAt;
return [
adjustForLineAndColumn(source, {
index: locIndex,
length: locLength,
source: source.substring(locIndex, locIndex + locLength)
}),
...getCandidateScriptLocations(
source,
locIndex + locLength + endMatch[0].length
)
];
return [adjustForLineAndColumn(source, {
index: locIndex,
length: locLength,
source: source.substring(locIndex, locIndex + locLength)
})].concat(_toConsumableArray(getCandidateScriptLocations(source, locIndex + locLength + endMatch[0].length)));
}
}
return [];
}
function parseScript({source, line}) {
function parseScript(_ref) {
var source = _ref.source,
line = _ref.line;
// remove empty or only whitespace scripts
if (source.length === 0 || /^\s+$/.test(source)) {
return null;
@ -28790,7 +28788,9 @@ function parseScript({source, line}) {
}
}
function parseScripts(locations, parser = parseScript) {
function parseScripts(locations) {
var parser = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : parseScript;
return locations.map(parser);
}
@ -28799,35 +28799,31 @@ function generateWhitespace(length) {
}
function calcLineAndColumn(source, index) {
const lines = source
.substring(0, index)
.split(newLines)
const line = lines.length;
const column = lines.pop().length + 1;
var lines = source.substring(0, index).split(newLines);
var line = lines.length;
var column = lines.pop().length + 1;
return {
column,
line
column: column,
line: line
};
}
function adjustForLineAndColumn(fullSource, location) {
const {column, line} = calcLineAndColumn(fullSource, location.index);
var _calcLineAndColumn = calcLineAndColumn(fullSource, location.index),
column = _calcLineAndColumn.column,
line = _calcLineAndColumn.line;
return Object.assign({}, location, {
line,
column,
line: line,
column: column,
// prepend whitespace for scripts that do not start on the first column
source: generateWhitespace(column) + location.source
});
}
function parseScriptTags(source, parser) {
const scripts = parseScripts(
getCandidateScriptLocations(source),
parser
).filter(
types.isFile
).reduce((main, script) => {
var scripts = parseScripts(getCandidateScriptLocations(source), parser).filter(types.isFile).reduce(function (main, script) {
return {
statements: main.statements.concat(script.program.body),
comments: main.comments.concat(script.comments),
@ -28839,14 +28835,10 @@ function parseScriptTags(source, parser) {
tokens: []
});
const program = types.program(scripts.statements);
const file = types.file(
program,
scripts.comments,
scripts.tokens
);
var program = types.program(scripts.statements);
var file = types.file(program, scripts.comments, scripts.tokens);
const end = calcLineAndColumn(source, source.length);
var end = calcLineAndColumn(source, source.length);
file.start = program.start = 0;
file.end = program.end = source.length;
file.loc = program.loc = {
@ -28854,32 +28846,31 @@ function parseScriptTags(source, parser) {
line: 1,
column: 0
},
end
}
end: end
};
return file;
}
function extractScriptTags(source) {
return parseScripts(
getCandidateScriptLocations(source),
loc => {
const ast = parseScript(loc);
return parseScripts(getCandidateScriptLocations(source), function (loc) {
var ast = parseScript(loc);
if (ast) {
return loc;
}
return null;
if (ast) {
return loc;
}
).filter(
types.isFile
);
return null;
}).filter(types.isFile);
}
/* harmony default export */ __webpack_exports__["default"] = (parseScriptTags);
exports.default = parseScriptTags;
exports.extractScriptTags = extractScriptTags;
exports.generateWhitespace = generateWhitespace;
exports.getCandidateScriptLocations = getCandidateScriptLocations;
exports.parseScript = parseScript;
exports.parseScripts = parseScripts;
exports.parseScriptTags = parseScriptTags;
/***/ }),
/* 1024 */,

View File

@ -1,12 +0,0 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
const {
setupTestRunner,
returnvalues
} = require("devtools/client/debugger/new/integration-tests");
add_task(function*() {
setupTestRunner(this);
yield returnvalues(this);
});

View File

@ -54,7 +54,7 @@ FontInspector.prototype = {
id: "fontinspector",
key: "fontinspector",
store: this.store,
title: INSPECTOR_L10N.getStr("inspector.sidebar.fontInspectorTitle")
title: INSPECTOR_L10N.getStr("inspector.sidebar.fontInspectorTitle"),
}, app);
// Expose the provider to let inspector.js use it in setupSidebar.

View File

@ -27,8 +27,6 @@ const CSS_GRID_COUNT_HISTOGRAM_ID = "DEVTOOLS_NUMBER_OF_CSS_GRIDS_IN_A_PAGE";
const SHOW_GRID_AREAS = "devtools.gridinspector.showGridAreas";
const SHOW_GRID_LINE_NUMBERS = "devtools.gridinspector.showGridLineNumbers";
const SHOW_INFINITE_LINES_PREF = "devtools.gridinspector.showInfiniteLines";
// @remove after release 56 (See Bug 1355747)
const PROMOTE_COUNT_PREF = "devtools.promote.layoutview";
// Default grid colors.
const GRID_COLORS = [
@ -569,9 +567,6 @@ GridInspector.prototype = {
return;
}
// @remove after release 56 (See Bug 1355747)
Services.prefs.setIntPref(PROMOTE_COUNT_PREF, 0);
this.inspector.reflowTracker.trackReflows(this, this.onReflow);
this.updateGridPanel();
},

View File

@ -16,10 +16,8 @@ Services.scriptloader.loadSubScript(
"chrome://mochitests/content/browser/devtools/client/framework/test/shared-redux-head.js",
this);
Services.prefs.setBoolPref("devtools.promote.layoutview.showPromoteBar", false);
Services.prefs.setIntPref("devtools.toolbox.footer.height", 350);
registerCleanupFunction(() => {
Services.prefs.clearUserPref("devtools.promote.layoutview.showPromoteBar");
Services.prefs.clearUserPref("devtools.toolbox.footer.height");
});

View File

@ -18,7 +18,6 @@ const BoxModelTypes = require("devtools/client/inspector/boxmodel/types");
const GridTypes = require("devtools/client/inspector/grids/types");
const Accordion = createFactory(require("./Accordion"));
const LayoutPromoteBar = createFactory(require("./LayoutPromoteBar"));
const BOXMODEL_STRINGS_URI = "devtools/client/locales/boxmodel.properties";
const BOXMODEL_L10N = new LocalizationHelper(BOXMODEL_STRINGS_URI);
@ -41,7 +40,6 @@ const App = createClass({
setSelectedNode: PropTypes.func.isRequired,
showBoxModelProperties: PropTypes.bool.isRequired,
onHideBoxModelHighlighter: PropTypes.func.isRequired,
onPromoteLearnMoreClick: PropTypes.func.isRequired,
onSetGridOverlayColor: PropTypes.func.isRequired,
onShowBoxModelEditor: PropTypes.func.isRequired,
onShowBoxModelHighlighter: PropTypes.func.isRequired,
@ -54,15 +52,10 @@ const App = createClass({
mixins: [ addons.PureRenderMixin ],
render() {
let { onPromoteLearnMoreClick } = this.props;
return dom.div(
{
id: "layout-container",
},
LayoutPromoteBar({
onPromoteLearnMoreClick,
}),
Accordion({
items: [
{

View File

@ -1,76 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
/**
* !!!! TO BE REMOVED AFTER RELEASE 56 !!!!
* !!!! !!!!
* !!!! This file is a temporary panel that should only be used for release 56 to !!!!
* !!!! promote the new layout panel. After release 56, it should be removed. !!!!
* !!!! See bug 1355747. !!!!
*/
const Services = require("Services");
const { addons, createClass, DOM: dom, PropTypes } =
require("devtools/client/shared/vendor/react");
const { LocalizationHelper } = require("devtools/shared/l10n");
const LAYOUT_STRINGS_URI = "devtools/client/locales/layout.properties";
const LAYOUT_L10N = new LocalizationHelper(LAYOUT_STRINGS_URI);
const SHOW_PROMOTE_BAR_PREF = "devtools.promote.layoutview.showPromoteBar";
module.exports = createClass({
displayName: "LayoutPromoteBar",
propTypes: {
onPromoteLearnMoreClick: PropTypes.func.isRequired,
},
mixins: [ addons.PureRenderMixin ],
getInitialState() {
return {
showPromoteBar: Services.prefs.getBoolPref(SHOW_PROMOTE_BAR_PREF)
};
},
onPromoteCloseButtonClick() {
Services.prefs.setBoolPref(SHOW_PROMOTE_BAR_PREF, false);
this.setState({ showPromoteBar: false });
},
render() {
let { onPromoteLearnMoreClick } = this.props;
let { showPromoteBar } = this.state;
return showPromoteBar ?
dom.div({ className: "layout-promote-bar" },
dom.span({ className: "layout-promote-info-icon" }),
dom.div({ className: "layout-promote-message" },
LAYOUT_L10N.getStr("layout.promoteMessage"),
dom.a(
{
className: "layout-promote-learn-more-link theme-link",
href: "#",
onClick: onPromoteLearnMoreClick,
},
LAYOUT_L10N.getStr("layout.learnMore")
)
),
dom.button(
{
className: "layout-promote-close-button devtools-button",
onClick: this.onPromoteCloseButtonClick,
}
)
)
:
null;
},
});

View File

@ -8,5 +8,4 @@ DevToolsModules(
'Accordion.css',
'Accordion.js',
'App.js',
'LayoutPromoteBar.js',
)

View File

@ -4,8 +4,6 @@
"use strict";
const Services = require("Services");
const { createFactory, createElement } = require("devtools/client/shared/vendor/react");
const { Provider } = require("devtools/client/shared/vendor/react-redux");
@ -15,12 +13,6 @@ const { LocalizationHelper } = require("devtools/shared/l10n");
const INSPECTOR_L10N =
new LocalizationHelper("devtools/client/locales/inspector.properties");
// @remove after release 56 (See Bug 1355747)
const PROMOTE_COUNT_PREF = "devtools.promote.layoutview";
// @remove after release 56 (See Bug 1355747)
const GRID_LINK = "https://www.mozilla.org/en-US/developer/css-grid/?utm_source=gridtooltip&utm_medium=devtools&utm_campaign=cssgrid_layout";
loader.lazyRequireGetter(this, "GridInspector", "devtools/client/inspector/grids/grid-inspector");
function LayoutView(inspector, window) {
@ -28,8 +20,6 @@ function LayoutView(inspector, window) {
this.inspector = inspector;
this.store = inspector.store;
this.onPromoteLearnMoreClick = this.onPromoteLearnMoreClick.bind(this);
this.init();
}
@ -65,10 +55,6 @@ LayoutView.prototype = {
onToggleShowInfiniteLines,
} = this.gridInspector.getComponentProps();
let {
onPromoteLearnMoreClick,
} = this;
let app = App({
getSwatchColorPickerTooltip,
setSelectedNode,
@ -78,7 +64,6 @@ LayoutView.prototype = {
*/
showBoxModelProperties: true,
onHideBoxModelHighlighter,
onPromoteLearnMoreClick,
onSetGridOverlayColor,
onShowBoxModelEditor,
onShowBoxModelHighlighter,
@ -98,10 +83,6 @@ LayoutView.prototype = {
key: "layoutview",
store: this.store,
title: INSPECTOR_L10N.getStr("inspector.sidebar.layoutViewTitle2"),
// @remove after release 56 (See Bug 1355747)
badge: Services.prefs.getIntPref(PROMOTE_COUNT_PREF) > 0 ?
INSPECTOR_L10N.getStr("inspector.sidebar.newBadge") : null,
showBadge: () => Services.prefs.getIntPref(PROMOTE_COUNT_PREF) > 0,
}, app);
// Expose the provider to let inspector.js use it in setupSidebar.
@ -119,11 +100,6 @@ LayoutView.prototype = {
this.store = null;
},
onPromoteLearnMoreClick() {
let browserWin = this.inspector.target.tab.ownerDocument.defaultView;
browserWin.openUILinkIn(GRID_LINK, "current");
}
};
module.exports = LayoutView;

View File

@ -296,3 +296,6 @@ devtools.jar:
skin/images/firebug/command-measure.svg (themes/images/firebug/command-measure.svg)
skin/images/firebug/command-rulers.svg (themes/images/firebug/command-rulers.svg)
skin/images/firebug/command-noautohide.svg (themes/images/firebug/command-noautohide.svg)
# Debugger
skin/images/debugger/arrow.svg (themes/images/debugger/arrow.svg)

View File

@ -643,6 +643,11 @@ watchExpressionsSeparatorLabel2=\u0020→
# and its real name (if available).
functionSearchSeparatorLabel=
# LOCALIZATION NOTE(gotoLineModal.placeholder): The placeholder
# text displayed when the user searches for specific lines in a file
gotoLineModal.placeholder=Go to line…
gotoLineModal.key=CmdOrCtrl+Shift+;
# LOCALIZATION NOTE(symbolSearch.search.functionsPlaceholder): The placeholder
# text displayed when the user searches for functions in a file
symbolSearch.search.functionsPlaceholder=Search functions…

View File

@ -44,11 +44,3 @@ layout.overlayGrid=Overlay Grid
# LOCALIZATION NOTE (layout.rowColumnPositions): The row and column position of a grid
# cell shown in the grid cell infobar when hovering over the CSS grid outline.
layout.rowColumnPositions=Row %S / Column %S
# LOCALIZATION NOTE (layout.promoteMessage): Text displayed in the promote bar for the
# layout panel.
layout.promoteMessage=Explore CSS Grids with the latest CSS Grid Inspector.
# LOCALIZATION NOTE (layout.learnMore): Text for the link displayed in the promote bar
# for the layout panel.
layout.learnMore=Learn more…

View File

@ -12,10 +12,6 @@ pref("devtools.debugger.remote-timeout", 20000);
pref("devtools.debugger.pause-on-exceptions", false);
pref("devtools.debugger.ignore-caught-exceptions", false);
pref("devtools.debugger.source-maps-enabled", true);
// Temporarily leave this in place, even though it is unused, so the
// options pane doesn't break.
// https://bugzilla.mozilla.org/show_bug.cgi?id=1371849
pref("devtools.debugger.client-source-maps-enabled", true);
pref("devtools.debugger.pretty-print-enabled", true);
pref("devtools.debugger.auto-pretty-print", false);
pref("devtools.debugger.auto-black-box", true);

View File

@ -61,13 +61,6 @@ pref("devtools.inspector.colorWidget.enabled", false);
// Enable the CSS shapes highlighter
pref("devtools.inspector.shapesHighlighter.enabled", true);
// Counter to promote the inspector layout view.
// @remove after release 56 (See Bug 1355747)
pref("devtools.promote.layoutview", 1);
// Whether or not to show the promote bar in the layout view
// @remove after release 56 (See Bug 1355747)
pref("devtools.promote.layoutview.showPromoteBar", true);
// Grid highlighter preferences
pref("devtools.gridinspector.gridOutlineMaxColumns", 50);
pref("devtools.gridinspector.gridOutlineMaxRows", 50);

View File

@ -0,0 +1,6 @@
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 16 16">
<path d="M8 13.4c-.5 0-.9-.2-1.2-.6L.4 5.2C0 4.7-.1 4.3.2 3.7S1 3 1.6 3h12.8c.6 0 1.2.1 1.4.7.3.6.2 1.1-.2 1.6l-6.4 7.6c-.3.4-.7.5-1.2.5z"/>
</svg>


View File

@ -48,56 +48,6 @@
align-items: center;
}
/**
* Layout Promote Bar
*/
.layout-promote-bar {
align-items: center;
background-color: var(--theme-toolbar-background);
border-bottom: 1px solid var(--theme-splitter-color);
display: flex;
font-size: 11px;
padding: 5px;
transition: all 0.25s ease;
width: 100%;
-moz-user-select: none;
}
.layout-promote-bar:hover {
background-color: var(--theme-toolbar-hover);
}
.layout-promote-info-icon {
display: inline-block;
background-size: 16px;
width: 16px;
height: 16px;
margin: 6px;
background-image: url("chrome://browser/skin/info.svg");
}
.layout-promote-message {
flex: 1;
}
.layout-promote-learn-more-link {
margin-inline-start: 5px;
}
.layout-promote-learn-more-link:hover {
text-decoration: underline;
}
.layout-promote-close-button {
margin: 6px;
}
.layout-promote-close-button::before {
background-image: url("chrome://devtools/skin/images/close.svg");
margin: -6px 0 0 -6px;
}
/**
* Grid Container
*/

View File

@ -408,8 +408,7 @@ CustomElementRegistry::SyncInvokeReactions(nsIDocument::ElementCallbackType aTyp
}
UniquePtr<CustomElementReaction> reaction(Move(
MakeUnique<CustomElementCallbackReaction>(aDefinition,
Move(callback))));
MakeUnique<CustomElementCallbackReaction>(Move(callback))));
RefPtr<SyncInvokeReactionRunnable> runnable =
new SyncInvokeReactionRunnable(Move(reaction), aCustomElement);
@ -455,8 +454,7 @@ CustomElementRegistry::EnqueueLifecycleCallback(nsIDocument::ElementCallbackType
CustomElementReactionsStack* reactionsStack =
docGroup->CustomElementReactionsStack();
reactionsStack->EnqueueCallbackReaction(aCustomElement, definition,
Move(callback));
reactionsStack->EnqueueCallbackReaction(aCustomElement, Move(callback));
}
void
@ -1033,11 +1031,9 @@ CustomElementReactionsStack::EnqueueUpgradeReaction(Element* aElement,
void
CustomElementReactionsStack::EnqueueCallbackReaction(Element* aElement,
CustomElementDefinition* aDefinition,
UniquePtr<CustomElementCallback> aCustomElementCallback)
{
Enqueue(aElement, new CustomElementCallbackReaction(aDefinition,
Move(aCustomElementCallback)));
Enqueue(aElement, new CustomElementCallbackReaction(Move(aCustomElementCallback)));
}
void
@ -1186,7 +1182,7 @@ CustomElementDefinition::CustomElementDefinition(nsAtom* aType,
nsAtom* aLocalName,
Function* aConstructor,
nsTArray<RefPtr<nsAtom>>&& aObservedAttributes,
JSObject* aPrototype,
JS::Handle<JSObject*> aPrototype,
LifecycleCallbacks* aCallbacks,
uint32_t aDocOrder)
: mType(aType),

View File

@ -165,7 +165,7 @@ struct CustomElementDefinition
nsAtom* aLocalName,
Function* aConstructor,
nsTArray<RefPtr<nsAtom>>&& aObservedAttributes,
JSObject* aPrototype,
JS::Handle<JSObject*> aPrototype,
mozilla::dom::LifecycleCallbacks* aCallbacks,
uint32_t aDocOrder);
@ -214,40 +214,32 @@ private:
class CustomElementReaction
{
public:
explicit CustomElementReaction(CustomElementDefinition* aDefinition)
: mDefinition(aDefinition)
{
}
virtual ~CustomElementReaction() = default;
virtual void Invoke(Element* aElement, ErrorResult& aRv) = 0;
virtual void Traverse(nsCycleCollectionTraversalCallback& aCb) const
{
}
protected:
CustomElementDefinition* mDefinition;
};
class CustomElementUpgradeReaction final : public CustomElementReaction
{
public:
explicit CustomElementUpgradeReaction(CustomElementDefinition* aDefinition)
: CustomElementReaction(aDefinition)
: mDefinition(aDefinition)
{
}
private:
virtual void Invoke(Element* aElement, ErrorResult& aRv) override;
CustomElementDefinition* mDefinition;
};
class CustomElementCallbackReaction final : public CustomElementReaction
{
public:
CustomElementCallbackReaction(CustomElementDefinition* aDefinition,
UniquePtr<CustomElementCallback> aCustomElementCallback)
: CustomElementReaction(aDefinition)
, mCustomElementCallback(Move(aCustomElementCallback))
explicit CustomElementCallbackReaction(UniquePtr<CustomElementCallback> aCustomElementCallback)
: mCustomElementCallback(Move(aCustomElementCallback))
{
}
@ -291,7 +283,6 @@ public:
* https://html.spec.whatwg.org/multipage/scripting.html#enqueue-a-custom-element-callback-reaction
*/
void EnqueueCallbackReaction(Element* aElement,
CustomElementDefinition* aDefinition,
UniquePtr<CustomElementCallback> aCustomElementCallback);
// [CEReactions] Before executing the algorithm's steps

View File

@ -161,31 +161,47 @@ CallbackObject::CallSetup::CallSetup(CallbackObject* aCallback,
return;
}
// First, find the real underlying callback.
JSObject* realCallback = js::UncheckedUnwrap(wrappedCallback);
nsIGlobalObject* globalObject = nullptr;
// Now get the global for this callback. Note that for the case of
// JS-implemented WebIDL we never have a window here.
nsGlobalWindow* win = mIsMainThread && !aIsJSImplementedWebIDL
? xpc::WindowGlobalOrNull(realCallback)
: nullptr;
if (win) {
MOZ_ASSERT(win->IsInnerWindow());
// We don't want to run script in windows that have been navigated away
// from.
if (!win->AsInner()->HasActiveDocument()) {
aRv.ThrowDOMException(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
NS_LITERAL_CSTRING("Refusing to execute function from window "
"whose document is no longer active."));
return;
{
// First, find the real underlying callback.
JSObject* realCallback = js::UncheckedUnwrap(wrappedCallback);
// Check that it's ok to run this callback. JS-implemented WebIDL is always
// OK to run, since it runs with Chrome privileges anyway.
if (mIsMainThread && !aIsJSImplementedWebIDL) {
// Make sure to use realCallback to get the global of the callback
// object, not the wrapper.
if (!xpc::Scriptability::Get(realCallback).Allowed()) {
aRv.ThrowDOMException(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
NS_LITERAL_CSTRING("Refusing to execute function from global in which "
"script is disabled."));
return;
}
}
// Now get the global for this callback. Note that for the case of
// JS-implemented WebIDL we never have a window here.
nsGlobalWindow* win = mIsMainThread && !aIsJSImplementedWebIDL
? xpc::WindowGlobalOrNull(realCallback)
: nullptr;
if (win) {
MOZ_ASSERT(win->IsInnerWindow());
// We don't want to run script in windows that have been navigated away
// from.
if (!win->AsInner()->HasActiveDocument()) {
aRv.ThrowDOMException(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
NS_LITERAL_CSTRING("Refusing to execute function from window "
"whose document is no longer active."));
return;
}
globalObject = win;
} else {
// No DOM Window. Store the global.
JSObject* global = js::GetGlobalForObjectCrossCompartment(realCallback);
globalObject = xpc::NativeGlobal(global);
MOZ_ASSERT(globalObject);
}
globalObject = win;
} else {
// No DOM Window. Store the global.
JSObject* global = js::GetGlobalForObjectCrossCompartment(realCallback);
globalObject = xpc::NativeGlobal(global);
MOZ_ASSERT(globalObject);
}
// Bail out if there's no useful global. This seems to happen intermittently
@ -227,22 +243,6 @@ CallbackObject::CallSetup::CallSetup(CallbackObject* aCallback,
// JSContext. (Rooted<> does not care about requests or compartments.)
mRootedCallable.emplace(cx, aCallback->CallbackOrNull());
// JS-implemented WebIDL is always OK to run, since it runs with Chrome
// privileges anyway.
if (mIsMainThread && !aIsJSImplementedWebIDL) {
// Check that it's ok to run this callback at all.
// Make sure to use realCallback to get the global of the callback object,
// not the wrapper.
bool allowed = xpc::Scriptability::Get(realCallback).Allowed();
if (!allowed) {
aRv.ThrowDOMException(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
NS_LITERAL_CSTRING("Refusing to execute function from global in which "
"script is disabled."));
return;
}
}
mAsyncStack.emplace(cx, aCallback->GetCreationStack());
if (*mAsyncStack) {
mAsyncStackSetter.emplace(cx, *mAsyncStack, aExecutionReason);

View File

@ -416,7 +416,6 @@ TabChild::TabChild(nsIContentChild* aManager,
, mActiveSuppressDisplayport(0)
, mLayersId(0)
, mBeforeUnloadListeners(0)
, mLayersConnected(false)
, mDidFakeShow(false)
, mNotified(false)
, mTriedBrowserInit(false)
@ -1176,7 +1175,7 @@ TabChild::DoFakeShow(const TextureFactoryIdentifier& aTextureFactoryIdentifier,
const CompositorOptions& aCompositorOptions,
PRenderFrameChild* aRenderFrame, const ShowInfo& aShowInfo)
{
mLayersConnected = aRenderFrame ? true : false;
mLayersConnected = aRenderFrame ? Some(true) : Some(false);
InitRenderingState(aTextureFactoryIdentifier, aLayersId, aCompositorOptions, aRenderFrame);
RecvShow(ScreenIntSize(0, 0), aShowInfo, mParentIsActive, nsSizeMode_Normal);
mDidFakeShow = true;
@ -1275,7 +1274,7 @@ TabChild::RecvInitRendering(const TextureFactoryIdentifier& aTextureFactoryIdent
{
MOZ_ASSERT((!mDidFakeShow && aRenderFrame) || (mDidFakeShow && !aRenderFrame));
mLayersConnected = aLayersConnected;
mLayersConnected = Some(aLayersConnected);
InitRenderingState(aTextureFactoryIdentifier, aLayersId, aCompositorOptions, aRenderFrame);
return IPC_OK();
}
@ -2800,6 +2799,7 @@ TabChild::InitRenderingState(const TextureFactoryIdentifier& aTextureFactoryIden
mPuppetWidget->InitIMEState();
if (!aRenderFrame) {
mLayersConnected = Some(false);
NS_WARNING("failed to construct RenderFrame");
return;
}
@ -2811,6 +2811,7 @@ TabChild::InitRenderingState(const TextureFactoryIdentifier& aTextureFactoryIden
// compositor context.
PCompositorBridgeChild* compositorChild = CompositorBridgeChild::Get();
if (!compositorChild) {
mLayersConnected = Some(false);
NS_WARNING("failed to get CompositorBridgeChild instance");
return;
}
@ -2829,32 +2830,21 @@ TabChild::InitRenderingState(const TextureFactoryIdentifier& aTextureFactoryIden
mLayersId = aLayersId;
}
LayerManager* lm = mPuppetWidget->GetLayerManager();
if (lm->AsWebRenderLayerManager()) {
lm->AsWebRenderLayerManager()->Initialize(compositorChild,
wr::AsPipelineId(aLayersId),
&mTextureFactoryIdentifier);
MOZ_ASSERT(!mPuppetWidget->HasLayerManager());
bool success = false;
if (mLayersConnected == Some(true)) {
success = CreateRemoteLayerManager(compositorChild);
}
if (success) {
MOZ_ASSERT(mLayersConnected == Some(true));
// Succeeded to create "remote" layer manager
ImageBridgeChild::IdentifyCompositorTextureHost(mTextureFactoryIdentifier);
gfx::VRManagerChild::IdentifyTextureHost(mTextureFactoryIdentifier);
InitAPZState();
}
ShadowLayerForwarder* lf =
mPuppetWidget->GetLayerManager(
nullptr, mTextureFactoryIdentifier.mParentBackend)
->AsShadowForwarder();
if (lf) {
nsTArray<LayersBackend> backends;
backends.AppendElement(mTextureFactoryIdentifier.mParentBackend);
PLayerTransactionChild* shadowManager =
compositorChild->SendPLayerTransactionConstructor(backends, aLayersId);
if (shadowManager) {
lf->SetShadowManager(shadowManager);
lf->IdentifyTextureHost(mTextureFactoryIdentifier);
ImageBridgeChild::IdentifyCompositorTextureHost(mTextureFactoryIdentifier);
gfx::VRManagerChild::IdentifyTextureHost(mTextureFactoryIdentifier);
InitAPZState();
}
} else {
// Fallback to BasicManager
mLayersConnected = Some(false);
}
nsCOMPtr<nsIObserverService> observerService =
@ -2867,6 +2857,42 @@ TabChild::InitRenderingState(const TextureFactoryIdentifier& aTextureFactoryIden
}
}
bool
TabChild::CreateRemoteLayerManager(mozilla::layers::PCompositorBridgeChild* aCompositorChild)
{
MOZ_ASSERT(aCompositorChild);
bool success = false;
if (gfxVars::UseWebRender()) {
success = mPuppetWidget->CreateRemoteLayerManager([&] (LayerManager* aLayerManager) -> bool {
MOZ_ASSERT(aLayerManager->AsWebRenderLayerManager());
return aLayerManager->AsWebRenderLayerManager()->Initialize(aCompositorChild,
wr::AsPipelineId(mLayersId),
&mTextureFactoryIdentifier);
});
} else {
nsTArray<LayersBackend> ignored;
PLayerTransactionChild* shadowManager = aCompositorChild->SendPLayerTransactionConstructor(ignored, LayersId());
if (shadowManager &&
shadowManager->SendGetTextureFactoryIdentifier(&mTextureFactoryIdentifier) &&
mTextureFactoryIdentifier.mParentBackend != LayersBackend::LAYERS_NONE)
{
success = true;
}
if (!success) {
NS_WARNING("failed to allocate layer transaction");
} else {
success = mPuppetWidget->CreateRemoteLayerManager([&] (LayerManager* aLayerManager) -> bool {
ShadowLayerForwarder* lf = aLayerManager->AsShadowForwarder();
lf->SetShadowManager(shadowManager);
lf->IdentifyTextureHost(mTextureFactoryIdentifier);
return true;
});
}
}
return success;
}
void
TabChild::InitAPZState()
{
@ -3164,33 +3190,9 @@ TabChild::ReinitRendering()
bool success = false;
RefPtr<CompositorBridgeChild> cb = CompositorBridgeChild::Get();
if (gfxVars::UseWebRender()) {
success = mPuppetWidget->RecreateLayerManager([&] (LayerManager* aLayerManager) -> bool {
MOZ_ASSERT(aLayerManager->AsWebRenderLayerManager());
return aLayerManager->AsWebRenderLayerManager()->Initialize(cb,
wr::AsPipelineId(mLayersId),
&mTextureFactoryIdentifier);
});
} else {
nsTArray<LayersBackend> ignored;
PLayerTransactionChild* shadowManager = cb->SendPLayerTransactionConstructor(ignored, LayersId());
if (shadowManager &&
shadowManager->SendGetTextureFactoryIdentifier(&mTextureFactoryIdentifier) &&
mTextureFactoryIdentifier.mParentBackend != LayersBackend::LAYERS_NONE)
{
success = true;
}
if (!success) {
NS_WARNING("failed to re-allocate layer transaction");
return;
}
success = mPuppetWidget->RecreateLayerManager([&] (LayerManager* aLayerManager) -> bool {
ShadowLayerForwarder* lf = aLayerManager->AsShadowForwarder();
lf->SetShadowManager(shadowManager);
lf->IdentifyTextureHost(mTextureFactoryIdentifier);
return true;
});
if (cb) {
success = CreateRemoteLayerManager(cb);
}
if (!success) {
@ -3198,7 +3200,7 @@ TabChild::ReinitRendering()
return;
}
mLayersConnected = true;
mLayersConnected = Some(true);
ImageBridgeChild::IdentifyCompositorTextureHost(mTextureFactoryIdentifier);
gfx::VRManagerChild::IdentifyTextureHost(mTextureFactoryIdentifier);

View File

@ -63,6 +63,7 @@ class APZEventState;
class AsyncDragMetrics;
class IAPZCTreeManager;
class ImageCompositeNotification;
class PCompositorBridgeChild;
} // namespace layers
namespace widget {
@ -609,7 +610,7 @@ public:
static TabChild* GetFrom(uint64_t aLayersId);
uint64_t LayersId() { return mLayersId; }
bool IsLayersConnected() { return mLayersConnected; }
Maybe<bool> IsLayersConnected() { return mLayersConnected; }
void DidComposite(uint64_t aTransactionId,
const TimeStamp& aCompositeStart,
@ -884,6 +885,8 @@ private:
void InternalSetDocShellIsActive(bool aIsActive,
bool aPreserveLayers);
bool CreateRemoteLayerManager(mozilla::layers::PCompositorBridgeChild* aCompositorChild);
class DelayedDeleteRunnable;
TextureFactoryIdentifier mTextureFactoryIdentifier;
@ -901,7 +904,7 @@ private:
int64_t mBeforeUnloadListeners;
CSSRect mUnscaledOuterRect;
nscolor mLastBackgroundColor;
bool mLayersConnected;
Maybe<bool> mLayersConnected;
bool mDidFakeShow;
bool mNotified;
bool mTriedBrowserInit;

View File

@ -5062,7 +5062,6 @@ HTMLEditRules::CheckForEmptyBlock(nsINode* aStartNode,
if (emptyBlock && emptyBlock->IsEditable()) {
nsCOMPtr<nsINode> blockParent = emptyBlock->GetParentNode();
NS_ENSURE_TRUE(blockParent, NS_ERROR_FAILURE);
int32_t offset = blockParent->IndexOf(emptyBlock);
if (HTMLEditUtils::IsListItem(emptyBlock)) {
// Are we the first list item in the list?
@ -5070,9 +5069,10 @@ HTMLEditRules::CheckForEmptyBlock(nsINode* aStartNode,
if (htmlEditor->IsFirstEditableChild(emptyBlock)) {
nsCOMPtr<nsINode> listParent = blockParent->GetParentNode();
NS_ENSURE_TRUE(listParent, NS_ERROR_FAILURE);
int32_t listOffset = listParent->IndexOf(blockParent);
// If we are a sublist, skip the br creation
if (!HTMLEditUtils::IsList(listParent)) {
int32_t listOffset = listParent->IndexOf(blockParent);
// Create a br before list
NS_ENSURE_STATE(htmlEditor);
nsCOMPtr<Element> br =
@ -5086,6 +5086,8 @@ HTMLEditRules::CheckForEmptyBlock(nsINode* aStartNode,
// AfterEdit()
}
} else {
int32_t offset = blockParent->IndexOf(emptyBlock);
if (aAction == nsIEditor::eNext || aAction == nsIEditor::eNextWord ||
aAction == nsIEditor::eToEndOfLine) {
// Move to the start of the next node, if any
@ -5147,8 +5149,13 @@ HTMLEditRules::CheckForInvisibleBR(Element& aBlock,
}
testNode = rightmostNode->GetParentNode();
// Since rightmostNode is always the last child, its index is equal to the
// child count, so instead of IndexOf() we use the faster GetChildCount(),
// and assert the equivalence below.
testOffset = testNode->GetChildCount();
// Use offset + 1, so last node is included in our evaluation
testOffset = testNode->IndexOf(rightmostNode) + 1;
MOZ_ASSERT(testNode->IndexOf(rightmostNode) + 1 == testOffset);
} else if (aOffset) {
testNode = &aBlock;
// We'll check everything to the left of the input position
@ -5572,7 +5579,7 @@ HTMLEditRules::GetPromotedPoint(RulesEndpoint aWhere,
!htmlEditor->IsVisibleBRElement(priorNode) &&
!IsBlockNode(*priorNode)) {
offset = priorNode->GetParentNode()->IndexOf(priorNode);
child = node;
child = priorNode;
node = priorNode->GetParentNode();
priorNode = htmlEditor->GetPriorHTMLNode(node, offset, child, true);
}

View File

@ -0,0 +1,20 @@
<script>
function jsfuzzer() {
try { document.execCommand("insertUnorderedList", false); } catch(e) { }
try { document.execCommand("delete", false); } catch(e) { }
}
function eventhandler1() {
try { window.getSelection().collapse(htmlvar00001,1); } catch(e) { }
}
function eventhandler2() {
try { htmlvar00002.appendChild(htmlvar00001); } catch(e) { }
}
</script>
<body onload=jsfuzzer()>
<label id="htmlvar00002" contenteditable="true">
<details ontoggle="eventhandler2()" open="true">
</details>
</label>
<details ontoggle="eventhandler1()" open="true">
<font id="htmlvar00001" dir="rtl">
<summary>

View File

@ -90,3 +90,4 @@ load 1393171.html
load 1402469.html
load 1402904.html
load 1405747.html
load 1408170.html

View File

@ -18,34 +18,33 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1385905
<script class="testbody" type="application/javascript">
SimpleTest.waitForExplicitFinish();
SimpleTest.waitForFocus(() => {
SpecialPowers.pushPrefEnv({"set": [["editor.use_div_for_default_newlines", true]]}, () => {
function ensureNoMozBR() {
for (let br of document.querySelectorAll("#editor > div > br")) {
isnot(br.getAttribute("type"), "_moz",
"mozBR shouldn't be used with this test");
}
function ensureNoMozBR() {
for (let br of document.querySelectorAll("#editor > div > br")) {
isnot(br.getAttribute("type"), "_moz",
"mozBR shouldn't be used with this test");
}
var editor = document.getElementById("editor");
// Click the left blank area of the first line to set cursor to the start of "contents".
synthesizeMouse(editor, 3, 10, {});
synthesizeKey("KEY_Enter", { code: "Enter" });
is(editor.innerHTML, "<div><br></div><div>contents</div>",
"Typing Enter at start of the <div> element should split the <div> element");
synthesizeKey("KEY_ArrowUp", { code: "ArrowUp" });
synthesizeKey("x", { code: "KeyX" });
is(editor.innerHTML, "<div>x<br></div><div>contents</div>",
"Typing 'x' at the empty <div> element should just insert 'x' into the <div> element");
ensureNoMozBR();
synthesizeKey("KEY_Enter", { code: "Enter" });
is(editor.innerHTML, "<div>x</div><div><br></div><div>contents</div>",
"Typing Enter next to 'x' in the first <div> element should split the <div> element and inserts <br> element to a new <div> element");
ensureNoMozBR();
synthesizeKey("KEY_Enter", { code: "Enter" });
is(editor.innerHTML, "<div>x</div><div><br></div><div><br></div><div>contents</div>",
"Typing Enter in the empty <div> should split the <div> element and inserts <br> element to a new <div> element");
ensureNoMozBR();
SimpleTest.finish();
});
}
document.execCommand("defaultparagraphseparator", false, "div");
var editor = document.getElementById("editor");
// Click the left blank area of the first line to set cursor to the start of "contents".
synthesizeMouse(editor, 3, 10, {});
synthesizeKey("KEY_Enter", { code: "Enter" });
is(editor.innerHTML, "<div><br></div><div>contents</div>",
"Typing Enter at start of the <div> element should split the <div> element");
synthesizeKey("KEY_ArrowUp", { code: "ArrowUp" });
synthesizeKey("x", { code: "KeyX" });
is(editor.innerHTML, "<div>x<br></div><div>contents</div>",
"Typing 'x' at the empty <div> element should just insert 'x' into the <div> element");
ensureNoMozBR();
synthesizeKey("KEY_Enter", { code: "Enter" });
is(editor.innerHTML, "<div>x</div><div><br></div><div>contents</div>",
"Typing Enter next to 'x' in the first <div> element should split the <div> element and inserts <br> element to a new <div> element");
ensureNoMozBR();
synthesizeKey("KEY_Enter", { code: "Enter" });
is(editor.innerHTML, "<div>x</div><div><br></div><div><br></div><div>contents</div>",
"Typing Enter in the empty <div> should split the <div> element and inserts <br> element to a new <div> element");
ensureNoMozBR();
SimpleTest.finish();
});
</script>
</body>

View File

@ -94,7 +94,7 @@ CompositorBridgeChild::CompositorBridgeChild(CompositorManagerChild *aManager)
, mPaintLock("CompositorBridgeChild.mPaintLock")
, mOutstandingAsyncPaints(0)
, mOutstandingAsyncEndTransaction(false)
, mIsWaitingForPaint(false)
, mIsDelayingForAsyncPaints(false)
, mSlowFlushCount(0)
, mTotalFlushCount(0)
{
@ -1164,7 +1164,7 @@ CompositorBridgeChild::FlushAsyncPaints()
{
MonitorAutoLock lock(mPaintLock);
while (mIsWaitingForPaint) {
while (mOutstandingAsyncPaints > 0 || mOutstandingAsyncEndTransaction) {
lock.Wait();
}
@ -1198,7 +1198,7 @@ CompositorBridgeChild::NotifyBeginAsyncPaint(CapturedPaintState* aState)
// We must not be waiting for paints to complete yet. This would imply we
// started a new paint without waiting for a previous one, which could lead to
// incorrect rendering or IPDL deadlocks.
MOZ_ASSERT(!mIsWaitingForPaint);
MOZ_ASSERT(!mIsDelayingForAsyncPaints);
mOutstandingAsyncPaints++;
@ -1267,15 +1267,15 @@ CompositorBridgeChild::NotifyFinishedAsyncEndLayerTransaction()
mOutstandingAsyncEndTransaction = false;
// It's possible that we painted so fast that the main thread never reached
// the code that starts delaying messages. If so, mIsWaitingForPaint will be
// the code that starts delaying messages. If so, mIsDelayingForAsyncPaints will be
// false, and we can safely return.
if (mIsWaitingForPaint) {
if (mIsDelayingForAsyncPaints) {
ResumeIPCAfterAsyncPaint();
// Notify the main thread in case it's blocking. We do this unconditionally
// to avoid deadlocking.
lock.Notify();
}
// Notify the main thread in case it's blocking. We do this unconditionally
// to avoid deadlocking.
lock.Notify();
}
void
@ -1285,9 +1285,10 @@ CompositorBridgeChild::ResumeIPCAfterAsyncPaint()
mPaintLock.AssertCurrentThreadOwns();
MOZ_ASSERT(PaintThread::IsOnPaintThread());
MOZ_ASSERT(mOutstandingAsyncPaints == 0);
MOZ_ASSERT(mIsWaitingForPaint);
MOZ_ASSERT(!mOutstandingAsyncEndTransaction);
MOZ_ASSERT(mIsDelayingForAsyncPaints);
mIsWaitingForPaint = false;
mIsDelayingForAsyncPaints = false;
// It's also possible that the channel has shut down already.
if (!mCanSend || mActorDestroyed) {
@ -1304,12 +1305,12 @@ CompositorBridgeChild::PostponeMessagesIfAsyncPainting()
MonitorAutoLock lock(mPaintLock);
MOZ_ASSERT(!mIsWaitingForPaint);
MOZ_ASSERT(!mIsDelayingForAsyncPaints);
// We need to wait for async paints and the async end transaction as
// it will do texture synchronization
if (mOutstandingAsyncPaints > 0 || mOutstandingAsyncEndTransaction) {
mIsWaitingForPaint = true;
mIsDelayingForAsyncPaints = true;
GetIPCChannel()->BeginPostponingSends();
}
}

View File

@ -387,7 +387,7 @@ private:
// True if this CompositorBridge is currently delaying its messages until the
// paint thread completes. This is R/W on both the main and paint threads, and
// must be accessed within the paint lock.
bool mIsWaitingForPaint;
bool mIsDelayingForAsyncPaints;
uintptr_t mSlowFlushCount;
uintptr_t mTotalFlushCount;

View File

@ -246,24 +246,22 @@ WebRenderBridgeChild::DeallocExternalImageId(wr::ExternalImageId& aImageId)
SendRemoveExternalImageId(aImageId);
}
struct FontFileData
struct FontFileDataSink
{
wr::ByteBuffer mFontBuffer;
uint32_t mFontIndex;
wr::FontKey* mFontKey;
WebRenderBridgeChild* mWrBridge;
wr::IpcResourceUpdateQueue* mResources;
};
static void
WriteFontFileData(const uint8_t* aData, uint32_t aLength, uint32_t aIndex,
void* aBaton)
{
FontFileData* data = static_cast<FontFileData*>(aBaton);
FontFileDataSink* sink = static_cast<FontFileDataSink*>(aBaton);
if (!data->mFontBuffer.Allocate(aLength)) {
return;
}
memcpy(data->mFontBuffer.mData, aData, aLength);
*sink->mFontKey = sink->mWrBridge->GetNextFontKey();
data->mFontIndex = aIndex;
sink->mResources->AddRawFont(*sink->mFontKey, Range<uint8_t>(const_cast<uint8_t*>(aData), aLength), aIndex);
}
void
@ -306,22 +304,15 @@ WebRenderBridgeChild::GetFontKeyForScaledFont(gfx::ScaledFont* aScaledFont)
wr::FontKey fontKey = { wr::IdNamespace { 0 }, 0};
if (!mFontKeys.Get(unscaled, &fontKey)) {
FontFileData data;
if (!unscaled->GetFontFileData(WriteFontFileData, &data) ||
!data.mFontBuffer.mData) {
FontFileDataSink sink = { &fontKey, this, &resources };
if (!unscaled->GetFontFileData(WriteFontFileData, &sink)) {
return instanceKey;
}
fontKey.mNamespace = GetNamespace();
fontKey.mHandle = GetNextResourceId();
resources.AddRawFont(fontKey, data.mFontBuffer.AsSlice(), data.mFontIndex);
mFontKeys.Put(unscaled, fontKey);
}
instanceKey.mNamespace = GetNamespace();
instanceKey.mHandle = GetNextResourceId();
instanceKey = GetNextFontInstanceKey();
Maybe<wr::FontInstanceOptions> options;
Maybe<wr::FontInstancePlatformOptions> platformOptions;
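
The hunk above drops the intermediate FontFileData copy: the callback now receives a FontFileDataSink carrying the font key, the bridge, and the resource-update queue, and registers the raw bytes directly. A self-contained sketch of that sink-callback shape; FontKey, ResourceUpdateQueue and the hard-coded key value are mock-ups, not the real WebRender types:

#include <cstddef>
#include <cstdint>
#include <vector>

struct FontKey { uint32_t mNamespace; uint32_t mHandle; };

// Stand-in for wr::IpcResourceUpdateQueue: just records the raw font bytes.
struct ResourceUpdateQueue {
  void AddRawFont(FontKey, const uint8_t* aData, size_t aLength, uint32_t /*aIndex*/) {
    mRawFonts.emplace_back(aData, aData + aLength);
  }
  std::vector<std::vector<uint8_t>> mRawFonts;
};

// Everything the callback needs travels in the baton, so no copy is kept around.
struct FontFileDataSink {
  FontKey* mFontKey;
  ResourceUpdateQueue* mResources;
};

static void WriteFontFileData(const uint8_t* aData, uint32_t aLength, uint32_t aIndex,
                              void* aBaton) {
  auto* sink = static_cast<FontFileDataSink*>(aBaton);
  *sink->mFontKey = FontKey{0, 1};   // GetNextFontKey() in the patch
  sink->mResources->AddRawFont(*sink->mFontKey, aData, aLength, aIndex);
}

int main() {
  const uint8_t bytes[] = {0, 1, 2, 3};
  FontKey key{};
  ResourceUpdateQueue queue;
  FontFileDataSink sink{&key, &queue};
  WriteFontFileData(bytes, sizeof(bytes), 0, &sink);   // unscaled->GetFontFileData(...) in the patch
  return queue.mRawFonts.size() == 1 ? 0 : 1;
}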

View File

@ -112,6 +112,16 @@ public:
mIdNamespace = aIdNamespace;
}
wr::FontKey GetNextFontKey()
{
return wr::FontKey { GetNamespace(), GetNextResourceId() };
}
wr::FontInstanceKey GetNextFontInstanceKey()
{
return wr::FontInstanceKey { GetNamespace(), GetNextResourceId() };
}
wr::WrImageKey GetNextImageKey()
{
return wr::WrImageKey{ GetNamespace(), GetNextResourceId() };

View File

@ -903,8 +903,12 @@ public:
return (ch == 0x200D);
}
// We treat Combining Grapheme Joiner (U+034F) together with the join
// controls (ZWJ, ZWNJ) here, because (like them) it is an invisible
// char that will be handled by the shaper even if not explicitly
// supported by the font. (See bug 1408366.)
static inline bool IsJoinControl(uint32_t ch) {
return (ch == 0x200C || ch == 0x200D);
return (ch == 0x200C || ch == 0x200D || ch == 0x034f);
}
enum {

View File

@ -105,7 +105,7 @@ JavaScriptParent::allowMessage(JSContext* cx)
bool warn = !isSafe;
nsIGlobalObject* global = dom::GetIncumbentGlobal();
JSObject* jsGlobal = global ? global->GetGlobalJSObject() : nullptr;
JS::Rooted<JSObject*> jsGlobal(cx, global ? global->GetGlobalJSObject() : nullptr);
if (jsGlobal) {
JSAutoCompartment ac(cx, jsGlobal);
JSAddonId* addonId = JS::AddonIdOfObject(jsGlobal);

View File

@ -225,6 +225,8 @@ var ignoreFunctions = {
"void js::AutoEnterOOMUnsafeRegion::crash(uint64, int8*)" : true,
"void mozilla::dom::workers::WorkerPrivate::AssertIsOnWorkerThread() const" : true,
// It would be cool to somehow annotate that nsTHashtable<T> will use
// nsTHashtable<T>::s_MatchEntry for its matchEntry function pointer, but
// there is no mechanism for that. So we will just annotate a particularly
@ -400,6 +402,8 @@ function isOverridableField(initialCSU, csu, field)
return false;
if (field == "GetIsMainThread")
return false;
if (field == "GetThreadFromPRThread")
return false;
if (initialCSU == 'nsIXPConnectJSObjectHolder' && field == 'GetJSObject')
return false;
if (initialCSU == 'nsIXPConnect' && field == 'GetSafeJSContext')

View File

@ -216,13 +216,16 @@ class BumpChunk : public SingleLinkedListElement<BumpChunk>
uint8_t* bump_;
// Pointer to the last byte available in this chunk.
const uint8_t* capacity_;
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
// Magic number used to check against poisoned values.
const uintptr_t magic_;
static constexpr uintptr_t magicNumber =
sizeof(uintptr_t) == 4 ? uintptr_t(0x4c69666f) : uintptr_t(0x4c69666f42756d70);
#endif
// Byte used for poisoning unused memory after releasing memory.
static constexpr int undefinedChunkMemory = 0xcd;
static constexpr uintptr_t magicNumber =
sizeof(uintptr_t) == 4 ? uintptr_t(0x4c69666f) : uintptr_t(0x4c69666f42756d70);
void assertInvariants() {
MOZ_DIAGNOSTIC_ASSERT(magic_ == magicNumber);
@ -235,8 +238,10 @@ class BumpChunk : public SingleLinkedListElement<BumpChunk>
explicit BumpChunk(uintptr_t capacity)
: bump_(begin()),
capacity_(base() + capacity),
magic_(magicNumber)
capacity_(base() + capacity)
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
, magic_(magicNumber)
#endif
{
// We cannot bake this value inside the BumpChunk class, because
// sizeof(BumpChunk) can only be computed after the closing brace of the
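
The LifoAlloc hunk above moves magic_ and magicNumber under MOZ_DIAGNOSTIC_ASSERT_ENABLED, so the constructor's initializer list has to be guarded the same way or non-diagnostic builds stop compiling. A compilable sketch of that idiom; Chunk is a stand-in class and the macro is defined locally only to keep the sketch self-contained:

#include <cstdint>

#define MOZ_DIAGNOSTIC_ASSERT_ENABLED 1   // defined here only so the sketch is self-contained

class Chunk {
  uint8_t* bump_;
  const uint8_t* capacity_;
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
  // Debug-only member: it exists (and costs a word per chunk) only in diagnostic builds.
  const uintptr_t magic_;
  static constexpr uintptr_t magicNumber =
      sizeof(uintptr_t) == 4 ? uintptr_t(0x4c69666f) : uintptr_t(0x4c69666f42756d70);
#endif

 public:
  explicit Chunk(uint8_t* base, uintptr_t capacity)
    : bump_(base),
      capacity_(base + capacity)
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
    , magic_(magicNumber)   // must be guarded too, or non-diagnostic builds fail to compile
#endif
  {
    (void)bump_;
    (void)capacity_;
  }
};

int main() {
  static uint8_t storage[64];
  Chunk chunk(storage, sizeof(storage));
  (void)chunk;
}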

View File

@ -145,7 +145,7 @@ SpewTempOptimizationTypeInfoVector(JitSpewChannel channel,
indent ? indent : "",
TrackedTypeSiteString(t->site()), StringFromMIRType(t->mirType()));
for (uint32_t i = 0; i < t->types().length(); i++)
JitSpewCont(channel, " %s", TypeSet::TypeString(t->types()[i]));
JitSpewCont(channel, " %s", TypeSet::TypeString(t->types()[i]).get());
JitSpewFin(channel);
}
#endif
@ -872,7 +872,7 @@ SpewConstructor(TypeSet::Type ty, JSFunction* constructor)
#ifdef JS_JITSPEW
if (!constructor->isInterpreted()) {
JitSpew(JitSpew_OptimizationTrackingExtended, " Unique type %s has native constructor",
TypeSet::TypeString(ty));
TypeSet::TypeString(ty).get());
return;
}
@ -887,7 +887,7 @@ SpewConstructor(TypeSet::Type ty, JSFunction* constructor)
InterpretedFunctionFilenameAndLineNumber(constructor, &filename, &lineno);
JitSpew(JitSpew_OptimizationTrackingExtended, " Unique type %s has constructor %s (%s:%u)",
TypeSet::TypeString(ty), buf, filename, lineno.isSome() ? *lineno : 0);
TypeSet::TypeString(ty).get(), buf, filename, lineno.isSome() ? *lineno : 0);
#endif
}
@ -899,7 +899,7 @@ SpewAllocationSite(TypeSet::Type ty, JSScript* script, uint32_t offset)
return;
JitSpew(JitSpew_OptimizationTrackingExtended, " Unique type %s has alloc site %s:%u",
TypeSet::TypeString(ty), script->filename(),
TypeSet::TypeString(ty).get(), script->filename(),
PCToLineNumber(script, script->offsetToPC(offset)));
#endif
}

View File

@ -80,6 +80,7 @@ static constexpr Register ABINonArgReg0 { Registers::invalid_reg };
static constexpr Register ABINonArgReg1 { Registers::invalid_reg };
static constexpr Register ABINonArgReturnReg0 { Registers::invalid_reg };
static constexpr Register ABINonArgReturnReg1 { Registers::invalid_reg };
static constexpr Register NativeABIPrologueClobberable { Registers::invalid_reg };
static constexpr Register WasmTableCallScratchReg { Registers::invalid_reg };
static constexpr Register WasmTableCallSigReg { Registers::invalid_reg };

View File

@ -6975,6 +6975,31 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
return gc::AbortReason::None;
}
static inline void
CheckZoneIsScheduled(Zone* zone, JS::gcreason::Reason reason, const char* trigger)
{
#ifdef DEBUG
if (zone->isGCScheduled())
return;
fprintf(stderr,
"CheckZoneIsScheduled: Zone %p not scheduled as expected in %s GC for %s trigger\n",
zone,
JS::gcreason::ExplainReason(reason),
trigger);
JSRuntime* rt = zone->runtimeFromActiveCooperatingThread();
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
fprintf(stderr,
" Zone %p:%s%s\n",
zone.get(),
zone->isAtomsZone() ? " atoms" : "",
zone->isGCScheduled() ? " scheduled" : "");
}
fflush(stderr);
MOZ_CRASH("Zone not scheduled");
#endif
}
GCRuntime::IncrementalResult
GCRuntime::budgetIncrementalGC(bool nonincrementalByAPI, JS::gcreason::Reason reason,
SliceBudget& budget, AutoLockForExclusiveAccess& lock)
@ -7024,13 +7049,13 @@ GCRuntime::budgetIncrementalGC(bool nonincrementalByAPI, JS::gcreason::Reason re
continue;
if (zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) {
MOZ_ASSERT(zone->isGCScheduled());
CheckZoneIsScheduled(zone, reason, "GC bytes");
budget.makeUnlimited();
stats().nonincremental(AbortReason::GCBytesTrigger);
}
if (zone->isTooMuchMalloc()) {
MOZ_ASSERT(zone->isGCScheduled());
CheckZoneIsScheduled(zone, reason, "malloc bytes");
budget.makeUnlimited();
stats().nonincremental(AbortReason::MallocBytesTrigger);
}

View File

@ -291,9 +291,7 @@ struct ShellCompartmentPrivate {
};
struct MOZ_STACK_CLASS EnvironmentPreparer : public js::ScriptEnvironmentPreparer {
JSContext* cx;
explicit EnvironmentPreparer(JSContext* cx)
: cx(cx)
{
js::SetScriptEnvironmentPreparer(cx, this);
}
@ -606,6 +604,7 @@ SkipUTF8BOM(FILE* file)
void
EnvironmentPreparer::invoke(HandleObject scope, Closure& closure)
{
JSContext* cx = TlsContext.get();
MOZ_ASSERT(!JS_IsExceptionPending(cx));
AutoCompartment ac(cx, scope);

View File

@ -1218,11 +1218,11 @@ JSStructuredCloneWriter::writeDataView(HandleObject obj)
bool
JSStructuredCloneWriter::writeArrayBuffer(HandleObject obj)
{
ArrayBufferObject& buffer = CheckedUnwrap(obj)->as<ArrayBufferObject>();
JSAutoCompartment ac(context(), &buffer);
Rooted<ArrayBufferObject*> buffer(context(), &CheckedUnwrap(obj)->as<ArrayBufferObject>());
JSAutoCompartment ac(context(), buffer);
return out.writePair(SCTAG_ARRAY_BUFFER_OBJECT, buffer.byteLength()) &&
out.writeBytes(buffer.dataPointer(), buffer.byteLength());
return out.writePair(SCTAG_ARRAY_BUFFER_OBJECT, buffer->byteLength()) &&
out.writeBytes(buffer->dataPointer(), buffer->byteLength());
}
bool
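
As with the JavaScriptParent change earlier, this hunk swaps a raw reference to a GC thing for a Rooted handle so the object stays valid, and traceable, across anything that might GC. The sketch below only mimics the shape of the API; JSContext, Rooted and ArrayBufferObject here are mock-ups rather than SpiderMonkey's types:

#include <cstddef>
#include <cstdio>

// Mock-ups standing in for JSContext and JS::Rooted -- enough to show the idiom,
// not the real rooting machinery, which registers the slot with the context so
// the GC can trace and update it.
struct JSContext {};

template <typename T>
class Rooted {
 public:
  Rooted(JSContext*, T aPtr) : mPtr(aPtr) {}
  T get() const { return mPtr; }
  T operator->() const { return mPtr; }
 private:
  T mPtr;
};

struct ArrayBufferObject {
  size_t byteLength() const { return 16; }
};

static void writeArrayBuffer(JSContext* cx, ArrayBufferObject* unwrapped) {
  // A plain 'ArrayBufferObject&' would be unsafe if anything below could GC;
  // the rooted handle keeps the object reachable (and relocation-safe) for the scope.
  Rooted<ArrayBufferObject*> buffer(cx, unwrapped);
  std::printf("%zu bytes\n", buffer->byteLength());
}

int main() {
  JSContext cx;
  ArrayBufferObject buf;
  writeArrayBuffer(&cx, &buf);
}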

View File

@ -646,7 +646,7 @@ TypeScript::SetThis(JSContext* cx, JSScript* script, TypeSet::Type type)
AutoEnterAnalysis enter(cx);
InferSpew(ISpewOps, "externalType: setThis %p: %s",
script, TypeSet::TypeString(type));
script, TypeSet::TypeString(type).get());
types->addType(cx, type);
}
}
@ -670,7 +670,7 @@ TypeScript::SetArgument(JSContext* cx, JSScript* script, unsigned arg, TypeSet::
AutoEnterAnalysis enter(cx);
InferSpew(ISpewOps, "externalType: setArg %p %u: %s",
script, arg, TypeSet::TypeString(type));
script, arg, TypeSet::TypeString(type).get());
types->addType(cx, type);
}
}

View File

@ -124,28 +124,33 @@ TypeSet::NonObjectTypeString(TypeSet::Type type)
return "object";
}
/* static */ const char*
static UniqueChars MakeStringCopy(const char* s)
{
AutoEnterOOMUnsafeRegion oomUnsafe;
char* copy = strdup(s);
if (!copy)
oomUnsafe.crash("Could not copy string");
return UniqueChars(copy);
}
/* static */ UniqueChars
TypeSet::TypeString(TypeSet::Type type)
{
if (type.isPrimitive() || type.isUnknown() || type.isAnyObject())
return NonObjectTypeString(type);
static char bufs[4][40];
static unsigned which = 0;
which = (which + 1) & 3;
return MakeStringCopy(NonObjectTypeString(type));
char buf[100];
if (type.isSingleton()) {
JSObject* singleton = type.singletonNoBarrier();
snprintf(bufs[which], 40, "<%s %#" PRIxPTR ">",
singleton->getClass()->name, uintptr_t(singleton));
SprintfLiteral(buf, "<%s %#" PRIxPTR ">", singleton->getClass()->name, uintptr_t(singleton));
} else {
snprintf(bufs[which], 40, "[%s * %#" PRIxPTR "]", type.groupNoBarrier()->clasp()->name, uintptr_t(type.groupNoBarrier()));
SprintfLiteral(buf, "[%s * %#" PRIxPTR "]", type.groupNoBarrier()->clasp()->name, uintptr_t(type.groupNoBarrier()));
}
return bufs[which];
return MakeStringCopy(buf);
}
/* static */ const char*
/* static */ UniqueChars
TypeSet::ObjectGroupString(ObjectGroup* group)
{
return TypeString(TypeSet::ObjectType(group));
@ -303,8 +308,8 @@ js::ObjectGroupHasProperty(JSContext* cx, ObjectGroup* group, jsid id, const Val
if (!types->hasType(type)) {
TypeFailure(cx, "Missing type in object %s %s: %s",
TypeSet::ObjectGroupString(group), TypeIdString(id),
TypeSet::TypeString(type));
TypeSet::ObjectGroupString(group).get(), TypeIdString(id),
TypeSet::TypeString(type).get());
}
}
return true;
@ -704,7 +709,7 @@ ConstraintTypeSet::addType(JSContext* cx, Type type)
InferSpew(ISpewOps, "addType: %sT%p%s %s",
InferSpewColor(this), this, InferSpewColorReset(),
TypeString(type));
TypeString(type).get());
/* Propagate the type to all constraints. */
if (!cx->helperThread()) {
@ -769,7 +774,7 @@ TypeSet::print(FILE* fp)
for (unsigned i = 0; i < count; i++) {
ObjectKey* key = getObject(i);
if (key)
fprintf(fp, " %s", TypeString(ObjectType(key)));
fprintf(fp, " %s", TypeString(ObjectType(key)).get());
}
}
@ -2665,7 +2670,8 @@ UpdatePropertyType(JSContext* cx, HeapTypeSet* types, NativeObject* obj, Shape*
} else {
InferSpew(ISpewOps, "typeSet: %sT%p%s property %s %s - setConstant",
InferSpewColor(types), types, InferSpewColorReset(),
TypeSet::ObjectGroupString(obj->group()), TypeIdString(shape->propid()));
TypeSet::ObjectGroupString(obj->group()).get(),
TypeIdString(shape->propid()));
}
}
}
@ -2675,7 +2681,7 @@ ObjectGroup::updateNewPropertyTypes(JSContext* cx, JSObject* objArg, jsid id, He
{
InferSpew(ISpewOps, "typeSet: %sT%p%s property %s %s",
InferSpewColor(types), types, InferSpewColorReset(),
TypeSet::ObjectGroupString(this), TypeIdString(id));
TypeSet::ObjectGroupString(this).get(), TypeIdString(id));
MOZ_ASSERT_IF(objArg, objArg->group() == this);
MOZ_ASSERT_IF(singleton(), objArg);
@ -2800,7 +2806,8 @@ js::AddTypePropertyId(JSContext* cx, ObjectGroup* group, JSObject* obj, jsid id,
// Clear any constant flag if it exists.
if (!types->empty() && !types->nonConstantProperty()) {
InferSpew(ISpewOps, "constantMutated: %sT%p%s %s",
InferSpewColor(types), types, InferSpewColorReset(), TypeSet::TypeString(type));
InferSpewColor(types), types, InferSpewColorReset(),
TypeSet::TypeString(type).get());
types->setNonConstantProperty(cx);
}
@ -2808,7 +2815,9 @@ js::AddTypePropertyId(JSContext* cx, ObjectGroup* group, JSObject* obj, jsid id,
return;
InferSpew(ISpewOps, "externalType: property %s %s: %s",
TypeSet::ObjectGroupString(group), TypeIdString(id), TypeSet::TypeString(type));
TypeSet::ObjectGroupString(group).get(),
TypeIdString(id),
TypeSet::TypeString(type).get());
types->addType(cx, type);
// If this addType caused the type set to be marked as containing any
@ -2899,7 +2908,7 @@ ObjectGroup::setFlags(JSContext* cx, ObjectGroupFlags flags)
addFlags(flags);
InferSpew(ISpewOps, "%s: setFlags 0x%x", TypeSet::ObjectGroupString(this), flags);
InferSpew(ISpewOps, "%s: setFlags 0x%x", TypeSet::ObjectGroupString(this).get(), flags);
ObjectStateChange(cx, this, false);
@ -2923,7 +2932,7 @@ ObjectGroup::markUnknown(JSContext* cx)
MOZ_ASSERT(cx->zone()->types.activeAnalysis);
MOZ_ASSERT(!unknownProperties());
InferSpew(ISpewOps, "UnknownProperties: %s", TypeSet::ObjectGroupString(this));
InferSpew(ISpewOps, "UnknownProperties: %s", TypeSet::ObjectGroupString(this).get());
clearNewScript(cx);
ObjectStateChange(cx, this, true);
@ -3070,9 +3079,9 @@ ObjectGroup::print()
{
TaggedProto tagged(proto());
fprintf(stderr, "%s : %s",
TypeSet::ObjectGroupString(this),
TypeSet::ObjectGroupString(this).get(),
tagged.isObject()
? TypeSet::TypeString(TypeSet::ObjectType(tagged.toObject()))
? TypeSet::TypeString(TypeSet::ObjectType(tagged.toObject())).get()
: tagged.isDynamic()
? "(dynamic)"
: "(null)");
@ -3325,7 +3334,7 @@ js::TypeMonitorResult(JSContext* cx, JSScript* script, jsbytecode* pc, TypeSet::
return;
InferSpew(ISpewOps, "bytecodeType: %p %05zu: %s",
script, script->pcToOffset(pc), TypeSet::TypeString(type));
script, script->pcToOffset(pc), TypeSet::TypeString(type).get());
types->addType(cx, type);
}
@ -3341,7 +3350,7 @@ js::TypeMonitorResult(JSContext* cx, JSScript* script, jsbytecode* pc, StackType
MOZ_ASSERT(!types->hasType(type));
InferSpew(ISpewOps, "bytecodeType: %p %05zu: %s",
script, script->pcToOffset(pc), TypeSet::TypeString(type));
script, script->pcToOffset(pc), TypeSet::TypeString(type).get());
types->addType(cx, type);
}

View File

@ -382,8 +382,8 @@ class TypeSet
static const char* NonObjectTypeString(Type type);
static const char* TypeString(Type type);
static const char* ObjectGroupString(ObjectGroup* group);
static UniqueChars TypeString(Type type);
static UniqueChars ObjectGroupString(ObjectGroup* group);
protected:
/* Flags for this type set. */
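
With TypeString and ObjectGroupString now returning UniqueChars instead of pointers into a rotating static buffer, callers add .get() at the use site (as in the hunks above) and each result owns its own storage. A small standalone sketch of why that matters, using std::unique_ptr<char[]> as a stand-in for js::UniqueChars:

#include <cstdio>
#include <cstring>
#include <memory>

using UniqueChars = std::unique_ptr<char[]>;   // stand-in for js::UniqueChars

static UniqueChars MakeStringCopy(const char* s) {
  UniqueChars copy(new char[std::strlen(s) + 1]);
  std::strcpy(copy.get(), s);
  return copy;
}

static UniqueChars TypeString(int typeTag) {
  char buf[100];
  std::snprintf(buf, sizeof(buf), "<type %d>", typeTag);
  return MakeStringCopy(buf);   // each caller owns its own copy
}

int main() {
  UniqueChars a = TypeString(1);
  UniqueChars b = TypeString(2);
  // With the old 'static char bufs[4][40]' scheme, holding several results (or
  // calling from another thread) could clobber 'a' before it was printed.
  std::printf("%s %s\n", a.get(), b.get());
}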

View File

@ -2186,7 +2186,7 @@ already_AddRefed<LayerManager> nsDisplayList::PaintRoot(nsDisplayListBuilder* aB
}
if (presContext->RefreshDriver()->HasScheduleFlush()) {
presContext->NotifyInvalidation(layerManager->GetLastTransactionId(), nsIntRect());
presContext->NotifyInvalidation(layerManager->GetLastTransactionId(), frame->GetRect());
}
return layerManager.forget();

View File

@ -377,7 +377,7 @@ fuzzy-if(webrender,0-2,0-227) == pattern-transform-presence-01.svg pattern-trans
fuzzy-if(cocoaWidget,4,15982) fuzzy-if(winWidget,4,92) fuzzy-if(skiaContent,4,60) == radialGradient-basic-03.svg radialGradient-basic-03-ref.svg
== radialGradient-basic-04.svg pass.svg
== radialGradient-fr-01.svg pass.svg
fuzzy(1,3235) fuzzy-if(winWidget,1,6704) fuzzy-if(winWidget&&stylo,1,6711) == radialGradient-fr-02.svg radialGradient-fr-02-ref.svg
fuzzy(1,3235) fuzzy-if(winWidget,1,6711) == radialGradient-fr-02.svg radialGradient-fr-02-ref.svg
fuzzy-if(skiaContent,1,3600) == rect-01.svg pass.svg
== rect-02.svg pass.svg

View File

@ -80,3 +80,8 @@ void CSFLog( CSFLogLevel priority, const char* sourceFile, int sourceLine, const
va_end(ap);
}
int CSFLogTestLevel(CSFLogLevel priority)
{
mozilla::LogLevel level = static_cast<mozilla::LogLevel>(priority);
return MOZ_LOG_TEST(gSignalingLog, level);
}
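
CSFLogTestLevel lets callers ask whether a level is enabled before building a message. The sketch below shows the usual guard pattern; the enum values and the helper body here are stand-ins for illustration, whereas the real helper forwards to MOZ_LOG_TEST on gSignalingLog as shown above:

#include <cstdio>

// Stand-in level enum and helper; the real CSFLogLevel definition lives in CSFLog.h.
enum CSFLogLevel { CSF_LOG_ERROR, CSF_LOG_WARNING, CSF_LOG_INFO, CSF_LOG_DEBUG };

static int CSFLogTestLevel(CSFLogLevel priority) {
  return priority <= CSF_LOG_INFO;   // pretend debug-level logging is disabled
}

static void LogAvSyncStats(int jitterMs, int playoutMs) {
  if (CSFLogTestLevel(CSF_LOG_DEBUG)) {
    // Only pay for message construction when the log would actually be emitted.
    char buf[128];
    std::snprintf(buf, sizeof(buf), "jitter=%dms playout=%dms", jitterMs, playoutMs);
    std::printf("%s\n", buf);
  }
}

int main() {
  LogAvSyncStats(12, 40);
}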

View File

@ -38,6 +38,8 @@ void CSFLog( CSFLogLevel priority, const char* sourceFile, int sourceLine, const
void CSFLogV( CSFLogLevel priority, const char* sourceFile, int sourceLine, const char* tag , const char* format, va_list args);
int CSFLogTestLevel(CSFLogLevel priority);
#ifdef __cplusplus
}
#endif

View File

@ -33,7 +33,11 @@
namespace mozilla {
static const char* logTag ="WebrtcAudioSessionConduit";
static const char* acLogTag ="WebrtcAudioSessionConduit";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG acLogTag
// 32 bytes is what WebRTC CodecInst expects
const unsigned int WebrtcAudioConduit::CODEC_PLNAME_SIZE = 32;
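
The per-file tag plus the #ifdef/#undef/#define LOGTAG dance above presumably exists so that files following this convention can share one call-site macro without their old duplicate 'static const char* logTag' definitions colliding when sources are concatenated (for example in unified builds). A self-contained sketch of the idiom, with FileA and FileB as hypothetical translation units pasted into one:

#include <cstdio>

// --- as if this were FileA.cpp, pasted into a unified translation unit ---
static const char* aLogTag = "FileA";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG aLogTag

static void FromFileA() { std::printf("[%s] hello\n", LOGTAG); }

// --- as if this were FileB.cpp, appended to the same translation unit ---
// Renaming the variable per file avoids a duplicate 'logTag' definition; the
// #undef lets LOGTAG be re-bound to this file's tag without a redefinition warning.
static const char* bLogTag = "FileB";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG bLogTag

static void FromFileB() { std::printf("[%s] hello\n", LOGTAG); }

int main() {
  FromFileA();   // prints [FileA] hello; LOGTAG expanded to aLogTag at that point
  FromFileB();   // prints [FileB] hello
}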
@ -43,17 +47,17 @@ const unsigned int WebrtcAudioConduit::CODEC_PLNAME_SIZE = 32;
*/
RefPtr<AudioSessionConduit> AudioSessionConduit::Create()
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
WebrtcAudioConduit* obj = new WebrtcAudioConduit();
if(obj->Init() != kMediaConduitNoError)
{
CSFLogError(logTag, "%s AudioConduit Init Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s AudioConduit Init Failed ", __FUNCTION__);
delete obj;
return nullptr;
}
CSFLogDebug(logTag, "%s Successfully created AudioConduit ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Successfully created AudioConduit ", __FUNCTION__);
return obj;
}
@ -64,7 +68,7 @@ WebrtcAudioConduit::~WebrtcAudioConduit()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
for(auto & codec : mRecvCodecList)
{
delete codec;
@ -255,11 +259,11 @@ bool WebrtcAudioConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
}
bool WebrtcAudioConduit::SetDtmfPayloadType(unsigned char type, int freq) {
CSFLogInfo(logTag, "%s : setting dtmf payload %d", __FUNCTION__, (int)type);
CSFLogInfo(LOGTAG, "%s : setting dtmf payload %d", __FUNCTION__, (int)type);
int result = mChannelProxy->SetSendTelephoneEventPayloadType(type, freq);
if (result == -1) {
CSFLogError(logTag, "%s Failed call to SetSendTelephoneEventPayloadType(%u, %d)",
CSFLogError(LOGTAG, "%s Failed call to SetSendTelephoneEventPayloadType(%u, %d)",
__FUNCTION__, type, freq);
}
return result != -1;
@ -286,7 +290,7 @@ bool WebrtcAudioConduit::InsertDTMFTone(int channel, int eventCode,
*/
MediaConduitErrorCode WebrtcAudioConduit::Init()
{
CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
#ifdef MOZ_WIDGET_ANDROID
jobject context = jsjni_GetGlobalContextRef();
@ -294,7 +298,7 @@ MediaConduitErrorCode WebrtcAudioConduit::Init()
JavaVM *jvm = jsjni_GetVM();
if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif
@ -302,66 +306,66 @@ MediaConduitErrorCode WebrtcAudioConduit::Init()
// Per WebRTC APIs below function calls return nullptr on failure
if(!(mVoiceEngine = webrtc::VoiceEngine::Create()))
{
CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to create voice engine", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEBase", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoEBase", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
// init the engine with our audio device layer
if(mPtrVoEBase->Init() == -1)
{
CSFLogError(logTag, "%s VoiceEngine Base Not Initialized", __FUNCTION__);
CSFLogError(LOGTAG, "%s VoiceEngine Base Not Initialized", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoENetwork = VoENetwork::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoENetwork", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoENetwork", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoECodec = VoECodec::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEBCodec", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoEBCodec", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEProcessing = VoEAudioProcessing::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEProcessing", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoEProcessing", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEXmedia = VoEExternalMedia::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEExternalMedia", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoEExternalMedia", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoERTP_RTCP = VoERTP_RTCP::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoERTP_RTCP", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoERTP_RTCP", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEVideoSync = VoEVideoSync::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEVideoSync", __FUNCTION__);
CSFLogError(LOGTAG, "%s Unable to initialize VoEVideoSync", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if (!(mPtrRTP = webrtc::VoERTP_RTCP::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to get audio RTP/RTCP interface ",
CSFLogError(LOGTAG, "%s Unable to get audio RTP/RTCP interface ",
__FUNCTION__);
return kMediaConduitSessionNotInited;
}
if( (mChannel = mPtrVoEBase->CreateChannel()) == -1)
{
CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__);
CSFLogError(LOGTAG, "%s VoiceEngine Channel creation failed",__FUNCTION__);
return kMediaConduitChannelError;
}
// Needed to access TelephoneEvent APIs in 57 if we're not using Call/audio_send_stream/etc
@ -369,29 +373,29 @@ MediaConduitErrorCode WebrtcAudioConduit::Init()
mChannelProxy = s->GetChannelProxy(mChannel);
MOZ_ASSERT(mChannelProxy);
CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel);
CSFLogDebug(LOGTAG, "%s Channel Created %d ",__FUNCTION__, mChannel);
if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
CSFLogError(LOGTAG, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
return kMediaConduitTransportRegistrationFail;
}
if(mPtrVoEXmedia->SetExternalRecordingStatus(true) == -1)
{
CSFLogError(logTag, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__,
CSFLogError(LOGTAG, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalPlayoutError;
}
if(mPtrVoEXmedia->SetExternalPlayoutStatus(true) == -1)
{
CSFLogError(logTag, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__,
CSFLogError(LOGTAG, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalRecordingError;
}
CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
CSFLogDebug(LOGTAG , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
return kMediaConduitNoError;
}
@ -399,7 +403,7 @@ MediaConduitErrorCode WebrtcAudioConduit::Init()
MediaConduitErrorCode
WebrtcAudioConduit::SetTransmitterTransport(RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
@ -410,7 +414,7 @@ WebrtcAudioConduit::SetTransmitterTransport(RefPtr<TransportInterface> aTranspor
MediaConduitErrorCode
WebrtcAudioConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
@ -421,7 +425,7 @@ WebrtcAudioConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
MediaConduitErrorCode
WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
int error = 0;//webrtc engine errors
webrtc::CodecInst cinst;
@ -441,29 +445,29 @@ WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
if(!CodecConfigToWebRTCCodec(codecConfig,cinst))
{
CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
CSFLogError(LOGTAG,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
return kMediaConduitMalformedArgument;
}
if(mPtrVoECodec->SetSendCodec(mChannel, cinst) == -1)
{
error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s SetSendCodec - Invalid Codec %d ",__FUNCTION__,
CSFLogError(LOGTAG, "%s SetSendCodec - Invalid Codec %d ",__FUNCTION__,
error);
if(error == VE_CANNOT_SET_SEND_CODEC || error == VE_CODEC_ERROR)
{
CSFLogError(logTag, "%s Invalid Send Codec", __FUNCTION__);
CSFLogError(LOGTAG, "%s Invalid Send Codec", __FUNCTION__);
return kMediaConduitInvalidSendCodec;
}
CSFLogError(logTag, "%s SetSendCodec Failed %d ", __FUNCTION__,
CSFLogError(LOGTAG, "%s SetSendCodec Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
// This must be called after SetSendCodec
if (mPtrVoECodec->SetFECStatus(mChannel, codecConfig->mFECEnabled) == -1) {
CSFLogError(logTag, "%s SetFECStatus Failed %d ", __FUNCTION__,
CSFLogError(LOGTAG, "%s SetFECStatus Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitFECStatusError;
}
@ -474,7 +478,7 @@ WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
if (mPtrVoECodec->SetOpusMaxPlaybackRate(
mChannel,
codecConfig->mMaxPlaybackRate) == -1) {
CSFLogError(logTag, "%s SetOpusMaxPlaybackRate Failed %d ", __FUNCTION__,
CSFLogError(LOGTAG, "%s SetOpusMaxPlaybackRate Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
@ -515,7 +519,7 @@ MediaConduitErrorCode
WebrtcAudioConduit::ConfigureRecvMediaCodecs(
const std::vector<AudioCodecConfig*>& codecConfigList)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
int error = 0; //webrtc engine errors
bool success = false;
@ -529,7 +533,7 @@ WebrtcAudioConduit::ConfigureRecvMediaCodecs(
if(codecConfigList.empty())
{
CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
@ -547,22 +551,22 @@ WebrtcAudioConduit::ConfigureRecvMediaCodecs(
webrtc::CodecInst cinst;
if(!CodecConfigToWebRTCCodec(codec,cinst))
{
CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
CSFLogError(LOGTAG,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
continue;
}
if(mPtrVoECodec->SetRecPayloadType(mChannel,cinst) == -1)
{
error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s SetRecvCodec Failed %d ",__FUNCTION__, error);
CSFLogError(LOGTAG, "%s SetRecvCodec Failed %d ",__FUNCTION__, error);
continue;
}
CSFLogDebug(logTag, "%s Successfully Set RecvCodec %s", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s Successfully Set RecvCodec %s", __FUNCTION__,
codec->mName.c_str());
//copy this to local database
if(!CopyCodecToDB(codec)) {
CSFLogError(logTag,"%s Unable to updated Codec Database", __FUNCTION__);
CSFLogError(LOGTAG,"%s Unable to updated Codec Database", __FUNCTION__);
return kMediaConduitUnknownError;
}
success = true;
@ -571,7 +575,7 @@ WebrtcAudioConduit::ConfigureRecvMediaCodecs(
if(!success)
{
CSFLogError(logTag, "%s Setting Receive Codec Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Setting Receive Codec Failed ", __FUNCTION__);
return kMediaConduitInvalidReceiveCodec;
}
@ -588,11 +592,11 @@ WebrtcAudioConduit::ConfigureRecvMediaCodecs(
MediaConduitErrorCode
WebrtcAudioConduit::EnableAudioLevelExtension(bool enabled, uint8_t id)
{
CSFLogDebug(logTag, "%s %d %d ", __FUNCTION__, enabled, id);
CSFLogDebug(LOGTAG, "%s %d %d ", __FUNCTION__, enabled, id);
if (mPtrVoERTP_RTCP->SetSendAudioLevelIndicationStatus(mChannel, enabled, id) == -1)
{
CSFLogError(logTag, "%s SetSendAudioLevelIndicationStatus Failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s SetSendAudioLevelIndicationStatus Failed", __FUNCTION__);
return kMediaConduitUnknownError;
}
@ -602,11 +606,11 @@ WebrtcAudioConduit::EnableAudioLevelExtension(bool enabled, uint8_t id)
MediaConduitErrorCode
WebrtcAudioConduit::EnableMIDExtension(bool enabled, uint8_t id)
{
CSFLogDebug(logTag, "%s %d %d ", __FUNCTION__, enabled, id);
CSFLogDebug(LOGTAG, "%s %d %d ", __FUNCTION__, enabled, id);
if (mPtrVoERTP_RTCP->SetSendMIDStatus(mChannel, enabled, id) == -1)
{
CSFLogError(logTag, "%s SetSendMIDStatus Failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s SetSendMIDStatus Failed", __FUNCTION__);
return kMediaConduitUnknownError;
}
@ -620,7 +624,7 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
uint32_t channels,
int32_t capture_delay)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
// Following checks need to be performed
// 1. Non null audio buffer pointer,
// 2. invalid sampling frequency - less than 0 or unsupported ones
@ -634,7 +638,7 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
(IsSamplingFreqSupported(samplingFreqHz) == false) ||
((lengthSamples % (samplingFreqHz / 100) != 0)) )
{
CSFLogError(logTag, "%s Invalid Parameters ",__FUNCTION__);
CSFLogError(LOGTAG, "%s Invalid Parameters ",__FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
@ -642,7 +646,7 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
//validate capture time
if(capture_delay < 0 )
{
CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__);
CSFLogError(LOGTAG,"%s Invalid Capture Delay ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
@ -650,7 +654,7 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
// if transmission is not started .. conduit cannot insert frames
if(!mEngineTransmitting)
{
CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
@ -677,13 +681,13 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
int& lengthSamples)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
unsigned int numSamples = 0;
//validate params
if(!speechData )
{
CSFLogError(logTag,"%s Null Audio Buffer Pointer", __FUNCTION__);
CSFLogError(LOGTAG,"%s Null Audio Buffer Pointer", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
@ -691,7 +695,7 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
// Validate sample length
if((numSamples = GetNum10msSamplesForFrequency(samplingFreqHz)) == 0 )
{
CSFLogError(logTag,"%s Invalid Sampling Frequency ", __FUNCTION__);
CSFLogError(LOGTAG,"%s Invalid Sampling Frequency ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
@ -699,7 +703,7 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
//validate capture time
if(capture_delay < 0 )
{
CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__);
CSFLogError(LOGTAG,"%s Invalid Capture Delay ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
return kMediaConduitMalformedArgument;
}
@ -708,7 +712,7 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
// samples
if(!mEngineReceiving)
{
CSFLogError(logTag, "%s Engine not Receiving ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Engine not Receiving ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
@ -721,7 +725,7 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
lengthSamples) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s Getting audio data Failed %d", __FUNCTION__, error);
CSFLogError(LOGTAG, "%s Getting audio data Failed %d", __FUNCTION__, error);
if(error == VE_RUNTIME_PLAY_ERROR)
{
return kMediaConduitPlayoutError;
@ -745,11 +749,11 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_AUDIO_LAGS_VIDEO_MS,
avsync_offset_ms);
}
CSFLogError(logTag,
CSFLogError(LOGTAG,
"A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);
} else {
CSFLogError(logTag, "A/V sync: GetAVStats failed");
CSFLogError(LOGTAG, "A/V sync: GetAVStats failed");
}
mLastSyncLog = mSamples;
}
@ -776,7 +780,7 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
}
}
}
CSFLogDebug(logTag,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
CSFLogDebug(LOGTAG,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
lengthSamples);
return kMediaConduitNoError;
}
@ -785,7 +789,7 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
MediaConduitErrorCode
WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
{
CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, mChannel);
CSFLogDebug(LOGTAG, "%s : channel %d", __FUNCTION__, mChannel);
if(mEngineReceiving)
{
@ -800,7 +804,7 @@ WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s RTP Processing Error %d", __FUNCTION__, error);
CSFLogError(LOGTAG, "%s RTP Processing Error %d", __FUNCTION__, error);
if(error == VE_RTP_RTCP_MODULE_ERROR)
{
return kMediaConduitRTPRTCPModuleError;
@ -808,7 +812,7 @@ WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
return kMediaConduitUnknownError;
}
} else {
CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
CSFLogError(LOGTAG, "Error: %s when not receiving", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
@ -818,12 +822,12 @@ WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
MediaConduitErrorCode
WebrtcAudioConduit::ReceivedRTCPPacket(const void *data, int len)
{
CSFLogDebug(logTag, "%s : channel %d",__FUNCTION__, mChannel);
CSFLogDebug(LOGTAG, "%s : channel %d",__FUNCTION__, mChannel);
if(mPtrVoENetwork->ReceivedRTCPPacket(mChannel, data, len) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s RTCP Processing Error %d", __FUNCTION__, error);
CSFLogError(LOGTAG, "%s RTCP Processing Error %d", __FUNCTION__, error);
if(error == VE_RTP_RTCP_MODULE_ERROR)
{
return kMediaConduitRTPRTCPModuleError;
@ -838,10 +842,10 @@ WebrtcAudioConduit::StopTransmitting()
{
if(mEngineTransmitting)
{
CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
if(mPtrVoEBase->StopSend(mChannel) == -1)
{
CSFLogError(logTag, "%s StopSend() Failed %d ", __FUNCTION__,
CSFLogError(LOGTAG, "%s StopSend() Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
@ -859,7 +863,7 @@ WebrtcAudioConduit::StartTransmitting()
if(mPtrVoEBase->StartSend(mChannel) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s StartSend failed %d", __FUNCTION__, error);
CSFLogError(LOGTAG, "%s StartSend failed %d", __FUNCTION__, error);
return kMediaConduitUnknownError;
}
mEngineTransmitting = true;
@ -873,16 +877,16 @@ WebrtcAudioConduit::StopReceiving()
{
if(mEngineReceiving)
{
CSFLogDebug(logTag, "%s Engine Already Receiving. Attemping to Stop ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Engine Already Receiving. Attemping to Stop ", __FUNCTION__);
// AudioEngine doesn't fail fatally on stopping reception. Ref:voe_errors.h.
// hence we need not be strict in failing here on errors
mPtrVoEBase->StopReceive(mChannel);
CSFLogDebug(logTag, "%s Attemping to Stop playout ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Attemping to Stop playout ", __FUNCTION__);
if(mPtrVoEBase->StopPlayout(mChannel) == -1)
{
if( mPtrVoEBase->LastError() == VE_CANNOT_STOP_PLAYOUT)
{
CSFLogDebug(logTag, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
CSFLogDebug(LOGTAG, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
return kMediaConduitPlayoutError;
}
}
@ -899,7 +903,7 @@ WebrtcAudioConduit::StartReceiving()
if(mPtrVoEBase->StartReceive(mChannel) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag , "%s StartReceive Failed %d ",__FUNCTION__, error);
CSFLogError(LOGTAG , "%s StartReceive Failed %d ",__FUNCTION__, error);
if(error == VE_RECV_SOCKET_ERROR)
{
return kMediaConduitSocketError;
@ -909,7 +913,7 @@ WebrtcAudioConduit::StartReceiving()
if(mPtrVoEBase->StartPlayout(mChannel) == -1)
{
CSFLogError(logTag, "%s Starting playout Failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s Starting playout Failed", __FUNCTION__);
return kMediaConduitPlayoutError;
}
mEngineReceiving = true;
@ -925,7 +929,7 @@ WebrtcAudioConduit::SendRtp(const uint8_t* data,
size_t len,
const webrtc::PacketOptions& options)
{
CSFLogDebug(logTag, "%s: len %lu", __FUNCTION__, (unsigned long)len);
CSFLogDebug(LOGTAG, "%s: len %lu", __FUNCTION__, (unsigned long)len);
if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
if (mProcessing.Length() > 0) {
@ -946,10 +950,10 @@ WebrtcAudioConduit::SendRtp(const uint8_t* data,
if(mTransmitterTransport &&
(mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Sent RTP Packet ", __FUNCTION__);
return true;
}
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTP Packet Send Failed ", __FUNCTION__);
return false;
}
@ -957,7 +961,7 @@ WebrtcAudioConduit::SendRtp(const uint8_t* data,
bool
WebrtcAudioConduit::SendRtcp(const uint8_t* data, size_t len)
{
CSFLogDebug(logTag, "%s : len %lu, first rtcp = %u ",
CSFLogDebug(LOGTAG, "%s : len %lu, first rtcp = %u ",
__FUNCTION__,
(unsigned long) len,
static_cast<unsigned>(data[1]));
@ -970,15 +974,15 @@ WebrtcAudioConduit::SendRtcp(const uint8_t* data, size_t len)
mReceiverTransport->SendRtcpPacket(data, len) == NS_OK)
{
// Might be a sender report, might be a receiver report, we don't know.
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Sent RTCP Packet ", __FUNCTION__);
return true;
}
if (mTransmitterTransport &&
(mTransmitterTransport->SendRtcpPacket(data, len) == NS_OK)) {
CSFLogDebug(logTag, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
return true;
}
CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
return false;
}
@ -994,7 +998,7 @@ WebrtcAudioConduit::CodecConfigToWebRTCCodec(const AudioCodecConfig* codecInfo,
memset(&cinst, 0, sizeof(webrtc::CodecInst));
if(sizeof(cinst.plname) < plNameLength+1)
{
CSFLogError(logTag, "%s Payload name buffer capacity mismatch ",
CSFLogError(LOGTAG, "%s Payload name buffer capacity mismatch ",
__FUNCTION__);
return false;
}
@ -1108,21 +1112,21 @@ WebrtcAudioConduit::ValidateCodecConfig(const AudioCodecConfig* codecInfo,
if(!codecInfo)
{
CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
if((codecInfo->mName.empty()) ||
(codecInfo->mName.length() >= CODEC_PLNAME_SIZE))
{
CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
//Only mono or stereo channels supported
if( (codecInfo->mChannels != 1) && (codecInfo->mChannels != 2))
{
CSFLogError(logTag, "%s Channel Unsupported ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Channel Unsupported ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
@ -1138,7 +1142,7 @@ WebrtcAudioConduit::ValidateCodecConfig(const AudioCodecConfig* codecInfo,
if(codecAppliedAlready)
{
CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str());
CSFLogDebug(LOGTAG, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str());
}
return kMediaConduitNoError;
}
@ -1148,12 +1152,12 @@ WebrtcAudioConduit::DumpCodecDB() const
{
for(auto& codec : mRecvCodecList)
{
CSFLogDebug(logTag,"Payload Name: %s", codec->mName.c_str());
CSFLogDebug(logTag,"Payload Type: %d", codec->mType);
CSFLogDebug(logTag,"Payload Frequency: %d", codec->mFreq);
CSFLogDebug(logTag,"Payload PacketSize: %d", codec->mPacSize);
CSFLogDebug(logTag,"Payload Channels: %d", codec->mChannels);
CSFLogDebug(logTag,"Payload Sampling Rate: %d", codec->mRate);
CSFLogDebug(LOGTAG,"Payload Name: %s", codec->mName.c_str());
CSFLogDebug(LOGTAG,"Payload Type: %d", codec->mType);
CSFLogDebug(LOGTAG,"Payload Frequency: %d", codec->mFreq);
CSFLogDebug(LOGTAG,"Payload PacketSize: %d", codec->mPacSize);
CSFLogDebug(LOGTAG,"Payload Channels: %d", codec->mChannels);
CSFLogDebug(LOGTAG,"Payload Sampling Rate: %d", codec->mRate);
}
}
}// end namespace

View File

@ -11,10 +11,14 @@
namespace mozilla {
static const char* logTag ="MediaCodecVideoCodec";
static const char* mcvcLogTag ="MediaCodecVideoCodec";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG mcvcLogTag
WebrtcVideoEncoder* MediaCodecVideoCodec::CreateEncoder(CodecType aCodecType) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (aCodecType == CODEC_VP8) {
if (MediaPrefs::RemoteMediaCodecVP8EncoderEnabled()) {
return new WebrtcMediaCodecVP8VideoRemoteEncoder();
@ -26,7 +30,7 @@ WebrtcVideoEncoder* MediaCodecVideoCodec::CreateEncoder(CodecType aCodecType) {
}
WebrtcVideoDecoder* MediaCodecVideoCodec::CreateDecoder(CodecType aCodecType) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (aCodecType == CODEC_VP8) {
return new WebrtcMediaCodecVP8VideoDecoder();
}

View File

@ -70,7 +70,11 @@
namespace mozilla {
static const char* logTag = "WebrtcVideoSessionConduit";
static const char* vcLogTag = "WebrtcVideoSessionConduit";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG vcLogTag
static const int kNullPayloadType = -1;
static const char* kUlpFecPayloadName = "ulpfec";
@ -171,14 +175,14 @@ WebrtcVideoConduit::SendStreamStatistics::Update(
const webrtc::FrameCounts& fc =
aStats.substreams.begin()->second.frame_counts;
mFramesEncoded = fc.key_frames + fc.delta_frames;
CSFLogVerbose(logTag,
CSFLogVerbose(LOGTAG,
"%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
__FUNCTION__, aStats.encode_frame_rate,
aStats.media_bitrate_bps,
mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
} else {
CSFLogVerbose(logTag, "%s stats.substreams is empty", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s stats.substreams is empty", __FUNCTION__);
}
}
@ -200,7 +204,7 @@ void
WebrtcVideoConduit::ReceiveStreamStatistics::Update(
const webrtc::VideoReceiveStream::Stats& aStats)
{
CSFLogVerbose(logTag, "%s ", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps);
mDiscardedPackets = aStats.discarded_packets;
mFramesDecoded = aStats.frame_counts.key_frames
@ -215,7 +219,7 @@ VideoSessionConduit::Create(RefPtr<WebRtcCallWrapper> aCall)
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
NS_ASSERTION(aCall, "missing required parameter: aCall");
CSFLogVerbose(logTag, "%s", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
if (!aCall) {
return nullptr;
@ -223,10 +227,10 @@ VideoSessionConduit::Create(RefPtr<WebRtcCallWrapper> aCall)
nsAutoPtr<WebrtcVideoConduit> obj(new WebrtcVideoConduit(aCall));
if(obj->Init() != kMediaConduitNoError) {
CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
return nullptr;
}
CSFLogVerbose(logTag, "%s Successfully created VideoConduit ", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
return obj.forget();
}
@ -276,7 +280,7 @@ WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
// Video Stats Callback
nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) {
CSFLogDebug(logTag, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
auto self = static_cast<WebrtcVideoConduit*>(aClosure);
MutexAutoLock lock(self->mCodecMutex);
if (self->mEngineTransmitting && self->mSendStream) {
@ -300,12 +304,12 @@ WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
WebrtcVideoConduit::~WebrtcVideoConduit()
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
if (mVideoStatsTimer) {
CSFLogDebug(logTag, "canceling StreamStats for VideoConduit: %p", this);
CSFLogDebug(LOGTAG, "canceling StreamStats for VideoConduit: %p", this);
MutexAutoLock lock(mCodecMutex);
CSFLogDebug(logTag, "StreamStats cancelled for VideoConduit: %p", this);
CSFLogDebug(LOGTAG, "StreamStats cancelled for VideoConduit: %p", this);
mVideoStatsTimer->Cancel();
}
@ -379,7 +383,7 @@ bool WebrtcVideoConduit::SetLocalMID(const std::string& mid)
MediaConduitErrorCode
WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode)
{
CSFLogVerbose(logTag, "%s ", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
if (mode == webrtc::VideoCodecMode::kRealtimeVideo ||
mode == webrtc::VideoCodecMode::kScreensharing) {
mCodecMode = mode;
@ -477,7 +481,7 @@ WebrtcVideoConduit::CreateRecvStream()
decoder_type = SupportedCodecType(webrtc::PayloadNameToCodecType(config->mName)
.value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
if (decoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
CSFLogError(logTag, "%s Unknown decoder type: %s", __FUNCTION__,
CSFLogError(LOGTAG, "%s Unknown decoder type: %s", __FUNCTION__,
config->mName.c_str());
continue;
}
@ -488,7 +492,7 @@ WebrtcVideoConduit::CreateRecvStream()
// This really should never happen unless something went wrong
// in the negotiation code
NS_ASSERTION(decoder, "Failed to create video decoder");
CSFLogError(logTag, "Failed to create decoder of type %s (%d)",
CSFLogError(LOGTAG, "Failed to create decoder of type %s (%d)",
config->mName.c_str(), decoder_type);
// don't stop
continue;
@ -508,7 +512,7 @@ WebrtcVideoConduit::CreateRecvStream()
mDecoders.clear();
return kMediaConduitUnknownError;
}
CSFLogDebug(logTag, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
CSFLogDebug(LOGTAG, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
mRecvStream, mRecvStreamConfig.rtp.remote_ssrc, mRecvStreamConfig.rtp.remote_ssrc);
return kMediaConduitNoError;
@ -595,12 +599,12 @@ WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int heig
if (new_width != width || new_height != height) {
if (streamCount == 1) {
CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
// Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
ConstrainPreservingAspectRatio(new_width, new_height,
&width, &height);
} else {
CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
CSFLogVerbose(LOGTAG, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
// webrtc.org supposedly won't tolerate simulcast unless every stream
// is exactly the same aspect ratio. 320x240 / 3 = 80x60.
ConstrainPreservingAspectRatioExact(new_width * new_height,
@ -674,7 +678,7 @@ WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int heig
if (mConduit->mCurSendCodecConfig->mName == "H264") {
if (mConduit->mCurSendCodecConfig->mEncodingConstraints.maxMbps > 0) {
// Not supported yet!
CSFLogError(logTag, "%s H.264 max_mbps not supported yet", __FUNCTION__);
CSFLogError(LOGTAG, "%s H.264 max_mbps not supported yet", __FUNCTION__);
}
}
streams.push_back(video_stream);
@ -696,7 +700,7 @@ WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int heig
MediaConduitErrorCode
WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
{
CSFLogDebug(logTag, "%s for %s", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s for %s", __FUNCTION__,
codecConfig ? codecConfig->mName.c_str() : "<null>");
MediaConduitErrorCode condError = kMediaConduitNoError;
@ -708,7 +712,7 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
size_t streamCount = std::min(codecConfig->mSimulcastEncodings.size(),
(size_t)webrtc::kMaxSimulcastStreams);
CSFLogDebug(logTag, "%s for VideoConduit:%p stream count:%d", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s for VideoConduit:%p stream count:%d", __FUNCTION__,
this, static_cast<int>(streamCount));
mSendingFramerate = 0;
@ -856,7 +860,7 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
bool
WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
{
CSFLogDebug(logTag, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
mRecvStreamConfig.rtp.remote_ssrc = ssrc;
unsigned int current_ssrc;
@ -887,7 +891,7 @@ WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
}
MediaConduitErrorCode rval = CreateRecvStream();
if (rval != kMediaConduitNoError) {
CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
return false;
}
}
@ -988,7 +992,7 @@ bool
WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
unsigned int* cumulativeLost)
{
CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
{
MutexAutoLock lock(mCodecMutex);
if (!mRecvStream) {
@ -1011,7 +1015,7 @@ bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
int32_t* rttMs)
{
{
CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
MutexAutoLock lock(mCodecMutex);
if (!mSendStream) {
return false;
@ -1024,7 +1028,7 @@ bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
auto ind = sendStats.substreams.find(ssrc);
if (ind == sendStats.substreams.end()) {
CSFLogError(logTag,
CSFLogError(LOGTAG,
"%s for VideoConduit:%p ssrc not found in SendStream stats.",
__FUNCTION__, this);
return false;
@ -1038,7 +1042,7 @@ bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
int64_t rtt = stats.rtt_ms;
#ifdef DEBUG
if (rtt > INT32_MAX) {
CSFLogError(logTag,
CSFLogError(LOGTAG,
"%s for VideoConduit:%p RTT is larger than the"
" maximum size of an RTCP RTT.", __FUNCTION__, this);
}
@ -1060,7 +1064,7 @@ WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
unsigned int* packetsSent,
uint64_t* bytesSent)
{
CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
webrtc::RTCPSenderInfo senderInfo;
{
MutexAutoLock lock(mCodecMutex);
@ -1155,7 +1159,7 @@ WebrtcVideoConduit::InitMain()
JavaVM *jvm = jsjni_GetVM();
if (mozilla::camera::VideoEngine::SetAndroidObjects(jvm) != 0) {
CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__);
CSFLogError(LOGTAG, "%s: could not set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif //MOZ_WIDGET_ANDROID
@ -1168,7 +1172,7 @@ WebrtcVideoConduit::InitMain()
MediaConduitErrorCode
WebrtcVideoConduit::Init()
{
CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
MediaConduitErrorCode result;
// Run code that must run on MainThread first
MOZ_ASSERT(NS_IsMainThread());
@ -1177,7 +1181,7 @@ WebrtcVideoConduit::Init()
return result;
}
CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
CSFLogError(LOGTAG, "%s Initialization Done", __FUNCTION__);
return kMediaConduitNoError;
}
@ -1195,12 +1199,12 @@ WebrtcVideoConduit::Destroy()
void
WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit)
{
CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
CSFLogDebug(LOGTAG, "%s Synced to %p", __FUNCTION__, aConduit);
{
MutexAutoLock lock(mCodecMutex);
if (!mRecvStream) {
CSFLogError(logTag, "SyncTo called with no receive stream");
CSFLogError(LOGTAG, "SyncTo called with no receive stream");
return;
}
@ -1218,11 +1222,11 @@ WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit)
MediaConduitErrorCode
WebrtcVideoConduit::AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer)
{
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
// null renderer
if (!aVideoRenderer) {
CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__);
CSFLogError(LOGTAG, "%s NULL Renderer", __FUNCTION__);
MOZ_ASSERT(false);
return kMediaConduitInvalidRenderer;
}
@ -1256,7 +1260,7 @@ MediaConduitErrorCode
WebrtcVideoConduit::SetTransmitterTransport(
RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
@ -1267,7 +1271,7 @@ WebrtcVideoConduit::SetTransmitterTransport(
MediaConduitErrorCode
WebrtcVideoConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
@ -1279,12 +1283,12 @@ MediaConduitErrorCode
WebrtcVideoConduit::ConfigureRecvMediaCodecs(
const std::vector<VideoCodecConfig* >& codecConfigList)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
std::string payloadName;
if (codecConfigList.empty()) {
CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
@ -1306,7 +1310,7 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
for (const auto& codec_config : codecConfigList) {
if ((condError = ValidateCodecConfig(codec_config))
!= kMediaConduitNoError) {
CSFLogError(logTag, "%s Invalid config for %s decoder: %i", __FUNCTION__,
CSFLogError(LOGTAG, "%s Invalid config for %s decoder: %i", __FUNCTION__,
codec_config ? codec_config->mName.c_str() : "<null>",
condError);
continue;
@ -1353,7 +1357,7 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
}
if (!recv_codecs.Length()) {
CSFLogError(logTag, "%s Found no valid receive codecs", __FUNCTION__);
CSFLogError(LOGTAG, "%s Found no valid receive codecs", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
@ -1426,7 +1430,7 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
// webrtc.org code has fits if you select an SSRC of 0
mRecvStreamConfig.rtp.local_ssrc = ssrc;
CSFLogDebug(logTag, "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
CSFLogDebug(LOGTAG, "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
__FUNCTION__, (void*) this, ssrc,
(uint32_t) mSendStreamConfig.rtp.ssrcs.size(),
mRecvStreamConfig.rtp.remote_ssrc);
@ -1442,7 +1446,7 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
// Rebuilds mRecvStream from mRecvStreamConfig
MediaConduitErrorCode rval = CreateRecvStream();
if (rval != kMediaConduitNoError) {
CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
return rval;
}
}
@ -1714,7 +1718,7 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width,
// NOTE: mSendingWidth != mLastWidth, because of maxwidth/height/etc above
bool changed = false;
if (mSendingWidth != width || mSendingHeight != height) {
CSFLogDebug(logTag, "%s: resolution changing to %ux%u (from %ux%u)",
CSFLogDebug(LOGTAG, "%s: resolution changing to %ux%u (from %ux%u)",
__FUNCTION__, width, height, mSendingWidth, mSendingHeight);
// This will avoid us continually retrying this operation if it fails.
// If the resolution changes, we'll try again. In the meantime, we'll
@ -1729,7 +1733,7 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width,
mSendingWidth,
mSendingHeight);
if (mSendingFramerate != framerate) {
CSFLogDebug(logTag, "%s: framerate changing to %u (from %u)",
CSFLogDebug(LOGTAG, "%s: framerate changing to %u (from %u)",
__FUNCTION__, framerate, mSendingFramerate);
mSendingFramerate = framerate;
changed = true;
@ -1765,7 +1769,7 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width,
return self->ReconfigureSendCodec(width, height, new_frame);
});
// new_frame now owned by lambda
CSFLogDebug(logTag, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u",
CSFLogDebug(LOGTAG, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u",
__FUNCTION__, width, mLastWidth, height, mLastHeight);
NS_DispatchToMainThread(webrtc_runnable.forget());
if (new_frame) {
@ -1794,7 +1798,7 @@ WebrtcVideoConduit::ReconfigureSendCodec(unsigned short width,
mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
if (frame) {
mVideoBroadcaster.OnFrame(*frame);
CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
}
}
return NS_OK;
@ -1836,7 +1840,7 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_buffer,
{
// check for parameter sanity
if (!video_buffer || video_length == 0 || width == 0 || height == 0) {
CSFLogError(logTag, "%s Invalid Parameters ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Invalid Parameters ", __FUNCTION__);
MOZ_ASSERT(false);
return kMediaConduitMalformedArgument;
}
@ -1844,7 +1848,7 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_buffer,
// Transmission should be enabled before we insert any frames.
if (!mEngineTransmitting) {
CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
@ -1875,7 +1879,7 @@ WebrtcVideoConduit::AddOrUpdateSink(
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants)
{
CSFLogDebug(logTag, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(),
wants.max_pixel_count ? *wants.max_pixel_count : -1,
wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1);
@ -1940,7 +1944,7 @@ WebrtcVideoConduit::SendVideoFrame(webrtc::VideoFrame& frame)
// avoids sampling error when capturing frames, but google had to deal with some
// broken cameras, include Logitech c920's IIRC.
CSFLogVerbose(logTag, "%s (send SSRC %u (0x%x))", __FUNCTION__,
CSFLogVerbose(LOGTAG, "%s (send SSRC %u (0x%x))", __FUNCTION__,
mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front());
// See if we need to recalculate what we're sending.
// Don't compute mSendingWidth/Height, since those may not be the same as the input.
@ -1951,7 +1955,7 @@ WebrtcVideoConduit::SendVideoFrame(webrtc::VideoFrame& frame)
return kMediaConduitNoError;
}
if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
CSFLogVerbose(logTag, "%s: call SelectSendResolution with %ux%u",
CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
__FUNCTION__, frame.width(), frame.height());
if (SelectSendResolution(frame.width(), frame.height(), &frame)) {
// SelectSendResolution took ownership of the data in i420_frame.
@ -2041,7 +2045,7 @@ WebrtcVideoConduit::DeliverPacket(const void* data, int len)
{
// Media Engine should be receiving already.
if (!mCall) {
CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
CSFLogError(LOGTAG, "Error: %s when not receiving", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
@ -2052,7 +2056,7 @@ WebrtcVideoConduit::DeliverPacket(const void* data, int len)
len, webrtc::PacketTime());
if (status != webrtc::PacketReceiver::DELIVERY_OK) {
CSFLogError(logTag, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
CSFLogError(LOGTAG, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
return kMediaConduitRTPProcessingFailed;
}
@ -2075,7 +2079,7 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
UniquePtr<QueuedPacket> packet((QueuedPacket*) malloc(sizeof(QueuedPacket) + len-1));
packet->mLen = len;
memcpy(packet->mData, data, len);
CSFLogDebug(logTag, "queuing packet: seq# %u, Len %d ",
CSFLogDebug(LOGTAG, "queuing packet: seq# %u, Len %d ",
(uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
if (queue) {
mQueuedPackets.AppendElement(Move(packet));
@ -2087,7 +2091,7 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
mQueuedPackets.Clear();
mQueuedPackets.AppendElement(Move(packet));
CSFLogDebug(logTag, "%s: switching from SSRC %u to %u", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__,
mRecvSSRC, ssrc);
// we "switch" here immediately, but buffer until the queue is released
mRecvSSRC = ssrc;
@ -2113,11 +2117,11 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
if (ssrc == self->mRecvSSRC) {
// SSRC is set; insert queued packets
for (auto& packet : self->mQueuedPackets) {
CSFLogDebug(logTag, "Inserting queued packets: seq# %u, Len %d ",
CSFLogDebug(LOGTAG, "Inserting queued packets: seq# %u, Len %d ",
(uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
if (self->DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) {
CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
// Keep delivering and then clear the queue
}
}
@ -2134,13 +2138,13 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
return kMediaConduitNoError;
}
CSFLogVerbose(logTag, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
(uint16_t)ntohs(((uint16_t*) data)[1]), len,
(uint32_t) ntohl(((uint32_t*) data)[2]),
(uint32_t) ntohl(((uint32_t*) data)[2]));
if (DeliverPacket(data, len) != kMediaConduitNoError) {
CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
return kMediaConduitRTPProcessingFailed;
}
return kMediaConduitNoError;
@ -2149,10 +2153,10 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
MediaConduitErrorCode
WebrtcVideoConduit::ReceivedRTCPPacket(const void* data, int len)
{
CSFLogVerbose(logTag, " %s Len %d ", __FUNCTION__, len);
CSFLogVerbose(LOGTAG, " %s Len %d ", __FUNCTION__, len);
if (DeliverPacket(data, len) != kMediaConduitNoError) {
CSFLogError(logTag, "%s RTCP Processing Failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTCP Processing Failed", __FUNCTION__);
return kMediaConduitRTPProcessingFailed;
}
@ -2166,7 +2170,7 @@ WebrtcVideoConduit::StopTransmitting()
{
MutexAutoLock lock(mCodecMutex);
if (mSendStream) {
CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
mSendStream->Stop();
}
}
@ -2183,7 +2187,7 @@ WebrtcVideoConduit::StartTransmitting()
return kMediaConduitNoError;
}
CSFLogDebug(logTag, "%s Attemping to start... ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Attemping to start... ", __FUNCTION__);
{
// Start Transmitting on the video engine
MutexAutoLock lock(mCodecMutex);
@ -2191,7 +2195,7 @@ WebrtcVideoConduit::StartTransmitting()
if (!mSendStream) {
MediaConduitErrorCode rval = CreateSendStream();
if (rval != kMediaConduitNoError) {
CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__, rval);
CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval);
return rval;
}
}
@ -2212,7 +2216,7 @@ WebrtcVideoConduit::StopReceiving()
// Are we receiving already? If so, stop receiving and playout
// since we can't apply new recv codec when the engine is playing.
if (mEngineReceiving && mRecvStream) {
CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
mRecvStream->Stop();
}
@ -2227,7 +2231,7 @@ WebrtcVideoConduit::StartReceiving()
return kMediaConduitNoError;
}
CSFLogDebug(logTag, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC);
CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC);
{
// Start Receive on the video engine
MutexAutoLock lock(mCodecMutex);
@ -2251,7 +2255,7 @@ WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
// XXX(pkerr) - PacketOptions possibly containing RTP extensions are ignored.
// The only field in it is the packet_id, which is used when the header
// extension for TransportSequenceNumber is being used, which we don't.
CSFLogVerbose(logTag, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
CSFLogVerbose(LOGTAG, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
__FUNCTION__,
(uint16_t) ntohs(*((uint16_t*) &packet[2])),
(unsigned long)length,
@ -2262,7 +2266,7 @@ WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
if (!mTransmitterTransport ||
NS_FAILED(mTransmitterTransport->SendRtpPacket(packet, length)))
{
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTP Packet Send Failed ", __FUNCTION__);
return false;
}
return true;
@ -2272,7 +2276,7 @@ WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
bool
WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length)
{
CSFLogVerbose(logTag, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
CSFLogVerbose(LOGTAG, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
// We come here if we have only one pipeline/conduit setup,
// such as for unidirectional streams.
// We also end up here if we are receiving
@ -2281,7 +2285,7 @@ WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length)
NS_SUCCEEDED(mReceiverTransport->SendRtcpPacket(packet, length)))
{
// Might be a sender report, might be a receiver report, we don't know.
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s Sent RTCP Packet ", __FUNCTION__);
return true;
}
if (mTransmitterTransport &&
@ -2289,19 +2293,19 @@ WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length)
return true;
}
CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
return false;
}
void
WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame)
{
CSFLogVerbose(logTag, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
CSFLogVerbose(LOGTAG, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
mRecvSSRC, mRecvSSRC, video_frame.width(), video_frame.height());
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if (!mRenderer) {
CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Renderer is NULL ", __FUNCTION__);
return;
}
@ -2361,13 +2365,13 @@ MediaConduitErrorCode
WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo)
{
if(!codecInfo) {
CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
if((codecInfo->mName.empty()) ||
(codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) {
CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
return kMediaConduitMalformedArgument;
}
@ -2378,10 +2382,10 @@ void
WebrtcVideoConduit::DumpCodecDB() const
{
for (auto& entry : mRecvCodecList) {
CSFLogDebug(logTag, "Payload Name: %s", entry->mName.c_str());
CSFLogDebug(logTag, "Payload Type: %d", entry->mType);
CSFLogDebug(logTag, "Payload Max Frame Size: %d", entry->mEncodingConstraints.maxFs);
CSFLogDebug(logTag, "Payload Max Frame Rate: %d", entry->mEncodingConstraints.maxFps);
CSFLogDebug(LOGTAG, "Payload Name: %s", entry->mName.c_str());
CSFLogDebug(LOGTAG, "Payload Type: %d", entry->mType);
CSFLogDebug(LOGTAG, "Payload Max Frame Size: %d", entry->mEncodingConstraints.maxFs);
CSFLogDebug(LOGTAG, "Payload Max Frame Rate: %d", entry->mEncodingConstraints.maxFps);
}
}
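
Note: the VideoConduit.cpp hunks above only rename the tag argument at each call site from the old file-scope logTag to the LOGTAG macro; the CSFLog* macros keep their (tag, printf-style format, ...) shape. A minimal sketch of that calling convention, using a stand-in CSFLogDebug that prints to stderr rather than the real CSFLog.h backend (the stand-in macro and the tag string are illustrative assumptions, not the actual tree code):

    #include <cstdio>

    // Stand-in for the real CSFLogDebug(tag, format, ...) macro from CSFLog.h;
    // it only prints to stderr so this sketch is self-contained.
    #define LOGTAG "VideoConduit"
    #define CSFLogDebug(tag, format, ...) \
      std::fprintf(stderr, "[%s] " format "\n", tag, __VA_ARGS__)

    int main() {
      int dummy = 0;
      // Same shape as the call sites above: tag first, then a printf format.
      CSFLogDebug(LOGTAG, "%s Synced to %p", __func__, static_cast<void*>(&dummy));
      return 0;
    }
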

View File

@ -40,7 +40,11 @@ static const char MEDIACODEC_VIDEO_MIME_VP8[] = "video/x-vnd.on2.vp8";
namespace mozilla {
static const char* logTag ="WebrtcMediaCodecVP8VideoCodec";
static const char* wmcLogTag ="WebrtcMediaCodecVP8VideoCodec";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG wmcLogTag
class CallbacksSupport final : public JavaCallbacksSupport
{
@ -49,12 +53,12 @@ public:
: mCallback(aCallback)
, mCritSect(webrtc::CriticalSectionWrapper::CreateCriticalSection())
, mPictureId(0) {
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
memset(&mEncodedImage, 0, sizeof(mEncodedImage));
}
~CallbacksSupport() {
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
if (mEncodedImage._size) {
delete [] mEncodedImage._buffer;
mEncodedImage._buffer = nullptr;
@ -64,7 +68,7 @@ public:
void VerifyAndAllocate(const uint32_t minimumSize)
{
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
if(minimumSize > mEncodedImage._size)
{
uint8_t* newBuffer = new uint8_t[minimumSize];
@ -80,17 +84,17 @@ public:
void HandleInput(jlong aTimestamp, bool aProcessed) override
{
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
}
void HandleOutputFormatChanged(MediaFormat::Param aFormat) override
{
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
}
void HandleOutput(Sample::Param aSample)
{
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
BufferInfo::LocalRef info = aSample->Info();
int32_t size;
@ -145,7 +149,7 @@ public:
void HandleError(const MediaResult& aError) override
{
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
}
friend class WebrtcMediaCodecVP8VideoRemoteEncoder;
@ -294,7 +298,7 @@ public:
, mDecoderCallback(nullptr)
, isStarted(false)
, mEnding(false) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}
nsresult Configure(uint32_t width,
@ -303,7 +307,7 @@ public:
uint32_t flags,
const char* mime,
bool encoder) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
nsresult res = NS_OK;
if (!mCoder) {
@ -318,7 +322,7 @@ public:
&format);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
return NS_ERROR_FAILURE;
}
@ -326,7 +330,7 @@ public:
mCoder = CreateEncoder(mime);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed err = %d", __FUNCTION__, (int)res);
return NS_ERROR_FAILURE;
}
@ -339,13 +343,13 @@ public:
} else {
mCoder = CreateDecoder(mime);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed err = %d", __FUNCTION__, (int)res);
return NS_ERROR_FAILURE;
}
}
res = mCoder->Configure(format, nullptr, nullptr, flags);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, err = %d", __FUNCTION__, (int)res);
}
}
@ -353,7 +357,7 @@ public:
}
nsresult Start() {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (!mCoder) {
return NS_ERROR_FAILURE;
@ -364,7 +368,7 @@ public:
nsresult res;
res = mCoder->Start();
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
__FUNCTION__, (int)res);
return res;
}
@ -373,7 +377,7 @@ public:
}
nsresult Stop() {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
mEnding = true;
if (mOutputDrain != nullptr) {
@ -391,7 +395,7 @@ public:
size_t width, size_t height, uint32_t timeStamp,
void* decoded, int color_format) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
// TODO: eliminate extra pixel copy/color conversion
size_t widthUV = (width + 1) / 2;
@ -423,17 +427,17 @@ public:
#ifdef WEBRTC_MEDIACODEC_DEBUG
uint32_t time = PR_IntervalNow();
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
#endif
int inputIndex = DequeueInputBuffer(DECODER_TIMEOUT);
if (inputIndex == -1) {
CSFLogError(logTag, "%s equeue input buffer failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s equeue input buffer failed", __FUNCTION__);
return inputIndex;
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
CSFLogDebug(logTag, "%s dequeue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
CSFLogDebug(LOGTAG, "%s dequeue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
time = PR_IntervalNow();
#endif
@ -446,7 +450,7 @@ public:
PodCopy((uint8_t*)directBuffer, inputImage._buffer, size);
if (inputIndex >= 0) {
CSFLogError(logTag, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
QueueInputBuffer(inputIndex, 0, size, renderTimeMs, 0);
{
@ -481,7 +485,7 @@ public:
BufferInfo::LocalRef bufferInfo;
res = BufferInfo::New(&bufferInfo);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
__FUNCTION__, (int)res);
return res;
}
@ -489,20 +493,20 @@ public:
if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
// Not an error: output not available yet. Try later.
CSFLogDebug(logTag, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
// handle format change
CSFLogDebug(logTag, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
CSFLogDebug(logTag, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
GetOutputBuffers();
} else if (outputIndex < 0) {
CSFLogDebug(logTag, "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
MonitorAutoLock lock(aMonitor);
aInputFrames.pop();
} else {
#ifdef WEBRTC_MEDIACODEC_DEBUG
CSFLogDebug(logTag, "%s dequeue output buffer# return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
CSFLogDebug(LOGTAG, "%s dequeue output buffer# return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
EncodedFrame frame;
{
@ -524,7 +528,7 @@ public:
int color_format = 0;
CSFLogDebug(logTag, "%s generate video frame, width = %d, height = %d, timeStamp_ = %d", __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
CSFLogDebug(LOGTAG, "%s generate video frame, width = %d, height = %d, timeStamp_ = %d", __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
GenerateVideoFrame(frame.width_, frame.height_, frame.timeStamp_, directBuffer, color_format);
mDecoderCallback->Decoded(*mVideoFrame);
@ -541,7 +545,7 @@ public:
res = mCoder->DequeueInputBuffer(time, &inputIndex);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() return err = %d",
__FUNCTION__, (int)res);
return -1;
}
@ -553,7 +557,7 @@ public:
res = mCoder->QueueInputBuffer(inputIndex, offset, size, renderTimes, flags);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() return err = %d",
__FUNCTION__, (int)res);
}
}
@ -565,7 +569,7 @@ public:
res = mCoder->DequeueOutputBuffer(aInfo, DECODER_TIMEOUT, &outputStatus);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() return err = %d",
__FUNCTION__, (int)res);
return -1;
}
@ -589,7 +593,7 @@ public:
res = mCoder->GetInputBuffers(&inputBuffers);
mInputBuffers = (jobjectArray) env->NewGlobalRef(inputBuffers.Get());
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
__FUNCTION__, (int)res);
return nullptr;
}
@ -609,7 +613,7 @@ public:
res = mCoder->GetOutputBuffers(&outputBuffers);
mOutputBuffers = (jobjectArray) env->NewGlobalRef(outputBuffers.Get());
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
__FUNCTION__, (int)res);
return nullptr;
}
@ -698,7 +702,7 @@ static bool I420toNV12(uint8_t* dstY, uint16_t* dstUV, const webrtc::VideoFrame&
WebrtcMediaCodecVP8VideoEncoder::WebrtcMediaCodecVP8VideoEncoder()
: mCallback(nullptr)
, mMediaCodecEncoder(nullptr) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
memset(&mEncodedImage, 0, sizeof(mEncodedImage));
}
@ -747,7 +751,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::InitEncode(
int32_t numberOfCores,
size_t maxPayloadSize) {
mMaxPayloadSize = maxPayloadSize;
CSFLogDebug(logTag, "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width, codecSettings->height);
CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width, codecSettings->height);
return WEBRTC_VIDEO_CODEC_OK;
}
@ -756,7 +760,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::FrameType>* frame_types) {
CSFLogDebug(logTag, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
if (!mMediaCodecEncoder) {
mMediaCodecEncoder = new WebrtcAndroidMediaCodec();
@ -774,7 +778,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
nsresult res = mMediaCodecEncoder->Configure(mFrameWidth, mFrameHeight, nullptr, MediaCodec::CONFIGURE_FLAG_ENCODE, MEDIACODEC_VIDEO_MIME_VP8, true /* encoder */);
if (res != NS_OK) {
CSFLogDebug(logTag, "%s, encoder configure return err = %d",
CSFLogDebug(LOGTAG, "%s, encoder configure return err = %d",
__FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -783,18 +787,18 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
if (NS_FAILED(res)) {
mMediaCodecEncoder->isStarted = false;
CSFLogDebug(logTag, "%s start encoder. err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "%s start encoder. err = %d", __FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
bool retBool = ResetInputBuffers();
if (!retBool) {
CSFLogDebug(logTag, "%s ResetInputBuffers failed.", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
retBool = ResetOutputBuffers();
if (!retBool) {
CSFLogDebug(logTag, "%s ResetOutputBuffers failed.", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -812,12 +816,12 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
int inputIndex = mMediaCodecEncoder->DequeueInputBuffer(DECODER_TIMEOUT);
if (inputIndex == -1) {
CSFLogError(logTag, "%s dequeue input buffer failed", __FUNCTION__);
CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
return inputIndex;
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
CSFLogDebug(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
CSFLogDebug(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
if (inputIndex >= 0) {
@ -830,7 +834,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
bool converted = I420toNV12(dstY, dstUV, inputImage);
if (!converted) {
CSFLogError(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
CSFLogError(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -838,12 +842,12 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
#ifdef WEBRTC_MEDIACODEC_DEBUG
time = PR_IntervalNow();
CSFLogError(logTag, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
#endif
mMediaCodecEncoder->QueueInputBuffer(inputIndex, 0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC /* ms to us */, 0);
#ifdef WEBRTC_MEDIACODEC_DEBUG
CSFLogDebug(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
CSFLogDebug(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
mEncodedImage._encodedWidth = inputImage.width();
mEncodedImage._encodedHeight = inputImage.height();
@ -854,7 +858,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
BufferInfo::LocalRef bufferInfo;
res = BufferInfo::New(&bufferInfo);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return err = %d",
CSFLogDebug(LOGTAG, "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return err = %d",
__FUNCTION__, (int)res);
return -1;
}
@ -863,18 +867,18 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
// Not an error: output not available yet. Try later.
CSFLogDebug(logTag, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
// handle format change
CSFLogDebug(logTag, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
} else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
CSFLogDebug(logTag, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
mMediaCodecEncoder->GetOutputBuffers();
} else if (outputIndex < 0) {
CSFLogDebug(logTag, "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
CSFLogDebug(LOGTAG, "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
} else {
#ifdef WEBRTC_MEDIACODEC_DEBUG
CSFLogDebug(logTag, "%s dequeue output buffer return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
CSFLogDebug(LOGTAG, "%s dequeue output buffer return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
JNIEnv* env = jsjni_GetJNIForThread();
@ -898,11 +902,11 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
int32_t size;
bufferInfo->Size(&size);
#ifdef WEBRTC_MEDIACODEC_DEBUG
CSFLogDebug(logTag, "%s dequeue output buffer ok, index:%d, buffer size = %d, buffer offset = %d, flags = %d", __FUNCTION__, outputIndex, size, offset, flags);
CSFLogDebug(LOGTAG, "%s dequeue output buffer ok, index:%d, buffer size = %d, buffer offset = %d, flags = %d", __FUNCTION__, outputIndex, size, offset, flags);
#endif
if(VerifyAndAllocate(size) == -1) {
CSFLogDebug(logTag, "%s VerifyAndAllocate buffers failed", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s VerifyAndAllocate buffers failed", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -938,7 +942,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
}
int32_t WebrtcMediaCodecVP8VideoEncoder::RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
mCallback = callback;
return WEBRTC_VIDEO_CODEC_OK;
@ -946,7 +950,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::RegisterEncodeCompleteCallback(webrtc::
int32_t WebrtcMediaCodecVP8VideoEncoder::Release() {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
delete mMediaCodecEncoder;
mMediaCodecEncoder = nullptr;
@ -958,17 +962,17 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::Release() {
}
WebrtcMediaCodecVP8VideoEncoder::~WebrtcMediaCodecVP8VideoEncoder() {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
Release();
}
int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(uint32_t packetLoss, int64_t rtt) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoEncoder::SetRates(uint32_t newBitRate, uint32_t frameRate) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
if (!mMediaCodecEncoder) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
@ -982,7 +986,7 @@ int32_t WebrtcMediaCodecVP8VideoEncoder::SetRates(uint32_t newBitRate, uint32_t
}
WebrtcMediaCodecVP8VideoRemoteEncoder::~WebrtcMediaCodecVP8VideoRemoteEncoder() {
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
Release();
}
@ -994,7 +998,7 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::InitEncode(
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::SetRates(uint32_t newBitRate, uint32_t frameRate) {
CSFLogDebug(logTag, "%s, newBitRate: %d, frameRate: %d", __FUNCTION__, newBitRate, frameRate);
CSFLogDebug(LOGTAG, "%s, newBitRate: %d, frameRate: %d", __FUNCTION__, newBitRate, frameRate);
if (!mJavaEncoder) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
@ -1006,7 +1010,7 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Encode(
const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::FrameType>* frame_types) {
CSFLogDebug(logTag, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
if (inputImage.width() == 0 || inputImage.height() == 0) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -1026,7 +1030,7 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Encode(
&format);
if (NS_FAILED(res)) {
CSFLogDebug(logTag, "%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "%s, CreateVideoFormat failed err = %d", __FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -1062,7 +1066,7 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Encode(
bool converted = I420toNV12(dstY, dstUV, inputImage);
if (!converted) {
CSFLogError(logTag, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
CSFLogError(LOGTAG, "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -1091,7 +1095,7 @@ int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::RegisterEncodeCompleteCallback(we
}
int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Release() {
CSFLogDebug(logTag, "%s %p", __FUNCTION__, this);
CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
if (mJavaEncoder) {
mJavaEncoder->Release();
@ -1118,7 +1122,7 @@ WebrtcMediaCodecVP8VideoDecoder::WebrtcMediaCodecVP8VideoDecoder()
, mFrameWidth(0)
, mFrameHeight(0)
, mMediaCodecDecoder(nullptr) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}
bool WebrtcMediaCodecVP8VideoDecoder::ResetInputBuffers() {
@ -1158,15 +1162,15 @@ int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
const webrtc::CodecSpecificInfo* codecSpecificInfo,
int64_t renderTimeMs) {
CSFLogDebug(logTag, "%s, renderTimeMs = %" PRId64, __FUNCTION__, renderTimeMs);
CSFLogDebug(LOGTAG, "%s, renderTimeMs = %" PRId64, __FUNCTION__, renderTimeMs);
if (inputImage._length== 0 || !inputImage._buffer) {
CSFLogDebug(logTag, "%s, input Image invalid. length = %" PRIdPTR, __FUNCTION__, inputImage._length);
CSFLogDebug(LOGTAG, "%s, input Image invalid. length = %" PRIdPTR, __FUNCTION__, inputImage._length);
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (inputImage._frameType == webrtc::kVideoFrameKey) {
CSFLogDebug(logTag, "%s, inputImage is Golden frame",
CSFLogDebug(LOGTAG, "%s, inputImage is Golden frame",
__FUNCTION__);
mFrameWidth = inputImage._encodedWidth;
mFrameHeight = inputImage._encodedHeight;
@ -1181,7 +1185,7 @@ int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
nsresult res = mMediaCodecDecoder->Configure(mFrameWidth, mFrameHeight, nullptr, 0, MEDIACODEC_VIDEO_MIME_VP8, false /* decoder */);
if (res != NS_OK) {
CSFLogDebug(logTag, "%s, decoder configure return err = %d",
CSFLogDebug(LOGTAG, "%s, decoder configure return err = %d",
__FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -1190,18 +1194,18 @@ int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
if (NS_FAILED(res)) {
mMediaCodecDecoder->isStarted = false;
CSFLogDebug(logTag, "%s start decoder. err = %d", __FUNCTION__, (int)res);
CSFLogDebug(LOGTAG, "%s start decoder. err = %d", __FUNCTION__, (int)res);
return WEBRTC_VIDEO_CODEC_ERROR;
}
bool retBool = ResetInputBuffers();
if (!retBool) {
CSFLogDebug(logTag, "%s ResetInputBuffers failed.", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
retBool = ResetOutputBuffers();
if (!retBool) {
CSFLogDebug(logTag, "%s ResetOutputBuffers failed.", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -1209,7 +1213,7 @@ int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
}
#ifdef WEBRTC_MEDIACODEC_DEBUG
uint32_t time = PR_IntervalNow();
CSFLogDebug(logTag, "%s start decoder took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
CSFLogDebug(LOGTAG, "%s start decoder took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
#endif
bool feedFrame = true;
@ -1220,24 +1224,24 @@ int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
feedFrame = (ret == -1);
}
CSFLogDebug(logTag, "%s end, ret = %d", __FUNCTION__, ret);
CSFLogDebug(LOGTAG, "%s end, ret = %d", __FUNCTION__, ret);
return ret;
}
void WebrtcMediaCodecVP8VideoDecoder::DecodeFrame(EncodedFrame* frame) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}
int32_t WebrtcMediaCodecVP8VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback) {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
mCallback = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t WebrtcMediaCodecVP8VideoDecoder::Release() {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
delete mMediaCodecDecoder;
mMediaCodecDecoder = nullptr;
@ -1246,7 +1250,7 @@ int32_t WebrtcMediaCodecVP8VideoDecoder::Release() {
}
WebrtcMediaCodecVP8VideoDecoder::~WebrtcMediaCodecVP8VideoDecoder() {
CSFLogDebug(logTag, "%s ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
Release();
}
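
Note: I420toNV12, used throughout the encoder paths above, repacks planar I420 chroma (separate U and V planes) into NV12's single interleaved UV plane; the luma plane and the total byte count are unchanged. A small sketch of the plane arithmetic only (helper names are illustrative, not part of the tree):

    #include <cstddef>

    // Y plane: width*height bytes. Chroma is subsampled 2x2, so each chroma
    // plane is ceil(w/2)*ceil(h/2) bytes; I420 keeps U and V separate, NV12
    // interleaves them into one plane, but the totals match.
    constexpr std::size_t ChromaPlaneSize(std::size_t w, std::size_t h) {
      return ((w + 1) / 2) * ((h + 1) / 2);
    }
    constexpr std::size_t I420Size(std::size_t w, std::size_t h) {
      return w * h + 2 * ChromaPlaneSize(w, h);
    }
    constexpr std::size_t NV12Size(std::size_t w, std::size_t h) {
      return w * h + 2 * ChromaPlaneSize(w, h);  // same size, different layout
    }
    static_assert(I420Size(640, 480) == 640 * 480 * 3 / 2,
                  "4:2:0 formats average 1.5 bytes per pixel");
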

View File

@ -18,21 +18,17 @@ LOCAL_INCLUDES += [
'/media/webrtc/trunk',
]
SOURCES += [
'AudioConduit.cpp',
'VideoConduit.cpp',
]
UNIFIED_SOURCES += [
'AudioConduit.cpp',
'GmpVideoCodec.cpp',
'MediaDataDecoderCodec.cpp',
'VideoConduit.cpp',
'WebrtcGmpVideoCodec.cpp',
'WebrtcMediaDataDecoderCodec.cpp',
]
if CONFIG['OS_TARGET'] == 'Android':
# Duplicate definition of logTag
SOURCES += [
UNIFIED_SOURCES += [
'MediaCodecVideoCodec.cpp',
'WebrtcMediaCodecVP8VideoCodec.cpp',
]
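
Note: this moz.build hunk folds the conduit sources into UNIFIED_SOURCES, where the build system compiles several .cpp files as a single translation unit; that is exactly why the per-file tag moves from a static logTag variable (see the old "# Duplicate definition of logTag" comment) to a re-definable LOGTAG macro. A compressed sketch of how the #ifdef/#undef/#define dance keeps unified files from colliding (file names and tag strings are illustrative):

    #include <cstdio>

    // ---- as if from FileA.cpp, first file in a unified translation unit ----
    #ifdef LOGTAG
    #undef LOGTAG
    #endif
    #define LOGTAG "FileA"
    static void LogFromA() { std::printf("[%s] hello from A\n", LOGTAG); }

    // ---- as if from FileB.cpp, concatenated into the same unit; a second
    // ---- "static const char* logTag" here would be a redefinition error,
    // ---- but re-pointing the macro is fine because it expands where used.
    #ifdef LOGTAG
    #undef LOGTAG
    #endif
    #define LOGTAG "FileB"
    static void LogFromB() { std::printf("[%s] hello from B\n", LOGTAG); }

    int main() {
      LogFromA();  // prints [FileA] ...
      LogFromB();  // prints [FileB] ...
      return 0;
    }
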

View File

@ -9,6 +9,7 @@
#include "MediaStreamGraphImpl.h"
#include <inttypes.h>
#include <math.h>
#include "nspr.h"
@ -56,7 +57,7 @@
#include "nsThreadUtils.h"
#include "logging.h"
#include "CSFLog.h"
// Max size given stereo is 480*2*2 = 1920 (10ms of 16-bits stereo audio at
// 48KHz)
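
For reference, the 1920 in the comment above is just the chunk-size arithmetic spelled out; a compile-time check of it (constant names are illustrative):

    constexpr int kRateHz = 48000;                  // samples/s per channel
    constexpr int kSamplesPer10Ms = kRateHz / 100;  // 480
    constexpr int kBytesPer10MsStereo =
        kSamplesPer10Ms * 2 /*channels*/ * 2 /*bytes per 16-bit sample*/;
    static_assert(kBytesPer10MsStereo == 1920, "matches the comment above");
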
@ -70,8 +71,11 @@ using namespace mozilla::dom;
using namespace mozilla::gfx;
using namespace mozilla::layers;
// Logging context
MOZ_MTLOG_MODULE("mediapipeline")
static const char* mpLogTag = "MediaPipeline";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG mpLogTag
namespace mozilla {
extern mozilla::LogModule* AudioLogModule();
@ -148,8 +152,8 @@ public:
// giving us a margin to not cause some machines to drop every other frame.
const int32_t queueThrottlingLimit = 1;
if (mLength > queueThrottlingLimit) {
MOZ_MTLOG(ML_DEBUG, "VideoFrameConverter " << this << " queue is full." <<
" Throttling by throwing away a frame.");
CSFLogDebug(LOGTAG, "VideoFrameConverter %p queue is full. Throttling by throwing away a frame.",
this);
#ifdef DEBUG
++mThrottleCount;
mThrottleRecord = std::max(mThrottleCount, mThrottleRecord);
@ -159,15 +163,14 @@ public:
#ifdef DEBUG
if (mThrottleCount > 0) {
auto level = ML_DEBUG;
if (mThrottleCount > 5) {
// Log at a higher level when we have large drops.
level = ML_INFO;
CSFLogInfo(LOGTAG, "VideoFrameConverter %p stopped throttling after throwing away %d frames. Longest throttle so far was %d frames.",
this, mThrottleCount, mThrottleRecord);
} else {
CSFLogDebug(LOGTAG, "VideoFrameConverter %p stopped throttling after throwing away %d frames. Longest throttle so far was %d frames.",
this, mThrottleCount, mThrottleRecord);
}
MOZ_MTLOG(level, "VideoFrameConverter " << this << " stopped" <<
" throttling after throwing away " << mThrottleCount <<
" frames. Longest throttle so far was " <<
mThrottleRecord << " frames.");
mThrottleCount = 0;
}
#endif
@ -250,7 +253,7 @@ protected:
{
// check for parameter sanity
if (!aBuffer || aVideoFrameLength == 0 || aWidth == 0 || aHeight == 0) {
MOZ_MTLOG(ML_ERROR, __FUNCTION__ << " Invalid Parameters ");
CSFLogError(LOGTAG, "%s Invalid Parameters", __FUNCTION__);
MOZ_ASSERT(false);
return;
}
@ -308,7 +311,7 @@ protected:
// Fill Cb/Cr planes
memset(pixelData.get() + yPlaneLen.value(), 0x80, cbcrPlaneLen);
MOZ_MTLOG(ML_DEBUG, "Sending a black video frame");
CSFLogDebug(LOGTAG, "Sending a black video frame");
VideoFrameConverted(Move(pixelData), length.value(),
size.width, size.height,
mozilla::kVideoI420, 0);
@ -344,7 +347,7 @@ protected:
webrtc::VideoFrame i420_frame(video_frame_buffer,
0, 0, // not setting timestamps
webrtc::kVideoRotation_0);
MOZ_MTLOG(ML_DEBUG, "Sending an I420 video frame");
CSFLogDebug(LOGTAG, "Sending an I420 video frame");
VideoFrameConverted(i420_frame);
return;
}
@ -352,15 +355,16 @@ protected:
RefPtr<SourceSurface> surf = aImage->GetAsSourceSurface();
if (!surf) {
MOZ_MTLOG(ML_ERROR, "Getting surface from " << Stringify(format) << " image failed");
CSFLogError(LOGTAG, "Getting surface from %s image failed",
Stringify(format).c_str());
return;
}
RefPtr<DataSourceSurface> data = surf->GetDataSurface();
if (!data) {
MOZ_MTLOG(ML_ERROR, "Getting data surface from " << Stringify(format)
<< " image with " << Stringify(surf->GetType()) << "("
<< Stringify(surf->GetFormat()) << ") surface failed");
CSFLogError(LOGTAG, "Getting data surface from %s image with %s (%s) surface failed",
Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
Stringify(surf->GetFormat()).c_str());
return;
}
@ -383,9 +387,9 @@ protected:
DataSourceSurface::ScopedMap map(data, DataSourceSurface::READ);
if (!map.IsMapped()) {
MOZ_MTLOG(ML_ERROR, "Reading DataSourceSurface from " << Stringify(format)
<< " image with " << Stringify(surf->GetType()) << "("
<< Stringify(surf->GetFormat()) << ") surface failed");
CSFLogError(LOGTAG, "Reading DataSourceSurface from %s image with %s (%s) surface failed",
Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
Stringify(surf->GetFormat()).c_str());
return;
}
@ -411,16 +415,18 @@ protected:
size.width, size.height);
break;
default:
MOZ_MTLOG(ML_ERROR, "Unsupported RGB video format" << Stringify(surf->GetFormat()));
CSFLogError(LOGTAG, "Unsupported RGB video format %s",
Stringify(surf->GetFormat()).c_str());
MOZ_ASSERT(PR_FALSE);
return;
}
if (rv != 0) {
MOZ_MTLOG(ML_ERROR, Stringify(surf->GetFormat()) << " to I420 conversion failed");
CSFLogError(LOGTAG, "%s to I420 conversion failed",
Stringify(surf->GetFormat()).c_str());
return;
}
MOZ_MTLOG(ML_DEBUG, "Sending an I420 video frame converted from " <<
Stringify(surf->GetFormat()));
CSFLogDebug(LOGTAG, "Sending an I420 video frame converted from %s",
Stringify(surf->GetFormat()).c_str());
VideoFrameConverted(Move(yuv_scoped), buffer_size.value(), size.width, size.height, mozilla::kVideoI420, 0);
}
@ -594,7 +600,7 @@ MediaPipeline::MediaPipeline(const std::string& pc,
MediaPipeline::~MediaPipeline() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_INFO, "Destroying MediaPipeline: " << description_);
CSFLogInfo(LOGTAG, "Destroying MediaPipeline: %s", description_.c_str());
}
nsresult MediaPipeline::Init() {
@ -770,7 +776,7 @@ void MediaPipeline::StateChange(TransportFlow *flow, TransportLayer::State state
MOZ_ASSERT(info);
if (state == TransportLayer::TS_OPEN) {
MOZ_MTLOG(ML_INFO, "Flow is ready");
CSFLogInfo(LOGTAG, "Flow is ready");
TransportReady_s(*info);
} else if (state == TransportLayer::TS_CLOSED ||
state == TransportLayer::TS_ERROR) {
@ -802,14 +808,13 @@ nsresult MediaPipeline::TransportReady_s(TransportInfo &info) {
// TODO(ekr@rtfm.com): implement some kind of notification on
// failure. bug 852665.
if (info.state_ != MP_CONNECTING) {
MOZ_MTLOG(ML_ERROR, "Transport ready for flow in wrong state:" <<
description_ << ": " << ToString(info.type_));
CSFLogError(LOGTAG, "Transport ready for flow in wrong state:%s :%s",
description_.c_str(), ToString(info.type_));
return NS_ERROR_FAILURE;
}
MOZ_MTLOG(ML_INFO, "Transport ready for pipeline " <<
static_cast<void *>(this) << " flow " << description_ << ": " <<
ToString(info.type_));
CSFLogInfo(LOGTAG, "Transport ready for pipeline %p flow %s: %s", this,
description_.c_str(), ToString(info.type_));
// TODO(bcampen@mozilla.com): Should we disconnect from the flow on failure?
nsresult res;
@ -822,7 +827,7 @@ nsresult MediaPipeline::TransportReady_s(TransportInfo &info) {
uint16_t cipher_suite;
res = dtls->GetSrtpCipher(&cipher_suite);
if (NS_FAILED(res)) {
MOZ_MTLOG(ML_ERROR, "Failed to negotiate DTLS-SRTP. This is an error");
CSFLogError(LOGTAG, "Failed to negotiate DTLS-SRTP. This is an error");
info.state_ = MP_CLOSED;
UpdateRtcpMuxState(info);
return res;
@ -833,7 +838,7 @@ nsresult MediaPipeline::TransportReady_s(TransportInfo &info) {
res = dtls->ExportKeyingMaterial(kDTLSExporterLabel, false, "",
srtp_block, sizeof(srtp_block));
if (NS_FAILED(res)) {
MOZ_MTLOG(ML_ERROR, "Failed to compute DTLS-SRTP keys. This is an error");
CSFLogError(LOGTAG, "Failed to compute DTLS-SRTP keys. This is an error");
info.state_ = MP_CLOSED;
UpdateRtcpMuxState(info);
MOZ_CRASH(); // TODO: Remove once we have enough field experience to
@ -875,16 +880,15 @@ nsresult MediaPipeline::TransportReady_s(TransportInfo &info) {
info.recv_srtp_ = SrtpFlow::Create(cipher_suite, true, read_key,
SRTP_TOTAL_KEY_LENGTH);
if (!info.send_srtp_ || !info.recv_srtp_) {
MOZ_MTLOG(ML_ERROR, "Couldn't create SRTP flow for "
<< ToString(info.type_));
CSFLogError(LOGTAG, "Couldn't create SRTP flow for %s",
ToString(info.type_));
info.state_ = MP_CLOSED;
UpdateRtcpMuxState(info);
return NS_ERROR_FAILURE;
}
MOZ_MTLOG(ML_INFO, "Listening for " << ToString(info.type_)
<< " packets received on " <<
static_cast<void *>(dtls->downward()));
CSFLogInfo(LOGTAG, "Listening for %s packets received on %p",
ToString(info.type_), dtls->downward());
switch (info.type_) {
case RTP:
@ -917,7 +921,7 @@ nsresult MediaPipeline::TransportFailed_s(TransportInfo &info) {
info.state_ = MP_CLOSED;
UpdateRtcpMuxState(info);
MOZ_MTLOG(ML_INFO, "Transport closed for flow " << ToString(info.type_));
CSFLogInfo(LOGTAG, "Transport closed for flow %s", ToString(info.type_));
NS_WARNING(
"MediaPipeline Transport failed. This is not properly cleaned up yet");
@ -959,7 +963,7 @@ nsresult MediaPipeline::SendPacket(TransportFlow *flow, const void *data,
if (res == TE_WOULDBLOCK)
return NS_OK;
MOZ_MTLOG(ML_ERROR, "Failed write on stream " << description_);
CSFLogError(LOGTAG, "Failed write on stream %s", description_.c_str());
return NS_BASE_STREAM_CLOSED;
}
@ -971,21 +975,18 @@ void MediaPipeline::increment_rtp_packets_sent(int32_t bytes) {
rtp_bytes_sent_ += bytes;
if (!(rtp_packets_sent_ % 100)) {
MOZ_MTLOG(ML_INFO, "RTP sent packet count for " << description_
<< " Pipeline " << static_cast<void *>(this)
<< " Flow : " << static_cast<void *>(rtp_.transport_)
<< ": " << rtp_packets_sent_
<< " (" << rtp_bytes_sent_ << " bytes)");
CSFLogInfo(LOGTAG, "RTP sent packet count for %s Pipeline %p Flow: %p: %u (%" PRId64 " bytes)",
description_.c_str(), this, static_cast<void *>(rtp_.transport_),
rtp_packets_sent_, rtp_bytes_sent_);
}
}
void MediaPipeline::increment_rtcp_packets_sent() {
++rtcp_packets_sent_;
if (!(rtcp_packets_sent_ % 100)) {
MOZ_MTLOG(ML_INFO, "RTCP sent packet count for " << description_
<< " Pipeline " << static_cast<void *>(this)
<< " Flow : " << static_cast<void *>(rtcp_.transport_)
<< ": " << rtcp_packets_sent_);
CSFLogInfo(LOGTAG, "RTCP sent packet count for %s Pipeline %p Flow: %p: %u",
description_.c_str(), this, static_cast<void *>(rtp_.transport_),
rtcp_packets_sent_);
}
}
@ -993,21 +994,18 @@ void MediaPipeline::increment_rtp_packets_received(int32_t bytes) {
++rtp_packets_received_;
rtp_bytes_received_ += bytes;
if (!(rtp_packets_received_ % 100)) {
MOZ_MTLOG(ML_INFO, "RTP received packet count for " << description_
<< " Pipeline " << static_cast<void *>(this)
<< " Flow : " << static_cast<void *>(rtp_.transport_)
<< ": " << rtp_packets_received_
<< " (" << rtp_bytes_received_ << " bytes)");
CSFLogInfo(LOGTAG, "RTP received packet count for %s Pipeline %p Flow: %p: %u (%" PRId64 " bytes)",
description_.c_str(), this, static_cast<void *>(rtp_.transport_),
rtp_packets_received_, rtp_bytes_received_);
}
}
void MediaPipeline::increment_rtcp_packets_received() {
++rtcp_packets_received_;
if (!(rtcp_packets_received_ % 100)) {
MOZ_MTLOG(ML_INFO, "RTCP received packet count for " << description_
<< " Pipeline " << static_cast<void *>(this)
<< " Flow : " << static_cast<void *>(rtcp_.transport_)
<< ": " << rtcp_packets_received_);
CSFLogInfo(LOGTAG, "RTCP received packet count for %s Pipeline %p Flow: %p: %u",
description_.c_str(), this, static_cast<void *>(rtp_.transport_),
rtcp_packets_received_);
}
}
@ -1015,22 +1013,22 @@ void MediaPipeline::RtpPacketReceived(TransportLayer *layer,
const unsigned char *data,
size_t len) {
if (!transport_->pipeline()) {
MOZ_MTLOG(ML_ERROR, "Discarding incoming packet; transport disconnected");
CSFLogError(LOGTAG, "Discarding incoming packet; transport disconnected");
return;
}
if (!conduit_) {
MOZ_MTLOG(ML_DEBUG, "Discarding incoming packet; media disconnected");
CSFLogDebug(LOGTAG, "Discarding incoming packet; media disconnected");
return;
}
if (rtp_.state_ != MP_OPEN) {
MOZ_MTLOG(ML_ERROR, "Discarding incoming packet; pipeline not open");
CSFLogError(LOGTAG, "Discarding incoming packet; pipeline not open");
return;
}
if (rtp_.transport_->state() != TransportLayer::TS_OPEN) {
MOZ_MTLOG(ML_ERROR, "Discarding incoming packet; transport not open");
CSFLogError(LOGTAG, "Discarding incoming packet; transport not open");
return;
}
@ -1115,11 +1113,11 @@ void MediaPipeline::RtpPacketReceived(TransportLayer *layer,
inner_data[2],
inner_data[3]);
MOZ_MTLOG(ML_NOTICE, "Error unprotecting RTP in " << description_
<< "len= " << len << "[" << tmp << "...]");
CSFLogError(LOGTAG, "Error unprotecting RTP in %s len= %zu [%s]",
description_.c_str(), len, tmp);
return;
}
MOZ_MTLOG(ML_DEBUG, description_ << " received RTP packet.");
CSFLogDebug(LOGTAG, "%s received RTP packet.", description_.c_str());
increment_rtp_packets_received(out_len);
RtpLogger::LogPacket(inner_data.get(), out_len, true, true, header.headerLength,
@ -1135,22 +1133,22 @@ void MediaPipeline::RtcpPacketReceived(TransportLayer *layer,
const unsigned char *data,
size_t len) {
if (!transport_->pipeline()) {
MOZ_MTLOG(ML_DEBUG, "Discarding incoming packet; transport disconnected");
CSFLogDebug(LOGTAG, "Discarding incoming packet; transport disconnected");
return;
}
if (!conduit_) {
MOZ_MTLOG(ML_DEBUG, "Discarding incoming packet; media disconnected");
CSFLogDebug(LOGTAG, "Discarding incoming packet; media disconnected");
return;
}
if (rtcp_.state_ != MP_OPEN) {
MOZ_MTLOG(ML_DEBUG, "Discarding incoming packet; pipeline not open");
CSFLogDebug(LOGTAG, "Discarding incoming packet; pipeline not open");
return;
}
if (rtcp_.transport_->state() != TransportLayer::TS_OPEN) {
MOZ_MTLOG(ML_ERROR, "Discarding incoming packet; transport not open");
CSFLogError(LOGTAG, "Discarding incoming packet; transport not open");
return;
}
@ -1168,7 +1166,7 @@ void MediaPipeline::RtcpPacketReceived(TransportLayer *layer,
// TODO bug 1279153: remove SR check for reduced size RTCP
if (filter_ && direction_ == RECEIVE) {
if (!filter_->FilterSenderReport(data, len)) {
MOZ_MTLOG(ML_NOTICE, "Dropping incoming RTCP packet; filtered out");
CSFLogWarn(LOGTAG, "Dropping incoming RTCP packet; filtered out");
return;
}
}
@ -1189,7 +1187,7 @@ void MediaPipeline::RtcpPacketReceived(TransportLayer *layer,
if (!NS_SUCCEEDED(res))
return;
MOZ_MTLOG(ML_DEBUG, description_ << " received RTCP packet.");
CSFLogDebug(LOGTAG, "%s received RTCP packet.", description_.c_str());
increment_rtcp_packets_received();
RtpLogger::LogPacket(inner_data.get(), out_len, true, false, 0, description_);
@ -1239,7 +1237,7 @@ void MediaPipeline::PacketReceived(TransportLayer *layer,
const unsigned char *data,
size_t len) {
if (!transport_->pipeline()) {
MOZ_MTLOG(ML_DEBUG, "Discarding incoming packet; transport disconnected");
CSFLogDebug(LOGTAG, "Discarding incoming packet; transport disconnected");
return;
}
@ -1486,9 +1484,8 @@ void MediaPipelineTransmit::AttachToTrack(const std::string& track_id) {
description_ += "]";
// TODO(ekr@rtfm.com): Check for errors
MOZ_MTLOG(ML_DEBUG, "Attaching pipeline to track "
<< static_cast<void *>(domtrack_) << " conduit type=" <<
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
CSFLogDebug(LOGTAG, "Attaching pipeline to track %p conduit type=%s", this,
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
// With full duplex we don't risk having audio come in late to the MSG
@ -1584,10 +1581,9 @@ nsresult MediaPipelineTransmit::ReplaceTrack(MediaStreamTrack& domtrack) {
nsString nsTrackId;
domtrack.GetId(nsTrackId);
std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
MOZ_MTLOG(ML_DEBUG, "Reattaching pipeline " << description_ << " to track "
<< static_cast<void *>(&domtrack)
<< " track " << track_id << " conduit type=" <<
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
CSFLogDebug(LOGTAG, "Reattaching pipeline %s to track %p track %s conduit type: %s",
description_.c_str(), &domtrack, track_id.c_str(),
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
DetachMedia();
domtrack_ = &domtrack; // Detach clears it
@ -1620,13 +1616,13 @@ nsresult MediaPipeline::ConnectTransport_s(TransportInfo &info) {
if (info.transport_->state() == TransportLayer::TS_OPEN) {
nsresult res = TransportReady_s(info);
if (NS_FAILED(res)) {
MOZ_MTLOG(ML_ERROR, "Error calling TransportReady(); res="
<< static_cast<uint32_t>(res) << " in " << __FUNCTION__);
CSFLogError(LOGTAG, "Error calling TransportReady(); res=%u in %s",
static_cast<uint32_t>(res), __FUNCTION__);
return res;
}
} else if (info.transport_->state() == TransportLayer::TS_ERROR) {
MOZ_MTLOG(ML_ERROR, ToString(info.type_)
<< "transport is already in error state");
CSFLogError(LOGTAG, "%s transport is already in error state",
ToString(info.type_));
TransportFailed_s(info);
return NS_ERROR_FAILURE;
}
@ -1677,7 +1673,7 @@ nsresult MediaPipeline::PipelineTransport::SendRtpRtcpPacket_s(
TransportInfo& transport = is_rtp ? pipeline_->rtp_ : pipeline_->rtcp_;
if (!transport.send_srtp_) {
MOZ_MTLOG(ML_DEBUG, "Couldn't write RTP/RTCP packet; SRTP not set up yet");
CSFLogDebug(LOGTAG, "Couldn't write RTP/RTCP packet; SRTP not set up yet");
return NS_OK;
}
@ -1724,8 +1720,8 @@ nsresult MediaPipeline::PipelineTransport::SendRtpRtcpPacket_s(
// paranoia; don't have uninitialized bytes included in data->len()
data->SetLength(out_len);
MOZ_MTLOG(ML_DEBUG, pipeline_->description_ << " sending " <<
(is_rtp ? "RTP" : "RTCP") << " packet");
CSFLogDebug(LOGTAG, "%s sending %s packet", pipeline_->description_.c_str(),
(is_rtp ? "RTP" : "RTCP"));
if (is_rtp) {
pipeline_->packet_dumper_->Dump(
pipeline_->level(), dom::mozPacketDumpType::Srtp, true, data->data(), out_len);
@ -1774,9 +1770,8 @@ void MediaPipelineTransmit::PipelineListener::
NotifyRealtimeTrackData(MediaStreamGraph* graph,
StreamTime offset,
const MediaSegment& media) {
MOZ_MTLOG(ML_DEBUG, "MediaPipeline::NotifyRealtimeTrackData() listener=" <<
this << ", offset=" << offset <<
", duration=" << media.GetDuration());
CSFLogDebug(LOGTAG, "MediaPipeline::NotifyRealtimeTrackData() listener=%p, offset=%" PRId64 ", duration=%" PRId64,
this, offset, media.GetDuration());
if (media.GetType() == MediaSegment::VIDEO) {
// We have to call the upstream NotifyRealtimeTrackData and
@ -1792,7 +1787,7 @@ void MediaPipelineTransmit::PipelineListener::
NotifyQueuedChanges(MediaStreamGraph* graph,
StreamTime offset,
const MediaSegment& queued_media) {
MOZ_MTLOG(ML_DEBUG, "MediaPipeline::NotifyQueuedChanges()");
CSFLogDebug(LOGTAG, "MediaPipeline::NotifyQueuedChanges()");
if (queued_media.GetType() == MediaSegment::VIDEO) {
// We always get video from SetCurrentFrames().
@ -1816,15 +1811,15 @@ NotifyQueuedChanges(MediaStreamGraph* graph,
void MediaPipelineTransmit::PipelineListener::
NotifyDirectListenerInstalled(InstallationResult aResult) {
MOZ_MTLOG(ML_INFO, "MediaPipeline::NotifyDirectListenerInstalled() listener= " <<
this << ", result=" << static_cast<int32_t>(aResult));
CSFLogInfo(LOGTAG, "MediaPipeline::NotifyDirectListenerInstalled() listener=%p, result=%d",
this, static_cast<int32_t>(aResult));
direct_connect_ = InstallationResult::SUCCESS == aResult;
}
void MediaPipelineTransmit::PipelineListener::
NotifyDirectListenerUninstalled() {
MOZ_MTLOG(ML_INFO, "MediaPipeline::NotifyDirectListenerUninstalled() listener=" << this);
CSFLogInfo(LOGTAG, "MediaPipeline::NotifyDirectListenerUninstalled() listener=%p", this);
direct_connect_ = false;
}
@ -1832,7 +1827,7 @@ NotifyDirectListenerUninstalled() {
void MediaPipelineTransmit::PipelineListener::
NewData(const MediaSegment& media, TrackRate aRate /* = 0 */) {
if (!active_) {
MOZ_MTLOG(ML_DEBUG, "Discarding packets because transport not ready");
CSFLogDebug(LOGTAG, "Discarding packets because transport not ready");
return;
}
@ -2032,7 +2027,7 @@ public:
{
MOZ_ASSERT(source_);
if (!source_) {
MOZ_MTLOG(ML_ERROR, "NotifyPull() called from a non-SourceMediaStream");
CSFLogError(LOGTAG, "NotifyPull() called from a non-SourceMediaStream");
return;
}
@ -2053,10 +2048,9 @@ public:
if (err != kMediaConduitNoError) {
// Insert silence on conduit/GIPS failure (extremely unlikely)
MOZ_MTLOG(ML_ERROR, "Audio conduit failed (" << err
<< ") to return data @ " << played_ticks_
<< " (desired " << desired_time << " -> "
<< source_->StreamTimeToSeconds(desired_time) << ")");
CSFLogError(LOGTAG, "Audio conduit failed (%d) to return data @ %" PRId64 " (desired %" PRId64 " -> %f)",
err, played_ticks_, desired_time,
source_->StreamTimeToSeconds(desired_time));
// if this is not enough we'll loop and provide more
samples_length = WEBRTC_DEFAULT_SAMPLE_RATE/100;
PodArrayZero(scratch_buffer);
@ -2064,8 +2058,8 @@ public:
MOZ_ASSERT(samples_length * sizeof(uint16_t) < AUDIO_SAMPLE_BUFFER_MAX_BYTES);
MOZ_MTLOG(ML_DEBUG, "Audio conduit returned buffer of length "
<< samples_length);
CSFLogDebug(LOGTAG, "Audio conduit returned buffer of length %u",
samples_length);
RefPtr<SharedBuffer> samples = SharedBuffer::Create(samples_length * sizeof(uint16_t));
int16_t *samples_data = static_cast<int16_t *>(samples->Data());
@ -2108,7 +2102,7 @@ public:
}
}
} else {
MOZ_MTLOG(ML_ERROR, "AppendToTrack failed");
CSFLogError(LOGTAG, "AppendToTrack failed");
// we can't un-read the data, but that's ok since we don't want to
// buffer - but don't i-loop!
return;
@ -2154,7 +2148,7 @@ void MediaPipelineReceiveAudio::DetachMedia()
nsresult MediaPipelineReceiveAudio::Init()
{
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_DEBUG, __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
description_ = pc_ + "| Receive audio[";
description_ += track_id_;
@ -2202,7 +2196,7 @@ public:
if (source_->AppendToTrack(track_id_, &segment)) {
played_ticks_ = desired_time;
} else {
MOZ_MTLOG(ML_ERROR, "AppendToTrack failed");
CSFLogError(LOGTAG, "AppendToTrack failed");
return;
}
}
@ -2336,7 +2330,7 @@ void MediaPipelineReceiveVideo::DetachMedia()
nsresult MediaPipelineReceiveVideo::Init() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_DEBUG, __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
description_ = pc_ + "| Receive video[";
description_ += track_id_;
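A note on the format strings introduced throughout this file, with a minimal sketch (assumes <inttypes.h> and the CSFLog macros are in scope; the helper below is illustrative, not from the tree): moving from stream-style MOZ_MTLOG to printf-style CSFLog means 64-bit values such as StreamTime need the PRId64 width macro, pointers go through %p, and text like __FUNCTION__ is passed via "%s" rather than used directly as a format string.

    #include <inttypes.h>  // PRId64 for 64-bit StreamTime/duration values

    static void LogTrackDataSketch(void* listener, int64_t offset, int64_t duration) {
      CSFLogDebug(LOGTAG, "listener=%p, offset=%" PRId64 ", duration=%" PRId64,
                  listener, offset, duration);
      CSFLogDebug(LOGTAG, "%s", __FUNCTION__);  // never pass text as the format string itself
    }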

View File

@ -10,10 +10,14 @@
#include "MediaPipelineFilter.h"
#include "webrtc/common_types.h"
#include "logging.h"
// Logging context
MOZ_MTLOG_MODULE("mediapipeline")
#include "CSFLog.h"
static const char* mpfLogTag = "MediaPipelineFilter";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG mpfLogTag
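Each converted file now gets a uniquely named static tag (mpfLogTag here; rlLogTag, sfLogTag, pccLogTag, pciLogTag, pcmLogTag below) and reuses only the LOGTAG macro, guarded by #undef. The point, per the moz.build comment removed further down ("Duplicate definition of getLogModule"), is that the old per-file MOZ_MTLOG_MODULE/logTag definitions clashed once these sources were compiled as a unified build. A rough illustration; the expansion shown is an assumption, not the real macro body:

    // A unified build (see the UNIFIED_SOURCES change in moz.build below)
    // concatenates the listed .cpp files into one translation unit, so two
    // files that each defined
    //     static const char* logTag = "...";                  // old style
    // or expanded MOZ_MTLOG_MODULE into their own getLogModule() helper
    // collided with a redefinition error. A unique per-file symbol plus a
    // re-#define'd LOGTAG macro avoids that while call sites stay uniform:
    CSFLogDebug(LOGTAG, "filter saw ssrc %u", ssrc);  // LOGTAG resolves to mpfLogTag here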
namespace mozilla {
@ -41,8 +45,10 @@ bool MediaPipelineFilter::Filter(const webrtc::RTPHeader& header,
return true;
}
if (!header.extension.rtpStreamId.empty()) {
MOZ_MTLOG(ML_DEBUG, "MediaPipelineFilter ignoring seq# " << header.sequenceNumber <<
" ssrc: " << header.ssrc << " RID: " << header.extension.rtpStreamId.data());
CSFLogDebug(LOGTAG,
"MediaPipelineFilter ignoring seq# %u ssrc: %u RID: %s",
header.sequenceNumber, header.ssrc,
header.extension.rtpStreamId.data());
}
if (remote_ssrc_set_.count(header.ssrc)) {

View File

@ -5,7 +5,8 @@
// Original author: nohlmeier@mozilla.com
#include "RtpLogger.h"
#include "logging.h"
#include "CSFLog.h"
#include <ctime>
#include <iomanip>
@ -19,17 +20,22 @@
// Logging context
using namespace mozilla;
MOZ_MTLOG_MODULE("rtplogger")
static const char* rlLogTag = "RtpLogger";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG rlLogTag
namespace mozilla {
bool RtpLogger::IsPacketLoggingOn() {
return MOZ_LOG_TEST(getLogModule(), ML_DEBUG);
return CSFLogTestLevel(CSF_LOG_DEBUG);
}
void RtpLogger::LogPacket(const unsigned char *data, int len, bool input,
bool isRtp, int headerLength, std::string desc) {
if (MOZ_LOG_TEST(getLogModule(), ML_DEBUG)) {
if (CSFLogTestLevel(CSF_LOG_DEBUG)) {
std::stringstream ss;
/* This creates text2pcap compatible format, e.g.:
* O 10:36:26.864934 000000 80 c8 00 06 6d ... RTCP_PACKET
@ -66,9 +72,8 @@ void RtpLogger::LogPacket(const unsigned char *data, int len, bool input,
ss << " " << std::setw(2) << (int)data[i];
}
}
MOZ_MTLOG(ML_DEBUG, "\n" << ss.str() <<
(isRtp ? " RTP_PACKET " : " RTCP_PACKET ") <<
desc);
CSFLogDebug(LOGTAG, "%s%s%s", ss.str().c_str(),
(isRtp ? " RTP_PACKET " : " RTCP_PACKET "), desc.c_str());
}
}
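The conversion keeps the cheap-guard-first shape of LogPacket: the hex dump is only built when debug logging is actually enabled, now tested with CSFLogTestLevel(CSF_LOG_DEBUG) instead of MOZ_LOG_TEST. A condensed sketch (hex-formatting details are illustrative; the real code also emits text2pcap-style timestamps and offsets so the output can be turned back into a capture with Wireshark's text2pcap):

    void LogPacketSketch(const unsigned char* data, int len, bool isRtp) {
      if (!CSFLogTestLevel(CSF_LOG_DEBUG)) {
        return;  // skip the expensive string building when nobody will see it
      }
      std::stringstream ss;
      ss << std::hex << std::setfill('0');
      for (int i = 0; i < len; ++i) {
        ss << " " << std::setw(2) << (int)data[i];
      }
      CSFLogDebug(LOGTAG, "%s%s", ss.str().c_str(),
                  isRtp ? " RTP_PACKET" : " RTCP_PACKET");
    }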

View File

@ -13,9 +13,13 @@
#include "mozilla/RefPtr.h"
// Logging context
static const char* sfLogTag = "SrtpFlow";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG sfLogTag
using namespace mozilla;
MOZ_MTLOG_MODULE("mediapipeline")
namespace mozilla {
@ -38,12 +42,12 @@ RefPtr<SrtpFlow> SrtpFlow::Create(int cipher_suite,
RefPtr<SrtpFlow> flow = new SrtpFlow();
if (!key) {
MOZ_MTLOG(ML_ERROR, "Null SRTP key specified");
CSFLogError(LOGTAG, "Null SRTP key specified");
return nullptr;
}
if (key_len != SRTP_TOTAL_KEY_LENGTH) {
MOZ_MTLOG(ML_ERROR, "Invalid SRTP key length");
CSFLogError(LOGTAG, "Invalid SRTP key length");
return nullptr;
}
@ -54,19 +58,19 @@ RefPtr<SrtpFlow> SrtpFlow::Create(int cipher_suite,
// since any flow can only have one cipher suite with DTLS-SRTP
switch (cipher_suite) {
case SRTP_AES128_CM_HMAC_SHA1_80:
MOZ_MTLOG(ML_DEBUG,
"Setting SRTP cipher suite SRTP_AES128_CM_HMAC_SHA1_80");
CSFLogDebug(LOGTAG,
"Setting SRTP cipher suite SRTP_AES128_CM_HMAC_SHA1_80");
crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtp);
crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp);
break;
case SRTP_AES128_CM_HMAC_SHA1_32:
MOZ_MTLOG(ML_DEBUG,
"Setting SRTP cipher suite SRTP_AES128_CM_HMAC_SHA1_32");
CSFLogDebug(LOGTAG,
"Setting SRTP cipher suite SRTP_AES128_CM_HMAC_SHA1_32");
crypto_policy_set_aes_cm_128_hmac_sha1_32(&policy.rtp);
crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp); // 80-bit per RFC 5764
break; // S 4.1.2.
default:
MOZ_MTLOG(ML_ERROR, "Request to set unknown SRTP cipher suite");
CSFLogError(LOGTAG, "Request to set unknown SRTP cipher suite");
return nullptr;
}
// This key is copied into the srtp_t object, so we don't
@ -83,7 +87,7 @@ RefPtr<SrtpFlow> SrtpFlow::Create(int cipher_suite,
// Now make the session
err_status_t r = srtp_create(&flow->session_, &policy);
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Error creating srtp session");
CSFLogError(LOGTAG, "Error creating srtp session");
return nullptr;
}
@ -95,30 +99,30 @@ nsresult SrtpFlow::CheckInputs(bool protect, void *in, int in_len,
int max_len, int *out_len) {
MOZ_ASSERT(in);
if (!in) {
MOZ_MTLOG(ML_ERROR, "NULL input value");
CSFLogError(LOGTAG, "NULL input value");
return NS_ERROR_NULL_POINTER;
}
if (in_len < 0) {
MOZ_MTLOG(ML_ERROR, "Input length is negative");
CSFLogError(LOGTAG, "Input length is negative");
return NS_ERROR_ILLEGAL_VALUE;
}
if (max_len < 0) {
MOZ_MTLOG(ML_ERROR, "Max output length is negative");
CSFLogError(LOGTAG, "Max output length is negative");
return NS_ERROR_ILLEGAL_VALUE;
}
if (protect) {
if ((max_len < SRTP_MAX_EXPANSION) ||
((max_len - SRTP_MAX_EXPANSION) < in_len)) {
MOZ_MTLOG(ML_ERROR, "Output too short");
CSFLogError(LOGTAG, "Output too short");
return NS_ERROR_ILLEGAL_VALUE;
}
}
else {
if (in_len > max_len) {
MOZ_MTLOG(ML_ERROR, "Output too short");
CSFLogError(LOGTAG, "Output too short");
return NS_ERROR_ILLEGAL_VALUE;
}
}
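For the protect path the check above amounts to max_len >= in_len + SRTP_MAX_EXPANSION: srtp_protect appends the SRTP trailer (authentication tag and related fields) in place, so the caller's buffer must leave that much headroom beyond the plain packet. Writing it as two comparisons avoids forming the sum in_len + SRTP_MAX_EXPANSION, which could overflow for hostile lengths. A worked example with an illustrative expansion value (not the real constant):

    const int kExpansion = 128;             // stand-in for SRTP_MAX_EXPANSION
    const int in_len  = 1200;               // plain RTP packet size
    const int max_len = 1200 + kExpansion;  // smallest output buffer that passes
    bool ok = (max_len >= kExpansion) && ((max_len - kExpansion) >= in_len);  // true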
@ -136,7 +140,7 @@ nsresult SrtpFlow::ProtectRtp(void *in, int in_len,
err_status_t r = srtp_protect(session_, in, &len);
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Error protecting SRTP packet");
CSFLogError(LOGTAG, "Error protecting SRTP packet");
return NS_ERROR_FAILURE;
}
@ -144,8 +148,8 @@ nsresult SrtpFlow::ProtectRtp(void *in, int in_len,
*out_len = len;
MOZ_MTLOG(ML_DEBUG, "Successfully protected an SRTP packet of len "
<< *out_len);
CSFLogDebug(LOGTAG, "Successfully protected an SRTP packet of len %d",
*out_len);
return NS_OK;
}
@ -160,15 +164,15 @@ nsresult SrtpFlow::UnprotectRtp(void *in, int in_len,
err_status_t r = srtp_unprotect(session_, in, &len);
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Error unprotecting SRTP packet error=" << (int)r);
CSFLogError(LOGTAG, "Error unprotecting SRTP packet error=%d", (int)r);
return NS_ERROR_FAILURE;
}
MOZ_ASSERT(len <= max_len);
*out_len = len;
MOZ_MTLOG(ML_DEBUG, "Successfully unprotected an SRTP packet of len "
<< *out_len);
CSFLogDebug(LOGTAG, "Successfully unprotected an SRTP packet of len %d",
*out_len);
return NS_OK;
}
@ -183,15 +187,15 @@ nsresult SrtpFlow::ProtectRtcp(void *in, int in_len,
err_status_t r = srtp_protect_rtcp(session_, in, &len);
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Error protecting SRTCP packet");
CSFLogError(LOGTAG, "Error protecting SRTCP packet");
return NS_ERROR_FAILURE;
}
MOZ_ASSERT(len <= max_len);
*out_len = len;
MOZ_MTLOG(ML_DEBUG, "Successfully protected an SRTCP packet of len "
<< *out_len);
CSFLogDebug(LOGTAG, "Successfully protected an SRTCP packet of len %d",
*out_len);
return NS_OK;
}
@ -206,15 +210,15 @@ nsresult SrtpFlow::UnprotectRtcp(void *in, int in_len,
err_status_t r = srtp_unprotect_rtcp(session_, in, &len);
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Error unprotecting SRTCP packet error=" << (int)r);
CSFLogError(LOGTAG, "Error unprotecting SRTCP packet error=%d", (int)r);
return NS_ERROR_FAILURE;
}
MOZ_ASSERT(len <= max_len);
*out_len = len;
MOZ_MTLOG(ML_DEBUG, "Successfully unprotected an SRTCP packet of len "
<< *out_len);
CSFLogDebug(LOGTAG, "Successfully unprotected an SRTCP packet of len %d",
*out_len);
return NS_OK;
}
@ -229,14 +233,14 @@ nsresult SrtpFlow::Init() {
if (!initialized) {
err_status_t r = srtp_init();
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Could not initialize SRTP");
CSFLogError(LOGTAG, "Could not initialize SRTP");
MOZ_ASSERT(PR_FALSE);
return NS_ERROR_FAILURE;
}
r = srtp_install_event_handler(&SrtpFlow::srtp_event_handler);
if (r != err_status_ok) {
MOZ_MTLOG(ML_ERROR, "Could not install SRTP event handler");
CSFLogError(LOGTAG, "Could not install SRTP event handler");
MOZ_ASSERT(PR_FALSE);
return NS_ERROR_FAILURE;
}

View File

@ -10,14 +10,14 @@ LOCAL_INCLUDES += [
'/media/libyuv/libyuv/include',
'/media/mtransport',
'/media/webrtc',
'/media/webrtc/signaling/src/common/browser_logging',
'/media/webrtc/signaling/src/media-conduit',
'/media/webrtc/trunk',
'/netwerk/srtp/src/crypto/include',
'/netwerk/srtp/src/include',
]
# Duplicate definition of getLogModule
SOURCES += [
UNIFIED_SOURCES += [
'MediaPipeline.cpp',
'MediaPipelineFilter.cpp',
'RtpLogger.cpp',

View File

@ -29,7 +29,11 @@
#include "gmp-video-decode.h" // GMP_API_VIDEO_DECODER
#include "gmp-video-encode.h" // GMP_API_VIDEO_ENCODER
static const char* logTag = "PeerConnectionCtx";
static const char* pccLogTag = "PeerConnectionCtx";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG pccLogTag
namespace mozilla {
@ -65,7 +69,7 @@ public:
NS_IMETHOD Observe(nsISupports* aSubject, const char* aTopic,
const char16_t* aData) override {
if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
CSFLogDebug(logTag, "Shutting down PeerConnectionCtx");
CSFLogDebug(LOGTAG, "Shutting down PeerConnectionCtx");
PeerConnectionCtx::Destroy();
nsCOMPtr<nsIObserverService> observerService =
@ -86,13 +90,13 @@ public:
}
if (strcmp(aTopic, NS_IOSERVICE_OFFLINE_STATUS_TOPIC) == 0) {
if (NS_strcmp(aData, u"" NS_IOSERVICE_OFFLINE) == 0) {
CSFLogDebug(logTag, "Updating network state to offline");
CSFLogDebug(LOGTAG, "Updating network state to offline");
PeerConnectionCtx::UpdateNetworkState(false);
} else if(NS_strcmp(aData, u"" NS_IOSERVICE_ONLINE) == 0) {
CSFLogDebug(logTag, "Updating network state to online");
CSFLogDebug(LOGTAG, "Updating network state to online");
PeerConnectionCtx::UpdateNetworkState(true);
} else {
CSFLogDebug(logTag, "Received unsupported network state event");
CSFLogDebug(LOGTAG, "Received unsupported network state event");
MOZ_CRASH();
}
}
@ -139,7 +143,7 @@ nsresult PeerConnectionCtx::InitializeGlobal(nsIThread *mainThread,
MOZ_ASSERT(NS_IsMainThread());
if (!gInstance) {
CSFLogDebug(logTag, "Creating PeerConnectionCtx");
CSFLogDebug(LOGTAG, "Creating PeerConnectionCtx");
PeerConnectionCtx *ctx = new PeerConnectionCtx();
res = ctx->Initialize();
@ -169,7 +173,7 @@ bool PeerConnectionCtx::isActive() {
}
void PeerConnectionCtx::Destroy() {
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
if (gInstance) {
gInstance->Cleanup();
@ -397,7 +401,7 @@ void PeerConnectionCtx::initGMP()
mGMPService = do_GetService("@mozilla.org/gecko-media-plugin-service;1");
if (!mGMPService) {
CSFLogError(logTag, "%s failed to get the gecko-media-plugin-service",
CSFLogError(LOGTAG, "%s failed to get the gecko-media-plugin-service",
__FUNCTION__);
return;
}
@ -407,7 +411,7 @@ void PeerConnectionCtx::initGMP()
if (NS_FAILED(rv)) {
mGMPService = nullptr;
CSFLogError(logTag,
CSFLogError(LOGTAG,
"%s failed to get the gecko-media-plugin thread, err=%u",
__FUNCTION__,
static_cast<unsigned>(rv));
@ -419,7 +423,7 @@ void PeerConnectionCtx::initGMP()
}
nsresult PeerConnectionCtx::Cleanup() {
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mQueuedJSEPOperations.Clear();
mGMPService = nullptr;

View File

@ -134,7 +134,12 @@ using namespace mozilla::dom;
typedef PCObserverString ObString;
static const char* logTag = "PeerConnectionImpl";
static const char* pciLogTag = "PeerConnectionImpl";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG pciLogTag
static mozilla::LazyLogModule logModuleInfo("signaling");
// Getting exceptions back down from PCObserver is generally not harmful.
@ -202,12 +207,12 @@ static nsresult InitNSSInContent()
}
if (NSS_NoDB_Init(nullptr) != SECSuccess) {
CSFLogError(logTag, "NSS_NoDB_Init failed.");
CSFLogError(LOGTAG, "NSS_NoDB_Init failed.");
return NS_ERROR_FAILURE;
}
if (NS_FAILED(mozilla::psm::InitializeCipherSuite())) {
CSFLogError(logTag, "Fail to set up nss cipher suite.");
CSFLogError(LOGTAG, "Fail to set up nss cipher suite.");
return NS_ERROR_FAILURE;
}
@ -333,7 +338,7 @@ PeerConnectionImpl::PeerConnectionImpl(const GlobalObject* aGlobal)
mWindow->AddPeerConnection();
mActiveOnWindow = true;
}
CSFLogInfo(logTag, "%s: PeerConnectionImpl constructor for %s",
CSFLogInfo(LOGTAG, "%s: PeerConnectionImpl constructor for %s",
__FUNCTION__, mHandle.c_str());
STAMP_TIMECARD(mTimeCard, "Constructor Completed");
mAllowIceLoopback = Preferences::GetBool(
@ -374,10 +379,10 @@ PeerConnectionImpl::~PeerConnectionImpl()
if (PeerConnectionCtx::isActive()) {
PeerConnectionCtx::GetInstance()->mPeerConnections.erase(mHandle);
} else {
CSFLogError(logTag, "PeerConnectionCtx is already gone. Ignoring...");
CSFLogError(LOGTAG, "PeerConnectionCtx is already gone. Ignoring...");
}
CSFLogInfo(logTag, "%s: PeerConnectionImpl destructor invoked for %s",
CSFLogInfo(LOGTAG, "%s: PeerConnectionImpl destructor invoked for %s",
__FUNCTION__, mHandle.c_str());
Close();
@ -400,7 +405,7 @@ PeerConnectionImpl::MakeMediaStream()
RefPtr<DOMMediaStream> stream =
DOMMediaStream::CreateSourceStreamAsInput(GetWindow(), graph);
CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
CSFLogDebug(LOGTAG, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
return stream.forget();
}
@ -687,7 +692,7 @@ PeerConnectionImpl::Initialize(PeerConnectionObserver& aObserver,
aConfiguration.getTurnServers(),
aConfiguration.getIceTransportPolicy());
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Couldn't initialize media object", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Couldn't initialize media object", __FUNCTION__);
return res;
}
@ -698,7 +703,7 @@ PeerConnectionImpl::Initialize(PeerConnectionObserver& aObserver,
res = mJsepSession->Init();
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Couldn't init JSEP Session, res=%u",
CSFLogError(LOGTAG, "%s: Couldn't init JSEP Session, res=%u",
__FUNCTION__,
static_cast<unsigned>(res));
return res;
@ -707,7 +712,7 @@ PeerConnectionImpl::Initialize(PeerConnectionObserver& aObserver,
res = mJsepSession->SetIceCredentials(mMedia->ice_ctx()->ufrag(),
mMedia->ice_ctx()->pwd());
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Couldn't set ICE credentials, res=%u",
CSFLogError(LOGTAG, "%s: Couldn't set ICE credentials, res=%u",
__FUNCTION__,
static_cast<unsigned>(res));
return res;
@ -715,7 +720,7 @@ PeerConnectionImpl::Initialize(PeerConnectionObserver& aObserver,
res = mJsepSession->SetBundlePolicy(aConfiguration.getBundlePolicy());
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Couldn't set bundle policy, res=%u, error=%s",
CSFLogError(LOGTAG, "%s: Couldn't set bundle policy, res=%u, error=%s",
__FUNCTION__,
static_cast<unsigned>(res),
mJsepSession->GetLastError().c_str());
@ -739,7 +744,7 @@ PeerConnectionImpl::Initialize(PeerConnectionObserver& aObserver,
PeerConnectionConfiguration converted;
nsresult res = converted.Init(aConfiguration);
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Invalid RTCConfiguration", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Invalid RTCConfiguration", __FUNCTION__);
rv.Throw(res);
return;
}
@ -767,7 +772,7 @@ PeerConnectionImpl::SetCertificate(mozilla::dom::RTCCertificate& aCertificate)
nsresult rv = CalculateFingerprint(DtlsIdentity::DEFAULT_HASH_ALGORITHM,
&fingerprint);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Couldn't calculate fingerprint, rv=%u",
CSFLogError(LOGTAG, "%s: Couldn't calculate fingerprint, rv=%u",
__FUNCTION__, static_cast<unsigned>(rv));
mCertificate = nullptr;
return;
@ -775,7 +780,7 @@ PeerConnectionImpl::SetCertificate(mozilla::dom::RTCCertificate& aCertificate)
rv = mJsepSession->AddDtlsFingerprint(DtlsIdentity::DEFAULT_HASH_ALGORITHM,
fingerprint);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Couldn't set DTLS credentials, rv=%u",
CSFLogError(LOGTAG, "%s: Couldn't set DTLS credentials, rv=%u",
__FUNCTION__, static_cast<unsigned>(rv));
mCertificate = nullptr;
}
@ -866,7 +871,7 @@ class ConfigureCodec {
// trying to call ourself, for example. It will work for most real-world cases, like
// if we try to add a person to a 2-way call to make a 3-way mesh call
if (encode->ReserveOMXCodec() && decode->ReserveOMXCodec()) {
CSFLogDebug( logTag, "%s: H264 hardware codec available", __FUNCTION__);
CSFLogDebug( LOGTAG, "%s: H264 hardware codec available", __FUNCTION__);
mHardwareH264Supported = true;
}
}
@ -1045,7 +1050,7 @@ PeerConnectionImpl::ConfigureJsepSessionCodecs() {
do_GetService("@mozilla.org/preferences-service;1", &res);
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Couldn't get prefs service, res=%u",
CSFLogError(LOGTAG, "%s: Couldn't get prefs service, res=%u",
__FUNCTION__,
static_cast<unsigned>(res));
return res;
@ -1053,7 +1058,7 @@ PeerConnectionImpl::ConfigureJsepSessionCodecs() {
nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
if (!branch) {
CSFLogError(logTag, "%s: Couldn't get prefs branch", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Couldn't get prefs branch", __FUNCTION__);
return NS_ERROR_FAILURE;
}
@ -1109,7 +1114,7 @@ PeerConnectionImpl::EnsureDataConnection(uint16_t aLocalPort,
PC_AUTO_ENTER_API_CALL(false);
if (mDataConnection) {
CSFLogDebug(logTag,"%s DataConnection already connected",__FUNCTION__);
CSFLogDebug(LOGTAG,"%s DataConnection already connected",__FUNCTION__);
mDataConnection->SetMaxMessageSize(aMMSSet, aMaxMessageSize);
return NS_OK;
}
@ -1119,10 +1124,10 @@ PeerConnectionImpl::EnsureDataConnection(uint16_t aLocalPort,
: nullptr;
mDataConnection = new DataChannelConnection(this, target);
if (!mDataConnection->Init(aLocalPort, aNumstreams, aMMSSet, aMaxMessageSize)) {
CSFLogError(logTag,"%s DataConnection Init Failed",__FUNCTION__);
CSFLogError(LOGTAG,"%s DataConnection Init Failed",__FUNCTION__);
return NS_ERROR_FAILURE;
}
CSFLogDebug(logTag,"%s DataChannelConnection %p attached to %s",
CSFLogDebug(LOGTAG,"%s DataChannelConnection %p attached to %s",
__FUNCTION__, (void*) mDataConnection.get(), mHandle.c_str());
return NS_OK;
}
@ -1153,7 +1158,7 @@ PeerConnectionImpl::GetDatachannelParameters(
trackPair.mSending->GetNegotiatedDetails()->GetEncoding(0);
if (encoding.GetCodecs().empty()) {
CSFLogError(logTag, "%s: Negotiated m=application with no codec. "
CSFLogError(LOGTAG, "%s: Negotiated m=application with no codec. "
"This is likely to be broken.",
__FUNCTION__);
return NS_ERROR_FAILURE;
@ -1161,7 +1166,7 @@ PeerConnectionImpl::GetDatachannelParameters(
for (const JsepCodecDescription* codec : encoding.GetCodecs()) {
if (codec->mType != SdpMediaSection::kApplication) {
CSFLogError(logTag, "%s: Codec type for m=application was %u, this "
CSFLogError(LOGTAG, "%s: Codec type for m=application was %u, this "
"is a bug.",
__FUNCTION__,
static_cast<unsigned>(codec->mType));
@ -1170,7 +1175,7 @@ PeerConnectionImpl::GetDatachannelParameters(
}
if (codec->mName != "webrtc-datachannel") {
CSFLogWarn(logTag, "%s: Codec for m=application was not "
CSFLogWarn(LOGTAG, "%s: Codec for m=application was not "
"webrtc-datachannel (was instead %s). ",
__FUNCTION__,
codec->mName.c_str());
@ -1235,7 +1240,7 @@ PeerConnectionImpl::AddTrackToJsepSession(SdpMediaSection::MediaType type,
{
nsresult res = ConfigureJsepSessionCodecs();
if (NS_FAILED(res)) {
CSFLogError(logTag, "Failed to configure codecs");
CSFLogError(LOGTAG, "Failed to configure codecs");
return res;
}
@ -1244,7 +1249,7 @@ PeerConnectionImpl::AddTrackToJsepSession(SdpMediaSection::MediaType type,
if (NS_FAILED(res)) {
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "%s (%s) : pc = %s, error = %s",
CSFLogError(LOGTAG, "%s (%s) : pc = %s, error = %s",
__FUNCTION__,
type == SdpMediaSection::kAudio ? "audio" : "video",
mHandle.c_str(),
@ -1259,7 +1264,7 @@ nsresult
PeerConnectionImpl::InitializeDataChannel()
{
PC_AUTO_ENTER_API_CALL(false);
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
uint32_t channels = 0;
uint16_t localport = 0;
@ -1271,7 +1276,7 @@ PeerConnectionImpl::InitializeDataChannel()
&remotemaxmessagesize, &mmsset, &level);
if (NS_FAILED(rv)) {
CSFLogDebug(logTag, "%s: We did not negotiate datachannel", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s: We did not negotiate datachannel", __FUNCTION__);
return NS_OK;
}
@ -1283,7 +1288,7 @@ PeerConnectionImpl::InitializeDataChannel()
if (NS_SUCCEEDED(rv)) {
// use the specified TransportFlow
RefPtr<TransportFlow> flow = mMedia->GetTransportFlow(level, false).get();
CSFLogDebug(logTag, "Transportflow[%u] = %p",
CSFLogDebug(LOGTAG, "Transportflow[%u] = %p",
static_cast<unsigned>(level), flow.get());
if (flow) {
if (mDataConnection->ConnectViaTransportFlow(flow,
@ -1351,7 +1356,7 @@ PeerConnectionImpl::CreateDataChannel(const nsAString& aLabel,
);
NS_ENSURE_TRUE(dataChannel,NS_ERROR_FAILURE);
CSFLogDebug(logTag, "%s: making DOMDataChannel", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s: making DOMDataChannel", __FUNCTION__);
if (!mHaveDataStream) {
@ -1374,7 +1379,7 @@ PeerConnectionImpl::CreateDataChannel(const nsAString& aLabel,
rv = mJsepSession->AddTrack(track);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to add application track.",
CSFLogError(LOGTAG, "%s: Failed to add application track.",
__FUNCTION__);
return rv;
}
@ -1440,7 +1445,7 @@ PeerConnectionImpl::NotifyDataChannel(already_AddRefed<DataChannel> aChannel)
DataChannel* channel = aChannel.take();
MOZ_ASSERT(channel);
CSFLogDebug(logTag, "%s: channel: %p", __FUNCTION__, channel);
CSFLogDebug(LOGTAG, "%s: channel: %p", __FUNCTION__, channel);
nsCOMPtr<nsIDOMDataChannel> domchannel;
nsresult rv = NS_NewDOMDataChannel(already_AddRefed<DataChannel>(channel),
@ -1519,7 +1524,7 @@ PeerConnectionImpl::CreateOffer(const JsepOfferOptions& aOptions)
return NS_OK;
}
CSFLogDebug(logTag, "CreateOffer()");
CSFLogDebug(LOGTAG, "CreateOffer()");
nsresult nrv;
if (restartIce &&
@ -1538,10 +1543,10 @@ PeerConnectionImpl::CreateOffer(const JsepOfferOptions& aOptions)
FinalizeIceRestart();
}
CSFLogInfo(logTag, "Offerer restarting ice");
CSFLogInfo(LOGTAG, "Offerer restarting ice");
nrv = SetupIceRestart();
if (NS_FAILED(nrv)) {
CSFLogError(logTag, "%s: SetupIceRestart failed, res=%u",
CSFLogError(LOGTAG, "%s: SetupIceRestart failed, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
@ -1550,7 +1555,7 @@ PeerConnectionImpl::CreateOffer(const JsepOfferOptions& aOptions)
nrv = ConfigureJsepSessionCodecs();
if (NS_FAILED(nrv)) {
CSFLogError(logTag, "Failed to configure codecs");
CSFLogError(LOGTAG, "Failed to configure codecs");
return nrv;
}
@ -1571,7 +1576,7 @@ PeerConnectionImpl::CreateOffer(const JsepOfferOptions& aOptions)
}
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "%s: pc = %s, error = %s",
CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnCreateOfferError(error, ObString(errorString.c_str()), rv);
} else {
@ -1592,7 +1597,7 @@ PeerConnectionImpl::CreateAnswer()
return NS_OK;
}
CSFLogDebug(logTag, "CreateAnswer()");
CSFLogDebug(LOGTAG, "CreateAnswer()");
nsresult nrv;
if (mJsepSession->RemoteIceIsRestarting()) {
@ -1600,10 +1605,10 @@ PeerConnectionImpl::CreateAnswer()
PeerConnectionMedia::ICE_RESTART_COMMITTED) {
FinalizeIceRestart();
} else if (!mMedia->IsIceRestarting()) {
CSFLogInfo(logTag, "Answerer restarting ice");
CSFLogInfo(LOGTAG, "Answerer restarting ice");
nrv = SetupIceRestart();
if (NS_FAILED(nrv)) {
CSFLogError(logTag, "%s: SetupIceRestart failed, res=%u",
CSFLogError(LOGTAG, "%s: SetupIceRestart failed, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
@ -1630,7 +1635,7 @@ PeerConnectionImpl::CreateAnswer()
}
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "%s: pc = %s, error = %s",
CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnCreateAnswerError(error, ObString(errorString.c_str()), rv);
} else {
@ -1646,7 +1651,7 @@ nsresult
PeerConnectionImpl::SetupIceRestart()
{
if (mMedia->IsIceRestarting()) {
CSFLogError(logTag, "%s: ICE already restarting",
CSFLogError(LOGTAG, "%s: ICE already restarting",
__FUNCTION__);
return NS_ERROR_UNEXPECTED;
}
@ -1654,7 +1659,7 @@ PeerConnectionImpl::SetupIceRestart()
std::string ufrag = mMedia->ice_ctx()->GetNewUfrag();
std::string pwd = mMedia->ice_ctx()->GetNewPwd();
if (ufrag.empty() || pwd.empty()) {
CSFLogError(logTag, "%s: Bad ICE credentials (ufrag:'%s'/pwd:'%s')",
CSFLogError(LOGTAG, "%s: Bad ICE credentials (ufrag:'%s'/pwd:'%s')",
__FUNCTION__,
ufrag.c_str(), pwd.c_str());
return NS_ERROR_UNEXPECTED;
@ -1667,7 +1672,7 @@ PeerConnectionImpl::SetupIceRestart()
nsresult nrv = mJsepSession->SetIceCredentials(ufrag, pwd);
if (NS_FAILED(nrv)) {
CSFLogError(logTag, "%s: Couldn't set ICE credentials, res=%u",
CSFLogError(LOGTAG, "%s: Couldn't set ICE credentials, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
@ -1684,7 +1689,7 @@ PeerConnectionImpl::RollbackIceRestart()
nsresult nrv = mJsepSession->SetIceCredentials(mPreviousIceUfrag,
mPreviousIcePwd);
if (NS_FAILED(nrv)) {
CSFLogError(logTag, "%s: Couldn't set ICE credentials, res=%u",
CSFLogError(LOGTAG, "%s: Couldn't set ICE credentials, res=%u",
__FUNCTION__,
static_cast<unsigned>(nrv));
return nrv;
@ -1712,7 +1717,7 @@ PeerConnectionImpl::SetLocalDescription(int32_t aAction, const char* aSDP)
PC_AUTO_ENTER_API_CALL(true);
if (!aSDP) {
CSFLogError(logTag, "%s - aSDP is NULL", __FUNCTION__);
CSFLogError(LOGTAG, "%s - aSDP is NULL", __FUNCTION__);
return NS_ERROR_FAILURE;
}
@ -1764,7 +1769,7 @@ PeerConnectionImpl::SetLocalDescription(int32_t aAction, const char* aSDP)
}
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "%s: pc = %s, error = %s",
CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnSetLocalDescriptionError(error, ObString(errorString.c_str()), rv);
} else {
@ -1812,7 +1817,7 @@ static void StartTrack(MediaStream* aSource,
// a track "later".
if (current_end != 0L) {
CSFLogDebug(logTag, "added track @ %u -> %f",
CSFLogDebug(LOGTAG, "added track @ %u -> %f",
static_cast<unsigned>(current_end),
mStream->StreamTimeToSeconds(current_end));
}
@ -1837,7 +1842,7 @@ static void StartTrack(MediaStream* aSource,
aSource->GraphImpl()->AppendMessage(
MakeUnique<Message>(aSource, aTrackId, Move(aSegment)));
CSFLogInfo(logTag, "Dispatched track-add for track id %u on stream %p",
CSFLogInfo(LOGTAG, "Dispatched track-add for track id %u on stream %p",
aTrackId, aSource);
}
@ -1888,7 +1893,7 @@ PeerConnectionImpl::CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco)
return nrv;
}
CSFLogDebug(logTag, "Added remote stream %s", info->GetId().c_str());
CSFLogDebug(LOGTAG, "Added remote stream %s", info->GetId().c_str());
info->GetMediaStream()->AssignId(NS_ConvertUTF8toUTF16(streamId.c_str()));
info->GetMediaStream()->SetLogicalStreamStartTime(
@ -1954,13 +1959,13 @@ PeerConnectionImpl::CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco)
StartTrack(info->GetMediaStream()->GetInputStream()->AsSourceStream(),
trackID, Move(segment));
info->AddTrack(webrtcTrackId, domTrack);
CSFLogDebug(logTag, "Added remote track %s/%s",
CSFLogDebug(LOGTAG, "Added remote track %s/%s",
info->GetId().c_str(), webrtcTrackId.c_str());
domTrack->AssignId(NS_ConvertUTF8toUTF16(webrtcTrackId.c_str()));
aPco->OnAddTrack(*domTrack, streams, jrv);
if (jrv.Failed()) {
CSFLogError(logTag, ": OnAddTrack(%s) failed! Error: %u",
CSFLogError(LOGTAG, ": OnAddTrack(%s) failed! Error: %u",
webrtcTrackId.c_str(),
jrv.ErrorCodeAsInt());
}
@ -1970,7 +1975,7 @@ PeerConnectionImpl::CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco)
if (newStream) {
aPco->OnAddStream(*info->GetMediaStream(), jrv);
if (jrv.Failed()) {
CSFLogError(logTag, ": OnAddStream() failed! Error: %u",
CSFLogError(LOGTAG, ": OnAddStream() failed! Error: %u",
jrv.ErrorCodeAsInt());
}
}
@ -2027,7 +2032,7 @@ PeerConnectionImpl::SetRemoteDescription(int32_t action, const char* aSDP)
PC_AUTO_ENTER_API_CALL(true);
if (!aSDP) {
CSFLogError(logTag, "%s - aSDP is NULL", __FUNCTION__);
CSFLogError(LOGTAG, "%s - aSDP is NULL", __FUNCTION__);
return NS_ERROR_FAILURE;
}
@ -2052,7 +2057,7 @@ PeerConnectionImpl::SetRemoteDescription(int32_t action, const char* aSDP)
nsresult nrv = ConfigureJsepSessionCodecs();
if (NS_FAILED(nrv)) {
CSFLogError(logTag, "Failed to configure codecs");
CSFLogError(LOGTAG, "Failed to configure codecs");
return nrv;
}
}
@ -2095,7 +2100,7 @@ PeerConnectionImpl::SetRemoteDescription(int32_t action, const char* aSDP)
}
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "%s: pc = %s, error = %s",
CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
__FUNCTION__, mHandle.c_str(), errorString.c_str());
pco->OnSetRemoteDescriptionError(error, ObString(errorString.c_str()), jrv);
} else {
@ -2172,7 +2177,7 @@ PeerConnectionImpl::AddIceCandidate(const char* aCandidate, const char* aMid, un
PC_AUTO_ENTER_API_CALL(true);
if (mForceIceTcp && std::string::npos != std::string(aCandidate).find(" UDP ")) {
CSFLogError(logTag, "Blocking remote UDP candidate: %s", aCandidate);
CSFLogError(LOGTAG, "Blocking remote UDP candidate: %s", aCandidate);
return NS_OK;
}
@ -2184,7 +2189,7 @@ PeerConnectionImpl::AddIceCandidate(const char* aCandidate, const char* aMid, un
STAMP_TIMECARD(mTimeCard, "Add Ice Candidate");
CSFLogDebug(logTag, "AddIceCandidate: %s", aCandidate);
CSFLogDebug(LOGTAG, "AddIceCandidate: %s", aCandidate);
// When remote candidates are added before our ICE ctx is up and running
// (the transition to New is async through STS, so this is not impossible),
@ -2226,7 +2231,7 @@ PeerConnectionImpl::AddIceCandidate(const char* aCandidate, const char* aMid, un
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "Failed to incorporate remote candidate into SDP:"
CSFLogError(LOGTAG, "Failed to incorporate remote candidate into SDP:"
" res = %u, candidate = %s, level = %u, error = %s",
static_cast<unsigned>(res),
aCandidate,
@ -2269,7 +2274,7 @@ PeerConnectionImpl::SetPeerIdentity(const nsAString& aPeerIdentity)
mPeerIdentity = new PeerIdentity(aPeerIdentity);
nsIDocument* doc = GetWindow()->GetExtantDoc();
if (!doc) {
CSFLogInfo(logTag, "Can't update principal on streams; document gone");
CSFLogInfo(LOGTAG, "Can't update principal on streams; document gone");
return NS_ERROR_FAILURE;
}
MediaStreamTrack* allTracks = nullptr;
@ -2291,7 +2296,7 @@ PeerConnectionImpl::SetDtlsConnected(bool aPrivacyRequested)
// now we know that privacy isn't needed for sure
nsIDocument* doc = GetWindow()->GetExtantDoc();
if (!doc) {
CSFLogInfo(logTag, "Can't update principal on streams; document gone");
CSFLogInfo(LOGTAG, "Can't update principal on streams; document gone");
return NS_ERROR_FAILURE;
}
mMedia->UpdateRemoteStreamPrincipals_m(doc->NodePrincipal());
@ -2307,7 +2312,7 @@ PeerConnectionImpl::PrincipalChanged(MediaStreamTrack* aTrack) {
if (doc) {
mMedia->UpdateSinkIdentity_m(aTrack, doc->NodePrincipal(), mPeerIdentity);
} else {
CSFLogInfo(logTag, "Can't update sink principal; document gone");
CSFLogInfo(LOGTAG, "Can't update sink principal; document gone");
}
}
@ -2330,7 +2335,7 @@ PeerConnectionImpl::GetStreamId(const DOMMediaStream& aStream)
void
PeerConnectionImpl::OnMediaError(const std::string& aError)
{
CSFLogError(logTag, "Encountered media error! %s", aError.c_str());
CSFLogError(LOGTAG, "Encountered media error! %s", aError.c_str());
// TODO: Let content know about this somehow.
}
@ -2404,7 +2409,7 @@ PeerConnectionImpl::AddTrack(MediaStreamTrack& aTrack,
PC_AUTO_ENTER_API_CALL(true);
if (!aStreams.Length()) {
CSFLogError(logTag, "%s: At least one stream arg required", __FUNCTION__);
CSFLogError(LOGTAG, "%s: At least one stream arg required", __FUNCTION__);
return NS_ERROR_FAILURE;
}
@ -2422,7 +2427,7 @@ PeerConnectionImpl::AddTrack(MediaStreamTrack& aTrack,
return res;
}
CSFLogDebug(logTag, "Added track (%s) to stream %s",
CSFLogDebug(LOGTAG, "Added track (%s) to stream %s",
trackId.c_str(), streamId.c_str());
aTrack.AddPrincipalChangeObserver(this);
@ -2556,7 +2561,7 @@ PeerConnectionImpl::RemoveTrack(MediaStreamTrack& aTrack) {
RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
if (!info) {
CSFLogError(logTag, "%s: Unknown stream", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
return NS_ERROR_INVALID_ARG;
}
@ -2564,7 +2569,7 @@ PeerConnectionImpl::RemoveTrack(MediaStreamTrack& aTrack) {
mJsepSession->RemoveTrack(info->GetId(), trackId);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Unknown stream/track ids %s %s",
CSFLogError(LOGTAG, "%s: Unknown stream/track ids %s %s",
__FUNCTION__,
info->GetId().c_str(),
trackId.c_str());
@ -2713,7 +2718,7 @@ PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
if (&aThisTrack == &aWithTrack) {
pco->OnReplaceTrackSuccess(jrv);
if (jrv.Failed()) {
CSFLogError(logTag, "Error firing replaceTrack success callback");
CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
@ -2729,7 +2734,7 @@ PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
ObString(mJsepSession->GetLastError().c_str()),
jrv);
if (jrv.Failed()) {
CSFLogError(logTag, "Error firing replaceTrack success callback");
CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
@ -2740,7 +2745,7 @@ PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
RefPtr<LocalSourceStreamInfo> info =
media()->GetLocalStreamByTrackId(origTrackId);
if (!info) {
CSFLogError(logTag, "Could not find stream from trackId");
CSFLogError(LOGTAG, "Could not find stream from trackId");
return NS_ERROR_UNEXPECTED;
}
@ -2757,7 +2762,7 @@ PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
ObString(mJsepSession->GetLastError().c_str()),
jrv);
if (jrv.Failed()) {
CSFLogError(logTag, "Error firing replaceTrack error callback");
CSFLogError(LOGTAG, "Error firing replaceTrack error callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
@ -2770,13 +2775,13 @@ PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
newTrackId);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "Unexpected error in ReplaceTrack: %d",
CSFLogError(LOGTAG, "Unexpected error in ReplaceTrack: %d",
static_cast<int>(rv));
pco->OnReplaceTrackError(kInvalidMediastreamTrack,
ObString("Failed to replace track"),
jrv);
if (jrv.Failed()) {
CSFLogError(logTag, "Error firing replaceTrack error callback");
CSFLogError(LOGTAG, "Error firing replaceTrack error callback");
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
@ -2790,13 +2795,13 @@ PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
// TODO: We should probably only do this if the source has in fact changed.
if (NS_FAILED((rv = mMedia->UpdateMediaPipelines(*mJsepSession)))) {
CSFLogError(logTag, "Error Updating MediaPipelines");
CSFLogError(LOGTAG, "Error Updating MediaPipelines");
return rv;
}
pco->OnReplaceTrackSuccess(jrv);
if (jrv.Failed()) {
CSFLogError(logTag, "Error firing replaceTrack success callback");
CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
return NS_ERROR_UNEXPECTED;
}
@ -2833,7 +2838,7 @@ PeerConnectionImpl::SetParameters(
std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
if (!info) {
CSFLogError(logTag, "%s: Unknown stream", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
return NS_ERROR_INVALID_ARG;
}
std::string streamId = info->GetId();
@ -2870,7 +2875,7 @@ PeerConnectionImpl::GetParameters(
std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
if (!info) {
CSFLogError(logTag, "%s: Unknown stream", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
return NS_ERROR_INVALID_ARG;
}
std::string streamId = info->GetId();
@ -2891,7 +2896,7 @@ PeerConnectionImpl::CalculateFingerprint(
&buf[0], sizeof(buf),
&len);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "Unable to calculate certificate fingerprint, rv=%u",
CSFLogError(LOGTAG, "Unable to calculate certificate fingerprint, rv=%u",
static_cast<unsigned>(rv));
return rv;
}
@ -3027,11 +3032,11 @@ PeerConnectionImpl::CheckApiState(bool assert_ice_ready) const
(mIceGatheringState == PCImplIceGatheringState::Complete));
if (IsClosed()) {
CSFLogError(logTag, "%s: called API while closed", __FUNCTION__);
CSFLogError(LOGTAG, "%s: called API while closed", __FUNCTION__);
return NS_ERROR_FAILURE;
}
if (!mMedia) {
CSFLogError(logTag, "%s: called API with disposed mMedia", __FUNCTION__);
CSFLogError(LOGTAG, "%s: called API with disposed mMedia", __FUNCTION__);
return NS_ERROR_FAILURE;
}
return NS_OK;
@ -3040,7 +3045,7 @@ PeerConnectionImpl::CheckApiState(bool assert_ice_ready) const
NS_IMETHODIMP
PeerConnectionImpl::Close()
{
CSFLogDebug(logTag, "%s: for %s", __FUNCTION__, mHandle.c_str());
CSFLogDebug(LOGTAG, "%s: for %s", __FUNCTION__, mHandle.c_str());
PC_AUTO_ENTER_API_CALL_NO_CHECK();
SetSignalingState_m(PCImplSignalingState::SignalingClosed);
@ -3058,7 +3063,7 @@ PeerConnectionImpl::PluginCrash(uint32_t aPluginID,
return false;
}
CSFLogError(logTag, "%s: Our plugin %llu crashed", __FUNCTION__, static_cast<unsigned long long>(aPluginID));
CSFLogError(LOGTAG, "%s: Our plugin %llu crashed", __FUNCTION__, static_cast<unsigned long long>(aPluginID));
nsCOMPtr<nsIDocument> doc = mWindow->GetExtantDoc();
if (!doc) {
@ -3154,13 +3159,13 @@ PeerConnectionImpl::CloseInt()
RecordLongtermICEStatistics();
}
RecordEndOfCallTelemetry();
CSFLogInfo(logTag, "%s: Closing PeerConnectionImpl %s; "
CSFLogInfo(LOGTAG, "%s: Closing PeerConnectionImpl %s; "
"ending call", __FUNCTION__, mHandle.c_str());
if (mJsepSession) {
mJsepSession->Close();
}
if (mDataConnection) {
CSFLogInfo(logTag, "%s: Destroying DataChannelConnection %p for %s",
CSFLogInfo(LOGTAG, "%s: Destroying DataChannelConnection %p for %s",
__FUNCTION__, (void *) mDataConnection.get(), mHandle.c_str());
mDataConnection->Destroy();
mDataConnection = nullptr; // it may not go away until the runnables are dead
@ -3238,7 +3243,7 @@ PeerConnectionImpl::SetSignalingState_m(PCImplSignalingState aSignalingState,
mMedia->ActivateOrRemoveTransports(*mJsepSession, mForceIceTcp);
if (!rollback) {
if (NS_FAILED(mMedia->UpdateMediaPipelines(*mJsepSession))) {
CSFLogError(logTag, "Error Updating MediaPipelines");
CSFLogError(LOGTAG, "Error Updating MediaPipelines");
NS_ASSERTION(false, "Error Updating MediaPipelines in SetSignalingState_m()");
// XXX what now? Not much we can do but keep going, without major restructuring
}
@ -3247,7 +3252,7 @@ PeerConnectionImpl::SetSignalingState_m(PCImplSignalingState aSignalingState,
}
if (!mJsepSession->AllLocalTracksAreAssigned()) {
CSFLogInfo(logTag, "Not all local tracks were assigned to an "
CSFLogInfo(LOGTAG, "Not all local tracks were assigned to an "
"m-section, either because the offerer did not offer"
" to receive enough tracks, or because tracks were "
"added after CreateOffer/Answer, but before "
@ -3410,7 +3415,7 @@ PeerConnectionImpl::CandidateReady(const std::string& candidate,
PC_AUTO_ENTER_API_CALL_VOID_RETURN(false);
if (mForceIceTcp && std::string::npos != candidate.find(" UDP ")) {
CSFLogError(logTag, "Blocking local UDP candidate: %s", candidate.c_str());
CSFLogError(LOGTAG, "Blocking local UDP candidate: %s", candidate.c_str());
return;
}
@ -3424,7 +3429,7 @@ PeerConnectionImpl::CandidateReady(const std::string& candidate,
if (NS_FAILED(res)) {
std::string errorString = mJsepSession->GetLastError();
CSFLogError(logTag, "Failed to incorporate local candidate into SDP:"
CSFLogError(LOGTAG, "Failed to incorporate local candidate into SDP:"
" res = %u, candidate = %s, level = %u, error = %s",
static_cast<unsigned>(res),
candidate.c_str(),
@ -3434,7 +3439,7 @@ PeerConnectionImpl::CandidateReady(const std::string& candidate,
}
if (skipped) {
CSFLogDebug(logTag, "Skipped adding local candidate %s (level %u) to SDP, "
CSFLogDebug(LOGTAG, "Skipped adding local candidate %s (level %u) to SDP, "
"this typically happens because the m-section is "
"bundled, which means it doesn't make sense for it to "
"have its own transport-related attributes.",
@ -3443,7 +3448,7 @@ PeerConnectionImpl::CandidateReady(const std::string& candidate,
return;
}
CSFLogDebug(logTag, "Passing local candidate to content: %s",
CSFLogDebug(LOGTAG, "Passing local candidate to content: %s",
candidate.c_str());
SendLocalIceCandidateToContent(level, mid, candidate);
}
@ -3501,7 +3506,7 @@ void PeerConnectionImpl::IceConnectionStateChange(
NrIceCtx::ConnectionState state) {
PC_AUTO_ENTER_API_CALL_VOID_RETURN(false);
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
auto domState = toDomIceConnectionState(state);
if (domState == mIceConnectionState) {
@ -3588,7 +3593,7 @@ PeerConnectionImpl::IceGatheringStateChange(
{
PC_AUTO_ENTER_API_CALL_VOID_RETURN(false);
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mIceGatheringState = toDomIceGatheringState(state);
@ -3631,7 +3636,7 @@ PeerConnectionImpl::UpdateDefaultCandidate(const std::string& defaultAddr,
const std::string& defaultRtcpAddr,
uint16_t defaultRtcpPort,
uint16_t level) {
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mJsepSession->UpdateDefaultCandidate(defaultAddr,
defaultPort,
defaultRtcpAddr,
@ -3641,7 +3646,7 @@ PeerConnectionImpl::UpdateDefaultCandidate(const std::string& defaultAddr,
void
PeerConnectionImpl::EndOfLocalCandidates(uint16_t level) {
CSFLogDebug(logTag, "%s", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
mJsepSession->EndOfLocalCandidates(level);
}
@ -3655,13 +3660,13 @@ PeerConnectionImpl::BuildStatsQuery_m(
}
if (!mThread) {
CSFLogError(logTag, "Could not build stats query, no MainThread");
CSFLogError(LOGTAG, "Could not build stats query, no MainThread");
return NS_ERROR_UNEXPECTED;
}
nsresult rv = GetTimeSinceEpoch(&(query->now));
if (NS_FAILED(rv)) {
CSFLogError(logTag, "Could not build stats query, could not get timestamp");
CSFLogError(LOGTAG, "Could not build stats query, could not get timestamp");
return rv;
}
@ -3671,7 +3676,7 @@ PeerConnectionImpl::BuildStatsQuery_m(
// accidentally release the Ctx on Mainthread.
query->iceCtx = mMedia->ice_ctx();
if (!query->iceCtx) {
CSFLogError(logTag, "Could not build stats query, no ice_ctx");
CSFLogError(LOGTAG, "Could not build stats query, no ice_ctx");
return NS_ERROR_UNEXPECTED;
}
@ -3772,7 +3777,7 @@ static void RecordIceStats_s(
std::vector<NrIceCandidatePair> candPairs;
nsresult res = mediaStream.GetCandidatePairs(&candPairs);
if (NS_FAILED(res)) {
CSFLogError(logTag, "%s: Error getting candidate pairs", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Error getting candidate pairs", __FUNCTION__);
return;
}
@ -4120,7 +4125,7 @@ void PeerConnectionImpl::DeliverStatsReportToPCObserver_m(
}
if (rv.Failed()) {
CSFLogError(logTag, "Error firing stats observer callback");
CSFLogError(LOGTAG, "Error firing stats observer callback");
}
}
}
@ -4185,7 +4190,7 @@ PeerConnectionImpl::IceStreamReady(NrIceMediaStream *aStream)
PC_AUTO_ENTER_API_CALL_NO_CHECK();
MOZ_ASSERT(aStream);
CSFLogDebug(logTag, "%s: %s", __FUNCTION__, aStream->name().c_str());
CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
}
//Telemetry for when calls start

View File

@ -26,6 +26,11 @@
#include "nsIThread.h"
#include "mozilla/Mutex.h"
// Work around nasty macro in webrtc/voice_engine/voice_engine_defines.h
#ifdef GetLastError
#undef GetLastError
#endif
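This guard exists because, per the comment above, webrtc's voice_engine_defines.h turns GetLastError into a function-like macro; left in place it would rewrite the JsepSession::GetLastError() calls reachable through the headers included below. A rough illustration (the macro body is an assumption, not copied from the tree):

    // Hypothetical expansion:  #define GetLastError() WSAGetLastError()
    // would turn
    //     std::string err = mJsepSession->GetLastError();
    // into
    //     std::string err = mJsepSession->WSAGetLastError();  // does not compile
    // hence the #undef before the JSEP headers are pulled in.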
#include "signaling/src/jsep/JsepSession.h"
#include "signaling/src/jsep/JsepSessionImpl.h"
#include "signaling/src/sdp/SdpMediaSection.h"

View File

@ -56,7 +56,11 @@
namespace mozilla {
using namespace dom;
static const char* logTag = "PeerConnectionMedia";
static const char* pcmLogTag = "PeerConnectionMedia";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG pcmLogTag
//XXX(pkerr) What about bitrate settings? Going with the defaults for now.
RefPtr<WebRtcCallWrapper>
@ -75,7 +79,7 @@ PeerConnectionMedia::ReplaceTrack(const std::string& aOldStreamId,
RefPtr<LocalSourceStreamInfo> oldInfo(GetLocalStreamById(aOldStreamId));
if (!oldInfo) {
CSFLogError(logTag, "Failed to find stream id %s", aOldStreamId.c_str());
CSFLogError(LOGTAG, "Failed to find stream id %s", aOldStreamId.c_str());
return NS_ERROR_NOT_AVAILABLE;
}
@ -86,7 +90,7 @@ PeerConnectionMedia::ReplaceTrack(const std::string& aOldStreamId,
RefPtr<LocalSourceStreamInfo> newInfo(GetLocalStreamById(aNewStreamId));
if (!newInfo) {
CSFLogError(logTag, "Failed to add track id %s", aNewTrackId.c_str());
CSFLogError(LOGTAG, "Failed to add track id %s", aNewTrackId.c_str());
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
@ -181,7 +185,7 @@ PeerConnectionImpl::Constructor(const dom::GlobalObject& aGlobal, ErrorResult& r
{
RefPtr<PeerConnectionImpl> pc = new PeerConnectionImpl(&aGlobal);
CSFLogDebug(logTag, "Created PeerConnection: %p", pc.get());
CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc.get());
return pc.forget();
}
@ -190,7 +194,7 @@ PeerConnectionImpl* PeerConnectionImpl::CreatePeerConnection()
{
PeerConnectionImpl *pc = new PeerConnectionImpl();
CSFLogDebug(logTag, "Created PeerConnection: %p", pc);
CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc);
return pc;
}
@ -206,7 +210,7 @@ OnProxyAvailable(nsICancelable *request,
return NS_OK;
}
CSFLogInfo(logTag, "%s: Proxy Available: %d", __FUNCTION__, (int)result);
CSFLogInfo(LOGTAG, "%s: Proxy Available: %d", __FUNCTION__, (int)result);
if (NS_SUCCEEDED(result) && proxyinfo) {
SetProxyOnPcm(*proxyinfo);
@ -223,20 +227,20 @@ void
PeerConnectionMedia::ProtocolProxyQueryHandler::SetProxyOnPcm(
nsIProxyInfo& proxyinfo)
{
CSFLogInfo(logTag, "%s: Had proxyinfo", __FUNCTION__);
CSFLogInfo(LOGTAG, "%s: Had proxyinfo", __FUNCTION__);
nsresult rv;
nsCString httpsProxyHost;
int32_t httpsProxyPort;
rv = proxyinfo.GetHost(httpsProxyHost);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to get proxy server host", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to get proxy server host", __FUNCTION__);
return;
}
rv = proxyinfo.GetPort(&httpsProxyPort);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to get proxy server port", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to get proxy server port", __FUNCTION__);
return;
}
@ -252,7 +256,7 @@ PeerConnectionMedia::ProtocolProxyQueryHandler::SetProxyOnPcm(
static_cast<uint16_t>(httpsProxyPort),
"webrtc,c-webrtc"));
} else {
CSFLogError(logTag, "%s: Failed to set proxy server (ICE ctx unavailable)",
CSFLogError(LOGTAG, "%s: Failed to set proxy server (ICE ctx unavailable)",
__FUNCTION__);
}
}
@ -263,7 +267,7 @@ void
PeerConnectionMedia::StunAddrsHandler::OnStunAddrsAvailable(
const mozilla::net::NrIceStunAddrArray& addrs)
{
CSFLogInfo(logTag, "%s: receiving (%d) stun addrs", __FUNCTION__,
CSFLogInfo(LOGTAG, "%s: receiving (%d) stun addrs", __FUNCTION__,
(int)addrs.Length());
if (pcm_) {
pcm_->mStunAddrs = addrs;
@ -292,7 +296,7 @@ void
PeerConnectionMedia::InitLocalAddrs()
{
if (XRE_IsContentProcess()) {
CSFLogDebug(logTag, "%s: Get stun addresses via IPC",
CSFLogDebug(LOGTAG, "%s: Get stun addresses via IPC",
mParentHandle.c_str());
nsCOMPtr<nsIEventTarget> target = mParent->GetWindow()
@ -328,7 +332,7 @@ PeerConnectionMedia::InitProxy()
nsCOMPtr<nsIProtocolProxyService> pps =
do_GetService(NS_PROTOCOLPROXYSERVICE_CONTRACTID, &rv);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to get proxy service: %d", __FUNCTION__, (int)rv);
CSFLogError(LOGTAG, "%s: Failed to get proxy service: %d", __FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
@ -338,7 +342,7 @@ PeerConnectionMedia::InitProxy()
nsCOMPtr<nsIURI> fakeHttpsLocation;
rv = NS_NewURI(getter_AddRefs(fakeHttpsLocation), "https://example.com");
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to set URI: %d", __FUNCTION__, (int)rv);
CSFLogError(LOGTAG, "%s: Failed to set URI: %d", __FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
@ -350,7 +354,7 @@ PeerConnectionMedia::InitProxy()
nsIContentPolicy::TYPE_OTHER);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to get channel from URI: %d",
CSFLogError(LOGTAG, "%s: Failed to get channel from URI: %d",
__FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
@ -364,7 +368,7 @@ PeerConnectionMedia::InitProxy()
nsIProtocolProxyService::RESOLVE_ALWAYS_TUNNEL,
handler, target, getter_AddRefs(mProxyRequest));
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: Failed to resolve protocol proxy: %d", __FUNCTION__, (int)rv);
CSFLogError(LOGTAG, "%s: Failed to resolve protocol proxy: %d", __FUNCTION__, (int)rv);
return NS_ERROR_FAILURE;
}
@ -391,31 +395,31 @@ nsresult PeerConnectionMedia::Init(const std::vector<NrIceStunServer>& stun_serv
mParent->GetAllowIceLinkLocal(),
policy);
if(!mIceCtxHdlr) {
CSFLogError(logTag, "%s: Failed to create Ice Context", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to create Ice Context", __FUNCTION__);
return NS_ERROR_FAILURE;
}
if (NS_FAILED(rv = mIceCtxHdlr->ctx()->SetStunServers(stun_servers))) {
CSFLogError(logTag, "%s: Failed to set stun servers", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to set stun servers", __FUNCTION__);
return rv;
}
// Give us a way to globally turn off TURN support
bool disabled = Preferences::GetBool("media.peerconnection.turn.disable", false);
if (!disabled) {
if (NS_FAILED(rv = mIceCtxHdlr->ctx()->SetTurnServers(turn_servers))) {
CSFLogError(logTag, "%s: Failed to set turn servers", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to set turn servers", __FUNCTION__);
return rv;
}
} else if (!turn_servers.empty()) {
CSFLogError(logTag, "%s: Setting turn servers disabled", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Setting turn servers disabled", __FUNCTION__);
}
if (NS_FAILED(rv = mDNSResolver->Init())) {
CSFLogError(logTag, "%s: Failed to initialize dns resolver", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to initialize dns resolver", __FUNCTION__);
return rv;
}
if (NS_FAILED(rv =
mIceCtxHdlr->ctx()->SetResolver(mDNSResolver->AllocateResolver()))) {
CSFLogError(logTag, "%s: Failed to get dns resolver", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Failed to get dns resolver", __FUNCTION__);
return rv;
}
ConnectSignals(mIceCtxHdlr->ctx().get());
@ -449,7 +453,7 @@ PeerConnectionMedia::EnsureTransport_s(size_t aLevel, size_t aComponentCount)
{
RefPtr<NrIceMediaStream> stream(mIceCtxHdlr->ctx()->GetStream(aLevel));
if (!stream) {
CSFLogDebug(logTag, "%s: Creating ICE media stream=%u components=%u",
CSFLogDebug(LOGTAG, "%s: Creating ICE media stream=%u components=%u",
mParentHandle.c_str(),
static_cast<unsigned>(aLevel),
static_cast<unsigned>(aComponentCount));
@ -461,7 +465,7 @@ PeerConnectionMedia::EnsureTransport_s(size_t aLevel, size_t aComponentCount)
aComponentCount);
if (!stream) {
CSFLogError(logTag, "Failed to create ICE stream.");
CSFLogError(LOGTAG, "Failed to create ICE stream.");
return;
}
@ -487,12 +491,12 @@ PeerConnectionMedia::ActivateOrRemoveTransports(const JsepSession& aSession,
if (transport->mComponents) {
MOZ_ASSERT(transport->mIce);
CSFLogDebug(logTag, "Transport %u is active", static_cast<unsigned>(i));
CSFLogDebug(LOGTAG, "Transport %u is active", static_cast<unsigned>(i));
ufrag = transport->mIce->GetUfrag();
pwd = transport->mIce->GetPassword();
candidates = transport->mIce->GetCandidates();
} else {
CSFLogDebug(logTag, "Transport %u is disabled", static_cast<unsigned>(i));
CSFLogDebug(LOGTAG, "Transport %u is disabled", static_cast<unsigned>(i));
// Make sure the MediaPipelineFactory doesn't try to use these.
RemoveTransportFlow(i, false);
RemoveTransportFlow(i, true);
@ -537,7 +541,7 @@ PeerConnectionMedia::ActivateOrRemoveTransport_s(
const std::vector<std::string>& aCandidateList) {
if (!aComponentCount) {
CSFLogDebug(logTag, "%s: Removing ICE media stream=%u",
CSFLogDebug(LOGTAG, "%s: Removing ICE media stream=%u",
mParentHandle.c_str(),
static_cast<unsigned>(aMLine));
mIceCtxHdlr->ctx()->SetStream(aMLine, nullptr);
@ -551,7 +555,7 @@ PeerConnectionMedia::ActivateOrRemoveTransport_s(
}
if (!stream->HasParsedAttributes()) {
CSFLogDebug(logTag, "%s: Activating ICE media stream=%u components=%u",
CSFLogDebug(LOGTAG, "%s: Activating ICE media stream=%u components=%u",
mParentHandle.c_str(),
static_cast<unsigned>(aMLine),
static_cast<unsigned>(aComponentCount));
@ -566,7 +570,7 @@ PeerConnectionMedia::ActivateOrRemoveTransport_s(
nsresult rv = stream->ParseAttributes(attrs);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "Couldn't parse ICE attributes, rv=%u",
CSFLogError(LOGTAG, "Couldn't parse ICE attributes, rv=%u",
static_cast<unsigned>(rv));
}
@ -638,7 +642,7 @@ PeerConnectionMedia::StartIceChecks_s(
bool aIsIceLite,
const std::vector<std::string>& aIceOptionsList) {
CSFLogDebug(logTag, "Starting ICE Checking");
CSFLogDebug(LOGTAG, "Starting ICE Checking");
std::vector<std::string> attributes;
if (aIsIceLite) {
@ -654,7 +658,7 @@ PeerConnectionMedia::StartIceChecks_s(
nsresult rv = mIceCtxHdlr->ctx()->ParseGlobalAttributes(attributes);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "%s: couldn't parse global parameters", __FUNCTION__ );
CSFLogError(LOGTAG, "%s: couldn't parse global parameters", __FUNCTION__ );
}
mIceCtxHdlr->ctx()->SetControlling(aIsControlling ?
@ -875,14 +879,14 @@ PeerConnectionMedia::AddIceCandidate_s(const std::string& aCandidate,
uint32_t aMLine) {
RefPtr<NrIceMediaStream> stream(mIceCtxHdlr->ctx()->GetStream(aMLine));
if (!stream) {
CSFLogError(logTag, "No ICE stream for candidate at level %u: %s",
CSFLogError(LOGTAG, "No ICE stream for candidate at level %u: %s",
static_cast<unsigned>(aMLine), aCandidate.c_str());
return;
}
nsresult rv = stream->ParseTrickleCandidate(aCandidate);
if (NS_FAILED(rv)) {
CSFLogError(logTag, "Couldn't process ICE candidate at level %u",
CSFLogError(LOGTAG, "Couldn't process ICE candidate at level %u",
static_cast<unsigned>(aMLine));
return;
}
@ -985,7 +989,7 @@ PeerConnectionMedia::AddTrack(DOMMediaStream& aMediaStream,
{
ASSERT_ON_THREAD(mMainThread);
CSFLogDebug(logTag, "%s: MediaStream: %p", __FUNCTION__, &aMediaStream);
CSFLogDebug(LOGTAG, "%s: MediaStream: %p", __FUNCTION__, &aMediaStream);
RefPtr<LocalSourceStreamInfo> localSourceStream =
GetLocalStreamById(streamId);
@ -1005,7 +1009,7 @@ PeerConnectionMedia::RemoveLocalTrack(const std::string& streamId,
{
ASSERT_ON_THREAD(mMainThread);
CSFLogDebug(logTag, "%s: stream: %s track: %s", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s: stream: %s track: %s", __FUNCTION__,
streamId.c_str(), trackId.c_str());
RefPtr<LocalSourceStreamInfo> localSourceStream =
@ -1027,7 +1031,7 @@ PeerConnectionMedia::RemoveRemoteTrack(const std::string& streamId,
{
ASSERT_ON_THREAD(mMainThread);
CSFLogDebug(logTag, "%s: stream: %s track: %s", __FUNCTION__,
CSFLogDebug(LOGTAG, "%s: stream: %s track: %s", __FUNCTION__,
streamId.c_str(), trackId.c_str());
RefPtr<RemoteSourceStreamInfo> remoteSourceStream =
@ -1048,7 +1052,7 @@ PeerConnectionMedia::SelfDestruct()
{
ASSERT_ON_THREAD(mMainThread);
CSFLogDebug(logTag, "%s: ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
// Shut down the media
for (uint32_t i=0; i < mLocalSourceStreams.Length(); ++i) {
@ -1074,13 +1078,13 @@ PeerConnectionMedia::SelfDestruct()
this, &PeerConnectionMedia::ShutdownMediaTransport_s),
NS_DISPATCH_NORMAL);
CSFLogDebug(logTag, "%s: Media shut down", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s: Media shut down", __FUNCTION__);
}
void
PeerConnectionMedia::SelfDestruct_m()
{
CSFLogDebug(logTag, "%s: ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
ASSERT_ON_THREAD(mMainThread);
@ -1098,7 +1102,7 @@ PeerConnectionMedia::ShutdownMediaTransport_s()
{
ASSERT_ON_THREAD(mSTSThread);
CSFLogDebug(logTag, "%s: ", __FUNCTION__);
CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
// Here we access m{Local|Remote}SourceStreams off the main thread.
// That's OK because by here PeerConnectionImpl has forgotten about us,
@ -1119,7 +1123,7 @@ PeerConnectionMedia::ShutdownMediaTransport_s()
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
NrIceStats stats = mIceCtxHdlr->Destroy();
CSFLogDebug(logTag, "Ice Telemetry: stun (retransmits: %d)"
CSFLogDebug(LOGTAG, "Ice Telemetry: stun (retransmits: %d)"
" turn (401s: %d 403s: %d 438s: %d)",
stats.stun_retransmits, stats.turn_401s, stats.turn_403s,
stats.turn_438s);
@ -1286,7 +1290,7 @@ PeerConnectionMedia::OnCandidateFound_s(NrIceMediaStream *aStream,
MOZ_ASSERT(aStream);
MOZ_RELEASE_ASSERT(mIceCtxHdlr);
CSFLogDebug(logTag, "%s: %s", __FUNCTION__, aStream->name().c_str());
CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
NrIceCandidate candidate;
NrIceCandidate rtcpCandidate;
@ -1340,7 +1344,7 @@ PeerConnectionMedia::GetDefaultCandidates(const NrIceMediaStream& aStream,
if (NS_FAILED(res)) {
aCandidate->cand_addr.host.clear();
aCandidate->cand_addr.port = 0;
CSFLogError(logTag, "%s: GetDefaultCandidates failed for level %u, "
CSFLogError(LOGTAG, "%s: GetDefaultCandidates failed for level %u, "
"res=%u",
__FUNCTION__,
static_cast<unsigned>(aStream.GetLevel()),
@ -1369,7 +1373,7 @@ PeerConnectionMedia::IceStreamReady_s(NrIceMediaStream *aStream)
{
MOZ_ASSERT(aStream);
CSFLogDebug(logTag, "%s: %s", __FUNCTION__, aStream->name().c_str());
CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
}
void
@ -1473,7 +1477,7 @@ LocalSourceStreamInfo::TakePipelineFrom(RefPtr<LocalSourceStreamInfo>& info,
const std::string& newTrackId)
{
if (mPipelines.count(newTrackId)) {
CSFLogError(logTag, "%s: Pipeline already exists for %s/%s",
CSFLogError(LOGTAG, "%s: Pipeline already exists for %s/%s",
__FUNCTION__, mId.c_str(), newTrackId.c_str());
return NS_ERROR_INVALID_ARG;
}
@ -1483,7 +1487,7 @@ LocalSourceStreamInfo::TakePipelineFrom(RefPtr<LocalSourceStreamInfo>& info,
if (!pipeline) {
// Replacetrack can potentially happen in the middle of offer/answer, before
// the pipeline has been created.
CSFLogInfo(logTag, "%s: Replacing track before the pipeline has been "
CSFLogInfo(LOGTAG, "%s: Replacing track before the pipeline has been "
"created, nothing to do.", __FUNCTION__);
return NS_OK;
}
@ -1611,7 +1615,7 @@ SourceStreamInfo::StorePipeline(
{
MOZ_ASSERT(mPipelines.find(trackId) == mPipelines.end());
if (mPipelines.find(trackId) != mPipelines.end()) {
CSFLogError(logTag, "%s: Storing duplicate track", __FUNCTION__);
CSFLogError(LOGTAG, "%s: Storing duplicate track", __FUNCTION__);
return NS_ERROR_FAILURE;
}
@ -1660,7 +1664,7 @@ RemoteSourceStreamInfo::SyncPipeline(
aPipeline->Conduit() :
i->second->Conduit());
video_conduit->SyncTo(audio_conduit);
CSFLogDebug(logTag, "Syncing %p to %p, %s to %s",
CSFLogDebug(LOGTAG, "Syncing %p to %p, %s to %s",
video_conduit, audio_conduit,
i->first.c_str(), aPipeline->trackid().c_str());
}
@ -1684,7 +1688,7 @@ RemoteSourceStreamInfo::StartReceiving()
// bunch of locking that would be required (and potential blocking)
// if we used smaller values and updated them on each NotifyPull.
source->AdvanceKnownTracksTime(STREAM_TIME_MAX);
CSFLogDebug(logTag, "Finished adding tracks to MediaStream %p", source);
CSFLogDebug(LOGTAG, "Finished adding tracks to MediaStream %p", source);
}
RefPtr<MediaPipeline> SourceStreamInfo::GetPipelineByTrackId_m(

View File

@ -36,7 +36,11 @@
#include "PeerConnectionImpl.h"
#include "webrtc/system_wrappers/include/trace.h"
static const char* logTag = "WebrtcGlobalInformation";
static const char* wgiLogTag = "WebrtcGlobalInformation";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG wgiLogTag
namespace mozilla {
namespace dom {
@ -106,7 +110,7 @@ public:
mCallback.get()->Call(mResult, rv);
if (rv.Failed()) {
CSFLogError(logTag, "Error firing stats observer callback");
CSFLogError(LOGTAG, "Error firing stats observer callback");
}
}
@ -266,7 +270,7 @@ OnStatsReport_m(WebrtcGlobalChild* aThisChild,
StatsRequest* request = StatsRequest::Get(aRequestId);
if (!request) {
CSFLogError(logTag, "Bad RequestId");
CSFLogError(LOGTAG, "Bad RequestId");
return;
}
@ -337,7 +341,7 @@ static void OnGetLogging_m(WebrtcGlobalChild* aThisChild,
LogRequest* request = LogRequest::Get(aRequestId);
if (!request) {
CSFLogError(logTag, "Bad RequestId");
CSFLogError(LOGTAG, "Bad RequestId");
return;
}
@ -716,7 +720,7 @@ WebrtcGlobalParent::RecvGetStatsResult(const int& aRequestId,
StatsRequest* request = StatsRequest::Get(aRequestId);
if (!request) {
CSFLogError(logTag, "Bad RequestId");
CSFLogError(LOGTAG, "Bad RequestId");
return IPC_FAIL_NO_REASON(this);
}
@ -760,7 +764,7 @@ WebrtcGlobalParent::RecvGetLogResult(const int& aRequestId,
LogRequest* request = LogRequest::Get(aRequestId);
if (!request) {
CSFLogError(logTag, "Bad RequestId");
CSFLogError(LOGTAG, "Bad RequestId");
return IPC_FAIL_NO_REASON(this);
}
request->mResult.AppendElements(aLog, fallible);
@ -779,7 +783,7 @@ WebrtcGlobalParent::RecvGetLogResult(const int& aRequestId,
if (NS_FAILED(rv)) {
//Unable to get gecko process log. Return what has been collected.
CSFLogError(logTag, "Unable to extract chrome process log");
CSFLogError(LOGTAG, "Unable to extract chrome process log");
request->Complete();
LogRequest::Delete(aRequestId);
}
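
The #undef/#define dance above exists because these WebRTC signaling sources are being moved into UNIFIED_SOURCES (see the moz.build change that follows): a file-scope `static const char* logTag` defined in several of the .cpp files collides once they are concatenated into one unified translation unit, so each file now keeps a uniquely named tag variable behind a per-file LOGTAG macro. A minimal sketch of that per-file convention, with a hypothetical file and tag name and the usual CSFLog.h include assumed:

// FooModule.cpp -- hypothetical file following the same per-file convention
#include "CSFLog.h"

static const char* fooLogTag = "FooModule";
#ifdef LOGTAG
#undef LOGTAG
#endif
#define LOGTAG fooLogTag

static void DoWork()
{
  // Call sites use the LOGTAG macro, so a unified build only ever sees
  // uniquely named tag variables (fooLogTag, wgiLogTag, ...) per file.
  CSFLogDebug(LOGTAG, "%s: doing work", __FUNCTION__);
}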

View File

@ -20,8 +20,7 @@ LOCAL_INCLUDES += [
'/media/webrtc/trunk',
]
# Multiple uses of logTag
SOURCES += [
UNIFIED_SOURCES += [
'MediaPipelineFactory.cpp',
'MediaStreamList.cpp',
'PacketDumper.cpp',

View File

@ -28,9 +28,9 @@ NS_INTERFACE_MAP_BEGIN(PartiallySeekableInputStream)
NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, nsIInputStream)
NS_INTERFACE_MAP_END
PartiallySeekableInputStream::PartiallySeekableInputStream(nsIInputStream* aInputStream,
PartiallySeekableInputStream::PartiallySeekableInputStream(already_AddRefed<nsIInputStream> aInputStream,
uint64_t aBufferSize)
: mInputStream(aInputStream)
: mInputStream(Move(aInputStream))
, mWeakCloneableInputStream(nullptr)
, mWeakIPCSerializableInputStream(nullptr)
, mWeakAsyncInputStream(nullptr)
@ -38,29 +38,29 @@ PartiallySeekableInputStream::PartiallySeekableInputStream(nsIInputStream* aInpu
, mPos(0)
, mClosed(false)
{
MOZ_ASSERT(aInputStream);
MOZ_ASSERT(mInputStream);
#ifdef DEBUG
nsCOMPtr<nsISeekableStream> seekableStream = do_QueryInterface(aInputStream);
nsCOMPtr<nsISeekableStream> seekableStream = do_QueryInterface(mInputStream);
MOZ_ASSERT(!seekableStream);
#endif
nsCOMPtr<nsICloneableInputStream> cloneableStream =
do_QueryInterface(aInputStream);
if (cloneableStream && SameCOMIdentity(aInputStream, cloneableStream)) {
do_QueryInterface(mInputStream);
if (cloneableStream && SameCOMIdentity(mInputStream, cloneableStream)) {
mWeakCloneableInputStream = cloneableStream;
}
nsCOMPtr<nsIIPCSerializableInputStream> serializableStream =
do_QueryInterface(aInputStream);
do_QueryInterface(mInputStream);
if (serializableStream &&
SameCOMIdentity(aInputStream, serializableStream)) {
SameCOMIdentity(mInputStream, serializableStream)) {
mWeakIPCSerializableInputStream = serializableStream;
}
nsCOMPtr<nsIAsyncInputStream> asyncInputStream =
do_QueryInterface(aInputStream);
if (asyncInputStream && SameCOMIdentity(aInputStream, asyncInputStream)) {
do_QueryInterface(mInputStream);
if (asyncInputStream && SameCOMIdentity(mInputStream, asyncInputStream)) {
mWeakAsyncInputStream = asyncInputStream;
}
}
@ -183,7 +183,7 @@ PartiallySeekableInputStream::Clone(nsIInputStream** aResult)
}
nsCOMPtr<nsIInputStream> stream =
new PartiallySeekableInputStream(clonedStream, mBufferSize);
new PartiallySeekableInputStream(clonedStream.forget(), mBufferSize);
stream.forget(aResult);
return NS_OK;

View File

@ -17,6 +17,7 @@ namespace mozilla {
namespace net {
// A wrapper for making a stream seekable for the first |aBufferSize| bytes.
// Note that this object takes ownership of the underlying stream.
class PartiallySeekableInputStream final : public nsISeekableStream
, public nsIAsyncInputStream
@ -33,7 +34,7 @@ public:
NS_DECL_NSIIPCSERIALIZABLEINPUTSTREAM
NS_DECL_NSIINPUTSTREAMCALLBACK
explicit PartiallySeekableInputStream(nsIInputStream* aInputStream,
explicit PartiallySeekableInputStream(already_AddRefed<nsIInputStream> aInputStream,
uint64_t aBufferSize = 4096);
private:
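
With the constructor now taking already_AddRefed<nsIInputStream>, a caller transfers its reference explicitly via forget(), matching the Clone(), HttpBaseChannel, and gtest call sites changed in this merge. A minimal sketch of the new calling convention; the source stream and its helper are hypothetical:

// Hypothetical caller handing ownership of a non-seekable stream to the wrapper.
nsCOMPtr<nsIInputStream> raw = CreateNonSeekableStream();  // assumed helper
nsCOMPtr<nsIInputStream> seekable =
    new PartiallySeekableInputStream(raw.forget(), 4096);
// |raw| is now null; the wrapper owns the stream and buffers the first
// 4096 bytes so they can be re-read after an nsISeekableStream::Seek().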

View File

@ -38,6 +38,18 @@ interface nsICacheInfoChannel : nsISupports
*/
boolean isFromCache();
/**
* The unique ID of the corresponding nsICacheEntry from which the response is
* retrieved. By comparing the returned value, we can judge whether the data
* of two distinct nsICacheInfoChannels is from the same nsICacheEntry. This
* scenario could be useful when verifying whether the alternative data from
* one nsICacheInfoChannel matches the main data from another one.
*
* Note: NS_ERROR_NOT_AVAILABLE is thrown when an nsICacheInfoChannel has no
* valid corresponding nsICacheEntry.
*/
uint64_t getCacheEntryId();
/**
* Set/get the cache key... uniquely identifies the data in the cache
* for this channel. Holding a reference to this key does NOT prevent

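A hedged C++ sketch of how a consumer might compare the new IDs to decide whether two channels were served from the same cache entry; channelA and channelB are assumed to be nsIChannel pointers already in scope, and error handling is reduced to the NS_SUCCEEDED checks:

// getCacheEntryId() fails with NS_ERROR_NOT_AVAILABLE unless the response
// actually came from a valid cache entry, so both calls must succeed before
// the IDs are comparable.
uint64_t idA = 0;
uint64_t idB = 0;
nsCOMPtr<nsICacheInfoChannel> infoA = do_QueryInterface(channelA);
nsCOMPtr<nsICacheInfoChannel> infoB = do_QueryInterface(channelB);
if (infoA && infoB &&
    NS_SUCCEEDED(infoA->GetCacheEntryId(&idA)) &&
    NS_SUCCEEDED(infoB->GetCacheEntryId(&idB)) &&
    idA == idB) {
  // e.g. channelA's alternative data and channelB's main data were read
  // from the same nsICacheEntry.
}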
View File

@ -191,6 +191,13 @@ NS_IMPL_ISUPPORTS(CacheEntry,
nsIRunnable,
CacheFileListener)
/* static */
uint64_t CacheEntry::GetNextId()
{
static Atomic<uint64_t, Relaxed> id(0);
return ++id;
}
CacheEntry::CacheEntry(const nsACString& aStorageID,
const nsACString& aURI,
const nsACString& aEnhanceID,
@ -217,6 +224,7 @@ CacheEntry::CacheEntry(const nsACString& aStorageID,
, mWriter(nullptr)
, mPredictedDataSize(0)
, mUseCount(0)
, mCacheEntryId(GetNextId())
{
LOG(("CacheEntry::CacheEntry [this=%p]", this));
@ -1048,6 +1056,12 @@ NS_IMETHODIMP CacheEntry::GetKey(nsACString & aKey)
return NS_OK;
}
NS_IMETHODIMP CacheEntry::GetCacheEntryId(uint64_t *aCacheEntryId)
{
*aCacheEntryId = mCacheEntryId;
return NS_OK;
}
NS_IMETHODIMP CacheEntry::GetFetchCount(int32_t *aFetchCount)
{
NS_ENSURE_SUCCESS(mFileStatus, NS_ERROR_NOT_AVAILABLE);

View File

@ -54,6 +54,8 @@ public:
NS_DECL_NSICACHEENTRY
NS_DECL_NSIRUNNABLE
static uint64_t GetNextId();
CacheEntry(const nsACString& aStorageID, const nsACString& aURI, const nsACString& aEnhanceID,
bool aUseDisk, bool aSkipSizeCheck, bool aPin);
@ -383,6 +385,8 @@ private:
int64_t mPredictedDataSize;
mozilla::TimeStamp mLoadStart;
uint32_t mUseCount;
const uint64_t mCacheEntryId;
};

View File

@ -346,13 +346,13 @@ _OldGetDiskConsumption::VisitEntry(const char * deviceID,
// _OldCacheEntryWrapper
_OldCacheEntryWrapper::_OldCacheEntryWrapper(nsICacheEntryDescriptor* desc)
: mOldDesc(desc), mOldInfo(desc)
: mOldDesc(desc), mOldInfo(desc), mCacheEntryId(CacheEntry::GetNextId())
{
LOG(("Creating _OldCacheEntryWrapper %p for descriptor %p", this, desc));
}
_OldCacheEntryWrapper::_OldCacheEntryWrapper(nsICacheEntryInfo* info)
: mOldDesc(nullptr), mOldInfo(info)
: mOldDesc(nullptr), mOldInfo(info), mCacheEntryId(CacheEntry::GetNextId())
{
LOG(("Creating _OldCacheEntryWrapper %p for info %p", this, info));
}

View File

@ -115,6 +115,11 @@ public:
{
return mOldInfo->GetKey(aKey);
}
NS_IMETHOD GetCacheEntryId(uint64_t *aCacheEntryId) override
{
*aCacheEntryId = mCacheEntryId;
return NS_OK;
}
NS_IMETHOD GetFetchCount(int32_t *aFetchCount) override
{
return mOldInfo->GetFetchCount(aFetchCount);
@ -176,6 +181,8 @@ private:
_OldCacheEntryWrapper() = delete;
nsICacheEntryDescriptor* mOldDesc; // ref held in mOldInfo
nsCOMPtr<nsICacheEntryInfo> mOldInfo;
const uint64_t mCacheEntryId;
};

View File

@ -27,6 +27,12 @@ interface nsICacheEntry : nsISupports
*/
readonly attribute ACString key;
/**
* The unique ID for every nsICacheEntry instance, which can be used to check
* whether two pieces of information are from the same nsICacheEntry instance.
*/
readonly attribute uint64_t cacheEntryId;
/**
* Whether the entry is memory-only or persisted to disk.
* Note: private browsing entries are reported as persistent for consistency

View File

@ -1068,7 +1068,8 @@ HttpBaseChannel::ExplicitSetUploadStream(nsIInputStream *aStream,
nsCOMPtr<nsISeekableStream> seekable = do_QueryInterface(aStream);
if (!seekable) {
aStream = new PartiallySeekableInputStream(aStream);
nsCOMPtr<nsIInputStream> stream = aStream;
aStream = new PartiallySeekableInputStream(stream.forget());
}
mUploadStream = aStream;

View File

@ -165,6 +165,7 @@ HttpChannelChild::HttpChannelChild()
, mSynthesizedStreamLength(0)
, mIsFromCache(false)
, mCacheEntryAvailable(false)
, mCacheEntryId(0)
, mAltDataCacheEntryAvailable(false)
, mCacheFetchCount(0)
, mCacheExpirationTime(nsICacheEntry::NO_EXPIRATION_TIME)
@ -413,6 +414,7 @@ class StartRequestEvent : public NeckoTargetChannelEvent<HttpChannelChild>
const nsHttpHeaderArray& aRequestHeaders,
const bool& aIsFromCache,
const bool& aCacheEntryAvailable,
const uint64_t& aCacheEntryId,
const int32_t& aCacheFetchCount,
const uint32_t& aCacheExpirationTime,
const nsCString& aCachedCharset,
@ -429,6 +431,7 @@ class StartRequestEvent : public NeckoTargetChannelEvent<HttpChannelChild>
, mUseResponseHead(aUseResponseHead)
, mIsFromCache(aIsFromCache)
, mCacheEntryAvailable(aCacheEntryAvailable)
, mCacheEntryId(aCacheEntryId)
, mCacheFetchCount(aCacheFetchCount)
, mCacheExpirationTime(aCacheExpirationTime)
, mCachedCharset(aCachedCharset)
@ -445,7 +448,8 @@ class StartRequestEvent : public NeckoTargetChannelEvent<HttpChannelChild>
LOG(("StartRequestEvent [this=%p]\n", mChild));
mChild->OnStartRequest(mChannelStatus, mResponseHead, mUseResponseHead,
mRequestHeaders, mIsFromCache, mCacheEntryAvailable,
mCacheFetchCount, mCacheExpirationTime, mCachedCharset,
mCacheEntryId, mCacheFetchCount,
mCacheExpirationTime, mCachedCharset,
mSecurityInfoSerialization, mSelfAddr, mPeerAddr,
mCacheKey, mAltDataType, mAltDataLen);
}
@ -457,6 +461,7 @@ class StartRequestEvent : public NeckoTargetChannelEvent<HttpChannelChild>
bool mUseResponseHead;
bool mIsFromCache;
bool mCacheEntryAvailable;
uint64_t mCacheEntryId;
int32_t mCacheFetchCount;
uint32_t mCacheExpirationTime;
nsCString mCachedCharset;
@ -475,6 +480,7 @@ HttpChannelChild::RecvOnStartRequest(const nsresult& channelStatus,
const nsHttpHeaderArray& requestHeaders,
const bool& isFromCache,
const bool& cacheEntryAvailable,
const uint64_t& cacheEntryId,
const int32_t& cacheFetchCount,
const uint32_t& cacheExpirationTime,
const nsCString& cachedCharset,
@ -500,7 +506,7 @@ HttpChannelChild::RecvOnStartRequest(const nsresult& channelStatus,
mEventQ->RunOrEnqueue(new StartRequestEvent(this, channelStatus, responseHead,
useResponseHead, requestHeaders,
isFromCache, cacheEntryAvailable,
cacheFetchCount,
cacheEntryId, cacheFetchCount,
cacheExpirationTime, cachedCharset,
securityInfoSerialization,
selfAddr, peerAddr, cacheKey,
@ -535,6 +541,7 @@ HttpChannelChild::OnStartRequest(const nsresult& channelStatus,
const nsHttpHeaderArray& requestHeaders,
const bool& isFromCache,
const bool& cacheEntryAvailable,
const uint64_t& cacheEntryId,
const int32_t& cacheFetchCount,
const uint32_t& cacheExpirationTime,
const nsCString& cachedCharset,
@ -568,6 +575,7 @@ HttpChannelChild::OnStartRequest(const nsresult& channelStatus,
mIsFromCache = isFromCache;
mCacheEntryAvailable = cacheEntryAvailable;
mCacheEntryId = cacheEntryId;
mCacheFetchCount = cacheFetchCount;
mCacheExpirationTime = cacheExpirationTime;
mCachedCharset = cachedCharset;
@ -2846,6 +2854,20 @@ HttpChannelChild::IsFromCache(bool *value)
return NS_OK;
}
NS_IMETHODIMP
HttpChannelChild::GetCacheEntryId(uint64_t *aCacheEntryId)
{
bool fromCache = false;
if (NS_FAILED(IsFromCache(&fromCache)) ||
!fromCache ||
!mCacheEntryAvailable) {
return NS_ERROR_NOT_AVAILABLE;
}
*aCacheEntryId = mCacheEntryId;
return NS_OK;
}
NS_IMETHODIMP
HttpChannelChild::GetCacheKey(nsISupports **cacheKey)
{

View File

@ -131,6 +131,7 @@ protected:
const nsHttpHeaderArray& requestHeaders,
const bool& isFromCache,
const bool& cacheEntryAvailable,
const uint64_t& cacheEntryId,
const int32_t& cacheFetchCount,
const uint32_t& cacheExpirationTime,
const nsCString& cachedCharset,
@ -289,6 +290,7 @@ private:
bool mIsFromCache;
bool mCacheEntryAvailable;
uint64_t mCacheEntryId;
bool mAltDataCacheEntryAvailable;
int32_t mCacheFetchCount;
uint32_t mCacheExpirationTime;
@ -385,6 +387,7 @@ private:
const nsHttpHeaderArray& requestHeaders,
const bool& isFromCache,
const bool& cacheEntryAvailable,
const uint64_t& cacheEntryId,
const int32_t& cacheFetchCount,
const uint32_t& cacheExpirationTime,
const nsCString& cachedCharset,

View File

@ -1443,6 +1443,7 @@ HttpChannelParent::OnStartRequest(nsIRequest *aRequest, nsISupports *aContext)
nsHttpResponseHead *responseHead = chan->GetResponseHead();
nsHttpRequestHead *requestHead = chan->GetRequestHead();
bool isFromCache = false;
uint64_t cacheEntryId = 0;
int32_t fetchCount = 0;
uint32_t expirationTime = nsICacheEntry::NO_EXPIRATION_TIME;
nsCString cachedCharset;
@ -1451,6 +1452,7 @@ HttpChannelParent::OnStartRequest(nsIRequest *aRequest, nsISupports *aContext)
if (httpChannelImpl) {
httpChannelImpl->IsFromCache(&isFromCache);
httpChannelImpl->GetCacheEntryId(&cacheEntryId);
httpChannelImpl->GetCacheTokenFetchCount(&fetchCount);
httpChannelImpl->GetCacheTokenExpirationTime(&expirationTime);
httpChannelImpl->GetCacheTokenCachedCharset(cachedCharset);
@ -1528,6 +1530,7 @@ HttpChannelParent::OnStartRequest(nsIRequest *aRequest, nsISupports *aContext)
requestHead->Headers(),
isFromCache,
mCacheEntry ? true : false,
cacheEntryId,
fetchCount, expirationTime,
cachedCharset, secInfoSerialization,
chan->GetSelfAddr(), chan->GetPeerAddr(),

View File

@ -99,6 +99,7 @@ child:
nsHttpHeaderArray requestHeaders,
bool isFromCache,
bool cacheEntryAvailable,
uint64_t cacheEntryId,
int32_t cacheFetchCount,
uint32_t cacheExpirationTime,
nsCString cachedCharset,

View File

@ -7851,6 +7851,18 @@ nsHttpChannel::IsFromCache(bool *value)
return NS_OK;
}
NS_IMETHODIMP
nsHttpChannel::GetCacheEntryId(uint64_t *aCacheEntryId)
{
bool fromCache = false;
if (NS_FAILED(IsFromCache(&fromCache)) || !fromCache || !mCacheEntry ||
NS_FAILED(mCacheEntry->GetCacheEntryId(aCacheEntryId))) {
return NS_ERROR_NOT_AVAILABLE;
}
return NS_OK;
}
NS_IMETHODIMP
nsHttpChannel::GetCacheTokenFetchCount(int32_t *_retval)
{

View File

@ -69,7 +69,7 @@ CreateStream(uint32_t aSize, uint64_t aStreamSize, nsCString& aBuffer)
}
RefPtr<NonSeekableStream> stream = new NonSeekableStream(aBuffer);
return new PartiallySeekableInputStream(stream, aStreamSize);
return new PartiallySeekableInputStream(stream.forget(), aStreamSize);
}
// Simple reading.

View File

@ -53,6 +53,7 @@ function ChannelListener(closure, ctx, flags) {
this._closurectx = ctx;
this._flags = flags;
this._isFromCache = false;
this._cacheEntryId = undefined;
}
ChannelListener.prototype = {
_closure: null,
@ -79,9 +80,20 @@ ChannelListener.prototype = {
this._lastEvent = Date.now();
try {
this._isFromCache = request.QueryInterface(Ci.nsICachingChannel).isFromCache();
this._isFromCache = request.QueryInterface(Ci.nsICacheInfoChannel).isFromCache();
} catch (e) {}
var thrown = false;
try {
this._cacheEntryId = request.QueryInterface(Ci.nsICacheInfoChannel).getCacheEntryId();
} catch (e) {
thrown = true;
}
if (this._isFromCache && thrown)
do_throw("Should get a CacheEntryId");
else if (!this._isFromCache && !thrown)
do_throw("Shouldn't get a CacheEntryId");
request.QueryInterface(Components.interfaces.nsIChannel);
try {
this._contentLen = request.contentLength;
@ -171,7 +183,11 @@ ChannelListener.prototype = {
do_throw("Error in onStopRequest: " + ex);
}
try {
this._closure(request, this._buffer, this._closurectx, this._isFromCache);
this._closure(request,
this._buffer,
this._closurectx,
this._isFromCache,
this._cacheEntryId);
this._closurectx = null;
} catch (ex) {
do_throw("Error in closure function: " + ex);

View File

@ -0,0 +1,138 @@
/**
* Test for the "CacheEntryId" under several cases.
*/
Cu.import("resource://testing-common/httpd.js");
Cu.import("resource://gre/modules/NetUtil.jsm");
Cu.import("resource://gre/modules/Services.jsm");
XPCOMUtils.defineLazyGetter(this, "URL", function() {
return "http://localhost:" + httpServer.identity.primaryPort + "/content";
});
var httpServer = null;
const responseContent = "response body";
const responseContent2 = "response body 2";
const altContent = "!@#$%^&*()";
const altContentType = "text/binary";
var handlers = [
(m, r) => {r.bodyOutputStream.write(responseContent, responseContent.length)},
(m, r) => {r.setStatusLine(m.httpVersion, 304, "Not Modified")},
(m, r) => {r.setStatusLine(m.httpVersion, 304, "Not Modified")},
(m, r) => {r.setStatusLine(m.httpVersion, 304, "Not Modified")},
(m, r) => {r.setStatusLine(m.httpVersion, 304, "Not Modified")},
(m, r) => {r.bodyOutputStream.write(responseContent2, responseContent2.length)},
(m, r) => {r.setStatusLine(m.httpVersion, 304, "Not Modified")},
];
function contentHandler(metadata, response)
{
response.setHeader("Content-Type", "text/plain");
response.setHeader("Cache-Control", "no-cache");
var handler = handlers.shift();
if (handler) {
handler(metadata, response);
return;
}
do_check_true(false, "Should not reach here.");
}
function fetch(preferredDataType = null)
{
return new Promise(resolve => {
var chan = NetUtil.newChannel({uri: URL, loadUsingSystemPrincipal: true});
if (preferredDataType) {
var cc = chan.QueryInterface(Ci.nsICacheInfoChannel);
cc.preferAlternativeDataType(altContentType);
}
chan.asyncOpen2(new ChannelListener((request,
buffer,
ctx,
isFromCache,
cacheEntryId) => {
resolve({request, buffer, isFromCache, cacheEntryId});
}, null));
});
}
function check(response, content, preferredDataType, isFromCache, cacheEntryIdChecker)
{
var cc = response.request.QueryInterface(Ci.nsICacheInfoChannel);
do_check_eq(response.buffer, content);
do_check_eq(cc.alternativeDataType, preferredDataType);
do_check_eq(response.isFromCache, isFromCache);
do_check_true(!cacheEntryIdChecker || cacheEntryIdChecker(response.cacheEntryId));
return response;
}
function writeAltData(request)
{
var cc = request.QueryInterface(Ci.nsICacheInfoChannel);
var os = cc.openAlternativeOutputStream(altContentType);
os.write(altContent, altContent.length);
os.close();
gc(); // We need to do a GC pass to ensure the cache entry has been freed.
return new Promise(resolve => {
Services.cache2.QueryInterface(Ci.nsICacheTesting)
.flush(resolve);
});
}
function run_test()
{
do_get_profile();
httpServer = new HttpServer();
httpServer.registerPathHandler("/content", contentHandler);
httpServer.start(-1);
do_test_pending();
var targetCacheEntryId = null;
return Promise.resolve()
// Setup testing environment: Placing alternative data into HTTP cache.
.then(_ => fetch(altContentType))
.then(r => check(r, responseContent, "", false,
cacheEntryId => cacheEntryId === undefined))
.then(r => writeAltData(r.request))
// Start testing.
.then(_ => fetch(altContentType))
.then(r => check(r, altContent, altContentType, true,
cacheEntryId => cacheEntryId !== undefined))
.then(r => targetCacheEntryId = r.cacheEntryId)
.then(_ => fetch())
.then(r => check(r, responseContent, "", true,
cacheEntryId => cacheEntryId === targetCacheEntryId))
.then(_ => fetch(altContentType))
.then(r => check(r, altContent, altContentType, true,
cacheEntryId => cacheEntryId === targetCacheEntryId))
.then(_ => fetch())
.then(r => check(r, responseContent, "", true,
cacheEntryId => cacheEntryId === targetCacheEntryId))
.then(_ => fetch()) // The response is changed here.
.then(r => check(r, responseContent2, "", false,
cacheEntryId => cacheEntryId === undefined))
.then(_ => fetch())
.then(r => check(r, responseContent2, "", true,
cacheEntryId => cacheEntryId !== undefined &&
cacheEntryId !== targetCacheEntryId))
// Tear down.
.catch(e => do_check_true(false, "Unexpected exception: " + e))
.then(_ => do_check_eq(handlers.length, 0))
.then(_ => httpServer.stop(do_test_finished));
}

View File

@ -180,6 +180,7 @@ skip-if = bits != 32
[test_doomentry.js]
[test_cacheflags.js]
[test_cache_jar.js]
[test_cache-entry-id.js]
[test_channel_close.js]
[test_compareURIs.js]
[test_compressappend.js]

View File

@ -0,0 +1,3 @@
function run_test() {
run_test_in_child("../unit/test_cache-entry-id.js");
}

View File

@ -6,6 +6,7 @@ support-files =
!/netwerk/test/unit/test_XHR_redirects.js
!/netwerk/test/unit/test_bug248970_cookie.js
!/netwerk/test/unit/test_bug528292.js
!/netwerk/test/unit/test_cache-entry-id.js
!/netwerk/test/unit/test_cache_jar.js
!/netwerk/test/unit/test_cacheflags.js
!/netwerk/test/unit/test_channel_close.js
@ -61,6 +62,7 @@ support-files =
[test_bug528292_wrap.js]
[test_bug248970_cookie_wrap.js]
[test_cacheflags_wrap.js]
[test_cache-entry-id_wrap.js]
[test_cache_jar_wrap.js]
[test_channel_close_wrap.js]
[test_cookie_header_wrap.js]

View File

@ -314,7 +314,7 @@ linux-rusttests/opt:
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
max-run-time: 3600
max-run-time: 5400
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
run:
@ -347,7 +347,7 @@ linux-rusttests/debug:
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
max-run-time: 3600
max-run-time: 5400
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
run:
@ -640,7 +640,7 @@ linux64-rusttests/opt:
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
max-run-time: 3600
max-run-time: 5400
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
run:
@ -673,7 +673,7 @@ linux64-rusttests/debug:
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
max-run-time: 3600
max-run-time: 5400
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
run:

View File

@ -33,7 +33,7 @@ jobs:
symbol: SM-tc(H)
worker:
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
run:
using: hazard
command: >
@ -46,6 +46,8 @@ jobs:
toolchains:
- linux64-clang
- linux64-gcc-4.9
- linux64-sixgill
- linux64-rust
linux64-haz/debug:
description: "Browser Hazard Analysis Linux"
@ -57,7 +59,7 @@ jobs:
symbol: tc(H)
worker:
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
run:
using: hazard
mozconfig: "browser/config/mozconfigs/linux64/hazards"
@ -67,3 +69,5 @@ jobs:
toolchains:
- linux64-clang
- linux64-gcc-4.9
- linux64-sixgill
- linux64-rust

View File

@ -109,7 +109,7 @@ qr-talos:
- talos-g3
- talos-g4
- talos-g5
# - talos-other # fails with layers-free
- talos-other
# - talos-svgr # fails with layers-free
- talos-tp5o
- talos-perf-reftest

View File

@ -438,7 +438,7 @@ mochitest:
by-test-platform:
linux64-jsdcov/opt: xlarge
android.*: xlarge
default: legacy # Bug 1281241: migrating to m3.large instances
default: default
chunks:
by-test-platform:
android-4.3-arm7-api-16/debug: 48
@ -616,7 +616,6 @@ mochitest-browser-screenshots:
- remove_executables.py
extra-options:
- --mochitest-suite=browser-chrome-screenshots
instance-size: legacy
allow-software-gl-layers: false
mochitest-chrome:
@ -2176,7 +2175,7 @@ xpcshell:
instance-size:
by-test-platform:
android.*: xlarge
default: legacy # Bug 1281241: migrating to m3.large instances
default: default
max-run-time: 5400
e10s: false
allow-software-gl-layers: false

View File

@ -110,6 +110,27 @@ linux64-gcc-6:
toolchain-alias: linux64-gcc
toolchain-artifact: public/build/gcc.tar.xz
linux64-sixgill:
description: "sixgill GCC plugin build"
treeherder:
kind: build
platform: toolchains/opt
symbol: TL(sixgill)
tier: 1
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
docker-image: {in-tree: desktop-build}
max-run-time: 3600
run:
using: toolchain-script
script: build-gcc-sixgill-plugin-linux.sh
resources:
- 'build/unix/build-gcc/build-gcc.sh'
- 'taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh'
toolchain-artifact: public/build/sixgill.tar.xz
toolchains:
- linux64-gcc-4.9
linux64-binutils:
description: "Binutils toolchain build"
treeherder:

View File

@ -146,6 +146,7 @@ function check_hazards () {
NUM_HAZARDS=$(grep -c 'Function.*has unrooted.*live across GC call' "$1"/rootingHazards.txt)
NUM_UNSAFE=$(grep -c '^Function.*takes unsafe address of unrooted' "$1"/refs.txt)
NUM_UNNECESSARY=$(grep -c '^Function.* has unnecessary root' "$1"/unnecessary.txt)
NUM_DROPPED=$(grep -c '^Dropped CFG' "$1"/build_xgill.log)
NUM_WRITE_HAZARDS=$(perl -lne 'print $1 if m!found (\d+)/\d+ allowed errors!' "$1"/heapWriteHazards.txt)
set +x
@ -154,17 +155,29 @@ function check_hazards () {
echo "TinderboxPrint: (unnecessary roots)<br/>$NUM_UNNECESSARY"
echo "TinderboxPrint: heap write hazards<br/>$NUM_WRITE_HAZARDS"
exit_status=0
if [ $NUM_HAZARDS -gt 0 ]; then
echo "TEST-UNEXPECTED-FAIL $NUM_HAZARDS rooting hazards detected" >&2
echo "TinderboxPrint: documentation<br/><a href='https://wiki.mozilla.org/Javascript:Hazard_Builds#Diagnosing_a_rooting_hazards_failure'>static rooting hazard analysis failures</a>, visit \"Inspect Task\" link for hazard details"
exit 1
exit_status=1
fi
NUM_ALLOWED_WRITE_HAZARDS=0
if [ $NUM_WRITE_HAZARDS -gt $NUM_ALLOWED_WRITE_HAZARDS ]; then
echo "TEST-UNEXPECTED-FAIL $NUM_WRITE_HAZARDS heap write hazards detected out of $NUM_ALLOWED_WRITE_HAZARDS allowed" >&2
echo "TinderboxPrint: documentation<br/><a href='https://wiki.mozilla.org/Javascript:Hazard_Builds#Diagnosing_a_heap_write_hazard_failure'>heap write hazard analysis failures</a>, visit \"Inspect Task\" link for hazard details"
exit 1
exit_status=1
fi
if [ $NUM_DROPPED -gt 0 ]; then
echo "TEST-UNEXPECTED-FAIL $NUM_DROPPED CFGs dropped" >&2
echo "TinderboxPrint: sixgill unable to handle constructs<br/>$NUM_DROPPED"
exit_status=1
fi
if [ $exit_status -ne 0 ]; then
exit $exit_status
fi
)
}

View File

@ -0,0 +1,105 @@
#!/bin/bash
set -e
set -x
# This script is for building the sixgill GCC plugin for Linux. It relies on
# the gcc checkout because it needs to recompile gmp and the gcc build script
# determines the version of gmp to download.
WORKSPACE=$HOME/workspace
HOME_DIR=$WORKSPACE/build
UPLOAD_DIR=$HOME/artifacts
root_dir=$HOME_DIR
build_dir=$HOME_DIR/src/build
data_dir=$HOME_DIR/src/build/unix/build-gcc
# Download and unpack upstream toolchain artifacts (ie, the gcc binary).
. $(dirname $0)/tooltool-download.sh
gcc_version=4.9.4
gcc_ext=bz2
binutils_version=2.25.1
binutils_ext=bz2
sixgill_rev=59b74c2e21bd
sixgill_repo=https://hg.mozilla.org/users/sfink_mozilla.com/sixgill
. $data_dir/build-gcc.sh
# GPG key used to sign GCC
$GPG --import $data_dir/13975A70E63C361C73AE69EF6EEB81F8981C74C7.key
# GPG key used to sign binutils
$GPG --import $data_dir/EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key
# GPG key used to sign GMP
$GPG --import $data_dir/343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key
# GPG key used to sign MPFR
$GPG --import $data_dir/07F3DBBECC1A39605078094D980C197698C3739D.key
# GPG key used to sign MPC
$GPG --import $data_dir/AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key
cat > $HOME_DIR/checksums <<EOF
b5b14added7d78a8d1ca70b5cb75fef57ce2197264f4f5835326b0df22ac9f22 binutils-2.25.1.tar.bz2
02500a4edd14875f94fe84cbeda4290425cb0c1c2474c6f75d75a303d64b4196 cloog-0.18.1.tar.gz
6c11d292cd01b294f9f84c9a59c230d80e9e4a47e5c6355f046bb36d4f358092 gcc-4.9.4.tar.bz2
752079520b4690531171d0f4532e40f08600215feefede70b24fabdc6f1ab160 gmp-5.1.3.tar.bz2
f4b3dbee9712850006e44f0db2103441ab3d13b406f77996d1df19ee89d11fb4 isl-0.12.2.tar.bz2
ae79f8d41d8a86456b68607e9ca398d00f8b7342d1d83bcf4428178ac45380c7 mpc-0.8.2.tar.gz
ca498c1c7a74dd37a576f353312d1e68d490978de4395fa28f1cbd46a364e658 mpfr-3.1.5.tar.bz2
EOF
# Download GCC + related, and unpack.
prepare
export TMPDIR=${TMPDIR:-/tmp/}
export gcc_bindir=$root_dir/src/gcc/bin
export gmp_prefix=/tools/gmp
export gmp_dir=$root_dir$gmp_prefix
prepare_sixgill() {(
cd $root_dir
hg clone -r $sixgill_rev $sixgill_repo || ( cd sixgill && hg update -r $sixgill_rev )
)}
build_gmp() {
if ! [ -x $gcc_bindir/gcc ]; then
echo "GCC not found in $gcc_bindir/gcc" >&2
exit 1
fi
# The sixgill plugin uses some gmp symbols, including some not exported by
# cc1/cc1plus. So link the plugin statically to libgmp. Except that the
# default static build does not have -fPIC, and will result in a relocation
# error, so build our own. This requires the gcc and related source to be
# in $root_dir/gcc-$gcc_version.
mkdir $root_dir/gmp-objdir || true
(
cd $root_dir/gmp-objdir
$root_dir/gcc-$gcc_version/gmp/configure --disable-shared --with-pic --prefix=$gmp_prefix
make -j8
make install DESTDIR=$root_dir
)
}
build_sixgill() {(
cd $root_dir/sixgill
export CC=$gcc_bindir/gcc
export CXX=$gcc_bindir/g++
export TARGET_CC=$CC
export CPPFLAGS=-I$gmp_dir/include
export EXTRA_LDFLAGS=-L$gmp_dir/lib
export HOST_CFLAGS=$CPPFLAGS
./release.sh --build-and-package --with-gmp=$gmp_dir
tarball=$(ls -td *-sixgill | head -1)/sixgill.tar.xz
cp $tarball $root_dir/sixgill.tar.xz
)}
prepare_sixgill
build_gmp
build_sixgill
# Put a tarball in the artifacts dir
mkdir -p $UPLOAD_DIR
cp $HOME_DIR/sixgill.tar.* $UPLOAD_DIR

View File

@ -291,7 +291,7 @@ function removeProfile(profile) {
}
}
profile.remove(deleteFiles);
profile.removeInBackground(deleteFiles);
ProfileService.flush();
refreshUI();
}

View File

@ -74,6 +74,12 @@ interface nsIToolkitProfile : nsISupports
*/
void remove(in boolean removeFiles);
/**
* Removes the profile from the registry of profiles.
* The profile directory is removed in the stream transport thread.
*/
void removeInBackground();
/**
* Lock this profile using platform-specific locking methods.
*

View File

@ -32,8 +32,11 @@
#endif
#include "nsAppDirectoryServiceDefs.h"
#include "nsNetCID.h"
#include "nsXULAppAPI.h"
#include "nsThreadUtils.h"
#include "nsIRunnable.h"
#include "nsINIParser.h"
#include "nsXREDirProvider.h"
#include "nsAppRunner.h"
@ -64,6 +67,9 @@ private:
nsToolkitProfile* aPrev,
bool aForExternalApp);
nsresult
RemoveInternal(bool aRemoveFiles, bool aInBackground);
friend class nsToolkitProfileLock;
nsCString mName;
@ -223,8 +229,8 @@ nsToolkitProfile::SetName(const nsACString& aName)
return NS_OK;
}
NS_IMETHODIMP
nsToolkitProfile::Remove(bool removeFiles)
nsresult
nsToolkitProfile::RemoveInternal(bool aRemoveFiles, bool aInBackground)
{
NS_ASSERTION(nsToolkitProfileService::gService,
"Whoa, my service is gone.");
@ -237,18 +243,32 @@ nsToolkitProfile::Remove(bool removeFiles)
if (!mPrev && !mNext && nsToolkitProfileService::gService->mFirst != this)
return NS_ERROR_NOT_INITIALIZED;
if (removeFiles) {
bool equals;
nsresult rv = mRootDir->Equals(mLocalDir, &equals);
if (NS_FAILED(rv))
return rv;
if (aRemoveFiles) {
nsCOMPtr<nsIFile> rootDir(mRootDir);
nsCOMPtr<nsIFile> localDir(mLocalDir);
// The root dir might contain the temp dir, so remove
// the temp dir first.
if (!equals)
mLocalDir->Remove(true);
nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
"nsToolkitProfile::RemoveInternal",
[rootDir, localDir]() {
bool equals;
nsresult rv = rootDir->Equals(localDir, &equals);
// The root dir might contain the temp dir, so remove
// the temp dir first.
if (NS_SUCCEEDED(rv) && !equals) {
localDir->Remove(true);
}
mRootDir->Remove(true);
rootDir->Remove(true);
}
);
if (aInBackground) {
nsCOMPtr<nsIEventTarget> target =
do_GetService(NS_STREAMTRANSPORTSERVICE_CONTRACTID);
target->Dispatch(runnable, NS_DISPATCH_NORMAL);
} else {
runnable->Run();
}
}
if (mPrev)
@ -270,6 +290,18 @@ nsToolkitProfile::Remove(bool removeFiles)
return NS_OK;
}
NS_IMETHODIMP
nsToolkitProfile::Remove(bool removeFiles)
{
return RemoveInternal(removeFiles, false /* in background */);
}
NS_IMETHODIMP
nsToolkitProfile::RemoveInBackground()
{
return RemoveInternal(true /* remove Files */, true /* in background */);
}
NS_IMETHODIMP
nsToolkitProfile::Lock(nsIProfileUnlocker* *aUnlocker, nsIProfileLock* *aResult)
{

View File

@ -596,31 +596,19 @@ PuppetWidget::GetLayerManager(PLayerTransactionChild* aShadowManager,
return mLayerManager;
}
if (mTabChild && !mTabChild->IsLayersConnected()) {
// If we know for sure that the parent side of this TabChild is not
// connected to the compositor, we don't want to use a "remote" layer
// manager like WebRender or Client. Instead we use a Basic one which
// can do drawing in this process.
mLayerManager = new BasicLayerManager(this);
} else if (gfxVars::UseWebRender()) {
MOZ_ASSERT(!aShadowManager);
mLayerManager = new WebRenderLayerManager(this);
} else {
mLayerManager = new ClientLayerManager(this);
}
}
// Attach a shadow forwarder if none exists.
ShadowLayerForwarder* lf = mLayerManager->AsShadowForwarder();
if (lf && !lf->HasShadowManager() && aShadowManager) {
lf->SetShadowManager(aShadowManager);
// If we know for sure that the parent side of this TabChild is not
// connected to the compositor, we don't want to use a "remote" layer
// manager like WebRender or Client. Instead we use a Basic one which
// can do drawing in this process.
MOZ_ASSERT(!mTabChild || mTabChild->IsLayersConnected() != Some(true));
mLayerManager = new BasicLayerManager(this);
}
return mLayerManager;
}
bool
PuppetWidget::RecreateLayerManager(const std::function<bool(LayerManager*)>& aInitializeFunc)
PuppetWidget::CreateRemoteLayerManager(const std::function<bool(LayerManager*)>& aInitializeFunc)
{
RefPtr<LayerManager> lm;
MOZ_ASSERT(mTabChild);
@ -1053,7 +1041,9 @@ PuppetWidget::Paint()
#endif
if (mLayerManager->GetBackendType() == mozilla::layers::LayersBackend::LAYERS_CLIENT ||
mLayerManager->GetBackendType() == mozilla::layers::LayersBackend::LAYERS_WR) {
mLayerManager->GetBackendType() == mozilla::layers::LayersBackend::LAYERS_WR ||
(mozilla::layers::LayersBackend::LAYERS_BASIC == mLayerManager->GetBackendType() &&
mTabChild && mTabChild->IsLayersConnected().isSome())) {
// Do nothing, the compositor will handle drawing
if (mTabChild) {
mTabChild->NotifyPainted();

Some files were not shown because too many files have changed in this diff.