Bug 964127: Add a/v sync status to about:webrtc. r=jesup

Jan-Ivar Bruaroey 2014-03-12 17:13:20 -04:00
parent 48635b58d5
commit d72fff5a22
8 changed files with 63 additions and 8 deletions

View File

@@ -38,6 +38,8 @@ dictionary RTCInboundRTPStreamStats : RTCRTPStreamStats {
unsigned long long bytesReceived;
double jitter;
unsigned long packetsLost;
+long mozAvSyncDelay;
+long mozJitterBufferDelay;
};
dictionary RTCOutboundRTPStreamStats : RTCRTPStreamStats {
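For orientation only (not part of the patch): a minimal sketch of how a stats consumer could read the two new moz-prefixed fields, assuming a promise-returning getStats() and an existing RTCPeerConnection named pc. In this patch the fields are only populated for audio inbound streams when internal stats are requested (as about:webrtc does), so they may be undefined elsewhere.

  // Illustrative sketch; "pc" is an assumed RTCPeerConnection.
  pc.getStats().then(function(report) {
    report.forEach(function(stat) {
      // Same guard the about:webrtc code below uses.
      if (stat.mozAvSyncDelay !== undefined) {
        console.log("A/V sync: " + stat.mozAvSyncDelay + " ms, " +
                    "jitter-buffer delay: " + stat.mozJitterBufferDelay + " ms");
      }
    });
  });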

View File

@@ -127,6 +127,15 @@ bool WebrtcAudioConduit::GetRemoteSSRC(unsigned int* ssrc) {
return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc);
}
+bool WebrtcAudioConduit::GetAVStats(int32_t* jitterBufferDelayMs,
+int32_t* playoutBufferDelayMs,
+int32_t* avSyncOffsetMs) {
+return !mPtrVoEVideoSync->GetDelayEstimate(mChannel,
+jitterBufferDelayMs,
+playoutBufferDelayMs,
+avSyncOffsetMs);
+}
bool WebrtcAudioConduit::GetRTPStats(unsigned int* jitterMs,
unsigned int* cumulativeLost) {
unsigned int maxJitterMs = 0;
@@ -693,10 +702,9 @@ WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
int jitter_buffer_delay_ms = 0;
int playout_buffer_delay_ms = 0;
int avsync_offset_ms = 0;
-mPtrVoEVideoSync->GetDelayEstimate(mChannel,
-&jitter_buffer_delay_ms,
-&playout_buffer_delay_ms,
-&avsync_offset_ms); // ignore errors
+GetAVStats(&jitter_buffer_delay_ms,
+&playout_buffer_delay_ms,
+&avsync_offset_ms); // ignore errors
CSFLogError(logTag,
"A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);

View File

@@ -180,6 +180,9 @@ public:
webrtc::VoiceEngine* GetVoiceEngine() { return mVoiceEngine; }
bool GetLocalSSRC(unsigned int* ssrc);
bool GetRemoteSSRC(unsigned int* ssrc);
+bool GetAVStats(int32_t* jitterBufferDelayMs,
+int32_t* playoutBufferDelayMs,
+int32_t* avSyncOffsetMs);
bool GetRTPStats(unsigned int* jitterMs, unsigned int* cumulativeLost);
bool GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
unsigned int* jitterMs,

View File

@@ -143,6 +143,9 @@ public:
/**
* Functions returning stats needed by w3c stats model.
*/
+virtual bool GetAVStats(int32_t* jitterBufferDelayMs,
+int32_t* playoutBufferDelayMs,
+int32_t* avSyncOffsetMs) = 0;
virtual bool GetRTPStats(unsigned int* jitterMs,
unsigned int* cumulativeLost) = 0;
virtual bool GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,

View File

@@ -131,6 +131,12 @@ bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) {
return !mPtrRTP->GetRemoteSSRC(mChannel, *ssrc);
}
+bool WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
+int32_t* playoutBufferDelayMs,
+int32_t* avSyncOffsetMs) {
+return false;
+}
bool WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
unsigned int* cumulativeLost) {
unsigned int ntpHigh, ntpLow;

View File

@@ -205,6 +205,9 @@ public:
webrtc::VideoEngine* GetVideoEngine() { return mVideoEngine; }
bool GetLocalSSRC(unsigned int* ssrc);
bool GetRemoteSSRC(unsigned int* ssrc);
+bool GetAVStats(int32_t* jitterBufferDelayMs,
+int32_t* playoutBufferDelayMs,
+int32_t* avSyncOffsetMs);
bool GetRTPStats(unsigned int* jitterMs, unsigned int* cumulativeLost);
bool GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
unsigned int* jitterMs,

View File

@@ -2129,7 +2129,8 @@ PeerConnectionImpl::ExecuteStatsQuery_s(RTCStatsQuery *query) {
for (size_t p = 0; p < query->pipelines.Length(); ++p) {
const MediaPipeline& mp = *query->pipelines[p];
-nsString idstr = (mp.Conduit()->type() == MediaSessionConduit::AUDIO) ?
+bool isAudio = (mp.Conduit()->type() == MediaSessionConduit::AUDIO);
+nsString idstr = isAudio ?
NS_LITERAL_STRING("audio_") : NS_LITERAL_STRING("video_");
idstr.AppendInt(mp.trackid());
@@ -2238,6 +2239,18 @@ PeerConnectionImpl::ExecuteStatsQuery_s(RTCStatsQuery *query) {
s.mIsRemote = false;
s.mPacketsReceived.Construct(mp.rtp_packets_received());
s.mBytesReceived.Construct(mp.rtp_bytes_received());
+if (query->internalStats && isAudio) {
+int32_t jitterBufferDelay;
+int32_t playoutBufferDelay;
+int32_t avSyncDelta;
+if (mp.Conduit()->GetAVStats(&jitterBufferDelay,
+&playoutBufferDelay,
+&avSyncDelta)) {
+s.mMozJitterBufferDelay.Construct(jitterBufferDelay);
+s.mMozAvSyncDelay.Construct(avSyncDelta);
+}
+}
query->report.mInboundRTPStreamStats.Value().AppendElement(s);
break;
}
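For illustration (not part of the patch): with internal stats requested and an audio pipeline, the inbound RTP stats entry carries the two new fields alongside the existing ones. A hypothetical entry, with made-up values and other RTCInboundRTPStreamStats members omitted, might look like:

  // Hypothetical values, shown only to illustrate the added fields.
  {
    isRemote: false,
    packetsReceived: 1234,
    bytesReceived: 98765,
    mozJitterBufferDelay: 60,  // ms, from GetAVStats()
    mozAvSyncDelay: -12        // ms, A/V sync offset
  }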

View File

@@ -135,7 +135,20 @@ function round00(num) {
return Math.round(num * 100) / 100;
}
-function dumpStat(stat, label) {
+function dumpAvStat(stat) {
+var div = document.createElement('div');
+var statsString = "";
+if (stat.mozAvSyncDelay !== undefined) {
+statsString += "A/V sync: " + stat.mozAvSyncDelay + " ms ";
+}
+if (stat.mozJitterBufferDelay !== undefined) {
+statsString += "Jitter-buffer delay: " + stat.mozJitterBufferDelay + " ms";
+}
+div.appendChild(document.createTextNode(statsString));
+return div;
+}
+function dumpRtpStat(stat, label) {
var div = document.createElement('div');
var statsString = " " + label + new Date(stat.timestamp).toTimeString() +
" " + stat.type + " SSRC: " + stat.ssrc;
@@ -251,13 +264,17 @@ function buildPcDiv(stats, pcDivHeading) {
if (!rtpStat.isRemote) {
newPcDiv.appendChild(document.createElement('h5'))
.appendChild(document.createTextNode(rtpStat.id));
-newPcDiv.appendChild(dumpStat(rtpStat, "Local: "));
+if (rtpStat.mozAvSyncDelay !== undefined ||
+rtpStat.mozJitterBufferDelay !== undefined) {
+newPcDiv.appendChild(dumpAvStat(rtpStat));
+}
+newPcDiv.appendChild(dumpRtpStat(rtpStat, "Local: "));
// Might not be receiving RTCP, so we have no idea what the
// statistics look like from the perspective of the other end.
if (rtpStat.remoteId) {
var remoteRtpStat = remoteRtpStatsMap[rtpStat.remoteId];
-newPcDiv.appendChild(dumpStat(remoteRtpStat, "Remote: "));
+newPcDiv.appendChild(dumpRtpStat(remoteRtpStat, "Remote: "));
}
}
}
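As a quick sanity check of the new helper (not part of the patch; hypothetical values, assuming the about:webrtc page context where dumpAvStat() and document are in scope):

  // Build a fake stats object and render it with the new helper.
  var fakeStat = { mozAvSyncDelay: -12, mozJitterBufferDelay: 60 };
  document.body.appendChild(dumpAvStat(fakeStat));
  // Appends a div reading: "A/V sync: -12 ms Jitter-buffer delay: 60 ms"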