/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaStreamGraphImpl.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/unused.h"

#include "AudioSegment.h"
#include "VideoSegment.h"
#include "nsContentUtils.h"
#include "nsIAppShell.h"
#include "nsIObserver.h"
#include "nsPrintfCString.h"
#include "nsServiceManagerUtils.h"
#include "nsWidgetsCID.h"
#include "prerror.h"
#include "prlog.h"
#include "mozilla/Attributes.h"
#include "TrackUnionStream.h"
#include "ImageContainer.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioNodeExternalInputStream.h"
#include <algorithm>
#include "DOMMediaStream.h"
#include "GeckoProfiler.h"
#ifdef MOZ_WEBRTC
#include "AudioOutputObserver.h"
#endif

#include "webaudio/blink/HRTFDatabaseLoader.h"

using namespace mozilla::layers;
using namespace mozilla::dom;
using namespace mozilla::gfx;

namespace mozilla {

#ifdef PR_LOGGING
PRLogModuleInfo* gMediaStreamGraphLog;
#define STREAM_LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
#else
#define STREAM_LOG(type, msg)
#endif

// #define ENABLE_LIFECYCLE_LOG

// We don't use NSPR log here because we want this interleaved with adb logcat
// on Android/B2G
#ifdef ENABLE_LIFECYCLE_LOG
# ifdef ANDROID
# include "android/log.h"
# define LIFECYCLE_LOG(...) __android_log_print(ANDROID_LOG_INFO, "Gecko - MSG", ## __VA_ARGS__); printf(__VA_ARGS__);printf("\n");
# else
# define LIFECYCLE_LOG(...) printf(__VA_ARGS__);printf("\n");
# endif
#else
# define LIFECYCLE_LOG(...)
#endif

/**
 * A hashtable of the graph instances, keyed by a 32-bit identifier.
 */
static nsDataHashtable<nsUint32HashKey, MediaStreamGraphImpl*> gGraphs;

MediaStreamGraphImpl::~MediaStreamGraphImpl()
{
  NS_ASSERTION(IsEmpty(),
               "All streams should have been destroyed by messages from the main thread");
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
  LIFECYCLE_LOG("MediaStreamGraphImpl::~MediaStreamGraphImpl\n");
}
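
// Returns the StreamTime up to which a SourceMediaStream should keep data
// buffered: the stream's current playout position plus a safety margin that
// gives media decoders time to wake up and produce more samples.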
StreamTime
MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream)
{
  StreamTime current = IterationEnd() - aStream->mBufferStartTime;
  // When waking up media decoders, we need a longer safety margin, as it can
  // take more time to get new samples. A factor of two seems to work.
  return current +
    2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS));
}

void
MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
{
  if (aStream->mFinished)
    return;
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
  aStream->mFinished = true;
  aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
  // Force at least one more iteration of the control loop, since we rely
  // on UpdateCurrentTimeForStreams to notify our listeners once the stream end
  // has been reached.
  EnsureNextIteration();

  SetStreamOrderDirty();
}

void
MediaStreamGraphImpl::AddStream(MediaStream* aStream)
{
  aStream->mBufferStartTime = IterationEnd();
  mStreams.AppendElement(aStream);
  STREAM_LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));

  SetStreamOrderDirty();
}

void
MediaStreamGraphImpl::RemoveStream(MediaStream* aStream)
{
  // Remove references in mStreamUpdates before we allow aStream to die.
  // Pending updates are not needed (since the main thread has already given
  // up the stream) so we will just drop them.
  {
    MonitorAutoLock lock(mMonitor);
    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
      if (mStreamUpdates[i].mStream == aStream) {
        mStreamUpdates[i].mStream = nullptr;
      }
    }
  }

  // Ensure that mFirstCycleBreaker and mMixer are updated when necessary.
  SetStreamOrderDirty();

  mStreams.RemoveElement(aStream);
  NS_RELEASE(aStream); // probably destroying it

  STREAM_LOG(PR_LOG_DEBUG, ("Removing media stream %p from the graph", aStream));
}

void
MediaStreamGraphImpl::UpdateConsumptionState(SourceMediaStream* aStream)
{
  MediaStreamListener::Consumption state =
      aStream->mIsConsumed ? MediaStreamListener::CONSUMED
      : MediaStreamListener::NOT_CONSUMED;
  if (state != aStream->mLastConsumptionState) {
    aStream->mLastConsumptionState = state;
    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
      MediaStreamListener* l = aStream->mListeners[j];
      l->NotifyConsumptionChanged(this, state);
    }
  }
}
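
// Pulls any data queued on aStream by its producer threads into the graph:
// calls NotifyPull on listeners when more data is needed, applies pending
// track creation/append/end commands to the stream's buffer, and fires
// NotifyQueuedTrackChanges for each change.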
void
MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
                                          GraphTime aDesiredUpToTime,
                                          bool* aEnsureNextIteration)
{
  bool finished;
  {
    MutexAutoLock lock(aStream->mMutex);
    if (aStream->mPullEnabled && !aStream->mFinished &&
        !aStream->mListeners.IsEmpty()) {
      // Compute how much stream time we'll need assuming we don't block
      // the stream at all between the current state-computed time and
      // aDesiredUpToTime.
      StreamTime t =
        GraphTimeToStreamTime(aStream, CurrentDriver()->StateComputedTime()) +
        (aDesiredUpToTime - CurrentDriver()->StateComputedTime());
      STREAM_LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
                                  MediaTimeToSeconds(t),
                                  MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
      if (t > aStream->mBuffer.GetEnd()) {
        *aEnsureNextIteration = true;
#ifdef DEBUG
        if (aStream->mListeners.Length() == 0) {
          STREAM_LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
                                    aStream, MediaTimeToSeconds(t),
                                    MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
          aStream->DumpTrackInfo();
        }
#endif
        for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
          MediaStreamListener* l = aStream->mListeners[j];
          {
            MutexAutoUnlock unlock(aStream->mMutex);
            l->NotifyPull(this, t);
          }
        }
      }
    }
    finished = aStream->mUpdateFinished;
    for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      aStream->ApplyTrackDisabling(data->mID, data->mData);
      for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
        MediaStreamListener* l = aStream->mListeners[j];
        StreamTime offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
        l->NotifyQueuedTrackChanges(this, data->mID,
                                    offset, data->mCommands, *data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        MediaSegment* segment = data->mData.forget();
        STREAM_LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, start %lld, initial end %lld",
                                  aStream, data->mID, int64_t(data->mStart),
                                  int64_t(segment->GetDuration())));

        data->mEndOfFlushedData += segment->GetDuration();
        aStream->mBuffer.AddTrack(data->mID, data->mStart, segment);
        // The track has taken ownership of data->mData, so let's replace
        // data->mData with an empty clone.
        data->mData = segment->CreateEmptyClone();
        data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
      } else if (data->mData->GetDuration() > 0) {
        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
        STREAM_LOG(PR_LOG_DEBUG+1, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
                                    aStream, data->mID,
                                    int64_t(dest->GetDuration()),
                                    int64_t(dest->GetDuration() + data->mData->GetDuration())));
        data->mEndOfFlushedData += data->mData->GetDuration();
        dest->AppendFrom(data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
        aStream->mUpdateTracks.RemoveElementAt(i);
      }
    }
    if (!aStream->mFinished) {
      aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
    }
  }
  if (aStream->mBuffer.GetEnd() > 0) {
    aStream->mHasCurrentData = true;
  }
  if (finished) {
    FinishStream(aStream);
  }
}
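
// Recomputes, for each pending track on aStream, whether enough data is
// buffered (relative to GetDesiredBufferEnd) and dispatches any runnables
// that were registered to run when a track falls short.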
void
MediaStreamGraphImpl::UpdateBufferSufficiencyState(SourceMediaStream* aStream)
{
  StreamTime desiredEnd = GetDesiredBufferEnd(aStream);
  nsTArray<SourceMediaStream::ThreadAndRunnable> runnables;

  {
    MutexAutoLock lock(aStream->mMutex);
    for (uint32_t i = 0; i < aStream->mUpdateTracks.Length(); ++i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        // This track hasn't been created yet, so we have no sufficiency
        // data. The track will be created in the next iteration of the
        // control loop and then we'll fire insufficiency notifications
        // if necessary.
        continue;
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        // This track will end, so no point in firing not-enough-data
        // callbacks.
        continue;
      }
      StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
      // Note that track->IsEnded() must be false, otherwise we would have
      // removed the track from mUpdateTracks already.
      NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
      data->mHaveEnough = track->GetEnd() >= desiredEnd;
      if (!data->mHaveEnough) {
        runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
      }
    }
  }

  for (uint32_t i = 0; i < runnables.Length(); ++i) {
    runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
  }
}
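
// Converts a GraphTime to the corresponding StreamTime for aStream by
// subtracting the stream's start time and skipping over intervals during
// which the stream was blocked.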
StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
                                            GraphTime aTime)
{
  MOZ_ASSERT(aTime <= CurrentDriver()->StateComputedTime(),
             "Don't ask about times where we haven't made blocking decisions yet");
  if (aTime <= IterationEnd()) {
    return std::max<StreamTime>(0, aTime - aStream->mBufferStartTime);
  }
  GraphTime t = IterationEnd();
  StreamTime s = t - aStream->mBufferStartTime;
  while (t < aTime) {
    GraphTime end;
    if (!aStream->mBlocked.GetAt(t, &end)) {
      s += std::min(aTime, end) - t;
    }
    t = end;
  }
  return std::max<StreamTime>(0, s);
}

StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream,
                                                      GraphTime aTime)
{
  GraphTime computedUpToTime = std::min(CurrentDriver()->StateComputedTime(), aTime);
  StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime);
  return s + (aTime - computedUpToTime);
}
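
// Converts a StreamTime for aStream back to a GraphTime, walking forward
// from the end of the current iteration and skipping blocked intervals until
// the requested amount of stream time has been consumed.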
GraphTime
MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
                                            StreamTime aTime, uint32_t aFlags)
{
  if (aTime >= STREAM_TIME_MAX) {
    return GRAPH_TIME_MAX;
  }
  MediaTime bufferElapsedToCurrentTime = IterationEnd() - aStream->mBufferStartTime;
  if (aTime < bufferElapsedToCurrentTime ||
      (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
    return aTime + aStream->mBufferStartTime;
  }

  MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
  NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");

  GraphTime t = IterationEnd();
  while (t < GRAPH_TIME_MAX) {
    if (!(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL) && streamAmount == 0) {
      return t;
    }
    bool blocked;
    GraphTime end;
    if (t < CurrentDriver()->StateComputedTime()) {
      blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, CurrentDriver()->StateComputedTime());
    } else {
      blocked = false;
      end = GRAPH_TIME_MAX;
    }
    if (blocked) {
      t = end;
    } else {
      if (streamAmount == 0) {
        // No more stream time to consume at time t, so we're done.
        break;
      }
      MediaTime consume = std::min(end - t, streamAmount);
      streamAmount -= consume;
      t += consume;
    }
  }
  return t;
}

GraphTime
MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
{
  /* This is correlated to the audio clock when using an AudioCallbackDriver,
   * and uses a system timer otherwise. */
  return IterationEnd();
}

GraphTime
MediaStreamGraphImpl::IterationEnd()
{
  return CurrentDriver()->IterationEnd();
}
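
// Called when the graph's current time has advanced from aPrevCurrentTime to
// aNextCurrentTime: advances each stream's time-varying state and fires
// blocking-changed, output and finished notifications as appropriate.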
void
MediaStreamGraphImpl::UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime, GraphTime aNextCurrentTime)
{
  nsTArray<MediaStream*> streamsReadyToFinish;
  nsAutoTArray<bool,800> streamHasOutput;
  streamHasOutput.SetLength(mStreams.Length());
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];

    // Calculate blocked time and fire Blocked/Unblocked events
    GraphTime blockedTime = 0;
    GraphTime t = aPrevCurrentTime;
    // include |aNextCurrentTime| to ensure NotifyBlockingChanged() is called
    // before NotifyEvent(this, EVENT_FINISHED) when |aNextCurrentTime == stream end time|
    while (t <= aNextCurrentTime) {
      GraphTime end;
      bool blocked = stream->mBlocked.GetAt(t, &end);
      if (blocked) {
        blockedTime += std::min(end, aNextCurrentTime) - t;
      }
      if (blocked != stream->mNotifiedBlocked) {
        for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
          MediaStreamListener* l = stream->mListeners[j];
          l->NotifyBlockingChanged(this,
              blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
        }
        stream->mNotifiedBlocked = blocked;
      }
      t = end;
    }

    stream->AdvanceTimeVaryingValuesToCurrentTime(aNextCurrentTime, blockedTime);
    // Advance mBlocked last so that implementations of
    // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
    stream->mBlocked.AdvanceCurrentTime(aNextCurrentTime);

    streamHasOutput[i] = blockedTime < aNextCurrentTime - aPrevCurrentTime;
    // Make this an assertion when bug 957832 is fixed.
    NS_WARN_IF_FALSE(!streamHasOutput[i] || !stream->mNotifiedFinished,
      "Shouldn't have already notified of finish *and* have output!");

    if (stream->mFinished && !stream->mNotifiedFinished) {
      streamsReadyToFinish.AppendElement(stream);
    }
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
                                stream, MediaTimeToSeconds(stream->mBufferStartTime),
                                MediaTimeToSeconds(blockedTime)));
  }

  for (uint32_t i = 0; i < streamHasOutput.Length(); ++i) {
    if (!streamHasOutput[i]) {
      continue;
    }
    MediaStream* stream = mStreams[i];
    for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
      MediaStreamListener* l = stream->mListeners[j];
      l->NotifyOutput(this, IterationEnd());
    }
  }

  for (uint32_t i = 0; i < streamsReadyToFinish.Length(); ++i) {
    MediaStream* stream = streamsReadyToFinish[i];
    // The stream is fully finished when all of its track data has been played
    // out.
    if (IterationEnd() >=
        stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd())) {
      NS_WARN_IF_FALSE(stream->mNotifiedBlocked,
        "Should've notified blocked=true for a fully finished stream");
      stream->mNotifiedFinished = true;
      stream->mLastPlayedVideoFrame.SetNull();
      SetStreamOrderDirty();
      for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
        MediaStreamListener* l = stream->mListeners[j];
        l->NotifyEvent(this, MediaStreamListener::EVENT_FINISHED);
      }
    }
  }
}
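
// Returns true if aStream will run out of buffered data before
// aEndBlockingDecisions and therefore needs to block at aTime; otherwise it
// may lower *aEnd so the decision is revisited when the buffer runs out.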
bool
MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
                                   GraphTime aEndBlockingDecisions, GraphTime* aEnd)
{
  // Finished streams can't underrun. ProcessedMediaStreams also can't cause
  // underrun currently, since we'll always be able to produce data for them
  // unless they block on some other stream.
  if (aStream->mFinished || aStream->AsProcessedStream()) {
    return false;
  }
  GraphTime bufferEnd =
    StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
#ifdef DEBUG
  if (bufferEnd < IterationEnd()) {
    STREAM_LOG(PR_LOG_ERROR, ("MediaStream %p underrun, "
                              "bufferEnd %f < IterationEnd() %f (%lld < %lld), Streamtime %lld",
                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(IterationEnd()),
                              bufferEnd, IterationEnd(), aStream->GetBufferEnd()));
    aStream->DumpTrackInfo();
    NS_ASSERTION(bufferEnd >= IterationEnd(), "Buffer underran");
  }
#endif
  // We should block after bufferEnd.
  if (bufferEnd <= aTime) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun at %ld, "
                                "bufferEnd %ld",
                                aStream, aTime, bufferEnd));
    return true;
  }
  // We should keep blocking if we're currently blocked and we don't have
  // data all the way through to aEndBlockingDecisions. If we don't have
  // data all the way through to aEndBlockingDecisions, we'll block soon,
  // but we might as well remain unblocked and play the data we've got while
  // we can.
  if (bufferEnd < aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
                                "bufferEnd %f (end at %ld)",
                                aStream, MediaTimeToSeconds(bufferEnd), bufferEnd));
    return true;
  }
  // Reconsider decisions at bufferEnd
  *aEnd = std::min(*aEnd, bufferEnd);
  return false;
}

void
MediaStreamGraphImpl::MarkConsumed(MediaStream* aStream)
{
  if (aStream->mIsConsumed) {
    return;
  }
  aStream->mIsConsumed = true;

  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (!ps) {
    return;
  }
  // Mark all the inputs to this stream as consumed
  for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
    MarkConsumed(ps->mInputs[i]->mSource);
  }
}
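
// Reorders mStreams into processing order: source streams first, then
// processed streams ordered so that inputs come before their consumers,
// using the cycle-detecting SCC search below. Also switches away from an
// AudioCallbackDriver when no audio track or AudioNodeStream remains.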
void
MediaStreamGraphImpl::UpdateStreamOrder()
{
#ifdef MOZ_WEBRTC
  bool shouldAEC = false;
#endif
  bool audioTrackPresent = false;
  // Value of mCycleMarker for unvisited streams in cycle detection.
  const uint32_t NOT_VISITED = UINT32_MAX;
  // Value of mCycleMarker for ordered streams in muted cycles.
  const uint32_t IN_MUTED_CYCLE = 1;

  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];
    stream->mIsConsumed = false;
    stream->mInBlockingSet = false;
#ifdef MOZ_WEBRTC
    if (stream->AsSourceStream() &&
        stream->AsSourceStream()->NeedsMixing()) {
      shouldAEC = true;
    }
#endif
    // If this is an AudioNodeStream, force an AudioCallbackDriver.
    if (stream->AsAudioNodeStream()) {
      audioTrackPresent = true;
    }
    for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer(), MediaSegment::AUDIO);
         !tracks.IsEnded(); tracks.Next()) {
      audioTrackPresent = true;
    }
  }

  if (!audioTrackPresent &&
      CurrentDriver()->AsAudioCallbackDriver()) {
    bool started;
    {
      MonitorAutoLock mon(mMonitor);
      started = CurrentDriver()->AsAudioCallbackDriver()->IsStarted();
    }
    if (started) {
      MonitorAutoLock mon(mMonitor);
      if (mLifecycleState == LIFECYCLE_RUNNING) {
        SystemClockDriver* driver = new SystemClockDriver(this);
        CurrentDriver()->SwitchAtNextIteration(driver);
      }
    }
  }

#ifdef MOZ_WEBRTC
  if (shouldAEC && !mFarendObserverRef && gFarendObserver) {
    mFarendObserverRef = gFarendObserver;
    mMixer.AddCallback(mFarendObserverRef);
  } else if (!shouldAEC && mFarendObserverRef) {
    if (mMixer.FindCallback(mFarendObserverRef)) {
      mMixer.RemoveCallback(mFarendObserverRef);
      mFarendObserverRef = nullptr;
    }
  }
#endif

  // The algorithm for finding cycles is based on Tim Leslie's iterative
  // implementation [1][2] of Pearce's variant [3] of Tarjan's strongly
  // connected components (SCC) algorithm. There are variations (a) to
  // distinguish whether streams in SCCs of size 1 are in a cycle and (b) to
  // re-run the algorithm over SCCs with breaks at DelayNodes.
  //
  // [1] http://www.timl.id.au/?p=327
  // [2] https://github.com/scipy/scipy/blob/e2c502fca/scipy/sparse/csgraph/_traversal.pyx#L582
  // [3] http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1707
  //
  // There are two stacks. One for the depth-first search (DFS),
  mozilla::LinkedList<MediaStream> dfsStack;
  // and another for streams popped from the DFS stack, but still being
  // considered as part of SCCs involving streams on the stack.
  mozilla::LinkedList<MediaStream> sccStack;

  // An index into mStreams for the next stream found with no unsatisfied
  // upstream dependencies.
  uint32_t orderedStreamCount = 0;

  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* s = mStreams[i];
    if (s->IsIntrinsicallyConsumed()) {
      MarkConsumed(s);
    }
    ProcessedMediaStream* ps = s->AsProcessedStream();
    if (ps) {
      // The dfsStack initially contains a list of all processed streams in
      // unchanged order.
      dfsStack.insertBack(s);
      ps->mCycleMarker = NOT_VISITED;
    } else {
      // SourceMediaStreams have no inputs and so can be ordered now.
      mStreams[orderedStreamCount] = s;
      ++orderedStreamCount;
    }
  }

  // nextStackMarker corresponds to "index" in Tarjan's algorithm. It is a
  // counter to label mCycleMarker on the next visited stream in the DFS
  // uniquely in the set of visited streams that are still being considered.
  //
  // In this implementation, the counter descends so that the values are
  // strictly greater than the values that mCycleMarker takes when the stream
  // has been ordered (0 or IN_MUTED_CYCLE).
  //
  // Each new stream labelled, as the DFS searches upstream, receives a value
  // less than those used for all other streams being considered.
  uint32_t nextStackMarker = NOT_VISITED - 1;
  // Reset list of DelayNodes in cycles stored at the tail of mStreams.
  mFirstCycleBreaker = mStreams.Length();

  // Rearrange dfsStack order as required to DFS upstream and pop streams
  // in processing order to place in mStreams.
  while (auto ps = static_cast<ProcessedMediaStream*>(dfsStack.getFirst())) {
    const auto& inputs = ps->mInputs;
    MOZ_ASSERT(ps->AsProcessedStream());
    if (ps->mCycleMarker == NOT_VISITED) {
      // Record the position on the visited stack, so that any searches
      // finding this stream again know how much of the stack is in the cycle.
      ps->mCycleMarker = nextStackMarker;
      --nextStackMarker;
      // Not-visited input streams should be processed first.
      // SourceMediaStreams have already been ordered.
      for (uint32_t i = inputs.Length(); i--; ) {
        auto input = inputs[i]->mSource->AsProcessedStream();
        if (input && input->mCycleMarker == NOT_VISITED) {
          input->remove();
          dfsStack.insertFront(input);
        }
      }
      continue;
    }

    // Returning from DFS. Pop from dfsStack.
    ps->remove();

    // cycleStackMarker keeps track of the highest marker value on any
    // upstream stream, if any, found receiving input, directly or indirectly,
    // from the visited stack (and so from |ps|, making a cycle). In a
    // variation from Tarjan's SCC algorithm, this does not include |ps|
    // unless it is part of the cycle.
    uint32_t cycleStackMarker = 0;
    for (uint32_t i = inputs.Length(); i--; ) {
      auto input = inputs[i]->mSource->AsProcessedStream();
      if (input) {
        cycleStackMarker = std::max(cycleStackMarker, input->mCycleMarker);
      }
    }

    if (cycleStackMarker <= IN_MUTED_CYCLE) {
      // All inputs have been ordered and their stack markers have been removed.
      // This stream is not part of a cycle. It can be processed next.
      ps->mCycleMarker = 0;
      mStreams[orderedStreamCount] = ps;
      ++orderedStreamCount;
      continue;
    }

    // A cycle has been found. Record this stream for ordering when all
    // streams in this SCC have been popped from the DFS stack.
    sccStack.insertFront(ps);

    if (cycleStackMarker > ps->mCycleMarker) {
      // Cycles have been found that involve streams that remain on the stack.
      // Leave mCycleMarker indicating the most downstream (last) stream on
      // the stack known to be part of this SCC. In this way, any searches on
      // other paths that find |ps| will know (without having to traverse from
      // this stream again) that they are part of this SCC (i.e. part of an
      // intersecting cycle).
      ps->mCycleMarker = cycleStackMarker;
      continue;
    }

    // |ps| is the root of an SCC involving no other streams on dfsStack, the
    // complete SCC has been recorded, and streams in this SCC are part of at
    // least one cycle.
    MOZ_ASSERT(cycleStackMarker == ps->mCycleMarker);
    // If there are DelayNodes in this SCC, then they may break the cycles.
    bool haveDelayNode = false;
    auto next = sccStack.getFirst();
    // Streams in this SCC are identified by mCycleMarker <= cycleStackMarker.
    // (There may be other streams later in sccStack from other incompletely
    // searched SCCs, involving streams still on dfsStack.)
    //
    // DelayNodes in cycles must behave differently from those not in cycles,
    // so all DelayNodes in the SCC must be identified.
    while (next && static_cast<ProcessedMediaStream*>(next)->
           mCycleMarker <= cycleStackMarker) {
      auto ns = next->AsAudioNodeStream();
      // Get next before perhaps removing from list below.
      next = next->getNext();
      if (ns && ns->Engine()->AsDelayNodeEngine()) {
        haveDelayNode = true;
        // DelayNodes break cycles by producing their output in a
        // preprocessing phase; they do not need to be ordered before their
        // consumers. Order them at the tail of mStreams so that they can be
        // handled specially. Do so now, so that DFS ignores them.
        ns->remove();
        ns->mCycleMarker = 0;
        --mFirstCycleBreaker;
        mStreams[mFirstCycleBreaker] = ns;
      }
    }
    auto after_scc = next;
    while ((next = sccStack.getFirst()) != after_scc) {
      next->remove();
      auto removed = static_cast<ProcessedMediaStream*>(next);
      if (haveDelayNode) {
        // Return streams to the DFS stack again (to order and detect cycles
        // without delayNodes). Any of these streams that are still inputs
        // for streams on the visited stack must be returned to the front of
        // the stack to be ordered before their dependents. We know that none
        // of these streams need input from streams on the visited stack, so
        // they can all be searched and ordered before the current stack head
        // is popped.
        removed->mCycleMarker = NOT_VISITED;
        dfsStack.insertFront(removed);
      } else {
        // Streams in cycles without any DelayNodes must be muted, and so do
        // not need input and can be ordered now. They must be ordered before
        // their consumers so that their muted output is available.
        removed->mCycleMarker = IN_MUTED_CYCLE;
        mStreams[orderedStreamCount] = removed;
        ++orderedStreamCount;
      }
    }
  }

  MOZ_ASSERT(orderedStreamCount == mFirstCycleBreaker);
}
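
// Extends blocking decisions for all streams up to aEndBlockingDecisions,
// processing each independent partition of mutually-dependent streams in
// turn, and schedules another iteration if any decision will change.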
void
MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
{
  bool blockingDecisionsWillChange = false;

  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
                              this, MediaTimeToSeconds(CurrentDriver()->StateComputedTime())));
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];
    if (!stream->mInBlockingSet) {
      // Compute a partition of the streams containing 'stream' such that we can
      // compute the blocking status of each subset independently.
      nsAutoTArray<MediaStream*,10> streamSet;
      AddBlockingRelatedStreamsToSet(&streamSet, stream);

      GraphTime end;
      for (GraphTime t = CurrentDriver()->StateComputedTime();
           t < aEndBlockingDecisions; t = end) {
        end = GRAPH_TIME_MAX;
        RecomputeBlockingAt(streamSet, t, aEndBlockingDecisions, &end);
        if (end < GRAPH_TIME_MAX) {
          blockingDecisionsWillChange = true;
        }
      }
    }

    GraphTime end;
    stream->mBlocked.GetAt(IterationEnd(), &end);
    if (end < GRAPH_TIME_MAX) {
      blockingDecisionsWillChange = true;
    }
  }
  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
                              this, MediaTimeToSeconds(CurrentDriver()->StateComputedTime()),
                              MediaTimeToSeconds(aEndBlockingDecisions)));

  CurrentDriver()->UpdateStateComputedTime(aEndBlockingDecisions);

  if (blockingDecisionsWillChange) {
    // Make sure we wake up to notify listeners about these changes.
    EnsureNextIteration();
  }
}

void
MediaStreamGraphImpl::AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
                                                     MediaStream* aStream)
{
  if (aStream->mInBlockingSet)
    return;
  aStream->mInBlockingSet = true;
  aStreams->AppendElement(aStream);
  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
    MediaInputPort* port = aStream->mConsumers[i];
    if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
      AddBlockingRelatedStreamsToSet(aStreams, port->mDest);
    }
  }
  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (ps) {
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaInputPort* port = ps->mInputs[i];
      if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
        AddBlockingRelatedStreamsToSet(aStreams, port->mSource);
      }
    }
  }
}

void
MediaStreamGraphImpl::MarkStreamBlocking(MediaStream* aStream)
{
  if (aStream->mBlockInThisPhase)
    return;
  aStream->mBlockInThisPhase = true;
  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
    MediaInputPort* port = aStream->mConsumers[i];
    if (port->mFlags & MediaInputPort::FLAG_BLOCK_OUTPUT) {
      MarkStreamBlocking(port->mDest);
    }
  }
  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (ps) {
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaInputPort* port = ps->mInputs[i];
      if (port->mFlags & MediaInputPort::FLAG_BLOCK_INPUT) {
        MarkStreamBlocking(port->mSource);
      }
    }
  }
}
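
// Decides, for the given set of interdependent streams, which of them are
// blocked during the interval starting at aTime, and narrows *aEnd to the
// earliest time at which these decisions must be recomputed.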
void
MediaStreamGraphImpl::RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
                                          GraphTime aTime,
                                          GraphTime aEndBlockingDecisions,
                                          GraphTime* aEnd)
{
  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlockInThisPhase = false;
  }

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];

    if (stream->mFinished) {
      GraphTime endTime = StreamTimeToGraphTime(stream,
          stream->GetStreamBuffer().GetAllTracksEnd());
      if (endTime <= aTime) {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to being finished", stream));
        // We'll block indefinitely
        MarkStreamBlocking(stream);
        *aEnd = std::min(*aEnd, aEndBlockingDecisions);
        continue;
      } else {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
                                    stream, MediaTimeToSeconds(stream->GetBufferEnd()),
                                    MediaTimeToSeconds(endTime)));
        *aEnd = std::min(*aEnd, endTime);
      }
    }

    GraphTime end;
    bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
    *aEnd = std::min(*aEnd, end);
    if (explicitBlock) {
      STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to explicit blocker", stream));
      MarkStreamBlocking(stream);
      continue;
    }

    bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
    if (underrun) {
      // We'll block indefinitely
      MarkStreamBlocking(stream);
      *aEnd = std::min(*aEnd, aEndBlockingDecisions);
      continue;
    }
  }
  NS_ASSERTION(*aEnd > aTime, "Failed to advance!");

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlocked.SetAtAndAfter(aTime, stream->mBlockInThisPhase);
  }
}

void
MediaStreamGraphImpl::NotifyHasCurrentData(MediaStream* aStream)
{
  if (!aStream->mNotifiedHasCurrentData && aStream->mHasCurrentData) {
    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
      MediaStreamListener* l = aStream->mListeners[j];
      l->NotifyHasCurrentData(this);
    }
    aStream->mNotifiedHasCurrentData = true;
  }
}

void
MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
                                                  MediaStream* aStream)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");

  nsAutoTArray<bool,2> audioOutputStreamsFound;
  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    audioOutputStreamsFound.AppendElement(false);
  }

  if (!aStream->mAudioOutputs.IsEmpty()) {
    for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
         !tracks.IsEnded(); tracks.Next()) {
      uint32_t i;
      for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
        if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
          break;
        }
      }
      if (i < audioOutputStreamsFound.Length()) {
        audioOutputStreamsFound[i] = true;
      } else {
        MediaStream::AudioOutputStream* audioOutputStream =
          aStream->mAudioOutputStreams.AppendElement();
        audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
        audioOutputStream->mBlockedAudioTime = 0;
        audioOutputStream->mLastTickWritten = 0;
        audioOutputStream->mTrackID = tracks->GetID();

        if (!CurrentDriver()->AsAudioCallbackDriver() &&
            !CurrentDriver()->Switching()) {
          MonitorAutoLock mon(mMonitor);
          if (mLifecycleState == LIFECYCLE_RUNNING) {
            AudioCallbackDriver* driver = new AudioCallbackDriver(this);
            mMixer.AddCallback(driver);
            CurrentDriver()->SwitchAtNextIteration(driver);
          }
        }
      }
    }
  }

  for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
    if (!audioOutputStreamsFound[i]) {
      aStream->mAudioOutputStreams.RemoveElementAt(i);
    }
  }
}
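
// Mixes the audio of aStream for the graph interval [aFrom, aTo) into
// mMixer, writing silence for blocked intervals and past the end of the
// available track data, and returns the number of ticks written.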
StreamTime
MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
                                GraphTime aFrom, GraphTime aTo)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");

  StreamTime ticksWritten = 0;
  // We compute the number of needed ticks by converting a difference of graph
  // time rather than by subtracting two converted stream times to ensure that
  // the rounding between {Graph,Stream}Time and track ticks is not dependent
  // on the absolute value of the {Graph,Stream}Time, and so that the number of
  // ticks to play is the same for each cycle.
  StreamTime ticksNeeded = aTo - aFrom;

  if (aStream->mAudioOutputStreams.IsEmpty()) {
    return 0;
  }

  float volume = 0.0f;
  for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
    volume += aStream->mAudioOutputs[i].mVolume;
  }

  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
    StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
    AudioSegment* audio = track->Get<AudioSegment>();
    AudioSegment output;

    // offset and audioOutput.mLastTickWritten can differ by at most one sample,
    // because of the rounding issue. We track that to ensure we don't skip a
    // sample. One sample may be played twice, but this should not happen
    // again during an unblocked sequence of track samples.
    StreamTime offset = GraphTimeToStreamTime(aStream, aFrom);
    if (audioOutput.mLastTickWritten &&
        audioOutput.mLastTickWritten != offset) {
      // If there is a global underrun of the MSG, this property won't hold, and
      // we reset the sample count tracking.
      if (offset - audioOutput.mLastTickWritten == 1) {
        offset = audioOutput.mLastTickWritten;
      }
    }

    // We don't update aStream->mBufferStartTime here to account for time spent
    // blocked. Instead, we'll update it in UpdateCurrentTimeForStreams after
    // the blocked period has completed. But we do need to make sure we play
    // from the right offsets in the stream buffer, even if we've already
    // written silence for some amount of blocked time after the current time.
    GraphTime t = aFrom;
    while (ticksNeeded) {
      GraphTime end;
      bool blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, aTo);

      // Check how many ticks of sound we can provide if we are blocked some
      // time in the middle of this cycle.
      StreamTime toWrite = 0;
      if (end >= aTo) {
        toWrite = ticksNeeded;
      } else {
        toWrite = end - t;
      }
      ticksNeeded -= toWrite;

      if (blocked) {
        output.InsertNullDataAtStart(toWrite);
        ticksWritten += toWrite;
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n",
                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                    offset, offset + toWrite));
      } else {
        StreamTime endTicksNeeded = offset + toWrite;
        StreamTime endTicksAvailable = audio->GetDuration();
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n",
                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                    offset, endTicksNeeded));

        if (endTicksNeeded <= endTicksAvailable) {
          output.AppendSlice(*audio, offset, endTicksNeeded);
          ticksWritten += toWrite;
          offset = endTicksNeeded;
        } else {
          // MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
          // If we are at the end of the track, maybe write the remaining
          // samples, and pad with/output silence.
          if (endTicksNeeded > endTicksAvailable &&
              offset < endTicksAvailable) {
            output.AppendSlice(*audio, offset, endTicksAvailable);
            uint32_t available = endTicksAvailable - offset;
            ticksWritten += available;
            toWrite -= available;
            offset = endTicksAvailable;
          }
          output.AppendNullData(toWrite);
          ticksWritten += toWrite;
        }
        output.ApplyVolume(volume);
      }
      t = end;
    }
    audioOutput.mLastTickWritten = offset;

    // Need unique id for stream & track - and we want it to match the inserter
    output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
                   mMixer, AudioChannelCount(),
                   mSampleRate);
  }
  return ticksWritten;
}

static void
SetImageToBlackPixel(PlanarYCbCrImage* aImage)
{
  uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };

  PlanarYCbCrData data;
  data.mYChannel = blackPixel;
  data.mCbChannel = blackPixel + 1;
  data.mCrChannel = blackPixel + 2;
  data.mYStride = data.mCbCrStride = 1;
  data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
  aImage->SetData(data);
}
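
// Pushes the video frame corresponding to the (slightly ahead) playback
// position to each of aStream's VideoFrameContainer outputs, substituting a
// single black pixel image when the frame is forced to black.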
void
|
|
|
|
MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
|
|
|
|
{
|
2013-05-08 11:44:07 +00:00
|
|
|
MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
if (aStream->mVideoOutputs.IsEmpty())
|
|
|
|
return;
|
|
|
|
|
|
|
|
// Display the next frame a bit early. This is better than letting the current
|
2014-08-26 15:01:33 +00:00
|
|
|
// frame be displayed for too long. Because, depending on the GraphDriver
// in use, we can't really estimate the graph interval duration, we clamp
// it to the current state computed time.
|
|
|
|
GraphTime framePosition = IterationEnd() + MillisecondsToMediaTime(CurrentDriver()->IterationDuration());
|
|
|
|
if (framePosition > CurrentDriver()->StateComputedTime()) {
|
|
|
|
NS_WARN_IF_FALSE(std::abs(framePosition - CurrentDriver()->StateComputedTime()) <
|
|
|
|
MillisecondsToMediaTime(5), "Graph thread slowdown?");
|
|
|
|
framePosition = CurrentDriver()->StateComputedTime();
|
|
|
|
}
|
|
|
|
MOZ_ASSERT(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
|
2012-04-30 03:11:26 +00:00
|
|
|
StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);
|
|
|
|
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime start;
|
2012-07-31 12:17:22 +00:00
|
|
|
const VideoFrame* frame = nullptr;
|
|
|
|
for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO);
|
|
|
|
!tracks.IsEnded(); tracks.Next()) {
|
|
|
|
VideoSegment* segment = tracks->Get<VideoSegment>();
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime thisStart;
|
2012-07-31 12:17:22 +00:00
|
|
|
const VideoFrame* thisFrame =
|
2014-09-18 05:13:13 +00:00
|
|
|
segment->GetFrameAt(frameBufferTime, &thisStart);
|
2012-07-31 12:17:22 +00:00
|
|
|
if (thisFrame && thisFrame->GetImage()) {
|
|
|
|
start = thisStart;
|
|
|
|
frame = thisFrame;
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
}
|
2012-07-31 12:17:22 +00:00
|
|
|
if (!frame || *frame == aStream->mLastPlayedVideoFrame)
|
|
|
|
return;
|
|
|
|
|
2013-11-21 03:02:42 +00:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
|
|
|
|
aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
|
|
|
|
frame->GetIntrinsicSize().height));
|
2012-07-31 12:17:22 +00:00
|
|
|
GraphTime startTime = StreamTimeToGraphTime(aStream,
|
2014-09-18 05:13:13 +00:00
|
|
|
start, INCLUDE_TRAILING_BLOCKED_INTERVAL);
|
2014-04-25 16:03:04 +00:00
|
|
|
TimeStamp targetTime = CurrentDriver()->GetCurrentTimeStamp() +
|
2014-04-25 14:09:30 +00:00
|
|
|
TimeDuration::FromMilliseconds(double(startTime - IterationEnd()));
|
2012-08-22 15:56:38 +00:00
|
|
|
for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
|
2012-07-31 12:17:22 +00:00
|
|
|
VideoFrameContainer* output = aStream->mVideoOutputs[i];
|
2013-05-30 04:44:43 +00:00
|
|
|
|
|
|
|
if (frame->GetForceBlack()) {
|
|
|
|
nsRefPtr<Image> image =
|
2014-01-30 22:58:51 +00:00
|
|
|
output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
|
2013-05-30 04:44:43 +00:00
|
|
|
if (image) {
|
|
|
|
// Sets the image to a single black pixel, which will be scaled to fill
|
|
|
|
// the rendered size.
|
|
|
|
SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get()));
|
|
|
|
}
|
|
|
|
output->SetCurrentFrame(frame->GetIntrinsicSize(), image,
|
|
|
|
targetTime);
|
|
|
|
} else {
|
|
|
|
output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
|
|
|
|
targetTime);
|
|
|
|
}
|
|
|
|
|
2012-07-31 12:17:22 +00:00
|
|
|
nsCOMPtr<nsIRunnable> event =
|
|
|
|
NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
|
2014-11-20 18:10:00 +00:00
|
|
|
DispatchToMainThreadAfterStreamStateUpdate(event.forget());
|
2012-07-31 12:17:22 +00:00
|
|
|
}
|
2012-11-21 01:32:06 +00:00
|
|
|
if (!aStream->mNotifiedFinished) {
|
|
|
|
aStream->mLastPlayedVideoFrame = *frame;
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
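// Summary of PlayVideo(): compute a slightly advanced frame position
// (clamped to the state computed time), look up the video frame at that
// stream time, and skip the update entirely if it matches the last frame we
// played. Otherwise each VideoFrameContainer receives either the real image
// or the 1x1 black placeholder (when the frame is force-blacked), and an
// Invalidate() runnable is dispatched to the main thread after the next
// stream state update.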
|
|
|
|
|
2013-07-19 14:40:57 +00:00
|
|
|
bool
|
|
|
|
MediaStreamGraphImpl::ShouldUpdateMainThread()
|
|
|
|
{
|
|
|
|
if (mRealtime) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
TimeStamp now = TimeStamp::Now();
|
2014-08-26 15:01:33 +00:00
|
|
|
if ((now - mLastMainThreadUpdate).ToMilliseconds() > CurrentDriver()->IterationDuration()) {
|
2013-07-19 14:40:57 +00:00
|
|
|
mLastMainThreadUpdate = now;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
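// Realtime graphs always report to the main thread; non-realtime graphs
// throttle main-thread updates to at most one per driver iteration duration,
// so a fast offline render doesn't flood the main thread with timing updates.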
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
void
|
2013-06-17 13:06:34 +00:00
|
|
|
MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate)
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
mMonitor.AssertCurrentThreadOwns();
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2013-10-23 18:21:33 +00:00
|
|
|
// We don't want to frequently update the main thread about timing updates
|
|
|
|
// when we are not running in realtime.
|
|
|
|
if (aFinalUpdate || ShouldUpdateMainThread()) {
|
2013-07-19 14:40:57 +00:00
|
|
|
mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length());
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
MediaStream* stream = mStreams[i];
|
|
|
|
if (!stream->MainThreadNeedsUpdates()) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
StreamUpdate* update = mStreamUpdates.AppendElement();
|
2014-04-25 14:09:30 +00:00
|
|
|
update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(IterationEnd());
|
2013-07-19 14:40:57 +00:00
|
|
|
update->mStream = stream;
|
|
|
|
update->mNextMainThreadCurrentTime =
|
2014-04-25 14:09:30 +00:00
|
|
|
GraphTimeToStreamTime(stream, IterationEnd());
|
2013-12-09 05:08:02 +00:00
|
|
|
update->mNextMainThreadFinished = stream->mNotifiedFinished;
|
2013-07-19 14:40:57 +00:00
|
|
|
}
|
|
|
|
if (!mPendingUpdateRunnables.IsEmpty()) {
|
|
|
|
mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables);
|
2013-06-19 03:09:44 +00:00
|
|
|
}
|
2013-06-19 03:10:04 +00:00
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2013-06-17 13:06:34 +00:00
|
|
|
// Don't send the message to the main thread if it's not going to have
|
|
|
|
// any work to do.
|
|
|
|
if (aFinalUpdate ||
|
|
|
|
!mUpdateRunnables.IsEmpty() ||
|
|
|
|
!mStreamUpdates.IsEmpty()) {
|
|
|
|
EnsureStableStateEventPosted();
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
2014-04-25 16:04:23 +00:00
|
|
|
GraphTime
|
|
|
|
MediaStreamGraphImpl::RoundUpToNextAudioBlock(GraphTime aTime)
|
2013-01-13 22:46:57 +00:00
|
|
|
{
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime ticks = aTime;
|
2013-08-02 09:27:58 +00:00
|
|
|
uint64_t block = ticks >> WEBAUDIO_BLOCK_SIZE_BITS;
|
|
|
|
uint64_t nextBlock = block + 1;
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime nextTicks = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS;
|
2014-06-12 04:45:00 +00:00
|
|
|
return nextTicks;
|
2013-01-13 22:46:57 +00:00
|
|
|
}
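// RoundUpToNextAudioBlock() always advances to the *next* WebAudio block
// boundary, even when aTime already sits on one. For example, assuming the
// usual 128-frame WebAudio block (WEBAUDIO_BLOCK_SIZE_BITS == 7):
//   aTime = 127 -> block 0 -> returns 128
//   aTime = 128 -> block 1 -> returns 256
// This guarantees the block-by-block processing loop below always makes
// progress.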
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
|
2013-05-24 17:09:29 +00:00
|
|
|
TrackRate aSampleRate,
|
2013-01-13 22:46:57 +00:00
|
|
|
GraphTime aFrom,
|
|
|
|
GraphTime aTo)
|
|
|
|
{
|
2014-07-17 00:55:55 +00:00
|
|
|
MOZ_ASSERT(aStreamIndex <= mFirstCycleBreaker,
|
|
|
|
"Cycle breaker is not AudioNodeStream?");
|
2013-01-13 22:46:57 +00:00
|
|
|
GraphTime t = aFrom;
|
|
|
|
while (t < aTo) {
|
2014-04-25 16:04:23 +00:00
|
|
|
GraphTime next = RoundUpToNextAudioBlock(t);
|
2014-07-17 00:55:55 +00:00
|
|
|
for (uint32_t i = mFirstCycleBreaker; i < mStreams.Length(); ++i) {
|
|
|
|
auto ns = static_cast<AudioNodeStream*>(mStreams[i]);
|
|
|
|
MOZ_ASSERT(ns->AsAudioNodeStream());
|
|
|
|
ns->ProduceOutputBeforeInput(t);
|
|
|
|
}
|
2013-01-13 22:46:57 +00:00
|
|
|
for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) {
|
2014-01-06 23:53:49 +00:00
|
|
|
ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream();
|
2013-01-13 22:46:57 +00:00
|
|
|
if (ps) {
|
2014-03-04 21:53:55 +00:00
|
|
|
ps->ProcessInput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0);
|
2013-01-13 22:46:57 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
t = next;
|
|
|
|
}
|
|
|
|
NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries");
|
|
|
|
}
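// The loop above walks [aFrom, aTo) one WebAudio block at a time. For each
// block it first lets every cycle-breaker AudioNodeStream (index >=
// mFirstCycleBreaker) produce its output before reading input, which is what
// breaks feedback cycles, and then runs ProcessInput() on every processed
// stream from aStreamIndex onwards, allowing streams to finish only on the
// final block.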
|
|
|
|
|
2013-12-09 05:08:02 +00:00
|
|
|
bool
|
|
|
|
MediaStreamGraphImpl::AllFinishedStreamsNotified()
|
|
|
|
{
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
MediaStream* s = mStreams[i];
|
|
|
|
if (s->mFinished && !s->mNotifiedFinished) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
void
|
2014-08-25 13:26:21 +00:00
|
|
|
MediaStreamGraphImpl::UpdateGraph(GraphTime aEndBlockingDecision)
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
2014-04-25 14:09:30 +00:00
|
|
|
// Calculate independent action times for each batch of messages (each
|
|
|
|
// batch corresponding to an event loop task). This isolates the performance
|
|
|
|
// of different scripts to some extent.
|
2014-08-25 13:26:21 +00:00
|
|
|
for (uint32_t i = 0; i < mFrontMessageQueue.Length(); ++i) {
|
|
|
|
mProcessingGraphUpdateIndex = mFrontMessageQueue[i].mGraphUpdateIndex;
|
|
|
|
nsTArray<nsAutoPtr<ControlMessage> >& messages = mFrontMessageQueue[i].mMessages;
|
2014-04-25 14:09:30 +00:00
|
|
|
|
|
|
|
for (uint32_t j = 0; j < messages.Length(); ++j) {
|
|
|
|
messages[j]->Run();
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2014-08-25 13:26:21 +00:00
|
|
|
mFrontMessageQueue.Clear();
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
if (mStreamOrderDirty) {
|
|
|
|
UpdateStreamOrder();
|
|
|
|
}
|
2013-05-16 23:30:41 +00:00
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
bool ensureNextIteration = false;
|
|
|
|
|
|
|
|
// Grab pending stream input.
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
SourceMediaStream* is = mStreams[i]->AsSourceStream();
|
|
|
|
if (is) {
|
|
|
|
UpdateConsumptionState(is);
|
2014-04-25 16:04:23 +00:00
|
|
|
ExtractPendingInput(is, aEndBlockingDecision, &ensureNextIteration);
|
2014-04-25 14:09:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// The loop is woken up so soon that IterationEnd() barely advances and we
|
2014-04-25 16:04:23 +00:00
|
|
|
// end up having aEndBlockingDecision == CurrentDriver()->StateComputedTime().
|
2014-04-25 14:09:30 +00:00
|
|
|
// Since stream blocking is computed in the interval of
|
2014-04-25 16:04:23 +00:00
|
|
|
// [CurrentDriver()->StateComputedTime(), aEndBlockingDecision), it won't be computed at all.
|
2014-04-25 14:09:30 +00:00
|
|
|
// We should ensure next iteration so that pending blocking changes will be
|
|
|
|
// computed in the next loop.
|
2014-04-25 16:04:23 +00:00
|
|
|
if (ensureNextIteration ||
|
|
|
|
aEndBlockingDecision == CurrentDriver()->StateComputedTime()) {
|
2014-09-28 16:07:25 +00:00
|
|
|
EnsureNextIteration();
|
2014-04-25 14:09:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Figure out which streams are blocked and when.
|
2014-04-25 16:04:23 +00:00
|
|
|
RecomputeBlocking(aEndBlockingDecision);
|
|
|
|
}
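// UpdateGraph() is the control half of an iteration: run the queued
// control-message batches (each batch keeps its own graph update index),
// rebuild the stream order if it was marked dirty, pull pending input from
// every SourceMediaStream up to aEndBlockingDecision, and recompute stream
// blocking over [StateComputedTime, aEndBlockingDecision). If that window is
// empty, or a source asked for it, another iteration is scheduled so pending
// blocking changes are not lost.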
|
2014-04-25 14:09:30 +00:00
|
|
|
|
2014-04-25 16:04:23 +00:00
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::Process(GraphTime aFrom, GraphTime aTo)
|
|
|
|
{
|
2014-04-25 14:09:30 +00:00
|
|
|
// Play stream contents.
|
|
|
|
bool allBlockedForever = true;
|
|
|
|
// True when we've done ProcessInput for all processed streams.
|
|
|
|
bool doneAllProducing = false;
|
|
|
|
// This is the number of frames that are written to the AudioStreams for
|
|
|
|
// this cycle.
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime ticksPlayed = 0;
|
2014-08-25 13:25:49 +00:00
|
|
|
|
|
|
|
mMixer.StartMixing();
|
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
// Figure out what each stream wants to do
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
MediaStream* stream = mStreams[i];
|
|
|
|
if (!doneAllProducing) {
|
|
|
|
ProcessedMediaStream* ps = stream->AsProcessedStream();
|
|
|
|
if (ps) {
|
|
|
|
AudioNodeStream* n = stream->AsAudioNodeStream();
|
|
|
|
if (n) {
|
|
|
|
#ifdef DEBUG
|
|
|
|
// Verify that the sampling rate for all of the following streams is the same
|
|
|
|
for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
|
|
|
|
AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
|
|
|
|
if (nextStream) {
|
|
|
|
MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
|
|
|
|
"All AudioNodeStreams in the graph must have the same sampling rate");
|
|
|
|
}
|
2014-04-13 18:08:10 +00:00
|
|
|
}
|
2014-04-25 14:09:30 +00:00
|
|
|
#endif
|
|
|
|
// Since an AudioNodeStream is present, go ahead and
|
|
|
|
// produce audio block by block for all the rest of the streams.
|
2014-04-25 16:04:23 +00:00
|
|
|
ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), aFrom, aTo);
|
2014-04-25 14:09:30 +00:00
|
|
|
doneAllProducing = true;
|
|
|
|
} else {
|
2014-04-25 16:04:23 +00:00
|
|
|
ps->ProcessInput(aFrom, aTo, ProcessedMediaStream::ALLOW_FINISH);
|
2014-04-25 14:09:30 +00:00
|
|
|
NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
|
2014-04-25 16:04:23 +00:00
|
|
|
GraphTimeToStreamTime(stream, aTo),
|
2014-04-25 14:09:30 +00:00
|
|
|
"Stream did not produce enough data");
|
2014-04-13 18:08:10 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2014-04-25 14:09:30 +00:00
|
|
|
NotifyHasCurrentData(stream);
|
2014-08-25 13:25:49 +00:00
|
|
|
// Only play back audio and video in real-time mode
|
2014-04-25 14:09:30 +00:00
|
|
|
if (mRealtime) {
|
2014-04-25 16:04:23 +00:00
|
|
|
CreateOrDestroyAudioStreams(aFrom, stream);
|
2014-08-27 17:13:15 +00:00
|
|
|
if (CurrentDriver()->AsAudioCallbackDriver()) {
|
2014-09-18 05:20:43 +00:00
|
|
|
StreamTime ticksPlayedForThisStream = PlayAudio(stream, aFrom, aTo);
|
2014-08-27 17:13:15 +00:00
|
|
|
if (!ticksPlayed) {
|
|
|
|
ticksPlayed = ticksPlayedForThisStream;
|
|
|
|
} else {
|
|
|
|
MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
|
|
|
|
"Each stream should have the same number of frame.");
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2014-04-25 14:09:30 +00:00
|
|
|
PlayVideo(stream);
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2014-04-25 14:09:30 +00:00
|
|
|
SourceMediaStream* is = stream->AsSourceStream();
|
|
|
|
if (is) {
|
|
|
|
UpdateBufferSufficiencyState(is);
|
2014-01-29 13:34:35 +00:00
|
|
|
}
|
2014-04-25 14:09:30 +00:00
|
|
|
GraphTime end;
|
2014-04-25 16:04:23 +00:00
|
|
|
if (!stream->mBlocked.GetAt(aTo, &end) || end < GRAPH_TIME_MAX) {
|
2014-04-25 14:09:30 +00:00
|
|
|
allBlockedForever = false;
|
2012-04-30 03:11:40 +00:00
|
|
|
}
|
2014-04-25 14:09:30 +00:00
|
|
|
}
|
2012-04-30 03:11:40 +00:00
|
|
|
|
2014-08-26 15:01:33 +00:00
|
|
|
if (CurrentDriver()->AsAudioCallbackDriver() && ticksPlayed) {
|
2014-08-25 13:25:49 +00:00
|
|
|
mMixer.FinishMixing();
|
2014-04-25 14:09:30 +00:00
|
|
|
}
|
2014-03-04 23:15:41 +00:00
|
|
|
|
2014-08-26 15:01:33 +00:00
|
|
|
// If we are switching away from an AudioCallbackDriver, we don't need the
|
|
|
|
// mixer anymore.
|
|
|
|
if (CurrentDriver()->AsAudioCallbackDriver() &&
|
|
|
|
CurrentDriver()->Switching()) {
|
|
|
|
bool isStarted;
|
|
|
|
{
|
|
|
|
MonitorAutoLock mon(mMonitor);
|
|
|
|
isStarted = CurrentDriver()->AsAudioCallbackDriver()->IsStarted();
|
|
|
|
}
|
|
|
|
if (isStarted) {
|
|
|
|
mMixer.RemoveCallback(CurrentDriver()->AsAudioCallbackDriver());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-04-25 16:04:23 +00:00
|
|
|
if (!allBlockedForever) {
|
2014-09-28 16:07:25 +00:00
|
|
|
EnsureNextIteration();
|
2014-04-25 14:09:30 +00:00
|
|
|
}
|
|
|
|
}
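// Process() is the data half of an iteration: each stream either gets its
// data produced block-by-block (once an AudioNodeStream is reached) or via a
// single ProcessInput() call, then, in realtime mode, audio is mixed into
// mMixer and video frames are pushed to their containers. FinishMixing() only
// runs when an AudioCallbackDriver is driving and audio was actually written;
// the mixer callback is detached when switching away from an
// AudioCallbackDriver, and another iteration is requested unless every stream
// is blocked forever.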
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
bool
|
2014-04-25 16:04:23 +00:00
|
|
|
MediaStreamGraphImpl::OneIteration(GraphTime aFrom, GraphTime aTo,
|
2014-08-25 13:26:21 +00:00
|
|
|
GraphTime aStateFrom, GraphTime aStateEnd)
|
2014-04-25 14:09:30 +00:00
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
{
|
|
|
|
MonitorAutoLock lock(mMemoryReportMonitor);
|
|
|
|
if (mNeedsMemoryReport) {
|
|
|
|
mNeedsMemoryReport = false;
|
|
|
|
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream();
|
|
|
|
if (stream) {
|
|
|
|
AudioNodeSizes usage;
|
|
|
|
stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage);
|
|
|
|
mAudioStreamSizes.AppendElement(usage);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
lock.Notify();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-04-25 16:04:23 +00:00
|
|
|
UpdateCurrentTimeForStreams(aFrom, aTo);
|
|
|
|
|
2014-08-25 13:26:21 +00:00
|
|
|
UpdateGraph(aStateEnd);
|
2014-08-25 12:13:08 +00:00
|
|
|
|
2014-04-25 16:04:23 +00:00
|
|
|
Process(aStateFrom, aStateEnd);
|
2014-08-25 12:13:08 +00:00
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
// Send updates to the main thread and wait for the next control loop
|
|
|
|
// iteration.
|
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
MonitorAutoLock lock(mMonitor);
|
2014-04-25 14:09:30 +00:00
|
|
|
bool finalUpdate = mForceShutDown ||
|
|
|
|
(IterationEnd() >= mEndTime && AllFinishedStreamsNotified()) ||
|
2014-08-25 13:26:21 +00:00
|
|
|
(IsEmpty() && mBackMessageQueue.IsEmpty());
|
2014-04-25 14:09:30 +00:00
|
|
|
PrepareUpdatesToMainThreadState(finalUpdate);
|
|
|
|
if (finalUpdate) {
|
|
|
|
// Enter shutdown mode. The stable-state handler will detect this
|
|
|
|
// and complete shutdown. Destroy any streams immediately.
|
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
|
|
|
|
// We'll shut down this graph object if it does not get restarted.
|
|
|
|
mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
|
|
|
|
// No need to Destroy streams here. The main-thread owner of each
|
|
|
|
// stream is responsible for calling Destroy on them.
|
|
|
|
return false;
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2014-03-24 10:06:06 +00:00
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
CurrentDriver()->WaitForNextIteration();
|
2014-03-24 10:06:06 +00:00
|
|
|
|
2014-08-25 13:26:21 +00:00
|
|
|
SwapMessageQueues();
|
2014-04-25 14:09:30 +00:00
|
|
|
}
|
2014-10-29 14:47:28 +00:00
|
|
|
mFlushSourcesNow = false;
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2014-04-25 14:09:30 +00:00
|
|
|
return true;
|
|
|
|
}
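// OneIteration() ties the two halves together: satisfy any pending memory
// report, advance stream current times over [aFrom, aTo), run UpdateGraph()
// and Process() over the state interval, then, under mMonitor, publish
// updates to the main thread. Returning false means the graph has entered
// LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP and the driver should stop;
// returning true means the driver keeps iterating after WaitForNextIteration()
// and the message queues have been swapped.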
|
2012-04-30 03:11:26 +00:00
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
|
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
mMonitor.AssertCurrentThreadOwns();
|
2012-04-30 03:11:26 +00:00
|
|
|
|
|
|
|
MediaStream* stream = aUpdate->mStream;
|
|
|
|
if (!stream)
|
|
|
|
return;
|
|
|
|
stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
|
|
|
|
stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished;
|
2012-08-20 04:20:44 +00:00
|
|
|
|
2013-07-25 02:07:34 +00:00
|
|
|
if (stream->mWrapper) {
|
|
|
|
stream->mWrapper->NotifyStreamStateChanged();
|
|
|
|
}
|
2012-08-22 15:56:38 +00:00
|
|
|
for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) {
|
2012-08-20 04:20:44 +00:00
|
|
|
stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged();
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ForceShutDown()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
|
2013-11-21 03:02:42 +00:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this));
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
MonitorAutoLock lock(mMonitor);
|
2012-04-30 03:11:26 +00:00
|
|
|
mForceShutDown = true;
|
2014-09-28 16:07:25 +00:00
|
|
|
EnsureNextIterationLocked();
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
|
|
|
|
class MediaStreamGraphShutDownRunnable : public nsRunnable {
|
|
|
|
public:
|
2014-09-01 03:50:23 +00:00
|
|
|
explicit MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph)
|
2014-08-26 15:01:33 +00:00
|
|
|
: mGraph(aGraph)
|
2014-04-25 16:03:04 +00:00
|
|
|
{}
|
2012-04-30 03:11:26 +00:00
|
|
|
NS_IMETHOD Run()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(mGraph->mDetectedNotRunning,
|
|
|
|
"We should know the graph thread control loop isn't running!");
|
2013-05-03 05:02:55 +00:00
|
|
|
|
2014-08-31 12:19:48 +00:00
|
|
|
LIFECYCLE_LOG("Shutting down graph %p", mGraph.get());
|
2014-08-26 15:04:38 +00:00
|
|
|
|
2014-09-28 16:07:24 +00:00
|
|
|
// We've asserted the graph isn't running. Use mDriver instead of CurrentDriver
|
|
|
|
// to avoid thread-safety checks
|
|
|
|
#if 0 // AudioCallbackDrivers are released asynchronously anyways
|
|
|
|
// XXX a better test would be to have setting mDetectedNotRunning make sure
|
|
|
|
// any current callback has finished and block future ones -- or just
|
|
|
|
// handle it all in Shutdown()!
|
|
|
|
if (mGraph->mDriver->AsAudioCallbackDriver()) {
|
|
|
|
MOZ_ASSERT(!mGraph->mDriver->AsAudioCallbackDriver()->InCallback());
|
2014-08-26 15:01:35 +00:00
|
|
|
}
|
2014-09-28 16:07:24 +00:00
|
|
|
#endif
|
2014-08-26 15:01:35 +00:00
|
|
|
|
2014-09-28 16:07:24 +00:00
|
|
|
mGraph->mDriver->Shutdown();
|
2013-05-08 05:16:35 +00:00
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
// mGraph's thread is not running so it's OK to do whatever here
|
|
|
|
if (mGraph->IsEmpty()) {
|
2014-02-11 00:04:58 +00:00
|
|
|
// mGraph is no longer needed, so delete it.
|
2014-04-13 18:08:10 +00:00
|
|
|
mGraph->Destroy();
|
2012-04-30 03:11:26 +00:00
|
|
|
} else {
|
2014-02-11 00:04:58 +00:00
|
|
|
// The graph is not empty. We must be in a forced shutdown, or a
|
|
|
|
// non-realtime graph that has finished processing. Some later
|
|
|
|
// AppendMessage will detect that the manager has been emptied, and
|
|
|
|
// delete it.
|
|
|
|
NS_ASSERTION(mGraph->mForceShutDown || !mGraph->mRealtime,
|
|
|
|
"Not in forced shutdown?");
|
2013-05-03 05:02:55 +00:00
|
|
|
for (uint32_t i = 0; i < mGraph->mStreams.Length(); ++i) {
|
|
|
|
DOMMediaStream* s = mGraph->mStreams[i]->GetWrapper();
|
|
|
|
if (s) {
|
|
|
|
s->NotifyMediaStreamGraphShutdown();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
mGraph->mLifecycleState =
|
|
|
|
MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION;
|
|
|
|
}
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
private:
|
2014-08-26 15:04:39 +00:00
|
|
|
nsRefPtr<MediaStreamGraphImpl> mGraph;
|
2012-04-30 03:11:26 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
class MediaStreamGraphStableStateRunnable : public nsRunnable {
|
|
|
|
public:
|
2014-08-25 12:13:14 +00:00
|
|
|
explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph,
|
|
|
|
bool aSourceIsMSG)
|
2013-02-01 20:20:32 +00:00
|
|
|
: mGraph(aGraph)
|
2014-08-25 12:13:14 +00:00
|
|
|
, mSourceIsMSG(aSourceIsMSG)
|
2013-02-01 20:20:32 +00:00
|
|
|
{
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
NS_IMETHOD Run()
|
|
|
|
{
|
2013-02-01 20:20:32 +00:00
|
|
|
if (mGraph) {
|
2014-08-25 12:13:14 +00:00
|
|
|
mGraph->RunInStableState(mSourceIsMSG);
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
return NS_OK;
|
|
|
|
}
|
2013-02-01 20:20:32 +00:00
|
|
|
private:
|
2014-08-26 15:04:39 +00:00
|
|
|
nsRefPtr<MediaStreamGraphImpl> mGraph;
|
2014-08-25 12:13:14 +00:00
|
|
|
bool mSourceIsMSG;
|
2012-04-30 03:11:26 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Control messages forwarded from main thread to graph manager thread
|
|
|
|
*/
|
|
|
|
class CreateMessage : public ControlMessage {
|
|
|
|
public:
|
2014-09-01 03:50:23 +00:00
|
|
|
explicit CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {}
|
2013-05-29 15:38:39 +00:00
|
|
|
virtual void Run() MOZ_OVERRIDE
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->GraphImpl()->AddStream(mStream);
|
|
|
|
mStream->Init();
|
|
|
|
}
|
2013-05-29 15:38:39 +00:00
|
|
|
virtual void RunDuringShutdown() MOZ_OVERRIDE
|
|
|
|
{
|
|
|
|
// Make sure to run this message during shutdown too, to make sure
|
|
|
|
// that we balance the number of streams registered with the graph
|
|
|
|
// as they're destroyed during shutdown.
|
|
|
|
Run();
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
};
|
|
|
|
|
2012-06-19 02:30:09 +00:00
|
|
|
class MediaStreamGraphShutdownObserver MOZ_FINAL : public nsIObserver
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
2014-06-24 16:36:43 +00:00
|
|
|
~MediaStreamGraphShutdownObserver() {}
|
2012-04-30 03:11:26 +00:00
|
|
|
public:
|
|
|
|
NS_DECL_ISUPPORTS
|
|
|
|
NS_DECL_NSIOBSERVER
|
|
|
|
};
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2014-08-25 12:13:14 +00:00
|
|
|
MediaStreamGraphImpl::RunInStableState(bool aSourceIsMSG)
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
|
|
|
|
|
|
|
|
nsTArray<nsCOMPtr<nsIRunnable> > runnables;
|
2012-07-31 12:17:22 +00:00
|
|
|
// When we're doing a forced shutdown, pending control messages may be
|
|
|
|
// run on the main thread via RunDuringShutdown. Those messages must
|
|
|
|
// run without the graph monitor being held. So, we collect them here.
|
|
|
|
nsTArray<nsAutoPtr<ControlMessage> > controlMessagesToRunDuringShutdown;
|
2012-04-30 03:11:26 +00:00
|
|
|
|
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
MonitorAutoLock lock(mMonitor);
|
2014-08-25 12:13:14 +00:00
|
|
|
if (aSourceIsMSG) {
|
|
|
|
MOZ_ASSERT(mPostedRunInStableStateEvent);
|
|
|
|
mPostedRunInStableStateEvent = false;
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2014-08-31 12:19:48 +00:00
|
|
|
#ifdef ENABLE_LIFECYCLE_LOG
|
|
|
|
// This should be kept in sync with the LifecycleState enum in
|
|
|
|
// MediaStreamGraphImpl.h
|
|
|
|
const char * LifecycleState_str[] = {
|
|
|
|
"LIFECYCLE_THREAD_NOT_STARTED",
|
|
|
|
"LIFECYCLE_RUNNING",
|
|
|
|
"LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP",
|
|
|
|
"LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN",
|
|
|
|
"LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION"
|
|
|
|
};
|
|
|
|
|
|
|
|
if (mLifecycleState != LIFECYCLE_RUNNING) {
|
|
|
|
LIFECYCLE_LOG("Running %p in stable state. Current state: %s\n",
|
|
|
|
this, LifecycleState_str[mLifecycleState]);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
runnables.SwapElements(mUpdateRunnables);
|
2012-08-22 15:56:38 +00:00
|
|
|
for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
|
2012-04-30 03:11:26 +00:00
|
|
|
StreamUpdate* update = &mStreamUpdates[i];
|
|
|
|
if (update->mStream) {
|
|
|
|
ApplyStreamUpdate(update);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
mStreamUpdates.Clear();
|
|
|
|
|
|
|
|
if (mCurrentTaskMessageQueue.IsEmpty()) {
|
|
|
|
if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) {
|
|
|
|
// Complete shutdown. First, ensure that this graph is no longer used.
|
|
|
|
// A new graph will be created if one is needed.
|
|
|
|
// Asynchronously clean up old graph. We don't want to do this
|
|
|
|
// synchronously because it spins the event loop waiting for threads
|
|
|
|
// to shut down, and we don't want to do that in a stable state handler.
|
|
|
|
mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
|
2014-08-31 12:19:48 +00:00
|
|
|
LIFECYCLE_LOG("Sending MediaStreamGraphShutDownRunnable %p", this);
|
2014-08-26 15:01:33 +00:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
|
2012-04-30 03:11:26 +00:00
|
|
|
NS_DispatchToMainThread(event);
|
2014-09-03 13:52:43 +00:00
|
|
|
|
|
|
|
LIFECYCLE_LOG("Disconnecting MediaStreamGraph %p", this);
|
2014-11-17 16:07:55 +00:00
|
|
|
MediaStreamGraphImpl* graph;
|
|
|
|
if (gGraphs.Get(mAudioChannel, &graph) && graph == this) {
|
2014-09-03 13:52:43 +00:00
|
|
|
// Remove this graph from gGraphs if it's the graph being shut down
|
2014-11-17 16:07:55 +00:00
|
|
|
gGraphs.Remove(mAudioChannel);
|
2014-09-03 13:52:43 +00:00
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
|
2014-08-25 13:26:21 +00:00
|
|
|
MessageBlock* block = mBackMessageQueue.AppendElement();
|
2012-04-30 03:11:26 +00:00
|
|
|
block->mMessages.SwapElements(mCurrentTaskMessageQueue);
|
2013-10-24 23:12:00 +00:00
|
|
|
block->mGraphUpdateIndex = mNextGraphUpdateIndex;
|
|
|
|
++mNextGraphUpdateIndex;
|
2014-09-28 16:07:25 +00:00
|
|
|
EnsureNextIterationLocked();
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
2014-01-06 20:09:29 +00:00
|
|
|
// If the MediaStreamGraph has more messages going to it, try to revive
|
|
|
|
// it to process those messages. Don't do this if we're in a forced
|
|
|
|
// shutdown or it's a non-realtime graph that has already terminated
|
|
|
|
// processing.
|
|
|
|
if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
|
|
|
|
mRealtime && !mForceShutDown) {
|
2012-04-30 03:11:26 +00:00
|
|
|
mLifecycleState = LIFECYCLE_RUNNING;
|
|
|
|
// Revive the MediaStreamGraph since we have more messages going to it.
|
|
|
|
// Note that we need to put messages into its queue before reviving it,
|
|
|
|
// or it might exit immediately.
|
2014-08-26 15:01:33 +00:00
|
|
|
{
|
2014-08-31 12:19:48 +00:00
|
|
|
LIFECYCLE_LOG("Reviving a graph (%p) ! %s\n",
|
|
|
|
this, CurrentDriver()->AsAudioCallbackDriver() ? "AudioDriver" :
|
|
|
|
"SystemDriver");
|
2014-09-28 16:07:24 +00:00
|
|
|
nsRefPtr<GraphDriver> driver = CurrentDriver();
|
|
|
|
MonitorAutoUnlock unlock(mMonitor);
|
|
|
|
driver->Revive();
|
2014-08-26 15:01:33 +00:00
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-08-26 15:02:09 +00:00
|
|
|
// Don't start the thread for a non-realtime graph until it has been
|
|
|
|
// explicitly started by StartNonRealtimeProcessing.
|
|
|
|
if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED &&
|
|
|
|
(mRealtime || mNonRealtimeProcessing)) {
|
|
|
|
mLifecycleState = LIFECYCLE_RUNNING;
|
|
|
|
// Start the thread now. We couldn't start it earlier because
|
|
|
|
// the graph might exit immediately on finding it has no streams. The
|
|
|
|
// first message for a new graph must create a stream.
|
|
|
|
{
|
|
|
|
// We should exit the monitor for now, because starting a stream might
|
|
|
|
// take locks, and we don't want to deadlock.
|
2014-08-31 12:19:48 +00:00
|
|
|
LIFECYCLE_LOG("Starting a graph (%p) ! %s\n",
|
|
|
|
this,
|
|
|
|
CurrentDriver()->AsAudioCallbackDriver() ? "AudioDriver" :
|
|
|
|
"SystemDriver");
|
2014-09-28 16:07:24 +00:00
|
|
|
nsRefPtr<GraphDriver> driver = CurrentDriver();
|
|
|
|
MonitorAutoUnlock unlock(mMonitor);
|
|
|
|
driver->Start();
|
2014-08-26 15:02:09 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-02-11 00:04:58 +00:00
|
|
|
if ((mForceShutDown || !mRealtime) &&
|
2014-01-30 05:50:17 +00:00
|
|
|
mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
|
2014-01-06 20:09:29 +00:00
|
|
|
// Defer calls to RunDuringShutdown() to happen while mMonitor is not held.
|
2014-08-25 13:26:21 +00:00
|
|
|
for (uint32_t i = 0; i < mBackMessageQueue.Length(); ++i) {
|
|
|
|
MessageBlock& mb = mBackMessageQueue[i];
|
2014-01-06 20:09:29 +00:00
|
|
|
controlMessagesToRunDuringShutdown.MoveElementsFrom(mb.mMessages);
|
|
|
|
}
|
2014-08-25 13:26:21 +00:00
|
|
|
mBackMessageQueue.Clear();
|
2014-01-08 03:58:14 +00:00
|
|
|
MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty());
|
2014-01-06 20:09:29 +00:00
|
|
|
// Stop MediaStreamGraph threads. Do not remove the graph from gGraphs since
|
|
|
|
// we have outstanding DOM objects that may need it.
|
|
|
|
mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
|
2014-08-26 15:01:33 +00:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
|
2014-01-06 20:09:29 +00:00
|
|
|
NS_DispatchToMainThread(event);
|
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Make sure we get a new current time in the next event loop task
|
2014-08-25 12:13:14 +00:00
|
|
|
if (!aSourceIsMSG) {
|
|
|
|
MOZ_ASSERT(mPostedRunInStableState);
|
|
|
|
mPostedRunInStableState = false;
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2012-08-22 15:56:38 +00:00
|
|
|
for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) {
|
2012-07-31 12:17:22 +00:00
|
|
|
controlMessagesToRunDuringShutdown[i]->RunDuringShutdown();
|
|
|
|
}
|
2014-07-02 06:04:54 +00:00
|
|
|
|
|
|
|
#ifdef DEBUG
|
|
|
|
mCanRunMessagesSynchronously = mDetectedNotRunning &&
|
|
|
|
mLifecycleState >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
|
|
|
|
#endif
|
2015-01-08 05:21:00 +00:00
|
|
|
|
|
|
|
for (uint32_t i = 0; i < runnables.Length(); ++i) {
|
|
|
|
runnables[i]->Run();
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
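// RunInStableState() is the main-thread side of the lifecycle machine: it
// applies queued StreamUpdates, forwards freshly appended control messages to
// the graph (reviving or starting the driver when needed), initiates shutdown
// when the graph is empty and waiting for cleanup, and finally runs any
// RunDuringShutdown() messages and update runnables outside the monitor.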
|
|
|
|
|
2014-09-28 16:07:24 +00:00
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::EnsureRunInStableState()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "main thread only");
|
|
|
|
|
|
|
|
if (mPostedRunInStableState)
|
|
|
|
return;
|
|
|
|
mPostedRunInStableState = true;
|
2014-08-25 12:13:14 +00:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this, false);
|
2012-04-30 03:11:26 +00:00
|
|
|
nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
|
|
|
|
if (appShell) {
|
|
|
|
appShell->RunInStableState(event);
|
|
|
|
} else {
|
|
|
|
NS_ERROR("Appshell already destroyed?");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::EnsureStableStateEventPosted()
|
|
|
|
{
|
2014-08-26 15:01:33 +00:00
|
|
|
mMonitor.AssertCurrentThreadOwns();
|
2012-04-30 03:11:26 +00:00
|
|
|
|
|
|
|
if (mPostedRunInStableStateEvent)
|
|
|
|
return;
|
|
|
|
mPostedRunInStableStateEvent = true;
|
2014-08-25 12:13:14 +00:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this, true);
|
2012-04-30 03:11:26 +00:00
|
|
|
NS_DispatchToMainThread(event);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "main thread only");
|
2012-08-09 11:30:09 +00:00
|
|
|
NS_ASSERTION(!aMessage->GetStream() ||
|
|
|
|
!aMessage->GetStream()->IsDestroyed(),
|
|
|
|
"Stream already destroyed");
|
2012-04-30 03:11:26 +00:00
|
|
|
|
|
|
|
if (mDetectedNotRunning &&
|
|
|
|
mLifecycleState > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
|
|
|
|
// The graph control loop is not running and main thread cleanup has
|
|
|
|
// happened. From now on we can't append messages to mCurrentTaskMessageQueue,
|
2012-07-31 12:17:21 +00:00
|
|
|
// because that will never be processed again, so just RunDuringShutdown
|
2012-04-30 03:11:26 +00:00
|
|
|
// this message.
|
2014-02-11 00:04:58 +00:00
|
|
|
// This should only happen during forced shutdown, or after a non-realtime
|
|
|
|
// graph has finished processing.
|
2014-07-02 06:04:54 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
MOZ_ASSERT(mCanRunMessagesSynchronously);
|
|
|
|
mCanRunMessagesSynchronously = false;
|
|
|
|
#endif
|
2012-07-31 12:17:21 +00:00
|
|
|
aMessage->RunDuringShutdown();
|
2014-07-02 06:04:54 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
mCanRunMessagesSynchronously = true;
|
|
|
|
#endif
|
2012-04-30 03:11:26 +00:00
|
|
|
delete aMessage;
|
2013-05-31 00:53:51 +00:00
|
|
|
if (IsEmpty() &&
|
|
|
|
mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
|
2014-11-17 16:07:55 +00:00
|
|
|
|
|
|
|
MediaStreamGraphImpl* graph;
|
|
|
|
if (gGraphs.Get(mAudioChannel, &graph) && graph == this) {
|
|
|
|
gGraphs.Remove(mAudioChannel);
|
2013-02-04 17:29:14 +00:00
|
|
|
}
|
2014-11-17 16:07:55 +00:00
|
|
|
|
2014-04-13 18:08:10 +00:00
|
|
|
Destroy();
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
mCurrentTaskMessageQueue.AppendElement(aMessage);
|
2014-01-06 20:09:29 +00:00
|
|
|
EnsureRunInStableState();
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
2013-08-06 22:14:35 +00:00
|
|
|
MediaStream::MediaStream(DOMMediaStream* aWrapper)
|
|
|
|
: mBufferStartTime(0)
|
|
|
|
, mExplicitBlockerCount(0)
|
|
|
|
, mBlocked(false)
|
|
|
|
, mGraphUpdateIndices(0)
|
|
|
|
, mFinished(false)
|
|
|
|
, mNotifiedFinished(false)
|
|
|
|
, mNotifiedBlocked(false)
|
|
|
|
, mHasCurrentData(false)
|
|
|
|
, mNotifiedHasCurrentData(false)
|
|
|
|
, mWrapper(aWrapper)
|
|
|
|
, mMainThreadCurrentTime(0)
|
|
|
|
, mMainThreadFinished(false)
|
|
|
|
, mMainThreadDestroyed(false)
|
|
|
|
, mGraph(nullptr)
|
2014-04-18 09:23:36 +00:00
|
|
|
, mAudioChannelType(dom::AudioChannel::Normal)
|
2013-08-06 22:14:35 +00:00
|
|
|
{
|
|
|
|
MOZ_COUNT_CTOR(MediaStream);
|
|
|
|
// aWrapper should not already be connected to a MediaStream! It needs
|
|
|
|
// to be hooked up to this stream, and since this stream is only just
|
|
|
|
// being created now, aWrapper must not be connected to anything.
|
|
|
|
NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
|
|
|
|
"Wrapper already has another media stream hooked up to it!");
|
|
|
|
}
|
|
|
|
|
2014-04-13 18:08:10 +00:00
|
|
|
size_t
|
|
|
|
MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
|
|
|
|
{
|
|
|
|
size_t amount = 0;
|
|
|
|
|
|
|
|
// Not owned:
|
|
|
|
// - mGraph - Not reported here
|
|
|
|
// - mConsumers - elements
|
|
|
|
// Future:
|
|
|
|
// - mWrapper
|
|
|
|
// - mVideoOutputs - elements
|
|
|
|
// - mLastPlayedVideoFrame
|
|
|
|
// - mListeners - elements
|
2014-08-25 13:25:49 +00:00
|
|
|
// - mAudioOutputStream - elements
|
2014-04-13 18:08:10 +00:00
|
|
|
|
|
|
|
amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mAudioOutputs.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mVideoOutputs.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mExplicitBlockerCount.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mListeners.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mMainThreadListeners.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mDisabledTrackIDs.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mBlocked.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mGraphUpdateIndices.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mConsumers.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
|
|
|
|
return amount;
|
|
|
|
}
|
|
|
|
|
|
|
|
size_t
|
|
|
|
MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
|
|
|
|
{
|
|
|
|
return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
void
|
|
|
|
MediaStream::Init()
|
|
|
|
{
|
|
|
|
MediaStreamGraphImpl* graph = GraphImpl();
|
2014-04-25 14:09:30 +00:00
|
|
|
mBlocked.SetAtAndAfter(graph->IterationEnd(), true);
|
|
|
|
mExplicitBlockerCount.SetAtAndAfter(graph->IterationEnd(), true);
|
|
|
|
mExplicitBlockerCount.SetAtAndAfter(graph->CurrentDriver()->StateComputedTime(), false);
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
MediaStreamGraphImpl*
|
|
|
|
MediaStream::GraphImpl()
|
|
|
|
{
|
2013-02-01 19:43:36 +00:00
|
|
|
return mGraph;
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
2012-07-31 12:17:21 +00:00
|
|
|
MediaStreamGraph*
|
|
|
|
MediaStream::Graph()
|
|
|
|
{
|
2013-02-01 19:43:36 +00:00
|
|
|
return mGraph;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mGraph, "Should only be called once");
|
|
|
|
mGraph = aGraph;
|
2014-06-12 04:44:59 +00:00
|
|
|
mBuffer.InitGraphRate(aGraph->GraphRate());
|
2012-07-31 12:17:21 +00:00
|
|
|
}
|
|
|
|
|
2013-06-10 19:01:19 +00:00
|
|
|
void
|
|
|
|
MediaStream::SetGraphImpl(MediaStreamGraph* aGraph)
|
|
|
|
{
|
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
|
|
|
|
SetGraphImpl(graph);
|
|
|
|
}
|
|
|
|
|
2012-07-31 12:17:21 +00:00
|
|
|
StreamTime
|
|
|
|
MediaStream::GraphTimeToStreamTime(GraphTime aTime)
|
|
|
|
{
|
|
|
|
return GraphImpl()->GraphTimeToStreamTime(this, aTime);
|
|
|
|
}
|
|
|
|
|
2013-02-04 10:04:26 +00:00
|
|
|
StreamTime
|
|
|
|
MediaStream::GraphTimeToStreamTimeOptimistic(GraphTime aTime)
|
|
|
|
{
|
|
|
|
return GraphImpl()->GraphTimeToStreamTimeOptimistic(this, aTime);
|
|
|
|
}
|
|
|
|
|
|
|
|
GraphTime
|
|
|
|
MediaStream::StreamTimeToGraphTime(StreamTime aTime)
|
|
|
|
{
|
|
|
|
return GraphImpl()->StreamTimeToGraphTime(this, aTime, 0);
|
|
|
|
}
|
|
|
|
|
2012-07-31 12:17:21 +00:00
|
|
|
void
|
|
|
|
MediaStream::FinishOnGraphThread()
|
|
|
|
{
|
|
|
|
GraphImpl()->FinishStream(this);
|
|
|
|
}
|
|
|
|
|
2013-07-05 01:49:53 +00:00
|
|
|
int64_t
|
|
|
|
MediaStream::GetProcessingGraphUpdateIndex()
|
|
|
|
{
|
|
|
|
return GraphImpl()->GetProcessingGraphUpdateIndex();
|
|
|
|
}
|
|
|
|
|
2013-05-21 19:17:47 +00:00
|
|
|
StreamBuffer::Track*
|
2014-09-18 05:13:16 +00:00
|
|
|
MediaStream::EnsureTrack(TrackID aTrackId)
|
2013-05-21 19:17:47 +00:00
|
|
|
{
|
|
|
|
StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId);
|
|
|
|
if (!track) {
|
|
|
|
nsAutoPtr<MediaSegment> segment(new AudioSegment());
|
|
|
|
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
|
|
|
|
MediaStreamListener* l = mListeners[j];
|
2014-09-17 23:51:13 +00:00
|
|
|
l->NotifyQueuedTrackChanges(Graph(), aTrackId, 0,
|
2013-05-21 19:17:47 +00:00
|
|
|
MediaStreamListener::TRACK_EVENT_CREATED,
|
|
|
|
*segment);
|
|
|
|
}
|
2014-09-18 05:13:16 +00:00
|
|
|
track = &mBuffer.AddTrack(aTrackId, 0, segment.forget());
|
2013-05-21 19:17:47 +00:00
|
|
|
}
|
|
|
|
return track;
|
|
|
|
}
|
|
|
|
|
2013-01-07 02:31:30 +00:00
|
|
|
void
|
|
|
|
MediaStream::RemoveAllListenersImpl()
|
|
|
|
{
|
|
|
|
for (int32_t i = mListeners.Length() - 1; i >= 0; --i) {
|
|
|
|
nsRefPtr<MediaStreamListener> listener = mListeners[i].forget();
|
2014-07-14 05:47:56 +00:00
|
|
|
listener->NotifyEvent(GraphImpl(), MediaStreamListener::EVENT_REMOVED);
|
2013-01-07 02:31:30 +00:00
|
|
|
}
|
|
|
|
mListeners.Clear();
|
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
void
|
|
|
|
MediaStream::DestroyImpl()
|
|
|
|
{
|
2012-08-22 15:56:38 +00:00
|
|
|
for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
|
2012-07-31 12:17:21 +00:00
|
|
|
mConsumers[i]->Disconnect();
|
|
|
|
}
|
2014-07-24 21:09:22 +00:00
|
|
|
mGraph = nullptr;
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::Destroy()
|
|
|
|
{
|
2012-10-26 01:39:05 +00:00
|
|
|
// Keep this stream alive until we leave this method
|
|
|
|
nsRefPtr<MediaStream> kungFuDeathGrip = this;
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
2014-09-01 03:50:23 +00:00
|
|
|
explicit Message(MediaStream* aStream) : ControlMessage(aStream) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
2014-07-24 21:09:22 +00:00
|
|
|
mStream->RemoveAllListenersImpl();
|
|
|
|
auto graph = mStream->GraphImpl();
|
2012-04-30 03:11:26 +00:00
|
|
|
mStream->DestroyImpl();
|
2014-07-24 21:09:22 +00:00
|
|
|
graph->RemoveStream(mStream);
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void RunDuringShutdown()
|
|
|
|
{ Run(); }
|
2012-04-30 03:11:26 +00:00
|
|
|
};
|
2012-07-30 14:20:58 +00:00
|
|
|
mWrapper = nullptr;
|
2012-10-25 22:07:59 +00:00
|
|
|
GraphImpl()->AppendMessage(new Message(this));
|
2012-10-26 01:39:05 +00:00
|
|
|
// Message::RunDuringShutdown may have removed this stream from the graph,
|
|
|
|
// but our kungFuDeathGrip above will have kept this stream alive if
|
|
|
|
// necessary.
|
2012-10-25 23:08:38 +00:00
|
|
|
mMainThreadDestroyed = true;
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::AddAudioOutput(void* aKey)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, void* aKey) : ControlMessage(aStream), mKey(aKey) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->AddAudioOutputImpl(mKey);
|
|
|
|
}
|
|
|
|
void* mKey;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aKey));
|
|
|
|
}
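// AddAudioOutput() shows the pattern used by most MediaStream mutators: the
// main-thread method wraps its arguments in a ControlMessage subclass and
// queues it with GraphImpl()->AppendMessage(); the graph thread later runs
// the message, which calls the corresponding *Impl variant. For example, a
// hypothetical caller muting output would go through
//   stream->SetAudioOutputVolume(key, 0.0f);  // queued, applied on the next
//                                             // graph iteration
// rather than touching mAudioOutputs directly from the main thread.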
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume)
|
|
|
|
{
|
2012-08-22 15:56:38 +00:00
|
|
|
for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
|
2012-04-30 03:11:26 +00:00
|
|
|
if (mAudioOutputs[i].mKey == aKey) {
|
|
|
|
mAudioOutputs[i].mVolume = aVolume;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NS_ERROR("Audio output key not found");
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetAudioOutputVolume(void* aKey, float aVolume)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, void* aKey, float aVolume) :
|
|
|
|
ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->SetAudioOutputVolumeImpl(mKey, mVolume);
|
|
|
|
}
|
|
|
|
void* mKey;
|
|
|
|
float mVolume;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aKey, aVolume));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RemoveAudioOutputImpl(void* aKey)
|
|
|
|
{
|
2012-08-22 15:56:38 +00:00
|
|
|
for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
|
2012-04-30 03:11:26 +00:00
|
|
|
if (mAudioOutputs[i].mKey == aKey) {
|
|
|
|
mAudioOutputs.RemoveElementAt(i);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NS_ERROR("Audio output key not found");
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RemoveAudioOutput(void* aKey)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, void* aKey) :
|
|
|
|
ControlMessage(aStream), mKey(aKey) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->RemoveAudioOutputImpl(mKey);
|
|
|
|
}
|
|
|
|
void* mKey;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aKey));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
|
|
|
|
ControlMessage(aStream), mContainer(aContainer) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->AddVideoOutputImpl(mContainer.forget());
|
|
|
|
}
|
|
|
|
nsRefPtr<VideoFrameContainer> mContainer;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aContainer));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
|
|
|
|
ControlMessage(aStream), mContainer(aContainer) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->RemoveVideoOutputImpl(mContainer);
|
|
|
|
}
|
|
|
|
nsRefPtr<VideoFrameContainer> mContainer;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aContainer));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2012-08-22 15:56:38 +00:00
|
|
|
MediaStream::ChangeExplicitBlockerCount(int32_t aDelta)
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
2012-08-22 15:56:38 +00:00
|
|
|
Message(MediaStream* aStream, int32_t aDelta) :
|
2012-04-30 03:11:26 +00:00
|
|
|
ControlMessage(aStream), mDelta(aDelta) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->ChangeExplicitBlockerCountImpl(
|
2014-04-25 14:09:30 +00:00
|
|
|
mStream->GraphImpl()->CurrentDriver()->StateComputedTime(), mDelta);
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2012-08-22 15:56:38 +00:00
|
|
|
int32_t mDelta;
|
2012-04-30 03:11:26 +00:00
|
|
|
};
|
2014-01-13 16:38:30 +00:00
|
|
|
|
|
|
|
// This can happen if this method has been called asynchronously, and the
|
|
|
|
// stream has been destroyed since then.
|
|
|
|
if (mMainThreadDestroyed) {
|
|
|
|
return;
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
GraphImpl()->AppendMessage(new Message(this, aDelta));
|
|
|
|
}
|
|
|
|
|
2012-05-24 10:37:14 +00:00
|
|
|
void
|
|
|
|
MediaStream::AddListenerImpl(already_AddRefed<MediaStreamListener> aListener)
|
|
|
|
{
|
|
|
|
MediaStreamListener* listener = *mListeners.AppendElement() = aListener;
|
|
|
|
listener->NotifyBlockingChanged(GraphImpl(),
|
2013-01-02 13:49:18 +00:00
|
|
|
mNotifiedBlocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
|
2012-05-24 10:37:14 +00:00
|
|
|
if (mNotifiedFinished) {
|
2014-07-14 05:47:56 +00:00
|
|
|
listener->NotifyEvent(GraphImpl(), MediaStreamListener::EVENT_FINISHED);
|
2012-05-24 10:37:14 +00:00
|
|
|
}
|
2013-03-20 11:19:39 +00:00
|
|
|
if (mNotifiedHasCurrentData) {
|
|
|
|
listener->NotifyHasCurrentData(GraphImpl());
|
|
|
|
}
|
2012-05-24 10:37:14 +00:00
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
void
|
|
|
|
MediaStream::AddListener(MediaStreamListener* aListener)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, MediaStreamListener* aListener) :
|
|
|
|
ControlMessage(aStream), mListener(aListener) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->AddListenerImpl(mListener.forget());
|
|
|
|
}
|
|
|
|
nsRefPtr<MediaStreamListener> mListener;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aListener));
|
|
|
|
}
|
|
|
|
|
2013-01-07 02:31:30 +00:00
|
|
|
void
|
|
|
|
MediaStream::RemoveListenerImpl(MediaStreamListener* aListener)
|
2014-06-22 18:21:00 +00:00
|
|
|
{
|
2013-01-07 02:31:30 +00:00
|
|
|
// wouldn't need this if we could do it in the opposite order
|
|
|
|
nsRefPtr<MediaStreamListener> listener(aListener);
|
|
|
|
mListeners.RemoveElement(aListener);
|
2014-07-14 05:47:56 +00:00
|
|
|
listener->NotifyEvent(GraphImpl(), MediaStreamListener::EVENT_REMOVED);
|
2013-01-07 02:31:30 +00:00
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:26 +00:00
|
|
|
void
|
|
|
|
MediaStream::RemoveListener(MediaStreamListener* aListener)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, MediaStreamListener* aListener) :
|
|
|
|
ControlMessage(aStream), mListener(aListener) {}
|
2012-07-31 12:17:21 +00:00
|
|
|
virtual void Run()
|
2012-04-30 03:11:26 +00:00
|
|
|
{
|
|
|
|
mStream->RemoveListenerImpl(mListener);
|
|
|
|
}
|
|
|
|
nsRefPtr<MediaStreamListener> mListener;
|
|
|
|
};
|
2013-02-28 19:53:38 +00:00
|
|
|
// If the stream is destroyed, the Listeners have been or will be
|
|
|
|
// removed.
|
|
|
|
if (!IsDestroyed()) {
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aListener));
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
}
|
2013-10-24 23:07:29 +00:00
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RunAfterPendingUpdates(nsRefPtr<nsIRunnable> aRunnable)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
MediaStreamGraphImpl* graph = GraphImpl();
|
|
|
|
|
|
|
|
// Special case when a non-realtime graph has not started, to ensure the
|
|
|
|
// runnable will run in finite time.
|
|
|
|
if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) {
|
|
|
|
aRunnable->Run();
|
|
|
|
}
|
|
|
|
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
explicit Message(MediaStream* aStream,
|
|
|
|
already_AddRefed<nsIRunnable> aRunnable)
|
|
|
|
: ControlMessage(aStream)
|
|
|
|
, mRunnable(aRunnable) {}
|
|
|
|
virtual void Run() MOZ_OVERRIDE
|
|
|
|
{
|
|
|
|
mStream->Graph()->
|
|
|
|
DispatchToMainThreadAfterStreamStateUpdate(mRunnable.forget());
|
|
|
|
}
|
|
|
|
virtual void RunDuringShutdown() MOZ_OVERRIDE
|
|
|
|
{
|
2014-07-02 06:04:54 +00:00
|
|
|
// Don't run mRunnable now as it may call AppendMessage() which would
|
|
|
|
// assume that there are no remaining controlMessagesToRunDuringShutdown.
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
NS_DispatchToCurrentThread(mRunnable);
|
2013-10-24 23:07:29 +00:00
|
|
|
}
|
|
|
|
private:
|
|
|
|
nsRefPtr<nsIRunnable> mRunnable;
|
|
|
|
};
|
|
|
|
|
|
|
|
graph->AppendMessage(new Message(this, aRunnable.forget()));
|
|
|
|
}
|
2012-04-30 03:11:26 +00:00
|
|
|
|
2013-05-30 04:44:43 +00:00
|
|
|
void
|
|
|
|
MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled)
|
|
|
|
{
|
|
|
|
if (aEnabled) {
|
|
|
|
mDisabledTrackIDs.RemoveElement(aTrackID);
|
|
|
|
} else {
|
|
|
|
if (!mDisabledTrackIDs.Contains(aTrackID)) {
|
|
|
|
mDisabledTrackIDs.AppendElement(aTrackID);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) :
|
|
|
|
ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {}
|
|
|
|
virtual void Run()
|
|
|
|
{
|
|
|
|
mStream->SetTrackEnabledImpl(mTrackID, mEnabled);
|
|
|
|
}
|
|
|
|
TrackID mTrackID;
|
|
|
|
bool mEnabled;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aTrackID, aEnabled));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2013-08-26 06:07:17 +00:00
|
|
|
MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment)
|
2013-05-30 04:44:43 +00:00
|
|
|
{
|
|
|
|
if (!mDisabledTrackIDs.Contains(aTrackID)) {
|
|
|
|
return;
|
|
|
|
}
|
2013-08-26 06:07:17 +00:00
|
|
|
aSegment->ReplaceWithDisabled();
|
|
|
|
if (aRawSegment) {
|
|
|
|
aRawSegment->ReplaceWithDisabled();
|
2013-05-30 04:44:43 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-04-30 03:11:40 +00:00
|
|
|
void
|
2012-05-23 06:01:15 +00:00
|
|
|
SourceMediaStream::DestroyImpl()
|
2012-04-30 03:11:40 +00:00
|
|
|
{
|
2014-07-24 21:09:22 +00:00
|
|
|
// Hold mMutex while mGraph is reset so that other threads holding mMutex
|
|
|
|
// can null-check it and know that the graph will not be destroyed.
|
|
|
|
MutexAutoLock lock(mMutex);
|
2012-05-23 06:01:15 +00:00
|
|
|
MediaStream::DestroyImpl();
|
|
|
|
}
|
|
|
|
|
2012-07-20 19:36:03 +00:00
|
|
|
void
|
|
|
|
SourceMediaStream::SetPullEnabled(bool aEnabled)
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mPullEnabled = aEnabled;
|
2014-07-24 21:09:22 +00:00
|
|
|
if (mPullEnabled && GraphImpl()) {
|
2014-09-28 16:07:24 +00:00
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-07-20 19:36:03 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-05-23 06:01:15 +00:00
|
|
|
void
|
2014-09-18 05:20:43 +00:00
|
|
|
SourceMediaStream::AddTrackInternal(TrackID aID, TrackRate aRate, StreamTime aStart,
|
2014-09-17 23:50:02 +00:00
|
|
|
MediaSegment* aSegment)
|
2012-05-23 06:01:15 +00:00
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
TrackData* data = mUpdateTracks.AppendElement();
|
|
|
|
data->mID = aID;
|
2014-03-24 10:06:05 +00:00
|
|
|
data->mInputRate = aRate;
|
2012-05-23 06:01:15 +00:00
|
|
|
data->mStart = aStart;
|
2014-12-30 01:54:01 +00:00
|
|
|
data->mEndOfFlushedData = aStart;
|
2012-05-23 06:01:15 +00:00
|
|
|
data->mCommands = TRACK_CREATE;
|
|
|
|
data->mData = aSegment;
|
|
|
|
data->mHaveEnough = false;
|
2014-09-17 23:50:02 +00:00
|
|
|
if (GraphImpl()) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-05-23 06:01:15 +00:00
|
|
|
}
|
2012-04-30 03:11:40 +00:00
|
|
|
}
|
|
|
|
|
2014-03-24 10:06:05 +00:00
|
|
|
void
|
|
|
|
SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
|
|
|
|
{
|
|
|
|
if (aSegment->GetType() != MediaSegment::AUDIO ||
|
2014-09-17 23:50:01 +00:00
|
|
|
aTrackData->mInputRate == GraphImpl()->GraphRate()) {
|
2014-03-24 10:06:05 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
|
2014-06-09 00:11:41 +00:00
|
|
|
int channels = segment->ChannelCount();
|
2014-04-25 13:15:21 +00:00
|
|
|
|
2014-06-09 00:11:41 +00:00
|
|
|
// If this segment is just silence, we delay instantiating the resampler.
|
|
|
|
if (channels) {
|
|
|
|
if (aTrackData->mResampler) {
|
|
|
|
MOZ_ASSERT(aTrackData->mResamplerChannelCount == segment->ChannelCount());
|
|
|
|
} else {
|
2014-04-25 13:15:21 +00:00
|
|
|
SpeexResamplerState* state = speex_resampler_init(channels,
|
|
|
|
aTrackData->mInputRate,
|
2014-09-17 23:50:01 +00:00
|
|
|
GraphImpl()->GraphRate(),
|
2014-04-25 13:15:21 +00:00
|
|
|
SPEEX_RESAMPLER_QUALITY_DEFAULT,
|
|
|
|
nullptr);
|
|
|
|
if (!state) {
|
|
|
|
return;
|
|
|
|
}
|
2014-03-24 10:06:05 +00:00
|
|
|
aTrackData->mResampler.own(state);
|
2014-06-09 00:11:41 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
aTrackData->mResamplerChannelCount = channels;
|
|
|
|
#endif
|
2014-03-24 10:06:05 +00:00
|
|
|
}
|
|
|
|
}
|
2014-09-17 23:50:01 +00:00
|
|
|
segment->ResampleChunks(aTrackData->mResampler, aTrackData->mInputRate, GraphImpl()->GraphRate());
|
2014-03-24 10:06:05 +00:00
|
|
|
}
|
|
|
|
|
2013-02-25 09:25:07 +00:00
|
|
|
bool
|
2013-08-24 13:53:11 +00:00
|
|
|
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
|
2012-04-30 03:11:40 +00:00
|
|
|
{
|
2012-05-23 06:01:15 +00:00
|
|
|
MutexAutoLock lock(mMutex);
|
2012-10-24 23:21:32 +00:00
|
|
|
// ::EndAllTrackAndFinish() can end these before the sources notice
|
2013-02-25 09:25:07 +00:00
|
|
|
bool appended = false;
|
2014-07-24 21:23:59 +00:00
|
|
|
auto graph = GraphImpl();
|
|
|
|
if (!mFinished && graph) {
|
2012-10-24 23:21:32 +00:00
|
|
|
TrackData *track = FindDataForTrack(aID);
|
|
|
|
if (track) {
|
2013-08-24 13:53:01 +00:00
|
|
|
// Data goes into mData, and on the next iteration of the MSG moves
|
|
|
|
// into the track's segment after NotifyQueuedTrackChanges(). This adds
|
|
|
|
// 0-10ms of delay before data gets to direct listeners.
|
|
|
|
// Indirect listeners (via subsequent TrackUnion nodes) are synced to
|
|
|
|
// playout time, and so can be delayed by buffering.
|
|
|
|
|
2013-08-26 06:07:17 +00:00
|
|
|
// Apply track disabling before notifying any consumers directly
|
|
|
|
// or inserting into the graph
|
|
|
|
ApplyTrackDisabling(aID, aSegment, aRawSegment);
|
|
|
|
|
2014-03-24 10:06:05 +00:00
|
|
|
ResampleAudioToGraphSampleRate(track, aSegment);
|
|
|
|
|
2013-08-24 13:53:11 +00:00
|
|
|
// Must notify first, since AppendFrom() will empty out aSegment
|
|
|
|
NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment);
|
|
|
|
track->mData->AppendFrom(aSegment); // note: aSegment is now dead
|
2013-02-25 09:25:07 +00:00
|
|
|
appended = true;
|
2014-09-28 16:07:24 +00:00
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-10-24 23:21:32 +00:00
|
|
|
} else {
|
2013-02-25 09:25:07 +00:00
|
|
|
aSegment->Clear();
|
2013-02-27 12:49:26 +00:00
|
|
|
}
|
2012-05-23 06:01:15 +00:00
|
|
|
}
|
2013-02-25 09:25:07 +00:00
|
|
|
return appended;
|
2012-04-30 03:11:40 +00:00
|
|
|
}
|
|
|
|
|
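
// Fans the just-appended segment out to registered direct listeners. Must be
// called with mMutex held; the offset passed to listeners is still a
// placeholder (see the FIX comment below).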

void
SourceMediaStream::NotifyDirectConsumers(TrackData *aTrack,
                                         MediaSegment *aSegment)
{
  // Call with mMutex locked
  MOZ_ASSERT(aTrack);

  for (uint32_t j = 0; j < mDirectListeners.Length(); ++j) {
    MediaStreamDirectListener* l = mDirectListeners[j];
    StreamTime offset = 0; // FIX! need a separate StreamTime.... or the end of the internal buffer
    l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID,
                          offset, aTrack->mCommands, *aSegment);
  }
}
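
// Listener event notification comes in two layers: NotifyListenersEventImpl
// runs on the graph thread and calls each listener directly, while the public
// NotifyListenersEvent marshals the event onto the graph thread via a
// ControlMessage.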

// These handle notifying all the listeners of an event
void
SourceMediaStream::NotifyListenersEventImpl(MediaStreamListener::MediaStreamGraphEvent aEvent)
{
  for (uint32_t j = 0; j < mListeners.Length(); ++j) {
    MediaStreamListener* l = mListeners[j];
    l->NotifyEvent(GraphImpl(), aEvent);
  }
}

void
SourceMediaStream::NotifyListenersEvent(MediaStreamListener::MediaStreamGraphEvent aNewEvent)
{
  class Message : public ControlMessage {
  public:
    Message(SourceMediaStream* aStream, MediaStreamListener::MediaStreamGraphEvent aEvent) :
      ControlMessage(aStream), mEvent(aEvent) {}
    virtual void Run()
    {
      mStream->AsSourceStream()->NotifyListenersEventImpl(mEvent);
    }
    MediaStreamListener::MediaStreamGraphEvent mEvent;
  };
  GraphImpl()->AppendMessage(new Message(this, aNewEvent));
}
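
// Direct listeners are added and removed under the lock; an async
// HAS/HAS_NO_DIRECT_LISTENERS event is emitted only when the listener list
// transitions between empty and non-empty.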

void
SourceMediaStream::AddDirectListener(MediaStreamDirectListener* aListener)
{
  bool wasEmpty;
  {
    MutexAutoLock lock(mMutex);
    wasEmpty = mDirectListeners.IsEmpty();
    mDirectListeners.AppendElement(aListener);
  }

  if (wasEmpty) {
    // Async
    NotifyListenersEvent(MediaStreamListener::EVENT_HAS_DIRECT_LISTENERS);
  }
}

void
SourceMediaStream::RemoveDirectListener(MediaStreamDirectListener* aListener)
{
  bool isEmpty;
  {
    MutexAutoLock lock(mMutex);
    mDirectListeners.RemoveElement(aListener);
    isEmpty = mDirectListeners.IsEmpty();
  }

  if (isEmpty) {
    // Async
    NotifyListenersEvent(MediaStreamListener::EVENT_HAS_NO_DIRECT_LISTENERS);
  }
}

bool
SourceMediaStream::HaveEnoughBuffered(TrackID aID)
{
  MutexAutoLock lock(mMutex);
  TrackData *track = FindDataForTrack(aID);
  if (track) {
    return track->mHaveEnough;
  }
  return false;
}

StreamTime
SourceMediaStream::GetEndOfAppendedData(TrackID aID)
{
  MutexAutoLock lock(mMutex);
  TrackData *track = FindDataForTrack(aID);
  if (track) {
    return track->mEndOfFlushedData + track->mData->GetDuration();
  }
  NS_ERROR("Track not found");
  return 0;
}
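
// If the track currently has enough buffered data, the runnable is parked in
// mDispatchWhenNotEnough and is expected to be dispatched once the graph
// reports the track as no longer full; otherwise (including when the track
// does not exist) it is dispatched immediately.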

void
SourceMediaStream::DispatchWhenNotEnoughBuffered(TrackID aID,
                                                 nsIEventTarget* aSignalThread, nsIRunnable* aSignalRunnable)
{
  MutexAutoLock lock(mMutex);
  TrackData* data = FindDataForTrack(aID);
  if (!data) {
    aSignalThread->Dispatch(aSignalRunnable, 0);
    return;
  }

  if (data->mHaveEnough) {
    if (data->mDispatchWhenNotEnough.IsEmpty()) {
      data->mDispatchWhenNotEnough.AppendElement()->Init(aSignalThread, aSignalRunnable);
    }
  } else {
    aSignalThread->Dispatch(aSignalRunnable, 0);
  }
}

void
SourceMediaStream::EndTrack(TrackID aID)
{
  MutexAutoLock lock(mMutex);
  // ::EndAllTrackAndFinish() can end these before the sources call this
  if (!mFinished) {
    TrackData *track = FindDataForTrack(aID);
    if (track) {
      track->mCommands |= TRACK_END;
    }
  }
  if (auto graph = GraphImpl()) {
    graph->EnsureNextIteration();
  }
}

void
SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime)
{
  MutexAutoLock lock(mMutex);
  MOZ_ASSERT(aKnownTime >= mUpdateKnownTracksTime);
  mUpdateKnownTracksTime = aKnownTime;
  if (auto graph = GraphImpl()) {
    graph->EnsureNextIteration();
  }
}

void
SourceMediaStream::FinishWithLockHeld()
{
  mMutex.AssertCurrentThreadOwns();
  mUpdateFinished = true;
  if (auto graph = GraphImpl()) {
    graph->EnsureNextIteration();
  }
}

void
SourceMediaStream::EndAllTrackAndFinish()
{
  MutexAutoLock lock(mMutex);
  for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) {
    SourceMediaStream::TrackData* data = &mUpdateTracks[i];
    data->mCommands |= TRACK_END;
  }
  FinishWithLockHeld();
  // we will call NotifyEvent() to let GetUserMedia know
}

StreamTime
SourceMediaStream::GetBufferedTicks(TrackID aID)
{
  StreamBuffer::Track* track = mBuffer.FindTrack(aID);
  if (track) {
    MediaSegment* segment = track->GetSegment();
    if (segment) {
      return segment->GetDuration() -
          GraphTimeToStreamTime(GraphImpl()->CurrentDriver()->StateComputedTime());
    }
  }
  return 0;
}

void
SourceMediaStream::RegisterForAudioMixing()
{
  MutexAutoLock lock(mMutex);
  mNeedsMixing = true;
}

bool
SourceMediaStream::NeedsMixing()
{
  MutexAutoLock lock(mMutex);
  return mNeedsMixing;
}
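
// MediaInputPort lifecycle: Init() runs on the graph thread (it is appended as
// a ControlMessage from AllocateInputPort below) and wires the port between
// its source and destination streams; Disconnect() undoes that wiring and
// marks the stream order dirty so the graph re-sorts its streams.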

void
MediaInputPort::Init()
{
  STREAM_LOG(PR_LOG_DEBUG, ("Adding MediaInputPort %p (from %p to %p) to the graph",
                            this, mSource, mDest));
  mSource->AddConsumer(this);
  mDest->AddInput(this);
  // mPortCount decremented via MediaInputPort::Destroy's message
  ++mDest->GraphImpl()->mPortCount;
}

void
MediaInputPort::Disconnect()
{
  NS_ASSERTION(!mSource == !mDest,
               "mSource and mDest must either both be null or both non-null");
  if (!mSource)
    return;

  mSource->RemoveConsumer(this);
  mSource = nullptr;
  mDest->RemoveInput(this);
  mDest = nullptr;

  GraphImpl()->SetStreamOrderDirty();
}
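
// Scans forward from aTime for the next interval during which the destination
// stream is not blocked, and records whether the source is blocked at the
// start of that interval; the interval ends where either stream's blocking
// state next changes.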

MediaInputPort::InputInterval
MediaInputPort::GetNextInputInterval(GraphTime aTime)
{
  InputInterval result = { GRAPH_TIME_MAX, GRAPH_TIME_MAX, false };
  GraphTime t = aTime;
  GraphTime end;
  for (;;) {
    if (!mDest->mBlocked.GetAt(t, &end)) {
      break;
    }
    if (end >= GRAPH_TIME_MAX) {
      return result;
    }
    t = end;
  }
  result.mStart = t;
  GraphTime sourceEnd;
  result.mInputIsBlocked = mSource->mBlocked.GetAt(t, &sourceEnd);
  result.mEnd = std::min(end, sourceEnd);
  return result;
}

void
MediaInputPort::Destroy()
{
  class Message : public ControlMessage {
  public:
    explicit Message(MediaInputPort* aPort)
      : ControlMessage(nullptr), mPort(aPort) {}
    virtual void Run()
    {
      mPort->Disconnect();
      --mPort->GraphImpl()->mPortCount;
      mPort->SetGraphImpl(nullptr);
      NS_RELEASE(mPort);
    }
    virtual void RunDuringShutdown()
    {
      Run();
    }
    MediaInputPort* mPort;
  };
  GraphImpl()->AppendMessage(new Message(this));
}

MediaStreamGraphImpl*
MediaInputPort::GraphImpl()
{
  return mGraph;
}

MediaStreamGraph*
MediaInputPort::Graph()
{
  return mGraph;
}

void
MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph)
{
  MOZ_ASSERT(!mGraph || !aGraph, "Should only be set once");
  mGraph = aGraph;
}

already_AddRefed<MediaInputPort>
ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags,
                                        uint16_t aInputNumber, uint16_t aOutputNumber)
{
  // This method creates two references to the MediaInputPort: one for
  // the main thread, and one for the MediaStreamGraph.
  class Message : public ControlMessage {
  public:
    explicit Message(MediaInputPort* aPort)
      : ControlMessage(aPort->GetDestination()),
        mPort(aPort) {}
    virtual void Run()
    {
      mPort->Init();
      // The graph holds its reference implicitly
      mPort->GraphImpl()->SetStreamOrderDirty();
      unused << mPort.forget();
    }
    virtual void RunDuringShutdown()
    {
      Run();
    }
    nsRefPtr<MediaInputPort> mPort;
  };
  nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this, aFlags,
                                                     aInputNumber, aOutputNumber);
  port->SetGraphImpl(GraphImpl());
  GraphImpl()->AppendMessage(new Message(port));
  return port.forget();
}

void
ProcessedMediaStream::Finish()
{
  class Message : public ControlMessage {
  public:
    explicit Message(ProcessedMediaStream* aStream)
      : ControlMessage(aStream) {}
    virtual void Run()
    {
      mStream->GraphImpl()->FinishStream(mStream);
    }
  };
  GraphImpl()->AppendMessage(new Message(this));
}

void
ProcessedMediaStream::SetAutofinish(bool aAutofinish)
{
  class Message : public ControlMessage {
  public:
    Message(ProcessedMediaStream* aStream, bool aAutofinish)
      : ControlMessage(aStream), mAutofinish(aAutofinish) {}
    virtual void Run()
    {
      static_cast<ProcessedMediaStream*>(mStream)->SetAutofinishImpl(mAutofinish);
    }
    bool mAutofinish;
  };
  GraphImpl()->AppendMessage(new Message(this, aAutofinish));
}

void
ProcessedMediaStream::DestroyImpl()
{
  for (int32_t i = mInputs.Length() - 1; i >= 0; --i) {
    mInputs[i]->Disconnect();
  }
  MediaStream::DestroyImpl();
  // The stream order is only important if there are connections, in which
  // case MediaInputPort::Disconnect() called SetStreamOrderDirty().
  // MediaStreamGraphImpl::RemoveStream() will also call
  // SetStreamOrderDirty(), for other reasons.
}
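
// Graph construction picks a driver up front: realtime graphs that expect
// audio content get an AudioCallbackDriver registered with the mixer, other
// realtime graphs get a SystemClockDriver, and offline graphs get an
// OfflineClockDriver.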

MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime,
                                           TrackRate aSampleRate,
                                           DOMMediaStream::TrackTypeHints aHint,
                                           dom::AudioChannel aChannel)
  : MediaStreamGraph(aSampleRate)
  , mProcessingGraphUpdateIndex(0)
  , mPortCount(0)
  , mNeedAnotherIteration(false)
  , mGraphDriverAsleep(false)
  , mMonitor("MediaStreamGraphImpl")
  , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
  , mEndTime(GRAPH_TIME_MAX)
  , mForceShutDown(false)
  , mPostedRunInStableStateEvent(false)
  , mFlushSourcesNow(false)
  , mFlushSourcesOnNextIteration(false)
  , mDetectedNotRunning(false)
  , mPostedRunInStableState(false)
  , mRealtime(aRealtime)
  , mNonRealtimeProcessing(false)
  , mStreamOrderDirty(false)
  , mLatencyLog(AsyncLatencyLogger::Get())
#ifdef MOZ_WEBRTC
  , mFarendObserverRef(nullptr)
#endif
  , mMemoryReportMonitor("MSGIMemory")
  , mSelfRef(this)
  , mAudioStreamSizes()
  , mNeedsMemoryReport(false)
#ifdef DEBUG
  , mCanRunMessagesSynchronously(false)
#endif
  , mAudioChannel(static_cast<uint32_t>(aChannel))
{
#ifdef PR_LOGGING
  if (!gMediaStreamGraphLog) {
    gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
  }
#endif

  if (mRealtime) {
    if (aHint & DOMMediaStream::HINT_CONTENTS_AUDIO) {
      AudioCallbackDriver* driver = new AudioCallbackDriver(this, aChannel);
      mDriver = driver;
      mMixer.AddCallback(driver);
    } else {
      mDriver = new SystemClockDriver(this);
    }
  } else {
    mDriver = new OfflineClockDriver(this, MEDIA_GRAPH_TARGET_PERIOD_MS);
  }

  mLastMainThreadUpdate = TimeStamp::Now();

  RegisterWeakMemoryReporter(this);
}

void
MediaStreamGraphImpl::Destroy()
{
  // First unregister from memory reporting.
  UnregisterWeakMemoryReporter(this);

  // Clear the self reference which will destroy this instance.
  mSelfRef = nullptr;
}

NS_IMPL_ISUPPORTS(MediaStreamGraphShutdownObserver, nsIObserver)

static bool gShutdownObserverRegistered = false;

namespace {

PLDHashOperator
ForceShutdownEnumerator(const uint32_t& /* aAudioChannel */,
                        MediaStreamGraphImpl* aGraph,
                        void* /* aUnused */)
{
  aGraph->ForceShutDown();
  return PL_DHASH_NEXT;
}

} // anonymous namespace

NS_IMETHODIMP
MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject,
                                          const char *aTopic,
                                          const char16_t *aData)
{
  if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
    gGraphs.EnumerateRead(ForceShutdownEnumerator, nullptr);
    nsContentUtils::UnregisterShutdownObserver(this);
    gShutdownObserverRegistered = false;
  }
  return NS_OK;
}
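
// There is at most one realtime graph per AudioChannel, created lazily here
// and tracked in gGraphs; the XPCOM shutdown observer registered on first use
// force-shuts all of them down.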

MediaStreamGraph*
MediaStreamGraph::GetInstance(DOMMediaStream::TrackTypeHints aHint, dom::AudioChannel aChannel)
{
  NS_ASSERTION(NS_IsMainThread(), "Main thread only");

  uint32_t channel = static_cast<uint32_t>(aChannel);
  MediaStreamGraphImpl* graph = nullptr;

  if (!gGraphs.Get(channel, &graph)) {
    if (!gShutdownObserverRegistered) {
      gShutdownObserverRegistered = true;
      nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver());
    }

    CubebUtils::InitPreferredSampleRate();

    graph = new MediaStreamGraphImpl(true, CubebUtils::PreferredSampleRate(), aHint, aChannel);
    gGraphs.Put(channel, graph);

    STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", graph));
  }

  return graph;
}

MediaStreamGraph*
MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate)
{
  NS_ASSERTION(NS_IsMainThread(), "Main thread only");

  MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false, aSampleRate);

  STREAM_LOG(PR_LOG_DEBUG, ("Starting up Offline MediaStreamGraph %p", graph));

  return graph;
}

void
MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph)
{
  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
  MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here");

  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
  if (graph->mForceShutDown)
    return; // already done

  if (!graph->mNonRealtimeProcessing) {
    // Start the graph, but don't produce anything
    graph->StartNonRealtimeProcessing(0);
  }
  graph->ForceShutDown();
}
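
// Memory reporting handshake: the main thread flags mNeedsMemoryReport, wakes
// the graph thread (unless the graph is offline), then waits on
// mMemoryReportMonitor for the graph to populate mAudioStreamSizes before
// emitting the per-node-type reports below.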

NS_IMPL_ISUPPORTS(MediaStreamGraphImpl, nsIMemoryReporter)

struct ArrayClearer
{
  explicit ArrayClearer(nsTArray<AudioNodeSizes>& aArray) : mArray(aArray) {}
  ~ArrayClearer() { mArray.Clear(); }
  nsTArray<AudioNodeSizes>& mArray;
};

NS_IMETHODIMP
MediaStreamGraphImpl::CollectReports(nsIHandleReportCallback* aHandleReport,
                                     nsISupports* aData, bool aAnonymize)
{
  // Clears out the report array after we're done with it.
  ArrayClearer reportCleanup(mAudioStreamSizes);

  {
    MonitorAutoLock memoryReportLock(mMemoryReportMonitor);
    mNeedsMemoryReport = true;

    {
      // Wake up the MSG thread if it's real time (Offline graphs can't be
      // sleeping).
      MonitorAutoLock monitorLock(mMonitor);
      if (!CurrentDriver()->AsOfflineClockDriver()) {
        CurrentDriver()->WakeUp();
      }
    }

    if (mLifecycleState >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN) {
      // Shutting down, nothing to report.
      return NS_OK;
    }

    // Wait for up to one second for the report to complete.
    nsresult rv;
    const PRIntervalTime kMaxWait = PR_SecondsToInterval(1);
    while ((rv = memoryReportLock.Wait(kMaxWait)) != NS_OK) {
      if (PR_GetError() != PR_PENDING_INTERRUPT_ERROR) {
        return rv;
      }
    }
  }

#define REPORT(_path, _amount, _desc)                                  \
  do {                                                                 \
    nsresult rv;                                                       \
    rv = aHandleReport->Callback(EmptyCString(), _path,                \
                                 KIND_HEAP, UNITS_BYTES, _amount,      \
                                 NS_LITERAL_CSTRING(_desc), aData);    \
    NS_ENSURE_SUCCESS(rv, rv);                                         \
  } while (0)

  for (size_t i = 0; i < mAudioStreamSizes.Length(); i++) {
    const AudioNodeSizes& usage = mAudioStreamSizes[i];
    const char* const nodeType = usage.mNodeType.IsEmpty() ?
      "<unknown>" : usage.mNodeType.get();

    nsPrintfCString domNodePath("explicit/webaudio/audio-node/%s/dom-nodes",
                                nodeType);
    REPORT(domNodePath, usage.mDomNode,
           "Memory used by AudioNode DOM objects (Web Audio).");

    nsPrintfCString enginePath("explicit/webaudio/audio-node/%s/engine-objects",
                               nodeType);
    REPORT(enginePath, usage.mEngine,
           "Memory used by AudioNode engine objects (Web Audio).");

    nsPrintfCString streamPath("explicit/webaudio/audio-node/%s/stream-objects",
                               nodeType);
    REPORT(streamPath, usage.mStream,
           "Memory used by AudioNode stream objects (Web Audio).");
  }

  size_t hrtfLoaders = WebCore::HRTFDatabaseLoader::sizeOfLoaders(MallocSizeOf);
  if (hrtfLoaders) {
    REPORT(NS_LITERAL_CSTRING(
             "explicit/webaudio/audio-node/PannerNode/hrtf-databases"),
           hrtfLoaders,
           "Memory used by PannerNode databases (Web Audio).");
  }

#undef REPORT

  return NS_OK;
}

SourceMediaStream*
MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper)
{
  SourceMediaStream* stream = new SourceMediaStream(aWrapper);
  NS_ADDREF(stream);
  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  stream->SetGraphImpl(graph);
  graph->AppendMessage(new CreateMessage(stream));
  return stream;
}

ProcessedMediaStream*
MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
{
  TrackUnionStream* stream = new TrackUnionStream(aWrapper);
  NS_ADDREF(stream);
  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  stream->SetGraphImpl(graph);
  graph->AppendMessage(new CreateMessage(stream));
  return stream;
}

AudioNodeExternalInputStream*
MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
{
  MOZ_ASSERT(NS_IsMainThread());
  if (!aSampleRate) {
    aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
  }
  AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate);
  NS_ADDREF(stream);
  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  stream->SetGraphImpl(graph);
  graph->AppendMessage(new CreateMessage(stream));
  return stream;
}

AudioNodeStream*
MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine,
                                        AudioNodeStreamKind aKind,
                                        TrackRate aSampleRate)
{
  MOZ_ASSERT(NS_IsMainThread());
  if (!aSampleRate) {
    aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
  }
  AudioNodeStream* stream = new AudioNodeStream(aEngine, aKind, aSampleRate);
  NS_ADDREF(stream);
  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  stream->SetGraphImpl(graph);
  if (aEngine->HasNode()) {
    stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(),
                                           aEngine->NodeMainThread()->ChannelCountModeValue(),
                                           aEngine->NodeMainThread()->ChannelInterpretationValue());
  }
  graph->AppendMessage(new CreateMessage(stream));
  return stream;
}
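
// A graph counts as non-realtime when it is not the registered global
// instance for its audio channel in gGraphs.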

bool
MediaStreamGraph::IsNonRealtime() const
{
  const MediaStreamGraphImpl* impl = static_cast<const MediaStreamGraphImpl*>(this);
  MediaStreamGraphImpl* graph;

  return !gGraphs.Get(impl->AudioChannel(), &graph) || graph != impl;
}

void
MediaStreamGraph::StartNonRealtimeProcessing(uint32_t aTicksToProcess)
{
  NS_ASSERTION(NS_IsMainThread(), "main thread only");

  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  NS_ASSERTION(!graph->mRealtime, "non-realtime only");

  if (graph->mNonRealtimeProcessing)
    return;

  graph->mEndTime = graph->IterationEnd() + aTicksToProcess;
  graph->mNonRealtimeProcessing = true;
  graph->EnsureRunInStableState();
}

void
ProcessedMediaStream::AddInput(MediaInputPort* aPort)
{
  mInputs.AppendElement(aPort);
  GraphImpl()->SetStreamOrderDirty();
}

}