Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1259788 - Support MediaStream sources for HTMLMediaElement.mozCaptureStream(). r=jesup
This adds support for HTMLMediaElement.mozCaptureStream() and mozCaptureStreamUntilEnded() on an HTMLMediaElement that is playing a MediaStream. That case follows the spec, while capturing an HTMLMediaElement playing a file does not. Because of this incompatibility we cannot mix sources in the returned MediaStream: a MediaStream captured while the element was playing a file only carries content while the element keeps playing files, and if src changes to a MediaStream the captured stream stays empty. The same applies the other way around when a MediaStream was captured while the element was playing another MediaStream. This comes down to TrackID management: MediaDecoder does not track IDs and creates new tracks when you seek, so consumers cannot follow them, whereas for a MediaStream source everything is controlled from the main thread and the previously used TrackIDs are known.

This also adds a path separate from MediaElementAudioSourceNode so that video tracks are not forwarded when the returned MediaStream is only used internally for Web Audio. In that case we should not need a DOMMediaStream at all; forwarding the tracks to a TrackUnionStream would be enough and would save some CPU cycles. For now the simpler approach is fine.

MozReview-Commit-ID: Bg8hESDISDU

--HG--
extra : rebase_source : 83885a73ec8cfc5fbe3c30a9330a52cd6b6dff12
extra : source : f1aec79078869c0a6435a1c06957c649d7a40dd9
This commit is contained in: parent 83dcc7266e, commit 091e8c0b00
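Before the diff, here is a rough JavaScript sketch of the capture behaviour the commit message describes. It is not part of the patch; the element setup, the getUserMedia constraints, and the "clip.webm" URL are illustrative assumptions, while mozCaptureStream() and srcObject are the real Firefox/DOM APIs involved.

  // Sketch only: illustrates the behaviour described in the commit message.
  async function captureFromStreamBackedElement() {
    const video = document.createElement("video");

    // The element plays a MediaStream source; capturing such an element is
    // the case this patch adds support for.
    const camera = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
    video.srcObject = camera;
    await video.play();

    // Firefox-prefixed API: returns a MediaStream mirroring what the
    // element currently plays.
    const captured = video.mozCaptureStream();

    // Sources cannot be mixed in the captured stream: if the element later
    // switches to a file source, `captured` is expected to stop receiving
    // content while the file plays (and vice versa for a file-backed capture).
    video.srcObject = null;
    video.src = "clip.webm"; // illustrative URL
    return captured;
  }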
@@ -938,7 +938,6 @@ void HTMLMediaElement::AbortExistingLoads()
// We need to remove StreamSizeListener before VideoTracks get emptied.
if (mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = nullptr;
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;
}
@@ -1248,8 +1247,9 @@ void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
nsString id;
aTrack->GetId(id);

LOG(LogLevel::Debug, ("MediaElement %p MediaTrack with id %s enabled",
this, NS_ConvertUTF16toUTF8(id).get()));
LOG(LogLevel::Debug, ("MediaElement %p %sTrack with id %s enabled",
this, aTrack->AsAudioTrack() ? "Audio" : "Video",
NS_ConvertUTF16toUTF8(id).get()));
#endif

MOZ_ASSERT((aTrack->AsAudioTrack() && aTrack->AsAudioTrack()->Enabled()) ||
@@ -1258,7 +1258,46 @@ void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
if (aTrack->AsAudioTrack()) {
SetMutedInternal(mMuted & ~MUTED_BY_AUDIO_TRACK);
} else if (aTrack->AsVideoTrack()) {
if (!IsVideo()) {
MOZ_ASSERT(false);
return;
}
mDisableVideo = false;
} else {
MOZ_ASSERT(false, "Unknown track type");
}

if (mSrcStream) {
if (aTrack->AsVideoTrack()) {
MOZ_ASSERT(!mSelectedVideoStreamTrack);
MOZ_ASSERT(!mMediaStreamSizeListener);

mSelectedVideoStreamTrack = aTrack->AsVideoTrack()->GetVideoStreamTrack();
VideoFrameContainer* container = GetVideoFrameContainer();
if (mSrcStreamIsPlaying && container) {
mSelectedVideoStreamTrack->AddVideoOutput(container);
}
HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
// MediaInfo uses dummy values of 1 for width and height to
// mark video as valid. We need a new stream size listener
// if size is 0x0 or 1x1.
mMediaStreamSizeListener = new StreamSizeListener(this);
mSelectedVideoStreamTrack->AddDirectListener(mMediaStreamSizeListener);
}
}

if (mReadyState == HAVE_NOTHING) {
// No MediaStreamTracks are captured until we have metadata.
return;
}
for (OutputMediaStream& ms : mOutputStreams) {
if (aTrack->AsVideoTrack() && ms.mCapturingAudioOnly) {
// If the output stream is for audio only we ignore video tracks.
continue;
}
AddCaptureMediaTrackToOutputStream(aTrack, ms);
}
}
}

@@ -1272,14 +1311,14 @@ void HTMLMediaElement::NotifyMediaTrackDisabled(MediaTrack* aTrack)
nsString id;
aTrack->GetId(id);

LOG(LogLevel::Debug, ("MediaElement %p MediaTrack with id %s disabled",
this, NS_ConvertUTF16toUTF8(id).get()));
LOG(LogLevel::Debug, ("MediaElement %p %sTrack with id %s disabled",
this, aTrack->AsAudioTrack() ? "Audio" : "Video",
NS_ConvertUTF16toUTF8(id).get()));
#endif

MOZ_ASSERT((!aTrack->AsAudioTrack() || !aTrack->AsAudioTrack()->Enabled()) &&
(!aTrack->AsVideoTrack() || !aTrack->AsVideoTrack()->Selected()));

if (aTrack->AsAudioTrack()) {
bool shouldMute = true;
for (uint32_t i = 0; i < AudioTracks()->Length(); ++i) {
@@ -1292,10 +1331,54 @@ void HTMLMediaElement::NotifyMediaTrackDisabled(MediaTrack* aTrack)
SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
}
} else if (aTrack->AsVideoTrack()) {
if (VideoTracks()->SelectedIndex() == -1) {
mDisableVideo = false;
if (mSrcStream) {
MOZ_ASSERT(mSelectedVideoStreamTrack);
if (mSelectedVideoStreamTrack && mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;
}
VideoFrameContainer* container = GetVideoFrameContainer();
if (mSrcStreamIsPlaying && container) {
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
}
mSelectedVideoStreamTrack = nullptr;
}
}

for (OutputMediaStream& ms : mOutputStreams) {
if (ms.mCapturingDecoder) {
MOZ_ASSERT(!ms.mCapturingMediaStream);
continue;
}
MOZ_ASSERT(ms.mCapturingMediaStream);
for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) {
if (ms.mTrackPorts[i].first() == aTrack->GetId()) {
// The source of this track just ended. Force-notify that it ended.
// If we bounce it to the MediaStreamGraph it might not be picked up,
// for instance if the MediaInputPort was destroyed in the same
// iteration as it was added.
MediaStreamTrack* outputTrack = ms.mStream->FindOwnedDOMTrack(
ms.mTrackPorts[i].second()->GetDestination(),
ms.mTrackPorts[i].second()->GetDestinationTrackId());
MOZ_ASSERT(outputTrack);
if (outputTrack) {
NS_DispatchToMainThread(
NewRunnableMethod(outputTrack, &MediaStreamTrack::NotifyEnded));
}

ms.mTrackPorts[i].second()->Destroy();
ms.mTrackPorts.RemoveElementAt(i);
break;
}
}
#ifdef DEBUG
for (auto pair : ms.mTrackPorts) {
MOZ_ASSERT(pair.first() != aTrack->GetId(),
"The same MediaTrack was forwarded to the output stream more than once. This shouldn't happen.");
}
#endif
}
}

void HTMLMediaElement::NotifyMediaStreamTracksAvailable(DOMMediaStream* aStream)
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
class HTMLMediaElement::StreamCaptureTrackSource :
|
||||
public MediaStreamTrackSource,
|
||||
public MediaStreamTrackSource::Sink
|
||||
{
|
||||
public:
|
||||
NS_DECL_ISUPPORTS_INHERITED
|
||||
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(StreamCaptureTrackSource,
|
||||
MediaStreamTrackSource)
|
||||
|
||||
explicit StreamCaptureTrackSource(MediaStreamTrackSource* aCapturedTrackSource)
|
||||
: MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(),
|
||||
true,
|
||||
nsString())
|
||||
, mCapturedTrackSource(aCapturedTrackSource)
|
||||
{
|
||||
mCapturedTrackSource->RegisterSink(this);
|
||||
}
|
||||
|
||||
void Destroy() override
|
||||
{
|
||||
MOZ_ASSERT(mCapturedTrackSource);
|
||||
if (mCapturedTrackSource) {
|
||||
mCapturedTrackSource->UnregisterSink(this);
|
||||
}
|
||||
}
|
||||
|
||||
MediaSourceEnum GetMediaSource() const override
|
||||
{
|
||||
return MediaSourceEnum::Other;
|
||||
}
|
||||
|
||||
CORSMode GetCORSMode() const override
|
||||
{
|
||||
return mCapturedTrackSource->GetCORSMode();
|
||||
}
|
||||
|
||||
already_AddRefed<PledgeVoid>
|
||||
ApplyConstraints(nsPIDOMWindowInner* aWindow,
|
||||
const dom::MediaTrackConstraints& aConstraints) override
|
||||
{
|
||||
RefPtr<PledgeVoid> p = new PledgeVoid();
|
||||
p->Reject(new dom::MediaStreamError(aWindow,
|
||||
NS_LITERAL_STRING("OverconstrainedError"),
|
||||
NS_LITERAL_STRING("")));
|
||||
return p.forget();
|
||||
}
|
||||
|
||||
void Stop() override
|
||||
{
|
||||
NS_ERROR("We're reporting remote=true to not be stoppable. "
|
||||
"Stop() should not be called.");
|
||||
}
|
||||
|
||||
void PrincipalChanged() override
|
||||
{
|
||||
mPrincipal = mCapturedTrackSource->GetPrincipal();
|
||||
MediaStreamTrackSource::PrincipalChanged();
|
||||
}
|
||||
|
||||
private:
|
||||
virtual ~StreamCaptureTrackSource() {}
|
||||
|
||||
RefPtr<MediaStreamTrackSource> mCapturedTrackSource;
|
||||
};
|
||||
|
||||
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
|
||||
MediaStreamTrackSource)
|
||||
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
|
||||
MediaStreamTrackSource)
|
||||
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource)
|
||||
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
|
||||
NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
|
||||
MediaStreamTrackSource,
|
||||
mCapturedTrackSource)
|
||||
|
||||
class HTMLMediaElement::DecoderCaptureTrackSource :
|
||||
public MediaStreamTrackSource,
|
||||
public DecoderPrincipalChangeObserver
|
||||
@ -2222,6 +2380,11 @@ public:
|
||||
already_AddRefed<dom::MediaStreamTrackSource>
|
||||
GetMediaStreamTrackSource(TrackID aInputTrackID) override
|
||||
{
|
||||
if (mElement && mElement->mSrcStream) {
|
||||
NS_ERROR("Captured media element playing a stream adds tracks explicitly on main thread.");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// We can return a new source each time here, even for different streams,
|
||||
// since the sources don't keep any internal state and all of them call
|
||||
// through to the same HTMLMediaElement.
|
||||
@ -2247,8 +2410,115 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGet
|
||||
MediaStreamTrackSourceGetter,
|
||||
mElement)
|
||||
|
||||
void
|
||||
HTMLMediaElement::SetCapturedOutputStreamsEnabled(bool aEnabled) {
|
||||
for (OutputMediaStream& ms : mOutputStreams) {
|
||||
if (ms.mCapturingDecoder) {
|
||||
MOZ_ASSERT(!ms.mCapturingMediaStream);
|
||||
continue;
|
||||
}
|
||||
for (auto pair : ms.mTrackPorts) {
|
||||
MediaStream* outputSource = ms.mStream->GetInputStream();
|
||||
if (!outputSource) {
|
||||
NS_ERROR("No output source stream");
|
||||
return;
|
||||
}
|
||||
|
||||
TrackID id = pair.second()->GetDestinationTrackId();
|
||||
outputSource->SetTrackEnabled(id, aEnabled);
|
||||
|
||||
LOG(LogLevel::Debug,
|
||||
("%s track %d for captured MediaStream %p",
|
||||
aEnabled ? "Enabled" : "Disabled", id, ms.mStream.get()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
HTMLMediaElement::AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
|
||||
OutputMediaStream& aOutputStream,
|
||||
bool aAsyncAddtrack)
|
||||
{
|
||||
if (aOutputStream.mCapturingDecoder) {
|
||||
MOZ_ASSERT(!aOutputStream.mCapturingMediaStream);
|
||||
return;
|
||||
}
|
||||
aOutputStream.mCapturingMediaStream = true;
|
||||
|
||||
MediaStream* outputSource = aOutputStream.mStream->GetInputStream();
|
||||
if (!outputSource) {
|
||||
NS_ERROR("No output source stream");
|
||||
return;
|
||||
}
|
||||
|
||||
ProcessedMediaStream* processedOutputSource =
|
||||
outputSource->AsProcessedStream();
|
||||
if (!processedOutputSource) {
|
||||
NS_ERROR("Input stream not a ProcessedMediaStream");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!aTrack) {
|
||||
MOZ_ASSERT(false, "Bad MediaTrack");
|
||||
return;
|
||||
}
|
||||
|
||||
MediaStreamTrack* inputTrack = mSrcStream->GetTrackById(aTrack->GetId());
|
||||
MOZ_ASSERT(inputTrack);
|
||||
if (!inputTrack) {
|
||||
NS_ERROR("Input track not found in source stream");
|
||||
return;
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
for (auto pair : aOutputStream.mTrackPorts) {
|
||||
MOZ_ASSERT(pair.first() != aTrack->GetId(),
|
||||
"Captured track already captured to output stream");
|
||||
}
|
||||
#endif
|
||||
|
||||
TrackID destinationTrackID = aOutputStream.mNextAvailableTrackID++;
|
||||
RefPtr<MediaStreamTrackSource> source =
|
||||
new StreamCaptureTrackSource(&inputTrack->GetSource());
|
||||
|
||||
MediaSegment::Type type = inputTrack->AsAudioStreamTrack()
|
||||
? MediaSegment::AUDIO
|
||||
: MediaSegment::VIDEO;
|
||||
|
||||
RefPtr<MediaStreamTrack> track =
|
||||
aOutputStream.mStream->CreateDOMTrack(destinationTrackID, type, source);
|
||||
|
||||
if (aAsyncAddtrack) {
|
||||
NS_DispatchToMainThread(
|
||||
NewRunnableMethod<StorensRefPtrPassByPtr<MediaStreamTrack>>(
|
||||
aOutputStream.mStream, &DOMMediaStream::AddTrackInternal, track));
|
||||
} else {
|
||||
aOutputStream.mStream->AddTrackInternal(track);
|
||||
}
|
||||
|
||||
// Track is muted initially, so we don't leak data if it's added while paused
|
||||
// and an MSG iteration passes before the mute comes into effect.
|
||||
processedOutputSource->SetTrackEnabled(destinationTrackID, false);
|
||||
RefPtr<MediaInputPort> port =
|
||||
inputTrack->ForwardTrackContentsTo(processedOutputSource,
|
||||
destinationTrackID);
|
||||
|
||||
Pair<nsString, RefPtr<MediaInputPort>> p(aTrack->GetId(), port);
|
||||
aOutputStream.mTrackPorts.AppendElement(Move(p));
|
||||
|
||||
if (mSrcStreamIsPlaying) {
|
||||
processedOutputSource->SetTrackEnabled(destinationTrackID, true);
|
||||
}
|
||||
|
||||
LOG(LogLevel::Debug,
|
||||
("Created %s track %p with id %d from track %p through MediaInputPort %p",
|
||||
inputTrack->AsAudioStreamTrack() ? "audio" : "video",
|
||||
track.get(), destinationTrackID, inputTrack, port.get()));
|
||||
}
|
||||
|
||||
already_AddRefed<DOMMediaStream>
|
||||
HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
|
||||
bool aCaptureAudio,
|
||||
MediaStreamGraph* aGraph)
|
||||
{
|
||||
nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
|
||||
@ -2277,30 +2547,91 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
|
||||
MediaStreamTrackSourceGetter* getter = new CaptureStreamTrackSourceGetter(this);
|
||||
out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph, getter);
|
||||
out->mFinishWhenEnded = aFinishWhenEnded;
|
||||
out->mCapturingAudioOnly = aCaptureAudio;
|
||||
|
||||
if (aCaptureAudio) {
|
||||
if (mSrcStream) {
|
||||
// We don't support applying volume and mute to the captured stream, when
|
||||
// capturing a MediaStream.
|
||||
nsContentUtils::ReportToConsole(nsIScriptError::errorFlag,
|
||||
NS_LITERAL_CSTRING("Media"),
|
||||
OwnerDoc(),
|
||||
nsContentUtils::eDOM_PROPERTIES,
|
||||
"MediaElementAudioCaptureOfMediaStreamError");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// mAudioCaptured tells the user that the audio played by this media element
|
||||
// is being routed to the captureStreams *instead* of being played to
|
||||
// speakers.
|
||||
mAudioCaptured = true;
|
||||
}
|
||||
|
||||
if (mReadyState == HAVE_NOTHING) {
|
||||
// Do not expose the tracks directly before we have metadata.
|
||||
RefPtr<DOMMediaStream> result = out->mStream;
|
||||
return result.forget();
|
||||
}
|
||||
|
||||
mAudioCaptured = true;
|
||||
if (mDecoder) {
|
||||
out->mCapturingDecoder = true;
|
||||
mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
|
||||
aFinishWhenEnded);
|
||||
if (mReadyState >= HAVE_METADATA) {
|
||||
// Expose the tracks to JS directly.
|
||||
if (HasAudio()) {
|
||||
TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
|
||||
RefPtr<MediaStreamTrackSource> trackSource =
|
||||
getter->GetMediaStreamTrackSource(audioTrackId);
|
||||
RefPtr<MediaStreamTrack> track =
|
||||
out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO,
|
||||
trackSource);
|
||||
out->mStream->AddTrackInternal(track);
|
||||
if (HasAudio()) {
|
||||
TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
|
||||
RefPtr<MediaStreamTrackSource> trackSource =
|
||||
getter->GetMediaStreamTrackSource(audioTrackId);
|
||||
RefPtr<MediaStreamTrack> track =
|
||||
out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO,
|
||||
trackSource);
|
||||
out->mStream->AddTrackInternal(track);
|
||||
LOG(LogLevel::Debug,
|
||||
("Created audio track %d for captured decoder", audioTrackId));
|
||||
}
|
||||
if (IsVideo() && HasVideo() && !out->mCapturingAudioOnly) {
|
||||
TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
|
||||
RefPtr<MediaStreamTrackSource> trackSource =
|
||||
getter->GetMediaStreamTrackSource(videoTrackId);
|
||||
RefPtr<MediaStreamTrack> track =
|
||||
out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
|
||||
trackSource);
|
||||
out->mStream->AddTrackInternal(track);
|
||||
LOG(LogLevel::Debug,
|
||||
("Created video track %d for captured decoder", videoTrackId));
|
||||
}
|
||||
}
|
||||
|
||||
if (mSrcStream) {
|
||||
out->mCapturingMediaStream = true;
|
||||
MediaStream* inputStream = out->mStream->GetInputStream();
|
||||
if (!inputStream) {
|
||||
NS_ERROR("No input stream");
|
||||
RefPtr<DOMMediaStream> result = out->mStream;
|
||||
return result.forget();
|
||||
}
|
||||
|
||||
ProcessedMediaStream* processedInputStream =
|
||||
inputStream->AsProcessedStream();
|
||||
if (!processedInputStream) {
|
||||
NS_ERROR("Input stream not a ProcessedMediaStream");
|
||||
RefPtr<DOMMediaStream> result = out->mStream;
|
||||
return result.forget();
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
|
||||
AudioTrack* t = (*AudioTracks())[i];
|
||||
if (t->Enabled()) {
|
||||
AddCaptureMediaTrackToOutputStream(t, *out, false);
|
||||
}
|
||||
if (HasVideo()) {
|
||||
TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
|
||||
RefPtr<MediaStreamTrackSource> trackSource =
|
||||
getter->GetMediaStreamTrackSource(videoTrackId);
|
||||
RefPtr<MediaStreamTrack> track =
|
||||
out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
|
||||
trackSource);
|
||||
out->mStream->AddTrackInternal(track);
|
||||
}
|
||||
if (IsVideo() && !out->mCapturingAudioOnly) {
|
||||
// Only add video tracks if we're a video element and the output stream
|
||||
// wants video.
|
||||
for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
|
||||
VideoTrack* t = (*VideoTracks())[i];
|
||||
if (t->Selected()) {
|
||||
AddCaptureMediaTrackToOutputStream(t, *out, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2308,6 +2639,19 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
|
||||
return result.forget();
|
||||
}
|
||||
|
||||
already_AddRefed<DOMMediaStream>
|
||||
HTMLMediaElement::CaptureAudio(ErrorResult& aRv,
|
||||
MediaStreamGraph* aGraph)
|
||||
{
|
||||
RefPtr<DOMMediaStream> stream = CaptureStreamInternal(false, aGraph);
|
||||
if (!stream) {
|
||||
aRv.Throw(NS_ERROR_FAILURE);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
return stream.forget();
|
||||
}
|
||||
|
||||
already_AddRefed<DOMMediaStream>
|
||||
HTMLMediaElement::MozCaptureStream(ErrorResult& aRv,
|
||||
MediaStreamGraph* aGraph)
|
||||
@ -2845,6 +3189,20 @@ HTMLMediaElement::WakeLockRelease()
|
||||
}
|
||||
}
|
||||
|
||||
HTMLMediaElement::OutputMediaStream::OutputMediaStream()
|
||||
: mFinishWhenEnded(false)
|
||||
, mCapturingAudioOnly(false)
|
||||
, mCapturingDecoder(false)
|
||||
, mCapturingMediaStream(false)
|
||||
, mNextAvailableTrackID(1) {}
|
||||
|
||||
HTMLMediaElement::OutputMediaStream::~OutputMediaStream()
|
||||
{
|
||||
for (auto pair : mTrackPorts) {
|
||||
pair.second()->Destroy();
|
||||
}
|
||||
}
|
||||
|
||||
bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
|
||||
nsIAtom* aAttribute,
|
||||
const nsAString& aValue,
|
||||
@ -3462,10 +3820,15 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
||||
return rv;
|
||||
}
|
||||
|
||||
for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
|
||||
OutputMediaStream* ms = &mOutputStreams[i];
|
||||
aDecoder->AddOutputStream(ms->mStream->GetInputStream()->AsProcessedStream(),
|
||||
ms->mFinishWhenEnded);
|
||||
for (OutputMediaStream& ms : mOutputStreams) {
|
||||
if (ms.mCapturingMediaStream) {
|
||||
MOZ_ASSERT(!ms.mCapturingDecoder);
|
||||
continue;
|
||||
}
|
||||
|
||||
ms.mCapturingDecoder = true;
|
||||
aDecoder->AddOutputStream(ms.mStream->GetInputStream()->AsProcessedStream(),
|
||||
ms.mFinishWhenEnded);
|
||||
}
|
||||
|
||||
#ifdef MOZ_EME
|
||||
@ -3719,13 +4082,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
|
||||
if (mSelectedVideoStreamTrack && container) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
|
||||
if (videoTrack) {
|
||||
VideoStreamTrack* videoStreamTrack = videoTrack->GetVideoStreamTrack();
|
||||
if (videoStreamTrack && container) {
|
||||
videoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
}
|
||||
|
||||
SetCapturedOutputStreamsEnabled(true); // Unmute
|
||||
} else {
|
||||
if (stream) {
|
||||
mSrcStreamPausedCurrentTime = CurrentTime();
|
||||
@ -3737,13 +4095,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
|
||||
if (mSelectedVideoStreamTrack && container) {
|
||||
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
|
||||
}
|
||||
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
|
||||
if (videoTrack) {
|
||||
VideoStreamTrack* videoStreamTrack = videoTrack->GetVideoStreamTrack();
|
||||
if (videoStreamTrack && container) {
|
||||
videoStreamTrack->RemoveVideoOutput(container);
|
||||
}
|
||||
}
|
||||
|
||||
SetCapturedOutputStreamsEnabled(false); // Mute
|
||||
}
|
||||
// If stream is null, then DOMMediaStream::Destroy must have been
|
||||
// called and that will remove all listeners/outputs.
|
||||
@ -3782,7 +4135,11 @@ void HTMLMediaElement::SetupSrcMediaStreamPlayback(DOMMediaStream* aStream)
|
||||
// If we pause this media element, track changes in the underlying stream
|
||||
// will continue to fire events at this element and alter its track list.
|
||||
// That's simpler than delaying the events, but probably confusing...
|
||||
ConstructMediaTracks();
|
||||
nsTArray<RefPtr<MediaStreamTrack>> tracks;
|
||||
mSrcStream->GetTracks(tracks);
|
||||
for (const RefPtr<MediaStreamTrack>& track : tracks) {
|
||||
NotifyMediaStreamTrackAdded(track);
|
||||
}
|
||||
|
||||
mSrcStream->OnTracksAvailable(new MediaStreamTracksAvailableCallback(this));
|
||||
mMediaStreamTrackListener = new MediaStreamTrackListener(this);
|
||||
@ -3805,11 +4162,14 @@ void HTMLMediaElement::EndSrcMediaStreamPlayback()
|
||||
UpdateSrcMediaStreamPlaying(REMOVING_SRC_STREAM);
|
||||
|
||||
if (mMediaStreamSizeListener) {
|
||||
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
|
||||
mSelectedVideoStreamTrack = nullptr;
|
||||
MOZ_ASSERT(mSelectedVideoStreamTrack);
|
||||
if (mSelectedVideoStreamTrack) {
|
||||
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
|
||||
}
|
||||
mMediaStreamSizeListener->Forget();
|
||||
mMediaStreamSizeListener = nullptr;
|
||||
}
|
||||
mSelectedVideoStreamTrack = nullptr;
|
||||
mMediaStreamSizeListener = nullptr;
|
||||
|
||||
mSrcStream->UnregisterTrackListener(mMediaStreamTrackListener);
|
||||
mMediaStreamTrackListener = nullptr;
|
||||
@ -3817,6 +4177,13 @@ void HTMLMediaElement::EndSrcMediaStreamPlayback()
|
||||
mSrcStream->RemovePrincipalChangeObserver(this);
|
||||
mSrcStreamVideoPrincipal = nullptr;
|
||||
|
||||
for (OutputMediaStream& ms : mOutputStreams) {
|
||||
for (auto pair : ms.mTrackPorts) {
|
||||
pair.second()->Destroy();
|
||||
}
|
||||
ms.mTrackPorts.Clear();
|
||||
}
|
||||
|
||||
mSrcStream = nullptr;
|
||||
}
|
||||
|
||||
@ -3829,8 +4196,7 @@ CreateAudioTrack(AudioStreamTrack* aStreamTrack)
|
||||
aStreamTrack->GetLabel(label);
|
||||
|
||||
return MediaTrackList::CreateAudioTrack(id, NS_LITERAL_STRING("main"),
|
||||
label, EmptyString(),
|
||||
aStreamTrack->Enabled());
|
||||
label, EmptyString(), true);
|
||||
}
|
||||
|
||||
static already_AddRefed<VideoTrack>
|
||||
@ -3846,57 +4212,22 @@ CreateVideoTrack(VideoStreamTrack* aStreamTrack)
|
||||
aStreamTrack);
|
||||
}
|
||||
|
||||
void HTMLMediaElement::ConstructMediaTracks()
|
||||
{
|
||||
nsTArray<RefPtr<MediaStreamTrack>> tracks;
|
||||
mSrcStream->GetTracks(tracks);
|
||||
|
||||
int firstEnabledVideo = -1;
|
||||
for (const RefPtr<MediaStreamTrack>& track : tracks) {
|
||||
if (track->Ended()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (AudioStreamTrack* t = track->AsAudioStreamTrack()) {
|
||||
RefPtr<AudioTrack> audioTrack = CreateAudioTrack(t);
|
||||
AudioTracks()->AddTrack(audioTrack);
|
||||
} else if (VideoStreamTrack* t = track->AsVideoStreamTrack()) {
|
||||
RefPtr<VideoTrack> videoTrack = CreateVideoTrack(t);
|
||||
VideoTracks()->AddTrack(videoTrack);
|
||||
firstEnabledVideo = (t->Enabled() && firstEnabledVideo < 0)
|
||||
? (VideoTracks()->Length() - 1)
|
||||
: firstEnabledVideo;
|
||||
}
|
||||
}
|
||||
|
||||
if (VideoTracks()->Length() > 0) {
|
||||
// If media resource does not indicate a particular set of video tracks to
|
||||
// enable, the one that is listed first in the element's videoTracks object
|
||||
// must be selected.
|
||||
int index = firstEnabledVideo >= 0 ? firstEnabledVideo : 0;
|
||||
(*VideoTracks())[index]->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
|
||||
VideoTrack* track = (*VideoTracks())[index];
|
||||
VideoStreamTrack* streamTrack = track->GetVideoStreamTrack();
|
||||
mMediaStreamSizeListener = new StreamSizeListener(this);
|
||||
streamTrack->AddDirectListener(mMediaStreamSizeListener);
|
||||
mSelectedVideoStreamTrack = streamTrack;
|
||||
if (GetVideoFrameContainer()) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(GetVideoFrameContainer());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aTrack)
|
||||
{
|
||||
MOZ_ASSERT(aTrack);
|
||||
|
||||
if (aTrack->Ended()) {
|
||||
return;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
nsString id;
|
||||
aTrack->GetId(id);
|
||||
|
||||
LOG(LogLevel::Debug, ("%p, Adding MediaTrack with id %s",
|
||||
this, NS_ConvertUTF16toUTF8(id).get()));
|
||||
LOG(LogLevel::Debug, ("%p, Adding %sTrack with id %s",
|
||||
this, aTrack->AsAudioStreamTrack() ? "Audio" : "Video",
|
||||
NS_ConvertUTF16toUTF8(id).get()));
|
||||
#endif
|
||||
|
||||
if (AudioStreamTrack* t = aTrack->AsAudioStreamTrack()) {
|
||||
@ -3904,23 +4235,17 @@ HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aT
|
||||
AudioTracks()->AddTrack(audioTrack);
|
||||
} else if (VideoStreamTrack* t = aTrack->AsVideoStreamTrack()) {
|
||||
// TODO: Fix this per the spec on bug 1273443.
|
||||
int32_t selectedIndex = VideoTracks()->SelectedIndex();
|
||||
if (!IsVideo()) {
|
||||
return;
|
||||
}
|
||||
RefPtr<VideoTrack> videoTrack = CreateVideoTrack(t);
|
||||
VideoTracks()->AddTrack(videoTrack);
|
||||
// New MediaStreamTrack added, set the new added video track as selected
|
||||
// video track when there is no selected track.
|
||||
if (selectedIndex == -1) {
|
||||
if (VideoTracks()->SelectedIndex() == -1) {
|
||||
MOZ_ASSERT(!mSelectedVideoStreamTrack);
|
||||
videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
|
||||
mMediaStreamSizeListener = new StreamSizeListener(this);
|
||||
t->AddDirectListener(mMediaStreamSizeListener);
|
||||
mSelectedVideoStreamTrack = t;
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (mSrcStreamIsPlaying && container) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -3932,62 +4257,14 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
|
||||
nsAutoString id;
|
||||
aTrack->GetId(id);
|
||||
|
||||
LOG(LogLevel::Debug, ("%p, Removing MediaTrack with id %s",
|
||||
this, NS_ConvertUTF16toUTF8(id).get()));
|
||||
LOG(LogLevel::Debug, ("%p, Removing %sTrack with id %s",
|
||||
this, aTrack->AsAudioStreamTrack() ? "Audio" : "Video",
|
||||
NS_ConvertUTF16toUTF8(id).get()));
|
||||
|
||||
if (MediaTrack* t = AudioTracks()->GetTrackById(id)) {
|
||||
AudioTracks()->RemoveTrack(t);
|
||||
} else if (MediaTrack* t = VideoTracks()->GetTrackById(id)) {
|
||||
VideoTracks()->RemoveTrack(t);
|
||||
// TODO: Fix this per the spec on bug 1273443.
|
||||
// If the removed media stream track is selected video track and there are
|
||||
// still video tracks, change the selected video track to the first
|
||||
// remaining track.
|
||||
if (aTrack == mSelectedVideoStreamTrack) {
|
||||
// The mMediaStreamSizeListener might already reset to nullptr.
|
||||
if (mMediaStreamSizeListener) {
|
||||
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
|
||||
}
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (mSrcStreamIsPlaying && container) {
|
||||
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
|
||||
}
|
||||
mSelectedVideoStreamTrack = nullptr;
|
||||
MOZ_ASSERT(mSrcStream);
|
||||
nsTArray<RefPtr<VideoStreamTrack>> tracks;
|
||||
mSrcStream->GetVideoTracks(tracks);
|
||||
|
||||
for (const RefPtr<VideoStreamTrack>& track : tracks) {
|
||||
if (track->Ended()) {
|
||||
continue;
|
||||
}
|
||||
if (!track->Enabled()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
nsAutoString trackId;
|
||||
track->GetId(trackId);
|
||||
MediaTrack* videoTrack = VideoTracks()->GetTrackById(trackId);
|
||||
MOZ_ASSERT(videoTrack);
|
||||
|
||||
videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
|
||||
if (mMediaStreamSizeListener) {
|
||||
track->AddDirectListener(mMediaStreamSizeListener);
|
||||
}
|
||||
mSelectedVideoStreamTrack = track;
|
||||
if (container) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// There is no enabled video track existing, clean the
|
||||
// mMediaStreamSizeListener.
|
||||
if (mMediaStreamSizeListener) {
|
||||
mMediaStreamSizeListener->Forget();
|
||||
mMediaStreamSizeListener = nullptr;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// XXX (bug 1208328) Uncomment this when DOMMediaStream doesn't call
|
||||
// NotifyTrackRemoved multiple times for the same track, i.e., when it
|
||||
@ -4071,6 +4348,28 @@ void HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
|
||||
SetCurrentTime(mDefaultPlaybackStartPosition);
|
||||
mDefaultPlaybackStartPosition = 0.0;
|
||||
}
|
||||
|
||||
if (!mSrcStream) {
|
||||
return;
|
||||
}
|
||||
for (OutputMediaStream& ms : mOutputStreams) {
|
||||
for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
|
||||
AudioTrack* t = (*AudioTracks())[i];
|
||||
if (t->Enabled()) {
|
||||
AddCaptureMediaTrackToOutputStream(t, ms);
|
||||
}
|
||||
}
|
||||
if (IsVideo() && !ms.mCapturingAudioOnly) {
|
||||
// Only add video tracks if we're a video element and the output stream
|
||||
// wants video.
|
||||
for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
|
||||
VideoTrack* t = (*VideoTracks())[i];
|
||||
if (t->Selected()) {
|
||||
AddCaptureMediaTrackToOutputStream(t, ms);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void HTMLMediaElement::FirstFrameLoaded()
|
||||
@ -4175,6 +4474,8 @@ void HTMLMediaElement::PlaybackEnded()
|
||||
// Discard all output streams that have finished now.
|
||||
for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
|
||||
if (mOutputStreams[i].mFinishWhenEnded) {
|
||||
LOG(LogLevel::Debug, ("Playback ended. Removing output stream %p",
|
||||
mOutputStreams[i].mStream.get()));
|
||||
mOutputStreams.RemoveElementAt(i);
|
||||
}
|
||||
}
|
||||
@ -4913,6 +5214,12 @@ void HTMLMediaElement::UpdateInitialMediaSize(const nsIntSize& aSize)
|
||||
if (!mMediaStreamSizeListener) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!mSelectedVideoStreamTrack) {
|
||||
MOZ_ASSERT(false);
|
||||
return;
|
||||
}
|
||||
|
||||
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
|
||||
mMediaStreamSizeListener->Forget();
|
||||
mMediaStreamSizeListener = nullptr;
|
||||
|
@@ -316,6 +316,7 @@ public:
*/
bool RemoveDecoderPrincipalChangeObserver(DecoderPrincipalChangeObserver* aObserver);

class StreamCaptureTrackSource;
class DecoderCaptureTrackSource;
class CaptureStreamTrackSourceGetter;

@@ -658,6 +659,9 @@ public:
return mAutoplayEnabled;
}

already_AddRefed<DOMMediaStream> CaptureAudio(ErrorResult& aRv,
MediaStreamGraph* aGraph = nullptr);

already_AddRefed<DOMMediaStream> MozCaptureStream(ErrorResult& aRv,
MediaStreamGraph* aGraph = nullptr);

@@ -784,6 +788,23 @@ protected:
nsCOMPtr<nsITimer> mTimer;
};

// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
struct OutputMediaStream {
OutputMediaStream();
~OutputMediaStream();

RefPtr<DOMMediaStream> mStream;
bool mFinishWhenEnded;
bool mCapturingAudioOnly;
bool mCapturingDecoder;
bool mCapturingMediaStream;

// The following members are keeping state for a captured MediaStream.
TrackID mNextAvailableTrackID;
nsTArray<Pair<nsString, RefPtr<MediaInputPort>>> mTrackPorts;
};

nsresult PlayInternal(bool aCallerIsChrome);

/** Use this method to change the mReadyState member, so required
@@ -836,13 +857,6 @@ protected:
enum { REMOVING_SRC_STREAM = 0x1 };
void UpdateSrcMediaStreamPlaying(uint32_t aFlags = 0);

/**
* If loading and playing a MediaStream, for each MediaStreamTrack in the
* MediaStream, create a corresponding AudioTrack or VideoTrack during the
* phase of resource fetching.
*/
void ConstructMediaTracks();

/**
* Called by our DOMMediaStream::TrackListener when a new MediaStreamTrack has
* been added to the playback stream of |mSrcStream|.
@@ -856,13 +870,36 @@ protected:
void NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack);

/**
* Returns an nsDOMMediaStream containing the played contents of this
* Enables or disables all tracks forwarded from mSrcStream to all
* OutputMediaStreams. We do this for muting the tracks when pausing,
* and unmuting when playing the media element again.
*
* If mSrcStream is unset, this does nothing.
*/
void SetCapturedOutputStreamsEnabled(bool aEnabled);

/**
* Create a new MediaStreamTrack for aTrack and add it to the DOMMediaStream
* in aOutputStream. This automatically sets the output track to enabled or
* disabled depending on our current playing state.
*/
void AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
OutputMediaStream& aOutputStream,
bool aAsyncAddtrack = true);

/**
* Returns an DOMMediaStream containing the played contents of this
* element. When aFinishWhenEnded is true, when this element ends playback
* we will finish the stream and not play any more into it.
* When aFinishWhenEnded is false, ending playback does not finish the stream.
* The stream will never finish.
*
* When aCaptureAudio is true, we stop playout of audio and instead route it
* to the DOMMediaStream. Volume and mute state will be applied to the audio
* reaching the stream. No video tracks will be captured in this case.
*/
already_AddRefed<DOMMediaStream> CaptureStreamInternal(bool aFinishWhenEnded,
bool aCaptureAudio,
MediaStreamGraph* aGraph = nullptr);

/**
@@ -1259,10 +1296,6 @@ protected:

// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
struct OutputMediaStream {
RefPtr<DOMMediaStream> mStream;
bool mFinishWhenEnded;
};
nsTArray<OutputMediaStream> mOutputStreams;

// Holds a reference to the MediaStreamListener attached to mSrcStream's
@@ -144,28 +144,35 @@ public:

MediaStreamTrack* track =
mStream->FindOwnedDOMTrack(aInputStream, aInputTrackID, aTrackID);
if (!track) {
// Track had not been created on main thread before, create it now.
NS_WARN_IF_FALSE(!mStream->mTracks.IsEmpty(),
"A new track was detected on the input stream; creating "
"a corresponding MediaStreamTrack. Initial tracks "
"should be added manually to immediately and "
"synchronously be available to JS.");
RefPtr<MediaStreamTrackSource> source;
if (mStream->mTrackSourceGetter) {
source = mStream->mTrackSourceGetter->GetMediaStreamTrackSource(aTrackID);
}
if (!source) {
NS_ASSERTION(false, "Dynamic track created without an explicit TrackSource");
nsPIDOMWindowInner* window = mStream->GetParentObject();
nsIDocument* doc = window ? window->GetExtantDoc() : nullptr;
nsIPrincipal* principal = doc ? doc->NodePrincipal() : nullptr;
source = new BasicUnstoppableTrackSource(principal);
}
RefPtr<MediaStreamTrack> newTrack =
mStream->CreateDOMTrack(aTrackID, aType, source);
mStream->AddTrackInternal(newTrack);

if (track) {
LOG(LogLevel::Debug, ("DOMMediaStream %p Track %d from owned stream %p "
"bound to MediaStreamTrack %p.",
mStream, aTrackID, aInputStream, track));
return;
}

// Track had not been created on main thread before, create it now.
NS_WARN_IF_FALSE(!mStream->mTracks.IsEmpty(),
"A new track was detected on the input stream; creating "
"a corresponding MediaStreamTrack. Initial tracks "
"should be added manually to immediately and "
"synchronously be available to JS.");
RefPtr<MediaStreamTrackSource> source;
if (mStream->mTrackSourceGetter) {
source = mStream->mTrackSourceGetter->GetMediaStreamTrackSource(aTrackID);
}
if (!source) {
NS_ASSERTION(false, "Dynamic track created without an explicit TrackSource");
nsPIDOMWindowInner* window = mStream->GetParentObject();
nsIDocument* doc = window ? window->GetExtantDoc() : nullptr;
nsIPrincipal* principal = doc ? doc->NodePrincipal() : nullptr;
source = new BasicUnstoppableTrackSource(principal);
}

RefPtr<MediaStreamTrack> newTrack =
mStream->CreateDOMTrack(aTrackID, aType, source);
mStream->AddTrackInternal(newTrack);
}

void DoNotifyTrackEnded(MediaStream* aInputStream, TrackID aInputTrackID,
@@ -1321,6 +1328,7 @@ DOMLocalMediaStream::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProt
void
DOMLocalMediaStream::Stop()
{
LOG(LogLevel::Debug, ("DOMMediaStream %p Stop()", this));
nsCOMPtr<nsPIDOMWindowInner> pWindow = GetParentObject();
nsIDocument* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
@@ -366,8 +366,8 @@ AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
return nullptr;
}

RefPtr<DOMMediaStream> stream = aMediaElement.MozCaptureStream(aRv,
mDestination->Stream()->Graph());
RefPtr<DOMMediaStream> stream =
aMediaElement.CaptureAudio(aRv, mDestination->Stream()->Graph());
if (aRv.Failed()) {
return nullptr;
}
@@ -60,7 +60,11 @@ MediaStreamAudioSourceNode::Create(AudioContext* aContext,
void
MediaStreamAudioSourceNode::Init(DOMMediaStream* aMediaStream, ErrorResult& aRv)
{
MOZ_ASSERT(aMediaStream);
if (!aMediaStream) {
aRv.Throw(NS_ERROR_FAILURE);
return;
}

MediaStream* inputStream = aMediaStream->GetPlaybackStream();
MediaStreamGraph* graph = Context()->Graph();
if (NS_WARN_IF(graph != inputStream->Graph())) {