Bug 1259274: [MSE] P3. Refactor handling of tasks so they only ever run concurrently. r=gerald

MozReview-Commit-ID: 1U8r82kTR0t

--HG--
extra : rebase_source : 0646125364bee89f37cfff426c1034a6feb3a516
This commit is contained in:
Jean-Yves Avenard 2016-03-27 20:20:40 +11:00
parent 8da4d6ea4d
commit 5f3b4ca5d5
7 changed files with 600 additions and 417 deletions

View File

@ -42,21 +42,20 @@ extern mozilla::LogModule* GetMediaSourceAPILog();
namespace mozilla {
using media::TimeUnit;
typedef SourceBufferAttributes::AppendState AppendState;
namespace dom {
void
SourceBuffer::SetMode(SourceBufferAppendMode aMode, ErrorResult& aRv)
{
typedef mozilla::TrackBuffersManager::AppendState AppendState;
MOZ_ASSERT(NS_IsMainThread());
MSE_API("SetMode(aMode=%d)", aMode);
if (!IsAttached() || mUpdating) {
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return;
}
if (mAttributes->mGenerateTimestamps &&
if (mCurrentAttributes.mGenerateTimestamps &&
aMode == SourceBufferAppendMode::Segments) {
aRv.Throw(NS_ERROR_DOM_INVALID_ACCESS_ERR);
return;
@ -65,24 +64,22 @@ SourceBuffer::SetMode(SourceBufferAppendMode aMode, ErrorResult& aRv)
if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
mMediaSource->SetReadyState(MediaSourceReadyState::Open);
}
if (mTrackBuffersManager->GetAppendState() == AppendState::PARSING_MEDIA_SEGMENT){
if (mCurrentAttributes.GetAppendState() == AppendState::PARSING_MEDIA_SEGMENT){
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return;
}
if (aMode == SourceBufferAppendMode::Sequence) {
// Will set GroupStartTimestamp to GroupEndTimestamp.
mTrackBuffersManager->RestartGroupStartTimestamp();
mCurrentAttributes.RestartGroupStartTimestamp();
}
mAttributes->SetAppendMode(aMode);
mCurrentAttributes.SetAppendMode(aMode);
}
void
SourceBuffer::SetTimestampOffset(double aTimestampOffset, ErrorResult& aRv)
{
typedef mozilla::TrackBuffersManager::AppendState AppendState;
MOZ_ASSERT(NS_IsMainThread());
MSE_API("SetTimestampOffset(aTimestampOffset=%f)", aTimestampOffset);
if (!IsAttached() || mUpdating) {
@ -93,13 +90,13 @@ SourceBuffer::SetTimestampOffset(double aTimestampOffset, ErrorResult& aRv)
if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
mMediaSource->SetReadyState(MediaSourceReadyState::Open);
}
if (mTrackBuffersManager->GetAppendState() == AppendState::PARSING_MEDIA_SEGMENT){
if (mCurrentAttributes.GetAppendState() == AppendState::PARSING_MEDIA_SEGMENT){
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return;
}
mAttributes->SetApparentTimestampOffset(aTimestampOffset);
if (mAttributes->GetAppendMode() == SourceBufferAppendMode::Sequence) {
mTrackBuffersManager->SetGroupStartTimestamp(mAttributes->GetTimestampOffset());
mCurrentAttributes.SetApparentTimestampOffset(aTimestampOffset);
if (mCurrentAttributes.GetAppendMode() == SourceBufferAppendMode::Sequence) {
mCurrentAttributes.SetGroupStartTimestamp(mCurrentAttributes.GetTimestampOffset());
}
}
@ -146,11 +143,11 @@ SourceBuffer::SetAppendWindowStart(double aAppendWindowStart, ErrorResult& aRv)
return;
}
if (aAppendWindowStart < 0 ||
aAppendWindowStart >= mAttributes->GetAppendWindowEnd()) {
aAppendWindowStart >= mCurrentAttributes.GetAppendWindowEnd()) {
aRv.Throw(NS_ERROR_DOM_INVALID_ACCESS_ERR);
return;
}
mAttributes->SetAppendWindowStart(aAppendWindowStart);
mCurrentAttributes.SetAppendWindowStart(aAppendWindowStart);
}
void
@ -163,11 +160,11 @@ SourceBuffer::SetAppendWindowEnd(double aAppendWindowEnd, ErrorResult& aRv)
return;
}
if (IsNaN(aAppendWindowEnd) ||
aAppendWindowEnd <= mAttributes->GetAppendWindowStart()) {
aAppendWindowEnd <= mCurrentAttributes.GetAppendWindowStart()) {
aRv.Throw(NS_ERROR_DOM_INVALID_ACCESS_ERR);
return;
}
mAttributes->SetAppendWindowEnd(aAppendWindowEnd);
mCurrentAttributes.SetAppendWindowEnd(aAppendWindowEnd);
}
void
@ -202,9 +199,9 @@ SourceBuffer::Abort(ErrorResult& aRv)
return;
}
AbortBufferAppend();
mTrackBuffersManager->ResetParserState();
mAttributes->SetAppendWindowStart(0);
mAttributes->SetAppendWindowEnd(PositiveInfinity<double>());
ResetParserState();
mCurrentAttributes.SetAppendWindowStart(0);
mCurrentAttributes.SetAppendWindowEnd(PositiveInfinity<double>());
}
void
@ -214,14 +211,17 @@ SourceBuffer::AbortBufferAppend()
if (mPendingAppend.Exists()) {
mPendingAppend.Disconnect();
mTrackBuffersManager->AbortAppendData();
// Some data may have been added by the Segment Parser Loop.
// Check if we need to update the duration.
CheckEndTime();
}
AbortUpdating();
}
}
void
SourceBuffer::ResetParserState()
{
mTrackBuffersManager->ResetParserState(mCurrentAttributes);
}
void
SourceBuffer::Remove(double aStart, double aEnd, ErrorResult& aRv)
{
@ -295,23 +295,17 @@ SourceBuffer::Ended()
SourceBuffer::SourceBuffer(MediaSource* aMediaSource, const nsACString& aType)
: DOMEventTargetHelper(aMediaSource->GetParentObject())
, mMediaSource(aMediaSource)
, mCurrentAttributes(aType.LowerCaseEqualsLiteral("audio/mpeg") ||
aType.LowerCaseEqualsLiteral("audio/aac"))
, mUpdating(false)
, mActive(false)
, mType(aType)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aMediaSource);
bool generateTimestamps = false;
if (aType.LowerCaseEqualsLiteral("audio/mpeg") ||
aType.LowerCaseEqualsLiteral("audio/aac")) {
generateTimestamps = true;
}
mAttributes = new SourceBufferAttributes(generateTimestamps);
mTrackBuffersManager =
new TrackBuffersManager(mAttributes,
aMediaSource->GetDecoder(),
aType);
new TrackBuffersManager(aMediaSource->GetDecoder(), aType);
// Now that we know what type we're dealing with, enable dormant as needed.
#if defined(MP4_READER_DORMANT_HEURISTIC)
@ -322,7 +316,7 @@ SourceBuffer::SourceBuffer(MediaSource* aMediaSource, const nsACString& aType)
mTrackBuffersManager.get());
ErrorResult dummy;
if (mAttributes->mGenerateTimestamps) {
if (mCurrentAttributes.mGenerateTimestamps) {
SetMode(SourceBufferAppendMode::Sequence, dummy);
} else {
SetMode(SourceBufferAppendMode::Segments, dummy);
@ -403,7 +397,7 @@ SourceBuffer::CheckEndTime()
{
MOZ_ASSERT(NS_IsMainThread());
// Check if we need to update mMediaSource duration
double endTime = mTrackBuffersManager->GroupEndTimestamp().ToSeconds();
double endTime = mCurrentAttributes.GetGroupEndTimestamp().ToSeconds();
double duration = mMediaSource->Duration();
if (endTime > duration) {
mMediaSource->SetDuration(endTime, MSRangeRemovalAction::SKIP);
@ -419,33 +413,21 @@ SourceBuffer::AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aR
if (!data) {
return;
}
mTrackBuffersManager->AppendData(data, mAttributes->GetTimestampOffset());
StartUpdating();
BufferAppend();
}
void
SourceBuffer::BufferAppend()
{
MOZ_ASSERT(mUpdating);
MOZ_ASSERT(mMediaSource);
MOZ_ASSERT(!mPendingAppend.Exists());
mPendingAppend.Begin(mTrackBuffersManager->BufferAppend()
mPendingAppend.Begin(mTrackBuffersManager->AppendData(data, mCurrentAttributes)
->Then(AbstractThread::MainThread(), __func__, this,
&SourceBuffer::AppendDataCompletedWithSuccess,
&SourceBuffer::AppendDataErrored));
}
void
SourceBuffer::AppendDataCompletedWithSuccess(bool aHasActiveTracks)
SourceBuffer::AppendDataCompletedWithSuccess(SourceBufferTask::AppendBufferResult aResult)
{
MOZ_ASSERT(mUpdating);
mPendingAppend.Complete();
if (aHasActiveTracks) {
if (aResult.first()) {
if (!mActive) {
mActive = true;
mMediaSource->SourceBufferIsActive(this);
@ -458,6 +440,8 @@ SourceBuffer::AppendDataCompletedWithSuccess(bool aHasActiveTracks)
mMediaSource->GetDecoder()->NotifyBytesDownloaded();
}
mCurrentAttributes = aResult.second();
CheckEndTime();
StopUpdating();
@ -485,7 +469,7 @@ SourceBuffer::AppendError(bool aDecoderError)
{
MOZ_ASSERT(NS_IsMainThread());
mTrackBuffersManager->ResetParserState();
ResetParserState();
mUpdating = false;

View File

@ -14,6 +14,7 @@
#include "mozilla/Atomics.h"
#include "mozilla/Attributes.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/dom/SourceBufferBinding.h"
#include "mozilla/dom/TypedArray.h"
#include "mozilla/mozalloc.h"
@ -25,7 +26,7 @@
#include "nsString.h"
#include "nscore.h"
#include "TrackBuffersManager.h"
#include "mozilla/Monitor.h"
#include "SourceBufferTask.h"
class JSObject;
struct JSContext;
@ -40,103 +41,13 @@ namespace dom {
class TimeRanges;
class SourceBufferAttributes {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(SourceBufferAttributes);
explicit SourceBufferAttributes(bool aGenerateTimestamp)
: mGenerateTimestamps(aGenerateTimestamp)
, mMonitor("SourceBufferAttributes")
, mAppendWindowStart(0)
, mAppendWindowEnd(PositiveInfinity<double>())
, mAppendMode(SourceBufferAppendMode::Segments)
, mApparentTimestampOffset(0)
{}
double GetAppendWindowStart()
{
MonitorAutoLock mon(mMonitor);
return mAppendWindowStart;
}
double GetAppendWindowEnd()
{
MonitorAutoLock mon(mMonitor);
return mAppendWindowEnd;
}
void SetAppendWindowStart(double aWindowStart)
{
MonitorAutoLock mon(mMonitor);
mAppendWindowStart = aWindowStart;
}
void SetAppendWindowEnd(double aWindowEnd)
{
MonitorAutoLock mon(mMonitor);
mAppendWindowEnd = aWindowEnd;
}
double GetApparentTimestampOffset()
{
MonitorAutoLock mon(mMonitor);
return mApparentTimestampOffset;
}
void SetApparentTimestampOffset(double aTimestampOffset)
{
MonitorAutoLock mon(mMonitor);
mApparentTimestampOffset = aTimestampOffset;
mTimestampOffset = media::TimeUnit::FromSeconds(aTimestampOffset);
}
media::TimeUnit GetTimestampOffset()
{
MonitorAutoLock mon(mMonitor);
return mTimestampOffset;
}
void SetTimestampOffset(media::TimeUnit& aTimestampOffset)
{
MonitorAutoLock mon(mMonitor);
mTimestampOffset = aTimestampOffset;
mApparentTimestampOffset = aTimestampOffset.ToSeconds();
}
SourceBufferAppendMode GetAppendMode()
{
MonitorAutoLock mon(mMonitor);
return mAppendMode;
}
void SetAppendMode(SourceBufferAppendMode aAppendMode)
{
MonitorAutoLock mon(mMonitor);
mAppendMode = aAppendMode;
}
// mGenerateTimestamp isn't mutable once the source buffer has been constructed
// We don't need a monitor to protect it across threads.
const bool mGenerateTimestamps;
private:
~SourceBufferAttributes() {};
// Monitor protecting all members below.
Monitor mMonitor;
double mAppendWindowStart;
double mAppendWindowEnd;
SourceBufferAppendMode mAppendMode;
double mApparentTimestampOffset;
media::TimeUnit mTimestampOffset;
};
class SourceBuffer final : public DOMEventTargetHelper
{
public:
/** WebIDL Methods. */
SourceBufferAppendMode Mode() const
{
return mAttributes->GetAppendMode();
return mCurrentAttributes.GetAppendMode();
}
void SetMode(SourceBufferAppendMode aMode, ErrorResult& aRv);
@ -151,21 +62,21 @@ public:
double TimestampOffset() const
{
return mAttributes->GetApparentTimestampOffset();
return mCurrentAttributes.GetApparentTimestampOffset();
}
void SetTimestampOffset(double aTimestampOffset, ErrorResult& aRv);
double AppendWindowStart() const
{
return mAttributes->GetAppendWindowStart();
return mCurrentAttributes.GetAppendWindowStart();
}
void SetAppendWindowStart(double aAppendWindowStart, ErrorResult& aRv);
double AppendWindowEnd() const
{
return mAttributes->GetAppendWindowEnd();
return mCurrentAttributes.GetAppendWindowEnd();
}
void SetAppendWindowEnd(double aAppendWindowEnd, ErrorResult& aRv);
@ -222,6 +133,7 @@ private:
void StartUpdating();
void StopUpdating();
void AbortUpdating();
void ResetParserState();
// If the media segment contains data beyond the current duration,
// then run the duration change algorithm with new duration set to the
@ -230,7 +142,6 @@ private:
// Shared implementation of AppendBuffer overloads.
void AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aRv);
void BufferAppend();
// Implement the "Append Error Algorithm".
// Will call endOfStream() with "decode" error if aDecodeError is true.
@ -244,19 +155,19 @@ private:
uint32_t aLength,
ErrorResult& aRv);
void AppendDataCompletedWithSuccess(bool aHasActiveTracks);
void AppendDataCompletedWithSuccess(SourceBufferTask::AppendBufferResult aResult);
void AppendDataErrored(nsresult aError);
RefPtr<MediaSource> mMediaSource;
RefPtr<TrackBuffersManager> mTrackBuffersManager;
RefPtr<SourceBufferAttributes> mAttributes;
SourceBufferAttributes mCurrentAttributes;
bool mUpdating;
mozilla::Atomic<bool> mActive;
MozPromiseRequestHolder<TrackBuffersManager::AppendPromise> mPendingAppend;
MozPromiseRequestHolder<SourceBufferTask::AppendPromise> mPendingAppend;
const nsCString mType;
RefPtr<TimeRanges> mBuffered;

View File

@ -0,0 +1,157 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_SourceBufferAttributes_h_
#define mozilla_SourceBufferAttributes_h_
#include "TimeUnits.h"
#include "mozilla/dom/SourceBufferBinding.h"
#include "mozilla/Maybe.h"
namespace mozilla {
// A plain, copyable value type gathering the per-SourceBuffer state defined
// by the Media Source Extensions spec: append window, timestamp offset,
// append mode, group start/end timestamps and the segment parser append
// state. Copies of this object are passed between the main-thread
// SourceBuffer and the TrackBuffersManager (see AppendBufferTask), so it
// performs no internal locking.
class SourceBufferAttributes {
public:
// Current state as per Segment Parser Loop Algorithm
// http://w3c.github.io/media-source/index.html#sourcebuffer-segment-parser-loop
enum class AppendState
{
WAITING_FOR_SEGMENT,
PARSING_INIT_SEGMENT,
PARSING_MEDIA_SEGMENT,
};
// aGenerateTimestamp: whether timestamps must be generated for incoming
// data (true for types such as audio/mpeg and audio/aac; see the
// SourceBuffer constructor).
explicit SourceBufferAttributes(bool aGenerateTimestamp)
: mGenerateTimestamps(aGenerateTimestamp)
, mAppendWindowStart(0)
, mAppendWindowEnd(PositiveInfinity<double>())
, mAppendMode(dom::SourceBufferAppendMode::Segments)
, mApparentTimestampOffset(0)
, mAppendState(AppendState::WAITING_FOR_SEGMENT)
{}
SourceBufferAttributes(const SourceBufferAttributes& aOther) = default;
// Append window boundaries, in seconds.
double GetAppendWindowStart() const
{
return mAppendWindowStart;
}
double GetAppendWindowEnd() const
{
return mAppendWindowEnd;
}
void SetAppendWindowStart(double aWindowStart)
{
mAppendWindowStart = aWindowStart;
}
void SetAppendWindowEnd(double aWindowEnd)
{
mAppendWindowEnd = aWindowEnd;
}
// The timestamp offset is stored both as a double (the "apparent" value)
// and as a media::TimeUnit; the two setters below keep both
// representations in sync.
double GetApparentTimestampOffset() const
{
return mApparentTimestampOffset;
}
void SetApparentTimestampOffset(double aTimestampOffset)
{
mApparentTimestampOffset = aTimestampOffset;
mTimestampOffset = media::TimeUnit::FromSeconds(aTimestampOffset);
}
media::TimeUnit GetTimestampOffset() const
{
return mTimestampOffset;
}
void SetTimestampOffset(const media::TimeUnit& aTimestampOffset)
{
mTimestampOffset = aTimestampOffset;
mApparentTimestampOffset = aTimestampOffset.ToSeconds();
}
dom::SourceBufferAppendMode GetAppendMode() const
{
return mAppendMode;
}
void SetAppendMode(dom::SourceBufferAppendMode aAppendMode)
{
mAppendMode = aAppendMode;
}
void SetGroupStartTimestamp(const media::TimeUnit& aGroupStartTimestamp)
{
mGroupStartTimestamp = Some(aGroupStartTimestamp);
}
// Precondition: HaveGroupStartTimestamp() returns true; Maybe::ref() must
// not be called on an empty Maybe.
media::TimeUnit GetGroupStartTimestamp() const
{
return mGroupStartTimestamp.ref();
}
bool HaveGroupStartTimestamp() const
{
return mGroupStartTimestamp.isSome();
}
void ResetGroupStartTimestamp()
{
mGroupStartTimestamp.reset();
}
// Sets the group start timestamp to the current group end timestamp; used
// when the append mode is switched to "sequence" (see
// SourceBuffer::SetMode).
void RestartGroupStartTimestamp()
{
mGroupStartTimestamp = Some(mGroupEndTimestamp);
}
media::TimeUnit GetGroupEndTimestamp() const
{
return mGroupEndTimestamp;
}
void SetGroupEndTimestamp(const media::TimeUnit& aGroupEndTimestamp)
{
mGroupEndTimestamp = aGroupEndTimestamp;
}
AppendState GetAppendState() const
{
return mAppendState;
}
void SetAppendState(AppendState aState)
{
mAppendState = aState;
}
// mGenerateTimestamps is never changed individually after construction; it
// is only overwritten as part of whole-object copy-assignment, which is why
// it is a non-const public member rather than a const one.
bool mGenerateTimestamps;
SourceBufferAttributes& operator=(const SourceBufferAttributes& aOther) = default;
private:
// Default construction is meaningless without the generate-timestamps flag.
SourceBufferAttributes() = delete;
double mAppendWindowStart;
double mAppendWindowEnd;
dom::SourceBufferAppendMode mAppendMode;
double mApparentTimestampOffset;
media::TimeUnit mTimestampOffset;
Maybe<media::TimeUnit> mGroupStartTimestamp;
media::TimeUnit mGroupEndTimestamp;
// The current append state as per https://w3c.github.io/media-source/#sourcebuffer-append-state
AppendState mAppendState;
};
} // end namespace mozilla
#endif /* mozilla_SourceBufferAttributes_h_ */

View File

@ -0,0 +1,103 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_SOURCEBUFFERTASK_H_
#define MOZILLA_SOURCEBUFFERTASK_H_
#include "mozilla/MozPromise.h"
#include "mozilla/Pair.h"
#include "mozilla/RefPtr.h"
#include "SourceBufferAttributes.h"
#include "TimeUnits.h"
namespace mozilla {
// Base class for the work items queued on the TrackBuffersManager task queue
// (see TrackBuffersManager::ProcessTasks). Tasks are thread-safe refcounted
// so they can be held by both the task queue and the promise consumer.
class SourceBufferTask {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(SourceBufferTask);
enum class Type {
AppendBuffer,
Abort,
Reset,
RangeRemoval,
EvictData
};
// Result of an append: first() is true when the appended data resulted in
// active tracks; second() carries the updated SourceBufferAttributes to be
// copied back into the SourceBuffer (see
// SourceBuffer::AppendDataCompletedWithSuccess).
typedef Pair<bool, SourceBufferAttributes> AppendBufferResult;
typedef MozPromise<AppendBufferResult, nsresult, /* IsExclusive = */ true> AppendPromise;
typedef MozPromise<bool, nsresult, /* IsExclusive = */ true> RangeRemovalPromise;
virtual Type GetType() const = 0;
// Checked downcast to a concrete task class; asserts that the dynamic type
// (GetType()) matches the target class' static sType tag.
template<typename ReturnType>
ReturnType* As()
{
MOZ_ASSERT(this->GetType() == ReturnType::sType);
return static_cast<ReturnType*>(this);
}
protected:
virtual ~SourceBufferTask() {}
};
// Task carrying the data of one appendBuffer() call together with a snapshot
// of the SourceBuffer's attributes at the time of the call. mPromise is
// resolved with an AppendBufferResult (or rejected) once the segment parser
// loop has consumed mBuffer.
class AppendBufferTask : public SourceBufferTask {
public:
AppendBufferTask(MediaByteBuffer* aData,
SourceBufferAttributes aAttributes)
: mBuffer(aData)
, mAttributes(aAttributes)
{}
static const Type sType = Type::AppendBuffer;
Type GetType() const override { return Type::AppendBuffer; }
RefPtr<MediaByteBuffer> mBuffer;
SourceBufferAttributes mAttributes;
MozPromiseHolder<AppendPromise> mPromise;
};
// Task queued by TrackBuffersManager::AbortAppendData. Carries no payload;
// ProcessTasks currently treats it as a no-op ("not handled yet").
class AbortTask : public SourceBufferTask {
public:
static const Type sType = Type::Abort;
Type GetType() const override { return Type::Abort; }
};
// Task queued by TrackBuffersManager::ResetParserState; when processed it
// triggers CompleteResetParserState on the task queue.
class ResetTask : public SourceBufferTask {
public:
static const Type sType = Type::Reset;
Type GetType() const override { return Type::Reset; }
};
// Task requesting removal of the coded frames falling within mRange.
// mPromise is resolved with the boolean result of CodedFrameRemoval.
class RangeRemovalTask : public SourceBufferTask {
public:
explicit RangeRemovalTask(const media::TimeInterval& aRange)
: mRange(aRange)
{}
static const Type sType = Type::RangeRemoval;
Type GetType() const override { return Type::RangeRemoval; }
media::TimeInterval mRange;
MozPromiseHolder<RangeRemovalPromise> mPromise;
};
// Task requesting eviction of mSizeToEvict bytes of buffered data, passing
// the playback time presumably so eviction can spare data near the current
// position (handled by TrackBuffersManager::DoEvictData — confirm there).
// Fire-and-forget: no promise is attached.
class EvictDataTask : public SourceBufferTask {
public:
EvictDataTask(const media::TimeUnit& aPlaybackTime, int64_t aSizetoEvict)
: mPlaybackTime(aPlaybackTime)
, mSizeToEvict(aSizetoEvict)
{}
static const Type sType = Type::EvictData;
Type GetType() const override { return Type::EvictData; }
media::TimeUnit mPlaybackTime;
int64_t mSizeToEvict;
};
} // end mozilla namespace
#endif

View File

@ -13,6 +13,7 @@
#include "SourceBufferResource.h"
#include "SourceBuffer.h"
#include "WebMDemuxer.h"
#include "SourceBufferTask.h"
#ifdef MOZ_FMP4
#include "MP4Demuxer.h"
@ -38,16 +39,17 @@ using dom::SourceBufferAppendMode;
using media::TimeUnit;
using media::TimeInterval;
using media::TimeIntervals;
typedef SourceBufferTask::AppendBufferResult AppendBufferResult;
static const char*
AppendStateToStr(TrackBuffersManager::AppendState aState)
AppendStateToStr(SourceBufferAttributes::AppendState aState)
{
switch (aState) {
case TrackBuffersManager::AppendState::WAITING_FOR_SEGMENT:
case SourceBufferAttributes::AppendState::WAITING_FOR_SEGMENT:
return "WAITING_FOR_SEGMENT";
case TrackBuffersManager::AppendState::PARSING_INIT_SEGMENT:
case SourceBufferAttributes::AppendState::PARSING_INIT_SEGMENT:
return "PARSING_INIT_SEGMENT";
case TrackBuffersManager::AppendState::PARSING_MEDIA_SEGMENT:
case SourceBufferAttributes::AppendState::PARSING_MEDIA_SEGMENT:
return "PARSING_MEDIA_SEGMENT";
default:
return "IMPOSSIBLE";
@ -84,11 +86,9 @@ private:
};
#endif // MOZ_EME
TrackBuffersManager::TrackBuffersManager(dom::SourceBufferAttributes* aAttributes,
MediaSourceDecoder* aParentDecoder,
TrackBuffersManager::TrackBuffersManager(MediaSourceDecoder* aParentDecoder,
const nsACString& aType)
: mInputBuffer(new MediaByteBuffer)
, mAppendState(AppendState::WAITING_FOR_SEGMENT)
, mBufferFull(false)
, mFirstInitializationSegmentReceived(false)
, mNewMediaSegmentStarted(false)
@ -97,98 +97,198 @@ TrackBuffersManager::TrackBuffersManager(dom::SourceBufferAttributes* aAttribute
, mParser(ContainerParser::CreateForMIMEType(aType))
, mProcessedInput(0)
, mTaskQueue(aParentDecoder->GetDemuxer()->GetTaskQueue())
, mSourceBufferAttributes(aAttributes)
, mParentDecoder(new nsMainThreadPtrHolder<MediaSourceDecoder>(aParentDecoder, false /* strict */))
, mEnded(false)
, mDetached(false)
, mVideoEvictionThreshold(Preferences::GetUint("media.mediasource.eviction_threshold.video",
100 * 1024 * 1024))
, mAudioEvictionThreshold(Preferences::GetUint("media.mediasource.eviction_threshold.audio",
15 * 1024 * 1024))
, mEvictionOccurred(false)
, mMonitor("TrackBuffersManager")
, mAppendRunning(false)
{
MOZ_ASSERT(NS_IsMainThread(), "Must be instanciated on the main thread");
}
TrackBuffersManager::~TrackBuffersManager()
{
CancelAllTasks();
ShutdownDemuxers();
}
bool
RefPtr<TrackBuffersManager::AppendPromise>
TrackBuffersManager::AppendData(MediaByteBuffer* aData,
TimeUnit aTimestampOffset)
const SourceBufferAttributes& aAttributes)
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("Appending %lld bytes", aData->Length());
mEnded = false;
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<IncomingBuffer>(
this, &TrackBuffersManager::AppendIncomingBuffer,
IncomingBuffer(aData, aTimestampOffset));
GetTaskQueue()->Dispatch(task.forget());
return true;
}
void
TrackBuffersManager::AppendIncomingBuffer(IncomingBuffer aData)
{
MOZ_ASSERT(OnTaskQueue());
mIncomingBuffers.AppendElement(aData);
RefPtr<MediaByteBuffer> buffer = aData;
return InvokeAsync(GetTaskQueue(), this,
__func__, &TrackBuffersManager::DoAppendData,
buffer, aAttributes);
}
RefPtr<TrackBuffersManager::AppendPromise>
TrackBuffersManager::BufferAppend()
TrackBuffersManager::DoAppendData(RefPtr<MediaByteBuffer> aData,
SourceBufferAttributes aAttributes)
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("");
RefPtr<AppendBufferTask> task = new AppendBufferTask(aData, aAttributes);
RefPtr<AppendPromise> p = task->mPromise.Ensure(__func__);
mQueue.Push(task);
mAppendRunning = true;
return InvokeAsync(GetTaskQueue(), this,
__func__, &TrackBuffersManager::InitSegmentParserLoop);
ProcessTasks();
return p;
}
void
TrackBuffersManager::ProcessTasks()
{
typedef SourceBufferTask::Type Type;
if (mDetached) {
return;
}
if (OnTaskQueue()) {
if (mCurrentTask) {
// Already have a task pending. ProcessTasks() will be scheduled once the
// current task completes.
return;
}
RefPtr<SourceBufferTask> task = mQueue.Pop();
if (!task) {
// nothing to do.
return;
}
switch (task->GetType()) {
case Type::AppendBuffer:
mCurrentTask = task;
if (!mInputBuffer) {
mInputBuffer = task->As<AppendBufferTask>()->mBuffer;
} else if (!mInputBuffer->AppendElements(*task->As<AppendBufferTask>()->mBuffer, fallible)) {
RejectAppend(NS_ERROR_OUT_OF_MEMORY, __func__);
return;
}
mSourceBufferAttributes =
MakeUnique<SourceBufferAttributes>(task->As<AppendBufferTask>()->mAttributes);
mAppendWindow =
TimeInterval(TimeUnit::FromSeconds(mSourceBufferAttributes->GetAppendWindowStart()),
TimeUnit::FromSeconds(mSourceBufferAttributes->GetAppendWindowEnd()));
ScheduleSegmentParserLoop();
break;
case Type::RangeRemoval:
{
bool rv = CodedFrameRemoval(task->As<RangeRemovalTask>()->mRange);
task->As<RangeRemovalTask>()->mPromise.Resolve(rv, __func__);
break;
}
case Type::EvictData:
DoEvictData(task->As<EvictDataTask>()->mPlaybackTime,
task->As<EvictDataTask>()->mSizeToEvict);
break;
case Type::Abort:
// not handled yet, and probably never.
break;
case Type::Reset:
CompleteResetParserState();
break;
default:
NS_WARNING("Invalid Task");
}
}
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethod(this, &TrackBuffersManager::ProcessTasks);
GetTaskQueue()->Dispatch(task.forget());
}
// A PromiseHolder will assert upon destruction if it has a pending promise
// that hasn't been completed. It is possible that a task didn't get processed
// due to the owning SourceBuffer having shutdown.
// We resolve/reject all pending promises and remove all pending tasks from the
// queue.
void
TrackBuffersManager::CancelAllTasks()
{
typedef SourceBufferTask::Type Type;
MOZ_DIAGNOSTIC_ASSERT(mDetached);
if (mCurrentTask) {
mQueue.Push(mCurrentTask);
mCurrentTask = nullptr;
}
RefPtr<SourceBufferTask> task;
while ((task = mQueue.Pop())) {
switch (task->GetType()) {
case Type::AppendBuffer:
task->As<AppendBufferTask>()->mPromise.RejectIfExists(NS_ERROR_ABORT, __func__);
break;
case Type::RangeRemoval:
task->As<RangeRemovalTask>()->mPromise.ResolveIfExists(false, __func__);
break;
case Type::EvictData:
break;
case Type::Abort:
// not handled yet, and probably never.
break;
case Type::Reset:
break;
default:
NS_WARNING("Invalid Task");
}
}
}
// The MSE spec requires that we abort the current SegmentParserLoop
// which is then followed by a call to ResetParserState.
// However due to our asynchronous design this causes inherent difficulities.
// As the spec behaviour is non deterministic anyway, we instead wait until the
// current AppendData has completed its run.
// However due to our asynchronous design this causes inherent difficulties.
// As the spec behaviour is non deterministic anyway, we instead process all
// pending frames found in the input buffer.
void
TrackBuffersManager::AbortAppendData()
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("");
MonitorAutoLock mon(mMonitor);
while (mAppendRunning) {
mon.Wait();
}
RefPtr<AbortTask> task = new AbortTask();
mQueue.Push(task);
ProcessTasks();
}
void
TrackBuffersManager::ResetParserState()
TrackBuffersManager::ResetParserState(SourceBufferAttributes& aAttributes)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(!mAppendRunning, "Append is running, abort must have been called");
MSE_DEBUG("");
// Spec states:
// 1. If the append state equals PARSING_MEDIA_SEGMENT and the input buffer contains some complete coded frames, then run the coded frame processing algorithm until all of these complete coded frames have been processed.
// SourceBuffer.abort() has ensured that all complete coded frames have been
// processed. As such, we don't need to check for the value of mAppendState.
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethod(this, &TrackBuffersManager::CompleteResetParserState);
GetTaskQueue()->Dispatch(task.forget());
// However, we will wait until all coded frames have been processed regardless
// of the value of append state.
RefPtr<ResetTask> task = new ResetTask();
mQueue.Push(task);
ProcessTasks();
// 7. Set append state to WAITING_FOR_SEGMENT.
SetAppendState(AppendState::WAITING_FOR_SEGMENT);
// ResetParserState has some synchronous steps that must be performed now.
// The remaining steps will be performed once the ResetTask gets executed.
// 6. If the mode attribute equals "sequence", then set the group start timestamp to the group end timestamp
if (aAttributes.GetAppendMode() == SourceBufferAppendMode::Sequence) {
aAttributes.SetGroupStartTimestamp(aAttributes.GetGroupEndTimestamp());
}
// 8. Set append state to WAITING_FOR_SEGMENT.
aAttributes.SetAppendState(AppendState::WAITING_FOR_SEGMENT);
}
RefPtr<TrackBuffersManager::RangeRemovalPromise>
TrackBuffersManager::RangeRemoval(TimeUnit aStart, TimeUnit aEnd)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(!mAppendRunning, "Append is running");
MSE_DEBUG("From %.2f to %.2f", aStart.ToSeconds(), aEnd.ToSeconds());
mEnded = false;
@ -225,11 +325,9 @@ TrackBuffersManager::EvictData(TimeUnit aPlaybackTime,
MSE_DEBUG("Reaching our size limit, schedule eviction of %lld bytes", toEvict);
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArgs<TimeUnit, uint32_t>(
this, &TrackBuffersManager::DoEvictData,
aPlaybackTime, toEvict);
GetTaskQueue()->Dispatch(task.forget());
RefPtr<EvictDataTask> task = new EvictDataTask(aPlaybackTime, toEvict);
mQueue.Push(task);
ProcessTasks();
return EvictDataResult::NO_DATA_EVICTED;
}
@ -287,6 +385,7 @@ TrackBuffersManager::Detach()
{
MOZ_ASSERT(NS_IsMainThread());
MSE_DEBUG("");
mDetached = true;
}
void
@ -297,10 +396,7 @@ TrackBuffersManager::CompleteResetParserState()
// We shouldn't change mInputDemuxer while a demuxer init/reset request is
// being processed. See bug 1239983.
NS_ASSERTION(!mDemuxerInitRequest.Exists(), "Previous AppendBuffer didn't complete");
if (mDemuxerInitRequest.Exists()) {
mDemuxerInitRequest.Disconnect();
}
MOZ_DIAGNOSTIC_ASSERT(!mDemuxerInitRequest.Exists(), "Previous AppendBuffer didn't complete");
for (auto& track : GetTracksList()) {
// 2. Unset the last decode timestamp on all track buffers.
@ -314,13 +410,7 @@ TrackBuffersManager::CompleteResetParserState()
track->mQueuedSamples.Clear();
}
// 6. If the mode attribute equals "sequence", then set the group start timestamp to the group end timestamp
if (mSourceBufferAttributes->GetAppendMode() == SourceBufferAppendMode::Sequence) {
mGroupStartTimestamp = Some(mGroupEndTimestamp);
}
// 7. Remove all bytes from the input buffer.
mIncomingBuffers.Clear();
mInputBuffer = nullptr;
if (mCurrentInputBuffer) {
mCurrentInputBuffer->EvictAll();
@ -343,12 +433,6 @@ TrackBuffersManager::CompleteResetParserState()
mInputBuffer->AppendElements(*mInitData);
}
RecreateParser(true);
// 8. Set append state to WAITING_FOR_SEGMENT.
SetAppendState(AppendState::WAITING_FOR_SEGMENT);
// Reject our promise immediately.
mAppendPromise.RejectIfExists(NS_ERROR_ABORT, __func__);
}
int64_t
@ -435,8 +519,12 @@ RefPtr<TrackBuffersManager::RangeRemovalPromise>
TrackBuffersManager::CodedFrameRemovalWithPromise(TimeInterval aInterval)
{
MOZ_ASSERT(OnTaskQueue());
bool rv = CodedFrameRemoval(aInterval);
return RangeRemovalPromise::CreateAndResolve(rv, __func__);
RefPtr<RangeRemovalTask> task = new RangeRemovalTask(aInterval);
RefPtr<RangeRemovalPromise> p = task->mPromise.Ensure(__func__);
mQueue.Push(task);
ProcessTasks();
return p;
}
bool
@ -536,44 +624,6 @@ TrackBuffersManager::UpdateBufferedRanges()
DumpTimeRanges(mAudioTracks.mBufferedRanges).get());
}
#endif
mOfficialGroupEndTimestamp = mGroupEndTimestamp;
}
RefPtr<TrackBuffersManager::AppendPromise>
TrackBuffersManager::InitSegmentParserLoop()
{
MOZ_ASSERT(OnTaskQueue());
MOZ_DIAGNOSTIC_ASSERT(mAppendPromise.IsEmpty());
MSE_DEBUG("");
RefPtr<AppendPromise> p = mAppendPromise.Ensure(__func__);
AppendIncomingBuffers();
SegmentParserLoop();
return p;
}
void
TrackBuffersManager::AppendIncomingBuffers()
{
MOZ_ASSERT(OnTaskQueue());
MonitorAutoLock mon(mMonitor);
for (auto& incomingBuffer : mIncomingBuffers) {
if (!mInputBuffer) {
mInputBuffer = incomingBuffer.first();
} else if (!mInputBuffer->AppendElements(*incomingBuffer.first(), fallible)) {
RejectAppend(NS_ERROR_OUT_OF_MEMORY, __func__);
}
mTimestampOffset = incomingBuffer.second();
mLastTimestampOffset = mTimestampOffset;
}
mIncomingBuffers.Clear();
mAppendWindow =
TimeInterval(TimeUnit::FromSeconds(mSourceBufferAttributes->GetAppendWindowStart()),
TimeUnit::FromSeconds(mSourceBufferAttributes->GetAppendWindowEnd()));
}
void
@ -600,7 +650,7 @@ TrackBuffersManager::SegmentParserLoop()
// 4. If the append state equals WAITING_FOR_SEGMENT, then run the following
// steps:
if (mAppendState == AppendState::WAITING_FOR_SEGMENT) {
if (mSourceBufferAttributes->GetAppendState() == AppendState::WAITING_FOR_SEGMENT) {
if (mParser->IsInitSegmentPresent(mInputBuffer)) {
SetAppendState(AppendState::PARSING_INIT_SEGMENT);
if (mFirstInitializationSegmentReceived) {
@ -627,7 +677,7 @@ TrackBuffersManager::SegmentParserLoop()
// 5. If the append state equals PARSING_INIT_SEGMENT, then run the
// following steps:
if (mAppendState == AppendState::PARSING_INIT_SEGMENT) {
if (mSourceBufferAttributes->GetAppendState() == AppendState::PARSING_INIT_SEGMENT) {
if (mParser->InitSegmentRange().IsEmpty()) {
mInputBuffer = nullptr;
NeedMoreData();
@ -636,7 +686,7 @@ TrackBuffersManager::SegmentParserLoop()
InitializationSegmentReceived();
return;
}
if (mAppendState == AppendState::PARSING_MEDIA_SEGMENT) {
if (mSourceBufferAttributes->GetAppendState() == AppendState::PARSING_MEDIA_SEGMENT) {
// 1. If the first initialization segment received flag is false, then run the append error algorithm with the decode error parameter set to true and abort this algorithm.
if (!mFirstInitializationSegmentReceived) {
RejectAppend(NS_ERROR_FAILURE, __func__);
@ -703,32 +753,45 @@ void
TrackBuffersManager::NeedMoreData()
{
MSE_DEBUG("");
RestoreCachedVariables();
mAppendRunning = false;
{
// Wake-up any pending Abort()
MonitorAutoLock mon(mMonitor);
mon.NotifyAll();
if (mDetached) {
// We've been detached.
return;
}
mAppendPromise.ResolveIfExists(mActiveTrack, __func__);
MOZ_DIAGNOSTIC_ASSERT(mCurrentTask && mCurrentTask->GetType() == SourceBufferTask::Type::AppendBuffer);
MOZ_DIAGNOSTIC_ASSERT(mSourceBufferAttributes);
mCurrentTask->As<AppendBufferTask>()->mPromise.Resolve(
SourceBufferTask::AppendBufferResult(mActiveTrack,
*mSourceBufferAttributes),
__func__);
mSourceBufferAttributes = nullptr;
mCurrentTask = nullptr;
ProcessTasks();
}
void
TrackBuffersManager::RejectAppend(nsresult aRejectValue, const char* aName)
{
MSE_DEBUG("rv=%d", aRejectValue);
mAppendRunning = false;
{
// Wake-up any pending Abort()
MonitorAutoLock mon(mMonitor);
mon.NotifyAll();
if (mDetached) {
// We've been detached.
return;
}
mAppendPromise.RejectIfExists(aRejectValue, aName);
MOZ_DIAGNOSTIC_ASSERT(mCurrentTask && mCurrentTask->GetType() == SourceBufferTask::Type::AppendBuffer);
MOZ_DIAGNOSTIC_ASSERT(mSourceBufferAttributes);
mCurrentTask->As<AppendBufferTask>()->mPromise.Reject(aRejectValue, __func__);
mSourceBufferAttributes = nullptr;
mCurrentTask = nullptr;
ProcessTasks();
}
void
TrackBuffersManager::ScheduleSegmentParserLoop()
{
if (mDetached) {
return;
}
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethod(this, &TrackBuffersManager::SegmentParserLoop);
GetTaskQueue()->Dispatch(task.forget());
@ -757,13 +820,15 @@ TrackBuffersManager::CreateDemuxerforMIMEType()
{
ShutdownDemuxers();
if (mType.LowerCaseEqualsLiteral("video/webm") || mType.LowerCaseEqualsLiteral("audio/webm")) {
if (mType.LowerCaseEqualsLiteral("video/webm") ||
mType.LowerCaseEqualsLiteral("audio/webm")) {
mInputDemuxer = new WebMDemuxer(mCurrentInputBuffer, true /* IsMediaSource*/ );
return;
}
#ifdef MOZ_FMP4
if (mType.LowerCaseEqualsLiteral("video/mp4") || mType.LowerCaseEqualsLiteral("audio/mp4")) {
if (mType.LowerCaseEqualsLiteral("video/mp4") ||
mType.LowerCaseEqualsLiteral("audio/mp4")) {
mInputDemuxer = new MP4Demuxer(mCurrentInputBuffer);
return;
}
@ -808,14 +873,16 @@ TrackBuffersManager::OnDemuxerResetDone(nsresult)
uint32_t numVideos = mInputDemuxer->GetNumberTracks(TrackInfo::kVideoTrack);
if (numVideos) {
// We currently only handle the first video track.
mVideoTracks.mDemuxer = mInputDemuxer->GetTrackDemuxer(TrackInfo::kVideoTrack, 0);
mVideoTracks.mDemuxer =
mInputDemuxer->GetTrackDemuxer(TrackInfo::kVideoTrack, 0);
MOZ_ASSERT(mVideoTracks.mDemuxer);
}
uint32_t numAudios = mInputDemuxer->GetNumberTracks(TrackInfo::kAudioTrack);
if (numAudios) {
// We currently only handle the first audio track.
mAudioTracks.mDemuxer = mInputDemuxer->GetTrackDemuxer(TrackInfo::kAudioTrack, 0);
mAudioTracks.mDemuxer =
mInputDemuxer->GetTrackDemuxer(TrackInfo::kAudioTrack, 0);
MOZ_ASSERT(mAudioTracks.mDemuxer);
}
@ -871,22 +938,17 @@ void
TrackBuffersManager::OnDemuxerInitDone(nsresult)
{
MOZ_ASSERT(OnTaskQueue());
mDemuxerInitRequest.Complete();
MOZ_DIAGNOSTIC_ASSERT(mInputDemuxer, "mInputDemuxer has been destroyed");
if (!mInputDemuxer) {
// mInputDemuxer shouldn't have been destroyed while a demuxer init/reset
// request was being processed. See bug 1239983.
NS_ASSERTION(false, "mInputDemuxer has been destroyed");
RejectAppend(NS_ERROR_ABORT, __func__);
return;
}
mDemuxerInitRequest.Complete();
MediaInfo info;
uint32_t numVideos = mInputDemuxer->GetNumberTracks(TrackInfo::kVideoTrack);
if (numVideos) {
// We currently only handle the first video track.
mVideoTracks.mDemuxer = mInputDemuxer->GetTrackDemuxer(TrackInfo::kVideoTrack, 0);
mVideoTracks.mDemuxer =
mInputDemuxer->GetTrackDemuxer(TrackInfo::kVideoTrack, 0);
MOZ_ASSERT(mVideoTracks.mDemuxer);
info.mVideo = *mVideoTracks.mDemuxer->GetInfo()->GetAsVideoInfo();
info.mVideo.mTrackId = 2;
@ -895,7 +957,8 @@ TrackBuffersManager::OnDemuxerInitDone(nsresult)
uint32_t numAudios = mInputDemuxer->GetNumberTracks(TrackInfo::kAudioTrack);
if (numAudios) {
// We currently only handle the first audio track.
mAudioTracks.mDemuxer = mInputDemuxer->GetTrackDemuxer(TrackInfo::kAudioTrack, 0);
mAudioTracks.mDemuxer =
mInputDemuxer->GetTrackDemuxer(TrackInfo::kAudioTrack, 0);
MOZ_ASSERT(mAudioTracks.mDemuxer);
info.mAudio = *mAudioTracks.mDemuxer->GetInfo()->GetAsAudioInfo();
info.mAudio.mTrackId = 1;
@ -1036,6 +1099,7 @@ TrackBuffersManager::OnDemuxerInitDone(nsresult)
(info.mAudio.mChannels != mAudioTracks.mInfo->GetAsAudioInfo()->mChannels ||
info.mAudio.mRate != mAudioTracks.mInfo->GetAsAudioInfo()->mRate)) {
RejectAppend(NS_ERROR_FAILURE, __func__);
return;
}
mAudioTracks.mLastInfo = new SharedTrackInfo(info.mAudio, streamID);
mVideoTracks.mLastInfo = new SharedTrackInfo(info.mVideo, streamID);
@ -1295,12 +1359,14 @@ void
TrackBuffersManager::CheckSequenceDiscontinuity(const TimeUnit& aPresentationTime)
{
if (mSourceBufferAttributes->GetAppendMode() == SourceBufferAppendMode::Sequence &&
mGroupStartTimestamp.isSome()) {
mTimestampOffset = mGroupStartTimestamp.ref() - aPresentationTime;
mGroupEndTimestamp = mGroupStartTimestamp.ref();
mSourceBufferAttributes->HaveGroupStartTimestamp()) {
mSourceBufferAttributes->SetTimestampOffset(
mSourceBufferAttributes->GetGroupStartTimestamp() - aPresentationTime);
mSourceBufferAttributes->SetGroupEndTimestamp(
mSourceBufferAttributes->GetGroupStartTimestamp());
mVideoTracks.mNeedRandomAccessPoint = true;
mAudioTracks.mNeedRandomAccessPoint = true;
mGroupStartTimestamp.reset();
mSourceBufferAttributes->ResetGroupStartTimestamp();
}
}
@ -1391,14 +1457,14 @@ TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples, TrackData& aTrackData)
TimeInterval sampleInterval =
mSourceBufferAttributes->mGenerateTimestamps
? TimeInterval(mTimestampOffset,
mTimestampOffset + TimeUnit::FromMicroseconds(sample->mDuration))
: TimeInterval(TimeUnit::FromMicroseconds(sample->mTime) + mTimestampOffset,
TimeUnit::FromMicroseconds(sample->GetEndTime()) + mTimestampOffset);
? TimeInterval(mSourceBufferAttributes->GetTimestampOffset(),
mSourceBufferAttributes->GetTimestampOffset() + TimeUnit::FromMicroseconds(sample->mDuration))
: TimeInterval(TimeUnit::FromMicroseconds(sample->mTime) + mSourceBufferAttributes->GetTimestampOffset(),
TimeUnit::FromMicroseconds(sample->GetEndTime()) + mSourceBufferAttributes->GetTimestampOffset());
TimeUnit decodeTimestamp =
mSourceBufferAttributes->mGenerateTimestamps
? mTimestampOffset
: TimeUnit::FromMicroseconds(sample->mTimecode) + mTimestampOffset;
? mSourceBufferAttributes->GetTimestampOffset()
: TimeUnit::FromMicroseconds(sample->mTimecode) + mSourceBufferAttributes->GetTimestampOffset();
// 6. If last decode timestamp for track buffer is set and decode timestamp is less than last decode timestamp:
// OR
@ -1413,12 +1479,13 @@ TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples, TrackData& aTrackData)
// 1a. If mode equals "segments":
if (appendMode == SourceBufferAppendMode::Segments) {
// Set group end timestamp to presentation timestamp.
mGroupEndTimestamp = sampleInterval.mStart;
mSourceBufferAttributes->SetGroupEndTimestamp(sampleInterval.mStart);
}
// 1b. If mode equals "sequence":
if (appendMode == SourceBufferAppendMode::Sequence) {
// Set group start timestamp equal to the group end timestamp.
mGroupStartTimestamp = Some(mGroupEndTimestamp);
mSourceBufferAttributes->SetGroupStartTimestamp(
mSourceBufferAttributes->GetGroupEndTimestamp());
}
for (auto& track : GetTracksList()) {
// 2. Unset the last decode timestamp on all track buffers.
@ -1439,18 +1506,18 @@ TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples, TrackData& aTrackData)
continue;
}
if (appendMode == SourceBufferAppendMode::Sequence) {
// mTimestampOffset was modified during CheckSequenceDiscontinuity.
// mSourceBufferAttributes->GetTimestampOffset() was modified during CheckSequenceDiscontinuity.
// We need to update our variables.
sampleInterval =
mSourceBufferAttributes->mGenerateTimestamps
? TimeInterval(mTimestampOffset,
mTimestampOffset + TimeUnit::FromMicroseconds(sample->mDuration))
: TimeInterval(TimeUnit::FromMicroseconds(sample->mTime) + mTimestampOffset,
TimeUnit::FromMicroseconds(sample->GetEndTime()) + mTimestampOffset);
? TimeInterval(mSourceBufferAttributes->GetTimestampOffset(),
mSourceBufferAttributes->GetTimestampOffset() + TimeUnit::FromMicroseconds(sample->mDuration))
: TimeInterval(TimeUnit::FromMicroseconds(sample->mTime) + mSourceBufferAttributes->GetTimestampOffset(),
TimeUnit::FromMicroseconds(sample->GetEndTime()) + mSourceBufferAttributes->GetTimestampOffset());
decodeTimestamp =
mSourceBufferAttributes->mGenerateTimestamps
? mTimestampOffset
: TimeUnit::FromMicroseconds(sample->mTimecode) + mTimestampOffset;
? mSourceBufferAttributes->GetTimestampOffset()
: TimeUnit::FromMicroseconds(sample->mTimecode) + mSourceBufferAttributes->GetTimestampOffset();
}
trackBuffer.mNeedRandomAccessPoint = false;
needDiscontinuityCheck = false;
@ -1504,12 +1571,12 @@ TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples, TrackData& aTrackData)
trackBuffer.mHighestEndTimestamp = Some(sampleInterval.mEnd);
}
// 20. If frame end timestamp is greater than group end timestamp, then set group end timestamp equal to frame end timestamp.
if (sampleInterval.mEnd > mGroupEndTimestamp) {
mGroupEndTimestamp = sampleInterval.mEnd;
if (sampleInterval.mEnd > mSourceBufferAttributes->GetGroupEndTimestamp()) {
mSourceBufferAttributes->SetGroupEndTimestamp(sampleInterval.mEnd);
}
// 21. If generate timestamps flag equals true, then set timestampOffset equal to frame end timestamp.
if (mSourceBufferAttributes->mGenerateTimestamps) {
mTimestampOffset = sampleInterval.mEnd;
mSourceBufferAttributes->SetTimestampOffset(sampleInterval.mEnd);
}
}
@ -1766,56 +1833,11 @@ TrackBuffersManager::GetTracksList()
}
void
TrackBuffersManager::RestoreCachedVariables()
{
MOZ_ASSERT(OnTaskQueue());
if (mTimestampOffset != mLastTimestampOffset) {
mSourceBufferAttributes->SetTimestampOffset(mTimestampOffset);
}
}
void
TrackBuffersManager::SetAppendState(TrackBuffersManager::AppendState aAppendState)
TrackBuffersManager::SetAppendState(SourceBufferAttributes::AppendState aAppendState)
{
MSE_DEBUG("AppendState changed from %s to %s",
AppendStateToStr(mAppendState), AppendStateToStr(aAppendState));
mAppendState = aAppendState;
}
void
TrackBuffersManager::SetGroupStartTimestamp(const TimeUnit& aGroupStartTimestamp)
{
if (NS_IsMainThread()) {
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TimeUnit>(
this,
&TrackBuffersManager::SetGroupStartTimestamp,
aGroupStartTimestamp);
GetTaskQueue()->Dispatch(task.forget());
return;
}
MOZ_ASSERT(OnTaskQueue());
mGroupStartTimestamp = Some(aGroupStartTimestamp);
}
void
TrackBuffersManager::RestartGroupStartTimestamp()
{
if (NS_IsMainThread()) {
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableMethod(this, &TrackBuffersManager::RestartGroupStartTimestamp);
GetTaskQueue()->Dispatch(task.forget());
return;
}
MOZ_ASSERT(OnTaskQueue());
mGroupStartTimestamp = Some(mGroupEndTimestamp);
}
TimeUnit
TrackBuffersManager::GroupEndTimestamp()
{
MonitorAutoLock mon(mMonitor);
return mOfficialGroupEndTimestamp;
AppendStateToStr(mSourceBufferAttributes->GetAppendState()), AppendStateToStr(aAppendState));
mSourceBufferAttributes->SetAppendState(aAppendState);
}
MediaInfo

View File

@ -10,13 +10,12 @@
#include "mozilla/Atomics.h"
#include "mozilla/Maybe.h"
#include "mozilla/Monitor.h"
#include "mozilla/MozPromise.h"
#include "mozilla/Pair.h"
#include "mozilla/dom/SourceBufferBinding.h"
#include "MediaData.h"
#include "MediaDataDemuxer.h"
#include "MediaSourceDecoder.h"
#include "SourceBufferTask.h"
#include "TimeUnits.h"
#include "nsProxyRelease.h"
#include "nsString.h"
@ -30,17 +29,44 @@ class MediaRawData;
class MediaSourceDemuxer;
class SourceBufferResource;
namespace dom {
class SourceBufferAttributes;
}
// Thread-safe FIFO of pending SourceBufferTask objects, shared between the
// main thread (producer) and the TrackBuffersManager task queue (consumer).
class SourceBufferTaskQueue {
public:
  SourceBufferTaskQueue()
    : mMonitor("SourceBufferTaskQueue")
  {}

  // Append aTask at the tail of the queue.
  void Push(SourceBufferTask* aTask)
  {
    MonitorAutoLock lock(mMonitor);
    mQueue.AppendElement(aTask);
  }

  // Detach and return the oldest queued task, or nullptr when empty.
  already_AddRefed<SourceBufferTask> Pop()
  {
    MonitorAutoLock lock(mMonitor);
    if (mQueue.IsEmpty()) {
      return nullptr;
    }
    RefPtr<SourceBufferTask> front = Move(mQueue[0]);
    mQueue.RemoveElementAt(0);
    return front.forget();
  }

  // Number of tasks currently queued.
  nsTArray<SourceBufferTask>::size_type Length() const
  {
    MonitorAutoLock lock(mMonitor);
    return mQueue.Length();
  }

private:
  mutable Monitor mMonitor;
  nsTArray<RefPtr<SourceBufferTask>> mQueue;
};
class TrackBuffersManager {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffersManager);
typedef MozPromise<bool, nsresult, /* IsExclusive = */ true> AppendPromise;
typedef AppendPromise RangeRemovalPromise;
enum class EvictDataResult : int8_t
{
NO_DATA_EVICTED,
@ -48,42 +74,32 @@ public:
BUFFER_FULL,
};
// Current state as per Segment Parser Loop Algorithm
// http://w3c.github.io/media-source/index.html#sourcebuffer-segment-parser-loop
enum class AppendState : int32_t
{
WAITING_FOR_SEGMENT,
PARSING_INIT_SEGMENT,
PARSING_MEDIA_SEGMENT,
};
typedef TrackInfo::TrackType TrackType;
typedef MediaData::Type MediaType;
typedef nsTArray<RefPtr<MediaRawData>> TrackBuffer;
typedef SourceBufferTask::AppendPromise AppendPromise;
typedef SourceBufferTask::RangeRemovalPromise RangeRemovalPromise;
// Interface for SourceBuffer
TrackBuffersManager(dom::SourceBufferAttributes* aAttributes,
MediaSourceDecoder* aParentDecoder,
TrackBuffersManager(MediaSourceDecoder* aParentDecoder,
const nsACString& aType);
// Add data to the end of the input buffer.
// Returns false if the append failed.
bool AppendData(MediaByteBuffer* aData,
media::TimeUnit aTimestampOffset);
// Run MSE Buffer Append Algorithm
// Queue a task to add data to the end of the input buffer and run the MSE
// Buffer Append Algorithm
// 3.5.5 Buffer Append Algorithm.
// http://w3c.github.io/media-source/index.html#sourcebuffer-buffer-append
RefPtr<AppendPromise> BufferAppend();
RefPtr<AppendPromise> AppendData(MediaByteBuffer* aData,
const SourceBufferAttributes& aAttributes);
// Abort any pending AppendData.
// Queue a task to abort any pending AppendData.
// Does nothing at this stage.
void AbortAppendData();
// Run MSE Reset Parser State Algorithm.
// Queue a task to run MSE Reset Parser State Algorithm.
// 3.5.2 Reset Parser State
void ResetParserState();
void ResetParserState(SourceBufferAttributes& aAttributes);
// Runs MSE range removal algorithm.
// Queue a task to run the MSE range removal algorithm.
// http://w3c.github.io/media-source/#sourcebuffer-coded-frame-removal
RefPtr<RangeRemovalPromise> RangeRemoval(media::TimeUnit aStart,
media::TimeUnit aEnd);
@ -111,14 +127,6 @@ public:
// The parent SourceBuffer is about to be destroyed.
void Detach();
AppendState GetAppendState()
{
return mAppendState;
}
void SetGroupStartTimestamp(const media::TimeUnit& aGroupStartTimestamp);
void RestartGroupStartTimestamp();
media::TimeUnit GroupEndTimestamp();
int64_t EvictionThreshold() const;
// Interface for MediaSourceDemuxer
@ -150,10 +158,10 @@ private:
friend class MediaSourceDemuxer;
virtual ~TrackBuffersManager();
// All following functions run on the taskqueue.
RefPtr<AppendPromise> InitSegmentParserLoop();
RefPtr<AppendPromise> DoAppendData(RefPtr<MediaByteBuffer> aData,
SourceBufferAttributes aAttributes);
void ScheduleSegmentParserLoop();
void SegmentParserLoop();
void AppendIncomingBuffers();
void InitializationSegmentReceived();
void ShutdownDemuxers();
void CreateDemuxerforMIMEType();
@ -169,7 +177,7 @@ private:
RefPtr<RangeRemovalPromise>
CodedFrameRemovalWithPromise(media::TimeInterval aInterval);
bool CodedFrameRemoval(media::TimeInterval aInterval);
void SetAppendState(AppendState aAppendState);
void SetAppendState(SourceBufferAttributes::AppendState aAppendState);
bool HasVideo() const
{
@ -180,25 +188,15 @@ private:
return mAudioTracks.mNumTracks > 0;
}
typedef Pair<RefPtr<MediaByteBuffer>, media::TimeUnit> IncomingBuffer;
void AppendIncomingBuffer(IncomingBuffer aData);
nsTArray<IncomingBuffer> mIncomingBuffers;
// The input buffer as per http://w3c.github.io/media-source/index.html#sourcebuffer-input-buffer
RefPtr<MediaByteBuffer> mInputBuffer;
// The current append state as per https://w3c.github.io/media-source/#sourcebuffer-append-state
// Accessed on both the main thread and the task queue.
Atomic<AppendState> mAppendState;
// Buffer full flag as per https://w3c.github.io/media-source/#sourcebuffer-buffer-full-flag.
// Accessed on both the main thread and the task queue.
// TODO: Unused for now.
Atomic<bool> mBufferFull;
bool mFirstInitializationSegmentReceived;
// Set to true once a new segment is started.
bool mNewMediaSegmentStarted;
bool mActiveTrack;
Maybe<media::TimeUnit> mGroupStartTimestamp;
media::TimeUnit mGroupEndTimestamp;
nsCString mType;
// ContainerParser objects and methods.
@ -245,7 +243,7 @@ private:
OnDemuxFailed(TrackType::kAudioTrack, aFailure);
}
void DoEvictData(const media::TimeUnit& aPlaybackTime, int64_t aThreshold);
void DoEvictData(const media::TimeUnit& aPlaybackTime, int64_t aSizeToEvict);
struct TrackData {
TrackData()
@ -348,8 +346,6 @@ private:
MozPromiseRequestHolder<CodedFrameProcessingPromise> mProcessingRequest;
MozPromiseHolder<CodedFrameProcessingPromise> mProcessingPromise;
MozPromiseHolder<AppendPromise> mAppendPromise;
// Trackbuffers definition.
nsTArray<TrackData*> GetTracksList();
TrackData& GetTracksData(TrackType aTrack)
@ -375,17 +371,29 @@ private:
}
RefPtr<TaskQueue> mTaskQueue;
// SourceBuffer Queues and running context.
SourceBufferTaskQueue mQueue;
void ProcessTasks();
void CancelAllTasks();
// Set if the TrackBuffersManager is currently processing a task.
// At this stage, this task is always a AppendBufferTask.
RefPtr<SourceBufferTask> mCurrentTask;
// Current SourceBuffer state for ongoing task.
// Its content is returned to the SourceBuffer once the AppendBufferTask has
// completed.
UniquePtr<SourceBufferAttributes> mSourceBufferAttributes;
// The current sourcebuffer append window. It's content is equivalent to
// mSourceBufferAttributes.mAppendWindowStart/End
media::TimeInterval mAppendWindow;
media::TimeUnit mTimestampOffset;
media::TimeUnit mLastTimestampOffset;
void RestoreCachedVariables();
// Strong references to external objects.
RefPtr<dom::SourceBufferAttributes> mSourceBufferAttributes;
nsMainThreadPtrHandle<MediaSourceDecoder> mParentDecoder;
// Set to true if mediasource state changed to ended.
Atomic<bool> mEnded;
// Set to true if the parent SourceBuffer has shutdown.
// We will not reschedule or process new task once mDetached is set.
Atomic<bool> mDetached;
// Global size of this source buffer content.
Atomic<int64_t> mSizeSourceBuffer;
@ -394,14 +402,10 @@ private:
Atomic<bool> mEvictionOccurred;
// Monitor to protect following objects accessed across multipple threads.
// mMonitor is also notified if the value of mAppendRunning becomes false.
mutable Monitor mMonitor;
// Set to true while a BufferAppend is running or is pending.
Atomic<bool> mAppendRunning;
// Stable audio and video track time ranges.
media::TimeIntervals mVideoBufferedRanges;
media::TimeIntervals mAudioBufferedRanges;
media::TimeUnit mOfficialGroupEndTimestamp;
// MediaInfo of the first init segment read.
MediaInfo mInfo;
};

View File

@ -9,6 +9,8 @@ EXPORTS += [
'AsyncEventRunner.h',
'MediaSourceDecoder.h',
'MediaSourceDemuxer.h',
'SourceBufferAttributes.h',
'SourceBufferTask.h',
'TrackBuffersManager.h',
]