Bug 1878510 - When recreating WebM demuxer when encountering a new segment, propagate media time (esr115 rebase) r=media-playback-reviewers,alwu, a=dmeehan

Differential Revision: https://phabricator.services.mozilla.com/D215116
Alastor Wu 2024-06-27 14:47:33 +00:00
parent 233293b21b
commit 7a59d2552d
4 changed files with 65 additions and 6 deletions
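At a glance, the patch makes TrackBuffersManager remember the end time of the frame being parsed when it tears down and re-creates the demuxer for a new WebM segment, and hands that time to the new WebMDemuxer so the first video packet demuxed after the reset can be given a sensible end timestamp. The sketch below models that handoff only; it is a simplified illustration, not Gecko code: std::optional stands in for mozilla::Maybe, and the Fake* structs and their methods (OnDiscontinuity, RecreateDemuxer, NextTimestampFor) are hypothetical names, with example timestamps chosen arbitrarily.

// Simplified model of the end-time handoff across a demuxer re-creation.
// Assumption: member names mirror the patch, but nothing here is the real
// TrackBuffersManager/WebMDemuxer implementation.
#include <cstdint>
#include <cstdio>
#include <optional>

// Stands in for the WebM demuxer: it may be seeded with the end time of the
// last frame seen before the previous demuxer instance was destroyed.
struct FakeWebMDemuxer {
  std::optional<int64_t> mVideoFrameEndTimeBeforeReset;  // microseconds

  explicit FakeWebMDemuxer(std::optional<int64_t> aEndTimeBeforeReset)
      : mVideoFrameEndTimeBeforeReset(aEndTimeBeforeReset) {}

  // Returns the end timestamp to use for a packet whose duration is unknown.
  int64_t NextTimestampFor(int64_t aTimestamp) {
    if (mVideoFrameEndTimeBeforeReset) {
      // Consume the propagated end time exactly once.
      int64_t next = *mVideoFrameEndTimeBeforeReset;
      mVideoFrameEndTimeBeforeReset.reset();
      return next;
    }
    return aTimestamp;  // no better information available in this toy model
  }
};

// Stands in for TrackBuffersManager: remembers the frame end time across the
// demuxer re-creation.
struct FakeTrackBuffersManager {
  std::optional<int64_t> mFrameEndTimeBeforeRecreateDemuxer;
  std::optional<FakeWebMDemuxer> mInputDemuxer;

  void OnDiscontinuity(int64_t aFrameEndTime) {
    // Mirrors "mFrameEndTimeBeforeRecreateDemuxer = Some(end)" before
    // ResetDemuxingState() in SegmentParserLoop().
    mFrameEndTimeBeforeRecreateDemuxer = aFrameEndTime;
    mInputDemuxer.reset();  // the demuxer goes away, the end time survives
  }

  void RecreateDemuxer() {
    // Mirrors CreateDemuxerforMIMEType(): hand the saved value over, then
    // clear it so it cannot leak into yet another demuxer later.
    mInputDemuxer.emplace(mFrameEndTimeBeforeRecreateDemuxer);
    mFrameEndTimeBeforeRecreateDemuxer.reset();
  }
};

int main() {
  FakeTrackBuffersManager manager;
  manager.OnDiscontinuity(/* end of last parsed frame, us */ 400000);
  manager.RecreateDemuxer();
  // The first packet after the reset gets the propagated end time (400000 us)
  // instead of one derived from nothing.
  std::printf("next_tstamp=%lld\n",
              static_cast<long long>(
                  manager.mInputDemuxer->NextTimestampFor(366667)));
  return 0;
}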

dom/media/mediasource/TrackBuffersManager.cpp

@@ -917,7 +917,11 @@ void TrackBuffersManager::SegmentParserLoop() {
       if (mNewMediaSegmentStarted) {
         if (NS_SUCCEEDED(newData) && mLastParsedEndTime.isSome() &&
             start < mLastParsedEndTime.ref()) {
-          MSE_DEBUG("Re-creating demuxer");
+          MSE_DEBUG("Re-creating demuxer, new start (%" PRId64
+                    ") is smaller than last parsed end time (%" PRId64 ")",
+                    start.ToMicroseconds(),
+                    mLastParsedEndTime->ToMicroseconds());
+          mFrameEndTimeBeforeRecreateDemuxer = Some(end);
           ResetDemuxingState();
           return;
         }
@@ -1033,8 +1037,15 @@ void TrackBuffersManager::CreateDemuxerforMIMEType() {
   if (mType.Type() == MEDIAMIMETYPE(VIDEO_WEBM) ||
       mType.Type() == MEDIAMIMETYPE(AUDIO_WEBM)) {
-    mInputDemuxer =
-        new WebMDemuxer(mCurrentInputBuffer, true /* IsMediaSource*/);
+    if (mFrameEndTimeBeforeRecreateDemuxer) {
+      MSE_DEBUG(
+          "CreateDemuxerFromMimeType: "
+          "mFrameEndTimeBeforeRecreateDemuxer=%" PRId64,
+          mFrameEndTimeBeforeRecreateDemuxer->ToMicroseconds());
+    }
+    mInputDemuxer = new WebMDemuxer(mCurrentInputBuffer, true,
+                                    mFrameEndTimeBeforeRecreateDemuxer);
+    mFrameEndTimeBeforeRecreateDemuxer.reset();
     DDLINKCHILD("demuxer", mInputDemuxer.get());
     return;
   }
@@ -1043,6 +1054,7 @@ void TrackBuffersManager::CreateDemuxerforMIMEType() {
   if (mType.Type() == MEDIAMIMETYPE(VIDEO_MP4) ||
       mType.Type() == MEDIAMIMETYPE(AUDIO_MP4)) {
     mInputDemuxer = new MP4Demuxer(mCurrentInputBuffer);
+    mFrameEndTimeBeforeRecreateDemuxer.reset();
     DDLINKCHILD("demuxer", mInputDemuxer.get());
     return;
   }
@@ -1622,9 +1634,11 @@ void TrackBuffersManager::MaybeDispatchEncryptedEvent(
 void TrackBuffersManager::OnVideoDemuxCompleted(
     RefPtr<MediaTrackDemuxer::SamplesHolder> aSamples) {
   mTaskQueueCapability->AssertOnCurrentThread();
-  MSE_DEBUG("%zu video samples demuxed", aSamples->GetSamples().Length());
   mVideoTracks.mDemuxRequest.Complete();
   mVideoTracks.mQueuedSamples.AppendElements(aSamples->GetSamples());
+  MSE_DEBUG("%zu video samples demuxed, queued-sz=%zu",
+            aSamples->GetSamples().Length(),
+            mVideoTracks.mQueuedSamples.Length());
   MaybeDispatchEncryptedEvent(aSamples->GetSamples());
   DoDemuxAudio();
@@ -1875,6 +1889,9 @@ void TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples,
       // coded frame.
       if (!sample->mKeyframe) {
         previouslyDroppedSample = nullptr;
+        SAMPLE_DEBUGV("skipping sample [%" PRId64 ",%" PRId64 "]",
+                      sample->mTime.ToMicroseconds(),
+                      sample->GetEndTime().ToMicroseconds());
         continue;
       }
       // 2. Set the need random access point flag on track buffer to false.
@@ -1951,6 +1968,7 @@ void TrackBuffersManager::ProcessFrames(TrackBuffer& aSamples,
         // 4. Unset the highest end timestamp on all track buffers.
         // 5. Set the need random access point flag on all track buffers to
         // true.
+        MSE_DEBUG("Resetting append state");
         track->ResetAppendState();
       }
       // 6. Jump to the Loop Top step above to restart processing of the current
@@ -2469,10 +2487,12 @@ void TrackBuffersManager::RecreateParser(bool aReuseInitData) {
   mParser = ContainerParser::CreateForMIMEType(mType);
   DDLINKCHILD("parser", mParser.get());
   if (aReuseInitData && mInitData) {
+    MSE_DEBUG("Using existing init data to reset parser");
     TimeUnit start, end;
     mParser->ParseStartAndEndTimestamps(MediaSpan(mInitData), start, end);
     mProcessedInput = mInitData->Length();
   } else {
+    MSE_DEBUG("Resetting parser, not reusing init data");
     mProcessedInput = 0;
   }
 }
@@ -2757,12 +2777,20 @@ const MediaRawData* TrackBuffersManager::GetSample(TrackInfo::TrackType aTrack,
   const TrackBuffer& track = GetTrackBuffer(aTrack);
   if (aIndex >= track.Length()) {
+    MSE_DEBUGV(
+        "Can't get sample due to reaching the end, index=%u, "
+        "length=%zu",
+        aIndex, track.Length());
     // reached the end.
     return nullptr;
   }
   if (!(aExpectedDts + aFuzz).IsValid() || !(aExpectedPts + aFuzz).IsValid()) {
     // Time overflow, it seems like we also reached the end.
+    MSE_DEBUGV("Can't get sample due to time overflow, expectedPts=%" PRId64
+               ", aExpectedDts=%" PRId64 ", fuzz=%" PRId64,
+               aExpectedPts.ToMicroseconds(), aExpectedDts.ToMicroseconds(),
+               aFuzz.ToMicroseconds());
     return nullptr;
   }
@@ -2773,6 +2801,12 @@ const MediaRawData* TrackBuffersManager::GetSample(TrackInfo::TrackType aTrack,
     return sample;
   }
+  MSE_DEBUGV("Can't get sample due to big gap, sample=%" PRId64
+             ", expectedPts=%" PRId64 ", aExpectedDts=%" PRId64
+             ", fuzz=%" PRId64,
+             sample->mTime.ToMicroseconds(), aExpectedPts.ToMicroseconds(),
+             aExpectedDts.ToMicroseconds(), aFuzz.ToMicroseconds());
   // Gap is too big. End of Stream or Waiting for Data.
   // TODO, check that we have continuous data based on the sanitized buffered
   // range instead.

dom/media/mediasource/TrackBuffersManager.h

@@ -561,6 +561,8 @@ class TrackBuffersManager final
   // mTaskQueue. However, there's special locking around mTaskQueue, so we keep
   // both for now.
   Maybe<EventTargetCapability<TaskQueue>> mTaskQueueCapability;
+  Maybe<media::TimeUnit> mFrameEndTimeBeforeRecreateDemuxer;
 };
 }  // namespace mozilla

dom/media/webm/WebMDemuxer.cpp

@@ -16,6 +16,7 @@
 #include "gfx2DGlue.h"
 #include "gfxUtils.h"
 #include "mozilla/EndianUtils.h"
+#include "mozilla/Maybe.h"
 #include "mozilla/SharedThreadPool.h"
 #include "MediaDataDemuxer.h"
 #include "nsAutoRef.h"
@@ -151,7 +152,9 @@ int WebMDemuxer::NestEggContext::Init() {
 WebMDemuxer::WebMDemuxer(MediaResource* aResource)
     : WebMDemuxer(aResource, false) {}
-WebMDemuxer::WebMDemuxer(MediaResource* aResource, bool aIsMediaSource)
+WebMDemuxer::WebMDemuxer(
+    MediaResource* aResource, bool aIsMediaSource,
+    Maybe<media::TimeUnit> aFrameEndTimeBeforeRecreateDemuxer)
     : mVideoContext(this, aResource),
       mAudioContext(this, aResource),
       mBufferedState(nullptr),
@@ -170,6 +173,14 @@ WebMDemuxer::WebMDemuxer(MediaResource* aResource, bool aIsMediaSource)
   // Audio/video contexts hold a MediaResourceIndex.
   DDLINKCHILD("video context", mVideoContext.GetResource());
   DDLINKCHILD("audio context", mAudioContext.GetResource());
+  MOZ_ASSERT_IF(!aIsMediaSource,
+                aFrameEndTimeBeforeRecreateDemuxer.isNothing());
+  if (aIsMediaSource && aFrameEndTimeBeforeRecreateDemuxer) {
+    mVideoFrameEndTimeBeforeReset = aFrameEndTimeBeforeRecreateDemuxer;
+    WEBM_DEBUG("Set mVideoFrameEndTimeBeforeReset=%" PRId64,
+               mVideoFrameEndTimeBeforeReset->ToMicroseconds());
+  }
 }
 WebMDemuxer::~WebMDemuxer() {
@@ -586,6 +597,10 @@ nsresult WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType,
   }
   int64_t tstamp = holder->Timestamp();
   int64_t duration = holder->Duration();
+  if (aType == TrackInfo::TrackType::kVideoTrack) {
+    WEBM_DEBUG("GetNextPacket(video): tstamp=%" PRId64 ", duration=%" PRId64,
+               tstamp, duration);
+  }
   // The end time of this frame is the start time of the next frame. Fetch
   // the timestamp of the next packet for this track. If we've reached the
@@ -607,6 +622,10 @@ nsresult WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType,
       next_tstamp = tstamp + duration;
     } else if (lastFrameTime.isSome()) {
       next_tstamp = tstamp + (tstamp - lastFrameTime.ref());
+    } else if (mVideoFrameEndTimeBeforeReset) {
+      WEBM_DEBUG("Setting next timestamp to be %" PRId64 " us",
+                 mVideoFrameEndTimeBeforeReset->ToMicroseconds());
+      next_tstamp = mVideoFrameEndTimeBeforeReset->ToMicroseconds();
     } else if (mIsMediaSource) {
       (this->*pushPacket)(holder);
     } else {
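The GetNextPacket() hunks above extend the fallback chain used to derive a video packet's end timestamp when the next packet cannot be peeked. Below is a hedged sketch of that decision order as a standalone helper; the names (DeriveNextTimestamp, NextTimestampResult) are hypothetical, and the real code interleaves this logic with nestegg packet handling and re-queues the packet instead of returning a flag.

// Sketch only: models the video-path fallback order, not the real demuxer.
#include <cstdint>
#include <optional>

// Result of trying to derive the end timestamp of the current video packet.
struct NextTimestampResult {
  bool needMoreData = false;          // MediaSource case: wait for more input
  std::optional<int64_t> nextTstamp;  // microseconds
};

// Decision order mirrored from the patched video path above: explicit
// duration -> extrapolate from the previous frame time -> end time saved
// before the demuxer was re-created (the new branch) -> for MediaSource,
// give the packet back and wait for more data.
NextTimestampResult DeriveNextTimestamp(
    int64_t tstamp, std::optional<int64_t> duration,
    std::optional<int64_t> lastFrameTime,
    std::optional<int64_t> frameEndTimeBeforeReset, bool isMediaSource) {
  NextTimestampResult result;
  if (duration) {
    result.nextTstamp = tstamp + *duration;
  } else if (lastFrameTime) {
    result.nextTstamp = tstamp + (tstamp - *lastFrameTime);
  } else if (frameEndTimeBeforeReset) {
    result.nextTstamp = *frameEndTimeBeforeReset;  // propagated across the reset
  } else if (isMediaSource) {
    result.needMoreData = true;  // the real code pushes the packet back here
  } else {
    // The hunk ends here; the original code has one more non-MSE fallback.
  }
  return result;
}

With the saved end time available, a packet demuxed right after the reset can take its end timestamp from the frame that preceded the reset instead of falling through to the MediaSource wait path.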

dom/media/webm/WebMDemuxer.h

@@ -94,7 +94,9 @@ class WebMDemuxer : public MediaDataDemuxer,
   explicit WebMDemuxer(MediaResource* aResource);
   // Indicate if the WebMDemuxer is to be used with MediaSource. In which
   // case the demuxer will stop reads to the last known complete block.
-  WebMDemuxer(MediaResource* aResource, bool aIsMediaSource);
+  WebMDemuxer(
+      MediaResource* aResource, bool aIsMediaSource,
+      Maybe<media::TimeUnit> aFrameEndTimeBeforeRecreateDemuxer = Nothing());
   RefPtr<InitPromise> Init() override;
@@ -223,6 +225,8 @@ class WebMDemuxer : public MediaDataDemuxer,
   Maybe<int64_t> mLastAudioFrameTime;
   Maybe<int64_t> mLastVideoFrameTime;
+  Maybe<media::TimeUnit> mVideoFrameEndTimeBeforeReset;
   // Codec ID of audio track
   int mAudioCodec;
   // Codec ID of video track