Bug 1756260 - part 1: split some of the current state machine's methods and variables into a new base class, in preparation for a new type of state machine. r=jolin

In a following patch, we will implement a new type of state machine. To avoid redundant code, this patch splits some of the basic shared code into a new base class.

Differential Revision: https://phabricator.services.mozilla.com/D140013
alwu 2022-05-11 17:46:11 +00:00
parent aa5a49c072
commit 6bf81c9c16
5 changed files with 526 additions and 385 deletions
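The overall shape of the refactoring: shared plumbing (task-queue ownership, the mirrored/canonical playback state, the event producers, and the Dispatch* helpers) moves into MediaDecoderStateMachineBase, while MediaDecoderStateMachine now delegates to the base constructor (": MediaDecoderStateMachineBase(aDecoder, aReader), ...") and keeps only decoding-specific members. A simplified standalone sketch of that shape follows; Decoder, StateMachineBase, and StateMachine are illustrative plain-C++ stand-ins, not the real Gecko types.

#include <cstdio>

struct Decoder {  // stands in for MediaDecoder
  bool mMinimizePreroll = true;
};

class StateMachineBase {
 public:
  explicit StateMachineBase(Decoder* aDecoder)
      : mPlaybackRate(1.0), mMinimizePreroll(aDecoder->mMinimizePreroll) {}
  virtual ~StateMachineBase() = default;

  // In the real code the WatchManager invokes the virtual callback; here a
  // plain public helper does it.
  void NotifyPlayStateChanged() { PlayStateChanged(); }

 protected:
  // Hook the concrete state machine must provide (Seek, Shutdown, the watch
  // callbacks, ... in the actual patch).
  virtual void PlayStateChanged() = 0;

  // Members hoisted out of MediaDecoderStateMachine by this patch.
  double mPlaybackRate;
  bool mMinimizePreroll;
};

class StateMachine final : public StateMachineBase {
 public:
  explicit StateMachine(Decoder* aDecoder)
      // mirrors ": MediaDecoderStateMachineBase(aDecoder, aReader),"
      : StateMachineBase(aDecoder), mCurrentFrameID(0) {}

 protected:
  void PlayStateChanged() override {
    std::printf("play state changed (rate=%.1f, frame=%d)\n", mPlaybackRate,
                mCurrentFrameID);
  }

 private:
  int mCurrentFrameID;  // decoding-specific state stays in the subclass
};

int main() {
  Decoder decoder;
  StateMachine sm(&decoder);
  sm.NotifyPlayStateChanged();
}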

dom/media/MediaDecoderStateMachine.cpp

@@ -2757,32 +2757,17 @@ RefPtr<ShutdownPromise> MediaDecoderStateMachine::ShutdownState::Enter() {
MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
MediaFormatReader* aReader)
: mDecoderID(aDecoder),
mAbstractMainThread(aDecoder->AbstractMainThread()),
mFrameStats(&aDecoder->GetFrameStatistics()),
mVideoFrameContainer(aDecoder->GetVideoFrameContainer()),
mTaskQueue(TaskQueue::Create(GetMediaThreadPool(MediaThreadType::MDSM),
"MDSM::mTaskQueue",
/* aSupportsTailDispatch = */ true)),
: MediaDecoderStateMachineBase(aDecoder, aReader),
mWatchManager(this, mTaskQueue),
mDispatchedStateMachine(false),
mDelayedScheduler(mTaskQueue, true /*aFuzzy*/),
mCurrentFrameID(0),
mReader(new ReaderProxy(mTaskQueue, aReader)),
mPlaybackRate(1.0),
mAmpleAudioThreshold(detail::AMPLE_AUDIO_THRESHOLD),
mMinimizePreroll(aDecoder->GetMinimizePreroll()),
mSentFirstFrameLoadedEvent(false),
mVideoDecodeSuspended(false),
mVideoDecodeSuspendTimer(mTaskQueue),
mVideoDecodeMode(VideoDecodeMode::Normal),
mIsMSE(aDecoder->IsMSE()),
mSeamlessLoopingAllowed(false),
INIT_MIRROR(mBuffered, TimeIntervals()),
INIT_MIRROR(mPlayState, MediaDecoder::PLAY_STATE_LOADING),
INIT_MIRROR(mVolume, 1.0),
INIT_MIRROR(mPreservesPitch, true),
INIT_MIRROR(mLooping, false),
INIT_MIRROR(mStreamName, nsAutoString()),
INIT_MIRROR(mSinkDevice, nullptr),
INIT_MIRROR(mSecondaryVideoContainer, nullptr),
@@ -2792,10 +2777,7 @@ MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
INIT_MIRROR(mOutputPrincipal, PRINCIPAL_HANDLE_NONE),
INIT_CANONICAL(mCanonicalOutputTracks,
nsTArray<RefPtr<ProcessedMediaTrack>>()),
INIT_CANONICAL(mCanonicalOutputPrincipal, PRINCIPAL_HANDLE_NONE),
INIT_CANONICAL(mDuration, NullableTimeUnit()),
INIT_CANONICAL(mCurrentPosition, TimeUnit::Zero()),
INIT_CANONICAL(mIsAudioDataAudible, false) {
INIT_CANONICAL(mCanonicalOutputPrincipal, PRINCIPAL_HANDLE_NONE) {
MOZ_COUNT_CTOR(MediaDecoderStateMachine);
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
@@ -2818,13 +2800,9 @@ void MediaDecoderStateMachine::InitializationTask(MediaDecoder* aDecoder) {
MEDIA_PLAYBACK);
MOZ_ASSERT(OnTaskQueue());
MediaDecoderStateMachineBase::InitializationTask(aDecoder);
// Connect mirrors.
mBuffered.Connect(mReader->CanonicalBuffered());
mPlayState.Connect(aDecoder->CanonicalPlayState());
mVolume.Connect(aDecoder->CanonicalVolume());
mPreservesPitch.Connect(aDecoder->CanonicalPreservesPitch());
mLooping.Connect(aDecoder->CanonicalLooping());
mStreamName.Connect(aDecoder->CanonicalStreamName());
mSinkDevice.Connect(aDecoder->CanonicalSinkDevice());
mSecondaryVideoContainer.Connect(
aDecoder->CanonicalSecondaryVideoContainer());
@@ -2834,13 +2812,6 @@ void MediaDecoderStateMachine::InitializationTask(MediaDecoder* aDecoder) {
mOutputPrincipal.Connect(aDecoder->CanonicalOutputPrincipal());
// Initialize watchers.
mWatchManager.Watch(mBuffered,
&MediaDecoderStateMachine::BufferedRangeUpdated);
mWatchManager.Watch(mVolume, &MediaDecoderStateMachine::VolumeChanged);
mWatchManager.Watch(mPreservesPitch,
&MediaDecoderStateMachine::PreservesPitchChanged);
mWatchManager.Watch(mPlayState, &MediaDecoderStateMachine::PlayStateChanged);
mWatchManager.Watch(mLooping, &MediaDecoderStateMachine::LoopingChanged);
mWatchManager.Watch(mStreamName,
&MediaDecoderStateMachine::StreamNameChanged);
mWatchManager.Watch(mSecondaryVideoContainer,
@@ -3006,27 +2977,18 @@ void MediaDecoderStateMachine::SetMediaNotSeekable() { mMediaSeekable = false; }
nsresult MediaDecoderStateMachine::Init(MediaDecoder* aDecoder) {
MOZ_ASSERT(NS_IsMainThread());
// Dispatch initialization that needs to happen on that task queue.
nsCOMPtr<nsIRunnable> r = NewRunnableMethod<RefPtr<MediaDecoder>>(
"MediaDecoderStateMachine::InitializationTask", this,
&MediaDecoderStateMachine::InitializationTask, aDecoder);
mTaskQueue->DispatchStateChange(r.forget());
nsresult rv = MediaDecoderStateMachineBase::Init(aDecoder);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
mAudioQueueListener = AudioQueue().PopFrontEvent().Connect(
mTaskQueue, this, &MediaDecoderStateMachine::OnAudioPopped);
mVideoQueueListener = VideoQueue().PopFrontEvent().Connect(
mTaskQueue, this, &MediaDecoderStateMachine::OnVideoPopped);
mMetadataManager.Connect(mReader->TimedMetadataEvent(), OwnerThread());
mOnMediaNotSeekable = mReader->OnMediaNotSeekable().Connect(
OwnerThread(), this, &MediaDecoderStateMachine::SetMediaNotSeekable);
nsresult rv = mReader->Init();
NS_ENSURE_SUCCESS(rv, rv);
mReader->SetCanonicalDuration(&mDuration);
return NS_OK;
}
@@ -3285,12 +3247,6 @@ RefPtr<MediaDecoder::SeekPromise> MediaDecoderStateMachine::Seek(
return mStateObj->HandleSeek(aTarget);
}
RefPtr<MediaDecoder::SeekPromise> MediaDecoderStateMachine::InvokeSeek(
const SeekTarget& aTarget) {
return InvokeAsync(OwnerThread(), this, __func__,
&MediaDecoderStateMachine::Seek, aTarget);
}
void MediaDecoderStateMachine::StopMediaSink() {
MOZ_ASSERT(OnTaskQueue());
if (mMediaSink->IsStarted()) {
@@ -3582,15 +3538,6 @@ bool MediaDecoderStateMachine::HasLowBufferedData(const TimeUnit& aThreshold) {
return !mBuffered.Ref().Contains(interval);
}
void MediaDecoderStateMachine::DecodeError(const MediaResult& aError) {
MOZ_ASSERT(OnTaskQueue());
LOGE("Decode error: %s", aError.Description().get());
PROFILER_MARKER_TEXT("MDSM::DecodeError", MEDIA_PLAYBACK, {},
aError.Description());
// Notify the decode error and MediaDecoder will shut down MDSM.
mOnPlaybackErrorEvent.Notify(aError);
}
void MediaDecoderStateMachine::EnqueueFirstFrameLoadedEvent() {
MOZ_ASSERT(OnTaskQueue());
// Track value of mSentFirstFrameLoadedEvent from before updating it
@@ -3619,12 +3566,6 @@ void MediaDecoderStateMachine::FinishDecodeFirstFrame() {
EnqueueFirstFrameLoadedEvent();
}
RefPtr<ShutdownPromise> MediaDecoderStateMachine::BeginShutdown() {
MOZ_ASSERT(NS_IsMainThread());
return InvokeAsync(OwnerThread(), this, __func__,
&MediaDecoderStateMachine::Shutdown);
}
RefPtr<ShutdownPromise> MediaDecoderStateMachine::FinishShutdown() {
AUTO_PROFILER_LABEL("MediaDecoderStateMachine::FinishShutdown",
MEDIA_PLAYBACK);
@@ -3768,10 +3709,6 @@ void MediaDecoderStateMachine::ScheduleStateMachineIn(const TimeUnit& aTime) {
[]() { MOZ_DIAGNOSTIC_ASSERT(false); });
}
bool MediaDecoderStateMachine::OnTaskQueue() const {
return OwnerThread()->IsCurrentThreadIn();
}
bool MediaDecoderStateMachine::IsStateMachineScheduled() const {
MOZ_ASSERT(OnTaskQueue());
return mDispatchedStateMachine || mDelayedScheduler.IsScheduled();
@@ -4137,15 +4074,6 @@ size_t MediaDecoderStateMachine::SizeOfAudioQueue() const {
return functor.mSize;
}
AbstractCanonical<media::TimeIntervals>*
MediaDecoderStateMachine::CanonicalBuffered() const {
return mReader->CanonicalBuffered();
}
MediaEventSource<void>& MediaDecoderStateMachine::OnMediaNotSeekable() const {
return mReader->OnMediaNotSeekable();
}
const char* MediaDecoderStateMachine::AudioRequestStatus() const {
MOZ_ASSERT(OnTaskQueue());
if (IsRequestingAudioData()) {
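Worth noting from the hunks above: both Init() and InitializationTask() in MediaDecoderStateMachine now call their MediaDecoderStateMachineBase counterparts first and then do only the derived-specific wiring (queue listeners, the extra mirrors and watchers). A rough standalone sketch of that two-level delegation, with hypothetical names (Base/Derived, an int return standing in for nsresult):

#include <cstdio>

// Illustrative stand-ins only; the shape matches the patch: the base Init()
// dispatches InitializationTask() and initializes the reader, the derived
// Init() calls the base first and then adds its own listeners.
class Base {
 public:
  virtual ~Base() = default;

  virtual int Init() {
    InitializationTask();  // in the real code this is dispatched to the task queue
    std::puts("base: reader initialized, metadata connected");
    return 0;
  }

 protected:
  virtual void InitializationTask() {
    std::puts("base: connect mBuffered/mPlayState/mVolume/... mirrors and watchers");
  }
};

class Derived final : public Base {
 public:
  int Init() override {
    if (int rv = Base::Init(); rv != 0) {  // NS_WARN_IF(NS_FAILED(rv)) in the patch
      return rv;
    }
    std::puts("derived: connect audio/video queue listeners, OnMediaNotSeekable");
    return 0;
  }

 protected:
  void InitializationTask() override {
    Base::InitializationTask();
    std::puts("derived: connect mStreamName/mSinkDevice/... mirrors and watchers");
  }
};

int main() { return Derived().Init(); }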

dom/media/MediaDecoderStateMachine.h

@@ -3,6 +3,38 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(MediaDecoderStateMachine_h__)
# define MediaDecoderStateMachine_h__
# include "AudioDeviceInfo.h"
# include "ImageContainer.h"
# include "MediaDecoder.h"
# include "MediaDecoderOwner.h"
# include "MediaDecoderStateMachineBase.h"
# include "MediaFormatReader.h"
# include "MediaQueue.h"
# include "MediaSink.h"
# include "MediaStatistics.h"
# include "MediaTimer.h"
# include "SeekJob.h"
# include "mozilla/Attributes.h"
# include "mozilla/ReentrantMonitor.h"
# include "mozilla/StateMirroring.h"
# include "nsThreadUtils.h"
namespace mozilla {
class AbstractThread;
class AudioSegment;
class DecodedStream;
class DOMMediaStream;
class ReaderProxy;
class TaskQueue;
extern LazyLogModule gMediaDecoderLog;
DDLoggedTypeDeclName(MediaDecoderStateMachine);
/*
Each media element for a media file has one thread called the "audio thread".
@@ -78,93 +110,19 @@ monitor) if the audio queue is empty. Otherwise it constantly pops
audio data off the queue and plays it with a blocking write to the audio
hardware (via AudioStream).
*/
#if !defined(MediaDecoderStateMachine_h__)
# define MediaDecoderStateMachine_h__
# include "AudioDeviceInfo.h"
# include "ImageContainer.h"
# include "MediaDecoder.h"
# include "MediaDecoderOwner.h"
# include "MediaEventSource.h"
# include "MediaFormatReader.h"
# include "MediaMetadataManager.h"
# include "MediaQueue.h"
# include "MediaSink.h"
# include "MediaStatistics.h"
# include "MediaTimer.h"
# include "SeekJob.h"
# include "mozilla/Attributes.h"
# include "mozilla/ReentrantMonitor.h"
# include "mozilla/StateMirroring.h"
# include "mozilla/dom/MediaDebugInfoBinding.h"
# include "nsThreadUtils.h"
namespace mozilla {
class AbstractThread;
class AudioSegment;
class DecodedStream;
class DOMMediaStream;
class ReaderProxy;
class TaskQueue;
extern LazyLogModule gMediaDecoderLog;
struct MediaPlaybackEvent {
enum EventType {
PlaybackStarted,
PlaybackStopped,
PlaybackProgressed,
PlaybackEnded,
SeekStarted,
Invalidate,
EnterVideoSuspend,
ExitVideoSuspend,
StartVideoSuspendTimer,
CancelVideoSuspendTimer,
VideoOnlySeekBegin,
VideoOnlySeekCompleted,
} mType;
using DataType = Variant<Nothing, int64_t>;
DataType mData;
MOZ_IMPLICIT MediaPlaybackEvent(EventType aType)
: mType(aType), mData(Nothing{}) {}
template <typename T>
MediaPlaybackEvent(EventType aType, T&& aArg)
: mType(aType), mData(std::forward<T>(aArg)) {}
};
enum class VideoDecodeMode : uint8_t { Normal, Suspend };
DDLoggedTypeDeclName(MediaDecoderStateMachine);
/*
The state machine class. This manages the decoding and seeking in the
MediaDecoderReader on the decode task queue, and A/V sync on the shared
state machine thread, and controls the audio "push" thread.
All internal state is synchronised via the decoder monitor. State changes
are propagated by scheduling the state machine to run another cycle on the
shared state machine thread.
See MediaDecoder.h for more details.
*/
class MediaDecoderStateMachine
: public DecoderDoctorLifeLogger<MediaDecoderStateMachine> {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDecoderStateMachine)
: public MediaDecoderStateMachineBase,
public DecoderDoctorLifeLogger<MediaDecoderStateMachine> {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDecoderStateMachine, override)
using TrackSet = MediaFormatReader::TrackSet;
public:
typedef MediaDecoderOwner::NextFrameStatus NextFrameStatus;
typedef mozilla::layers::ImageContainer::FrameID FrameID;
MediaDecoderStateMachine(MediaDecoder* aDecoder, MediaFormatReader* aReader);
nsresult Init(MediaDecoder* aDecoder);
nsresult Init(MediaDecoder* aDecoder) override;
// Enumeration for the valid decoding states
enum State {
@@ -182,103 +140,21 @@ class MediaDecoderStateMachine
DECODER_STATE_SHUTDOWN
};
// Returns the state machine task queue.
TaskQueue* OwnerThread() const { return mTaskQueue; }
RefPtr<GenericPromise> RequestDebugInfo(
dom::MediaDecoderStateMachineDebugInfo& aInfo);
dom::MediaDecoderStateMachineDebugInfo& aInfo) override;
// Seeks to the decoder to aTarget asynchronously.
RefPtr<MediaDecoder::SeekPromise> InvokeSeek(const SeekTarget& aTarget);
size_t SizeOfVideoQueue() const override;
void DispatchSetPlaybackRate(double aPlaybackRate) {
OwnerThread()->DispatchStateChange(NewRunnableMethod<double>(
"MediaDecoderStateMachine::SetPlaybackRate", this,
&MediaDecoderStateMachine::SetPlaybackRate, aPlaybackRate));
}
RefPtr<ShutdownPromise> BeginShutdown();
// Set the media fragment end time.
void DispatchSetFragmentEndTime(const media::TimeUnit& aEndTime) {
RefPtr<MediaDecoderStateMachine> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
"MediaDecoderStateMachine::DispatchSetFragmentEndTime",
[self, aEndTime]() {
// A negative number means we don't have a fragment end time at all.
self->mFragmentEndTime = aEndTime >= media::TimeUnit::Zero()
? aEndTime
: media::TimeUnit::Invalid();
});
nsresult rv = OwnerThread()->Dispatch(r.forget());
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
}
void DispatchCanPlayThrough(bool aCanPlayThrough) {
RefPtr<MediaDecoderStateMachine> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
"MediaDecoderStateMachine::DispatchCanPlayThrough",
[self, aCanPlayThrough]() { self->mCanPlayThrough = aCanPlayThrough; });
OwnerThread()->DispatchStateChange(r.forget());
}
void DispatchIsLiveStream(bool aIsLiveStream) {
RefPtr<MediaDecoderStateMachine> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
"MediaDecoderStateMachine::DispatchIsLiveStream",
[self, aIsLiveStream]() { self->mIsLiveStream = aIsLiveStream; });
OwnerThread()->DispatchStateChange(r.forget());
}
TimedMetadataEventSource& TimedMetadataEvent() {
return mMetadataManager.TimedMetadataEvent();
}
MediaEventSource<void>& OnMediaNotSeekable() const;
MediaEventSourceExc<UniquePtr<MediaInfo>, UniquePtr<MetadataTags>,
MediaDecoderEventVisibility>&
MetadataLoadedEvent() {
return mMetadataLoadedEvent;
}
MediaEventSourceExc<UniquePtr<MediaInfo>, MediaDecoderEventVisibility>&
FirstFrameLoadedEvent() {
return mFirstFrameLoadedEvent;
}
MediaEventSource<MediaPlaybackEvent>& OnPlaybackEvent() {
return mOnPlaybackEvent;
}
MediaEventSource<MediaResult>& OnPlaybackErrorEvent() {
return mOnPlaybackErrorEvent;
}
MediaEventSource<DecoderDoctorEvent>& OnDecoderDoctorEvent() {
return mOnDecoderDoctorEvent;
}
MediaEventSource<NextFrameStatus>& OnNextFrameStatus() {
return mOnNextFrameStatus;
}
MediaEventSourceExc<RefPtr<VideoFrameContainer>>&
OnSecondaryVideoContainerInstalled() {
return mOnSecondaryVideoContainerInstalled;
}
size_t SizeOfVideoQueue() const;
size_t SizeOfAudioQueue() const;
size_t SizeOfAudioQueue() const override;
// Sets the video decode mode. Used by the suspend-video-decoder feature.
void SetVideoDecodeMode(VideoDecodeMode aMode);
void SetVideoDecodeMode(VideoDecodeMode aMode) override;
RefPtr<GenericPromise> InvokeSetSink(const RefPtr<AudioDeviceInfo>& aSink);
RefPtr<GenericPromise> InvokeSetSink(
const RefPtr<AudioDeviceInfo>& aSink) override;
void InvokeSuspendMediaSink();
void InvokeResumeMediaSink();
void InvokeSuspendMediaSink() override;
void InvokeResumeMediaSink() override;
private:
class StateObject;
@@ -301,18 +177,14 @@ class MediaDecoderStateMachine
void GetDebugInfo(dom::MediaDecoderStateMachineDebugInfo& aInfo);
// Functions used by assertions to ensure we're calling things
// on the appropriate threads.
bool OnTaskQueue() const;
// Initialization that needs to happen on the task queue. This is the first
// task that gets run on the task queue, and is dispatched from the MDSM
// constructor immediately after the task queue is created.
void InitializationTask(MediaDecoder* aDecoder);
void InitializationTask(MediaDecoder* aDecoder) override;
RefPtr<MediaDecoder::SeekPromise> Seek(const SeekTarget& aTarget);
RefPtr<MediaDecoder::SeekPromise> Seek(const SeekTarget& aTarget) override;
RefPtr<ShutdownPromise> Shutdown();
RefPtr<ShutdownPromise> Shutdown() override;
RefPtr<ShutdownPromise> FinishShutdown();
@@ -323,10 +195,6 @@ class MediaDecoderStateMachine
// the decode monitor held.
void UpdatePlaybackPosition(const media::TimeUnit& aTime);
bool HasAudio() const { return mInfo.ref().HasAudio(); }
bool HasVideo() const { return mInfo.ref().HasVideo(); }
const MediaInfo& Info() const { return mInfo.ref(); }
// Schedules the shared state machine thread to run the state machine.
void ScheduleStateMachine();
@@ -374,7 +242,11 @@ class MediaDecoderStateMachine
protected:
virtual ~MediaDecoderStateMachine();
void BufferedRangeUpdated();
void BufferedRangeUpdated() override;
void VolumeChanged() override;
void PreservesPitchChanged() override;
void PlayStateChanged() override;
void LoopingChanged() override;
void ReaderSuspendedChanged();
@@ -388,10 +260,20 @@ class MediaDecoderStateMachine
void AudioAudibleChanged(bool aAudible);
void VolumeChanged();
void SetPlaybackRate(double aPlaybackRate);
void PreservesPitchChanged();
void LoopingChanged();
void SetPlaybackRate(double aPlaybackRate) override;
void SetIsLiveStream(bool aIsLiveStream) override {
mIsLiveStream = aIsLiveStream;
}
void SetCanPlayThrough(bool aCanPlayThrough) override {
mCanPlayThrough = aCanPlayThrough;
}
void SetFragmentEndTime(const media::TimeUnit& aEndTime) override {
// A negative number means we don't have a fragment end time at all.
mFragmentEndTime = aEndTime >= media::TimeUnit::Zero()
? aEndTime
: media::TimeUnit::Invalid();
}
void StreamNameChanged();
void UpdateSecondaryVideoContainer();
void UpdateOutputCaptured();
@@ -459,9 +341,6 @@ class MediaDecoderStateMachine
// If start fails an NS_ERROR_FAILURE is returned.
nsresult StartMediaSink();
// Notification method invoked when mPlayState changes.
void PlayStateChanged();
// Notification method invoked when mIsVisible changes.
void VisibilityChanged();
@@ -474,12 +353,6 @@ class MediaDecoderStateMachine
// Must be called with the decode monitor held.
void MaybeStartPlayback();
// Moves the decoder into the shutdown state, and dispatches an error
// event to the media element. This begins shutting down the decoder.
// The decoder monitor must be held. This is only called on the
// decode thread.
void DecodeError(const MediaResult& aError);
void EnqueueFirstFrameLoadedEvent();
// Start a task to decode audio.
@@ -494,11 +367,6 @@ class MediaDecoderStateMachine
void WaitForData(MediaData::Type aType);
bool IsRequestingAudioData() const { return mAudioDataRequest.Exists(); }
bool IsRequestingVideoData() const { return mVideoDataRequest.Exists(); }
bool IsWaitingAudioData() const { return mAudioWaitRequest.Exists(); }
bool IsWaitingVideoData() const { return mVideoWaitRequest.Exists(); }
// Returns the "media time". This is the absolute time which the media
// playback has reached. i.e. this returns values in the range
// [mStartTime, mEndTime], and mStartTime will not be 0 if the media does
@@ -540,14 +408,6 @@ class MediaDecoderStateMachine
void OnMediaSinkAudioError(nsresult aResult);
void OnMediaSinkVideoError();
void* const mDecoderID;
const RefPtr<AbstractThread> mAbstractMainThread;
const RefPtr<FrameStatistics> mFrameStats;
const RefPtr<VideoFrameContainer> mVideoFrameContainer;
// Task queue for running the state machine.
RefPtr<TaskQueue> mTaskQueue;
// State-watching manager.
WatchManager<MediaDecoderStateMachine> mWatchManager;
@@ -581,8 +441,6 @@ class MediaDecoderStateMachine
// The media sink resource. Used on the state machine thread.
RefPtr<MediaSink> mMediaSink;
const RefPtr<ReaderProxy> mReader;
// The end time of the last audio frame that's been pushed onto the media sink
// in microseconds. This will approximately be the end time
// of the audio stream, unless another frame is pushed to the hardware.
@@ -600,9 +458,6 @@ class MediaDecoderStateMachine
// on decoded video data.
media::TimeUnit mDecodedVideoEndTime;
// Playback rate. 1.0 : normal speed, 0.5 : two times slower.
double mPlaybackRate;
// If we've got more than this number of decoded video frames waiting in
// the video queue, we will not decode any more video frames until some have
// been consumed by the play state machine thread.
@@ -613,16 +468,6 @@ class MediaDecoderStateMachine
// pause decoding.
media::TimeUnit mAmpleAudioThreshold;
// Only one of a given pair of ({Audio,Video}DataPromise, WaitForDataPromise)
// should exist at any given moment.
using AudioDataPromise = MediaFormatReader::AudioDataPromise;
using VideoDataPromise = MediaFormatReader::VideoDataPromise;
using WaitForDataPromise = MediaFormatReader::WaitForDataPromise;
MozPromiseRequestHolder<AudioDataPromise> mAudioDataRequest;
MozPromiseRequestHolder<VideoDataPromise> mVideoDataRequest;
MozPromiseRequestHolder<WaitForDataPromise> mAudioWaitRequest;
MozPromiseRequestHolder<WaitForDataPromise> mVideoWaitRequest;
const char* AudioRequestStatus() const;
const char* VideoRequestStatus() const;
@@ -641,38 +486,9 @@ class MediaDecoderStateMachine
// True if all video frames are already rendered.
bool mVideoCompleted = false;
// True if we should not decode/preroll unnecessary samples, unless we're
// played. "Prerolling" in this context refers to when we decode and
// buffer decoded samples in advance of when they're needed for playback.
// This flag is set for preload=metadata media, and means we won't
// decode more than the first video frame and first block of audio samples
// for that media when we startup, or after a seek. When Play() is called,
// we reset this flag, as we assume the user is playing the media, so
// prerolling is appropriate then. This flag is used to reduce the overhead
// of prerolling samples for media elements that may not play, both
// memory and CPU overhead.
bool mMinimizePreroll;
// Stores presentation info required for playback.
Maybe<MediaInfo> mInfo;
mozilla::MediaMetadataManager mMetadataManager;
// True if we've decoded first frames (thus having the start time) and
// notified the FirstFrameLoaded event. Note we can't initiate seek until the
// start time is known which happens when the first frames are decoded or we
// are playing an MSE stream (the start time is always assumed 0).
bool mSentFirstFrameLoadedEvent;
// True if video decoding is suspended.
bool mVideoDecodeSuspended;
// True if the media is seekable (i.e. supports random access).
bool mMediaSeekable = true;
// True if the media is seekable only in buffered ranges.
bool mMediaSeekableOnlyInBufferedRanges = false;
// Track enabling video decode suspension via timer
DelayedScheduler mVideoDecodeSuspendTimer;
@@ -688,22 +504,6 @@ class MediaDecoderStateMachine
MediaEventListener mAudibleListener;
MediaEventListener mOnMediaNotSeekable;
MediaEventProducerExc<UniquePtr<MediaInfo>, UniquePtr<MetadataTags>,
MediaDecoderEventVisibility>
mMetadataLoadedEvent;
MediaEventProducerExc<UniquePtr<MediaInfo>, MediaDecoderEventVisibility>
mFirstFrameLoadedEvent;
MediaEventProducer<MediaPlaybackEvent> mOnPlaybackEvent;
MediaEventProducer<MediaResult> mOnPlaybackErrorEvent;
MediaEventProducer<DecoderDoctorEvent> mOnDecoderDoctorEvent;
MediaEventProducer<NextFrameStatus> mOnNextFrameStatus;
MediaEventProducerExc<RefPtr<VideoFrameContainer>>
mOnSecondaryVideoContainerInstalled;
const bool mIsMSE;
bool mSeamlessLoopingAllowed;
@@ -717,22 +517,6 @@ class MediaDecoderStateMachine
int64_t mPlaybackOffset = 0;
private:
// The buffered range. Mirrored from the decoder thread.
Mirror<media::TimeIntervals> mBuffered;
// The current play state, mirrored from the main thread.
Mirror<MediaDecoder::PlayState> mPlayState;
// Volume of playback. 0.0 = muted. 1.0 = full volume.
Mirror<double> mVolume;
// Pitch preservation for the playback rate.
Mirror<bool> mPreservesPitch;
// Whether to seek back to the start of the media resource
// upon reaching the end.
Mirror<bool> mLooping;
// Audio stream name
Mirror<nsAutoString> mStreamName;
@@ -761,26 +545,12 @@ class MediaDecoderStateMachine
Canonical<CopyableTArray<RefPtr<ProcessedMediaTrack>>> mCanonicalOutputTracks;
Canonical<PrincipalHandle> mCanonicalOutputPrincipal;
// Duration of the media. This is guaranteed to be non-null after we finish
// decoding the first frame.
Canonical<media::NullableTimeUnit> mDuration;
// The time of the current frame, corresponding to the "current
// playback position" in HTML5. This is referenced from 0, which is the
// initial playback position.
Canonical<media::TimeUnit> mCurrentPosition;
// Used to distinguish whether the audio is producing sound.
Canonical<bool> mIsAudioDataAudible;
// Track when MediaSink is suspended. When that happens some actions are
// restricted like starting the sink or changing sink id. The flag is valid
// after Initialization. TaskQueue thread only.
bool mIsMediaSinkSuspended = false;
public:
AbstractCanonical<media::TimeIntervals>* CanonicalBuffered() const;
AbstractCanonical<CopyableTArray<RefPtr<ProcessedMediaTrack>>>*
CanonicalOutputTracks() {
return &mCanonicalOutputTracks;
@@ -788,15 +558,6 @@ class MediaDecoderStateMachine
AbstractCanonical<PrincipalHandle>* CanonicalOutputPrincipal() {
return &mCanonicalOutputPrincipal;
}
AbstractCanonical<media::NullableTimeUnit>* CanonicalDuration() {
return &mDuration;
}
AbstractCanonical<media::TimeUnit>* CanonicalCurrentPosition() {
return &mCurrentPosition;
}
AbstractCanonical<bool>* CanonicalIsAudioDataAudible() {
return &mIsAudioDataAudible;
}
};
} // namespace mozilla
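In the header above, the inline lambda-dispatching helpers (DispatchSetFragmentEndTime, DispatchCanPlayThrough, DispatchIsLiveStream, DispatchSetPlaybackRate) disappear from MediaDecoderStateMachine; the base class now owns the non-virtual Dispatch* side and calls a virtual Set* that the subclass overrides. A toy standalone version of that split, with a std::queue of std::function standing in for the MDSM task queue (Base/Derived are illustrative names, not the real classes):

#include <functional>
#include <iostream>
#include <queue>

class Base {
 public:
  virtual ~Base() = default;

  void DispatchCanPlayThrough(bool aCanPlayThrough) {
    // In the patch this is NewRunnableMethod<bool>(..., &Base::SetCanPlayThrough, ...)
    // dispatched to OwnerThread(); here we just queue a closure.
    mTaskQueue.push([this, aCanPlayThrough] { SetCanPlayThrough(aCanPlayThrough); });
  }

  void DrainTaskQueue() {
    while (!mTaskQueue.empty()) {
      mTaskQueue.front()();
      mTaskQueue.pop();
    }
  }

 protected:
  virtual void SetCanPlayThrough(bool aCanPlayThrough) = 0;

 private:
  std::queue<std::function<void()>> mTaskQueue;
};

class Derived final : public Base {
 protected:
  void SetCanPlayThrough(bool aCanPlayThrough) override {
    mCanPlayThrough = aCanPlayThrough;
    std::cout << "canplaythrough = " << std::boolalpha << mCanPlayThrough << '\n';
  }

 private:
  bool mCanPlayThrough = false;
};

int main() {
  Derived d;
  d.DispatchCanPlayThrough(true);  // queued, not yet applied
  d.DrainTaskQueue();              // setter runs "on the task queue"
}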

dom/media/MediaDecoderStateMachineBase.cpp

@@ -0,0 +1,167 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaDecoderStateMachineBase.h"
#include "MediaDecoder.h"
#include "ReaderProxy.h"
#include "mozilla/ProfilerMarkers.h"
#include "mozilla/TaskQueue.h"
#include "nsThreadUtils.h"
namespace mozilla {
#define INIT_MIRROR(name, val) \
name(mTaskQueue, val, "MediaDecoderStateMachineBase::" #name " (Mirror)")
#define INIT_CANONICAL(name, val) \
name(mTaskQueue, val, "MediaDecoderStateMachineBase::" #name " (Canonical)")
#define FMT(x, ...) "Decoder=%p " x, mDecoderID, ##__VA_ARGS__
#define LOG(x, ...) \
DDMOZ_LOG(gMediaDecoderLog, LogLevel::Debug, "Decoder=%p " x, mDecoderID, \
##__VA_ARGS__)
#define LOGV(x, ...) \
DDMOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, "Decoder=%p " x, mDecoderID, \
##__VA_ARGS__)
#define LOGW(x, ...) NS_WARNING(nsPrintfCString(FMT(x, ##__VA_ARGS__)).get())
#define LOGE(x, ...) \
NS_DebugBreak(NS_DEBUG_WARNING, \
nsPrintfCString(FMT(x, ##__VA_ARGS__)).get(), nullptr, \
__FILE__, __LINE__)
MediaDecoderStateMachineBase::MediaDecoderStateMachineBase(
MediaDecoder* aDecoder, MediaFormatReader* aReader)
: mDecoderID(aDecoder),
mAbstractMainThread(aDecoder->AbstractMainThread()),
mFrameStats(&aDecoder->GetFrameStatistics()),
mVideoFrameContainer(aDecoder->GetVideoFrameContainer()),
mTaskQueue(TaskQueue::Create(GetMediaThreadPool(MediaThreadType::MDSM),
"MDSM::mTaskQueue",
/* aSupportsTailDispatch = */ true)),
mReader(new ReaderProxy(mTaskQueue, aReader)),
mPlaybackRate(1.0),
INIT_MIRROR(mBuffered, media::TimeIntervals()),
INIT_MIRROR(mPlayState, MediaDecoder::PLAY_STATE_LOADING),
INIT_MIRROR(mVolume, 1.0),
INIT_MIRROR(mPreservesPitch, true),
INIT_MIRROR(mLooping, false),
INIT_CANONICAL(mDuration, media::NullableTimeUnit()),
INIT_CANONICAL(mCurrentPosition, media::TimeUnit::Zero()),
INIT_CANONICAL(mIsAudioDataAudible, false),
mMinimizePreroll(aDecoder->GetMinimizePreroll()),
mWatchManager(this, mTaskQueue) {}
MediaEventSource<void>& MediaDecoderStateMachineBase::OnMediaNotSeekable()
const {
return mReader->OnMediaNotSeekable();
}
AbstractCanonical<media::TimeIntervals>*
MediaDecoderStateMachineBase::CanonicalBuffered() const {
return mReader->CanonicalBuffered();
}
void MediaDecoderStateMachineBase::DispatchSetFragmentEndTime(
const media::TimeUnit& aEndTime) {
OwnerThread()->DispatchStateChange(NewRunnableMethod<media::TimeUnit>(
"MediaDecoderStateMachineBase::SetFragmentEndTime", this,
&MediaDecoderStateMachineBase::SetFragmentEndTime, aEndTime));
}
void MediaDecoderStateMachineBase::DispatchCanPlayThrough(
bool aCanPlayThrough) {
OwnerThread()->DispatchStateChange(NewRunnableMethod<bool>(
"MediaDecoderStateMachineBase::SetCanPlayThrough", this,
&MediaDecoderStateMachineBase::SetCanPlayThrough, aCanPlayThrough));
}
void MediaDecoderStateMachineBase::DispatchIsLiveStream(bool aIsLiveStream) {
OwnerThread()->DispatchStateChange(NewRunnableMethod<bool>(
"MediaDecoderStateMachineBase::SetIsLiveStream", this,
&MediaDecoderStateMachineBase::SetIsLiveStream, aIsLiveStream));
}
void MediaDecoderStateMachineBase::DispatchSetPlaybackRate(
double aPlaybackRate) {
OwnerThread()->DispatchStateChange(NewRunnableMethod<double>(
"MediaDecoderStateMachineBase::SetPlaybackRate", this,
&MediaDecoderStateMachineBase::SetPlaybackRate, aPlaybackRate));
}
nsresult MediaDecoderStateMachineBase::Init(MediaDecoder* aDecoder) {
MOZ_ASSERT(NS_IsMainThread());
// Dispatch initialization that needs to happen on that task queue.
nsCOMPtr<nsIRunnable> r = NewRunnableMethod<RefPtr<MediaDecoder>>(
"MediaDecoderStateMachineBase::InitializationTask", this,
&MediaDecoderStateMachineBase::InitializationTask, aDecoder);
mTaskQueue->DispatchStateChange(r.forget());
nsresult rv = mReader->Init();
NS_ENSURE_SUCCESS(rv, rv);
mMetadataManager.Connect(mReader->TimedMetadataEvent(), OwnerThread());
mReader->SetCanonicalDuration(&mDuration);
return NS_OK;
}
void MediaDecoderStateMachineBase::InitializationTask(MediaDecoder* aDecoder) {
MOZ_ASSERT(OnTaskQueue());
// Connect mirrors.
mBuffered.Connect(mReader->CanonicalBuffered());
mPlayState.Connect(aDecoder->CanonicalPlayState());
mVolume.Connect(aDecoder->CanonicalVolume());
mPreservesPitch.Connect(aDecoder->CanonicalPreservesPitch());
mLooping.Connect(aDecoder->CanonicalLooping());
// Initialize watchers.
mWatchManager.Watch(mBuffered,
&MediaDecoderStateMachineBase::BufferedRangeUpdated);
mWatchManager.Watch(mVolume, &MediaDecoderStateMachineBase::VolumeChanged);
mWatchManager.Watch(mPreservesPitch,
&MediaDecoderStateMachineBase::PreservesPitchChanged);
mWatchManager.Watch(mPlayState,
&MediaDecoderStateMachineBase::PlayStateChanged);
mWatchManager.Watch(mLooping, &MediaDecoderStateMachineBase::LoopingChanged);
}
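The mirrors connected and watched above follow Gecko's state-mirroring pattern: a value mirrored from the main thread triggers a registered callback (BufferedRangeUpdated, VolumeChanged, LoopingChanged, ...) whenever it changes on the state machine's task queue. A toy single-threaded analogue of that Watch/notify relationship; Watched<T> is a made-up illustrative class, not the real Mirror/WatchManager machinery:

#include <functional>
#include <iostream>
#include <vector>

template <typename T>
class Watched {
 public:
  explicit Watched(T aInit) : mValue(std::move(aInit)) {}

  void Watch(std::function<void()> aCallback) {
    mCallbacks.push_back(std::move(aCallback));
  }

  void Set(T aValue) {  // stands in for the canonical pushing a new value
    if (aValue != mValue) {
      mValue = std::move(aValue);
      for (auto& cb : mCallbacks) cb();
    }
  }

  const T& Ref() const { return mValue; }

 private:
  T mValue;
  std::vector<std::function<void()>> mCallbacks;
};

int main() {
  Watched<double> volume(1.0);
  volume.Watch([&] { std::cout << "VolumeChanged: " << volume.Ref() << '\n'; });
  volume.Set(0.25);  // triggers the watcher, like the real VolumeChanged()
  volume.Set(0.25);  // no change, no notification
}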
RefPtr<ShutdownPromise> MediaDecoderStateMachineBase::BeginShutdown() {
MOZ_ASSERT(NS_IsMainThread());
return InvokeAsync(OwnerThread(), __func__,
[self = RefPtr<MediaDecoderStateMachineBase>(this)]() {
self->mWatchManager.Shutdown();
return self->Shutdown();
});
}
RefPtr<MediaDecoder::SeekPromise> MediaDecoderStateMachineBase::InvokeSeek(
const SeekTarget& aTarget) {
return InvokeAsync(OwnerThread(), __func__,
[self = RefPtr<MediaDecoderStateMachineBase>(this),
target = aTarget]() { return self->Seek(target); });
}
bool MediaDecoderStateMachineBase::OnTaskQueue() const {
return OwnerThread()->IsCurrentThreadIn();
}
void MediaDecoderStateMachineBase::DecodeError(const MediaResult& aError) {
MOZ_ASSERT(OnTaskQueue());
LOGE("Decode error: %s", aError.Description().get());
PROFILER_MARKER_TEXT("MDSMBase::DecodeError", MEDIA_PLAYBACK, {},
aError.Description());
// Notify the decode error and MediaDecoder will shut down MDSM.
mOnPlaybackErrorEvent.Notify(aError);
}
#undef INIT_MIRROR
#undef INIT_CANONICAL
#undef FMT
#undef LOG
#undef LOGV
#undef LOGW
#undef LOGE
} // namespace mozilla
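BeginShutdown() and InvokeSeek() above hop from the main thread to the owner task queue with InvokeAsync and a lambda that holds a strong reference, then call the pure-virtual Shutdown()/Seek() that the concrete state machine implements. A rough standalone analogue using std::async and std::future in place of InvokeAsync and MozPromise; SeekTarget and the class names here are simplified stand-ins:

#include <future>
#include <iostream>
#include <memory>

struct SeekTarget {
  double mSeconds = 0;
};

class StateMachineBase : public std::enable_shared_from_this<StateMachineBase> {
 public:
  virtual ~StateMachineBase() = default;

  std::future<bool> InvokeSeek(const SeekTarget& aTarget) {
    auto self = shared_from_this();  // keep the object alive across threads
    return std::async(std::launch::async,
                      [self, target = aTarget]() { return self->Seek(target); });
  }

 protected:
  virtual bool Seek(const SeekTarget& aTarget) = 0;
};

class StateMachine final : public StateMachineBase {
 protected:
  bool Seek(const SeekTarget& aTarget) override {
    std::cout << "seeking to " << aTarget.mSeconds << "s\n";
    return true;
  }
};

int main() {
  auto sm = std::make_shared<StateMachine>();
  std::future<bool> done = sm->InvokeSeek(SeekTarget{12.5});
  std::cout << (done.get() ? "seek dispatched\n" : "seek failed\n");
}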

dom/media/MediaDecoderStateMachineBase.h

@@ -0,0 +1,283 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DOM_MEDIA_MEDIADECODERSTATEMACHINEBASE_H_
#define DOM_MEDIA_MEDIADECODERSTATEMACHINEBASE_H_
#include "DecoderDoctorDiagnostics.h"
#include "MediaEventSource.h"
#include "MediaInfo.h"
#include "MediaMetadataManager.h"
#include "mozilla/dom/MediaDebugInfoBinding.h"
#include "mozilla/Variant.h"
#include "nsISupportsImpl.h"
class AudioDeviceInfo;
namespace mozilla {
class AbstractThread;
class FrameStatistics;
class MediaDecoder;
class MediaFormatReader;
class ReaderProxy;
class TaskQueue;
class VideoFrameContainer;
struct MediaPlaybackEvent {
enum EventType {
PlaybackStarted,
PlaybackStopped,
PlaybackProgressed,
PlaybackEnded,
SeekStarted,
Invalidate,
EnterVideoSuspend,
ExitVideoSuspend,
StartVideoSuspendTimer,
CancelVideoSuspendTimer,
VideoOnlySeekBegin,
VideoOnlySeekCompleted,
} mType;
using DataType = Variant<Nothing, int64_t>;
DataType mData;
MOZ_IMPLICIT MediaPlaybackEvent(EventType aType)
: mType(aType), mData(Nothing{}) {}
template <typename T>
MediaPlaybackEvent(EventType aType, T&& aArg)
: mType(aType), mData(std::forward<T>(aArg)) {}
};
enum class VideoDecodeMode : uint8_t { Normal, Suspend };
/**
* The state machine class. This manages the decoding and seeking in the
* MediaDecoderReader on the decode task queue, and A/V sync on the shared
* state machine thread, and controls the audio "push" thread.
*
* All internal state is synchronised via the decoder monitor. State changes
* are propagated by scheduling the state machine to run another cycle on the
* shared state machine thread.
*/
class MediaDecoderStateMachineBase {
public:
NS_INLINE_DECL_PURE_VIRTUAL_REFCOUNTING
using FirstFrameEventSourceExc =
MediaEventSourceExc<UniquePtr<MediaInfo>, MediaDecoderEventVisibility>;
using MetadataEventSourceExc =
MediaEventSourceExc<UniquePtr<MediaInfo>, UniquePtr<MetadataTags>,
MediaDecoderEventVisibility>;
using NextFrameStatus = MediaDecoderOwner::NextFrameStatus;
MediaDecoderStateMachineBase(MediaDecoder* aDecoder,
MediaFormatReader* aReader);
virtual nsresult Init(MediaDecoder* aDecoder);
RefPtr<ShutdownPromise> BeginShutdown();
// Seeks to the decoder to aTarget asynchronously.
RefPtr<MediaDecoder::SeekPromise> InvokeSeek(const SeekTarget& aTarget);
virtual size_t SizeOfVideoQueue() const = 0;
virtual size_t SizeOfAudioQueue() const = 0;
// Sets the video decode mode. Used by the suspend-video-decoder feature.
virtual void SetVideoDecodeMode(VideoDecodeMode aMode) = 0;
virtual RefPtr<GenericPromise> InvokeSetSink(
const RefPtr<AudioDeviceInfo>& aSink) = 0;
virtual void InvokeSuspendMediaSink() = 0;
virtual void InvokeResumeMediaSink() = 0;
virtual RefPtr<GenericPromise> RequestDebugInfo(
dom::MediaDecoderStateMachineDebugInfo& aInfo) = 0;
// Returns the state machine task queue.
TaskQueue* OwnerThread() const { return mTaskQueue; }
MetadataEventSourceExc& MetadataLoadedEvent() { return mMetadataLoadedEvent; }
FirstFrameEventSourceExc& FirstFrameLoadedEvent() {
return mFirstFrameLoadedEvent;
}
MediaEventSourceExc<RefPtr<VideoFrameContainer>>&
OnSecondaryVideoContainerInstalled() {
return mOnSecondaryVideoContainerInstalled;
}
TimedMetadataEventSource& TimedMetadataEvent() {
return mMetadataManager.TimedMetadataEvent();
}
MediaEventSource<MediaPlaybackEvent>& OnPlaybackEvent() {
return mOnPlaybackEvent;
}
MediaEventSource<MediaResult>& OnPlaybackErrorEvent() {
return mOnPlaybackErrorEvent;
}
MediaEventSource<DecoderDoctorEvent>& OnDecoderDoctorEvent() {
return mOnDecoderDoctorEvent;
}
MediaEventSource<NextFrameStatus>& OnNextFrameStatus() {
return mOnNextFrameStatus;
}
MediaEventSource<void>& OnMediaNotSeekable() const;
AbstractCanonical<media::NullableTimeUnit>* CanonicalDuration() {
return &mDuration;
}
AbstractCanonical<media::TimeUnit>* CanonicalCurrentPosition() {
return &mCurrentPosition;
}
AbstractCanonical<bool>* CanonicalIsAudioDataAudible() {
return &mIsAudioDataAudible;
}
AbstractCanonical<media::TimeIntervals>* CanonicalBuffered() const;
void DispatchSetFragmentEndTime(const media::TimeUnit& aEndTime);
void DispatchCanPlayThrough(bool aCanPlayThrough);
void DispatchIsLiveStream(bool aIsLiveStream);
void DispatchSetPlaybackRate(double aPlaybackRate);
protected:
virtual ~MediaDecoderStateMachineBase() = default;
bool HasAudio() const { return mInfo.ref().HasAudio(); }
bool HasVideo() const { return mInfo.ref().HasVideo(); }
const MediaInfo& Info() const { return mInfo.ref(); }
virtual void SetPlaybackRate(double aPlaybackRate) = 0;
virtual void SetIsLiveStream(bool aIsLiveStream) = 0;
virtual void SetCanPlayThrough(bool aCanPlayThrough) = 0;
virtual void SetFragmentEndTime(const media::TimeUnit& aFragmentEndTime) = 0;
virtual void BufferedRangeUpdated() = 0;
virtual void VolumeChanged() = 0;
virtual void PreservesPitchChanged() = 0;
virtual void PlayStateChanged() = 0;
virtual void LoopingChanged() = 0;
// Init tasks which should be done on the task queue.
virtual void InitializationTask(MediaDecoder* aDecoder);
virtual RefPtr<ShutdownPromise> Shutdown() = 0;
virtual RefPtr<MediaDecoder::SeekPromise> Seek(const SeekTarget& aTarget) = 0;
void DecodeError(const MediaResult& aError);
// Functions used by assertions to ensure we're calling things
// on the appropriate threads.
bool OnTaskQueue() const;
bool IsRequestingAudioData() const { return mAudioDataRequest.Exists(); }
bool IsRequestingVideoData() const { return mVideoDataRequest.Exists(); }
bool IsWaitingAudioData() const { return mAudioWaitRequest.Exists(); }
bool IsWaitingVideoData() const { return mVideoWaitRequest.Exists(); }
void* const mDecoderID;
const RefPtr<AbstractThread> mAbstractMainThread;
const RefPtr<FrameStatistics> mFrameStats;
const RefPtr<VideoFrameContainer> mVideoFrameContainer;
const RefPtr<TaskQueue> mTaskQueue;
const RefPtr<ReaderProxy> mReader;
mozilla::MediaMetadataManager mMetadataManager;
// Playback rate. 1.0 : normal speed, 0.5 : two times slower.
double mPlaybackRate;
// Event producers
MediaEventProducerExc<UniquePtr<MediaInfo>, UniquePtr<MetadataTags>,
MediaDecoderEventVisibility>
mMetadataLoadedEvent;
MediaEventProducerExc<UniquePtr<MediaInfo>, MediaDecoderEventVisibility>
mFirstFrameLoadedEvent;
MediaEventProducerExc<RefPtr<VideoFrameContainer>>
mOnSecondaryVideoContainerInstalled;
MediaEventProducer<MediaPlaybackEvent> mOnPlaybackEvent;
MediaEventProducer<MediaResult> mOnPlaybackErrorEvent;
MediaEventProducer<DecoderDoctorEvent> mOnDecoderDoctorEvent;
MediaEventProducer<NextFrameStatus> mOnNextFrameStatus;
// The buffered range. Mirrored from the decoder thread.
Mirror<media::TimeIntervals> mBuffered;
// The current play state, mirrored from the main thread.
Mirror<MediaDecoder::PlayState> mPlayState;
// Volume of playback. 0.0 = muted. 1.0 = full volume.
Mirror<double> mVolume;
// Pitch preservation for the playback rate.
Mirror<bool> mPreservesPitch;
// Whether to seek back to the start of the media resource
// upon reaching the end.
Mirror<bool> mLooping;
// Duration of the media. This is guaranteed to be non-null after we finish
// decoding the first frame.
Canonical<media::NullableTimeUnit> mDuration;
// The time of the current frame, corresponding to the "current
// playback position" in HTML5. This is referenced from 0, which is the
// initial playback position.
Canonical<media::TimeUnit> mCurrentPosition;
// Used to distinguish whether the audio is producing sound.
Canonical<bool> mIsAudioDataAudible;
// Stores presentation info required for playback.
Maybe<MediaInfo> mInfo;
// True if the media is seekable (i.e. supports random access).
bool mMediaSeekable = true;
// True if the media is seekable only in buffered ranges.
bool mMediaSeekableOnlyInBufferedRanges = false;
// True if we've decoded first frames (thus having the start time) and
// notified the FirstFrameLoaded event. Note we can't initiate seek until the
// start time is known which happens when the first frames are decoded or we
// are playing an MSE stream (the start time is always assumed 0).
bool mSentFirstFrameLoadedEvent = false;
// True if we should not decode/preroll unnecessary samples, unless we're
// played. "Prerolling" in this context refers to when we decode and
// buffer decoded samples in advance of when they're needed for playback.
// This flag is set for preload=metadata media, and means we won't
// decode more than the first video frame and first block of audio samples
// for that media when we startup, or after a seek. When Play() is called,
// we reset this flag, as we assume the user is playing the media, so
// prerolling is appropriate then. This flag is used to reduce the overhead
// of prerolling samples for media elements that may not play, both
// memory and CPU overhead.
bool mMinimizePreroll;
// Only one of a given pair of ({Audio,Video}DataPromise, WaitForDataPromise)
// should exist at any given moment.
using AudioDataPromise = MediaFormatReader::AudioDataPromise;
using VideoDataPromise = MediaFormatReader::VideoDataPromise;
using WaitForDataPromise = MediaFormatReader::WaitForDataPromise;
MozPromiseRequestHolder<AudioDataPromise> mAudioDataRequest;
MozPromiseRequestHolder<VideoDataPromise> mVideoDataRequest;
MozPromiseRequestHolder<WaitForDataPromise> mAudioWaitRequest;
MozPromiseRequestHolder<WaitForDataPromise> mVideoWaitRequest;
private:
WatchManager<MediaDecoderStateMachineBase> mWatchManager;
};
} // namespace mozilla
#endif // DOM_MEDIA_MEDIADECODERSTATEMACHINEBASE_H_
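One detail in the headers above: the base class declares refcounting as pure virtual (NS_INLINE_DECL_PURE_VIRTUAL_REFCOUNTING), and MediaDecoderStateMachine's NS_INLINE_DECL_THREADSAFE_REFCOUNTING(..., override) supplies the thread-safe implementation. A plain-C++ analogue of that arrangement; the hand-rolled AddRef/Release below are illustrative, not the actual XPCOM macros:

#include <atomic>
#include <cstdio>

// The abstract base only declares AddRef/Release; the concrete class provides
// the atomic implementation and deletes itself when the count hits zero.
class Base {
 public:
  virtual void AddRef() = 0;
  virtual void Release() = 0;
  virtual ~Base() = default;
};

class Concrete final : public Base {
 public:
  void AddRef() override { ++mRefCnt; }
  void Release() override {
    if (--mRefCnt == 0) {
      std::puts("last reference released, deleting");
      delete this;
    }
  }

 private:
  ~Concrete() override = default;  // only Release() may destroy the object
  std::atomic<int> mRefCnt{0};
};

int main() {
  Base* p = new Concrete();
  p->AddRef();
  p->Release();  // drops to zero and self-deletes through the virtual
}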

dom/media/moz.build

@@ -160,6 +160,7 @@ EXPORTS += [
"MediaDecoder.h",
"MediaDecoderOwner.h",
"MediaDecoderStateMachine.h",
"MediaDecoderStateMachineBase.h",
"MediaEventSource.h",
"MediaFormatReader.h",
"MediaInfo.h",
@@ -279,6 +280,7 @@ UNIFIED_SOURCES += [
"MediaData.cpp",
"MediaDecoder.cpp",
"MediaDecoderStateMachine.cpp",
"MediaDecoderStateMachineBase.cpp",
"MediaDeviceInfo.cpp",
"MediaDevices.cpp",
"MediaFormatReader.cpp",