Bug 455165 - Add support for chained ogg audio file and proper metadata dispatching. r=cpearce

Paul Adenot 2012-11-30 14:17:54 +01:00
parent 66e84be708
commit 29d389e7cd
18 changed files with 452 additions and 125 deletions
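
The patch introduces a timed metadata queue: when the Ogg reader hits an end-of-stream packet and then finds a new beginning-of-stream page with a compatible Vorbis or Opus setup, it marks the media as non-seekable and queues the new link's tags, channel count and sample rate with a publish time computed from the audio frames decoded so far; the state machine then dispatches that metadata to the main thread once playback reaches the publish time. A minimal, self-contained sketch of the publish-time bookkeeping (illustrative names and values, not the Gecko classes changed below):

#include <cstdint>
#include <cstdio>

// Publish times are expressed in microseconds, as in the patch.
static const int64_t USECS_PER_S = 1000000;

// A new chain link's metadata should surface at the stream time where the
// previous link ends, i.e. the frames decoded so far divided by the rate.
int64_t PublishTimeForNewLink(int64_t decodedAudioFrames, int rate) {
  return (decodedAudioFrames * USECS_PER_S) / rate;
}

int main() {
  // e.g. 1,323,000 frames already decoded at 44100 Hz -> 30,000,000 us (30 s).
  std::printf("%lld\n", (long long)PublishTimeForNewLink(1323000, 44100));
  return 0;
}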

View File

@ -25,6 +25,7 @@
#include "nsIDOMWakeLock.h"
#include "AudioChannelCommon.h"
#include "DecoderTraits.h"
#include "MediaMetadataManager.h"
// Define to output information on decoding and painting framerate
/* #define DEBUG_FRAME_RATE 1 */
@ -119,8 +120,8 @@ public:
// Called by the video decoder object, on the main thread,
// when it has read the metadata containing video dimensions,
// etc.
virtual void MetadataLoaded(uint32_t aChannels,
uint32_t aRate,
virtual void MetadataLoaded(int aChannels,
int aRate,
bool aHasAudio,
const MetadataTags* aTags) MOZ_FINAL MOZ_OVERRIDE;

View File

@ -124,6 +124,7 @@ nsHTMLAudioElement::MozSetup(uint32_t aChannels, uint32_t aRate)
MetadataLoaded(aChannels, aRate, true, nullptr);
mAudioStream->SetVolume(mVolume);
return NS_OK;
}

View File

@ -65,6 +65,7 @@
#include "nsDOMMediaStream.h"
#include "nsIScriptError.h"
#include "nsHostObjectProtocolHandler.h"
#include "MediaMetadataManager.h"
#include "nsCSSParser.h"
#include "nsIMediaList.h"
@ -2183,7 +2184,8 @@ nsresult nsHTMLMediaElement::InitializeDecoderAsClone(MediaDecoder* aOriginal)
double duration = aOriginal->GetDuration();
if (duration >= 0) {
decoder->SetDuration(duration);
decoder->SetSeekable(aOriginal->IsSeekable());
decoder->SetTransportSeekable(aOriginal->IsTransportSeekable());
decoder->SetMediaSeekable(aOriginal->IsMediaSeekable());
}
MediaResource* resource = originalResource->CloneData(decoder);
@ -2512,8 +2514,8 @@ void nsHTMLMediaElement::ProcessMediaFragmentURI()
}
}
void nsHTMLMediaElement::MetadataLoaded(uint32_t aChannels,
uint32_t aRate,
void nsHTMLMediaElement::MetadataLoaded(int aChannels,
int aRate,
bool aHasAudio,
const MetadataTags* aTags)
{
@ -2524,7 +2526,7 @@ void nsHTMLMediaElement::MetadataLoaded(uint32_t aChannels,
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
DispatchAsyncEvent(NS_LITERAL_STRING("loadedmetadata"));
if (mDecoder && mDecoder->IsSeekable()) {
if (mDecoder && mDecoder->IsTransportSeekable() && mDecoder->IsMediaSeekable()) {
ProcessMediaFragmentURI();
mDecoder->SetFragmentEndTime(mFragmentEnd);
}

View File

@ -8,6 +8,8 @@
#define AbstractMediaDecoder_h_
#include "nsISupports.h"
#include "nsDataHashtable.h"
#include "nsThreadUtils.h"
namespace mozilla
{
@ -19,6 +21,9 @@ namespace layers
class MediaResource;
class ReentrantMonitor;
class VideoFrameContainer;
class TimedMetadata;
typedef nsDataHashtable<nsCStringHashKey, nsCString> MetadataTags;
/**
* The AbstractMediaDecoder class describes the public interface for a media decoder
@ -61,12 +66,24 @@ public:
// Set the duration of the media in microseconds.
virtual void SetMediaDuration(int64_t aDuration) = 0;
// Set the media as being seekable or not.
virtual void SetMediaSeekable(bool aMediaSeekable) = 0;
// Set the transport level as being seekable or not.
virtual void SetTransportSeekable(bool aTransportSeekable) = 0;
virtual VideoFrameContainer* GetVideoFrameContainer() = 0;
virtual mozilla::layers::ImageContainer* GetImageContainer() = 0;
// Return true if seeking is supported.
// Return true if the transport layer supports seeking.
virtual bool IsTransportSeekable() = 0;
// Return true if the media supports seeking.
virtual bool IsMediaSeekable() = 0;
virtual void MetadataLoaded(int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags) = 0;
virtual void QueueMetadata(int64_t aTime, int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags) = 0;
// Set the media end time in microseconds
virtual void SetMediaEndTime(int64_t aTime) = 0;
@ -97,6 +114,32 @@ public:
};
};
class AudioMetadataEventRunner : public nsRunnable
{
private:
nsRefPtr<AbstractMediaDecoder> mDecoder;
public:
AudioMetadataEventRunner(AbstractMediaDecoder* aDecoder, int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags)
: mDecoder(aDecoder),
mChannels(aChannels),
mRate(aRate),
mHasAudio(aHasAudio),
mTags(aTags)
{}
NS_IMETHOD Run()
{
mDecoder->MetadataLoaded(mChannels, mRate, mHasAudio, mTags);
return NS_OK;
}
int mChannels;
int mRate;
bool mHasAudio;
MetadataTags* mTags;
};
}
#endif
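
AudioMetadataEventRunner marshals the metadata-loaded notification onto the main thread. A hedged sketch of the dispatch pattern it is built for, where |decoder|, |channels|, |rate|, |hasAudio| and |tags| stand in for values the caller already holds (the actual dispatch sites are in MediaDecoderStateMachine::DecodeMetadata and MediaMetadataManager further down):

// Marshal a MetadataLoaded() call to the main thread; the runner keeps a
// reference to the decoder alive until it has run.
nsCOMPtr<nsIRunnable> event =
  new AudioMetadataEventRunner(decoder, channels, rate, hasAudio, tags);
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);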

View File

@ -37,6 +37,7 @@ EXPORTS = \
VideoUtils.h \
VideoSegment.h \
VorbisUtils.h \
MediaMetadataManager.h \
$(NULL)
CPPSRCS = \

View File

@ -761,7 +761,7 @@ MediaCache::FindReusableBlock(TimeStamp aNow,
// Don't consider readahead blocks in non-seekable streams. If we
// remove the block we won't be able to seek back to read it later.
if (stream->mIsSeekable) {
if (stream->mIsTransportSeekable) {
AppendMostReusableBlock(&stream->mReadaheadBlocks, &candidates, length);
}
}
@ -1082,7 +1082,7 @@ MediaCache::Update()
int32_t nonSeekableReadaheadBlockCount = 0;
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
MediaCacheStream* stream = mStreams[i];
if (!stream->mIsSeekable) {
if (!stream->mIsTransportSeekable) {
nonSeekableReadaheadBlockCount += stream->mReadaheadBlocks.GetCount();
}
}
@ -1110,7 +1110,7 @@ MediaCache::Update()
// Compute where we'd actually seek to in order to read at readOffset
int64_t desiredOffset = dataOffset;
if (stream->mIsSeekable) {
if (stream->mIsTransportSeekable) {
if (desiredOffset > stream->mChannelOffset &&
desiredOffset <= stream->mChannelOffset + SEEK_VS_READ_THRESHOLD) {
// Assume it's more efficient to just keep reading up to the
@ -1162,7 +1162,7 @@ MediaCache::Update()
// The stream reader is waiting for us, or nearly so. Better feed it.
LOG(PR_LOG_DEBUG, ("Stream %p feeding reader", stream));
enableReading = true;
} else if (!stream->mIsSeekable &&
} else if (!stream->mIsTransportSeekable &&
nonSeekableReadaheadBlockCount >= maxBlocks*NONSEEKABLE_READAHEAD_MAX) {
// This stream is not seekable and there are already too many blocks
// being cached for readahead for nonseekable streams (which we can't
@ -1209,7 +1209,7 @@ MediaCache::Update()
if (stream->mChannelOffset != desiredOffset && enableReading) {
// We need to seek now.
NS_ASSERTION(stream->mIsSeekable || desiredOffset == 0,
NS_ASSERTION(stream->mIsTransportSeekable || desiredOffset == 0,
"Trying to seek in a non-seekable stream!");
// Round seek offset down to the start of the block. This is essential
// because we don't want to think we have part of a block already
@ -1775,22 +1775,22 @@ MediaCacheStream::~MediaCacheStream()
}
void
MediaCacheStream::SetSeekable(bool aIsSeekable)
MediaCacheStream::SetTransportSeekable(bool aIsTransportSeekable)
{
ReentrantMonitorAutoEnter mon(gMediaCache->GetReentrantMonitor());
NS_ASSERTION(mIsSeekable || aIsSeekable ||
NS_ASSERTION(mIsTransportSeekable || aIsTransportSeekable ||
mChannelOffset == 0, "channel offset must be zero when we become non-seekable");
mIsSeekable = aIsSeekable;
mIsTransportSeekable = aIsTransportSeekable;
// Queue an Update since we may change our strategy for dealing
// with this stream
gMediaCache->QueueUpdate();
}
bool
MediaCacheStream::IsSeekable()
MediaCacheStream::IsTransportSeekable()
{
ReentrantMonitorAutoEnter mon(gMediaCache->GetReentrantMonitor());
return mIsSeekable;
return mIsTransportSeekable;
}
bool
@ -2228,7 +2228,7 @@ MediaCacheStream::InitAsClone(MediaCacheStream* aOriginal)
mPrincipal = aOriginal->mPrincipal;
mStreamLength = aOriginal->mStreamLength;
mIsSeekable = aOriginal->mIsSeekable;
mIsTransportSeekable = aOriginal->mIsTransportSeekable;
// Cloned streams are initially suspended, since there is no channel open
// initially for a clone.

View File

@ -196,7 +196,7 @@ public:
mHasHadUpdate(false),
mClosed(false),
mDidNotifyDataEnded(false), mResourceID(0),
mIsSeekable(false), mCacheSuspended(false),
mIsTransportSeekable(false), mCacheSuspended(false),
mChannelEnded(false),
mChannelOffset(0), mStreamLength(-1),
mStreamOffset(0), mPlaybackBytesPerSecond(10000),
@ -222,7 +222,7 @@ public:
// change during the lifetime of the MediaCacheStream --- every time
// we do an HTTP load the seekability may be different (and sometimes
// is, in practice, due to the effects of caching proxies).
void SetSeekable(bool aIsSeekable);
void SetTransportSeekable(bool aIsTransportSeekable);
// This must be called (and return) before the ChannelMediaResource
// used to create this MediaCacheStream is deleted.
void Close();
@ -323,8 +323,8 @@ public:
// because it doesn't know when the decoder was paused, buffering, etc.
// Do not pass zero.
void SetPlaybackRate(uint32_t aBytesPerSecond);
// Returns the last set value of SetSeekable.
bool IsSeekable();
// Returns the last set value of SetTransportSeekable.
bool IsTransportSeekable();
// Returns true when all streams for this resource are suspended or their
// channel has ended.
@ -447,7 +447,7 @@ private:
// underlying resource and should share data.
int64_t mResourceID;
// The last reported seekability state for the underlying channel
bool mIsSeekable;
bool mIsTransportSeekable;
// True if the cache has suspended our channel because the cache is
// full and the priority of the data that would be received is lower
// than the priority of the data already in the cache

View File

@ -303,7 +303,8 @@ MediaDecoder::MediaDecoder() :
mInitialVolume(0.0),
mRequestedSeekTime(-1.0),
mDuration(-1),
mSeekable(true),
mTransportSeekable(true),
mMediaSeekable(true),
mReentrantMonitor("media.decoder"),
mPlayState(PLAY_STATE_PAUSED),
mNextState(PLAY_STATE_PAUSED),
@ -435,7 +436,8 @@ nsresult MediaDecoder::InitializeStateMachine(MediaDecoder* aCloneDonor)
}
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mDecoderStateMachine->SetSeekable(mSeekable);
mDecoderStateMachine->SetTransportSeekable(mTransportSeekable);
mDecoderStateMachine->SetMediaSeekable(mMediaSeekable);
mDecoderStateMachine->SetDuration(mDuration);
mDecoderStateMachine->SetVolume(mInitialVolume);
mDecoderStateMachine->SetAudioCaptured(mInitialAudioCaptured);
@ -631,10 +633,19 @@ void MediaDecoder::AudioAvailable(float* aFrameBuffer,
mOwner->NotifyAudioAvailable(frameBuffer.forget(), aFrameBufferLength, aTime);
}
void MediaDecoder::MetadataLoaded(uint32_t aChannels,
uint32_t aRate,
bool aHasAudio,
const MetadataTags* aTags)
void MediaDecoder::QueueMetadata(int64_t aPublishTime,
int aChannels,
int aRate,
bool aHasAudio,
MetadataTags* aTags)
{
NS_ASSERTION(mDecoderStateMachine->OnDecodeThread(),
"Should be on decode thread.");
GetReentrantMonitor().AssertCurrentThreadIn();
mDecoderStateMachine->QueueMetadata(aPublishTime, aChannels, aRate, aHasAudio, aTags);
}
void MediaDecoder::MetadataLoaded(int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags)
{
MOZ_ASSERT(NS_IsMainThread());
if (mShuttingDown) {
@ -1185,25 +1196,36 @@ void MediaDecoder::SetMediaDuration(int64_t aDuration)
GetStateMachine()->SetDuration(aDuration);
}
void MediaDecoder::SetSeekable(bool aSeekable)
{
MOZ_ASSERT(NS_IsMainThread());
mSeekable = aSeekable;
void MediaDecoder::SetMediaSeekable(bool aMediaSeekable) {
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
MOZ_ASSERT(NS_IsMainThread() || OnDecodeThread());
mMediaSeekable = aMediaSeekable;
if (mDecoderStateMachine) {
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mDecoderStateMachine->SetSeekable(aSeekable);
mDecoderStateMachine->SetMediaSeekable(aMediaSeekable);
}
}
bool MediaDecoder::IsSeekable()
void MediaDecoder::SetTransportSeekable(bool aTransportSeekable)
{
MOZ_ASSERT(NS_IsMainThread());
return mSeekable;
mTransportSeekable = aTransportSeekable;
if (mDecoderStateMachine) {
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
mDecoderStateMachine->SetTransportSeekable(aTransportSeekable);
}
}
bool MediaDecoder::IsTransportSeekable()
{
MOZ_ASSERT(NS_IsMainThread());
return mTransportSeekable;
}
bool MediaDecoder::IsMediaSeekable()
{
return GetStateMachine()->IsSeekable();
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
MOZ_ASSERT(OnDecodeThread() || NS_IsMainThread());
return mMediaSeekable;
}
nsresult MediaDecoder::GetSeekable(nsTimeRanges* aSeekable)
@ -1211,20 +1233,24 @@ nsresult MediaDecoder::GetSeekable(nsTimeRanges* aSeekable)
//TODO : change 0.0 to GetInitialTime() when available
double initialTime = 0.0;
if (IsSeekable()) {
// We can seek in buffered range if the media is seekable. Also, we can seek
// in unbuffered ranges if the transport level is seekable (local file or the
// server supports range requests, etc.)
if (!IsMediaSeekable()) {
return NS_OK;
} else if (!IsTransportSeekable()) {
if (mDecoderStateMachine &&
mDecoderStateMachine->IsSeekableInBufferedRanges()) {
return GetBuffered(aSeekable);
} else {
return NS_OK;
}
} else {
double end = IsInfinite() ? std::numeric_limits<double>::infinity()
: initialTime + GetDuration();
aSeekable->Add(initialTime, end);
return NS_OK;
}
if (mDecoderStateMachine && mDecoderStateMachine->IsSeekableInBufferedRanges()) {
return GetBuffered(aSeekable);
} else {
// The stream is not seekable using only buffered ranges, and is not
// seekable. Don't allow seeking (return no ranges in |seekable|).
return NS_OK;
}
}
void MediaDecoder::SetFragmentEndTime(double aTime)

View File

@ -504,11 +504,14 @@ public:
void SetMediaDuration(int64_t aDuration) MOZ_FINAL MOZ_OVERRIDE;
// Set a flag indicating whether seeking is supported
virtual void SetSeekable(bool aSeekable);
// Return true if seeking is supported.
virtual bool IsSeekable();
bool IsMediaSeekable() MOZ_FINAL MOZ_OVERRIDE;
virtual void SetMediaSeekable(bool aMediaSeekable) MOZ_FINAL MOZ_OVERRIDE;
virtual void SetTransportSeekable(bool aTransportSeekable) MOZ_FINAL MOZ_OVERRIDE;
// Returns true if this media supports seeking. False, for example, for WebM
// files without an index and for chained ogg files.
virtual bool IsMediaSeekable() MOZ_FINAL MOZ_OVERRIDE;
// Returns true if seeking is supported on a transport level (e.g. the server
// supports range requests, we are playing a file, etc.).
virtual bool IsTransportSeekable();
// Return the time ranges that can be seeked into.
virtual nsresult GetSeekable(nsTimeRanges* aSeekable);
@ -633,6 +636,15 @@ public:
void SetAudioChannelType(AudioChannelType aType) { mAudioChannelType = aType; }
AudioChannelType GetAudioChannelType() { return mAudioChannelType; }
// Send a new set of metadata to the state machine, to be dispatched to the
// main thread to be presented when the |currentTime| of the media is greater
// than or equal to aPublishTime.
void QueueMetadata(int64_t aPublishTime,
int aChannels,
int aRate,
bool aHasAudio,
MetadataTags* aTags);
/******
* The following methods must only be called on the main
* thread.
@ -649,10 +661,7 @@ public:
// Called when the metadata from the media file has been loaded by the
// state machine. Call on the main thread only.
void MetadataLoaded(uint32_t aChannels,
uint32_t aRate,
bool aHasAudio,
const MetadataTags* aTags);
void MetadataLoaded(int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags);
// Called when the first frame has been loaded.
// Call on the main thread only.
@ -902,9 +911,12 @@ public:
// True when playback should start with audio captured (not playing).
bool mInitialAudioCaptured;
// True if the media resource is seekable (server supports byte range
// requests).
bool mSeekable;
// True if the resource is seekable at a transport level (server supports byte
// range requests, local file, etc.).
bool mTransportSeekable;
// True if the media is seekable (i.e. supports random access).
bool mMediaSeekable;
/******
* The following member variables can be accessed from any thread.

View File

@ -5,6 +5,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MediaDecoderOwner_h_
#define MediaDecoderOwner_h_
#include "AbstractMediaDecoder.h"
class nsHTMLMediaElement;
@ -12,8 +13,6 @@ namespace mozilla {
class VideoFrameContainer;
typedef nsDataHashtable<nsCStringHashKey, nsCString> MetadataTags;
class MediaDecoderOwner
{
public:
@ -54,8 +53,8 @@ public:
// Called by the video decoder object, on the main thread,
// when it has read the metadata containing video dimensions,
// etc.
virtual void MetadataLoaded(uint32_t aChannels,
uint32_t aRate,
virtual void MetadataLoaded(int aChannels,
int aRate,
bool aHasAudio,
const MetadataTags* aTags) = 0;

View File

@ -113,34 +113,6 @@ static int64_t DurationToUsecs(TimeDuration aDuration) {
return static_cast<int64_t>(aDuration.ToSeconds() * USECS_PER_S);
}
class nsAudioMetadataEventRunner : public nsRunnable
{
private:
nsRefPtr<MediaDecoder> mDecoder;
public:
nsAudioMetadataEventRunner(MediaDecoder* aDecoder, uint32_t aChannels,
uint32_t aRate, bool aHasAudio,
MetadataTags* aTags) :
mDecoder(aDecoder),
mChannels(aChannels),
mRate(aRate),
mHasAudio(aHasAudio),
mTags(aTags)
{
}
NS_IMETHOD Run()
{
mDecoder->MetadataLoaded(mChannels, mRate, mHasAudio, mTags);
return NS_OK;
}
const uint32_t mChannels;
const uint32_t mRate;
const bool mHasAudio;
MetadataTags* mTags;
};
// Owns the global state machine thread and counts of
// state machine and decoder threads. There should
// only be one instance of this class.
@ -394,7 +366,8 @@ MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
mPreservesPitch(true),
mBasePosition(0),
mAudioCaptured(false),
mSeekable(true),
mTransportSeekable(true),
mMediaSeekable(true),
mPositionChangeQueued(false),
mAudioCompleted(false),
mGotDurationFromMetaData(false),
@ -1306,6 +1279,8 @@ void MediaDecoderStateMachine::UpdatePlaybackPosition(int64_t aTime)
// Notify DOM of any queued up audioavailable events
mEventManager.DispatchPendingEvents(GetMediaTime());
mMetadataManager.DispatchMetadataIfNeeded(mDecoder, aTime);
if (fragmentEnded) {
StopPlayback();
}
@ -1399,12 +1374,21 @@ void MediaDecoderStateMachine::SetFragmentEndTime(int64_t aEndTime)
mFragmentEndTime = aEndTime < 0 ? aEndTime : aEndTime + mStartTime;
}
void MediaDecoderStateMachine::SetSeekable(bool aSeekable)
void MediaDecoderStateMachine::SetTransportSeekable(bool aTransportSeekable)
{
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
"Should be on main thread or the decoder thread.");
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
mSeekable = aSeekable;
mTransportSeekable = aTransportSeekable;
}
void MediaDecoderStateMachine::SetMediaSeekable(bool aMediaSeekable)
{
NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
"Should be on main thread or the decoder thread.");
mMediaSeekable = aMediaSeekable;
}
void MediaDecoderStateMachine::Shutdown()
@ -1489,6 +1473,12 @@ void MediaDecoderStateMachine::Seek(double aTime)
{
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
// Seeking requires the media to be seekable at both the transport level and
// the media level.
if (!mMediaSeekable) {
return;
}
// MediaDecoder::mPlayState should be SEEKING while we seek, and
// in that case MediaDecoder shouldn't be calling us.
NS_ASSERTION(mState != DECODER_STATE_SEEKING,
@ -1775,12 +1765,15 @@ nsresult MediaDecoderStateMachine::DecodeMetadata()
}
NS_ASSERTION(mStartTime != -1, "Must have start time");
NS_ASSERTION((!HasVideo() && !HasAudio()) ||
!mSeekable || mEndTime != -1,
"Active seekable media should have end time");
NS_ASSERTION(!mSeekable || GetDuration() != -1, "Seekable media should have duration");
LOG(PR_LOG_DEBUG, ("%p Media goes from %lld to %lld (duration %lld) seekable=%d",
mDecoder.get(), mStartTime, mEndTime, GetDuration(), mSeekable));
MOZ_ASSERT((!HasVideo() && !HasAudio()) ||
!(mMediaSeekable && mTransportSeekable) || mEndTime != -1,
"Active seekable media should have end time");
MOZ_ASSERT(!(mMediaSeekable && mTransportSeekable) ||
GetDuration() != -1, "Seekable media should have duration");
LOG(PR_LOG_DEBUG, ("%p Media goes from %lld to %lld (duration %lld)"
" transportSeekable=%d, mediaSeekable=%d",
mDecoder.get(), mStartTime, mEndTime, GetDuration(),
mTransportSeekable, mMediaSeekable));
// Inform the element that we've loaded the metadata and the first frame,
// setting the default framebuffer size for audioavailable events. Also,
@ -1794,12 +1787,13 @@ nsresult MediaDecoderStateMachine::DecodeMetadata()
uint32_t frameBufferLength = mInfo.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
mDecoder->RequestFrameBufferLength(frameBufferLength);
}
nsCOMPtr<nsIRunnable> metadataLoadedEvent =
new nsAudioMetadataEventRunner(mDecoder,
mInfo.mAudioChannels,
mInfo.mAudioRate,
HasAudio(),
tags);
new AudioMetadataEventRunner(mDecoder,
mInfo.mAudioChannels,
mInfo.mAudioRate,
HasAudio(),
tags);
NS_DispatchToMainThread(metadataLoadedEvent, NS_DISPATCH_NORMAL);
if (mState == DECODER_STATE_DECODING_METADATA) {
@ -2700,5 +2694,18 @@ bool MediaDecoderStateMachine::IsShutdown()
return GetState() == DECODER_STATE_SHUTDOWN;
}
void MediaDecoderStateMachine::QueueMetadata(int64_t aPublishTime, int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags)
{
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
TimedMetadata* metadata = new TimedMetadata;
metadata->mPublishTime = aPublishTime;
metadata->mChannels = aChannels;
metadata->mRate = aRate;
metadata->mHasAudio = aHasAudio;
metadata->mTags = aTags;
mMetadataManager.QueueMetadata(metadata);
}
} // namespace mozilla

View File

@ -179,10 +179,16 @@ public:
// be called with the decode monitor held.
void ClearPositionChangeFlag();
// Called from the main thread to set whether the media resource can
// seek into unbuffered ranges. The decoder monitor must be obtained
// before calling this.
void SetSeekable(bool aSeekable);
// Called from the main thread or the decoder thread to set whether the media
// resource can seek into unbuffered ranges. The decoder monitor must be
// obtained before calling this.
void SetTransportSeekable(bool aSeekable);
// Called from the main thread or the decoder thread to set whether the media
// can seek to a random location. This is not true for chained ogg and for WebM
// media without an index. The decoder monitor must be obtained before calling
// this.
void SetMediaSeekable(bool aSeekable);
// Update the playback position. This can result in a timeupdate event
// and an invalidate of the frame being dispatched asynchronously if
@ -257,9 +263,14 @@ public:
return mEndTime;
}
bool IsSeekable() {
bool IsTransportSeekable() {
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
return mSeekable;
return mTransportSeekable;
}
bool IsMediaSeekable() {
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
return mMediaSeekable;
}
// Return true if the media is seekable using only buffered ranges.
@ -322,6 +333,8 @@ public:
// shutting down. The decoder monitor must be held while calling this.
bool IsShutdown();
void QueueMetadata(int64_t aPublishTime, int aChannels, int aRate, bool aHasAudio, MetadataTags* aTags);
protected:
virtual uint32_t GetAmpleVideoFrames() { return mAmpleVideoFrames; }
@ -700,9 +713,13 @@ private:
// streams).
bool mAudioCaptured;
// True if the media resource can be seeked. Accessed from the state
// machine and main threads. Synchronised via decoder monitor.
bool mSeekable;
// True if the media resource can be seeked on a transport level. Accessed
// from the state machine and main threads. Synchronised via decoder monitor.
bool mTransportSeekable;
// True if the media can be seeked. Accessed from the state machine and main
// threads. Synchronised via decoder monitor.
bool mMediaSeekable;
// True if an event to notify about a change in the playback
// position has been queued, but not yet run. It is set to false when
@ -785,6 +802,8 @@ private:
// Stores presentation info required for playback. The decoder monitor
// must be held when accessing this.
VideoInfo mInfo;
mozilla::MediaMetadataManager mMetadataManager;
};
} // namespace mozilla;

View File

@ -0,0 +1,67 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(MediaMetadataManager_h__)
#define MediaMetadataManager_h__
#include "VideoUtils.h"
#include "mozilla/LinkedList.h"
#include "AbstractMediaDecoder.h"
#include "nsAutoPtr.h"
namespace mozilla {
// A class that contains the metadata of a media stream, and the time at which
// those metadata should start to be reported.
class TimedMetadata : public LinkedListElement<TimedMetadata> {
public:
// The time, in microseconds, at which those metadata should be available.
int64_t mPublishTime;
// The metadata. Ownership is transferred to the media element when dispatched
// to the main thread.
nsAutoPtr<MetadataTags> mTags;
// The sample rate of this media.
int mRate;
// The number of channels in this media.
int mChannels;
// True if this media has an audio track.
bool mHasAudio;
};
// This class encapsulates the logic to deliver the metadata from the reader to
// the content, at the right time.
class MediaMetadataManager
{
public:
~MediaMetadataManager() {
TimedMetadata* element;
while ((element = mMetadataQueue.popFirst()) != nullptr) {
delete element;
}
}
void QueueMetadata(TimedMetadata* aMetadata) {
mMetadataQueue.insertBack(aMetadata);
}
void DispatchMetadataIfNeeded(AbstractMediaDecoder* aDecoder, double aCurrentTime) {
TimedMetadata* metadata = mMetadataQueue.getFirst();
while (metadata && aCurrentTime >= static_cast<double>(metadata->mPublishTime) / USECS_PER_S) {
nsCOMPtr<nsIRunnable> metadataUpdatedEvent =
new mozilla::AudioMetadataEventRunner(aDecoder,
metadata->mChannels,
metadata->mRate,
metadata->mHasAudio,
metadata->mTags.forget());
NS_DispatchToMainThread(metadataUpdatedEvent, NS_DISPATCH_NORMAL);
mMetadataQueue.popFirst();
metadata = mMetadataQueue.getFirst();
}
}
protected:
LinkedList<TimedMetadata> mMetadataQueue;
};
}
#endif
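
A hedged usage sketch of the class above, mirroring how MediaDecoderStateMachine uses it elsewhere in this patch; |decoder| and |currentTime| stand in for values the caller already has:

// On the decode thread: queue an entry to surface later. The manager takes
// ownership of the TimedMetadata (which in turn owns the tags).
TimedMetadata* metadata = new TimedMetadata;
metadata->mPublishTime = 30 * USECS_PER_S;   // surface once playback reaches 30 s
metadata->mChannels = 2;
metadata->mRate = 44100;
metadata->mHasAudio = true;
metadata->mTags = nullptr;                   // or a populated MetadataTags table

MediaMetadataManager manager;
manager.QueueMetadata(metadata);

// On each playback position update: dispatch every queued entry whose publish
// time has been reached, as AudioMetadataEventRunner events on the main thread.
manager.DispatchMetadataIfNeeded(decoder, currentTime);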

View File

@ -214,10 +214,14 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
rv = hc->GetResponseHeader(NS_LITERAL_CSTRING("X-Content-Duration"), durationText);
}
// If there is no Content-Duration header, or if the value for this header
// is not valid, set the media as being infinite.
if (NS_SUCCEEDED(rv)) {
double duration = durationText.ToDouble(&ec);
if (ec == NS_OK && duration >= 0) {
mDecoder->SetDuration(duration);
} else {
mDecoder->SetInfinite(true);
}
} else {
mDecoder->SetInfinite(true);
@ -305,8 +309,8 @@ ChannelMediaResource::OnStartRequest(nsIRequest* aRequest)
mDecoder->SetInfinite(false);
}
}
mDecoder->SetSeekable(seekable);
mCacheStream.SetSeekable(seekable);
mDecoder->SetTransportSeekable(seekable);
mCacheStream.SetTransportSeekable(seekable);
nsCOMPtr<nsICachingChannel> cc = do_QueryInterface(aRequest);
if (cc) {
@ -419,7 +423,7 @@ ChannelMediaResource::OnStopRequest(nsIRequest* aRequest, nsresult aStatus)
// cause us to just re-read the stream, which would be really bad.
if (mReopenOnError &&
aStatus != NS_ERROR_PARSED_DATA_CACHED && aStatus != NS_BINDING_ABORTED &&
(mOffset == 0 || mCacheStream.IsSeekable())) {
(mOffset == 0 || mCacheStream.IsTransportSeekable())) {
// If the stream did close normally, then if the server is seekable we'll
// just seek to the end of the resource and get an HTTP 416 error because
// there's nothing there, so this isn't bad.
@ -803,7 +807,7 @@ void ChannelMediaResource::Suspend(bool aCloseImmediately)
}
if (mChannel) {
if (aCloseImmediately && mCacheStream.IsSeekable()) {
if (aCloseImmediately && mCacheStream.IsTransportSeekable()) {
// Kill off our channel right now, but don't tell anyone about it.
mIgnoreClose = true;
CloseChannel();

View File

@ -283,10 +283,10 @@ DASHRepDecoder::SetInfinite(bool aInfinite)
}
void
DASHRepDecoder::SetSeekable(bool aSeekable)
DASHRepDecoder::SetMediaSeekable(bool aMediaSeekable)
{
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
if (mMainDecoder) { mMainDecoder->SetSeekable(aSeekable); }
if (mMainDecoder) { mMainDecoder->SetMediaSeekable(aMediaSeekable); }
}
void

View File

@ -96,7 +96,7 @@ public:
void SetInfinite(bool aInfinite);
// Sets media stream as seekable. Called on main thread only.
void SetSeekable(bool aSeekable);
void SetMediaSeekable(bool aSeekable);
// Fire progress events if needed according to the time and byte
// constraints outlined in the specification. aTimer is true

View File

@ -18,6 +18,7 @@ extern "C" {
#include "nsTimeRanges.h"
#include "mozilla/TimeStamp.h"
#include "VorbisUtils.h"
#include "MediaMetadataManager.h"
namespace mozilla {
@ -77,6 +78,7 @@ static const int PAGE_STEP = 8192;
OggReader::OggReader(AbstractMediaDecoder* aDecoder)
: MediaDecoderReader(aDecoder),
mMonitor("OggReader"),
mTheoraState(nullptr),
mVorbisState(nullptr),
mOpusState(nullptr),
@ -86,7 +88,9 @@ OggReader::OggReader(AbstractMediaDecoder* aDecoder)
mOpusSerial(0),
mTheoraSerial(0),
mOpusPreSkip(0),
mPageOffset(0)
mPageOffset(0),
mIsChained(false),
mDecodedAudioFrames(0)
{
MOZ_COUNT_CTOR(OggReader);
memset(&mTheoraInfo, 0, sizeof(mTheoraInfo));
@ -400,6 +404,9 @@ nsresult OggReader::DecodeVorbis(ogg_packet* aPacket) {
frames,
buffer.forget(),
channels));
mDecodedAudioFrames += frames;
endFrame -= frames;
if (vorbis_synthesis_read(&mVorbisState->mDsp, frames) != 0) {
return NS_ERROR_FAILURE;
@ -544,6 +551,9 @@ nsresult OggReader::DecodeOpus(ogg_packet* aPacket) {
frames,
buffer.forget(),
channels));
mDecodedAudioFrames += frames;
return NS_OK;
}
#endif /* MOZ_OPUS */
@ -584,7 +594,7 @@ bool OggReader::DecodeAudioData()
#endif
}
if (packet->e_o_s) {
if ((packet->e_o_s) && (!ReadOggChain())) {
// We've encountered an end of bitstream packet, or we've hit the end of
// file while trying to decode, so inform the audio queue that there'll
// be no more samples.
@ -595,6 +605,115 @@ bool OggReader::DecodeAudioData()
return true;
}
void OggReader::SetChained(bool aIsChained) {
{
ReentrantMonitorAutoEnter mon(mMonitor);
mIsChained = aIsChained;
}
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
mDecoder->SetMediaSeekable(false);
}
}
bool OggReader::ReadOggChain()
{
bool chained = false;
OpusState* newOpusState = nullptr;
VorbisState* newVorbisState = nullptr;
int channels = 0;
long rate = 0;
MetadataTags* tags = nullptr;
if (HasVideo() || HasSkeleton() || !HasAudio()) {
return false;
}
ogg_page page;
int64_t pageOffset = ReadOggPage(&page);
if ((pageOffset == -1) || (!ogg_page_bos(&page))) {
return false;
}
int serial = ogg_page_serialno(&page);
if (mCodecStates.Get(serial, nullptr)) {
return false;
}
nsAutoPtr<OggCodecState> codecState;
codecState = OggCodecState::Create(&page);
if (!codecState) {
return false;
}
if (mVorbisState && (codecState->GetType() == OggCodecState::TYPE_VORBIS)) {
newVorbisState = static_cast<VorbisState*>(codecState.get());
}
#ifdef MOZ_OPUS
else if (mOpusState && (codecState->GetType() == OggCodecState::TYPE_OPUS)) {
newOpusState = static_cast<OpusState*>(codecState.get());
}
#endif
else {
return false;
}
mCodecStates.Put(serial, codecState.forget());
mKnownStreams.AppendElement(serial);
OggCodecState* state = nullptr;
mCodecStates.Get(serial, &state);
NS_ENSURE_TRUE(state, false);
if (NS_FAILED(state->PageIn(&page))) {
return false;
}
if ((newVorbisState && ReadHeaders(newVorbisState)) &&
(mVorbisState->mInfo.rate == newVorbisState->mInfo.rate) &&
(mVorbisState->mInfo.channels == newVorbisState->mInfo.channels)) {
mVorbisState->Reset();
mVorbisState = newVorbisState;
mVorbisSerial = mVorbisState->mSerial;
LOG(PR_LOG_DEBUG, ("New vorbis ogg link, serial=%d\n", mVorbisSerial));
chained = true;
rate = mVorbisState->mInfo.rate;
channels = mVorbisState->mInfo.channels;
tags = mVorbisState->GetTags();
}
#ifdef MOZ_OPUS
if ((newOpusState && ReadHeaders(newOpusState)) &&
(mOpusState->mRate == newOpusState->mRate) &&
(mOpusState->mChannels == newOpusState->mChannels) &&
(mOpusState->mPreSkip == newOpusState->mPreSkip)) {
mOpusState->Reset();
mOpusState = newOpusState;
mOpusSerial = mOpusState->mSerial;
chained = true;
rate = mOpusState->mRate;
channels = mOpusState->mChannels;
tags = mOpusState->GetTags();
}
#endif
if (chained) {
SetChained(true);
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
mDecoder->QueueMetadata((mDecodedAudioFrames * USECS_PER_S) / rate,
channels,
rate,
HasAudio(),
tags);
}
return true;
}
return false;
}
nsresult OggReader::DecodeTheora(ogg_packet* aPacket, int64_t aTimeThreshold)
{
NS_ASSERTION(aPacket->granulepos >= TheoraVersion(&mTheoraState->mInfo,3,2,1),
@ -943,6 +1062,7 @@ int64_t OggReader::RangeEndTime(int64_t aStartOffset,
// This page is from a bitstream which we haven't encountered yet.
// It's probably from a new "link" in a "chained" ogg. Don't
// bother even trying to find a duration...
SetChained(true);
endTime = -1;
break;
}
@ -1207,6 +1327,8 @@ nsresult OggReader::Seek(int64_t aTarget,
int64_t aCurrentTime)
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
if (mIsChained)
return NS_ERROR_FAILURE;
LOG(PR_LOG_DEBUG, ("%p About to seek to %lld", mDecoder, aTarget));
nsresult res;
MediaResource* resource = mDecoder->GetResource();
@ -1603,6 +1725,11 @@ nsresult OggReader::SeekBisection(int64_t aTarget,
nsresult OggReader::GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime)
{
{
mozilla::ReentrantMonitorAutoEnter mon(mMonitor);
if (mIsChained)
return NS_ERROR_FAILURE;
}
#ifdef OGG_ESTIMATE_BUFFERED
MediaResource* stream = mDecoder->GetResource();
int64_t durationUs = 0;
@ -1698,6 +1825,7 @@ nsresult OggReader::GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime)
// ogg), return OK to abort the finding any further ranges. This
// prevents us searching through the rest of the media when we
// may not be able to extract timestamps from it.
SetChained(true);
return NS_OK;
}
}

View File

@ -51,12 +51,15 @@ public:
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
virtual nsresult GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime);
// We use bisection to seek in buffered range.
// We use bisection to seek in buffered range, but we don't allow seeking in a
// chained ogg file.
virtual bool IsSeekableInBufferedRanges() {
return true;
}
private:
// This monitor should be taken when reading or writing to mIsChained.
ReentrantMonitor mMonitor;
// Specialized Reset() method to signal if the seek is
// to the start of the stream.
@ -216,6 +219,13 @@ private:
// succeeds.
bool ReadHeaders(OggCodecState* aState);
// Reads the next link in the chain.
bool ReadOggChain();
// Sets this media as being a chained ogg and notifies the state machine that
// the media is no longer seekable.
void SetChained(bool aIsChained);
// Returns the next Ogg packet for a bitstream/codec state. Returns a
// pointer to an ogg_packet on success, or nullptr if the read failed.
// The caller is responsible for deleting the packet and its |packet| field.
@ -274,6 +284,13 @@ private:
// The picture region inside Theora frame to be displayed, if we have
// a Theora video track.
nsIntRect mPicture;
// True if we are decoding a chained ogg. Reading or writing to this member
// should be done with |mMonitor| acquired.
bool mIsChained;
// Number of audio frames decoded so far.
int64_t mDecodedAudioFrames;
};
} // namespace mozilla