Bug 1291629 - Remove RTSP code. r=jwwang

MozReview-Commit-ID: AxaLwO4rTuY

--HG--
extra : rebase_source : f8a5de1be1471238d62b6bbc419b3a45f2590da6
This commit is contained in:
bechen 2016-08-10 10:32:25 +08:00
parent 41f64b0230
commit 931402dd2f
20 changed files with 1 addition and 1868 deletions

View File

@ -2617,20 +2617,6 @@ HTMLMediaElement::NotifyXPCOMShutdown()
ShutdownDecoder();
}
void
HTMLMediaElement::ResetConnectionState()
{
SetCurrentTime(0);
FireTimeUpdate(false);
DispatchAsyncEvent(NS_LITERAL_STRING("ended"));
ChangeNetworkState(nsIDOMHTMLMediaElement::NETWORK_EMPTY);
ChangeDelayLoadStatus(false);
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_NOTHING);
if (mDecoder) {
ShutdownDecoder();
}
}
void
HTMLMediaElement::Play(ErrorResult& aRv)
{
@ -3378,19 +3364,7 @@ nsresult HTMLMediaElement::InitializeDecoderForChannel(nsIChannel* aChannel,
mChannelLoader = nullptr;
}
// We postpone the |FinishDecoderSetup| call until we get the
// |OnConnected| signal from the MediaStreamController, which is held by
// RtspMediaResource.
if (DecoderTraits::DecoderWaitsForOnConnected(mimeType)) {
decoder->SetResource(resource);
SetDecoder(decoder);
if (aListener) {
*aListener = nullptr;
}
return NS_OK;
} else {
return FinishDecoderSetup(decoder, resource, aListener);
}
return FinishDecoderSetup(decoder, resource, aListener);
}
nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,

View File

@ -439,10 +439,6 @@ public:
return mNetworkState;
}
// Called by the media decoder object, on the main thread,
// when the connection between Rtsp server and client gets lost.
virtual void ResetConnectionState() final override;
void NotifyXPCOMShutdown() final override;
// Called by media decoder when the audible state changed or when input is
@ -719,13 +715,6 @@ public:
// A method to check whether we are currently playing.
bool IsCurrentlyPlaying() const;
/**
* A public wrapper for FinishDecoderSetup()
*/
nsresult FinishDecoderSetup(MediaDecoder* aDecoder, MediaResource* aStream) {
return FinishDecoderSetup(aDecoder, aStream, nullptr);
}
// Returns true if the media element is being destroyed. Used in
// dormancy checks to prevent dormant processing for an element
// that will soon be gone.

View File

@ -33,10 +33,6 @@
#include "nsIPrincipal.h"
#include "mozilla/dom/HTMLMediaElement.h"
#endif
#ifdef NECKO_PROTOCOL_rtsp
#include "RtspOmxDecoder.h"
#include "RtspOmxReader.h"
#endif
#ifdef MOZ_DIRECTSHOW
#include "DirectShowDecoder.h"
#include "DirectShowReader.h"
@ -273,29 +269,6 @@ static char const *const gOMXWebMCodecs[] = {
#endif
#ifdef NECKO_PROTOCOL_rtsp
static const char* const gRtspTypes[2] = {
"RTSP",
nullptr
};
static bool
IsRtspSupportedType(const nsACString& aMimeType)
{
return MediaDecoder::IsRtspEnabled() &&
CodecListContains(gRtspTypes, aMimeType);
}
#endif
/* static */
bool DecoderTraits::DecoderWaitsForOnConnected(const nsACString& aMimeType) {
#ifdef NECKO_PROTOCOL_rtsp
return CodecListContains(gRtspTypes, aMimeType);
#else
return false;
#endif
}
#ifdef MOZ_ANDROID_OMX
static bool
IsAndroidMediaType(const nsACString& aType)
@ -547,11 +520,6 @@ DecoderTraits::CanHandleMediaType(const char* aMIMEType,
EnsureAndroidMediaPluginHost()->FindDecoder(nsDependentCString(aMIMEType), nullptr)) {
return CANPLAY_MAYBE;
}
#endif
#ifdef NECKO_PROTOCOL_rtsp
if (IsRtspSupportedType(nsDependentCString(aMIMEType))) {
return CANPLAY_MAYBE;
}
#endif
return CANPLAY_NO;
}
@ -615,12 +583,6 @@ InstantiateDecoder(const nsACString& aType,
return decoder.forget();
}
#endif
#ifdef NECKO_PROTOCOL_rtsp
if (IsRtspSupportedType(aType)) {
decoder = new RtspOmxDecoder(aOwner);
return decoder.forget();
}
#endif
#ifdef MOZ_ANDROID_OMX
if (MediaDecoder::IsAndroidMediaPluginEnabled() &&
EnsureAndroidMediaPluginHost()->FindDecoder(aType, nullptr)) {
@ -753,9 +715,6 @@ bool DecoderTraits::IsSupportedInVideoDocument(const nsACString& aType)
IsAACSupportedType(aType) ||
#ifdef MOZ_DIRECTSHOW
IsDirectShowSupportedType(aType) ||
#endif
#ifdef NECKO_PROTOCOL_rtsp
IsRtspSupportedType(aType) ||
#endif
false;
}

View File

@ -71,10 +71,6 @@ public:
// vice versa.
static bool IsSupportedInVideoDocument(const nsACString& aType);
// Returns true if we should not start the decoder until we receive the
// OnConnected signal. (currently RTSP only)
static bool DecoderWaitsForOnConnected(const nsACString& aType);
static bool IsWebMTypeAndEnabled(const nsACString& aType);
static bool IsWebMAudioType(const nsACString& aType);
static bool IsMP4TypeAndEnabled(const nsACString& aType,

View File

@ -183,22 +183,6 @@ MediaDecoder::ResourceCallback::SetMediaSeekable(bool aMediaSeekable)
}
}
void
MediaDecoder::ResourceCallback::ResetConnectionState()
{
MOZ_ASSERT(NS_IsMainThread());
if (mDecoder) {
mDecoder->ResetConnectionState();
}
}
nsresult
MediaDecoder::ResourceCallback::FinishDecoderSetup(MediaResource* aResource)
{
MOZ_ASSERT(NS_IsMainThread());
return mDecoder ? mDecoder->FinishDecoderSetup(aResource) : NS_ERROR_FAILURE;
}
void
MediaDecoder::ResourceCallback::NotifyNetworkError()
{
@ -1010,26 +994,6 @@ MediaDecoder::FirstFrameLoaded(nsAutoPtr<MediaInfo> aInfo,
NotifySuspendedStatusChanged();
}
nsresult
MediaDecoder::FinishDecoderSetup(MediaResource* aResource)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!IsShutdown());
HTMLMediaElement* element = mOwner->GetMediaElement();
NS_ENSURE_TRUE(element, NS_ERROR_FAILURE);
element->FinishDecoderSetup(this, aResource);
return NS_OK;
}
void
MediaDecoder::ResetConnectionState()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!IsShutdown());
mOwner->ResetConnectionState();
MOZ_ASSERT(IsShutdown());
}
void
MediaDecoder::NetworkError()
{
@ -1739,15 +1703,6 @@ MediaDecoder::IsWebMEnabled()
return Preferences::GetBool("media.webm.enabled");
}
#ifdef NECKO_PROTOCOL_rtsp
bool
MediaDecoder::IsRtspEnabled()
{
// Currently RTSP is decoded by OMX.
return (Preferences::GetBool("media.rtsp.enabled", false) && IsOmxEnabled());
}
#endif
#ifdef MOZ_OMX_DECODER
bool
MediaDecoder::IsOmxEnabled()

View File

@ -85,8 +85,6 @@ public:
MediaDecoderOwner* GetMediaOwner() const override;
void SetInfinite(bool aInfinite) override;
void SetMediaSeekable(bool aMediaSeekable) override;
void ResetConnectionState() override;
nsresult FinishDecoderSetup(MediaResource* aResource) override;
void NotifyNetworkError() override;
void NotifyDecodeError() override;
void NotifyDataArrived() override;
@ -462,10 +460,6 @@ private:
static bool IsWaveEnabled();
static bool IsWebMEnabled();
#ifdef NECKO_PROTOCOL_rtsp
static bool IsRtspEnabled();
#endif
#ifdef MOZ_OMX_DECODER
static bool IsOmxEnabled();
#endif
@ -887,12 +881,6 @@ private:
// no longer considered to be infinite.
void SetInfinite(bool aInfinite);
// Reset the decoder and notify the media element that
// server connection is closed.
void ResetConnectionState();
nsresult FinishDecoderSetup(MediaResource* aResource);
// Called by MediaResource when the principal of the resource has
// changed. Called on main thread only.
void NotifyPrincipalChanged();

View File

@ -134,12 +134,6 @@ public:
// ImageContainer containing the video data.
virtual VideoFrameContainer* GetVideoFrameContainer() = 0;
// Called by the decoder object, on the main thread,
// when the connection between Rtsp server and client gets lost.
// The decoder owner should call Shutdown() on the decoder and drop the
// reference to the decoder to prevent further calls into the decoder.
virtual void ResetConnectionState() = 0;
// Called by media decoder when the audible state changed
virtual void SetAudibleState(bool aAudible) = 0;

View File

@ -8,7 +8,6 @@
#include "MediaResource.h"
#include "MediaResourceCallback.h"
#include "RtspMediaResource.h"
#include "mozilla/Mutex.h"
#include "nsDebug.h"
@ -1506,8 +1505,6 @@ MediaResource::Create(MediaResourceCallback* aCallback, nsIChannel* aChannel)
RefPtr<MediaResource> resource;
if (fc || IsBlobURI(uri)) {
resource = new FileMediaResource(aCallback, aChannel, uri, contentType);
} else if (IsRtspURI(uri)) {
resource = new RtspMediaResource(aCallback, aChannel, uri, contentType);
} else {
resource = new ChannelMediaResource(aCallback, aChannel, uri, contentType);
}

View File

@ -140,8 +140,6 @@ private:
typedef media::Interval<int64_t> MediaByteRange;
typedef media::IntervalSet<int64_t> MediaByteRangeSet;
class RtspMediaResource;
/**
* Provides a thread-safe, seek/read interface to resources
* loaded from a URI. Uses MediaCache to cache data received over
@ -344,12 +342,6 @@ public:
// any thread.
virtual const nsCString& GetContentType() const = 0;
// Get the RtspMediaResource pointer if this MediaResource really is a
// RtspMediaResource. For calling Rtsp specific functions.
virtual RtspMediaResource* GetRtspPointer() {
return nullptr;
}
// Return true if the stream is a live stream
virtual bool IsRealTime() {
return false;

View File

@ -37,15 +37,6 @@ public:
// Notify if seeking is supported by this MediaResource.
virtual void SetMediaSeekable(bool aMediaSeekable) {}
// Notify that server connection is closed.
virtual void ResetConnectionState() {}
// Used by RtspMediaResource which has an unusual sequence
// to set up the decoder.
virtual nsresult FinishDecoderSetup(MediaResource* aResource) {
return NS_OK;
}
// Notify that a network error is encountered.
virtual void NotifyNetworkError() {}

View File

@ -1,888 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/DebugOnly.h"
#include "RtspMediaResource.h"
#include "MediaDecoder.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/Monitor.h"
#include "mozilla/Preferences.h"
#include "mozilla/UniquePtr.h"
#include "nsContentUtils.h"
#include "nsIScriptSecurityManager.h"
#include "nsIStreamingProtocolService.h"
#include "nsServiceManagerUtils.h"
#ifdef NECKO_PROTOCOL_rtsp
#include "mozilla/net/RtspChannelChild.h"
#endif
using namespace mozilla::net;
using namespace mozilla::media;
mozilla::LazyLogModule gRtspMediaResourceLog("RtspMediaResource");
#define RTSP_LOG(msg, ...) MOZ_LOG(gRtspMediaResourceLog, mozilla::LogLevel::Debug, \
(msg, ##__VA_ARGS__))
// Debug logging macro with object pointer and class name.
#define RTSPMLOG(msg, ...) \
RTSP_LOG("%p [RtspMediaResource]: " msg, this, ##__VA_ARGS__)
namespace mozilla {
/* class RtspTrackBuffer: a ring buffer for un-decoded audio/video track data.
* The ring buffer is divided into BUFFER_SLOT_NUM slots, and each slot's size
* is fixed (mSlotSize).
* Even though the ring buffer is divided into fixed-size slots, it can still
* store data larger than one slot.
* */
#define BUFFER_SLOT_NUM 8192
#define BUFFER_SLOT_DEFAULT_SIZE 256
#define BUFFER_SLOT_MAX_SIZE 512
#define BUFFER_SLOT_INVALID -1
#define BUFFER_SLOT_EMPTY 0
struct BufferSlotData {
int32_t mLength;
uint64_t mTime;
int32_t mFrameType;
};
// This constant is used to determine if the buffer usage is over a threshold.
const float kBufferThresholdPerc = 0.8f;
// The default value of playout delay duration.
const uint32_t kPlayoutDelayMs = 3000;
//-----------------------------------------------------------------------------
// RtspTrackBuffer
//-----------------------------------------------------------------------------
class RtspTrackBuffer
{
public:
RtspTrackBuffer(const char *aMonitor, int32_t aTrackIdx, uint32_t aSlotSize)
: mMonitor(aMonitor)
, mSlotSize(aSlotSize)
, mTotalBufferSize(BUFFER_SLOT_NUM * mSlotSize)
, mFrameType(0)
, mIsStarted(false)
, mDuringPlayoutDelay(false)
, mPlayoutDelayMs(kPlayoutDelayMs)
, mPlayoutDelayTimer(nullptr) {
MOZ_COUNT_CTOR(RtspTrackBuffer);
mTrackIdx = aTrackIdx;
MOZ_ASSERT(mSlotSize < UINT32_MAX / BUFFER_SLOT_NUM);
mRingBuffer = MakeUnique<uint8_t[]>(mTotalBufferSize);
Reset();
};
~RtspTrackBuffer() {
MOZ_COUNT_DTOR(RtspTrackBuffer);
mRingBuffer = nullptr;
};
size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
// including this
size_t size = aMallocSizeOf(this);
// excluding this
size += aMallocSizeOf(mRingBuffer.get());
return size;
}
void Start() {
MonitorAutoLock monitor(mMonitor);
mIsStarted = true;
mFrameType = 0;
}
void Stop() {
MonitorAutoLock monitor(mMonitor);
mIsStarted = false;
StopPlayoutDelay();
}
// Read the data from mRingBuffer[mConsumerIdx*mSlotSize] into aToBuffer.
// If aToBufferSize is smaller than mBufferSlotData[mConsumerIdx].mLength,
// return early and set aFrameSize to notify the reader that aToBuffer
// doesn't have enough space. The reader must reallocate aToBuffer if it
// wishes to read the data.
nsresult ReadBuffer(uint8_t* aToBuffer, uint32_t aToBufferSize,
uint32_t& aReadCount, uint64_t& aFrameTime,
uint32_t& aFrameSize);
// Write the data from aFromBuffer into mRingBuffer[mProducerIdx*mSlotSize].
void WriteBuffer(const char *aFromBuffer, uint32_t aWriteCount,
uint64_t aFrameTime, uint32_t aFrameType);
// Reset the mProducerIdx, mConsumerIdx, mBufferSlotDataLength[],
// mBufferSlotDataTime[].
void Reset();
// We should call SetFrameType() first and then Reset().
// If we call Reset() first, the queue may still have some "garbage" frames
// written by another thread's |OnMediaDataAvailable| before |SetFrameType|.
void ResetWithFrameType(uint32_t aFrameType) {
SetFrameType(aFrameType);
Reset();
}
// When RtspTrackBuffer is in playout delay duration, it should suspend
// reading data from the buffer until the playout-delay-ended event occurs,
// which will be triggered by mPlayoutDelayTimer.
void StartPlayoutDelay() {
mDuringPlayoutDelay = true;
}
void LockStartPlayoutDelay() {
MonitorAutoLock monitor(mMonitor);
StartPlayoutDelay();
}
// If the playout delay is stopped, mPlayoutDelayTimer should be canceled.
void StopPlayoutDelay() {
if (mPlayoutDelayTimer) {
mPlayoutDelayTimer->Cancel();
mPlayoutDelayTimer = nullptr;
}
mDuringPlayoutDelay = false;
}
void LockStopPlayoutDelay() {
MonitorAutoLock monitor(mMonitor);
StopPlayoutDelay();
}
bool IsBufferOverThreshold();
void CreatePlayoutDelayTimer(unsigned long delayMs);
static void PlayoutDelayTimerCallback(nsITimer *aTimer, void *aClosure);
private:
// The frame type values are kept in sync with nsIStreamingProtocolController.h
void SetFrameType(uint32_t aFrameType) {
MonitorAutoLock monitor(mMonitor);
mFrameType = mFrameType | aFrameType;
}
// A monitor lock to prevent race conditions.
Monitor mMonitor;
// Indicate the track number for Rtsp.
int32_t mTrackIdx;
// mProducerIdx: the slot index where we store data received from
// nsIStreamingProtocolController.
// mConsumerIdx: the slot index we read from when the decoder (OMX) needs data.
int32_t mProducerIdx;
int32_t mConsumerIdx;
// Because each slot's size is fixed, we need an array to record the real
// data length and data time stamp.
// The value in mBufferSlotData[index].mLength represents:
// -1 (BUFFER_SLOT_INVALID): the slot's data is invalid; mConsumerIdx
// should move forward.
// 0 (BUFFER_SLOT_EMPTY): the slot is empty; mConsumerIdx should wait here.
// positive value: the slot contains valid data and the value is the data size.
BufferSlotData mBufferSlotData[BUFFER_SLOT_NUM];
// The ring buffer pointer.
UniquePtr<uint8_t[]> mRingBuffer;
// Each slot's size.
uint32_t mSlotSize;
// mRingBuffer's total size.
uint32_t mTotalBufferSize;
// A flag that indicates whether the incoming data should be dropped or stored.
// When we are seeking, the incoming data should be dropped.
// Bit definition in |nsIStreamingProtocolController.h|
uint32_t mFrameType;
// Set true/false when |Start()/Stop()| is called.
bool mIsStarted;
// Indicates whether the buffer is in the playout delay duration.
bool mDuringPlayoutDelay;
// Playout delay duration defined in milliseconds.
uint32_t mPlayoutDelayMs;
// Timer used to fire playout-delay-ended event.
nsCOMPtr<nsITimer> mPlayoutDelayTimer;
};
nsresult RtspTrackBuffer::ReadBuffer(uint8_t* aToBuffer, uint32_t aToBufferSize,
uint32_t& aReadCount, uint64_t& aFrameTime,
uint32_t& aFrameSize)
{
MonitorAutoLock monitor(mMonitor);
RTSPMLOG("ReadBuffer mTrackIdx %d mProducerIdx %d mConsumerIdx %d "
"mBufferSlotData[mConsumerIdx].mLength %d"
,mTrackIdx ,mProducerIdx ,mConsumerIdx
,mBufferSlotData[mConsumerIdx].mLength);
// Reader should skip the slots with mLength==BUFFER_SLOT_INVALID.
// The loop ends when
// 1. Read data successfully
// 2. Fail to read data due to aToBuffer's space
// 3. No data in this buffer
// 4. mIsStarted is not set
while (1) {
// Make sure the track buffer is started.
// It could be stopped when RTSP connection is disconnected.
if (!mIsStarted) {
RTSPMLOG("ReadBuffer: mIsStarted is false");
return NS_ERROR_FAILURE;
}
// Do not read from buffer if we are still in the playout delay duration.
if (mDuringPlayoutDelay) {
monitor.Wait();
continue;
}
if (mBufferSlotData[mConsumerIdx].mFrameType & MEDIASTREAM_FRAMETYPE_END_OF_STREAM) {
return NS_BASE_STREAM_CLOSED;
}
if (mBufferSlotData[mConsumerIdx].mLength > 0) {
// Check the aToBuffer space is enough for data copy.
if ((int32_t)aToBufferSize < mBufferSlotData[mConsumerIdx].mLength) {
aFrameSize = mBufferSlotData[mConsumerIdx].mLength;
break;
}
uint32_t slots = mBufferSlotData[mConsumerIdx].mLength / mSlotSize;
if (mBufferSlotData[mConsumerIdx].mLength % mSlotSize > 0) {
slots++;
}
// we have data, copy to aToBuffer
MOZ_ASSERT(mBufferSlotData[mConsumerIdx].mLength <=
(int32_t)((BUFFER_SLOT_NUM - mConsumerIdx) * mSlotSize));
memcpy(aToBuffer,
(void *)(&mRingBuffer[mSlotSize * mConsumerIdx]),
mBufferSlotData[mConsumerIdx].mLength);
aFrameSize = aReadCount = mBufferSlotData[mConsumerIdx].mLength;
aFrameTime = mBufferSlotData[mConsumerIdx].mTime;
RTSPMLOG("DataLength %d, data time %lld"
,mBufferSlotData[mConsumerIdx].mLength
,mBufferSlotData[mConsumerIdx].mTime);
// After reading the data, we set the consumed entries of mBufferSlotData
// to BUFFER_SLOT_EMPTY to indicate these slots are free.
for (uint32_t i = mConsumerIdx; i < mConsumerIdx + slots; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_EMPTY;
mBufferSlotData[i].mTime = BUFFER_SLOT_EMPTY;
}
mConsumerIdx = (mConsumerIdx + slots) % BUFFER_SLOT_NUM;
break;
} else if (mBufferSlotData[mConsumerIdx].mLength == BUFFER_SLOT_INVALID) {
mConsumerIdx = (mConsumerIdx + 1) % BUFFER_SLOT_NUM;
RTSPMLOG("BUFFER_SLOT_INVALID move forward");
} else {
// No data, the decode thread is blocked here until we receive
// OnMediaDataAvailable. The OnMediaDataAvailable will call WriteBuffer()
// to wake up the decode thread.
RTSPMLOG("monitor.Wait()");
monitor.Wait();
}
}
return NS_OK;
}
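The slot accounting above (and in WriteBuffer below) follows one pattern: a frame occupies ceil(length / mSlotSize) consecutive slots, and the producer/consumer indices advance modulo BUFFER_SLOT_NUM. Below is a minimal standalone sketch of that arithmetic, using the same constants as the removed code (8192 slots, 256-byte default slot size); the helper names are illustrative, not Gecko APIs.

// Minimal sketch of the slot arithmetic used by ReadBuffer/WriteBuffer.
// Not the removed Gecko code; the constants mirror the removed defines and
// the helper names are illustrative only.
#include <cstdint>
#include <iostream>

constexpr uint32_t kSlotNum  = 8192; // BUFFER_SLOT_NUM
constexpr uint32_t kSlotSize = 256;  // BUFFER_SLOT_DEFAULT_SIZE

// A frame of `bytes` bytes occupies ceil(bytes / kSlotSize) consecutive slots.
uint32_t SlotsForFrame(uint32_t bytes) {
  return (bytes + kSlotSize - 1) / kSlotSize;
}

// Indices advance modulo the slot count, wrapping back to the start of the ring.
uint32_t AdvanceIndex(uint32_t index, uint32_t slots) {
  return (index + slots) % kSlotNum;
}

int main() {
  uint32_t frameBytes = 700;                  // larger than one slot
  uint32_t slots = SlotsForFrame(frameBytes); // (700 + 255) / 256 = 3 slots
  std::cout << slots << " slots needed\n";
  // Pure modular advance: (8190 + 3) % 8192 = 1.
  std::cout << "advanced index: " << AdvanceIndex(8190, slots) << "\n";
}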
/* When we perform a WriteBuffer, we check mIsStarted and aFrameType first.
* These flags prevent "garbage" frames from being written into the buffer.
*
* After writing the data into the buffer, we check to see if we wrote over a
* slot, and update mConsumerIdx if necessary.
* This ensures that the decoder will get the "oldest" data available in the
* buffer.
*
* If the incoming data is larger than one slot size (isMultipleSlots), we set
* |mBufferSlotData[].mLength = BUFFER_SLOT_INVALID;| for the slots other than
* the first one, in order to notify the reader that those slots are unavailable.
*
* If the incoming data spans multiple slots and crosses the end of
* BUFFER_SLOT_NUM, returnToHead is set to true and the data continues to be
* written from the head (index 0).
*
* MEDIASTREAM_FRAMETYPE_DISCONTINUITY currently is used when we are seeking.
* */
void RtspTrackBuffer::WriteBuffer(const char *aFromBuffer, uint32_t aWriteCount,
uint64_t aFrameTime, uint32_t aFrameType)
{
MonitorAutoLock monitor(mMonitor);
if (!mIsStarted) {
RTSPMLOG("mIsStarted is false");
return;
}
if (mTotalBufferSize < aWriteCount) {
RTSPMLOG("mTotalBufferSize < aWriteCount, incoming data is too large");
return;
}
// Check the incoming data's frame type.
// If we receive MEDIASTREAM_FRAMETYPE_DISCONTINUITY, clear mFrameType to
// imply the RtspTrackBuffer is ready to receive data.
if (aFrameType & MEDIASTREAM_FRAMETYPE_DISCONTINUITY) {
mFrameType = mFrameType & (~MEDIASTREAM_FRAMETYPE_DISCONTINUITY);
RTSPMLOG("Clear mFrameType");
return;
}
// Check the current buffer frame type.
// If the MEDIASTREAM_FRAMETYPE_DISCONTINUITY bit is set, the
// RtspTrackBuffer can't receive data now, so we drop incoming frames until
// we receive MEDIASTREAM_FRAMETYPE_DISCONTINUITY.
if (mFrameType & MEDIASTREAM_FRAMETYPE_DISCONTINUITY) {
RTSPMLOG("Return because the mFrameType is set");
return;
}
// Create a timer to delay ReadBuffer() for a duration.
if (mDuringPlayoutDelay && !mPlayoutDelayTimer) {
CreatePlayoutDelayTimer(mPlayoutDelayMs);
}
// The flag is true if the incoming data is larger than one slot size.
bool isMultipleSlots = false;
// The flag is true if the incoming data is larger than the slots remaining before the end of the buffer.
bool returnToHead = false;
// Calculate how many slots the incoming data needs.
int32_t slots = aWriteCount / mSlotSize;
if (aWriteCount % mSlotSize > 0) {
slots++;
}
int32_t i;
RTSPMLOG("WriteBuffer mTrackIdx %d mProducerIdx %d mConsumerIdx %d",
mTrackIdx, mProducerIdx,mConsumerIdx);
if (aWriteCount > mSlotSize) {
isMultipleSlots = true;
}
if (isMultipleSlots &&
(aWriteCount > (BUFFER_SLOT_NUM - mProducerIdx) * mSlotSize)) {
returnToHead = true;
}
RTSPMLOG("slots %d isMultipleSlots %d returnToHead %d",
slots, isMultipleSlots, returnToHead);
if (returnToHead) {
// Invalidate the remaining mBufferSlotData[].mLength entries up to the end of the buffer.
for (i = mProducerIdx; i < BUFFER_SLOT_NUM; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_INVALID;
}
// If we are about to overwrite one or more slots that the decode thread has
// not yet read, mConsumerIdx returns to the head of the slot buffer and moves
// forward to the oldest valid slot.
if (mProducerIdx <= mConsumerIdx && mConsumerIdx < mProducerIdx + slots) {
mConsumerIdx = 0;
for (i = mConsumerIdx; i < BUFFER_SLOT_NUM; ++i) {
if (mBufferSlotData[i].mLength > 0) {
mConsumerIdx = i;
break;
}
}
}
mProducerIdx = 0;
}
if (!(aFrameType & MEDIASTREAM_FRAMETYPE_END_OF_STREAM)) {
memcpy(&(mRingBuffer[mSlotSize * mProducerIdx]), aFromBuffer, aWriteCount);
}
// If the buffer is almost full, stop the playout delay to let ReadBuffer()
// consume data in the buffer.
if (mDuringPlayoutDelay && IsBufferOverThreshold()) {
StopPlayoutDelay();
}
if (mProducerIdx <= mConsumerIdx && mConsumerIdx < mProducerIdx + slots
&& mBufferSlotData[mConsumerIdx].mLength > 0) {
// Wrote one or more slots that the decode thread has not yet read.
RTSPMLOG("overwrite!! %d time %lld"
,mTrackIdx,mBufferSlotData[mConsumerIdx].mTime);
if (aFrameType & MEDIASTREAM_FRAMETYPE_END_OF_STREAM) {
mBufferSlotData[mProducerIdx].mLength = 0;
mBufferSlotData[mProducerIdx].mTime = 0;
StopPlayoutDelay();
} else {
mBufferSlotData[mProducerIdx].mLength = aWriteCount;
mBufferSlotData[mProducerIdx].mTime = aFrameTime;
}
mBufferSlotData[mProducerIdx].mFrameType = aFrameType;
// Clear mBufferSlotData[].mLength except for the start slot.
if (isMultipleSlots) {
for (i = mProducerIdx + 1; i < mProducerIdx + slots; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_INVALID;
}
}
mProducerIdx = (mProducerIdx + slots) % BUFFER_SLOT_NUM;
// Move the mConsumerIdx forward to ensure that the decoder reads the
// oldest data available.
mConsumerIdx = mProducerIdx;
} else {
// Normal case: the writer doesn't overtake the reader.
if (aFrameType & MEDIASTREAM_FRAMETYPE_END_OF_STREAM) {
mBufferSlotData[mProducerIdx].mLength = 0;
mBufferSlotData[mProducerIdx].mTime = 0;
StopPlayoutDelay();
} else {
mBufferSlotData[mProducerIdx].mLength = aWriteCount;
mBufferSlotData[mProducerIdx].mTime = aFrameTime;
}
mBufferSlotData[mProducerIdx].mFrameType = aFrameType;
// Clear the mBufferSlotData[].mLength except the start slot.
if (isMultipleSlots) {
for (i = mProducerIdx + 1; i < mProducerIdx + slots; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_INVALID;
}
}
mProducerIdx = (mProducerIdx + slots) % BUFFER_SLOT_NUM;
}
mMonitor.NotifyAll();
}
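The overwrite handling above hinges on one predicate: a write of `slots` slots starting at mProducerIdx covers mBufferSlotData[mConsumerIdx] exactly when mProducerIdx <= mConsumerIdx < mProducerIdx + slots. A tiny standalone sketch of that check follows; the names are illustrative, not the removed code.

// Sketch of WriteBuffer's "writer overtakes reader" check.
// Illustrative only; assumes the indices have not yet been reduced modulo
// the slot count for the write being considered.
#include <cstdint>
#include <cassert>

// True when a write of `slots` slots starting at `producer` covers the slot
// the consumer is about to read, so the reader must be moved forward.
bool WriterOvertakesReader(uint32_t producer, uint32_t consumer, uint32_t slots) {
  return producer <= consumer && consumer < producer + slots;
}

int main() {
  assert(WriterOvertakesReader(10, 11, 4));  // slots 10..13 cover index 11
  assert(!WriterOvertakesReader(10, 20, 4)); // reader is well ahead
  assert(!WriterOvertakesReader(10, 9, 4));  // consumer index is below the producer
  return 0;
}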
void RtspTrackBuffer::Reset() {
MonitorAutoLock monitor(mMonitor);
mProducerIdx = 0;
mConsumerIdx = 0;
for (uint32_t i = 0; i < BUFFER_SLOT_NUM; ++i) {
mBufferSlotData[i].mLength = BUFFER_SLOT_EMPTY;
mBufferSlotData[i].mTime = BUFFER_SLOT_EMPTY;
mBufferSlotData[i].mFrameType = MEDIASTREAM_FRAMETYPE_NORMAL;
}
StopPlayoutDelay();
mMonitor.NotifyAll();
}
bool
RtspTrackBuffer::IsBufferOverThreshold()
{
static int32_t numSlotsThreshold =
BUFFER_SLOT_NUM * kBufferThresholdPerc;
int32_t numSlotsUsed = mProducerIdx - mConsumerIdx;
if (numSlotsUsed < 0) { // wrap-around
numSlotsUsed = (BUFFER_SLOT_NUM - mConsumerIdx) + mProducerIdx;
}
if (numSlotsUsed > numSlotsThreshold) {
return true;
}
return false;
}
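With BUFFER_SLOT_NUM = 8192 and kBufferThresholdPerc = 0.8, the threshold works out to 6553 slots; the negative-difference branch handles the case where the producer index has wrapped past the end of the ring. A small worked sketch under those same constants (standalone, illustrative only):

// Worked sketch of the buffer-usage threshold check, using the same
// constants as the removed code (8192 slots, 0.8 threshold).
#include <cstdint>
#include <iostream>

constexpr int32_t kSlotNum = 8192;     // BUFFER_SLOT_NUM
constexpr float kThresholdPerc = 0.8f; // kBufferThresholdPerc

bool IsOverThreshold(int32_t producerIdx, int32_t consumerIdx) {
  const int32_t threshold =
      static_cast<int32_t>(kSlotNum * kThresholdPerc); // 6553 slots
  int32_t used = producerIdx - consumerIdx;
  if (used < 0) { // the producer has wrapped past the end of the ring
    used = (kSlotNum - consumerIdx) + producerIdx;
  }
  return used > threshold;
}

int main() {
  std::cout << IsOverThreshold(7000, 100) << "\n"; // 6900 slots used -> 1
  std::cout << IsOverThreshold(100, 7000) << "\n"; // wrapped, 1292 used -> 0
}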
void
RtspTrackBuffer::CreatePlayoutDelayTimer(unsigned long delayMs)
{
if (delayMs <= 0) {
return;
}
mPlayoutDelayTimer = do_CreateInstance("@mozilla.org/timer;1");
if (mPlayoutDelayTimer) {
mPlayoutDelayTimer->InitWithFuncCallback(PlayoutDelayTimerCallback,
this, delayMs,
nsITimer::TYPE_ONE_SHOT);
}
}
// static
void
RtspTrackBuffer::PlayoutDelayTimerCallback(nsITimer *aTimer,
void *aClosure)
{
MOZ_ASSERT(aTimer);
MOZ_ASSERT(aClosure);
RtspTrackBuffer *self = static_cast<RtspTrackBuffer*>(aClosure);
MonitorAutoLock lock(self->mMonitor);
self->StopPlayoutDelay();
lock.NotifyAll();
}
//-----------------------------------------------------------------------------
// RtspMediaResource
//-----------------------------------------------------------------------------
RtspMediaResource::RtspMediaResource(MediaResourceCallback* aCallback,
nsIChannel* aChannel, nsIURI* aURI, const nsACString& aContentType)
: BaseMediaResource(aCallback, aChannel, aURI, aContentType)
, mIsConnected(false)
, mIsLiveStream(false)
, mHasTimestamp(true)
, mIsSuspend(true)
{
#ifndef NECKO_PROTOCOL_rtsp
MOZ_CRASH("Should not be called except for B2G platform");
#else
MOZ_ASSERT(aChannel);
mMediaStreamController =
static_cast<RtspChannelChild*>(aChannel)->GetController();
MOZ_ASSERT(mMediaStreamController);
mListener = new Listener(this);
mMediaStreamController->AsyncOpen(mListener);
#endif
}
RtspMediaResource::~RtspMediaResource()
{
RTSPMLOG("~RtspMediaResource");
if (mListener) {
// Kill its reference to us since we're going away
mListener->Revoke();
}
}
void RtspMediaResource::SetSuspend(bool aIsSuspend)
{
NS_ASSERTION(!NS_IsMainThread(), "Don't call on main thread");
RTSPMLOG("SetSuspend %d",aIsSuspend);
nsCOMPtr<nsIRunnable> runnable =
NewRunnableMethod<bool>(this, &RtspMediaResource::NotifySuspend,
aIsSuspend);
NS_DispatchToMainThread(runnable);
}
void RtspMediaResource::NotifySuspend(bool aIsSuspend)
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
RTSPMLOG("NotifySuspend %d",aIsSuspend);
mIsSuspend = aIsSuspend;
if (mCallback) {
mCallback->NotifySuspendedStatusChanged();
}
}
size_t
RtspMediaResource::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
size_t size = BaseMediaResource::SizeOfExcludingThis(aMallocSizeOf);
size += mTrackBuffer.ShallowSizeOfExcludingThis(aMallocSizeOf);
// Include the size of each track buffer.
for (size_t i = 0; i < mTrackBuffer.Length(); i++) {
size += mTrackBuffer[i]->SizeOfIncludingThis(aMallocSizeOf);
}
// Could add in the future:
// - mMediaStreamController
return size;
}
//----------------------------------------------------------------------------
// RtspMediaResource::Listener
//----------------------------------------------------------------------------
NS_IMPL_ISUPPORTS(RtspMediaResource::Listener,
nsIInterfaceRequestor, nsIStreamingProtocolListener);
nsresult
RtspMediaResource::Listener::OnMediaDataAvailable(uint8_t aTrackIdx,
const nsACString &data,
uint32_t length,
uint32_t offset,
nsIStreamingProtocolMetaData *meta)
{
if (!mResource)
return NS_OK;
return mResource->OnMediaDataAvailable(aTrackIdx, data, length, offset, meta);
}
nsresult
RtspMediaResource::Listener::OnConnected(uint8_t aTrackIdx,
nsIStreamingProtocolMetaData *meta)
{
if (!mResource)
return NS_OK;
return mResource->OnConnected(aTrackIdx, meta);
}
nsresult
RtspMediaResource::Listener::OnDisconnected(uint8_t aTrackIdx, nsresult reason)
{
if (!mResource)
return NS_OK;
return mResource->OnDisconnected(aTrackIdx, reason);
}
nsresult
RtspMediaResource::Listener::GetInterface(const nsIID & aIID, void **aResult)
{
return QueryInterface(aIID, aResult);
}
void
RtspMediaResource::Listener::Revoke()
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
if (mResource) {
mResource = nullptr;
}
}
nsresult
RtspMediaResource::ReadFrameFromTrack(uint8_t* aBuffer, uint32_t aBufferSize,
uint32_t aTrackIdx, uint32_t& aBytes,
uint64_t& aTime, uint32_t& aFrameSize)
{
NS_ASSERTION(!NS_IsMainThread(), "Don't call on main thread");
NS_ASSERTION(aTrackIdx < mTrackBuffer.Length(),
"ReadTrack index > mTrackBuffer");
MOZ_ASSERT(aBuffer);
if (!mIsConnected) {
RTSPMLOG("ReadFrameFromTrack: RTSP not connected");
return NS_ERROR_FAILURE;
}
return mTrackBuffer[aTrackIdx]->ReadBuffer(aBuffer, aBufferSize, aBytes,
aTime, aFrameSize);
}
nsresult
RtspMediaResource::OnMediaDataAvailable(uint8_t aTrackIdx,
const nsACString &data,
uint32_t length,
uint32_t offset,
nsIStreamingProtocolMetaData *meta)
{
uint64_t time;
uint32_t frameType;
meta->GetTimeStamp(&time);
meta->GetFrameType(&frameType);
mTrackBuffer[aTrackIdx]->WriteBuffer(data.BeginReading(), length, time,
frameType);
return NS_OK;
}
// Bug 962309 - Video RTSP support should be disabled in 1.3
bool
RtspMediaResource::IsVideoEnabled()
{
return Preferences::GetBool("media.rtsp.video.enabled", false);
}
bool
RtspMediaResource::IsVideo(uint8_t tracks, nsIStreamingProtocolMetaData *meta)
{
bool isVideo = false;
for (int i = 0; i < tracks; ++i) {
nsCOMPtr<nsIStreamingProtocolMetaData> trackMeta;
mMediaStreamController->GetTrackMetaData(i, getter_AddRefs(trackMeta));
MOZ_ASSERT(trackMeta);
uint32_t w = 0, h = 0;
trackMeta->GetWidth(&w);
trackMeta->GetHeight(&h);
if (w > 0 || h > 0) {
isVideo = true;
break;
}
}
return isVideo;
}
nsresult
RtspMediaResource::OnConnected(uint8_t aTrackIdx,
nsIStreamingProtocolMetaData *meta)
{
if (mIsConnected) {
for (uint32_t i = 0 ; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->Start();
}
return NS_OK;
}
uint8_t tracks;
mMediaStreamController->GetTotalTracks(&tracks);
// If the RTSP video preference is not enabled and the stream contains video,
// we give up moving forward.
if (!IsVideoEnabled() && IsVideo(tracks, meta)) {
// Give up, report error to media element.
mCallback->NotifyDecodeError();
return NS_ERROR_FAILURE;
}
uint64_t durationUs = 0;
for (int i = 0; i < tracks; ++i) {
nsCString rtspTrackId("RtspTrack");
rtspTrackId.AppendInt(i);
nsCOMPtr<nsIStreamingProtocolMetaData> trackMeta;
mMediaStreamController->GetTrackMetaData(i, getter_AddRefs(trackMeta));
MOZ_ASSERT(trackMeta);
trackMeta->GetDuration(&durationUs);
// Here is a heuristic to estimate the slot size.
// For a video track, use width*height.
// For an audio track, use BUFFER_SLOT_DEFAULT_SIZE because w*h is 0.
// Finally, clamp the value into [BUFFER_SLOT_DEFAULT_SIZE, BUFFER_SLOT_MAX_SIZE].
uint32_t w, h;
uint32_t slotSize;
trackMeta->GetWidth(&w);
trackMeta->GetHeight(&h);
slotSize = clamped((int32_t)(w * h), BUFFER_SLOT_DEFAULT_SIZE,
BUFFER_SLOT_MAX_SIZE);
mTrackBuffer.AppendElement(new RtspTrackBuffer(rtspTrackId.get(),
i, slotSize));
mTrackBuffer[i]->Start();
}
if (!mCallback) {
return NS_ERROR_FAILURE;
}
// If durationUs is 0, the stream is a live stream.
if (durationUs) {
// Not live stream.
mIsLiveStream = false;
mCallback->SetInfinite(false);
} else {
// Live stream.
// Check the preference "media.realtime_decoder.enabled".
if (!Preferences::GetBool("media.realtime_decoder.enabled", false)) {
// Give up, report error to media element.
mCallback->NotifyDecodeError();
return NS_ERROR_FAILURE;
} else {
mIsLiveStream = true;
bool seekable = false;
mCallback->SetInfinite(true);
mCallback->SetMediaSeekable(seekable);
}
}
MediaDecoderOwner* owner = mCallback->GetMediaOwner();
NS_ENSURE_TRUE(owner, NS_ERROR_FAILURE);
// Fires an initial progress event.
owner->DownloadProgressed();
nsresult rv = mCallback->FinishDecoderSetup(this);
NS_ENSURE_SUCCESS(rv, rv);
mIsConnected = true;
return NS_OK;
}
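The per-track slot-size heuristic above clamps width*height into [BUFFER_SLOT_DEFAULT_SIZE, BUFFER_SLOT_MAX_SIZE], so an audio track (w*h == 0) gets 256-byte slots and any real video track saturates at 512. A minimal sketch of that calculation; std::clamp stands in here for mozilla's clamped(), and the function name is illustrative.

// Sketch of the per-track slot-size heuristic from OnConnected.
// Illustrative only; std::clamp stands in for mozilla's clamped().
#include <algorithm>
#include <cstdint>
#include <iostream>

constexpr int32_t kSlotDefaultSize = 256; // BUFFER_SLOT_DEFAULT_SIZE
constexpr int32_t kSlotMaxSize     = 512; // BUFFER_SLOT_MAX_SIZE

int32_t SlotSizeFor(uint32_t aWidth, uint32_t aHeight) {
  return std::clamp(static_cast<int32_t>(aWidth * aHeight),
                    kSlotDefaultSize, kSlotMaxSize);
}

int main() {
  std::cout << SlotSizeFor(0, 0) << "\n";     // audio track (w*h == 0): 256
  std::cout << SlotSizeFor(640, 480) << "\n"; // video track: clamped to 512
}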
nsresult
RtspMediaResource::OnDisconnected(uint8_t aTrackIdx, nsresult aReason)
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
for (uint32_t i = 0 ; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->Stop();
mTrackBuffer[i]->Reset();
}
if (mCallback) {
if (aReason == NS_ERROR_NOT_INITIALIZED ||
aReason == NS_ERROR_CONNECTION_REFUSED ||
aReason == NS_ERROR_NOT_CONNECTED ||
aReason == NS_ERROR_NET_TIMEOUT) {
// Report error code to Decoder.
RTSPMLOG("Error in OnDisconnected 0x%x", aReason);
mIsConnected = false;
mCallback->NotifyNetworkError();
} else {
// Resetting the decoder and media element when the connection
// between RTSP client and server goes down.
mCallback->ResetConnectionState();
}
}
if (mListener) {
// Note: Listener's Revoke() kills its reference to us, which means it would
// release |this| object. So, ensure it is called at the end of this method.
mListener->Revoke();
}
return NS_OK;
}
void RtspMediaResource::Suspend(bool aCloseImmediately)
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
mIsSuspend = true;
if (NS_WARN_IF(!mCallback)) {
return;
}
MediaDecoderOwner* owner = mCallback->GetMediaOwner();
NS_ENSURE_TRUE_VOID(owner);
dom::HTMLMediaElement* element = owner->GetMediaElement();
NS_ENSURE_TRUE_VOID(element);
mMediaStreamController->Suspend();
element->DownloadSuspended();
mCallback->NotifySuspendedStatusChanged();
}
void RtspMediaResource::Resume()
{
NS_ASSERTION(NS_IsMainThread(), "Don't call on non-main thread");
mIsSuspend = false;
if (NS_WARN_IF(!mCallback)) {
return;
}
MediaDecoderOwner* owner = mCallback->GetMediaOwner();
NS_ENSURE_TRUE_VOID(owner);
dom::HTMLMediaElement* element = owner->GetMediaElement();
NS_ENSURE_TRUE_VOID(element);
if (mChannel) {
element->DownloadResumed();
}
mMediaStreamController->Resume();
mCallback->NotifySuspendedStatusChanged();
}
nsresult RtspMediaResource::Open(nsIStreamListener **aStreamListener)
{
return NS_OK;
}
nsresult RtspMediaResource::Close()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
mMediaStreamController->Stop();
// Since mCallback is not an nsCOMPtr in BaseMediaResource, we have to
// explicitly set it as null pointer in order to prevent misuse from this
// object (RtspMediaResource).
if (mCallback) {
mCallback = nullptr;
}
return NS_OK;
}
already_AddRefed<nsIPrincipal> RtspMediaResource::GetCurrentPrincipal()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
nsCOMPtr<nsIPrincipal> principal;
nsIScriptSecurityManager* secMan = nsContentUtils::GetSecurityManager();
if (!secMan || !mChannel)
return nullptr;
secMan->GetChannelResultPrincipal(mChannel, getter_AddRefs(principal));
return principal.forget();
}
nsresult RtspMediaResource::SeekTime(int64_t aOffset)
{
NS_ASSERTION(!NS_IsMainThread(), "Don't call on main thread");
RTSPMLOG("Seek requested for aOffset [%lld] for decoder [%p]",
aOffset, mCallback.get());
// Clear buffer and raise the frametype flag.
for(uint32_t i = 0 ; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->ResetWithFrameType(MEDIASTREAM_FRAMETYPE_DISCONTINUITY);
}
return mMediaStreamController->Seek(aOffset);
}
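SeekTime resets every track buffer with MEDIASTREAM_FRAMETYPE_DISCONTINUITY, so WriteBuffer drops stale frames until the server echoes a DISCONTINUITY frame (see the frame-type checks at the top of WriteBuffer). A standalone sketch of that gating follows; the flag value and type names are hypothetical, not the real nsIStreamingProtocolController constants.

// Standalone sketch of the "drop until discontinuity" gating used around a
// seek. Illustrative only: the flag value below is hypothetical.
#include <cstdint>
#include <iostream>

constexpr uint32_t kFrameTypeDiscontinuity = 1u << 0; // hypothetical bit value

struct TrackGate {
  uint32_t mFrameType = 0;

  // Mirrors ResetWithFrameType(MEDIASTREAM_FRAMETYPE_DISCONTINUITY) in SeekTime.
  void StartSeek() { mFrameType |= kFrameTypeDiscontinuity; }

  // Returns true if an incoming frame should be stored, false if dropped,
  // following the frame-type checks at the top of WriteBuffer.
  bool Accept(uint32_t aFrameType) {
    if (aFrameType & kFrameTypeDiscontinuity) {
      mFrameType &= ~kFrameTypeDiscontinuity; // the server finished the seek
      return false;                           // the marker frame carries no data
    }
    return (mFrameType & kFrameTypeDiscontinuity) == 0;
  }
};

int main() {
  TrackGate gate;
  gate.StartSeek();
  std::cout << gate.Accept(0) << "\n";                       // 0: stale frame dropped
  std::cout << gate.Accept(kFrameTypeDiscontinuity) << "\n"; // 0: marker clears the flag
  std::cout << gate.Accept(0) << "\n";                       // 1: frames flow again
}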
void
RtspMediaResource::EnablePlayoutDelay()
{
for (uint32_t i = 0; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->LockStartPlayoutDelay();
}
}
void
RtspMediaResource::DisablePlayoutDelay()
{
for (uint32_t i = 0; i < mTrackBuffer.Length(); ++i) {
mTrackBuffer[i]->LockStopPlayoutDelay();
}
}
} // namespace mozilla

View File

@ -1,252 +0,0 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspMediaResource_h_)
#define RtspMediaResource_h_
#include "MediaResource.h"
#include "mozilla/Monitor.h"
#include "nsAutoPtr.h"
#include "nsITimer.h"
#include "VideoUtils.h"
namespace mozilla {
class RtspTrackBuffer;
/* RtspMediaResource
* RtspMediaResource provides an interface to deliver and control RTSP media
* data to RtspDecoder.
*
* RTSP Flow Start vs HTTP Flow Start:
* For HTTP (and files stored on disk), once the channel is created and response
* data is available, HTMLMediaElement::MediaLoadListener::OnStartRequest is
* called. (Note, this is an asynchronous call following channel->AsyncOpen).
* The decoder and MediaResource are set up to talk to each other:
* InitializeDecoderForChannel and FinishDecoderSetup.
* RtspMediaResource is different from this, in that FinishDecoderSetup is
* postponed until after the initial connection with the server is made.
* RtspController, owned by RtspMediaResource, provides the interface to setup
* the connection, and calls RtspMediaResource::Listener::OnConnected
* (from nsIStreamingProtocolListener). FinishDecoderSetup is then called to
* connect RtspMediaResource with RtspDecoder and allow HTMLMediaElement to
* request playback etc.
*
* Playback:
* When the user presses play/pause, HTMLMediaElement::Play/::Pause is called,
* subsequently making calls to the decoder state machine. Upon these state
* changes, the decoder is told to start reading and decoding data. This causes
* the nsIStreamingProtocolController object to send play/pause commands to the
* server.
* Data is then delivered to the host and eventually written to the
* RtspTrackBuffer objects. Note that RtspMediaResource does not know about the
* play or pause state. It only knows about the data written into its buffers.
*
* Data Structures and Flow:
* Unlike HTTP, RTSP provides separate streams for audio and video.
* As such, it creates two RtspTrackBuffer objects for the audio and video data.
* Data is read using the function ReadFrameFromTrack. These buffer objects are
* ring buffers, implying that data from the network may be discarded if the
* decoder cannot read at a high enough rate.
*
* Data is delivered via RtspMediaResource::Listener::OnMediaDataAvailable.
* This Listener implements nsIStreamingProtocolListener, and writes the data to
* the appropriate RtspTrackBuffer. The decoder then reads the data by calling
* RtspMediaResource::ReadFrameFromTrack. Note that the decoder and decode
* thread will be blocked until data is available in one of the two buffers.
*
* Seeking:
* Since the frame data received after seek is not continuous with existing
* frames in RtspTrackBuffer, the buffer must be cleared. If we don't clear the
* old frame data in RtspTrackBuffer, the decoder's behavior will be
* unpredictable. So we add |mFrameType| in RtspTrackBuffer to do this:
* When we are seeking, the mFrameType flag is set, and RtspTrackBuffer will
* drop the incoming data until the RTSP server completes the seek operation.
* Note: seeking for RTSP is carried out based on sending the seek time to the
* server, unlike HTTP in which the seek time is converted to a byte offset.
* Thus, RtspMediaResource has a SeekTime function which should be called
* instead of Seek.
* */
class RtspMediaResource : public BaseMediaResource
{
public:
RtspMediaResource(MediaResourceCallback* aCallback, nsIChannel* aChannel, nsIURI* aURI,
const nsACString& aContentType);
virtual ~RtspMediaResource();
// The following methods can be called on any thread.
// Get the RtspMediaResource pointer if this MediaResource is a
// RtspMediaResource. For calling Rtsp specific functions.
RtspMediaResource* GetRtspPointer() override final {
return this;
}
// Returns the nsIStreamingProtocolController in the RtspMediaResource.
// RtspExtractor: requests it to get the MIME type for creating the decoder.
// RtspOmxDecoder: requests it to send play/pause commands to the RTSP server.
// The lifetime of mMediaStreamController is controlled by RtspMediaResource
// because RtspExtractor and RtspOmxDecoder don't hold a reference to it.
nsIStreamingProtocolController* GetMediaStreamController() {
return mMediaStreamController;
}
// Even if it is a live stream, as long as it provides valid timestamps,
// we tell the state machine it's not a live stream.
bool IsRealTime() override {
return !mHasTimestamp;
}
// Called by RtspOmxReader; dispatches a runnable to notify mDecoder.
// Non-main thread only.
void SetSuspend(bool aIsSuspend);
// The following methods can be called on any thread except main thread.
// Read data from track.
// Parameters:
// aToBuffer, aToBufferSize: buffer pointer and buffer size.
// aReadCount: output actual read bytes.
// aFrameTime: output frame time stamp.
// aFrameSize: actual data size in track.
nsresult ReadFrameFromTrack(uint8_t* aBuffer, uint32_t aBufferSize,
uint32_t aTrackIdx, uint32_t& aBytes,
uint64_t& aTime, uint32_t& aFrameSize);
// Seek to the given time offset
nsresult SeekTime(int64_t aOffset);
// The idea of playout delay is to hold frames in the playout buffer
// (RtspTrackBuffer) for a period of time in order to smooth timing variations
// caused by the network.
void EnablePlayoutDelay();
void DisablePlayoutDelay();
// dummy
nsresult ReadAt(int64_t aOffset, char* aBuffer,
uint32_t aCount, uint32_t* aBytes) override{
return NS_ERROR_FAILURE;
}
// dummy
void SetReadMode(MediaCacheStream::ReadMode aMode) override {}
// dummy
void SetPlaybackRate(uint32_t aBytesPerSecond) override {}
// dummy
int64_t Tell() override { return 0; }
// Any thread
void Pin() override {}
void Unpin() override {}
bool IsSuspendedByCache() override { return mIsSuspend; }
bool IsSuspended() override { return false; }
bool IsTransportSeekable() override { return true; }
// dummy
double GetDownloadRate(bool* aIsReliable) override { *aIsReliable = false; return 0; }
int64_t GetLength() override {
if (mIsLiveStream) {
return -1;
}
return 0;
}
// dummy
int64_t GetNextCachedData(int64_t aOffset) override { return 0; }
// dummy
int64_t GetCachedDataEnd(int64_t aOffset) override { return 0; }
// dummy
bool IsDataCachedToEndOfResource(int64_t aOffset) override {
return false;
}
// dummy
nsresult GetCachedRanges(MediaByteRangeSet& aRanges) override {
return NS_ERROR_FAILURE;
}
// The following methods can be called on main thread only.
nsresult Open(nsIStreamListener** aStreamListener) override;
nsresult Close() override;
void Suspend(bool aCloseImmediately) override;
void Resume() override;
already_AddRefed<nsIPrincipal> GetCurrentPrincipal() override;
bool CanClone() override {
return false;
}
already_AddRefed<MediaResource> CloneData(MediaResourceCallback*) override {
return nullptr;
}
// dummy
nsresult ReadFromCache(char* aBuffer, int64_t aOffset,
uint32_t aCount) override {
return NS_ERROR_FAILURE;
}
size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override;
size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override {
return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}
// Listener implements nsIStreamingProtocolListener as
// mMediaStreamController's callback function.
// It holds RtspMediaResource reference to notify the connection status and
// data arrival. The Revoke function releases the reference when
// RtspMediaResource::OnDisconnected is called.
class Listener final : public nsIInterfaceRequestor,
public nsIStreamingProtocolListener
{
~Listener() {}
public:
explicit Listener(RtspMediaResource* aResource) : mResource(aResource) {}
NS_DECL_ISUPPORTS
NS_DECL_NSIINTERFACEREQUESTOR
NS_DECL_NSISTREAMINGPROTOCOLLISTENER
void Revoke();
private:
RefPtr<RtspMediaResource> mResource;
};
friend class Listener;
protected:
// Main thread access only.
// These are called on the main thread by Listener.
nsresult OnMediaDataAvailable(uint8_t aIndex, const nsACString& aData,
uint32_t aLength, uint32_t aOffset,
nsIStreamingProtocolMetaData* aMeta);
nsresult OnConnected(uint8_t aIndex, nsIStreamingProtocolMetaData* aMeta);
nsresult OnDisconnected(uint8_t aIndex, nsresult aReason);
RefPtr<Listener> mListener;
private:
// Notify mDecoder that the RTSP stream is suspended. Main thread only.
void NotifySuspend(bool aIsSuspend);
bool IsVideoEnabled();
bool IsVideo(uint8_t tracks, nsIStreamingProtocolMetaData *meta);
// These two members are created at |RtspMediaResource::OnConnected|.
nsCOMPtr<nsIStreamingProtocolController> mMediaStreamController;
nsTArray<nsAutoPtr<RtspTrackBuffer>> mTrackBuffer;
// A flag that indicates the |RtspMediaResource::OnConnected| has already been
// called.
bool mIsConnected;
// Whether it's a live stream.
bool mIsLiveStream;
// Whether it provides timestamps.
bool mHasTimestamp;
// Indicates whether the RTSP controller is suspended. Main thread only.
bool mIsSuspend;
};
} // namespace mozilla
#endif

View File

@ -48,7 +48,6 @@ public:
{
return nullptr;
}
void ResetConnectionState() override {}
void SetAudibleState(bool aAudible) override {}
void NotifyXPCOMShutdown() override {}
};

View File

@ -138,7 +138,6 @@ EXPORTS += [
'nsIDocumentActivity.h',
'PrincipalChangeObserver.h',
'QueueObject.h',
'RtspMediaResource.h',
'SeekJob.h',
'SeekTarget.h',
'SeekTask.h',
@ -249,7 +248,6 @@ UNIFIED_SOURCES += [
'MP3FrameParser.cpp',
'NextFrameSeekTask.cpp',
'QueueObject.cpp',
'RtspMediaResource.cpp',
'SeekJob.cpp',
'SeekTask.cpp',
'StreamTracks.cpp',

View File

@ -1,222 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RtspExtractor.h"
#include "mozilla/ReentrantMonitor.h"
#include "nsAutoPtr.h"
using namespace android;
#define FRAME_DEFAULT_SIZE 1024
namespace mozilla {
/* class RtspMediaSource: implements MediaSource for OMX.
* The MediaDecoderStateMachine triggers the decode thread to read an a/v frame.
* RtspOmxReader then calls the OMX decoder to decode the frame. Finally the
* code path runs into read() here, which reads un-decoded frame data from
* mResource and constructs a MediaBuffer for output to the OMX decoder.
* */
class RtspMediaSource final : public MediaSource {
public:
RtspMediaSource(RtspMediaResource* aRtspMediaResource,
ssize_t aTrackIdx,
uint32_t aFrameMaxSize,
const sp<MetaData>& aMeta)
: mRtspResource(aRtspMediaResource)
, mFormat(aMeta)
, mTrackIdx(aTrackIdx)
, mMonitor("RtspMediaSource.mMonitor")
, mIsStarted(false)
, mGroup(nullptr)
, mBuffer(nullptr)
, mFrameMaxSize(aFrameMaxSize) {}
virtual ~RtspMediaSource() {}
status_t start(MetaData* params = nullptr) override;
status_t stop() override;
sp<MetaData> getFormat() override {
ReentrantMonitorAutoEnter mon(mMonitor);
return mFormat;
}
status_t read(MediaBuffer** buffer,
const ReadOptions* options = nullptr) override ;
private:
RefPtr<RtspMediaResource> mRtspResource;
sp<MetaData> mFormat;
uint32_t mTrackIdx;
ReentrantMonitor mMonitor;
bool mIsStarted;
// mGroup owns the mBuffer. mFrameMaxSize is the mBuffer size.
// mBuffer is the input buffer for omx decoder.
nsAutoPtr<MediaBufferGroup> mGroup;
MediaBuffer* mBuffer;
uint32_t mFrameMaxSize;
};
status_t
RtspMediaSource::start(MetaData* params)
{
ReentrantMonitorAutoEnter mon(mMonitor);
if (!mIsStarted) {
// RtspMediaSource relinquishes ownership of the MediaBuffer |buf| to mGroup.
mGroup = new MediaBufferGroup();
MediaBuffer* buf = new MediaBuffer(mFrameMaxSize);
mGroup->add_buffer(buf);
mIsStarted = true;
}
return OK;
}
status_t
RtspMediaSource::stop()
{
ReentrantMonitorAutoEnter mon(mMonitor);
if (mIsStarted) {
if (mBuffer) {
mBuffer->release();
mBuffer = nullptr;
}
mGroup = nullptr;
mIsStarted = false;
}
return OK;
}
status_t
RtspMediaSource::read(MediaBuffer** out, const ReadOptions* options)
{
ReentrantMonitorAutoEnter mon(mMonitor);
NS_ENSURE_TRUE(mIsStarted, MEDIA_ERROR_BASE);
NS_ENSURE_TRUE(out, MEDIA_ERROR_BASE);
*out = nullptr;
// A video/audio track's initial frame size is FRAME_DEFAULT_SIZE.
// We need to reallocate mBuffer if it doesn't have enough space for the
// next ReadFrameFromTrack call (actualFrameSize > mFrameMaxSize).
status_t err;
uint32_t readCount;
uint32_t actualFrameSize;
uint64_t time;
nsresult rv;
while (1) {
err = mGroup->acquire_buffer(&mBuffer);
NS_ENSURE_TRUE(err == OK, err);
rv = mRtspResource->ReadFrameFromTrack((uint8_t *)mBuffer->data(),
mFrameMaxSize, mTrackIdx, readCount,
time, actualFrameSize);
if (NS_FAILED(rv)) {
// Release mGroup and mBuffer.
stop();
// Since RtspMediaSource is an implementation of Android media source,
// it's held by OMXCodec and isn't released yet. So we have to re-construct
// mGroup and mBuffer.
start();
NS_WARNING("ReadFrameFromTrack failed; releasing buffers and returning.");
return ERROR_END_OF_STREAM;
}
if (actualFrameSize > mFrameMaxSize) {
// release mGroup and mBuffer
stop();
// re-construct mGroup and mBuffer
mFrameMaxSize = actualFrameSize;
err = start();
NS_ENSURE_TRUE(err == OK, err);
} else {
// ReadFrameFromTrack success, break the while loop.
break;
}
}
mBuffer->set_range(0, readCount);
if (NS_SUCCEEDED(rv)) {
mBuffer->meta_data()->clear();
// fill the meta data
mBuffer->meta_data()->setInt64(kKeyTime, time);
*out = mBuffer;
mBuffer = nullptr;
return OK;
}
return ERROR_END_OF_STREAM;
}
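read() grows its buffer on demand: when ReadFrameFromTrack reports a frame larger than mFrameMaxSize, the source tears down and rebuilds its buffer group at the larger size and retries. A minimal sketch of that grow-and-retry idea, assuming FRAME_DEFAULT_SIZE = 1024 as above; GetNextFrameSize is a hypothetical stand-in for the size reported back by ReadFrameFromTrack.

// Sketch of the grow-on-demand retry in RtspMediaSource::read(): if the next
// frame is larger than the current buffer, enlarge the buffer and try again.
// Illustrative only; GetNextFrameSize() is a hypothetical stand-in.
#include <cstdint>
#include <iostream>
#include <vector>

uint32_t GetNextFrameSize() { return 4096; } // pretend the next frame is 4 KiB

int main() {
  std::vector<uint8_t> buffer(1024); // FRAME_DEFAULT_SIZE in the removed code
  uint32_t frameSize = GetNextFrameSize();
  if (frameSize > buffer.size()) {
    // Mirrors "stop(); mFrameMaxSize = actualFrameSize; start();" above.
    buffer.resize(frameSize);
  }
  std::cout << "buffer grown to " << buffer.size() << " bytes\n";
}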
size_t
RtspExtractor::countTracks()
{
uint8_t tracks = 0;
if (mController) {
mController->GetTotalTracks(&tracks);
}
return size_t(tracks);
}
sp<MediaSource>
RtspExtractor::getTrack(size_t index)
{
NS_ENSURE_TRUE(index < countTracks(), nullptr);
sp<MetaData> meta = getTrackMetaData(index);
sp<MediaSource> source = new RtspMediaSource(mRtspResource,
index,
FRAME_DEFAULT_SIZE,
meta);
return source;
}
sp<MetaData>
RtspExtractor::getTrackMetaData(size_t index, uint32_t flag)
{
NS_ENSURE_TRUE(index < countTracks(), nullptr);
sp<MetaData> meta = new MetaData();
nsCOMPtr<nsIStreamingProtocolMetaData> rtspMetadata;
mController->GetTrackMetaData(index, getter_AddRefs(rtspMetadata));
if (rtspMetadata) {
// Convert rtspMetadata into meta.
// The getter function of nsIStreamingProtocolMetaData will initialize the
// metadata values to 0 before setting them.
nsCString mime;
rtspMetadata->GetMimeType(mime);
meta->setCString(kKeyMIMEType, mime.get());
uint32_t temp32;
rtspMetadata->GetWidth(&temp32);
meta->setInt32(kKeyWidth, temp32);
rtspMetadata->GetHeight(&temp32);
meta->setInt32(kKeyHeight, temp32);
rtspMetadata->GetSampleRate(&temp32);
meta->setInt32(kKeySampleRate, temp32);
rtspMetadata->GetChannelCount(&temp32);
meta->setInt32(kKeyChannelCount, temp32);
uint64_t temp64;
rtspMetadata->GetDuration(&temp64);
meta->setInt64(kKeyDuration, temp64);
nsCString tempCString;
rtspMetadata->GetEsdsData(tempCString);
if (tempCString.Length()) {
meta->setData(kKeyESDS, 0, tempCString.get(), tempCString.Length());
}
rtspMetadata->GetAvccData(tempCString);
if (tempCString.Length()) {
meta->setData(kKeyAVCC, 0, tempCString.get(), tempCString.Length());
}
}
return meta;
}
uint32_t
RtspExtractor::flags() const
{
if (mRtspResource->IsRealTime()) {
return 0;
} else {
return MediaExtractor::CAN_SEEK;
}
}
} // namespace mozilla

View File

@ -1,53 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspExtractor_h_)
#define RtspExtractor_h_
#include "RtspMediaResource.h"
#include <stagefright/MediaBufferGroup.h>
#include <stagefright/MediaExtractor.h>
#include <stagefright/MediaSource.h>
#include <stagefright/MetaData.h>
namespace mozilla {
// RtspExtractor is a custom extractor for RTSP streams, whereas the other
// XXXExtractors are made for container media content.
// The extractor is used by |OmxDecoder::Init|; it provides the essential
// information for creating an OMXCodec instance.
// For example, |getTrackMetaData| returns metadata that includes the
// codec type.
class RtspExtractor: public android::MediaExtractor
{
public:
size_t countTracks() final override;
android::sp<android::MediaSource> getTrack(size_t index)
final override;
android::sp<android::MetaData> getTrackMetaData(
size_t index, uint32_t flag = 0) final override;
uint32_t flags() const final override;
RtspExtractor(RtspMediaResource* aResource)
: mRtspResource(aResource) {
MOZ_ASSERT(aResource);
mController = mRtspResource->GetMediaStreamController();
MOZ_ASSERT(mController);
}
virtual ~RtspExtractor() override {}
private:
// mRtspResource is a pointer to RtspMediaResource. When |getTrack| is called
// we use mRtspResource to construct a RtspMediaSource.
RtspMediaResource* mRtspResource;
// Through the mController in mRtspResource, we can get the essential
// information for the extractor.
RefPtr<nsIStreamingProtocolController> mController;
};
} // namespace mozilla
#endif

View File

@ -1,49 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RtspMediaResource.h"
#include "RtspOmxDecoder.h"
#include "RtspOmxReader.h"
#include "MediaDecoderStateMachine.h"
namespace mozilla {
MediaDecoder* RtspOmxDecoder::Clone(MediaDecoderOwner* aOwner)
{
return new RtspOmxDecoder(aOwner);
}
MediaDecoderStateMachine*
RtspOmxDecoder::CreateStateMachine()
{
return new MediaDecoderStateMachine(this,
new RtspOmxReader(this),
mResource->IsRealTime());
}
void
RtspOmxDecoder::ChangeState(PlayState aState)
{
MOZ_ASSERT(NS_IsMainThread());
MediaDecoder::ChangeState(aState);
// Notify RTSP controller if the play state is ended.
// This is necessary for the RTSP controller to transition its own state.
if (mPlayState == PLAY_STATE_ENDED) {
RefPtr<RtspMediaResource> resource = mResource->GetRtspPointer();
if (resource) {
nsIStreamingProtocolController* controller =
resource->GetMediaStreamController();
if (controller) {
controller->PlaybackEnded();
}
}
}
}
} // namespace mozilla

View File

@ -1,39 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspOmxDecoder_h_)
#define RtspOmxDecoder_h_
#include "base/basictypes.h"
#include "MediaDecoder.h"
namespace mozilla {
/* RtspOmxDecoder is a subclass of MediaDecoder, but not of MediaOmxDecoder,
* because MediaOmxDecoder doesn't extend any functionality of MediaDecoder.
* It creates the RtspOmxReader for the MediaDecoderStateMachine and overrides
* ChangeState to send RTSP play/pause commands to the RTSP server.
* */
class RtspOmxDecoder : public MediaDecoder
{
public:
explicit RtspOmxDecoder(MediaDecoderOwner* aOwner) : MediaDecoder(aOwner) {
MOZ_COUNT_CTOR(RtspOmxDecoder);
}
~RtspOmxDecoder() {
MOZ_COUNT_DTOR(RtspOmxDecoder);
}
MediaDecoder* Clone(MediaDecoderOwner* aOwner) override final;
MediaDecoderStateMachine* CreateStateMachine() override final;
void ChangeState(PlayState aState) override final;
};
} // namespace mozilla
#endif

View File

@ -1,112 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RtspOmxReader.h"
#include "AbstractMediaDecoder.h"
#include "MediaDecoderStateMachine.h"
#include "OmxDecoder.h"
#include "RtspExtractor.h"
#include "RtspMediaResource.h"
#include "RtspOmxDecoder.h"
using namespace android;
namespace mozilla {
nsresult RtspOmxReader::InitOmxDecoder()
{
if (!mOmxDecoder.get()) {
NS_ASSERTION(mDecoder, "RtspOmxReader mDecoder is null.");
NS_ASSERTION(mDecoder->GetResource(),
"RtspOmxReader mDecoder->GetResource() is null.");
mExtractor = new RtspExtractor(mRtspResource);
mOmxDecoder = new OmxDecoder(mDecoder, OwnerThread());
if (!mOmxDecoder->Init(mExtractor)) {
return NS_ERROR_FAILURE;
}
}
return NS_OK;
}
RefPtr<MediaDecoderReader::SeekPromise>
RtspOmxReader::Seek(SeekTarget aTarget, int64_t aEndTime)
{
// Seeking in RTSP is time-based, so we call the SeekTime function in
// RtspMediaResource. SeekTime ultimately sends a seek command to the RTSP
// server over the network and also clears the buffered data in
// RtspMediaResource.
if (mRtspResource) {
mRtspResource->SeekTime(aTarget.GetTime().ToMicroseconds());
mRtspResource->EnablePlayoutDelay();
}
// Call |MediaOmxReader::Seek| to notify the OMX decoder we are performing a
// seek operation. The function will clear the |mVideoQueue| and |mAudioQueue|
// that store the decoded data and also call the |DecodeToTarget| to pass
// the seek time to OMX a/v decoders.
mEnsureActiveFromSeek = true;
return MediaOmxReader::Seek(aTarget, aEndTime);
}
void RtspOmxReader::SetIdle() {
// Call parent class to set OMXCodec idle.
MediaOmxReader::SetIdle();
// Need to pause RTSP streaming OMXCodec decoding.
if (mRtspResource) {
nsIStreamingProtocolController* controller =
mRtspResource->GetMediaStreamController();
if (controller) {
controller->Pause();
}
mRtspResource->SetSuspend(true);
}
}
void RtspOmxReader::EnsureActive() {
// Need to start RTSP streaming OMXCodec decoding.
if (mRtspResource) {
nsIStreamingProtocolController* controller =
mRtspResource->GetMediaStreamController();
// We do not have to call Play if the EnsureActive request is from Seek
// operation because RTSP connection must already be established before
// performing Seek.
if (controller && !mEnsureActiveFromSeek) {
controller->Play();
}
mEnsureActiveFromSeek = false;
mRtspResource->SetSuspend(false);
}
// Call parent class to set OMXCodec active.
MediaOmxReader::EnsureActive();
}
RefPtr<MediaDecoderReader::MetadataPromise>
RtspOmxReader::AsyncReadMetadata()
{
// Send a PLAY command to the RTSP server before reading metadata, because we
// might need some decoded samples to ensure we have the configuration.
mRtspResource->DisablePlayoutDelay();
RefPtr<MediaDecoderReader::MetadataPromise> p =
MediaOmxReader::AsyncReadMetadata();
// Send a PAUSE to the RTSP server because the underlying media resource is
// not ready.
SetIdle();
return p;
}
void RtspOmxReader::HandleResourceAllocated()
{
MediaOmxReader::HandleResourceAllocated();
mRtspResource->EnablePlayoutDelay();
}
} // namespace mozilla

View File

@ -1,84 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(RtspOmxReader_h_)
#define RtspOmxReader_h_
#include "MediaResource.h"
#include "MediaDecoderReader.h"
#include "MediaOmxReader.h"
namespace mozilla {
namespace dom {
class TimeRanges;
}
class AbstractMediaDecoder;
class RtspMediaResource;
/* RtspOmxReader is a subclass of MediaOmxReader.
* The major reason RtspOmxReader inherits from MediaOmxReader is so that we
* can reuse the same video/audio decoding logic.
*/
class RtspOmxReader : public MediaOmxReader
{
protected:
// Provide an RTSP extractor.
nsresult InitOmxDecoder() final override;
void EnsureActive() override;
public:
RtspOmxReader(AbstractMediaDecoder* aDecoder)
: MediaOmxReader(aDecoder)
, mEnsureActiveFromSeek(false)
{
MOZ_COUNT_CTOR(RtspOmxReader);
NS_ASSERTION(mDecoder, "RtspOmxReader mDecoder is null.");
NS_ASSERTION(mDecoder->GetResource(),
"RtspOmxReader mDecoder->GetResource() is null.");
mRtspResource = mDecoder->GetResource()->GetRtspPointer();
MOZ_ASSERT(mRtspResource);
}
virtual ~RtspOmxReader() {
MOZ_COUNT_DTOR(RtspOmxReader);
}
// Implement a time-based seek instead of a byte-based one.
RefPtr<SeekPromise> Seek(SeekTarget aTarget, int64_t aEndTime) final override;
// Override GetBuffered() to do nothing, for the following reasons:
// 1. The RTSP stream has separate a/v tracks, and the buffered data of the
// two tracks is not consistent in timestamps.
// For example: audio buffer: 1~2s, video buffer: 1.5~2.5s.
// 2. Since RTSP is realtime streaming, the buffer we keep for
// RtspMediaResource is quite small, so the time ranges we would return are
// not useful to the MediaDecoderStateMachine. By contrast,
// ChannelMediaResource has a "cache" that can store the whole stream, so its
// |GetBuffered| can report useful time ranges.
media::TimeIntervals GetBuffered() final override {
return media::TimeIntervals::Invalid();
}
void SetIdle() override;
RefPtr<MediaDecoderReader::MetadataPromise> AsyncReadMetadata() override;
void HandleResourceAllocated() override;
private:
// A pointer to RtspMediaResource for calling RTSP-specific functions.
// The lifetime of mRtspResource is controlled by MediaDecoder, which holds
// the MediaDecoderStateMachine and RtspMediaResource, while the
// MediaDecoderStateMachine holds this RtspOmxReader.
RtspMediaResource* mRtspResource;
bool mEnsureActiveFromSeek;
};
} // namespace mozilla
#endif