2010-04-02 03:03:07 +00:00
|
|
|
/* vim:set ts=2 sw=2 sts=2 et cindent: */
|
2012-05-21 11:12:37 +00:00
|
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
2010-04-02 03:03:07 +00:00
|
|
|
|
2013-05-03 00:39:19 +00:00
|
|
|
#ifdef XP_WIN
|
|
|
|
// Include Windows headers required for enabling high precision timers.
|
2013-05-06 09:33:00 +00:00
|
|
|
#include "windows.h"
|
|
|
|
#include "mmsystem.h"
|
2013-05-03 00:39:19 +00:00
|
|
|
#endif
|
|
|
|
|
2012-12-14 23:58:45 +00:00
|
|
|
#include "mozilla/DebugOnly.h"
|
2013-07-30 14:25:31 +00:00
|
|
|
#include <stdint.h>
|
2012-12-14 23:58:45 +00:00
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
#include "MediaDecoderStateMachine.h"
|
2013-12-20 00:52:06 +00:00
|
|
|
#include "AudioSink.h"
|
2010-04-02 03:03:07 +00:00
|
|
|
#include "nsTArray.h"
|
2012-11-14 19:46:40 +00:00
|
|
|
#include "MediaDecoder.h"
|
|
|
|
#include "MediaDecoderReader.h"
|
2010-04-02 03:03:07 +00:00
|
|
|
#include "mozilla/mozalloc.h"
|
2010-04-27 08:53:44 +00:00
|
|
|
#include "VideoUtils.h"
|
2013-03-02 19:14:44 +00:00
|
|
|
#include "mozilla/dom/TimeRanges.h"
|
2012-01-19 18:30:29 +00:00
|
|
|
#include "nsDeque.h"
|
2012-04-30 03:12:42 +00:00
|
|
|
#include "AudioSegment.h"
|
|
|
|
#include "VideoSegment.h"
|
2012-08-21 04:06:46 +00:00
|
|
|
#include "ImageContainer.h"
|
2013-09-05 20:25:17 +00:00
|
|
|
#include "nsComponentManagerUtils.h"
|
|
|
|
#include "nsITimer.h"
|
2013-12-18 03:59:11 +00:00
|
|
|
#include "nsContentUtils.h"
|
|
|
|
#include "MediaShutdownManager.h"
|
2012-01-11 08:23:07 +00:00
|
|
|
|
2012-11-30 13:17:54 +00:00
|
|
|
#include "prenv.h"
|
2011-09-27 00:25:41 +00:00
|
|
|
#include "mozilla/Preferences.h"
|
2013-01-15 12:22:03 +00:00
|
|
|
#include <algorithm>
|
2010-04-02 03:03:07 +00:00
|
|
|
|
2012-11-14 19:45:33 +00:00
|
|
|
namespace mozilla {
|
|
|
|
|
2012-12-04 10:59:36 +00:00
|
|
|
using namespace mozilla::layers;
|
2012-11-16 03:25:26 +00:00
|
|
|
using namespace mozilla::dom;
|
2010-04-02 03:03:07 +00:00
|
|
|
|
|
|
|
#ifdef PR_LOGGING
|
2012-11-14 19:46:40 +00:00
|
|
|
extern PRLogModuleInfo* gMediaDecoderLog;
|
2013-11-21 03:02:42 +00:00
|
|
|
#define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
|
2010-04-02 03:03:07 +00:00
|
|
|
#else
|
2013-11-21 03:02:42 +00:00
|
|
|
#define DECODER_LOG(type, msg)
|
2010-04-02 03:03:07 +00:00
|
|
|
#endif
|
|
|
|
|
2012-06-06 23:43:25 +00:00
|
|
|
// Wait this number of seconds when buffering, then leave and play
// as best as we can if the required amount of data hasn't been
// retrieved.
static const uint32_t BUFFERING_WAIT_S = 30;

// If audio queue has less than this many usecs of decoded audio, we won't risk
// trying to decode the video, we'll skip decoding video up to the next
// keyframe. We may increase this value for an individual decoder if we
// encounter video frames which take a long time to decode.
static const uint32_t LOW_AUDIO_USECS = 300000;

// If more than this many usecs of decoded audio is queued, we'll hold off
// decoding more audio. If we increase the low audio threshold (see
// LOW_AUDIO_USECS above) we'll also increase this value to ensure it's not
// less than the low audio threshold.
const int64_t AMPLE_AUDIO_USECS = 1000000;

// If we have fewer than LOW_VIDEO_FRAMES decoded frames, and
// we're not "pumping video", we'll skip the video up to the next keyframe
// which is at or after the current playback position.
static const uint32_t LOW_VIDEO_FRAMES = 1;

// Arbitrary "frame duration" when playing only audio.
static const int AUDIO_DURATION_USECS = 40000;

// If we increase our "low audio threshold" (see LOW_AUDIO_USECS above), we
// use this as a factor in all our calculations. Increasing this will cause
// us to be more likely to increase our low audio threshold, and to
// increase it by more.
static const int THRESHOLD_FACTOR = 2;

// If we have less than this much undecoded data available, we'll consider
// ourselves to be running low on undecoded data. We determine how much
// undecoded data we have remaining using the reader's GetBuffered()
// implementation.
static const int64_t LOW_DATA_THRESHOLD_USECS = 5000000;

// LOW_DATA_THRESHOLD_USECS needs to be greater than AMPLE_AUDIO_USECS, otherwise
// the skip-to-keyframe logic can activate when we're running low on data.
static_assert(LOW_DATA_THRESHOLD_USECS > AMPLE_AUDIO_USECS,
              "LOW_DATA_THRESHOLD_USECS is too small");

// Amount of excess usecs of data to add in to the "should we buffer" calculation.
static const uint32_t EXHAUSTED_DATA_MARGIN_USECS = 60000;

// If we enter buffering within QUICK_BUFFER_THRESHOLD_USECS seconds of starting
// decoding, we'll enter "quick buffering" mode, which exits a lot sooner than
// normal buffering mode. This exists so that if the decode-ahead exhausts the
// downloaded data while decode/playback is just starting up (for example
// after a seek while the media is still playing, or when playing a media
// as soon as it's load started), we won't necessarily stop for 30s and wait
// for buffering. We may actually be able to playback in this case, so exit
// buffering early and try to play. If it turns out we can't play, we'll fall
// back to buffering normally.
static const uint32_t QUICK_BUFFER_THRESHOLD_USECS = 2000000;

// If we're quick buffering, we'll remain in buffering mode while we have less than
// QUICK_BUFFERING_LOW_DATA_USECS of decoded data available.
static const uint32_t QUICK_BUFFERING_LOW_DATA_USECS = 1000000;

// If QUICK_BUFFERING_LOW_DATA_USECS is > AMPLE_AUDIO_USECS, we won't exit
// quick buffering in a timely fashion, as the decode pauses when it
// reaches AMPLE_AUDIO_USECS decoded data, and thus we'll never reach
// QUICK_BUFFERING_LOW_DATA_USECS.
static_assert(QUICK_BUFFERING_LOW_DATA_USECS <= AMPLE_AUDIO_USECS,
              "QUICK_BUFFERING_LOW_DATA_USECS is too large");

// The amount of instability we tolerate in calls to
// MediaDecoderStateMachine::UpdateEstimatedDuration(); changes of duration
// less than this are ignored, as they're assumed to be the result of
// instability in the duration estimation.
static const int64_t ESTIMATED_DURATION_FUZZ_FACTOR_USECS = USECS_PER_S / 2;
|
|
|
|
|
2012-08-22 15:56:38 +00:00
|
|
|
// Converts a duration expressed in microseconds to a TimeDuration.
static TimeDuration UsecsToDuration(int64_t aUsecs) {
  const double millis = static_cast<double>(aUsecs) / USECS_PER_MS;
  return TimeDuration::FromMilliseconds(millis);
}
|
|
|
|
|
2012-08-22 15:56:38 +00:00
|
|
|
// Converts a TimeDuration to an integral count of microseconds.
static int64_t DurationToUsecs(TimeDuration aDuration) {
  const double seconds = aDuration.ToSeconds();
  return static_cast<int64_t>(seconds * USECS_PER_S);
}
|
|
|
|
|
2011-11-08 01:38:17 +00:00
|
|
|
// Owns the global state machine thread and counts of
// state machine and decoder threads. There should
// only be one instance of this class (see sInstance / Instance()).
// Thread-safety: counts guarded by mMonitor unless noted otherwise.
class StateMachineTracker
{
private:
  StateMachineTracker() :
    mMonitor("media.statemachinetracker"),
    mStateMachineCount(0),
    mDecodeThreadCount(0),
    mStateMachineThread(nullptr)
  {
    MOZ_COUNT_CTOR(StateMachineTracker);
    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  }

  ~StateMachineTracker()
  {
    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

    MOZ_COUNT_DTOR(StateMachineTracker);
  }

public:
  // Access singleton instance. This is initially called on the main
  // thread in the MediaDecoderStateMachine constructor resulting
  // in the global object being created lazily. Non-main thread
  // access always occurs after this and uses the monitor to
  // safely access the decode thread counts.
  static StateMachineTracker& Instance();

  // Instantiate the global state machine thread if required.
  // Call on main thread only.
  void EnsureGlobalStateMachine();

  // Destroy global state machine thread if required.
  // Call on main thread only.
  void CleanupGlobalStateMachine();

  // Return the global state machine thread. Call from any thread.
  nsIThread* GetGlobalStateMachineThread()
  {
    ReentrantMonitorAutoEnter mon(mMonitor);
    NS_ASSERTION(mStateMachineThread, "Should have non-null state machine thread!");
    return mStateMachineThread->GetThread();
  }

  // Requests that a decode thread be created for aStateMachine. The thread
  // may be created immediately, or after some delay, once a thread becomes
  // available. The request can be cancelled using CancelCreateDecodeThread().
  // It's the callers responsibility to not call this more than once for any
  // given state machine.
  nsresult RequestCreateDecodeThread(MediaDecoderStateMachine* aStateMachine);

  // Cancels a request made by RequestCreateDecodeThread to create a decode
  // thread for aStateMachine.
  nsresult CancelCreateDecodeThread(MediaDecoderStateMachine* aStateMachine);

  // Maximum number of active decode threads allowed. When more
  // than this number are active the thread creation will fail.
  static const uint32_t MAX_DECODE_THREADS = 25;

  // Returns the number of active decode threads.
  // Call on any thread. Holds the internal monitor so don't
  // call with any other monitor held to avoid deadlock.
  uint32_t GetDecodeThreadCount();

  // Keep track of the fact that a decode thread was destroyed.
  // Also hands threads to any queued waiters while capacity remains.
  // Call on any thread. Holds the internal monitor so don't
  // call with any other monitor held to avoid deadlock.
  void NoteDecodeThreadDestroyed();

#ifdef DEBUG
  // Returns true if aStateMachine has a pending request for a
  // decode thread.
  bool IsQueued(MediaDecoderStateMachine* aStateMachine);
#endif

private:
  // Holds global instance of StateMachineTracker.
  // Writable on main thread only.
  static StateMachineTracker* sInstance;

  // Reentrant monitor that must be obtained to access
  // the decode thread count member and methods.
  ReentrantMonitor mMonitor;

  // Number of instances of MediaDecoderStateMachine
  // that are currently instantiated. Access on the
  // main thread only.
  uint32_t mStateMachineCount;

  // Number of instances of decoder threads that are
  // currently instantiated. Access only with the
  // mMonitor lock held. Can be used from any thread.
  uint32_t mDecodeThreadCount;

  // Global state machine thread. Write on the main thread
  // only, read from the decoder threads. Synchronized via
  // the mMonitor.
  nsRefPtr<StateMachineThread> mStateMachineThread;

  // Queue of state machines waiting for decode threads. Entries at the front
  // get their threads first.
  nsDeque mPending;
};
|
|
|
|
|
2012-07-30 14:20:58 +00:00
|
|
|
// Singleton storage for StateMachineTracker; written on the main thread only.
StateMachineTracker* StateMachineTracker::sInstance = nullptr;
|
2011-11-08 01:38:17 +00:00
|
|
|
|
|
|
|
// Returns the lazily-created singleton. The first call (which creates the
// instance) must happen on the main thread; subsequent calls may not.
StateMachineTracker& StateMachineTracker::Instance()
{
  if (sInstance) {
    return *sInstance;
  }
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  sInstance = new StateMachineTracker();
  return *sInstance;
}
|
|
|
|
|
2013-01-24 12:38:32 +00:00
|
|
|
// Bumps the state machine refcount, spinning up the shared state machine
// thread when the first state machine is created. Main thread only.
void StateMachineTracker::EnsureGlobalStateMachine()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mMonitor);
  if (mStateMachineCount == 0) {
    // First state machine alive: create the shared thread.
    NS_ASSERTION(!mStateMachineThread, "Should have null state machine thread!");
    mStateMachineThread = new StateMachineThread();
    DebugOnly<nsresult> rv = mStateMachineThread->Init();
    NS_ABORT_IF_FALSE(NS_SUCCEEDED(rv), "Can't create media state machine thread");
  }
  mStateMachineCount++;
}
|
2012-01-19 18:30:29 +00:00
|
|
|
|
|
|
|
#ifdef DEBUG
// Debug-only: reports whether aStateMachine is still waiting in the
// pending-decode-thread queue.
bool StateMachineTracker::IsQueued(MediaDecoderStateMachine* aStateMachine)
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  const int32_t count = mPending.GetSize();
  for (int32_t idx = 0; idx < count; ++idx) {
    MediaDecoderStateMachine* queued =
      static_cast<MediaDecoderStateMachine*>(mPending.ObjectAt(idx));
    if (queued == aStateMachine) {
      return true;
    }
  }
  return false;
}
#endif
|
|
|
|
|
2012-11-22 10:38:28 +00:00
|
|
|
// Drops one state machine's reference to the shared resources. When the last
// reference goes away this shuts down the shared state machine thread and
// destroys the singleton (including |this|). Main thread only.
void StateMachineTracker::CleanupGlobalStateMachine()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  NS_ABORT_IF_FALSE(mStateMachineCount > 0,
    "State machine ref count must be > 0");
  mStateMachineCount--;
  if (mStateMachineCount == 0) {
    DECODER_LOG(PR_LOG_DEBUG, ("Destroying media state machine thread"));
    NS_ASSERTION(mPending.GetSize() == 0, "Shouldn't all requests be handled by now?");
    {
      // Hold the monitor while tearing down state that other threads read.
      ReentrantMonitorAutoEnter mon(mMonitor);
      mStateMachineThread->Shutdown();
      mStateMachineThread = nullptr;
      NS_ASSERTION(mDecodeThreadCount == 0, "Decode thread count must be zero.");
      // Clear the singleton pointer before self-deleting so no caller can
      // observe a dangling instance via Instance().
      sInstance = nullptr;
    }
    delete this;
  }
}
|
|
|
|
|
2012-01-18 22:56:54 +00:00
|
|
|
// Records that a decode thread exited, then hands decode threads to any
// state machines queued waiting for one, while capacity remains. Any thread.
void StateMachineTracker::NoteDecodeThreadDestroyed()
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  --mDecodeThreadCount;
  while (mDecodeThreadCount < MAX_DECODE_THREADS && mPending.GetSize() > 0) {
    MediaDecoderStateMachine* m =
      static_cast<MediaDecoderStateMachine*>(mPending.PopFront());
    nsresult rv;
    {
      // Drop our monitor while starting the thread, so we don't hold it
      // while taking the state machine's own monitor (deadlock avoidance).
      ReentrantMonitorAutoExit exitMon(mMonitor);
      rv = m->StartDecodeThread();
    }
    if (NS_SUCCEEDED(rv)) {
      ++mDecodeThreadCount;
    }
  }
}
|
|
|
|
|
2012-08-22 15:56:38 +00:00
|
|
|
uint32_t StateMachineTracker::GetDecodeThreadCount()
|
2012-01-18 20:15:57 +00:00
|
|
|
{
|
|
|
|
ReentrantMonitorAutoEnter mon(mMonitor);
|
2012-01-18 22:56:54 +00:00
|
|
|
return mDecodeThreadCount;
|
2012-01-18 20:15:57 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Removes aStateMachine's pending decode-thread request, if one is queued.
// Always succeeds; a no-op when nothing is queued for it.
nsresult StateMachineTracker::CancelCreateDecodeThread(MediaDecoderStateMachine* aStateMachine) {
  ReentrantMonitorAutoEnter mon(mMonitor);
  const int32_t count = mPending.GetSize();
  for (int32_t idx = 0; idx < count; ++idx) {
    if (mPending.ObjectAt(idx) == static_cast<void*>(aStateMachine)) {
      mPending.RemoveObjectAt(idx);
      break;
    }
  }
  NS_ASSERTION(!IsQueued(aStateMachine), "State machine should no longer have queued request.");
  return NS_OK;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Gives aStateMachine a decode thread immediately if capacity allows,
// otherwise queues it until NoteDecodeThreadDestroyed() frees a slot.
// Returns NS_OK whether the thread was started or the request was queued.
nsresult StateMachineTracker::RequestCreateDecodeThread(MediaDecoderStateMachine* aStateMachine)
{
  NS_ENSURE_STATE(aStateMachine);
  ReentrantMonitorAutoEnter mon(mMonitor);
  // NOTE(review): |mDecodeThreadCount + 1 >= MAX_DECODE_THREADS| starts
  // queueing one thread short of MAX_DECODE_THREADS (max live count is
  // MAX_DECODE_THREADS - 1 via this path) — confirm the off-by-one is
  // intentional.
  if (mPending.GetSize() > 0 || mDecodeThreadCount + 1 >= MAX_DECODE_THREADS) {
    // If there's already state machines in the queue, or we've exceeded the
    // limit, append the state machine to the queue of state machines waiting
    // for a decode thread. This ensures state machines already waiting get
    // their threads first.
    mPending.Push(aStateMachine);
    return NS_OK;
  }
  nsresult rv;
  {
    // Drop our monitor while starting the thread, so we don't hold it
    // while taking the state machine's own monitor (deadlock avoidance).
    ReentrantMonitorAutoExit exitMon(mMonitor);
    rv = aStateMachine->StartDecodeThread();
  }
  if (NS_SUCCEEDED(rv)) {
    ++mDecodeThreadCount;
  }
  NS_ASSERTION(mDecodeThreadCount <= MAX_DECODE_THREADS,
               "Should keep to thread limit!");
  return NS_OK;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Constructs the state machine for aDecoder, driven by aReader. aRealTime
// requests low-latency (camera-style) tuning; it only takes effect when the
// "media.realtime_decoder.enabled" pref is true. Main thread only.
MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
                                                   MediaDecoderReader* aReader,
                                                   bool aRealTime) :
  mDecoder(aDecoder),
  mState(DECODER_STATE_DECODING_METADATA),
  mResetPlayStartTime(false),
  mSyncPointInMediaStream(-1),
  mSyncPointInDecodedStream(-1),
  mPlayDuration(0),
  mStartTime(-1),
  mEndTime(-1),
  mSeekTime(0),
  mFragmentEndTime(-1),
  mReader(aReader),
  mCurrentFrameTime(0),
  mAudioStartTime(-1),
  mAudioEndTime(-1),
  mVideoFrameEndTime(-1),
  mVolume(1.0),
  mPlaybackRate(1.0),
  mPreservesPitch(true),
  mBasePosition(0),
  mAudioCaptured(false),
  mTransportSeekable(true),
  mMediaSeekable(true),
  mPositionChangeQueued(false),
  mAudioCompleted(false),
  mGotDurationFromMetaData(false),
  mStopDecodeThread(true),
  mDecodeThreadIdle(false),
  mStopAudioThread(true),
  mQuickBuffering(false),
  mIsRunning(false),
  mRunAgain(false),
  mDispatchedRunEvent(false),
  mDecodeThreadWaiting(false),
  mRealTime(aRealTime),
  mDidThrottleAudioDecoding(false),
  mDidThrottleVideoDecoding(false),
  mRequestedNewDecodeThread(false),
  mEventManager(new AudioAvailableEventManager(aDecoder)),
  mLastFrameStatus(MediaDecoderOwner::NEXT_FRAME_UNINITIALIZED)
{
  MOZ_COUNT_CTOR(MediaDecoderStateMachine);
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

  // Take a reference on the shared state machine thread; released in the
  // destructor via CleanupGlobalStateMachine().
  StateMachineTracker::Instance().EnsureGlobalStateMachine();

  // only enable realtime mode when "media.realtime_decoder.enabled" is true.
  if (Preferences::GetBool("media.realtime_decoder.enabled", false) == false)
    mRealTime = false;

  // Realtime playback disables buffering waits and low-data thresholds.
  mBufferingWait = mRealTime ? 0 : BUFFERING_WAIT_S;
  mLowDataThresholdUsecs = mRealTime ? 0 : LOW_DATA_THRESHOLD_USECS;

  // If we've got more than mAmpleVideoFrames decoded video frames waiting in
  // the video queue, we will not decode any more video frames until some have
  // been consumed by the play state machine thread.
#if defined(MOZ_OMX_DECODER) || defined(MOZ_MEDIA_PLUGINS)
  // On B2G and Android this is decided by a similar value which varies for
  // each OMX decoder |OMX_PARAM_PORTDEFINITIONTYPE::nBufferCountMin|. This
  // number must be less than the OMX equivalent or gecko will think it is
  // chronically starved of video frames. All decoders seen so far have a value
  // of at least 4.
  mAmpleVideoFrames = Preferences::GetUint("media.video-queue.default-size", 3);
#else
  mAmpleVideoFrames = Preferences::GetUint("media.video-queue.default-size", 10);
#endif
  // Clamp to a workable minimum regardless of what the pref says.
  if (mAmpleVideoFrames < 2) {
    mAmpleVideoFrames = 2;
  }
#ifdef XP_WIN
  // Ensure high precision timers are enabled on Windows, otherwise the state
  // machine thread isn't woken up at reliable intervals to set the next frame,
  // and we drop frames while painting. Note that multiple calls to this
  // function per-process is OK, provided each call is matched by a corresponding
  // timeEndPeriod() call.
  timeBeginPeriod(1);
#endif
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Destructor: asserts no outstanding decode-thread requests remain, cancels
// the state machine timer, and releases our reference to the shared state
// machine thread. Main thread only.
MediaDecoderStateMachine::~MediaDecoderStateMachine()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  MOZ_COUNT_DTOR(MediaDecoderStateMachine);
  NS_ASSERTION(!mPendingWakeDecoder.get(),
               "WakeDecoder should have been revoked already");
  NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
               "Should not have a pending request for a new decode thread");
  NS_ASSERTION(!mRequestedNewDecodeThread,
               "Should not have (or flagged) a pending request for a new decode thread");
  // Stop any pending timer callback before dropping our references.
  if (mTimer)
    mTimer->Cancel();
  mTimer = nullptr;
  mReader = nullptr;

  // Releases our reference to the shared thread; may destroy the tracker.
  StateMachineTracker::Instance().CleanupGlobalStateMachine();
#ifdef XP_WIN
  // Balance the timeBeginPeriod(1) made in the constructor.
  timeEndPeriod(1);
#endif
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::HasFutureAudio() const {
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-09-14 23:24:47 +00:00
|
|
|
NS_ASSERTION(HasAudio(), "Should only call HasFutureAudio() when we have audio");
|
|
|
|
// We've got audio ready to play if:
|
|
|
|
// 1. We've not completed playback of audio, and
|
|
|
|
// 2. we either have more than the threshold of decoded audio available, or
|
|
|
|
// we've completely decoded all audio (but not finished playing it yet
|
|
|
|
// as per 1).
|
|
|
|
return !mAudioCompleted &&
|
2012-11-22 10:38:28 +00:00
|
|
|
(AudioDecodedUsecs() > LOW_AUDIO_USECS * mPlaybackRate || mReader->AudioQueue().IsFinished());
|
2010-05-13 00:59:42 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::HaveNextFrameData() const {
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-09-14 23:24:47 +00:00
|
|
|
return (!HasAudio() || HasFutureAudio()) &&
|
2012-09-17 20:45:38 +00:00
|
|
|
(!HasVideo() || mReader->VideoQueue().GetSize() > 0);
|
2010-05-13 00:59:42 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
int64_t MediaDecoderStateMachine::GetDecodedAudioDuration() {
|
2011-01-13 01:06:15 +00:00
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-09-17 20:45:38 +00:00
|
|
|
int64_t audioDecoded = mReader->AudioQueue().Duration();
|
2011-01-13 01:06:15 +00:00
|
|
|
if (mAudioEndTime != -1) {
|
|
|
|
audioDecoded += mAudioEndTime - GetMediaTime();
|
|
|
|
}
|
|
|
|
return audioDecoded;
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Entry point for the decode thread. Loops dispatching on mState — decoding
// metadata, media data, and seeks — until the decoder shuts down, playback
// completes, the decoder goes dormant, or the thread is asked to stop.
void MediaDecoderStateMachine::DecodeThreadRun()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mReader->OnDecodeThreadStart();

  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

    // Initial metadata load; failure leaves us in SHUTDOWN and the loop
    // below exits immediately.
    if (mState == DECODER_STATE_DECODING_METADATA &&
        NS_FAILED(DecodeMetadata())) {
      NS_ASSERTION(mState == DECODER_STATE_SHUTDOWN,
                   "Should be in shutdown state if metadata loading fails.");
      DECODER_LOG(PR_LOG_DEBUG, ("Decode metadata failed, shutting down decode thread"));
    }

    while (mState != DECODER_STATE_SHUTDOWN &&
           mState != DECODER_STATE_COMPLETED &&
           mState != DECODER_STATE_DORMANT &&
           !mStopDecodeThread)
    {
      if (mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) {
        DecodeLoop();
      } else if (mState == DECODER_STATE_SEEKING) {
        DecodeSeek();
      } else if (mState == DECODER_STATE_DECODING_METADATA) {
        // Metadata may need re-decoding, e.g. after resources became
        // available again.
        if (NS_FAILED(DecodeMetadata())) {
          NS_ASSERTION(mState == DECODER_STATE_SHUTDOWN,
                       "Should be in shutdown state if metadata loading fails.");
          DECODER_LOG(PR_LOG_DEBUG, ("Decode metadata failed, shutting down decode thread"));
        }
      } else if (mState == DECODER_STATE_WAIT_FOR_RESOURCES) {
        // Block on the monitor until another thread signals a change.
        mDecoder->GetReentrantMonitor().Wait();

        if (!mReader->IsWaitingMediaResources()) {
          // Resources became available: kick off metadata decoding again.
          StartDecodeMetadata();
        }
      } else if (mState == DECODER_STATE_DORMANT) {
        mDecoder->GetReentrantMonitor().Wait();
      }
    }

    mDecodeThreadIdle = true;
    DECODER_LOG(PR_LOG_DEBUG, ("%p Decode thread finished", mDecoder.get()));
  }

  mReader->OnDecodeThreadFinish();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Appends aAudio's samples to aOutput (the AudioSegment being fed to a
// captured MediaStream), inserting silence to cover any gap between the
// stream's write position and the packet's start time. Packets at or before
// the last processed packet time are ignored. Caller must hold the decoder
// monitor; runs on the decode or state machine thread.
void MediaDecoderStateMachine::SendStreamAudio(AudioData* aAudio,
                                               DecodedStreamData* aStream,
                                               AudioSegment* aOutput)
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  AssertCurrentThreadInMonitor();

  if (aAudio->mTime <= aStream->mLastAudioPacketTime) {
    // ignore packet that we've already processed
    return;
  }
  aStream->mLastAudioPacketTime = aAudio->mTime;
  aStream->mLastAudioPacketEndTime = aAudio->GetEndTime();

  // This logic has to mimic AudioSink closely to make sure we write
  // the exact same silences
  CheckedInt64 audioWrittenOffset = UsecsToFrames(mInfo.mAudio.mRate,
      aStream->mInitialTime + mStartTime) + aStream->mAudioFramesWritten;
  CheckedInt64 frameOffset = UsecsToFrames(mInfo.mAudio.mRate, aAudio->mTime);
  // Bail on arithmetic overflow in either frame-offset computation.
  if (!audioWrittenOffset.isValid() || !frameOffset.isValid())
    return;
  if (audioWrittenOffset.value() < frameOffset.value()) {
    // Write silence to catch up
    DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder writing %d frames of silence to MediaStream",
                               mDecoder.get(), int32_t(frameOffset.value() - audioWrittenOffset.value())));
    AudioSegment silence;
    silence.InsertNullDataAtStart(frameOffset.value() - audioWrittenOffset.value());
    aStream->mAudioFramesWritten += silence.GetDuration();
    aOutput->AppendFrom(&silence);
  }

  int64_t offset;
  if (aStream->mAudioFramesWritten == 0) {
    NS_ASSERTION(frameOffset.value() <= audioWrittenOffset.value(),
                 "Otherwise we'd have taken the write-silence path");
    // We're starting in the middle of a packet. Split the packet.
    offset = audioWrittenOffset.value() - frameOffset.value();
  } else {
    // Write the entire packet.
    offset = 0;
  }

  // Nothing left of this packet after skipping |offset| frames.
  if (offset >= aAudio->mFrames)
    return;

  // Build per-channel pointers into the audio buffer, skipping |offset|
  // frames at the start of each channel's plane.
  aAudio->EnsureAudioBuffer();
  nsRefPtr<SharedBuffer> buffer = aAudio->mAudioBuffer;
  AudioDataValue* bufferData = static_cast<AudioDataValue*>(buffer->Data());
  nsAutoTArray<const AudioDataValue*,2> channels;
  for (uint32_t i = 0; i < aAudio->mChannels; ++i) {
    channels.AppendElement(bufferData + i*aAudio->mFrames + offset);
  }
  aOutput->AppendFrames(buffer.forget(), channels, aAudio->mFrames);
  DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder writing %d frames of data to MediaStream for AudioData at %lld",
                             mDecoder.get(), aAudio->mFrames - int32_t(offset), aAudio->mTime));
  aStream->mAudioFramesWritten += aAudio->mFrames - int32_t(offset);
}
|
|
|
|
|
2012-12-19 04:48:32 +00:00
|
|
|
static void WriteVideoToMediaStream(layers::Image* aImage,
|
2012-08-22 15:56:38 +00:00
|
|
|
int64_t aDuration, const gfxIntSize& aIntrinsicSize,
|
2012-04-30 03:12:42 +00:00
|
|
|
VideoSegment* aOutput)
|
|
|
|
{
|
2012-12-19 04:48:32 +00:00
|
|
|
nsRefPtr<layers::Image> image = aImage;
|
2012-04-30 03:12:42 +00:00
|
|
|
aOutput->AppendFrame(image.forget(), aDuration, aIntrinsicSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Track IDs used for the audio and video tracks added to the output
// SourceMediaStream when decoded media is captured to a stream.
static const TrackID TRACK_AUDIO = 1;
static const TrackID TRACK_VIDEO = 2;
// The video track is timestamped in microseconds, so its track rate is
// USECS_PER_S ticks per second.
static const TrackRate RATE_VIDEO = USECS_PER_S;
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Pushes decoded audio and video data into the captured output
// SourceMediaStream (if any): initializes the stream's tracks on first
// use, appends newly decoded segments, ends tracks/finishes the stream
// when the decode queues are finished, and discards audio packets that
// have already been fully written to the stream.
// Must be called on the decode or state machine thread with the decoder
// monitor held.
void MediaDecoderStateMachine::SendStreamData()
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  AssertCurrentThreadInMonitor();

  // No captured stream: nothing to do.
  DecodedStreamData* stream = mDecoder->GetDecodedStream();
  if (!stream)
    return;

  // Track rates/kinds aren't known until metadata has been decoded.
  if (mState == DECODER_STATE_DECODING_METADATA)
    return;

  // If there's still an audio sink alive, then we can't send any stream
  // data yet since both SendStreamData and the audio sink want to be in
  // charge of popping the audio queue. We're waiting for the audio sink
  // to die before sending anything to our stream.
  if (mAudioSink)
    return;

  int64_t minLastAudioPacketTime = INT64_MAX;
  // "finished" means every present track's decode queue has drained.
  bool finished =
      (!mInfo.HasAudio() || mReader->AudioQueue().IsFinished()) &&
      (!mInfo.HasVideo() || mReader->VideoQueue().IsFinished());
  if (mDecoder->IsSameOriginMedia()) {
    SourceMediaStream* mediaStream = stream->mStream;
    StreamTime endPosition = 0;

    // Lazily create the output tracks the first time we get here.
    if (!stream->mStreamInitialized) {
      if (mInfo.HasAudio()) {
        AudioSegment* audio = new AudioSegment();
        mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudio.mRate, 0, audio);
      }
      if (mInfo.HasVideo()) {
        VideoSegment* video = new VideoSegment();
        mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
      }
      stream->mStreamInitialized = true;
    }

    if (mInfo.HasAudio()) {
      nsAutoTArray<AudioData*,10> audio;
      // It's OK to hold references to the AudioData because while audio
      // is captured, only the decoder thread pops from the queue (see below).
      mReader->AudioQueue().GetElementsAfter(stream->mLastAudioPacketTime, &audio);
      AudioSegment output;
      for (uint32_t i = 0; i < audio.Length(); ++i) {
        SendStreamAudio(audio[i], stream, &output);
      }
      if (output.GetDuration() > 0) {
        mediaStream->AppendToTrack(TRACK_AUDIO, &output);
      }
      if (mReader->AudioQueue().IsFinished() && !stream->mHaveSentFinishAudio) {
        mediaStream->EndTrack(TRACK_AUDIO);
        stream->mHaveSentFinishAudio = true;
      }
      minLastAudioPacketTime = std::min(minLastAudioPacketTime, stream->mLastAudioPacketTime);
      endPosition = std::max(endPosition,
          TicksToTimeRoundDown(mInfo.mAudio.mRate, stream->mAudioFramesWritten));
    }

    if (mInfo.HasVideo()) {
      nsAutoTArray<VideoData*,10> video;
      // It's OK to hold references to the VideoData only the decoder thread
      // pops from the queue.
      mReader->VideoQueue().GetElementsAfter(stream->mNextVideoTime + mStartTime, &video);
      VideoSegment output;
      for (uint32_t i = 0; i < video.Length(); ++i) {
        VideoData* v = video[i];
        if (stream->mNextVideoTime + mStartTime < v->mTime) {
          DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder writing last video to MediaStream %p for %lld ms",
                                     mDecoder.get(), mediaStream,
                                     v->mTime - (stream->mNextVideoTime + mStartTime)));
          // Write last video frame to catch up. mLastVideoImage can be null here
          // which is fine, it just means there's no video.
          WriteVideoToMediaStream(stream->mLastVideoImage,
            v->mTime - (stream->mNextVideoTime + mStartTime), stream->mLastVideoImageDisplaySize,
            &output);
          stream->mNextVideoTime = v->mTime - mStartTime;
        }
        if (stream->mNextVideoTime + mStartTime < v->GetEndTime()) {
          DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder writing video frame %lld to MediaStream %p for %lld ms",
                                     mDecoder.get(), v->mTime, mediaStream,
                                     v->GetEndTime() - (stream->mNextVideoTime + mStartTime)));
          WriteVideoToMediaStream(v->mImage,
              v->GetEndTime() - (stream->mNextVideoTime + mStartTime), v->mDisplay,
              &output);
          stream->mNextVideoTime = v->GetEndTime() - mStartTime;
          stream->mLastVideoImage = v->mImage;
          stream->mLastVideoImageDisplaySize = v->mDisplay;
        } else {
          // Frame ends at or before the stream's current video time; skip it.
          DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder skipping writing video frame %lld to MediaStream",
                                     mDecoder.get(), v->mTime));
        }
      }
      if (output.GetDuration() > 0) {
        mediaStream->AppendToTrack(TRACK_VIDEO, &output);
      }
      if (mReader->VideoQueue().IsFinished() && !stream->mHaveSentFinishVideo) {
        mediaStream->EndTrack(TRACK_VIDEO);
        stream->mHaveSentFinishVideo = true;
      }
      endPosition = std::max(endPosition,
          TicksToTimeRoundDown(RATE_VIDEO, stream->mNextVideoTime - stream->mInitialTime));
    }

    if (!stream->mHaveSentFinish) {
      stream->mStream->AdvanceKnownTracksTime(endPosition);
    }

    if (finished && !stream->mHaveSentFinish) {
      stream->mHaveSentFinish = true;
      stream->mStream->Finish();
    }
  }

  if (mAudioCaptured) {
    // Discard audio packets that are no longer needed.
    while (true) {
      nsAutoPtr<AudioData> a(mReader->AudioQueue().PopFront());
      if (!a)
        break;
      // Packet times are not 100% reliable so this may discard packets that
      // actually contain data for mCurrentFrameTime. This means if someone might
      // create a new output stream and we actually don't have the audio for the
      // very start. That's OK, we'll play silence instead for a brief moment.
      // That's OK. Seeking to this time would have a similar issue for such
      // badly muxed resources.
      if (a->GetEndTime() >= minLastAudioPacketTime) {
        // This packet (or part of it) hasn't been written yet; put it back.
        mReader->AudioQueue().PushFront(a.forget());
        break;
      }
      OnAudioEndTimeUpdate(std::max(mAudioEndTime, a->GetEndTime()));
    }

    if (finished) {
      mAudioCompleted = true;
      UpdateReadyState();
    }
  }
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Returns the runnable dispatched to wake the decode thread when a
// captured MediaStream needs more data. Lazily created and cached in
// mPendingWakeDecoder so all callers share a single instance.
MediaDecoderStateMachine::WakeDecoderRunnable*
MediaDecoderStateMachine::GetWakeDecoderRunnable()
{
  AssertCurrentThreadInMonitor();

  WakeDecoderRunnable* runnable = mPendingWakeDecoder.get();
  if (!runnable) {
    // First request: create and cache it.
    mPendingWakeDecoder = new WakeDecoderRunnable(this);
    runnable = mPendingWakeDecoder.get();
  }
  return runnable;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedAudio(int64_t aAmpleAudioUSecs)
|
2012-04-30 03:12:42 +00:00
|
|
|
{
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-04-30 03:12:42 +00:00
|
|
|
|
2012-09-17 20:45:38 +00:00
|
|
|
if (mReader->AudioQueue().GetSize() == 0 ||
|
2012-04-30 03:12:42 +00:00
|
|
|
GetDecodedAudioDuration() < aAmpleAudioUSecs) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (!mAudioCaptured) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-07-31 12:17:22 +00:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (stream && stream->mStreamInitialized && !stream->mHaveSentFinishAudio) {
|
|
|
|
if (!stream->mStream->HaveEnoughBuffered(TRACK_AUDIO)) {
|
2012-04-30 03:12:42 +00:00
|
|
|
return false;
|
|
|
|
}
|
2012-07-31 12:17:22 +00:00
|
|
|
stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_AUDIO,
|
|
|
|
GetStateMachineThread(), GetWakeDecoderRunnable());
|
2012-04-30 03:12:42 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedVideo()
|
2012-04-30 03:12:42 +00:00
|
|
|
{
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-04-30 03:12:42 +00:00
|
|
|
|
2012-11-28 02:34:53 +00:00
|
|
|
if (static_cast<uint32_t>(mReader->VideoQueue().GetSize()) < GetAmpleVideoFrames() * mPlaybackRate) {
|
2012-04-30 03:12:42 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2012-07-31 12:17:22 +00:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (stream && stream->mStreamInitialized && !stream->mHaveSentFinishVideo) {
|
|
|
|
if (!stream->mStream->HaveEnoughBuffered(TRACK_VIDEO)) {
|
2012-04-30 03:12:42 +00:00
|
|
|
return false;
|
|
|
|
}
|
2012-07-31 12:17:22 +00:00
|
|
|
stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO,
|
|
|
|
GetStateMachineThread(), GetWakeDecoderRunnable());
|
2012-04-30 03:12:42 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Main decode-thread loop: alternately decodes audio and video until the
// queues are "ample", throttling whichever side is ahead, skipping video
// to the next keyframe when decode falls behind, and sleeping on the
// decoder monitor when both sides are throttled. Runs with the decoder
// monitor held; it is temporarily exited around the actual decode calls.
void MediaDecoderStateMachine::DecodeLoop()
{
  DECODER_LOG(PR_LOG_DEBUG, ("%p Start DecodeLoop()", mDecoder.get()));

  AssertCurrentThreadInMonitor();
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");

  // We want to "pump" the decode until we've got a few frames decoded
  // before we consider whether decode is falling behind.
  bool audioPump = true;
  bool videoPump = true;

  // If the video decode is falling behind the audio, we'll start dropping the
  // inter-frames up until the next keyframe which is at or before the current
  // playback position. skipToNextKeyframe is true if we're currently
  // skipping up to the next keyframe.
  bool skipToNextKeyframe = false;

  // Once we've decoded more than videoPumpThreshold video frames, we'll
  // no longer be considered to be "pumping video".
  const unsigned videoPumpThreshold = mRealTime ? 0 : GetAmpleVideoFrames() / 2;

  // After the audio decode fills with more than audioPumpThreshold usecs
  // of decoded audio, we'll start to check whether the audio or video decode
  // is falling behind.
  const unsigned audioPumpThreshold = mRealTime ? 0 : LOW_AUDIO_USECS * 2;

  // Our local low audio threshold. We may increase this if we're slow to
  // decode video frames, in order to reduce the chance of audio underruns.
  int64_t lowAudioThreshold = LOW_AUDIO_USECS;

  // Our local ample audio threshold. If we increase lowAudioThreshold, we'll
  // also increase this too appropriately (we don't want lowAudioThreshold to
  // be greater than ampleAudioThreshold, else we'd stop decoding!).
  int64_t ampleAudioThreshold = AMPLE_AUDIO_USECS;

  // Main decode loop.
  bool videoPlaying = HasVideo();
  bool audioPlaying = HasAudio();
  while ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) &&
         !mStopDecodeThread &&
         (videoPlaying || audioPlaying))
  {
    // We don't want to consider skipping to the next keyframe if we've
    // only just started up the decode loop, so wait until we've decoded
    // some frames before enabling the keyframe skip logic on video.
    if (videoPump &&
        (static_cast<uint32_t>(mReader->VideoQueue().GetSize())
         >= videoPumpThreshold * mPlaybackRate))
    {
      videoPump = false;
    }

    // We don't want to consider skipping to the next keyframe if we've
    // only just started up the decode loop, so wait until we've decoded
    // some audio data before enabling the keyframe skip logic on audio.
    if (audioPump && GetDecodedAudioDuration() >= audioPumpThreshold * mPlaybackRate) {
      audioPump = false;
    }

    // We'll skip the video decode to the nearest keyframe if we're low on
    // audio, or if we're low on video, provided we're not running low on
    // data to decode. If we're running low on downloaded data to decode,
    // we won't start keyframe skipping, as we'll be pausing playback to buffer
    // soon anyway and we'll want to be able to display frames immediately
    // after buffering finishes.
    if (mState == DECODER_STATE_DECODING &&
        !skipToNextKeyframe &&
        videoPlaying &&
        ((!audioPump && audioPlaying && !mDidThrottleAudioDecoding &&
          GetDecodedAudioDuration() < lowAudioThreshold * mPlaybackRate) ||
         (!videoPump && videoPlaying && !mDidThrottleVideoDecoding &&
          (static_cast<uint32_t>(mReader->VideoQueue().GetSize())
           < LOW_VIDEO_FRAMES * mPlaybackRate))) &&
        !HasLowUndecodedData())
    {
      skipToNextKeyframe = true;
      DECODER_LOG(PR_LOG_DEBUG, ("%p Skipping video decode to the next keyframe", mDecoder.get()));
    }

    // Video decode.
    bool throttleVideoDecoding = !videoPlaying || HaveEnoughDecodedVideo();
    if (mDidThrottleVideoDecoding && !throttleVideoDecoding) {
      // Coming out of a throttle: re-enter the "pumping" phase.
      videoPump = true;
    }
    mDidThrottleVideoDecoding = throttleVideoDecoding;
    if (!throttleVideoDecoding)
    {
      // Time the video decode, so that if it's slow, we can increase our low
      // audio threshold to reduce the chance of an audio underrun while we're
      // waiting for a video decode to complete.
      TimeDuration decodeTime;
      {
        int64_t currentTime = GetMediaTime();
        // Exit the monitor while decoding so other threads can make progress.
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        TimeStamp start = TimeStamp::Now();
        videoPlaying = mReader->DecodeVideoFrame(skipToNextKeyframe, currentTime);
        decodeTime = TimeStamp::Now() - start;
        if (!videoPlaying) {
          // Playback ended for this stream, close the sample queue.
          mReader->VideoQueue().Finish();
        }
      }
      if (THRESHOLD_FACTOR * DurationToUsecs(decodeTime) > lowAudioThreshold &&
          !HasLowUndecodedData())
      {
        lowAudioThreshold =
          std::min(THRESHOLD_FACTOR * DurationToUsecs(decodeTime), AMPLE_AUDIO_USECS);
        ampleAudioThreshold = std::max(THRESHOLD_FACTOR * lowAudioThreshold,
                                       ampleAudioThreshold);
        DECODER_LOG(PR_LOG_DEBUG,
                    ("Slow video decode, set lowAudioThreshold=%lld ampleAudioThreshold=%lld",
                     lowAudioThreshold, ampleAudioThreshold));
      }
    }

    // Audio decode.
    bool throttleAudioDecoding = !audioPlaying || HaveEnoughDecodedAudio(ampleAudioThreshold * mPlaybackRate);
    if (mDidThrottleAudioDecoding && !throttleAudioDecoding) {
      // Coming out of a throttle: re-enter the "pumping" phase.
      audioPump = true;
    }
    mDidThrottleAudioDecoding = throttleAudioDecoding;
    if (!mDidThrottleAudioDecoding) {
      // Exit the monitor while decoding so other threads can make progress.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      audioPlaying = mReader->DecodeAudioData();
      if (!audioPlaying) {
        // Playback ended for this stream, close the sample queue.
        mReader->AudioQueue().Finish();
      }
    }

    SendStreamData();

    // Notify to ensure that the AudioSink is not waiting, in case it was
    // waiting for more audio to be decoded.
    mDecoder->GetReentrantMonitor().NotifyAll();

    // The ready state can change when we've decoded data, so update the
    // ready state, so that DOM events can fire.
    UpdateReadyState();

    if ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) &&
        !mStopDecodeThread &&
        (videoPlaying || audioPlaying) &&
        throttleAudioDecoding && throttleVideoDecoding)
    {
      // All active bitstreams' decode is well ahead of the playback
      // position, we may as well wait for the playback to catch up. Note the
      // audio push thread acquires and notifies the decoder monitor every time
      // it pops AudioData off the audio queue. So if the audio push thread pops
      // the last AudioData off the audio queue right after that queue reported
      // it was non-empty here, we'll receive a notification on the decoder
      // monitor which will wake us up shortly after we sleep, thus preventing
      // both the decode and audio push threads waiting at the same time.
      // See bug 620326.
      mDecodeThreadWaiting = true;
      if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING) {
        // We're not playing, and the decode is about to wait. This means
        // the decode thread may not be needed in future. Signal the state
        // machine thread to run, so it can decide whether to shutdown the
        // decode thread.
        ScheduleStateMachine();
      }
      mDecoder->GetReentrantMonitor().Wait();
      mDecodeThreadWaiting = false;
    }

  } // End decode loop.

  // Decode ran to completion (not interrupted by shutdown/dormant/seek):
  // move the state machine to COMPLETED.
  if (!mStopDecodeThread &&
      mState != DECODER_STATE_SHUTDOWN &&
      mState != DECODER_STATE_DORMANT &&
      mState != DECODER_STATE_SEEKING)
  {
    mState = DECODER_STATE_COMPLETED;
    ScheduleStateMachine();
  }

  DECODER_LOG(PR_LOG_DEBUG, ("%p Exiting DecodeLoop", mDecoder.get()));
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::IsPlaying()
|
2010-04-02 03:03:07 +00:00
|
|
|
{
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-02 03:03:07 +00:00
|
|
|
|
|
|
|
return !mPlayStartTime.IsNull();
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Initializes the reader, optionally cloning decode state from another
// state machine's reader (used when one decoder is cloned from another).
// aCloneDonor may be null, in which case the reader initializes fresh.
nsresult MediaDecoderStateMachine::Init(MediaDecoderStateMachine* aCloneDonor)
{
  MediaDecoderReader* cloneReader = nullptr;
  if (aCloneDonor) {
    // aCloneDonor is already a MediaDecoderStateMachine*; the previous
    // static_cast to the same type was a leftover and has been removed.
    cloneReader = aCloneDonor->mReader;
  }
  return mReader->Init(cloneReader);
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Halts playback: accumulates the elapsed play time into mPlayDuration,
// clears the play-start timestamp (making IsPlaying() false), and wakes
// any threads waiting on the decoder monitor so they observe the stop.
// Must be called with the decoder monitor held.
void MediaDecoderStateMachine::StopPlayback()
{
  DECODER_LOG(PR_LOG_DEBUG, ("%p StopPlayback()", mDecoder.get()));

  AssertCurrentThreadInMonitor();

  mDecoder->NotifyPlaybackStopped();

  if (IsPlaying()) {
    // Fold the wall-clock time played since the last start into
    // mPlayDuration, then null the start time to mark us as stopped.
    mPlayDuration += DurationToUsecs(TimeStamp::Now() - mPlayStartTime);
    SetPlayStartTime(TimeStamp());
  }
  // Notify the audio sink, so that it notices that we've stopped playing,
  // so it can pause audio playback.
  mDecoder->GetReentrantMonitor().NotifyAll();
  NS_ASSERTION(!IsPlaying(), "Should report not playing at end of StopPlayback()");
  mDecoder->UpdateStreamBlockingForStateMachinePlaying();
}
|
|
|
|
|
2013-12-01 21:09:06 +00:00
|
|
|
void MediaDecoderStateMachine::SetSyncPointForMediaStream()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
|
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (!stream) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
mSyncPointInMediaStream = stream->GetLastOutputTime();
|
|
|
|
mSyncPointInDecodedStream = mStartTime + mPlayDuration;
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Begins (or resumes) playback: records the play-start timestamp (making
// IsPlaying() true), starts the audio thread, and wakes monitor waiters.
// Must be called with the decoder monitor held, while not playing.
void MediaDecoderStateMachine::StartPlayback()
{
  DECODER_LOG(PR_LOG_DEBUG, ("%p StartPlayback()", mDecoder.get()));

  NS_ASSERTION(!IsPlaying(), "Shouldn't be playing when StartPlayback() is called");
  AssertCurrentThreadInMonitor();

  mDecoder->NotifyPlaybackStarted();
  SetPlayStartTime(TimeStamp::Now());

  NS_ASSERTION(IsPlaying(), "Should report playing by end of StartPlayback()");
  if (NS_FAILED(StartAudioThread())) {
    // Playback continues without audio; the failure is only warned about.
    NS_WARNING("Failed to create audio thread");
  }
  mDecoder->GetReentrantMonitor().NotifyAll();
  mDecoder->UpdateStreamBlockingForStateMachinePlaying();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Updates the cached current frame time from the absolute media time
// aTime (usecs). If playback has run past the known end time, extends
// mEndTime and notifies the main thread that the duration changed.
void MediaDecoderStateMachine::UpdatePlaybackPositionInternal(int64_t aTime)
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine thread.");
  AssertCurrentThreadInMonitor();

  NS_ASSERTION(mStartTime >= 0, "Should have positive mStartTime");
  // mCurrentFrameTime is relative to mStartTime.
  mCurrentFrameTime = aTime - mStartTime;
  NS_ASSERTION(mCurrentFrameTime >= 0, "CurrentTime should be positive!");
  if (aTime > mEndTime) {
    NS_ASSERTION(mCurrentFrameTime > GetDuration(),
                 "CurrentTime must be after duration if aTime > endTime!");
    // Playback overran the known end; extend it and tell the main thread.
    mEndTime = aTime;
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DurationChanged);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Updates the playback position to the absolute media time aTime and
// fires the side effects that depend on it: a (coalesced) position-change
// notification to the main thread, pending audioavailable events, timed
// metadata dispatch, and stopping playback at a media-fragment end.
void MediaDecoderStateMachine::UpdatePlaybackPosition(int64_t aTime)
{
  UpdatePlaybackPositionInternal(aTime);

  bool fragmentEnded = mFragmentEndTime >= 0 && GetMediaTime() >= mFragmentEndTime;
  // mPositionChangeQueued coalesces notifications: only dispatch if one
  // isn't already pending (fragment end always forces a dispatch).
  if (!mPositionChangeQueued || fragmentEnded) {
    mPositionChangeQueued = true;
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::PlaybackPositionChanged);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }

  // Notify DOM of any queued up audioavailable events
  mEventManager->DispatchPendingEvents(GetMediaTime());

  mMetadataManager.DispatchMetadataIfNeeded(mDecoder, aTime);

  if (fragmentEnded) {
    StopPlayback();
  }
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Clears the coalescing flag set by UpdatePlaybackPosition() once the
// main thread has processed the queued position-change notification,
// allowing the next position change to dispatch a fresh event.
void MediaDecoderStateMachine::ClearPositionChangeFlag()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  AssertCurrentThreadInMonitor();

  mPositionChangeQueued = false;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Reports whether the next frame of media is available for playback,
// for the element's readyState logic. Takes the decoder monitor itself.
MediaDecoderOwner::NextFrameStatus MediaDecoderStateMachine::GetNextFrameStatus()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // Buffering or seeking trumps everything else.
  if (IsBuffering() || IsSeeking()) {
    return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING;
  }
  if (HaveNextFrameData()) {
    return MediaDecoderOwner::NEXT_FRAME_AVAILABLE;
  }
  return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Stores the playback volume and forwards it immediately to the audio
// sink if one is currently running. Main-thread only.
void MediaDecoderStateMachine::SetVolume(double volume)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  mVolume = volume;
  if (mAudioSink) {
    mAudioSink->SetVolume(mVolume);
  }
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Switches whether decoded audio is captured into a MediaStream (true)
// or played through the audio sink (false). Main-thread only.
void MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (!mAudioCaptured && aCaptured && !mStopAudioThread) {
    // Make sure the state machine runs as soon as possible. That will
    // stop the audio sink.
    // If mStopAudioThread is true then we're already stopping the audio sink
    // and since we set mAudioCaptured to true, nothing can start it again.
    ScheduleStateMachine();
  }
  mAudioCaptured = aCaptured;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
double MediaDecoderStateMachine::GetCurrentTime() const
|
2010-04-02 03:03:07 +00:00
|
|
|
{
|
2011-03-23 22:28:57 +00:00
|
|
|
NS_ASSERTION(NS_IsMainThread() ||
|
2011-07-12 03:39:23 +00:00
|
|
|
OnStateMachineThread() ||
|
2011-03-23 22:28:57 +00:00
|
|
|
OnDecodeThread(),
|
|
|
|
"Should be on main, decode, or state machine thread.");
|
2010-04-02 03:03:07 +00:00
|
|
|
|
2011-04-13 22:12:23 +00:00
|
|
|
return static_cast<double>(mCurrentFrameTime) / static_cast<double>(USECS_PER_S);
|
2010-04-02 03:03:07 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
int64_t MediaDecoderStateMachine::GetDuration()
|
2010-04-02 03:03:07 +00:00
|
|
|
{
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-02 03:03:07 +00:00
|
|
|
|
|
|
|
if (mEndTime == -1 || mStartTime == -1)
|
|
|
|
return -1;
|
|
|
|
return mEndTime - mStartTime;
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::SetDuration(int64_t aDuration)
{
  NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
               "Should be on main or decode thread.");
  AssertCurrentThreadInMonitor();

  // -1 means "duration unknown"; keep whatever endpoints we already have.
  if (aDuration == -1) {
    return;
  }

  if (mStartTime == -1) {
    // No start time yet: anchor the media at zero.
    mStartTime = 0;
    mEndTime = aDuration;
  } else {
    mEndTime = mStartTime + aDuration;
  }
}
|
|
|
|
|
2013-09-10 00:45:33 +00:00
|
|
|
// Updates the duration estimate, but only when the new estimate differs from
// the current duration by more than the fuzz factor; this avoids spamming
// DurationChanged events for insignificant changes. Notifies the decoder on
// the main thread when the duration actually changes.
void MediaDecoderStateMachine::UpdateEstimatedDuration(int64_t aDuration)
{
  AssertCurrentThreadInMonitor();
  int64_t duration = GetDuration();
  // Compute the magnitude of the change in 64 bits. The previous code used
  // abs(), which takes (and returns) an int; the int64_t difference was
  // silently truncated, so large duration changes could compare incorrectly.
  int64_t delta = aDuration - duration;
  if (delta < 0) {
    delta = -delta;
  }
  if (aDuration != duration &&
      delta > ESTIMATED_DURATION_FUZZ_FACTOR_USECS) {
    SetDuration(aDuration);
    // DurationChanged touches DOM state, so it must run on the main thread.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DurationChanged);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }
}
|
|
|
|
|
2012-11-19 15:11:21 +00:00
|
|
|
void MediaDecoderStateMachine::SetMediaEndTime(int64_t aEndTime)
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread");
  AssertCurrentThreadInMonitor();

  // Record the end of the media resource, in usecs.
  mEndTime = aEndTime;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::SetFragmentEndTime(int64_t aEndTime)
{
  AssertCurrentThreadInMonitor();

  if (aEndTime < 0) {
    // Negative means "no fragment end"; store the sentinel untranslated.
    mFragmentEndTime = aEndTime;
  } else {
    // Translate from media time into absolute stream time.
    mFragmentEndTime = aEndTime + mStartTime;
  }
}
|
|
|
|
|
2012-11-30 13:17:54 +00:00
|
|
|
void MediaDecoderStateMachine::SetTransportSeekable(bool aTransportSeekable)
|
2010-04-02 03:03:07 +00:00
|
|
|
{
|
2012-11-30 13:17:54 +00:00
|
|
|
NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
|
|
|
|
"Should be on main thread or the decoder thread.");
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-02 03:03:07 +00:00
|
|
|
|
2012-11-30 13:17:54 +00:00
|
|
|
mTransportSeekable = aTransportSeekable;
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetMediaSeekable(bool aMediaSeekable)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
|
|
|
|
"Should be on main thread or the decoder thread.");
|
|
|
|
|
|
|
|
mMediaSeekable = aMediaSeekable;
|
2010-04-02 03:03:07 +00:00
|
|
|
}
|
|
|
|
|
2013-06-10 12:22:05 +00:00
|
|
|
bool MediaDecoderStateMachine::IsDormantNeeded()
|
|
|
|
{
|
|
|
|
return mReader->IsDormantNeeded();
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetDormant(bool aDormant)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2013-06-10 12:22:05 +00:00
|
|
|
|
|
|
|
if (!mReader) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (aDormant) {
|
|
|
|
ScheduleStateMachine();
|
|
|
|
mState = DECODER_STATE_DORMANT;
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
|
|
|
} else if ((aDormant != true) && (mState == DECODER_STATE_DORMANT)) {
|
|
|
|
ScheduleStateMachine();
|
|
|
|
mStartTime = 0;
|
|
|
|
mCurrentFrameTime = 0;
|
|
|
|
mState = DECODER_STATE_DECODING_METADATA;
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Begins tearing down the state machine. Sets the terminal SHUTDOWN state
// (irreversible), asks the audio sink to start winding down, and wakes any
// threads blocked on the decoder monitor so they can observe the new state.
void MediaDecoderStateMachine::Shutdown()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

  // Once we've entered the shutdown state here there's no going back.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // Change state before issuing shutdown request to threads so those
  // threads can start exiting cleanly during the Shutdown call.
  DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state to SHUTDOWN", mDecoder.get()));
  ScheduleStateMachine();
  mState = DECODER_STATE_SHUTDOWN;
  // Start the sink's asynchronous wind-down before notifying waiters.
  if (mAudioSink) {
    mAudioSink->PrepareToShutdown();
  }
  // Wake all threads waiting on the monitor so they see SHUTDOWN and exit.
  mDecoder->GetReentrantMonitor().NotifyAll();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::StartDecoding()
|
2011-03-23 22:28:57 +00:00
|
|
|
{
|
2011-07-12 03:39:23 +00:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2011-04-29 19:21:57 +00:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2011-03-23 22:28:57 +00:00
|
|
|
if (mState != DECODER_STATE_DECODING) {
|
|
|
|
mDecodeStartTime = TimeStamp::Now();
|
|
|
|
}
|
|
|
|
mState = DECODER_STATE_DECODING;
|
2011-07-12 03:39:32 +00:00
|
|
|
ScheduleStateMachine();
|
2011-03-23 22:28:57 +00:00
|
|
|
}
|
|
|
|
|
2013-06-10 12:22:05 +00:00
|
|
|
void MediaDecoderStateMachine::StartWaitForResources()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
mState = DECODER_STATE_WAIT_FOR_RESOURCES;
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::StartDecodeMetadata()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
mState = DECODER_STATE_DECODING_METADATA;
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Called when playback is requested. Kicks the state machine; additionally,
// if we are buffering, transitions straight to DECODING so playback can
// resume without waiting for the buffering logic to finish on its own.
void MediaDecoderStateMachine::Play()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  // When asked to play, switch to decoding state only if
  // we are currently buffering. In other cases, we'll start playing anyway
  // when the state machine notices the decoder's state change to PLAYING.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (mState == DECODER_STATE_BUFFERING) {
    DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state from BUFFERING to DECODING", mDecoder.get()));
    mState = DECODER_STATE_DECODING;
    // Restart the decode timing clock for the new decoding period.
    mDecodeStartTime = TimeStamp::Now();
  }
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::ResetPlayback()
|
2010-04-02 03:03:07 +00:00
|
|
|
{
|
2011-07-12 03:39:25 +00:00
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
|
2010-05-31 04:02:00 +00:00
|
|
|
mVideoFrameEndTime = -1;
|
2010-04-02 03:03:07 +00:00
|
|
|
mAudioStartTime = -1;
|
|
|
|
mAudioEndTime = -1;
|
2011-09-29 23:34:37 +00:00
|
|
|
mAudioCompleted = false;
|
2010-04-02 03:03:07 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Called on the main thread as new media data is downloaded. Forwards the
// notification to the reader, and for infinite (unseekable, unknown-duration)
// streams, pushes mEndTime forward to the end of the newly buffered range.
void MediaDecoderStateMachine::NotifyDataArrived(const char* aBuffer,
                                                 uint32_t aLength,
                                                 int64_t aOffset)
{
  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
  mReader->NotifyDataArrived(aBuffer, aLength, aOffset);

  // While playing an unseekable stream of unknown duration, mEndTime is
  // updated (in AdvanceFrame()) as we play. But if data is being downloaded
  // faster than played, mEndTime won't reflect the end of playable data
  // since we haven't played the frame at the end of buffered data. So update
  // mEndTime here as new data is downloaded to prevent such a lag.
  dom::TimeRanges buffered;
  if (mDecoder->IsInfinite() &&
      NS_SUCCEEDED(mDecoder->GetBuffered(&buffered)))
  {
    uint32_t length = 0;
    buffered.GetLength(&length);
    if (length) {
      // Use the end of the last buffered range as the new playable end.
      double end = 0;
      buffered.End(length - 1, &end);
      // Take the monitor only for the mEndTime update; `end` is in seconds,
      // mEndTime is in usecs, and mEndTime only ever moves forward here.
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      mEndTime = std::max<int64_t>(mEndTime, end * USECS_PER_S);
    }
  }
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Initiates a seek to aTime (seconds, media time). Validates seekability,
// converts to absolute stream usecs, clamps the target into [start, end],
// switches to the SEEKING state and schedules the state machine; the actual
// seek work happens later on the decode thread (DecodeSeek).
void MediaDecoderStateMachine::Seek(double aTime)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // We need to be able to seek both at a transport level and at a media level
  // to seek.
  if (!mMediaSeekable) {
    return;
  }
  // MediaDecoder::mPlayState should be SEEKING while we seek, and
  // in that case MediaDecoder shouldn't be calling us.
  NS_ASSERTION(mState != DECODER_STATE_SEEKING,
               "We shouldn't already be seeking");
  NS_ASSERTION(mState >= DECODER_STATE_DECODING,
               "We should have loaded metadata");
  // Convert seconds to usecs in floating point so we can range-check before
  // the int64_t cast below.
  double t = aTime * static_cast<double>(USECS_PER_S);
  if (t > INT64_MAX) {
    // Prevent integer overflow.
    return;
  }

  // mSeekTime is in absolute stream time (offset by mStartTime).
  mSeekTime = static_cast<int64_t>(t) + mStartTime;
  NS_ASSERTION(mSeekTime >= mStartTime && mSeekTime <= mEndTime,
               "Can only seek in range [0,duration]");

  // Bound the seek time to be inside the media range.
  NS_ASSERTION(mStartTime != -1, "Should know start time by now");
  NS_ASSERTION(mEndTime != -1, "Should know end time by now");
  mSeekTime = std::min(mSeekTime, mEndTime);
  mSeekTime = std::max(mStartTime, mSeekTime);
  // mBasePosition is the seek target expressed in media time.
  mBasePosition = mSeekTime - mStartTime;
  DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state to SEEKING (to %f)", mDecoder.get(), aTime));
  mState = DECODER_STATE_SEEKING;
  // If output is being captured into a MediaStream, rebuild it at the new
  // position so the stream restarts from the seek target.
  if (mDecoder->GetDecodedStream()) {
    mDecoder->RecreateDecodedStream(mSeekTime - mStartTime);
  }
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Stops (or cancels the pending creation of) the decode thread. Must be
// called on the state machine thread with the decoder monitor held; the
// monitor is exited around the blocking thread join.
void MediaDecoderStateMachine::StopDecodeThread()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  AssertCurrentThreadInMonitor();
  if (mRequestedNewDecodeThread) {
    // We've requested that the decode be created, but it hasn't been yet.
    // Cancel that request.
    NS_ASSERTION(!mDecodeThread,
      "Shouldn't have a decode thread until after request processed");
    StateMachineTracker::Instance().CancelCreateDecodeThread(this);
    mRequestedNewDecodeThread = false;
  }
  // Flag the decode loop to exit, and wake it if it's waiting on the monitor.
  mStopDecodeThread = true;
  mDecoder->GetReentrantMonitor().NotifyAll();
  if (mDecodeThread) {
    DECODER_LOG(PR_LOG_DEBUG, ("%p Shutdown decode thread", mDecoder.get()));
    {
      // Exit the monitor while joining: the decode thread may need the
      // monitor to finish, so holding it here would deadlock.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      mDecodeThread->Shutdown();
      StateMachineTracker::Instance().NoteDecodeThreadDestroyed();
    }
    mDecodeThread = nullptr;
    mDecodeThreadIdle = false;
  }
  NS_ASSERTION(!mRequestedNewDecodeThread,
    "Any pending requests for decode threads must be canceled and unflagged");
  NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
    "Any pending requests for decode threads must be canceled");
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Stops the audio sink if one exists. Idempotent: if a stop is already in
// progress, returns immediately. The monitor is exited around the blocking
// sink shutdown.
void MediaDecoderStateMachine::StopAudioThread()
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  AssertCurrentThreadInMonitor();

  if (mStopAudioThread) {
    // Nothing to do, since the thread is already stopping
    return;
  }

  // Flag the stop and wake anything waiting on the monitor so it notices.
  mStopAudioThread = true;
  mDecoder->GetReentrantMonitor().NotifyAll();
  if (mAudioSink) {
    DECODER_LOG(PR_LOG_DEBUG, ("%p Shutdown audio thread", mDecoder.get()));
    // Begin the asynchronous wind-down before the blocking Shutdown() call.
    mAudioSink->PrepareToShutdown();
    {
      // Exit the monitor while blocking on sink shutdown to avoid deadlock
      // with the sink's own use of the monitor.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      mAudioSink->Shutdown();
    }
    mAudioSink = nullptr;
    // Now that the audio sink is dead, try sending data to our MediaStream(s).
    // That may have been waiting for the audio sink to stop.
    SendStreamData();
  }
}
|
|
|
|
|
|
|
|
// Ensures decoding work will happen: wakes an idle existing decode thread,
// or requests creation of a new one from the StateMachineTracker (which
// enforces the global decode-thread limit). No-op once playback completed.
nsresult
MediaDecoderStateMachine::ScheduleDecodeThread()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  AssertCurrentThreadInMonitor();

  mStopDecodeThread = false;
  if (mState >= DECODER_STATE_COMPLETED) {
    return NS_OK;
  }
  if (mDecodeThread) {
    NS_ASSERTION(!mRequestedNewDecodeThread,
      "Shouldn't have requested new decode thread when we have a decode thread");
    // We already have a decode thread...
    if (mDecodeThreadIdle) {
      // ... and it's not been shutdown yet, wake it up.
      nsCOMPtr<nsIRunnable> event =
        NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeThreadRun);
      mDecodeThread->Dispatch(event, NS_DISPATCH_NORMAL);
      mDecodeThreadIdle = false;
    }
    return NS_OK;
  } else if (!mRequestedNewDecodeThread) {
    // We don't already have a decode thread, request a new one.
    mRequestedNewDecodeThread = true;
    // Exit the monitor while calling into the tracker, which takes its own
    // lock; StartDecodeThread() is called back later when a slot is free.
    ReentrantMonitorAutoExit mon(mDecoder->GetReentrantMonitor());
    StateMachineTracker::Instance().RequestCreateDecodeThread(this);
  }
  return NS_OK;
}
|
2011-11-08 01:38:17 +00:00
|
|
|
|
2012-01-19 18:30:29 +00:00
|
|
|
// Actually creates the decode thread, after the StateMachineTracker has
// granted us a slot (see ScheduleDecodeThread). On failure, reports a decode
// error to the media element rather than silently continuing.
nsresult
MediaDecoderStateMachine::StartDecodeThread()
{
  NS_ASSERTION(StateMachineTracker::Instance().GetDecodeThreadCount() <
               StateMachineTracker::MAX_DECODE_THREADS,
               "Should not have reached decode thread limit");

  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
    "Should not already have a pending request for a new decode thread.");
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  NS_ASSERTION(!mDecodeThread, "Should not have decode thread yet");
  NS_ASSERTION(mRequestedNewDecodeThread, "Should have requested this...");

  // The pending request is now being fulfilled.
  mRequestedNewDecodeThread = false;

  nsresult rv = NS_NewNamedThread("Media Decode",
                                  getter_AddRefs(mDecodeThread),
                                  nullptr,
                                  MEDIA_THREAD_STACK_SIZE);
  if (NS_FAILED(rv)) {
    // Give up, report error to media element.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
    return rv;
  }

  // Kick off the decode loop on the freshly created thread.
  nsCOMPtr<nsIRunnable> event =
    NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeThreadRun);
  mDecodeThread->Dispatch(event, NS_DISPATCH_NORMAL);
  mDecodeThreadIdle = false;

  return NS_OK;
}
|
|
|
|
|
|
|
|
// Creates and initializes the audio sink, unless audio is being captured
// into a MediaStream (in which case no sink is wanted) or a sink already
// exists. On sink init failure the whole state machine is shut down.
nsresult
MediaDecoderStateMachine::StartAudioThread()
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  AssertCurrentThreadInMonitor();
  if (mAudioCaptured) {
    NS_ASSERTION(mStopAudioThread, "mStopAudioThread must always be true if audio is captured");
    return NS_OK;
  }

  mStopAudioThread = false;
  if (HasAudio() && !mAudioSink) {
    mAudioCompleted = false;
    mAudioSink = new AudioSink(this, mEventManager,
                               mAudioStartTime, mInfo.mAudio, mDecoder->GetAudioChannelType());
    nsresult rv = mAudioSink->Init();
    if (NS_FAILED(rv)) {
      DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state to SHUTDOWN because failed to create audio sink", mDecoder.get()));
      mState = DECODER_STATE_SHUTDOWN;
      return rv;
    }

    // Propagate the current playback parameters to the new sink.
    mAudioSink->SetVolume(mVolume);
    mAudioSink->SetPlaybackRate(mPlaybackRate);
    mAudioSink->SetPreservesPitch(mPreservesPitch);
  }
  return NS_OK;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
int64_t MediaDecoderStateMachine::AudioDecodedUsecs() const
{
  NS_ASSERTION(HasAudio(),
               "Should only call AudioDecodedUsecs() when we have audio");
  // The amount of audio we have decoded is the amount of audio data we've
  // already decoded and pushed to the hardware, plus the amount of audio
  // data waiting to be pushed to the hardware.
  int64_t pushed = 0;
  if (mAudioEndTime != -1) {
    pushed = mAudioEndTime - GetMediaTime();
  }
  return pushed + mReader->AudioQueue().Duration();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::HasLowDecodedData(int64_t aAudioUsecs) const
{
  AssertCurrentThreadInMonitor();
  // We consider ourselves low on decoded data if we're low on audio,
  // provided we've not decoded to the end of the audio stream, or
  // if we're low on video frames, provided
  // we've not decoded to the end of the video stream.
  if (HasAudio() &&
      !mReader->AudioQueue().IsFinished() &&
      AudioDecodedUsecs() < aAudioUsecs) {
    return true;
  }
  return HasVideo() &&
         !mReader->VideoQueue().IsFinished() &&
         static_cast<uint32_t>(mReader->VideoQueue().GetSize()) < LOW_VIDEO_FRAMES;
}
|
2010-09-14 23:24:47 +00:00
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::HasLowUndecodedData() const
|
2011-03-23 22:28:57 +00:00
|
|
|
{
|
2013-11-19 14:01:14 +00:00
|
|
|
return HasLowUndecodedData(mLowDataThresholdUsecs);
|
2011-03-23 22:28:57 +00:00
|
|
|
}
|
|
|
|
|
2013-11-19 14:01:14 +00:00
|
|
|
// Returns true if, at the estimated playback byte rate, we do not have
// aUsecs worth of undecoded (downloaded-but-not-yet-decoded) data cached
// ahead of the current read position.
bool MediaDecoderStateMachine::HasLowUndecodedData(double aUsecs) const
{
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(mState > DECODER_STATE_DECODING_METADATA,
               "Must have loaded metadata for GetBuffered() to work");

  bool reliable;
  double bytesPerSecond = mDecoder->ComputePlaybackRate(&reliable);
  if (!reliable) {
    // Default to assuming we have enough
    return false;
  }

  MediaResource* stream = mDecoder->GetResource();
  int64_t currentPos = stream->Tell();
  // Byte position we'd need cached to cover aUsecs of playback.
  int64_t requiredPos = currentPos + int64_t((aUsecs/1000000.0)*bytesPerSecond);
  int64_t length = stream->GetLength();
  if (length >= 0) {
    // Never require data beyond the end of the resource.
    requiredPos = std::min(requiredPos, length);
  }

  return stream->GetCachedDataEnd(currentPos) < requiredPos;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::SetFrameBufferLength(uint32_t aLength)
{
  NS_ASSERTION(aLength >= 512 && aLength <= 16384,
               "The length must be between 512 and 16384");
  AssertCurrentThreadInMonitor();
  // Forward the requested framebuffer size to the audio event manager,
  // which slices decoded audio into signal buffers of this length.
  mEventManager->SetSignalBufferLength(aLength);
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Reads the media's metadata (container info, duration, first frame) on the
// decode thread. On success, renders the first video frame, notifies the
// main thread that metadata loaded, and transitions to DECODING; on failure,
// synchronously reports a decode error. May instead park the machine in
// WAIT_FOR_RESOURCES if the reader isn't ready yet.
nsresult MediaDecoderStateMachine::DecodeMetadata()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(mState == DECODER_STATE_DECODING_METADATA,
               "Only call when in metadata decoding state");

  DECODER_LOG(PR_LOG_DEBUG, ("%p Decoding Media Headers", mDecoder.get()));
  nsresult res;
  MediaInfo info;
  MetadataTags* tags;
  {
    // Metadata reading can block on I/O; don't hold the monitor meanwhile.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    res = mReader->ReadMetadata(&info, &tags);
  }
  if (NS_SUCCEEDED(res) && (mState == DECODER_STATE_DECODING_METADATA) && (mReader->IsWaitingMediaResources())) {
    // change state to DECODER_STATE_WAIT_FOR_RESOURCES
    StartWaitForResources();
    return NS_OK;
  }

  mInfo = info;

  if (NS_FAILED(res) || (!info.HasValidMedia())) {
    // Dispatch the event to call DecodeError synchronously. This ensures
    // we're in shutdown state by the time we exit the decode thread.
    // If we just moved to shutdown state here on the decode thread, we may
    // cause the state machine to shutdown/free memory without closing its
    // media stream properly, and we'll get callbacks from the media stream
    // causing a crash. Note the state machine shutdown joins this decode
    // thread during shutdown (and other state machines can run on the state
    // machine thread while the join is waiting), so it's safe to do this
    // synchronously.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    NS_DispatchToMainThread(event, NS_DISPATCH_SYNC);
    return NS_ERROR_FAILURE;
  }
  mDecoder->StartProgressUpdates();
  mGotDurationFromMetaData = (GetDuration() != -1);

  // Find the media start time; this also decodes the first video frame,
  // which we render immediately so the element shows a poster frame.
  VideoData* videoData = FindStartTime();
  if (videoData) {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    RenderVideoFrame(videoData, TimeStamp::Now());
  }

  // Shutdown may have been requested while the monitor was released above.
  if (mState == DECODER_STATE_SHUTDOWN) {
    return NS_ERROR_FAILURE;
  }

  NS_ASSERTION(mStartTime != -1, "Must have start time");
  MOZ_ASSERT((!HasVideo() && !HasAudio()) ||
             !(mMediaSeekable && mTransportSeekable) || mEndTime != -1,
             "Active seekable media should have end time");
  MOZ_ASSERT(!(mMediaSeekable && mTransportSeekable) ||
             GetDuration() != -1, "Seekable media should have duration");
  DECODER_LOG(PR_LOG_DEBUG, ("%p Media goes from %lld to %lld (duration %lld)"
                             " transportSeekable=%d, mediaSeekable=%d",
                             mDecoder.get(), mStartTime, mEndTime, GetDuration(),
                             mTransportSeekable, mMediaSeekable));

  // Inform the element that we've loaded the metadata and the first frame,
  // setting the default framebuffer size for audioavailable events. Also,
  // if there is audio, let the MozAudioAvailable event manager know about
  // the metadata.
  if (HasAudio()) {
    mEventManager->Init(mInfo.mAudio.mChannels, mInfo.mAudio.mRate);
    // Set the buffer length at the decoder level to be able, to be able
    // to retrive the value via media element method. The RequestFrameBufferLength
    // will call the MediaDecoderStateMachine::SetFrameBufferLength().
    uint32_t frameBufferLength = mInfo.mAudio.mChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
    mDecoder->RequestFrameBufferLength(frameBufferLength);
  }

  // Notify the main thread (and DOM) that metadata is available; the runner
  // takes ownership of `tags`.
  nsCOMPtr<nsIRunnable> metadataLoadedEvent =
    new AudioMetadataEventRunner(mDecoder,
                                 mInfo.mAudio.mChannels,
                                 mInfo.mAudio.mRate,
                                 HasAudio(),
                                 HasVideo(),
                                 tags);
  NS_DispatchToMainThread(metadataLoadedEvent, NS_DISPATCH_NORMAL);

  if (mState == DECODER_STATE_DECODING_METADATA) {
    DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state from DECODING_METADATA to DECODING", mDecoder.get()));
    StartDecoding();
  }

  // If the element already wants playback, begin immediately.
  if ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_COMPLETED) &&
      mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
      !IsPlaying())
  {
    StartPlayback();
  }

  return NS_OK;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::DecodeSeek()
|
2011-07-12 03:39:25 +00:00
|
|
|
{
|
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2011-07-12 03:39:25 +00:00
|
|
|
NS_ASSERTION(mState == DECODER_STATE_SEEKING,
|
|
|
|
"Only call when in seeking state");
|
|
|
|
|
2012-04-30 03:12:42 +00:00
|
|
|
mDidThrottleAudioDecoding = false;
|
|
|
|
mDidThrottleVideoDecoding = false;
|
|
|
|
|
2011-07-12 03:39:25 +00:00
|
|
|
// During the seek, don't have a lock on the decoder state,
|
|
|
|
// otherwise long seek operations can block the main thread.
|
|
|
|
// The events dispatched to the main thread are SYNC calls.
|
|
|
|
// These calls are made outside of the decode monitor lock so
|
|
|
|
// it is safe for the main thread to makes calls that acquire
|
|
|
|
// the lock since it won't deadlock. We check the state when
|
|
|
|
// acquiring the lock again in case shutdown has occurred
|
|
|
|
// during the time when we didn't have the lock.
|
2012-08-22 15:56:38 +00:00
|
|
|
int64_t seekTime = mSeekTime;
|
2011-07-12 03:39:25 +00:00
|
|
|
mDecoder->StopProgressUpdates();
|
|
|
|
|
2011-09-29 06:19:26 +00:00
|
|
|
bool currentTimeChanged = false;
|
2012-08-22 15:56:38 +00:00
|
|
|
int64_t mediaTime = GetMediaTime();
|
2011-07-12 03:39:25 +00:00
|
|
|
if (mediaTime != seekTime) {
|
|
|
|
currentTimeChanged = true;
|
2012-05-22 09:56:02 +00:00
|
|
|
// Stop playback now to ensure that while we're outside the monitor
|
|
|
|
// dispatching SeekingStarted, playback doesn't advance and mess with
|
|
|
|
// mCurrentFrameTime that we've setting to seekTime here.
|
|
|
|
StopPlayback();
|
2011-07-12 03:39:25 +00:00
|
|
|
UpdatePlaybackPositionInternal(seekTime);
|
|
|
|
}
|
|
|
|
|
|
|
|
// SeekingStarted will do a UpdateReadyStateForData which will
|
|
|
|
// inform the element and its users that we have no frames
|
|
|
|
// to display
|
|
|
|
{
|
|
|
|
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
|
|
|
|
nsCOMPtr<nsIRunnable> startEvent =
|
2012-11-14 19:46:40 +00:00
|
|
|
NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStarted);
|
2011-07-12 03:39:25 +00:00
|
|
|
NS_DispatchToMainThread(startEvent, NS_DISPATCH_SYNC);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (currentTimeChanged) {
|
|
|
|
// The seek target is different than the current playback position,
|
|
|
|
// we'll need to seek the playback position, so shutdown our decode
|
2013-12-20 00:52:06 +00:00
|
|
|
// thread and audio sink.
|
2011-07-12 03:39:25 +00:00
|
|
|
StopAudioThread();
|
|
|
|
ResetPlayback();
|
|
|
|
nsresult res;
|
|
|
|
{
|
|
|
|
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
|
|
|
|
// Now perform the seek. We must not hold the state machine monitor
|
2011-07-12 03:39:30 +00:00
|
|
|
// while we seek, since the seek reads, which could block on I/O.
|
2011-07-12 03:39:25 +00:00
|
|
|
res = mReader->Seek(seekTime,
|
|
|
|
mStartTime,
|
|
|
|
mEndTime,
|
|
|
|
mediaTime);
|
|
|
|
}
|
2011-07-12 03:39:30 +00:00
|
|
|
if (NS_SUCCEEDED(res)) {
|
2012-09-17 20:45:38 +00:00
|
|
|
AudioData* audio = HasAudio() ? mReader->AudioQueue().PeekFront() : nullptr;
|
2011-07-12 03:39:25 +00:00
|
|
|
NS_ASSERTION(!audio || (audio->mTime <= seekTime &&
|
|
|
|
seekTime <= audio->mTime + audio->mDuration),
|
|
|
|
"Seek target should lie inside the first audio block after seek");
|
2012-08-22 15:56:38 +00:00
|
|
|
int64_t startTime = (audio && audio->mTime < seekTime) ? audio->mTime : seekTime;
|
2011-07-12 03:39:25 +00:00
|
|
|
mAudioStartTime = startTime;
|
|
|
|
mPlayDuration = startTime - mStartTime;
|
|
|
|
if (HasVideo()) {
|
2012-09-17 20:45:38 +00:00
|
|
|
VideoData* video = mReader->VideoQueue().PeekFront();
|
2011-07-12 03:39:25 +00:00
|
|
|
if (video) {
|
2013-10-25 02:44:58 +00:00
|
|
|
NS_ASSERTION((video->mTime <= seekTime && seekTime <= video->GetEndTime()) ||
|
2013-04-12 04:50:04 +00:00
|
|
|
mReader->VideoQueue().IsFinished(),
|
|
|
|
"Seek target should lie inside the first frame after seek, unless it's the last frame.");
|
2011-07-12 03:39:25 +00:00
|
|
|
{
|
|
|
|
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
|
|
|
|
RenderVideoFrame(video, TimeStamp::Now());
|
|
|
|
}
|
|
|
|
nsCOMPtr<nsIRunnable> event =
|
2012-11-14 19:46:40 +00:00
|
|
|
NS_NewRunnableMethod(mDecoder, &MediaDecoder::Invalidate);
|
2011-07-12 03:39:25 +00:00
|
|
|
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
mDecoder->StartProgressUpdates();
|
2013-07-10 13:58:04 +00:00
|
|
|
if (mState == DECODER_STATE_DECODING_METADATA ||
|
|
|
|
mState == DECODER_STATE_DORMANT ||
|
|
|
|
mState == DECODER_STATE_SHUTDOWN) {
|
2011-07-12 03:39:25 +00:00
|
|
|
return;
|
2013-07-10 13:58:04 +00:00
|
|
|
}
|
2011-07-12 03:39:25 +00:00
|
|
|
|
|
|
|
// Try to decode another frame to detect if we're at the end...
|
2013-11-21 03:02:42 +00:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, ("%p Seek completed, mCurrentFrameTime=%lld\n",
|
|
|
|
mDecoder.get(), mCurrentFrameTime));
|
2011-07-12 03:39:25 +00:00
|
|
|
|
|
|
|
// Change state to DECODING or COMPLETED now. SeekingStopped will
|
2012-11-14 19:46:40 +00:00
|
|
|
// call MediaDecoderStateMachine::Seek to reset our state to SEEKING
|
2011-07-12 03:39:25 +00:00
|
|
|
// if we need to seek again.
|
2011-07-12 03:39:34 +00:00
|
|
|
|
2011-07-12 03:39:25 +00:00
|
|
|
nsCOMPtr<nsIRunnable> stopEvent;
|
2012-02-15 04:35:01 +00:00
|
|
|
bool isLiveStream = mDecoder->GetResource()->GetLength() == -1;
|
2012-01-06 06:40:51 +00:00
|
|
|
if (GetMediaTime() == mEndTime && !isLiveStream) {
|
|
|
|
// Seeked to end of media, move to COMPLETED state. Note we don't do
|
|
|
|
// this if we're playing a live stream, since the end of media will advance
|
|
|
|
// once we download more data!
|
2013-11-21 03:02:42 +00:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state from SEEKING (to %lld) to COMPLETED",
|
|
|
|
mDecoder.get(), seekTime));
|
2012-11-14 19:46:40 +00:00
|
|
|
stopEvent = NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStoppedAtEnd);
|
2011-07-12 03:39:25 +00:00
|
|
|
mState = DECODER_STATE_COMPLETED;
|
|
|
|
} else {
|
2013-11-21 03:02:42 +00:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state from SEEKING (to %lld) to DECODING",
|
|
|
|
mDecoder.get(), seekTime));
|
2012-11-14 19:46:40 +00:00
|
|
|
stopEvent = NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStopped);
|
2011-07-12 03:39:25 +00:00
|
|
|
StartDecoding();
|
|
|
|
}
|
|
|
|
{
|
|
|
|
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
|
|
|
|
NS_DispatchToMainThread(stopEvent, NS_DISPATCH_SYNC);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Reset quick buffering status. This ensures that if we began the
|
|
|
|
// seek while quick-buffering, we won't bypass quick buffering mode
|
|
|
|
// if we need to buffer after the seek.
|
2011-09-29 23:34:37 +00:00
|
|
|
mQuickBuffering = false;
|
2011-07-12 03:39:32 +00:00
|
|
|
|
|
|
|
ScheduleStateMachine();
|
2011-07-12 03:39:25 +00:00
|
|
|
}
|
|
|
|
|
2011-07-12 03:39:34 +00:00
|
|
|
// Runnable to dispose of the decoder and state machine on the main thread.
class nsDecoderDisposeEvent : public nsRunnable {
public:
  // Takes over the caller's references to both objects; they are released
  // on the main thread when Run() completes.
  nsDecoderDisposeEvent(already_AddRefed<MediaDecoder> aDecoder,
                        already_AddRefed<MediaDecoderStateMachine> aStateMachine)
    : mDecoder(aDecoder), mStateMachine(aStateMachine) {}
  NS_IMETHOD Run() {
    NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
    // Break the mutual references between the state machine and the decoder
    // so that each can actually be destroyed.
    mStateMachine->ReleaseDecoder();
    mDecoder->ReleaseStateMachine();
    // Drop our own references here, while still on the main thread, so the
    // destructors run on the main thread rather than on whatever thread
    // happens to destroy this runnable.
    mStateMachine = nullptr;
    mDecoder = nullptr;
    return NS_OK;
  }
private:
  nsRefPtr<MediaDecoder> mDecoder;
  nsCOMPtr<MediaDecoderStateMachine> mStateMachine;
};
|
|
|
|
|
|
|
// Runnable which dispatches an event to the main thread to dispose of the
// decoder and state machine. This runs on the state machine thread after
// the state machine has shutdown, and all events for that state machine have
// finished running.
class nsDispatchDisposeEvent : public nsRunnable {
public:
  nsDispatchDisposeEvent(MediaDecoder* aDecoder,
                         MediaDecoderStateMachine* aStateMachine)
    : mDecoder(aDecoder), mStateMachine(aStateMachine) {}
  NS_IMETHOD Run() {
    // Hand our references over to the main-thread disposal event via
    // forget(), so the final Release() of each object happens on the
    // main thread inside nsDecoderDisposeEvent::Run().
    NS_DispatchToMainThread(new nsDecoderDisposeEvent(mDecoder.forget(),
                                                      mStateMachine.forget()));
    return NS_OK;
  }
private:
  nsRefPtr<MediaDecoder> mDecoder;
  nsCOMPtr<MediaDecoderStateMachine> mStateMachine;
};
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Drives one iteration of the decoder state machine. Must be called with
// the decoder monitor held (asserted below); temporarily exits the monitor
// where noted. Returns NS_OK unless the decode thread could not be started.
nsresult MediaDecoderStateMachine::RunStateMachine()
{
  AssertCurrentThreadInMonitor();

  MediaResource* resource = mDecoder->GetResource();
  NS_ENSURE_TRUE(resource, NS_ERROR_NULL_POINTER);

  switch (mState) {
    case DECODER_STATE_SHUTDOWN: {
      if (IsPlaying()) {
        StopPlayback();
      }
      StopAudioThread();
      // If mAudioSink is non-null after StopAudioThread completes, we are
      // running in a nested event loop waiting for Shutdown() on
      // mAudioSink to complete. Return to the event loop and let it
      // finish processing before continuing with shutdown.
      if (mAudioSink) {
        MOZ_ASSERT(mStopAudioThread);
        return NS_OK;
      }
      StopDecodeThread();
      // Now that those threads are stopped, there's no possibility of
      // mPendingWakeDecoder being needed again. Revoke it.
      mPendingWakeDecoder = nullptr;
      {
        // Exit the monitor while releasing reader resources, which may
        // block or call back into code that takes other locks.
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        mReader->ReleaseMediaResources();
      }
      NS_ASSERTION(mState == DECODER_STATE_SHUTDOWN,
                   "How did we escape from the shutdown state?");
      // We must daisy-chain these events to destroy the decoder. We must
      // destroy the decoder on the main thread, but we can't destroy the
      // decoder while this thread holds the decoder monitor. We can't
      // dispatch an event to the main thread to destroy the decoder from
      // here, as the event may run before the dispatch returns, and we
      // hold the decoder monitor here. We also want to guarantee that the
      // state machine is destroyed on the main thread, and so the
      // event runner running this function (which holds a reference to the
      // state machine) needs to finish and be released in order to allow
      // that. So we dispatch an event to run after this event runner has
      // finished and released its monitor/references. That event then will
      // dispatch an event to the main thread to release the decoder and
      // state machine.
      NS_DispatchToCurrentThread(new nsDispatchDisposeEvent(mDecoder, this));
      return NS_OK;
    }

    case DECODER_STATE_DORMANT: {
      // Dormant: stop all activity and release decoder resources, but keep
      // the state machine alive so playback can be resurrected later.
      if (IsPlaying()) {
        StopPlayback();
      }
      StopAudioThread();
      StopDecodeThread();
      // Now that those threads are stopped, there's no possibility of
      // mPendingWakeDecoder being needed again. Revoke it.
      mPendingWakeDecoder = nullptr;
      {
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        mReader->ReleaseMediaResources();
      }
      return NS_OK;
    }

    case DECODER_STATE_WAIT_FOR_RESOURCES: {
      // Nothing to do; an external event moves us out of this state.
      return NS_OK;
    }

    case DECODER_STATE_DECODING_METADATA: {
      // Ensure we have a decode thread to decode metadata.
      return ScheduleDecodeThread();
    }

    case DECODER_STATE_DECODING: {
      if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING &&
          IsPlaying())
      {
        // We're playing, but the element/decoder is in paused state. Stop
        // playing! Note we do this before StopDecodeThread() below because
        // that blocks this state machine's execution, and can cause a
        // perceptible delay between the pause command, and playback actually
        // pausing.
        StopPlayback();
      }

      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !IsPlaying()) {
        // We are playing, but the state machine does not know it yet. Tell it
        // that it is, so that the clock can be properly queried.
        StartPlayback();
      }

      if (IsPausedAndDecoderWaiting()) {
        // The decode buffers are full, and playback is paused. Shutdown the
        // decode thread.
        StopDecodeThread();
        return NS_OK;
      }

      // We're playing and/or our decode buffers aren't full. Ensure we have
      // an active decode thread.
      if (NS_FAILED(ScheduleDecodeThread())) {
        NS_WARNING("Failed to start media decode thread!");
        return NS_ERROR_FAILURE;
      }

      AdvanceFrame();
      NS_ASSERTION(mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING ||
                   IsStateMachineScheduled() ||
                   mPlaybackRate == 0.0, "Must have timer scheduled");
      return NS_OK;
    }

    case DECODER_STATE_BUFFERING: {
      if (IsPausedAndDecoderWaiting()) {
        // The decode buffers are full, and playback is paused. Shutdown the
        // decode thread.
        StopDecodeThread();
        return NS_OK;
      }

      TimeStamp now = TimeStamp::Now();
      NS_ASSERTION(!mBufferingStart.IsNull(), "Must know buffering start time.");

      // We will remain in the buffering state if we've not decoded enough
      // data to begin playback, or if we've not downloaded a reasonable
      // amount of data inside our buffering time.
      TimeDuration elapsed = now - mBufferingStart;
      bool isLiveStream = resource->GetLength() == -1;
      if ((isLiveStream || !mDecoder->CanPlayThrough()) &&
          elapsed < TimeDuration::FromSeconds(mBufferingWait * mPlaybackRate) &&
          (mQuickBuffering ? HasLowDecodedData(QUICK_BUFFERING_LOW_DATA_USECS)
                           : HasLowUndecodedData(mBufferingWait * USECS_PER_S)) &&
          !mDecoder->IsDataCachedToEndOfResource() &&
          !resource->IsSuspended())
      {
        // Still not enough data; re-check again in roughly one second.
        DECODER_LOG(PR_LOG_DEBUG,
                    ("%p Buffering: wait %ds, timeout in %.3lfs %s",
                     mDecoder.get(),
                     mBufferingWait,
                     mBufferingWait - elapsed.ToSeconds(),
                     (mQuickBuffering ? "(quick exit)" : "")));
        ScheduleStateMachine(USECS_PER_S);
        return NS_OK;
      } else {
        DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state from BUFFERING to DECODING", mDecoder.get()));
        DECODER_LOG(PR_LOG_DEBUG, ("%p Buffered for %.3lfs",
                                   mDecoder.get(),
                                   (now - mBufferingStart).ToSeconds()));
        StartDecoding();
      }

      // Notify to allow blocked decoder thread to continue
      mDecoder->GetReentrantMonitor().NotifyAll();
      UpdateReadyState();
      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !IsPlaying())
      {
        StartPlayback();
      }
      NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
      return NS_OK;
    }

    case DECODER_STATE_SEEKING: {
      // Ensure we have a decode thread to perform the seek.
      return ScheduleDecodeThread();
    }

    case DECODER_STATE_COMPLETED: {
      StopDecodeThread();

      if (mState != DECODER_STATE_COMPLETED) {
        // While we're waiting for the decode thread to shutdown, we can
        // change state, for example to seeking or shutdown state.
        // Whatever changed our state should have scheduled another state
        // machine run.
        NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
        return NS_OK;
      }

      // Play the remaining media. We want to run AdvanceFrame() at least
      // once to ensure the current playback position is advanced to the
      // end of the media, and so that we update the readyState.
      if (mState == DECODER_STATE_COMPLETED &&
          (mReader->VideoQueue().GetSize() > 0 ||
           (HasAudio() && !mAudioCompleted) ||
           (mDecoder->GetDecodedStream() && !mDecoder->GetDecodedStream()->IsFinished())))
      {
        AdvanceFrame();
        NS_ASSERTION(mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING ||
                     mPlaybackRate == 0 ||
                     IsStateMachineScheduled(),
                     "Must have timer scheduled");
        return NS_OK;
      }

      // StopPlayback in order to reset the IsPlaying() state so audio
      // is restarted correctly.
      StopPlayback();

      if (mState != DECODER_STATE_COMPLETED) {
        // While we're presenting a frame we can change state. Whatever changed
        // our state should have scheduled another state machine run.
        NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
        return NS_OK;
      }

      StopAudioThread();
      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING) {
        // Clamp the final playback position to the media end time (or the
        // last video frame's end time, whichever is later) and notify the
        // main thread that playback has ended.
        int64_t videoTime = HasVideo() ? mVideoFrameEndTime : 0;
        int64_t clockTime = std::max(mEndTime, videoTime);
        UpdatePlaybackPosition(clockTime);
        nsCOMPtr<nsIRunnable> event =
          NS_NewRunnableMethod(mDecoder, &MediaDecoder::PlaybackEnded);
        NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
      }
      return NS_OK;
    }
  }

  return NS_OK;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::RenderVideoFrame(VideoData* aData,
|
2011-06-23 22:08:54 +00:00
|
|
|
TimeStamp aTarget)
|
2010-04-02 03:03:07 +00:00
|
|
|
{
|
2011-07-12 03:39:23 +00:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2011-04-29 19:21:57 +00:00
|
|
|
mDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
|
2010-04-02 03:03:07 +00:00
|
|
|
|
|
|
|
if (aData->mDuplicate) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2012-11-30 13:17:54 +00:00
|
|
|
if (!PR_GetEnv("MOZ_QUIET")) {
|
2013-11-21 03:02:42 +00:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder playing video frame %lld",
|
|
|
|
mDecoder.get(), aData->mTime));
|
2012-11-30 13:17:54 +00:00
|
|
|
}
|
2012-04-30 03:12:42 +00:00
|
|
|
|
2012-02-15 04:35:01 +00:00
|
|
|
VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
|
|
|
|
if (container) {
|
|
|
|
container->SetCurrentFrame(aData->mDisplay, aData->mImage, aTarget);
|
2010-04-02 03:03:07 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-08-22 15:56:38 +00:00
|
|
|
// Returns the current audio playback position in microseconds (in media
// time, i.e. offset by mAudioStartTime), or -1 when there is no usable
// audio clock. Must be called with the decoder monitor held.
int64_t
MediaDecoderStateMachine::GetAudioClock()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  // We must hold the decoder monitor while using the audio stream off the
  // audio sink to ensure that it doesn't get destroyed on the audio sink
  // while we're using it.
  AssertCurrentThreadInMonitor();
  // No audio track, or audio is being captured elsewhere: no audio clock.
  if (!HasAudio() || mAudioCaptured) {
    return -1;
  }
  // Audio sink hasn't played any data yet.
  if (!mAudioSink) {
    return mAudioStartTime;
  }
  int64_t sinkPosition = mAudioSink->GetPosition();
  if (sinkPosition == -1) {
    return -1;
  }
  return sinkPosition + mAudioStartTime;
}
|
|
|
|
|
2012-11-22 10:38:28 +00:00
|
|
|
// Computes the playback position (microseconds, media time) from the system
// clock, scaled by the current playback rate. Used when no audio clock is
// available. Must be called with the decoder monitor held.
int64_t MediaDecoderStateMachine::GetVideoStreamPosition()
{
  AssertCurrentThreadInMonitor();

  // When paused, the position is simply what we have played so far.
  if (!IsPlaying()) {
    return mPlayDuration + mStartTime;
  }

  // The playbackRate has been just been changed, reset the playstartTime.
  if (mResetPlayStartTime) {
    SetPlayStartTime(TimeStamp::Now());
    mResetPlayStartTime = false;
  }

  // Wall-clock time elapsed since playback (re)started, plus prior progress,
  // measured relative to the base position set at the last rate change.
  int64_t delta = DurationToUsecs(TimeStamp::Now() - mPlayStartTime)
                  + mPlayDuration
                  - mBasePosition;
  NS_ASSERTION(delta >= 0, "Video stream position should be positive.");
  // Scale only the portion after the base position by the playback rate.
  return mBasePosition + delta * mPlaybackRate + mStartTime;
}
|
|
|
|
|
|
|
|
// Returns the current playback clock in microseconds (media time).
// Prefers, in order: the paused position, the decoded MediaStream's output
// time, the audio clock, then the system clock. NOTE: as a side effect this
// resyncs mPlayDuration and the play start time against the audio clock
// while the audio clock is authoritative.
int64_t MediaDecoderStateMachine::GetClock() {
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  AssertCurrentThreadInMonitor();

  // Determine the clock time. If we've got audio, and we've not reached
  // the end of the audio, use the audio clock. However if we've finished
  // audio, or don't have audio, use the system clock. If our output is being
  // fed to a MediaStream, use that stream as the source of the clock.
  int64_t clock_time = -1;
  DecodedStreamData* stream = mDecoder->GetDecodedStream();
  if (!IsPlaying()) {
    clock_time = mPlayDuration + mStartTime;
  } else if (stream) {
    // Output is captured to a MediaStream: derive the clock from how far
    // the stream has advanced past the recorded sync point.
    NS_ASSERTION(mSyncPointInDecodedStream >= 0, "Should have set up sync point");
    StreamTime streamDelta = stream->GetLastOutputTime() - mSyncPointInMediaStream;
    clock_time = mSyncPointInDecodedStream + MediaTimeToMicroseconds(streamDelta);
  } else {
    int64_t audio_time = GetAudioClock();
    if (HasAudio() && !mAudioCompleted && audio_time != -1) {
      clock_time = audio_time;
      // Resync against the audio clock, while we're trusting the
      // audio clock. This ensures no "drift", particularly on Linux.
      mPlayDuration = clock_time - mStartTime;
      SetPlayStartTime(TimeStamp::Now());
    } else {
      // Audio is disabled on this system. Sync to the system clock.
      clock_time = GetVideoStreamPosition();
      // Ensure the clock can never go backwards.
      NS_ASSERTION(mCurrentFrameTime <= clock_time || mPlaybackRate <= 0,
                   "Clock should go forwards if the playback rate is > 0.");
    }
  }
  return clock_time;
}
|
|
|
|
|
|
|
|
// Advances playback: pops video frames whose presentation time has passed,
// renders the most recent one, updates the playback position, and schedules
// the next state machine run. May switch to buffering if decoded data runs
// low. Must be called with the decoder monitor held (exited only around
// RenderVideoFrame).
void MediaDecoderStateMachine::AdvanceFrame()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(!HasAudio() || mAudioStartTime != -1,
               "Should know audio start time if we have audio.");

  if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING) {
    return;
  }

  // If playbackRate is 0.0, we should stop the progress, but not be in paused
  // state, per spec.
  if (mPlaybackRate == 0.0) {
    return;
  }

  int64_t clock_time = GetClock();
  // Skip frames up to the frame at the playback position, and figure out
  // the time remaining until it's time to display the next frame.
  int64_t remainingTime = AUDIO_DURATION_USECS;
  NS_ASSERTION(clock_time >= mStartTime, "Should have positive clock time.");
  nsAutoPtr<VideoData> currentFrame;
#ifdef PR_LOGGING
  int32_t droppedFrames = 0;
#endif
  if (mReader->VideoQueue().GetSize() > 0) {
    VideoData* frame = mReader->VideoQueue().PeekFront();
    // Pop every frame whose time is at or before the clock; the last one
    // popped becomes the frame to render. In real-time mode every frame is
    // consumed unconditionally.
    while (mRealTime || clock_time >= frame->mTime) {
      mVideoFrameEndTime = frame->GetEndTime();
      currentFrame = frame;
#ifdef PR_LOGGING
      if (!PR_GetEnv("MOZ_QUIET")) {
        DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder discarding video frame %lld", mDecoder.get(), frame->mTime));
        if (droppedFrames++) {
          DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder discarding video frame %lld (%d so far)",
                                     mDecoder.get(), frame->mTime, droppedFrames - 1));
        }
      }
#endif
      mReader->VideoQueue().PopFront();
      // Notify the decode thread that the video queue's buffers may have
      // free'd up space for more frames.
      mDecoder->GetReentrantMonitor().NotifyAll();
      OnPlaybackOffsetUpdate(frame->mOffset);
      if (mReader->VideoQueue().GetSize() == 0)
        break;
      frame = mReader->VideoQueue().PeekFront();
    }
    // Current frame has already been presented, wait until it's time to
    // present the next frame.
    if (frame && !currentFrame) {
      int64_t now = IsPlaying() ? clock_time : mPlayDuration;

      remainingTime = frame->mTime - now;
    }
  }

  // Check to see if we don't have enough data to play up to the next frame.
  // If we don't, switch to buffering mode.
  MediaResource* resource = mDecoder->GetResource();
  if (mState == DECODER_STATE_DECODING &&
      mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
      HasLowDecodedData(remainingTime + EXHAUSTED_DATA_MARGIN_USECS) &&
      !mDecoder->IsDataCachedToEndOfResource() &&
      !resource->IsSuspended()) {
    if (JustExitedQuickBuffering() || HasLowUndecodedData()) {
      // Return the popped frame to the queue so it is presented once
      // buffering completes.
      if (currentFrame) {
        mReader->VideoQueue().PushFront(currentFrame.forget());
      }
      StartBuffering();
      ScheduleStateMachine();
      return;
    }
  }

  // We've got enough data to keep playing until at least the next frame.
  // Start playing now if need be.
  if (!IsPlaying() && ((mFragmentEndTime >= 0 && clock_time < mFragmentEndTime) || mFragmentEndTime < 0)) {
    StartPlayback();
  }

  if (currentFrame) {
    // Decode one frame and display it.
    TimeStamp presTime = mPlayStartTime - UsecsToDuration(mPlayDuration) +
                         UsecsToDuration(currentFrame->mTime - mStartTime);
    NS_ASSERTION(currentFrame->mTime >= mStartTime, "Should have positive frame time");
    {
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      // If we have video, we want to increment the clock in steps of the frame
      // duration.
      RenderVideoFrame(currentFrame, presTime);
    }
    // If we're no longer playing after dropping and reacquiring the lock,
    // playback must've been stopped on the decode thread (by a seek, for
    // example). In that case, the current frame is probably out of date.
    if (!IsPlaying()) {
      ScheduleStateMachine();
      return;
    }
    MediaDecoder::FrameStatistics& frameStats = mDecoder->GetFrameStatistics();
    frameStats.NotifyPresentedFrame();
    double frameDelay = double(clock_time - currentFrame->mTime) / USECS_PER_S;
    NS_ASSERTION(frameDelay >= 0.0, "Frame should never be displayed early.");
    frameStats.NotifyFrameDelay(frameDelay);
    remainingTime = currentFrame->GetEndTime() - clock_time;
    currentFrame = nullptr;
  }

  // Cap the current time to the larger of the audio and video end time.
  // This ensures that if we're running off the system clock, we don't
  // advance the clock to after the media end time.
  if (mVideoFrameEndTime != -1 || mAudioEndTime != -1) {
    // These will be non -1 if we've displayed a video frame, or played an audio frame.
    clock_time = std::min(clock_time, std::max(mVideoFrameEndTime, mAudioEndTime));
    if (clock_time > GetMediaTime()) {
      // Only update the playback position if the clock time is greater
      // than the previous playback position. The audio clock can
      // sometimes report a time less than its previously reported in
      // some situations, and we need to gracefully handle that.
      UpdatePlaybackPosition(clock_time);
    }
  }

  // If the number of audio/video frames queued has changed, either by
  // this function popping and playing a video frame, or by the audio
  // thread popping and playing an audio frame, we may need to update our
  // ready state. Post an update to do so.
  UpdateReadyState();

  ScheduleStateMachine(remainingTime);
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Determines the media's start time via the reader, updating mStartTime,
// mEndTime (when the duration came from metadata/headers) and
// mAudioStartTime. Returns the first video frame found, if any. Must be
// called on the decode thread with the decoder monitor held.
VideoData* MediaDecoderStateMachine::FindStartTime()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  AssertCurrentThreadInMonitor();

  mStartTime = 0;
  int64_t detectedStart = 0;
  VideoData* firstVideoFrame = nullptr;
  {
    // The reader may block on I/O; drop the monitor while it works.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    firstVideoFrame = mReader->FindStartTime(detectedStart);
  }

  if (detectedStart != 0) {
    mStartTime = detectedStart;
    if (mGotDurationFromMetaData) {
      NS_ASSERTION(mEndTime != -1,
                   "We should have mEndTime as supplied duration here");
      // We were specified a duration from a Content-Duration HTTP header.
      // Adjust mEndTime so that mEndTime-mStartTime matches the specified
      // duration.
      mEndTime += mStartTime;
    }
  }

  // Set the audio start time to be start of media. If this lies before the
  // first actual audio frame we have, we'll inject silence during playback
  // to ensure the audio starts at the correct time.
  mAudioStartTime = mStartTime;
  DECODER_LOG(PR_LOG_DEBUG, ("%p Media start time is %lld", mDecoder.get(), mStartTime));
  return firstVideoFrame;
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::UpdateReadyState() {
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-02 03:03:07 +00:00
|
|
|
|
2012-12-19 04:48:32 +00:00
|
|
|
MediaDecoderOwner::NextFrameStatus nextFrameStatus = GetNextFrameStatus();
|
|
|
|
if (nextFrameStatus == mLastFrameStatus) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
mLastFrameStatus = nextFrameStatus;
|
|
|
|
|
2013-06-17 05:15:32 +00:00
|
|
|
/* This is a bit tricky. MediaDecoder::UpdateReadyStateForData will run on
|
|
|
|
* the main thread and re-evaluate GetNextFrameStatus there, passing it to
|
|
|
|
* HTMLMediaElement::UpdateReadyStateForData. It doesn't use the value of
|
|
|
|
* GetNextFrameStatus we computed here, because what we're computing here
|
|
|
|
* could be stale by the time MediaDecoder::UpdateReadyStateForData runs.
|
|
|
|
* We only compute GetNextFrameStatus here to avoid posting runnables to the main
|
|
|
|
* thread unnecessarily.
|
|
|
|
*/
|
2010-04-02 03:03:07 +00:00
|
|
|
nsCOMPtr<nsIRunnable> event;
|
2013-06-17 05:15:32 +00:00
|
|
|
event = NS_NewRunnableMethod(mDecoder, &MediaDecoder::UpdateReadyStateForData);
|
2010-04-02 03:03:07 +00:00
|
|
|
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::JustExitedQuickBuffering()
|
2011-03-23 22:28:57 +00:00
|
|
|
{
|
|
|
|
return !mDecodeStartTime.IsNull() &&
|
|
|
|
mQuickBuffering &&
|
2012-09-18 18:23:59 +00:00
|
|
|
(TimeStamp::Now() - mDecodeStartTime) < TimeDuration::FromMicroseconds(QUICK_BUFFER_THRESHOLD_USECS);
|
2011-03-23 22:28:57 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Transitions the state machine into DECODER_STATE_BUFFERING: stops playback,
// decides whether this is a "quick" buffering episode, records the buffering
// start time, and notifies the element via UpdateReadyState().
void MediaDecoderStateMachine::StartBuffering()
{
  AssertCurrentThreadInMonitor();

  // Pause playback while we refill our buffers.
  if (IsPlaying()) {
    StopPlayback();
  }

  // How long the current decode episode has been running; used both for the
  // quick-buffering decision and for logging below.
  TimeDuration decodeDuration = TimeStamp::Now() - mDecodeStartTime;
  // Go into quick buffering mode provided we've not just left buffering using
  // a "quick exit". This stops us flip-flopping between playing and buffering
  // when the download speed is similar to the decode speed.
  mQuickBuffering =
    !JustExitedQuickBuffering() &&
    decodeDuration < UsecsToDuration(QUICK_BUFFER_THRESHOLD_USECS);
  mBufferingStart = TimeStamp::Now();

  // We need to tell the element that buffering has started.
  // We can't just directly send an asynchronous runnable that
  // eventually fires the "waiting" event. The problem is that
  // there might be pending main-thread events, such as "data
  // received" notifications, that mean we're not actually still
  // buffering by the time this runnable executes. So instead
  // we just trigger UpdateReadyStateForData; when it runs, it
  // will check the current state and decide whether to tell
  // the element we're buffering or not.
  UpdateReadyState();
  mState = DECODER_STATE_BUFFERING;
  DECODER_LOG(PR_LOG_DEBUG, ("%p Changed state from DECODING to BUFFERING, decoded for %.3lfs",
                             mDecoder.get(), decodeDuration.ToSeconds()));
  // stats is only referenced inside DECODER_LOG, which is compiled out when
  // PR_LOGGING is disabled, so its declaration is guarded too.
#ifdef PR_LOGGING
  MediaDecoder::Statistics stats = mDecoder->GetStatistics();
#endif
  DECODER_LOG(PR_LOG_DEBUG, ("%p Playback rate: %.1lfKB/s%s download rate: %.1lfKB/s%s",
                             mDecoder.get(),
                             stats.mPlaybackRate/1024, stats.mPlaybackRateReliable ? "" : " (unreliable)",
                             stats.mDownloadRate/1024, stats.mDownloadRateReliable ? "" : " (unreliable)"));
}
|
2011-03-23 22:28:58 +00:00
|
|
|
|
2013-11-18 04:22:47 +00:00
|
|
|
// Fills aBuffered with the time ranges the reader reports as buffered,
// offset by mStartTime. The resource is pinned for the duration of the call
// so its cached data cannot be evicted while the reader inspects it.
nsresult MediaDecoderStateMachine::GetBuffered(dom::TimeRanges* aBuffered) {
  MediaResource* const resource = mDecoder->GetResource();
  NS_ENSURE_TRUE(resource, NS_ERROR_FAILURE);
  resource->Pin();
  const nsresult rv = mReader->GetBuffered(aBuffered, mStartTime);
  resource->Unpin();
  return rv;
}
|
2011-07-12 03:39:32 +00:00
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
bool MediaDecoderStateMachine::IsPausedAndDecoderWaiting() {
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2011-07-12 03:39:37 +00:00
|
|
|
NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
|
|
|
|
|
|
|
|
return
|
|
|
|
mDecodeThreadWaiting &&
|
2012-11-14 19:46:40 +00:00
|
|
|
mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING &&
|
2011-07-12 03:39:37 +00:00
|
|
|
(mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING);
|
|
|
|
}
|
|
|
|
|
2013-12-20 00:52:06 +00:00
|
|
|
void MediaDecoderStateMachine::SetPlayStartTime(const TimeStamp& aTimeStamp)
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
mPlayStartTime = aTimeStamp;
|
|
|
|
if (!mAudioSink) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (!mPlayStartTime.IsNull()) {
|
|
|
|
mAudioSink->StartPlayback();
|
|
|
|
} else {
|
|
|
|
mAudioSink->StopPlayback();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// nsIRunnable entry point: acquires the decoder monitor and runs one cycle
// of the state machine via CallRunStateMachine().
nsresult MediaDecoderStateMachine::Run()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");

  return CallRunStateMachine();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Runs one cycle of RunStateMachine() with the re-entry bookkeeping flags
// (mRunAgain, mDispatchedRunEvent, mIsRunning) reset/maintained around it.
// If ScheduleStateMachine() was called during the cycle, a follow-up run
// event is dispatched to the current thread. Caller must hold the monitor.
nsresult MediaDecoderStateMachine::CallRunStateMachine()
{
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  // This will be set to true by ScheduleStateMachine() if it's called
  // while we're in RunStateMachine().
  mRunAgain = false;

  // Set to true whenever we dispatch an event to run this state machine.
  // This flag prevents us from dispatching multiple run events at once.
  mDispatchedRunEvent = false;

  // If audio is being captured, stop the audio sink if it's running
  if (mAudioCaptured) {
    StopAudioThread();
  }

  // Clear the pending-timer deadline; this cycle supersedes it.
  mTimeout = TimeStamp();

  mIsRunning = true;
  nsresult res = RunStateMachine();
  mIsRunning = false;

  // ScheduleStateMachine() asked for another cycle while we were running;
  // dispatch exactly one follow-up event unless one is already queued.
  if (mRunAgain && !mDispatchedRunEvent) {
    mDispatchedRunEvent = true;
    return NS_DispatchToCurrentThread(this);
  }

  return res;
}
|
|
|
|
|
|
|
|
// nsITimer callback: the closure is the MediaDecoderStateMachine that armed
// the timer; forward the expiry notification to it.
static void TimeoutExpired(nsITimer *aTimer, void *aClosure) {
  MediaDecoderStateMachine* stateMachine =
    static_cast<MediaDecoderStateMachine*>(aClosure);
  NS_ASSERTION(stateMachine, "Must have been passed state machine");
  stateMachine->TimeoutExpired();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
void MediaDecoderStateMachine::TimeoutExpired()
|
2011-07-12 03:39:34 +00:00
|
|
|
{
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
NS_ASSERTION(OnStateMachineThread(), "Must be on state machine thread");
|
|
|
|
if (mIsRunning) {
|
2011-09-29 23:34:37 +00:00
|
|
|
mRunAgain = true;
|
2011-07-12 03:39:34 +00:00
|
|
|
} else if (!mDispatchedRunEvent) {
|
2011-09-27 00:25:41 +00:00
|
|
|
// We don't have an event dispatched to run the state machine, so we
|
|
|
|
// can just run it from here.
|
|
|
|
CallRunStateMachine();
|
2011-07-12 03:39:34 +00:00
|
|
|
}
|
|
|
|
// Otherwise, an event has already been dispatched to run the state machine
|
|
|
|
// as soon as possible. Nothing else needed to do, the state machine is
|
|
|
|
// going to run anyway.
|
2011-07-12 03:39:32 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Acquires the decoder monitor, wakes any threads waiting on it (e.g. the
// decode thread), and schedules the state machine to run as soon as possible.
void MediaDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder() {
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  mon.NotifyAll();
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Schedules the state machine to run aUsecs microseconds from now (clamped to
// >= 0). Requests are coalesced: a request that would fire at or after an
// already-pending deadline is a no-op; an earlier request cancels the pending
// timer. A zero-delay request is satisfied by flagging the running cycle,
// dispatching a run event, or relying on one already dispatched. Returns
// NS_ERROR_FAILURE if the machine is shutting down. Caller holds the monitor.
nsresult MediaDecoderStateMachine::ScheduleStateMachine(int64_t aUsecs) {
  AssertCurrentThreadInMonitor();
  NS_ABORT_IF_FALSE(GetStateMachineThread(),
                    "Must have a state machine thread to schedule");

  if (mState == DECODER_STATE_SHUTDOWN) {
    return NS_ERROR_FAILURE;
  }
  // Negative delays are treated as "run as soon as possible".
  aUsecs = std::max<int64_t>(aUsecs, 0);

  TimeStamp timeout = TimeStamp::Now() + UsecsToDuration(aUsecs);
  if (!mTimeout.IsNull()) {
    if (timeout >= mTimeout) {
      // We've already scheduled a timer set to expire at or before this time,
      // or have an event dispatched to run the state machine.
      return NS_OK;
    }
    if (mTimer) {
      // We've been asked to schedule a timer to run before an existing timer.
      // Cancel the existing timer.
      mTimer->Cancel();
    }
  }

  uint32_t ms = static_cast<uint32_t>((aUsecs / USECS_PER_MS) & 0xFFFFFFFF);
  // Real-time (e.g. live) streams are capped to a 40ms wakeup interval.
  if (mRealTime && ms > 40)
    ms = 40;
  if (ms == 0) {
    if (mIsRunning) {
      // We're currently running this state machine on the state machine
      // thread. Signal it to run again once it finishes its current cycle.
      mRunAgain = true;
      return NS_OK;
    } else if (!mDispatchedRunEvent) {
      // We're not currently running this state machine on the state machine
      // thread. Dispatch an event to run one cycle of the state machine.
      mDispatchedRunEvent = true;
      return GetStateMachineThread()->Dispatch(this, NS_DISPATCH_NORMAL);
    }
    // We're not currently running this state machine on the state machine
    // thread, but something has already dispatched an event to run it again,
    // so just exit; it's going to run real soon.
    return NS_OK;
  }

  // Record the deadline so later calls can coalesce against it.
  mTimeout = timeout;

  nsresult res;
  if (!mTimer) {
    // Lazily create the timer the first time a delayed run is needed, and
    // make it fire on the state machine thread.
    mTimer = do_CreateInstance("@mozilla.org/timer;1", &res);
    if (NS_FAILED(res)) return res;
    mTimer->SetTarget(GetStateMachineThread());
  }

  // mozilla::TimeoutExpired (the static helper above) forwards back to
  // this->TimeoutExpired().
  res = mTimer->InitWithFuncCallback(mozilla::TimeoutExpired,
                                     this,
                                     ms,
                                     nsITimer::TYPE_ONE_SHOT);
  return res;
}
|
2011-11-08 01:38:17 +00:00
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Returns true if the calling thread is the shared state machine thread.
bool MediaDecoderStateMachine::OnStateMachineThread() const
{
  return IsCurrentThread(GetStateMachineThread());
}
|
2012-11-14 19:45:33 +00:00
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Returns the global state machine thread shared by all decoder state
// machines, owned by the StateMachineTracker singleton.
nsIThread* MediaDecoderStateMachine::GetStateMachineThread()
{
  return StateMachineTracker::Instance().GetGlobalStateMachineThread();
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Forwards the audio-available listener notification to the event manager.
// Caller must hold the decoder monitor.
void MediaDecoderStateMachine::NotifyAudioAvailableListener()
{
  AssertCurrentThreadInMonitor();
  mEventManager->NotifyAudioAvailableListener();
}
|
2012-11-06 22:33:01 +00:00
|
|
|
|
2012-11-22 10:38:28 +00:00
|
|
|
// Changes the playback rate (main thread only). A rate of 0 must be handled
// by the caller. For audio-less media, re-baselines the clock bookkeeping
// (mBasePosition/mPlayDuration/play start time) so the video clock continues
// smoothly from the current position at the new rate. Propagates the rate to
// the audio sink when one exists.
void MediaDecoderStateMachine::SetPlaybackRate(double aPlaybackRate)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  NS_ASSERTION(aPlaybackRate != 0,
      "PlaybackRate == 0 should be handled before this function.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // We don't currently support more than two channels when changing playback
  // rate.
  if (mInfo.mAudio.mChannels > 2) {
    return;
  }

  // No-op if the rate is unchanged.
  if (mPlaybackRate == aPlaybackRate) {
    return;
  }

  // Get position of the last time we changed the rate.
  if (!HasAudio()) {
    // mBasePosition is a position in the video stream, not an absolute time.
    if (mState == DECODER_STATE_SEEKING) {
      mBasePosition = mSeekTime - mStartTime;
    } else {
      mBasePosition = GetVideoStreamPosition();
    }
    mPlayDuration = mBasePosition;
    mResetPlayStartTime = true;
    SetPlayStartTime(TimeStamp::Now());
  }

  mPlaybackRate = aPlaybackRate;
  if (mAudioSink) {
    mAudioSink->SetPlaybackRate(mPlaybackRate);
  }
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetPreservesPitch(bool aPreservesPitch)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
|
|
|
|
mPreservesPitch = aPreservesPitch;
|
2013-12-20 00:52:06 +00:00
|
|
|
if (mAudioSink) {
|
|
|
|
mAudioSink->SetPreservesPitch(mPreservesPitch);
|
|
|
|
}
|
2012-11-22 10:38:28 +00:00
|
|
|
}
|
|
|
|
|
2012-11-14 19:46:40 +00:00
|
|
|
// Returns true once the state machine has entered DECODER_STATE_SHUTDOWN.
// Caller must hold the decoder monitor.
bool MediaDecoderStateMachine::IsShutdown()
{
  AssertCurrentThreadInMonitor();
  return GetState() == DECODER_STATE_SHUTDOWN;
}
|
|
|
|
|
2012-12-27 15:21:30 +00:00
|
|
|
void MediaDecoderStateMachine::QueueMetadata(int64_t aPublishTime,
|
|
|
|
int aChannels,
|
|
|
|
int aRate,
|
|
|
|
bool aHasAudio,
|
|
|
|
bool aHasVideo,
|
|
|
|
MetadataTags* aTags)
|
2012-11-30 13:17:54 +00:00
|
|
|
{
|
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
|
2013-11-03 22:11:09 +00:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-11-30 13:17:54 +00:00
|
|
|
TimedMetadata* metadata = new TimedMetadata;
|
|
|
|
metadata->mPublishTime = aPublishTime;
|
|
|
|
metadata->mChannels = aChannels;
|
|
|
|
metadata->mRate = aRate;
|
|
|
|
metadata->mHasAudio = aHasAudio;
|
2013-08-16 09:57:17 +00:00
|
|
|
metadata->mHasVideo = aHasVideo;
|
2012-11-30 13:17:54 +00:00
|
|
|
metadata->mTags = aTags;
|
|
|
|
mMetadataManager.QueueMetadata(metadata);
|
|
|
|
}
|
|
|
|
|
2013-12-20 00:52:06 +00:00
|
|
|
// Records the end time (usecs) of the most recently pushed audio; asserts
// the value is monotonically non-decreasing. Takes the monitor itself, so
// callers (e.g. the audio sink) need not hold it.
void MediaDecoderStateMachine::OnAudioEndTimeUpdate(int64_t aAudioEndTime)
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  MOZ_ASSERT(aAudioEndTime >= mAudioEndTime);
  mAudioEndTime = aAudioEndTime;
}
|
|
|
|
|
|
|
|
// Forwards the latest playback byte offset to the decoder.
void MediaDecoderStateMachine::OnPlaybackOffsetUpdate(int64_t aPlaybackOffset)
{
  mDecoder->UpdatePlaybackOffset(aPlaybackOffset);
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::OnAudioSinkComplete()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
if (mAudioCaptured) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
mAudioCompleted = true;
|
|
|
|
UpdateReadyState();
|
|
|
|
// Kick the decode thread; it may be sleeping waiting for this to finish.
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
|
|
|
}
|
|
|
|
|
2012-11-14 19:45:33 +00:00
|
|
|
} // namespace mozilla
|
|
|
|
|