Bug 1257107 - Discard decoded data if its pts is smaller than seek time. r=jya

--HG--
extra : rebase_source : 2f17f7b3b70f774e03322dbc63727b0794279f91
Alfredo Yang 2016-05-17 21:46:00 -04:00
parent d6e3286232
commit 3fed340de9
24 changed files with 252 additions and 2 deletions


@ -1631,6 +1631,11 @@ MediaFormatReader::OnVideoSeekCompleted(media::TimeUnit aTime)
LOGV("Video seeked to %lld", aTime.ToMicroseconds());
mVideo.mSeekRequest.Complete();
if (mVideo.mDecoder) {
auto& decoder = GetDecoderData(TrackInfo::kVideoTrack);
decoder.mDecoder->SetSeekThreshold(mPendingSeekTime.ref());
}
if (HasAudio() && !mOriginalSeekTarget.IsVideoOnly()) {
MOZ_ASSERT(mPendingSeekTime.isSome());
if (mOriginalSeekTarget.IsFast()) {


@ -229,6 +229,13 @@ public:
// Only return a static const string, as the information may be accessed
// in a non thread-safe fashion.
virtual const char* GetDescriptionName() const = 0;
// Set a hint of the seek target time to the decoder. The decoder will drop
// any decoded data whose pts is smaller than this value. This threshold
// needs to be cleared after the decoder is reset.
// The decoder may not honor this value; however, video decoders should
// implement this API to improve seek performance.
virtual void SetSeekThreshold(const media::TimeUnit& aTime) {}
};
} // namespace mozilla
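
The new SetSeekThreshold() comment above states the contract that each backend in this patch implements: remember the seek target, drop decoded output whose pts falls below it, stop dropping at the first frame at or past the target, and clear the threshold whenever the decoder is flushed or reset. A minimal stand-alone sketch of that contract follows; it is illustrative only, with ToyDecoder, std::optional and raw microseconds standing in for the real decoders, mozilla::Maybe and media::TimeUnit, and with threading ignored (the real backends serialize access on a task queue or a monitor).

// --- Illustrative sketch, not part of the patch ----------------------------
#include <cstdint>
#include <optional>
#include <vector>

struct Frame { int64_t ptsUs; };  // presentation timestamp in microseconds

class ToyDecoder {
public:
  // Hint: frames decoded with a pts below this value will be discarded.
  void SetSeekThreshold(int64_t aTimeUs) { mSeekThresholdUs = aTimeUs; }

  // Flush/reset must clear the hint so a stale target cannot drop frames
  // from a later, unrelated decode.
  void Flush() { mSeekThresholdUs.reset(); }

  // Output path: keep only frames at or past the target, and stop filtering
  // as soon as the first such frame is seen.
  std::vector<Frame> Output(const std::vector<Frame>& aDecoded) {
    std::vector<Frame> kept;
    for (const Frame& f : aDecoded) {
      if (mSeekThresholdUs && f.ptsUs < *mSeekThresholdUs) {
        continue;                // before the seek target: drop it
      }
      mSeekThresholdUs.reset();  // reached the target: deliver everything from here on
      kept.push_back(f);
    }
    return kept;
  }

private:
  std::optional<int64_t> mSeekThresholdUs;
};

MediaFormatReader (first hunk above) supplies the threshold from mPendingSeekTime once the video seek completes, which is why the hunks below touch only the video decoders.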


@ -86,6 +86,13 @@ nsresult
VPXDecoder::Flush()
{
mTaskQueue->Flush();
RefPtr<VPXDecoder> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([self] () {
self->mSeekTargetThreshold.reset();
});
mTaskQueue->Dispatch(r.forget());
return NS_OK;
}
@ -118,6 +125,13 @@ VPXDecoder::DoDecodeFrame(MediaRawData* aSample)
img->fmt == VPX_IMG_FMT_I444,
"WebM image format not I420 or I444");
if (mSeekTargetThreshold.isSome()) {
if (aSample->mTime < mSeekTargetThreshold.ref().ToMicroseconds()) {
continue;
}
mSeekTargetThreshold.reset();
}
// Chroma shifts are rounded down as per the decoding examples in the SDK
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = img->planes[0];
@ -217,5 +231,15 @@ VPXDecoder::IsVPX(const nsACString& aMimeType, uint8_t aCodecMask)
aMimeType.EqualsLiteral("video/webm; codecs=vp9"));
}
void
VPXDecoder::SetSeekThreshold(const media::TimeUnit& aTime)
{
RefPtr<VPXDecoder> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([self, aTime] () {
self->mSeekTargetThreshold = Some(aTime);
});
mTaskQueue->Dispatch(r.forget());
}
} // namespace mozilla
#undef LOG
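
The libvpx hunks above also show how most backends keep the new member thread-safe without a lock: SetSeekThreshold() and Flush() post a task to the decoder's own task queue, so mSeekTargetThreshold is only touched on that queue. The FFmpeg, OMX and WMF hunks below follow the same pattern, while the Android and Apple backends read the value from a codec output path on another thread and therefore guard it with mMonitor instead. A stand-alone sketch of the task-queue variant, with a toy SerialQueue standing in for mozilla's TaskQueue:

// --- Illustrative sketch, not part of the patch ----------------------------
#include <cstdint>
#include <deque>
#include <functional>
#include <optional>

class SerialQueue {  // toy stand-in for the decoder's TaskQueue
public:
  void Dispatch(std::function<void()> aTask) { mTasks.push_back(std::move(aTask)); }
  void DrainForTest() {  // the real queue runs tasks on its own thread
    while (!mTasks.empty()) {
      auto task = std::move(mTasks.front());
      mTasks.pop_front();
      task();
    }
  }
private:
  std::deque<std::function<void()>> mTasks;
};

class QueueSerializedDecoder {
public:
  explicit QueueSerializedDecoder(SerialQueue& aQueue) : mQueue(aQueue) {}

  // Called from the reader thread: hop onto the decoder queue before touching
  // the threshold. (The real code captures a RefPtr to keep the decoder alive
  // until the task runs; a raw `this` is used here for brevity.)
  void SetSeekThreshold(int64_t aTimeUs) {
    mQueue.Dispatch([this, aTimeUs] { mSeekThresholdUs = aTimeUs; });
  }

  // Flush clears the hint on the same queue, mirroring VPXDecoder::Flush().
  void Flush() {
    mQueue.Dispatch([this] { mSeekThresholdUs.reset(); });
  }

private:
  SerialQueue& mQueue;
  std::optional<int64_t> mSeekThresholdUs;  // only accessed from tasks on mQueue
};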


@ -37,6 +37,7 @@ public:
{
return "libvpx video decoder";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
enum Codec: uint8_t {
VP8 = 1 << 0,
@ -56,6 +57,8 @@ private:
RefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
Maybe<media::TimeUnit> mSeekTargetThreshold;
// VPx decoder state
vpx_codec_ctx_t mVPX;


@ -151,6 +151,10 @@ public:
return mDecoder->GetDescriptionName();
}
void SetSeekThreshold(const media::TimeUnit& aTime) override {
mDecoder->SetSeekThreshold(aTime);
}
private:
RefPtr<MediaDataDecoder> mDecoder;


@ -24,6 +24,13 @@ VideoCallbackAdapter::Decoded(GMPVideoi420Frame* aDecodedFrame)
MOZ_ASSERT(IsOnGMPThread());
if (mSeekTargetThreshold.isSome()) {
if (decodedFrame->Timestamp() < (uint64_t)mSeekTargetThreshold.ref().ToMicroseconds()) {
return;
}
mSeekTargetThreshold.reset();
}
VideoData::YCbCrBuffer b;
for (int i = 0; i < kGMPNumOfPlanes; ++i) {
b.mPlanes[i].mData = decodedFrame->Buffer(GMPPlaneType(i));
@ -104,6 +111,20 @@ VideoCallbackAdapter::Terminated()
mCallback->Error();
}
void
VideoCallbackAdapter::SetSeekThreshold(const media::TimeUnit& aTime)
{
MOZ_ASSERT(IsOnGMPThread());
mSeekTargetThreshold = Some(aTime);
}
void
VideoCallbackAdapter::ResetSeekThreshold()
{
MOZ_ASSERT(IsOnGMPThread());
mSeekTargetThreshold.reset();
}
void
GMPVideoDecoder::InitTags(nsTArray<nsCString>& aTags)
{
@ -279,6 +300,7 @@ GMPVideoDecoder::Flush()
// Abort the flush.
mCallback->FlushComplete();
}
mAdapter->ResetSeekThreshold();
return NS_OK;
}


@ -41,9 +41,13 @@ public:
mLastStreamOffset = aStreamOffset;
}
void SetSeekThreshold(const media::TimeUnit& aTime);
void ResetSeekThreshold();
private:
MediaDataDecoderCallbackProxy* mCallback;
int64_t mLastStreamOffset;
Maybe<media::TimeUnit> mSeekTargetThreshold;
VideoInfo mVideoInfo;
RefPtr<layers::ImageContainer> mImageContainer;
@ -94,6 +98,11 @@ public:
return "GMP video decoder";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override
{
mAdapter->SetSeekThreshold(aTime);
}
protected:
virtual void InitTags(nsTArray<nsCString>& aTags);
virtual nsCString GetNodeId();


@ -96,4 +96,20 @@ MediaDataDecoderProxy::FlushComplete()
mFlushComplete.Set(true);
}
void
MediaDataDecoderProxy::SetSeekThreshold(const media::TimeUnit& aTime)
{
MOZ_ASSERT(!IsOnProxyThread());
MOZ_ASSERT(!mIsShutdown);
int64_t threshold = aTime.ToMicroseconds();
RefPtr<MediaDataDecoderProxy> self = this;
nsCOMPtr<nsIRunnable> task =
NS_NewRunnableFunction([threshold, self] () {
media::TimeUnit time = media::TimeUnit::FromMicroseconds(threshold);
self->mProxyDecoder->SetSeekThreshold(time);
});
mProxyThread->Dispatch(task.forget());
}
} // namespace mozilla


@ -145,6 +145,8 @@ public:
return "GMP proxy data decoder";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
// Called by MediaDataDecoderCallbackProxy.
void FlushComplete();


@ -585,11 +585,31 @@ MediaCodecDataDecoder::ProcessOutput(
// The Surface will be updated at this point (for video).
mDecoder->ReleaseOutputBuffer(aStatus, true);
{
MonitorAutoLock lock(mMonitor);
int64_t pts = 0;
if (mSeekTargetThreshold.isSome() &&
NS_SUCCEEDED(aInfo->PresentationTimeUs(&pts))) {
if (pts < mSeekTargetThreshold.ref().ToMicroseconds()) {
return NS_OK;
}
mSeekTargetThreshold.reset();
}
}
PostOutput(aInfo, aFormat, duration.value());
return NS_OK;
}
void
MediaCodecDataDecoder::SetSeekThreshold(const media::TimeUnit& aTime)
{
MonitorAutoLock lock(mMonitor);
mSeekTargetThreshold = Some(aTime);
}
void
MediaCodecDataDecoder::DecoderLoop()
{
@ -749,6 +769,7 @@ nsresult
MediaCodecDataDecoder::Flush()
{
MonitorAutoLock lock(mMonitor);
mSeekTargetThreshold.reset();
if (!State(kFlushing)) {
return NS_OK;
}


@ -65,6 +65,7 @@ public:
{
return "android decoder";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
protected:
enum ModuleState {
@ -139,6 +140,8 @@ protected:
SampleQueue mQueue;
// Durations are stored in microseconds.
std::deque<media::TimeUnit> mDurations;
Maybe<media::TimeUnit> mSeekTargetThreshold;
};
} // namespace mozilla


@ -154,6 +154,10 @@ AppleVDADecoder::Flush()
mIsFlushing = false;
// All ProcessDecode() tasks should be done.
MOZ_ASSERT(mInputIncoming == 0);
MonitorAutoLock mon(mMonitor);
mSeekTargetThreshold.reset();
return NS_OK;
}
@ -291,6 +295,14 @@ AppleVDADecoder::ClearReorderedFrames()
mQueuedSamples = 0;
}
void
AppleVDADecoder::SetSeekThreshold(const media::TimeUnit& aTime)
{
LOG("SetSeekThreshold %lld", aTime.ToMicroseconds());
MonitorAutoLock mon(mMonitor);
mSeekTargetThreshold = Some(aTime);
}
// Copy and return a decoded frame.
nsresult
AppleVDADecoder::OutputFrame(CVPixelBufferRef aImage,
@ -322,6 +334,17 @@ AppleVDADecoder::OutputFrame(CVPixelBufferRef aImage,
return NS_OK;
}
{
MonitorAutoLock mon(mMonitor);
if (mSeekTargetThreshold.isSome()) {
if (aFrameRef.composition_timestamp < mSeekTargetThreshold.ref()) {
return NS_OK;
} else {
mSeekTargetThreshold.reset();
}
}
}
// Where our resulting image will end up.
RefPtr<VideoData> data;
// Bounds.


@ -87,6 +87,8 @@ private:
return "apple VDA decoder";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
protected:
AppleVDADecoder(const VideoInfo& aConfig,
TaskQueue* aTaskQueue,
@ -133,13 +135,17 @@ private:
const bool mUseSoftwareImages;
const bool mIs106;
// Protects mReorderQueue.
// Protects mReorderQueue and mSeekTargetThreshold.
Monitor mMonitor;
// Set on reader/decode thread calling Flush() to indicate that output is
// not required and so input samples on mTaskQueue need not be processed.
// Cleared on mTaskQueue in ProcessDrain().
Atomic<bool> mIsFlushing;
ReorderQueue mReorderQueue;
// Decoded frames will be dropped if their pts is smaller than this value.
// It is accessed on the VideoToolbox thread and the reader task queue, so it
// is protected by mMonitor.
Maybe<media::TimeUnit> mSeekTargetThreshold;
// Method to set up the decompression session.
nsresult InitializeSession();


@ -160,6 +160,16 @@ FFmpegDataDecoder<LIBAV_VER>::Drain()
return NS_OK;
}
void
FFmpegDataDecoder<LIBAV_VER>::SetSeekThreshold(const media::TimeUnit& aTime)
{
RefPtr<FFmpegDataDecoder> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([self, aTime] () {
self->mSeekTargetThreshold = Some(aTime);
});
mTaskQueue->Dispatch(r.forget());
}
void
FFmpegDataDecoder<LIBAV_VER>::ProcessFlush()
{
@ -167,6 +177,7 @@ FFmpegDataDecoder<LIBAV_VER>::ProcessFlush()
if (mCodecContext) {
mLib->avcodec_flush_buffers(mCodecContext);
}
mSeekTargetThreshold.reset();
}
void


@ -36,6 +36,7 @@ public:
nsresult Flush() override;
nsresult Drain() override;
nsresult Shutdown() override;
void SetSeekThreshold(const media::TimeUnit& aTime) override;
static AVCodec* FindAVCodec(FFmpegLibWrapper* aLib, AVCodecID aCodec);
@ -61,6 +62,9 @@ protected:
RefPtr<MediaByteBuffer> mExtraData;
AVCodecID mCodecID;
// Accessed only on mTaskQueue.
Maybe<media::TimeUnit> mSeekTargetThreshold;
private:
void ProcessDecode(MediaRawData* aSample);
virtual DecodeResult DoDecode(MediaRawData* aSample) = 0;


@ -261,6 +261,15 @@ FFmpegVideoDecoder<LIBAV_VER>::DoDecode(MediaRawData* aSample,
// against the map becoming extremely big.
mDurationMap.Clear();
}
if (mSeekTargetThreshold.isSome()) {
if (pts < mSeekTargetThreshold.ref().ToMicroseconds()) {
FFMPEG_LOG("Dropping decoded frame.");
return DecodeResult::DECODE_FRAME;
}
mSeekTargetThreshold.reset();
}
FFMPEG_LOG("Got one frame output with pts=%lld dts=%lld duration=%lld opaque=%lld",
pts, mFrame->pkt_dts, duration, mCodecContext->reordered_opaque);


@ -357,6 +357,14 @@ OmxDataDecoder::Output(BufferData* aData)
mMediaDataHelper = new MediaDataHelper(mTrackInfo.get(), mImageContainer, mOmxLayer);
}
if (mSeekTargetThreshold.isSome()) {
if (aData->mRawData->mTime < mSeekTargetThreshold.ref().ToMicroseconds()) {
aData->mStatus = BufferData::BufferStatus::FREE;
return;
}
mSeekTargetThreshold.reset();
}
bool isPlatformData = false;
RefPtr<MediaData> data = mMediaDataHelper->GetMediaData(aData, isPlatformData);
if (!data) {
@ -873,6 +881,8 @@ OmxDataDecoder::DoFlush()
{
MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
mSeekTargetThreshold.reset();
// 1. Call OMX command OMX_CommandFlush in Omx TaskQueue.
// 2. Remove all elements in mMediaRawDatas when flush is completed.
mOmxLayer->SendCommand(OMX_CommandFlush, OMX_ALL, nullptr)
@ -1037,4 +1047,15 @@ MediaDataHelper::CreateYUV420VideoData(BufferData* aBufferData)
return data.forget();
}
void
OmxDataDecoder::SetSeekThreshold(const media::TimeUnit& aTime)
{
RefPtr<OmxDataDecoder> self = this;
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([self, aTime] () {
self->mSeekTargetThreshold = Some(aTime);
});
mOmxTaskQueue->Dispatch(r.forget());
}
}


@ -79,6 +79,8 @@ public:
return "omx decoder";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
// Return true if event is handled.
bool Event(OMX_EVENTTYPE aEvent, OMX_U32 aData1, OMX_U32 aData2);
@ -197,6 +199,9 @@ protected:
RefPtr<MediaDataHelper> mMediaDataHelper;
MediaDataDecoderCallback* mCallback;
// It is accessed only on the OMX task queue.
Maybe<media::TimeUnit> mSeekTargetThreshold;
};
template<class T>


@ -237,4 +237,19 @@ WMFMediaDataDecoder::ProcessConfigurationChanged(UniquePtr<TrackInfo>&& aConfig)
}
}
void
WMFMediaDataDecoder::SetSeekThreshold(const media::TimeUnit& aTime)
{
MOZ_ASSERT(mCallback->OnReaderTaskQueue());
MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
RefPtr<WMFMediaDataDecoder> self = this;
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableFunction([self, aTime] () {
media::TimeUnit threshold = aTime;
self->mMFTManager->SetSeekThreshold(threshold);
});
mTaskQueue->Dispatch(runnable.forget());
}
} // namespace mozilla


@ -36,7 +36,10 @@ public:
virtual HRESULT Output(int64_t aStreamOffset,
RefPtr<MediaData>& aOutput) = 0;
void Flush() { mDecoder->Flush(); }
void Flush() {
mDecoder->Flush();
mSeekTargetThreshold.reset();
}
void Drain()
{
@ -56,9 +59,15 @@ public:
virtual const char* GetDescriptionName() const = 0;
void SetSeekThreshold(const media::TimeUnit& aTime) {
mSeekTargetThreshold = Some(aTime);
}
protected:
// IMFTransform wrapper that performs the decoding.
RefPtr<MFTDecoder> mDecoder;
Maybe<media::TimeUnit> mSeekTargetThreshold;
};
// Decodes audio and video using Windows Media Foundation. Samples are decoded
@ -92,6 +101,8 @@ public:
return mMFTManager ? mMFTManager->GetDescriptionName() : "";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
private:
// Called on the task queue. Inserts the sample into the decoder, and


@ -767,6 +767,20 @@ WMFVideoMFTManager::Output(int64_t aStreamOffset,
}
continue;
}
if (mSeekTargetThreshold.isSome()) {
media::TimeUnit pts = GetSampleTime(sample);
if (!pts.IsValid()) {
return E_FAIL;
}
if (pts < mSeekTargetThreshold.ref()) {
LOG("Dropping video frame which pts is smaller than seek target.");
// It is necessary to clear the pointer to release the previous output
// buffer.
sample = nullptr;
continue;
}
mSeekTargetThreshold.reset();
}
break;
}
// Else unexpected error, assert, and bail.


@ -98,6 +98,13 @@ DecoderFuzzingWrapper::ConfigurationChanged(const TrackInfo& aConfig)
return mDecoder->ConfigurationChanged(aConfig);
}
void
DecoderFuzzingWrapper::SetSeekThreshold(const media::TimeUnit& aTime)
{
DFW_LOGV("");
MOZ_ASSERT(mDecoder);
mDecoder->SetSeekThreshold(aTime);
}
DecoderCallbackFuzzingWrapper::DecoderCallbackFuzzingWrapper(MediaDataDecoderCallback* aCallback)
: mCallback(aCallback)


@ -115,6 +115,7 @@ private:
{
return mDecoder->GetDescriptionName();
}
void SetSeekThreshold(const media::TimeUnit& aTime) override;
RefPtr<MediaDataDecoder> mDecoder;
RefPtr<DecoderCallbackFuzzingWrapper> mCallbackWrapper;


@ -44,6 +44,13 @@ public:
return "H264Converter decoder (pending)";
}
void SetSeekThreshold(const media::TimeUnit& aTime) override
{
if (mDecoder) {
mDecoder->SetSeekThreshold(aTime);
}
}
// Return true if mimetype is H.264.
static bool IsH264(const TrackInfo& aConfig);
nsresult GetLastError() const { return mLastError; }