From d226723e374e164b4f15d36f4b54a1a5465bd020 Mon Sep 17 00:00:00 2001 From: Andreas Pehrson Date: Tue, 11 Sep 2018 14:51:32 +0200 Subject: [PATCH] Bug 1404992 - Audit, document, assert and fix threading policy of VideoConduit. r=dminor, r=bwc --HG-- extra : rebase_source : 281771b96a3f0c2a95d6a7c7ccf0556c1a869001 extra : source : 7d1e74f3e80158dbb71adc5d1c036941e7ab8da9 --- .../gtest/mediaconduit_unittests.cpp | 3 +- .../gtest/videoconduit_unittests.cpp | 6 +- .../src/media-conduit/AudioConduit.cpp | 2 +- .../src/media-conduit/AudioConduit.h | 2 +- .../src/media-conduit/MediaConduitInterface.h | 5 +- .../src/media-conduit/VideoConduit.cpp | 1291 +++++++++++------ .../src/media-conduit/VideoConduit.h | 259 +++- .../src/peerconnection/TransceiverImpl.cpp | 6 +- 8 files changed, 1030 insertions(+), 544 deletions(-) diff --git a/media/webrtc/signaling/gtest/mediaconduit_unittests.cpp b/media/webrtc/signaling/gtest/mediaconduit_unittests.cpp index 9312d080bf19..1de32d9098c3 100644 --- a/media/webrtc/signaling/gtest/mediaconduit_unittests.cpp +++ b/media/webrtc/signaling/gtest/mediaconduit_unittests.cpp @@ -439,7 +439,8 @@ class TransportConduitTest : public ::testing::Test int err = 0; RefPtr videoSession; //get pointer to VideoSessionConduit - videoSession = VideoSessionConduit::Create(WebRtcCallWrapper::Create()); + videoSession = VideoSessionConduit::Create( + WebRtcCallWrapper::Create(), GetCurrentThreadEventTarget()); if( !videoSession ) { ASSERT_NE(videoSession, (void*)nullptr); } diff --git a/media/webrtc/signaling/gtest/videoconduit_unittests.cpp b/media/webrtc/signaling/gtest/videoconduit_unittests.cpp index 667920bdd594..f8b7aa3680d4 100644 --- a/media/webrtc/signaling/gtest/videoconduit_unittests.cpp +++ b/media/webrtc/signaling/gtest/videoconduit_unittests.cpp @@ -90,8 +90,10 @@ public: { NSS_NoDB_Init(nullptr); - mVideoConduit = new WebrtcVideoConduit(WebRtcCallWrapper::Create(UniquePtr(mCall)), - UniquePtr(mAdapter)); + mVideoConduit = new WebrtcVideoConduit( + WebRtcCallWrapper::Create(UniquePtr(mCall)), + UniquePtr(mAdapter), + GetCurrentThreadEventTarget()); std::vector ssrcs = {42}; mVideoConduit->SetLocalSSRCs(ssrcs); } diff --git a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp index 9a974b5034ec..bc60bb1cc98f 100644 --- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp +++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp @@ -142,7 +142,7 @@ bool WebrtcAudioConduit::SetLocalSSRCs(const std::vector & aSSRCs) return true; } -std::vector WebrtcAudioConduit::GetLocalSSRCs() const { +std::vector WebrtcAudioConduit::GetLocalSSRCs() { unsigned int ssrc; if (!mPtrRTP->GetLocalSSRC(mChannel, ssrc)) { return std::vector(1,ssrc); diff --git a/media/webrtc/signaling/src/media-conduit/AudioConduit.h b/media/webrtc/signaling/src/media-conduit/AudioConduit.h index 45dd635bbaa8..a8c43191f285 100644 --- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h +++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h @@ -203,7 +203,7 @@ public: * this list should contain only a single ssrc. 
*/ bool SetLocalSSRCs(const std::vector& aSSRCs) override; - std::vector GetLocalSSRCs() const override; + std::vector GetLocalSSRCs() override; bool SetRemoteSSRC(unsigned int ssrc) override { return false; diff --git a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h index 00ca8a0768aa..d91ff31942b4 100644 --- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h +++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h @@ -192,7 +192,7 @@ public: * Note: this is an ordered list and {a,b,c} != {b,a,c} */ virtual bool SetLocalSSRCs(const std::vector& aSSRCs) = 0; - virtual std::vector GetLocalSSRCs() const = 0; + virtual std::vector GetLocalSSRCs() = 0; /** * Adds negotiated RTP header extensions to the the conduit. Unknown extensions @@ -378,7 +378,8 @@ public: * @result Concrete VideoSessionConduitObject or nullptr in the case * of failure */ - static RefPtr Create(RefPtr aCall); + static RefPtr Create( + RefPtr aCall, nsCOMPtr aStsThread); enum FrameRequestType { diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp index 0cb481c0ec1e..c68f50dd516c 100644 --- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp @@ -137,12 +137,97 @@ ConstrainPreservingAspectRatio(uint16_t max_width, uint16_t max_height, } } +/** + * Function to select and change the encoding frame rate based on incoming frame rate + * and max-mbps setting. + * @param current framerate + * @result new framerate + */ +static unsigned int +SelectSendFrameRate(const VideoCodecConfig* codecConfig, + unsigned int old_framerate, + unsigned short sending_width, + unsigned short sending_height) +{ + unsigned int new_framerate = old_framerate; + + // Limit frame rate based on max-mbps + if (codecConfig && codecConfig->mEncodingConstraints.maxMbps) + { + unsigned int cur_fs, mb_width, mb_height; + + mb_width = (sending_width + 15) >> 4; + mb_height = (sending_height + 15) >> 4; + + cur_fs = mb_width * mb_height; + if (cur_fs > 0) { // in case no frames have been sent + new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs; + + new_framerate = MinIgnoreZero(new_framerate, codecConfig->mEncodingConstraints.maxFps); + } + } + return new_framerate; +} + +/** + * Perform validation on the codecConfig to be applied + */ +static MediaConduitErrorCode +ValidateCodecConfig(const VideoCodecConfig* codecInfo) +{ + if(!codecInfo) { + CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__); + return kMediaConduitMalformedArgument; + } + + if((codecInfo->mName.empty()) || + (codecInfo->mName.length() >= WebrtcVideoConduit::CODEC_PLNAME_SIZE)) { + CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__); + return kMediaConduitMalformedArgument; + } + + return kMediaConduitNoError; +} + +void +WebrtcVideoConduit::CallStatistics::Update( + const webrtc::Call::Stats& aStats) +{ + ASSERT_ON_THREAD(mStatsThread); + + int64_t rtt = aStats.rtt_ms; +#ifdef DEBUG + if (rtt > INT32_MAX) { + CSFLogError(LOGTAG, + "%s for VideoConduit:%p RTT is larger than the" + " maximum size of an RTCP RTT.", __FUNCTION__, this); + } +#endif + if (rtt > 0) { + mRttMs = rtt; + } else { + mRttMs = 0; + } +} + +int32_t +WebrtcVideoConduit::CallStatistics::RttMs() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mRttMs; +} + void WebrtcVideoConduit::StreamStatistics::Update(const double 
aFrameRate, - const double aBitrate) + const double aBitrate, + const webrtc::RtcpPacketTypeCounter& aPacketCounts) { + ASSERT_ON_THREAD(mStatsThread); + mFrameRate.Push(aFrameRate); mBitrate.Push(aBitrate); + mPacketCounts = aPacketCounts; } bool @@ -150,6 +235,8 @@ WebrtcVideoConduit::StreamStatistics::GetVideoStreamStats( double& aOutFrMean, double& aOutFrStdDev, double& aOutBrMean, double& aOutBrStdDev) const { + ASSERT_ON_THREAD(mStatsThread); + if (mFrameRate.NumDataValues() && mBitrate.NumDataValues()) { aOutFrMean = mFrameRate.Mean(); aOutFrStdDev = mFrameRate.StandardDeviation(); @@ -160,66 +247,204 @@ WebrtcVideoConduit::StreamStatistics::GetVideoStreamStats( return false; } -void -WebrtcVideoConduit::SendStreamStatistics::DroppedFrames( - uint32_t& aOutDroppedFrames) const +const webrtc::RtcpPacketTypeCounter& +WebrtcVideoConduit::StreamStatistics::PacketCounts() const { - aOutDroppedFrames = mDroppedFrames; + ASSERT_ON_THREAD(mStatsThread); + + return mPacketCounts; +} + +bool +WebrtcVideoConduit::StreamStatistics::Active() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mActive; +} + +void +WebrtcVideoConduit::StreamStatistics::SetActive(bool aActive) +{ + ASSERT_ON_THREAD(mStatsThread); + + mActive = aActive; +} + +uint32_t +WebrtcVideoConduit::SendStreamStatistics::DroppedFrames() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mDroppedFrames; +} + +uint32_t +WebrtcVideoConduit::SendStreamStatistics::FramesEncoded() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mFramesEncoded; +} + +void +WebrtcVideoConduit::SendStreamStatistics::FrameDeliveredToEncoder() +{ + ASSERT_ON_THREAD(mStatsThread); + + ++mFramesDeliveredToEncoder; +} + +bool +WebrtcVideoConduit::SendStreamStatistics::SsrcFound() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mSsrcFound; +} + +uint32_t +WebrtcVideoConduit::SendStreamStatistics::JitterMs() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mJitterMs; +} + +uint32_t +WebrtcVideoConduit::SendStreamStatistics::CumulativeLost() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mCumulativeLost; +} + +uint64_t +WebrtcVideoConduit::SendStreamStatistics::BytesReceived() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mBytesReceived; +} + +uint32_t +WebrtcVideoConduit::SendStreamStatistics::PacketsReceived() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mPacketsReceived; } void WebrtcVideoConduit::SendStreamStatistics::Update( - const webrtc::VideoSendStream::Stats& aStats) + const webrtc::VideoSendStream::Stats& aStats, + uint32_t aConfiguredSsrc) { - StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps); - if (!aStats.substreams.empty()) { - const webrtc::FrameCounts& fc = - aStats.substreams.begin()->second.frame_counts; - mFramesEncoded = fc.key_frames + fc.delta_frames; - CSFLogVerbose(LOGTAG, - "%s: framerate: %u, bitrate: %u, dropped frames delta: %u", - __FUNCTION__, aStats.encode_frame_rate, - aStats.media_bitrate_bps, - mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames); - mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded; - } else { + ASSERT_ON_THREAD(mStatsThread); + + mSsrcFound = false; + + if (aStats.substreams.empty()) { CSFLogVerbose(LOGTAG, "%s stats.substreams is empty", __FUNCTION__); + return; } + + auto ind = aStats.substreams.find(aConfiguredSsrc); + if (ind == aStats.substreams.end()) { + CSFLogError(LOGTAG, + "%s for VideoConduit:%p ssrc not found in SendStream stats.", + __FUNCTION__, this); + return; + } + + mSsrcFound = true; + + 
StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps, + ind->second.rtcp_packet_type_counts); + + const webrtc::FrameCounts& fc = ind->second.frame_counts; + mFramesEncoded = fc.key_frames + fc.delta_frames; + CSFLogVerbose(LOGTAG, + "%s: framerate: %u, bitrate: %u, dropped frames delta: %u", + __FUNCTION__, aStats.encode_frame_rate, + aStats.media_bitrate_bps, + mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames); + mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded; + mJitterMs = ind->second.rtcp_stats.jitter / + (webrtc::kVideoPayloadTypeFrequency / 1000); + mCumulativeLost = ind->second.rtcp_stats.cumulative_lost; + mBytesReceived = ind->second.rtp_stats.MediaPayloadBytes(); + mPacketsReceived = ind->second.rtp_stats.transmitted.packets; } -void -WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets( - uint32_t& aOutDiscPackets) const +uint32_t +WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets() const { - aOutDiscPackets = mDiscardedPackets; + ASSERT_ON_THREAD(mStatsThread); + + return mDiscardedPackets; } -void -WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded( - uint32_t& aFramesDecoded) const +uint32_t +WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded() const { - aFramesDecoded = mFramesDecoded; + ASSERT_ON_THREAD(mStatsThread); + + return mFramesDecoded; +} + +uint32_t +WebrtcVideoConduit::ReceiveStreamStatistics::JitterMs() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mJitterMs; +} + +uint32_t +WebrtcVideoConduit::ReceiveStreamStatistics::CumulativeLost() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mCumulativeLost; +} + +uint32_t +WebrtcVideoConduit::ReceiveStreamStatistics::Ssrc() const +{ + ASSERT_ON_THREAD(mStatsThread); + + return mSsrc; } void WebrtcVideoConduit::ReceiveStreamStatistics::Update( const webrtc::VideoReceiveStream::Stats& aStats) { + ASSERT_ON_THREAD(mStatsThread); + CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__); - StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps); + StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps, + aStats.rtcp_packet_type_counts); mDiscardedPackets = aStats.discarded_packets; - mFramesDecoded = aStats.frame_counts.key_frames - + aStats.frame_counts.delta_frames; + mFramesDecoded = + aStats.frame_counts.key_frames + aStats.frame_counts.delta_frames; + mJitterMs = + aStats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000); + mCumulativeLost = aStats.rtcp_stats.cumulative_lost; + mSsrc = aStats.ssrc; } /** * Factory Method for VideoConduit */ RefPtr -VideoSessionConduit::Create(RefPtr aCall) +VideoSessionConduit::Create( + RefPtr aCall, + nsCOMPtr aStsThread) { - NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); - NS_ASSERTION(aCall, "missing required parameter: aCall"); + MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(aCall, "missing required parameter: aCall"); CSFLogVerbose(LOGTAG, "%s", __FUNCTION__); if (!aCall) { @@ -228,7 +453,8 @@ VideoSessionConduit::Create(RefPtr aCall) UniquePtr videoAdapter(new cricket::VideoAdapter(1)); nsAutoPtr obj(new WebrtcVideoConduit(aCall, - std::move(videoAdapter))); + std::move(videoAdapter), + aStsThread)); if(obj->Init() != kMediaConduitNoError) { CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__); return nullptr; @@ -238,58 +464,36 @@ VideoSessionConduit::Create(RefPtr aCall) } WebrtcVideoConduit::WebrtcVideoConduit(RefPtr aCall, - UniquePtr&& aVideoAdapter) + UniquePtr&& aVideoAdapter, + nsCOMPtr aStsThread) : 
mTransportMonitor("WebrtcVideoConduit") + , mStsThread(aStsThread) + , mMutex("WebrtcVideoConduit::mMutex") , mVideoAdapter(std::move(aVideoAdapter)) , mBufferPool(false, SCALER_BUFFER_POOL_SIZE) , mEngineTransmitting(false) , mEngineReceiving(false) - , mCodecMutex("VideoConduit codec db") + , mSendStreamStats(aStsThread) + , mRecvStreamStats(aStsThread) + , mCallStats(aStsThread) , mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE) , mCodecMode(webrtc::kRealtimeVideo) - , mCall(aCall) // refcounted store of the call object + , mCall(aCall) , mSendStreamConfig(this) // 'this' is stored but not dereferenced in the constructor. , mRecvStreamConfig(this) // 'this' is stored but not dereferenced in the constructor. + , mRecvSSRC(0) , mVideoStatsTimer(NS_NewTimer()) { mCall->RegisterConduit(this); mRecvStreamConfig.renderer = this; - - // Video Stats Callback - nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) { - CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p", aClosure); - auto self = static_cast(aClosure); - MutexAutoLock lock(self->mCodecMutex); - if (self->mEngineTransmitting && self->mSendStream) { - const auto& stats = self->mSendStream->GetStats(); - self->mSendStreamStats.Update(stats); - if (!stats.substreams.empty()) { - self->mSendPacketCounts = - stats.substreams.begin()->second.rtcp_packet_type_counts; - } - } - if (self->mEngineReceiving && self->mRecvStream) { - const auto& stats = self->mRecvStream->GetStats(); - self->mRecvStreamStats.Update(stats); - self->mRecvPacketCounts = stats.rtcp_packet_type_counts; - } - }; - mVideoStatsTimer->InitWithNamedFuncCallback( - callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP, - "WebrtcVideoConduit::WebrtcVideoConduit"); } WebrtcVideoConduit::~WebrtcVideoConduit() { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); - NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); mCall->UnregisterConduit(this); - if (mVideoStatsTimer) { - CSFLogDebug(LOGTAG, "canceling StreamStats for VideoConduit: %p", this); - MutexAutoLock lock(mCodecMutex); - CSFLogDebug(LOGTAG, "StreamStats cancelled for VideoConduit: %p", this); - mVideoStatsTimer->Cancel(); - } // Release AudioConduit first by dropping reference on MainThread, where it expects to be SyncTo(nullptr); @@ -300,6 +504,8 @@ MediaConduitErrorCode WebrtcVideoConduit::SetLocalRTPExtensions(LocalDirection aDirection, const RtpExtList& aExtensions) { + MOZ_ASSERT(NS_IsMainThread()); + auto& extList = aDirection == LocalDirection::kSend ? mSendStreamConfig.rtp.extensions : mRecvStreamConfig.rtp.extensions; @@ -309,47 +515,60 @@ WebrtcVideoConduit::SetLocalRTPExtensions(LocalDirection aDirection, bool WebrtcVideoConduit::SetLocalSSRCs(const std::vector & aSSRCs) { + MOZ_ASSERT(NS_IsMainThread()); + // Special case: the local SSRCs are the same - do nothing. if (mSendStreamConfig.rtp.ssrcs == aSSRCs) { return true; } - // Update the value of the ssrcs in the config structure. - mSendStreamConfig.rtp.ssrcs = aSSRCs; + { + MutexAutoLock lock(mMutex); + // Update the value of the ssrcs in the config structure. + mSendStreamConfig.rtp.ssrcs = aSSRCs; - bool wasTransmitting = mEngineTransmitting; - if (StopTransmitting() != kMediaConduitNoError) { - return false; - } - - MutexAutoLock lock(mCodecMutex); - // On the next StartTransmitting() or ConfigureSendMediaCodec, force - // building a new SendStream to switch SSRCs. 
- DeleteSendStream(); - if (wasTransmitting) { - if (StartTransmitting() != kMediaConduitNoError) { + bool wasTransmitting = mEngineTransmitting; + if (StopTransmittingLocked() != kMediaConduitNoError) { return false; } + + // On the next StartTransmitting() or ConfigureSendMediaCodec, force + // building a new SendStream to switch SSRCs. + DeleteSendStream(); + + if (wasTransmitting) { + if (StartTransmittingLocked() != kMediaConduitNoError) { + return false; + } + } } return true; } std::vector -WebrtcVideoConduit::GetLocalSSRCs() const +WebrtcVideoConduit::GetLocalSSRCs() { + MutexAutoLock lock(mMutex); + return mSendStreamConfig.rtp.ssrcs; } bool WebrtcVideoConduit::SetLocalCNAME(const char* cname) { + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + mSendStreamConfig.rtp.c_name = cname; return true; } bool WebrtcVideoConduit::SetLocalMID(const std::string& mid) { + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + mSendStreamConfig.rtp.mid = mid; return true; } @@ -357,6 +576,8 @@ bool WebrtcVideoConduit::SetLocalMID(const std::string& mid) MediaConduitErrorCode WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode) { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__); if (mode == webrtc::VideoCodecMode::kRealtimeVideo || mode == webrtc::VideoCodecMode::kScreensharing) { @@ -370,7 +591,9 @@ WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode) void WebrtcVideoConduit::DeleteSendStream() { - mCodecMutex.AssertCurrentThreadOwns(); + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + if (mSendStream) { mCall->Call()->DestroyVideoSendStream(mSendStream); mSendStream = nullptr; @@ -395,7 +618,8 @@ SupportedCodecType(webrtc::VideoCodecType aType) MediaConduitErrorCode WebrtcVideoConduit::CreateSendStream() { - mCodecMutex.AssertCurrentThreadOwns(); + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); webrtc::VideoCodecType encoder_type = SupportedCodecType( @@ -433,7 +657,9 @@ WebrtcVideoConduit::CreateSendStream() void WebrtcVideoConduit::DeleteRecvStream() { - mCodecMutex.AssertCurrentThreadOwns(); + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + if (mRecvStream) { mCall->Call()->DestroyVideoReceiveStream(mRecvStream); mRecvStream = nullptr; @@ -444,7 +670,8 @@ WebrtcVideoConduit::DeleteRecvStream() MediaConduitErrorCode WebrtcVideoConduit::CreateRecvStream() { - mCodecMutex.AssertCurrentThreadOwns(); + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); webrtc::VideoReceiveStream::Decoder decoder_desc; std::unique_ptr decoder; @@ -493,9 +720,11 @@ WebrtcVideoConduit::CreateRecvStream() } static rtc::scoped_refptr -ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig, - const WebrtcVideoConduit* aConduit) +ConfigureVideoEncoderSettings( + const VideoCodecConfig* aConfig, const WebrtcVideoConduit* aConduit) { + MOZ_ASSERT(NS_IsMainThread()); + bool is_screencast = aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing; // No automatic resizing when using simulcast or screencast. bool automatic_resize = !is_screencast && aConfig->mSimulcastEncodings.size() <= 1; @@ -549,11 +778,35 @@ ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig, return nullptr; } +// Compare lists of codecs +static bool +CodecsDifferent(const nsTArray>& a, + const nsTArray>& b) +{ + // return a != b; + // would work if UniquePtr<> operator== compared contents! 
+ auto len = a.Length(); + if (len != b.Length()) { + return true; + } + + // XXX std::equal would work, if we could use it on this - fails for the + // same reason as above. c++14 would let us pass a comparator function. + for (uint32_t i = 0; i < len; ++i) { + if (!(*a[i] == *b[i])) { + return true; + } + } + + return false; +} + std::vector -WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int height, - const webrtc::VideoEncoderConfig& config) +WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams( + int width, int height, const webrtc::VideoEncoderConfig& config) { size_t streamCount = config.number_of_streams; + webrtc::VideoCodecMode codecMode = mConduit->mCodecMode; // Disallow odd width and height, they will cause aspect ratio checks to // fail in the webrtc.org code. We can hit transient states after window @@ -563,14 +816,14 @@ WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int heig CountTrailingZeroes32(height)))); // We only allow one layer when screensharing - if (mConduit->mCodecMode == webrtc::VideoCodecMode::kScreensharing) { + if (codecMode == webrtc::VideoCodecMode::kScreensharing) { streamCount = 1; } std::vector streams; streams.reserve(streamCount); MOZ_ASSERT(mConduit); - MutexAutoLock lock(mConduit->mCodecMutex); // for mCurSendCodecConfig + MutexAutoLock lock(mConduit->mMutex); // XXX webrtc.org code has a restriction on simulcast layers that each // layer must be 1/2 the dimension of the previous layer - not sure why. @@ -638,7 +891,7 @@ WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int heig // For VideoEncoderConfig::ContentType::kScreen, though, in // video_codec_initializer.cc it uses [0] to set the target bitrate // for the screenshare. - if (mConduit->mCodecMode == webrtc::VideoCodecMode::kScreensharing) { + if (codecMode == webrtc::VideoCodecMode::kScreensharing) { video_stream.temporal_layer_thresholds_bps.push_back(video_stream.target_bitrate_bps); } else { video_stream.temporal_layer_thresholds_bps.resize(2); @@ -671,6 +924,9 @@ WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int heig MediaConduitErrorCode WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig) { + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + CSFLogDebug(LOGTAG, "%s for %s", __FUNCTION__, codecConfig ? codecConfig->mName.c_str() : ""); @@ -748,22 +1004,18 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig) // Recreating on PayloadType change may be overkill, but is safe. 
if (mSendStream) { if (!RequiresNewSendStream(*codecConfig)) { - { - MutexAutoLock lock(mCodecMutex); - mCurSendCodecConfig->mEncodingConstraints = codecConfig->mEncodingConstraints; - mCurSendCodecConfig->mSimulcastEncodings = codecConfig->mSimulcastEncodings; - } + mCurSendCodecConfig->mEncodingConstraints = codecConfig->mEncodingConstraints; + mCurSendCodecConfig->mSimulcastEncodings = codecConfig->mSimulcastEncodings; mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig()); return kMediaConduitNoError; } - condError = StopTransmitting(); + condError = StopTransmittingLocked(); if (condError != kMediaConduitNoError) { return condError; } // This will cause a new encoder to be created by StartTransmitting() - MutexAutoLock lock(mCodecMutex); DeleteSendStream(); } @@ -788,11 +1040,8 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig) mSendStreamConfig.rtp.nack.rtp_history_ms = codecConfig->RtcpFbNackIsSet("") ? 1000 : 0; - { - MutexAutoLock lock(mCodecMutex); - // Copy the applied config for future reference. - mCurSendCodecConfig = new VideoCodecConfig(*codecConfig); - } + // Copy the applied config for future reference. + mCurSendCodecConfig = new VideoCodecConfig(*codecConfig); mSendStreamConfig.rtp.rids.clear(); bool has_rid = false; @@ -832,8 +1081,20 @@ GenerateRandomSSRC() bool WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc) { + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + + return SetRemoteSSRCLocked(ssrc); +} + +bool +WebrtcVideoConduit::SetRemoteSSRCLocked(unsigned int ssrc) +{ + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + unsigned int current_ssrc; - if (!GetRemoteSSRC(¤t_ssrc)) { + if (!GetRemoteSSRCLocked(¤t_ssrc)) { return false; } @@ -842,45 +1103,47 @@ WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc) } bool wasReceiving = mEngineReceiving; - if (StopReceiving() != kMediaConduitNoError) { + if (StopReceivingLocked() != kMediaConduitNoError) { return false; } - CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc); - if (!mCall->UnsetRemoteSSRC(ssrc)) { - CSFLogError(LOGTAG, "%s: Failed to unset SSRC %u (0x%x) on other conduits," - " bailing", __FUNCTION__, ssrc, ssrc); - return false; - } - mRecvStreamConfig.rtp.remote_ssrc = ssrc; - mWaitingForInitialSsrc = false; - - // This will destroy mRecvStream and create a new one (argh, why can't we change - // it without a full destroy?) - // We're going to modify mRecvStream, we must lock. Only modified on MainThread. - // All non-MainThread users must lock before reading/using { - MutexAutoLock lock(mCodecMutex); - // On the next StartReceiving() or ConfigureRecvMediaCodec, force - // building a new RecvStream to switch SSRCs. 
- DeleteRecvStream(); - if (!wasReceiving) { - return true; - } - MediaConduitErrorCode rval = CreateRecvStream(); - if (rval != kMediaConduitNoError) { - CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval); + CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc); + MutexAutoUnlock unlock(mMutex); + if (!mCall->UnsetRemoteSSRC(ssrc)) { + CSFLogError(LOGTAG, "%s: Failed to unset SSRC %u (0x%x) on other conduits," + " bailing", __FUNCTION__, ssrc, ssrc); return false; } } - return (StartReceiving() == kMediaConduitNoError); + + mRecvStreamConfig.rtp.remote_ssrc = ssrc; + mStsThread->Dispatch(NS_NewRunnableFunction( + "WebrtcVideoConduit::WaitingForInitialSsrcNoMore", + [this, self = RefPtr(this)]() { + mWaitingForInitialSsrc = false; + })); + // On the next StartReceiving() or ConfigureRecvMediaCodec, force + // building a new RecvStream to switch SSRCs. + DeleteRecvStream(); + + if (wasReceiving) { + if (StartReceivingLocked() != kMediaConduitNoError) { + return false; + } + } + + return true; } bool WebrtcVideoConduit::UnsetRemoteSSRC(uint32_t ssrc) { + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + unsigned int our_ssrc; - if (!GetRemoteSSRC(&our_ssrc)) { + if (!GetRemoteSSRCLocked(&our_ssrc)) { // This only fails when we aren't sending, which isn't really an error here return true; } @@ -899,23 +1162,32 @@ WebrtcVideoConduit::UnsetRemoteSSRC(uint32_t ssrc) // There is a (tiny) chance that this new random ssrc will collide with some // other conduit's remote ssrc, in which case that conduit will choose a new // one. - SetRemoteSSRC(our_ssrc); + SetRemoteSSRCLocked(our_ssrc); return true; } bool WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc) { - { - MutexAutoLock lock(mCodecMutex); + MutexAutoLock lock(mMutex); + + return GetRemoteSSRCLocked(ssrc); +} + +bool +WebrtcVideoConduit::GetRemoteSSRCLocked(unsigned int* ssrc) +{ + mMutex.AssertCurrentThreadOwns(); + + if (NS_IsMainThread()) { if (!mRecvStream) { return false; } - - const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats(); - *ssrc = stats.ssrc; + *ssrc = mRecvStream->GetStats().ssrc; + } else { + ASSERT_ON_THREAD(mStsThread); + *ssrc = mRecvStreamStats.Ssrc(); } - return true; } @@ -923,11 +1195,13 @@ bool WebrtcVideoConduit::GetSendPacketTypeStats( webrtc::RtcpPacketTypeCounter* aPacketCounts) { - MutexAutoLock lock(mCodecMutex); - if (!mEngineTransmitting || !mSendStream) { // Not transmitting + ASSERT_ON_THREAD(mStsThread); + + MutexAutoLock lock(mMutex); + if (!mSendStreamStats.Active()) { return false; } - *aPacketCounts = mSendPacketCounts; + *aPacketCounts = mSendStreamStats.PacketCounts(); return true; } @@ -935,14 +1209,93 @@ bool WebrtcVideoConduit::GetRecvPacketTypeStats( webrtc::RtcpPacketTypeCounter* aPacketCounts) { - MutexAutoLock lock(mCodecMutex); - if (!mEngineReceiving || !mRecvStream) { // Not receiving + ASSERT_ON_THREAD(mStsThread); + + if (!mRecvStreamStats.Active()) { return false; } - *aPacketCounts = mRecvPacketCounts; + *aPacketCounts = mRecvStreamStats.PacketCounts(); return true; } +void +WebrtcVideoConduit::PollStats() +{ + MOZ_ASSERT(NS_IsMainThread()); + + nsTArray> runnables(2); + if (mEngineTransmitting) { + MOZ_RELEASE_ASSERT(mSendStream); + if (!mSendStreamConfig.rtp.ssrcs.empty()) { + uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front(); + webrtc::VideoSendStream::Stats stats = mSendStream->GetStats(); + runnables.AppendElement(NS_NewRunnableFunction( + "WebrtcVideoConduit::SendStreamStatistics::Update", + [this, self = 
RefPtr(this), + stats = std::move(stats), ssrc]() + { + mSendStreamStats.Update(stats, ssrc); + })); + } + } + if (mEngineReceiving) { + MOZ_RELEASE_ASSERT(mRecvStream); + webrtc::VideoReceiveStream::Stats stats = mRecvStream->GetStats(); + runnables.AppendElement(NS_NewRunnableFunction( + "WebrtcVideoConduit::RecvStreamStatistics::Update", + [this, self = RefPtr(this), + stats = std::move(stats)]() + { + mRecvStreamStats.Update(stats); + })); + } + webrtc::Call::Stats stats = mCall->Call()->GetStats(); + mStsThread->Dispatch(NS_NewRunnableFunction( + "WebrtcVideoConduit::UpdateStreamStatistics", + [this, self = RefPtr(this), + stats = std::move(stats), runnables = std::move(runnables)]() + { + mCallStats.Update(stats); + for (const auto& runnable : runnables) { + runnable->Run(); + } + })); +} + +void +WebrtcVideoConduit::UpdateVideoStatsTimer() +{ + MOZ_ASSERT(NS_IsMainThread()); + + bool transmitting = mEngineTransmitting; + bool receiving = mEngineReceiving; + mStsThread->Dispatch(NS_NewRunnableFunction( + "WebrtcVideoConduit::SetSendStreamStatsActive", + [this, self = RefPtr(this), transmitting, receiving]() + { + mSendStreamStats.SetActive(transmitting); + mRecvStreamStats.SetActive(receiving); + })); + + bool shouldBeActive = transmitting || receiving; + if (mVideoStatsTimerActive == shouldBeActive) { + return; + } + mVideoStatsTimerActive = shouldBeActive; + if (shouldBeActive) { + nsTimerCallbackFunc callback = [](nsITimer*, void* aClosure) + { + CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p", aClosure); + static_cast(aClosure)->PollStats(); + }; + mVideoStatsTimer->InitWithNamedFuncCallback( + callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP, + "WebrtcVideoConduit::SendStreamStatsUpdater"); + } else { + mVideoStatsTimer->Cancel(); + } +} + bool WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean, double* framerateStdDev, @@ -951,17 +1304,17 @@ WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean, uint32_t* droppedFrames, uint32_t* framesEncoded) { - { - MutexAutoLock lock(mCodecMutex); - if (!mEngineTransmitting || !mSendStream) { - return false; - } - mSendStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev, - *bitrateMean, *bitrateStdDev); - mSendStreamStats.DroppedFrames(*droppedFrames); - *framesEncoded = mSendStreamStats.FramesEncoded(); - return true; + ASSERT_ON_THREAD(mStsThread); + + MutexAutoLock lock(mMutex); + if (!mEngineTransmitting || !mSendStream) { + return false; } + mSendStreamStats.GetVideoStreamStats( + *framerateMean, *framerateStdDev, *bitrateMean, *bitrateStdDev); + *droppedFrames = mSendStreamStats.DroppedFrames(); + *framesEncoded = mSendStreamStats.FramesEncoded(); + return true; } bool @@ -972,17 +1325,17 @@ WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean, uint32_t* discardedPackets, uint32_t* framesDecoded) { - { - MutexAutoLock lock(mCodecMutex); - if (!mEngineReceiving || !mRecvStream) { - return false; - } - mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev, - *bitrateMean, *bitrateStdDev); - mRecvStreamStats.DiscardedPackets(*discardedPackets); - mRecvStreamStats.FramesDecoded(*framesDecoded); - return true; + ASSERT_ON_THREAD(mStsThread); + + MutexAutoLock lock(mMutex); + if (!mEngineReceiving || !mRecvStream) { + return false; } + mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev, + *bitrateMean, *bitrateStdDev); + *discardedPackets = mRecvStreamStats.DiscardedPackets(); + *framesDecoded = 
mRecvStreamStats.FramesDecoded(); + return true; } bool @@ -990,25 +1343,25 @@ WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs, int32_t* playoutBufferDelayMs, int32_t* avSyncOffsetMs) { + ASSERT_ON_THREAD(mStsThread); + return false; } bool -WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs, - unsigned int* cumulativeLost) +WebrtcVideoConduit::GetRTPStats(uint32_t* jitterMs, + uint32_t* cumulativeLost) { - CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this); - { - MutexAutoLock lock(mCodecMutex); - if (!mRecvStream) { - return false; - } + ASSERT_ON_THREAD(mStsThread); - const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats(); - *jitterMs = - stats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000); - *cumulativeLost = stats.rtcp_stats.cumulative_lost; + CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this); + MutexAutoLock lock(mMutex); + if (!mRecvStream) { + return false; } + + *jitterMs = mRecvStreamStats.JitterMs(); + *cumulativeLost = mRecvStreamStats.CumulativeLost(); return true; } @@ -1019,48 +1372,24 @@ bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp, uint32_t* cumulativeLost, int32_t* rttMs) { - { - CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this); - MutexAutoLock lock(mCodecMutex); - if (!mSendStream) { - return false; - } - const webrtc::VideoSendStream::Stats& sendStats = mSendStream->GetStats(); - if (sendStats.substreams.empty() - || mSendStreamConfig.rtp.ssrcs.empty()) { - return false; - } - uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front(); - auto ind = sendStats.substreams.find(ssrc); - if (ind == sendStats.substreams.end()) { - CSFLogError(LOGTAG, - "%s for VideoConduit:%p ssrc not found in SendStream stats.", - __FUNCTION__, this); - return false; - } - *jitterMs = ind->second.rtcp_stats.jitter - / (webrtc::kVideoPayloadTypeFrequency / 1000); - *cumulativeLost = ind->second.rtcp_stats.cumulative_lost; - *bytesReceived = ind->second.rtp_stats.MediaPayloadBytes(); - *packetsReceived = ind->second.rtp_stats.transmitted.packets; - auto stats = mCall->Call()->GetStats(); - int64_t rtt = stats.rtt_ms; -#ifdef DEBUG - if (rtt > INT32_MAX) { - CSFLogError(LOGTAG, - "%s for VideoConduit:%p RTT is larger than the" - " maximum size of an RTCP RTT.", __FUNCTION__, this); - } -#endif - if (rtt > 0) { - *rttMs = rtt; - } else { - *rttMs = 0; - } - // Note: timestamp is not correct per the spec... should be time the rtcp - // was received (remote) or sent (local) - *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds(); + ASSERT_ON_THREAD(mStsThread); + + CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this); + if (!mSendStreamStats.Active()) { + return false; } + if (!mSendStreamStats.SsrcFound()) { + return false; + } + *jitterMs = mSendStreamStats.JitterMs(); + *packetsReceived = mSendStreamStats.PacketsReceived(); + *bytesReceived = mSendStreamStats.BytesReceived(); + *cumulativeLost = mSendStreamStats.CumulativeLost(); + *rttMs = mCallStats.RttMs(); + // Note: timestamp is not correct per the spec... 
should be time the rtcp + // was received (remote) or sent (local) + *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds(); + return true; } @@ -1069,10 +1398,12 @@ WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp, unsigned int* packetsSent, uint64_t* bytesSent) { + ASSERT_ON_THREAD(mStsThread); + CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this); webrtc::RTCPSenderInfo senderInfo; { - MutexAutoLock lock(mCodecMutex); + MutexAutoLock lock(mMutex); if (!mRecvStream || !mRecvStream->GetRemoteRTCPSenderInfo(&senderInfo)) { return false; } @@ -1086,7 +1417,6 @@ WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp, MediaConduitErrorCode WebrtcVideoConduit::InitMain() { - // already know we must be on MainThread barring unit test weirdness MOZ_ASSERT(NS_IsMainThread()); nsresult rv; @@ -1174,10 +1504,10 @@ WebrtcVideoConduit::InitMain() MediaConduitErrorCode WebrtcVideoConduit::Init() { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this); MediaConduitErrorCode result; - // Run code that must run on MainThread first - MOZ_ASSERT(NS_IsMainThread()); result = InitMain(); if (result != kMediaConduitNoError) { return result; @@ -1190,10 +1520,12 @@ WebrtcVideoConduit::Init() void WebrtcVideoConduit::DeleteStreams() { + MOZ_ASSERT(NS_IsMainThread()); + // We can't delete the VideoEngine until all these are released! // And we can't use a Scoped ptr, since the order is arbitrary - MutexAutoLock lock(mCodecMutex); + MutexAutoLock lock(mMutex); DeleteSendStream(); DeleteRecvStream(); } @@ -1201,21 +1533,21 @@ WebrtcVideoConduit::DeleteStreams() void WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit) { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogDebug(LOGTAG, "%s Synced to %p", __FUNCTION__, aConduit); - { - MutexAutoLock lock(mCodecMutex); - if (!mRecvStream) { - CSFLogError(LOGTAG, "SyncTo called with no receive stream"); - return; - } + if (!mRecvStream) { + CSFLogError(LOGTAG, "SyncTo called with no receive stream"); + return; + } - if (aConduit) { - mRecvStream->SetSyncChannel(aConduit->GetVoiceEngine(), - aConduit->GetChannel()); - } else if (mSyncedTo) { - mRecvStream->SetSyncChannel(mSyncedTo->GetVoiceEngine(), -1); - } + MutexAutoLock lock(mMutex); + if (aConduit) { + mRecvStream->SetSyncChannel(aConduit->GetVoiceEngine(), + aConduit->GetChannel()); + } else if (mSyncedTo) { + mRecvStream->SetSyncChannel(mSyncedTo->GetVoiceEngine(), -1); } mSyncedTo = aConduit; @@ -1224,6 +1556,8 @@ WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit) MediaConduitErrorCode WebrtcVideoConduit::AttachRenderer(RefPtr aVideoRenderer) { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogDebug(LOGTAG, "%s", __FUNCTION__); // null renderer @@ -1249,11 +1583,11 @@ WebrtcVideoConduit::AttachRenderer(RefPtr aVideoRenderer void WebrtcVideoConduit::DetachRenderer() { - { - ReentrantMonitorAutoEnter enter(mTransportMonitor); - if (mRenderer) { - mRenderer = nullptr; - } + MOZ_ASSERT(NS_IsMainThread()); + + ReentrantMonitorAutoEnter enter(mTransportMonitor); + if (mRenderer) { + mRenderer = nullptr; } } @@ -1261,6 +1595,8 @@ MediaConduitErrorCode WebrtcVideoConduit::SetTransmitterTransport( RefPtr aTransport) { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); ReentrantMonitorAutoEnter enter(mTransportMonitor); @@ -1272,6 +1608,8 @@ WebrtcVideoConduit::SetTransmitterTransport( MediaConduitErrorCode WebrtcVideoConduit::SetReceiverTransport(RefPtr aTransport) { + MOZ_ASSERT(NS_IsMainThread()); + 
CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); ReentrantMonitorAutoEnter enter(mTransportMonitor); @@ -1284,6 +1622,8 @@ MediaConduitErrorCode WebrtcVideoConduit::ConfigureRecvMediaCodecs( const std::vector& codecConfigList) { + MOZ_ASSERT(NS_IsMainThread()); + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); MediaConduitErrorCode condError = kMediaConduitNoError; std::string payloadName; @@ -1372,8 +1712,9 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs( (use_fec && (mRecvStreamConfig.rtp.ulpfec.ulpfec_payload_type != ulpfec_payload_type || mRecvStreamConfig.rtp.ulpfec.red_payload_type != red_payload_type))) { + MutexAutoLock lock(mMutex); - condError = StopReceiving(); + condError = StopReceivingLocked(); if (condError != kMediaConduitNoError) { return condError; } @@ -1444,17 +1785,8 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs( recv_codecs.Clear(); mRecvStreamConfig.rtp.rtx.clear(); - { - MutexAutoLock lock(mCodecMutex); - DeleteRecvStream(); - // Rebuilds mRecvStream from mRecvStreamConfig - MediaConduitErrorCode rval = CreateRecvStream(); - if (rval != kMediaConduitNoError) { - CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval); - return rval; - } - } - return StartReceiving(); + DeleteRecvStream(); + return StartReceivingLocked(); } return kMediaConduitNoError; } @@ -1462,6 +1794,8 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs( webrtc::VideoDecoder* WebrtcVideoConduit::CreateDecoder(webrtc::VideoCodecType aType) { + MOZ_ASSERT(NS_IsMainThread()); + webrtc::VideoDecoder* decoder = nullptr; #ifdef MOZ_WEBRTC_MEDIACODEC bool enabled = false; @@ -1529,6 +1863,8 @@ webrtc::VideoEncoder* WebrtcVideoConduit::CreateEncoder(webrtc::VideoCodecType aType, bool enable_simulcast) { + MOZ_ASSERT(NS_IsMainThread()); + webrtc::VideoEncoder* encoder = nullptr; #ifdef MOZ_WEBRTC_MEDIACODEC bool enabled = false; @@ -1628,6 +1964,8 @@ WebrtcVideoConduit::SelectBitrates( unsigned short width, unsigned short height, int cap, webrtc::VideoStream& aVideoStream) { + mMutex.AssertCurrentThreadOwns(); + int& out_min = aVideoStream.min_bitrate_bps; int& out_start = aVideoStream.target_bitrate_bps; int& out_max = aVideoStream.max_bitrate_bps; @@ -1668,10 +2006,10 @@ WebrtcVideoConduit::SelectBitrates( // XXX we need to figure out how to feed back changes in preferred capture // resolution to the getUserMedia source. 
void -WebrtcVideoConduit::SelectSendResolution(unsigned short width, - unsigned short height) +WebrtcVideoConduit::SelectSendResolution( + unsigned short width, unsigned short height) { - mCodecMutex.AssertCurrentThreadOwns(); + mMutex.AssertCurrentThreadOwns(); // XXX This will do bandwidth-resolution adaptation as well - bug 877954 // Enforce constraints @@ -1685,11 +2023,12 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width, } // Limit resolution to max-fs + const auto& wants = mVideoBroadcaster.wants(); if (mCurSendCodecConfig->mEncodingConstraints.maxFs) { // max-fs is in macroblocks, convert to pixels int max_fs(mCurSendCodecConfig->mEncodingConstraints.maxFs*(16*16)); - if (max_fs > mLastSinkWanted.max_pixel_count.value_or(max_fs)) { - max_fs = mLastSinkWanted.max_pixel_count.value_or(max_fs); + if (max_fs > wants.max_pixel_count.value_or(max_fs)) { + max_fs = wants.max_pixel_count.value_or(max_fs); } mVideoAdapter->OnResolutionRequest( rtc::Optional(max_fs), rtc::Optional()); @@ -1707,61 +2046,40 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width, } } -unsigned int -WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig, - unsigned int old_framerate, - unsigned short sending_width, - unsigned short sending_height) const -{ - unsigned int new_framerate = old_framerate; - - // Limit frame rate based on max-mbps - if (codecConfig && codecConfig->mEncodingConstraints.maxMbps) - { - unsigned int cur_fs, mb_width, mb_height; - - mb_width = (sending_width + 15) >> 4; - mb_height = (sending_height + 15) >> 4; - - cur_fs = mb_width * mb_height; - if (cur_fs > 0) { // in case no frames have been sent - new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs; - - new_framerate = MinIgnoreZero(new_framerate, codecConfig->mEncodingConstraints.maxFps); - } - } - return new_framerate; -} - void WebrtcVideoConduit::AddOrUpdateSink( rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) { - CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__, - mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(), - wants.max_pixel_count ? *wants.max_pixel_count : -1, - wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1); - - // MUST run on the same thread as first call (MainThread) if (!NS_IsMainThread()) { - // This can be asynchronous - RefPtr self(this); - NS_DispatchToMainThread(media::NewRunnableFrom([self, sink, wants]() { - self->mVideoBroadcaster.AddOrUpdateSink(sink, wants); - self->OnSinkWantsChanged(self->mVideoBroadcaster.wants()); - return NS_OK; - })); - } else { - mVideoBroadcaster.AddOrUpdateSink(sink, wants); - OnSinkWantsChanged(mVideoBroadcaster.wants()); + // This may be called off main thread, but only to update an already added + // sink. If we add it after the dispatch we're at risk of a UAF. 
+ NS_DispatchToMainThread(NS_NewRunnableFunction( + "WebrtcVideoConduit::UpdateSink", + [this, self = RefPtr(this), + sink, wants = std::move(wants)]() + { + if (mRegisteredSinks.Contains(sink)) { + AddOrUpdateSink(sink, wants); + } + })); + return; } + + if (!mRegisteredSinks.Contains(sink)) { + mRegisteredSinks.AppendElement(sink); + } + mVideoBroadcaster.AddOrUpdateSink(sink, wants); + OnSinkWantsChanged(mVideoBroadcaster.wants()); } void WebrtcVideoConduit::RemoveSink( rtc::VideoSinkInterface* sink) { + MOZ_ASSERT(NS_IsMainThread()); + + mRegisteredSinks.RemoveElement(sink); mVideoBroadcaster.RemoveSink(sink); OnSinkWantsChanged(mVideoBroadcaster.wants()); } @@ -1770,12 +2088,16 @@ void WebrtcVideoConduit::OnSinkWantsChanged( const rtc::VideoSinkWants& wants) { - NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); + MOZ_ASSERT(NS_IsMainThread()); if (mLockScaling) { return; } - mLastSinkWanted = wants; + + CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__, + mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(), + wants.max_pixel_count ? *wants.max_pixel_count : -1, + wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1); if (!mCurSendCodecConfig) { return; @@ -1810,37 +2132,41 @@ WebrtcVideoConduit::SendVideoFrame(const webrtc::VideoFrame& frame) // avoids sampling error when capturing frames, but google had to deal with some // broken cameras, include Logitech c920's IIRC. - CSFLogVerbose(LOGTAG, "%s (send SSRC %u (0x%x))", __FUNCTION__, - mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front()); - - if (frame.width() != mLastWidth || frame.height() != mLastHeight) { - // See if we need to recalculate what we're sending. - CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u", - __FUNCTION__, frame.width(), frame.height()); - MOZ_ASSERT(frame.width() != 0 && frame.height() != 0); - // Note coverity will flag this since it thinks they can be 0 - - MutexAutoLock lock(mCodecMutex); - mLastWidth = frame.width(); - mLastHeight = frame.height(); - SelectSendResolution(frame.width(), frame.height()); - } - - // adapt input video to wants of sink - if (!mVideoBroadcaster.frame_wanted()) { - return kMediaConduitNoError; - } - int cropWidth; int cropHeight; int adaptedWidth; int adaptedHeight; - if (!mVideoAdapter->AdaptFrameResolution( - frame.width(), frame.height(), - frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, - &cropWidth, &cropHeight, &adaptedWidth, &adaptedHeight)) { - // VideoAdapter dropped the frame. - return kMediaConduitNoError; + { + MutexAutoLock lock(mMutex); + CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s (send SSRC %u (0x%x))", + this, __FUNCTION__, + mSendStreamConfig.rtp.ssrcs.front(), + mSendStreamConfig.rtp.ssrcs.front()); + + if (frame.width() != mLastWidth || frame.height() != mLastHeight) { + // See if we need to recalculate what we're sending. 
+ CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u", + __FUNCTION__, frame.width(), frame.height()); + MOZ_ASSERT(frame.width() != 0 && frame.height() != 0); + // Note coverity will flag this since it thinks they can be 0 + + mLastWidth = frame.width(); + mLastHeight = frame.height(); + SelectSendResolution(frame.width(), frame.height()); + } + + // adapt input video to wants of sink + if (!mVideoBroadcaster.frame_wanted()) { + return kMediaConduitNoError; + } + + if (!mVideoAdapter->AdaptFrameResolution( + frame.width(), frame.height(), + frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, + &cropWidth, &cropHeight, &adaptedWidth, &adaptedHeight)) { + // VideoAdapter dropped the frame. + return kMediaConduitNoError; + } } int cropX = (frame.width() - cropWidth) / 2; @@ -1865,7 +2191,12 @@ WebrtcVideoConduit::SendVideoFrame(const webrtc::VideoFrame& frame) mVideoBroadcaster.OnFrame(webrtc::VideoFrame( buffer, frame.timestamp(), frame.render_time_ms(), frame.rotation())); - mSendStreamStats.FrameDeliveredToEncoder(); + mStsThread->Dispatch(NS_NewRunnableFunction( + "SendStreamStatistics::FrameDeliveredToEncoder", + [self = RefPtr(this), this]() + { + mSendStreamStats.FrameDeliveredToEncoder(); + })); return kMediaConduitNoError; } @@ -1874,11 +2205,7 @@ WebrtcVideoConduit::SendVideoFrame(const webrtc::VideoFrame& frame) MediaConduitErrorCode WebrtcVideoConduit::DeliverPacket(const void* data, int len) { - // Media Engine should be receiving already. - if (!mCall) { - CSFLogError(LOGTAG, "Error: %s when not receiving", __FUNCTION__); - return kMediaConduitSessionNotInited; - } + ASSERT_ON_THREAD(mStsThread); // XXX we need to get passed the time the packet was received webrtc::PacketReceiver::DeliveryStatus status = @@ -1897,13 +2224,14 @@ WebrtcVideoConduit::DeliverPacket(const void* data, int len) MediaConduitErrorCode WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc) { + ASSERT_ON_THREAD(mStsThread); + if (mAllowSsrcChange || mWaitingForInitialSsrc) { // Handle the unknown ssrc (and ssrc-not-signaled case). // We can't just do this here; it has to happen on MainThread :-( // We also don't want to drop the packet, nor stall this thread, so we hold // the packet (and any following) for inserting once the SSRC is set. - bool queue = mRecvSSRCSetInProgress; - if (queue || mRecvSSRC != ssrc) { + if (mRecvSsrcSetInProgress || mRecvSSRC != ssrc) { // capture packet for insertion after ssrc is set -- do this before // sending the runnable, since it may pull from this. 
Since it // dispatches back to us, it's less critial to do this here, but doesn't @@ -1913,7 +2241,7 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc) memcpy(packet->mData, data, len); CSFLogDebug(LOGTAG, "queuing packet: seq# %u, Len %d ", (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen); - if (queue) { + if (mRecvSsrcSetInProgress) { mQueuedPackets.AppendElement(std::move(packet)); return kMediaConduitNoError; } @@ -1924,49 +2252,47 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc) mQueuedPackets.AppendElement(std::move(packet)); CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__, - mRecvSSRC, ssrc); + static_cast(mRecvSSRC), ssrc); // we "switch" here immediately, but buffer until the queue is released mRecvSSRC = ssrc; - mRecvSSRCSetInProgress = true; - queue = true; + mRecvSsrcSetInProgress = true; // Ensure lamba captures refs - RefPtr self = this; - nsCOMPtr thread; - if (NS_WARN_IF(NS_FAILED(NS_GetCurrentThread(getter_AddRefs(thread))))) { - return kMediaConduitRTPProcessingFailed; - } - NS_DispatchToMainThread(media::NewRunnableFrom([self, thread, ssrc]() mutable { - // Normally this is done in CreateOrUpdateMediaPipeline() for - // initial creation and renegotiation, but here we're rebuilding the - // Receive channel at a lower level. This is needed whenever we're - // creating a GMPVideoCodec (in particular, H264) so it can communicate - // errors to the PC. - WebrtcGmpPCHandleSetter setter(self->mPCHandle); - self->SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream - // We want to unblock the queued packets on the original thread - thread->Dispatch(media::NewRunnableFrom([self, ssrc]() mutable { - if (ssrc == self->mRecvSSRC) { - // SSRC is set; insert queued packets - for (auto& packet : self->mQueuedPackets) { - CSFLogDebug(LOGTAG, "Inserting queued packets: seq# %u, Len %d ", - (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen); + NS_DispatchToMainThread(NS_NewRunnableFunction( + "WebrtcVideoConduit::WebrtcGmpPCHandleSetter", + [this, self = RefPtr(this), ssrc]() mutable + { + // Normally this is done in CreateOrUpdateMediaPipeline() for + // initial creation and renegotiation, but here we're rebuilding the + // Receive channel at a lower level. This is needed whenever we're + // creating a GMPVideoCodec (in particular, H264) so it can communicate + // errors to the PC. 
+ WebrtcGmpPCHandleSetter setter(mPCHandle); + SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream + // We want to unblock the queued packets on the original thread + mStsThread->Dispatch(NS_NewRunnableFunction( + "WebrtcVideoConduit::QueuedPacketsHandler", + [this, self, ssrc]() mutable + { + if (ssrc != mRecvSSRC) { + // this is an intermediate switch; another is in-flight + return; + } + // SSRC is set; insert queued packets + for (auto& packet : mQueuedPackets) { + CSFLogDebug(LOGTAG, "Inserting queued packets: seq# %u, Len %d ", + (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen); - if (self->DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) { - CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__); - // Keep delivering and then clear the queue - } - } - self->mQueuedPackets.Clear(); - // we don't leave inprogress until there are no changes in-flight - self->mRecvSSRCSetInProgress = false; - } - // else this is an intermediate switch; another is in-flight - - return NS_OK; - }), NS_DISPATCH_NORMAL); - return NS_OK; - })); + if (DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) { + CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__); + // Keep delivering and then clear the queue + } + } + mQueuedPackets.Clear(); + // we don't leave inprogress until there are no changes in-flight + mRecvSsrcSetInProgress = false; + })); + })); return kMediaConduitNoError; } } @@ -1986,6 +2312,8 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc) MediaConduitErrorCode WebrtcVideoConduit::ReceivedRTCPPacket(const void* data, int len) { + ASSERT_ON_THREAD(mStsThread); + CSFLogVerbose(LOGTAG, " %s Len %d ", __FUNCTION__, len); if (DeliverPacket(data, len) != kMediaConduitNoError) { @@ -1999,53 +2327,92 @@ WebrtcVideoConduit::ReceivedRTCPPacket(const void* data, int len) MediaConduitErrorCode WebrtcVideoConduit::StopTransmitting() { - if (mEngineTransmitting) { - { - MutexAutoLock lock(mCodecMutex); - if (mSendStream) { - CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__); - mSendStream->Stop(); - } - } + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); - mEngineTransmitting = false; - } - return kMediaConduitNoError; + return StopTransmittingLocked(); } MediaConduitErrorCode WebrtcVideoConduit::StartTransmitting() { - if (mEngineTransmitting) { - return kMediaConduitNoError; - } + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); - CSFLogDebug(LOGTAG, "%s Attemping to start... 
", __FUNCTION__); - { - // Start Transmitting on the video engine - MutexAutoLock lock(mCodecMutex); - - if (!mSendStream) { - MediaConduitErrorCode rval = CreateSendStream(); - if (rval != kMediaConduitNoError) { - CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval); - return rval; - } - } - - mSendStream->Start(); - // XXX File a bug to consider hooking this up to the state of mtransport - mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); - mEngineTransmitting = true; - } - - return kMediaConduitNoError; + return StartTransmittingLocked(); } MediaConduitErrorCode WebrtcVideoConduit::StopReceiving() { - NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + + return StopReceivingLocked(); +} + +MediaConduitErrorCode +WebrtcVideoConduit::StartReceiving() +{ + MOZ_ASSERT(NS_IsMainThread()); + MutexAutoLock lock(mMutex); + + return StartReceivingLocked(); +} + +MediaConduitErrorCode +WebrtcVideoConduit::StopTransmittingLocked() +{ + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + + if (mEngineTransmitting) { + if (mSendStream) { + CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__); + mSendStream->Stop(); + } + + mEngineTransmitting = false; + UpdateVideoStatsTimer(); + } + return kMediaConduitNoError; +} + +MediaConduitErrorCode +WebrtcVideoConduit::StartTransmittingLocked() +{ + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + + if (mEngineTransmitting) { + return kMediaConduitNoError; + } + + CSFLogDebug(LOGTAG, "%s Attemping to start... ", __FUNCTION__); + // Start Transmitting on the video engine + if (!mSendStream) { + MediaConduitErrorCode rval = CreateSendStream(); + if (rval != kMediaConduitNoError) { + CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval); + return rval; + } + } + + mSendStream->Start(); + // XXX File a bug to consider hooking this up to the state of mtransport + mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); + mEngineTransmitting = true; + UpdateVideoStatsTimer(); + + return kMediaConduitNoError; +} + +MediaConduitErrorCode +WebrtcVideoConduit::StopReceivingLocked() +{ + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + // Are we receiving already? If so, stop receiving and playout // since we can't apply new recv codec when the engine is playing. if (mEngineReceiving && mRecvStream) { @@ -2054,28 +2421,37 @@ WebrtcVideoConduit::StopReceiving() } mEngineReceiving = false; + UpdateVideoStatsTimer(); return kMediaConduitNoError; } MediaConduitErrorCode -WebrtcVideoConduit::StartReceiving() +WebrtcVideoConduit::StartReceivingLocked() { + MOZ_ASSERT(NS_IsMainThread()); + mMutex.AssertCurrentThreadOwns(); + if (mEngineReceiving) { return kMediaConduitNoError; } - CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC); - { - // Start Receive on the video engine - MutexAutoLock lock(mCodecMutex); - MOZ_ASSERT(mRecvStream); - - mRecvStream->Start(); - // XXX File a bug to consider hooking this up to the state of mtransport - mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); - mEngineReceiving = true; + CSFLogDebug(LOGTAG, "%s Attemping to start... 
(SSRC %u (0x%x))", __FUNCTION__, + static_cast(mRecvSSRC), static_cast(mRecvSSRC)); + // Start Receiving on the video engine + if (!mRecvStream) { + MediaConduitErrorCode rval = CreateRecvStream(); + if (rval != kMediaConduitNoError) { + CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval); + return rval; + } } + mRecvStream->Start(); + // XXX File a bug to consider hooking this up to the state of mtransport + mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); + mEngineReceiving = true; + UpdateVideoStatsTimer(); + return kMediaConduitNoError; } @@ -2134,7 +2510,8 @@ void WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame) { CSFLogVerbose(LOGTAG, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__, - mRecvSSRC, mRecvSSRC, video_frame.width(), video_frame.height()); + static_cast(mRecvSSRC), static_cast(mRecvSSRC), + video_frame.width(), video_frame.height()); ReentrantMonitorAutoEnter enter(mTransportMonitor); if (!mRenderer) { @@ -2167,53 +2544,11 @@ WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame) video_frame.render_time_ms()); } -// Compare lists of codecs -bool -WebrtcVideoConduit::CodecsDifferent(const nsTArray>& a, - const nsTArray>& b) -{ - // return a != b; - // would work if UniquePtr<> operator== compared contents! - auto len = a.Length(); - if (len != b.Length()) { - return true; - } - - // XXX std::equal would work, if we could use it on this - fails for the - // same reason as above. c++14 would let us pass a comparator function. - for (uint32_t i = 0; i < len; ++i) { - if (!(*a[i] == *b[i])) { - return true; - } - } - - return false; -} - -/** - * Perform validation on the codecConfig to be applied - * Verifies if the codec is already applied. - */ -MediaConduitErrorCode -WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo) -{ - if(!codecInfo) { - CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__); - return kMediaConduitMalformedArgument; - } - - if((codecInfo->mName.empty()) || - (codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) { - CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__); - return kMediaConduitMalformedArgument; - } - - return kMediaConduitNoError; -} - void WebrtcVideoConduit::DumpCodecDB() const { + MOZ_ASSERT(NS_IsMainThread()); + for (auto& entry : mRecvCodecList) { CSFLogDebug(LOGTAG, "Payload Name: %s", entry->mName.c_str()); CSFLogDebug(LOGTAG, "Payload Type: %d", entry->mType); @@ -2225,18 +2560,24 @@ WebrtcVideoConduit::DumpCodecDB() const void WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample) { + mTransportMonitor.AssertCurrentThreadIn(); + mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen; } uint64_t WebrtcVideoConduit::MozVideoLatencyAvg() { + mTransportMonitor.AssertCurrentThreadIn(); + return mVideoLatencyAvg / sRoundingPadding; } uint64_t WebrtcVideoConduit::CodecPluginID() { + MOZ_ASSERT(NS_IsMainThread()); + if (mSendCodecPlugin) { return mSendCodecPlugin->PluginID(); } @@ -2250,6 +2591,8 @@ WebrtcVideoConduit::CodecPluginID() bool WebrtcVideoConduit::RequiresNewSendStream(const VideoCodecConfig& newConfig) const { + MOZ_ASSERT(NS_IsMainThread()); + return !mCurSendCodecConfig || mCurSendCodecConfig->mName != newConfig.mName || mCurSendCodecConfig->mType != newConfig.mType @@ -2267,12 +2610,16 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::SetEncoderSpecificSettings( rtc::scoped_refptr aSettings) { + MOZ_ASSERT(NS_IsMainThread()); + 
mConfig.encoder_specific_settings = aSettings; } void WebrtcVideoConduit::VideoEncoderConfigBuilder::SetVideoStreamFactory(rtc::scoped_refptr aFactory) { + MOZ_ASSERT(NS_IsMainThread()); + mConfig.video_stream_factory = aFactory; } @@ -2280,6 +2627,8 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::SetMinTransmitBitrateBps( int aXmitMinBps) { + MOZ_ASSERT(NS_IsMainThread()); + mConfig.min_transmit_bitrate_bps = aXmitMinBps; } @@ -2287,6 +2636,8 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::SetContentType( webrtc::VideoEncoderConfig::ContentType aContentType) { + MOZ_ASSERT(NS_IsMainThread()); + mConfig.content_type = aContentType; } @@ -2294,6 +2645,8 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::SetResolutionDivisor( unsigned char aDivisor) { + MOZ_ASSERT(NS_IsMainThread()); + mConfig.resolution_divisor = aDivisor; } @@ -2301,6 +2654,8 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::SetMaxEncodings( size_t aMaxStreams) { + MOZ_ASSERT(NS_IsMainThread()); + mConfig.number_of_streams = aMaxStreams; } @@ -2308,6 +2663,8 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream( webrtc::VideoStream aStream) { + MOZ_ASSERT(NS_IsMainThread()); + mSimulcastStreams.push_back(SimulcastStreamConfig()); MOZ_ASSERT(mSimulcastStreams.size() <= mConfig.number_of_streams); } @@ -2316,6 +2673,8 @@ void WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream( webrtc::VideoStream aStream, const SimulcastStreamConfig& aSimulcastConfig) { + MOZ_ASSERT(NS_IsMainThread()); + mSimulcastStreams.push_back(aSimulcastConfig); MOZ_ASSERT(mSimulcastStreams.size() <= mConfig.number_of_streams); } @@ -2323,12 +2682,16 @@ WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream( size_t WebrtcVideoConduit::VideoEncoderConfigBuilder::StreamCount() const { + MOZ_ASSERT(NS_IsMainThread()); + return mSimulcastStreams.size(); } void WebrtcVideoConduit::VideoEncoderConfigBuilder::ClearStreams() { + MOZ_ASSERT(NS_IsMainThread()); + mSimulcastStreams.clear(); } diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.h b/media/webrtc/signaling/src/media-conduit/VideoConduit.h index 9d3791952331..f484c9236a11 100644 --- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h +++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h @@ -114,6 +114,11 @@ public: MediaConduitErrorCode StopReceiving() override; MediaConduitErrorCode StartReceiving() override; + MediaConduitErrorCode StopTransmittingLocked(); + MediaConduitErrorCode StartTransmittingLocked(); + MediaConduitErrorCode StopReceivingLocked(); + MediaConduitErrorCode StartReceivingLocked(); + /** * Function to configure sending codec mode for different content */ @@ -169,17 +174,6 @@ public: void SelectSendResolution(unsigned short width, unsigned short height); - /** - * Function to select and change the encoding frame rate based on incoming frame rate - * and max-mbps setting. - * @param current framerate - * @result new framerate - */ - unsigned int SelectSendFrameRate(const VideoCodecConfig* codecConfig, - unsigned int old_framerate, - unsigned short sending_width, - unsigned short sending_height) const; - /** * Function to deliver a capture video frame for encoding and transport. * If the frame's timestamp is 0, it will be automatically generated. 
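
The hunks above carry the core of the new threading policy: every public Start/Stop entry point now asserts the main thread, takes mMutex, and forwards to a *Locked twin that only calls mMutex.AssertCurrentThreadOwns() and does the work. Below is a minimal, self-contained sketch of that split, not part of the patch, using std::mutex, std::lock_guard and a plain assert in place of mozilla::Mutex, MutexAutoLock and MOZ_ASSERT; ConduitLike and its members are invented names for illustration.

    #include <cassert>
    #include <mutex>
    #include <thread>

    class ConduitLike {
    public:
      ConduitLike() : mMainThread(std::this_thread::get_id()) {}

      // Public entry point, in the spirit of StopTransmitting(): assert the
      // "main" thread, take the lock, forward to the *Locked variant.
      void StopTransmitting() {
        assert(std::this_thread::get_id() == mMainThread);
        std::lock_guard<std::mutex> lock(mMutex);
        StopTransmittingLocked();
      }

    private:
      // Does the real work; the caller must already hold mMutex. std::mutex
      // has no AssertCurrentThreadOwns(), so this comment stands in for it.
      void StopTransmittingLocked() {
        if (mTransmitting) {
          // ... stop the send stream here ...
          mTransmitting = false;
        }
      }

      const std::thread::id mMainThread;
      std::mutex mMutex;
      bool mTransmitting = true;
    };

    int main() {
      ConduitLike c;
      c.StopTransmitting();  // OK: called on the constructing ("main") thread
    }

In the real patch the *Locked variants stay callable from other paths that already hold mMutex, and a misuse without the lock trips AssertCurrentThreadOwns() in debug builds, which is how the policy documents itself.
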
@@ -229,6 +223,7 @@ public: void SetPCHandle(const std::string& aPCHandle) override { + MOZ_ASSERT(NS_IsMainThread()); mPCHandle = aPCHandle; } @@ -251,13 +246,14 @@ public: } WebrtcVideoConduit(RefPtr aCall, - UniquePtr&& aVideoAdapter); + UniquePtr&& aVideoAdapter, + nsCOMPtr aStsThread); virtual ~WebrtcVideoConduit(); MediaConduitErrorCode InitMain(); virtual MediaConduitErrorCode Init(); - std::vector GetLocalSSRCs() const override; + std::vector GetLocalSSRCs() override; bool SetLocalSSRCs(const std::vector& ssrcs) override; bool GetRemoteSSRC(unsigned int* ssrc) override; bool SetRemoteSSRC(unsigned int ssrc) override; @@ -265,12 +261,17 @@ public: bool SetLocalCNAME(const char* cname) override; bool SetLocalMID(const std::string& mid) override; + bool GetRemoteSSRCLocked(unsigned int* ssrc); + bool SetRemoteSSRCLocked(unsigned int ssrc); + bool GetSendPacketTypeStats( webrtc::RtcpPacketTypeCounter* aPacketCounts) override; bool GetRecvPacketTypeStats( webrtc::RtcpPacketTypeCounter* aPacketCounts) override; + void PollStats(); + void UpdateVideoStatsTimer(); bool GetVideoEncoderStats(double* framerateMean, double* framerateStdDev, double* bitrateMean, @@ -299,6 +300,7 @@ public: uint64_t MozVideoLatencyAvg(); void DisableSsrcChanges() override { + ASSERT_ON_THREAD(mStsThread); mAllowSsrcChange = false; } @@ -307,11 +309,34 @@ private: WebrtcVideoConduit(const WebrtcVideoConduit&) = delete; void operator=(const WebrtcVideoConduit&) = delete; - /** Shared statistics for receive and transmit video streams + /** + * Statistics for the Call associated with this VideoConduit. + * Single threaded. + */ + class CallStatistics { + public: + explicit CallStatistics(nsCOMPtr aStatsThread) + : mStatsThread(aStatsThread) + {} + void Update(const webrtc::Call::Stats& aStats); + int32_t RttMs() const; + protected: + const nsCOMPtr mStatsThread; + private: + int32_t mRttMs = 0; + }; + + /** + * Shared statistics for receive and transmit video streams. + * Single threaded. */ class StreamStatistics { public: - void Update(const double aFrameRate, const double aBitrate); + explicit StreamStatistics(nsCOMPtr aStatsThread) + : mStatsThread(aStatsThread) + {} + void Update(const double aFrameRate, const double aBitrate, + const webrtc::RtcpPacketTypeCounter& aPacketCounts); /** * Returns gathered stream statistics * @param aOutFrMean: mean framerate @@ -323,58 +348,87 @@ private: double& aOutFrStdDev, double& aOutBrMean, double& aOutBrStdDev) const; + const webrtc::RtcpPacketTypeCounter& PacketCounts() const; + bool Active() const; + void SetActive(bool aActive); + protected: + const nsCOMPtr mStatsThread; private: + bool mActive = false; RunningStat mFrameRate; RunningStat mBitrate; + webrtc::RtcpPacketTypeCounter mPacketCounts; }; /** - * Statistics for sending streams + * Statistics for sending streams. Single threaded. 
*/ class SendStreamStatistics : public StreamStatistics { public: + explicit SendStreamStatistics(nsCOMPtr aStatsThread) + : StreamStatistics(std::forward>(aStatsThread)) + {} /** * Returns the calculate number of dropped frames - * @param aOutDroppedFrames: the number of dropped frames */ - void DroppedFrames(uint32_t& aOutDroppedFrames) const; + uint32_t DroppedFrames() const; /** * Returns the number of frames that have been encoded so far */ - uint32_t FramesEncoded() const { - return mFramesEncoded; - } - void Update(const webrtc::VideoSendStream::Stats& aStats); + uint32_t FramesEncoded() const; + void Update(const webrtc::VideoSendStream::Stats& aStats, + uint32_t aConfiguredSsrc); /** * Call once for every frame delivered for encoding */ - void FrameDeliveredToEncoder() { ++mFramesDeliveredToEncoder; } + void FrameDeliveredToEncoder(); + + bool SsrcFound() const; + uint32_t JitterMs() const; + uint32_t CumulativeLost() const; + uint64_t BytesReceived() const; + uint32_t PacketsReceived() const; private: uint32_t mDroppedFrames = 0; uint32_t mFramesEncoded = 0; - mozilla::Atomic mFramesDeliveredToEncoder; + int32_t mFramesDeliveredToEncoder; + + bool mSsrcFound = false; + uint32_t mJitterMs = 0; + uint32_t mCumulativeLost = 0; + uint64_t mBytesReceived = 0; + uint32_t mPacketsReceived = 0; }; - /** Statistics for receiving streams + /** + * Statistics for receiving streams. Single threaded. */ class ReceiveStreamStatistics : public StreamStatistics { public: + explicit ReceiveStreamStatistics(nsCOMPtr aStatsThread) + : StreamStatistics(std::forward>(aStatsThread)) + {} /** * Returns the number of discarded packets - * @param aOutDiscPackets: number of discarded packets */ - void DiscardedPackets(uint32_t& aOutDiscPackets) const; - /** - * Returns the number of frames decoded - * @param aOutDiscPackets: number of frames decoded - */ - void FramesDecoded(uint32_t& aFramesDecoded) const; + uint32_t DiscardedPackets() const; + /** + * Returns the number of frames decoded + */ + uint32_t FramesDecoded() const; + uint32_t JitterMs() const; + uint32_t CumulativeLost() const; + uint32_t Ssrc() const; void Update(const webrtc::VideoReceiveStream::Stats& aStats); private: uint32_t mDiscardedPackets = 0; uint32_t mFramesDecoded = 0; + uint32_t mJitterMs = 0; + uint32_t mCumulativeLost = 0; + uint32_t mSsrc = 0; }; - /* + + /** * Stores encoder configuration information and produces * a VideoEncoderConfig from it. */ @@ -410,19 +464,9 @@ private: std::vector mSimulcastStreams; }; - //Function to convert between WebRTC and Conduit codec structures - void CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo, - webrtc::VideoCodec& cinst); - - //Checks the codec to be applied - MediaConduitErrorCode ValidateCodecConfig(const VideoCodecConfig* codecInfo); - - //Utility function to dump recv codec database + // Utility function to dump recv codec database void DumpCodecDB() const; - bool CodecsDifferent(const nsTArray>& a, - const nsTArray>& b); - // Factory class for VideoStreams... vie_encoder.cc will call this to reconfigure. // We need to give it access to the conduit to make it's decisions class VideoStreamFactory : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface @@ -459,88 +503,155 @@ private: bool RequiresNewSendStream(const VideoCodecConfig& newConfig) const; mozilla::ReentrantMonitor mTransportMonitor; + + // Accessed on any thread under mTransportMonitor. RefPtr mTransmitterTransport; + + // Accessed on any thread under mTransportMonitor. 
RefPtr mReceiverTransport; + + // Accessed on any thread under mTransportMonitor. RefPtr mRenderer; - // Frame adapter - handle sinks that we feed data to, and handle resolution - // changes needed for them. + // Accessed on any thread under mTransportMonitor. + unsigned short mReceivingWidth = 0; + + // Accessed on any thread under mTransportMonitor. + unsigned short mReceivingHeight = 0; + + // Socket transport service thread that runs stats queries against us. Any thread. + const nsCOMPtr mStsThread; + + Mutex mMutex; + + // Adapter handling resolution constraints from signaling and sinks. + // Written only on main thread. Guarded by mMutex, except for reads on main. UniquePtr mVideoAdapter; + + // Our own record of the sinks added to mVideoBroadcaster so we can support + // dispatching updates to sinks from off-main-thread. Main thread only. + AutoTArray*, 1> mRegisteredSinks; + + // Broadcaster that distributes our frames to all registered sinks. + // Sinks can only be added, updated and removed on main thread. + // Frames can be passed in on any thread. rtc::VideoBroadcaster mVideoBroadcaster; // Buffer pool used for scaling frames. // Accessed on the frame-feeding thread only. webrtc::I420BufferPool mBufferPool; - // Engine state we are concerned with. + // Engine state we are concerned with. Written on main thread and read anywhere. mozilla::Atomic mEngineTransmitting; // If true ==> Transmit Subsystem is up and running mozilla::Atomic mEngineReceiving; // if true ==> Receive Subsystem up and running - int mCapId = -1; // Capturer for this conduit - //Local database of currently applied receive codecs + //Local database of currently applied receive codecs. Main thread only. nsTArray> mRecvCodecList; - // protects mCurSendCodecConfig, mVideoSend/RecvStreamStats, mSend/RecvStreams, mSendPacketCounts, mRecvPacketCounts - Mutex mCodecMutex; + // Written only on main thread. Guarded by mMutex, except for reads on main. nsAutoPtr mCurSendCodecConfig; - SendStreamStatistics mSendStreamStats; - ReceiveStreamStatistics mRecvStreamStats; - webrtc::RtcpPacketTypeCounter mSendPacketCounts; - webrtc::RtcpPacketTypeCounter mRecvPacketCounts; - // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete these: + // Bookkeeping of send stream stats. Sts thread only. + SendStreamStatistics mSendStreamStats; + + // Bookkeeping of send stream stats. Sts thread only. + ReceiveStreamStatistics mRecvStreamStats; + + // Bookkeeping of call stats. Sts thread only. + CallStatistics mCallStats; + + // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete this. + // Written only on main thread. Guarded by mMutex, except for reads on main. webrtc::VideoReceiveStream* mRecvStream = nullptr; + + // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete this. + // Written only on main thread. Guarded by mMutex, except for reads on main. webrtc::VideoSendStream* mSendStream = nullptr; + // Written on the frame feeding thread. + // Guarded by mMutex, except for reads on the frame feeding thread. unsigned short mLastWidth = 0; + + // Written on the frame feeding thread. + // Guarded by mMutex, except for reads on the frame feeding thread. unsigned short mLastHeight = 0; - unsigned short mReceivingWidth = 0; - unsigned short mReceivingHeight = 0; - unsigned int mSendingFramerate; + + // Accessed under mMutex. + unsigned int mSendingFramerate; + + // Written on main thread at creation, + // then written or read on any thread under mTransportMonitor. 
bool mVideoLatencyTestEnable = false; + + // Accessed from any thread under mTransportMonitor. uint64_t mVideoLatencyAvg = 0; - // all in bps! + + // All in bps. + // All written on main thread and guarded by mMutex, except for reads on main. int mMinBitrate = 0; int mStartBitrate = 0; int mPrefMaxBitrate = 0; int mNegotiatedMaxBitrate = 0; int mMinBitrateEstimate = 0; - bool mDenoising = false; - bool mLockScaling = false; // for tests that care about output resolution - uint8_t mSpatialLayers = 1; - uint8_t mTemporalLayers = 1; - rtc::VideoSinkWants mLastSinkWanted; + // Set to true to force denoising on. + // Written at creation, then read anywhere. + bool mDenoising = false; + + // Set to true to ignore sink wants (scaling due to bwe and cpu usage). + // Written at creation, then read anywhere. + bool mLockScaling = false; + + // Written at creation, then read anywhere. + uint8_t mSpatialLayers = 1; + + // Written at creation, then read anywhere. + uint8_t mTemporalLayers = 1; static const unsigned int sAlphaNum = 7; static const unsigned int sAlphaDen = 8; static const unsigned int sRoundingPadding = 1024; + // Main thread only. RefPtr mSyncedTo; - webrtc::VideoCodecMode mCodecMode; + // Written on main thread, read anywhere. + Atomic mCodecMode; // WEBRTC.ORG Call API - RefPtr mCall; + // Const so can be accessed on any thread. Most methods are called on + // main thread, though Receiver() is called on STS. This seems fine. + const RefPtr mCall; + // Written only on main thread. Guarded by mMutex, except for reads on main. webrtc::VideoSendStream::Config mSendStreamConfig; + + // Main thread only. VideoEncoderConfigBuilder mEncoderConfig; + // Main thread only. webrtc::VideoReceiveStream::Config mRecvStreamConfig; // Are SSRC changes without signaling allowed or not + // Accessed only on mStsThread. bool mAllowSsrcChange = true; + + // Accessed only on mStsThread. bool mWaitingForInitialSsrc = true; - // accessed on creation, and when receiving packets - uint32_t mRecvSSRC = 0; // this can change during a stream! - // The runnable to set the SSRC is in-flight; queue packets until it's done. - bool mRecvSSRCSetInProgress = false; + // Accessed only on mStsThread. + bool mRecvSsrcSetInProgress = false; + + // Accessed during configuration/signaling (main), + // and when receiving packets (sts). + Atomic mRecvSSRC; // this can change during a stream! + struct QueuedPacket { int mLen; uint8_t mData[1]; }; + // Accessed only on mStsThread. nsTArray> mQueuedPackets; // The lifetime of these codecs are maintained by the VideoConduit instance. @@ -548,11 +659,17 @@ private: // on construction. nsAutoPtr mEncoder; // only one encoder for now std::vector> mDecoders; + // Main thread only WebrtcVideoEncoder* mSendCodecPlugin = nullptr; + // Main thread only WebrtcVideoDecoder* mRecvCodecPlugin = nullptr; + // Timer that updates video stats periodically. Main thread only. nsCOMPtr mVideoStatsTimer; + // True if mVideoStatsTimer is running. Main thread only. 
+ bool mVideoStatsTimerActive = false; + // Main thread only std::string mPCHandle; }; } // end namespace diff --git a/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp index 0ca86e365ff2..4f5fd140f24a 100644 --- a/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp +++ b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp @@ -102,7 +102,7 @@ TransceiverImpl::InitAudio() void TransceiverImpl::InitVideo() { - mConduit = VideoSessionConduit::Create(mCallWrapper); + mConduit = VideoSessionConduit::Create(mCallWrapper, mStsThread); if (!mConduit) { MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ << @@ -877,7 +877,9 @@ TransceiverImpl::UpdateVideoConduit() if (mJsepTransceiver->HasBundleLevel() && (!mJsepTransceiver->mRecvTrack.GetNegotiatedDetails() || !mJsepTransceiver->mRecvTrack.GetNegotiatedDetails()->GetExt(webrtc::RtpExtension::kMIdUri))) { - conduit->DisableSsrcChanges(); + mStsThread->Dispatch(NewRunnableMethod( + "VideoSessionConduit::DisableSsrcChanges", + conduit, &VideoSessionConduit::DisableSsrcChanges)); } if (mJsepTransceiver->mRecvTrack.GetNegotiatedDetails() &&
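
The TransceiverImpl.cpp hunk above stops calling DisableSsrcChanges() directly and instead posts it to the STS thread with NewRunnableMethod, matching the ASSERT_ON_THREAD(mStsThread) that the conduit now performs before flipping mAllowSsrcChange. As a rough illustration of that dispatch-to-owning-thread pattern, here is a toy stand-in, not Gecko code: TinyEventTarget and ToyConduit are invented, and a std::function queue plays the role of the STS event target whose loop the real nsIEventTarget::Dispatch feeds.

    #include <functional>
    #include <iostream>
    #include <queue>

    class TinyEventTarget {
    public:
      void Dispatch(std::function<void()> aTask) { mTasks.push(std::move(aTask)); }
      // Drain queued tasks; in Gecko the STS thread's event loop does this.
      void Run() {
        while (!mTasks.empty()) {
          mTasks.front()();
          mTasks.pop();
        }
      }
    private:
      std::queue<std::function<void()>> mTasks;
    };

    struct ToyConduit {
      bool mAllowSsrcChange = true;
      // Must only run on the "STS" target, mirroring the conduit's assertion.
      void DisableSsrcChanges() { mAllowSsrcChange = false; }
    };

    int main() {
      TinyEventTarget sts;
      ToyConduit conduit;
      // Equivalent in spirit to mStsThread->Dispatch(NewRunnableMethod(...)).
      sts.Dispatch([&conduit] { conduit.DisableSsrcChanges(); });
      sts.Run();
      std::cout << std::boolalpha << conduit.mAllowSsrcChange << '\n';  // false
    }

The point of the indirection is that the member stays single-threaded: only the STS-side task ever reads or writes mAllowSsrcChange, so no lock is needed for it.
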