Bug 788185: add a/v sync to Audio/Video Conduits r=derf
parent 7d0a71aaeb
commit 7c163ea485

@@ -161,6 +161,9 @@ public:
   MediaConduitErrorCode Init(WebrtcAudioConduit *other);

+  int GetChannel() { return mChannel; }
+  webrtc::VoiceEngine* GetVoiceEngine() { return mVoiceEngine; }
+
 private:
   WebrtcAudioConduit(const WebrtcAudioConduit& other) MOZ_DELETE;
   void operator=(const WebrtcAudioConduit& other) MOZ_DELETE;

@@ -3,6 +3,7 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */

 #include "VideoConduit.h"
+#include "AudioConduit.h"
 #include "video_engine/include/vie_errors.h"
 #include "CSFLog.h"

@@ -72,6 +73,7 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
 {
   mPtrViEBase->StopSend(mChannel);
   mPtrViEBase->StopReceive(mChannel);
+  SyncTo(nullptr);
   mPtrViEBase->DeleteChannel(mChannel);
   mPtrViEBase->Release();
 }

@@ -234,6 +236,22 @@ MediaConduitErrorCode WebrtcVideoConduit::Init()
   return kMediaConduitNoError;
 }

+void
+WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
+{
+  CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
+
+  if (aConduit) {
+    mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
+    mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
+    // NOTE: this means the VideoConduit will keep the AudioConduit alive!
+    mSyncedTo = aConduit;
+  } else if (mSyncedTo) {
+    mPtrViEBase->DisconnectAudioChannel(mChannel);
+    mPtrViEBase->SetVoiceEngine(nullptr);
+    mSyncedTo = nullptr;
+  }
+}
+
 MediaConduitErrorCode
 WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer)

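A minimal, hypothetical sketch (not part of the patch) of how a caller might drive SyncTo(); the helper names are invented for illustration:

    // Hypothetical helpers, for illustration only.
    void PairForLipSync(mozilla::WebrtcVideoConduit* aVideo,
                        mozilla::WebrtcAudioConduit* aAudio)
    {
      // Hands the audio conduit's VoiceEngine and channel to ViE so playout
      // of the two RTP streams can be aligned; the video conduit then holds
      // a RefPtr to the audio conduit until SyncTo(nullptr) is called.
      aVideo->SyncTo(aAudio);
    }

    void UnpairForLipSync(mozilla::WebrtcVideoConduit* aVideo)
    {
      // Disconnects the audio channel from the video channel and drops the
      // reference; the WebrtcVideoConduit destructor does the same.
      aVideo->SyncTo(nullptr);
    }
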
@@ -33,6 +33,8 @@

 namespace mozilla {

+class WebrtcAudioConduit;
+
 /**
  * Concrete class for Video session. Hooks up
  *  - media-source and target to external transport

@@ -47,6 +49,11 @@ public:
   //VoiceEngine defined constant for Payload Name Size.
   static const unsigned int CODEC_PLNAME_SIZE;

+  /**
+   * Set up A/V sync between this (incoming) VideoConduit and an audio conduit.
+   */
+  void SyncTo(WebrtcAudioConduit *aConduit);
+
   /**
    * Function to attach Renderer end-point for the Media-Video conduit.
    * @param aRenderer : Reference to the concrete Video renderer implementation

@@ -206,6 +213,8 @@ private:
   int mCapId;   // Capturer for this conduit
   RecvCodecList    mRecvCodecList;
   VideoCodecConfig* mCurSendCodecConfig;
+
+  mozilla::RefPtr<WebrtcAudioConduit> mSyncedTo;
 };

@@ -1357,7 +1357,7 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
     CSFLogDebug(logTag, "Created audio pipeline %p, conduit=%p, pc_stream=%d pc_track=%d",
                 pipeline.get(), conduit.get(), pc_stream_id, pc_track_id);

-    stream->StorePipeline(pc_track_id, pipeline);
+    stream->StorePipeline(pc_track_id, false, pipeline);
   } else if (CC_IS_VIDEO(mcap_id)) {

     std::vector<mozilla::VideoCodecConfig *> configs;

@@ -1401,7 +1401,7 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
     CSFLogDebug(logTag, "Created video pipeline %p, conduit=%p, pc_stream=%d pc_track=%d",
                 pipeline.get(), conduit.get(), pc_stream_id, pc_track_id);

-    stream->StorePipeline(pc_track_id, pipeline);
+    stream->StorePipeline(pc_track_id, true, pipeline);
   } else {
     CSFLogError(logTag, "%s: mcap_id unrecognized", __FUNCTION__);
     return VCM_ERROR;

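At both call sites the new argument is a bare boolean (false on the audio branch, true on the video branch); a hypothetical, more self-documenting spelling of the same calls would be:

    stream->StorePipeline(pc_track_id, /* aIsVideo = */ false, pipeline);  // audio branch
    stream->StorePipeline(pc_track_id, /* aIsVideo = */ true,  pipeline);  // video branch
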
@@ -120,6 +120,8 @@ class MediaPipeline : public sigslot::has_slots<> {
   int rtp_packets_received() const { return rtp_packets_received_; }
   int rtcp_packets_received() const { return rtp_packets_received_; }

+  MediaSessionConduit *Conduit() { return conduit_; }
+
   // Thread counting
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaPipeline)

@@ -12,6 +12,8 @@
 #include "nricemediastream.h"
 #include "PeerConnectionImpl.h"
 #include "PeerConnectionMedia.h"
+#include "AudioConduit.h"
+#include "VideoConduit.h"
 #include "runnable_utils.h"

 #ifdef MOZILLA_INTERNAL_API

@@ -329,16 +331,39 @@ LocalSourceStreamInfo::StorePipeline(int aTrack,

 void
 RemoteSourceStreamInfo::StorePipeline(int aTrack,
-                                      mozilla::RefPtr<mozilla::MediaPipeline> aPipeline)
+                                      bool aIsVideo,
+                                      mozilla::RefPtr<mozilla::MediaPipeline> aPipeline)
 {
   MOZ_ASSERT(mPipelines.find(aTrack) == mPipelines.end());
   if (mPipelines.find(aTrack) != mPipelines.end()) {
-    CSFLogErrorS(logTag, __FUNCTION__ << ": Storing duplicate track");
+    CSFLogErrorS(logTag, __FUNCTION__ << ": Request to store duplicate track " << aTrack);
     return;
   }
+  CSFLogDebug(logTag, "%s track %d %s = %p", __FUNCTION__, aTrack, aIsVideo ? "video" : "audio",
+              aPipeline.get());
+  // See if we have both audio and video here, and if so cross the streams and sync them
+  // XXX Needs to be adjusted when we support multiple streams of the same type
+  for (std::map<int, bool>::iterator it = mTypes.begin(); it != mTypes.end(); ++it) {
+    if (it->second != aIsVideo) {
+      // Ok, we have one video, one non-video - cross the streams!
+      mozilla::WebrtcAudioConduit *audio_conduit = static_cast<mozilla::WebrtcAudioConduit*>
+                                                   (aIsVideo ?
+                                                    mPipelines[it->first]->Conduit() :
+                                                    aPipeline->Conduit());
+      mozilla::WebrtcVideoConduit *video_conduit = static_cast<mozilla::WebrtcVideoConduit*>
+                                                   (aIsVideo ?
+                                                    aPipeline->Conduit() :
+                                                    mPipelines[it->first]->Conduit());
+      video_conduit->SyncTo(audio_conduit);
+      CSFLogDebug(logTag, "Syncing %p to %p, %d to %d", video_conduit, audio_conduit,
+                  aTrack, it->first);
+    }
+  }
+
   //TODO: Revisit once we start supporting multiple streams or multiple tracks
   // of same type
   mPipelines[aTrack] = aPipeline;
+  //TODO: move to attribute on Pipeline
+  mTypes[aTrack] = aIsVideo;
 }

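A minimal, hypothetical sketch (not part of the patch) of the conduit selection above, with invented locals pulled out so the cross-pairing is easier to follow; 'existing' stands for mPipelines[it->first] and 'incoming' for aPipeline:

    // Hypothetical rewrite of the pairing step, for illustration only.
    mozilla::RefPtr<mozilla::MediaPipeline> existing = mPipelines[it->first];
    mozilla::RefPtr<mozilla::MediaPipeline> incoming = aPipeline;

    // Whichever pipeline carries video supplies the WebrtcVideoConduit and
    // the other supplies the WebrtcAudioConduit; the video side is then
    // synced to the audio side.
    mozilla::MediaSessionConduit *videoSide =
        aIsVideo ? incoming->Conduit() : existing->Conduit();
    mozilla::MediaSessionConduit *audioSide =
        aIsVideo ? existing->Conduit() : incoming->Conduit();

    static_cast<mozilla::WebrtcVideoConduit*>(videoSide)->SyncTo(
        static_cast<mozilla::WebrtcAudioConduit*>(audioSide));
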
@@ -205,10 +205,12 @@ class RemoteSourceStreamInfo {
   nsDOMMediaStream* GetMediaStream() {
     return mMediaStream;
   }
-  void StorePipeline(int aTrack, mozilla::RefPtr<mozilla::MediaPipeline> aPipeline);
+  void StorePipeline(int aTrack, bool aIsVideo,
+                     mozilla::RefPtr<mozilla::MediaPipeline> aPipeline);

   void Detach() {
     // walk through all the MediaPipelines and disconnect them.
+    // XXX we should clear the mTypes map
     for (std::map<int, mozilla::RefPtr<mozilla::MediaPipeline> >::iterator it =
            mPipelines.begin(); it != mPipelines.end();
          ++it) {

@@ -221,6 +223,7 @@ class RemoteSourceStreamInfo {
  private:
   nsRefPtr<nsDOMMediaStream> mMediaStream;
   std::map<int, mozilla::RefPtr<mozilla::MediaPipeline> > mPipelines;
+  std::map<int, bool> mTypes;
 };

 class PeerConnectionMedia : public sigslot::has_slots<> {