Bug 1212237 - Be explicit about principals for received tracks. r=bwc
This patch fixes two things:

1) A potential threading issue in setting and reading the PrincipalHandle for MediaPipelineReceiveVideo.

2) Plumbs the PrincipalHandle down to the receiving MediaPipelines right from the start.

The plumbing hasn't been necessary in the past, since the principal initially handed to a track's constructor is trusted, but it is good form and will help clarify the situation for the next patch, which switches the initial principal from always-private to mostly-non-private. In most cases this principal no longer gets updated after the track has been created, so it helps to have a PrincipalHandle that matches the track's principal.

Differential Revision: https://phabricator.services.mozilla.com/D48946

--HG--
extra : moz-landing-system : lando
parent d817feca61
commit b009f6cd63
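The threading fix in (1) comes down to guarding the receive-side PrincipalHandle with a mutex, so the main thread can swap the principal while the frame-delivery thread reads it for every frame it appends. A minimal, standalone sketch of that pattern follows; the type and class names are stand-ins (PrincipalHandle modeled as std::string, Mozilla's Mutex/MutexAutoLock as std::mutex/std::lock_guard), not the actual Gecko API; the real version is MediaPipelineReceiveVideo::PipelineListener in the diff below.

// Standalone sketch of the mutex-guarded principal pattern (stand-in types,
// not Gecko's Mutex/PrincipalHandle/ControlMessage machinery).
#include <iostream>
#include <mutex>
#include <string>
#include <utility>

using PrincipalHandle = std::string;  // stand-in for the real handle type

class VideoReceiveListenerSketch {
 public:
  // The patch passes the initial principal in at construction time instead
  // of starting from a placeholder and updating it later.
  explicit VideoReceiveListenerSketch(PrincipalHandle aInitial)
      : mPrincipalHandle(std::move(aInitial)) {}

  // Main thread: called when the principal changes (e.g. once negotiation
  // establishes that the page may see the media).
  void SetPrincipalHandle(const PrincipalHandle& aPrincipalHandle) {
    std::lock_guard<std::mutex> lock(mMutex);
    mPrincipalHandle = aPrincipalHandle;
  }

  // Frame-delivery thread: called for every decoded frame.
  void RenderVideoFrame() {
    PrincipalHandle principal;
    {
      // Copy under the lock, then tag the frame with the copy.
      std::lock_guard<std::mutex> lock(mMutex);
      principal = mPrincipalHandle;
    }
    std::cout << "appending frame tagged with principal: " << principal << "\n";
  }

 private:
  std::mutex mMutex;                 // protects mPrincipalHandle
  PrincipalHandle mPrincipalHandle;  // written on main, read per frame
};

int main() {
  VideoReceiveListenerSketch listener("isolated (null) principal");
  listener.RenderVideoFrame();
  listener.SetPrincipalHandle("page principal");
  listener.RenderVideoFrame();
  return 0;
}

Copying the handle under the lock and using the copy keeps the critical section small; the frame append itself never runs while the mutex is held.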
@@ -345,7 +345,7 @@ class TestAgentReceive : public TestAgent {
     audio_pipeline_ = new mozilla::MediaPipelineReceiveAudio(
         test_pc, transport_, nullptr, test_utils->sts_target(),
         static_cast<mozilla::AudioSessionConduit*>(audio_conduit_.get()),
-        nullptr);
+        nullptr, PRINCIPAL_HANDLE_NONE);

     audio_pipeline_->Start();
@@ -1168,7 +1168,6 @@ class GenericReceiveListener : public MediaTrackListener {
             &static_cast<RemoteTrackSource&>(aTrack->GetSource()))),
         mSource(mTrackSource->mStream),
         mIsAudio(aTrack->AsAudioStreamTrack()),
-        mPrincipalHandle(PRINCIPAL_HANDLE_NONE),
         mListening(false),
         mMaybeTrackNeedsUnmute(true) {
     MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
@@ -1229,38 +1228,10 @@ class GenericReceiveListener : public MediaTrackListener {
                           &RemoteTrackSource::ForceEnded));
   }

-  // Must be called on the main thread
-  void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
-    class Message : public ControlMessage {
-     public:
-      Message(GenericReceiveListener* aListener,
-              const PrincipalHandle& aPrincipalHandle)
-          : ControlMessage(nullptr),
-            mListener(aListener),
-            mPrincipalHandle(aPrincipalHandle) {}
-
-      void Run() override {
-        mListener->SetPrincipalHandle_mtg(mPrincipalHandle);
-      }
-
-      const RefPtr<GenericReceiveListener> mListener;
-      PrincipalHandle mPrincipalHandle;
-    };
-
-    mSource->GraphImpl()->AppendMessage(
-        MakeUnique<Message>(this, aPrincipalHandle));
-  }
-
-  // Must be called on the MediaTrackGraph thread
-  void SetPrincipalHandle_mtg(const PrincipalHandle& aPrincipalHandle) {
-    mPrincipalHandle = aPrincipalHandle;
-  }
-
  protected:
   const nsMainThreadPtrHandle<RemoteTrackSource> mTrackSource;
   const RefPtr<SourceMediaTrack> mSource;
   const bool mIsAudio;
-  PrincipalHandle mPrincipalHandle;
   bool mListening;
   Atomic<bool> mMaybeTrackNeedsUnmute;
 };
@@ -1278,7 +1249,8 @@ class MediaPipelineReceiveAudio::PipelineListener
     : public GenericReceiveListener {
  public:
   PipelineListener(dom::MediaStreamTrack* aTrack,
-                   const RefPtr<MediaSessionConduit>& aConduit)
+                   const RefPtr<MediaSessionConduit>& aConduit,
+                   const PrincipalHandle& aPrincipalHandle)
       : GenericReceiveListener(aTrack),
         mConduit(aConduit),
         // AudioSession conduit only supports 16, 32, 44.1 and 48kHz
@@ -1292,7 +1264,8 @@ class MediaPipelineReceiveAudio::PipelineListener
         mTaskQueue(
             new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                           "AudioPipelineListener")),
-        mPlayedTicks(0) {
+        mPlayedTicks(0),
+        mPrincipalHandle(aPrincipalHandle) {
     mSource->SetAppendDataSourceRate(mRate);
     mSource->AddListener(this);
   }
@@ -1303,6 +1276,33 @@ class MediaPipelineReceiveAudio::PipelineListener
     NotifyPullImpl(aDesiredTime);
   }

+  // Must be called on the main thread
+  void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
+    class Message : public ControlMessage {
+     public:
+      Message(PipelineListener* aListener,
+              const PrincipalHandle& aPrincipalHandle)
+          : ControlMessage(nullptr),
+            mListener(aListener),
+            mPrincipalHandle(aPrincipalHandle) {}
+
+      void Run() override {
+        mListener->SetPrincipalHandle_mtg(mPrincipalHandle);
+      }
+
+      const RefPtr<PipelineListener> mListener;
+      const PrincipalHandle mPrincipalHandle;
+    };
+
+    mSource->GraphImpl()->AppendMessage(
+        MakeUnique<Message>(this, aPrincipalHandle));
+  }
+
+  // Must be called on the MediaTrackGraph thread
+  void SetPrincipalHandle_mtg(const PrincipalHandle& aPrincipalHandle) {
+    mPrincipalHandle = aPrincipalHandle;
+  }
+
  private:
   ~PipelineListener() {
     NS_ReleaseOnMainThreadSystemGroup("MediaPipeline::mConduit",
@@ -1399,15 +1399,19 @@ class MediaPipelineReceiveAudio::PipelineListener
   // Number of frames of data that has been added to the SourceMediaTrack in
   // the graph's rate.
   TrackTicks mPlayedTicks;
+  PrincipalHandle mPrincipalHandle;
 };

 MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
     const std::string& aPc, MediaTransportHandler* aTransportHandler,
     nsCOMPtr<nsIEventTarget> aMainThread, nsCOMPtr<nsIEventTarget> aStsThread,
-    RefPtr<AudioSessionConduit> aConduit, dom::MediaStreamTrack* aTrack)
+    RefPtr<AudioSessionConduit> aConduit, dom::MediaStreamTrack* aTrack,
+    const PrincipalHandle& aPrincipalHandle)
     : MediaPipelineReceive(aPc, aTransportHandler, aMainThread, aStsThread,
                            aConduit),
-      mListener(aTrack ? new PipelineListener(aTrack, mConduit) : nullptr) {
+      mListener(aTrack
+                    ? new PipelineListener(aTrack, mConduit, aPrincipalHandle)
+                    : nullptr) {
   mDescription = mPc + "| Receive audio";
 }

@@ -1448,15 +1452,28 @@ void MediaPipelineReceiveAudio::OnRtpPacketReceived() {
 class MediaPipelineReceiveVideo::PipelineListener
     : public GenericReceiveListener {
  public:
-  explicit PipelineListener(dom::MediaStreamTrack* aTrack)
+  PipelineListener(dom::MediaStreamTrack* aTrack,
+                   const PrincipalHandle& aPrincipalHandle)
       : GenericReceiveListener(aTrack),
         mImageContainer(
-            LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS)) {
+            LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS)),
+        mMutex("MediaPipelineReceiveVideo::PipelineListener::mMutex"),
+        mPrincipalHandle(aPrincipalHandle) {
     mSource->AddListener(this);
   }

+  void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
+    MutexAutoLock lock(mMutex);
+    mPrincipalHandle = aPrincipalHandle;
+  }
+
   void RenderVideoFrame(const webrtc::VideoFrameBuffer& aBuffer,
                         uint32_t aTimeStamp, int64_t aRenderTime) {
+    PrincipalHandle principal;
+    {
+      MutexAutoLock lock(mMutex);
+      principal = mPrincipalHandle;
+    }
     RefPtr<Image> image;
     if (aBuffer.type() == webrtc::VideoFrameBuffer::Type::kNative) {
       // We assume that only native handles are used with the
@@ -1501,12 +1518,14 @@ class MediaPipelineReceiveVideo::PipelineListener

     VideoSegment segment;
     auto size = image->GetSize();
-    segment.AppendFrame(image.forget(), size, mPrincipalHandle);
+    segment.AppendFrame(image.forget(), size, principal);
     mSource->AppendData(&segment);
   }

  private:
   RefPtr<layers::ImageContainer> mImageContainer;
+  Mutex mMutex;  // Protects the below members.
+  PrincipalHandle mPrincipalHandle;
 };

 class MediaPipelineReceiveVideo::PipelineRenderer
@@ -1531,11 +1550,13 @@ class MediaPipelineReceiveVideo::PipelineRenderer
 MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(
     const std::string& aPc, MediaTransportHandler* aTransportHandler,
     nsCOMPtr<nsIEventTarget> aMainThread, nsCOMPtr<nsIEventTarget> aStsThread,
-    RefPtr<VideoSessionConduit> aConduit, dom::MediaStreamTrack* aTrack)
+    RefPtr<VideoSessionConduit> aConduit, dom::MediaStreamTrack* aTrack,
+    const PrincipalHandle& aPrincipalHandle)
     : MediaPipelineReceive(aPc, aTransportHandler, aMainThread, aStsThread,
                            aConduit),
       mRenderer(new PipelineRenderer(this)),
-      mListener(aTrack ? new PipelineListener(aTrack) : nullptr) {
+      mListener(aTrack ? new PipelineListener(aTrack, aPrincipalHandle)
+                       : nullptr) {
   mDescription = mPc + "| Receive video";
   aConduit->AttachRenderer(mRenderer);
 }
@@ -348,7 +348,8 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
                             nsCOMPtr<nsIEventTarget> aMainThread,
                             nsCOMPtr<nsIEventTarget> aStsThread,
                             RefPtr<AudioSessionConduit> aConduit,
-                            dom::MediaStreamTrack* aTrack);
+                            dom::MediaStreamTrack* aTrack,
+                            const PrincipalHandle& aPrincipalHandle);

   void DetachMedia() override;

@@ -365,7 +366,7 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
   // Separate class to allow ref counting
   class PipelineListener;

-  RefPtr<PipelineListener> mListener;
+  const RefPtr<PipelineListener> mListener;
 };

 // A specialization of pipeline for reading from the network and
@@ -377,7 +378,8 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
                             nsCOMPtr<nsIEventTarget> aMainThread,
                             nsCOMPtr<nsIEventTarget> aStsThread,
                             RefPtr<VideoSessionConduit> aConduit,
-                            dom::MediaStreamTrack* aTrack);
+                            dom::MediaStreamTrack* aTrack,
+                            const PrincipalHandle& aPrincipalHandle);

   // Called on the main thread.
   void DetachMedia() override;
@@ -399,7 +401,7 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
   class PipelineListener;

   const RefPtr<PipelineRenderer> mRenderer;
-  RefPtr<PipelineListener> mListener;
+  const RefPtr<PipelineListener> mListener;
 };

 } // namespace mozilla
@@ -1001,13 +1001,29 @@ already_AddRefed<TransceiverImpl> PeerConnectionImpl::CreateTransceiverImpl(
     aSendTrack->AddPrincipalChangeObserver(this);
   }

+  // Set the principal used for the receive tracks. This makes the track
+  // data (audio/video samples) accessible to the receiving page. We're
+  // only certain that privacy has been requested if we're connected.
+  RefPtr<nsIPrincipal> principal;
+  Document* doc = GetWindow()->GetExtantDoc();
+  MOZ_ASSERT(doc);
+  const bool privacyRequested = mPrivacyRequested.valueOr(true);
+  if (privacyRequested) {
+    // We're either certain that we need isolation for the tracks, OR
+    // we're not sure and we can fix the track and pipeline in AlpnNegotiated.
+    principal =
+        NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
+  } else {
+    principal = doc->NodePrincipal();
+  }
+
   OwningNonNull<dom::MediaStreamTrack> receiveTrack =
-      CreateReceiveTrack(aJsepTransceiver->GetMediaType());
+      CreateReceiveTrack(aJsepTransceiver->GetMediaType(), principal);

   RefPtr<TransceiverImpl> transceiverImpl;

-  aRv = mMedia->AddTransceiver(aJsepTransceiver, *receiveTrack, aSendTrack,
-                               &transceiverImpl);
+  aRv =
+      mMedia->AddTransceiver(aJsepTransceiver, *receiveTrack, aSendTrack,
+                             MakePrincipalHandle(principal), &transceiverImpl);

   return transceiverImpl.forget();
 }
@@ -1829,24 +1845,9 @@ static int GetDTMFToneCode(uint16_t c) {
 }

 OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
-    SdpMediaSection::MediaType type) {
+    SdpMediaSection::MediaType type, nsIPrincipal* aPrincipal) {
   bool audio = (type == SdpMediaSection::MediaType::kAudio);

-  // Set the principal used for creating the tracks. This makes the track
-  // data (audio/video samples) accessible to the receiving page. We're
-  // only certain that privacy hasn't been requested if we're connected.
-  nsCOMPtr<nsIPrincipal> principal;
-  Document* doc = GetWindow()->GetExtantDoc();
-  MOZ_ASSERT(doc);
-  if (mPrivacyRequested.isSome() && !*mPrivacyRequested) {
-    principal = doc->NodePrincipal();
-  } else {
-    // we're either certain that we need isolation for the tracks, OR
-    // we're not sure and we can fix the track in SetDtlsConnected
-    principal =
-        NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
-  }
-
   MediaTrackGraph* graph = MediaTrackGraph::GetInstance(
       audio ? MediaTrackGraph::AUDIO_THREAD_DRIVER
             : MediaTrackGraph::SYSTEM_THREAD_DRIVER,
@@ -1857,13 +1858,13 @@ OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
   if (audio) {
     RefPtr<SourceMediaTrack> source =
         graph->CreateSourceTrack(MediaSegment::AUDIO);
-    trackSource = new RemoteTrackSource(source, principal,
+    trackSource = new RemoteTrackSource(source, aPrincipal,
                                         NS_ConvertASCIItoUTF16("remote audio"));
     track = new AudioStreamTrack(GetWindow(), source, trackSource);
   } else {
     RefPtr<SourceMediaTrack> source =
         graph->CreateSourceTrack(MediaSegment::VIDEO);
-    trackSource = new RemoteTrackSource(source, principal,
+    trackSource = new RemoteTrackSource(source, aPrincipal,
                                         NS_ConvertASCIItoUTF16("remote video"));
     track = new VideoStreamTrack(GetWindow(), source, trackSource);
   }
@@ -310,7 +310,7 @@ class PeerConnectionImpl final
                            ErrorResult& rv);

   OwningNonNull<dom::MediaStreamTrack> CreateReceiveTrack(
-      SdpMediaSection::MediaType type);
+      SdpMediaSection::MediaType type, nsIPrincipal* aPrincipal);

   bool CheckNegotiationNeeded(ErrorResult& rv);

@@ -581,16 +581,16 @@ void PeerConnectionMedia::ShutdownMediaTransport_s() {

 nsresult PeerConnectionMedia::AddTransceiver(
     JsepTransceiver* aJsepTransceiver, dom::MediaStreamTrack& aReceiveTrack,
-    dom::MediaStreamTrack* aSendTrack,
+    dom::MediaStreamTrack* aSendTrack, const PrincipalHandle& aPrincipalHandle,
     RefPtr<TransceiverImpl>* aTransceiverImpl) {
   if (!mCall) {
     mCall = WebRtcCallWrapper::Create();
   }

-  RefPtr<TransceiverImpl> transceiver =
-      new TransceiverImpl(mParent->GetHandle(), mTransportHandler,
-                          aJsepTransceiver, mMainThread.get(), mSTSThread.get(),
-                          &aReceiveTrack, aSendTrack, mCall.get());
+  RefPtr<TransceiverImpl> transceiver = new TransceiverImpl(
+      mParent->GetHandle(), mTransportHandler, aJsepTransceiver,
+      mMainThread.get(), mSTSThread.get(), &aReceiveTrack, aSendTrack,
+      mCall.get(), aPrincipalHandle);

   if (!transceiver->IsValid()) {
     return NS_ERROR_FAILURE;
@@ -82,6 +82,7 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
   nsresult AddTransceiver(JsepTransceiver* aJsepTransceiver,
                           dom::MediaStreamTrack& aReceiveTrack,
                           dom::MediaStreamTrack* aSendTrack,
+                          const PrincipalHandle& aPrincipalHandle,
                           RefPtr<TransceiverImpl>* aTransceiverImpl);

   void GetTransmitPipelinesMatching(
@@ -35,7 +35,8 @@ TransceiverImpl::TransceiverImpl(
     const std::string& aPCHandle, MediaTransportHandler* aTransportHandler,
     JsepTransceiver* aJsepTransceiver, nsIEventTarget* aMainThread,
     nsIEventTarget* aStsThread, dom::MediaStreamTrack* aReceiveTrack,
-    dom::MediaStreamTrack* aSendTrack, WebRtcCallWrapper* aCallWrapper)
+    dom::MediaStreamTrack* aSendTrack, WebRtcCallWrapper* aCallWrapper,
+    const PrincipalHandle& aPrincipalHandle)
     : mPCHandle(aPCHandle),
       mTransportHandler(aTransportHandler),
       mJsepTransceiver(aJsepTransceiver),
@@ -47,9 +48,9 @@ TransceiverImpl::TransceiverImpl(
       mSendTrack(aSendTrack),
       mCallWrapper(aCallWrapper) {
   if (IsVideo()) {
-    InitVideo();
+    InitVideo(aPrincipalHandle);
   } else {
-    InitAudio();
+    InitAudio(aPrincipalHandle);
   }

   if (!IsValid()) {
@@ -69,7 +70,7 @@ TransceiverImpl::~TransceiverImpl() = default;

 NS_IMPL_ISUPPORTS0(TransceiverImpl)

-void TransceiverImpl::InitAudio() {
+void TransceiverImpl::InitAudio(const PrincipalHandle& aPrincipalHandle) {
   mConduit = AudioSessionConduit::Create(mCallWrapper, mStsThread);

   if (!mConduit) {
@@ -82,10 +83,11 @@ void TransceiverImpl::InitAudio() {

   mReceivePipeline = new MediaPipelineReceiveAudio(
       mPCHandle, mTransportHandler, mMainThread.get(), mStsThread.get(),
-      static_cast<AudioSessionConduit*>(mConduit.get()), mReceiveTrack);
+      static_cast<AudioSessionConduit*>(mConduit.get()), mReceiveTrack,
+      aPrincipalHandle);
 }

-void TransceiverImpl::InitVideo() {
+void TransceiverImpl::InitVideo(const PrincipalHandle& aPrincipalHandle) {
   mConduit = VideoSessionConduit::Create(mCallWrapper, mStsThread);

   if (!mConduit) {
|
||||
|
||||
mReceivePipeline = new MediaPipelineReceiveVideo(
|
||||
mPCHandle, mTransportHandler, mMainThread.get(), mStsThread.get(),
|
||||
static_cast<VideoSessionConduit*>(mConduit.get()), mReceiveTrack);
|
||||
static_cast<VideoSessionConduit*>(mConduit.get()), mReceiveTrack,
|
||||
aPrincipalHandle);
|
||||
}
|
||||
|
||||
nsresult TransceiverImpl::UpdateSinkIdentity(
|
||||
@ -256,9 +259,9 @@ nsresult TransceiverImpl::UpdatePrincipal(nsIPrincipal* aPrincipal) {
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// This blasts away the existing principal.
|
||||
// We only do this when we become certain that all tracks are safe to make
|
||||
// accessible to the script principal.
|
||||
// This updates the existing principal. If we're setting a principal that does
|
||||
// not subsume the old principal, there will be a delay while the principal
|
||||
// propagates in media content, before the track has the principal aPrincipal.
|
||||
static_cast<RemoteTrackSource&>(mReceiveTrack->GetSource())
|
||||
.SetPrincipal(aPrincipal);
|
||||
|
||||
|
@@ -56,7 +56,8 @@ class TransceiverImpl : public nsISupports {
                   nsIEventTarget* aMainThread, nsIEventTarget* aStsThread,
                   dom::MediaStreamTrack* aReceiveTrack,
                   dom::MediaStreamTrack* aSendTrack,
-                  WebRtcCallWrapper* aCallWrapper);
+                  WebRtcCallWrapper* aCallWrapper,
+                  const PrincipalHandle& aPrincipalHandle);

   bool IsValid() const { return !!mConduit; }

@@ -133,8 +134,8 @@ class TransceiverImpl : public nsISupports {

  private:
   virtual ~TransceiverImpl();
-  void InitAudio();
-  void InitVideo();
+  void InitAudio(const PrincipalHandle& aPrincipalHandle);
+  void InitVideo(const PrincipalHandle& aPrincipalHandle);
   nsresult UpdateAudioConduit();
   nsresult UpdateVideoConduit();
   nsresult ConfigureVideoCodecMode(VideoSessionConduit& aConduit);