Bug 1208371 - Add PrincipalHandle to MediaChunks. r=mt,jesup

PrincipalHandle is a thread-safe pointer to a holder of the main-thread-only
nsIPrincipal; the handle itself can be passed around the MSG.

A MediaStreamTrack whose source has just updated its principal sets the new
principal aside (as its "pending principal") and combines the new principal
into its current principal.

Then the source starts passing the new principal to the MediaStreamGraph as
a PrincipalHandle.

Changes to a track's PrincipalHandle on the MSG will be surfaced through the
MediaStreamTrackListener API. These changes are dispatched to main thread
and compared to a MediaStreamTrack's pending principal. In case of a match
the track knows the correct principal is flowing and can move the pending
principal to be the current principal and update any main thread principal
observers.

MozReview-Commit-ID: D0JXGWhQFFU

--HG--
extra : rebase_source : 296e269bb46fc5a85a9c3f90dfc0dc40e53572bc
This commit is contained in:
Andreas Pehrson 2016-04-06 14:56:44 +02:00
parent dfc6d94fe7
commit be74876e25
33 changed files with 290 additions and 88 deletions

View File

@ -674,13 +674,14 @@ HTMLCanvasElement::CaptureStream(const Optional<double>& aFrameRate,
}
TrackID videoTrackId = 1;
nsresult rv = stream->Init(aFrameRate, videoTrackId);
nsCOMPtr<nsIPrincipal> principal = NodePrincipal();
nsresult rv =
stream->Init(aFrameRate, videoTrackId, principal);
if (NS_FAILED(rv)) {
aRv.Throw(rv);
return nullptr;
}
nsCOMPtr<nsIPrincipal> principal = NodePrincipal();
stream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO, nsString(),
new BasicUnstoppableTrackSource(principal));

View File

@ -145,6 +145,8 @@ DownmixAndInterleave(const nsTArray<const SrcT*>& aChannelData,
struct AudioChunk {
typedef mozilla::AudioSampleFormat SampleFormat;
AudioChunk() : mPrincipalHandle(PRINCIPAL_HANDLE_NONE) {}
// Generic methods
void SliceTo(StreamTime aStart, StreamTime aEnd)
{
@ -190,6 +192,7 @@ struct AudioChunk {
mDuration = aDuration;
mVolume = 1.0f;
mBufferFormat = AUDIO_FORMAT_SILENCE;
mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
}
size_t ChannelCount() const { return mChannelData.Length(); }
@ -224,6 +227,8 @@ struct AudioChunk {
return *reinterpret_cast<nsTArray<const T*>*>(&mChannelData);
}
PrincipalHandle GetPrincipalHandle() const { return mPrincipalHandle; }
StreamTime mDuration; // in frames within the buffer
RefPtr<ThreadSharedObject> mBuffer; // the buffer object whose lifetime is managed; null means data is all zeroes
nsTArray<const void*> mChannelData; // one pointer per channel; empty if and only if mBuffer is null
@ -232,6 +237,9 @@ struct AudioChunk {
#ifdef MOZILLA_INTERNAL_API
mozilla::TimeStamp mTimeStamp; // time at which this has been fetched from the MediaEngine
#endif
// principalHandle for the data in this chunk.
// This can be compared to an nsIPrincipal* when back on main thread.
PrincipalHandle mPrincipalHandle;
};
/**
@ -301,7 +309,7 @@ public:
void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
const nsTArray<const float*>& aChannelData,
int32_t aDuration)
int32_t aDuration, const PrincipalHandle& aPrincipalHandle)
{
AudioChunk* chunk = AppendChunk(aDuration);
chunk->mBuffer = aBuffer;
@ -313,10 +321,11 @@ public:
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aPrincipalHandle;
}
void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
const nsTArray<const int16_t*>& aChannelData,
int32_t aDuration)
int32_t aDuration, const PrincipalHandle& aPrincipalHandle)
{
AudioChunk* chunk = AppendChunk(aDuration);
chunk->mBuffer = aBuffer;
@ -328,6 +337,7 @@ public:
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aPrincipalHandle;
}
// Consumes aChunk, and returns a pointer to the persistent copy of aChunk
// in the segment.
@ -341,6 +351,7 @@ public:
#ifdef MOZILLA_INTERNAL_API
chunk->mTimeStamp = TimeStamp::Now();
#endif
chunk->mPrincipalHandle = aChunk->mPrincipalHandle;
return chunk;
}
void ApplyVolume(float aVolume);

View File

@ -24,10 +24,12 @@ class OutputStreamDriver::StreamListener : public MediaStreamListener
public:
explicit StreamListener(OutputStreamDriver* aDriver,
TrackID aTrackId,
PrincipalHandle aPrincipalHandle,
SourceMediaStream* aSourceStream)
: mEnded(false)
, mSourceStream(aSourceStream)
, mTrackId(aTrackId)
, mPrincipalHandle(aPrincipalHandle)
, mMutex("CanvasCaptureMediaStream OutputStreamDriver::StreamListener")
, mImage(nullptr)
{
@ -55,7 +57,7 @@ public:
RefPtr<Image> image = mImage;
IntSize size = image ? image->GetSize() : IntSize(0, 0);
VideoSegment segment;
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, mPrincipalHandle);
mSourceStream->AppendToTrack(mTrackId, &segment);
}
@ -72,6 +74,7 @@ private:
Atomic<bool> mEnded;
const RefPtr<SourceMediaStream> mSourceStream;
const TrackID mTrackId;
const PrincipalHandle mPrincipalHandle;
Mutex mMutex;
// The below members are protected by mMutex.
@ -79,10 +82,12 @@ private:
};
OutputStreamDriver::OutputStreamDriver(SourceMediaStream* aSourceStream,
const TrackID& aTrackId)
const TrackID& aTrackId,
const PrincipalHandle& aPrincipalHandle)
: FrameCaptureListener()
, mSourceStream(aSourceStream)
, mStreamListener(new StreamListener(this, aTrackId, aSourceStream))
, mStreamListener(new StreamListener(this, aTrackId, aPrincipalHandle,
aSourceStream))
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mSourceStream);
@ -120,8 +125,9 @@ class TimerDriver : public OutputStreamDriver
public:
explicit TimerDriver(SourceMediaStream* aSourceStream,
const double& aFPS,
const TrackID& aTrackId)
: OutputStreamDriver(aSourceStream, aTrackId)
const TrackID& aTrackId,
const PrincipalHandle& aPrincipalHandle)
: OutputStreamDriver(aSourceStream, aTrackId, aPrincipalHandle)
, mFPS(aFPS)
, mTimer(nullptr)
{
@ -178,8 +184,9 @@ class AutoDriver : public OutputStreamDriver
{
public:
explicit AutoDriver(SourceMediaStream* aSourceStream,
const TrackID& aTrackId)
: OutputStreamDriver(aSourceStream, aTrackId) {}
const TrackID& aTrackId,
const PrincipalHandle& aPrincipalHandle)
: OutputStreamDriver(aSourceStream, aTrackId, aPrincipalHandle) {}
void NewFrame(already_AddRefed<Image> aImage) override
{
@ -239,18 +246,21 @@ CanvasCaptureMediaStream::RequestFrame()
nsresult
CanvasCaptureMediaStream::Init(const dom::Optional<double>& aFPS,
const TrackID& aTrackId)
const TrackID& aTrackId,
nsIPrincipal* aPrincipal)
{
PrincipalHandle principalHandle = MakePrincipalHandle(aPrincipal);
if (!aFPS.WasPassed()) {
mOutputStreamDriver =
new AutoDriver(GetInputStream()->AsSourceStream(), aTrackId);
new AutoDriver(GetInputStream()->AsSourceStream(), aTrackId, principalHandle);
} else if (aFPS.Value() < 0) {
return NS_ERROR_ILLEGAL_VALUE;
} else {
// Cap frame rate to 60 FPS for sanity
double fps = std::min(60.0, aFPS.Value());
mOutputStreamDriver =
new TimerDriver(GetInputStream()->AsSourceStream(), fps, aTrackId);
new TimerDriver(GetInputStream()->AsSourceStream(), fps, aTrackId, principalHandle);
}
return NS_OK;
}

View File

@ -10,6 +10,8 @@
#include "mozilla/dom/HTMLCanvasElement.h"
#include "StreamBuffer.h"
class nsIPrincipal;
namespace mozilla {
class DOMMediaStream;
class MediaStreamListener;
@ -68,7 +70,8 @@ class OutputStreamDriver : public FrameCaptureListener
{
public:
OutputStreamDriver(SourceMediaStream* aSourceStream,
const TrackID& aTrackId);
const TrackID& aTrackId,
const PrincipalHandle& aPrincipalHandle);
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OutputStreamDriver);
@ -101,7 +104,8 @@ public:
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(CanvasCaptureMediaStream, DOMMediaStream)
nsresult Init(const dom::Optional<double>& aFPS, const TrackID& aTrackId);
nsresult Init(const dom::Optional<double>& aFPS, const TrackID& aTrackId,
nsIPrincipal* aPrincipal);
JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;

View File

@ -1263,6 +1263,20 @@ DOMAudioNodeMediaStream::CreateTrackUnionStream(nsPIDOMWindowInner* aWindow,
DOMHwMediaStream::DOMHwMediaStream(nsPIDOMWindowInner* aWindow)
  : DOMLocalMediaStream(aWindow, nullptr)
{
#ifdef MOZ_WIDGET_GONK
  // Resolve the owning document's principal up front, so frames appended on
  // the MSG thread can carry it as a PrincipalHandle.
  // Use PRINCIPAL_HANDLE_NONE / MakePrincipalHandle from MediaSegment.h;
  // PRINCIPAL_ID_NONE and ConvertPrincipalToID are not defined anywhere in
  // this patch and would not compile.
  if (!mWindow) {
    NS_ERROR("Expected window here.");
    mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
    return;
  }
  nsIDocument* doc = mWindow->GetDoc();
  if (!doc) {
    NS_ERROR("Expected document here.");
    mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
    return;
  }
  // NOTE(review): assumes doc->GetPrincipal() yields the node principal —
  // confirm; other call sites in this patch use NodePrincipal().
  mPrincipalHandle = MakePrincipalHandle(doc->GetPrincipal());
#endif
}
DOMHwMediaStream::~DOMHwMediaStream()
@ -1312,7 +1326,7 @@ DOMHwMediaStream::Init(MediaStream* stream, OverlayImage* aImage)
RefPtr<Image> image = static_cast<Image*>(mOverlayImage.get());
mozilla::gfx::IntSize size = image->GetSize();
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, mPrincipalHandle);
#endif
srcStream->AddTrack(TRACK_VIDEO_PRIMARY, 0, new VideoSegment());
srcStream->AppendToTrack(TRACK_VIDEO_PRIMARY, &segment);
@ -1369,7 +1383,7 @@ DOMHwMediaStream::SetImageSize(uint32_t width, uint32_t height)
mozilla::gfx::IntSize size = image->GetSize();
VideoSegment segment;
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, PRINCIPAL_ID_NONE);
srcStream->AppendToTrack(TRACK_VIDEO_PRIMARY, &segment);
#endif
}
@ -1405,7 +1419,7 @@ DOMHwMediaStream::SetOverlayImage(OverlayImage* aImage)
mozilla::gfx::IntSize size = image->GetSize();
VideoSegment segment;
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, PRINCIPAL_ID_NONE);
srcStream->AppendToTrack(TRACK_VIDEO_PRIMARY, &segment);
#endif
}

View File

@ -781,6 +781,7 @@ private:
const int DEFAULT_IMAGE_WIDTH = 400;
const int DEFAULT_IMAGE_HEIGHT = 300;
RefPtr<OverlayImage> mOverlayImage;
PrincipalID mPrincipalHandle;
#endif
};

View File

@ -296,14 +296,16 @@ public:
nsresult rv;
if (mAudioDevice) {
rv = mAudioDevice->GetSource()->Start(source, kAudioTrack);
rv = mAudioDevice->GetSource()->Start(source, kAudioTrack,
mListener->GetPrincipalHandle());
if (NS_FAILED(rv)) {
ReturnCallbackError(rv, "Starting audio failed");
return;
}
}
if (mVideoDevice) {
rv = mVideoDevice->GetSource()->Start(source, kVideoTrack);
rv = mVideoDevice->GetSource()->Start(source, kVideoTrack,
mListener->GetPrincipalHandle());
if (NS_FAILED(rv)) {
ReturnCallbackError(rv, "Starting video failed");
return;
@ -1995,8 +1997,10 @@ MediaManager::GetUserMedia(nsPIDOMWindowInner* aWindow,
StreamListeners* listeners = AddWindowID(windowID);
// Create a disabled listener to act as a placeholder
nsIPrincipal* principal = aWindow->GetExtantDoc()->NodePrincipal();
RefPtr<GetUserMediaCallbackMediaStreamListener> listener =
new GetUserMediaCallbackMediaStreamListener(mMediaThread, windowID);
new GetUserMediaCallbackMediaStreamListener(mMediaThread, windowID,
MakePrincipalHandle(principal));
// No need for locking because we always do this in the main thread.
listeners->AppendElement(listener);
@ -2010,14 +2014,14 @@ MediaManager::GetUserMedia(nsPIDOMWindowInner* aWindow,
uint32_t audioPerm = nsIPermissionManager::UNKNOWN_ACTION;
if (IsOn(c.mAudio)) {
rv = permManager->TestExactPermissionFromPrincipal(
aWindow->GetExtantDoc()->NodePrincipal(), "microphone", &audioPerm);
principal, "microphone", &audioPerm);
NS_ENSURE_SUCCESS(rv, rv);
}
uint32_t videoPerm = nsIPermissionManager::UNKNOWN_ACTION;
if (IsOn(c.mVideo)) {
rv = permManager->TestExactPermissionFromPrincipal(
aWindow->GetExtantDoc()->NodePrincipal(), "camera", &videoPerm);
principal, "camera", &videoPerm);
NS_ENSURE_SUCCESS(rv, rv);
}
@ -2311,9 +2315,12 @@ MediaManager::EnumerateDevices(nsPIDOMWindowInner* aWindow,
StreamListeners* listeners = AddWindowID(windowId);
nsIPrincipal* principal = aWindow->GetExtantDoc()->NodePrincipal();
// Create a disabled listener to act as a placeholder
RefPtr<GetUserMediaCallbackMediaStreamListener> listener =
new GetUserMediaCallbackMediaStreamListener(mMediaThread, windowId);
new GetUserMediaCallbackMediaStreamListener(mMediaThread, windowId,
MakePrincipalHandle(principal));
// No need for locking because we always do this in the main thread.
listeners->AppendElement(listener);

View File

@ -126,9 +126,11 @@ class GetUserMediaCallbackMediaStreamListener : public MediaStreamListener
public:
// Create in an inactive state
GetUserMediaCallbackMediaStreamListener(base::Thread *aThread,
uint64_t aWindowID)
uint64_t aWindowID,
const PrincipalHandle& aPrincipalHandle)
: mMediaThread(aThread)
, mWindowID(aWindowID)
, mPrincipalHandle(aPrincipalHandle)
, mStopped(false)
, mFinished(false)
, mRemoved(false)
@ -260,11 +262,11 @@ public:
// watch it especially for fake audio.
if (mAudioDevice) {
mAudioDevice->GetSource()->NotifyPull(aGraph, mStream, kAudioTrack,
aDesiredTime);
aDesiredTime, mPrincipalHandle);
}
if (mVideoDevice) {
mVideoDevice->GetSource()->NotifyPull(aGraph, mStream, kVideoTrack,
aDesiredTime);
aDesiredTime, mPrincipalHandle);
}
}
@ -301,10 +303,13 @@ public:
void
NotifyDirectListeners(MediaStreamGraph* aGraph, bool aHasListeners);
PrincipalHandle GetPrincipalHandle() const { return mPrincipalHandle; }
private:
// Set at construction
base::Thread* mMediaThread;
uint64_t mWindowID;
const PrincipalHandle mPrincipalHandle;
// true after this listener has sent MEDIA_STOP. MainThread only.
bool mStopped;

View File

@ -7,6 +7,8 @@
#define MOZILLA_MEDIASEGMENT_H_
#include "nsTArray.h"
#include "nsIPrincipal.h"
#include "nsProxyRelease.h"
#ifdef MOZILLA_INTERNAL_API
#include "mozilla/TimeStamp.h"
#endif
@ -54,6 +56,50 @@ const StreamTime STREAM_TIME_MAX = MEDIA_TIME_MAX;
typedef MediaTime GraphTime;
const GraphTime GRAPH_TIME_MAX = MEDIA_TIME_MAX;
/**
* We pass the principal through the MediaStreamGraph by wrapping it in a thread
* safe nsMainThreadPtrHandle, since it cannot be used directly off the main
* thread. We can compare two PrincipalHandles to each other on any thread, but
* they can only be created and converted back to nsIPrincipal* on main thread.
*/
typedef nsMainThreadPtrHandle<nsIPrincipal> PrincipalHandle;
inline PrincipalHandle MakePrincipalHandle(nsIPrincipal* aPrincipal)
{
  // Wrap the main-thread-only principal in a holder whose release is proxied
  // back to the main thread, then hand out a thread-safe handle to it. The
  // handle takes its own reference to the holder.
  return PrincipalHandle(
    new nsMainThreadPtrHolder<nsIPrincipal>(aPrincipal));
}
const PrincipalHandle PRINCIPAL_HANDLE_NONE(nullptr);
inline nsIPrincipal* GetPrincipalFromHandle(PrincipalHandle& aPrincipalHandle)
{
MOZ_ASSERT(NS_IsMainThread());
return aPrincipalHandle.get();
}
inline bool PrincipalHandleMatches(PrincipalHandle& aPrincipalHandle,
nsIPrincipal* aOther)
{
if (!aOther) {
return false;
}
nsIPrincipal* principal = GetPrincipalFromHandle(aPrincipalHandle);
if (!principal) {
return false;
}
bool result;
if (NS_FAILED(principal->Equals(aOther, &result))) {
NS_ERROR("Principal check failed");
return false;
}
return result;
}
/**
* A MediaSegment is a chunk of media data sequential in time. Different
* types of data have different subclasses of MediaSegment, all inheriting
@ -85,6 +131,19 @@ public:
StreamTime GetDuration() const { return mDuration; }
Type GetType() const { return mType; }
/**
* Gets the last principal id that was appended to this segment.
*/
PrincipalHandle GetLastPrincipalHandle() const { return mLastPrincipalHandle; }
/**
* Called by the MediaStreamGraph as it appends a chunk with a different
* principal id than the current one.
*/
void SetLastPrincipalHandle(PrincipalHandle aLastPrincipalHandle)
{
mLastPrincipalHandle = aLastPrincipalHandle;
}
/**
* Create a MediaSegment of the same type.
*/
@ -134,13 +193,18 @@ public:
}
protected:
explicit MediaSegment(Type aType) : mDuration(0), mType(aType)
explicit MediaSegment(Type aType)
: mDuration(0), mType(aType), mLastPrincipalHandle(PRINCIPAL_HANDLE_NONE)
{
MOZ_COUNT_CTOR(MediaSegment);
}
StreamTime mDuration; // total of mDurations of all chunks
Type mType;
// The latest principal handle that the MediaStreamGraph has processed for
// this segment.
PrincipalHandle mLastPrincipalHandle;
};
/**

View File

@ -9,6 +9,7 @@
#include "mozilla/DOMEventTargetHelper.h"
#include "nsError.h"
#include "nsID.h"
#include "nsIPrincipal.h"
#include "StreamBuffer.h"
#include "MediaTrackConstraints.h"
#include "mozilla/CORSMode.h"

View File

@ -16,11 +16,12 @@ using namespace layers;
VideoFrame::VideoFrame(already_AddRefed<Image>& aImage,
const gfx::IntSize& aIntrinsicSize)
: mImage(aImage), mIntrinsicSize(aIntrinsicSize), mForceBlack(false)
: mImage(aImage), mIntrinsicSize(aIntrinsicSize), mForceBlack(false),
mPrincipalHandle(PRINCIPAL_HANDLE_NONE)
{}
VideoFrame::VideoFrame()
: mIntrinsicSize(0, 0), mForceBlack(false)
: mIntrinsicSize(0, 0), mForceBlack(false), mPrincipalHandle(PRINCIPAL_HANDLE_NONE)
{}
VideoFrame::~VideoFrame()
@ -30,6 +31,7 @@ void
VideoFrame::SetNull() {
mImage = nullptr;
mIntrinsicSize = gfx::IntSize(0, 0);
mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
}
void
@ -38,6 +40,7 @@ VideoFrame::TakeFrom(VideoFrame* aFrame)
mImage = aFrame->mImage.forget();
mIntrinsicSize = aFrame->mIntrinsicSize;
mForceBlack = aFrame->GetForceBlack();
mPrincipalHandle = aFrame->mPrincipalHandle;
}
/* static */ already_AddRefed<Image>
@ -95,11 +98,13 @@ void
VideoSegment::AppendFrame(already_AddRefed<Image>&& aImage,
StreamTime aDuration,
const IntSize& aIntrinsicSize,
const PrincipalHandle& aPrincipalHandle,
bool aForceBlack)
{
VideoChunk* chunk = AppendChunk(aDuration);
VideoFrame frame(aImage, aIntrinsicSize);
frame.SetForceBlack(aForceBlack);
frame.SetPrincipalHandle(aPrincipalHandle);
chunk->mFrame.TakeFrom(&frame);
}

View File

@ -40,6 +40,8 @@ public:
Image* GetImage() const { return mImage; }
void SetForceBlack(bool aForceBlack) { mForceBlack = aForceBlack; }
bool GetForceBlack() const { return mForceBlack; }
void SetPrincipalHandle(const PrincipalHandle& aPrincipalHandle) { mPrincipalHandle = aPrincipalHandle; }
PrincipalHandle GetPrincipalHandle() const { return mPrincipalHandle; }
const gfx::IntSize& GetIntrinsicSize() const { return mIntrinsicSize; }
void SetNull();
void TakeFrom(VideoFrame* aFrame);
@ -54,6 +56,9 @@ protected:
// The desired size to render the video frame at.
gfx::IntSize mIntrinsicSize;
bool mForceBlack;
// principalHandle for the image in this frame.
// This can be compared to an nsIPrincipal when back on main thread.
PrincipalHandle mPrincipalHandle;
};
struct VideoChunk {
@ -86,6 +91,8 @@ struct VideoChunk {
return 0;
}
PrincipalHandle GetPrincipalHandle() const { return mFrame.GetPrincipalHandle(); }
StreamTime mDuration;
VideoFrame mFrame;
mozilla::TimeStamp mTimeStamp;
@ -102,6 +109,7 @@ public:
void AppendFrame(already_AddRefed<Image>&& aImage,
StreamTime aDuration,
const IntSize& aIntrinsicSize,
const PrincipalHandle& aPrincipalHandle,
bool aForceBlack = false);
const VideoFrame* GetLastFrame(StreamTime* aStart = nullptr)
{

View File

@ -91,7 +91,8 @@ MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
// after Resume(), so it'll get added to one of the DirectListener frames
VideoSegment segment;
gfx::IntSize size(0,0);
segment.AppendFrame(nullptr, aQueuedMedia.GetDuration(), size);
segment.AppendFrame(nullptr, aQueuedMedia.GetDuration(), size,
PRINCIPAL_HANDLE_NONE);
mVideoEncoder->NotifyQueuedTrackChanges(aGraph, aID,
aTrackOffset, aTrackEvents,
segment);

View File

@ -263,6 +263,7 @@ VideoTrackEncoder::AppendVideoSegment(const VideoSegment& aSegment)
mRawSegment.AppendFrame(image.forget(),
mTotalFrameDuration,
chunk.mFrame.GetIntrinsicSize(),
PRINCIPAL_HANDLE_NONE,
chunk.mFrame.GetForceBlack());
mTotalFrameDuration = 0;
}

View File

@ -21,6 +21,7 @@ TEST(VideoSegment, TestAppendFrameForceBlack)
segment.AppendFrame(testImage.forget(),
mozilla::StreamTime(90000),
mozilla::gfx::IntSize(640, 480),
PRINCIPAL_HANDLE_NONE,
true);
VideoSegment::ChunkIterator iter(segment);
@ -38,7 +39,8 @@ TEST(VideoSegment, TestAppendFrameNotForceBlack)
VideoSegment segment;
segment.AppendFrame(testImage.forget(),
mozilla::StreamTime(90000),
mozilla::gfx::IntSize(640, 480));
mozilla::gfx::IntSize(640, 480),
PRINCIPAL_HANDLE_NONE);
VideoSegment::ChunkIterator iter(segment);
while (!iter.IsEnded()) {

View File

@ -271,7 +271,10 @@ TEST(VP8VideoTrackEncoder, FrameEncode)
for (nsTArray<RefPtr<Image>>::size_type i = 0; i < images.Length(); i++)
{
RefPtr<Image> image = images[i];
segment.AppendFrame(image.forget(), mozilla::StreamTime(90000), generator.GetSize());
segment.AppendFrame(image.forget(),
mozilla::StreamTime(90000),
generator.GetSize(),
PRINCIPAL_HANDLE_NONE);
}
// track change notification.

View File

@ -440,7 +440,7 @@ SendStreamAudio(DecodedStreamData* aStream, int64_t aStartTime,
for (uint32_t i = 0; i < audio->mChannels; ++i) {
channels.AppendElement(bufferData + i * audio->mFrames);
}
aOutput->AppendFrames(buffer.forget(), channels, audio->mFrames);
aOutput->AppendFrames(buffer.forget(), channels, audio->mFrames, PRINCIPAL_HANDLE_NONE /* Fixed in later patch */);
aStream->mAudioFramesWritten += audio->mFrames;
aStream->mNextAudioTime = audio->GetEndTime();
@ -499,7 +499,7 @@ WriteVideoToMediaStream(MediaStream* aStream,
StreamTime duration =
aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize);
aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize, PRINCIPAL_HANDLE_NONE /* Fixed in later patch */);
}
static bool

View File

@ -121,7 +121,7 @@ public:
/* Start the device and add the track to the provided SourceMediaStream, with
* the provided TrackID. You may start appending data to the track
* immediately after. */
virtual nsresult Start(SourceMediaStream*, TrackID) = 0;
virtual nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) = 0;
/* tell the source if there are any direct listeners attached */
virtual void SetDirectListeners(bool) = 0;
@ -130,7 +130,8 @@ public:
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime) = 0;
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) = 0;
/* Stop the device and release the corresponding MediaStream */
virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;

View File

@ -19,14 +19,15 @@ extern LogModule* GetMediaManagerLog();
bool MediaEngineCameraVideoSource::AppendToTrack(SourceMediaStream* aSource,
layers::Image* aImage,
TrackID aID,
StreamTime delta)
StreamTime delta,
const PrincipalHandle& aPrincipalHandle)
{
MOZ_ASSERT(aSource);
VideoSegment segment;
RefPtr<layers::Image> image = aImage;
IntSize size(image ? mWidth : 0, image ? mHeight : 0);
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
// This is safe from any thread, and is safe if the track is Finished
// or Destroyed.

View File

@ -70,7 +70,8 @@ protected:
virtual bool AppendToTrack(SourceMediaStream* aSource,
layers::Image* aImage,
TrackID aID,
StreamTime delta);
StreamTime delta,
const PrincipalHandle& aPrincipalHandle);
uint32_t GetFitnessDistance(const webrtc::CaptureCapability& aCandidate,
const dom::MediaTrackConstraintSet &aConstraints,
bool aAdvanced,
@ -95,12 +96,13 @@ protected:
// mMonitor protects mImage access/changes, and transitions of mState
// from kStarted to kStopped (which are combined with EndTrack() and
// image changes).
// mMonitor also protects mSources[] access/changes.
// mSources[] is accessed from webrtc threads.
// mMonitor also protects mSources[] and mPrincipalHandles[] access/changes.
// mSources[] and mPrincipalHandles[] are accessed from webrtc threads.
// All the mMonitor accesses are from the child classes.
Monitor mMonitor; // Monitor for processing Camera frames.
nsTArray<RefPtr<SourceMediaStream>> mSources; // When this goes empty, we shut down HW
nsTArray<PrincipalHandle> mPrincipalHandles; // Directly mapped to mSources.
RefPtr<layers::Image> mImage;
RefPtr<layers::ImageContainer> mImageContainer;
int mWidth, mHeight; // protected with mMonitor on Gonk due to different threading

View File

@ -145,7 +145,8 @@ static void ReleaseFrame(layers::PlanarYCbCrData& aData)
}
nsresult
MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
const PrincipalHandle& aPrincipalHandle)
{
if (mState != kAllocated) {
return NS_ERROR_FAILURE;
@ -272,7 +273,8 @@ void
MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aID,
StreamTime aDesiredTime)
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle)
{
// AddTrack takes ownership of segment
VideoSegment segment;
@ -288,7 +290,7 @@ MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
if (delta > 0) {
// nullptr images are allowed
IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
aSource->AppendToTrack(aID, &segment);
@ -358,6 +360,7 @@ NS_IMPL_ISUPPORTS(MediaEngineDefaultAudioSource, nsITimerCallback)
MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
: MediaEngineAudioSource(kReleased)
, mPrincipalHandle(PRINCIPAL_HANDLE_NONE)
, mTimer(nullptr)
{
}
@ -422,7 +425,8 @@ MediaEngineDefaultAudioSource::Deallocate()
}
nsresult
MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID,
const PrincipalHandle& aPrincipalHandle)
{
if (mState != kAllocated) {
return NS_ERROR_FAILURE;
@ -456,6 +460,9 @@ MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
// Remember TrackID so we can finish later
mTrackID = aID;
// Remember PrincipalHandle since we don't append in NotifyPull.
mPrincipalHandle = aPrincipalHandle;
mLastNotify = TimeStamp::Now();
// 1 Audio frame per 10ms
@ -514,7 +521,7 @@ MediaEngineDefaultAudioSource::AppendToSegment(AudioSegment& aSegment,
mSineGenerator->generate(dest, aSamples);
AutoTArray<const int16_t*,1> channels;
channels.AppendElement(dest);
aSegment.AppendFrames(buffer.forget(), channels, aSamples);
aSegment.AppendFrames(buffer.forget(), channels, aSamples, mPrincipalHandle);
}
NS_IMETHODIMP

View File

@ -48,7 +48,7 @@ public:
const nsString& aDeviceId,
const nsACString& aOrigin) override;
nsresult Deallocate() override;
nsresult Start(SourceMediaStream*, TrackID) override;
nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
nsresult Stop(SourceMediaStream*, TrackID) override;
nsresult Restart(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs &aPrefs,
@ -57,7 +57,8 @@ public:
void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime) override;
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) override;
uint32_t GetBestFitnessDistance(
const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets,
const nsString& aDeviceId) override;
@ -118,17 +119,19 @@ public:
const nsString& aDeviceId,
const nsACString& aOrigin) override;
nsresult Deallocate() override;
nsresult Start(SourceMediaStream*, TrackID) override;
nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
nsresult Stop(SourceMediaStream*, TrackID) override;
nsresult Restart(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs &aPrefs,
const nsString& aDeviceId) override;
void SetDirectListeners(bool aHasDirectListeners) override {};
void AppendToSegment(AudioSegment& aSegment, TrackTicks aSamples);
void AppendToSegment(AudioSegment& aSegment,
TrackTicks aSamples);
void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime) override
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) override
{
#ifdef DEBUG
StreamBuffer::Track* data = aSource->FindTrack(aId);
@ -170,6 +173,7 @@ protected:
~MediaEngineDefaultAudioSource();
TrackID mTrackID;
PrincipalHandle mPrincipalHandle;
nsCOMPtr<nsITimer> mTimer;
TimeStamp mLastNotify;

View File

@ -67,7 +67,8 @@ void
MediaEngineGonkVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID,
StreamTime aDesiredTime)
StreamTime aDesiredTime,
const PrincipalID& aPrincipalHandle)
{
VideoSegment segment;
@ -95,10 +96,10 @@ MediaEngineGonkVideoSource::NotifyPull(MediaStreamGraph* aGraph,
if (delta > 0) {
// nullptr images are allowed
IntSize size(image ? mWidth : 0, image ? mHeight : 0);
segment.AppendFrame(image.forget(), delta, size);
segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
aSource->AppendToTrack(aID, &(segment));
aSource->AppendToTrack(aID, &(segment), aPrincipalHandle);
}
}
@ -883,7 +884,7 @@ MediaEngineGonkVideoSource::OnNewMediaBufferFrame(MediaBuffer* aBuffer)
// Unfortunately, clock in gonk camera looks like is a different one
// comparing to MSG. As result, it causes time inaccurate. (frames be
// queued in MSG longer and longer as time going by in device like Frame)
AppendToTrack(mSources[i], mImage, mTrackID, 1);
AppendToTrack(mSources[i], mImage, mTrackID, 1, mPrincipalHandles[i]);
}
}
if (mImage->AsGrallocImage()) {

View File

@ -77,6 +77,7 @@ MediaEngineRemoteVideoSource::Shutdown()
MonitorAutoLock lock(mMonitor);
empty = mSources.IsEmpty();
if (empty) {
MOZ_ASSERT(mPrincipalHandles.IsEmpty());
break;
}
source = mSources[0];
@ -130,6 +131,7 @@ MediaEngineRemoteVideoSource::Allocate(const dom::MediaTrackConstraints& aConstr
} else if (MOZ_LOG_TEST(GetMediaManagerLog(), mozilla::LogLevel::Debug)) {
MonitorAutoLock lock(mMonitor);
if (mSources.IsEmpty()) {
MOZ_ASSERT(mPrincipalHandles.IsEmpty());
LOG(("Video device %d reallocated", mCaptureIndex));
} else {
LOG(("Video device %d allocated shared", mCaptureIndex));
@ -166,7 +168,8 @@ MediaEngineRemoteVideoSource::Deallocate()
}
nsresult
MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
const PrincipalHandle& aPrincipalHandle)
{
LOG((__PRETTY_FUNCTION__));
AssertIsOnOwningThread();
@ -178,6 +181,8 @@ MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
MonitorAutoLock lock(mMonitor);
mSources.AppendElement(aStream);
mPrincipalHandles.AppendElement(aPrincipalHandle);
MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
}
aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
@ -209,11 +214,16 @@ MediaEngineRemoteVideoSource::Stop(mozilla::SourceMediaStream* aSource,
{
MonitorAutoLock lock(mMonitor);
if (!mSources.RemoveElement(aSource)) {
size_t i = mSources.IndexOf(aSource);
if (i == mSources.NoIndex) {
// Already stopped - this is allowed
return NS_OK;
}
MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
mSources.RemoveElementAt(i);
mPrincipalHandles.RemoveElementAt(i);
aSource->EndTrack(aID);
if (!mSources.IsEmpty()) {
@ -268,7 +278,8 @@ MediaEngineRemoteVideoSource::Restart(const dom::MediaTrackConstraints& aConstra
void
MediaEngineRemoteVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID, StreamTime aDesiredTime)
TrackID aID, StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle)
{
VideoSegment segment;
@ -276,8 +287,13 @@ MediaEngineRemoteVideoSource::NotifyPull(MediaStreamGraph* aGraph,
StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
if (delta > 0) {
size_t i = mSources.IndexOf(aSource);
if (i == mSources.NoIndex) {
NS_ERROR("aSource not in mSources");
return;
}
// nullptr images are allowed
AppendToTrack(aSource, mImage, aID, delta);
AppendToTrack(aSource, mImage, aID, delta, aPrincipalHandle);
}
}
@ -361,7 +377,8 @@ MediaEngineRemoteVideoSource::DeliverFrame(unsigned char* buffer,
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
AppendToTrack(mSources[i], mImage, mTrackID, 1); // shortest possible duration
// shortest possible duration
AppendToTrack(mSources[i], mImage, mTrackID, 1, mPrincipalHandles[i]);
}
}

View File

@ -76,7 +76,7 @@ public:
const nsString& aDeviceId,
const nsACString& aOrigin) override;
nsresult Deallocate() override;;
nsresult Start(SourceMediaStream*, TrackID) override;
nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
nsresult Stop(SourceMediaStream*, TrackID) override;
nsresult Restart(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs &aPrefs,
@ -84,7 +84,8 @@ public:
void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
StreamTime aDesiredTime) override;
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) override;
dom::MediaSourceEnum GetMediaSource() const override {
return mMediaSource;
}

View File

@ -184,7 +184,8 @@ MediaEngineTabVideoSource::Deallocate()
}
nsresult
MediaEngineTabVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
MediaEngineTabVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
const PrincipalHandle& aPrincipalHandle)
{
nsCOMPtr<nsIRunnable> runnable;
if (!mWindow)
@ -200,7 +201,8 @@ MediaEngineTabVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
void
MediaEngineTabVideoSource::NotifyPull(MediaStreamGraph*,
SourceMediaStream* aSource,
TrackID aID, StreamTime aDesiredTime)
TrackID aID, StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle)
{
VideoSegment segment;
MonitorAutoLock mon(mMonitor);
@ -211,7 +213,8 @@ MediaEngineTabVideoSource::NotifyPull(MediaStreamGraph*,
if (delta > 0) {
// nullptr images are allowed
gfx::IntSize size = image ? image->GetSize() : IntSize(0, 0);
segment.AppendFrame(image.forget().downcast<layers::Image>(), delta, size);
segment.AppendFrame(image.forget().downcast<layers::Image>(), delta, size,
aPrincipalHandle);
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
aSource->AppendToTrack(aID, &(segment));

View File

@ -27,9 +27,9 @@ class MediaEngineTabVideoSource : public MediaEngineVideoSource, nsIDOMEventList
const nsString& aDeviceId,
const nsACString& aOrigin) override;
nsresult Deallocate() override;
nsresult Start(mozilla::SourceMediaStream*, mozilla::TrackID) override;
nsresult Start(mozilla::SourceMediaStream*, mozilla::TrackID, const mozilla::PrincipalHandle&) override;
void SetDirectListeners(bool aHasDirectListeners) override {};
void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime) override;
void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime, const mozilla::PrincipalHandle& aPrincipalHandle) override;
nsresult Stop(mozilla::SourceMediaStream*, mozilla::TrackID) override;
nsresult Restart(const dom::MediaTrackConstraints& aConstraints,
const mozilla::MediaEnginePrefs& aPrefs,

View File

@ -89,7 +89,9 @@ public:
{
// Nothing to do here, everything is managed in MediaManager.cpp
}
nsresult Start(SourceMediaStream* aMediaStream, TrackID aId) override;
nsresult Start(SourceMediaStream* aMediaStream,
TrackID aId,
const PrincipalHandle& aPrincipalHandle) override;
nsresult Stop(SourceMediaStream* aMediaStream, TrackID aId) override;
nsresult Restart(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs &aPrefs,
@ -104,8 +106,11 @@ public:
const AudioDataValue* aBuffer, size_t aFrames,
TrackRate aRate, uint32_t aChannels) override
{}
void NotifyPull(MediaStreamGraph* aGraph, SourceMediaStream* aSource,
TrackID aID, StreamTime aDesiredTime) override
void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID,
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) override
{}
dom::MediaSourceEnum GetMediaSource() const override
{
@ -435,7 +440,9 @@ public:
const nsString& aDeviceId,
const nsACString& aOrigin) override;
nsresult Deallocate() override;
nsresult Start(SourceMediaStream* aStream, TrackID aID) override;
nsresult Start(SourceMediaStream* aStream,
TrackID aID,
const PrincipalHandle& aPrincipalHandle) override;
nsresult Stop(SourceMediaStream* aSource, TrackID aID) override;
nsresult Restart(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs &aPrefs,
@ -445,7 +452,8 @@ public:
void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
StreamTime aDesiredTime) override;
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle) override;
// AudioDataListenerInterface methods
void NotifyOutputData(MediaStreamGraph* aGraph,
@ -502,11 +510,13 @@ private:
nsAutoPtr<AudioPacketizer<AudioDataValue, int16_t>> mPacketizer;
ScopedCustomReleasePtr<webrtc::VoEExternalMedia> mVoERenderListener;
// mMonitor protects mSources[] access/changes, and transitions of mState
// from kStarted to kStopped (which are combined with EndTrack()).
// mSources[] is accessed from webrtc threads.
// mMonitor protects mSources[] and mPrinicpalIds[] access/changes, and
// transitions of mState from kStarted to kStopped (which are combined with
// EndTrack()). mSources[] and mPrincipalHandles[] are accessed from webrtc
// threads.
Monitor mMonitor;
nsTArray<RefPtr<SourceMediaStream>> mSources;
nsTArray<PrincipalHandle> mPrincipalHandles; // Maps to mSources.
nsCOMPtr<nsIThread> mThread;
int mCapIndex;
int mChannel;

View File

@ -338,7 +338,8 @@ MediaEngineWebRTCMicrophoneSource::Deallocate()
nsresult
MediaEngineWebRTCMicrophoneSource::Start(SourceMediaStream *aStream,
TrackID aID)
TrackID aID,
const PrincipalHandle& aPrincipalHandle)
{
AssertIsOnOwningThread();
if (!mInitDone || !aStream) {
@ -348,6 +349,8 @@ MediaEngineWebRTCMicrophoneSource::Start(SourceMediaStream *aStream,
{
MonitorAutoLock lock(mMonitor);
mSources.AppendElement(aStream);
mPrincipalHandles.AppendElement(aPrincipalHandle);
MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
}
AudioSegment* segment = new AudioSegment();
@ -401,10 +404,14 @@ MediaEngineWebRTCMicrophoneSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
MonitorAutoLock lock(mMonitor);
if (!mSources.RemoveElement(aSource)) {
size_t sourceIndex = mSources.IndexOf(aSource);
if (sourceIndex == mSources.NoIndex) {
// Already stopped - this is allowed
return NS_OK;
}
mSources.RemoveElementAt(sourceIndex);
mPrincipalHandles.RemoveElementAt(sourceIndex);
MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
aSource->EndTrack(aID);
@ -444,7 +451,8 @@ void
MediaEngineWebRTCMicrophoneSource::NotifyPull(MediaStreamGraph *aGraph,
SourceMediaStream *aSource,
TrackID aID,
StreamTime aDesiredTime)
StreamTime aDesiredTime,
const PrincipalHandle& aPrincipalHandle)
{
// Ignore - we push audio data
LOG_FRAMES(("NotifyPull, desired = %ld", (int64_t) aDesiredTime));
@ -669,7 +677,8 @@ MediaEngineWebRTCMicrophoneSource::Process(int channel,
nsAutoPtr<AudioSegment> segment(new AudioSegment());
AutoTArray<const sample*,1> channels;
channels.AppendElement(dest);
segment->AppendFrames(buffer.forget(), channels, length);
segment->AppendFrames(buffer.forget(), channels, length,
mPrincipalHandles[i]);
TimeStamp insertTime;
segment->GetStartTime(insertTime);
@ -722,7 +731,8 @@ MediaEngineWebRTCAudioCaptureSource::GetUUID(nsACString &aUUID)
nsresult
MediaEngineWebRTCAudioCaptureSource::Start(SourceMediaStream *aMediaStream,
TrackID aId)
TrackID aId,
const PrincipalHandle& aPrincipalHandle)
{
AssertIsOnOwningThread();
aMediaStream->AddTrack(aId, 0, new AudioSegment());

View File

@ -160,7 +160,7 @@ bool
SpeechRecognition::IsAuthorized(JSContext* aCx, JSObject* aGlobal)
{
nsCOMPtr<nsIPrincipal> principal = nsContentUtils::ObjectPrincipal(aGlobal);
nsresult rv;
nsCOMPtr<nsIPermissionManager> mgr = do_GetService(NS_PERMISSIONMANAGER_CONTRACTID, &rv);
if (NS_WARN_IF(NS_FAILED(rv))) {
@ -934,7 +934,8 @@ SpeechRecognition::CreateAudioSegment(nsTArray<RefPtr<SharedBuffer>>& aChunks)
AutoTArray<const int16_t*, 1> channels;
channels.AppendElement(chunkData);
segment->AppendFrames(buffer.forget(), channels, mAudioSamplesPerChunk);
segment->AppendFrames(buffer.forget(), channels, mAudioSamplesPerChunk,
PRINCIPAL_HANDLE_NONE);
}
return segment;

View File

@ -332,7 +332,8 @@ nsSpeechTask::SendAudioImpl(RefPtr<mozilla::SharedBuffer>& aSamples, uint32_t aD
AudioSegment segment;
AutoTArray<const int16_t*, 1> channelData;
channelData.AppendElement(static_cast<int16_t*>(aSamples->Data()));
segment.AppendFrames(aSamples.forget(), channelData, aDataLen);
segment.AppendFrames(aSamples.forget(), channelData, aDataLen,
PRINCIPAL_HANDLE_NONE);
mStream->AppendToTrack(1, &segment);
mStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
}

View File

@ -1480,7 +1480,8 @@ NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
outputChannels.AppendElements(channels);
segment.AppendFrames(samples.forget(), outputChannels, frames);
segment.AppendFrames(samples.forget(), outputChannels, frames,
PRINCIPAL_HANDLE_NONE /* Fixed in later patch */);
// Handle track not actually added yet or removed/finished
if (source_->AppendToTrack(track_id_, &segment)) {
@ -1609,7 +1610,8 @@ NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
// delta and thus messes up handling of the graph
if (delta > 0) {
VideoSegment segment;
segment.AppendFrame(image.forget(), delta, IntSize(width_, height_));
segment.AppendFrame(image.forget(), delta, IntSize(width_, height_),
PRINCIPAL_HANDLE_NONE /* Fixed in later patch */);
// Handle track not actually added yet or removed/finished
if (source_->AppendToTrack(track_id_, &segment)) {
played_ticks_ = desired_time;

View File

@ -118,7 +118,10 @@ void Fake_AudioStreamSource::Periodic() {
mozilla::AudioSegment segment;
AutoTArray<const int16_t *,1> channels;
channels.AppendElement(data);
segment.AppendFrames(samples.forget(), channels, AUDIO_BUFFER_SIZE);
segment.AppendFrames(samples.forget(),
channels,
AUDIO_BUFFER_SIZE,
mozilla::PRINCIPAL_HANDLE_NONE);
for(std::set<RefPtr<Fake_MediaStreamListener>>::iterator it = mListeners.begin();
it != mListeners.end(); ++it) {