Merge autoland to m-c. a=merge

This commit is contained in:
Ryan VanderMeulen 2016-09-01 12:11:51 -04:00
commit 37b200a0c5
150 changed files with 972 additions and 983 deletions

View File

@ -695,8 +695,11 @@ nsContextMenu.prototype = {
this.target.getRequest(Ci.nsIImageLoadingContent.CURRENT_REQUEST);
if (request && (request.imageStatus & request.STATUS_SIZE_AVAILABLE))
this.onLoadedImage = true;
if (request && (request.imageStatus & request.STATUS_LOAD_COMPLETE))
if (request &&
(request.imageStatus & request.STATUS_LOAD_COMPLETE) &&
!(request.imageStatus & request.STATUS_ERROR)) {
this.onCompletedImage = true;
}
this.mediaURL = this.target.currentURI.spec;

View File

@ -221,8 +221,8 @@ fi
])
dnl Configure an Android SDK.
dnl Arg 1: target SDK version, like 22.
dnl Arg 2: build tools version, like 22.0.1.
dnl Arg 1: target SDK version, like 23.
dnl Arg 2: list of build-tools versions, like "23.0.3 23.0.1".
AC_DEFUN([MOZ_ANDROID_SDK],
[
@ -254,12 +254,21 @@ case "$target" in
fi
AC_MSG_RESULT([$android_sdk])
android_build_tools="$android_sdk_root"/build-tools/$2
AC_MSG_CHECKING([for Android build-tools version $2])
if test -d "$android_build_tools" -a -f "$android_build_tools/aapt"; then
AC_MSG_RESULT([$android_build_tools])
else
AC_MSG_ERROR([You must install the Android build-tools version $2. Try |mach bootstrap|. (Looked for $android_build_tools)])
AC_MSG_CHECKING([for Android build-tools])
android_build_tools_base="$android_sdk_root"/build-tools
android_build_tools_version=""
versions=($2)
for version in $versions; do
android_build_tools="$android_build_tools_base"/$version
if test -d "$android_build_tools" -a -f "$android_build_tools/aapt"; then
android_build_tools_version=$version
AC_MSG_RESULT([$android_build_tools])
break
fi
done
if test "$android_build_tools_version" == ""; then
version=$(echo $versions | cut -d" " -f1)
AC_MSG_ERROR([You must install the Android build-tools version $version. Try |mach bootstrap|. (Looked for "$android_build_tools_base"/$version)])
fi
MOZ_PATH_PROG(ZIPALIGN, zipalign, :, [$android_build_tools])
@ -309,7 +318,7 @@ case "$target" in
ANDROID_SDK="${android_sdk}"
ANDROID_SDK_ROOT="${android_sdk_root}"
ANDROID_TOOLS="${android_tools}"
ANDROID_BUILD_TOOLS_VERSION="$2"
ANDROID_BUILD_TOOLS_VERSION="$android_build_tools_version"
AC_DEFINE_UNQUOTED(ANDROID_TARGET_SDK,$ANDROID_TARGET_SDK)
AC_SUBST(ANDROID_TARGET_SDK)
AC_SUBST(ANDROID_SDK_ROOT)

View File

@ -176,7 +176,8 @@ module.exports = createClass({
if (functionDisplayName) {
elements.push(
dom.span({ className: "frame-link-function-display-name" },
functionDisplayName)
functionDisplayName),
" "
);
}
}
@ -236,7 +237,7 @@ module.exports = createClass({
elements.push(sourceEl);
if (showHost && host) {
elements.push(dom.span({ className: "frame-link-host" }, host));
elements.push(" ", dom.span({ className: "frame-link-host" }, host));
}
return dom.span(attributes, ...elements);

View File

@ -42,12 +42,12 @@ const StackTrace = createClass({
let frames = [];
stacktrace.forEach(s => {
if (s.asyncCause) {
frames.push(AsyncFrame({
frames.push("\t", AsyncFrame({
asyncCause: s.asyncCause
}));
}), "\n");
}
frames.push(Frame({
frames.push("\t", Frame({
frame: {
functionDisplayName: s.functionName,
source: s.filename.split(" -> ").pop(),
@ -58,7 +58,7 @@ const StackTrace = createClass({
showAnonymousFunctionName: true,
showFullSourceUrl: true,
onClick: onViewSourceInDebugger
}));
}), "\n");
});
return dom.div({ className: "stack-trace" }, frames);

View File

@ -45,7 +45,9 @@ window.onload = function() {
let traceEl = trace.getDOMNode();
ok(traceEl, "Rendered StackTrace has an element");
let frameEls = traceEl.childNodes;
// Get the child nodes and filter out the text-only whitespace ones
let frameEls = Array.from(traceEl.childNodes)
.filter(n => n.className.includes("frame"));
ok(frameEls, "Rendered StackTrace has frames");
is(frameEls.length, 3, "StackTrace has 3 frames");
@ -76,6 +78,14 @@ window.onload = function() {
shouldLink: true,
tooltip: "View source in Debugger → http://myfile.com/loadee.js:10",
});
// Check the tabs and newlines in the stack trace textContent
let traceText = traceEl.textContent;
let traceLines = traceText.split("\n");
ok(traceLines.length > 0, "There are newlines in the stack trace text");
is(traceLines.pop(), "", "There is a newline at the end of the stack trace text");
is(traceLines.length, 3, "The stack trace text has 3 lines");
ok(traceLines.every(l => l[0] == "\t"), "Every stack trace line starts with tab");
});
}
</script>

View File

@ -935,8 +935,6 @@ Messages.Simple.prototype = extend(Messages.BaseMessage.prototype, {
this.element.appendChild(body);
this.element.appendChild(this.document.createTextNode("\n"));
this.element.clipboardText = this.element.textContent;
if (this.private) {
@ -993,12 +991,16 @@ Messages.Simple.prototype = extend(Messages.BaseMessage.prototype, {
let location = this._renderLocation();
if (repeatNode) {
bodyFlex.appendChild(this.document.createTextNode(" "));
bodyFlex.appendChild(repeatNode);
}
if (location) {
bodyFlex.appendChild(this.document.createTextNode(" "));
bodyFlex.appendChild(location);
}
bodyFlex.appendChild(this.document.createTextNode("\n"));
if (this.stack) {
this._attachment = new Widgets.Stacktrace(this, this.stack).render().element;
}

View File

@ -3,67 +3,95 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
/* globals goDoCommand */
"use strict";
// Test copying of the entire console message when right-clicked
// with no other text selected. See Bug 1100562.
function test() {
add_task(function* () {
let hud;
let outputNode;
let contextMenu;
const TEST_URI = "http://example.com/browser/devtools/client/webconsole/" +
"test/test-console.html";
const TEST_URI = "http://example.com/browser/devtools/client/webconsole/test/test-console.html";
Task.spawn(runner).then(finishTest);
const { tab, browser } = yield loadTab(TEST_URI);
hud = yield openConsole(tab);
outputNode = hud.outputNode;
contextMenu = hud.iframeWindow.document.getElementById("output-contextmenu");
function* runner() {
const {tab} = yield loadTab(TEST_URI);
hud = yield openConsole(tab);
outputNode = hud.outputNode;
contextMenu = hud.iframeWindow.document.getElementById("output-contextmenu");
registerCleanupFunction(() => {
hud = outputNode = contextMenu = null;
});
registerCleanupFunction(() => {
hud = outputNode = contextMenu = null;
});
hud.jsterm.clearOutput();
hud.jsterm.clearOutput();
content.console.log("bug 1100562");
yield ContentTask.spawn(browser, {}, function* () {
let button = content.document.getElementById("testTrace");
button.click();
});
let [results] = yield waitForMessages({
webconsole: hud,
messages: [{
let results = yield waitForMessages({
webconsole: hud,
messages: [
{
text: "bug 1100562",
category: CATEGORY_WEBDEV,
severity: SEVERITY_LOG,
}]
});
lines: 1,
},
{
name: "console.trace output",
consoleTrace: true,
lines: 3,
},
]
});
outputNode.focus();
let message = [...results.matched][0];
outputNode.focus();
yield waitForContextMenu(contextMenu, message, copyFromPopup,
testContextMenuCopy);
for (let result of results) {
let message = [...result.matched][0];
function copyFromPopup() {
yield waitForContextMenu(contextMenu, message, () => {
let copyItem = contextMenu.querySelector("#cMenu_copy");
copyItem.doCommand();
let controller = top.document.commandDispatcher
.getControllerForCommand("cmd_copy");
is(controller.isCommandEnabled("cmd_copy"), true, "cmd_copy is enabled");
}
});
function testContextMenuCopy() {
waitForClipboard((str) => {
return message.textContent.trim() == str.trim();
}, () => {
goDoCommand("cmd_copy");
}, () => {}, () => {}
);
}
let clipboardText;
yield closeConsole(tab);
yield waitForClipboardPromise(
() => goDoCommand("cmd_copy"),
(str) => {
clipboardText = str;
return message.textContent == clipboardText;
}
);
ok(clipboardText, "Clipboard text was found and saved");
let lines = clipboardText.split("\n");
ok(lines.length > 0, "There is at least one newline in the message");
is(lines.pop(), "", "There is a newline at the end");
is(lines.length, result.lines, `There are ${result.lines} lines in the message`);
// Test the first line for "timestamp message repeat file:line"
let firstLine = lines.shift();
ok(/^[\d:.]+ .+ \d+ .+:\d+$/.test(firstLine),
"The message's first line has the right format");
// Test the remaining lines (stack trace) for "TABfunctionName sourceURL:line:col"
for (let line of lines) {
ok(/^\t.+ .+:\d+:\d+$/.test(line), "The stack trace line has the right format");
}
}
}
yield closeConsole(tab);
yield finishTest();
});

View File

@ -13,6 +13,12 @@
console.log(str);
}
}
function testTrace() {
console.log("bug 1100562");
console.trace();
}
console.info("INLINE SCRIPT:");
test();
console.warn("I'm warning you, he will eat up all yr bacon.");
@ -22,6 +28,7 @@
<body>
<h1 id="header">Heads Up Display Demo</h1>
<button onclick="test();">Log stuff about Dolske</button>
<button id="testTrace" onclick="testTrace();">Log stuff with stacktrace</button>
<div id="myDiv"></div>
</body>
</html>

View File

@ -143,10 +143,14 @@ AnimationEffectReadOnly::GetComputedTimingAt(
StickyTimeDuration activeTime;
StickyTimeDuration beforeActiveBoundary =
std::min(StickyTimeDuration(aTiming.mDelay), result.mEndTime);
std::max(std::min(StickyTimeDuration(aTiming.mDelay), result.mEndTime),
zeroDuration);
StickyTimeDuration activeAfterBoundary =
std::min(StickyTimeDuration(aTiming.mDelay + result.mActiveDuration),
result.mEndTime);
std::max(std::min(StickyTimeDuration(aTiming.mDelay +
result.mActiveDuration),
result.mEndTime),
zeroDuration);
if (localTime > activeAfterBoundary ||
(aPlaybackRate >= 0 && localTime == activeAfterBoundary)) {
@ -155,9 +159,10 @@ AnimationEffectReadOnly::GetComputedTimingAt(
// The animation isn't active or filling at this time.
return result;
}
activeTime = std::max(std::min(result.mActiveDuration,
result.mActiveDuration + aTiming.mEndDelay),
zeroDuration);
activeTime =
std::max(std::min(StickyTimeDuration(localTime - aTiming.mDelay),
result.mActiveDuration),
zeroDuration);
} else if (localTime < beforeActiveBoundary ||
(aPlaybackRate < 0 && localTime == beforeActiveBoundary)) {
result.mPhase = ComputedTiming::AnimationPhase::Before;
@ -165,7 +170,8 @@ AnimationEffectReadOnly::GetComputedTimingAt(
// The animation isn't active or filling at this time.
return result;
}
// activeTime is zero
activeTime = std::max(StickyTimeDuration(localTime - aTiming.mDelay),
zeroDuration);
} else {
MOZ_ASSERT(result.mActiveDuration != zeroDuration,
"How can we be in the middle of a zero-duration interval?");

View File

@ -114,7 +114,8 @@ struct TimingParams
StickyTimeDuration EndTime() const
{
return mDelay + ActiveDuration() + mEndDelay;
return std::max(mDelay + ActiveDuration() + mEndDelay,
StickyTimeDuration());
}
bool operator==(const TimingParams& aOther) const;

View File

@ -192,7 +192,7 @@ test(function(t) {
// undefined value.
var div = addDiv(t, {style: 'animation: moveAnimation 10s -100s forwards'});
var effect = div.getAnimations()[0].effect;
assert_equals(effect.getComputedTiming().endTime, -90 * MS_PER_SEC,
assert_equals(effect.getComputedTiming().endTime, 0,
'Initial value of endTime');
}, 'endTime of an animation that finishes before its startTime');

View File

@ -279,7 +279,7 @@ public:
/**
* This listener observes the first video frame to arrive with a non-empty size,
* and calls HTMLMediaElement::ReceivedMediaStreamInitialSize() with that size.
* and calls HTMLMediaElement::UpdateInitialMediaSize() with that size.
*/
class HTMLMediaElement::StreamSizeListener : public DirectMediaStreamTrackListener {
public:
@ -287,13 +287,17 @@ public:
mElement(aElement),
mInitialSizeFound(false)
{}
void Forget() { mElement = nullptr; }
void ReceivedSize(gfx::IntSize aSize)
{
MOZ_ASSERT(NS_IsMainThread());
if (!mElement) {
return;
}
RefPtr<HTMLMediaElement> deathGrip = mElement;
mElement->UpdateInitialMediaSize(aSize);
}
@ -302,18 +306,27 @@ public:
StreamTime aTrackOffset,
const MediaSegment& aMedia) override
{
if (mInitialSizeFound || aMedia.GetType() != MediaSegment::VIDEO) {
if (mInitialSizeFound) {
return;
}
if (aMedia.GetType() != MediaSegment::VIDEO) {
MOZ_ASSERT(false, "Should only lock on to a video track");
return;
}
const VideoSegment& video = static_cast<const VideoSegment&>(aMedia);
for (VideoSegment::ConstChunkIterator c(video); !c.IsEnded(); c.Next()) {
if (c->mFrame.GetIntrinsicSize() != gfx::IntSize(0,0)) {
mInitialSizeFound = true;
nsCOMPtr<nsIRunnable> event =
NewRunnableMethod<gfx::IntSize>(
this, &StreamSizeListener::ReceivedSize,
c->mFrame.GetIntrinsicSize());
aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
NewRunnableMethod<gfx::IntSize>(this, &StreamSizeListener::ReceivedSize,
c->mFrame.GetIntrinsicSize());
// This is fine to dispatch straight to main thread (instead of via
// ...AfterStreamUpdate()) since it reflects state of the element,
// not the stream. Events reflecting stream or track state should be
// dispatched so their order is preserved.
NS_DispatchToMainThread(event.forget());
return;
}
}
@ -323,7 +336,9 @@ private:
// These fields may only be accessed on the main thread
HTMLMediaElement* mElement;
// These fields may only be accessed on the MSG thread
// These fields may only be accessed on the MSG's appending thread.
// (this is a direct listener so we get called by whoever is producing
// this track's data)
bool mInitialSizeFound;
};
@ -2575,16 +2590,21 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
mAudioCaptured = true;
}
if (mDecoder) {
out->mCapturingDecoder = true;
mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
aFinishWhenEnded);
} else if (mSrcStream) {
out->mCapturingMediaStream = true;
}
if (mReadyState == HAVE_NOTHING) {
// Do not expose the tracks directly before we have metadata.
// Do not expose the tracks until we have metadata.
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
if (mDecoder) {
out->mCapturingDecoder = true;
mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
aFinishWhenEnded);
if (HasAudio()) {
TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
RefPtr<MediaStreamTrackSource> trackSource =
@ -2610,22 +2630,6 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
}
if (mSrcStream) {
out->mCapturingMediaStream = true;
MediaStream* inputStream = out->mStream->GetInputStream();
if (!inputStream) {
NS_ERROR("No input stream");
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
ProcessedMediaStream* processedInputStream =
inputStream->AsProcessedStream();
if (!processedInputStream) {
NS_ERROR("Input stream not a ProcessedMediaStream");
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
AudioTrack* t = (*AudioTracks())[i];
if (t->Enabled()) {

View File

@ -242,7 +242,6 @@ MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
mQuickBuffering(false),
mMinimizePreroll(false),
mDecodeThreadWaiting(false),
mDecodingFirstFrame(true),
mSentLoadedMetadataEvent(false),
mSentFirstFrameLoadedEvent(false),
mSentPlaybackEndedEvent(false),
@ -488,7 +487,7 @@ MediaDecoderStateMachine::NeedToDecodeVideo()
IsVideoDecoding(), mMinimizePreroll, HaveEnoughDecodedVideo());
return IsVideoDecoding() &&
mState != DECODER_STATE_SEEKING &&
((IsDecodingFirstFrame() && VideoQueue().GetSize() == 0) ||
((!mSentFirstFrameLoadedEvent && VideoQueue().GetSize() == 0) ||
(!mMinimizePreroll && !HaveEnoughDecodedVideo()));
}
@ -496,7 +495,8 @@ bool
MediaDecoderStateMachine::NeedToSkipToNextKeyframe()
{
MOZ_ASSERT(OnTaskQueue());
if (IsDecodingFirstFrame()) {
// Don't skip when we're still decoding first frames.
if (!mSentFirstFrameLoadedEvent) {
return false;
}
MOZ_ASSERT(mState == DECODER_STATE_DECODING ||
@ -557,7 +557,7 @@ MediaDecoderStateMachine::NeedToDecodeAudio()
return IsAudioDecoding() &&
mState != DECODER_STATE_SEEKING &&
((IsDecodingFirstFrame() && AudioQueue().GetSize() == 0) ||
((!mSentFirstFrameLoadedEvent && AudioQueue().GetSize() == 0) ||
(!mMinimizePreroll && !HaveEnoughDecodedAudio()));
}
@ -733,7 +733,7 @@ bool
MediaDecoderStateMachine::MaybeFinishDecodeFirstFrame()
{
MOZ_ASSERT(OnTaskQueue());
if (!IsDecodingFirstFrame() ||
if (mSentFirstFrameLoadedEvent ||
(IsAudioDecoding() && AudioQueue().GetSize() == 0) ||
(IsVideoDecoding() && VideoQueue().GetSize() == 0)) {
return false;
@ -790,7 +790,7 @@ MediaDecoderStateMachine::OnVideoDecoded(MediaData* aVideoSample,
return;
}
TimeDuration decodeTime = TimeStamp::Now() - aDecodeStartTime;
if (!IsDecodingFirstFrame() &&
if (mSentFirstFrameLoadedEvent &&
THRESHOLD_FACTOR * DurationToUsecs(decodeTime) > mLowAudioThresholdUsecs &&
!HasLowUndecodedData())
{
@ -1077,7 +1077,6 @@ MediaDecoderStateMachine::EnterState(State aState)
MOZ_ASSERT(OnTaskQueue());
switch (aState) {
case DECODER_STATE_DECODING_METADATA:
mDecodingFirstFrame = true;
ReadMetadata();
break;
case DECODER_STATE_DORMANT:
@ -1260,19 +1259,11 @@ MediaDecoderStateMachine::StartDecoding()
MOZ_ASSERT(OnTaskQueue());
MOZ_ASSERT(mState == DECODER_STATE_DECODING);
if (mDecodingFirstFrame && mSentFirstFrameLoadedEvent) {
// We're resuming from dormant state, so we don't need to request
// the first samples in order to determine the media start time,
// we have the start time from last time we loaded.
// FinishDecodeFirstFrame will be launched upon completion of the seek when
// we have data ready to play.
MOZ_ASSERT(mQueuedSeek.Exists() && mSentFirstFrameLoadedEvent,
"Return from dormant must have queued seek");
if (mQueuedSeek.Exists()) {
InitiateSeek(Move(mQueuedSeek));
return;
}
// Handle the pending seek now if we've decoded first frames. Otherwise it
// will be handled after decoding first frames.
if (mSentFirstFrameLoadedEvent && mQueuedSeek.Exists()) {
InitiateSeek(Move(mQueuedSeek));
return;
}
if (CheckIfDecodeComplete()) {
@ -1511,8 +1502,12 @@ MediaDecoderStateMachine::Seek(SeekTarget aTarget)
MOZ_ASSERT(mState > DECODER_STATE_DECODING_METADATA,
"We should have got duration already");
if (mState < DECODER_STATE_DECODING ||
(IsDecodingFirstFrame() && !mReader->ForceZeroStartTime())) {
// Can't seek until the start time is known.
bool hasStartTime = mSentFirstFrameLoadedEvent || mReader->ForceZeroStartTime();
// Can't seek when state is WAIT_FOR_CDM or DORMANT.
bool stateAllowed = mState >= DECODER_STATE_DECODING;
if (!stateAllowed || !hasStartTime) {
DECODER_LOG("Seek() Not Enough Data to continue at this stage, queuing seek");
mQueuedSeek.RejectIfExists(__func__);
mQueuedSeek.mTarget = aTarget;
@ -1906,8 +1901,8 @@ bool MediaDecoderStateMachine::HasLowUndecodedData()
bool MediaDecoderStateMachine::HasLowUndecodedData(int64_t aUsecs)
{
MOZ_ASSERT(OnTaskQueue());
NS_ASSERTION(mState >= DECODER_STATE_DECODING && !IsDecodingFirstFrame(),
"Must have loaded first frame for mBuffered to be valid");
MOZ_ASSERT(mState >= DECODER_STATE_DECODING && mSentFirstFrameLoadedEvent,
"Must have loaded first frame for mBuffered to be valid");
// If we don't have a duration, mBuffered is probably not going to have
// a useful buffered range. Return false here so that we don't get stuck in
@ -2074,12 +2069,6 @@ MediaDecoderStateMachine::EnqueueFirstFrameLoadedEvent()
[]() { MOZ_CRASH("Should not reach"); }));
}
bool
MediaDecoderStateMachine::IsDecodingFirstFrame()
{
return mState == DECODER_STATE_DECODING && mDecodingFirstFrame;
}
void
MediaDecoderStateMachine::FinishDecodeFirstFrame()
{
@ -2106,9 +2095,8 @@ MediaDecoderStateMachine::FinishDecodeFirstFrame()
// If we didn't have duration and/or start time before, we should now.
EnqueueLoadedMetadataEvent();
}
EnqueueFirstFrameLoadedEvent();
mDecodingFirstFrame = false;
EnqueueFirstFrameLoadedEvent();
}
void
@ -2173,9 +2161,11 @@ MediaDecoderStateMachine::SeekCompleted()
// SeekTask::Discard() will ask MediaDecoderReaderWrapper to discard media
// data requests.
if (mDecodingFirstFrame) {
// We were resuming from dormant, or initiated a seek early.
// We can fire loadeddata now.
// Notify FirstFrameLoaded now if we haven't since we've decoded some data
// for readyState to transition to HAVE_CURRENT_DATA and fire 'loadeddata'.
if (!mSentFirstFrameLoadedEvent) {
// Only MSE can start seeking before finishing decoding first frames.
MOZ_ASSERT(mReader->ForceZeroStartTime());
FinishDecodeFirstFrame();
}
@ -2273,9 +2263,8 @@ nsresult MediaDecoderStateMachine::RunStateMachine()
return NS_OK;
case DECODER_STATE_DECODING: {
if (IsDecodingFirstFrame()) {
// We haven't completed decoding our first frames, we can't start
// playback yet.
// Can't start playback until having decoded first frames.
if (!mSentFirstFrameLoadedEvent) {
return NS_OK;
}
if (mPlayState != MediaDecoder::PLAY_STATE_PLAYING && IsPlaying())
@ -2492,7 +2481,7 @@ void MediaDecoderStateMachine::UpdateNextFrameStatus()
MediaDecoderOwner::NextFrameStatus status;
const char* statusString;
if (mState <= DECODER_STATE_WAIT_FOR_CDM || IsDecodingFirstFrame()) {
if (mState < DECODER_STATE_DECODING || !mSentFirstFrameLoadedEvent) {
status = MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
statusString = "NEXT_FRAME_UNAVAILABLE";
} else if (IsBuffering()) {
@ -2830,12 +2819,12 @@ MediaDecoderStateMachine::DumpDebugInfo()
mMediaSink->DumpDebugInfo();
DUMP_LOG(
"GetMediaTime=%lld GetClock=%lld mMediaSink=%p "
"mState=%s mPlayState=%d mDecodingFirstFrame=%d IsPlaying=%d "
"mState=%s mPlayState=%d mSentFirstFrameLoadedEvent=%d IsPlaying=%d "
"mAudioStatus=%s mVideoStatus=%s mDecodedAudioEndTime=%lld mDecodedVideoEndTime=%lld "
"mIsAudioPrerolling=%d mIsVideoPrerolling=%d "
"mAudioCompleted=%d mVideoCompleted=%d",
GetMediaTime(), mMediaSink->IsStarted() ? GetClock() : -1, mMediaSink.get(),
ToStateStr(), mPlayState.Ref(), mDecodingFirstFrame, IsPlaying(),
ToStateStr(), mPlayState.Ref(), mSentFirstFrameLoadedEvent, IsPlaying(),
AudioRequestStatus(), VideoRequestStatus(), mDecodedAudioEndTime, mDecodedVideoEndTime,
mIsAudioPrerolling, mIsVideoPrerolling, mAudioCompleted.Ref(), mVideoCompleted.Ref());
});

View File

@ -567,8 +567,7 @@ protected:
// If there are any queued seek, will change state to DECODER_STATE_SEEKING
// and return true.
bool MaybeFinishDecodeFirstFrame();
// Return true if we are currently decoding the first frames.
bool IsDecodingFirstFrame();
void FinishDecodeFirstFrame();
// Completes the seek operation, moves onto the next appropriate state.
@ -885,18 +884,14 @@ private:
// Track our request to update the buffered ranges
MozPromiseRequestHolder<MediaDecoderReader::BufferedUpdatePromise> mBufferedUpdateRequest;
// True if we need to call FinishDecodeFirstFrame() upon frame decoding
// succeeding.
bool mDecodingFirstFrame;
// True if we are back from DECODER_STATE_DORMANT state and
// LoadedMetadataEvent was already sent.
bool mSentLoadedMetadataEvent;
// True if we are back from DECODER_STATE_DORMANT state and
// FirstFrameLoadedEvent was already sent, then we can skip
// SetStartTime because the mStartTime already set before. Also we don't need
// to decode any audio/video since the MediaDecoder will trigger a seek
// operation soon.
// True if we've decoded first frames (thus having the start time) and
// notified the FirstFrameLoaded event. Note we can't initiate seek until the
// start time is known which happens when the first frames are decoded or we
// are playing an MSE stream (the start time is always assumed 0).
bool mSentFirstFrameLoadedEvent;
bool mSentPlaybackEndedEvent;

View File

@ -62,9 +62,9 @@ MediaFormatReader::MediaFormatReader(AbstractMediaDecoder* aDecoder,
VideoFrameContainer* aVideoFrameContainer,
layers::LayersBackend aLayersBackend)
: MediaDecoderReader(aDecoder)
, mAudio(this, MediaData::AUDIO_DATA, Preferences::GetUint("media.audio-decode-ahead", 2),
, mAudio(this, MediaData::AUDIO_DATA,
Preferences::GetUint("media.audio-max-decode-error", 3))
, mVideo(this, MediaData::VIDEO_DATA, Preferences::GetUint("media.video-decode-ahead", 2),
, mVideo(this, MediaData::VIDEO_DATA,
Preferences::GetUint("media.video-max-decode-error", 2))
, mDemuxer(aDemuxer)
, mDemuxerInitDone(false)
@ -562,7 +562,7 @@ MediaFormatReader::RequestVideoData(bool aSkipToNextKeyframe,
}
RefPtr<MediaDataPromise> p = mVideo.EnsurePromise(__func__);
NotifyDecodingRequested(TrackInfo::kVideoTrack);
ScheduleUpdate(TrackInfo::kVideoTrack);
return p;
}
@ -606,7 +606,6 @@ MediaFormatReader::OnDemuxFailed(TrackType aTrack, DemuxerFailureReason aFailure
void
MediaFormatReader::DoDemuxVideo()
{
// TODO Use DecodeAhead value rather than 1.
mVideo.mDemuxRequest.Begin(mVideo.mTrackDemuxer->GetSamples(1)
->Then(OwnerThread(), __func__, this,
&MediaFormatReader::OnVideoDemuxCompleted,
@ -657,7 +656,7 @@ MediaFormatReader::RequestAudioData()
}
RefPtr<MediaDataPromise> p = mAudio.EnsurePromise(__func__);
NotifyDecodingRequested(TrackInfo::kAudioTrack);
ScheduleUpdate(TrackInfo::kAudioTrack);
return p;
}
@ -665,7 +664,6 @@ MediaFormatReader::RequestAudioData()
void
MediaFormatReader::DoDemuxAudio()
{
// TODO Use DecodeAhead value rather than 1.
mAudio.mDemuxRequest.Begin(mAudio.mTrackDemuxer->GetSamples(1)
->Then(OwnerThread(), __func__, this,
&MediaFormatReader::OnAudioDemuxCompleted,
@ -706,7 +704,7 @@ MediaFormatReader::NotifyInputExhausted(TrackType aTrack)
MOZ_ASSERT(OnTaskQueue());
LOGV("Decoder has requested more %s data", TrackTypeToStr(aTrack));
auto& decoder = GetDecoderData(aTrack);
decoder.mInputExhausted = true;
decoder.mDecodePending = false;
ScheduleUpdate(aTrack);
}
@ -755,33 +753,21 @@ MediaFormatReader::NotifyEndOfStream(TrackType aTrack)
ScheduleUpdate(aTrack);
}
void
MediaFormatReader::NotifyDecodingRequested(TrackType aTrack)
{
MOZ_ASSERT(OnTaskQueue());
auto& decoder = GetDecoderData(aTrack);
decoder.mDecodingRequested = true;
ScheduleUpdate(aTrack);
}
bool
MediaFormatReader::NeedInput(DecoderData& aDecoder)
{
// We try to keep a few more compressed samples input than decoded samples
// have been output, provided the state machine has requested we send it a
// decoded sample. To account for H.264 streams which may require a longer
// run of input than we input, decoders fire an "input exhausted" callback,
// which overrides our "few more samples" threshold.
// To account for H.264 streams which may require a longer
// run of input than we input, decoders fire an "input exhausted" callback.
// The decoder will not be fed a new raw sample until InputExhausted
// has been called.
return
(aDecoder.HasPromise() || aDecoder.mTimeThreshold.isSome()) &&
!aDecoder.HasPendingDrain() &&
!aDecoder.HasFatalError() &&
aDecoder.mDecodingRequested &&
!aDecoder.mDemuxRequest.Exists() &&
!aDecoder.mOutput.Length() &&
!aDecoder.HasInternalSeekPending() &&
aDecoder.mOutput.Length() <= aDecoder.mDecodeAhead &&
(aDecoder.mInputExhausted || !aDecoder.mQueuedSamples.IsEmpty() ||
aDecoder.mTimeThreshold.isSome() ||
aDecoder.mNumSamplesInput - aDecoder.mNumSamplesOutput <= aDecoder.mDecodeAhead);
!aDecoder.mDecodePending;
}
void
@ -932,6 +918,7 @@ MediaFormatReader::DecodeDemuxedSamples(TrackType aTrack,
LOG("Unable to pass frame to decoder");
return false;
}
decoder.mDecodePending = true;
return true;
}
@ -1007,7 +994,7 @@ MediaFormatReader::HandleDemuxedSamples(TrackType aTrack,
decoder.ShutdownDecoder();
if (sample->mKeyframe) {
decoder.mQueuedSamples.AppendElements(Move(samples));
NotifyDecodingRequested(aTrack);
ScheduleUpdate(aTrack);
} else {
TimeInterval time =
TimeInterval(TimeUnit::FromMicroseconds(sample->mTime),
@ -1045,9 +1032,6 @@ MediaFormatReader::HandleDemuxedSamples(TrackType aTrack,
}
samplesPending = true;
}
// We have serviced the decoder's request for more data.
decoder.mInputExhausted = false;
}
void
@ -1071,7 +1055,7 @@ MediaFormatReader::InternalSeek(TrackType aTrack, const InternalSeekTarget& aTar
"Seek promise must be disconnected when timethreshold is reset");
decoder.mTimeThreshold.ref().mHasSeeked = true;
self->SetVideoDecodeThreshold();
self->NotifyDecodingRequested(aTrack);
self->ScheduleUpdate(aTrack);
},
[self, aTrack] (DemuxerFailureReason aResult) {
auto& decoder = self->GetDecoderData(aTrack);
@ -1275,6 +1259,7 @@ MediaFormatReader::Update(TrackType aTrack)
if (decoder.mError &&
decoder.mError.ref() == MediaDataDecoderError::DECODE_ERROR) {
decoder.mDecodePending = false;
decoder.mError.reset();
if (++decoder.mNumOfConsecutiveError > decoder.mMaxConsecutiveError) {
NotifyError(aTrack);
@ -1292,11 +1277,11 @@ MediaFormatReader::Update(TrackType aTrack)
bool needInput = NeedInput(decoder);
LOGV("Update(%s) ni=%d no=%d ie=%d, in:%llu out:%llu qs=%u pending:%u waiting:%d ahead:%d sid:%u",
TrackTypeToStr(aTrack), needInput, needOutput, decoder.mInputExhausted,
LOGV("Update(%s) ni=%d no=%d ie=%d, in:%llu out:%llu qs=%u pending:%u waiting:%d promise:%d sid:%u",
TrackTypeToStr(aTrack), needInput, needOutput, decoder.mDecodePending,
decoder.mNumSamplesInput, decoder.mNumSamplesOutput,
uint32_t(size_t(decoder.mSizeOfQueue)), uint32_t(decoder.mOutput.Length()),
decoder.mWaitingForData, !decoder.HasPromise(), decoder.mLastStreamSourceID);
decoder.mWaitingForData, decoder.HasPromise(), decoder.mLastStreamSourceID);
if (decoder.mWaitingForData &&
(!decoder.mTimeThreshold || decoder.mTimeThreshold.ref().mWaiting)) {
@ -1577,7 +1562,7 @@ MediaFormatReader::OnVideoSkipCompleted(uint32_t aSkipped)
VideoSkipReset(aSkipped);
NotifyDecodingRequested(TrackInfo::kVideoTrack);
ScheduleUpdate(TrackInfo::kVideoTrack);
}
void
@ -1595,7 +1580,7 @@ MediaFormatReader::OnVideoSkipFailed(MediaTrackDemuxer::SkipFailureHolder aFailu
DropDecodedSamples(TrackInfo::kVideoTrack);
// We can't complete the skip operation, will just service a video frame
// normally.
NotifyDecodingRequested(TrackInfo::kVideoTrack);
ScheduleUpdate(TrackInfo::kVideoTrack);
break;
case DemuxerFailureReason::CANCELED: MOZ_FALLTHROUGH;
case DemuxerFailureReason::SHUTDOWN:
@ -2013,12 +1998,11 @@ MediaFormatReader::GetMozDebugReaderData(nsAString& aString)
result += nsPrintfCString("audio frames decoded: %lld\n",
mAudio.mNumSamplesOutputTotal);
if (HasAudio()) {
result += nsPrintfCString("audio state: ni=%d no=%d ie=%d demuxr:%d demuxq:%d decoder:%d tt:%f tths:%d in:%llu out:%llu qs=%u pending:%u waiting:%d sid:%u\n",
result += nsPrintfCString("audio state: ni=%d no=%d ie=%d demuxr:%d demuxq:%d tt:%f tths:%d in:%llu out:%llu qs=%u pending:%u waiting:%d sid:%u\n",
NeedInput(mAudio), mAudio.HasPromise(),
mAudio.mInputExhausted,
mAudio.mDecodePending,
mAudio.mDemuxRequest.Exists(),
int(mAudio.mQueuedSamples.Length()),
mAudio.mDecodingRequested,
mAudio.mTimeThreshold
? mAudio.mTimeThreshold.ref().Time().ToSeconds()
: -1.0,
@ -2037,12 +2021,11 @@ MediaFormatReader::GetMozDebugReaderData(nsAString& aString)
mVideo.mNumSamplesOutputTotal,
mVideo.mNumSamplesSkippedTotal);
if (HasVideo()) {
result += nsPrintfCString("video state: ni=%d no=%d ie=%d demuxr:%d demuxq:%d decoder:%d tt:%f tths:%d in:%llu out:%llu qs=%u pending:%u waiting:%d sid:%u\n",
result += nsPrintfCString("video state: ni=%d no=%d ie=%d demuxr:%d demuxq:%d tt:%f tths:%d in:%llu out:%llu qs=%u pending:%u waiting:%d sid:%u\n",
NeedInput(mVideo), mVideo.HasPromise(),
mVideo.mInputExhausted,
mVideo.mDecodePending,
mVideo.mDemuxRequest.Exists(),
int(mVideo.mQueuedSamples.Length()),
mVideo.mDecodingRequested,
mVideo.mTimeThreshold
? mVideo.mTimeThreshold.ref().Time().ToSeconds()
: -1.0,
@ -2080,7 +2063,7 @@ MediaFormatReader::SetBlankDecode(TrackType aTrack, bool aIsBlankDecode)
decoder.mIsBlankDecode = aIsBlankDecode;
decoder.Flush();
decoder.ShutdownDecoder();
NotifyDecodingRequested(TrackInfo::kVideoTrack); // Calls ScheduleUpdate().
ScheduleUpdate(TrackInfo::kVideoTrack);
return;
}

View File

@ -169,7 +169,6 @@ private:
void NotifyError(TrackType aTrack, MediaDataDecoderError aError = MediaDataDecoderError::FATAL_ERROR);
void NotifyWaitingForData(TrackType aTrack);
void NotifyEndOfStream(TrackType aTrack);
void NotifyDecodingRequested(TrackType aTrack);
void ExtractCryptoInitData(nsTArray<uint8_t>& aInitData);
@ -231,21 +230,18 @@ private:
struct DecoderData {
DecoderData(MediaFormatReader* aOwner,
MediaData::Type aType,
uint32_t aDecodeAhead,
uint32_t aNumOfMaxError)
: mOwner(aOwner)
, mType(aType)
, mMonitor("DecoderData")
, mDescription("shutdown")
, mDecodeAhead(aDecodeAhead)
, mUpdateScheduled(false)
, mDemuxEOS(false)
, mWaitingForData(false)
, mReceivedNewData(false)
, mDecoderInitialized(false)
, mDecodingRequested(false)
, mOutputRequested(false)
, mInputExhausted(false)
, mDecodePending(false)
, mNeedDraining(false)
, mDraining(false)
, mDrainComplete(false)
@ -288,7 +284,6 @@ private:
}
// Only accessed from reader's task queue.
uint32_t mDecodeAhead;
bool mUpdateScheduled;
bool mDemuxEOS;
bool mWaitingForData;
@ -312,11 +307,14 @@ private:
MozPromiseRequestHolder<MediaDataDecoder::InitPromise> mInitPromise;
// False when decoder is created. True when decoder Init() promise is resolved.
bool mDecoderInitialized;
// Set when decoding can proceed. It is reset when a decoding promise is
// rejected or prior to a seek operation.
bool mDecodingRequested;
bool mOutputRequested;
bool mInputExhausted;
// Set to true once the MediaDataDecoder has been fed a compressed sample.
// No more sample will be passed to the decoder while true.
// mDecodePending is reset when:
// 1- The decoder returns a sample
// 2- The decoder calls InputExhausted
// 3- The decoder is Flushed or Reset.
bool mDecodePending;
bool mNeedDraining;
bool mDraining;
bool mDrainComplete;
@ -376,9 +374,8 @@ private:
if (mDecoder) {
mDecoder->Flush();
}
mDecodingRequested = false;
mOutputRequested = false;
mInputExhausted = false;
mDecodePending = false;
mOutput.Clear();
mNumSamplesInput = 0;
mNumSamplesOutput = 0;
@ -397,10 +394,9 @@ private:
mDemuxEOS = false;
mWaitingForData = false;
mQueuedSamples.Clear();
mDecodingRequested = false;
mOutputRequested = false;
mInputExhausted = false;
mNeedDraining = false;
mDecodePending = false;
mDraining = false;
mDrainComplete = false;
mTimeThreshold.reset();
@ -441,9 +437,8 @@ private:
public:
DecoderDataWithPromise(MediaFormatReader* aOwner,
MediaData::Type aType,
uint32_t aDecodeAhead,
uint32_t aNumOfMaxError)
: DecoderData(aOwner, aType, aDecodeAhead, aNumOfMaxError)
: DecoderData(aOwner, aType, aNumOfMaxError)
, mHasPromise(false)
{}
@ -472,7 +467,6 @@ private:
{
MOZ_ASSERT(mOwner->OnTaskQueue());
mPromise.Reject(aReason, aMethodName);
mDecodingRequested = false;
mHasPromise = false;
}

View File

@ -1077,6 +1077,25 @@ MediaStreamGraph::NotifyOutputData(AudioDataValue* aBuffer, size_t aFrames,
}
}
void
MediaStreamGraph::AssertOnGraphThreadOrNotRunning() const
{
  // Either the caller is on the graph (driver) thread — in which case touching
  // CurrentDriver() is safe — or one of the assertions below fires anyway, so
  // there is no need to cross-check CurrentDriver here.
#ifdef DEBUG
  const MediaStreamGraphImpl* impl =
    static_cast<const MediaStreamGraphImpl*>(this);
  if (impl->mDriver->OnThread()) {
    return;
  }
  // Off the driver thread: the only other legal caller is the main thread
  // after the graph has stopped running. In every remaining case the caller
  // must hold the graph monitor.
  const bool mainThreadAfterRunning =
    impl->mDetectedNotRunning &&
    impl->mLifecycleState > MediaStreamGraphImpl::LIFECYCLE_RUNNING &&
    NS_IsMainThread();
  if (!mainThreadAfterRunning) {
    impl->mMonitor.AssertCurrentThreadOwns();
  }
#endif
}
bool
MediaStreamGraphImpl::ShouldUpdateMainThread()
{

View File

@ -1353,6 +1353,7 @@ public:
*/
virtual void DispatchToMainThreadAfterStreamStateUpdate(already_AddRefed<nsIRunnable> aRunnable)
{
AssertOnGraphThreadOrNotRunning();
*mPendingUpdateRunnables.AppendElement() = aRunnable;
}
@ -1374,6 +1375,8 @@ public:
void NotifyOutputData(AudioDataValue* aBuffer, size_t aFrames,
TrackRate aRate, uint32_t aChannels);
void AssertOnGraphThreadOrNotRunning() const;
protected:
explicit MediaStreamGraph(TrackRate aSampleRate)
: mSampleRate(aSampleRate)

View File

@ -202,24 +202,8 @@ public:
nsISupports* aData,
const nsTArray<AudioNodeSizes>& aAudioStreamSizes);
// The following methods run on the graph thread (or possibly the main thread if
// mLifecycleState > LIFECYCLE_RUNNING)
void AssertOnGraphThreadOrNotRunning() const
{
// either we're on the right thread (and calling CurrentDriver() is safe),
// or we're going to assert anyways, so don't cross-check CurrentDriver
#ifdef DEBUG
// if all the safety checks fail, assert we own the monitor
if (!mDriver->OnThread()) {
if (!(mDetectedNotRunning &&
mLifecycleState > LIFECYCLE_RUNNING &&
NS_IsMainThread())) {
mMonitor.AssertCurrentThreadOwns();
}
}
#endif
}
// The following methods run on the graph thread (or possibly the main thread
// if mLifecycleState > LIFECYCLE_RUNNING)
void CollectSizesForMemoryReport(
already_AddRefed<nsIHandleReportCallback> aHandleReport,
already_AddRefed<nsISupports> aHandlerData);

View File

@ -132,7 +132,7 @@ FlacFrameParser::DecodeHeaderBlock(const uint8_t* aPacket, size_t aLength)
if (numChannels > FLAC_MAX_CHANNELS) {
return false;
}
uint32_t bps = ((blob >> 38) & BITMASK(5)) + 1;
uint32_t bps = ((blob >> 36) & BITMASK(5)) + 1;
if (bps > 24) {
return false;
}

View File

@ -175,6 +175,9 @@ public:
// Denotes that the last input sample has been inserted into the decoder,
// and no more output can be produced unless more input is sent.
// A frame decoding session is completed once InputExhausted has been called.
// MediaDataDecoder::Input will not be called again until InputExhausted has
// been called.
virtual void InputExhausted() = 0;
virtual void DrainComplete() = 0;

View File

@ -96,11 +96,7 @@ private:
while (mReorderQueue.Length() > mMaxRefFrames) {
mCallback->Output(mReorderQueue.Pop().get());
}
if (mReorderQueue.Length() <= mMaxRefFrames) {
mCallback->InputExhausted();
}
mCallback->InputExhausted();
}
private:

View File

@ -163,12 +163,9 @@ OpusDataDecoder::ProcessDecode(MediaRawData* aSample)
mCallback->Error(MediaDataDecoderError::DECODE_ERROR);
break;
case DecodeError::DECODE_SUCCESS:
mCallback->InputExhausted();
break;
}
if (mTaskQueue->IsEmpty()) {
mCallback->InputExhausted();
}
}
OpusDataDecoder::DecodeError

View File

@ -202,7 +202,7 @@ TheoraDecoder::ProcessDecode(MediaRawData* aSample)
}
if (DoDecode(aSample) == -1) {
mCallback->Error(MediaDataDecoderError::DECODE_ERROR);
} else if (mTaskQueue->IsEmpty()) {
} else {
mCallback->InputExhausted();
}
}

View File

@ -192,7 +192,7 @@ VPXDecoder::ProcessDecode(MediaRawData* aSample)
}
if (DoDecode(aSample) == -1) {
mCallback->Error(MediaDataDecoderError::DECODE_ERROR);
} else if (mTaskQueue->IsEmpty()) {
} else {
mCallback->InputExhausted();
}
}

View File

@ -143,7 +143,7 @@ VorbisDataDecoder::ProcessDecode(MediaRawData* aSample)
}
if (DoDecode(aSample) == -1) {
mCallback->Error(MediaDataDecoderError::DECODE_ERROR);
} else if (mTaskQueue->IsEmpty()) {
} else {
mCallback->InputExhausted();
}
}

View File

@ -68,6 +68,8 @@ WaveDataDecoder::Input(MediaRawData* aSample)
{
if (!DoDecode(aSample)) {
mCallback->Error(MediaDataDecoderError::DECODE_ERROR);
} else {
mCallback->InputExhausted();
}
return NS_OK;
}

View File

@ -215,10 +215,7 @@ AppleATDecoder::SubmitSample(MediaRawData* aSample)
}
mQueuedSamples.Clear();
}
if (mTaskQueue->IsEmpty()) {
mCallback->InputExhausted();
}
mCallback->InputExhausted();
}
nsresult

View File

@ -34,11 +34,9 @@ AppleVTDecoder::AppleVTDecoder(const VideoInfo& aConfig,
, mPictureHeight(aConfig.mImage.height)
, mDisplayWidth(aConfig.mDisplay.width)
, mDisplayHeight(aConfig.mDisplay.height)
, mQueuedSamples(0)
, mTaskQueue(aTaskQueue)
, mMaxRefFrames(mp4_demuxer::H264::ComputeMaxRefFrames(aConfig.mExtraData))
, mImageContainer(aImageContainer)
, mInputIncoming(0)
, mIsShutDown(false)
#ifdef MOZ_WIDGET_UIKIT
, mUseSoftwareImages(true)
@ -88,8 +86,6 @@ AppleVTDecoder::Input(MediaRawData* aSample)
aSample->mKeyframe ? " keyframe" : "",
aSample->Size());
mInputIncoming++;
mTaskQueue->Dispatch(NewRunnableMethod<RefPtr<MediaRawData>>(
this, &AppleVTDecoder::ProcessDecode, aSample));
return NS_OK;
@ -104,8 +100,6 @@ AppleVTDecoder::Flush()
NewRunnableMethod(this, &AppleVTDecoder::ProcessFlush);
SyncRunnable::DispatchToThread(mTaskQueue, runnable);
mIsFlushing = false;
// All ProcessDecode() tasks should be done.
MOZ_ASSERT(mInputIncoming == 0);
mSeekTargetThreshold.reset();
@ -142,18 +136,11 @@ AppleVTDecoder::ProcessDecode(MediaRawData* aSample)
{
AssertOnTaskQueueThread();
mInputIncoming--;
if (mIsFlushing) {
return NS_OK;
}
auto rv = DoDecode(aSample);
// Ask for more data.
if (NS_SUCCEEDED(rv) && !mInputIncoming && mQueuedSamples <= mMaxRefFrames) {
LOG("%s task queue empty; requesting more data", GetDescriptionName());
mCallback->InputExhausted();
}
return rv;
}
@ -213,7 +200,6 @@ AppleVTDecoder::DrainReorderedFrames()
while (!mReorderQueue.IsEmpty()) {
mCallback->Output(mReorderQueue.Pop().get());
}
mQueuedSamples = 0;
}
void
@ -223,7 +209,6 @@ AppleVTDecoder::ClearReorderedFrames()
while (!mReorderQueue.IsEmpty()) {
mReorderQueue.Pop();
}
mQueuedSamples = 0;
}
void
@ -288,16 +273,10 @@ AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
aFrameRef.is_sync_point ? " keyframe" : ""
);
if (mQueuedSamples > mMaxRefFrames) {
// We had stopped requesting more input because we had received too much at
// the time. We can ask for more once again.
mCallback->InputExhausted();
}
MOZ_ASSERT(mQueuedSamples);
mQueuedSamples--;
if (!aImage) {
// Image was dropped by decoder.
// Image was dropped by decoder, or no image has been returned yet.
// We need more input to continue.
mCallback->InputExhausted();
return NS_OK;
}
@ -410,9 +389,10 @@ AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
// in composition order.
MonitorAutoLock mon(mMonitor);
mReorderQueue.Push(data);
while (mReorderQueue.Length() > mMaxRefFrames) {
if (mReorderQueue.Length() > mMaxRefFrames) {
mCallback->Output(mReorderQueue.Pop().get());
}
mCallback->InputExhausted();
LOG("%llu decoded frames queued",
static_cast<unsigned long long>(mReorderQueue.Length()));
@ -480,8 +460,6 @@ AppleVTDecoder::DoDecode(MediaRawData* aSample)
return NS_ERROR_FAILURE;
}
mQueuedSamples++;
VTDecodeFrameFlags decodeFlags =
kVTDecodeFrame_EnableAsynchronousDecompression;
rv = VTDecompressionSessionDecodeFrame(mSession,

View File

@ -90,11 +90,6 @@ private:
const uint32_t mDisplayWidth;
const uint32_t mDisplayHeight;
// Number of times a sample was queued via Input(). Will be decreased upon
// the decoder's callback being invoked.
// This is used to calculate how many frames has been buffered by the decoder.
Atomic<uint32_t> mQueuedSamples;
// Method to set up the decompression session.
nsresult InitializeSession();
nsresult WaitForAsynchronousFrames();
@ -106,9 +101,6 @@ private:
const RefPtr<TaskQueue> mTaskQueue;
const uint32_t mMaxRefFrames;
const RefPtr<layers::ImageContainer> mImageContainer;
// Increased when Input is called, and decreased when ProcessFrame runs.
// Reaching 0 indicates that there's no pending Input.
Atomic<uint32_t> mInputIncoming;
Atomic<bool> mIsShutDown;
const bool mUseSoftwareImages;

View File

@ -133,6 +133,7 @@ FFmpegAudioDecoder<LIBAV_VER>::DoDecode(MediaRawData* aSample)
int64_t samplePosition = aSample->mOffset;
media::TimeUnit pts = media::TimeUnit::FromMicroseconds(aSample->mTime);
bool didOutput = false;
while (packet.size > 0) {
int decoded;
@ -181,6 +182,7 @@ FFmpegAudioDecoder<LIBAV_VER>::DoDecode(MediaRawData* aSample)
numChannels,
samplingRate);
mCallback->Output(data);
didOutput = true;
pts += duration;
if (!pts.IsValid()) {
NS_WARNING("Invalid count of accumulated audio samples");
@ -192,7 +194,7 @@ FFmpegAudioDecoder<LIBAV_VER>::DoDecode(MediaRawData* aSample)
samplePosition += bytesConsumed;
}
return DecodeResult::DECODE_FRAME;
return didOutput ? DecodeResult::DECODE_FRAME : DecodeResult::DECODE_NO_FRAME;
}
void

View File

@ -117,10 +117,12 @@ FFmpegDataDecoder<LIBAV_VER>::ProcessDecode(MediaRawData* aSample)
case DecodeResult::FATAL_ERROR:
mCallback->Error(MediaDataDecoderError::FATAL_ERROR);
break;
case DecodeResult::DECODE_NO_FRAME:
case DecodeResult::DECODE_FRAME:
mCallback->InputExhausted();
break;
default:
if (mTaskQueue->IsEmpty()) {
mCallback->InputExhausted();
}
break;
}
}

View File

@ -149,9 +149,7 @@ WMFMediaDataDecoder::ProcessOutput()
mCallback->Output(output);
}
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
if (mTaskQueue->IsEmpty()) {
mCallback->InputExhausted();
}
mCallback->InputExhausted();
} else if (FAILED(hr)) {
NS_WARNING("WMFMediaDataDecoder failed to output data");
mCallback->Error(MediaDataDecoderError::DECODE_ERROR);

View File

@ -15,9 +15,16 @@ from external_media_tests.utils import verbose_until
class YouTubePuppeteer(VideoPuppeteer):
"""
Wrapper around a YouTube #movie_player element.
Wrapper around a YouTube .html5-video-player element.
Partial reference: https://developers.google.com/youtube/js_api_reference.
Can be used with youtube videos or youtube videos at embedded URLS. E.g.
both https://www.youtube.com/watch?v=AbAACm1IQE0 and
https://www.youtube.com/embed/AbAACm1IQE0 should work.
Using an embedded video has the advantage of not auto-playing more videos
while a test is running.
Partial reference: https://developers.google.com/youtube/iframe_api_reference.
This reference is useful for site-specific features such as interacting
with ads, or accessing YouTube's debug data.
"""
@ -37,14 +44,16 @@ class YouTubePuppeteer(VideoPuppeteer):
self.player = None
super(YouTubePuppeteer,
self).__init__(marionette, url,
video_selector='#movie_player video',
video_selector='.html5-video-player video',
**kwargs)
wait = Wait(self.marionette, timeout=30)
with self.marionette.using_context(Marionette.CONTEXT_CONTENT):
verbose_until(wait, self,
expected.element_present(By.ID, 'movie_player'))
self.player = self.marionette.find_element(By.ID, 'movie_player')
self.marionette.execute_script("log('#movie_player "
expected.element_present(By.CLASS_NAME,
'html5-video-player'))
self.player = self.marionette.find_element(By.CLASS_NAME,
'html5-video-player')
self.marionette.execute_script("log('.html5-video-player "
"element obtained');")
# When an ad is playing, self.player_duration indicates the duration
# of the spliced-in ad stream, not the duration of the main video, so
@ -117,7 +126,7 @@ class YouTubePuppeteer(VideoPuppeteer):
def execute_yt_script(self, script):
"""
Execute JS script in content context with access to video element and
YouTube #movie_player element.
YouTube .html5-video-player element.
:param script: script to be executed.
@ -131,7 +140,7 @@ class YouTubePuppeteer(VideoPuppeteer):
@property
def playback_quality(self):
"""
Please see https://developers.google.com/youtube/js_api_reference#Playback_quality
Please see https://developers.google.com/youtube/iframe_api_reference#Playback_quality
for valid values.
:return: A string with a valid value returned via YouTube.
@ -176,7 +185,7 @@ class YouTubePuppeteer(VideoPuppeteer):
"""
:return: The YouTube state of the video. See
https://developers.google.com/youtube/js_api_reference#getPlayerState
https://developers.google.com/youtube/iframe_api_reference#getPlayerState
for valid values.
"""
state = self.execute_yt_script('return arguments[1].'
@ -188,7 +197,7 @@ class YouTubePuppeteer(VideoPuppeteer):
"""
This and the following properties are based on the
player.getPlayerState() call
(https://developers.google.com/youtube/js_api_reference#Playback_status)
(https://developers.google.com/youtube/iframe_api_reference#Playback_status)
:return: True if the video has not yet started.
"""
@ -240,7 +249,7 @@ class YouTubePuppeteer(VideoPuppeteer):
Get state of current ad.
:return: Returns one of the constants listed in
https://developers.google.com/youtube/js_api_reference#Playback_status
https://developers.google.com/youtube/iframe_api_reference#Playback_status
for an ad.
"""
@ -350,7 +359,7 @@ class YouTubePuppeteer(VideoPuppeteer):
# no ad playing
return False
if self.ad_skippable:
selector = '#movie_player .videoAdUiSkipContainer'
selector = '.html5-video-player .videoAdUiSkipContainer'
wait = Wait(self.marionette, timeout=30)
try:
with self.marionette.using_context(Marionette.CONTEXT_CONTENT):
@ -378,7 +387,7 @@ class YouTubePuppeteer(VideoPuppeteer):
if (self.ad_playing and self.video_src.startswith('mediasource') and
self.duration):
return self.duration
selector = '#movie_player .videoAdUiAttribution'
selector = '.html5-media-player .videoAdUiAttribution'
wait = Wait(self.marionette, timeout=5)
try:
with self.marionette.using_context(Marionette.CONTEXT_CONTENT):
@ -464,7 +473,7 @@ class YouTubePuppeteer(VideoPuppeteer):
player_state = self._yt_player_state_name[self.player_state]
ad_state = self._yt_player_state_name[self.ad_state]
messages += [
'#movie_player: {',
'.html5-media-player: {',
'\tvideo id: {0},'.format(self.movie_id),
'\tvideo_title: {0}'.format(self.movie_title),
'\tcurrent_state: {0},'.format(player_state),
@ -475,7 +484,7 @@ class YouTubePuppeteer(VideoPuppeteer):
'}'
]
else:
messages += ['\t#movie_player: None']
messages += ['\t.html5-media-player: None']
return '\n'.join(messages)

View File

@ -1,9 +1,9 @@
# short videos; no ads; max 5 minutes
# short videos; no ads; embedded; max 5 minutes
# 0:12
[https://youtu.be/AbAACm1IQE0]
[https://youtube.com/embed/AbAACm1IQE0?autoplay=1]
# 2:18
[https://www.youtube.com/watch?v=yOQQCoxs8-k]
[https://youtube.com/embed/yOQQCoxs8-k?autoplay=1]
# 0:08
[https://www.youtube.com/watch?v=1visYpIREUM]
[https://youtube.com/embed/1visYpIREUM?autoplay=1]
# 2:09
[https://www.youtube.com/watch?v=rjmuKV9BTkE]
[https://youtube.com/embed/rjmuKV9BTkE?autoplay=1]

View File

@ -1,14 +1,5 @@
# all long videos; < 12 hours total
# 2:18:00
[http://youtu.be/FLX64H5FYa8]
# 1:00:00
[https://www.youtube.com/watch?v=AYYDshv8C4g]
# 1:10:00
[https://www.youtube.com/watch?v=V0Vy4kYAPDk]
# 1:47:00
[https://www.youtube.com/watch?v=bFtGE2C7Pxs]
# shutdownhang | WaitForSingleObjectEx | WaitForSingleObject | PR_Wait | nsThread::ProcessNextEvent(bool, bool*) | NS_ProcessNextEvent(nsIThread*, bool) | mozilla::MediaShutdownManager::Shutdown()
# 1:43:00
[https://www.youtube.com/watch?v=BXMtXpmpXPU]
# a couple of very long videos, < 12 hours total
# 6:00:00 - can't embed due to copyright
[https://www.youtube.com/watch?v=5N8sUccRiTA]
# 2:09:00
[https://www.youtube.com/embed/b6q5N16dje4?autoplay=1]

View File

@ -1,9 +0,0 @@
# a couple of very long videos, < 12 hours total
# 6:00:00
[https://www.youtube.com/watch?v=5N8sUccRiTA]
# 2:27:00
[https://www.youtube.com/watch?v=NAVrm3wjzq8]
# 58:50
[https://www.youtube.com/watch?v=uP1BBw3IYco]
# 2:09:00
[https://www.youtube.com/watch?v=b6q5N16dje4]

View File

@ -1,3 +1,6 @@
# It appears these are not currently used by tests. They are left here as they
# reference failure scenarios. If these are fixed, they can be removed.
# videos from crashes, < 12 hours
# hang | NtUserMessageCall | SendMessageW

View File

@ -1,3 +1,6 @@
# It appears these are not currently used by tests. They are left here as they
# reference failure scenarios. If these are fixed, they can be removed.
# Total time: about 12-13 hours + unskippable ads
#Request url: https://crash-stats.mozilla.com/api/SuperSearchUnredacted/?product=Firefox&url=%24https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3D&url=%21~list&url=%21~index&_results_number=50&platform=Windows&version=37.0&date=%3E2015-03-26

View File

@ -1,15 +0,0 @@
# very long test; 96-100 hours?
# 00:3:26
[https://www.youtube.com/watch?v=7RMQksXpQSk]
# nyan cat 10 hours
[http://youtu.be/9bZkp7q19f0]
# 4:54:00
[https://www.youtube.com/watch?v=jWlKjw3LBDk]
# 3:00:01
[https://www.youtube.com/watch?v=ub9JUDS_6i8]
# 10 hours rick roll
[https://www.youtube.com/watch?v=BROWqjuTM0g]
# 24 hours
[https://www.youtube.com/watch?v=FvHiLLkPhQE]
# 2 hours
[https://www.youtube.com/watch?v=VmOuW5zTt9w

View File

@ -1,18 +1,18 @@
# mix of shorter/longer videos with/without ads, < 60 min
# 4:59
[http://youtu.be/pWI8RB2dmfU]
# 4:59 - can't embed
[https://www.youtube.com/watch?v=pWI8RB2dmfU]
# 0:46 ad at start
[http://youtu.be/6SFp1z7uA6g]
[https://www.youtube.com/embed/6SFp1z7uA6g?autoplay=1]
# 0:58 ad at start
[http://youtu.be/Aebs62bX0dA]
[https://www.youtube.com/embed/Aebs62bX0dA?autoplay=1]
# 1:43 ad
[https://www.youtube.com/watch?v=l5ODwR6FPRQ]
# 8:00 ad
[https://www.youtube.com/embed/l5ODwR6FPRQ?autoplay=1]
# 8:00 ad - can't embed
[https://www.youtube.com/watch?v=KlyXNRrsk4A]
# video with ad in beginning and in the middle 20:00
# https://bugzilla.mozilla.org/show_bug.cgi?id=1176815
[https://www.youtube.com/watch?v=cht9Xq9suGg]
[https://www.youtube.com/embed/cht9Xq9suGg?autoplay=1]
# 1:35 ad
[https://www.youtube.com/watch?v=orybDrUj4vA]
# 3:02 - ad
[https://youtu.be/tDDVAErOI5U]
[https://www.youtube.com/embed/orybDrUj4vA?autoplay=1]
# 3:02 ad
[https://www.youtube.com/embed/tDDVAErOI5U?autoplay=1]

View File

@ -1,6 +0,0 @@
# a few longer videos, < 60 min total
# 0:30:00 no ad
[https://www.youtube.com/watch?v=-qXxNPvqHtQ]
# 0:20:00
[http://youtu.be/Fu2DcHzokew]

View File

@ -1,11 +0,0 @@
# a few longer videos, < 120 min total
# video with ad in the middle
# 21:00
[https://www.youtube.com/watch?v=cht9Xq9suGg]
# 16:00
[https://www.youtube.com/watch?v=6Lm9EHhbJAY]
# 20:00
[https://www.youtube.com/watch?v=8XQ1onjXJK0]
# 59:06
[https://www.youtube.com/watch?v=kmpiY5kssU4]

View File

@ -1,5 +0,0 @@
# 00:12
[https://youtu.be/AbAACm1IQE0]
# longer video with ads; < 15 min total
# 13:40
[https://www.youtube.com/watch?v=87uo2TPrsl8]

View File

@ -1,5 +0,0 @@
# 1-2 longer videos with ads; < 15 minutes total
[https://www.youtube.com/watch?v=v678Em6qyzk]
[https://www.youtube.com/watch?v=l8XOZJkozfI]

View File

@ -1,3 +1,6 @@
# It appears these are not currently used by tests. They are left here as they
# reference failure scenarios. If these are fixed, they can be removed.
# crash-data videos, < 15 minutes total
# hang | NtUserMessageCall | SendMessageW

View File

@ -46,6 +46,7 @@
#if defined(MOZ_X11)
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include "X11UndefineNone.h"
#endif
#endif

View File

@ -2610,7 +2610,7 @@ nsEventStatus nsPluginInstanceOwner::ProcessEvent(const WidgetGUIEvent& anEvent)
#ifdef MOZ_WIDGET_GTK
Window root = GDK_ROOT_WINDOW();
#else
Window root = None; // Could XQueryTree, but this is not important.
Window root = X11None; // Could XQueryTree, but this is not important.
#endif
switch (anEvent.mMessage) {
@ -2628,7 +2628,7 @@ nsEventStatus nsPluginInstanceOwner::ProcessEvent(const WidgetGUIEvent& anEvent)
event.y_root = rootPoint.y;
event.state = XInputEventState(mouseEvent);
// information lost
event.subwindow = None;
event.subwindow = X11None;
event.mode = -1;
event.detail = NotifyDetailNone;
event.same_screen = True;
@ -2647,7 +2647,7 @@ nsEventStatus nsPluginInstanceOwner::ProcessEvent(const WidgetGUIEvent& anEvent)
event.y_root = rootPoint.y;
event.state = XInputEventState(mouseEvent);
// information lost
event.subwindow = None;
event.subwindow = X11None;
event.is_hint = NotifyNormal;
event.same_screen = True;
}
@ -2678,7 +2678,7 @@ nsEventStatus nsPluginInstanceOwner::ProcessEvent(const WidgetGUIEvent& anEvent)
break;
}
// information lost:
event.subwindow = None;
event.subwindow = X11None;
event.same_screen = True;
}
break;
@ -2722,7 +2722,7 @@ nsEventStatus nsPluginInstanceOwner::ProcessEvent(const WidgetGUIEvent& anEvent)
// Information that could be obtained from pluginEvent but we may not
// want to promise to provide:
event.subwindow = None;
event.subwindow = X11None;
event.x = 0;
event.y = 0;
event.x_root = -1;
@ -2764,7 +2764,7 @@ nsEventStatus nsPluginInstanceOwner::ProcessEvent(const WidgetGUIEvent& anEvent)
XAnyEvent& event = pluginEvent.xany;
event.display = widget ?
static_cast<Display*>(widget->GetNativeData(NS_NATIVE_DISPLAY)) : nullptr;
event.window = None; // not a real window
event.window = X11None; // not a real window
// information lost:
event.serial = 0;
event.send_event = False;

View File

@ -217,7 +217,7 @@ nsresult nsPluginNativeWindowGtk::CreateXEmbedWindow(bool aEnableXtFocus) {
GdkVisual* gdkVisual = gdk_drawable_get_visual(gdkWindow);
mWsInfo.depth = gdkVisual->depth;
#else
mWsInfo.colormap = None;
mWsInfo.colormap = X11None;
GdkVisual* gdkVisual = gdk_window_get_visual(gdkWindow);
mWsInfo.depth = gdk_visual_get_depth(gdkVisual);
#endif

View File

@ -1282,7 +1282,7 @@ PluginInstanceChild::AnswerNPP_SetWindow(const NPRemoteWindow& aWindow)
}
}
if (aWindow.visualID != None
if (aWindow.visualID != X11None
&& gtk_check_version(2, 12, 10) != nullptr) { // older
// Workaround for a bug in Gtk+ (prior to 2.12.10) where deleting
// a foreign GdkColormap will also free the XColormap.

View File

@ -81,7 +81,7 @@ pluginInstanceInit(InstanceData* instanceData)
instanceData->platformData->display = nullptr;
instanceData->platformData->visual = nullptr;
instanceData->platformData->colormap = None;
instanceData->platformData->colormap = X11None;
instanceData->platformData->plug = nullptr;
return NPERR_NO_ERROR;
@ -97,7 +97,7 @@ pluginInstanceShutdown(InstanceData* instanceData)
if (instanceData->hasWidget) {
Window window = reinterpret_cast<XID>(instanceData->window.window);
if (window != None) {
if (window != X11None) {
// This window XID should still be valid.
// See bug 429604 and bug 454756.
XWindowAttributes attributes;

View File

@ -11,6 +11,7 @@
#ifdef MOZ_X11
#include <X11/extensions/Xrender.h>
#include <X11/Xlib.h>
#include "X11UndefineNone.h"
#endif
struct _cairo;
@ -87,7 +88,7 @@ public:
BorrowedXlibDrawable()
: mDT(nullptr),
mDisplay(nullptr),
mDrawable(None),
mDrawable(X11None),
mScreen(nullptr),
mVisual(nullptr),
mXRenderFormat(nullptr)
@ -96,7 +97,7 @@ public:
explicit BorrowedXlibDrawable(DrawTarget *aDT)
: mDT(nullptr),
mDisplay(nullptr),
mDrawable(None),
mDrawable(X11None),
mScreen(nullptr),
mVisual(nullptr),
mXRenderFormat(nullptr)

View File

@ -2163,7 +2163,7 @@ DrawTargetCairo::Draw3DTransformedSurface(SourceSurface* aSurface, const Matrix4
0, nullptr);
XRenderComposite(display, PictOpSrc,
srcPict, None, dstPict,
srcPict, X11None, dstPict,
0, 0, 0, 0, 0, 0,
xformBounds.width, xformBounds.height);
@ -2313,7 +2313,7 @@ BorrowedXlibDrawable::Init(DrawTarget* aDT)
MOZ_ASSERT(aDT, "Caller should check for nullptr");
MOZ_ASSERT(!mDT, "Can't initialize twice!");
mDT = aDT;
mDrawable = None;
mDrawable = X11None;
#ifdef CAIRO_HAS_XLIB_SURFACE
if (aDT->GetBackendType() != BackendType::CAIRO ||
@ -2356,7 +2356,7 @@ BorrowedXlibDrawable::Finish()
cairo_surface_t* surf = cairo_get_group_target(cairoDT->mContext);
cairo_surface_mark_dirty(surf);
if (mDrawable) {
mDrawable = None;
mDrawable = X11None;
}
}
#endif

View File

@ -11,6 +11,7 @@
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include "X11UndefineNone.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/StaticPtr.h"
@ -308,13 +309,13 @@ GLXPixmap
GLXLibrary::CreatePixmap(gfxASurface* aSurface)
{
if (!SupportsTextureFromPixmap(aSurface)) {
return None;
return X11None;
}
gfxXlibSurface* xs = static_cast<gfxXlibSurface*>(aSurface);
const XRenderPictFormat* format = xs->XRenderFormat();
if (!format || format->type != PictTypeDirect) {
return None;
return X11None;
}
const XRenderDirectFormat& direct = format->direct;
int alphaSize = FloorLog2(direct.alphaMask + 1);
@ -327,7 +328,7 @@ GLXLibrary::CreatePixmap(gfxASurface* aSurface)
(alphaSize ? LOCAL_GLX_BIND_TO_TEXTURE_RGBA_EXT
: LOCAL_GLX_BIND_TO_TEXTURE_RGB_EXT), True,
LOCAL_GLX_RENDER_TYPE, LOCAL_GLX_RGBA_BIT,
None };
X11None };
int numConfigs = 0;
Display* display = xs->XDisplay();
@ -351,7 +352,7 @@ GLXLibrary::CreatePixmap(gfxASurface* aSurface)
~(redMask | greenMask | blueMask) != -1UL << format->depth;
for (int i = 0; i < numConfigs; i++) {
int id = None;
int id = X11None;
sGLXLibrary.xGetFBConfigAttrib(display, cfgs[i], LOCAL_GLX_VISUAL_ID, &id);
Visual* visual;
int depth;
@ -424,14 +425,14 @@ GLXLibrary::CreatePixmap(gfxASurface* aSurface)
// caller should deal with this situation.
NS_WARN_IF_FALSE(format->depth == 8,
"[GLX] Couldn't find a FBConfig matching Pixmap format");
return None;
return X11None;
}
int pixmapAttribs[] = { LOCAL_GLX_TEXTURE_TARGET_EXT, LOCAL_GLX_TEXTURE_2D_EXT,
LOCAL_GLX_TEXTURE_FORMAT_EXT,
(alphaSize ? LOCAL_GLX_TEXTURE_FORMAT_RGBA_EXT
: LOCAL_GLX_TEXTURE_FORMAT_RGB_EXT),
None};
X11None};
GLXPixmap glxpixmap = xCreatePixmap(display,
cfgs[matchIndex],
@ -900,7 +901,7 @@ GLContextGLX::~GLContextGLX()
#ifdef DEBUG
bool success =
#endif
mGLX->xMakeCurrent(mDisplay, None, nullptr);
mGLX->xMakeCurrent(mDisplay, X11None, nullptr);
MOZ_ASSERT(success,
"glXMakeCurrent failed to release GL context before we call "
"glXDestroyContext!");
@ -1242,7 +1243,7 @@ GLContextGLX::FindFBConfigForWindow(Display* display, int screen, Window window,
#endif
for (int i = 0; i < numConfigs; i++) {
int visid = None;
int visid = X11None;
sGLXLibrary.xGetFBConfigAttrib(display, cfgs[i], LOCAL_GLX_VISUAL_ID, &visid);
if (!visid) {
continue;

View File

@ -11,6 +11,7 @@
#include <X11/extensions/Xrender.h> // for XRenderPictFormat, etc
#include <X11/extensions/render.h> // for PictFormat
#include "cairo-xlib.h"
#include "X11UndefineNone.h"
#include <stdint.h> // for uint32_t
#include "GLDefs.h" // for GLenum
#include "gfxPlatform.h" // for gfxPlatform
@ -65,7 +66,7 @@ SurfaceDescriptorX11::SurfaceDescriptorX11(gfxXlibSurface* aSurf,
bool aForwardGLX)
: mId(aSurf->XDrawable())
, mSize(aSurf->GetSize())
, mGLXPixmap(None)
, mGLXPixmap(X11None)
{
const XRenderPictFormat *pictFormat = aSurf->XRenderFormat();
if (pictFormat) {
@ -86,7 +87,7 @@ SurfaceDescriptorX11::SurfaceDescriptorX11(Drawable aDrawable, XID aFormatID,
: mId(aDrawable)
, mFormat(aFormatID)
, mSize(aSize)
, mGLXPixmap(None)
, mGLXPixmap(X11None)
{ }
already_AddRefed<gfxXlibSurface>

27
gfx/src/X11UndefineNone.h Normal file
View File

@ -0,0 +1,27 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_GFX_X11UNDEFINENONE_H_
#define MOZILLA_GFX_X11UNDEFINENONE_H_
// The header <X11/X.h> defines "None" as a macro that expands to "0L".
// This is terrible because many enumerations have an enumerator named "None".
// To work around this, we undefine the macro "None", and define a replacement
// macro named "X11None".
// Include this header after including X11 headers, where necessary.
#ifdef None
# undef None
// 0L is the value <X11/X.h> gives "None", so X11None is a drop-in replacement
// wherever the X11 constant was intended.
# define X11None 0L
// <X11/X.h> also defines "RevertToNone" as a macro that expands to "(int)None".
// Since we are undefining "None", that stops working. To keep it working,
// we undefine "RevertToNone" and redefine it in terms of "X11None".
# ifdef RevertToNone
# undef RevertToNone
# define RevertToNone (int)X11None
# endif
#endif
#endif /* MOZILLA_GFX_X11UNDEFINENONE_H_ */

View File

@ -29,7 +29,7 @@ FindVisualAndDepth(Display* aDisplay, VisualID aVisualID,
}
}
NS_ASSERTION(aVisualID == None, "VisualID not on Screen.");
NS_ASSERTION(aVisualID == X11None, "VisualID not on Screen.");
*aVisual = nullptr;
*aDepth = 0;
return;

View File

@ -13,6 +13,7 @@
#if defined(MOZ_WIDGET_GTK)
# include <gdk/gdk.h>
# include <gdk/gdkx.h>
# include "X11UndefineNone.h"
#else
# error Unknown toolkit
#endif

View File

@ -39,6 +39,7 @@ EXPORTS += [
'nsTransform2D.h',
'PingPongRegion.h',
'RegionBuilder.h',
'X11UndefineNone.h'
]
EXPORTS.mozilla += [

View File

@ -25,7 +25,7 @@ using namespace mozilla::gfx;
gfxXlibSurface::gfxXlibSurface(Display *dpy, Drawable drawable, Visual *visual)
: mPixmapTaken(false), mDisplay(dpy), mDrawable(drawable)
#if defined(GL_PROVIDER_GLX)
, mGLXPixmap(None)
, mGLXPixmap(X11None)
#endif
{
const gfx::IntSize size = DoSizeQuery();
@ -36,7 +36,7 @@ gfxXlibSurface::gfxXlibSurface(Display *dpy, Drawable drawable, Visual *visual)
gfxXlibSurface::gfxXlibSurface(Display *dpy, Drawable drawable, Visual *visual, const gfx::IntSize& size)
: mPixmapTaken(false), mDisplay(dpy), mDrawable(drawable)
#if defined(GL_PROVIDER_GLX)
, mGLXPixmap(None)
, mGLXPixmap(X11None)
#endif
{
NS_ASSERTION(Factory::CheckSurfaceSize(size, XLIB_IMAGE_SIDE_SIZE_LIMIT),
@ -51,7 +51,7 @@ gfxXlibSurface::gfxXlibSurface(Screen *screen, Drawable drawable, XRenderPictFor
: mPixmapTaken(false), mDisplay(DisplayOfScreen(screen)),
mDrawable(drawable)
#if defined(GL_PROVIDER_GLX)
, mGLXPixmap(None)
, mGLXPixmap(X11None)
#endif
{
NS_ASSERTION(Factory::CheckSurfaceSize(size, XLIB_IMAGE_SIDE_SIZE_LIMIT),
@ -67,7 +67,7 @@ gfxXlibSurface::gfxXlibSurface(Screen *screen, Drawable drawable, XRenderPictFor
gfxXlibSurface::gfxXlibSurface(cairo_surface_t *csurf)
: mPixmapTaken(false)
#if defined(GL_PROVIDER_GLX)
, mGLXPixmap(None)
, mGLXPixmap(X11None)
#endif
{
NS_PRECONDITION(cairo_surface_status(csurf) == 0,
@ -97,9 +97,9 @@ CreatePixmap(Screen *screen, const gfx::IntSize& size, unsigned int depth,
Drawable relatedDrawable)
{
if (!Factory::CheckSurfaceSize(size, XLIB_IMAGE_SIDE_SIZE_LIMIT))
return None;
return X11None;
if (relatedDrawable == None) {
if (relatedDrawable == X11None) {
relatedDrawable = RootWindowOfScreen(screen);
}
Display *dpy = DisplayOfScreen(screen);
@ -274,7 +274,7 @@ gfxXlibSurface::Finish()
#if defined(GL_PROVIDER_GLX)
if (mPixmapTaken && mGLXPixmap) {
gl::sGLXLibrary.DestroyPixmap(mDisplay, mGLXPixmap);
mGLXPixmap = None;
mGLXPixmap = X11None;
}
#endif
gfxASurface::Finish();

View File

@ -10,6 +10,7 @@
#include <X11/extensions/Xrender.h>
#include <X11/Xlib.h>
#include "X11UndefineNone.h"
#if defined(GL_PROVIDER_GLX)
#include "GLXLibrary.h"
@ -46,13 +47,13 @@ public:
// |screen| (if specified).
static already_AddRefed<gfxXlibSurface>
Create(Screen *screen, Visual *visual, const mozilla::gfx::IntSize& size,
Drawable relatedDrawable = None);
Drawable relatedDrawable = X11None);
static cairo_surface_t *
CreateCairoSurface(Screen *screen, Visual *visual, const mozilla::gfx::IntSize& size,
Drawable relatedDrawable = None);
Drawable relatedDrawable = X11None);
static already_AddRefed<gfxXlibSurface>
Create(Screen* screen, XRenderPictFormat *format, const mozilla::gfx::IntSize& size,
Drawable relatedDrawable = None);
Drawable relatedDrawable = X11None);
virtual ~gfxXlibSurface();

View File

@ -482,7 +482,9 @@ void
ServoStyleSet::StyleNewSubtree(nsIContent* aContent)
{
MOZ_ASSERT(aContent->IsDirtyForServo());
Servo_RestyleSubtree(aContent, mRawSet.get());
if (aContent->IsElement() || aContent->IsNodeOfType(nsINode::eTEXT)) {
Servo_RestyleSubtree(aContent, mRawSet.get());
}
ClearDirtyBits(aContent);
}

View File

@ -763,7 +763,7 @@ class IceTestPeer : public sigslot::has_slots<> {
remote->GetCandidates(i);
for (size_t j=0; j<candidates.size(); ++j) {
std::cerr << name_ << " Candidate: " + candidates[j] << std::endl;
std::cerr << name_ << " Adding remote candidate: " + candidates[j] << std::endl;
}
res = aStream->ParseAttributes(candidates);
ASSERT_TRUE(NS_SUCCEEDED(res));

View File

@ -181,7 +181,8 @@ int nr_ice_candidate_pair_unfreeze(nr_ice_peer_ctx *pctx, nr_ice_cand_pair *pair
static void nr_ice_candidate_pair_stun_cb(NR_SOCKET s, int how, void *cb_arg)
{
int r,_status;
nr_ice_cand_pair *pair=cb_arg,*orig_pair;
nr_ice_cand_pair *pair=cb_arg;
nr_ice_cand_pair *actual_pair=0;
nr_ice_candidate *cand=0;
nr_stun_message *sres;
nr_transport_addr *request_src;
@ -256,14 +257,17 @@ static void nr_ice_candidate_pair_stun_cb(NR_SOCKET s, int how, void *cb_arg)
cand=TAILQ_FIRST(&pair->local->component->candidates);
while(cand){
if(!nr_transport_addr_cmp(&cand->addr,&pair->stun_client->results.ice_binding_response.mapped_addr,NR_TRANSPORT_ADDR_CMP_MODE_ALL))
if(!nr_transport_addr_cmp(&cand->addr,&pair->stun_client->results.ice_binding_response.mapped_addr,NR_TRANSPORT_ADDR_CMP_MODE_ALL)) {
r_log(LOG_ICE,LOG_DEBUG,"ICE-PEER(%s): found pre-existing local candidate of type %d for mapped address %s", pair->pctx->label,cand->type,cand->addr.as_string);
assert(cand->type != HOST);
break;
}
cand=TAILQ_NEXT(cand,entry_comp);
}
/* OK, nothing found, must be peer reflexive */
if(!cand) {
/* OK, nothing found, must be a new peer reflexive */
if (pair->pctx->ctx->flags & NR_ICE_CTX_FLAGS_RELAY_ONLY) {
/* Any STUN response with a reflexive address in it is unwanted
when we'll send on relay only. Bail since cand is used below. */
@ -277,27 +281,31 @@ static void nr_ice_candidate_pair_stun_cb(NR_SOCKET s, int how, void *cb_arg)
ABORT(r);
cand->state=NR_ICE_CAND_STATE_INITIALIZED;
TAILQ_INSERT_TAIL(&pair->local->component->candidates,cand,entry_comp);
} else {
/* Check if we have a pair for this candidate already. */
if(r=nr_ice_media_stream_find_pair(pair->remote->stream, cand, pair->remote, &actual_pair)) {
r_log(LOG_ICE,LOG_DEBUG,"ICE-PEER(%s): no pair exists for %s and %s", pair->pctx->label,cand->addr.as_string, pair->remote->addr.as_string);
}
}
/* Note: we stomp the existing pair! */
orig_pair=pair;
if(r=nr_ice_candidate_pair_create(pair->pctx,cand,pair->remote,
&pair))
ABORT(r);
if(!actual_pair) {
if(r=nr_ice_candidate_pair_create(pair->pctx,cand,pair->remote, &actual_pair))
ABORT(r);
nr_ice_candidate_pair_set_state(pair->pctx,pair,NR_ICE_PAIR_STATE_SUCCEEDED);
if(r=nr_ice_component_insert_pair(actual_pair->remote->component,actual_pair))
ABORT(r);
if(r=nr_ice_component_insert_pair(pair->remote->component,pair))
ABORT(r);
/* If the original pair was nominated, make us nominated too. */
if(pair->peer_nominated)
actual_pair->peer_nominated=1;
/* If the original pair was nominated, make us nominated,
since we replace him*/
if(orig_pair->peer_nominated)
pair->peer_nominated=1;
/* Now mark the orig pair failed */
nr_ice_candidate_pair_set_state(pair->pctx,pair,NR_ICE_PAIR_STATE_FAILED);
}
/* Now mark the orig pair failed */
nr_ice_candidate_pair_set_state(orig_pair->pctx,orig_pair,NR_ICE_PAIR_STATE_FAILED);
assert(actual_pair);
nr_ice_candidate_pair_set_state(actual_pair->pctx,actual_pair,NR_ICE_PAIR_STATE_SUCCEEDED);
pair=actual_pair;
}

View File

@ -913,3 +913,21 @@ void nr_ice_media_stream_role_change(nr_ice_media_stream *stream)
}
}
/* Search |str|'s check list for the pair whose local and remote candidates
 * are exactly |lcand| and |rcand| (pointer identity, not address equality).
 * On success, stores the pair in |*pair| and returns 0; returns R_NOT_FOUND
 * when no such pair is on the list. */
int nr_ice_media_stream_find_pair(nr_ice_media_stream *str, nr_ice_candidate *lcand, nr_ice_candidate *rcand, nr_ice_cand_pair **pair)
  {
    nr_ice_cand_pair *entry;

    for (entry = TAILQ_FIRST(&str->check_list); entry;
         entry = TAILQ_NEXT(entry, check_queue_entry)) {
      if (entry->local == lcand && entry->remote == rcand) {
        *pair = entry;
        return (0);
      }
    }

    return (R_NOT_FOUND);
  }

View File

@ -92,6 +92,7 @@ int nr_ice_media_stream_get_best_candidate(nr_ice_media_stream *str, int compone
int nr_ice_media_stream_send(nr_ice_peer_ctx *pctx, nr_ice_media_stream *str, int component, UCHAR *data, int len);
int nr_ice_media_stream_get_active(nr_ice_peer_ctx *pctx, nr_ice_media_stream *str, int component, nr_ice_candidate **local, nr_ice_candidate **remote);
int nr_ice_media_stream_find_component(nr_ice_media_stream *str, int comp_id, nr_ice_component **compp);
int nr_ice_media_stream_find_pair(nr_ice_media_stream *str, nr_ice_candidate *local, nr_ice_candidate *remote, nr_ice_cand_pair **pair);
int nr_ice_media_stream_addrs(nr_ice_peer_ctx *pctx, nr_ice_media_stream *str, int component, nr_transport_addr *local, nr_transport_addr *remote);
int
nr_ice_peer_ctx_parse_media_stream_attribute(nr_ice_peer_ctx *pctx, nr_ice_media_stream *stream, char *attr);

View File

@ -12,7 +12,6 @@ import org.mozilla.gecko.util.GeckoEventListener;
import org.mozilla.gecko.util.JSONUtils;
import org.mozilla.gecko.util.NativeEventListener;
import org.mozilla.gecko.util.NativeJSObject;
import org.mozilla.gecko.util.WebActivityMapper;
import org.mozilla.gecko.widget.ExternalIntentDuringPrivateBrowsingPromptFragment;
import org.json.JSONArray;
@ -48,7 +47,6 @@ public final class IntentHelper implements GeckoEventListener,
"Intent:GetHandlers",
"Intent:Open",
"Intent:OpenForResult",
"WebActivity:Open"
};
private static final String[] NATIVE_EVENTS = {
@ -416,8 +414,6 @@ public final class IntentHelper implements GeckoEventListener,
open(message);
} else if (event.equals("Intent:OpenForResult")) {
openForResult(message);
} else if (event.equals("WebActivity:Open")) {
openWebActivity(message);
}
} catch (JSONException e) {
Log.e(LOGTAG, "Exception handling message \"" + event + "\":", e);
@ -572,11 +568,6 @@ public final class IntentHelper implements GeckoEventListener,
return UNKNOWN_PROTOCOL_URI_PREFIX + encodedUri;
}
private void openWebActivity(JSONObject message) throws JSONException {
final Intent intent = WebActivityMapper.getIntentForWebActivity(message.getJSONObject("activity"));
ActivityHandlerHelper.startIntentForActivity(activity, intent, new ResultHandler(message));
}
private static class ResultHandler implements ActivityResultHandler {
private final JSONObject message;

View File

@ -139,7 +139,6 @@ gujar.sources += [geckoview_source_dir + 'java/org/mozilla/gecko/' + x for x in
'util/UIAsyncTask.java',
'util/UUIDUtil.java',
'util/WeakReferenceHandler.java',
'util/WebActivityMapper.java',
'util/WindowUtils.java',
]]
gujar.extra_jars = [

View File

@ -1,31 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict"
const Cc = Components.classes;
const Ci = Components.interfaces;
const Cu = Components.utils;
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Messaging.jsm");
function ActivitiesGlue() { }
ActivitiesGlue.prototype = {
QueryInterface: XPCOMUtils.generateQI([Ci.nsIActivityUIGlue]),
classID: Components.ID("{e4deb5f6-d5e3-4fce-bc53-901dd9951c48}"),
// Ignore aActivities results on Android, go straight to Android intents.
chooseActivity: function ap_chooseActivity(aOptions, aActivities, aCallback) {
Messaging.sendRequestForResult({
type: "WebActivity:Open",
activity: { name: aOptions.name, data: aOptions.data }
}).then((result) => {
aCallback.handleEvent(Ci.nsIActivityUIGlueCallback.NATIVE_ACTIVITY, result);
});
}
};
this.NSGetFactory = XPCOMUtils.generateNSGetFactory([ActivitiesGlue]);

View File

@ -115,10 +115,6 @@ category update-timer Snippets @mozilla.org/snippets;1,getService,snippets-updat
component {430b987f-bb9f-46a3-99a5-241749220b29} ColorPicker.js
contract @mozilla.org/colorpicker;1 {430b987f-bb9f-46a3-99a5-241749220b29}
# AndroidActivitiesGlue.js
component {e4deb5f6-d5e3-4fce-bc53-901dd9951c48} AndroidActivitiesGlue.js
contract @mozilla.org/dom/activities/ui-glue;1 {e4deb5f6-d5e3-4fce-bc53-901dd9951c48}
# PersistentNotificationHandler.js
component {75390fe7-f8a3-423a-b3b1-258d7eabed40} PersistentNotificationHandler.js
contract @mozilla.org/persistent-notification-handler;1 {75390fe7-f8a3-423a-b3b1-258d7eabed40}

View File

@ -18,7 +18,7 @@ let dataURI = "iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAC4klEQVRYhdWXLWzbQ
let image = atob(dataURI);
const IMAGE_ARRAYBUFFER = Uint8Array.from(image, byte => byte.charCodeAt(0)).buffer;
function backgroundScript() {
function background() {
browser.test.assertTrue("pageAction" in browser, "Namespace 'pageAction' exists in browser");
browser.test.assertTrue("show" in browser.pageAction, "API method 'show' exists in browser.pageAction");
@ -46,7 +46,7 @@ function backgroundScript() {
add_task(function* test_contentscript() {
let extension = ExtensionTestUtils.loadExtension({
background: "(" + backgroundScript.toString() + ")()",
background,
manifest: {
"name": "PageAction Extension",
"page_action": {

View File

@ -21,7 +21,7 @@ let image = atob(dataURI);
const IMAGE_ARRAYBUFFER = Uint8Array.from(image, byte => byte.charCodeAt(0)).buffer;
add_task(function* test_contentscript() {
function backgroundScript() {
function background() {
// TODO: Use the Tabs API to obtain the tab ids for showing pageActions.
let tabId = 1;
let onClickedListenerEnabled = false;
@ -71,7 +71,7 @@ add_task(function* test_contentscript() {
}
let extension = ExtensionTestUtils.loadExtension({
background: `(${backgroundScript}())`,
background,
manifest: {
"name": "PageAction Extension",
"page_action": {
@ -83,11 +83,11 @@ add_task(function* test_contentscript() {
},
},
files: {
"default.html": `<html><head><meta charset="utf-8"><script src="popup.js"></${"script"}></head></html>`,
"default.html": `<html><head><meta charset="utf-8"><script src="popup.js"><\/script></head></html>`,
"extension.png": IMAGE_ARRAYBUFFER,
"a.html": `<html><head><meta charset="utf-8"><script src="popup.js"></${"script"}></head></html>`,
"b.html": `<html><head><meta charset="utf-8"><script src="popup.js"></${"script"}></head></html>`,
"popup.js": `(${popupScript})()`,
"a.html": `<html><head><meta charset="utf-8"><script src="popup.js"><\/script></head></html>`,
"b.html": `<html><head><meta charset="utf-8"><script src="popup.js"><\/script></head></html>`,
"popup.js": popupScript,
},
});

View File

@ -13,7 +13,6 @@ XPIDL_MODULE = 'MobileComponents'
EXTRA_COMPONENTS += [
'AboutRedirector.js',
'AddonUpdateService.js',
'AndroidActivitiesGlue.js',
'BlocklistPrompt.js',
'BrowserCLH.js',
'ColorPicker.js',

View File

@ -1,221 +0,0 @@
/* -*- Mode: Java; c-basic-offset: 4; tab-width: 4; indent-tabs-mode: nil; -*-
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.util;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.text.TextUtils;
import android.util.Log;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
/**
 * Maps Web Activity requests coming from Gecko (a JSON message with a
 * "name" and a "data" object) onto native Android {@link Intent}s.
 *
 * Each supported activity name is registered in {@link #activityMap} with a
 * {@link WebActivityMapping} strategy that supplies the intent action and,
 * optionally, a MIME type, a URI, and extras.
 */
public final class WebActivityMapper {
    private static final String LOGTAG = "Gecko";

    // Registry of supported web activity names -> intent mappings.
    private static final Map<String, WebActivityMapping> activityMap = new HashMap<String, WebActivityMapping>();

    static {
        activityMap.put("dial", new DialMapping());
        activityMap.put("open", new OpenMapping());
        activityMap.put("pick", new PickMapping());
        activityMap.put("send", new SendMapping());
        activityMap.put("view", new ViewMapping());
        activityMap.put("record", new RecordMapping());
    }

    /** Strategy describing how one web activity name translates to an Intent. */
    private static abstract class WebActivityMapping {
        protected JSONObject mData;

        public void setData(JSONObject data) {
            mData = data;
        }

        // Must not return null for mapped activities; RecordMapping may
        // return null for unrecognized "type" values (see below).
        public abstract String getAction();

        public String getMime() throws JSONException {
            return null;
        }

        public String getUri() throws JSONException {
            return null;
        }

        public void putExtras(Intent intent) throws JSONException {}
    }

    /**
     * Provides useful defaults for mime type and uri.
     */
    private static abstract class BaseMapping extends WebActivityMapping {
        /**
         * If 'type' is present in data object, uses the value as the MIME type.
         */
        @Override
        public String getMime() throws JSONException {
            return mData.optString("type", null);
        }

        /**
         * If 'uri' or 'url' is present in data object, uses the respective value as the Uri.
         */
        @Override
        public String getUri() throws JSONException {
            // Will return uri or url if present.
            String uri = mData.optString("uri", null);
            return uri != null ? uri : mData.optString("url", null);
        }
    }

    /**
     * Builds an Android Intent for the given web activity message.
     *
     * @param message JSON carrying a "name" string and a "data" object.
     * @return the mapped Intent, or null when the activity name is unknown.
     * @throws JSONException if "name" or "data" is missing, or a mapping
     *         requires a field that is absent (e.g. "number" for dial).
     */
    public static Intent getIntentForWebActivity(JSONObject message) throws JSONException {
        final String name = message.getString("name").toLowerCase();
        final JSONObject data = message.getJSONObject("data");

        Log.w(LOGTAG, "Activity is: " + name);
        final WebActivityMapping mapping = activityMap.get(name);
        if (mapping == null) {
            Log.w(LOGTAG, "No mapping found!");
            return null;
        }

        mapping.setData(data);

        final Intent intent = new Intent(mapping.getAction());

        final String mime = mapping.getMime();
        if (!TextUtils.isEmpty(mime)) {
            intent.setType(mime);
        }

        final String uri = mapping.getUri();
        if (!TextUtils.isEmpty(uri)) {
            intent.setData(Uri.parse(uri));
        }

        mapping.putExtras(intent);

        return intent;
    }

    private static class DialMapping extends WebActivityMapping {
        @Override
        public String getAction() {
            return Intent.ACTION_DIAL;
        }

        @Override
        public String getUri() throws JSONException {
            return "tel:" + mData.getString("number");
        }
    }

    private static class OpenMapping extends BaseMapping {
        @Override
        public String getAction() {
            return Intent.ACTION_VIEW;
        }
    }

    private static class PickMapping extends BaseMapping {
        @Override
        public String getAction() {
            return Intent.ACTION_GET_CONTENT;
        }

        @Override
        public String getMime() throws JSONException {
            // bug 1007112 - pick action needs a mimetype to work
            String mime = mData.optString("type", null);
            return !TextUtils.isEmpty(mime) ? mime : "*/*";
        }
    }

    private static class SendMapping extends BaseMapping {
        @Override
        public String getAction() {
            return Intent.ACTION_SEND;
        }

        @Override
        public void putExtras(Intent intent) throws JSONException {
            optPutExtra("text", Intent.EXTRA_TEXT, intent);
            optPutExtra("html_text", Intent.EXTRA_HTML_TEXT, intent);
            optPutExtra("stream", Intent.EXTRA_STREAM, intent);
        }

        // Copies mData[key] into the intent as |extraName| when non-empty.
        private void optPutExtra(String key, String extraName, Intent intent) {
            final String extraValue = mData.optString(key);
            if (!TextUtils.isEmpty(extraValue)) {
                intent.putExtra(extraName, extraValue);
            }
        }
    }

    private static class ViewMapping extends BaseMapping {
        @Override
        public String getAction() {
            return Intent.ACTION_VIEW;
        }

        @Override
        public String getMime() {
            // MozActivity adds a type 'url' here, we don't want to set the MIME to 'url'.
            String type = mData.optString("type", null);
            if ("url".equals(type) || "uri".equals(type)) {
                return null;
            } else {
                return type;
            }
        }
    }

    private static class RecordMapping extends WebActivityMapping {
        private static final String ACTION_IMAGE_CAPTURE = "android.media.action.IMAGE_CAPTURE";
        private static final String ACTION_VIDEO_CAPTURE = "android.media.action.VIDEO_CAPTURE";

        @Override
        public String getAction() {
            String type = mData.optString("type", null);
            if ("photos".equals(type)) {
                return ACTION_IMAGE_CAPTURE;
            } else if ("videos".equals(type)) {
                return ACTION_VIDEO_CAPTURE;
            }
            return null;
        }

        // Add an extra to specify where to save the picture/video.
        @Override
        public void putExtras(Intent intent) {
            final String action = getAction();
            // Bug fix: the original compared strings with ==, which only
            // worked through literal interning. Use constant-first equals()
            // so a null action (unknown "type") safely falls through to the
            // video defaults, matching the previous observable behavior.
            final boolean isImageCapture = ACTION_IMAGE_CAPTURE.equals(action);
            final String dirType = isImageCapture
                                 ? Environment.DIRECTORY_PICTURES
                                 : Environment.DIRECTORY_MOVIES;
            final String ext = isImageCapture
                             ? ".jpg"
                             : ".mp4";

            File destDir = Environment.getExternalStoragePublicDirectory(dirType);

            try {
                File dest = File.createTempFile(
                    "capture", /* prefix */
                    ext,       /* suffix */
                    destDir    /* directory */
                );
                intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(dest));
            } catch (Exception e) {
                Log.w(LOGTAG, "Failed to add extra for " + action + " : " + e);
            }
        }
    }
}

View File

@ -114,7 +114,6 @@
@BINPATH@/components/directory.xpt
@BINPATH@/components/docshell.xpt
@BINPATH@/components/dom.xpt
@BINPATH@/components/dom_activities.xpt
@BINPATH@/components/dom_apps.xpt
@BINPATH@/components/dom_newapps.xpt
@BINPATH@/components/dom_base.xpt
@ -124,7 +123,6 @@
@BINPATH@/components/dom_events.xpt
@BINPATH@/components/dom_geolocation.xpt
@BINPATH@/components/dom_media.xpt
@BINPATH@/components/dom_messages.xpt
@BINPATH@/components/dom_network.xpt
@BINPATH@/components/dom_notification.xpt
@BINPATH@/components/dom_html.xpt
@ -372,13 +370,6 @@
@BINPATH@/components/htmlMenuBuilder.js
@BINPATH@/components/htmlMenuBuilder.manifest
@BINPATH@/components/Activities.manifest
@BINPATH@/components/AndroidActivitiesGlue.js
@BINPATH@/components/ActivityProxy.js
@BINPATH@/components/ActivityRequestHandler.js
@BINPATH@/components/ActivityWrapper.js
@BINPATH@/components/ActivityMessageConfigurator.js
@BINPATH@/components/SystemMessageInternal.js
@BINPATH@/components/SystemMessageManager.js
@BINPATH@/components/SystemMessageCache.js

View File

@ -5,12 +5,14 @@
package org.mozilla.gecko.background.fxa;
import org.mozilla.gecko.background.fxa.FxAccountClient20.AccountStatusResponse;
import org.mozilla.gecko.background.fxa.FxAccountClient20.RequestDelegate;
import org.mozilla.gecko.background.fxa.FxAccountClient20.RecoveryEmailStatusResponse;
import org.mozilla.gecko.background.fxa.FxAccountClient20.RequestDelegate;
import org.mozilla.gecko.background.fxa.FxAccountClient20.TwoKeys;
import org.mozilla.gecko.fxa.FxAccountDevice;
import org.mozilla.gecko.sync.ExtendedJSONObject;
import java.util.List;
public interface FxAccountClient {
public void accountStatus(String uid, RequestDelegate<AccountStatusResponse> requestDelegate);
public void recoveryEmailStatus(byte[] sessionToken, RequestDelegate<RecoveryEmailStatusResponse> requestDelegate);
@ -18,4 +20,5 @@ public interface FxAccountClient {
public void sign(byte[] sessionToken, ExtendedJSONObject publicKey, long certificateDurationInMilliseconds, RequestDelegate<String> requestDelegate);
public void registerOrUpdateDevice(byte[] sessionToken, FxAccountDevice device, RequestDelegate<FxAccountDevice> requestDelegate);
public void deviceList(byte[] sessionToken, RequestDelegate<FxAccountDevice[]> requestDelegate);
public void notifyDevices(byte[] sessionToken, List<String> deviceIds, ExtendedJSONObject payload, Long TTL, RequestDelegate<ExtendedJSONObject> requestDelegate);
}

View File

@ -4,6 +4,8 @@
package org.mozilla.gecko.background.fxa;
import android.support.annotation.NonNull;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.mozilla.gecko.background.common.log.Logger;
@ -33,6 +35,7 @@ import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
@ -832,7 +835,6 @@ public class FxAccountClient20 implements FxAccountClient {
}
final BaseResource resource;
final ExtendedJSONObject body;
try {
resource = getBaseResource("account/devices");
} catch (URISyntaxException | UnsupportedEncodingException e) {
@ -858,4 +860,55 @@ public class FxAccountClient20 implements FxAccountClient {
resource.get();
}
@Override
/* Sends a push-style notification payload to the given device ids via the
 * FxA "account/devices/notify" endpoint. Derives the Hawk credentials
 * (tokenId, reqHMACKey) from |sessionToken| with HKDF, builds the request
 * body via createNotifyDevicesBody, and POSTs it. Errors during key
 * derivation or URI construction are reported through the delegate's error
 * path; on HTTP success the parsed JSON body is handed to the delegate. */
public void notifyDevices(@NonNull byte[] sessionToken, @NonNull List<String> deviceIds, ExtendedJSONObject payload, Long TTL, RequestDelegate<ExtendedJSONObject> delegate) {
final byte[] tokenId = new byte[32];
final byte[] reqHMACKey = new byte[32];
final byte[] requestKey = new byte[32];
try {
// Derive the three 32-byte keys from the session token (FxA key-wrapping scheme).
HKDF.deriveMany(sessionToken, new byte[0], FxAccountUtils.KW("sessionToken"), tokenId, reqHMACKey, requestKey);
} catch (Exception e) {
invokeHandleError(delegate, e);
return;
}
final BaseResource resource;
final ExtendedJSONObject body = createNotifyDevicesBody(deviceIds, payload, TTL);
try {
resource = getBaseResource("account/devices/notify");
} catch (URISyntaxException | UnsupportedEncodingException e) {
invokeHandleError(delegate, e);
return;
}
resource.delegate = new ResourceDelegate<ExtendedJSONObject>(resource, delegate, ResponseType.JSON_OBJECT, tokenId, reqHMACKey) {
@Override
public void handleSuccess(int status, HttpResponse response, ExtendedJSONObject body) {
try {
delegate.handleSuccess(body);
} catch (Exception e) {
// Shield the resource machinery from exceptions thrown by the caller's success handler.
delegate.handleError(e);
}
}
};
post(resource, body);
}
@NonNull
@SuppressWarnings("unchecked")
/* Builds the JSON body for "account/devices/notify": {"to": [...deviceIds],
 * "payload": <payload>?, "TTL": <TTL>?}. The "payload" and "TTL" keys are
 * omitted when their arguments are null. The unchecked suppression covers
 * json-simple's raw JSONArray. */
private ExtendedJSONObject createNotifyDevicesBody(@NonNull List<String> deviceIds, ExtendedJSONObject payload, Long TTL) {
final ExtendedJSONObject body = new ExtendedJSONObject();
final JSONArray to = new JSONArray();
to.addAll(deviceIds);
body.put("to", to);
if (payload != null) {
body.put("payload", payload);
}
if (TTL != null) {
body.put("TTL", TTL);
}
return body;
}
}

View File

@ -19,9 +19,8 @@ import org.mozilla.gecko.background.fxa.FxAccountClient20.RequestDelegate;
import org.mozilla.gecko.background.fxa.FxAccountClientException.FxAccountClientRemoteException;
import org.mozilla.gecko.background.fxa.FxAccountRemoteError;
import org.mozilla.gecko.fxa.authenticator.AndroidFxAccount;
import org.mozilla.gecko.fxa.authenticator.AndroidFxAccount.InvalidFxAState;
import org.mozilla.gecko.fxa.login.State;
import org.mozilla.gecko.fxa.login.State.StateLabel;
import org.mozilla.gecko.fxa.login.TokensAndKeysState;
import org.mozilla.gecko.sync.SharedPreferencesClientsDataDelegate;
import org.mozilla.gecko.util.BundleEventListener;
import org.mozilla.gecko.util.EventCallback;
@ -112,7 +111,11 @@ public class FxAccountDeviceRegistrator implements BundleEventListener {
String pushAuthKey = subscription.getString("pushAuthKey");
final AndroidFxAccount fxAccount = AndroidFxAccount.fromContext(context);
final byte[] sessionToken = getSessionToken(fxAccount);
if (fxAccount == null) {
Log.e(LOG_TAG, "AndroidFxAccount is null");
return;
}
final byte[] sessionToken = fxAccount.getSessionToken();
final FxAccountDevice device;
String deviceId = fxAccount.getDeviceId();
String clientName = getClientName(fxAccount, context);
@ -180,17 +183,6 @@ public class FxAccountDeviceRegistrator implements BundleEventListener {
}
}
@Nullable
private static byte[] getSessionToken(final AndroidFxAccount fxAccount) throws InvalidFxAState {
State state = fxAccount.getState();
StateLabel stateLabel = state.getStateLabel();
if (stateLabel == StateLabel.Cohabiting || stateLabel == StateLabel.Married) {
TokensAndKeysState tokensAndKeysState = (TokensAndKeysState) state;
return tokensAndKeysState.getSessionToken();
}
throw new InvalidFxAState("Cannot get sessionToken: not in a TokensAndKeysState state");
}
private static void handleTokenError(final FxAccountClientRemoteException error,
final FxAccountClient fxAccountClient,
final AndroidFxAccount fxAccount) {
@ -287,12 +279,4 @@ public class FxAccountDeviceRegistrator implements BundleEventListener {
BundleEventListener.class, String[].class);
registerBackgroundThreadListener.invoke(instance, this, new String[] { "FxAccountsPush:Subscribe:Response" });
}
public static class InvalidFxAState extends Exception {
private static final long serialVersionUID = -8537626959811195978L;
public InvalidFxAState(String message) {
super(message);
}
}
}

View File

@ -7,6 +7,7 @@ import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.mozilla.gecko.fxa.authenticator.AndroidFxAccount;
@ -15,6 +16,9 @@ public class FxAccountPushHandler {
private static final String LOG_TAG = "FxAccountPush";
private static final String COMMAND_DEVICE_DISCONNECTED = "fxaccounts:device_disconnected";
private static final String COMMAND_COLLECTION_CHANGED = "sync:collection_changed";
private static final String CLIENTS_COLLECTION = "clients";
// Forbid instantiation
private FxAccountPushHandler() {}
@ -45,6 +49,9 @@ public class FxAccountPushHandler {
case COMMAND_DEVICE_DISCONNECTED:
handleDeviceDisconnection(context, data);
break;
case COMMAND_COLLECTION_CHANGED:
handleCollectionChanged(context, data);
break;
default:
Log.d(LOG_TAG, "No handler defined for FxA Push command " + command);
break;
@ -54,6 +61,23 @@ public class FxAccountPushHandler {
}
}
/* Handles the "sync:collection_changed" push command. Scans the
 * "collections" array in the push payload; if the "clients" collection is
 * listed, requests an immediate sync of that collection for the current
 * Firefox Account. Other collection names are ignored. Logs and returns
 * when no account exists (the push may arrive after account removal).
 * Throws JSONException when "collections" is missing or an entry is not a
 * string. */
private static void handleCollectionChanged(Context context, JSONObject data) throws JSONException {
JSONArray collections = data.getJSONArray("collections");
int len = collections.length();
for (int i = 0; i < len; i++) {
if (collections.getString(i).equals(CLIENTS_COLLECTION)) {
final Account account = FirefoxAccounts.getFirefoxAccount(context);
if (account == null) {
Log.e(LOG_TAG, "The account does not exist anymore");
return;
}
final AndroidFxAccount fxAccount = new AndroidFxAccount(context, account);
// NOTE(review): second argument is null here — presumably an optional
// extras/stage bundle; confirm against requestImmediateSync's signature.
fxAccount.requestImmediateSync(new String[] { CLIENTS_COLLECTION }, null);
return;
}
}
}
private static void handleDeviceDisconnection(Context context, JSONObject data) throws JSONException {
final Account account = FirefoxAccounts.getFirefoxAccount(context);
if (account == null) {

View File

@ -30,6 +30,7 @@ import org.mozilla.gecko.fxa.FxAccountConstants;
import org.mozilla.gecko.fxa.login.State;
import org.mozilla.gecko.fxa.login.State.StateLabel;
import org.mozilla.gecko.fxa.login.StateFactory;
import org.mozilla.gecko.fxa.login.TokensAndKeysState;
import org.mozilla.gecko.fxa.sync.FxAccountProfileService;
import org.mozilla.gecko.sync.ExtendedJSONObject;
import org.mozilla.gecko.sync.Utils;
@ -607,6 +608,24 @@ public class AndroidFxAccount {
}
}
/* Returns the FxA session token bytes for this account. Only the
 * Cohabiting and Married states carry tokens and keys; any other state
 * cannot supply a session token and raises InvalidFxAState. */
public byte[] getSessionToken() throws InvalidFxAState {
State state = getState();
StateLabel stateLabel = state.getStateLabel();
if (stateLabel == StateLabel.Cohabiting || stateLabel == StateLabel.Married) {
TokensAndKeysState tokensAndKeysState = (TokensAndKeysState) state;
return tokensAndKeysState.getSessionToken();
}
throw new InvalidFxAState("Cannot get sessionToken: not in a TokensAndKeysState state");
}
/* Thrown when an operation requires account state data (e.g. a session
 * token) that the account's current state does not carry. */
public static class InvalidFxAState extends Exception {
private static final long serialVersionUID = -8537626959811195978L;
public InvalidFxAState(String message) {
super(message);
}
}
/**
* <b>For debugging only!</b>
*/

View File

@ -6,19 +6,27 @@ package org.mozilla.gecko.sync.stage;
import android.accounts.Account;
import android.content.Context;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.util.Log;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.mozilla.gecko.AppConstants;
import org.mozilla.gecko.background.common.log.Logger;
import org.mozilla.gecko.background.fxa.FxAccountClient;
import org.mozilla.gecko.background.fxa.FxAccountClient20;
import org.mozilla.gecko.background.fxa.FxAccountClientException;
import org.mozilla.gecko.fxa.FirefoxAccounts;
import org.mozilla.gecko.fxa.authenticator.AndroidFxAccount;
import org.mozilla.gecko.sync.CommandProcessor;
@ -54,6 +62,7 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
public static final String STAGE_NAME = COLLECTION_NAME;
public static final int CLIENTS_TTL_REFRESH = 604800000; // 7 days in milliseconds.
public static final int MAX_UPLOAD_FAILURE_COUNT = 5;
public static final long NOTIFY_TAB_SENT_TTL_SECS = TimeUnit.SECONDS.convert(1L, TimeUnit.HOURS); // 1 hour
protected final ClientRecordFactory factory = new ClientRecordFactory();
protected ClientUploadDelegate clientUploadDelegate;
@ -65,7 +74,7 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
protected volatile boolean shouldWipe;
protected volatile boolean shouldUploadLocalRecord; // Set if, e.g., we received commands or need to refresh our version.
protected final AtomicInteger uploadAttemptsCount = new AtomicInteger();
protected final List<ClientRecord> toUpload = new ArrayList<ClientRecord>();
protected final List<ClientRecord> modifiedClientsToUpload = new ArrayList<ClientRecord>();
protected int getClientsCount() {
return getClientsDatabaseAccessor().clientsCount();
@ -151,13 +160,80 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
// If we upload remote records, checkAndUpload() will be called upon
// upload success in the delegate. Otherwise call checkAndUpload() now.
if (toUpload.size() > 0) {
if (modifiedClientsToUpload.size() > 0) {
// modifiedClientsToUpload is cleared in uploadRemoteRecords, save what we need here
final List<String> devicesToNotify = new ArrayList<>();
for (ClientRecord record : modifiedClientsToUpload) {
if (!TextUtils.isEmpty(record.fxaDeviceId)) {
devicesToNotify.add(record.fxaDeviceId);
}
}
// This method is synchronous, there's no risk of notifying the clients
// before we actually uploaded the records
uploadRemoteRecords();
// Notify the clients who got their record written
notifyClients(devicesToNotify);
return;
}
checkAndUpload();
}
/* Notifies the given FxA device ids that the "clients" collection changed,
 * by POSTing a "sync:collection_changed" payload through the FxA client
 * (see the API doc link below). Best-effort: logs and bails when there is
 * no account or the session token is unavailable; request outcomes are only
 * logged. */
private void notifyClients(final List<String> devicesToNotify) {
final ExecutorService executor = Executors.newSingleThreadExecutor();
final Context context = session.getContext();
final Account account = FirefoxAccounts.getFirefoxAccount(context);
if (account == null) {
Log.e(LOG_TAG, "Can't notify other clients: no account");
return;
}
final AndroidFxAccount fxAccount = new AndroidFxAccount(context, account);
final ExtendedJSONObject payload = createNotifyDevicesPayload();
final byte[] sessionToken;
try {
sessionToken = fxAccount.getSessionToken();
} catch (AndroidFxAccount.InvalidFxAState invalidFxAState) {
Log.e(LOG_TAG, "Could not get session token", invalidFxAState);
return;
}
// API doc : https://github.com/mozilla/fxa-auth-server/blob/master/docs/api.md#post-v1accountdevicesnotify
final FxAccountClient fxAccountClient = new FxAccountClient20(fxAccount.getAccountServerURI(), executor);
fxAccountClient.notifyDevices(sessionToken, devicesToNotify, payload, NOTIFY_TAB_SENT_TTL_SECS, new FxAccountClient20.RequestDelegate<ExtendedJSONObject>() {
@Override
public void handleError(Exception e) {
Log.e(LOG_TAG, "Error while notifying devices", e);
}
@Override
public void handleFailure(FxAccountClientException.FxAccountClientRemoteException e) {
Log.e(LOG_TAG, "Error while notifying devices", e);
}
@Override
public void handleSuccess(ExtendedJSONObject result) {
Log.i(LOG_TAG, devicesToNotify.size() + " devices notified");
}
});
}
@NonNull
@SuppressWarnings("unchecked")
private ExtendedJSONObject createNotifyDevicesPayload() {
final ExtendedJSONObject payload = new ExtendedJSONObject();
payload.put("version", 1);
payload.put("command", "sync:collection_changed");
final ExtendedJSONObject data = new ExtendedJSONObject();
final JSONArray collections = new JSONArray();
collections.add("clients");
data.put("collections", collections);
payload.put("data", data);
return payload;
}
@Override
public void handleRequestFailure(SyncStorageResponse response) {
BaseResource.consumeEntity(response); // We don't need the response at all, and any exception handling shouldn't need the response body.
@ -290,7 +366,7 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
Logger.debug(LOG_TAG, "Client upload failed. Aborting sync.");
if (!currentlyUploadingLocalRecord) {
toUpload.clear(); // These will be redownloaded.
modifiedClientsToUpload.clear(); // These will be redownloaded.
}
BaseResource.consumeEntity(response); // The exception thrown should need the response body.
session.abort(new HTTPFailureException(response), "Client upload failed.");
@ -474,19 +550,19 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
}
record.commands.add(jsonCommand);
}
toUpload.add(record);
modifiedClientsToUpload.add(record);
}
@SuppressWarnings("unchecked")
protected void uploadRemoteRecords() {
Logger.trace(LOG_TAG, "In uploadRemoteRecords. Uploading " + toUpload.size() + " records" );
Logger.trace(LOG_TAG, "In uploadRemoteRecords. Uploading " + modifiedClientsToUpload.size() + " records" );
for (ClientRecord r : toUpload) {
for (ClientRecord r : modifiedClientsToUpload) {
Logger.trace(LOG_TAG, ">> Uploading record " + r.guid + ": " + r.name);
}
if (toUpload.size() == 1) {
ClientRecord record = toUpload.get(0);
if (modifiedClientsToUpload.size() == 1) {
ClientRecord record = modifiedClientsToUpload.get(0);
Logger.debug(LOG_TAG, "Only 1 remote record to upload.");
Logger.debug(LOG_TAG, "Record last modified: " + record.lastModified);
CryptoRecord cryptoRecord = encryptClientRecord(record);
@ -498,7 +574,7 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
}
JSONArray cryptoRecords = new JSONArray();
for (ClientRecord record : toUpload) {
for (ClientRecord record : modifiedClientsToUpload) {
Logger.trace(LOG_TAG, "Record " + record.guid + " is being uploaded" );
CryptoRecord cryptoRecord = encryptClientRecord(record);
@ -547,7 +623,7 @@ public class SyncClientsEngineStage extends AbstractSessionManagingSyncStage {
public void clearRecordsToUpload() {
try {
getClientsDatabaseAccessor().wipeCommandsTable();
toUpload.clear();
modifiedClientsToUpload.clear();
} finally {
closeDataAccessor();
}

View File

@ -765,9 +765,9 @@ public class TestClientsEngineStage extends MockSyncClientsEngineStage {
final String expectedGUID = remoteRecord.guid;
this.addCommands(remoteRecord);
assertEquals(1, toUpload.size());
assertEquals(1, modifiedClientsToUpload.size());
final ClientRecord recordToUpload = toUpload.get(0);
final ClientRecord recordToUpload = modifiedClientsToUpload.get(0);
assertEquals(4, recordToUpload.commands.size());
assertEquals(expectedGUID, recordToUpload.guid);
assertEquals(null, recordToUpload.version);
@ -782,9 +782,9 @@ public class TestClientsEngineStage extends MockSyncClientsEngineStage {
final String expectedGUID = remoteRecord.guid;
this.addCommands(remoteRecord);
assertEquals(1, toUpload.size());
assertEquals(1, modifiedClientsToUpload.size());
final ClientRecord recordToUpload = toUpload.get(0);
final ClientRecord recordToUpload = modifiedClientsToUpload.get(0);
assertEquals(4, recordToUpload.commands.size());
assertEquals(expectedGUID, recordToUpload.guid);
assertEquals("12a1", recordToUpload.version);

View File

@ -6,6 +6,7 @@ package org.mozilla.gecko.fxa.login;
import android.text.TextUtils;
import org.mozilla.gecko.background.fxa.FxAccountClient;
import org.mozilla.gecko.background.fxa.FxAccountClient20;
import org.mozilla.gecko.background.fxa.FxAccountClient20.AccountStatusResponse;
import org.mozilla.gecko.background.fxa.FxAccountClient20.RequestDelegate;
import org.mozilla.gecko.background.fxa.FxAccountClient20.RecoveryEmailStatusResponse;
@ -23,6 +24,7 @@ import org.mozilla.gecko.sync.Utils;
import java.io.UnsupportedEncodingException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@ -216,4 +218,9 @@ public class MockFxAccountClient implements FxAccountClient {
FxAccountDevice[] devicesArray = devices.toArray(new FxAccountDevice[devices.size()]);
requestDelegate.handleSuccess(devicesArray);
}
@Override
public void notifyDevices(byte[] sessionToken, List<String> deviceIds, ExtendedJSONObject payload, Long TTL, RequestDelegate<ExtendedJSONObject> requestDelegate) {
requestDelegate.handleSuccess(new ExtendedJSONObject());
}
}

View File

@ -2494,7 +2494,7 @@ dnl ========================================================
if test -z "$gonkdir" ; then
case "$MOZ_BUILD_APP" in
mobile/android)
MOZ_ANDROID_SDK(23, 23.0.3)
MOZ_ANDROID_SDK(23, "23.0.3 23.0.1")
;;
esac
fi

View File

@ -417,7 +417,7 @@ def current_firefox_checkout(check_output, hg=None):
if hg and os.path.exists(hg_dir):
# Verify the hg repo is a Firefox repo by looking at rev 0.
try:
node = check_output([hg, 'log', '-r', '0', '-T', '{node}'], cwd=path)
node = check_output([hg, 'log', '-r', '0', '--template', '{node}'], cwd=path)
if node in HG_ROOT_REVISIONS:
return 'hg'
# Else the root revision is different. There could be nested

View File

@ -73,6 +73,7 @@ class CentOSFedoraBootstrapper(BaseBootstrapper):
]
self.mobile_android_packages += [
'java-1.8.0-openjdk-devel',
'ncurses-devel.i686',
'libstdc++.i686',
'zlib-devel.i686',
@ -130,6 +131,15 @@ class CentOSFedoraBootstrapper(BaseBootstrapper):
ndk_path=self.ndk_path, ndk_url=self.ndk_url,
artifact_mode=artifact_mode)
# Most recent version of build-tools appears to be 23.0.1 on Fedora
packages = [p for p in android.ANDROID_PACKAGES if not p.startswith('build-tools')]
packages.append('build-tools-23.0.1')
# 3. We expect the |android| tool to be at
# ~/.mozbuild/android-sdk-linux/tools/android.
android_tool = os.path.join(self.sdk_path, 'tools', 'android')
android.ensure_android_packages(android_tool=android_tool, packages=packages)
def suggest_mobile_android_mozconfig(self, artifact_mode=False):
import android
android.suggest_mozconfig(sdk_path=self.sdk_path,

View File

@ -49,7 +49,7 @@ class VCSFiles(object):
def by_rev(self, rev):
if self.is_hg:
return self._run(['hg', 'log', '-T', '{files % "\\n{file}"}', '-r', rev])
return self._run(['hg', 'log', '--template', '{files % "\\n{file}"}', '-r', rev])
elif self.is_git:
return self._run(['git', 'diff', '--name-only', rev])
return []

View File

@ -16,7 +16,8 @@ class TestAppInfo(FirefoxTestCase):
self.assertEqual(self.appinfo.name, version_info['application_name'])
self.assertEqual(self.appinfo.vendor, version_info['application_vendor'])
self.assertEqual(self.appinfo.version, version_info['application_version'])
self.assertEqual(self.appinfo.platformBuildID, version_info['platform_buildid'])
# Bug 1298328 - Platform buildid mismatch due to incremental builds
# self.assertEqual(self.appinfo.platformBuildID, version_info['platform_buildid'])
self.assertEqual(self.appinfo.platformVersion, version_info['platform_version'])
self.assertIsNotNone(self.appinfo.locale)
self.assertIsNotNone(self.appinfo.user_agent)

View File

@ -83,6 +83,23 @@ ExtensionTestUtils.loadExtension = function(ext)
},
};
// Mimic serialization of functions as done in `Extension.generateXPI` and
// `Extension.generateZipFile` because functions are dropped when `ext` object
// is sent to the main process via the message manager.
ext = Object.assign({}, ext);
if (ext.files) {
ext.files = Object.assign({}, ext.files);
for (let filename of Object.keys(ext.files)) {
let file = ext.files[filename];
if (typeof file == "function") {
ext.files[filename] = `(${file})();`
}
}
}
if (typeof ext.background == "function") {
ext.background = `(${ext.background})();`
}
var extension = SpecialPowers.loadExtension(ext, handler);
registerCleanup(() => {

View File

@ -107,12 +107,8 @@ test(function(t) {
assert_equals(getComputedStyle(div).opacity, '0.5',
'set currentTime same as endTime');
anim.currentTime = 9999;
assert_equals(getComputedStyle(div).opacity, '0.5',
'set currentTime during duration');
anim.currentTime = 10000;
assert_equals(getComputedStyle(div).opacity, '0.5',
assert_equals(getComputedStyle(div).opacity, '0',
'set currentTime after endTime');
}, 'change currentTime when fill forwards and endDelay is negative');

View File

@ -189,10 +189,10 @@ var gEndTimeTests = [
{ desc: "an non-zero duration and negative delay greater than active " +
"duration",
input: { duration: 1000, iterations: 2, delay: -3000 },
expected: -1000 },
expected: 0 },
{ desc: "a zero duration and negative delay",
input: { duration: 0, iterations: 2, delay: -1000 },
expected: -1000 }
expected: 0 }
];
gEndTimeTests.forEach(function(stest) {

View File

@ -100,7 +100,7 @@ test(function(t) {
var anim = createDiv(t).animate(null, { duration: 1000,
iterations: 2.3,
delay: 500,
endDelay: -3000,
endDelay: -2500,
fill: 'forwards' });
anim.finish();
assert_equals(anim.effect.getComputedTiming().currentIteration, 0);

View File

@ -73,7 +73,7 @@ test(function(t) {
var animation = createDiv(t).animate(null, { duration: 1, delay: -1 });
[ { currentTime: -2, phase: 'before' },
{ currentTime: -1, phase: 'active' },
{ currentTime: -1, phase: 'before' },
{ currentTime: 0, phase: 'after' } ]
.forEach(function(test) {
assert_phase_at_time(animation, test.phase, test.currentTime);
@ -121,7 +121,8 @@ test(function(t) {
var animation = createDiv(t).animate(null, { duration: 1, endDelay: -2 });
[ { currentTime: -2, phase: 'before' },
{ currentTime: -1, phase: 'after' } ]
{ currentTime: -1, phase: 'before' },
{ currentTime: 0, phase: 'after' } ]
.forEach(function(test) {
assert_phase_at_time(animation, test.phase, test.currentTime);
});
@ -133,8 +134,8 @@ test(function(t) {
delay: 1,
endDelay: -1 });
[ { currentTime: 0, phase: 'before' },
{ currentTime: 1, phase: 'active' },
[ { currentTime: 0, phase: 'before' },
{ currentTime: 1, phase: 'active' },
{ currentTime: 2, phase: 'after' } ]
.forEach(function(test) {
assert_phase_at_time(animation, test.phase, test.currentTime);
@ -147,8 +148,9 @@ test(function(t) {
delay: -1,
endDelay: -1 });
[ { currentTime: -2, phase: 'before' },
{ currentTime: -1, phase: 'after' } ]
[ { currentTime: -2, phase: 'before' },
{ currentTime: -1, phase: 'before' },
{ currentTime: 0, phase: 'after' } ]
.forEach(function(test) {
assert_phase_at_time(animation, test.phase, test.currentTime);
});
@ -160,8 +162,10 @@ test(function(t) {
delay: -1,
endDelay: -2 });
[ { currentTime: -3, phase: 'before' },
{ currentTime: -2, phase: 'after' } ]
[ { currentTime: -3, phase: 'before' },
{ currentTime: -2, phase: 'before' },
{ currentTime: -1, phase: 'before' },
{ currentTime: 0, phase: 'after' } ]
.forEach(function(test) {
assert_phase_at_time(animation, test.phase, test.currentTime);
});

View File

@ -29,7 +29,7 @@ const {
* debugging is actually working correctly end-to-end.
*/
function backgroundScript() {
function background() {
window.testThing = "test!";
browser.test.notifyPass("background script ran");
}
@ -37,11 +37,10 @@ function backgroundScript() {
const ID = "debug@tests.mozilla.org";
let extensionData = {
useAddonManager: "temporary",
background: "(" + backgroundScript.toString() + ")()",
background,
manifest: {
applications: {gecko: {id: ID}},
},
files: {},
};
add_task(function* () {

View File

@ -26,7 +26,7 @@ add_task(function* testAlertNotShownInBackgroundWindow() {
let extension = ExtensionTestUtils.loadExtension({
background: "new " + function() {
background: function() {
browser.test.log("background script executed");
alert("I am an alert in the background.");

View File

@ -17,7 +17,7 @@
const BASE = "http://mochi.test:8888/chrome/toolkit/components/extensions/test/mochitest";
add_task(function* test_contentscript() {
function backgroundScript() {
function background() {
browser.runtime.onMessage.addListener((msg) => {
if (msg == "loaded") {
browser.tabs.query({active: true, currentWindow: true}).then((tabs) => {
@ -47,10 +47,10 @@ add_task(function* test_contentscript() {
},
],
},
background: "(" + backgroundScript.toString() + ")()",
background,
files: {
"content_script.js": "(" + contentScript.toString() + ")()",
"content_script.js": contentScript,
},
};

View File

@ -25,10 +25,10 @@ function createEventPageExtension(eventPage) {
"background": eventPage,
},
files: {
"event-page-script.js": `(${eventPageScript})()`,
"event-page-script.js": eventPageScript,
"event-page.html": `<html><head>
<meta charset="utf-8">
<script src="event-page-script.js"></${"script"}>
<script src="event-page-script.js"><\/script>
</head></html>`,
},
});

View File

@ -27,7 +27,7 @@ add_task(function* testShutdownCleanup() {
"GlobalManager start as not initialized");
let extension = ExtensionTestUtils.loadExtension({
background: "new " + function() {
background: function() {
browser.test.notifyPass("background page loaded");
},
});

View File

@ -106,7 +106,7 @@ add_task(function* test_uninstall() {
});
let extension = ExtensionTestUtils.loadExtension({
background: `(${writeData})()`,
background: writeData,
manifest: {
applications: {gecko: {id: ID}},
permissions: ["storage"],
@ -125,7 +125,7 @@ add_task(function* test_uninstall() {
// it is actually a meaningful test!
yield SpecialPowers.popPrefEnv();
extension = ExtensionTestUtils.loadExtension({
background: `(${readData})()`,
background: readData,
manifest: {
applications: {gecko: {id: ID}},
permissions: ["storage"],
@ -143,7 +143,7 @@ add_task(function* test_uninstall() {
// Read again. This time, our data should be gone.
extension = ExtensionTestUtils.loadExtension({
background: `(${readData})()`,
background: readData,
manifest: {
applications: {gecko: {id: ID}},
permissions: ["storage"],

Some files were not shown because too many files have changed in this diff Show More