Merge last green changeset of mozilla-inbound to mozilla-central

This commit is contained in:
Ed Morley 2011-11-24 15:50:30 +00:00
commit 6603663db5
82 changed files with 1272 additions and 943 deletions

View File

@ -404,6 +404,12 @@ public:
//////////////////////////////////////////////////////////////////////////////
// Downcasting and types
// True when this accessible wraps an HTML <abbr> or <acronym> element.
// Used so the title attribute can supply the accessible name for these tags.
inline bool IsAbbreviation() const
{
  if (!mContent->IsHTML())
    return false;
  return mContent->Tag() == nsGkAtoms::abbr ||
         mContent->Tag() == nsGkAtoms::acronym;
}
inline bool IsApplication() const { return mFlags & eApplicationAccessible; }
bool IsAutoComplete() const { return mFlags & eAutoCompleteAccessible; }

View File

@ -124,6 +124,7 @@ nsHTMLTableCellAccessible::GetAttributesInternal(nsIPersistentProperties *aAttri
nsresult rv = nsHyperTextAccessibleWrap::GetAttributesInternal(aAttributes);
NS_ENSURE_SUCCESS(rv, rv);
// table-cell-index attribute
nsCOMPtr<nsIAccessibleTable> tableAcc(GetTableAccessible());
if (!tableAcc)
return NS_OK;
@ -139,6 +140,32 @@ nsHTMLTableCellAccessible::GetAttributesInternal(nsIPersistentProperties *aAttri
nsAutoString stringIdx;
stringIdx.AppendInt(idx);
nsAccUtils::SetAccAttr(aAttributes, nsGkAtoms::tableCellIndex, stringIdx);
// abbr attribute
// Pick up object attribute from abbr DOM element (a child of the cell) or
// from abbr DOM attribute.
nsAutoString abbrText;
if (GetChildCount() == 1) {
nsAccessible* abbr = FirstChild();
if (abbr->IsAbbreviation()) {
nsTextEquivUtils::
AppendTextEquivFromTextContent(abbr->GetContent()->GetFirstChild(),
&abbrText);
}
}
if (abbrText.IsEmpty())
mContent->GetAttr(kNameSpaceID_None, nsGkAtoms::abbr, abbrText);
if (!abbrText.IsEmpty())
nsAccUtils::SetAccAttr(aAttributes, nsGkAtoms::abbr, abbrText);
// axis attribute
nsAutoString axisText;
mContent->GetAttr(kNameSpaceID_None, nsGkAtoms::axis, axisText);
if (!axisText.IsEmpty())
nsAccUtils::SetAccAttr(aAttributes, nsGkAtoms::axis, axisText);
return NS_OK;
}

View File

@ -2059,6 +2059,25 @@ nsHyperTextAccessible::ScrollSubstringToPoint(PRInt32 aStartIndex,
////////////////////////////////////////////////////////////////////////////////
// nsAccessible public
nsresult
nsHyperTextAccessible::GetNameInternal(nsAString& aName)
{
  nsresult rv = nsAccessibleWrap::GetNameInternal(aName);
  NS_ENSURE_SUCCESS(rv, rv);

  // For HTML abbr and acronym elements the title attribute supplies a valid
  // name from markup; without this, the recursive name computation algorithm
  // would not pick their name up. See NS_OK_NAME_FROM_TOOLTIP.
  if (!aName.IsEmpty() || !IsAbbreviation())
    return NS_OK;

  nsAutoString title;
  if (mContent->GetAttr(kNameSpaceID_None, nsGkAtoms::title, title)) {
    title.CompressWhitespace();
    aName = title;
  }
  return NS_OK;
}
void
nsHyperTextAccessible::InvalidateChildren()
{

View File

@ -86,6 +86,7 @@ public:
// nsAccessible
virtual PRInt32 GetLevelInternal();
virtual nsresult GetAttributesInternal(nsIPersistentProperties *aAttributes);
virtual nsresult GetNameInternal(nsAString& aName);
virtual PRUint32 NativeRole();
virtual PRUint64 NativeState();

View File

@ -43,9 +43,12 @@
#include "nsIWinAccessNode.h"
#include "nsRootAccessible.h"
#include "mozilla/Preferences.h"
#include "nsArrayUtils.h"
#include "nsIDocShellTreeItem.h"
using namespace mozilla;
// Window property used by ipc related code in identifying accessible
// tab windows.
const PRUnichar* kPropNameTabContent = L"AccessibleTabWindow";
@ -185,13 +188,12 @@ nsWinUtils::HideNativeWindow(HWND aWnd)
// Returns whether native (HWND-per-tab) window emulation is in effect.
// When kModuleHandle is non-null, reports whether that specific assistive
// technology module is loaded in this process; when null, reports whether
// emulation applies at all.
// NOTE(review): the ternary arms yield an HMODULE that is implicitly
// converted to bool — non-null handle means "module present". Confirm this
// implicit conversion is intentional rather than a comparison against null.
bool
nsWinUtils::IsWindowEmulationFor(LPCWSTR kModuleHandle)
{
#ifdef MOZ_E10S_COMPAT
// Window emulation is always enabled in multiprocess Firefox.
return kModuleHandle ? ::GetModuleHandleW(kModuleHandle) : true;
#else
// Multiprocess mode (browser.tabs.remote) forces emulation on, same as above.
if (Preferences::GetBool("browser.tabs.remote"))
return kModuleHandle ? ::GetModuleHandleW(kModuleHandle) : true;
// Otherwise emulation is enabled only when one of the known screen readers
// (JAWS, Window-Eyes, Dolphin) is loaded in this process.
return kModuleHandle ? ::GetModuleHandleW(kModuleHandle) :
::GetModuleHandleW(kJAWSModuleHandle) ||
::GetModuleHandleW(kWEModuleHandle) ||
::GetModuleHandleW(kDolphinModuleHandle);
#endif
}

View File

@ -91,7 +91,10 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=558036
// html
testAttrs("radio", {"checkable" : "true"}, true);
testAttrs("checkbox", {"checkable" : "true"}, true);
testAttrs("draggable", {"draggable" : "true"}, true);
testAttrs("draggable", {"draggable" : "true"}, true);
testAttrs("th1", { "abbr": "SS#" }, true);
testAttrs("th2", { "abbr": "SS#" }, true);
testAttrs("th2", { "axis": "social" }, true);
SimpleTest.finish();
}
@ -175,5 +178,11 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=558036
<input id="radio" type="radio"/>
<input id="checkbox" type="checkbox"/>
<div id="draggable" draggable="true">Draggable div</div>
<table>
<tr>
<th id="th1"><abbr title="Social Security Number">SS#</abbr></th>
<th id="th2" abbr="SS#" axis="social">Social Security Number</th>
</tr>
</table>
</body>
</html>

View File

@ -132,6 +132,10 @@
// the name from its children.
testName("tablemenuitem", "menuitem 1");
// Get the name from child acronym title attribute rather than from
// acronym content.
testName("label_with_acronym", "O A T F World Wide Web");
//////////////////////////////////////////////////////////////////////////
// title attribute
@ -220,6 +224,11 @@
title="Accessible name is duplicated when input has a label associated uisng for/id and is wrapped around the input">
Mozilla Bug 669312
</a>
<a target="_blank"
href="https://bugzilla.mozilla.org/show_bug.cgi?id=704416"
title="HTML acronym and abbr names should be provided by @title">
Mozilla Bug 704416
</a>
<p id="display"></p>
<div id="content" style="display: none"></div>
<pre id="test">
@ -395,6 +404,11 @@
</tr>
</table>
<label id="label_with_acronym">
<acronym title="O A T F">OATF</acronym>
<abbr title="World Wide Web">WWW</abbr>
</label>
<!-- name from title attribute -->
<span id="btn_title" role="group" title="title">15</span>

View File

@ -5444,6 +5444,10 @@ public:
{
}
NS_IMETHOD_(void) NoteWeakMapping(void* map, void* key, void* val)
{
}
bool mFound;
private:

View File

@ -3,8 +3,6 @@ conformance/glsl/misc/glsl-function-nodes.html
conformance/glsl/misc/glsl-long-variable-names.html
conformance/glsl/misc/shader-with-256-character-identifier.frag.html
conformance/glsl/misc/shader-with-long-line.html
conformance/textures/texture-mips.html
conformance/textures/texture-npot.html
conformance/more/conformance/quickCheckAPI-S_V.html
conformance/more/functions/uniformfBadArgs.html
conformance/more/functions/uniformiBadArgs.html

View File

@ -166,10 +166,4 @@ void ScaleDisplayByAspectRatio(nsIntSize& aDisplay, float aAspectRatio);
#define MEDIA_THREAD_STACK_SIZE nsIThreadManager::DEFAULT_STACK_SIZE
#endif
// Android's audio backend is not available in content processes, so audio must
// be remoted to the parent chrome process.
#if defined(ANDROID)
#define REMOTE_AUDIO 1
#endif
#endif

View File

@ -66,6 +66,12 @@ using namespace mozilla;
#define SA_PER_STREAM_VOLUME 1
#endif
// Android's audio backend is not available in content processes, so audio must
// be remoted to the parent chrome process.
#if defined(ANDROID)
#define REMOTE_AUDIO 1
#endif
using mozilla::TimeStamp;
#ifdef PR_LOGGING
@ -115,6 +121,7 @@ class nsNativeAudioStream : public nsAudioStream
};
#if defined(REMOTE_AUDIO)
class nsRemotedAudioStream : public nsAudioStream
{
public:
@ -307,6 +314,7 @@ class AudioShutdownEvent : public nsRunnable
nsRefPtr<AudioChild> mAudioChild;
};
#endif
static mozilla::Mutex* gVolumeScaleLock = nsnull;
@ -608,6 +616,7 @@ PRInt32 nsNativeAudioStream::GetMinWriteSize()
return static_cast<PRInt32>(size / mChannels / sizeof(short));
}
#if defined(REMOTE_AUDIO)
nsRemotedAudioStream::nsRemotedAudioStream()
: mAudioChild(nsnull),
mFormat(FORMAT_S16_LE),
@ -759,3 +768,5 @@ nsRemotedAudioStream::IsPaused()
{
return mPaused;
}
#endif

View File

@ -767,29 +767,6 @@ void nsBuiltinDecoderStateMachine::AudioLoop()
NS_WARNING("Int overflow calculating audio end time");
break;
}
// The remoted audio stream does not block writes when the other end's buffers
// are full, so this sleep is necessary to stop the audio thread spinning its
// wheels. When bug 695612 is fixed, this block of code can be removed.
#if defined(REMOTE_AUDIO)
PRInt64 audioAhead = mAudioEndTime - GetMediaTime();
if (audioAhead > AMPLE_AUDIO_USECS &&
framesWritten > minWriteFrames)
{
// We've pushed enough audio onto the hardware that we've queued up a
// significant amount ahead of the playback position. The decode
// thread will be going to sleep, so we won't get any new audio
// anyway, so sleep until we need to push to the hardware again.
Wait(AMPLE_AUDIO_USECS / 2);
// Kick the decode thread; since above we only do a NotifyAll when
// we pop an audio chunk of the queue, the decoder won't wake up if
// we've got no more decoded chunks to push to the hardware. We can
// hit this condition if the last frame in the stream doesn't have
// it's EOS flag set, and the decode thread sleeps just after decoding
// that packet, but before realising there's no more packets.
mon.NotifyAll();
}
#endif
}
}
if (mReader->mAudioQueue.AtEndOfStream() &&

View File

@ -1839,13 +1839,18 @@ nsMediaCacheStream::NotifyDataEnded(nsresult aStatus)
mon.NotifyAll();
}
nsMediaCache::ResourceStreamIterator iter(mResourceID);
while (nsMediaCacheStream* stream = iter.Next()) {
if (NS_SUCCEEDED(aStatus)) {
// We read the whole stream, so remember the true length
stream->mStreamLength = mChannelOffset;
if (!mDidNotifyDataEnded) {
nsMediaCache::ResourceStreamIterator iter(mResourceID);
while (nsMediaCacheStream* stream = iter.Next()) {
if (NS_SUCCEEDED(aStatus)) {
// We read the whole stream, so remember the true length
stream->mStreamLength = mChannelOffset;
}
NS_ASSERTION(!stream->mDidNotifyDataEnded, "Stream already ended!");
stream->mDidNotifyDataEnded = true;
stream->mNotifyDataEndedStatus = aStatus;
stream->mClient->CacheClientNotifyDataEnded(aStatus);
}
stream->mClient->CacheClientNotifyDataEnded(aStatus);
}
}
@ -2121,45 +2126,62 @@ nsMediaCacheStream::Read(char* aBuffer, PRUint32 aCount, PRUint32* aBytes)
PRInt32 bytes;
PRUint32 channelBlock = PRUint32(mChannelOffset/BLOCK_SIZE);
PRInt32 cacheBlock = streamBlock < mBlocks.Length() ? mBlocks[streamBlock] : -1;
if (channelBlock == streamBlock && mStreamOffset < mChannelOffset) {
// We can just use the data in mPartialBlockBuffer. In fact we should
// use it rather than waiting for the block to fill and land in
// the cache.
bytes = NS_MIN<PRInt64>(size, mChannelOffset - mStreamOffset);
memcpy(aBuffer + count,
reinterpret_cast<char*>(mPartialBlockBuffer) + offsetInStreamBlock, bytes);
if (mCurrentMode == MODE_METADATA) {
mMetadataInPartialBlockBuffer = true;
}
gMediaCache->NoteBlockUsage(this, cacheBlock, mCurrentMode, TimeStamp::Now());
} else {
if (cacheBlock < 0) {
if (count > 0) {
// Some data has been read, so return what we've got instead of
// blocking
break;
}
if (cacheBlock < 0) {
// We don't have a complete cached block here.
// No data has been read yet, so block
mon.Wait();
if (mClosed) {
// We may have successfully read some data, but let's just throw
// that out.
return NS_ERROR_FAILURE;
}
continue;
}
gMediaCache->NoteBlockUsage(this, cacheBlock, mCurrentMode, TimeStamp::Now());
PRInt64 offset = cacheBlock*BLOCK_SIZE + offsetInStreamBlock;
nsresult rv = gMediaCache->ReadCacheFile(offset, aBuffer + count, size, &bytes);
if (NS_FAILED(rv)) {
if (count == 0)
return rv;
// If we did successfully read some data, may as well return it
if (count > 0) {
// Some data has been read, so return what we've got instead of
// blocking or trying to find a stream with a partial block.
break;
}
// See if the data is available in the partial cache block of any
// stream reading this resource. We need to do this in case there is
// another stream with this resource that has all the data to the end of
// the stream but the data doesn't end on a block boundary.
nsMediaCacheStream* streamWithPartialBlock = nsnull;
nsMediaCache::ResourceStreamIterator iter(mResourceID);
while (nsMediaCacheStream* stream = iter.Next()) {
if (PRUint32(stream->mChannelOffset/BLOCK_SIZE) == streamBlock &&
mStreamOffset < stream->mChannelOffset) {
streamWithPartialBlock = stream;
break;
}
}
if (streamWithPartialBlock) {
// We can just use the data in mPartialBlockBuffer. In fact we should
// use it rather than waiting for the block to fill and land in
// the cache.
bytes = NS_MIN<PRInt64>(size, streamWithPartialBlock->mChannelOffset - mStreamOffset);
memcpy(aBuffer,
reinterpret_cast<char*>(streamWithPartialBlock->mPartialBlockBuffer) + offsetInStreamBlock, bytes);
if (mCurrentMode == MODE_METADATA) {
streamWithPartialBlock->mMetadataInPartialBlockBuffer = true;
}
mStreamOffset += bytes;
count = bytes;
break;
}
// No data has been read yet, so block
mon.Wait();
if (mClosed) {
// We may have successfully read some data, but let's just throw
// that out.
return NS_ERROR_FAILURE;
}
continue;
}
gMediaCache->NoteBlockUsage(this, cacheBlock, mCurrentMode, TimeStamp::Now());
PRInt64 offset = cacheBlock*BLOCK_SIZE + offsetInStreamBlock;
nsresult rv = gMediaCache->ReadCacheFile(offset, aBuffer + count, size, &bytes);
if (NS_FAILED(rv)) {
if (count == 0)
return rv;
// If we did successfully read some data, may as well return it
break;
}
mStreamOffset += bytes;
count += bytes;
@ -2269,6 +2291,12 @@ nsMediaCacheStream::InitAsClone(nsMediaCacheStream* aOriginal)
// initially for a clone.
mCacheSuspended = true;
if (aOriginal->mDidNotifyDataEnded) {
mNotifyDataEndedStatus = aOriginal->mNotifyDataEndedStatus;
mDidNotifyDataEnded = true;
mClient->CacheClientNotifyDataEnded(mNotifyDataEndedStatus);
}
for (PRUint32 i = 0; i < aOriginal->mBlocks.Length(); ++i) {
PRInt32 cacheBlockIndex = aOriginal->mBlocks[i];
if (cacheBlockIndex < 0)

View File

@ -228,6 +228,7 @@ public:
nsMediaCacheStream(nsMediaChannelStream* aClient)
: mClient(aClient), mResourceID(0), mInitialized(false),
mIsSeekable(false), mCacheSuspended(false),
mDidNotifyDataEnded(false),
mUsingNullPrincipal(false),
mChannelOffset(0), mStreamLength(-1),
mStreamOffset(0), mPlaybackBytesPerSecond(10000),
@ -456,6 +457,8 @@ private:
// full and the priority of the data that would be received is lower
// than the priority of the data already in the cache
bool mCacheSuspended;
// True if CacheClientNotifyDataEnded has been called for this stream.
bool mDidNotifyDataEnded;
// True if mPrincipal is a null principal because we saw data from
// multiple origins
bool mUsingNullPrincipal;
@ -486,6 +489,8 @@ private:
// The number of times this stream has been Pinned without a
// corresponding Unpin
PRUint32 mPinCount;
// The status used when we did CacheClientNotifyDataEnded
nsresult mNotifyDataEndedStatus;
// The last reported read mode
ReadMode mCurrentMode;
// True if some data in mPartialBlockBuffer has been read as metadata

View File

@ -65,7 +65,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=479711
function createVideo(name, type, id) {
var v = document.createElement("video");
v.src = name;
// Make sure each video is a unique resource
v.src = name + "?" + id;
v._name = name;
v.id = id;
register(v);
@ -95,14 +96,19 @@ v.load();
// Load and move to another document.
netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
// Open a new window for the following test. We open it here instead of in
// the event handler to ensure that our document load event doesn't fire while
// window.open is spinning the event loop.
var w = window.open("", "testWindow", "width=400,height=400");
testWindows.push(w);
v = createVideo(test.name, test.type, "4");
v.onloadstart = function(e) {
// Opening a new window to do this is a bit annoying, but if we use an iframe here,
// Using a new window to do this is a bit annoying, but if we use an iframe here,
// delaying of the iframe's load event might interfere with the firing of our load event
// in some confusing way. So it's simpler just to open another window.
var w = window.open("", "testWindow", "width=400,height=400");
// in some confusing way. So it's simpler just to use another window.
w.document.body.appendChild(v);
testWindows.push(w);
};
v.load(); // load started while in this document, this doc's load will block until
// the video's finished loading (in the other document).

View File

@ -755,7 +755,7 @@ nsresult nsWebMReader::Seek(PRInt64 aTarget, PRInt64 aStartTime, PRInt64 aEndTim
{
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
LOG(PR_LOG_DEBUG, ("%p About to seek to %lldms", mDecoder, aTarget));
LOG(PR_LOG_DEBUG, ("%p About to seek to %fs", mDecoder, aTarget/1000000.0));
if (NS_FAILED(ResetDecode())) {
return NS_ERROR_FAILURE;
}

View File

@ -104,10 +104,10 @@ private:
*/
double mRemainingTime;
NS_DECL_EVENT_HANDLER(levelchange);
NS_DECL_EVENT_HANDLER(chargingchange);
NS_DECL_EVENT_HANDLER(chargingtimechange);
NS_DECL_EVENT_HANDLER(dischargingtimechange);
NS_DECL_EVENT_HANDLER(levelchange)
NS_DECL_EVENT_HANDLER(chargingchange)
NS_DECL_EVENT_HANDLER(chargingtimechange)
NS_DECL_EVENT_HANDLER(dischargingtimechange)
};
} // namespace battery

View File

@ -6,51 +6,8 @@
var gLoaded = false;
Cc["@mozilla.org/moz/jssubscript-loader;1"]
.getService(Ci.mozIJSSubScriptLoader)
.loadSubScript("chrome://mochitests/content/browser/toolkit/content/tests/browser/common/mockObjects.js",
this);
function MockFilePicker() { }
MockFilePicker.prototype = {
QueryInterface: XPCOMUtils.generateQI([Ci.nsIFilePicker]),
init: function() { },
appendFilters: function(val) { },
appendFilter: function(val) { },
// constants
modeOpen: 0,
modeSave: 1,
modeGetFolder: 2,
modeOpenMultiple: 3,
returnOK: 0,
returnCancel: 1,
returnReplace: 2,
filterAll: 1,
filterHTML: 2,
filterText: 4,
filterImages: 8,
filterXML: 16,
filterXUL: 32,
filterApps: 64,
filterAllowURLs: 128,
filterAudio: 256,
filterVideo: 512,
// properties
defaultExtension: "",
defaultString: "",
get displayDirectory() { return null; },
set displayDirectory(val) { },
file: null,
get files() { return null; },
get fileURL() { return null; },
filterIndex: 0,
show: function() {
var MockFilePicker = SpecialPowers.MockFilePicker;
function onShowCallback() {
gBrowser.selectedTab.linkedBrowser.addEventListener("load", function () {
gBrowser.selectedTab.linkedBrowser.removeEventListener("load", arguments.callee, true);
executeSoon(function() {
@ -66,16 +23,14 @@ MockFilePicker.prototype = {
curThread.processNextEvent(true);
}
return this.returnCancel;
},
MockFilePicker.returnValue = MockFilePicker.returnCancel;
};
function test() {
waitForExplicitFinish();
var mockFilePickerRegisterer =
new MockObjectRegisterer("@mozilla.org/filepicker;1", MockFilePicker);
mockFilePickerRegisterer.register();
MockFilePicker.reset();
MockFilePicker.showCallback = onShowCallback;
var prefs = Components.classes["@mozilla.org/preferences-service;1"]
.getService(Components.interfaces.nsIPrefBranch);
@ -87,7 +42,7 @@ function test() {
ok(true, "The popup has been blocked");
prefs.setBoolPref("dom.disable_open_during_load", gDisableLoadPref);
mockFilePickerRegisterer.unregister();
MockFilePicker.reset();
finish();
}, true)

View File

@ -8,7 +8,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=61098
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="/mozprefs.js"></script>
<script type="application/javascript" src="/tests/SimpleTest/EventUtils.js"></script>
<script type="application/javascript" src="/tests/SimpleTest/mockObjects.js"></script>
<script type="application/javascript" src="/tests/SimpleTest/MockObjects.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body onload="runtests();">

View File

@ -1,259 +1,259 @@
2008-12-05 Release
README file for en_US and en_CA Hunspell dictionaries
These dictionaries are created using the speller/make-hunspell-dict
dictionary in SCOWL, SVN revision 74.
The NOSUGGEST flag was added to certain taboo words. While I made an
honest attempt to flag the strongest taboo words with the NOSUGGEST
flag, I MAKE NO GUARANTEE THAT I FLAGGED EVERY POSSIBLE TABOO WORD.
The list was originally derived from Németh László, however I removed
some words which, while being considered taboo by some dictionaries,
are not really considered swear words in today's society.
You can find SCOWL and friend at http://wordlist.sourceforge.net/.
Bug reports should go to the Issue Tracker found on the previously
mentioned web site. General discussion should go to the
wordlist-devel at sourceforge net mailing list.
COPYRIGHT, SOURCES, and CREDITS:
The en_US and en_CA dictionaries come directly from SCOWL (up to level
60) and is thus under the same copyright of SCOWL. The affix file is
a heavily modified version of the original english.aff file which was
released as part of Geoff Kuenning's Ispell and as such is covered by
his BSD license. Part of SCOWL is also based on Ispell thus the
Ispell copyright is included with the SCOWL copyright.
The collective work is Copyright 2000-2007 by Kevin Atkinson as well
as any of the copyrights mentioned below:
Copyright 2000-2007 by Kevin Atkinson
Permission to use, copy, modify, distribute and sell these word
lists, the associated scripts, the output created from the scripts,
and its documentation for any purpose is hereby granted without fee,
provided that the above copyright notice appears in all copies and
that both that copyright notice and this permission notice appear in
supporting documentation. Kevin Atkinson makes no representations
about the suitability of this array for any purpose. It is provided
"as is" without express or implied warranty.
Alan Beale <biljir@pobox.com> also deserves special credit as he has,
in addition to providing the 12Dicts package and being a major
contributor to the ENABLE word list, given me an incredible amount of
feedback and created a number of special lists (those found in the
Supplement) in order to help improve the overall quality of SCOWL.
The 10 level includes the 1000 most common English words (according to
the Moby (TM) Words II [MWords] package), a subset of the 1000 most
common words on the Internet (again, according to Moby Words II), and
frequently class 16 from Brian Kelk's "UK English Wordlist
with Frequency Classification".
The MWords package was explicitly placed in the public domain:
The Moby lexicon project is complete and has
been place into the public domain. Use, sell,
rework, excerpt and use in any way on any platform.
Placing this material on internal or public servers is
also encouraged. The compiler is not aware of any
export restrictions so freely distribute world-wide.
You can verify the public domain status by contacting
Grady Ward
3449 Martha Ct.
Arcata, CA 95521-4884
grady@netcom.com
grady@northcoast.com
The "UK English Wordlist With Frequency Classification" is also in the
Public Domain:
Date: Sat, 08 Jul 2000 20:27:21 +0100
From: Brian Kelk <Brian.Kelk@cl.cam.ac.uk>
> I was wondering what the copyright status of your "UK English
> Wordlist With Frequency Classification" word list as it seems to
> be lacking any copyright notice.
There were many many sources in total, but any text marked
"copyright" was avoided. Locally-written documentation was one
source. An earlier version of the list resided in a filespace called
PUBLIC on the University mainframe, because it was considered public
domain.
Date: Tue, 11 Jul 2000 19:31:34 +0100
> So are you saying your word list is also in the public domain?
That is the intention.
The 20 level includes frequency classes 7-15 from Brian's word list.
The 35 level includes frequency classes 2-6 and words appearing in at
least 11 of 12 dictionaries as indicated in the 12Dicts package. All
words from the 12Dicts package have had likely inflections added via
my inflection database.
The 12Dicts package and Supplement is in the Public Domain.
The WordNet database, which was used in the creation of the
Inflections database, is under the following copyright:
This software and database is being provided to you, the LICENSEE,
by Princeton University under the following license. By obtaining,
using and/or copying this software and database, you agree that you
have read, understood, and will comply with these terms and
conditions.:
Permission to use, copy, modify and distribute this software and
database and its documentation for any purpose and without fee or
royalty is hereby granted, provided that you agree to comply with
the following copyright notice and statements, including the
disclaimer, and that the same appear on ALL copies of the software,
database and documentation, including modifications that you make
for internal use or for distribution.
WordNet 1.6 Copyright 1997 by Princeton University. All rights
reserved.
THIS SOFTWARE AND DATABASE IS PROVIDED "AS IS" AND PRINCETON
UNIVERSITY MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PRINCETON
UNIVERSITY MAKES NO REPRESENTATIONS OR WARRANTIES OF MERCHANT-
ABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE
LICENSED SOFTWARE, DATABASE OR DOCUMENTATION WILL NOT INFRINGE ANY
THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
The name of Princeton University or Princeton may not be used in
advertising or publicity pertaining to distribution of the software
and/or database. Title to copyright in this software, database and
any associated documentation shall at all times remain with
Princeton University and LICENSEE agrees to preserve same.
The 40 level includes words from Alan's 3esl list found in version 4.0
of his 12dicts package. Like his other stuff the 3esl list is also in the
public domain.
The 50 level includes Brian's frequency class 1, words appearing
in at least 5 of 12 of the dictionaries as indicated in the 12Dicts
package, and uppercase words in at least 4 of the previous 12
dictionaries. A decent number of proper names is also included: The
top 1000 male, female, and Last names from the 1990 Census report; a
list of names sent to me by Alan Beale; and a few names that I added
myself. Finally a small list of abbreviations not commonly found in
other word lists is included.
The name files from the Census report are a government document which I
don't think can be copyrighted.
The file special-jargon.50 uses common.lst and word.lst from the
"Unofficial Jargon File Word Lists" which is derived from "The Jargon
File". All of which is in the Public Domain. This file also contains
a few extra UNIX terms which are found in the file "unix-terms" in the
special/ directory.
The 55 level includes words from Alan's 2of4brif list found in version
4.0 of his 12dicts package. Like his other stuff the 2of4brif is also
in the public domain.
The 60 level includes Brian's frequency class 0 and all words
appearing in at least 2 of the 12 dictionaries as indicated by the
12Dicts package. A large number of names are also included: The 4,946
female names and the 3,897 male names from the MWords package.
The 70 level includes the 74,550 common dictionary words and the
21,986 names list from the MWords package The common dictionary words,
like those from the 12Dicts package, have had all likely inflections
added. The 70 level also included the 5desk list from version 4.0 of
the 12Dicts package, which is in the public domain
The 80 level includes the ENABLE word list, all the lists in the
ENABLE supplement package (except for ABLE), the "UK Advanced Cryptics
Dictionary" (UKACD), the list of signature words in from YAWL package,
and the 10,196 places list from the MWords package.
The ENABLE package, maintained by M\Cooper <thegrendel@theriver.com>,
is in the Public Domain:
The ENABLE master word list, WORD.LST, is herewith formally released
into the Public Domain. Anyone is free to use it or distribute it in
any manner they see fit. No fee or registration is required for its
use nor are "contributions" solicited (if you feel you absolutely
must contribute something for your own peace of mind, the authors of
the ENABLE list ask that you make a donation on their behalf to your
favorite charity). This word list is our gift to the Scrabble
community, as an alternate to "official" word lists. Game designers
may feel free to incorporate the WORD.LST into their games. Please
mention the source and credit us as originators of the list. Note
that if you, as a game designer, use the WORD.LST in your product,
you may still copyright and protect your product, but you may *not*
legally copyright or in any way restrict redistribution of the
WORD.LST portion of your product. This *may* under law restrict your
rights to restrict your users' rights, but that is only fair.
UKACD, by J Ross Beresford <ross@bryson.demon.co.uk>, is under the
following copyright:
Copyright (c) J Ross Beresford 1993-1999. All Rights Reserved.
The following restriction is placed on the use of this publication:
if The UK Advanced Cryptics Dictionary is used in a software package
or redistributed in any form, the copyright notice must be
prominently displayed and the text of this document must be included
verbatim.
There are no other restrictions: I would like to see the list
distributed as widely as possible.
The 95 level includes the 354,984 single words and 256,772 compound
words from the MWords package, ABLE.LST from the ENABLE Supplement,
and some additional words found in my part-of-speech database that
were not found anywhere else.
Accent information was taken from UKACD.
My VARCON package was used to create the American, British, and
Canadian word list.
Since the original word lists used in the VARCON package came
from the Ispell distribution they are under the Ispell copyright:
Copyright 1993, Geoff Kuenning, Granada Hills, CA
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All modifications to the source code must be clearly marked as
such. Binary redistributions based on modified source code
must be clearly marked as modified versions in the documentation
and/or other materials provided with the distribution.
(clause 4 removed with permission from Geoff Kuenning)
5. The name of Geoff Kuenning may not be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY GEOFF KUENNING AND CONTRIBUTORS ``AS
IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GEOFF
KUENNING OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
2008-12-05 Release
README file for en_US and en_CA Hunspell dictionaries
These dictionaries are created using the speller/make-hunspell-dict
dictionary in SCOWL, SVN revision 74.
The NOSUGGEST flag was added to certain taboo words. While I made an
honest attempt to flag the strongest taboo words with the NOSUGGEST
flag, I MAKE NO GUARANTEE THAT I FLAGGED EVERY POSSIBLE TABOO WORD.
The list was originally derived from Németh László, however I removed
some words which, while being considered taboo by some dictionaries,
are not really considered swear words in today's society.
You can find SCOWL and friend at http://wordlist.sourceforge.net/.
Bug reports should go to the Issue Tracker found on the previously
mentioned web site. General discussion should go to the
wordlist-devel at sourceforge net mailing list.
COPYRIGHT, SOURCES, and CREDITS:
The en_US and en_CA dictionaries come directly from SCOWL (up to level
60) and is thus under the same copyright of SCOWL. The affix file is
a heavily modified version of the original english.aff file which was
released as part of Geoff Kuenning's Ispell and as such is covered by
his BSD license. Part of SCOWL is also based on Ispell thus the
Ispell copyright is included with the SCOWL copyright.
The collective work is Copyright 2000-2007 by Kevin Atkinson as well
as any of the copyrights mentioned below:
Copyright 2000-2007 by Kevin Atkinson
Permission to use, copy, modify, distribute and sell these word
lists, the associated scripts, the output created from the scripts,
and its documentation for any purpose is hereby granted without fee,
provided that the above copyright notice appears in all copies and
that both that copyright notice and this permission notice appear in
supporting documentation. Kevin Atkinson makes no representations
about the suitability of this array for any purpose. It is provided
"as is" without express or implied warranty.
Alan Beale <biljir@pobox.com> also deserves special credit as he has,
in addition to providing the 12Dicts package and being a major
contributor to the ENABLE word list, given me an incredible amount of
feedback and created a number of special lists (those found in the
Supplement) in order to help improve the overall quality of SCOWL.
The 10 level includes the 1000 most common English words (according to
the Moby (TM) Words II [MWords] package), a subset of the 1000 most
common words on the Internet (again, according to Moby Words II), and
frequently class 16 from Brian Kelk's "UK English Wordlist
with Frequency Classification".
The MWords package was explicitly placed in the public domain:
The Moby lexicon project is complete and has
been place into the public domain. Use, sell,
rework, excerpt and use in any way on any platform.
Placing this material on internal or public servers is
also encouraged. The compiler is not aware of any
export restrictions so freely distribute world-wide.
You can verify the public domain status by contacting
Grady Ward
3449 Martha Ct.
Arcata, CA 95521-4884
grady@netcom.com
grady@northcoast.com
The "UK English Wordlist With Frequency Classification" is also in the
Public Domain:
Date: Sat, 08 Jul 2000 20:27:21 +0100
From: Brian Kelk <Brian.Kelk@cl.cam.ac.uk>
> I was wondering what the copyright status of your "UK English
> Wordlist With Frequency Classification" word list as it seems to
> be lacking any copyright notice.
There were many many sources in total, but any text marked
"copyright" was avoided. Locally-written documentation was one
source. An earlier version of the list resided in a filespace called
PUBLIC on the University mainframe, because it was considered public
domain.
Date: Tue, 11 Jul 2000 19:31:34 +0100
> So are you saying your word list is also in the public domain?
That is the intention.
The 20 level includes frequency classes 7-15 from Brian's word list.
The 35 level includes frequency classes 2-6 and words appearing in at
least 11 of 12 dictionaries as indicated in the 12Dicts package. All
words from the 12Dicts package have had likely inflections added via
my inflection database.
The 12Dicts package and Supplement is in the Public Domain.
The WordNet database, which was used in the creation of the
Inflections database, is under the following copyright:
This software and database is being provided to you, the LICENSEE,
by Princeton University under the following license. By obtaining,
using and/or copying this software and database, you agree that you
have read, understood, and will comply with these terms and
conditions.:
Permission to use, copy, modify and distribute this software and
database and its documentation for any purpose and without fee or
royalty is hereby granted, provided that you agree to comply with
the following copyright notice and statements, including the
disclaimer, and that the same appear on ALL copies of the software,
database and documentation, including modifications that you make
for internal use or for distribution.
WordNet 1.6 Copyright 1997 by Princeton University. All rights
reserved.
THIS SOFTWARE AND DATABASE IS PROVIDED "AS IS" AND PRINCETON
UNIVERSITY MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PRINCETON
UNIVERSITY MAKES NO REPRESENTATIONS OR WARRANTIES OF MERCHANT-
ABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE
LICENSED SOFTWARE, DATABASE OR DOCUMENTATION WILL NOT INFRINGE ANY
THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
The name of Princeton University or Princeton may not be used in
advertising or publicity pertaining to distribution of the software
and/or database. Title to copyright in this software, database and
any associated documentation shall at all times remain with
Princeton University and LICENSEE agrees to preserve same.
The 40 level includes words from Alan's 3esl list found in version 4.0
of his 12dicts package. Like his other stuff the 3esl list is also in the
public domain.
The 50 level includes Brian's frequency class 1, words appearing
in at least 5 of 12 of the dictionaries as indicated in the 12Dicts
package, and uppercase words in at least 4 of the previous 12
dictionaries. A decent number of proper names is also included: The
top 1000 male, female, and Last names from the 1990 Census report; a
list of names sent to me by Alan Beale; and a few names that I added
myself. Finally a small list of abbreviations not commonly found in
other word lists is included.
The name files from the Census report are government documents which I
don't think can be copyrighted.
The file special-jargon.50 uses common.lst and word.lst from the
"Unofficial Jargon File Word Lists" which is derived from "The Jargon
File". All of which is in the Public Domain. This file also contains
a few extra UNIX terms which are found in the file "unix-terms" in the
special/ directory.
The 55 level includes words from Alan's 2of4brif list found in version
4.0 of his 12dicts package. Like his other stuff the 2of4brif is also
in the public domain.
The 60 level includes Brian's frequency class 0 and all words
appearing in at least 2 of the 12 dictionaries as indicated by the
12Dicts package. A large number of names are also included: The 4,946
female names and the 3,897 male names from the MWords package.
The 70 level includes the 74,550 common dictionary words and the
21,986 names list from the MWords package. The common dictionary words,
like those from the 12Dicts package, have had all likely inflections
added. The 70 level also includes the 5desk list from version 4.0 of
the 12Dicts package, which is in the public domain.
The 80 level includes the ENABLE word list, all the lists in the
ENABLE supplement package (except for ABLE), the "UK Advanced Cryptics
Dictionary" (UKACD), the list of signature words from the YAWL package,
and the 10,196 places list from the MWords package.
The ENABLE package, maintained by M\Cooper <thegrendel@theriver.com>,
is in the Public Domain:
The ENABLE master word list, WORD.LST, is herewith formally released
into the Public Domain. Anyone is free to use it or distribute it in
any manner they see fit. No fee or registration is required for its
use nor are "contributions" solicited (if you feel you absolutely
must contribute something for your own peace of mind, the authors of
the ENABLE list ask that you make a donation on their behalf to your
favorite charity). This word list is our gift to the Scrabble
community, as an alternate to "official" word lists. Game designers
may feel free to incorporate the WORD.LST into their games. Please
mention the source and credit us as originators of the list. Note
that if you, as a game designer, use the WORD.LST in your product,
you may still copyright and protect your product, but you may *not*
legally copyright or in any way restrict redistribution of the
WORD.LST portion of your product. This *may* under law restrict your
rights to restrict your users' rights, but that is only fair.
UKACD, by J Ross Beresford <ross@bryson.demon.co.uk>, is under the
following copyright:
Copyright (c) J Ross Beresford 1993-1999. All Rights Reserved.
The following restriction is placed on the use of this publication:
if The UK Advanced Cryptics Dictionary is used in a software package
or redistributed in any form, the copyright notice must be
prominently displayed and the text of this document must be included
verbatim.
There are no other restrictions: I would like to see the list
distributed as widely as possible.
The 95 level includes the 354,984 single words and 256,772 compound
words from the MWords package, ABLE.LST from the ENABLE Supplement,
and some additional words found in my part-of-speech database that
were not found anywhere else.
Accent information was taken from UKACD.
My VARCON package was used to create the American, British, and
Canadian word list.
Since the original word lists used in the VARCON package came
from the Ispell distribution they are under the Ispell copyright:
Copyright 1993, Geoff Kuenning, Granada Hills, CA
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All modifications to the source code must be clearly marked as
such. Binary redistributions based on modified source code
must be clearly marked as modified versions in the documentation
and/or other materials provided with the distribution.
(clause 4 removed with permission from Geoff Kuenning)
5. The name of Geoff Kuenning may not be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY GEOFF KUENNING AND CONTRIBUTORS ``AS
IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GEOFF
KUENNING OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,33 +1,33 @@
README_mozilla
README_mozilla
The dictionary en-US.dic is generated by merging the following dictionaries, in the dictionary-sources subdirectory, using the merge-dictionaries bash script which
automatically patches, merges, sorts, and identifies duplicates.
hunspell-en_US-20081205.dic:
2008-12-05 Release, en_US Hunspell dictionary from http://wordlist.sourceforge.net/
These dictionaries are created using the speller/make-hunspell-dict
script in SCOWL, SVN revision 74.
upstream-hunspell.diff:
Mozilla-specific additions to the upstream Hunspell dictionary. Some of
these changes should be upstreamed, and others should probably just be removed
(bug 499444).
chromium_en_US.dic_delta:
Chromium wordlist autogenerated by Google,
svn - Revision 18580,
of http://src.chromium.org/svn/trunk/src/chrome/third_party/hunspell/dictionaries/en_US.dic_delta
upstream-chromium.diff:
Patches Chromium wordlist, to remove junk and words that are redundant with the
newer Hunspell dictionary we use.
mozilla-specific.txt:
Mozilla-specific words, separated out from Hunspell and Chromium word lists.
"Firefox" goes here. (See bug 237921)
automatically patches, merges, sorts, and identifies duplicates.
hunspell-en_US-20081205.dic:
2008-12-05 Release, en_US Hunspell dictionary from http://wordlist.sourceforge.net/
These dictionaries are created using the speller/make-hunspell-dict
script in SCOWL, SVN revision 74.
upstream-hunspell.diff:
Mozilla-specific additions to the upstream Hunspell dictionary. Some of
these changes should be upstreamed, and others should probably just be removed
(bug 499444).
chromium_en_US.dic_delta:
Chromium wordlist autogenerated by Google,
svn - Revision 18580,
of http://src.chromium.org/svn/trunk/src/chrome/third_party/hunspell/dictionaries/en_US.dic_delta
upstream-chromium.diff:
Patches Chromium wordlist, to remove junk and words that are redundant with the
newer Hunspell dictionary we use.
mozilla-specific.txt:
Mozilla-specific words, separated out from Hunspell and Chromium word lists.
"Firefox" goes here. (See bug 237921)

View File

@ -588,7 +588,7 @@ GLContextProviderCGL::CreateOffscreen(const gfxIntSize& aSize,
nsRefPtr<GLContextCGL> glContext;
NS_ENSURE_TRUE(Preferences::GetRootBranch(), nsnull);
const bool preferFBOs = Preferences::GetBool("cgl.prefer-fbo", false);
const bool preferFBOs = Preferences::GetBool("cgl.prefer-fbo", true);
if (!preferFBOs)
{
glContext = CreateOffscreenPBufferContext(aSize, actualFormat);

View File

@ -39,6 +39,7 @@
#include "nsIConsoleService.h"
#include <initguid.h>
#include "Nv3DVUtils.h"
#include "mozilla/Util.h"
DEFINE_GUID(CLSID_NV3DVStreaming,
0xf7747266, 0x777d, 0x4f61, 0xa1, 0x75, 0xdd, 0x5a, 0xdf, 0x1e, 0x37, 0xdf);
@ -127,15 +128,13 @@ Nv3DVUtils::SetDeviceInfo(IUnknown *devUnknown)
bool rv = false;
rv = m3DVStreaming->Nv3DVSetDevice(devUnknown);
if (NS_FAILED(rv)) {
if (rv) {
NS_WARNING("Nv3DVStreaming Nv3DVControl failed!");
return;
}
rv = m3DVStreaming->Nv3DVControl(NV_STEREO_MODE_RIGHT_LEFT, true, FIREFOX_3DV_APP_HANDLE);
if (NS_FAILED(rv)) {
NS_WARNING("Nv3DVStreaming Nv3DVControl failed!");
}
NS_WARN_IF_FALSE(!rv, "Nv3DVStreaming Nv3DVControl failed!");
}
/*
@ -148,8 +147,8 @@ Nv3DVUtils::SendNv3DVControl(Nv_Stereo_Mode eStereoMode, bool bEnableStereo, DWO
if (!m3DVStreaming)
return;
bool rv = m3DVStreaming->Nv3DVControl(eStereoMode, bEnableStereo, dw3DVAppHandle);
NS_ASSERTION(rv, "Nv3DVStreaming Nv3DVControl failed");
DebugOnly<bool> rv = m3DVStreaming->Nv3DVControl(eStereoMode, bEnableStereo, dw3DVAppHandle);
NS_WARN_IF_FALSE(!rv, "Nv3DVStreaming Nv3DVControl failed!");
}
/*
@ -162,8 +161,8 @@ Nv3DVUtils::SendNv3DVMetaData(unsigned int dwWidth, unsigned int dwHeight, HANDL
if (!m3DVStreaming)
return;
bool rv = m3DVStreaming->Nv3DVMetaData((DWORD)dwWidth, (DWORD)dwHeight, hSrcLuma, hDst);
NS_ASSERTION(rv, "Nv3DVStreaming Nv3DVMetaData failed!");
DebugOnly<bool> rv = m3DVStreaming->Nv3DVMetaData((DWORD)dwWidth, (DWORD)dwHeight, hSrcLuma, hDst);
NS_WARN_IF_FALSE(!rv, "Nv3DVStreaming Nv3DVMetaData failed!");
}
} /* namespace layers */

0
gfx/skia/include/core/SkAdvancedTypefaceMetrics.h Executable file → Normal file
View File

0
gfx/skia/include/pdf/SkBitSet.h Executable file → Normal file
View File

0
gfx/skia/include/views/SkOSWindow_iOS.h Executable file → Normal file
View File

0
gfx/skia/src/gpu/GrAllocator.h Executable file → Normal file
View File

0
gfx/skia/src/pdf/SkBitSet.cpp Executable file → Normal file
View File

0
gfx/skia/src/pdf/SkPDFFontImpl.h Executable file → Normal file
View File

0
gfx/skia/src/ports/SkFontHost_mac.cpp Executable file → Normal file
View File

0
gfx/skia/src/ports/SkFontHost_win.cpp Executable file → Normal file
View File

View File

@ -41,6 +41,7 @@
#include "jscompartment.h"
#include "jsfriendapi.h"
#include "jswrapper.h"
#include "jsweakmap.h"
#include "jsobjinlines.h"
@ -223,6 +224,12 @@ JS_GetCustomIteratorCount(JSContext *cx)
return sCustomIteratorCount;
}
void
js::TraceWeakMaps(WeakMapTracer *trc)
{
WeakMapBase::traceAllMappings(trc);
}
JS_FRIEND_API(void)
JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback)
{

View File

@ -205,6 +205,30 @@ JS_FRIEND_API(JSBool) obj_defineSetter(JSContext *cx, uintN argc, js::Value *vp)
extern JS_FRIEND_API(bool)
CheckUndeclaredVarAssignment(JSContext *cx, JSString *propname);
struct WeakMapTracer;
/*
* Weak map tracer callback, called once for every binding of every
* weak map that was live at the time of the last garbage collection.
*
* m will be NULL if the weak map is not contained in a JS Object.
*/
typedef void
(* WeakMapTraceCallback)(WeakMapTracer *trc, JSObject *m,
void *k, JSGCTraceKind kkind,
void *v, JSGCTraceKind vkind);
struct WeakMapTracer {
JSContext *context;
WeakMapTraceCallback callback;
WeakMapTracer(JSContext *cx, WeakMapTraceCallback cb)
: context(cx), callback(cb) {}
};
extern JS_FRIEND_API(void)
TraceWeakMaps(WeakMapTracer *trc);
/*
* Shadow declarations of JS internal structures, for access by inline access
* functions below. Do not use these structures in any other way. When adding

View File

@ -2034,6 +2034,18 @@ MarkContext(JSTracer *trc, JSContext *acx)
MarkRoot(trc, acx->iterValue, "iterValue");
}
void
MarkWeakReferences(GCMarker *gcmarker)
{
JS_ASSERT(gcmarker->isMarkStackEmpty());
while (WatchpointMap::markAllIteratively(gcmarker) ||
WeakMapBase::markAllIteratively(gcmarker) ||
Debugger::markAllIteratively(gcmarker)) {
gcmarker->drainMarkStack();
}
JS_ASSERT(gcmarker->isMarkStackEmpty());
}
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
@ -2514,22 +2526,17 @@ EndMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
{
JSRuntime *rt = cx->runtime;
gcmarker->setMarkColor(GRAY);
if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
(*op)(gcmarker, rt->gcGrayRootsData);
gcmarker->drainMarkStack();
gcmarker->setMarkColor(BLACK);
JS_ASSERT(gcmarker->isMarkStackEmpty());
MarkWeakReferences(gcmarker);
/*
* Mark weak roots.
*/
while (WatchpointMap::markAllIteratively(gcmarker) ||
WeakMapBase::markAllIteratively(gcmarker) ||
Debugger::markAllIteratively(gcmarker))
{
if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
gcmarker->setMarkColorGray();
(*op)(gcmarker, rt->gcGrayRootsData);
gcmarker->drainMarkStack();
MarkWeakReferences(gcmarker);
}
JS_ASSERT(gcmarker->isMarkStackEmpty());
rt->gcIncrementalTracer = NULL;
rt->gcStats.endPhase(gcstats::PHASE_MARK);
@ -2673,6 +2680,9 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
rt->gcIsNeeded = false;
rt->gcTriggerCompartment = NULL;
/* Reset weak map list. */
rt->gcWeakMapList = NULL;
/* Reset malloc counter. */
rt->resetGCMallocBytes();
@ -2935,7 +2945,6 @@ GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
rt->gcRegenShapes = false;
rt->setGCLastBytes(rt->gcBytes, gckind);
rt->gcCurrentCompartment = NULL;
rt->gcWeakMapList = NULL;
for (CompartmentsIter c(rt); !c.done(); c.next())
c->setGCLastBytes(c->gcBytes, gckind);

View File

@ -1678,12 +1678,10 @@ struct GCMarker : public JSTracer {
* edges in the GC heap. This invariant lets the CC not trace through black
* objects. If this invariant is violated, the cycle collector may free
* objects that are still reachable.
*
* We don't assert this yet, but we should.
*/
void setMarkColor(uint32 newColor) {
//JS_ASSERT(color == BLACK && newColor == GRAY);
color = newColor;
void setMarkColorGray() {
JS_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
void delayMarkingChildren(const void *thing);

View File

@ -78,6 +78,14 @@ WeakMapBase::sweepAll(JSTracer *tracer)
m->sweep(tracer);
}
void
WeakMapBase::traceAllMappings(WeakMapTracer *tracer)
{
JSRuntime *rt = tracer->context->runtime;
for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next)
m->traceMappings(tracer);
}
} /* namespace js */
typedef WeakMap<HeapPtr<JSObject>, HeapValue> ObjectValueMap;
@ -215,7 +223,7 @@ WeakMap_set(JSContext *cx, uintN argc, Value *vp)
ObjectValueMap *map = GetObjectMap(obj);
if (!map) {
map = cx->new_<ObjectValueMap>(cx);
map = cx->new_<ObjectValueMap>(cx, obj);
if (!map->init()) {
cx->delete_(map);
goto out_of_memory;

View File

@ -43,6 +43,7 @@
#define jsweakmap_h___
#include "jsapi.h"
#include "jsfriendapi.h"
#include "jscntxt.h"
#include "jsobj.h"
#include "jsgcmark.h"
@ -101,11 +102,15 @@ namespace js {
// provides default types for WeakMap's MarkPolicy template parameter.
template <class Type> class DefaultMarkPolicy;
// A policy template holding default tracing algorithms for common type combinations. This
// provides default types for WeakMap's TracePolicy template parameter.
template <class Key, class Value> class DefaultTracePolicy;
// Common base class for all WeakMap specializations. The collector uses this to call
// their markIteratively and sweep methods.
class WeakMapBase {
public:
WeakMapBase() : next(NULL) { }
WeakMapBase(JSObject *memOf) : memberOf(memOf), next(NULL) { }
virtual ~WeakMapBase() { }
void trace(JSTracer *tracer) {
@ -121,8 +126,8 @@ class WeakMapBase {
} else {
// If we're not actually doing garbage collection, the keys won't be marked
// nicely as needed by the true ephemeral marking algorithm --- custom tracers
// must use their own means for cycle detection. So here we do a conservative
// approximation: pretend all keys are live.
// such as the cycle collector must use their own means for cycle detection.
// So here we do a conservative approximation: pretend all keys are live.
if (tracer->eagerlyTraceWeakMaps)
nonMarkingTrace(tracer);
}
@ -140,12 +145,19 @@ class WeakMapBase {
// garbage collection.
static void sweepAll(JSTracer *tracer);
// Trace all delayed weak map bindings. Used by the cycle collector.
static void traceAllMappings(WeakMapTracer *tracer);
protected:
// Instance member functions called by the above. Instantiations of WeakMap override
// these with definitions appropriate for their Key and Value types.
virtual void nonMarkingTrace(JSTracer *tracer) = 0;
virtual bool markIteratively(JSTracer *tracer) = 0;
virtual void sweep(JSTracer *tracer) = 0;
virtual void traceMappings(WeakMapTracer *tracer) = 0;
// Object that this weak map is part of, if any.
JSObject *memberOf;
private:
// Link in a list of WeakMaps to mark iteratively and sweep in this garbage
@ -156,7 +168,8 @@ class WeakMapBase {
template <class Key, class Value,
class HashPolicy = DefaultHasher<Key>,
class KeyMarkPolicy = DefaultMarkPolicy<Key>,
class ValueMarkPolicy = DefaultMarkPolicy<Value> >
class ValueMarkPolicy = DefaultMarkPolicy<Value>,
class TracePolicy = DefaultTracePolicy<Key, Value> >
class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, public WeakMapBase {
private:
typedef HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy> Base;
@ -165,8 +178,8 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
public:
typedef typename Base::Range Range;
explicit WeakMap(JSRuntime *rt) : Base(rt) { }
explicit WeakMap(JSContext *cx) : Base(cx) { }
explicit WeakMap(JSRuntime *rt, JSObject *memOf=NULL) : Base(rt), WeakMapBase(memOf) { }
explicit WeakMap(JSContext *cx, JSObject *memOf=NULL) : Base(cx), WeakMapBase(memOf) { }
// Use with caution, as result can be affected by garbage collection.
Range nondeterministicAll() {
@ -191,9 +204,8 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
if (kp.isMarked(k)) {
markedAny |= vp.mark(v);
} else if (kp.overrideKeyMarking(k)) {
// We always mark wrapped natives. This will cause leaks, but WeakMap+CC
// integration is currently busted anyways. When WeakMap+CC integration is
// fixed in Bug 668855, XPC wrapped natives should only be marked during
// We always mark wrapped natives. This will cause leaks. Bug 680937
// will fix this so XPC wrapped natives are only marked during
// non-BLACK marking (ie grey marking).
kp.mark(k);
vp.mark(v);
@ -225,6 +237,13 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
}
#endif
}
// mapObj can be NULL, which means that the map is not part of a JSObject.
void traceMappings(WeakMapTracer *tracer) {
TracePolicy t(tracer);
for (Range r = Base::all(); !r.empty(); r.popFront())
t.traceMapping(memberOf, r.front().key, r.front().value);
}
};
template <>
@ -289,6 +308,42 @@ class DefaultMarkPolicy<HeapPtrScript> {
bool overrideKeyMarking(const HeapPtrScript &k) { return false; }
};
// Default trace policies
template <>
class DefaultTracePolicy<HeapPtrObject, HeapValue> {
private:
WeakMapTracer *tracer;
public:
DefaultTracePolicy(WeakMapTracer *t) : tracer(t) { }
void traceMapping(JSObject *m, const HeapPtr<JSObject> &k, HeapValue &v) {
if (v.isMarkable())
tracer->callback(tracer, m, k.get(), JSTRACE_OBJECT, v.toGCThing(), v.gcKind());
}
};
template <>
class DefaultTracePolicy<HeapPtrObject, HeapPtrObject> {
private:
WeakMapTracer *tracer;
public:
DefaultTracePolicy(WeakMapTracer *t) : tracer(t) { }
void traceMapping(JSObject *m, const HeapPtrObject &k, const HeapPtrObject &v) {
tracer->callback(tracer, m, k.get(), JSTRACE_OBJECT, v.get(), JSTRACE_OBJECT);
}
};
template <>
class DefaultTracePolicy<HeapPtrScript, HeapPtrObject> {
private:
WeakMapTracer *tracer;
public:
DefaultTracePolicy(WeakMapTracer *t) : tracer(t) { }
void traceMapping(JSObject *m, const HeapPtrScript &k, const HeapPtrObject &v) {
tracer->callback(tracer, m, k.get(), JSTRACE_SCRIPT, v.get(), JSTRACE_OBJECT);
}
};
}
extern JSObject *

View File

@ -449,6 +449,80 @@ NoteJSRoot(JSTracer *trc, void *thing, JSGCTraceKind kind)
}
#endif
struct NoteWeakMapChildrenTracer : public JSTracer
{
NoteWeakMapChildrenTracer(nsCycleCollectionTraversalCallback &cb)
: mCb(cb)
{
}
nsCycleCollectionTraversalCallback &mCb;
JSObject *mMap;
void *mKey;
};
static void
TraceWeakMappingChild(JSTracer *trc, void *thing, JSGCTraceKind kind)
{
JS_ASSERT(trc->callback == TraceWeakMappingChild);
NoteWeakMapChildrenTracer *tracer =
static_cast<NoteWeakMapChildrenTracer *>(trc);
if (kind == JSTRACE_STRING)
return;
if (!xpc_IsGrayGCThing(thing) && !tracer->mCb.WantAllTraces())
return;
if (AddToCCKind(kind)) {
tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey, thing);
} else {
JS_TraceChildren(trc, thing, kind);
}
}
struct NoteWeakMapsTracer : public js::WeakMapTracer
{
NoteWeakMapsTracer(JSContext *cx, js::WeakMapTraceCallback cb,
nsCycleCollectionTraversalCallback &cccb)
: js::WeakMapTracer(cx, cb), mCb(cccb), mChildTracer(cccb)
{
JS_TRACER_INIT(&mChildTracer, cx, TraceWeakMappingChild);
}
nsCycleCollectionTraversalCallback &mCb;
NoteWeakMapChildrenTracer mChildTracer;
};
static void
TraceWeakMapping(js::WeakMapTracer *trc, JSObject *m,
void *k, JSGCTraceKind kkind,
void *v, JSGCTraceKind vkind)
{
JS_ASSERT(trc->callback == TraceWeakMapping);
NoteWeakMapsTracer *tracer = static_cast<NoteWeakMapsTracer *>(trc);
if (vkind == JSTRACE_STRING)
return;
if (!xpc_IsGrayGCThing(v) && !tracer->mCb.WantAllTraces())
return;
// The cycle collector can only properly reason about weak maps if it can
// reason about the liveness of their keys, which in turn requires that
// the key can be represented in the cycle collector graph. All existing
// uses of weak maps use either objects or scripts as keys, which are okay.
JS_ASSERT(AddToCCKind(kkind));
// As an emergency fallback for non-debug builds, if the key is not
// representable in the cycle collector graph, we treat it as marked. This
// can cause leaks, but is preferable to ignoring the binding, which could
// cause the cycle collector to free live objects.
if (!AddToCCKind(kkind))
k = nsnull;
if (AddToCCKind(vkind)) {
tracer->mCb.NoteWeakMapping(m, k, v);
} else {
tracer->mChildTracer.mMap = m;
tracer->mChildTracer.mKey = k;
JS_TraceChildren(&tracer->mChildTracer, v, vkind);
}
}
nsresult
nsXPConnect::BeginCycleCollection(nsCycleCollectionTraversalCallback &cb,
bool explainLiveExpectedGarbage)
@ -509,6 +583,10 @@ nsXPConnect::BeginCycleCollection(nsCycleCollectionTraversalCallback &cb,
#endif
GetRuntime()->AddXPConnectRoots(mCycleCollectionContext->GetJSContext(), cb);
NoteWeakMapsTracer trc(mCycleCollectionContext->GetJSContext(),
TraceWeakMapping, cb);
js::TraceWeakMaps(&trc);
return NS_OK;
}
@ -851,10 +929,7 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
TraversalTracer trc(cb);
JS_TRACER_INIT(&trc, cx, NoteJSChild);
// When WeakMaps are properly integrated with the cycle
// collector in Bug 668855, don't eagerly trace weak maps when
// building the cycle collector graph.
// trc.eagerlyTraceWeakMaps = JS_FALSE;
trc.eagerlyTraceWeakMaps = JS_FALSE;
JS_TraceChildren(&trc, p, traceKind);
if (traceKind != JSTRACE_OBJECT || dontTraverse)

View File

@ -73,6 +73,7 @@ _CHROME_FILES = \
test_precisegc.xul \
test_nodelists.xul \
test_getweakmapkeys.xul \
test_weakmaps.xul \
$(NULL)
# Disabled until this test gets updated to test the new proxy based

View File

@ -0,0 +1,237 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/css" href="chrome://global/skin"?>
<?xml-stylesheet type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css"?>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=668855
-->
<window title="Mozilla Bug "
xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
<script type="application/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"/>
<!-- test results are displayed in the html:body -->
<body xmlns="http://www.w3.org/1999/xhtml">
<a href="https://bugzilla.mozilla.org/show_bug.cgi?id="
target="_blank">Mozilla Bug 668855</a>
</body>
<!-- test code goes here -->
<script type="application/javascript">
<![CDATA[
/** Test for Bug 668855 **/
let Cu = Components.utils;
let Ci = Components.interfaces;
/* Create a weak reference, with a single-element weak map. */
let make_weak_ref = function (obj) {
let m = new WeakMap;
m.set(obj, {});
return m;
};
/* Check to see if a weak reference is dead. */
let weak_ref_dead = function (r) {
return Cu.nondeterministicGetWeakMapKeys(r).length == 0;
}
/* Deterministically grab an arbitrary DOM element. */
let get_live_dom = function () {
let elems = document.getElementsByTagName("a");
return elems[0];
};
/* Test case from bug 653248, adapted into a standard test.
This is a dead cycle involving a DOM edge, so the cycle collector can free it. Keys and
values reachable only from XPConnect must be marked gray for this to work, and the cycle collector
must know the proper structure of the heap.
*/
let make_gray_loop = function () {
let map = new WeakMap;
let div = document.createElement("div");
let key = {};
div.setUserData("entrain", {m:map, k:key}, null);
//div.entrain = {m:map, k:key}; This is not sufficient to cause a leak in Fx9
map.set(key, div);
return make_weak_ref(map);
};
let weakref = make_gray_loop();
/* Weak map entries where the key is a dead XPCWrappedNative key should be removed.
map2 is a weak map that is black, so its entries will be visited during the
black marking phase. We then use a new div element as key. The div element is dead
after dead_xpc_key returns, so the weak map entry should be removed.
The simple wrapper deoptimization for XPCWrappedNative weak map keys implemented in
Bug 655297 will end up marking the key black, keeping the entry from being collected.
*/
let map2 = new WeakMap;
let dead_xpc_key = function () {
let div = document.createElement("div");
map2.set(div, 0);
};
dead_xpc_key();
/* Combinations of live and dead gray maps/keys. */
let basic_weak_ref = null;
let basic_map_weak_ref = null;
let black_map = new WeakMap;
let black_key = {};
let basic_unit_tests = function () {
let live_dom = get_live_dom();
let dead_dom = document.createElement("div");
let live_map = new WeakMap;
let dead_map = new WeakMap;
let live_key = {};
let dead_key = {};
// put the live/dead maps/keys into the appropriate DOM elements
live_dom.basic_unit_tests = {m:live_map, k:live_key};
dead_dom.setUserData("hook", {m:dead_map, k:dead_key}, null);
// dead_dom.hook = {m:dead_map, k:dead_key};
// Create a dead value, and a weak ref to it.
// The loop keeps dead_dom alive unless the CC is smart enough to kill it.
let dead_val = {loop:dead_dom};
basic_weak_ref = make_weak_ref(dead_val);
basic_map_weak_ref = make_weak_ref(dead_map);
// set up the actual entries. most will die.
live_map.set(live_key, {my_key:'live_live'});
live_map.set(dead_key, dead_val);
live_map.set(black_key, {my_key:'live_black'});
dead_map.set(live_key, dead_val);
dead_map.set(dead_key, dead_val);
dead_map.set(black_key, dead_val);
black_map.set(live_key, {my_key:'black_live'});
black_map.set(dead_key, dead_val);
black_map.set(black_key, {my_key:'black_black'});
};
basic_unit_tests();
let check_basic_unit = function () {
let live_dom = get_live_dom();
let live_map = live_dom.basic_unit_tests.m;
let live_key = live_dom.basic_unit_tests.k;
// check the dead elements
ok(weak_ref_dead(basic_weak_ref), "Dead value was kept alive.");
ok(weak_ref_dead(basic_map_weak_ref), "Dead map was kept alive.");
// check the live gray map
is(live_map.get(live_key).my_key, 'live_live',
"Live key should have the same value in live map.");
is(live_map.get(black_key).my_key, 'live_black',
"Black key should have the same value in live map.");
is(Cu.nondeterministicGetWeakMapKeys(live_map).length, 2,
"Live map should have two entries.");
// check the live black map
is(black_map.get(live_key).my_key, 'black_live',
"Live key should have the same value in black map.");
is(black_map.get(black_key).my_key, 'black_black',
"Black key should have the same value in black map.");
is(Cu.nondeterministicGetWeakMapKeys(black_map).length, 2,
"Black map should have two entries.");
};
/* live gray chained weak map entries, involving the cycle collector. */
let chainm = new WeakMap;
let num_chains = 5;
let nested_cc_maps = function () {
let dom = get_live_dom();
for(let i = 0; i < num_chains; i++) {
let k = {count:i};
dom.key = k;
dom0 = document.createElement("div");
chainm.set(k, {d:dom0});
dom = document.createElement("div");
dom0.appendChild(dom);
};
};
let check_nested_cc_maps = function () {
let dom = get_live_dom();
let all_ok = true;
for(let i = 0; i < num_chains; i++) {
let k = dom.key;
all_ok = all_ok && k.count == i;
dom = chainm.get(k).d.firstChild;
};
ok(all_ok, "Count was invalid on a key in chained weak map entries.");
};
nested_cc_maps();
/* black weak map, chained garbage cycle involving DOM */
let garbage_map = new WeakMap;
let chained_garbage_maps = function () {
let dom0 = document.createElement("div");
let dom = dom0;
for(let i = 0; i < num_chains; i++) {
let k = {};
dom.key = k;
let new_dom = document.createElement("div");
garbage_map.set(k, {val_child:new_dom});
dom = document.createElement("div");
new_dom.appendChild(dom);
};
// tie the knot
dom.appendChild(dom0);
};
chained_garbage_maps();
/* set up for running precise GC/CC then checking the results */
SimpleTest.waitForExplicitFinish();
Cu.schedulePreciseGC(function () {
window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils)
.cycleCollect();
window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils)
.garbageCollect();
window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils)
.garbageCollect();
ok(weak_ref_dead(weakref), "Garbage gray cycle should be collected.");
// this will fail without the XPCwrapped native key fix (Bug 680937)
todo_is(Cu.nondeterministicGetWeakMapKeys(map2).length, 0, "Dead XPCWrappedNative keys should be collected.");
check_nested_cc_maps();
is(Cu.nondeterministicGetWeakMapKeys(garbage_map).length, 0, "Chained garbage weak map entries should not leak.");
check_basic_unit();
SimpleTest.finish();
});
]]>
</script>
</window>

View File

@ -0,0 +1,64 @@
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 2011
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Dave Hyatt <hyatt@mozilla.org> (Original Author)
* Jan Varga <varga@ku.sk>
* Scott Johnson <sjohnson@mozilla.com>, Mozilla Corporation
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef nsITreeImageListener_h__
#define nsITreeImageListener_h__
// The interface for our image listener.
// {90586540-2D50-403e-8DCE-981CAA778444}
#define NS_ITREEIMAGELISTENER_IID \
{ 0x90586540, 0x2d50, 0x403e, { 0x8d, 0xce, 0x98, 0x1c, 0xaa, 0x77, 0x84, 0x44 } }
class nsITreeImageListener : public nsISupports
{
public:
NS_DECLARE_STATIC_IID_ACCESSOR(NS_ITREEIMAGELISTENER_IID)
NS_IMETHOD AddCell(PRInt32 aIndex, nsITreeColumn* aCol) = 0;
/**
* Clear the internal frame pointer to prevent dereferencing an object
* that no longer exists.
*/
NS_IMETHOD ClearFrame() = 0;
};
NS_DEFINE_STATIC_IID_ACCESSOR(nsITreeImageListener, NS_ITREEIMAGELISTENER_IID)
#endif

View File

@ -2157,11 +2157,9 @@ nsTreeBodyFrame::GetImage(PRInt32 aRowIndex, nsTreeColumn* aCol, bool aUseContex
// We either aren't done loading, or we're animating. Add our row as a listener for invalidations.
nsCOMPtr<imgIDecoderObserver> obs;
imgReq->GetDecoderObserver(getter_AddRefs(obs));
if (obs) {
static_cast<nsTreeImageListener*> (obs.get())->AddCell(aRowIndex, aCol);
}
nsCOMPtr<nsITreeImageListener> listener(do_QueryInterface(obs));
if (listener)
listener->AddCell(aRowIndex, aCol);
return NS_OK;
}
}

View File

@ -63,6 +63,7 @@
#include "nsScrollbarFrame.h"
#include "nsThreadUtils.h"
#include "mozilla/LookAndFeel.h"
#include "nsITreeImageListener.h"
class nsOverflowChecker;
class nsTreeImageListener;

View File

@ -42,7 +42,7 @@
#include "imgIRequest.h"
#include "imgIContainer.h"
NS_IMPL_ISUPPORTS2(nsTreeImageListener, imgIDecoderObserver, imgIContainerObserver)
NS_IMPL_ISUPPORTS3(nsTreeImageListener, imgIDecoderObserver, imgIContainerObserver, nsITreeImageListener)
nsTreeImageListener::nsTreeImageListener(nsTreeBodyFrame* aTreeFrame)
: mTreeFrame(aTreeFrame),
@ -92,7 +92,7 @@ NS_IMETHODIMP nsTreeImageListener::FrameChanged(imgIContainer *aContainer,
}
void
NS_IMETHODIMP
nsTreeImageListener::AddCell(PRInt32 aIndex, nsITreeColumn* aCol)
{
if (!mInvalidationArea) {
@ -114,6 +114,8 @@ nsTreeImageListener::AddCell(PRInt32 aIndex, nsITreeColumn* aCol)
mInvalidationArea->AddRow(aIndex);
}
}
return NS_OK;
}

View File

@ -45,9 +45,10 @@
#include "nsITreeColumns.h"
#include "nsStubImageDecoderObserver.h"
#include "nsTreeBodyFrame.h"
#include "nsITreeImageListener.h"
// This class handles image load observation.
class nsTreeImageListener : public nsStubImageDecoderObserver
class nsTreeImageListener : public nsStubImageDecoderObserver, public nsITreeImageListener
{
public:
nsTreeImageListener(nsTreeBodyFrame *aTreeFrame);
@ -63,6 +64,7 @@ public:
NS_IMETHOD FrameChanged(imgIContainer *aContainer,
const nsIntRect *aDirtyRect);
NS_IMETHOD AddCell(PRInt32 aIndex, nsITreeColumn* aCol);
NS_IMETHOD ClearFrame();
friend class nsTreeBodyFrame;
@ -70,7 +72,6 @@ public:
protected:
void UnsuppressInvalidation() { mInvalidationSuppressed = false; }
void Invalidate();
void AddCell(PRInt32 aIndex, nsITreeColumn* aCol);
private:
nsTreeBodyFrame* mTreeFrame;

View File

@ -47,10 +47,11 @@
#else
#define ALOG(args...)
#endif
#endif
#endif
/* Android implementation based on sydney_audio_mac.c */
#define NANOSECONDS_PER_SECOND 1000000000
#define NANOSECONDS_IN_MILLISECOND 1000000
#define MILLISECONDS_PER_SECOND 1000
@ -98,6 +99,8 @@ enum AudioFormatEncoding {
struct sa_stream {
jobject output_unit;
jbyteArray output_buf;
unsigned int output_buf_size;
unsigned int rate;
unsigned int channels;
@ -107,7 +110,6 @@ struct sa_stream {
int64_t timePlaying;
int64_t amountWritten;
unsigned int bufferSize;
unsigned int minBufferSize;
jclass at_class;
};
@ -178,6 +180,8 @@ sa_stream_create_pcm(
}
s->output_unit = NULL;
s->output_buf = NULL;
s->output_buf_size = 0;
s->rate = rate;
s->channels = channels;
s->isPaused = 0;
@ -187,7 +191,6 @@ sa_stream_create_pcm(
s->amountWritten = 0;
s->bufferSize = 0;
s->minBufferSize = 0;
*_s = s;
return SA_SUCCESS;
@ -226,11 +229,9 @@ sa_stream_open(sa_stream_t *s) {
return SA_ERROR_INVALID;
}
s->minBufferSize = minsz;
s->bufferSize = s->rate * s->channels * sizeof(int16_t);
if (s->bufferSize < s->minBufferSize) {
s->bufferSize = s->minBufferSize;
if (s->bufferSize < minsz) {
s->bufferSize = minsz;
}
jobject obj =
@ -258,9 +259,26 @@ sa_stream_open(sa_stream_t *s) {
}
s->output_unit = (*jenv)->NewGlobalRef(jenv, obj);
/* arbitrary buffer size. using a preallocated buffer avoids churning
the GC every audio write. */
s->output_buf_size = 4096 * s->channels * sizeof(int16_t);
jbyteArray buf = (*jenv)->NewByteArray(jenv, s->output_buf_size);
if (!buf) {
(*jenv)->ExceptionClear(jenv);
(*jenv)->DeleteGlobalRef(jenv, s->output_unit);
(*jenv)->DeleteGlobalRef(jenv, s->at_class);
(*jenv)->PopLocalFrame(jenv, NULL);
return SA_ERROR_OOM;
}
s->output_buf = (*jenv)->NewGlobalRef(jenv, buf);
(*jenv)->PopLocalFrame(jenv, NULL);
ALOG("%p - New stream %u %u bsz=%u min=%u", s, s->rate, s->channels, s->bufferSize, s->minBufferSize);
ALOG("%p - New stream %u %u bsz=%u min=%u obsz=%u", s, s->rate, s->channels,
s->bufferSize, minsz, s->output_buf_size);
return SA_SUCCESS;
}
@ -280,6 +298,7 @@ sa_stream_destroy(sa_stream_t *s) {
(*jenv)->CallVoidMethod(jenv, s->output_unit, at.stop);
(*jenv)->CallVoidMethod(jenv, s->output_unit, at.flush);
(*jenv)->CallVoidMethod(jenv, s->output_unit, at.release);
(*jenv)->DeleteGlobalRef(jenv, s->output_buf);
(*jenv)->DeleteGlobalRef(jenv, s->output_unit);
(*jenv)->DeleteGlobalRef(jenv, s->at_class);
free(s);
@ -309,51 +328,46 @@ sa_stream_write(sa_stream_t *s, const void *data, size_t nbytes) {
return SA_ERROR_OOM;
}
jbyteArray bytearray = (*jenv)->NewByteArray(jenv, nbytes);
if (!bytearray) {
(*jenv)->ExceptionClear(jenv);
(*jenv)->PopLocalFrame(jenv, NULL);
return SA_ERROR_OOM;
}
(*jenv)->SetByteArrayRegion(jenv, bytearray, 0, nbytes, data);
size_t wroteSoFar = 0;
jint retval;
int first = 1;
unsigned char *p = data;
jint r = 0;
size_t wrote = 0;
do {
retval = (*jenv)->CallIntMethod(jenv,
s->output_unit,
at.write,
bytearray,
wroteSoFar,
nbytes - wroteSoFar);
if (retval < 0) {
ALOG("%p - Write failed %d", s, retval);
size_t towrite = nbytes - wrote;
if (towrite > s->output_buf_size) {
towrite = s->output_buf_size;
}
(*jenv)->SetByteArrayRegion(jenv, s->output_buf, 0, towrite, p);
r = (*jenv)->CallIntMethod(jenv,
s->output_unit,
at.write,
s->output_buf,
0,
towrite);
if (r < 0) {
ALOG("%p - Write failed %d", s, r);
break;
}
wroteSoFar += retval;
/* android doesn't start playing until we explictly call play. */
if (first && !s->isPaused) {
/* AudioTrack::write is blocking when the AudioTrack is playing. When
it's not playing, it's a non-blocking call that will return a short
write when the buffer is full. Use a short write to indicate a good
time to start the AudioTrack playing. */
if (r != towrite) {
ALOG("%p - Buffer full, starting playback", s);
sa_stream_resume(s);
first = 0;
}
if (wroteSoFar != nbytes) {
struct timespec ts = {0, 100000000}; /* .10s */
nanosleep(&ts, NULL);
}
} while(wroteSoFar < nbytes);
p += r;
wrote += r;
} while (wrote < nbytes);
ALOG("%p - Wrote %u", s, nbytes);
s->amountWritten += nbytes;
(*jenv)->PopLocalFrame(jenv, NULL);
return retval < 0 ? SA_ERROR_INVALID : SA_SUCCESS;
return r < 0 ? SA_ERROR_INVALID : SA_SUCCESS;
}
@ -371,10 +385,14 @@ sa_stream_get_write_size(sa_stream_t *s, size_t *size) {
}
/* No android API for this, so estimate based on how much we have played and
* how much we have written.
*/
* how much we have written. */
*size = s->bufferSize - ((s->timePlaying * s->channels * s->rate * sizeof(int16_t) /
MILLISECONDS_PER_SECOND) - s->amountWritten);
/* Available buffer space can't exceed bufferSize. */
if (*size > s->bufferSize) {
*size = s->bufferSize;
}
ALOG("%p - Write Size tp=%lld aw=%u sz=%zu", s, s->timePlaying, s->amountWritten, *size);
return SA_SUCCESS;
@ -460,23 +478,24 @@ sa_stream_drain(sa_stream_t *s)
doesn't make it clear how much data must be written before a chunk of data is
played, and experimentation with short streams required filling the entire
allocated buffer. To guarantee that short streams (and the end of longer
streams) are audible, fill the remaining space in the AudioTrack with silence
before sleeping. Note that the sleep duration is calculated from the
duration of audio written before filling the buffer with silence. */
streams) are audible, write an entire bufferSize of silence before sleeping.
This guarantees the short write logic in sa_stream_write is hit and the
stream is playing before sleeping. Note that the sleep duration is
calculated from the duration of audio written before writing silence. */
size_t available;
sa_stream_get_write_size(s, &available);
void *p = calloc(1, available);
sa_stream_write(s, p, available);
void *p = calloc(1, s->bufferSize);
sa_stream_write(s, p, s->bufferSize);
free(p);
/* There is no way with the Android SDK to determine exactly how
long to playback. So estimate and sleep for the long. */
long x = (s->bufferSize - available) * 1000 / s->channels / s->rate /
sizeof(int16_t) * NANOSECONDS_IN_MILLISECOND;
ALOG("%p - Drain - flush %u, sleep for %ld ns", s, available, x);
long to playback. So estimate and sleep for that long. */
unsigned long long x = (s->bufferSize - available) * 1000 / s->channels / s->rate /
sizeof(int16_t) * NANOSECONDS_IN_MILLISECOND;
ALOG("%p - Drain - flush %u, sleep for %llu ns", s, available, x);
struct timespec ts = {0, x};
struct timespec ts = {x / NANOSECONDS_PER_SECOND, x % NANOSECONDS_PER_SECOND};
nanosleep(&ts, NULL);
return SA_SUCCESS;

View File

@ -570,7 +570,7 @@ Connection::initialize(nsIFile *aDatabaseFile,
srv = prepareStmt(mDBConn, NS_LITERAL_CSTRING("PRAGMA page_size"), &stmt);
if (srv == SQLITE_OK) {
if (SQLITE_ROW == stepStmt(stmt)) {
PRInt64 pageSize = ::sqlite3_column_int64(stmt, 0);
pageSize = ::sqlite3_column_int64(stmt, 0);
}
(void)::sqlite3_finalize(stmt);
}

View File

@ -0,0 +1,79 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
// This file tests that dbs of various page sizes are using the right cache
// size (bug 703113).
/**
* In order to change the cache size, we must open a DB, change the page
* size, create a table, close the DB, then re-open the DB. We then check
* the cache size after reopening.
*
* @param dbOpener
* function that opens the DB specified in file
* @param file
* file holding the database
* @param pageSize
* the DB's page size
* @param expectedCacheSize
* the expected cache size for the given page size
*/
function check_size(dbOpener, file, pageSize, expectedCacheSize)
{
// Open the DB, immediately change its page size.
let db = dbOpener(file);
db.executeSimpleSQL("PRAGMA page_size = " + pageSize);
// Check the page size change worked.
let stmt = db.createStatement("PRAGMA page_size");
do_check_true(stmt.executeStep());
do_check_eq(stmt.row.page_size, pageSize);
stmt.finalize();
// Create a simple table.
db.executeSimpleSQL("CREATE TABLE test ( id INTEGER PRIMARY KEY )");
// Close and re-open the DB.
db.close();
db = dbOpener(file);
// Check cache size is as expected.
let stmt = db.createStatement("PRAGMA cache_size");
do_check_true(stmt.executeStep());
do_check_eq(stmt.row.cache_size, expectedCacheSize);
stmt.finalize();
}
function new_file(name)
{
let file = dirSvc.get("ProfD", Ci.nsIFile);
file.append(name + ".sqlite");
do_check_false(file.exists());
return file;
}
function run_test()
{
// This is copied from the logic in Connection::initialize().
function cacheSize(pageSize) {
const DEFAULT_CACHE_SIZE_PAGES = 2000;
const MAX_CACHE_SIZE_BYTES = 4 * 1024 * 1024;
return Math.min(DEFAULT_CACHE_SIZE_PAGES, MAX_CACHE_SIZE_BYTES / pageSize);
}
let pageSizes = [
1024,
4096,
32768,
];
for (let i = 0; i < pageSizes.length; i++) {
let pageSize = pageSizes[i];
let expectedCacheSize = cacheSize(pageSize);
check_size(getDatabase,
new_file("shared" + pageSize), pageSize, expectedCacheSize);
check_size(getService().openUnsharedDatabase,
new_file("unshared" + pageSize), pageSize, expectedCacheSize);
}
}

View File

@ -29,7 +29,7 @@ function new_file(name)
function run_test()
{
check_size(getDatabase(new_file("shared32k.sqlite")));
check_size(getService().openUnsharedDatabase(new_file("unshared32k.sqlite")));
check_size(getDatabase(new_file("shared32k")));
check_size(getService().openUnsharedDatabase(new_file("unshared32k")));
}

View File

@ -67,7 +67,7 @@ function run_test()
do_test_pending();
// Change initial page size. Do it immediately since it would require an
// additional vacuum op to do it later. As a bonux this makes the page size
// additional vacuum op to do it later. As a bonus this makes the page size
// change test really fast since it only has to check results.
let conn = getDatabase(new_db_file("testVacuum"));
conn.executeSimpleSQL("PRAGMA page_size = 1024");

View File

@ -6,6 +6,7 @@ tail =
[test_bug-393952.js]
[test_bug-429521.js]
[test_bug-444233.js]
[test_cache_size.js]
[test_chunk_growth.js]
# Bug 676981: test fails consistently on Android
fail-if = os == "android"

View File

@ -23,8 +23,8 @@ mochikit.jar:
content/tests/SimpleTest/test.css (tests/SimpleTest/test.css)
content/tests/SimpleTest/TestRunner.js (tests/SimpleTest/TestRunner.js)
content/tests/SimpleTest/WindowSnapshot.js (tests/SimpleTest/WindowSnapshot.js)
content/tests/SimpleTest/mockObjects.js (../../toolkit/content/tests/browser/common/mockObjects.js)
content/tests/SimpleTest/docshell_helpers.js (../..//docshell/test/chrome/docshell_helpers.js)
content/tests/SimpleTest/MockObjects.js (tests/SimpleTest/MockObjects.js)
content/tests/SimpleTest/docshell_helpers.js (../../docshell/test/chrome/docshell_helpers.js)
% resource mochikit %modules/
modules/MockFilePicker.jsm (MockFilePicker.jsm)

View File

@ -53,8 +53,8 @@ _SIMPLETEST_FILES = MozillaLogger.js \
WindowSnapshot.js \
specialpowersAPI.js \
SpecialPowersObserverAPI.js \
$(DEPTH)/toolkit/content/tests/browser/common/mockObjects.js \
$(DEPTH)/docshell/test/chrome/docshell_helpers.js \
MockObjects.js \
$(DEPTH)/docshell/test/chrome/docshell_helpers.js \
$(NULL)
libs:: $(_SIMPLETEST_FILES)

View File

@ -56,7 +56,6 @@ PARALLEL_DIRS = \
mozapps/plugins \
mozapps/shared \
mozapps/update \
mozapps/xpinstall \
mozapps/webapps \
obsolete \
profile \

View File

@ -45,7 +45,6 @@ include $(DEPTH)/config/autoconf.mk
include $(topsrcdir)/config/rules.mk
_COMMON_FILES = \
mockObjects.js \
mockTransfer.js \
$(NULL)

View File

@ -36,8 +36,7 @@
Cc["@mozilla.org/moz/jssubscript-loader;1"]
.getService(Ci.mozIJSSubScriptLoader)
.loadSubScript("chrome://mochitests/content/browser/toolkit/content/tests/browser/common/mockObjects.js",
this);
.loadSubScript("chrome://mochikit/content/tests/SimpleTest/MockObjects.js", this);
var mockTransferCallback;

View File

@ -45,39 +45,7 @@ function loadUtilsScript() {
loader.loadSubScript("chrome://global/content/contentAreaUtils.js");
}
// Code borrowed from toolkit/components/downloadmgr/test/unit/head_download_manager.js
var dirSvc = Cc["@mozilla.org/file/directory_service;1"].
getService(Ci.nsIProperties);
var profileDir = null;
try {
profileDir = dirSvc.get("ProfD", Ci.nsIFile);
} catch (e) { }
if (!profileDir) {
// Register our own provider for the profile directory.
// It will simply return the current directory.
var provider = {
getFile: function(prop, persistent) {
persistent.value = true;
if (prop == "ProfD") {
return dirSvc.get("CurProcD", Ci.nsILocalFile);
} else if (prop == "DLoads") {
var file = dirSvc.get("CurProcD", Ci.nsILocalFile);
file.append("downloads.rdf");
return file;
}
print("*** Throwing trying to get " + prop);
throw Cr.NS_ERROR_FAILURE;
},
QueryInterface: function(iid) {
if (iid.equals(Ci.nsIDirectoryServiceProvider) ||
iid.equals(Ci.nsISupports)) {
return this;
}
throw Cr.NS_ERROR_NO_INTERFACE;
}
};
dirSvc.QueryInterface(Ci.nsIDirectoryService).registerProvider(provider);
}
do_get_profile();
let window = {};
function run_test()
@ -98,6 +66,8 @@ function run_test()
QueryInterface(Ci.nsIPrefBranch);
prefsService.setBoolPref("browser.privatebrowsing.keep_current_session", true);
let prefs = prefsService.getBranch("browser.download.");
let dirSvc = Cc["@mozilla.org/file/directory_service;1"].
getService(Ci.nsIProperties);
let tmpDir = dirSvc.get("TmpD", Ci.nsILocalFile);
function newDirectory() {
let dir = tmpDir.clone();
@ -185,5 +155,4 @@ function run_test()
// cleanup
prefsService.clearUserPref("browser.privatebrowsing.keep_current_session");
[dir1, dir2, dir3].forEach(function(dir) dir.remove(true));
dirSvc.QueryInterface(Ci.nsIDirectoryService).unregisterProvider(provider);
}

View File

@ -102,8 +102,8 @@
locale/@AB_CD@/mozapps/update/updates.dtd (%chrome/mozapps/update/updates.dtd)
locale/@AB_CD@/mozapps/update/updates.properties (%chrome/mozapps/update/updates.properties)
locale/@AB_CD@/mozapps/update/history.dtd (%chrome/mozapps/update/history.dtd)
locale/@AB_CD@/mozapps/xpinstall/xpinstallConfirm.dtd (%chrome/mozapps/xpinstall/xpinstallConfirm.dtd)
locale/@AB_CD@/mozapps/xpinstall/xpinstallConfirm.properties (%chrome/mozapps/xpinstall/xpinstallConfirm.properties)
locale/@AB_CD@/mozapps/xpinstall/xpinstallConfirm.dtd (%chrome/mozapps/extensions/xpinstallConfirm.dtd)
locale/@AB_CD@/mozapps/xpinstall/xpinstallConfirm.properties (%chrome/mozapps/extensions/xpinstallConfirm.properties)
% locale alerts @AB_CD@ %locale/@AB_CD@/alerts/
locale/@AB_CD@/alerts/notificationNames.properties (%chrome/alerts/notificationNames.properties)
% locale cookie @AB_CD@ %locale/@AB_CD@/cookie/

View File

@ -41,39 +41,7 @@ const Cc = Components.classes;
const Cu = Components.utils;
const Cr = Components.results;
// Code borrowed from toolkit/components/downloadmgr/test/unit/head_download_manager.js
var dirSvc = Cc["@mozilla.org/file/directory_service;1"].
getService(Ci.nsIProperties);
var profileDir = null;
try {
profileDir = dirSvc.get("ProfD", Ci.nsIFile);
} catch (e) { }
if (!profileDir) {
// Register our own provider for the profile directory.
// It will simply return the current directory.
var provider = {
getFile: function(prop, persistent) {
persistent.value = true;
if (prop == "ProfD") {
return dirSvc.get("CurProcD", Ci.nsILocalFile);
} else if (prop == "DLoads") {
var file = dirSvc.get("CurProcD", Ci.nsILocalFile);
file.append("downloads.rdf");
return file;
}
print("*** Throwing trying to get " + prop);
throw Cr.NS_ERROR_FAILURE;
},
QueryInterface: function(iid) {
if (iid.equals(Ci.nsIDirectoryServiceProvider) ||
iid.equals(Ci.nsISupports)) {
return this;
}
throw Cr.NS_ERROR_NO_INTERFACE;
}
};
dirSvc.QueryInterface(Ci.nsIDirectoryService).registerProvider(provider);
}
do_get_profile();
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
@ -174,7 +142,6 @@ function run_test()
// cleanup
prefsService.clearUserPref("browser.privatebrowsing.keep_current_session");
[dir1, dir2, dir3].forEach(function(dir) dir.remove(true));
dirSvc.QueryInterface(Ci.nsIDirectoryService).unregisterProvider(provider);
MockFilePicker.reset();
}

View File

@ -25,3 +25,7 @@ toolkit.jar:
content/mozapps/extensions/newaddon.xul (content/newaddon.xul)
content/mozapps/extensions/newaddon.js (content/newaddon.js)
content/mozapps/extensions/setting.xml (content/setting.xml)
* content/mozapps/xpinstall/xpinstallConfirm.xul (content/xpinstallConfirm.xul)
* content/mozapps/xpinstall/xpinstallConfirm.js (content/xpinstallConfirm.js)
content/mozapps/xpinstall/xpinstallConfirm.css (content/xpinstallConfirm.css)
* content/mozapps/xpinstall/xpinstallItem.xml (content/xpinstallItem.xml)

View File

@ -631,7 +631,7 @@ PKG_ARG = , "$(pkg)"
# Define packager macro to work around make 3.81 backslash issue (bug #339933)
define PACKAGER_COPY
$(PERL) -I$(MOZILLA_DIR)/xpinstall/packager -e 'use Packager; \
$(PERL) -I$(MOZILLA_DIR)/toolkit/mozapps/installer -e 'use Packager; \
Packager::Copy($1,$2,$3,$4,$5,$6,$7);'
endef
@ -685,7 +685,7 @@ ifdef MOZ_PKG_MANIFEST
$(call PACKAGER_COPY, "$(call core_abspath,$(DIST))",\
"$(call core_abspath,$(DIST)/$(MOZ_PKG_DIR))", \
"$(MOZ_PKG_MANIFEST)", "$(PKGCP_OS)", 1, 0, 1)
$(PERL) $(MOZILLA_DIR)/xpinstall/packager/xptlink.pl -s $(DIST) -d $(DIST)/xpt -f $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/components -v -x "$(XPIDL_LINK)"
$(PERL) $(MOZILLA_DIR)/toolkit/mozapps/installer/xptlink.pl -s $(DIST) -d $(DIST)/xpt -f $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/components -v -x "$(XPIDL_LINK)"
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/link-manifests.py \
$(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/components/components.manifest \
$(patsubst %,$(DIST)/manifests/%/components,$(MOZ_NONLOCALIZED_PKG_LIST))

View File

@ -95,7 +95,7 @@ if (! $return)
}
# ensure that Packager.pm is in @INC, since we might not be called from
# mozilla/xpinstall/packager.
# mozilla/toolkit/mozapps/installer.
$top_path = $0;
if ( $os eq "dos" ) {
$top_path =~ s/\\/\//g;

View File

@ -1,46 +0,0 @@
#
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
DEPTH = ../../..
topsrcdir = @top_srcdir@
srcdir = @srcdir@
VPATH = @srcdir@
include $(DEPTH)/config/autoconf.mk
include $(topsrcdir)/config/rules.mk

View File

@ -1,190 +0,0 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla Communicator client code, released
* March 31, 1998.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1998
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
var gManager;
var gBundle;
var gCanClose = false;
var gCancelled = false;
// implements nsIXPIProgressDialog
var progressHooks =
{
onStateChange: function( aIndex, aState, aValue )
{
const state = Components.interfaces.nsIXPIProgressDialog;
var status = document.getElementById("status"+aIndex);
var progress = document.getElementById("progress"+aIndex);
switch( aState ) {
case state.DOWNLOAD_START:
status.setAttribute("value",
gBundle.getString("progress.downloading"));
progress.setAttribute("value","0%");
break;
case state.DOWNLOAD_DONE:
status.setAttribute("value",
gBundle.getString("progress.downloaded"));
progress.setAttribute("value","100%");
break;
case state.INSTALL_START:
status.setAttribute("value",
gBundle.getString("progress.installing"));
progress.setAttribute("mode","undetermined");
break;
case state.INSTALL_DONE:
progress.setAttribute("mode","determined");
progress.hidden = true;
var msg;
try
{
msg = gBundle.getString("error"+aValue);
}
catch (e)
{
msg = gBundle.stringBundle.formatStringFromName(
"unknown.error", [aValue], 1 );
}
status.setAttribute("value",msg);
break;
case state.DIALOG_CLOSE:
// nsXPInstallManager is done with us, but we'll let users
// dismiss the dialog themselves so they can see the status
// (unless we're closing because the user cancelled)
document.getElementById("ok").disabled = false;
document.getElementById("cancel").disabled = true;
gCanClose = true;
if (gCancelled)
window.close();
break;
}
},
onProgress: function( aIndex, aValue, aMaxValue )
{
var percent = Math.round( 100 * (aValue/aMaxValue) );
var node = document.getElementById("progress"+aIndex);
node.setAttribute("value", percent);
},
QueryInterface: function( iid )
{
if (!iid.equals(Components.interfaces.nsISupports) &&
!iid.equals(Components.interfaces.nsIXPIProgressDialog))
throw Components.results.NS_ERROR_NO_INTERFACE;
return this;
}
}
function onLoad()
{
doSetOKCancel(dlgOK, dlgCancel);
document.getElementById("ok").disabled = true;
document.getElementById("cancel").focus();
gBundle = document.getElementById("xpinstallBundle");
var param = window.arguments[0].QueryInterface(
Components.interfaces.nsIDialogParamBlock );
if ( !param )
dump (" error getting param block interface \n");
var i = 0;
var row = 0;
var numElements = param.GetInt(1);
while ( i < numElements )
{
var moduleName = param.GetString(i++);
var URL = param.GetString(i++);
var certName = param.GetString(i++);
addTreeItem(row++, moduleName, URL);
}
gManager = window.arguments[1];
// inform nsXPInstallManager we're open for business
gManager.observe( progressHooks, "xpinstall-progress", "open" );
}
function addTreeItem(aRow, aName, aUrl)
{
// first column is the package name
var item = document.createElement("description");
item.setAttribute("class", "packageName");
item.setAttribute("id", "package"+aRow);
item.setAttribute("value", aName);
item.setAttribute("tooltiptext", aUrl);
// second column is the status
var status = document.createElement('description');
status.setAttribute("class", "packageStatus");
status.setAttribute("id", "status"+aRow);
status.setAttribute("value", gBundle.getString("progress.queued"));
// third row is a progress meter
var progress = document.createElement("progressmeter");
progress.setAttribute("class", "packageProgress");
progress.setAttribute("id", "progress"+aRow);
progress.setAttribute("value", "0%");
// create row and add it to the grid
var row = document.createElement("row");
row.appendChild(item);
row.appendChild(status);
row.appendChild(progress);
document.getElementById("xpirows").appendChild(row);
}
function dlgOK() { return true; }
function dlgCancel()
{
gCancelled = true;
if (gManager)
gManager.observe( progressHooks, "xpinstall-progress", "cancel");
// window is closed by native impl after cleanup
return gCanClose;
}

View File

@ -1,80 +0,0 @@
<?xml version="1.0"?>
<!-- ***** BEGIN LICENSE BLOCK *****
Version: MPL 1.1/GPL 2.0/LGPL 2.1
The contents of this file are subject to the Mozilla Public License Version
1.1 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.mozilla.org/MPL/
Software distributed under the License is distributed on an "AS IS" basis,
WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
for the specific language governing rights and limitations under the
License.
The Original Code is Mozilla Communicator client code.
The Initial Developer of the Original Code is
Netscape Communications Corporation.
Portions created by the Initial Developer are Copyright (C) 1998-2002
the Initial Developer. All Rights Reserved.
Contributor(s):
Don Bragg (dbragg@netscape.com) 12/08/1999
Blake Ross (BlakeR1234@aol.com) 7/05/2000
Daniel Veditz <dveditz@netscape.com> 1/2002
Alternatively, the contents of this file may be used under the terms of
either the GNU General Public License Version 2 or later (the "GPL"), or
the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
in which case the provisions of the GPL or the LGPL are applicable instead
of those above. If you wish to allow use of your version of this file only
under the terms of either the GPL or the LGPL, and not to allow others to
use your version of this file under the terms of the MPL, indicate your
decision by deleting the provisions above and replace them with the notice
and other provisions required by the GPL or the LGPL. If you do not delete
the provisions above, a recipient may use your version of this file under
the terms of any one of the MPL, the GPL or the LGPL.
***** END LICENSE BLOCK ***** -->
<?xml-stylesheet href="chrome://communicator/skin/xpinstall/xpinstall.css" type="text/css"?>
<?xul-overlay href="chrome://global/content/dialogOverlay.xul"?>
<!DOCTYPE window SYSTEM "chrome://communicator/locale/xpinstall/xpistatus.dtd" >
<window xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"
title="&progressTitle.label;"
onload="onLoad()"
onclose="return gCanClose"
id="statusDlg"
class="dialog"
style="width: 50em"
>
<script src="chrome://communicator/content/xpinstall/xpistatus.js"/>
<keyset id="dialogKeys"/>
<stringbundle id="xpinstallBundle" src="chrome://communicator/locale/xpinstall/xpinstall.properties"/>
<vbox id="mainProgressBox" flex="1">
<groupbox id="progressGroup" orient="vertical" flex="1">
<caption id="progressCaption" label="&group.caption;"/>
<grid id="progressGrid" flex="1">
<columns>
<column id="xpiColumn" flex="3"/>
<column id="statusColumn" flex="2"/>
<column id="progressColumn"/>
</columns>
<rows id="xpirows">
</rows>
</grid>
</groupbox>
<separator class="thin"/>
<hbox id="okCancelButtonsRight"/>
</vbox>
</window>

View File

@ -1,5 +0,0 @@
toolkit.jar:
* content/mozapps/xpinstall/xpinstallConfirm.xul (content/xpinstallConfirm.xul)
* content/mozapps/xpinstall/xpinstallConfirm.js (content/xpinstallConfirm.js)
content/mozapps/xpinstall/xpinstallConfirm.css (content/xpinstallConfirm.css)
* content/mozapps/xpinstall/xpinstallItem.xml (content/xpinstallItem.xml)

View File

@ -72,7 +72,7 @@ toolkit.jar:
* skin/classic/mozapps/update/updates.css (update/updates.css)
skin/classic/mozapps/viewsource/viewsource.css (viewsource/viewsource.css)
skin/classic/mozapps/xpinstall/xpinstallItemGeneric.png (extensions/extensionGeneric.png)
skin/classic/mozapps/xpinstall/xpinstallConfirm.css (xpinstall/xpinstallConfirm.css)
skin/classic/mozapps/xpinstall/xpinstallConfirm.css (extensions/xpinstallConfirm.css)
#ifdef MOZ_PLACES
skin/classic/mozapps/places/defaultFavicon.png (places/defaultFavicon.png)
skin/classic/mozapps/places/tagContainerIcon.png (places/tagContainerIcon.png)

View File

@ -76,7 +76,7 @@ toolkit.jar:
skin/classic/mozapps/update/downloadButtons.png (update/downloadButtons.png)
skin/classic/mozapps/update/updates.css (update/updates.css)
skin/classic/mozapps/viewsource/viewsource.css (viewsource/viewsource.css)
* skin/classic/mozapps/xpinstall/xpinstallConfirm.css (xpinstall/xpinstallConfirm.css)
* skin/classic/mozapps/xpinstall/xpinstallConfirm.css (extensions/xpinstallConfirm.css)
skin/classic/mozapps/xpinstall/xpinstallItemGeneric.png (extensions/extensionGeneric.png)
#ifdef XP_WIN
@ -156,6 +156,6 @@ toolkit.jar:
skin/classic/aero/mozapps/update/downloadButtons.png (update/downloadButtons-aero.png)
skin/classic/aero/mozapps/update/updates.css (update/updates.css)
skin/classic/aero/mozapps/viewsource/viewsource.css (viewsource/viewsource.css)
* skin/classic/aero/mozapps/xpinstall/xpinstallConfirm.css (xpinstall/xpinstallConfirm.css)
* skin/classic/aero/mozapps/xpinstall/xpinstallConfirm.css (extensions/xpinstallConfirm.css)
skin/classic/aero/mozapps/xpinstall/xpinstallItemGeneric.png (extensions/extensionGeneric-aero.png)
#endif

View File

@ -641,7 +641,6 @@ MAKEFILES_xulapp="
toolkit/mozapps/readstrings/Makefile
toolkit/mozapps/update/Makefile
toolkit/mozapps/update/updater/Makefile
toolkit/mozapps/xpinstall/Makefile
toolkit/profile/Makefile
toolkit/system/dbus/Makefile
toolkit/system/gnome/Makefile

View File

@ -714,12 +714,21 @@ private:
};
// One weak-map entry recorded while building the cycle-collector graph.
// ScanWeakMaps() uses these to propagate marking: when both mMap and mKey
// end up black, mVal must be marked black as well.
struct WeakMapping
{
  // map and key will be null if the corresponding objects are GC marked
  PtrInfo *mMap;
  PtrInfo *mKey;
  PtrInfo *mVal;
};
class GCGraphBuilder;
struct GCGraph
{
NodePool mNodes;
EdgePool mEdges;
nsTArray<WeakMapping> mWeakMaps;
PRUint32 mRootCount;
#ifdef DEBUG_CC
ReversedEdge *mReversedEdges;
@ -1083,6 +1092,7 @@ struct nsCycleCollector
void SelectPurple(GCGraphBuilder &builder);
void MarkRoots(GCGraphBuilder &builder);
void ScanRoots();
void ScanWeakMaps();
// returns whether anything was collected
bool CollectWhite(nsICycleCollectorListener *aListener);
@ -1116,6 +1126,7 @@ struct nsCycleCollector
{
mGraph.mNodes.Clear();
mGraph.mEdges.Clear();
mGraph.mWeakMaps.Clear();
mGraph.mRootCount = 0;
}
@ -1487,6 +1498,7 @@ class GCGraphBuilder : public nsCycleCollectionTraversalCallback
private:
NodePool::Builder mNodeBuilder;
EdgePool::Builder mEdgeBuilder;
nsTArray<WeakMapping> &mWeakMaps;
PLDHashTable mPtrToNodeMap;
PtrInfo *mCurrPi;
nsCycleCollectionLanguageRuntime **mRuntimes; // weak, from nsCycleCollector
@ -1513,6 +1525,7 @@ public:
return AddNode(s, aParticipant);
}
#endif
PtrInfo* AddWeakMapNode(void* node);
void Traverse(PtrInfo* aPtrInfo);
void SetLastChild();
@ -1543,6 +1556,7 @@ private:
nsCycleCollectionParticipant *participant);
NS_IMETHOD_(void) NoteScriptChild(PRUint32 langID, void *child);
NS_IMETHOD_(void) NoteNextEdgeName(const char* name);
NS_IMETHOD_(void) NoteWeakMapping(void *map, void *key, void *val);
};
GCGraphBuilder::GCGraphBuilder(GCGraph &aGraph,
@ -1550,6 +1564,7 @@ GCGraphBuilder::GCGraphBuilder(GCGraph &aGraph,
nsICycleCollectorListener *aListener)
: mNodeBuilder(aGraph.mNodes),
mEdgeBuilder(aGraph.mEdges),
mWeakMaps(aGraph.mWeakMaps),
mRuntimes(aRuntimes),
mListener(aListener)
{
@ -1812,6 +1827,34 @@ GCGraphBuilder::NoteNextEdgeName(const char* name)
}
}
// Adds a graph node for one endpoint (map, key, or value) of a weak
// mapping. Returns null when the GC thing is not gray and we are not
// tracing everything — such objects need no node in the graph.
PtrInfo*
GCGraphBuilder::AddWeakMapNode(void *node)
{
    NS_ASSERTION(node, "Weak map node should be non-null.");

    // Non-gray (already marked) things are skipped unless a full trace
    // was requested.
    if (!xpc_GCThingIsGrayCCThing(node) && !WantAllTraces())
        return nsnull;

    nsCycleCollectionParticipant *cp =
        mRuntimes[nsIProgrammingLanguage::JAVASCRIPT]->ToParticipant(node);
    NS_ASSERTION(cp, "Javascript runtime participant should be non-null.");
    return AddNode(node, cp);
}
// Records one weak mapping (map, key, value) in mWeakMaps for the later
// ScanWeakMaps() pass. If the value needs no graph node the mapping is
// irrelevant and nothing is recorded.
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteWeakMapping(void *map, void *key, void *val)
{
    PtrInfo *valNode = AddWeakMapNode(val);
    if (valNode) {
        // A null map or key pointer is stored as a null PtrInfo, which
        // ScanWeakMaps() treats as already-black.
        WeakMapping *entry = mWeakMaps.AppendElement();
        entry->mMap = map ? AddWeakMapNode(map) : nsnull;
        entry->mKey = key ? AddWeakMapNode(key) : nsnull;
        entry->mVal = valNode;
    }
}
static bool
AddPurpleRoot(GCGraphBuilder &builder, nsISupports *root)
{
@ -1947,6 +1990,38 @@ struct scanVisitor
PRUint32 &mWhiteNodeCount;
};
// Iterate over the WeakMaps. If we mark anything while iterating
// over the WeakMaps, we must iterate over all of the WeakMaps again.
void
nsCycleCollector::ScanWeakMaps()
{
bool anyChanged;
do {
anyChanged = false;
for (PRUint32 i = 0; i < mGraph.mWeakMaps.Length(); i++) {
WeakMapping *wm = &mGraph.mWeakMaps[i];
// If mMap or mKey are null, the original object was marked black.
uint32 mColor = wm->mMap ? wm->mMap->mColor : black;
uint32 kColor = wm->mKey ? wm->mKey->mColor : black;
PtrInfo *v = wm->mVal;
// All non-null weak mapping maps, keys and values are
// roots (in the sense of WalkFromRoots) in the cycle
// collector graph, and thus should have been colored
// either black or white in ScanRoots().
NS_ASSERTION(mColor != grey, "Uncolored weak map");
NS_ASSERTION(kColor != grey, "Uncolored weak map key");
NS_ASSERTION(v->mColor != grey, "Uncolored weak map value");
if (mColor == black && kColor == black && v->mColor != black) {
GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(mWhiteNodeCount)).Walk(v);
anyChanged = true;
}
}
} while (anyChanged);
}
void
nsCycleCollector::ScanRoots()
{
@ -1957,6 +2032,8 @@ nsCycleCollector::ScanRoots()
// NodePool::Enumerator.
GraphWalker<scanVisitor>(scanVisitor(mWhiteNodeCount)).WalkFromRoots(mGraph);
ScanWeakMaps();
#ifdef DEBUG_CC
// Sanity check: scan should have colored all grey nodes black or
// white. So we ensure we have no grey nodes at this point.
@ -2384,6 +2461,7 @@ public:
NS_IMETHOD_(void) NoteNativeChild(void *child,
nsCycleCollectionParticipant *participant) {}
NS_IMETHOD_(void) NoteNextEdgeName(const char* name) {}
NS_IMETHOD_(void) NoteWeakMapping(void *map, void *key, void *val) {}
};
char *Suppressor::sSuppressionList = nsnull;

View File

@ -108,6 +108,8 @@ public:
// flags.
NS_IMETHOD_(void) NoteNextEdgeName(const char* name) = 0;
NS_IMETHOD_(void) NoteWeakMapping(void *map, void *key, void *val) = 0;
enum {
// Values for flags: