Bug 641027 - Add snapshot-at-the-beginning write barriers for incremental GC (r=luke,bhackett)
This commit is contained in:
parent 5256c20b37
commit 213017df51

configure.in (11 lines changed)
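The new js/src/gc/Barrier.h added below documents the discipline this patch introduces: every barriered write is "pre-barrier; actual write; post-barrier", and fields are converted from Type * to HeapPtr<Type> (or HeapValue/HeapId) so the barrier runs automatically. The following is a minimal orientation sketch of that pattern only, not the actual HeapPtr/HeapValue code from the patch; Cell, mark(), inIncrementalGC and BarrieredPtr are made-up stand-ins for gc::Cell, the mark-stack push, and JSCompartment::needsBarrier().

    // Minimal sketch of the snapshot-at-the-beginning pre-barrier pattern.
    // All names here are illustrative stand-ins, not SpiderMonkey's.
    struct Cell {
        bool marked;
        Cell() : marked(false) {}
        void mark() { marked = true; }    // stand-in for pushing onto the mark stack
    };

    static bool inIncrementalGC = false;  // stand-in for comp->needsBarrier()

    template <class T>
    class BarrieredPtr {
        T *value;

        // Pre-barrier: the old referent may have been reachable in the GC's
        // start-of-collection snapshot, so mark it before it is overwritten.
        void pre() {
            if (inIncrementalGC && value)
                value->mark();
        }

      public:
        BarrieredPtr() : value(0) {}

        // First store into a freshly allocated object needs no pre-barrier.
        void init(T *v) { value = v; }

        BarrieredPtr &operator=(T *v) {
            pre();        // barrier on the value being replaced
            value = v;    // the actual write
            return *this; // a post-barrier would follow under generational GC
        }

        T *get() const { return value; }
    };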
@@ -7227,6 +7227,17 @@ if test -n "$MOZ_TRACEVIS"; then
AC_DEFINE(MOZ_TRACEVIS)
fi

dnl ========================================================
dnl = Use incremental GC
dnl ========================================================
JSGC_INCREMENTAL=1
MOZ_ARG_DISABLE_BOOL(gcincremental,
[ --disable-gcincremental Disable incremental GC],
JSGC_INCREMENTAL= )
if test -n "$JSGC_INCREMENTAL"; then
AC_DEFINE(JSGC_INCREMENTAL)
fi

dnl ========================================================
dnl ETW - Event Tracing for Windows
dnl ========================================================
@@ -501,8 +501,8 @@ nsHTMLCanvasElement::GetContext(const nsAString& aContextId,

JSObject *opts = JSVAL_TO_OBJECT(aContextOptions);
JSIdArray *props = JS_Enumerate(cx, opts);
for (int i = 0; props && i < props->length; ++i) {
jsid propid = props->vector[i];
for (int i = 0; props && i < JS_IdArrayLength(cx, props); ++i) {
jsid propid = JS_IdArrayGet(cx, props, i);
jsval propname, propval;
if (!JS_IdToValue(cx, propid, &propname) ||
!JS_GetPropertyById(cx, opts, propid, &propval))
@@ -912,7 +912,10 @@ class HashMapEntry

public:
HashMapEntry() : key(), value() {}
HashMapEntry(const Key &k, const Value &v) : key(k), value(v) {}

template<typename KeyInput, typename ValueInput>
HashMapEntry(const KeyInput &k, const ValueInput &v) : key(k), value(v) {}

HashMapEntry(MoveRef<HashMapEntry> rhs)
: key(Move(rhs->key)), value(Move(rhs->value)) { }
void operator=(MoveRef<HashMapEntry> rhs) {

@@ -1048,7 +1051,8 @@ class HashMap
return impl.lookupForAdd(l);
}

bool add(AddPtr &p, const Key &k, const Value &v) {
template<typename KeyInput, typename ValueInput>
bool add(AddPtr &p, const KeyInput &k, const ValueInput &v) {
Entry *pentry;
if (!impl.add(p, &pentry))
return false;

@@ -1074,7 +1078,8 @@ class HashMap
return true;
}

bool relookupOrAdd(AddPtr &p, const Key &k, const Value &v) {
template<typename KeyInput, typename ValueInput>
bool relookupOrAdd(AddPtr &p, const KeyInput &k, const ValueInput &v) {
return impl.relookupOrAdd(p, k, Entry(k, v));
}

@@ -1137,7 +1142,8 @@ class HashMap
}

/* Overwrite existing value with v. Return NULL on oom. */
Entry *put(const Key &k, const Value &v) {
template<typename KeyInput, typename ValueInput>
Entry *put(const KeyInput &k, const ValueInput &v) {
AddPtr p = lookupForAdd(k);
if (p) {
p->value = v;
@@ -250,6 +250,7 @@ EXPORTS_ds = \

EXPORTS_gc = \
Statistics.h \
Barrier.h \
$(NULL)

######################################################
@@ -4442,6 +4442,17 @@ if test -n "$MOZ_TRACEVIS"; then
fi
fi

dnl ========================================================
dnl = Use incremental GC
dnl ========================================================
JSGC_INCREMENTAL=1
MOZ_ARG_DISABLE_BOOL(gcincremental,
[ --disable-gcincremental Disable incremental GC],
JSGC_INCREMENTAL= )
if test -n "$JSGC_INCREMENTAL"; then
AC_DEFINE(JSGC_INCREMENTAL)
fi

dnl ========================================================
dnl = Use Valgrind
dnl ========================================================
js/src/frontend/BytecodeEmitter-inl.h (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
* vim: set ts=8 sw=4 et tw=99:
|
||||
*
|
||||
* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is Mozilla Communicator client code, released
|
||||
* March 31, 1998.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Netscape Communications Corporation.
|
||||
* Portions created by the Initial Developer are Copyright (C) 1998
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
||||
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
#ifndef BytecodeEmitter_inl_h__
|
||||
#define BytecodeEmitter_inl_h__
|
||||
|
||||
#include "frontend/ParseNode.h"
|
||||
#include "frontend/TokenStream.h"
|
||||
|
||||
namespace js {
|
||||
|
||||
inline
|
||||
TreeContext::TreeContext(Parser *prs)
|
||||
: flags(0), bodyid(0), blockidGen(0), parenDepth(0), yieldCount(0), argumentsCount(0),
|
||||
topStmt(NULL), topScopeStmt(NULL), blockChainBox(NULL), blockNode(NULL),
|
||||
decls(prs->context), parser(prs), yieldNode(NULL), argumentsNode(NULL), scopeChain_(NULL),
|
||||
lexdeps(prs->context), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
|
||||
innermostWith(NULL), bindings(prs->context), sharpSlotBase(-1)
|
||||
{
|
||||
prs->tc = this;
|
||||
}
|
||||
|
||||
/*
|
||||
* For functions the tree context is constructed and destructed a second
|
||||
* time during code generation. To avoid a redundant stats update in such
|
||||
* cases, we store uint16(-1) in maxScopeDepth.
|
||||
*/
|
||||
inline
|
||||
TreeContext::~TreeContext()
|
||||
{
|
||||
parser->tc = this->parent;
|
||||
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
#endif /* BytecodeEmitter_inl_h__ */
|
@ -71,10 +71,10 @@
|
||||
#include "vm/RegExpObject.h"
|
||||
|
||||
#include "jsatominlines.h"
|
||||
#include "jsobjinlines.h"
|
||||
#include "jsscopeinlines.h"
|
||||
#include "jsscriptinlines.h"
|
||||
|
||||
#include "frontend/BytecodeEmitter-inl.h"
|
||||
#include "frontend/ParseMaps-inl.h"
|
||||
|
||||
/* Allocation chunk counts, must be powers of two in general. */
|
||||
@ -7777,7 +7777,7 @@ CGObjectList::finish(JSObjectArray *array)
|
||||
JS_ASSERT(length <= INDEX_LIMIT);
|
||||
JS_ASSERT(length == array->length);
|
||||
|
||||
JSObject **cursor = array->vector + array->length;
|
||||
js::HeapPtrObject *cursor = array->vector + array->length;
|
||||
ObjectBox *objbox = lastbox;
|
||||
do {
|
||||
--cursor;
|
||||
@ -7792,7 +7792,7 @@ GCConstList::finish(JSConstArray *array)
|
||||
{
|
||||
JS_ASSERT(array->length == list.length());
|
||||
Value *src = list.begin(), *srcend = list.end();
|
||||
Value *dst = array->vector;
|
||||
HeapValue *dst = array->vector;
|
||||
for (; src != srcend; ++src, ++dst)
|
||||
*dst = *src;
|
||||
}
|
||||
|
@ -359,24 +359,8 @@ struct TreeContext { /* tree context for semantic checks */
|
||||
|
||||
void trace(JSTracer *trc);
|
||||
|
||||
TreeContext(Parser *prs)
|
||||
: flags(0), bodyid(0), blockidGen(0), parenDepth(0), yieldCount(0), argumentsCount(0),
|
||||
topStmt(NULL), topScopeStmt(NULL), blockChainBox(NULL), blockNode(NULL),
|
||||
decls(prs->context), parser(prs), yieldNode(NULL), argumentsNode(NULL), scopeChain_(NULL),
|
||||
lexdeps(prs->context), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
|
||||
innermostWith(NULL), bindings(prs->context), sharpSlotBase(-1)
|
||||
{
|
||||
prs->tc = this;
|
||||
}
|
||||
|
||||
/*
|
||||
* For functions the tree context is constructed and destructed a second
|
||||
* time during code generation. To avoid a redundant stats update in such
|
||||
* cases, we store uint16(-1) in maxScopeDepth.
|
||||
*/
|
||||
~TreeContext() {
|
||||
parser->tc = this->parent;
|
||||
}
|
||||
inline TreeContext(Parser *prs);
|
||||
inline ~TreeContext();
|
||||
|
||||
/*
|
||||
* js::BytecodeEmitter derives from js::TreeContext; however, only the
|
||||
|
@ -92,9 +92,9 @@
|
||||
#endif
|
||||
|
||||
#include "jsatominlines.h"
|
||||
#include "jsobjinlines.h"
|
||||
#include "jsscriptinlines.h"
|
||||
|
||||
#include "frontend/BytecodeEmitter-inl.h"
|
||||
#include "frontend/ParseMaps-inl.h"
|
||||
#include "frontend/ParseNode-inl.h"
|
||||
#include "vm/RegExpObject-inl.h"
|
||||
@ -246,7 +246,7 @@ Parser::trace(JSTracer *trc)
|
||||
{
|
||||
ObjectBox *objbox = traceListHead;
|
||||
while (objbox) {
|
||||
MarkObject(trc, *objbox->object, "parser.object");
|
||||
MarkRoot(trc, objbox->object, "parser.object");
|
||||
if (objbox->isFunctionBox)
|
||||
static_cast<FunctionBox *>(objbox)->bindings.trace(trc);
|
||||
objbox = objbox->traceLink;
|
||||
|
@ -45,6 +45,7 @@
|
||||
#include "frontend/BytecodeEmitter.h"
|
||||
#include "frontend/Parser.h"
|
||||
|
||||
#include "jsobjinlines.h"
|
||||
#include "jsfuninlines.h"
|
||||
|
||||
using namespace js;
|
||||
|
js/src/gc/Barrier-inl.h (new file, 259 lines)
@@ -0,0 +1,259 @@
|
||||
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
* vim: set ts=8 sw=4 et tw=78:
|
||||
*
|
||||
* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is SpiderMonkey global object code.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* the Mozilla Foundation.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2011
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
||||
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
#include "jsgcmark.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
|
||||
#ifndef jsgc_barrier_inl_h___
|
||||
#define jsgc_barrier_inl_h___
|
||||
|
||||
namespace js {
|
||||
|
||||
static JS_ALWAYS_INLINE void
|
||||
ClearValueRange(JSCompartment *comp, HeapValue *vec, uintN len, bool useHoles)
|
||||
{
|
||||
if (useHoles) {
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].set(comp, MagicValue(JS_ARRAY_HOLE));
|
||||
} else {
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].set(comp, UndefinedValue());
|
||||
}
|
||||
}
|
||||
|
||||
static JS_ALWAYS_INLINE void
|
||||
InitValueRange(HeapValue *vec, uintN len, bool useHoles)
|
||||
{
|
||||
if (useHoles) {
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].init(MagicValue(JS_ARRAY_HOLE));
|
||||
} else {
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].init(UndefinedValue());
|
||||
}
|
||||
}
|
||||
|
||||
static JS_ALWAYS_INLINE void
|
||||
DestroyValueRange(HeapValue *vec, uintN len)
|
||||
{
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].~HeapValue();
|
||||
}
|
||||
|
||||
inline
|
||||
HeapValue::HeapValue(const Value &v)
|
||||
: value(v)
|
||||
{
|
||||
post();
|
||||
}
|
||||
|
||||
inline
|
||||
HeapValue::HeapValue(const HeapValue &v)
|
||||
: value(v.value)
|
||||
{
|
||||
post();
|
||||
}
|
||||
|
||||
inline
|
||||
HeapValue::~HeapValue()
|
||||
{
|
||||
pre();
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::init(const Value &v)
|
||||
{
|
||||
value = v;
|
||||
post();
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::writeBarrierPre(const Value &value)
|
||||
{
|
||||
#ifdef JSGC_INCREMENTAL
|
||||
if (value.isMarkable()) {
|
||||
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
|
||||
writeBarrierPre(cell->compartment(), value);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::writeBarrierPost(const Value &value, void *addr)
|
||||
{
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::writeBarrierPre(JSCompartment *comp, const Value &value)
|
||||
{
|
||||
#ifdef JSGC_INCREMENTAL
|
||||
if (comp->needsBarrier())
|
||||
js::gc::MarkValueUnbarriered(comp->barrierTracer(), value, "write barrier");
|
||||
#endif
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::writeBarrierPost(JSCompartment *comp, const Value &value, void *addr)
|
||||
{
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::pre()
|
||||
{
|
||||
writeBarrierPre(value);
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::post()
|
||||
{
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::pre(JSCompartment *comp)
|
||||
{
|
||||
writeBarrierPre(comp, value);
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::post(JSCompartment *comp)
|
||||
{
|
||||
}
|
||||
|
||||
inline HeapValue &
|
||||
HeapValue::operator=(const Value &v)
|
||||
{
|
||||
pre();
|
||||
value = v;
|
||||
post();
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline HeapValue &
|
||||
HeapValue::operator=(const HeapValue &v)
|
||||
{
|
||||
pre();
|
||||
value = v.value;
|
||||
post();
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::set(JSCompartment *comp, const Value &v)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
if (value.isMarkable()) {
|
||||
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
|
||||
JS_ASSERT(cell->compartment() == comp ||
|
||||
cell->compartment() == comp->rt->atomsCompartment);
|
||||
}
|
||||
#endif
|
||||
|
||||
pre(comp);
|
||||
value = v;
|
||||
post(comp);
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapValue::boxNonDoubleFrom(JSValueType type, uint64 *out)
|
||||
{
|
||||
pre();
|
||||
value.boxNonDoubleFrom(type, out);
|
||||
post();
|
||||
}
|
||||
|
||||
inline
|
||||
HeapId::HeapId(jsid id)
|
||||
: value(id)
|
||||
{
|
||||
post();
|
||||
}
|
||||
|
||||
inline
|
||||
HeapId::~HeapId()
|
||||
{
|
||||
pre();
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapId::init(jsid id)
|
||||
{
|
||||
value = id;
|
||||
post();
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapId::pre()
|
||||
{
|
||||
#ifdef JSGC_INCREMENTAL
|
||||
if (JS_UNLIKELY(JSID_IS_OBJECT(value))) {
|
||||
JSObject *obj = JSID_TO_OBJECT(value);
|
||||
JSCompartment *comp = obj->compartment();
|
||||
if (comp->needsBarrier())
|
||||
js::gc::MarkObjectUnbarriered(comp->barrierTracer(), obj, "write barrier");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
inline void
|
||||
HeapId::post()
|
||||
{
|
||||
}
|
||||
|
||||
inline HeapId &
|
||||
HeapId::operator=(jsid id)
|
||||
{
|
||||
pre();
|
||||
value = id;
|
||||
post();
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline HeapId &
|
||||
HeapId::operator=(const HeapId &v)
|
||||
{
|
||||
pre();
|
||||
value = v.value;
|
||||
post();
|
||||
return *this;
|
||||
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
#endif /* jsgc_barrier_inl_h___ */
|
js/src/gc/Barrier.h (new file, 450 lines)
@@ -0,0 +1,450 @@
|
||||
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
* vim: set ts=8 sw=4 et tw=78:
|
||||
*
|
||||
* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is SpiderMonkey global object code.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* the Mozilla Foundation.
|
||||
* Portions created by the Initial Developer are Copyright (C) 2011
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
||||
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
#ifndef jsgc_barrier_h___
|
||||
#define jsgc_barrier_h___
|
||||
|
||||
#include "jsapi.h"
|
||||
#include "jscell.h"
|
||||
|
||||
#include "js/HashTable.h"
|
||||
|
||||
/*
|
||||
* A write barrier is a mechanism used by incremental or generational GCs to
|
||||
* ensure that every value that needs to be marked is marked. In general, the
|
||||
* write barrier should be invoked whenever a write can cause the set of things
|
||||
* traced through by the GC to change. This includes:
|
||||
* - writes to object properties
|
||||
* - writes to array slots
|
||||
* - writes to fields like JSObject::lastProp that we trace through
|
||||
* - writes to fields in private data, like JSGenerator::obj
|
||||
* - writes to non-markable fields like JSObject::private that point to
|
||||
* markable data
|
||||
* The last category is the trickiest. Even though the private pointer does not
|
||||
* point to a GC thing, changing the private pointer may change the set of
|
||||
* objects that are traced by the GC. Therefore it needs a write barrier.
|
||||
*
|
||||
* Every barriered write should have the following form:
|
||||
* <pre-barrier>
|
||||
* obj->field = value; // do the actual write
|
||||
* <post-barrier>
|
||||
* The pre-barrier is used for incremental GC and the post-barrier is for
|
||||
* generational GC.
|
||||
*
|
||||
* PRE-BARRIER
|
||||
*
|
||||
* To understand the pre-barrier, let's consider how incremental GC works. The
|
||||
* GC itself is divided into "slices". Between each slice, JS code is allowed to
|
||||
* run. Each slice should be short so that the user doesn't notice the
|
||||
* interruptions. In our GC, the structure of the slices is as follows:
|
||||
*
|
||||
* 1. ... JS work, which leads to a request to do GC ...
|
||||
* 2. [first GC slice, which performs all root marking and possibly more marking]
|
||||
* 3. ... more JS work is allowed to run ...
|
||||
* 4. [GC mark slice, which runs entirely in drainMarkStack]
|
||||
* 5. ... more JS work ...
|
||||
* 6. [GC mark slice, which runs entirely in drainMarkStack]
|
||||
* 7. ... more JS work ...
|
||||
* 8. [GC marking finishes; sweeping done non-incrementally; GC is done]
|
||||
* 9. ... JS continues uninterrupted now that the GC is finished ...
|
||||
*
|
||||
* Of course, there may be a different number of slices depending on how much
|
||||
* marking is to be done.
|
||||
*
|
||||
* The danger inherent in this scheme is that the JS code in steps 3, 5, and 7
|
||||
* might change the heap in a way that causes the GC to collect an object that
|
||||
* is actually reachable. The write barrier prevents this from happening. We use
|
||||
* a variant of incremental GC called "snapshot at the beginning." This approach
|
||||
* guarantees the invariant that if an object is reachable in step 2, then we
|
||||
* will mark it eventually. The name comes from the idea that we take a
|
||||
* theoretical "snapshot" of all reachable objects in step 2; all objects in
|
||||
* that snapshot should eventually be marked. (Note that the write barrier
|
||||
* verifier code takes an actual snapshot.)
|
||||
*
|
||||
* The basic correctness invariant of a snapshot-at-the-beginning collector is
|
||||
* that any object reachable at the end of the GC (step 9) must either:
|
||||
* (1) have been reachable at the beginning (step 2) and thus in the snapshot
|
||||
* (2) or must have been newly allocated, in steps 3, 5, or 7.
|
||||
* To deal with case (2), any objects allocated during an incremental GC are
|
||||
* automatically marked black.
|
||||
*
|
||||
* This strategy is actually somewhat conservative: if an object becomes
|
||||
* unreachable between steps 2 and 8, it would be safe to collect it. We won't,
|
||||
* mainly for simplicity. (Also, note that the snapshot is entirely
|
||||
* theoretical. We don't actually do anything special in step 2 that we wouldn't
|
||||
* do in a non-incremental GC.)
|
||||
*
|
||||
* It's the pre-barrier's job to maintain the snapshot invariant. Consider the
|
||||
* write "obj->field = value". Let the prior value of obj->field be
|
||||
* value0. Since it's possible that value0 may have been what obj->field
|
||||
* contained in step 2, when the snapshot was taken, the barrier marks
|
||||
* value0. Note that it only does this if we're in the middle of an incremental
|
||||
* GC. Since this is rare, the cost of the write barrier is usually just an
|
||||
* extra branch.
|
||||
*
|
||||
* In practice, we implement the pre-barrier differently based on the type of
|
||||
* value0. E.g., see JSObject::writeBarrierPre, which is used if obj->field is
|
||||
* a JSObject*. It takes value0 as a parameter.
|
||||
*
|
||||
* POST-BARRIER
|
||||
*
|
||||
* These are not yet implemented. Once we get generational GC, they will allow
|
||||
* us to keep track of pointers from non-nursery space into the nursery.
|
||||
*
|
||||
* IMPLEMENTATION DETAILS
|
||||
*
|
||||
* Since it would be awkward to change every write to memory into a function
|
||||
* call, this file contains a bunch of C++ classes and templates that use
|
||||
* operator overloading to take care of barriers automatically. In many cases,
|
||||
* all that's necessary to make some field be barriered is to replace
|
||||
* Type *field;
|
||||
* with
|
||||
* HeapPtr<Type> field;
|
||||
* There are also special classes HeapValue and HeapId, which barrier js::Value
|
||||
* and jsid, respectively.
|
||||
*
|
||||
* One additional note: not all object writes need to be barriered. Writes to
|
||||
* newly allocated objects do not need a barrier as long as the GC is not
|
||||
* allowed to run in between the allocation and the write. In these cases, we
|
||||
* use the "obj->field.init(value)" method instead of "obj->field = value".
|
||||
* We use the init naming idiom in many places to signify that a field is being
|
||||
* assigned for the first time, and that no GCs have taken place between the
|
||||
* object allocation and the assignment.
|
||||
*/
|
||||
|
||||
namespace js {
|
||||
|
||||
/*
|
||||
* Ideally, we would like to make the argument to functions like MarkShape be a
|
||||
* HeapPtr<const js::Shape>. That would ensure that we don't forget to
|
||||
* barrier any fields that we mark through. However, that would prohibit us from
|
||||
* passing in a derived class like HeapPtr<js::EmptyShape>.
|
||||
*
|
||||
* To overcome the problem, we make the argument to MarkShape be a
|
||||
* MarkablePtr<const js::Shape>. And we allow conversions from HeapPtr<T>
|
||||
* to MarkablePtr<U> as long as T can be converted to U.
|
||||
*/
|
||||
template<class T>
|
||||
class MarkablePtr
|
||||
{
|
||||
public:
|
||||
T *value;
|
||||
|
||||
explicit MarkablePtr(T *value) : value(value) {}
|
||||
};
|
||||
|
||||
template<class T, typename Unioned = uintptr_t>
|
||||
class HeapPtr
|
||||
{
|
||||
union {
|
||||
T *value;
|
||||
Unioned other;
|
||||
};
|
||||
|
||||
public:
|
||||
HeapPtr() : value(NULL) {}
|
||||
explicit HeapPtr(T *v) : value(v) { post(); }
|
||||
explicit HeapPtr(const HeapPtr<T> &v) : value(v.value) { post(); }
|
||||
|
||||
~HeapPtr() { pre(); }
|
||||
|
||||
/* Use this to install a ptr into a newly allocated object. */
|
||||
void init(T *v) {
|
||||
value = v;
|
||||
post();
|
||||
}
|
||||
|
||||
/* Use to set the pointer to NULL. */
|
||||
void clear() {
|
||||
pre();
|
||||
value = NULL;
|
||||
}
|
||||
|
||||
/* Use this if the automatic coercion to T* isn't working. */
|
||||
T *get() const { return value; }
|
||||
|
||||
/*
|
||||
* Use these if you want to change the value without invoking the barrier.
|
||||
* Obviously this is dangerous unless you know the barrier is not needed.
|
||||
*/
|
||||
T **unsafeGet() { return &value; }
|
||||
void unsafeSet(T *v) { value = v; }
|
||||
|
||||
Unioned *unsafeGetUnioned() { return &other; }
|
||||
|
||||
HeapPtr<T, Unioned> &operator=(T *v) {
|
||||
pre();
|
||||
value = v;
|
||||
post();
|
||||
return *this;
|
||||
}
|
||||
|
||||
HeapPtr<T, Unioned> &operator=(const HeapPtr<T> &v) {
|
||||
pre();
|
||||
value = v.value;
|
||||
post();
|
||||
return *this;
|
||||
}
|
||||
|
||||
T &operator*() const { return *value; }
|
||||
T *operator->() const { return value; }
|
||||
|
||||
operator T*() const { return value; }
|
||||
|
||||
/*
|
||||
* This coerces to MarkablePtr<U> as long as T can coerce to U. See the
|
||||
* comment for MarkablePtr above.
|
||||
*/
|
||||
template<class U>
|
||||
operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
|
||||
|
||||
private:
|
||||
void pre() { T::writeBarrierPre(value); }
|
||||
void post() { T::writeBarrierPost(value, (void *)&value); }
|
||||
|
||||
/* Make this friend so it can access pre() and post(). */
|
||||
template<class T1, class T2>
|
||||
friend inline void
|
||||
BarrieredSetPair(JSCompartment *comp,
|
||||
HeapPtr<T1> &v1, T1 *val1,
|
||||
HeapPtr<T2> &v2, T2 *val2);
|
||||
};
|
||||
|
||||
/*
|
||||
* This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
|
||||
* barriers with only one branch to check if we're in an incremental GC.
|
||||
*/
|
||||
template<class T1, class T2>
|
||||
static inline void
|
||||
BarrieredSetPair(JSCompartment *comp,
|
||||
HeapPtr<T1> &v1, T1 *val1,
|
||||
HeapPtr<T2> &v2, T2 *val2)
|
||||
{
|
||||
if (T1::needWriteBarrierPre(comp)) {
|
||||
v1.pre();
|
||||
v2.pre();
|
||||
}
|
||||
v1.unsafeSet(val1);
|
||||
v2.unsafeSet(val2);
|
||||
v1.post();
|
||||
v2.post();
|
||||
}
|
||||
|
||||
typedef HeapPtr<JSObject> HeapPtrObject;
|
||||
typedef HeapPtr<JSFunction> HeapPtrFunction;
|
||||
typedef HeapPtr<JSString> HeapPtrString;
|
||||
typedef HeapPtr<JSScript> HeapPtrScript;
|
||||
typedef HeapPtr<Shape> HeapPtrShape;
|
||||
typedef HeapPtr<const Shape> HeapPtrConstShape;
|
||||
typedef HeapPtr<JSXML> HeapPtrXML;
|
||||
|
||||
/* Useful for hashtables with a HeapPtr as key. */
|
||||
template<class T>
|
||||
struct HeapPtrHasher
|
||||
{
|
||||
typedef HeapPtr<T> Key;
|
||||
typedef T *Lookup;
|
||||
|
||||
static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
|
||||
static bool match(const Key &k, Lookup l) { return k.get() == l; }
|
||||
};
|
||||
|
||||
/* Specialized hashing policy for HeapPtrs. */
|
||||
template <class T>
|
||||
struct DefaultHasher< HeapPtr<T> >: HeapPtrHasher<T> { };
|
||||
|
||||
class HeapValue
|
||||
{
|
||||
Value value;
|
||||
|
||||
public:
|
||||
explicit HeapValue() : value(UndefinedValue()) {}
|
||||
explicit inline HeapValue(const Value &v);
|
||||
explicit inline HeapValue(const HeapValue &v);
|
||||
|
||||
inline ~HeapValue();
|
||||
|
||||
inline void init(const Value &v);
|
||||
|
||||
inline HeapValue &operator=(const Value &v);
|
||||
inline HeapValue &operator=(const HeapValue &v);
|
||||
|
||||
/*
|
||||
* This is a faster version of operator=. Normally, operator= has to
|
||||
* determine the compartment of the value before it can decide whether to do
|
||||
* the barrier. If you already know the compartment, it's faster to pass it
|
||||
* in.
|
||||
*/
|
||||
inline void set(JSCompartment *comp, const Value &v);
|
||||
|
||||
const Value &get() const { return value; }
|
||||
operator const Value &() const { return value; }
|
||||
|
||||
bool isMarkable() const { return value.isMarkable(); }
|
||||
bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
|
||||
bool isUndefined() const { return value.isUndefined(); }
|
||||
bool isObject() const { return value.isObject(); }
|
||||
bool isGCThing() const { return value.isGCThing(); }
|
||||
bool isTrue() const { return value.isTrue(); }
|
||||
bool isFalse() const { return value.isFalse(); }
|
||||
bool isInt32() const { return value.isInt32(); }
|
||||
bool isNull() const { return value.isNull(); }
|
||||
|
||||
JSObject &toObject() const { return value.toObject(); }
|
||||
JSObject *toObjectOrNull() const { return value.toObjectOrNull(); }
|
||||
void *toGCThing() const { return value.toGCThing(); }
|
||||
double toDouble() const { return value.toDouble(); }
|
||||
int32 toInt32() const { return value.toInt32(); }
|
||||
JSString *toString() const { return value.toString(); }
|
||||
bool toBoolean() const { return value.toBoolean(); }
|
||||
double toNumber() const { return value.toNumber(); }
|
||||
|
||||
unsigned gcKind() const { return value.gcKind(); }
|
||||
|
||||
inline void boxNonDoubleFrom(JSValueType type, uint64 *out);
|
||||
|
||||
uint64 asRawBits() const { return value.asRawBits(); }
|
||||
|
||||
#ifdef DEBUG
|
||||
JSWhyMagic whyMagic() const { return value.whyMagic(); }
|
||||
#endif
|
||||
|
||||
static inline void writeBarrierPre(const Value &v);
|
||||
static inline void writeBarrierPost(const Value &v, void *addr);
|
||||
|
||||
static inline void writeBarrierPre(JSCompartment *comp, const Value &v);
|
||||
static inline void writeBarrierPost(JSCompartment *comp, const Value &v, void *addr);
|
||||
|
||||
private:
|
||||
inline void pre();
|
||||
inline void post();
|
||||
|
||||
inline void pre(JSCompartment *comp);
|
||||
inline void post(JSCompartment *comp);
|
||||
};
|
||||
|
||||
static inline const Value *
|
||||
Valueify(const HeapValue *array)
|
||||
{
|
||||
JS_ASSERT(sizeof(HeapValue) == sizeof(Value));
|
||||
return (const Value *)array;
|
||||
}
|
||||
|
||||
class HeapValueArray
|
||||
{
|
||||
HeapValue *array;
|
||||
|
||||
public:
|
||||
HeapValueArray(HeapValue *array) : array(array) {}
|
||||
|
||||
operator const Value *() const { return Valueify(array); }
|
||||
operator HeapValue *() const { return array; }
|
||||
|
||||
HeapValueArray operator +(int offset) const { return HeapValueArray(array + offset); }
|
||||
HeapValueArray operator +(uint32 offset) const { return HeapValueArray(array + offset); }
|
||||
};
|
||||
|
||||
class HeapId
|
||||
{
|
||||
jsid value;
|
||||
|
||||
public:
|
||||
explicit HeapId() : value(JSID_VOID) {}
|
||||
explicit inline HeapId(jsid id);
|
||||
|
||||
inline ~HeapId();
|
||||
|
||||
inline void init(jsid id);
|
||||
|
||||
inline HeapId &operator=(jsid id);
|
||||
inline HeapId &operator=(const HeapId &v);
|
||||
|
||||
bool operator==(jsid id) const { return value == id; }
|
||||
bool operator!=(jsid id) const { return value != id; }
|
||||
|
||||
jsid get() const { return value; }
|
||||
operator jsid() const { return value; }
|
||||
|
||||
private:
|
||||
inline void pre();
|
||||
inline void post();
|
||||
|
||||
HeapId(const HeapId &v);
|
||||
};
|
||||
|
||||
/*
|
||||
* Incremental GC requires that weak pointers have read barriers. This is mostly
|
||||
* an issue for empty shapes stored in JSCompartment. The problem happens when,
|
||||
* during an incremental GC, some JS code stores one of the compartment's empty
|
||||
* shapes into an object already marked black. Normally, this would not be a
|
||||
* problem, because the empty shape would have been part of the initial snapshot
|
||||
* when the GC started. However, since this is a weak pointer, it isn't. So we
|
||||
* may collect the empty shape even though a live object points to it. To fix
|
||||
* this, we mark these empty shapes black whenever they get read out.
|
||||
*/
|
||||
template<class T>
|
||||
class ReadBarriered
|
||||
{
|
||||
T *value;
|
||||
|
||||
public:
|
||||
ReadBarriered(T *value) : value(value) {}
|
||||
|
||||
T *get() const {
|
||||
if (!value)
|
||||
return NULL;
|
||||
T::readBarrier(value);
|
||||
return value;
|
||||
}
|
||||
|
||||
operator T*() const { return get(); }
|
||||
|
||||
T *unsafeGet() { return value; }
|
||||
|
||||
void set(T *v) { value = v; }
|
||||
|
||||
operator bool() { return !!value; }
|
||||
|
||||
template<class U>
|
||||
operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif /* jsgc_barrier_h___ */
|
@ -6,6 +6,8 @@
|
||||
|
||||
#include "vm/Stack-inl.h"
|
||||
|
||||
#include "jsobjinlines.h"
|
||||
|
||||
using namespace js;
|
||||
|
||||
static const char NORMAL_ZERO[] =
|
||||
|
@ -8,6 +8,8 @@
|
||||
#include "jsobj.h"
|
||||
#include "jswrapper.h"
|
||||
|
||||
#include "jsobjinlines.h"
|
||||
|
||||
struct OuterWrapper : js::Wrapper
|
||||
{
|
||||
OuterWrapper() : Wrapper(0) {}
|
||||
|
@ -7,7 +7,8 @@ BEGIN_TEST(testConservativeGC)
|
||||
jsval v2;
|
||||
EVAL("({foo: 'bar'});", &v2);
|
||||
CHECK(JSVAL_IS_OBJECT(v2));
|
||||
JSObject objCopy = *JSVAL_TO_OBJECT(v2);
|
||||
char objCopy[sizeof(JSObject)];
|
||||
memcpy(&objCopy, JSVAL_TO_OBJECT(v2), sizeof(JSObject));
|
||||
|
||||
jsval v3;
|
||||
EVAL("String(Math.PI);", &v3);
|
||||
@ -18,7 +19,8 @@ BEGIN_TEST(testConservativeGC)
|
||||
EVAL("({foo2: 'bar2'});", &tmp);
|
||||
CHECK(JSVAL_IS_OBJECT(tmp));
|
||||
JSObject *obj2 = JSVAL_TO_OBJECT(tmp);
|
||||
JSObject obj2Copy = *obj2;
|
||||
char obj2Copy[sizeof(JSObject)];
|
||||
memcpy(&obj2Copy, obj2, sizeof(JSObject));
|
||||
|
||||
EVAL("String(Math.sqrt(3));", &tmp);
|
||||
CHECK(JSVAL_IS_STRING(tmp));
|
||||
@ -36,10 +38,10 @@ BEGIN_TEST(testConservativeGC)
|
||||
|
||||
JS_GC(cx);
|
||||
|
||||
checkObjectFields(&objCopy, JSVAL_TO_OBJECT(v2));
|
||||
checkObjectFields((JSObject *)objCopy, JSVAL_TO_OBJECT(v2));
|
||||
CHECK(!memcmp(&strCopy, JSVAL_TO_STRING(v3), sizeof(strCopy)));
|
||||
|
||||
checkObjectFields(&obj2Copy, obj2);
|
||||
checkObjectFields((JSObject *)obj2Copy, obj2);
|
||||
CHECK(!memcmp(&str2Copy, str2, sizeof(str2Copy)));
|
||||
|
||||
return true;
|
||||
|
@ -9,6 +9,8 @@
|
||||
#include "jsnum.h"
|
||||
#include "jsstr.h"
|
||||
|
||||
#include "jsobjinlines.h"
|
||||
|
||||
#include "vm/String-inl.h"
|
||||
|
||||
using namespace mozilla;
|
||||
|
js/src/jsapi.cpp (118 lines changed)
@ -96,6 +96,7 @@
|
||||
#include "jsscriptinlines.h"
|
||||
|
||||
#include "vm/RegExpObject-inl.h"
|
||||
#include "vm/RegExpStatics-inl.h"
|
||||
#include "vm/Stack-inl.h"
|
||||
#include "vm/String-inl.h"
|
||||
|
||||
@ -657,7 +658,7 @@ JSRuntime::JSRuntime()
|
||||
gcMaxMallocBytes(0),
|
||||
gcEmptyArenaPoolLifespan(0),
|
||||
gcNumber(0),
|
||||
gcMarkingTracer(NULL),
|
||||
gcIncrementalTracer(NULL),
|
||||
gcChunkAllocationSinceLastGC(false),
|
||||
gcNextFullGCTime(0),
|
||||
gcJitReleaseTime(0),
|
||||
@ -1327,7 +1328,7 @@ JS_EnterCrossCompartmentCallScript(JSContext *cx, JSScript *target)
|
||||
{
|
||||
CHECK_REQUEST(cx);
|
||||
JS_ASSERT(!target->isCachedEval);
|
||||
GlobalObject *global = target->u.globalObject;
|
||||
GlobalObject *global = target->globalObject;
|
||||
if (!global) {
|
||||
SwitchToCompartment sc(cx, target->compartment());
|
||||
global = GlobalObject::create(cx, &dummy_class);
|
||||
@ -1952,9 +1953,7 @@ JS_EnumerateStandardClasses(JSContext *cx, JSObject *obj)
|
||||
return JS_TRUE;
|
||||
}
|
||||
|
||||
namespace js {
|
||||
|
||||
JSIdArray *
|
||||
static JSIdArray *
|
||||
NewIdArray(JSContext *cx, jsint length)
|
||||
{
|
||||
JSIdArray *ida;
|
||||
@ -1966,8 +1965,6 @@ NewIdArray(JSContext *cx, jsint length)
|
||||
return ida;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* Unlike realloc(3), this function frees ida on failure.
|
||||
*/
|
||||
@ -2000,7 +1997,7 @@ AddAtomToArray(JSContext *cx, JSAtom *atom, JSIdArray *ida, jsint *ip)
|
||||
return NULL;
|
||||
JS_ASSERT(i < ida->length);
|
||||
}
|
||||
ida->vector[i] = ATOM_TO_JSID(atom);
|
||||
ida->vector[i].init(ATOM_TO_JSID(atom));
|
||||
*ip = i + 1;
|
||||
return ida;
|
||||
}
|
||||
@ -2330,11 +2327,16 @@ JS_TraceRuntime(JSTracer *trc)
|
||||
TraceRuntime(trc);
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
{
|
||||
js::TraceChildren(trc, thing, kind);
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
{
|
||||
JS_ASSERT(thing);
|
||||
MarkKind(trc, thing, kind);
|
||||
js::CallTracer(trc, thing, kind);
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
@ -2782,8 +2784,8 @@ JS_PUBLIC_API(JSBool)
|
||||
JS_IsAboutToBeFinalized(JSContext *cx, void *thing)
|
||||
{
|
||||
JS_ASSERT(thing);
|
||||
JS_ASSERT(!cx->runtime->gcMarkingTracer);
|
||||
return IsAboutToBeFinalized(cx, thing);
|
||||
JS_ASSERT(!cx->runtime->gcIncrementalTracer);
|
||||
return IsAboutToBeFinalized(cx, (gc::Cell *)thing);
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
@ -2942,6 +2944,19 @@ JS_SetNativeStackQuota(JSContext *cx, size_t stackSize)
|
||||
|
||||
/************************************************************************/
|
||||
|
||||
JS_PUBLIC_API(jsint)
|
||||
JS_IdArrayLength(JSContext *cx, JSIdArray *ida)
|
||||
{
|
||||
return ida->length;
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(jsid)
|
||||
JS_IdArrayGet(JSContext *cx, JSIdArray *ida, jsint index)
|
||||
{
|
||||
JS_ASSERT(index >= 0 && index < ida->length);
|
||||
return ida->vector[index];
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_DestroyIdArray(JSContext *cx, JSIdArray *ida)
|
||||
{
|
||||
@ -4105,12 +4120,16 @@ prop_iter_trace(JSTracer *trc, JSObject *obj)
|
||||
return;
|
||||
|
||||
if (obj->getSlot(JSSLOT_ITER_INDEX).toInt32() < 0) {
|
||||
/* Native case: just mark the next property to visit. */
|
||||
MarkShape(trc, (Shape *)pdata, "prop iter shape");
|
||||
/*
|
||||
* Native case: just mark the next property to visit. We don't need a
|
||||
* barrier here because the pointer is updated via setPrivate, which
|
||||
* always takes a barrier.
|
||||
*/
|
||||
MarkShapeUnbarriered(trc, (Shape *)pdata, "prop iter shape");
|
||||
} else {
|
||||
/* Non-native case: mark each id in the JSIdArray private. */
|
||||
JSIdArray *ida = (JSIdArray *) pdata;
|
||||
MarkIdRange(trc, ida->length, ida->vector, "prop iter");
|
||||
MarkIdRange(trc, ida->vector, ida->vector + ida->length, "prop iter");
|
||||
}
|
||||
}
|
||||
|
||||
@ -4138,7 +4157,7 @@ JS_PUBLIC_API(JSObject *)
|
||||
JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
|
||||
{
|
||||
JSObject *iterobj;
|
||||
const void *pdata;
|
||||
void *pdata;
|
||||
jsint index;
|
||||
JSIdArray *ida;
|
||||
|
||||
@ -4150,7 +4169,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
|
||||
|
||||
if (obj->isNative()) {
|
||||
/* Native case: start with the last property in obj. */
|
||||
pdata = obj->lastProperty();
|
||||
pdata = (void *)obj->lastProperty();
|
||||
index = -1;
|
||||
} else {
|
||||
/*
|
||||
@ -4168,7 +4187,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
|
||||
}
|
||||
|
||||
/* iterobj cannot escape to other threads here. */
|
||||
iterobj->setPrivate(const_cast<void *>(pdata));
|
||||
iterobj->setPrivate(pdata);
|
||||
iterobj->setSlot(JSSLOT_ITER_INDEX, Int32Value(index));
|
||||
return iterobj;
|
||||
}
|
||||
@ -4431,7 +4450,7 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
|
||||
Value v;
|
||||
if (!obj->getGeneric(cx, r.front().propid, &v))
|
||||
return NULL;
|
||||
clone->getFlatClosureUpvars()[i] = v;
|
||||
clone->setFlatClosureUpvar(i, v);
|
||||
}
|
||||
|
||||
return clone;
|
||||
@ -4882,9 +4901,9 @@ JS_PUBLIC_API(JSObject *)
|
||||
JS_GetGlobalFromScript(JSScript *script)
|
||||
{
|
||||
JS_ASSERT(!script->isCachedEval);
|
||||
JS_ASSERT(script->u.globalObject);
|
||||
JS_ASSERT(script->globalObject);
|
||||
|
||||
return script->u.globalObject;
|
||||
return script->globalObject;
|
||||
}
|
||||
|
||||
static JSFunction *
|
||||
@ -6367,6 +6386,63 @@ js_GetCompartmentPrivate(JSCompartment *compartment)
|
||||
|
||||
/************************************************************************/
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_RegisterReference(void **ref)
|
||||
{
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_ModifyReference(void **ref, void *newval)
|
||||
{
|
||||
// XPConnect uses the lower bits of its JSObject refs for evil purposes,
|
||||
// so we need to fix this.
|
||||
void *thing = *ref;
|
||||
*ref = newval;
|
||||
thing = (void *)((uintptr_t)thing & ~7);
|
||||
if (!thing)
|
||||
return;
|
||||
uint32 kind = GetGCThingTraceKind(thing);
|
||||
if (kind == JSTRACE_OBJECT)
|
||||
JSObject::writeBarrierPre((JSObject *) thing);
|
||||
else if (kind == JSTRACE_STRING)
|
||||
JSString::writeBarrierPre((JSString *) thing);
|
||||
else
|
||||
JS_NOT_REACHED("invalid trace kind");
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_UnregisterReference(void **ref)
|
||||
{
|
||||
// For now we just want to trigger a write barrier.
|
||||
JS_ModifyReference(ref, NULL);
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_RegisterValue(jsval *val)
|
||||
{
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_ModifyValue(jsval *val, jsval newval)
|
||||
{
|
||||
HeapValue::writeBarrierPre(*val);
|
||||
*val = newval;
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_UnregisterValue(jsval *val)
|
||||
{
|
||||
JS_ModifyValue(val, JSVAL_VOID);
|
||||
}
|
||||
|
||||
JS_PUBLIC_API(JSTracer *)
|
||||
JS_GetIncrementalGCTracer(JSRuntime *rt)
|
||||
{
|
||||
return rt->gcIncrementalTracer;
|
||||
}
|
||||
|
||||
/************************************************************************/
|
||||
|
||||
#if !defined(STATIC_EXPORTABLE_JS_API) && !defined(STATIC_JS_API) && defined(XP_WIN)
|
||||
|
||||
#include "jswin.h"
|
||||
|
@ -2698,6 +2698,7 @@ typedef void
|
||||
(* JSTraceCallback)(JSTracer *trc, void *thing, JSGCTraceKind kind);
|
||||
|
||||
struct JSTracer {
|
||||
JSRuntime *runtime;
|
||||
JSContext *context;
|
||||
JSTraceCallback callback;
|
||||
JSTraceNamePrinter debugPrinter;
|
||||
@ -2798,6 +2799,7 @@ JS_CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind);
|
||||
*/
|
||||
# define JS_TRACER_INIT(trc, cx_, callback_) \
|
||||
JS_BEGIN_MACRO \
|
||||
(trc)->runtime = (cx_)->runtime; \
|
||||
(trc)->context = (cx_); \
|
||||
(trc)->callback = (callback_); \
|
||||
(trc)->debugPrinter = NULL; \
|
||||
@ -2842,6 +2844,88 @@ JS_DumpHeap(JSContext *cx, FILE *fp, void* startThing, JSGCTraceKind kind,
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Write barrier API.
|
||||
*
|
||||
* This API is used to inform SpiderMonkey of pointers to JS GC things in the
|
||||
* malloc heap. There is no need to use this API unless incremental GC is
|
||||
* enabled. When it is, the requirements for using the API are as follows:
|
||||
*
|
||||
* All pointers to JS GC things from the malloc heap must be registered and
|
||||
* unregistered with the API functions below. This is *in addition* to the
|
||||
* normal rooting and tracing that must still be done--these functions will
|
||||
* not take care of rooting for you.
|
||||
*
|
||||
* Besides registration, the JS_ModifyReference function must be called to
|
||||
* change the value of these references. You should not change them using
|
||||
* assignment.
|
||||
*
|
||||
* To avoid the headache of using these API functions, the JSBarrieredObjectPtr
|
||||
* C++ class is provided--simply replace your JSObject* with a
|
||||
* JSBarrieredObjectPtr. It will take care of calling the registration and
|
||||
* modification APIs.
|
||||
*
|
||||
* For more explanation, see the comment in gc/Barrier.h.
|
||||
*/
|
||||
|
||||
/* These functions are to be used for objects and strings. */
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_RegisterReference(void **ref);
|
||||
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_ModifyReference(void **ref, void *newval);
|
||||
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_UnregisterReference(void **ref);
|
||||
|
||||
/* These functions are for values. */
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_RegisterValue(jsval *val);
|
||||
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_ModifyValue(jsval *val, jsval newval);
|
||||
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_UnregisterValue(jsval *val);
|
||||
|
||||
extern JS_PUBLIC_API(JSTracer *)
|
||||
JS_GetIncrementalGCTracer(JSRuntime *rt);
|
||||
|
||||
#ifdef __cplusplus
|
||||
JS_END_EXTERN_C
|
||||
|
||||
namespace JS {
|
||||
|
||||
class HeapPtrObject
|
||||
{
|
||||
JSObject *value;
|
||||
|
||||
public:
|
||||
HeapPtrObject() : value(NULL) { JS_RegisterReference((void **) &value); }
|
||||
|
||||
HeapPtrObject(JSObject *obj) : value(obj) { JS_RegisterReference((void **) &value); }
|
||||
|
||||
~HeapPtrObject() { JS_UnregisterReference((void **) &value); }
|
||||
|
||||
void init(JSObject *obj) { value = obj; }
|
||||
|
||||
JSObject *get() const { return value; }
|
||||
|
||||
HeapPtrObject &operator=(JSObject *obj) {
|
||||
JS_ModifyReference((void **) &value, obj);
|
||||
return *this;
|
||||
}
|
||||
|
||||
JSObject &operator*() const { return *value; }
|
||||
JSObject *operator->() const { return value; }
|
||||
operator JSObject *() const { return value; }
|
||||
};
|
||||
|
||||
} /* namespace JS */
|
||||
|
||||
JS_BEGIN_EXTERN_C
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Garbage collector API.
|
||||
*/
|
||||
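A hedged call-site sketch of the registration API declared above, for orientation only: the `cache` variable and the helper names are made up, and only the JS_RegisterReference / JS_ModifyReference / JS_UnregisterReference calls come from this patch.

    // Hypothetical embedder code using the write barrier API declared above.
    #include "jsapi.h"

    static JSObject *cache = NULL;   // a malloc-heap pointer to a GC thing

    void initCache(JSObject *obj) {
        JS_RegisterReference((void **) &cache);  // register the slot with the GC
        cache = obj;                             // first store into a fresh slot
    }

    void updateCache(JSObject *newObj) {
        // All later updates go through JS_ModifyReference so the pre-barrier
        // sees the old value while an incremental GC is in progress.
        JS_ModifyReference((void **) &cache, newObj);
    }

    void dropCache() {
        JS_UnregisterReference((void **) &cache);  // final barrier + deregister
    }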
@ -3115,10 +3199,11 @@ struct JSClass {
|
||||
#define JSCLASS_NO_INTERNAL_MEMBERS 0,{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}
|
||||
#define JSCLASS_NO_OPTIONAL_MEMBERS 0,0,0,0,0,0,0,JSCLASS_NO_INTERNAL_MEMBERS
|
||||
|
||||
struct JSIdArray {
|
||||
jsint length;
|
||||
jsid vector[1]; /* actually, length jsid words */
|
||||
};
|
||||
extern JS_PUBLIC_API(jsint)
|
||||
JS_IdArrayLength(JSContext *cx, JSIdArray *ida);
|
||||
|
||||
extern JS_PUBLIC_API(jsid)
|
||||
JS_IdArrayGet(JSContext *cx, JSIdArray *ida, jsint index);
|
||||
|
||||
extern JS_PUBLIC_API(void)
|
||||
JS_DestroyIdArray(JSContext *cx, JSIdArray *ida);
|
||||
|
@ -649,14 +649,13 @@ array_length_setter(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value
|
||||
* the initialized capacity.
|
||||
*/
|
||||
jsuint oldcap = obj->getDenseArrayCapacity();
|
||||
jsuint oldinit = obj->getDenseArrayInitializedLength();
|
||||
if (oldinit > newlen)
|
||||
obj->setDenseArrayInitializedLength(newlen);
|
||||
if (oldcap > newlen)
|
||||
obj->shrinkDenseArrayElements(cx, newlen);
|
||||
jsuint oldinit = obj->getDenseArrayInitializedLength();
|
||||
if (oldinit > newlen) {
|
||||
obj->setDenseArrayInitializedLength(newlen);
|
||||
if (!cx->typeInferenceEnabled())
|
||||
obj->backfillDenseArrayHoles(cx);
|
||||
}
|
||||
if (oldinit > newlen && !cx->typeInferenceEnabled())
|
||||
obj->backfillDenseArrayHoles(cx);
|
||||
} else if (oldlen - newlen < (1 << 24)) {
|
||||
do {
|
||||
--oldlen;
|
||||
@ -1340,8 +1339,11 @@ JSObject::makeDenseArraySlow(JSContext *cx)
|
||||
|
||||
/* Create a native scope. */
|
||||
gc::AllocKind kind = getAllocKind();
|
||||
if (!InitScopeForObject(cx, this, &SlowArrayClass, getProto()->getNewType(cx), kind))
|
||||
js::EmptyShape *empty = InitScopeForObject(cx, this, &SlowArrayClass,
|
||||
getProto()->getNewType(cx), kind);
|
||||
if (!empty)
|
||||
return false;
|
||||
setMap(empty);
|
||||
|
||||
backfillDenseArrayHoles(cx);
|
||||
|
||||
@ -1367,10 +1369,10 @@ JSObject::makeDenseArraySlow(JSContext *cx)
|
||||
* Root all values in the array during conversion, as SlowArrayClass only
|
||||
* protects up to its slot span.
|
||||
*/
|
||||
AutoValueArray autoArray(cx, slots, arrayInitialized);
|
||||
AutoValueArray autoArray(cx, Valueify(slots), arrayInitialized);
|
||||
|
||||
/* The initialized length is used iff this is a dense array. */
|
||||
initializedLength = 0;
|
||||
initializedLength() = 0;
|
||||
JS_ASSERT(newType == NULL);
|
||||
|
||||
/*
|
||||
@ -1380,7 +1382,7 @@ JSObject::makeDenseArraySlow(JSContext *cx)
|
||||
if (!AddLengthProperty(cx, this)) {
|
||||
setMap(oldMap);
|
||||
capacity = arrayCapacity;
|
||||
initializedLength = arrayInitialized;
|
||||
initializedLength() = arrayInitialized;
|
||||
clasp = &ArrayClass;
|
||||
return false;
|
||||
}
|
||||
@ -1403,7 +1405,7 @@ JSObject::makeDenseArraySlow(JSContext *cx)
|
||||
if (!addDataProperty(cx, id, next, JSPROP_ENUMERATE)) {
|
||||
setMap(oldMap);
|
||||
capacity = arrayCapacity;
|
||||
initializedLength = arrayInitialized;
|
||||
initializedLength() = arrayInitialized;
|
||||
clasp = &ArrayClass;
|
||||
return false;
|
||||
}
|
||||
@ -2497,7 +2499,7 @@ NewbornArrayPushImpl(JSContext *cx, JSObject *obj, const Value &v)
|
||||
if (cx->typeInferenceEnabled())
|
||||
obj->setDenseArrayInitializedLength(length + 1);
|
||||
obj->setDenseArrayLength(length + 1);
|
||||
obj->setDenseArrayElementWithType(cx, length, v);
|
||||
obj->initDenseArrayElementWithType(cx, length, v);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -2893,16 +2895,16 @@ array_splice(JSContext *cx, uintN argc, Value *vp)
|
||||
/* Steps 12(a)-(b). */
|
||||
obj->moveDenseArrayElements(targetIndex, sourceIndex, len - sourceIndex);
|
||||
|
||||
/* Steps 12(c)-(d). */
|
||||
obj->shrinkDenseArrayElements(cx, finalLength);
|
||||
|
||||
/*
|
||||
* The array's initialized length is now out of sync with the array
|
||||
* elements: resynchronize it.
|
||||
* Update the initialized length. Do so before shrinking so that we
|
||||
* can apply the write barrier to the old slots.
|
||||
*/
|
||||
if (cx->typeInferenceEnabled())
|
||||
obj->setDenseArrayInitializedLength(finalLength);
|
||||
|
||||
/* Steps 12(c)-(d). */
|
||||
obj->shrinkDenseArrayElements(cx, finalLength);
|
||||
|
||||
/* Fix running enumerators for the deleted items. */
|
||||
if (!js_SuppressDeletedElements(cx, obj, finalLength, len))
|
||||
return false;
|
||||
@ -3019,10 +3021,12 @@ mjit::stubs::ArrayConcatTwoArrays(VMFrame &f)
|
||||
if (!result->ensureSlots(f.cx, len))
|
||||
THROW();
|
||||
|
||||
result->copyDenseArrayElements(0, obj1->getDenseArrayElements(), initlen1);
|
||||
result->copyDenseArrayElements(initlen1, obj2->getDenseArrayElements(), initlen2);
|
||||
|
||||
JS_ASSERT(!result->getDenseArrayInitializedLength());
|
||||
result->setDenseArrayInitializedLength(len);
|
||||
|
||||
result->initDenseArrayElements(0, obj1->getDenseArrayElements(), initlen1);
|
||||
result->initDenseArrayElements(initlen1, obj2->getDenseArrayElements(), initlen2);
|
||||
|
||||
result->setDenseArrayLength(len);
|
||||
}
|
||||
#endif /* JS_METHODJIT */
|
||||
@ -3920,7 +3924,7 @@ NewDenseCopiedArray(JSContext *cx, uint32 length, const Value *vp, JSObject *pro
|
||||
obj->setDenseArrayInitializedLength(vp ? length : 0);
|
||||
|
||||
if (vp)
|
||||
obj->copyDenseArrayElements(0, vp, length);
|
||||
obj->initDenseArrayElements(0, vp, length);
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
@ -55,7 +55,7 @@ inline uint32
|
||||
JSObject::getDenseArrayInitializedLength()
|
||||
{
|
||||
JS_ASSERT(isDenseArray());
|
||||
return initializedLength;
|
||||
return initializedLength();
|
||||
}
|
||||
|
||||
inline bool
|
||||
|
@ -48,7 +48,9 @@ JSObject::setDenseArrayInitializedLength(uint32 length)
|
||||
{
|
||||
JS_ASSERT(isDenseArray());
|
||||
JS_ASSERT(length <= getDenseArrayCapacity());
|
||||
initializedLength = length;
|
||||
uint32 cur = initializedLength();
|
||||
prepareSlotRangeForOverwrite(length, cur);
|
||||
initializedLength() = length;
|
||||
}
|
||||
|
||||
inline void
|
||||
@ -77,12 +79,13 @@ JSObject::ensureDenseArrayInitializedLength(JSContext *cx, uint32 index, uint32
|
||||
* for a write.
|
||||
*/
|
||||
JS_ASSERT(index + extra <= capacity);
|
||||
if (initializedLength < index) {
|
||||
if (initializedLength() < index)
|
||||
markDenseArrayNotPacked(cx);
|
||||
js::ClearValueRange(slots + initializedLength, index - initializedLength, true);
|
||||
|
||||
if (initializedLength() < index + extra) {
|
||||
js::InitValueRange(slots + initializedLength(), index + extra - initializedLength(), true);
|
||||
initializedLength() = index + extra;
|
||||
}
|
||||
if (initializedLength < index + extra)
|
||||
initializedLength = index + extra;
|
||||
}
|
||||
|
||||
inline JSObject::EnsureDenseResult
|
||||
|
@ -389,7 +389,7 @@ js_FinishCommonAtoms(JSContext *cx)
|
||||
void
|
||||
js_TraceAtomState(JSTracer *trc)
|
||||
{
|
||||
JSRuntime *rt = trc->context->runtime;
|
||||
JSRuntime *rt = trc->runtime;
|
||||
JSAtomState *state = &rt->atomState;
|
||||
|
||||
#ifdef DEBUG
|
||||
@ -399,7 +399,7 @@ js_TraceAtomState(JSTracer *trc)
|
||||
if (rt->gcKeepAtoms) {
|
||||
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
|
||||
JS_SET_TRACING_INDEX(trc, "locked_atom", number++);
|
||||
MarkString(trc, r.front().asPtr());
|
||||
MarkAtom(trc, r.front().asPtr());
|
||||
}
|
||||
} else {
|
||||
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
|
||||
@ -408,7 +408,7 @@ js_TraceAtomState(JSTracer *trc)
|
||||
continue;
|
||||
|
||||
JS_SET_TRACING_INDEX(trc, "interned_atom", number++);
|
||||
MarkString(trc, entry.asPtr());
|
||||
MarkAtom(trc, entry.asPtr());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -49,9 +49,15 @@
|
||||
#include "jspubtd.h"
|
||||
#include "jslock.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
#include "js/HashTable.h"
|
||||
#include "vm/String.h"
|
||||
|
||||
struct JSIdArray {
|
||||
jsint length;
|
||||
js::HeapId vector[1]; /* actually, length jsid words */
|
||||
};
|
||||
|
||||
/* Engine-internal extensions of jsid */
|
||||
|
||||
static JS_ALWAYS_INLINE jsid
|
||||
|
@ -66,7 +66,6 @@ enum AllocKind {
|
||||
FINALIZE_OBJECT16_BACKGROUND,
|
||||
FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
|
||||
FINALIZE_FUNCTION,
|
||||
FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
|
||||
FINALIZE_SCRIPT,
|
||||
FINALIZE_SHAPE,
|
||||
FINALIZE_TYPE_OBJECT,
|
||||
@ -75,11 +74,12 @@ enum AllocKind {
|
||||
#endif
|
||||
FINALIZE_SHORT_STRING,
|
||||
FINALIZE_STRING,
|
||||
FINALIZE_EXTERNAL_STRING,
|
||||
FINALIZE_LAST = FINALIZE_EXTERNAL_STRING
|
||||
FINALIZE_EXTERNAL_STRING
|
||||
};
|
||||
|
||||
const size_t FINALIZE_LIMIT = FINALIZE_LAST + 1;
|
||||
static const unsigned FINALIZE_LIMIT = FINALIZE_EXTERNAL_STRING + 1;
|
||||
static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT16_BACKGROUND + 1;
|
||||
static const unsigned FINALIZE_FUNCTION_AND_OBJECT_LIMIT = FINALIZE_FUNCTION + 1;
|
||||
|
||||
/*
|
||||
* Live objects are marked black. How many other additional colors are available
|
||||
|
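The exclusive *_LIMIT constants replace the removed *_LAST enumerators, so loops over alloc kinds take a strict upper bound. A small illustrative loop (the visitor is hypothetical, and the js::gc:: qualification assumes these constants live in that namespace):

static void
ForEachObjectAllocKindSketch(void (*visit)(js::gc::AllocKind))
{
    for (unsigned kind = js::gc::FINALIZE_OBJECT0; kind < js::gc::FINALIZE_OBJECT_LIMIT; kind++)
        visit(js::gc::AllocKind(kind));
}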
@ -460,11 +460,12 @@ struct JSRuntime
|
||||
/* We access this without the GC lock, however a race will not affect correctness */
|
||||
volatile uint32 gcNumFreeArenas;
|
||||
uint32 gcNumber;
|
||||
js::GCMarker *gcMarkingTracer;
|
||||
js::GCMarker *gcIncrementalTracer;
|
||||
bool gcChunkAllocationSinceLastGC;
|
||||
int64 gcNextFullGCTime;
|
||||
int64 gcJitReleaseTime;
|
||||
JSGCMode gcMode;
|
||||
volatile jsuword gcBarrierFailed;
|
||||
volatile jsuword gcIsNeeded;
|
||||
js::WeakMapBase *gcWeakMapList;
|
||||
js::gcstats::Statistics gcStats;
|
||||
@ -2215,18 +2216,6 @@ namespace js {
|
||||
|
||||
/************************************************************************/
|
||||
|
||||
static JS_ALWAYS_INLINE void
|
||||
ClearValueRange(Value *vec, uintN len, bool useHoles)
|
||||
{
|
||||
if (useHoles) {
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].setMagic(JS_ARRAY_HOLE);
|
||||
} else {
|
||||
for (uintN i = 0; i < len; i++)
|
||||
vec[i].setUndefined();
|
||||
}
|
||||
}
|
||||
|
||||
static JS_ALWAYS_INLINE void
|
||||
MakeRangeGCSafe(Value *vec, size_t len)
|
||||
{
|
||||
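ClearValueRange is removed above because barriered slot arrays want init semantics for fresh storage rather than plain stores. A hedged sketch of what the InitValueRange helper used elsewhere in this patch is assumed to do (the real definition lives with the new barrier types):

static JS_ALWAYS_INLINE void
InitValueRangeSketch(js::HeapValue *vec, uintN len, bool useHoles)
{
    /* init() skips the pre-barrier: these slots have never held traced values. */
    for (uintN i = 0; i < len; i++)
        vec[i].init(useHoles ? js::MagicValue(JS_ARRAY_HOLE) : js::UndefinedValue());
}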
@ -2416,26 +2405,23 @@ class AutoShapeVector : public AutoVectorRooter<const Shape *>
|
||||
|
||||
class AutoValueArray : public AutoGCRooter
|
||||
{
|
||||
js::Value *start_;
|
||||
const js::Value *start_;
|
||||
unsigned length_;
|
||||
|
||||
public:
|
||||
AutoValueArray(JSContext *cx, js::Value *start, unsigned length
|
||||
AutoValueArray(JSContext *cx, const js::Value *start, unsigned length
|
||||
JS_GUARD_OBJECT_NOTIFIER_PARAM)
|
||||
: AutoGCRooter(cx, VALARRAY), start_(start), length_(length)
|
||||
{
|
||||
JS_GUARD_OBJECT_NOTIFIER_INIT;
|
||||
}
|
||||
|
||||
Value *start() const { return start_; }
|
||||
const Value *start() const { return start_; }
|
||||
unsigned length() const { return length_; }
|
||||
|
||||
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
|
||||
};
|
||||
|
||||
JSIdArray *
|
||||
NewIdArray(JSContext *cx, jsint length);
|
||||
|
||||
/*
|
||||
* Allocation policy that uses JSRuntime::malloc_ and friends, so that
|
||||
* memory pressure is properly accounted for. This is suitable for
|
||||
|
@ -115,7 +115,7 @@ class AutoNamespaceArray : protected AutoGCRooter {
|
||||
public:
|
||||
friend void AutoGCRooter::trace(JSTracer *trc);
|
||||
|
||||
JSXMLArray array;
|
||||
JSXMLArray<JSObject> array;
|
||||
};
|
||||
|
||||
#ifdef DEBUG
|
||||
@ -208,8 +208,8 @@ class CompartmentChecker
|
||||
void check(JSScript *script) {
|
||||
if (script) {
|
||||
check(script->compartment());
|
||||
if (!script->isCachedEval && script->u.globalObject)
|
||||
check(script->u.globalObject);
|
||||
if (!script->isCachedEval && script->globalObject)
|
||||
check(script->globalObject);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -58,6 +58,7 @@
|
||||
#include "vm/Debugger.h"
|
||||
|
||||
#include "jsgcinlines.h"
|
||||
#include "jsobjinlines.h"
|
||||
#include "jsscopeinlines.h"
|
||||
|
||||
#if ENABLE_YARR_JIT
|
||||
@ -71,6 +72,8 @@ using namespace js::gc;
|
||||
JSCompartment::JSCompartment(JSRuntime *rt)
|
||||
: rt(rt),
|
||||
principals(NULL),
|
||||
needsBarrier_(false),
|
||||
gcIncrementalTracer(NULL),
|
||||
gcBytes(0),
|
||||
gcTriggerBytes(0),
|
||||
gcLastBytes(0),
|
||||
@ -348,6 +351,16 @@ JSCompartment::wrap(JSContext *cx, JSString **strp)
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
JSCompartment::wrap(JSContext *cx, HeapPtrString *strp)
|
||||
{
|
||||
AutoValueRooter tvr(cx, StringValue(*strp));
|
||||
if (!wrap(cx, tvr.addr()))
|
||||
return false;
|
||||
*strp = tvr.value().toString();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
JSCompartment::wrap(JSContext *cx, JSObject **objp)
|
||||
{
|
||||
@ -420,10 +433,10 @@ JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
|
||||
void
|
||||
JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
|
||||
{
|
||||
JS_ASSERT(trc->context->runtime->gcCurrentCompartment);
|
||||
JS_ASSERT(trc->runtime->gcCurrentCompartment);
|
||||
|
||||
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront())
|
||||
MarkValue(trc, e.front().key, "cross-compartment wrapper");
|
||||
MarkRoot(trc, e.front().key, "cross-compartment wrapper");
|
||||
}
|
||||
|
||||
void
|
||||
@ -438,21 +451,21 @@ JSCompartment::markTypes(JSTracer *trc)
|
||||
|
||||
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
|
||||
JSScript *script = i.get<JSScript>();
|
||||
MarkScript(trc, script, "mark_types_script");
|
||||
MarkRoot(trc, script, "mark_types_script");
|
||||
}
|
||||
|
||||
for (size_t thingKind = FINALIZE_OBJECT0;
|
||||
thingKind <= FINALIZE_FUNCTION_AND_OBJECT_LAST;
|
||||
thingKind < FINALIZE_FUNCTION_AND_OBJECT_LIMIT;
|
||||
thingKind++) {
|
||||
for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
|
||||
JSObject *object = i.get<JSObject>();
|
||||
if (!object->isNewborn() && object->hasSingletonType())
|
||||
MarkObject(trc, *object, "mark_types_singleton");
|
||||
MarkRoot(trc, object, "mark_types_singleton");
|
||||
}
|
||||
}
|
||||
|
||||
for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
|
||||
MarkTypeObject(trc, i.get<types::TypeObject>(), "mark_types_scan");
|
||||
MarkRoot(trc, i.get<types::TypeObject>(), "mark_types_scan");
|
||||
}
|
||||
|
||||
void
|
||||
@ -460,11 +473,11 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
|
||||
{
|
||||
/* Remove dead wrappers from the table. */
|
||||
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
|
||||
JS_ASSERT_IF(IsAboutToBeFinalized(cx, e.front().key.toGCThing()) &&
|
||||
!IsAboutToBeFinalized(cx, e.front().value.toGCThing()),
|
||||
JS_ASSERT_IF(IsAboutToBeFinalized(cx, e.front().key) &&
|
||||
!IsAboutToBeFinalized(cx, e.front().value),
|
||||
e.front().key.isString());
|
||||
if (IsAboutToBeFinalized(cx, e.front().key.toGCThing()) ||
|
||||
IsAboutToBeFinalized(cx, e.front().value.toGCThing())) {
|
||||
if (IsAboutToBeFinalized(cx, e.front().key) ||
|
||||
IsAboutToBeFinalized(cx, e.front().value)) {
|
||||
e.removeFront();
|
||||
}
|
||||
}
|
||||
@ -584,7 +597,7 @@ JSCompartment::purge(JSContext *cx)
|
||||
JSScript *script = *listHeadp;
|
||||
JS_ASSERT(GetGCThingTraceKind(script) == JSTRACE_SCRIPT);
|
||||
*listHeadp = NULL;
|
||||
listHeadp = &script->u.evalHashLink;
|
||||
listHeadp = &script->evalHashLink();
|
||||
}
|
||||
}
|
||||
|
||||
@ -827,7 +840,7 @@ JSCompartment::markTrapClosuresIteratively(JSTracer *trc)
|
||||
// Put off marking trap state until we know the script is live.
|
||||
if (site->trapHandler && !IsAboutToBeFinalized(cx, site->script)) {
|
||||
if (site->trapClosure.isMarkable() &&
|
||||
IsAboutToBeFinalized(cx, site->trapClosure.toGCThing()))
|
||||
IsAboutToBeFinalized(cx, site->trapClosure))
|
||||
{
|
||||
markedAny = true;
|
||||
}
|
||||
@ -858,3 +871,10 @@ JSCompartment::sweepBreakpoints(JSContext *cx)
|
||||
site->clearTrap(cx, &e);
|
||||
}
|
||||
}
|
||||
|
||||
GCMarker *
|
||||
JSCompartment::createBarrierTracer()
|
||||
{
|
||||
JS_ASSERT(!gcIncrementalTracer);
|
||||
return NULL;
|
||||
}
|
||||
|
@ -397,6 +397,20 @@ struct JS_FRIEND_API(JSCompartment) {
|
||||
|
||||
js::gc::ArenaLists arenas;
|
||||
|
||||
bool needsBarrier_;
|
||||
js::GCMarker *gcIncrementalTracer;
|
||||
|
||||
bool needsBarrier() {
|
||||
return needsBarrier_;
|
||||
}
|
||||
|
||||
js::GCMarker *barrierTracer() {
|
||||
JS_ASSERT(needsBarrier_);
|
||||
if (gcIncrementalTracer)
|
||||
return gcIncrementalTracer;
|
||||
return createBarrierTracer();
|
||||
}
|
||||
|
||||
uint32 gcBytes;
|
||||
uint32 gcTriggerBytes;
|
||||
size_t gcLastBytes;
|
||||
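needsBarrier_ and barrierTracer() are what the new write barriers consult: with snapshot-at-the-beginning marking, the value about to be overwritten must be pushed to the incremental marker so the object graph as it stood when marking began stays reachable. A minimal sketch of that check, not the exact code in gc/Barrier.h:

static inline void
PreWriteBarrierSketch(JSCompartment *comp, JSObject *old)
{
    /* Only pay for the barrier while an incremental GC is in progress. */
    if (comp->needsBarrier() && old)
        js::gc::MarkObjectUnbarriered(comp->barrierTracer(), old, "pre-write barrier");
}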
@ -474,16 +488,19 @@ struct JS_FRIEND_API(JSCompartment) {
|
||||
jsrefcount liveDictModeNodes;
|
||||
#endif
|
||||
|
||||
typedef js::ReadBarriered<js::EmptyShape> BarrieredEmptyShape;
|
||||
typedef js::ReadBarriered<const js::Shape> BarrieredShape;
|
||||
|
||||
/*
|
||||
* Runtime-shared empty scopes for well-known built-in objects that lack
|
||||
* class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
|
||||
*/
|
||||
js::EmptyShape *emptyArgumentsShape;
|
||||
js::EmptyShape *emptyBlockShape;
|
||||
js::EmptyShape *emptyCallShape;
|
||||
js::EmptyShape *emptyDeclEnvShape;
|
||||
js::EmptyShape *emptyEnumeratorShape;
|
||||
js::EmptyShape *emptyWithShape;
|
||||
BarrieredEmptyShape emptyArgumentsShape;
|
||||
BarrieredEmptyShape emptyBlockShape;
|
||||
BarrieredEmptyShape emptyCallShape;
|
||||
BarrieredEmptyShape emptyDeclEnvShape;
|
||||
BarrieredEmptyShape emptyEnumeratorShape;
|
||||
BarrieredEmptyShape emptyWithShape;
|
||||
|
||||
typedef js::HashSet<js::EmptyShape *,
|
||||
js::DefaultHasher<js::EmptyShape *>,
|
||||
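ReadBarriered is the read-side counterpart used for these shared shapes: things the mutator can reach only by reading a field, which incremental marking might otherwise miss. The idea, sketched for a single object pointer (the real class is a template in gc/Barrier.h and presumably locates the compartment itself):

class ReadBarrieredObjectSketch
{
    JSObject *value;

  public:
    JSObject *get(JSCompartment *comp) const {
        /* Mark on read while an incremental GC is active, then hand out the pointer. */
        if (value && comp->needsBarrier())
            js::gc::MarkObjectUnbarriered(comp->barrierTracer(), value, "read barrier");
        return value;
    }
};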
@ -500,8 +517,8 @@ struct JS_FRIEND_API(JSCompartment) {
|
||||
* dictionary mode). But because all the initial properties are
|
||||
* non-configurable, they will always map to fixed slots.
|
||||
*/
|
||||
const js::Shape *initialRegExpShape;
|
||||
const js::Shape *initialStringShape;
|
||||
BarrieredShape initialRegExpShape;
|
||||
BarrieredShape initialStringShape;
|
||||
|
||||
private:
|
||||
enum { DebugFromC = 1, DebugFromJS = 2 };
|
||||
@ -526,6 +543,7 @@ struct JS_FRIEND_API(JSCompartment) {
|
||||
|
||||
bool wrap(JSContext *cx, js::Value *vp);
|
||||
bool wrap(JSContext *cx, JSString **strp);
|
||||
bool wrap(JSContext *cx, js::HeapPtrString *strp);
|
||||
bool wrap(JSContext *cx, JSObject **objp);
|
||||
bool wrapId(JSContext *cx, jsid *idp);
|
||||
bool wrap(JSContext *cx, js::PropertyOp *op);
|
||||
@ -624,6 +642,8 @@ struct JS_FRIEND_API(JSCompartment) {
|
||||
private:
|
||||
void sweepBreakpoints(JSContext *cx);
|
||||
|
||||
js::GCMarker *createBarrierTracer();
|
||||
|
||||
public:
|
||||
js::WatchpointMap *watchpointMap;
|
||||
};
|
||||
|
@ -69,8 +69,10 @@
|
||||
|
||||
#include "jsinferinlines.h"
|
||||
#include "jsobjinlines.h"
|
||||
#include "jsstrinlines.h"
|
||||
|
||||
#include "vm/Stack-inl.h"
|
||||
#include "vm/String-inl.h"
|
||||
|
||||
using namespace mozilla;
|
||||
using namespace js;
|
||||
@ -113,7 +115,7 @@ Class js::ErrorClass = {
|
||||
};
|
||||
|
||||
typedef struct JSStackTraceElem {
|
||||
JSString *funName;
|
||||
js::HeapPtrString funName;
|
||||
size_t argc;
|
||||
const char *filename;
|
||||
uintN ulineno;
|
||||
@ -122,8 +124,8 @@ typedef struct JSStackTraceElem {
|
||||
typedef struct JSExnPrivate {
|
||||
/* A copy of the JSErrorReport originally generated. */
|
||||
JSErrorReport *errorReport;
|
||||
JSString *message;
|
||||
JSString *filename;
|
||||
js::HeapPtrString message;
|
||||
js::HeapPtrString filename;
|
||||
uintN lineno;
|
||||
size_t stackDepth;
|
||||
intN exnType;
|
||||
@ -328,12 +330,12 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
|
||||
return false;
|
||||
JSStackTraceElem &frame = frames.back();
|
||||
if (fp->isNonEvalFunctionFrame()) {
|
||||
frame.funName = fp->fun()->atom ? fp->fun()->atom : cx->runtime->emptyString;
|
||||
frame.funName.init(fp->fun()->atom ? fp->fun()->atom : cx->runtime->emptyString);
|
||||
frame.argc = fp->numActualArgs();
|
||||
if (!fp->forEachCanonicalActualArg(AppendArg(values)))
|
||||
return false;
|
||||
} else {
|
||||
frame.funName = NULL;
|
||||
frame.funName.init(NULL);
|
||||
frame.argc = 0;
|
||||
}
|
||||
if (fp->isScriptFrame()) {
|
||||
@ -357,6 +359,9 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
|
||||
if (!priv)
|
||||
return false;
|
||||
|
||||
/* Initialize to zero so that write barriers don't witness undefined values. */
|
||||
memset(priv, 0, nbytes);
|
||||
|
||||
if (report) {
|
||||
/*
|
||||
* Construct a new copy of the error report struct. We can't use the
|
||||
@ -373,8 +378,8 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
|
||||
priv->errorReport = NULL;
|
||||
}
|
||||
|
||||
priv->message = message;
|
||||
priv->filename = filename;
|
||||
priv->message.init(message);
|
||||
priv->filename.init(filename);
|
||||
priv->lineno = lineno;
|
||||
priv->stackDepth = frames.length();
|
||||
priv->exnType = exnType;
|
||||
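The memset plus init() pairing above is the pattern for freshly allocated barriered fields: the pre-write barrier inspects the old contents, so the storage must be zeroed before the first real store, and init() is assumed to store without consulting the barrier at all. Roughly:

/* Illustration only; HeapPtrString's real methods live in gc/Barrier.h. */
static void
InitVersusAssignSketch(js::HeapPtrString &field, JSString *first, JSString *next)
{
    field.init(first);   /* first store into zeroed memory: no pre-barrier */
    field = next;        /* later stores: pre-barrier the old string, then store */
}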
@ -422,8 +427,9 @@ exn_trace(JSTracer *trc, JSObject *obj)
|
||||
}
|
||||
vp = GetStackTraceValueBuffer(priv);
|
||||
for (i = 0; i != vcount; ++i, ++vp) {
|
||||
/* This value is read-only, so it's okay for it to be Unbarriered. */
|
||||
v = *vp;
|
||||
JS_CALL_VALUE_TRACER(trc, v, "stack trace argument");
|
||||
MarkValueUnbarriered(trc, v, "stack trace argument");
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -494,8 +500,6 @@ exn_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags,
|
||||
if (!stack)
|
||||
return false;
|
||||
|
||||
/* Allow to GC all things that were used to build stack trace. */
|
||||
priv->stackDepth = 0;
|
||||
prop = js_stack_str;
|
||||
v = STRING_TO_JSVAL(stack);
|
||||
attrs = JSPROP_ENUMERATE;
|
||||
@ -1342,11 +1346,11 @@ js_CopyErrorObject(JSContext *cx, JSObject *errobj, JSObject *scope)
|
||||
} else {
|
||||
copy->errorReport = NULL;
|
||||
}
|
||||
copy->message = priv->message;
|
||||
copy->message.init(priv->message);
|
||||
if (!cx->compartment->wrap(cx, ©->message))
|
||||
return NULL;
|
||||
JS::Anchor<JSString *> messageAnchor(copy->message);
|
||||
copy->filename = priv->filename;
|
||||
copy->filename.init(priv->filename);
|
||||
if (!cx->compartment->wrap(cx, ©->filename))
|
||||
return NULL;
|
||||
JS::Anchor<JSString *> filenameAnchor(copy->filename);
|
||||
|
@ -151,32 +151,35 @@ ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee)
|
||||
cx->malloc_(offsetof(ArgumentsData, slots) + argc * sizeof(Value));
|
||||
if (!data)
|
||||
return NULL;
|
||||
SetValueRangeToUndefined(data->slots, argc);
|
||||
|
||||
data->callee.init(ObjectValue(callee));
|
||||
InitValueRange(data->slots, argc, false);
|
||||
|
||||
/* Can't fail from here on, so initialize everything in argsobj. */
|
||||
obj->init(cx, callee.getFunctionPrivate()->inStrictMode()
|
||||
? &StrictArgumentsObjectClass
|
||||
: &NormalArgumentsObjectClass,
|
||||
type, proto->getParent(), NULL, false);
|
||||
obj->setMap(emptyArgumentsShape);
|
||||
obj->initMap(emptyArgumentsShape);
|
||||
|
||||
ArgumentsObject *argsobj = obj->asArguments();
|
||||
|
||||
JS_ASSERT(UINT32_MAX > (uint64(argc) << PACKED_BITS_COUNT));
|
||||
argsobj->setInitialLength(argc);
|
||||
|
||||
argsobj->setCalleeAndData(callee, data);
|
||||
argsobj->initInitialLength(argc);
|
||||
argsobj->initData(data);
|
||||
|
||||
return argsobj;
|
||||
}
|
||||
|
||||
struct STATIC_SKIP_INFERENCE PutArg
|
||||
{
|
||||
PutArg(Value *dst) : dst(dst) {}
|
||||
Value *dst;
|
||||
PutArg(JSCompartment *comp, HeapValue *dst) : dst(dst), compartment(comp) {}
|
||||
HeapValue *dst;
|
||||
JSCompartment *compartment;
|
||||
bool operator()(uintN, Value *src) {
|
||||
JS_ASSERT(dst->isMagic(JS_ARGS_HOLE) || dst->isUndefined());
|
||||
if (!dst->isMagic(JS_ARGS_HOLE))
|
||||
*dst = *src;
|
||||
dst->set(compartment, *src);
|
||||
++dst;
|
||||
return true;
|
||||
}
|
||||
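PutArg now carries the compartment so each store can run its barrier without re-deriving it per value. What dst->set(compartment, *src) is assumed to amount to, written against the raw Value that a HeapValue wraps:

static inline void
HeapValueSetSketch(JSCompartment *comp, js::Value *storage, const js::Value &v)
{
    /* storage stands in for the Value held inside a HeapValue. */
    if (comp->needsBarrier() && storage->isMarkable())
        js::gc::MarkValueUnbarriered(comp->barrierTracer(), *storage, "pre-barrier");
    *storage = v;
}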
@ -220,7 +223,7 @@ js_GetArgsObject(JSContext *cx, StackFrame *fp)
|
||||
* retrieve up-to-date parameter values.
|
||||
*/
|
||||
if (argsobj->isStrictArguments())
|
||||
fp->forEachCanonicalActualArg(PutArg(argsobj->data()->slots));
|
||||
fp->forEachCanonicalActualArg(PutArg(cx->compartment, argsobj->data()->slots));
|
||||
else
|
||||
argsobj->setStackFrame(fp);
|
||||
|
||||
@ -234,7 +237,8 @@ js_PutArgsObject(StackFrame *fp)
|
||||
ArgumentsObject &argsobj = fp->argsObj();
|
||||
if (argsobj.isNormalArguments()) {
|
||||
JS_ASSERT(argsobj.maybeStackFrame() == fp);
|
||||
fp->forEachCanonicalActualArg(PutArg(argsobj.data()->slots));
|
||||
JSCompartment *comp = fp->scopeChain().compartment();
|
||||
fp->forEachCanonicalActualArg(PutArg(comp, argsobj.data()->slots));
|
||||
argsobj.setStackFrame(NULL);
|
||||
} else {
|
||||
JS_ASSERT(!argsobj.maybeStackFrame());
|
||||
@ -282,10 +286,11 @@ js_PutArgumentsOnTrace(JSContext *cx, JSObject *obj, Value *argv)
|
||||
* need to worry about actual vs. formal arguments.
|
||||
*/
|
||||
Value *srcend = argv + argsobj->initialLength();
|
||||
Value *dst = argsobj->data()->slots;
|
||||
HeapValue *dst = argsobj->data()->slots;
|
||||
JSCompartment *comp = cx->compartment;
|
||||
for (Value *src = argv; src < srcend; ++src, ++dst) {
|
||||
if (!dst->isMagic(JS_ARGS_HOLE))
|
||||
*dst = *src;
|
||||
dst->set(comp, *src);
|
||||
}
|
||||
|
||||
argsobj->clearOnTrace();
|
||||
@ -601,10 +606,8 @@ MaybeMarkGenerator(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
#if JS_HAS_GENERATORS
|
||||
StackFrame *fp = (StackFrame *) obj->getPrivate();
|
||||
if (fp && fp->isFloatingGenerator()) {
|
||||
JSObject *genobj = js_FloatingFrameToGenerator(fp)->obj;
|
||||
MarkObject(trc, *genobj, "generator object");
|
||||
}
|
||||
if (fp && fp->isFloatingGenerator())
|
||||
MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -618,8 +621,7 @@ args_trace(JSTracer *trc, JSObject *obj)
|
||||
}
|
||||
|
||||
ArgumentsData *data = argsobj->data();
|
||||
if (data->callee.isObject())
|
||||
MarkObject(trc, data->callee.toObject(), js_callee_str);
|
||||
MarkValue(trc, data->callee, js_callee_str);
|
||||
MarkValueRange(trc, argsobj->initialLength(), data->slots, js_arguments_str);
|
||||
|
||||
MaybeMarkGenerator(trc, argsobj);
|
||||
@ -707,7 +709,7 @@ NewDeclEnvObject(JSContext *cx, StackFrame *fp)
|
||||
if (!emptyDeclEnvShape)
|
||||
return NULL;
|
||||
envobj->init(cx, &DeclEnvClass, &emptyTypeObject, &fp->scopeChain(), fp, false);
|
||||
envobj->setMap(emptyDeclEnvShape);
|
||||
envobj->initMap(emptyDeclEnvShape);
|
||||
|
||||
return envobj;
|
||||
}
|
||||
@ -786,7 +788,7 @@ js_PutCallObject(StackFrame *fp)
|
||||
/* Get the arguments object to snapshot fp's actual argument values. */
|
||||
if (fp->hasArgsObj()) {
|
||||
if (!fp->hasOverriddenArgs())
|
||||
callobj.setArguments(ObjectValue(fp->argsObj()));
|
||||
callobj.initArguments(ObjectValue(fp->argsObj()));
|
||||
js_PutArgsObject(fp);
|
||||
}
|
||||
|
||||
@ -823,18 +825,21 @@ js_PutCallObject(StackFrame *fp)
|
||||
} else {
|
||||
/*
|
||||
* For each arg & var that is closed over, copy it from the stack
|
||||
* into the call object.
|
||||
* into the call object. We use initArg/VarUnchecked because,
|
||||
* when you call a getter on a call object, js_NativeGetInline
|
||||
* caches the return value in the slot, so we can't assert that
|
||||
* it's undefined.
|
||||
*/
|
||||
uint32 nclosed = script->nClosedArgs;
|
||||
for (uint32 i = 0; i < nclosed; i++) {
|
||||
uint32 e = script->getClosedArg(i);
|
||||
callobj.setArg(e, fp->formalArg(e));
|
||||
callobj.initArgUnchecked(e, fp->formalArg(e));
|
||||
}
|
||||
|
||||
nclosed = script->nClosedVars;
|
||||
for (uint32 i = 0; i < nclosed; i++) {
|
||||
uint32 e = script->getClosedVar(i);
|
||||
callobj.setVar(e, fp->slots()[e]);
|
||||
callobj.initVarUnchecked(e, fp->slots()[e]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1592,19 +1597,24 @@ fun_trace(JSTracer *trc, JSObject *obj)
|
||||
return;
|
||||
|
||||
if (fun != obj) {
|
||||
/* obj is a cloned function object, trace the clone-parent, fun. */
|
||||
MarkObject(trc, *fun, "private");
|
||||
/*
|
||||
* obj is a cloned function object, trace the clone-parent, fun.
|
||||
* This is safe to leave Unbarriered for incremental GC because any
|
||||
* change to fun will trigger a setPrivate barrier. But we'll need to
|
||||
* fix this for generational GC.
|
||||
*/
|
||||
MarkObjectUnbarriered(trc, fun, "private");
|
||||
|
||||
/* The function could be a flat closure with upvar copies in the clone. */
|
||||
if (fun->isFlatClosure() && fun->script()->bindings.hasUpvars()) {
|
||||
MarkValueRange(trc, fun->script()->bindings.countUpvars(),
|
||||
obj->getFlatClosureUpvars(), "upvars");
|
||||
obj->getFlatClosureData()->upvars, "upvars");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (fun->atom)
|
||||
MarkString(trc, fun->atom, "atom");
|
||||
MarkAtom(trc, fun->atom, "atom");
|
||||
|
||||
if (fun->isInterpreted() && fun->script())
|
||||
MarkScript(trc, fun->script(), "script");
|
||||
@ -1849,7 +1859,7 @@ JSObject::initBoundFunction(JSContext *cx, const Value &thisArg,
|
||||
return false;
|
||||
|
||||
JS_ASSERT(numSlots() >= argslen + FUN_CLASS_RESERVED_SLOTS);
|
||||
copySlotRange(FUN_CLASS_RESERVED_SLOTS, args, argslen);
|
||||
copySlotRange(FUN_CLASS_RESERVED_SLOTS, args, argslen, false);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@ -2278,7 +2288,7 @@ js_NewFunction(JSContext *cx, JSObject *funobj, Native native, uintN nargs,
|
||||
JS_ASSERT(!native);
|
||||
JS_ASSERT(nargs == 0);
|
||||
fun->u.i.skipmin = 0;
|
||||
fun->u.i.script_ = NULL;
|
||||
fun->script().init(NULL);
|
||||
} else {
|
||||
fun->u.n.clasp = NULL;
|
||||
if (flags & JSFUN_TRCINFO) {
|
||||
@ -2328,7 +2338,7 @@ js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
|
||||
* definitions or read barriers, so will not get here.
|
||||
*/
|
||||
if (fun->getProto() == proto && !fun->hasSingletonType())
|
||||
clone->setType(fun->type());
|
||||
clone->initType(fun->type());
|
||||
|
||||
clone->setPrivate(fun);
|
||||
} else {
|
||||
@ -2352,11 +2362,11 @@ js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
|
||||
JS_ASSERT(script->compartment() == fun->compartment());
|
||||
JS_ASSERT(script->compartment() != cx->compartment);
|
||||
|
||||
cfun->u.i.script_ = NULL;
|
||||
cfun->script().init(NULL);
|
||||
JSScript *cscript = js_CloneScript(cx, script);
|
||||
if (!cscript)
|
||||
return NULL;
|
||||
cscript->u.globalObject = cfun->getGlobal();
|
||||
cscript->globalObject = cfun->getGlobal();
|
||||
cfun->setScript(cscript);
|
||||
if (!cscript->typeSetFunction(cx, cfun))
|
||||
return NULL;
|
||||
@ -2395,11 +2405,11 @@ js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain)
|
||||
if (nslots == 0)
|
||||
return closure;
|
||||
|
||||
Value *upvars = (Value *) cx->malloc_(nslots * sizeof(Value));
|
||||
if (!upvars)
|
||||
FlatClosureData *data = (FlatClosureData *) cx->malloc_(nslots * sizeof(HeapValue));
|
||||
if (!data)
|
||||
return NULL;
|
||||
|
||||
closure->setFlatClosureUpvars(upvars);
|
||||
closure->setFlatClosureData(data);
|
||||
return closure;
|
||||
}
|
||||
|
||||
@ -2425,12 +2435,12 @@ js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen)
|
||||
if (!closure || !fun->script()->bindings.hasUpvars())
|
||||
return closure;
|
||||
|
||||
Value *upvars = closure->getFlatClosureUpvars();
|
||||
FlatClosureData *data = closure->getFlatClosureData();
|
||||
uintN level = fun->script()->staticLevel;
|
||||
JSUpvarArray *uva = fun->script()->upvars();
|
||||
|
||||
for (uint32 i = 0, n = uva->length; i < n; i++)
|
||||
upvars[i] = GetUpvar(cx, level, uva->vector[i]);
|
||||
data->upvars[i].init(GetUpvar(cx, level, uva->vector[i]));
|
||||
|
||||
return closure;
|
||||
}
|
||||
|
@ -50,6 +50,8 @@
|
||||
#include "jsstr.h"
|
||||
#include "jsopcode.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
|
||||
/*
|
||||
* The high two bits of JSFunction.flags encode whether the function is native
|
||||
* or interpreted, and if interpreted, what kind of optimized closure form (if
|
||||
@ -195,18 +197,16 @@ struct JSFunction : public JSObject_Slots2
|
||||
|
||||
inline void setMethodAtom(JSAtom *atom);
|
||||
|
||||
JSScript *script() const {
|
||||
js::HeapPtrScript &script() const {
|
||||
JS_ASSERT(isInterpreted());
|
||||
return u.i.script_;
|
||||
return *(js::HeapPtrScript *)&u.i.script_;
|
||||
}
|
||||
|
||||
void setScript(JSScript *script) {
|
||||
JS_ASSERT(isInterpreted());
|
||||
u.i.script_ = script;
|
||||
}
|
||||
inline void setScript(JSScript *script_);
|
||||
inline void initScript(JSScript *script_);
|
||||
|
||||
JSScript *maybeScript() const {
|
||||
return isInterpreted() ? script() : NULL;
|
||||
return isInterpreted() ? script().get() : NULL;
|
||||
}
|
||||
|
||||
JSNative native() const {
|
||||
@ -269,6 +269,10 @@ JSObject::getFunctionPrivate() const
|
||||
|
||||
namespace js {
|
||||
|
||||
struct FlatClosureData {
|
||||
HeapValue upvars[1];
|
||||
};
|
||||
|
||||
static JS_ALWAYS_INLINE bool
|
||||
IsFunctionObject(const js::Value &v)
|
||||
{
|
||||
|
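FlatClosureData wraps the upvar array in HeapValues, so allocation-time fills use init() while later updates to a live closure must use barriered stores. A hypothetical helper, for illustration:

static inline void
SetFlatClosureUpvarSketch(JSCompartment *comp, js::FlatClosureData *data, uint32 i, const js::Value &v)
{
    /* set() pre-barriers the old upvar value before overwriting it. */
    data->upvars[i].set(comp, v);
}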
@ -97,4 +97,18 @@ CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
|
||||
return js_CloneFunctionObject(cx, fun, parent, proto);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSFunction::setScript(JSScript *script_)
|
||||
{
|
||||
JS_ASSERT(isInterpreted());
|
||||
script() = script_;
|
||||
}
|
||||
|
||||
inline void
|
||||
JSFunction::initScript(JSScript *script_)
|
||||
{
|
||||
JS_ASSERT(isInterpreted());
|
||||
script().init(script_);
|
||||
}
|
||||
|
||||
#endif /* jsfuninlines_h___ */
|
||||
|
100
js/src/jsgc.cpp
@ -788,7 +788,7 @@ PickChunk(JSCompartment *comp)
|
||||
}
|
||||
|
||||
JS_FRIEND_API(bool)
|
||||
IsAboutToBeFinalized(JSContext *cx, const void *thing)
|
||||
IsAboutToBeFinalized(JSContext *cx, const Cell *thing)
|
||||
{
|
||||
JS_ASSERT(cx);
|
||||
|
||||
@ -801,6 +801,13 @@ IsAboutToBeFinalized(JSContext *cx, const void *thing)
|
||||
return !reinterpret_cast<const Cell *>(thing)->isMarked();
|
||||
}
|
||||
|
||||
bool
|
||||
IsAboutToBeFinalized(JSContext *cx, const Value &v)
|
||||
{
|
||||
JS_ASSERT(v.isMarkable());
|
||||
return IsAboutToBeFinalized(cx, (Cell *)v.toGCThing());
|
||||
}
|
||||
|
||||
JS_FRIEND_API(bool)
|
||||
js_GCThingIsMarked(void *thing, uintN color = BLACK)
|
||||
{
|
||||
@ -918,7 +925,7 @@ MarkIfGCThingWord(JSTracer *trc, jsuword w)
|
||||
|
||||
Chunk *chunk = Chunk::fromAddress(addr);
|
||||
|
||||
if (!trc->context->runtime->gcChunkSet.has(chunk))
|
||||
if (!trc->runtime->gcChunkSet.has(chunk))
|
||||
return CGCT_NOTCHUNK;
|
||||
|
||||
/*
|
||||
@ -939,7 +946,7 @@ MarkIfGCThingWord(JSTracer *trc, jsuword w)
|
||||
if (!aheader->allocated())
|
||||
return CGCT_FREEARENA;
|
||||
|
||||
JSCompartment *curComp = trc->context->runtime->gcCurrentCompartment;
|
||||
JSCompartment *curComp = trc->runtime->gcCurrentCompartment;
|
||||
if (curComp && curComp != aheader->compartment)
|
||||
return CGCT_OTHERCOMPARTMENT;
|
||||
|
||||
@ -1045,7 +1052,7 @@ MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
|
||||
rt->gcCheckCompartment = NULL;
|
||||
}
|
||||
~AutoSkipChecking() { runtime->gcCheckCompartment = savedCompartment; }
|
||||
} as(trc->context->runtime);
|
||||
} as(trc->runtime);
|
||||
|
||||
const jsuword *begin = beginv->payloadWord();
|
||||
const jsuword *end = endv->payloadWord();
|
||||
@ -1066,7 +1073,7 @@ void
|
||||
MarkConservativeStackRoots(JSTracer *trc)
|
||||
{
|
||||
#ifdef JS_THREADSAFE
|
||||
for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
|
||||
for (JSThread::Map::Range r = trc->runtime->threads.all(); !r.empty(); r.popFront()) {
|
||||
JSThread *thread = r.front().value;
|
||||
ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
|
||||
if (ctd->hasStackToScan()) {
|
||||
@ -1078,7 +1085,7 @@ MarkConservativeStackRoots(JSTracer *trc)
|
||||
}
|
||||
}
|
||||
#else
|
||||
MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
|
||||
MarkThreadDataConservatively(trc, &trc->runtime->threadData);
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -1822,7 +1829,7 @@ gc_root_traversal(JSTracer *trc, const RootEntry &entry)
|
||||
ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
|
||||
}
|
||||
|
||||
if (ptr && !trc->context->runtime->gcCurrentCompartment) {
|
||||
if (ptr && !trc->runtime->gcCurrentCompartment) {
|
||||
/*
|
||||
* Use conservative machinery to find if ptr is a valid GC thing.
|
||||
* We only do this during global GCs, to preserve the invariant
|
||||
@ -1841,44 +1848,45 @@ gc_root_traversal(JSTracer *trc, const RootEntry &entry)
|
||||
JS_ASSERT(test == CGCT_VALID);
|
||||
}
|
||||
#endif
|
||||
JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
|
||||
const char *name = entry.value.name ? entry.value.name : "root";
|
||||
if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
|
||||
MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
|
||||
MarkRootGCThing(trc, *reinterpret_cast<void **>(entry.key), name);
|
||||
else
|
||||
MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
|
||||
MarkRoot(trc, *reinterpret_cast<Value *>(entry.key), name);
|
||||
}
|
||||
|
||||
static void
|
||||
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
|
||||
{
|
||||
JS_ASSERT(entry.value >= 1);
|
||||
MarkGCThing(trc, entry.key, "locked object");
|
||||
MarkRootGCThing(trc, entry.key, "locked object");
|
||||
}
|
||||
|
||||
void
|
||||
js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
|
||||
{
|
||||
MarkObject(trc, fp->scopeChain(), "scope chain");
|
||||
MarkRoot(trc, &fp->scopeChain(), "scope chain");
|
||||
if (fp->isDummyFrame())
|
||||
return;
|
||||
if (fp->hasArgsObj())
|
||||
MarkObject(trc, fp->argsObj(), "arguments");
|
||||
MarkScript(trc, fp->script(), "script");
|
||||
MarkRoot(trc, &fp->argsObj(), "arguments");
|
||||
MarkRoot(trc, fp->script(), "script");
|
||||
fp->script()->compartment()->active = true;
|
||||
MarkValue(trc, fp->returnValue(), "rval");
|
||||
MarkRoot(trc, fp->returnValue(), "rval");
|
||||
}
|
||||
|
||||
void
|
||||
AutoIdArray::trace(JSTracer *trc)
|
||||
{
|
||||
JS_ASSERT(tag == IDARRAY);
|
||||
gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
|
||||
gc::MarkIdRange(trc, idArray->vector, idArray->vector + idArray->length,
|
||||
"JSAutoIdArray.idArray");
|
||||
}
|
||||
|
||||
void
|
||||
AutoEnumStateRooter::trace(JSTracer *trc)
|
||||
{
|
||||
gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
|
||||
gc::MarkRoot(trc, obj, "js::AutoEnumStateRooter.obj");
|
||||
}
|
||||
|
||||
inline void
|
||||
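The renames in these trace methods follow the patch's split: stack rooters keep plain Values and pointers and are traced through the unbarriered MarkRoot/MarkRootRange entry points, while heap-owned edges are HeapValue/HeapPtr fields traced with the barriered Mark* overloads. Sketched side by side (both structs are hypothetical and the js::gc:: qualification is assumed):

struct HeapOwnerSketch {
    js::HeapValue cached;                           /* heap edge: barriered type */
    void trace(JSTracer *trc) {
        js::gc::MarkValue(trc, cached, "cached");   /* barriered overload */
    }
};

struct StackRooterSketch {
    js::Value v;                                    /* stack root: plain Value */
    void trace(JSTracer *trc) {
        js::gc::MarkRoot(trc, v, "stack-root");     /* root marking, no barrier needed */
    }
};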
@ -1886,7 +1894,7 @@ AutoGCRooter::trace(JSTracer *trc)
|
||||
{
|
||||
switch (tag) {
|
||||
case JSVAL:
|
||||
MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
|
||||
MarkRoot(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
|
||||
return;
|
||||
|
||||
case PARSER:
|
||||
@ -1899,7 +1907,7 @@ AutoGCRooter::trace(JSTracer *trc)
|
||||
|
||||
case IDARRAY: {
|
||||
JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
|
||||
MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
|
||||
MarkIdRange(trc, ida->vector, ida->vector + ida->length, "js::AutoIdArray.idArray");
|
||||
return;
|
||||
}
|
||||
|
||||
@ -1908,10 +1916,10 @@ AutoGCRooter::trace(JSTracer *trc)
|
||||
static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
|
||||
for (size_t i = 0, len = descriptors.length(); i < len; i++) {
|
||||
PropDesc &desc = descriptors[i];
|
||||
MarkValue(trc, desc.pd, "PropDesc::pd");
|
||||
MarkValue(trc, desc.value, "PropDesc::value");
|
||||
MarkValue(trc, desc.get, "PropDesc::get");
|
||||
MarkValue(trc, desc.set, "PropDesc::set");
|
||||
MarkRoot(trc, desc.pd, "PropDesc::pd");
|
||||
MarkRoot(trc, desc.value, "PropDesc::value");
|
||||
MarkRoot(trc, desc.get, "PropDesc::get");
|
||||
MarkRoot(trc, desc.set, "PropDesc::set");
|
||||
}
|
||||
return;
|
||||
}
|
||||
@ -1919,19 +1927,18 @@ AutoGCRooter::trace(JSTracer *trc)
|
||||
case DESCRIPTOR : {
|
||||
PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
|
||||
if (desc.obj)
|
||||
MarkObject(trc, *desc.obj, "Descriptor::obj");
|
||||
MarkValue(trc, desc.value, "Descriptor::value");
|
||||
MarkRoot(trc, desc.obj, "Descriptor::obj");
|
||||
MarkRoot(trc, desc.value, "Descriptor::value");
|
||||
if ((desc.attrs & JSPROP_GETTER) && desc.getter)
|
||||
MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
|
||||
MarkRoot(trc, CastAsObject(desc.getter), "Descriptor::get");
|
||||
if (desc.attrs & JSPROP_SETTER && desc.setter)
|
||||
MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
|
||||
MarkRoot(trc, CastAsObject(desc.setter), "Descriptor::set");
|
||||
return;
|
||||
}
|
||||
|
||||
case NAMESPACES: {
|
||||
JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
|
||||
MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
|
||||
"JSXMLArray.vector");
|
||||
JSXMLArray<JSObject> &array = static_cast<AutoNamespaceArray *>(this)->array;
|
||||
MarkObjectRange(trc, array.length, array.vector, "JSXMLArray.vector");
|
||||
array.cursors->trace(trc);
|
||||
return;
|
||||
}
|
||||
@ -1942,51 +1949,52 @@ AutoGCRooter::trace(JSTracer *trc)
|
||||
|
||||
case OBJECT:
|
||||
if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
|
||||
MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
|
||||
MarkRoot(trc, obj, "js::AutoObjectRooter.obj");
|
||||
return;
|
||||
|
||||
case ID:
|
||||
MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
|
||||
MarkRoot(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
|
||||
return;
|
||||
|
||||
case VALVECTOR: {
|
||||
AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
|
||||
MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
|
||||
MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
|
||||
return;
|
||||
}
|
||||
|
||||
case STRING:
|
||||
if (JSString *str = static_cast<AutoStringRooter *>(this)->str)
|
||||
MarkString(trc, str, "js::AutoStringRooter.str");
|
||||
MarkRoot(trc, str, "js::AutoStringRooter.str");
|
||||
return;
|
||||
|
||||
case IDVECTOR: {
|
||||
AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
|
||||
MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
|
||||
MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
|
||||
return;
|
||||
}
|
||||
|
||||
case SHAPEVECTOR: {
|
||||
AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
|
||||
MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
|
||||
MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
|
||||
return;
|
||||
}
|
||||
|
||||
case OBJVECTOR: {
|
||||
AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
|
||||
MarkObjectRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
|
||||
MarkRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
|
||||
return;
|
||||
}
|
||||
|
||||
case VALARRAY: {
|
||||
AutoValueArray *array = static_cast<AutoValueArray *>(this);
|
||||
MarkValueRange(trc, array->length(), array->start(), "js::AutoValueArray");
|
||||
MarkRootRange(trc, array->length(), array->start(), "js::AutoValueArray");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
JS_ASSERT(tag >= 0);
|
||||
MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
|
||||
MarkRootRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array,
|
||||
"js::AutoArrayRooter.array");
|
||||
}
|
||||
|
||||
namespace js {
|
||||
@ -1998,9 +2006,9 @@ MarkContext(JSTracer *trc, JSContext *acx)
|
||||
|
||||
/* Mark other roots-by-definition in acx. */
|
||||
if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
|
||||
MarkObject(trc, *acx->globalObject, "global object");
|
||||
MarkRoot(trc, acx->globalObject, "global object");
|
||||
if (acx->isExceptionPending())
|
||||
MarkValue(trc, acx->getPendingException(), "exception");
|
||||
MarkRoot(trc, acx->getPendingException(), "exception");
|
||||
|
||||
for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
|
||||
gcr->trace(trc);
|
||||
@ -2008,13 +2016,13 @@ MarkContext(JSTracer *trc, JSContext *acx)
|
||||
if (acx->sharpObjectMap.depth > 0)
|
||||
js_TraceSharpMap(trc, &acx->sharpObjectMap);
|
||||
|
||||
MarkValue(trc, acx->iterValue, "iterValue");
|
||||
MarkRoot(trc, acx->iterValue, "iterValue");
|
||||
}
|
||||
|
||||
JS_REQUIRES_STACK void
|
||||
MarkRuntime(JSTracer *trc)
|
||||
{
|
||||
JSRuntime *rt = trc->context->runtime;
|
||||
JSRuntime *rt = trc->runtime;
|
||||
|
||||
if (rt->state != JSRTS_LANDING)
|
||||
MarkConservativeStackRoots(trc);
|
||||
@ -2516,7 +2524,7 @@ EndMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
|
||||
gcmarker->drainMarkStack();
|
||||
}
|
||||
|
||||
rt->gcMarkingTracer = NULL;
|
||||
rt->gcIncrementalTracer = NULL;
|
||||
|
||||
rt->gcStats.endPhase(gcstats::PHASE_MARK);
|
||||
|
||||
@ -2667,7 +2675,7 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
|
||||
GCMarker gcmarker(cx);
|
||||
JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
|
||||
JS_ASSERT(gcmarker.getMarkColor() == BLACK);
|
||||
rt->gcMarkingTracer = &gcmarker;
|
||||
rt->gcIncrementalTracer = &gcmarker;
|
||||
|
||||
BeginMarkPhase(cx, &gcmarker, gckind);
|
||||
gcmarker.drainMarkStack();
|
||||
@ -3041,7 +3049,7 @@ TraceRuntime(JSTracer *trc)
|
||||
}
|
||||
}
|
||||
#else
|
||||
AutoCopyFreeListToArenas copy(trc->context->runtime);
|
||||
AutoCopyFreeListToArenas copy(trc->runtime);
|
||||
RecordNativeStackTopForGC(trc->context);
|
||||
#endif
|
||||
|
||||
|
@ -88,7 +88,7 @@ struct Arena;
|
||||
* This must be an upper bound, but we do not need the least upper bound, so
|
||||
* we just exclude non-background objects.
|
||||
*/
|
||||
const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - (FINALIZE_OBJECT_LAST + 1) / 2;
|
||||
const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
|
||||
|
||||
const size_t ArenaShift = 12;
|
||||
const size_t ArenaSize = size_t(1) << ArenaShift;
|
||||
@ -1327,7 +1327,10 @@ extern void
|
||||
js_UnlockGCThingRT(JSRuntime *rt, void *thing);
|
||||
|
||||
extern JS_FRIEND_API(bool)
|
||||
IsAboutToBeFinalized(JSContext *cx, const void *thing);
|
||||
IsAboutToBeFinalized(JSContext *cx, const js::gc::Cell *thing);
|
||||
|
||||
extern bool
|
||||
IsAboutToBeFinalized(JSContext *cx, const js::Value &value);
|
||||
|
||||
extern JS_FRIEND_API(bool)
|
||||
js_GCThingIsMarked(void *thing, uintN color);
|
||||
|
@ -96,7 +96,7 @@ GetGCObjectFixedSlotsKind(size_t numFixedSlots)
|
||||
static inline bool
|
||||
IsBackgroundAllocKind(AllocKind kind)
|
||||
{
|
||||
JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
|
||||
JS_ASSERT(kind < FINALIZE_OBJECT_LIMIT);
|
||||
return kind % 2 == 1;
|
||||
}
|
||||
|
||||
@ -115,7 +115,7 @@ static inline bool
|
||||
TryIncrementAllocKind(AllocKind *kindp)
|
||||
{
|
||||
size_t next = size_t(*kindp) + 2;
|
||||
if (next > size_t(FINALIZE_OBJECT_LAST))
|
||||
if (next >= size_t(FINALIZE_OBJECT_LIMIT))
|
||||
return false;
|
||||
*kindp = AllocKind(next);
|
||||
return true;
|
||||
@ -351,14 +351,17 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
|
||||
js::gc::RunDebugGC(cx);
|
||||
#endif
|
||||
|
||||
void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
|
||||
return static_cast<T *>(t ? t : js::gc::ArenaLists::refillFreeList(cx, kind));
|
||||
JSCompartment *comp = cx->compartment;
|
||||
void *t = comp->arenas.allocateFromFreeList(kind, thingSize);
|
||||
if (!t)
|
||||
t = js::gc::ArenaLists::refillFreeList(cx, kind);
|
||||
return static_cast<T *>(t);
|
||||
}
|
||||
|
||||
inline JSObject *
|
||||
js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
|
||||
{
|
||||
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
|
||||
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind < js::gc::FINALIZE_OBJECT_LIMIT);
|
||||
JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
|
||||
if (obj)
|
||||
obj->earlyInit(js::gc::GetGCKindSlots(kind));
|
||||
@ -388,10 +391,8 @@ inline JSFunction*
|
||||
js_NewGCFunction(JSContext *cx)
|
||||
{
|
||||
JSFunction *fun = NewGCThing<JSFunction>(cx, js::gc::FINALIZE_FUNCTION, sizeof(JSFunction));
|
||||
if (fun) {
|
||||
fun->capacity = JSObject::FUN_CLASS_RESERVED_SLOTS;
|
||||
fun->lastProp = NULL; /* Stops fun from being scanned until initializated. */
|
||||
}
|
||||
if (fun)
|
||||
fun->earlyInit(JSObject::FUN_CLASS_RESERVED_SLOTS);
|
||||
return fun;
|
||||
}
|
||||
|
||||
|
@ -110,12 +110,12 @@ CheckMarkedThing(JSTracer *trc, T *thing)
|
||||
{
|
||||
JS_ASSERT(thing);
|
||||
JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
|
||||
JS_ASSERT_IF(trc->context->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc));
|
||||
JS_ASSERT_IF(trc->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc));
|
||||
|
||||
JS_ASSERT(thing->isAligned());
|
||||
|
||||
JS_ASSERT(thing->compartment());
|
||||
JS_ASSERT(thing->compartment()->rt == trc->context->runtime);
|
||||
JS_ASSERT(thing->compartment()->rt == trc->runtime);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
@ -124,7 +124,7 @@ Mark(JSTracer *trc, T *thing)
|
||||
{
|
||||
CheckMarkedThing(trc, thing);
|
||||
|
||||
JSRuntime *rt = trc->context->runtime;
|
||||
JSRuntime *rt = trc->runtime;
|
||||
|
||||
JS_OPT_ASSERT_IF(rt->gcCheckCompartment,
|
||||
thing->compartment() == rt->gcCheckCompartment ||
|
||||
@ -148,51 +148,60 @@ Mark(JSTracer *trc, T *thing)
|
||||
}
|
||||
|
||||
void
|
||||
MarkString(JSTracer *trc, JSString *str)
|
||||
MarkStringUnbarriered(JSTracer *trc, JSString *str, const char *name)
|
||||
{
|
||||
JS_ASSERT(str);
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
Mark(trc, str);
|
||||
}
|
||||
|
||||
void
|
||||
MarkString(JSTracer *trc, JSString *str, const char *name)
|
||||
MarkString(JSTracer *trc, const MarkablePtr<JSString> &str, const char *name)
|
||||
{
|
||||
JS_ASSERT(str);
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkString(trc, str);
|
||||
MarkStringUnbarriered(trc, str.value, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkObject(JSTracer *trc, JSObject &obj, const char *name)
|
||||
MarkAtom(JSTracer *trc, JSAtom *atom)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(&obj);
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
Mark(trc, &obj);
|
||||
JS_ASSERT(atom);
|
||||
Mark(trc, atom);
|
||||
}
|
||||
|
||||
void
|
||||
MarkCrossCompartmentObject(JSTracer *trc, JSObject &obj, const char *name)
|
||||
MarkAtom(JSTracer *trc, JSAtom *atom, const char *name)
|
||||
{
|
||||
JSRuntime *rt = trc->context->runtime;
|
||||
if (rt->gcCurrentCompartment && rt->gcCurrentCompartment != obj.compartment())
|
||||
return;
|
||||
|
||||
MarkObject(trc, obj, name);
|
||||
MarkStringUnbarriered(trc, atom, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkObjectWithPrinter(JSTracer *trc, JSObject &obj, JSTraceNamePrinter printer,
|
||||
const void *arg, size_t index)
|
||||
MarkObjectUnbarriered(JSTracer *trc, JSObject *obj, const char *name)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(&obj);
|
||||
JS_ASSERT(obj);
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
Mark(trc, obj);
|
||||
}
|
||||
|
||||
void
|
||||
MarkObjectWithPrinterUnbarriered(JSTracer *trc, JSObject *obj, JSTraceNamePrinter printer,
|
||||
const void *arg, size_t index)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(obj);
|
||||
JS_SET_TRACING_DETAILS(trc, printer, arg, index);
|
||||
Mark(trc, &obj);
|
||||
Mark(trc, obj);
|
||||
}
|
||||
|
||||
void
|
||||
MarkScript(JSTracer *trc, JSScript *script, const char *name)
|
||||
MarkObject(JSTracer *trc, const MarkablePtr<JSObject> &obj, const char *name)
|
||||
{
|
||||
MarkObjectUnbarriered(trc, obj.value, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkScriptUnbarriered(JSTracer *trc, JSScript *script, const char *name)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(script);
|
||||
@ -201,7 +210,13 @@ MarkScript(JSTracer *trc, JSScript *script, const char *name)
|
||||
}
|
||||
|
||||
void
|
||||
MarkShape(JSTracer *trc, const Shape *shape, const char *name)
|
||||
MarkScript(JSTracer *trc, const MarkablePtr<JSScript> &script, const char *name)
|
||||
{
|
||||
MarkScriptUnbarriered(trc, script.value, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkShapeUnbarriered(JSTracer *trc, const Shape *shape, const char *name)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(shape);
|
||||
@ -210,7 +225,13 @@ MarkShape(JSTracer *trc, const Shape *shape, const char *name)
|
||||
}
|
||||
|
||||
void
|
||||
MarkTypeObject(JSTracer *trc, types::TypeObject *type, const char *name)
|
||||
MarkShape(JSTracer *trc, const MarkablePtr<const Shape> &shape, const char *name)
|
||||
{
|
||||
MarkShapeUnbarriered(trc, shape.value, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(type);
|
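Each GC-thing type in this file gets the same treatment as the string and object markers above: the old entry point becomes Mark*Unbarriered, and the Mark* name is re-pointed at an overload taking the barriered handle, so call sites still holding raw pointers stop compiling instead of silently bypassing barriers. A caller-side sketch, assuming HeapPtr<JSObject> is accepted by the new MarkObject overload:

struct TypeLikeSketch {
    js::HeapPtr<JSObject> singleton;   /* barriered field, as in the hunk below */
    void trace(JSTracer *trc) {
        if (singleton)
            js::gc::MarkObject(trc, singleton, "type_singleton");
    }
};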
||||
@ -227,28 +248,40 @@ MarkTypeObject(JSTracer *trc, types::TypeObject *type, const char *name)
|
||||
*/
|
||||
if (IS_GC_MARKING_TRACER(trc)) {
|
||||
if (type->singleton)
|
||||
MarkObject(trc, *type->singleton, "type_singleton");
|
||||
MarkObject(trc, type->singleton, "type_singleton");
|
||||
if (type->interpretedFunction)
|
||||
MarkObject(trc, *type->interpretedFunction, "type_function");
|
||||
MarkObject(trc, type->interpretedFunction, "type_function");
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkTypeObject(JSTracer *trc, const MarkablePtr<types::TypeObject> &type, const char *name)
|
||||
{
|
||||
MarkTypeObjectUnbarriered(trc, type.value, name);
|
||||
}
|
||||
|
||||
#if JS_HAS_XML_SUPPORT
|
||||
void
|
||||
MarkXML(JSTracer *trc, JSXML *xml, const char *name)
|
||||
MarkXMLUnbarriered(JSTracer *trc, JSXML *xml, const char *name)
|
||||
{
|
||||
JS_ASSERT(trc);
|
||||
JS_ASSERT(xml);
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
Mark(trc, xml);
|
||||
}
|
||||
|
||||
void
|
||||
MarkXML(JSTracer *trc, const MarkablePtr<JSXML> &xml, const char *name)
|
||||
{
|
||||
MarkXMLUnbarriered(trc, xml.value, name);
|
||||
}
|
||||
#endif
|
||||
|
||||
void
|
||||
PushMarkStack(GCMarker *gcmarker, JSXML *thing)
|
||||
{
|
||||
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
|
||||
JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
|
||||
|
||||
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
|
||||
gcmarker->pushXML(thing);
|
||||
@ -257,8 +290,8 @@ PushMarkStack(GCMarker *gcmarker, JSXML *thing)
|
||||
void
|
||||
PushMarkStack(GCMarker *gcmarker, JSObject *thing)
|
||||
{
|
||||
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
|
||||
JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
|
||||
|
||||
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
|
||||
gcmarker->pushObject(thing);
|
||||
@ -267,8 +300,8 @@ PushMarkStack(GCMarker *gcmarker, JSObject *thing)
|
||||
void
|
||||
PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
|
||||
{
|
||||
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
|
||||
JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
|
||||
|
||||
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
|
||||
gcmarker->pushObject(thing);
|
||||
@ -277,8 +310,8 @@ PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
|
||||
void
|
||||
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
|
||||
{
|
||||
JS_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
|
||||
JS_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
|
||||
|
||||
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
|
||||
gcmarker->pushType(thing);
|
||||
@ -287,8 +320,8 @@ PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
|
||||
void
|
||||
PushMarkStack(GCMarker *gcmarker, JSScript *thing)
|
||||
{
|
||||
JS_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
|
||||
JS_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
|
||||
|
||||
/*
|
||||
* We mark scripts directly rather than pushing on the stack as they can
|
||||
@ -305,8 +338,8 @@ ScanShape(GCMarker *gcmarker, const Shape *shape);
|
||||
void
|
||||
PushMarkStack(GCMarker *gcmarker, const Shape *thing)
|
||||
{
|
||||
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
|
||||
JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
thing->compartment() == gcmarker->runtime->gcCurrentCompartment);
|
||||
|
||||
/* We mark shapes directly rather than pushing on the stack. */
|
||||
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
|
||||
@ -325,7 +358,7 @@ MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name)
|
||||
}
|
||||
|
||||
void
|
||||
MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
|
||||
MarkObjectRange(JSTracer *trc, size_t len, HeapPtr<JSObject> *vec, const char *name)
|
||||
{
|
||||
for (uint32 i = 0; i < len; i++) {
|
||||
if (JSObject *obj = vec[i]) {
|
||||
@ -336,7 +369,7 @@ MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
|
||||
}
|
||||
|
||||
void
|
||||
MarkXMLRange(JSTracer *trc, size_t len, JSXML **vec, const char *name)
|
||||
MarkXMLRange(JSTracer *trc, size_t len, HeapPtr<JSXML> *vec, const char *name)
|
||||
{
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
if (JSXML *xml = vec[i]) {
|
||||
@ -347,7 +380,7 @@ MarkXMLRange(JSTracer *trc, size_t len, JSXML **vec, const char *name)
|
||||
}
|
||||
|
||||
void
|
||||
MarkId(JSTracer *trc, jsid id)
|
||||
MarkIdUnbarriered(JSTracer *trc, jsid id)
|
||||
{
|
||||
if (JSID_IS_STRING(id))
|
||||
Mark(trc, JSID_TO_STRING(id));
|
||||
@ -356,25 +389,41 @@ MarkId(JSTracer *trc, jsid id)
|
||||
}
|
||||
|
||||
void
|
||||
MarkId(JSTracer *trc, jsid id, const char *name)
|
||||
MarkIdUnbarriered(JSTracer *trc, jsid id, const char *name)
|
||||
{
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkId(trc, id);
|
||||
MarkIdUnbarriered(trc, id);
|
||||
}
|
||||
|
||||
void
|
||||
MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name)
|
||||
MarkId(JSTracer *trc, const HeapId &id, const char *name)
|
||||
{
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkIdUnbarriered(trc, id.get(), name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkIdRangeUnbarriered(JSTracer *trc, jsid *beg, jsid *end, const char *name)
|
||||
{
|
||||
for (jsid *idp = beg; idp != end; ++idp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, (idp - beg));
|
||||
MarkId(trc, *idp);
|
||||
MarkIdUnbarriered(trc, *idp);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
|
||||
MarkIdRangeUnbarriered(JSTracer *trc, size_t len, jsid *vec, const char *name)
|
||||
{
|
||||
MarkIdRange(trc, vec, vec + len, name);
|
||||
MarkIdRangeUnbarriered(trc, vec, vec + len, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkIdRange(JSTracer *trc, HeapId *beg, HeapId *end, const char *name)
|
||||
{
|
||||
for (HeapId *idp = beg; idp != end; ++idp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, (idp - beg));
|
||||
MarkIdUnbarriered(trc, *idp);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
@ -387,7 +436,7 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
Mark(trc, reinterpret_cast<JSObject *>(thing));
|
||||
break;
|
||||
case JSTRACE_STRING:
|
||||
MarkString(trc, reinterpret_cast<JSString *>(thing));
|
||||
Mark(trc, reinterpret_cast<JSString *>(thing));
|
||||
break;
|
||||
case JSTRACE_SCRIPT:
|
||||
Mark(trc, static_cast<JSScript *>(thing));
|
||||
@ -396,7 +445,7 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
Mark(trc, reinterpret_cast<Shape *>(thing));
|
||||
break;
|
||||
case JSTRACE_TYPE_OBJECT:
|
||||
MarkTypeObject(trc, reinterpret_cast<types::TypeObject *>(thing), "type_stack");
|
||||
MarkTypeObjectUnbarriered(trc, reinterpret_cast<types::TypeObject *>(thing), "type_stack");
|
||||
break;
|
||||
#if JS_HAS_XML_SUPPORT
|
||||
case JSTRACE_XML:
|
||||
@ -417,18 +466,24 @@ MarkValueRaw(JSTracer *trc, const js::Value &v)
|
||||
}
|
||||
|
||||
void
|
||||
MarkValue(JSTracer *trc, const js::Value &v, const char *name)
|
||||
MarkValueUnbarriered(JSTracer *trc, const js::Value &v, const char *name)
|
||||
{
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkValueRaw(trc, v);
|
||||
}
|
||||
|
||||
void
|
||||
MarkCrossCompartmentValue(JSTracer *trc, const js::Value &v, const char *name)
|
||||
MarkValue(JSTracer *trc, const js::HeapValue &v, const char *name)
|
||||
{
|
||||
MarkValueUnbarriered(trc, v, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkCrossCompartmentValue(JSTracer *trc, const js::HeapValue &v, const char *name)
|
||||
{
|
||||
if (v.isMarkable()) {
|
||||
js::gc::Cell *cell = (js::gc::Cell *)v.toGCThing();
|
||||
JSRuntime *rt = trc->context->runtime;
|
||||
JSRuntime *rt = trc->runtime;
|
||||
if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
|
||||
return;
|
||||
|
||||
@ -437,35 +492,20 @@ MarkCrossCompartmentValue(JSTracer *trc, const js::Value &v, const char *name)
|
||||
}
|
||||
|
||||
void
|
||||
MarkValueRange(JSTracer *trc, const Value *beg, const Value *end, const char *name)
|
||||
MarkValueRange(JSTracer *trc, const HeapValue *beg, const HeapValue *end, const char *name)
|
||||
{
|
||||
for (const Value *vp = beg; vp < end; ++vp) {
|
||||
for (const HeapValue *vp = beg; vp < end; ++vp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, vp - beg);
|
||||
MarkValueRaw(trc, *vp);
|
||||
MarkValueRaw(trc, vp->get());
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkValueRange(JSTracer *trc, size_t len, const Value *vec, const char *name)
|
||||
MarkValueRange(JSTracer *trc, size_t len, const HeapValue *vec, const char *name)
|
||||
{
|
||||
MarkValueRange(trc, vec, vec + len, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkShapeRange(JSTracer *trc, const Shape **beg, const Shape **end, const char *name)
|
||||
{
|
||||
for (const Shape **sp = beg; sp < end; ++sp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, sp - beg);
|
||||
MarkShape(trc, *sp, name);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkShapeRange(JSTracer *trc, size_t len, const Shape **vec, const char *name)
|
||||
{
|
||||
MarkShapeRange(trc, vec, vec + len, name);
|
||||
}
|
||||
|
||||
/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
@ -484,13 +524,6 @@ MarkGCThing(JSTracer *trc, void *thing)
|
||||
MarkKind(trc, thing, GetGCThingTraceKind(thing));
|
||||
}
|
||||
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing, const char *name)
|
||||
{
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkGCThing(trc, thing);
|
||||
}
|
||||
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index)
|
||||
{
|
||||
@ -509,37 +542,104 @@ Mark(JSTracer *trc, void *thing, JSGCTraceKind kind, const char *name)
|
||||
void
|
||||
MarkRoot(JSTracer *trc, JSObject *thing, const char *name)
|
||||
{
|
||||
MarkObject(trc, *thing, name);
|
||||
MarkObjectUnbarriered(trc, thing, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, JSString *thing, const char *name)
|
||||
{
|
||||
MarkString(trc, thing, name);
|
||||
MarkStringUnbarriered(trc, thing, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, JSScript *thing, const char *name)
|
||||
{
|
||||
MarkScript(trc, thing, name);
|
||||
MarkScriptUnbarriered(trc, thing, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, const Shape *thing, const char *name)
|
||||
{
|
||||
MarkShape(trc, thing, name);
|
||||
MarkShapeUnbarriered(trc, thing, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, types::TypeObject *thing, const char *name)
|
||||
{
|
||||
MarkTypeObject(trc, thing, name);
|
||||
MarkTypeObjectUnbarriered(trc, thing, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, JSXML *thing, const char *name)
|
||||
{
|
||||
MarkXML(trc, thing, name);
|
||||
MarkXMLUnbarriered(trc, thing, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, const Value &v, const char *name)
|
||||
{
|
||||
MarkValueUnbarriered(trc, v, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, jsid id, const char *name)
|
||||
{
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkIdUnbarriered(trc, id);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootGCThing(JSTracer *trc, void *thing, const char *name)
|
||||
{
|
||||
JS_SET_TRACING_NAME(trc, name);
|
||||
MarkGCThing(trc, thing);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, const Shape **vec, const char *name)
|
||||
{
|
||||
const Shape **end = vec + len;
|
||||
for (const Shape **sp = vec; sp < end; ++sp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, sp - vec);
|
||||
MarkShapeUnbarriered(trc, *sp, name);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
|
||||
{
|
||||
JSObject **end = vec + len;
|
||||
for (JSObject **sp = vec; sp < end; ++sp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, sp - vec);
|
||||
MarkObjectUnbarriered(trc, *sp, name);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, const Value *beg, const Value *end, const char *name)
|
||||
{
|
||||
for (const Value *vp = beg; vp < end; ++vp) {
|
||||
JS_SET_TRACING_INDEX(trc, name, vp - beg);
|
||||
MarkValueRaw(trc, *vp);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, const Value *vec, const char *name)
|
||||
{
|
||||
MarkRootRange(trc, vec, vec + len, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, jsid *beg, jsid *end, const char *name)
|
||||
{
|
||||
MarkIdRangeUnbarriered(trc, beg, end, name);
|
||||
}
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
|
||||
{
|
||||
MarkIdRangeUnbarriered(trc, len, vec, name);
|
||||
}
|
||||
|
||||
static void
|
||||
@ -592,7 +692,7 @@ static void
|
||||
ScanShape(GCMarker *gcmarker, const Shape *shape)
|
||||
{
|
||||
restart:
|
||||
JSRuntime *rt = gcmarker->context->runtime;
|
||||
JSRuntime *rt = gcmarker->runtime;
|
||||
if (rt->gcRegenShapes)
|
||||
shape->shapeid = js_RegenerateShapeForGC(rt);
|
||||
|
||||
@ -617,9 +717,9 @@ restart:
|
||||
static inline void
|
||||
ScanRope(GCMarker *gcmarker, JSRope *rope)
|
||||
{
|
||||
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
rope->compartment() == gcmarker->context->runtime->gcCurrentCompartment
|
||||
|| rope->compartment() == gcmarker->context->runtime->atomsCompartment);
|
||||
JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
rope->compartment() == gcmarker->runtime->gcCurrentCompartment
|
||||
|| rope->compartment() == gcmarker->runtime->atomsCompartment);
|
||||
JS_ASSERT(rope->isMarked());
|
||||
|
||||
JSString *leftChild = NULL;
|
||||
@ -645,9 +745,9 @@ ScanRope(GCMarker *gcmarker, JSRope *rope)
|
||||
static inline void
|
||||
PushMarkStack(GCMarker *gcmarker, JSString *str)
|
||||
{
|
||||
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
|
||||
str->compartment() == gcmarker->context->runtime->gcCurrentCompartment
|
||||
|| str->compartment() == gcmarker->context->runtime->atomsCompartment);
|
||||
JS_OPT_ASSERT_IF(gcmarker->runtime->gcCurrentCompartment,
|
||||
str->compartment() == gcmarker->runtime->gcCurrentCompartment
|
||||
|| str->compartment() == gcmarker->runtime->atomsCompartment);
|
||||
|
||||
if (str->isLinear()) {
|
||||
str->asLinear().mark(gcmarker);
|
||||
@ -700,11 +800,11 @@ ScanObject(GCMarker *gcmarker, JSObject *obj)
|
||||
js::Shape *shape = obj->lastProp;
|
||||
PushMarkStack(gcmarker, shape);
|
||||
|
||||
if (gcmarker->context->runtime->gcRegenShapes) {
|
||||
if (gcmarker->runtime->gcRegenShapes) {
|
||||
/* We need to regenerate our shape if hasOwnShape(). */
|
||||
uint32 newShape = shape->shapeid;
|
||||
if (obj->hasOwnShape()) {
|
||||
newShape = js_RegenerateShapeForGC(gcmarker->context->runtime);
|
||||
newShape = js_RegenerateShapeForGC(gcmarker->runtime);
|
||||
JS_ASSERT(newShape != shape->shapeid);
|
||||
}
|
||||
obj->objShape = newShape;
|
||||
@ -761,8 +861,8 @@ MarkChildren(JSTracer *trc, JSObject *obj)
|
||||
/* Trace universal (ops-independent) members. */
|
||||
if (!obj->isDenseArray() && obj->newType)
|
||||
MarkTypeObject(trc, obj->newType, "new_type");
|
||||
if (JSObject *parent = obj->getParent())
|
||||
MarkObject(trc, *parent, "parent");
|
||||
if (obj->parent)
|
||||
MarkObject(trc, obj->parent, "parent");
|
||||
|
||||
Class *clasp = obj->getClass();
|
||||
if (clasp->trace)
|
||||
@ -783,12 +883,16 @@ MarkChildren(JSTracer *trc, JSObject *obj)
void
MarkChildren(JSTracer *trc, JSString *str)
{
/*
* We use custom barriers in JSString, so it's safe to use unbarriered
* marking here.
*/
if (str->isDependent()) {
MarkString(trc, str->asDependent().base(), "base");
MarkStringUnbarriered(trc, str->asDependent().base(), "base");
} else if (str->isRope()) {
JSRope &rope = str->asRope();
MarkString(trc, rope.leftChild(), "left child");
MarkString(trc, rope.rightChild(), "right child");
MarkStringUnbarriered(trc, rope.leftChild(), "left child");
MarkStringUnbarriered(trc, rope.rightChild(), "right child");
}
}

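The hunk above relies on the naming split this patch introduces: the plain Mark* entry points take write-barriered storage, while the *Unbarriered variants take raw pointers for roots and for fields (like JSString's) that are protected some other way. A minimal sketch of that split, using hypothetical stand-in types rather than the real SpiderMonkey ones:

#include <cassert>

struct Thing { bool marked = false; };

// Stand-in for a write-barriered field such as HeapPtr<T> / MarkablePtr<T>.
template <typename T>
struct Heap {
    T *ptr = nullptr;
    T *get() const { return ptr; }
};

// Raw-pointer entry point: used for roots and custom-barriered fields.
inline void MarkThingUnbarriered(Thing *t, const char *name) {
    assert(name);
    if (t)
        t->marked = true;
}

// Barriered-storage entry point: only accepts Heap<Thing>, so an unprotected
// raw field cannot be passed by accident.
inline void MarkThing(const Heap<Thing> &h, const char *name) {
    MarkThingUnbarriered(h.get(), name);
}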
@ -799,10 +903,10 @@ MarkChildren(JSTracer *trc, JSScript *script)
|
||||
CheckScript(script, NULL);
|
||||
|
||||
#ifdef JS_CRASH_DIAGNOSTICS
|
||||
JSRuntime *rt = trc->context->runtime;
|
||||
JSRuntime *rt = trc->runtime;
|
||||
JS_OPT_ASSERT_IF(rt->gcCheckCompartment, script->compartment() == rt->gcCheckCompartment);
|
||||
#endif
|
||||
|
||||
|
||||
MarkAtomRange(trc, script->natoms, script->atoms, "atoms");
|
||||
|
||||
if (JSScript::isValidOffset(script->objectsOffset)) {
|
||||
@ -820,8 +924,8 @@ MarkChildren(JSTracer *trc, JSScript *script)
|
||||
MarkValueRange(trc, constarray->length, constarray->vector, "consts");
|
||||
}
|
||||
|
||||
if (!script->isCachedEval && script->u.globalObject)
|
||||
MarkObject(trc, *script->u.globalObject, "object");
|
||||
if (!script->isCachedEval && script->globalObject)
|
||||
MarkObject(trc, script->globalObject, "object");
|
||||
|
||||
if (IS_GC_MARKING_TRACER(trc) && script->filename)
|
||||
js_MarkScriptFilename(script->filename);
|
||||
@ -839,12 +943,15 @@ restart:
|
||||
MarkId(trc, shape->propid, "propid");
|
||||
|
||||
if (shape->hasGetterValue() && shape->getter())
|
||||
MarkObjectWithPrinter(trc, *shape->getterObject(), PrintPropertyGetterOrSetter, shape, 0);
|
||||
MarkObjectWithPrinterUnbarriered(trc, shape->getterObject(),
|
||||
PrintPropertyGetterOrSetter, shape, 0);
|
||||
if (shape->hasSetterValue() && shape->setter())
|
||||
MarkObjectWithPrinter(trc, *shape->setterObject(), PrintPropertyGetterOrSetter, shape, 1);
|
||||
MarkObjectWithPrinterUnbarriered(trc, shape->setterObject(),
|
||||
PrintPropertyGetterOrSetter, shape, 1);
|
||||
|
||||
if (shape->isMethod())
|
||||
MarkObjectWithPrinter(trc, shape->methodObject(), PrintPropertyMethod, shape, 0);
|
||||
MarkObjectWithPrinterUnbarriered(trc, &shape->methodObject(),
|
||||
PrintPropertyMethod, shape, 0);
|
||||
|
||||
shape = shape->previous();
|
||||
if (shape)
|
||||
@ -864,8 +971,7 @@ ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
|
||||
}
|
||||
|
||||
if (type->emptyShapes) {
|
||||
int count = FINALIZE_OBJECT_LAST - FINALIZE_OBJECT0 + 1;
|
||||
for (int i = 0; i < count; i++) {
|
||||
for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
|
||||
if (type->emptyShapes[i])
|
||||
PushMarkStack(gcmarker, type->emptyShapes[i]);
|
||||
}
|
||||
@ -901,26 +1007,25 @@ MarkChildren(JSTracer *trc, types::TypeObject *type)
|
||||
}
|
||||
|
||||
if (type->emptyShapes) {
|
||||
int count = FINALIZE_OBJECT_LAST - FINALIZE_OBJECT0 + 1;
|
||||
for (int i = 0; i < count; i++) {
|
||||
for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
|
||||
if (type->emptyShapes[i])
|
||||
MarkShape(trc, type->emptyShapes[i], "empty_shape");
|
||||
}
|
||||
}
|
||||
|
||||
if (type->proto)
|
||||
MarkObject(trc, *type->proto, "type_proto");
|
||||
MarkObject(trc, type->proto, "type_proto");
|
||||
|
||||
if (type->singleton)
|
||||
MarkObject(trc, *type->singleton, "type_singleton");
|
||||
MarkObject(trc, type->singleton, "type_singleton");
|
||||
|
||||
if (type->newScript) {
|
||||
MarkObject(trc, *type->newScript->fun, "type_new_function");
|
||||
MarkObject(trc, type->newScript->fun, "type_new_function");
|
||||
MarkShape(trc, type->newScript->shape, "type_new_shape");
|
||||
}
|
||||
|
||||
if (type->interpretedFunction)
|
||||
MarkObject(trc, *type->interpretedFunction, "type_function");
|
||||
MarkObject(trc, type->interpretedFunction, "type_function");
|
||||
}
|
||||
|
||||
#ifdef JS_HAS_XML_SUPPORT
|
||||
@ -936,7 +1041,7 @@ MarkChildren(JSTracer *trc, JSXML *xml)
|
||||
void
|
||||
GCMarker::drainMarkStack()
|
||||
{
|
||||
JSRuntime *rt = context->runtime;
|
||||
JSRuntime *rt = runtime;
|
||||
rt->gcCheckCompartment = rt->gcCurrentCompartment;
|
||||
|
||||
while (!isMarkStackEmpty()) {
|
||||
@ -970,10 +1075,8 @@ GCMarker::drainMarkStack()
|
||||
rt->gcCheckCompartment = NULL;
|
||||
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
JS_PUBLIC_API(void)
|
||||
JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
void
|
||||
TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
{
|
||||
switch (kind) {
|
||||
case JSTRACE_OBJECT:
|
||||
@ -1004,6 +1107,15 @@ JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind)
|
||||
{
|
||||
JS_ASSERT(thing);
|
||||
MarkKind(trc, thing, kind);
|
||||
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
inline void
|
||||
JSObject::scanSlots(GCMarker *gcmarker)
|
||||
{
|
||||
@ -1016,7 +1128,7 @@ JSObject::scanSlots(GCMarker *gcmarker)
|
||||
if (slots) {
|
||||
unsigned nfixed = numFixedSlots();
|
||||
if (nslots > nfixed) {
|
||||
Value *vp = fixedSlots();
|
||||
HeapValue *vp = fixedSlots();
|
||||
for (i = 0; i < nfixed; i++, vp++)
|
||||
ScanValue(gcmarker, *vp);
|
||||
vp = slots;
|
||||
@ -1026,7 +1138,7 @@ JSObject::scanSlots(GCMarker *gcmarker)
|
||||
}
|
||||
}
|
||||
JS_ASSERT(nslots <= numFixedSlots());
|
||||
Value *vp = fixedSlots();
|
||||
HeapValue *vp = fixedSlots();
|
||||
for (i = 0; i < nslots; i++, vp++)
|
||||
ScanValue(gcmarker, *vp);
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
*
|
||||
* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
@ -45,105 +45,93 @@
|
||||
#include "jscompartment.h"
|
||||
#include "jslock.h"
|
||||
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
#include "js/TemplateLib.h"
|
||||
|
||||
namespace js {
|
||||
namespace gc {
|
||||
|
||||
void
|
||||
MarkString(JSTracer *trc, JSString *str);
|
||||
MarkAtom(JSTracer *trc, JSAtom *str);
|
||||
|
||||
void
|
||||
MarkString(JSTracer *trc, JSString *str, const char *name);
|
||||
MarkAtom(JSTracer *trc, JSAtom *str, const char *name);
|
||||
|
||||
void
|
||||
MarkObject(JSTracer *trc, JSObject &obj, const char *name);
|
||||
|
||||
/*
|
||||
* Mark an object that may be in a different compartment from the compartment
|
||||
* being GC'd. (Although it won't be marked if it's in the wrong compartment.)
|
||||
*/
|
||||
void
|
||||
MarkCrossCompartmentObject(JSTracer *trc, JSObject &obj, const char *name);
|
||||
MarkObjectUnbarriered(JSTracer *trc, JSObject *obj, const char *name);
|
||||
|
||||
void
|
||||
MarkObjectWithPrinter(JSTracer *trc, JSObject &obj, JSTraceNamePrinter printer,
|
||||
const void *arg, size_t index);
|
||||
MarkObject(JSTracer *trc, const MarkablePtr<JSObject> &obj, const char *name);
|
||||
|
||||
void
|
||||
MarkScript(JSTracer *trc, JSScript *script, const char *name);
|
||||
MarkStringUnbarriered(JSTracer *trc, JSString *str, const char *name);
|
||||
|
||||
void
|
||||
MarkShape(JSTracer *trc, const Shape *shape, const char *name);
|
||||
MarkString(JSTracer *trc, const MarkablePtr<JSString> &str, const char *name);
|
||||
|
||||
void
|
||||
MarkTypeObject(JSTracer *trc, types::TypeObject *type, const char *name);
|
||||
MarkScriptUnbarriered(JSTracer *trc, JSScript *script, const char *name);
|
||||
|
||||
void
|
||||
MarkXML(JSTracer *trc, JSXML *xml, const char *name);
|
||||
MarkScript(JSTracer *trc, const MarkablePtr<JSScript> &script, const char *name);
|
||||
|
||||
void
|
||||
MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name);
|
||||
MarkShapeUnbarriered(JSTracer *trc, const Shape *shape, const char *name);
|
||||
|
||||
void
|
||||
MarkXMLRange(JSTracer *trc, size_t len, JSXML **vec, const char *name);
|
||||
MarkShape(JSTracer *trc, const MarkablePtr<const Shape> &shape, const char *name);
|
||||
|
||||
void
|
||||
MarkId(JSTracer *trc, jsid id);
|
||||
MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name);
|
||||
|
||||
void
|
||||
MarkId(JSTracer *trc, jsid id, const char *name);
|
||||
MarkTypeObject(JSTracer *trc, const MarkablePtr<types::TypeObject> &type, const char *name);
|
||||
|
||||
void
|
||||
MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name);
|
||||
MarkXMLUnbarriered(JSTracer *trc, JSXML *xml, const char *name);
|
||||
|
||||
void
|
||||
MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name);
|
||||
MarkXML(JSTracer *trc, const MarkablePtr<JSXML> &xml, const char *name);
|
||||
|
||||
void
|
||||
MarkObjectRange(JSTracer *trc, size_t len, HeapPtr<JSObject> *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkXMLRange(JSTracer *trc, size_t len, HeapPtr<JSXML> *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkId(JSTracer *trc, const HeapId &id, const char *name);
|
||||
|
||||
void
|
||||
MarkIdRange(JSTracer *trc, js::HeapId *beg, js::HeapId *end, const char *name);
|
||||
|
||||
void
|
||||
MarkIdRangeUnbarriered(JSTracer *trc, size_t len, jsid *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkIdRangeUnbarriered(JSTracer *trc, jsid *beg, jsid *end, const char *name);
|
||||
|
||||
void
|
||||
MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind);
|
||||
|
||||
void
|
||||
MarkValueRaw(JSTracer *trc, const js::Value &v);
|
||||
MarkValueUnbarriered(JSTracer *trc, const js::Value &v, const char *name);
|
||||
|
||||
void
|
||||
MarkValue(JSTracer *trc, const js::Value &v, const char *name);
|
||||
MarkValue(JSTracer *trc, const js::HeapValue &v, const char *name);
|
||||
|
||||
/*
|
||||
* Mark a value that may be in a different compartment from the compartment
|
||||
* being GC'd. (Although it won't be marked if it's in the wrong compartment.)
|
||||
*/
|
||||
void
|
||||
MarkCrossCompartmentValue(JSTracer *trc, const js::Value &v, const char *name);
|
||||
MarkCrossCompartmentValue(JSTracer *trc, const js::HeapValue &v, const char *name);
|
||||
|
||||
void
|
||||
MarkValueRange(JSTracer *trc, const Value *beg, const Value *end, const char *name);
|
||||
MarkValueRange(JSTracer *trc, const HeapValue *beg, const HeapValue *end, const char *name);
|
||||
|
||||
void
|
||||
MarkValueRange(JSTracer *trc, size_t len, const Value *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkShapeRange(JSTracer *trc, const Shape **beg, const Shape **end, const char *name);
|
||||
|
||||
void
|
||||
MarkShapeRange(JSTracer *trc, size_t len, const Shape **vec, const char *name);
|
||||
|
||||
/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing, uint32 kind);
|
||||
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing);
|
||||
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing, const char *name);
|
||||
|
||||
void
|
||||
MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index);
|
||||
|
||||
void
|
||||
Mark(JSTracer *trc, void *thing, uint32 kind, const char *name);
|
||||
MarkValueRange(JSTracer *trc, size_t len, const HeapValue *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, JSObject *thing, const char *name);
|
||||
@ -163,6 +151,33 @@ MarkRoot(JSTracer *trc, types::TypeObject *thing, const char *name);
|
||||
void
|
||||
MarkRoot(JSTracer *trc, JSXML *thing, const char *name);
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, const Value &v, const char *name);
|
||||
|
||||
void
|
||||
MarkRoot(JSTracer *trc, jsid id, const char *name);
|
||||
|
||||
void
|
||||
MarkRootGCThing(JSTracer *trc, void *thing, const char *name);
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, const Shape **vec, const char *name);
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, JSObject **vec, const char *name);
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, const Value *beg, const Value *end, const char *name);
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, const Value *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, jsid *beg, jsid *end, const char *name);
|
||||
|
||||
void
|
||||
MarkRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name);
|
||||
|
||||
void
|
||||
MarkChildren(JSTracer *trc, JSObject *obj);
|
||||
|
||||
@ -184,22 +199,22 @@ MarkChildren(JSTracer *trc, JSXML *xml);
|
||||
* the corresponding Mark/IsMarked function.
|
||||
*/
|
||||
inline void
|
||||
Mark(JSTracer *trc, const js::Value &v, const char *name)
|
||||
Mark(JSTracer *trc, const js::HeapValue &v, const char *name)
|
||||
{
|
||||
MarkValue(trc, v, name);
|
||||
}
|
||||
|
||||
inline void
|
||||
Mark(JSTracer *trc, JSObject *o, const char *name)
|
||||
Mark(JSTracer *trc, const MarkablePtr<JSObject> &o, const char *name)
|
||||
{
|
||||
MarkObject(trc, *o, name);
|
||||
MarkObject(trc, o, name);
|
||||
}
|
||||
|
||||
inline bool
|
||||
IsMarked(JSContext *cx, const js::Value &v)
|
||||
{
|
||||
if (v.isMarkable())
|
||||
return !IsAboutToBeFinalized(cx, v.toGCThing());
|
||||
return !IsAboutToBeFinalized(cx, v);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -215,7 +230,14 @@ IsMarked(JSContext *cx, Cell *cell)
|
||||
return !IsAboutToBeFinalized(cx, cell);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
} /* namespace gc */
|
||||
|
||||
void
|
||||
TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind);
|
||||
|
||||
void
|
||||
CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind);
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
#endif
|
||||
|
@ -47,6 +47,7 @@
|
||||
#include "jscompartment.h"
|
||||
|
||||
#include "jsgcinlines.h"
|
||||
#include "jsobjinlines.h"
|
||||
|
||||
using namespace mozilla;
|
||||
using namespace js;
|
||||
|
@ -474,6 +474,33 @@ TypeSet::print(JSContext *cx)
}
}

bool
TypeSet::propertyNeedsBarrier(JSContext *cx, jsid id)
{
id = MakeTypeId(cx, id);

if (unknownObject())
return true;

for (unsigned i = 0; i < getObjectCount(); i++) {
if (getSingleObject(i))
return true;

if (types::TypeObject *otype = getTypeObject(i)) {
if (otype->unknownProperties())
return true;

if (types::TypeSet *propTypes = otype->maybeGetProperty(cx, id)) {
if (propTypes->needsBarrier(cx))
return true;
}
}
}

addFreeze(cx);
return false;
}

/////////////////////////////////////////////////////////////////////
// TypeSet constraints
/////////////////////////////////////////////////////////////////////
@ -1919,6 +1946,17 @@ TypeSet::hasGlobalObject(JSContext *cx, JSObject *global)
return true;
}

bool
TypeSet::needsBarrier(JSContext *cx)
{
bool result = unknownObject()
|| getObjectCount() > 0
|| hasAnyFlag(TYPE_FLAG_STRING);
if (!result)
addFreeze(cx);
return result;
}

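The two functions above decide, from inferred type information, whether a store needs a barrier at all: only slots that may hold GC things (objects or strings) get one, and a negative answer freezes the type set so the compiled code is invalidated if that assumption later breaks. A small illustrative sketch of that decision shape (hypothetical names, not the real TypeSet API):

#include <cstdio>

enum TypeFlags { MayBeObject = 1 << 0, MayBeString = 1 << 1, MayBeInt = 1 << 2 };

struct InferredTypes {
    unsigned flags = 0;
    bool frozen = false;                 // stand-in for addFreeze(cx)

    // Barrier needed only if the slot can hold a GC pointer; otherwise record
    // the assumption so the caller can be recompiled if new types show up.
    bool needsBarrier() {
        bool result = (flags & (MayBeObject | MayBeString)) != 0;
        if (!result)
            frozen = true;
        return result;
    }
};

int main() {
    InferredTypes intsOnly;
    intsOnly.flags = MayBeInt;

    InferredTypes mixed;
    mixed.flags = MayBeInt | MayBeObject;

    std::printf("ints-only: %d, mixed: %d\n", intsOnly.needsBarrier(), mixed.needsBarrier());
}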
/////////////////////////////////////////////////////////////////////
|
||||
// TypeCompartment
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
@ -2546,7 +2584,7 @@ struct types::ObjectTableKey
|
||||
typedef JSObject * Lookup;
|
||||
|
||||
static inline uint32 hash(JSObject *obj) {
|
||||
return (uint32) (JSID_BITS(obj->lastProperty()->propid) ^
|
||||
return (uint32) (JSID_BITS(obj->lastProperty()->propid.get()) ^
|
||||
obj->slotSpan() ^ obj->numFixedSlots() ^
|
||||
((uint32)(size_t)obj->getProto() >> 2));
|
||||
}
|
||||
@ -3084,8 +3122,10 @@ TypeObject::clearNewScript(JSContext *cx)
}
}

cx->free_(newScript);
/* We NULL out newScript *before* freeing it so the write barrier works. */
TypeNewScript *savedNewScript = newScript;
newScript = NULL;
cx->free_(savedNewScript);

markStateChange(cx);
}
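The reordering above matters because assigning NULL to a barriered field runs the pre-barrier, which still reads the old pointer; the memory can only be released afterwards. A minimal sketch of the same ordering with a hypothetical Barriered<T> wrapper (not the real HeapPtr class):

#include <cstdlib>

struct Blob { bool marked = false; };

template <typename T>
struct Barriered {
    T *ptr = nullptr;
    Barriered &operator=(T *p) {
        if (ptr)
            ptr->marked = true;   // stand-in for the pre-write barrier
        ptr = p;
        return *this;
    }
};

void clearAndFree(Barriered<Blob> &field) {
    Blob *saved = field.ptr;      // keep a raw copy of the old value
    field = nullptr;              // barrier runs while *saved is still valid
    std::free(saved);             // only now is it safe to release the memory
}                                 // (assumes *saved was malloc'd, like cx->free_ above)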
@ -4740,7 +4780,7 @@ CheckNewScriptProperties(JSContext *cx, TypeObject *type, JSFunction *fun)
|
||||
type->newScript->shape = baseobj->lastProperty();
|
||||
|
||||
type->newScript->initializerList = (TypeNewScript::Initializer *)
|
||||
((char *) type->newScript + sizeof(TypeNewScript));
|
||||
((char *) type->newScript.get() + sizeof(TypeNewScript));
|
||||
PodCopy(type->newScript->initializerList, initializerList.begin(), initializerList.length());
|
||||
}
|
||||
|
||||
@ -5201,8 +5241,8 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope)
|
||||
if (!SetScope(cx, parent, scope->getParent()))
|
||||
return false;
|
||||
parent->nesting()->activeCall = scope;
|
||||
parent->nesting()->argArray = call.argArray();
|
||||
parent->nesting()->varArray = call.varArray();
|
||||
parent->nesting()->argArray = Valueify(call.argArray());
|
||||
parent->nesting()->varArray = Valueify(call.varArray());
|
||||
}
|
||||
|
||||
JS_ASSERT(!script->types->nesting);
|
||||
@ -5719,7 +5759,7 @@ JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown)
|
||||
if (!type)
|
||||
return;
|
||||
|
||||
newType = type;
|
||||
newType.init(type);
|
||||
setDelegate();
|
||||
|
||||
if (!cx->typeInferenceEnabled())
|
||||
@ -6264,7 +6304,7 @@ JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, J
|
||||
if (object->emptyShapes) {
|
||||
size_t usable = usf(object->emptyShapes);
|
||||
stats->emptyShapes +=
|
||||
usable ? usable : sizeof(EmptyShape*) * gc::FINALIZE_FUNCTION_AND_OBJECT_LAST;
|
||||
usable ? usable : sizeof(EmptyShape*) * gc::FINALIZE_OBJECT_LIMIT;
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -48,6 +48,7 @@
|
||||
#include "jsprvtd.h"
|
||||
|
||||
#include "ds/LifoAlloc.h"
|
||||
#include "gc/Barrier.h"
|
||||
#include "js/HashTable.h"
|
||||
|
||||
namespace js {
|
||||
@ -508,6 +509,15 @@ class TypeSet

inline void clearObjects();

/*
* Whether a location with this TypeSet needs a write barrier (i.e., whether
* it can hold GC things). The type set is frozen if no barrier is needed.
*/
bool needsBarrier(JSContext *cx);

/* The type set is frozen if no barrier is needed. */
bool propertyNeedsBarrier(JSContext *cx, jsid id);

private:
|
||||
uint32 baseObjectCount() const {
|
||||
return (flags & TYPE_FLAG_OBJECT_COUNT_MASK) >> TYPE_FLAG_OBJECT_COUNT_SHIFT;
|
||||
@ -621,18 +631,13 @@ struct TypeBarrier
|
||||
struct Property
|
||||
{
|
||||
/* Identifier for this property, JSID_VOID for the aggregate integer index property. */
|
||||
jsid id;
|
||||
HeapId id;
|
||||
|
||||
/* Possible types for this property, including types inherited from prototypes. */
|
||||
TypeSet types;
|
||||
|
||||
Property(jsid id)
|
||||
: id(id)
|
||||
{}
|
||||
|
||||
Property(const Property &o)
|
||||
: id(o.id), types(o.types)
|
||||
{}
|
||||
inline Property(jsid id);
|
||||
inline Property(const Property &o);
|
||||
|
||||
static uint32 keyBits(jsid id) { return (uint32) JSID_BITS(id); }
|
||||
static jsid getKey(Property *p) { return p->id; }
|
||||
@ -650,7 +655,7 @@ struct Property
|
||||
*/
|
||||
struct TypeNewScript
|
||||
{
|
||||
JSFunction *fun;
|
||||
HeapPtrFunction fun;
|
||||
|
||||
/* Allocation kind to use for newly constructed objects. */
|
||||
gc::AllocKind allocKind;
|
||||
@ -659,7 +664,7 @@ struct TypeNewScript
|
||||
* Shape to use for newly constructed objects. Reflects all definite
|
||||
* properties the object will have.
|
||||
*/
|
||||
const Shape *shape;
|
||||
HeapPtr<const Shape> shape;
|
||||
|
||||
/*
|
||||
* Order in which properties become initialized. We need this in case a
|
||||
@ -682,6 +687,9 @@ struct TypeNewScript
|
||||
{}
|
||||
};
|
||||
Initializer *initializerList;
|
||||
|
||||
static inline void writeBarrierPre(TypeNewScript *newScript);
|
||||
static inline void writeBarrierPost(TypeNewScript *newScript, void *addr);
|
||||
};
|
||||
|
||||
/*
|
||||
@ -714,17 +722,17 @@ struct TypeNewScript
|
||||
struct TypeObject : gc::Cell
|
||||
{
|
||||
/* Prototype shared by objects using this type. */
|
||||
JSObject *proto;
|
||||
HeapPtrObject proto;
|
||||
|
||||
/*
|
||||
* Whether there is a singleton JS object with this type. That JS object
|
||||
* must appear in type sets instead of this; we include the back reference
|
||||
* here to allow reverting the JS object to a lazy type.
|
||||
*/
|
||||
JSObject *singleton;
|
||||
HeapPtrObject singleton;
|
||||
|
||||
/* Lazily filled array of empty shapes for each size of objects with this type. */
|
||||
js::EmptyShape **emptyShapes;
|
||||
HeapPtr<EmptyShape> *emptyShapes;
|
||||
|
||||
/* Flags for this object. */
|
||||
TypeObjectFlags flags;
|
||||
@ -734,7 +742,7 @@ struct TypeObject : gc::Cell
|
||||
* 'new' on the specified script, which adds some number of properties to
|
||||
* the object in a definite order before the object escapes.
|
||||
*/
|
||||
TypeNewScript *newScript;
|
||||
HeapPtr<TypeNewScript> newScript;
|
||||
|
||||
/*
|
||||
* Estimate of the contribution of this object to the type sets it appears in.
|
||||
@ -783,7 +791,7 @@ struct TypeObject : gc::Cell
|
||||
Property **propertySet;
|
||||
|
||||
/* If this is an interpreted function, the function object. */
|
||||
JSFunction *interpretedFunction;
|
||||
HeapPtrFunction interpretedFunction;
|
||||
|
||||
inline TypeObject(JSObject *proto, bool isFunction, bool unknown);
|
||||
|
||||
@ -868,6 +876,9 @@ struct TypeObject : gc::Cell
|
||||
*/
|
||||
void finalize(JSContext *cx) {}
|
||||
|
||||
static inline void writeBarrierPre(TypeObject *type);
|
||||
static inline void writeBarrierPost(TypeObject *type, void *addr);
|
||||
|
||||
private:
|
||||
inline uint32 basePropertyCount() const;
|
||||
inline void setBasePropertyCount(uint32 count);
|
||||
@ -984,8 +995,8 @@ struct TypeScriptNesting
|
||||
* these fields can be embedded directly in JIT code (though remember to
|
||||
* use 'addDependency == true' when calling resolveNameAccess).
|
||||
*/
|
||||
Value *argArray;
|
||||
Value *varArray;
|
||||
const Value *argArray;
|
||||
const Value *varArray;
|
||||
|
||||
/* Number of frames for this function on the stack. */
|
||||
uint32 activeFrames;
|
||||
@ -1010,7 +1021,7 @@ class TypeScript
|
||||
analyze::ScriptAnalysis *analysis;
|
||||
|
||||
/* Function for the script, if it has one. */
|
||||
JSFunction *function;
|
||||
HeapPtrFunction function;
|
||||
|
||||
/*
|
||||
* Information about the scope in which a script executes. This information
|
||||
@ -1020,7 +1031,7 @@ class TypeScript
|
||||
static const size_t GLOBAL_MISSING_SCOPE = 0x1;
|
||||
|
||||
/* Global object for the script, if compileAndGo. */
|
||||
js::GlobalObject *global;
|
||||
HeapPtr<GlobalObject> global;
|
||||
|
||||
public:
|
||||
|
||||
@ -1030,12 +1041,10 @@ class TypeScript
|
||||
/* Dynamic types generated at points within this script. */
|
||||
TypeResult *dynamicList;
|
||||
|
||||
TypeScript(JSFunction *fun) {
|
||||
this->function = fun;
|
||||
this->global = (js::GlobalObject *) GLOBAL_MISSING_SCOPE;
|
||||
}
|
||||
inline TypeScript(JSFunction *fun);
|
||||
inline ~TypeScript();
|
||||
|
||||
bool hasScope() { return size_t(global) != GLOBAL_MISSING_SCOPE; }
|
||||
bool hasScope() { return size_t(global.get()) != GLOBAL_MISSING_SCOPE; }
|
||||
|
||||
/* Array of type type sets for variables and JOF_TYPESET ops. */
|
||||
TypeSet *typeArray() { return (TypeSet *) (jsuword(this) + sizeof(TypeScript)); }
|
||||
|
@ -71,7 +71,7 @@ Type::ObjectType(JSObject *obj)
|
||||
Type::ObjectType(TypeObject *obj)
|
||||
{
|
||||
if (obj->singleton)
|
||||
return Type((jsuword) obj->singleton | 1);
|
||||
return Type((jsuword) obj->singleton.get() | 1);
|
||||
return Type((jsuword) obj);
|
||||
}
|
||||
|
||||
@ -459,6 +459,18 @@ UseNewTypeAtEntry(JSContext *cx, StackFrame *fp)
|
||||
// Script interface functions
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
|
||||
inline
|
||||
TypeScript::TypeScript(JSFunction *fun)
|
||||
: function(fun),
|
||||
global((js::GlobalObject *) GLOBAL_MISSING_SCOPE)
|
||||
{
|
||||
}
|
||||
|
||||
inline
|
||||
TypeScript::~TypeScript()
|
||||
{
|
||||
}
|
||||
|
||||
/* static */ inline unsigned
|
||||
TypeScript::NumTypeSets(JSScript *script)
|
||||
{
|
||||
@ -689,9 +701,9 @@ void
|
||||
TypeScript::trace(JSTracer *trc)
|
||||
{
|
||||
if (function)
|
||||
gc::MarkObject(trc, *function, "script_fun");
|
||||
gc::MarkObject(trc, function, "script_fun");
|
||||
if (hasScope() && global)
|
||||
gc::MarkObject(trc, *global, "script_global");
|
||||
gc::MarkObject(trc, global, "script_global");
|
||||
|
||||
/* Note: nesting does not keep anything alive. */
|
||||
}
|
||||
@ -1252,6 +1264,56 @@ TypeObject::getGlobal()
return NULL;
}

inline void
TypeObject::writeBarrierPre(TypeObject *type)
{
#ifdef JSGC_INCREMENTAL
if (!type || type == &js::types::emptyTypeObject)
return;

JSCompartment *comp = type->compartment();
if (comp->needsBarrier())
MarkTypeObjectUnbarriered(comp->barrierTracer(), type, "write barrier");
#endif
}

inline void
TypeObject::writeBarrierPost(TypeObject *type, void *addr)
{
}

inline void
TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
{
#ifdef JSGC_INCREMENTAL
if (!newScript)
return;

JSCompartment *comp = newScript->fun->compartment();
if (comp->needsBarrier()) {
MarkObjectUnbarriered(comp->barrierTracer(), newScript->fun, "write barrier");
MarkShapeUnbarriered(comp->barrierTracer(), newScript->shape, "write barrier");
}
#endif
}

inline void
TypeNewScript::writeBarrierPost(TypeNewScript *newScript, void *addr)
{
}

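The JSGC_INCREMENTAL blocks above are instances of the snapshot-at-the-beginning discipline this patch adds: before a reference field is overwritten while incremental marking is in progress, the old value is marked so that everything reachable when the GC started still gets traced. A self-contained sketch of the idea (illustrative only, not SpiderMonkey code):

#include <vector>

struct Node {
    bool marked = false;
    Node *child = nullptr;
};

static bool incrementalGCActive = false;   // set while marking is in progress
static std::vector<Node *> markStack;      // gray set still to be scanned

// Pre-write barrier: keep the snapshot alive by marking the value that is
// about to be overwritten.
static void preWriteBarrier(Node *oldValue) {
    if (incrementalGCActive && oldValue && !oldValue->marked) {
        oldValue->marked = true;
        markStack.push_back(oldValue);
    }
}

// Every store to a GC-visible field goes through the barrier first.
static void setChild(Node &parent, Node *newChild) {
    preWriteBarrier(parent.child);
    parent.child = newChild;
}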
inline
|
||||
Property::Property(jsid id)
|
||||
: id(id)
|
||||
{
|
||||
}
|
||||
|
||||
inline
|
||||
Property::Property(const Property &o)
|
||||
: id(o.id.get()), types(o.types)
|
||||
{
|
||||
}
|
||||
|
||||
} } /* namespace js::types */
|
||||
|
||||
inline bool
|
||||
|
@ -452,8 +452,8 @@ const uint32 JSSLOT_SAVED_ID = 1;
|
||||
static void
|
||||
no_such_method_trace(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
gc::MarkValue(trc, obj->getSlot(JSSLOT_FOUND_FUNCTION), "found function");
|
||||
gc::MarkValue(trc, obj->getSlot(JSSLOT_SAVED_ID), "saved id");
|
||||
gc::MarkValue(trc, obj->getSlotRef(JSSLOT_FOUND_FUNCTION), "found function");
|
||||
gc::MarkValue(trc, obj->getSlotRef(JSSLOT_SAVED_ID), "saved id");
|
||||
}
|
||||
|
||||
Class js_NoSuchMethodClass = {
|
||||
|
@ -124,7 +124,7 @@ NativeIterator::mark(JSTracer *trc)
|
||||
{
|
||||
MarkIdRange(trc, begin(), end(), "props");
|
||||
if (obj)
|
||||
MarkObject(trc, *obj, "obj");
|
||||
MarkObject(trc, obj, "obj");
|
||||
}
|
||||
|
||||
static void
|
||||
@ -134,8 +134,8 @@ iterator_finalize(JSContext *cx, JSObject *obj)
|
||||
|
||||
NativeIterator *ni = obj->getNativeIterator();
|
||||
if (ni) {
|
||||
obj->setPrivate(NULL);
|
||||
cx->free_(ni);
|
||||
obj->setNativeIterator(NULL);
|
||||
}
|
||||
}
|
||||
|
||||
@ -337,7 +337,9 @@ js::VectorToIdArray(JSContext *cx, AutoIdVector &props, JSIdArray **idap)
|
||||
return false;
|
||||
|
||||
ida->length = static_cast<jsint>(len);
|
||||
memcpy(ida->vector, props.begin(), idsz);
|
||||
jsid *v = props.begin();
|
||||
for (jsint i = 0; i < ida->length; i++)
|
||||
ida->vector[i].init(v[i]);
|
||||
*idap = ida;
|
||||
return true;
|
||||
}
|
||||
@ -441,17 +443,19 @@ NativeIterator::allocateIterator(JSContext *cx, uint32 slength, const AutoIdVect
|
||||
cx->malloc_(sizeof(NativeIterator) + plength * sizeof(jsid) + slength * sizeof(uint32));
|
||||
if (!ni)
|
||||
return NULL;
|
||||
ni->props_array = ni->props_cursor = (jsid *) (ni + 1);
|
||||
ni->props_end = (jsid *)ni->props_array + plength;
|
||||
if (plength)
|
||||
memcpy(ni->props_array, props.begin(), plength * sizeof(jsid));
|
||||
ni->props_array = ni->props_cursor = (HeapId *) (ni + 1);
|
||||
ni->props_end = ni->props_array + plength;
|
||||
if (plength) {
|
||||
for (size_t i = 0; i < plength; i++)
|
||||
ni->props_array[i].init(props[i]);
|
||||
}
|
||||
return ni;
|
||||
}
|
||||
|
||||
inline void
NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key)
{
this->obj = obj;
this->obj.init(obj);
this->flags = flags;
this->shapes_array = (uint32 *) this->props_end;
this->shapes_length = slength;
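The switch from plain assignment to .init() above reflects a distinction the barriered wrappers draw: operator= runs the pre-barrier on the value being replaced, while init() is reserved for freshly allocated, uninitialized storage where there is no old value to protect. A rough sketch of that interface (hypothetical names; the real classes live in gc/Barrier.h):

struct Cell { bool marked = false; };

// Stand-in for the global "is an incremental GC running?" check.
static bool gcInProgress = false;

template <typename T>
class BarrieredPtr {
    T *ptr;
  public:
    BarrieredPtr() : ptr(nullptr) {}

    // For storage that was just allocated: there is no old value to protect.
    void init(T *p) { ptr = p; }

    // For live storage: run the pre-barrier on the value being replaced.
    BarrieredPtr &operator=(T *p) {
        if (gcInProgress && ptr)
            ptr->marked = true;          // simplified pre-write barrier
        ptr = p;
        return *this;
    }

    T *get() const { return ptr; }
};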
@ -853,9 +857,9 @@ SuppressDeletedPropertyHelper(JSContext *cx, JSObject *obj, IdPredicate predicat
|
||||
/* This only works for identified surpressed keys, not values. */
|
||||
if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
|
||||
/* Check whether id is still to come. */
|
||||
jsid *props_cursor = ni->current();
|
||||
jsid *props_end = ni->end();
|
||||
for (jsid *idp = props_cursor; idp < props_end; ++idp) {
|
||||
HeapId *props_cursor = ni->current();
|
||||
HeapId *props_end = ni->end();
|
||||
for (HeapId *idp = props_cursor; idp < props_end; ++idp) {
|
||||
if (predicate(*idp)) {
|
||||
/*
|
||||
* Check whether another property along the prototype chain
|
||||
@ -894,7 +898,8 @@ SuppressDeletedPropertyHelper(JSContext *cx, JSObject *obj, IdPredicate predicat
|
||||
if (idp == props_cursor) {
|
||||
ni->incCursor();
|
||||
} else {
|
||||
memmove(idp, idp + 1, (props_end - (idp + 1)) * sizeof(jsid));
|
||||
for (HeapId *p = idp; p + 1 != props_end; p++)
|
||||
*p = *(p + 1);
|
||||
ni->props_end = ni->end() - 1;
|
||||
}
|
||||
|
||||
@ -1109,6 +1114,37 @@ generator_finalize(JSContext *cx, JSObject *obj)
|
||||
cx->free_(gen);
|
||||
}
|
||||
|
||||
static void
|
||||
MarkGenerator(JSTracer *trc, JSGenerator *gen)
|
||||
{
|
||||
StackFrame *fp = gen->floatingFrame();
|
||||
|
||||
/*
|
||||
* MarkGenerator should only be called when regs is based on the floating frame.
|
||||
* See calls to RebaseRegsFromTo.
|
||||
*/
|
||||
JS_ASSERT(size_t(gen->regs.sp - fp->slots()) <= fp->numSlots());
|
||||
|
||||
/*
|
||||
* Currently, generators are not mjitted. Still, (overflow) args can be
|
||||
* pushed by the mjit and need to be conservatively marked. Technically, the
|
||||
* formal args and generator slots are safe for exact marking, but since the
|
||||
* plan is to eventually mjit generators, it makes sense to future-proof
|
||||
* this code and save someone an hour later.
|
||||
*/
|
||||
MarkStackRangeConservatively(trc, gen->floatingStack, fp->formalArgsEnd());
|
||||
js_TraceStackFrame(trc, fp);
|
||||
MarkStackRangeConservatively(trc, fp->slots(), gen->regs.sp);
|
||||
}

static void
GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
{
JSCompartment *comp = cx->compartment;
if (comp->needsBarrier())
MarkGenerator(comp->barrierTracer(), gen);
}

static void
|
||||
generator_trace(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
@ -1123,19 +1159,8 @@ generator_trace(JSTracer *trc, JSObject *obj)
|
||||
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
|
||||
return;
|
||||
|
||||
StackFrame *fp = gen->floatingFrame();
|
||||
JS_ASSERT(gen->liveFrame() == fp);
|
||||
|
||||
/*
|
||||
* Currently, generators are not mjitted. Still, (overflow) args can be
|
||||
* pushed by the mjit and need to be conservatively marked. Technically, the
|
||||
* formal args and generator slots are safe for exact marking, but since the
|
||||
* plan is to eventually mjit generators, it makes sense to future-proof
|
||||
* this code and save someone an hour later.
|
||||
*/
|
||||
MarkStackRangeConservatively(trc, gen->floatingStack, fp->formalArgsEnd());
|
||||
js_TraceStackFrame(trc, fp);
|
||||
MarkStackRangeConservatively(trc, fp->slots(), gen->regs.sp);
|
||||
JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
|
||||
MarkGenerator(trc, gen);
|
||||
}
|
||||
|
||||
Class js::GeneratorClass = {
|
||||
@ -1209,7 +1234,7 @@ js_NewGenerator(JSContext *cx)
|
||||
StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
|
||||
|
||||
/* Initialize JSGenerator. */
|
||||
gen->obj = obj;
|
||||
gen->obj.init(obj);
|
||||
gen->state = JSGEN_NEWBORN;
|
||||
gen->enumerators = NULL;
|
||||
gen->floating = genfp;
|
||||
@ -1258,6 +1283,19 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
|
||||
if (!cx->ensureGeneratorStackSpace())
|
||||
return JS_FALSE;

/*
* Write barrier is needed since the generator stack can be updated,
* and it's not barriered in any other way. We need to do it before
* gen->state changes, which can cause us to trace the generator
* differently.
*
* We could optimize this by setting a bit on the generator to signify
* that it has been marked. If this bit has already been set, there is no
* need to mark again. The bit would have to be reset before the next GC,
* or else some kind of epoch scheme would have to be used.
*/
GeneratorWriteBarrierPre(cx, gen);

JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
switch (op) {
case JSGENOP_NEXT:

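The comment above sketches a possible optimization: barrier the generator's whole stack at most once per incremental GC, using a mark bit or an epoch counter. A small illustration of the epoch variant (hypothetical names, not part of this patch):

#include <cstdint>

struct GeneratorStub {
    uint64_t barrierEpoch = 0;           // GC cycle when the barrier last ran
};

static uint64_t currentGCEpoch = 1;      // bumped at the start of every GC
static bool gcInProgress = false;

static void barrierWholeGeneratorStack(GeneratorStub &) {
    // ... mark everything the generator's saved stack currently points at ...
}

static void maybeBarrierGenerator(GeneratorStub &gen) {
    if (!gcInProgress || gen.barrierEpoch == currentGCEpoch)
        return;                          // already snapshotted this cycle
    gen.barrierEpoch = currentGCEpoch;
    barrierWholeGeneratorStack(gen);
}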
@ -48,6 +48,7 @@
|
||||
#include "jspubtd.h"
|
||||
#include "jsversion.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
#include "vm/Stack.h"
|
||||
|
||||
/*
|
||||
@ -60,10 +61,10 @@
|
||||
namespace js {
|
||||
|
||||
struct NativeIterator {
|
||||
JSObject *obj;
|
||||
jsid *props_array;
|
||||
jsid *props_cursor;
|
||||
jsid *props_end;
|
||||
HeapPtrObject obj;
|
||||
HeapId *props_array;
|
||||
HeapId *props_cursor;
|
||||
HeapId *props_end;
|
||||
uint32 *shapes_array;
|
||||
uint32 shapes_length;
|
||||
uint32 shapes_key;
|
||||
@ -72,11 +73,11 @@ struct NativeIterator {
|
||||
|
||||
bool isKeyIter() const { return (flags & JSITER_FOREACH) == 0; }
|
||||
|
||||
inline jsid *begin() const {
|
||||
inline HeapId *begin() const {
|
||||
return props_array;
|
||||
}
|
||||
|
||||
inline jsid *end() const {
|
||||
inline HeapId *end() const {
|
||||
return props_end;
|
||||
}
|
||||
|
||||
@ -84,7 +85,7 @@ struct NativeIterator {
|
||||
return end() - begin();
|
||||
}
|
||||
|
||||
jsid *current() const {
|
||||
HeapId *current() const {
|
||||
JS_ASSERT(props_cursor < props_end);
|
||||
return props_cursor;
|
||||
}
|
||||
@ -170,7 +171,7 @@ typedef enum JSGeneratorState {
|
||||
} JSGeneratorState;
|
||||
|
||||
struct JSGenerator {
|
||||
JSObject *obj;
|
||||
js::HeapPtrObject obj;
|
||||
JSGeneratorState state;
|
||||
js::FrameRegs regs;
|
||||
JSObject *enumerators;
|
||||
|
@ -61,6 +61,8 @@
|
||||
#include "jsscope.h"
|
||||
#include "jsstr.h"
|
||||
|
||||
#include "jsscopeinlines.h"
|
||||
|
||||
using namespace js;
|
||||
|
||||
#define ReadWord(W) (W)
|
||||
|
@ -463,7 +463,7 @@ js_LeaveSharpObject(JSContext *cx, JSIdArray **idap)
|
||||
static intN
|
||||
gc_sharp_table_entry_marker(JSHashEntry *he, intN i, void *arg)
|
||||
{
|
||||
MarkObject((JSTracer *)arg, *(JSObject *)he->key, "sharp table entry");
|
||||
MarkRoot((JSTracer *)arg, (JSObject *)he->key, "sharp table entry");
|
||||
return JS_DHASH_NEXT;
|
||||
}
|
||||
|
||||
@ -1061,8 +1061,8 @@ EvalCacheLookup(JSContext *cx, JSLinearString *str, StackFrame *caller, uintN st
|
||||
if (i < 0 ||
|
||||
objarray->vector[i]->getParent() == &scopeobj) {
|
||||
JS_ASSERT(staticLevel == script->staticLevel);
|
||||
*scriptp = script->u.evalHashLink;
|
||||
script->u.evalHashLink = NULL;
|
||||
*scriptp = script->evalHashLink();
|
||||
script->evalHashLink() = NULL;
|
||||
return script;
|
||||
}
|
||||
}
|
||||
@ -1071,7 +1071,7 @@ EvalCacheLookup(JSContext *cx, JSLinearString *str, StackFrame *caller, uintN st
|
||||
|
||||
if (++count == EVAL_CACHE_CHAIN_LIMIT)
|
||||
return NULL;
|
||||
scriptp = &script->u.evalHashLink;
|
||||
scriptp = &script->evalHashLink();
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
@ -1085,7 +1085,7 @@ EvalCacheLookup(JSContext *cx, JSLinearString *str, StackFrame *caller, uintN st
|
||||
* a jsdbgapi user's perspective, we want each eval() to create and destroy a
|
||||
* script. This hides implementation details and means we don't have to deal
|
||||
* with calls to JS_GetScriptObject for scripts in the eval cache (currently,
|
||||
* script->u.object aliases script->u.evalHashLink).
|
||||
* script->object aliases script->evalHashLink()).
|
||||
*/
|
||||
class EvalScriptGuard
|
||||
{
|
||||
@ -1107,7 +1107,7 @@ class EvalScriptGuard
|
||||
js_CallDestroyScriptHook(cx_, script_);
|
||||
script_->isActiveEval = false;
|
||||
script_->isCachedEval = true;
|
||||
script_->u.evalHashLink = *bucket_;
|
||||
script_->evalHashLink() = *bucket_;
|
||||
*bucket_ = script_;
|
||||
}
|
||||
}
|
||||
@ -3004,7 +3004,7 @@ CreateThisForFunctionWithType(JSContext *cx, types::TypeObject *type, JSObject *
|
||||
gc::AllocKind kind = type->newScript->allocKind;
|
||||
JSObject *res = NewObjectWithType(cx, type, parent, kind);
|
||||
if (res)
|
||||
res->setMap((Shape *) type->newScript->shape);
|
||||
res->initMap((Shape *) type->newScript->shape.get());
|
||||
return res;
|
||||
}
|
||||
|
||||
@ -3504,7 +3504,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
|
||||
if (!emptyWithShape)
|
||||
return NULL;
|
||||
|
||||
obj->setMap(emptyWithShape);
|
||||
obj->initMap(emptyWithShape);
|
||||
OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
|
||||
|
||||
AutoObjectRooter tvr(cx, obj);
|
||||
@ -3533,7 +3533,7 @@ js_NewBlockObject(JSContext *cx)
|
||||
if (!emptyBlockShape)
|
||||
return NULL;
|
||||
blockObj->init(cx, &BlockClass, &emptyTypeObject, NULL, NULL, false);
|
||||
blockObj->setMap(emptyBlockShape);
|
||||
blockObj->initMap(emptyBlockShape);
|
||||
|
||||
return blockObj;
|
||||
}
|
||||
@ -3591,7 +3591,7 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
|
||||
if (normalUnwind) {
|
||||
uintN slot = JSSLOT_BLOCK_FIRST_FREE_SLOT;
|
||||
depth += fp->numFixed();
|
||||
obj->copySlotRange(slot, fp->slots() + depth, count);
|
||||
obj->copySlotRange(slot, fp->slots() + depth, count, true);
|
||||
}
|
||||
|
||||
/* We must clear the private slot even with errors. */
|
||||
@ -3794,8 +3794,8 @@ struct JSObject::TradeGutsReserved {
|
||||
JSContext *cx;
|
||||
Vector<Value> avals;
|
||||
Vector<Value> bvals;
|
||||
Value *newaslots;
|
||||
Value *newbslots;
|
||||
HeapValue *newaslots;
|
||||
HeapValue *newbslots;
|
||||
|
||||
TradeGutsReserved(JSContext *cx)
|
||||
: cx(cx), avals(cx), bvals(cx), newaslots(NULL), newbslots(NULL)
|
||||
@ -3843,12 +3843,12 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *a, JSObject *b,
|
||||
unsigned bfixed = b->numFixedSlots();
|
||||
|
||||
if (afixed < bcap) {
|
||||
reserved.newaslots = (Value *) cx->malloc_(sizeof(Value) * (bcap - afixed));
|
||||
reserved.newaslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * (bcap - afixed));
|
||||
if (!reserved.newaslots)
|
||||
return false;
|
||||
}
|
||||
if (bfixed < acap) {
|
||||
reserved.newbslots = (Value *) cx->malloc_(sizeof(Value) * (acap - bfixed));
|
||||
reserved.newbslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * (acap - bfixed));
|
||||
if (!reserved.newbslots)
|
||||
return false;
|
||||
}
|
||||
@ -3885,6 +3885,19 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
|
||||
JS_ASSERT(!a->isDenseArray() && !b->isDenseArray());
|
||||
JS_ASSERT(!a->isArrayBuffer() && !b->isArrayBuffer());

#ifdef JSGC_INCREMENTAL
/*
* We need a write barrier here. If |a| was marked and |b| was not, then
* after the swap, |b|'s guts would never be marked. The write barrier
* solves this.
*/
JSCompartment *comp = a->compartment();
if (comp->needsBarrier()) {
MarkChildren(comp->barrierTracer(), a);
MarkChildren(comp->barrierTracer(), b);
}
#endif

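The barrier above exists because an object swap can move unmarked children behind an already-marked (black) object, where the marker would never revisit them. Marking both objects' children before the swap preserves the start-of-GC snapshot. A small stand-alone illustration (hypothetical types, not the real TradeGuts code):

#include <utility>
#include <vector>

struct Obj {
    bool marked = false;
    std::vector<Obj *> children;
};

static void markChildren(Obj &o, std::vector<Obj *> &markStack) {
    for (Obj *c : o.children) {
        if (c && !c->marked) {
            c->marked = true;
            markStack.push_back(c);
        }
    }
}

static void swapGuts(Obj &a, Obj &b, bool gcInProgress, std::vector<Obj *> &markStack) {
    if (gcInProgress) {                  // pre-barrier: snapshot both edge sets
        markChildren(a, markStack);
        markChildren(b, markStack);
    }
    std::swap(a.children, b.children);   // now the swap cannot hide anything
}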
/* New types for a JSObject need to be stable when trading guts. */
|
||||
TypeObject *newTypeA = a->newType;
|
||||
TypeObject *newTypeB = b->newType;
|
||||
@ -3938,7 +3951,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
|
||||
unsigned afixed = a->numFixedSlots();
|
||||
unsigned bfixed = b->numFixedSlots();
|
||||
|
||||
JSObject tmp;
|
||||
char tmp[sizeof(JSObject)];
|
||||
memcpy(&tmp, a, sizeof tmp);
|
||||
memcpy(a, b, sizeof tmp);
|
||||
memcpy(b, &tmp, sizeof tmp);
|
||||
@ -3946,13 +3959,13 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
|
||||
a->updateFixedSlots(afixed);
|
||||
a->slots = reserved.newaslots;
|
||||
a->capacity = Max(afixed, bcap);
|
||||
a->copySlotRange(0, reserved.bvals.begin(), bcap);
|
||||
a->copySlotRange(0, reserved.bvals.begin(), bcap, false);
|
||||
a->clearSlotRange(bcap, a->capacity - bcap);
|
||||
|
||||
b->updateFixedSlots(bfixed);
|
||||
b->slots = reserved.newbslots;
|
||||
b->capacity = Max(bfixed, acap);
|
||||
b->copySlotRange(0, reserved.avals.begin(), acap);
|
||||
b->copySlotRange(0, reserved.avals.begin(), acap, false);
|
||||
b->clearSlotRange(acap, b->capacity - acap);
|
||||
|
||||
/* Make sure the destructor for reserved doesn't free the slots. */
|
||||
@ -4448,27 +4461,31 @@ JSObject::clearSlotRange(size_t start, size_t length)
|
||||
{
|
||||
JS_ASSERT(start + length <= capacity);
|
||||
if (isDenseArray()) {
|
||||
ClearValueRange(slots + start, length, true);
|
||||
ClearValueRange(compartment(), slots + start, length, true);
|
||||
} else {
|
||||
size_t fixed = numFixedSlots();
|
||||
if (start < fixed) {
|
||||
if (start + length < fixed) {
|
||||
ClearValueRange(fixedSlots() + start, length, false);
|
||||
ClearValueRange(compartment(), fixedSlots() + start, length, false);
|
||||
} else {
|
||||
size_t localClear = fixed - start;
|
||||
ClearValueRange(fixedSlots() + start, localClear, false);
|
||||
ClearValueRange(slots, length - localClear, false);
|
||||
ClearValueRange(compartment(), fixedSlots() + start, localClear, false);
|
||||
ClearValueRange(compartment(), slots, length - localClear, false);
|
||||
}
|
||||
} else {
|
||||
ClearValueRange(slots + start - fixed, length, false);
|
||||
ClearValueRange(compartment(), slots + start - fixed, length, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
JSObject::copySlotRange(size_t start, const Value *vector, size_t length)
|
||||
JSObject::copySlotRange(size_t start, const Value *vector, size_t length, bool valid)
|
||||
{
|
||||
JS_ASSERT(start + length <= capacity);
|
||||
|
||||
if (valid)
|
||||
prepareSlotRangeForOverwrite(start, start + length);
|
||||
|
||||
if (isDenseArray()) {
|
||||
memcpy(slots + start, vector, length * sizeof(Value));
|
||||
} else {
|
||||
@ -4522,7 +4539,7 @@ JSObject::allocSlots(JSContext *cx, size_t newcap)
|
||||
|
||||
uint32 allocCount = numDynamicSlots(newcap);
|
||||
|
||||
Value *tmpslots = (Value*) cx->malloc_(allocCount * sizeof(Value));
|
||||
HeapValue *tmpslots = (HeapValue*) cx->malloc_(allocCount * sizeof(HeapValue));
|
||||
if (!tmpslots)
|
||||
return false; /* Leave slots at inline buffer. */
|
||||
slots = tmpslots;
|
||||
@ -4530,12 +4547,12 @@ JSObject::allocSlots(JSContext *cx, size_t newcap)
|
||||
|
||||
if (isDenseArray()) {
|
||||
/* Copy over anything from the inline buffer. */
|
||||
memcpy(slots, fixedSlots(), getDenseArrayInitializedLength() * sizeof(Value));
|
||||
memcpy(slots, fixedSlots(), getDenseArrayInitializedLength() * sizeof(HeapValue));
|
||||
if (!cx->typeInferenceEnabled())
|
||||
backfillDenseArrayHoles(cx);
|
||||
} else {
|
||||
/* Clear out the new slots without copying. */
|
||||
ClearValueRange(slots, allocCount, false);
|
||||
InitValueRange(slots, allocCount, false);
|
||||
}
|
||||
|
||||
Probes::resizeObject(cx, this, oldSize, slotsAndStructSize());
|
||||
@ -4592,8 +4609,8 @@ JSObject::growSlots(JSContext *cx, size_t newcap)
|
||||
uint32 oldAllocCount = numDynamicSlots(oldcap);
|
||||
uint32 allocCount = numDynamicSlots(actualCapacity);
|
||||
|
||||
Value *tmpslots = (Value*) cx->realloc_(slots, oldAllocCount * sizeof(Value),
|
||||
allocCount * sizeof(Value));
|
||||
HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, oldAllocCount * sizeof(HeapValue),
|
||||
allocCount * sizeof(HeapValue));
|
||||
if (!tmpslots)
|
||||
return false; /* Leave dslots as its old size. */
|
||||
|
||||
@ -4606,7 +4623,7 @@ JSObject::growSlots(JSContext *cx, size_t newcap)
|
||||
backfillDenseArrayHoles(cx);
|
||||
} else {
|
||||
/* Clear the new slots we added. */
|
||||
ClearValueRange(slots + oldAllocCount, allocCount - oldAllocCount, false);
|
||||
InitValueRange(slots + oldAllocCount, allocCount - oldAllocCount, false);
|
||||
}
|
||||
|
||||
if (changed && isGlobal())
|
||||
@ -4629,6 +4646,8 @@ JSObject::shrinkSlots(JSContext *cx, size_t newcap)
|
||||
if (isCall())
|
||||
return;
|
||||
|
||||
JS_ASSERT_IF(isDenseArray(), initializedLength() <= newcap);
|
||||
|
||||
uint32 oldcap = numSlots();
|
||||
JS_ASSERT(newcap <= oldcap);
|
||||
JS_ASSERT(newcap >= slotSpan());
|
||||
@ -4650,7 +4669,7 @@ JSObject::shrinkSlots(JSContext *cx, size_t newcap)
|
||||
newcap = Max(newcap, size_t(SLOT_CAPACITY_MIN));
|
||||
newcap = Max(newcap, numFixedSlots());
|
||||
|
||||
Value *tmpslots = (Value*) cx->realloc_(slots, newcap * sizeof(Value));
|
||||
HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, newcap * sizeof(HeapValue));
|
||||
if (!tmpslots)
|
||||
return; /* Leave slots at its old size. */
|
||||
|
||||
|
128
js/src/jsobj.h
@ -59,6 +59,7 @@
|
||||
#include "jslock.h"
|
||||
#include "jscell.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
#include "vm/String.h"
|
||||
|
||||
namespace nanojit { class ValidateWriter; }
|
||||
@ -427,7 +428,7 @@ struct JSObject : js::gc::Cell {
|
||||
* Private pointer to the last added property and methods to manipulate the
|
||||
* list it links among properties in this scope.
|
||||
*/
|
||||
js::Shape *lastProp;
|
||||
js::HeapPtrShape lastProp;
|
||||
|
||||
private:
|
||||
js::Class *clasp;
|
||||
@ -488,25 +489,25 @@ struct JSObject : js::gc::Cell {
|
||||
NSLOTS_LIMIT = JS_BIT(NSLOTS_BITS)
|
||||
};
|
||||
|
||||
uint32 flags; /* flags */
|
||||
uint32 objShape; /* copy of lastProp->shape, or override if different */
|
||||
uint32 flags; /* flags */
|
||||
uint32 objShape; /* copy of lastProp->shape, or override if different */
|
||||
|
||||
union {
|
||||
/* If prototype, type of values using this as their prototype. */
|
||||
js::types::TypeObject *newType;
|
||||
/*
|
||||
* If prototype, type of values using this as their prototype. If a dense
|
||||
* array, this holds the initialized length (see jsarray.cpp).
|
||||
*/
|
||||
js::HeapPtr<js::types::TypeObject, jsuword> newType;
|
||||
|
||||
/* If dense array, the initialized length (see jsarray.cpp). */
|
||||
jsuword initializedLength;
|
||||
};
|
||||
jsuword &initializedLength() { return *newType.unsafeGetUnioned(); }
|
||||
|
||||
JS_FRIEND_API(size_t) sizeOfSlotsArray(JSUsableSizeFun usf);
|
||||
|
||||
JSObject *parent; /* object's parent */
|
||||
void *privateData; /* private data */
|
||||
jsuword capacity; /* total number of available slots */
|
||||
js::HeapPtrObject parent; /* object's parent */
|
||||
void *privateData; /* private data */
|
||||
jsuword capacity; /* total number of available slots */
|
||||
|
||||
private:
|
||||
js::Value *slots; /* dynamically allocated slots,
|
||||
js::HeapValue *slots; /* dynamically allocated slots,
|
||||
or pointer to fixedSlots() for
|
||||
dense arrays. */
|
||||
|
||||
@ -515,13 +516,12 @@ struct JSObject : js::gc::Cell {
|
||||
* set, this is the prototype's default 'new' type and can only be used
|
||||
* to get that prototype.
|
||||
*/
|
||||
js::types::TypeObject *type_;
|
||||
js::HeapPtr<js::types::TypeObject> type_;
|
||||
|
||||
/* Make the type object to use for LAZY_TYPE objects. */
|
||||
void makeLazyType(JSContext *cx);
|
||||
|
||||
public:
|
||||
|
||||
inline bool isNative() const;
|
||||
inline bool isNewborn() const;
|
||||
|
||||
@ -537,7 +537,6 @@ struct JSObject : js::gc::Cell {
|
||||
return &getClass()->ops;
|
||||
}
|
||||
|
||||
inline void trace(JSTracer *trc);
|
||||
inline void scanSlots(js::GCMarker *gcmarker);
|
||||
|
||||
uint32 shape() const {
|
||||
@ -617,6 +616,7 @@ struct JSObject : js::gc::Cell {
|
||||
|
||||
bool hasOwnShape() const { return !!(flags & OWN_SHAPE); }
|
||||
|
||||
inline void initMap(js::Shape *amap);
|
||||
inline void setMap(js::Shape *amap);
|
||||
|
||||
inline void setSharedNonNativeMap();
|
||||
@ -730,13 +730,14 @@ struct JSObject : js::gc::Cell {
|
||||
static inline size_t getFixedSlotOffset(size_t slot);
|
||||
static inline size_t offsetOfCapacity() { return offsetof(JSObject, capacity); }
|
||||
static inline size_t offsetOfSlots() { return offsetof(JSObject, slots); }
|
||||
static inline size_t offsetOfInitializedLength() { return offsetof(JSObject, newType); }
|
||||
|
||||
/*
|
||||
* Get a raw pointer to the object's slots, or a slot of the object given
|
||||
* a previous value for its since-reallocated dynamic slots.
|
||||
*/
|
||||
inline const js::Value *getRawSlots();
|
||||
inline const js::Value *getRawSlot(size_t slot, const js::Value *slots);
|
||||
inline const js::HeapValue *getRawSlots();
|
||||
inline const js::HeapValue *getRawSlot(size_t slot, const js::HeapValue *slots);
|
||||
|
||||
/* Whether a slot is at a fixed offset from this object. */
|
||||
inline bool isFixedSlot(size_t slot);
|
||||
@ -753,7 +754,7 @@ struct JSObject : js::gc::Cell {
|
||||
inline size_t numDynamicSlots(size_t capacity) const;
|
||||
|
||||
private:
|
||||
inline js::Value* fixedSlots() const;
|
||||
inline js::HeapValue *fixedSlots() const;
|
||||
|
||||
protected:
|
||||
inline bool hasContiguousSlots(size_t start, size_t count) const;
|
||||
@ -772,6 +773,12 @@ struct JSObject : js::gc::Cell {
|
||||
return true;
|
||||
}

/*
* Trigger the write barrier on a range of slots that will no longer be
* reachable.
*/
inline void prepareSlotRangeForOverwrite(size_t start, size_t end);

/*
* Fill a range of slots with holes or undefined, depending on whether this
* is a dense array.
@ -780,9 +787,11 @@ struct JSObject : js::gc::Cell {

/*
* Copy a flat array of slots to this object at a start slot. Caller must
* ensure there are enough slots in this object.
* ensure there are enough slots in this object. If |valid|, then the slots
* being overwritten hold valid data and must be invalidated for the write
* barrier.
*/
void copySlotRange(size_t start, const js::Value *vector, size_t length);
void copySlotRange(size_t start, const js::Value *vector, size_t length, bool valid);

/*
|
||||
* Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp) +
|
||||
@ -817,12 +826,13 @@ struct JSObject : js::gc::Cell {
|
||||
|
||||
void rollbackProperties(JSContext *cx, uint32 slotSpan);
|
||||
|
||||
js::Value *getSlotAddress(uintN slot) {
|
||||
js::HeapValue *getSlotAddress(uintN slot) {
|
||||
/*
|
||||
* This can be used to get the address of the end of the slots for the
|
||||
* object, which may be necessary when fetching zero-length arrays of
|
||||
* slots (e.g. for callObjVarArray).
|
||||
*/
|
||||
JS_ASSERT(!isDenseArray());
|
||||
JS_ASSERT(slot <= capacity);
|
||||
size_t fixed = numFixedSlots();
|
||||
if (slot < fixed)
|
||||
@ -830,12 +840,12 @@ struct JSObject : js::gc::Cell {
|
||||
return slots + (slot - fixed);
|
||||
}
|
||||
|
||||
js::Value &getSlotRef(uintN slot) {
|
||||
js::HeapValue &getSlotRef(uintN slot) {
|
||||
JS_ASSERT(slot < capacity);
|
||||
return *getSlotAddress(slot);
|
||||
}
|
||||
|
||||
inline js::Value &nativeGetSlotRef(uintN slot);
|
||||
inline js::HeapValue &nativeGetSlotRef(uintN slot);
|
||||
|
||||
const js::Value &getSlot(uintN slot) const {
|
||||
JS_ASSERT(slot < capacity);
|
||||
@ -847,22 +857,22 @@ struct JSObject : js::gc::Cell {
|
||||
|
||||
inline const js::Value &nativeGetSlot(uintN slot) const;
|
||||
|
||||
void setSlot(uintN slot, const js::Value &value) {
|
||||
JS_ASSERT(slot < capacity);
|
||||
getSlotRef(slot) = value;
|
||||
}
|
||||
inline void setSlot(uintN slot, const js::Value &value);
|
||||
inline void initSlot(uintN slot, const js::Value &value);
|
||||
inline void initSlotUnchecked(uintN slot, const js::Value &value);
|
||||
|
||||
inline void nativeSetSlot(uintN slot, const js::Value &value);
|
||||
inline void nativeSetSlotWithType(JSContext *cx, const js::Shape *shape, const js::Value &value);
|
||||
|
||||
inline js::Value getReservedSlot(uintN index) const;
|
||||
inline js::HeapValue &getReservedSlotRef(uintN index);
|
||||
|
||||
/* Call this only after the appropriate ensure{Class,Instance}ReservedSlots call. */
|
||||
inline void setReservedSlot(uintN index, const js::Value &v);
|
||||
|
||||
/* For slots which are known to always be fixed, due to the way they are allocated. */
|
||||
|
||||
js::Value &getFixedSlotRef(uintN slot) {
|
||||
js::HeapValue &getFixedSlotRef(uintN slot) {
|
||||
JS_ASSERT(slot < numFixedSlots());
|
||||
return fixedSlots()[slot];
|
||||
}
|
||||
@ -872,10 +882,8 @@ struct JSObject : js::gc::Cell {
|
||||
return fixedSlots()[slot];
|
||||
}
|
||||
|
||||
void setFixedSlot(uintN slot, const js::Value &value) {
|
||||
JS_ASSERT(slot < numFixedSlots());
|
||||
fixedSlots()[slot] = value;
|
||||
}
|
||||
inline void setFixedSlot(uintN slot, const js::Value &value);
|
||||
inline void initFixedSlot(uintN slot, const js::Value &value);
|
||||
|
||||
/* Defined in jsscopeinlines.h to avoid including implementation dependencies here. */
|
||||
inline void updateShape(JSContext *cx);
|
||||
@ -912,7 +920,7 @@ struct JSObject : js::gc::Cell {
|
||||
return type_;
|
||||
}
|
||||
|
||||
js::types::TypeObject *typeFromGC() const {
|
||||
const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
|
||||
/* Direct field access for use by GC. */
|
||||
return type_;
|
||||
}
|
||||
@ -921,13 +929,14 @@ struct JSObject : js::gc::Cell {
|
||||
|
||||
inline void clearType();
|
||||
inline void setType(js::types::TypeObject *newType);
|
||||
inline void initType(js::types::TypeObject *newType);
|
||||
|
||||
inline js::types::TypeObject *getNewType(JSContext *cx, JSFunction *fun = NULL,
|
||||
bool markUnknown = false);
|
||||
private:
|
||||
void makeNewType(JSContext *cx, JSFunction *fun, bool markUnknown);
|
||||
public:
|
||||
|
||||
public:
|
||||
/* Set a new prototype for an object with a singleton type. */
|
||||
bool splicePrototype(JSContext *cx, JSObject *proto);
|
||||
|
||||
@ -945,18 +954,9 @@ struct JSObject : js::gc::Cell {
|
||||
return parent;
|
||||
}
|
||||
|
||||
void clearParent() {
|
||||
parent = NULL;
|
||||
}
|
||||
|
||||
void setParent(JSObject *newParent) {
|
||||
#ifdef DEBUG
|
||||
for (JSObject *obj = newParent; obj; obj = obj->getParent())
|
||||
JS_ASSERT(obj != this);
|
||||
#endif
|
||||
setDelegateNullSafe(newParent);
|
||||
parent = newParent;
|
||||
}
|
||||
inline void clearParent();
|
||||
inline void setParent(JSObject *newParent);
|
||||
inline void initParent(JSObject *newParent);
|
||||
|
||||
JS_FRIEND_API(js::GlobalObject *) getGlobal() const;
|
||||
|
||||
@ -971,10 +971,8 @@ struct JSObject : js::gc::Cell {
|
||||
return privateData;
|
||||
}
|
||||
|
||||
void setPrivate(void *data) {
|
||||
JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
|
||||
privateData = data;
|
||||
}
|
||||
inline void initPrivate(void *data);
|
||||
inline void setPrivate(void *data);
|
||||
|
||||
/* N.B. Infallible: NULL means 'no principal', not an error. */
|
||||
inline JSPrincipals *principals(JSContext *cx);
|
||||
@ -1043,11 +1041,14 @@ struct JSObject : js::gc::Cell {
|
||||
inline void setDenseArrayInitializedLength(uint32 length);
|
||||
inline void ensureDenseArrayInitializedLength(JSContext *cx, uintN index, uintN extra);
|
||||
inline void backfillDenseArrayHoles(JSContext *cx);
|
||||
inline const js::Value* getDenseArrayElements();
|
||||
inline js::HeapValueArray getDenseArrayElements();
|
||||
inline const js::Value &getDenseArrayElement(uintN idx);
|
||||
inline void setDenseArrayElement(uintN idx, const js::Value &val);
|
||||
inline void initDenseArrayElement(uintN idx, const js::Value &val);
|
||||
inline void setDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val);
|
||||
inline void initDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val);
|
||||
inline void copyDenseArrayElements(uintN dstStart, const js::Value *src, uintN count);
|
||||
inline void initDenseArrayElements(uintN dstStart, const js::Value *src, uintN count);
|
||||
inline void moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count);
|
||||
inline void shrinkDenseArrayElements(JSContext *cx, uintN cap);
|
||||
inline bool denseArrayHasInlineSlots() const;
|
||||
@ -1156,11 +1157,11 @@ struct JSObject : js::gc::Cell {
|
||||
|
||||
inline JSFunction *getFunctionPrivate() const;
|
||||
|
||||
inline js::Value *getFlatClosureUpvars() const;
|
||||
inline js::FlatClosureData *getFlatClosureData() const;
|
||||
inline js::Value getFlatClosureUpvar(uint32 i) const;
|
||||
inline const js::Value &getFlatClosureUpvar(uint32 i);
|
||||
inline void setFlatClosureUpvar(uint32 i, const js::Value &v);
|
||||
inline void setFlatClosureUpvars(js::Value *upvars);
|
||||
inline void setFlatClosureData(js::FlatClosureData *data);
|
||||
|
||||
/* See comments in fun_finalize. */
|
||||
inline void finalizeUpvarsIfFlatClosure();
|
||||
@ -1240,12 +1241,7 @@ struct JSObject : js::gc::Cell {
|
||||
inline bool isCallable();
|
||||
|
||||
/* Do initialization required immediately after allocation. */
|
||||
void earlyInit(jsuword capacity) {
|
||||
this->capacity = capacity;
|
||||
|
||||
/* Stops obj from being scanned until initializated. */
|
||||
lastProp = NULL;
|
||||
}
|
||||
inline void earlyInit(jsuword capacity);
|
||||
|
||||
/* The map field is not initialized here and should be set separately. */
|
||||
void init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type,
|
||||
@ -1503,6 +1499,11 @@ struct JSObject : js::gc::Cell {
|
||||
/*** For jit compiler: ***/
|
||||
|
||||
static size_t offsetOfClassPointer() { return offsetof(JSObject, clasp); }
|
||||
|
||||
static inline void writeBarrierPre(JSObject *obj);
|
||||
static inline void writeBarrierPost(JSObject *obj, void *addr);
|
||||
inline void privateWriteBarrierPre(void **oldval);
|
||||
inline void privateWriteBarrierPost(void **oldval);
|
||||
};
|
||||
|
||||
/*
|
||||
@ -1522,9 +1523,10 @@ operator!=(const JSObject &lhs, const JSObject &rhs)
|
||||
return &lhs != &rhs;
|
||||
}
|
||||
|
||||
inline js::Value*
|
||||
JSObject::fixedSlots() const {
|
||||
return (js::Value*) (jsuword(this) + sizeof(JSObject));
|
||||
inline js::HeapValue*
|
||||
JSObject::fixedSlots() const
|
||||
{
|
||||
return (js::HeapValue *) (jsuword(this) + sizeof(JSObject));
|
||||
}
|
||||
|
||||
inline size_t
|
||||
|
@ -1,4 +1,4 @@
|
||||
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||
* vim: set ts=8 sw=4 et tw=99:
|
||||
*
|
||||
* ***** BEGIN LICENSE BLOCK *****
|
||||
@ -42,9 +42,11 @@
|
||||
#define jsobjinlines_h___
|
||||
|
||||
#include <new>
|
||||
|
||||
#include "jsarray.h"
|
||||
#include "jsdate.h"
|
||||
#include "jsfun.h"
|
||||
#include "jsgcmark.h"
|
||||
#include "jsiter.h"
|
||||
#include "jslock.h"
|
||||
#include "jsobj.h"
|
||||
@ -65,6 +67,8 @@
|
||||
#include "jsscriptinlines.h"
|
||||
#include "jsstr.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
#include "js/TemplateLib.h"
|
||||
#include "vm/GlobalObject.h"
|
||||
|
||||
#include "jsatominlines.h"
|
||||
@ -72,6 +76,9 @@
|
||||
#include "jsgcinlines.h"
|
||||
#include "jsscopeinlines.h"
|
||||
|
||||
#include "gc/Barrier-inl.h"
|
||||
#include "vm/String-inl.h"
|
||||
|
||||
inline bool
|
||||
JSObject::preventExtensions(JSContext *cx, js::AutoIdVector *props)
|
||||
{
|
||||
@ -277,7 +284,7 @@ inline void
|
||||
JSObject::initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent)
|
||||
{
|
||||
init(cx, &js::CallClass, &js::types::emptyTypeObject, parent, NULL, false);
|
||||
lastProp = bindings.lastShape();
|
||||
lastProp.init(bindings.lastShape());
|
||||
|
||||
/*
|
||||
* If |bindings| is for a function that has extensible parents, that means
|
||||
@ -317,7 +324,8 @@ JSObject::initClonedBlock(JSContext *cx, js::types::TypeObject *type, js::StackF
|
||||
* also need unique shapes. See js::Bindings::extensibleParents.
|
||||
*/
|
||||
inline void
|
||||
JSObject::setBlockOwnShape(JSContext *cx) {
|
||||
JSObject::setBlockOwnShape(JSContext *cx)
|
||||
{
|
||||
JS_ASSERT(isStaticBlock());
|
||||
setOwnShape(js_GenerateShape(cx));
|
||||
}
|
||||
@ -397,15 +405,15 @@ JSObject::methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v)
|
||||
return true;
|
||||
}
|
||||
|
||||
inline const js::Value *
|
||||
inline const js::HeapValue *
|
||||
JSObject::getRawSlots()
|
||||
{
|
||||
JS_ASSERT(isGlobal());
|
||||
return slots;
|
||||
}
|
||||
|
||||
inline const js::Value *
|
||||
JSObject::getRawSlot(size_t slot, const js::Value *slots)
|
||||
inline const js::HeapValue *
|
||||
JSObject::getRawSlot(size_t slot, const js::HeapValue *slots)
|
||||
{
|
||||
JS_ASSERT(isGlobal());
|
||||
size_t fixed = numFixedSlots();
|
||||
@ -447,6 +455,13 @@ JSObject::getReservedSlot(uintN index) const
|
||||
return (index < numSlots()) ? getSlot(index) : js::UndefinedValue();
|
||||
}
|
||||
|
||||
inline js::HeapValue &
|
||||
JSObject::getReservedSlotRef(uintN index)
|
||||
{
|
||||
JS_ASSERT(index < numSlots());
|
||||
return getSlotRef(index);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::setReservedSlot(uintN index, const js::Value &v)
|
||||
{
|
||||
@ -493,6 +508,19 @@ JSObject::hasContiguousSlots(size_t start, size_t count) const
|
||||
return (start + count <= numFixedSlots()) || (start >= numFixedSlots());
|
||||
}
|
||||
|
||||
inline void
JSObject::prepareSlotRangeForOverwrite(size_t start, size_t end)
{
    if (isDenseArray()) {
        JS_ASSERT(end <= initializedLength());
        for (size_t i = start; i < end; i++)
            slots[i].js::HeapValue::~HeapValue();
    } else {
        for (size_t i = start; i < end; i++)
            getSlotRef(i).js::HeapValue::~HeapValue();
    }
}
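Both branches end in an explicit HeapValue destructor call; running the destructor is what fires the pre-barrier for slots whose contents are about to be clobbered. The following standalone sketch uses invented stand-in types (not the real HeapValue or compartment API) to illustrate that idea:

// Stand-in types only; the real HeapValue / JSCompartment interfaces differ.
#include <cstdio>
#include <vector>

struct FakeGCThing { bool marked = false; };

static bool incrementalMarking = true;        // plays the role of needsBarrier()
static std::vector<FakeGCThing *> markStack;  // plays the role of the barrier tracer

struct BarrieredSlot {
    FakeGCThing *ptr = nullptr;

    // Snapshot-at-the-beginning rule: before the old referent stops being
    // reachable through this slot, push it onto the mark stack.
    void pre() {
        if (incrementalMarking && ptr && !ptr->marked) {
            ptr->marked = true;
            markStack.push_back(ptr);
        }
    }

    void set(FakeGCThing *p) { pre(); ptr = p; }  // barriered store
    void init(FakeGCThing *p) { ptr = p; }        // store into brand-new memory
    ~BarrieredSlot() { pre(); }                   // overwrite-by-destruction, as above
};

int main() {
    FakeGCThing a, b;
    {
        BarrieredSlot slot;
        slot.init(&a);   // no barrier: the slot held nothing the GC could have seen
        slot.set(&b);    // pre-barrier queues a before the reference is dropped
    }                    // destructor queues b, mirroring prepareSlotRangeForOverwrite
    std::printf("queued %zu overwritten referents\n", markStack.size());
    return 0;
}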
|
||||
|
||||
inline size_t
|
||||
JSObject::structSize() const
|
||||
{
|
||||
@ -541,7 +569,7 @@ JSObject::setArrayLength(JSContext *cx, uint32 length)
|
||||
js::types::Type::DoubleType());
|
||||
}
|
||||
|
||||
setPrivate((void*)(uintptr_t) length);
|
||||
privateData = (void*)(uintptr_t) length;
|
||||
}
|
||||
|
||||
inline void
|
||||
@ -550,7 +578,7 @@ JSObject::setDenseArrayLength(uint32 length)
|
||||
/* Variant of setArrayLength for use on dense arrays where the length cannot overflow int32. */
|
||||
JS_ASSERT(isDenseArray());
|
||||
JS_ASSERT(length <= INT32_MAX);
|
||||
setPrivate((void*)(uintptr_t) length);
|
||||
privateData = (void*)(uintptr_t) length;
|
||||
}
|
||||
|
||||
inline uint32
|
||||
@ -560,11 +588,11 @@ JSObject::getDenseArrayCapacity()
|
||||
return numSlots();
|
||||
}
|
||||
|
||||
inline const js::Value *
|
||||
inline js::HeapValueArray
|
||||
JSObject::getDenseArrayElements()
|
||||
{
|
||||
JS_ASSERT(isDenseArray());
|
||||
return slots;
|
||||
return js::HeapValueArray(slots);
|
||||
}
|
||||
|
||||
inline const js::Value &
|
||||
@ -581,6 +609,13 @@ JSObject::setDenseArrayElement(uintN idx, const js::Value &val)
|
||||
slots[idx] = val;
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::initDenseArrayElement(uintN idx, const js::Value &val)
|
||||
{
|
||||
JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength());
|
||||
slots[idx].init(val);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::setDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val)
|
||||
{
|
||||
@ -588,8 +623,24 @@ JSObject::setDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value
|
||||
setDenseArrayElement(idx, val);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::initDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Value &val)
|
||||
{
|
||||
js::types::AddTypePropertyId(cx, this, JSID_VOID, val);
|
||||
initDenseArrayElement(idx, val);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::copyDenseArrayElements(uintN dstStart, const js::Value *src, uintN count)
|
||||
{
|
||||
JS_ASSERT(isDenseArray());
|
||||
JS_ASSERT(dstStart + count <= capacity);
|
||||
prepareSlotRangeForOverwrite(dstStart, dstStart + count);
|
||||
memcpy(slots + dstStart, src, count * sizeof(js::Value));
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::initDenseArrayElements(uintN dstStart, const js::Value *src, uintN count)
|
||||
{
|
||||
JS_ASSERT(isDenseArray());
|
||||
JS_ASSERT(dstStart + count <= capacity);
|
||||
@ -602,6 +653,21 @@ JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count)
    JS_ASSERT(isDenseArray());
    JS_ASSERT(dstStart + count <= capacity);
    JS_ASSERT(srcStart + count <= capacity);

    /*
     * Use a custom write barrier here since it's performance sensitive. We
     * only want to barrier the slots that are being overwritten.
     */
    uintN markStart, markEnd;
    if (dstStart > srcStart) {
        markStart = js::Max(srcStart + count, dstStart);
        markEnd = dstStart + count;
    } else {
        markStart = dstStart;
        markEnd = js::Min(dstStart + count, srcStart);
    }
    prepareSlotRangeForOverwrite(markStart, markEnd);

    memmove(slots + dstStart, slots + srcStart, count * sizeof(js::Value));
}
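The interval arithmetic above restricts the pre-barrier to destination slots that lie outside the source range: old values in the overlap are themselves part of the source and survive the memmove elsewhere in the array, so they need no barrier. A small standalone example (names local to the sketch) that replays the computation with concrete numbers:

#include <algorithm>
#include <cstdio>

// Replays the interval arithmetic from moveDenseArrayElements() above:
// only destination slots not covered by the source range are barriered.
static void barrierRange(unsigned dstStart, unsigned srcStart, unsigned count) {
    unsigned markStart, markEnd;
    if (dstStart > srcStart) {
        markStart = std::max(srcStart + count, dstStart);
        markEnd = dstStart + count;
    } else {
        markStart = dstStart;
        markEnd = std::min(dstStart + count, srcStart);
    }
    std::printf("move [%u,%u) -> [%u,%u): barrier [%u,%u)\n",
                srcStart, srcStart + count, dstStart, dstStart + count,
                markStart, markEnd);
}

int main() {
    barrierRange(4, 0, 6);   // overlapping move up: barrier [6,10)
    barrierRange(0, 4, 6);   // overlapping move down: barrier [0,4)
    barrierRange(10, 0, 4);  // disjoint move: barrier the whole [10,14)
    return 0;
}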
|
||||
|
||||
@ -647,15 +713,15 @@ JSObject::setDateUTCTime(const js::Value &time)
|
||||
setFixedSlot(JSSLOT_DATE_UTC_TIME, time);
|
||||
}
|
||||
|
||||
inline js::Value *
|
||||
JSObject::getFlatClosureUpvars() const
|
||||
inline js::FlatClosureData *
|
||||
JSObject::getFlatClosureData() const
|
||||
{
|
||||
#ifdef DEBUG
|
||||
JSFunction *fun = getFunctionPrivate();
|
||||
JS_ASSERT(fun->isFlatClosure());
|
||||
JS_ASSERT(fun->script()->bindings.countUpvars() == fun->script()->upvars()->length);
|
||||
#endif
|
||||
return (js::Value *) getFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS).toPrivate();
|
||||
return (js::FlatClosureData *) getFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS).toPrivate();
|
||||
}
|
||||
|
||||
inline void
|
||||
@ -693,29 +759,29 @@ inline js::Value
|
||||
JSObject::getFlatClosureUpvar(uint32 i) const
|
||||
{
|
||||
JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars());
|
||||
return getFlatClosureUpvars()[i];
|
||||
return getFlatClosureData()->upvars[i];
|
||||
}
|
||||
|
||||
inline const js::Value &
|
||||
JSObject::getFlatClosureUpvar(uint32 i)
|
||||
{
|
||||
JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars());
|
||||
return getFlatClosureUpvars()[i];
|
||||
return getFlatClosureData()->upvars[i];
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::setFlatClosureUpvar(uint32 i, const js::Value &v)
|
||||
{
|
||||
JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars());
|
||||
getFlatClosureUpvars()[i] = v;
|
||||
getFlatClosureData()->upvars[i] = v;
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::setFlatClosureUpvars(js::Value *upvars)
|
||||
JSObject::setFlatClosureData(js::FlatClosureData *data)
|
||||
{
|
||||
JS_ASSERT(isFunction());
|
||||
JS_ASSERT(getFunctionPrivate()->isFlatClosure());
|
||||
setFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS, js::PrivateValue(upvars));
|
||||
setFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS, js::PrivateValue(data));
|
||||
}
|
||||
|
||||
inline bool
|
||||
@ -840,7 +906,7 @@ JSObject::getWithThis() const
|
||||
inline void
|
||||
JSObject::setWithThis(JSObject *thisp)
|
||||
{
|
||||
getFixedSlotRef(JSSLOT_WITH_THIS).setObject(*thisp);
|
||||
setFixedSlot(JSSLOT_WITH_THIS, js::ObjectValue(*thisp));
|
||||
}
|
||||
|
||||
inline bool
|
||||
@ -913,6 +979,28 @@ JSObject::setType(js::types::TypeObject *newType)
|
||||
type_ = newType;
|
||||
}
|
||||
|
||||
inline void
JSObject::earlyInit(jsuword capacity)
{
    this->capacity = capacity;

    /* Stops obj from being scanned until initialized. */
    lastProp.init(NULL);
}
|
||||
|
||||
inline void
|
||||
JSObject::initType(js::types::TypeObject *newType)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(newType);
|
||||
for (JSObject *obj = newType->proto; obj; obj = obj->getProto())
|
||||
JS_ASSERT(obj != this);
|
||||
#endif
|
||||
JS_ASSERT_IF(hasSpecialEquality(), newType->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY));
|
||||
JS_ASSERT(!hasSingletonType());
|
||||
type_.init(newType);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type,
|
||||
JSObject *parent, void *priv, bool denseArray)
|
||||
@ -944,14 +1032,12 @@ JSObject::init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type,
|
||||
slots = fixedSlots();
|
||||
flags |= PACKED_ARRAY;
|
||||
} else {
|
||||
js::ClearValueRange(fixedSlots(), capacity, denseArray);
|
||||
js::InitValueRange(fixedSlots(), capacity, denseArray);
|
||||
}
|
||||
|
||||
newType = NULL;
|
||||
JS_ASSERT(initializedLength == 0);
|
||||
|
||||
setType(type);
|
||||
setParent(parent);
|
||||
newType.init(NULL);
|
||||
initType(type);
|
||||
initParent(parent);
|
||||
}
|
||||
|
||||
inline void
|
||||
@ -977,7 +1063,7 @@ JSObject::initSharingEmptyShape(JSContext *cx,
|
||||
if (!empty)
|
||||
return false;
|
||||
|
||||
setMap(empty);
|
||||
initMap(empty);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -1028,7 +1114,15 @@ JSObject::setMap(js::Shape *amap)
|
||||
objShape = lastProp->shapeid;
|
||||
}
|
||||
|
||||
inline js::Value &
|
||||
inline void
|
||||
JSObject::initMap(js::Shape *amap)
|
||||
{
|
||||
JS_ASSERT(!hasOwnShape());
|
||||
lastProp.init(amap);
|
||||
objShape = lastProp->shapeid;
|
||||
}
|
||||
|
||||
inline js::HeapValue &
|
||||
JSObject::nativeGetSlotRef(uintN slot)
|
||||
{
|
||||
JS_ASSERT(isNative());
|
||||
@ -1400,7 +1494,7 @@ class AutoPropertyDescriptorRooter : private AutoGCRooter, public PropertyDescri
|
||||
friend void AutoGCRooter::trace(JSTracer *trc);
|
||||
};
|
||||
|
||||
static inline bool
|
||||
static inline js::EmptyShape *
|
||||
InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::TypeObject *type,
|
||||
gc::AllocKind kind)
|
||||
{
|
||||
@ -1420,20 +1514,19 @@ InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::Ty
|
||||
if (!empty)
|
||||
goto bad;
|
||||
|
||||
obj->setMap(empty);
|
||||
return true;
|
||||
return empty;
|
||||
|
||||
bad:
|
||||
/* The GC nulls map initially. It should still be null on error. */
|
||||
JS_ASSERT(obj->isNewborn());
|
||||
return false;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
|
||||
{
|
||||
#ifdef JS_THREADSAFE
|
||||
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
|
||||
JS_ASSERT(kind < gc::FINALIZE_OBJECT_LIMIT);
|
||||
/* If the class has no finalizer or a finalizer that is safe to call on
|
||||
* a different thread, we change the finalize kind. For example,
|
||||
* FINALIZE_OBJECT0 calls the finalizer on the main thread,
|
||||
@ -1461,7 +1554,7 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
|
||||
{
|
||||
JS_ASSERT(proto);
|
||||
JS_ASSERT(parent);
|
||||
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
|
||||
JS_ASSERT(kind < gc::FINALIZE_OBJECT_LIMIT);
|
||||
|
||||
types::TypeObject *type = proto->getNewType(cx);
|
||||
if (!type)
|
||||
@ -1487,9 +1580,8 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
|
||||
|
||||
JS_ASSERT(type->canProvideEmptyShape(clasp));
|
||||
js::EmptyShape *empty = type->getEmptyShape(cx, clasp, kind);
|
||||
|
||||
if (empty)
|
||||
obj->setMap(empty);
|
||||
obj->initMap(empty);
|
||||
else
|
||||
obj = NULL;
|
||||
}
|
||||
@ -1660,10 +1752,12 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
|
||||
NULL, clasp == &ArrayClass);
|
||||
|
||||
if (clasp->isNative()) {
|
||||
if (!InitScopeForObject(cx, obj, clasp, type, kind)) {
|
||||
js::EmptyShape *empty = InitScopeForObject(cx, obj, clasp, type, kind);
|
||||
if (!empty) {
|
||||
obj = NULL;
|
||||
goto out;
|
||||
}
|
||||
obj->initMap(empty);
|
||||
} else {
|
||||
obj->setSharedNonNativeMap();
|
||||
}
|
||||
@ -1749,10 +1843,13 @@ NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::
|
||||
(!parent && type->proto) ? type->proto->getParent() : parent,
|
||||
NULL, false);
|
||||
|
||||
if (!InitScopeForObject(cx, obj, &ObjectClass, type, kind)) {
|
||||
js::EmptyShape *empty;
|
||||
empty = InitScopeForObject(cx, obj, &ObjectClass, type, kind);
|
||||
if (!empty) {
|
||||
obj = NULL;
|
||||
goto out;
|
||||
}
|
||||
obj->initMap(empty);
|
||||
|
||||
out:
|
||||
Probes::createObject(cx, obj);
|
||||
@ -1997,4 +2094,126 @@ js_GetProtoIfDenseArray(JSObject *obj)
|
||||
return obj->isDenseArray() ? obj->getProto() : obj;
|
||||
}
|
||||
|
||||
inline void
JSObject::setSlot(uintN slot, const js::Value &value)
{
    JS_ASSERT(slot < capacity);
    getSlotRef(slot).set(compartment(), value);
}

inline void
JSObject::initSlot(uintN slot, const js::Value &value)
{
    JS_ASSERT(getSlot(slot).isUndefined() || getSlot(slot).isMagic(JS_ARRAY_HOLE));
    initSlotUnchecked(slot, value);
}

inline void
JSObject::initSlotUnchecked(uintN slot, const js::Value &value)
{
    JS_ASSERT(slot < capacity);
    getSlotRef(slot).init(value);
}
|
||||
|
||||
inline void
|
||||
JSObject::setFixedSlot(uintN slot, const js::Value &value)
|
||||
{
|
||||
JS_ASSERT(slot < numFixedSlots());
|
||||
fixedSlots()[slot] = value;
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::initFixedSlot(uintN slot, const js::Value &value)
|
||||
{
|
||||
JS_ASSERT(slot < numFixedSlots());
|
||||
fixedSlots()[slot].init(value);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::clearParent()
|
||||
{
|
||||
parent.clear();
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::setParent(JSObject *newParent)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
for (JSObject *obj = newParent; obj; obj = obj->getParent())
|
||||
JS_ASSERT(obj != this);
|
||||
#endif
|
||||
setDelegateNullSafe(newParent);
|
||||
parent = newParent;
|
||||
}
|
||||
|
||||
inline void
|
||||
JSObject::initParent(JSObject *newParent)
|
||||
{
|
||||
JS_ASSERT(isNewborn());
|
||||
#ifdef DEBUG
|
||||
for (JSObject *obj = newParent; obj; obj = obj->getParent())
|
||||
JS_ASSERT(obj != this);
|
||||
#endif
|
||||
setDelegateNullSafe(newParent);
|
||||
parent.init(newParent);
|
||||
}
|
||||
|
||||
inline void
JSObject::setPrivate(void *data)
{
    JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);

    privateWriteBarrierPre(&privateData);
    privateData = data;
    privateWriteBarrierPost(&privateData);
}

inline void
JSObject::initPrivate(void *data)
{
    JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
    privateData = data;
}

inline void
JSObject::privateWriteBarrierPre(void **old)
{
#ifdef JSGC_INCREMENTAL
    JSCompartment *comp = compartment();
    if (comp->needsBarrier()) {
        if (clasp->trace && *old)
            clasp->trace(comp->barrierTracer(), this);
    }
#endif
}

inline void
JSObject::privateWriteBarrierPost(void **old)
{
}

inline void
JSObject::writeBarrierPre(JSObject *obj)
{
#ifdef JSGC_INCREMENTAL
    /*
     * This would normally be a null test, but TypeScript::global uses 0x1 as a
     * special value.
     */
    if (uintptr_t(obj) < 32)
        return;

    JSCompartment *comp = obj->compartment();
    if (comp->needsBarrier()) {
        JS_ASSERT(!comp->rt->gcRunning);
        MarkObjectUnbarriered(comp->barrierTracer(), obj, "write barrier");
    }
#endif
}

inline void
JSObject::writeBarrierPost(JSObject *obj, void *addr)
{
}

#endif /* jsobjinlines_h___ */
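The pre-barrier above is what keeps incremental marking sound: any reference that existed when marking began is pushed to the marker before a store can erase it, while writeBarrierPost is left empty. A self-contained toy program (invented types, not the engine's marker) showing the object-hiding race that the pre-barrier closes:

// Invented toy types; only the barrier logic mirrors the code above.
#include <cstdio>
#include <vector>

struct Node { Node *child = nullptr; bool marked = false; };

static std::vector<Node *> markStack;
static bool marking = false;

static void preBarrier(Node *old) {              // snapshot-at-the-beginning
    if (marking && old && !old->marked) {
        old->marked = true;
        markStack.push_back(old);
    }
}

static void writeChild(Node &parent, Node *newChild, bool useBarrier) {
    if (useBarrier)
        preBarrier(parent.child);                // barrier the edge being erased
    parent.child = newChild;
}

static void drainMarkStack() {
    while (!markStack.empty()) {
        Node *n = markStack.back();
        markStack.pop_back();
        if (n->child && !n->child->marked) {
            n->child->marked = true;
            markStack.push_back(n->child);
        }
    }
}

static bool survives(bool useBarrier) {
    Node root, a, hidden;
    root.child = &a;
    a.child = &hidden;
    markStack.clear();
    marking = true;

    // Incremental step: the collector scans root (now "black") and queues a.
    root.marked = true;
    a.marked = true;
    markStack.push_back(&a);

    // The mutator runs between GC slices: it stores hidden behind the
    // already-scanned root and erases the only unscanned edge to it.
    writeChild(root, &hidden, useBarrier);
    writeChild(a, nullptr, useBarrier);

    drainMarkStack();                            // the collector finishes marking
    marking = false;
    return hidden.marked;                        // unmarked objects would be swept
}

int main() {
    std::printf("without pre-barrier: %s\n", survives(false) ? "kept" : "lost");
    std::printf("with pre-barrier:    %s\n", survives(true)  ? "kept" : "lost");
    return 0;
}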
|
||||
|
@ -36,6 +36,7 @@
|
||||
#ifdef MOZ_ETW
|
||||
#include "jswin.h"
|
||||
#include <evntprov.h>
|
||||
#include <sys/types.h>
|
||||
|
||||
/* Generated from ETWProvider.man */
|
||||
#include "ETWProvider.h"
|
||||
@ -49,11 +50,11 @@
|
||||
#include "jsfun.h"
|
||||
#include "jsinterp.h"
|
||||
#include "jsobj.h"
|
||||
#include "jsprobes.h"
|
||||
#include "jsscript.h"
|
||||
#include "jsstr.h"
|
||||
|
||||
#include "jsprobes.h"
|
||||
#include <sys/types.h>
|
||||
#include "jsobjinlines.h"
|
||||
|
||||
#define TYPEOF(cx,v) (JSVAL_IS_NULL(v) ? JSTYPE_NULL : JS_TypeOfValue(cx,v))
|
||||
|
||||
|
@ -159,6 +159,20 @@ Shape::removeChild(Shape *child)
    }
}

/*
 * We need a read barrier for the shape tree, since these are weak pointers.
 */
static Shape *
ReadBarrier(Shape *shape)
{
#ifdef JSGC_INCREMENTAL
    JSCompartment *comp = shape->compartment();
    if (comp->needsBarrier())
        MarkShapeUnbarriered(comp->barrierTracer(), shape, "read barrier");
#endif
    return shape;
}
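Because the property tree holds its shapes only weakly, handing a cached shape back to the mutator during incremental marking must also mark it, or the collector could sweep a shape that has just come back into use. A minimal sketch of that read-barrier pattern, again with invented stand-in types:

// Invented types; the real code marks Shapes through MarkShapeUnbarriered.
#include <cstdio>

struct Entry { bool marked = false; };

static bool marking = true;                  // compartment->needsBarrier() stand-in

static Entry *readBarrier(Entry *e) {
    if (marking && e && !e->marked)
        e->marked = true;                    // keep the weakly-held entry alive
    return e;
}

struct WeakCache {
    Entry *weak = nullptr;                   // weak: not traced by the collector
    Entry *lookup() { return readBarrier(weak); }
};

int main() {
    Entry shape;
    WeakCache cache;
    cache.weak = &shape;
    Entry *found = cache.lookup();           // the act of reading marks it
    std::printf("marked after lookup: %d\n", (int) found->marked);
    return 0;
}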
|
||||
|
||||
Shape *
|
||||
PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
|
||||
{
|
||||
@ -179,11 +193,11 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
|
||||
if (kidp->isShape()) {
|
||||
shape = kidp->toShape();
|
||||
if (shape->matches(&child))
|
||||
return shape;
|
||||
return ReadBarrier(shape);
|
||||
} else if (kidp->isHash()) {
|
||||
shape = *kidp->toHash()->lookup(&child);
|
||||
if (shape)
|
||||
return shape;
|
||||
return ReadBarrier(shape);
|
||||
} else {
|
||||
/* If kidp->isNull(), we always insert. */
|
||||
}
|
||||
@ -192,7 +206,7 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
|
||||
if (!shape)
|
||||
return NULL;
|
||||
|
||||
new (shape) Shape(child.propid, child.rawGetter, child.rawSetter, child.slot, child.attrs,
|
||||
new (shape) Shape(child.propid, child.getter(), child.setter(), child.slot, child.attrs,
|
||||
child.flags, child.shortid, js_GenerateShape(cx));
|
||||
|
||||
if (!insertChild(cx, parent, shape))
|
||||
@ -256,8 +270,8 @@ Shape::dump(JSContext *cx, FILE *fp) const
|
||||
}
|
||||
|
||||
fprintf(fp, " g/s %p/%p slot %u attrs %x ",
|
||||
JS_FUNC_TO_DATA_PTR(void *, rawGetter),
|
||||
JS_FUNC_TO_DATA_PTR(void *, rawSetter),
|
||||
JS_FUNC_TO_DATA_PTR(void *, getter()),
|
||||
JS_FUNC_TO_DATA_PTR(void *, setter()),
|
||||
slot, attrs);
|
||||
if (attrs) {
|
||||
int first = 1;
|
||||
|
@ -57,19 +57,29 @@
|
||||
using namespace js;
|
||||
using namespace js::gc;
|
||||
|
||||
static inline const Value &
|
||||
GetCall(JSObject *proxy) {
|
||||
static inline const HeapValue &
|
||||
GetCall(JSObject *proxy)
|
||||
{
|
||||
JS_ASSERT(IsFunctionProxy(proxy));
|
||||
return proxy->getSlot(JSSLOT_PROXY_CALL);
|
||||
return proxy->getSlotRef(JSSLOT_PROXY_CALL);
|
||||
}
|
||||
|
||||
static inline Value
|
||||
GetConstruct(JSObject *proxy) {
|
||||
GetConstruct(JSObject *proxy)
|
||||
{
|
||||
if (proxy->numSlots() <= JSSLOT_PROXY_CONSTRUCT)
|
||||
return UndefinedValue();
|
||||
return proxy->getSlot(JSSLOT_PROXY_CONSTRUCT);
|
||||
}
|
||||
|
||||
static inline const HeapValue &
|
||||
GetFunctionProxyConstruct(JSObject *proxy)
|
||||
{
|
||||
JS_ASSERT(IsFunctionProxy(proxy));
|
||||
JS_ASSERT(proxy->numSlots() > JSSLOT_PROXY_CONSTRUCT);
|
||||
return proxy->getSlotRef(JSSLOT_PROXY_CONSTRUCT);
|
||||
}
|
||||
|
||||
static bool
|
||||
OperationInProgress(JSContext *cx, JSObject *proxy)
|
||||
{
|
||||
@ -1191,12 +1201,12 @@ static void
|
||||
proxy_TraceObject(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
GetProxyHandler(obj)->trace(trc, obj);
|
||||
MarkCrossCompartmentValue(trc, GetProxyPrivate(obj), "private");
|
||||
MarkCrossCompartmentValue(trc, GetProxyExtra(obj, 0), "extra0");
|
||||
MarkCrossCompartmentValue(trc, GetProxyExtra(obj, 1), "extra1");
|
||||
MarkCrossCompartmentValue(trc, obj->getReservedSlotRef(JSSLOT_PROXY_PRIVATE), "private");
|
||||
MarkCrossCompartmentValue(trc, obj->getReservedSlotRef(JSSLOT_PROXY_EXTRA + 0), "extra0");
|
||||
MarkCrossCompartmentValue(trc, obj->getReservedSlotRef(JSSLOT_PROXY_EXTRA + 1), "extra1");
|
||||
if (IsFunctionProxy(obj)) {
|
||||
MarkCrossCompartmentValue(trc, GetCall(obj), "call");
|
||||
MarkCrossCompartmentValue(trc, GetConstruct(obj), "construct");
|
||||
MarkCrossCompartmentValue(trc, GetFunctionProxyConstruct(obj), "construct");
|
||||
}
|
||||
}
|
||||
|
||||
@ -1205,7 +1215,7 @@ proxy_TraceFunction(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
proxy_TraceObject(trc, obj);
|
||||
MarkCrossCompartmentValue(trc, GetCall(obj), "call");
|
||||
MarkCrossCompartmentValue(trc, GetConstruct(obj), "construct");
|
||||
MarkCrossCompartmentValue(trc, GetFunctionProxyConstruct(obj), "construct");
|
||||
}
|
||||
|
||||
static JSBool
|
||||
|
@ -93,8 +93,6 @@ typedef struct JSSubString JSSubString;
|
||||
typedef struct JSNativeTraceInfo JSNativeTraceInfo;
|
||||
typedef struct JSSpecializedNative JSSpecializedNative;
|
||||
typedef struct JSXML JSXML;
|
||||
typedef struct JSXMLArray JSXMLArray;
|
||||
typedef struct JSXMLArrayCursor JSXMLArrayCursor;
|
||||
|
||||
/*
|
||||
* Template declarations.
|
||||
@ -120,6 +118,7 @@ class JSWrapper;
|
||||
namespace js {
|
||||
|
||||
struct ArgumentsData;
|
||||
struct FlatClosureData;
|
||||
struct Class;
|
||||
|
||||
class RegExpObject;
|
||||
|
@ -214,7 +214,7 @@ PropertyTable::search(jsid id, bool adding)
|
||||
|
||||
/* Hit: return entry. */
|
||||
shape = SHAPE_CLEAR_COLLISION(stored);
|
||||
if (shape && shape->propid == id)
|
||||
if (shape && shape->propid.get() == id)
|
||||
return spp;
|
||||
|
||||
/* Collision: double hash. */
|
||||
@ -248,7 +248,7 @@ PropertyTable::search(jsid id, bool adding)
|
||||
return (adding && firstRemoved) ? firstRemoved : spp;
|
||||
|
||||
shape = SHAPE_CLEAR_COLLISION(stored);
|
||||
if (shape && shape->propid == id) {
|
||||
if (shape && shape->propid.get() == id) {
|
||||
JS_ASSERT(collision_flag);
|
||||
return spp;
|
||||
}
|
||||
@ -322,8 +322,26 @@ PropertyTable::grow(JSContext *cx)
|
||||
return true;
|
||||
}
|
||||
|
||||
void
Shape::update(js::PropertyOp getter, js::StrictPropertyOp setter, uint8 attrs)
{
    if (hasGetterValue())
        JSObject::writeBarrierPre(getterObject());
    if (hasSetterValue())
        JSObject::writeBarrierPre(setterObject());

    this->rawGetter = getter;
    this->rawSetter = setter;
    this->attrs = attrs;

    if (hasGetterValue())
        JSObject::writeBarrierPost(getterObject(), this);
    if (hasSetterValue())
        JSObject::writeBarrierPost(setterObject(), this);
}
|
||||
|
||||
Shape *
|
||||
Shape::getChild(JSContext *cx, const js::Shape &child, Shape **listp)
|
||||
Shape::getChild(JSContext *cx, const js::Shape &child, HeapPtr<Shape> *listp)
|
||||
{
|
||||
JS_ASSERT(!JSID_IS_VOID(child.propid));
|
||||
JS_ASSERT(!child.inDictionary());
|
||||
@ -444,13 +462,13 @@ JSObject::getChildProperty(JSContext *cx, Shape *parent, Shape &child)
|
||||
}
|
||||
|
||||
Shape *
|
||||
Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
|
||||
Shape::newDictionaryShape(JSContext *cx, const Shape &child, HeapPtr<Shape> *listp)
|
||||
{
|
||||
Shape *dprop = JS_PROPERTY_TREE(cx).newShape(cx);
|
||||
if (!dprop)
|
||||
return NULL;
|
||||
|
||||
new (dprop) Shape(child.propid, child.rawGetter, child.rawSetter, child.slot, child.attrs,
|
||||
new (dprop) Shape(child.propid, child.getter(), child.setter(), child.slot, child.attrs,
|
||||
(child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid,
|
||||
js_GenerateShape(cx), child.slotSpan);
|
||||
|
||||
@ -460,7 +478,7 @@ Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
|
||||
}
|
||||
|
||||
Shape *
|
||||
Shape::newDictionaryList(JSContext *cx, Shape **listp)
|
||||
Shape::newDictionaryList(JSContext *cx, HeapPtr<Shape> *listp)
|
||||
{
|
||||
Shape *shape = *listp;
|
||||
Shape *list = shape;
|
||||
@ -470,8 +488,8 @@ Shape::newDictionaryList(JSContext *cx, Shape **listp)
|
||||
* stack. This way, the GC doesn't see any intermediate state until we
|
||||
* switch listp at the end.
|
||||
*/
|
||||
Shape *root = NULL;
|
||||
Shape **childp = &root;
|
||||
HeapPtrShape root(NULL);
|
||||
HeapPtrShape *childp = &root;
|
||||
|
||||
while (shape) {
|
||||
JS_ASSERT_IF(!shape->frozen(), !shape->inDictionary());
|
||||
@ -819,9 +837,7 @@ JSObject::putProperty(JSContext *cx, jsid id,
|
||||
}
|
||||
}
|
||||
|
||||
shape->rawGetter = getter;
|
||||
shape->rawSetter = setter;
|
||||
shape->attrs = uint8(attrs);
|
||||
shape->update(getter, setter, uint8(attrs));
|
||||
shape->flags = flags | Shape::IN_DICTIONARY;
|
||||
shape->shortid = int16(shortid);
|
||||
|
||||
@ -900,7 +916,7 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m
|
||||
!(attrs & JSPROP_SHARED));
|
||||
|
||||
/* Don't allow method properties to be changed to have a getter. */
|
||||
JS_ASSERT_IF(getter != shape->rawGetter, !shape->isMethod());
|
||||
JS_ASSERT_IF(getter != shape->getter(), !shape->isMethod());
|
||||
|
||||
types::MarkTypePropertyConfigured(cx, this, shape->propid);
|
||||
if (attrs & (JSPROP_GETTER | JSPROP_SETTER))
|
||||
@ -942,9 +958,7 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m
|
||||
}
|
||||
}
|
||||
|
||||
mutableShape->rawGetter = getter;
|
||||
mutableShape->rawSetter = setter;
|
||||
mutableShape->attrs = uint8(attrs);
|
||||
mutableShape->update(getter, setter, uint8(attrs));
|
||||
|
||||
updateFlags(shape);
|
||||
|
||||
@ -976,7 +990,7 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m
|
||||
*/
|
||||
Shape child(shape->propid, getter, setter, shape->slot, attrs, shape->flags,
|
||||
shape->shortid);
|
||||
newShape = putProperty(cx, child.propid, child.rawGetter, child.rawSetter, child.slot,
|
||||
newShape = putProperty(cx, child.propid, child.getter(), child.setter(), child.slot,
|
||||
child.attrs, child.flags, child.shortid);
|
||||
}
|
||||
|
||||
@ -1189,16 +1203,16 @@ JSObject::methodShapeChange(JSContext *cx, const Shape &shape)
|
||||
JS_ASSERT(shape.methodObject() == prev.toObject());
|
||||
JS_ASSERT(canHaveMethodBarrier());
|
||||
JS_ASSERT(hasMethodBarrier());
|
||||
JS_ASSERT(!shape.rawSetter);
|
||||
JS_ASSERT(!shape.setter());
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Pass null to make a stub getter, but pass along shape.rawSetter to
|
||||
* Pass null to make a stub getter, but pass along shape.setter() to
|
||||
* preserve watchpoints. Clear Shape::METHOD from flags as we are
|
||||
* despecializing from a method memoized in the property tree to a
|
||||
* plain old function-valued property.
|
||||
*/
|
||||
result = putProperty(cx, shape.propid, NULL, shape.rawSetter, shape.slot,
|
||||
result = putProperty(cx, shape.propid, NULL, shape.setter(), shape.slot,
|
||||
shape.attrs,
|
||||
shape.getFlags() & ~Shape::METHOD,
|
||||
shape.shortid);
|
||||
|
@ -330,7 +330,7 @@ struct Shape : public js::gc::Cell
|
||||
|
||||
inline void freeTable(JSContext *cx);
|
||||
|
||||
jsid propid;
|
||||
HeapId propid;
|
||||
|
||||
protected:
|
||||
union {
|
||||
@ -358,26 +358,27 @@ struct Shape : public js::gc::Cell
|
||||
int16 shortid; /* tinyid, or local arg/var index */
|
||||
|
||||
protected:
|
||||
mutable js::Shape *parent; /* parent node, reverse for..in order */
|
||||
mutable HeapPtrShape parent; /* parent node, reverse for..in order */
|
||||
/* kids is valid when !inDictionary(), listp is valid when inDictionary(). */
|
||||
union {
|
||||
mutable js::KidsPointer kids; /* null, single child, or a tagged ptr
|
||||
to many-kids data structure */
|
||||
mutable js::Shape **listp; /* dictionary list starting at lastProp
|
||||
mutable HeapPtrShape *listp; /* dictionary list starting at lastProp
|
||||
has a double-indirect back pointer,
|
||||
either to shape->parent if not last,
|
||||
else to obj->lastProp */
|
||||
};
|
||||
|
||||
static inline js::Shape **search(JSContext *cx, js::Shape **startp, jsid id,
|
||||
static inline js::Shape **search(JSContext *cx, HeapPtr<Shape> *startp, jsid id,
|
||||
bool adding = false);
|
||||
static js::Shape *newDictionaryShape(JSContext *cx, const js::Shape &child, js::Shape **listp);
|
||||
static js::Shape *newDictionaryList(JSContext *cx, js::Shape **listp);
|
||||
static js::Shape *newDictionaryShape(JSContext *cx, const js::Shape &child,
|
||||
HeapPtr<Shape> *listp);
|
||||
static js::Shape *newDictionaryList(JSContext *cx, HeapPtr<Shape> *listp);
|
||||
|
||||
inline void removeFromDictionary(JSObject *obj) const;
|
||||
inline void insertIntoDictionary(js::Shape **dictp);
|
||||
inline void insertIntoDictionary(HeapPtr<Shape> *dictp);
|
||||
|
||||
js::Shape *getChild(JSContext *cx, const js::Shape &child, js::Shape **listp);
|
||||
js::Shape *getChild(JSContext *cx, const js::Shape &child, HeapPtr<Shape> *listp);
|
||||
|
||||
bool hashify(JSContext *cx);
|
||||
|
||||
@ -574,6 +575,8 @@ struct Shape : public js::gc::Cell
|
||||
return hasSetterValue() && setterObj ? js::ObjectValue(*setterObj) : js::UndefinedValue();
|
||||
}
|
||||
|
||||
void update(js::PropertyOp getter, js::StrictPropertyOp setter, uint8 attrs);
|
||||
|
||||
inline JSDHashNumber hash() const;
|
||||
inline bool matches(const js::Shape *p) const;
|
||||
inline bool matchesParamsAfterId(PropertyOp agetter, StrictPropertyOp asetter,
|
||||
@ -647,6 +650,16 @@ struct Shape : public js::gc::Cell
|
||||
|
||||
void finalize(JSContext *cx);
|
||||
void removeChild(js::Shape *child);
|
||||
|
||||
inline static void writeBarrierPre(const js::Shape *shape);
|
||||
inline static void writeBarrierPost(const js::Shape *shape, void *addr);
|
||||
|
||||
/*
|
||||
* All weak references need a read barrier for incremental GC. This getter
|
||||
* method implements the read barrier. It's used to obtain initial shapes
|
||||
* from the compartment.
|
||||
*/
|
||||
inline static void readBarrier(const js::Shape *shape);
|
||||
};
|
||||
|
||||
struct EmptyShape : public js::Shape
|
||||
@ -662,18 +675,10 @@ struct EmptyShape : public js::Shape
|
||||
return new (eprop) EmptyShape(cx->compartment, clasp);
|
||||
}
|
||||
|
||||
static EmptyShape *ensure(JSContext *cx, js::Class *clasp, EmptyShape **shapep) {
|
||||
EmptyShape *shape = *shapep;
|
||||
if (!shape) {
|
||||
if (!(shape = create(cx, clasp)))
|
||||
return NULL;
|
||||
return *shapep = shape;
|
||||
}
|
||||
return shape;
|
||||
}
|
||||
static inline EmptyShape *ensure(JSContext *cx, js::Class *clasp,
|
||||
ReadBarriered<EmptyShape> *shapep);
|
||||
|
||||
static inline EmptyShape *getEmptyArgumentsShape(JSContext *cx);
|
||||
|
||||
static inline EmptyShape *getEmptyBlockShape(JSContext *cx);
|
||||
static inline EmptyShape *getEmptyCallShape(JSContext *cx);
|
||||
static inline EmptyShape *getEmptyDeclEnvShape(JSContext *cx);
|
||||
@ -730,7 +735,7 @@ namespace js {
|
||||
* |emptyShape| is the EmptyShape at the start of the shape lineage.
|
||||
*/
|
||||
JS_ALWAYS_INLINE js::Shape **
|
||||
Shape::search(JSContext *cx, js::Shape **startp, jsid id, bool adding)
|
||||
Shape::search(JSContext *cx, HeapPtr<js::Shape> *startp, jsid id, bool adding)
|
||||
{
|
||||
js::Shape *start = *startp;
|
||||
if (start->hasTable())
|
||||
@ -757,12 +762,12 @@ Shape::search(JSContext *cx, js::Shape **startp, jsid id, bool adding)
|
||||
* the end). This avoids an extra load per iteration at the cost (if the
|
||||
* search fails) of an extra load and id test at the end.
|
||||
*/
|
||||
js::Shape **spp;
|
||||
HeapPtr<js::Shape> *spp;
|
||||
for (spp = startp; js::Shape *shape = *spp; spp = &shape->parent) {
|
||||
if (shape->propid == id)
|
||||
return spp;
|
||||
if (shape->propid.get() == id)
|
||||
return spp->unsafeGet();
|
||||
}
|
||||
return spp;
|
||||
return spp->unsafeGet();
|
||||
}
|
||||
|
||||
} // namespace js
|
||||
|
@ -49,6 +49,7 @@
|
||||
#include "jsobj.h"
|
||||
#include "jsscope.h"
|
||||
#include "jsgc.h"
|
||||
#include "jsgcmark.h"
|
||||
|
||||
#include "vm/ArgumentsObject.h"
|
||||
#include "vm/StringObject.h"
|
||||
@ -79,12 +80,12 @@ js::types::TypeObject::getEmptyShape(JSContext *cx, js::Class *aclasp,
|
||||
*/
|
||||
JS_ASSERT(this == proto->newType);
|
||||
|
||||
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
|
||||
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind < js::gc::FINALIZE_OBJECT_LIMIT);
|
||||
int i = kind - js::gc::FINALIZE_OBJECT0;
|
||||
|
||||
if (!emptyShapes) {
|
||||
emptyShapes = (js::EmptyShape**)
|
||||
cx->calloc_(sizeof(js::EmptyShape*) * js::gc::FINALIZE_FUNCTION_AND_OBJECT_LAST);
|
||||
emptyShapes = (js::HeapPtr<js::EmptyShape>*)
|
||||
cx->calloc_(sizeof(js::HeapPtr<js::EmptyShape>) * js::gc::FINALIZE_OBJECT_LIMIT);
|
||||
if (!emptyShapes)
|
||||
return NULL;
|
||||
|
||||
@ -92,7 +93,7 @@ js::types::TypeObject::getEmptyShape(JSContext *cx, js::Class *aclasp,
|
||||
* Always fill in emptyShapes[0], so canProvideEmptyShape works.
|
||||
* Other empty shapes are filled in lazily.
|
||||
*/
|
||||
emptyShapes[0] = js::EmptyShape::create(cx, aclasp);
|
||||
emptyShapes[0].init(js::EmptyShape::create(cx, aclasp));
|
||||
if (!emptyShapes[0]) {
|
||||
cx->free_(emptyShapes);
|
||||
emptyShapes = NULL;
|
||||
@ -103,7 +104,7 @@ js::types::TypeObject::getEmptyShape(JSContext *cx, js::Class *aclasp,
|
||||
JS_ASSERT(aclasp == emptyShapes[0]->getClass());
|
||||
|
||||
if (!emptyShapes[i]) {
|
||||
emptyShapes[i] = js::EmptyShape::create(cx, aclasp);
|
||||
emptyShapes[i].init(js::EmptyShape::create(cx, aclasp));
|
||||
if (!emptyShapes[i])
|
||||
return NULL;
|
||||
}
|
||||
@ -154,15 +155,16 @@ StringObject::init(JSContext *cx, JSString *str)
|
||||
{
|
||||
JS_ASSERT(nativeEmpty());
|
||||
|
||||
const Shape **shapep = &cx->compartment->initialStringShape;
|
||||
if (*shapep) {
|
||||
setLastProperty(*shapep);
|
||||
const Shape *shape = cx->compartment->initialStringShape;
|
||||
if (shape) {
|
||||
setLastProperty(shape);
|
||||
} else {
|
||||
*shapep = assignInitialShape(cx);
|
||||
if (!*shapep)
|
||||
shape = assignInitialShape(cx);
|
||||
if (!shape)
|
||||
return false;
|
||||
cx->compartment->initialStringShape = shape;
|
||||
}
|
||||
JS_ASSERT(*shapep == lastProperty());
|
||||
JS_ASSERT(shape == lastProperty());
|
||||
JS_ASSERT(!nativeEmpty());
|
||||
JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))->slot == LENGTH_SLOT);
|
||||
|
||||
@ -232,15 +234,15 @@ Shape::hash() const
|
||||
|
||||
/* Accumulate from least to most random so the low bits are most random. */
|
||||
JS_ASSERT_IF(isMethod(), !rawSetter);
|
||||
if (rawGetter)
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(rawGetter);
|
||||
if (rawSetter)
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(rawSetter);
|
||||
if (getter())
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(getter());
|
||||
if (setter())
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(setter());
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ (flags & PUBLIC_FLAGS);
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ attrs;
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ shortid;
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ slot;
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ JSID_BITS(propid);
|
||||
hash = JS_ROTATE_LEFT32(hash, 4) ^ JSID_BITS(propid.get());
|
||||
return hash;
|
||||
}
|
||||
|
||||
@ -249,8 +251,8 @@ Shape::matches(const js::Shape *other) const
|
||||
{
|
||||
JS_ASSERT(!JSID_IS_VOID(propid));
|
||||
JS_ASSERT(!JSID_IS_VOID(other->propid));
|
||||
return propid == other->propid &&
|
||||
matchesParamsAfterId(other->rawGetter, other->rawSetter, other->slot, other->attrs,
|
||||
return propid.get() == other->propid.get() &&
|
||||
matchesParamsAfterId(other->getter(), other->setter(), other->slot, other->attrs,
|
||||
other->flags, other->shortid);
|
||||
}
|
||||
|
||||
@ -259,8 +261,8 @@ Shape::matchesParamsAfterId(PropertyOp agetter, StrictPropertyOp asetter, uint32
|
||||
uintN aattrs, uintN aflags, intN ashortid) const
|
||||
{
|
||||
JS_ASSERT(!JSID_IS_VOID(propid));
|
||||
return rawGetter == agetter &&
|
||||
rawSetter == asetter &&
|
||||
return getter() == agetter &&
|
||||
setter() == asetter &&
|
||||
slot == aslot &&
|
||||
attrs == aattrs &&
|
||||
((flags ^ aflags) & PUBLIC_FLAGS) == 0 &&
|
||||
@ -333,7 +335,7 @@ Shape::removeFromDictionary(JSObject *obj) const
|
||||
}
|
||||
|
||||
inline void
|
||||
Shape::insertIntoDictionary(js::Shape **dictp)
|
||||
Shape::insertIntoDictionary(HeapPtr<js::Shape> *dictp)
|
||||
{
|
||||
/*
|
||||
* Don't assert inDictionaryMode() here because we may be called from
|
||||
@ -361,6 +363,18 @@ EmptyShape::EmptyShape(JSCompartment *comp, js::Class *aclasp)
|
||||
: js::Shape(comp, aclasp)
|
||||
{}
|
||||
|
||||
/* static */ inline EmptyShape *
|
||||
EmptyShape::ensure(JSContext *cx, js::Class *clasp, ReadBarriered<EmptyShape> *shapep)
|
||||
{
|
||||
EmptyShape *shape = shapep->get();
|
||||
if (!shape) {
|
||||
if (!(shape = create(cx, clasp)))
|
||||
return NULL;
|
||||
shapep->set(shape);
|
||||
}
|
||||
return shape;
|
||||
}
|
||||
|
||||
/* static */ inline EmptyShape *
|
||||
EmptyShape::getEmptyArgumentsShape(JSContext *cx)
|
||||
{
|
||||
@ -397,6 +411,34 @@ EmptyShape::getEmptyWithShape(JSContext *cx)
|
||||
return ensure(cx, &WithClass, &cx->compartment->emptyWithShape);
|
||||
}
|
||||
|
||||
inline void
Shape::writeBarrierPre(const js::Shape *shape)
{
#ifdef JSGC_INCREMENTAL
    if (!shape || shape == &sharedNonNative)
        return;

    JSCompartment *comp = shape->compartment();
    if (comp->needsBarrier())
        MarkShapeUnbarriered(comp->barrierTracer(), shape, "write barrier");
#endif
}

inline void
Shape::writeBarrierPost(const js::Shape *shape, void *addr)
{
}

inline void
Shape::readBarrier(const js::Shape *shape)
{
#ifdef JSGC_INCREMENTAL
    JSCompartment *comp = shape->compartment();
    if (comp->needsBarrier())
        MarkShapeUnbarriered(comp->barrierTracer(), shape, "read barrier");
#endif
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
#endif /* jsscopeinlines_h___ */
|
||||
|
@ -91,7 +91,7 @@ Bindings::lookup(JSContext *cx, JSAtom *name, uintN *indexp) const
|
||||
return NONE;
|
||||
|
||||
Shape *shape =
|
||||
SHAPE_FETCH(Shape::search(cx, const_cast<Shape **>(&lastBinding),
|
||||
SHAPE_FETCH(Shape::search(cx, const_cast<HeapPtr<Shape> *>(&lastBinding),
|
||||
ATOM_TO_JSID(name)));
|
||||
if (!shape)
|
||||
return NONE;
|
||||
@ -195,7 +195,7 @@ Bindings::getLocalNameArray(JSContext *cx, Vector<JSAtom *> *namesp)
|
||||
names[i] = POISON;
|
||||
#endif
|
||||
|
||||
for (Shape::Range r = lastBinding; !r.empty(); r.popFront()) {
|
||||
for (Shape::Range r = lastBinding->all(); !r.empty(); r.popFront()) {
|
||||
const Shape &shape = r.front();
|
||||
uintN index = uint16(shape.shortid);
|
||||
|
||||
@ -644,7 +644,7 @@ js_XDRScript(JSXDRState *xdr, JSScript **scriptp)
|
||||
* to restore the parent chain.
|
||||
*/
|
||||
for (i = 0; i != nobjects; ++i) {
|
||||
JSObject **objp = &script->objects()->vector[i];
|
||||
HeapPtr<JSObject> *objp = &script->objects()->vector[i];
|
||||
uint32 isBlock;
|
||||
if (xdr->mode == JSXDR_ENCODE) {
|
||||
Class *clasp = (*objp)->getClass();
|
||||
@ -654,22 +654,26 @@ js_XDRScript(JSXDRState *xdr, JSScript **scriptp)
|
||||
}
|
||||
if (!JS_XDRUint32(xdr, &isBlock))
|
||||
goto error;
|
||||
JSObject *tmp = *objp;
|
||||
if (isBlock == 0) {
|
||||
if (!js_XDRFunctionObject(xdr, objp))
|
||||
if (!js_XDRFunctionObject(xdr, &tmp))
|
||||
goto error;
|
||||
} else {
|
||||
JS_ASSERT(isBlock == 1);
|
||||
if (!js_XDRBlockObject(xdr, objp))
|
||||
if (!js_XDRBlockObject(xdr, &tmp))
|
||||
goto error;
|
||||
}
|
||||
*objp = tmp;
|
||||
}
|
||||
for (i = 0; i != nupvars; ++i) {
|
||||
if (!JS_XDRUint32(xdr, reinterpret_cast<uint32 *>(&script->upvars()->vector[i])))
|
||||
goto error;
|
||||
}
|
||||
for (i = 0; i != nregexps; ++i) {
|
||||
if (!js_XDRRegExpObject(xdr, &script->regexps()->vector[i]))
|
||||
JSObject *tmp = script->regexps()->vector[i];
|
||||
if (!js_XDRRegExpObject(xdr, &tmp))
|
||||
goto error;
|
||||
script->regexps()->vector[i] = tmp;
|
||||
}
|
||||
for (i = 0; i != nClosedArgs; ++i) {
|
||||
if (!JS_XDRUint32(xdr, &script->closedSlots[i]))
|
||||
@ -712,8 +716,10 @@ js_XDRScript(JSXDRState *xdr, JSScript **scriptp)
|
||||
}
|
||||
|
||||
for (i = 0; i != nconsts; ++i) {
|
||||
if (!JS_XDRValue(xdr, &script->consts()->vector[i]))
|
||||
Value tmp = script->consts()->vector[i];
|
||||
if (!JS_XDRValue(xdr, &tmp))
|
||||
goto error;
|
||||
script->consts()->vector[i] = tmp;
|
||||
}
|
||||
|
||||
xdr->script = oldscript;
|
||||
@ -748,6 +754,7 @@ JSPCCounters::destroy(JSContext *cx)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Shared script filename management.
|
||||
*/
|
||||
@ -988,7 +995,7 @@ JSScript::NewScript(JSContext *cx, uint32 length, uint32 nsrcnotes, uint32 natom
|
||||
if (nconsts != 0) {
|
||||
JS_ASSERT(reinterpret_cast<jsuword>(cursor) % sizeof(jsval) == 0);
|
||||
script->consts()->length = nconsts;
|
||||
script->consts()->vector = reinterpret_cast<Value *>(cursor);
|
||||
script->consts()->vector = (HeapValue *)cursor;
|
||||
cursor += nconsts * sizeof(script->consts()->vector[0]);
|
||||
}
|
||||
|
||||
@ -1000,13 +1007,13 @@ JSScript::NewScript(JSContext *cx, uint32 length, uint32 nsrcnotes, uint32 natom
|
||||
|
||||
if (nobjects != 0) {
|
||||
script->objects()->length = nobjects;
|
||||
script->objects()->vector = reinterpret_cast<JSObject **>(cursor);
|
||||
script->objects()->vector = (HeapPtr<JSObject> *)cursor;
|
||||
cursor += nobjects * sizeof(script->objects()->vector[0]);
|
||||
}
|
||||
|
||||
if (nregexps != 0) {
|
||||
script->regexps()->length = nregexps;
|
||||
script->regexps()->vector = reinterpret_cast<JSObject **>(cursor);
|
||||
script->regexps()->vector = (HeapPtr<JSObject> *)cursor;
|
||||
cursor += nregexps * sizeof(script->regexps()->vector[0]);
|
||||
}
|
||||
|
||||
@ -1202,14 +1209,14 @@ JSScript::NewScriptFromEmitter(JSContext *cx, BytecodeEmitter *bce)
|
||||
return NULL;
|
||||
|
||||
fun->setScript(script);
|
||||
script->u.globalObject = fun->getParent() ? fun->getParent()->getGlobal() : NULL;
|
||||
script->globalObject = fun->getParent() ? fun->getParent()->getGlobal() : NULL;
|
||||
} else {
|
||||
/*
|
||||
* Initialize script->object, if necessary, so that the debugger has a
|
||||
* valid holder object.
|
||||
*/
|
||||
if (bce->flags & TCF_NEED_SCRIPT_GLOBAL)
|
||||
script->u.globalObject = GetCurrentGlobal(cx);
|
||||
script->globalObject = GetCurrentGlobal(cx);
|
||||
}
|
||||
|
||||
/* Tell the debugger about this compiled script. */
|
||||
@ -1217,7 +1224,7 @@ JSScript::NewScriptFromEmitter(JSContext *cx, BytecodeEmitter *bce)
|
||||
if (!bce->parent) {
|
||||
GlobalObject *compileAndGoGlobal = NULL;
|
||||
if (script->compileAndGo) {
|
||||
compileAndGoGlobal = script->u.globalObject;
|
||||
compileAndGoGlobal = script->globalObject;
|
||||
if (!compileAndGoGlobal)
|
||||
compileAndGoGlobal = bce->scopeChain()->getGlobal();
|
||||
}
|
||||
|
@ -49,6 +49,8 @@
|
||||
#include "jsclist.h"
|
||||
#include "jsinfer.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
|
||||
/*
|
||||
* Type of try note associated with each catch or finally block, and also with
|
||||
* for-in loops.
|
||||
@ -132,7 +134,7 @@ typedef struct JSTryNoteArray {
|
||||
} JSTryNoteArray;
|
||||
|
||||
typedef struct JSObjectArray {
|
||||
JSObject **vector; /* array of indexed objects */
|
||||
js::HeapPtrObject *vector; /* array of indexed objects */
|
||||
uint32 length; /* count of indexed objects */
|
||||
} JSObjectArray;
|
||||
|
||||
@ -142,7 +144,7 @@ typedef struct JSUpvarArray {
|
||||
} JSUpvarArray;
|
||||
|
||||
typedef struct JSConstArray {
|
||||
js::Value *vector; /* array of indexed constant values */
|
||||
js::HeapValue *vector; /* array of indexed constant values */
uint32 length;
} JSConstArray;
@ -168,17 +170,15 @@ enum BindingKind { NONE, ARGUMENT, VARIABLE, CONSTANT, UPVAR };
* strict mode eval code, to give such code its own lexical environment).
*/
class Bindings {
js::Shape *lastBinding;
HeapPtr<Shape> lastBinding;
uint16 nargs;
uint16 nvars;
uint16 nupvars;
bool hasExtensibleParents;
public:
inline Bindings(JSContext *cx)
: lastBinding(NULL), nargs(0), nvars(0), nupvars(0), hasExtensibleParents(false)
{
}
inline Bindings(JSContext *cx);
inline ~Bindings();
/*
* Transfers ownership of bindings data from bindings into this fresh
@ -556,23 +556,21 @@ struct JSScript : public js::gc::Cell {
JSPrincipals *principals;/* principals for this script */
jschar *sourceMap; /* source map file or null */
union {
/*
* A global object for the script.
* - All scripts returned by JSAPI functions (JS_CompileScript,
* JS_CompileFile, etc.) have a non-null globalObject.
* - A function script has a globalObject if the function comes from a
* compile-and-go script.
* - Temporary scripts created by obj_eval, JS_EvaluateScript, and
* similar functions never have the globalObject field set; for such
* scripts the global should be extracted from the JS frame that
* execute scripts.
*/
js::GlobalObject *globalObject;
/*
* A global object for the script.
* - All scripts returned by JSAPI functions (JS_CompileScript,
* JS_CompileFile, etc.) have a non-null globalObject.
* - A function script has a globalObject if the function comes from a
* compile-and-go script.
* - Temporary scripts created by obj_eval, JS_EvaluateScript, and
* similar functions never have the globalObject field set; for such
* scripts the global should be extracted from the JS frame that
* execute scripts.
*/
js::HeapPtr<js::GlobalObject, JSScript*> globalObject;
/* Hash table chaining for JSCompartment::evalCache. */
JSScript *evalHashLink;
} u;
/* Hash table chaining for JSCompartment::evalCache. */
JSScript *&evalHashLink() { return *globalObject.unsafeGetUnioned(); }
uint32 *closedSlots; /* vector of closed slots; args first, then vars. */
@ -634,7 +632,7 @@ struct JSScript : public js::gc::Cell {
/* Return creation time global or null. */
js::GlobalObject *getGlobalObjectOrNull() const {
return isCachedEval ? NULL : u.globalObject;
return isCachedEval ? NULL : globalObject.get();
}
private:
@ -818,6 +816,9 @@ struct JSScript : public js::gc::Cell {
#endif
void finalize(JSContext *cx);
static inline void writeBarrierPre(JSScript *script);
static inline void writeBarrierPost(JSScript *script, void *addr);
};
JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
@ -1,4 +1,4 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
@ -55,6 +55,17 @@
namespace js {
inline
Bindings::Bindings(JSContext *cx)
: nargs(0), nvars(0), nupvars(0), hasExtensibleParents(false)
{
}
inline
Bindings::~Bindings()
{
}
inline void
Bindings::transfer(JSContext *cx, Bindings *bindings)
{
@ -215,4 +226,24 @@ JSScript::clearNesting()
}
}
inline void
JSScript::writeBarrierPre(JSScript *script)
{
#ifdef JSGC_INCREMENTAL
if (!script)
return;
JSCompartment *comp = script->compartment();
if (comp->needsBarrier()) {
JS_ASSERT(!comp->rt->gcRunning);
MarkScriptUnbarriered(comp->barrierTracer(), script, "write barrier");
}
#endif
}
inline void
JSScript::writeBarrierPost(JSScript *script, void *addr)
{
}
#endif /* jsscriptinlines_h___ */
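The two hooks above are the heart of the snapshot-at-the-beginning scheme: writeBarrierPre marks the referent that is about to be overwritten, so no edge that existed when the incremental cycle started can be lost, while writeBarrierPost is a placeholder for a future generational barrier. A minimal sketch of how a barriered pointer wrapper would invoke such a hook on assignment — illustrative only, assuming a type T that exposes a static writeBarrierPre hook the way JSScript does; the wrapper name PreBarrieredPtr is not part of this patch:

    // Illustrative pre-barriered pointer wrapper (not from this patch).
    template <class T>
    class PreBarrieredPtr {
        T *value;

        static void pre(T *old) {
            // Snapshot-at-the-beginning: mark the outgoing referent before
            // the edge is overwritten, mirroring JSScript::writeBarrierPre.
            T::writeBarrierPre(old);
        }

      public:
        explicit PreBarrieredPtr(T *p = NULL) : value(p) { }

        PreBarrieredPtr &operator=(T *p) {
            pre(value);   // barrier the old referent first
            value = p;    // then store the new one
            return *this;
        }

        operator T *() const { return value; }
    };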
@ -1883,7 +1883,7 @@ public:
{}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
JSValueType type = getCoercedType(*vp);
if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isGlobalSlotUndemotable(mCx, slot)))
type = JSVAL_TYPE_DOUBLE;
@ -2475,6 +2475,13 @@ TraceRecorder::w_immpIdGC(jsid id)
ptrdiff_t
TraceRecorder::nativeGlobalSlot(const Value* p) const
{
JS_ASSERT(isGlobal(p));
return ptrdiff_t(p - Valueify(globalObj->slots) + globalObj->numFixedSlots());
}
ptrdiff_t
TraceRecorder::nativeGlobalSlot(const HeapValue* p) const
{
JS_ASSERT(isGlobal(p));
return ptrdiff_t(p - globalObj->slots + globalObj->numFixedSlots());
@ -2491,7 +2498,15 @@ TraceRecorder::nativeGlobalOffset(const Value* p) const
bool
TraceRecorder::isGlobal(const Value* p) const
{
return (size_t(p - globalObj->slots) < globalObj->numSlots() - globalObj->numFixedSlots());
return (size_t(p - Valueify(globalObj->slots)) <
globalObj->numSlots() - globalObj->numFixedSlots());
}
bool
TraceRecorder::isGlobal(const HeapValue* p) const
{
return (size_t(p - globalObj->slots) <
globalObj->numSlots() - globalObj->numFixedSlots());
}
bool
@ -2719,7 +2734,7 @@ HasUnreachableGCThings(JSContext *cx, TreeFragment *f)
for (unsigned len = f->gcthings.length(); len; --len) {
Value &v = *vp++;
JS_ASSERT(v.isMarkable());
if (IsAboutToBeFinalized(cx, v.toGCThing()))
if (IsAboutToBeFinalized(cx, v))
return true;
}
const Shape** shapep = f->shapes.data();
@ -2861,31 +2876,46 @@ TraceMonitor::mark(JSTracer *trc)
TracerState* state = tracerState;
while (state) {
if (state->nativeVp)
MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
MarkRootRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
state = state->prev;
}
}
template<class VALUE>
static void
SetValue(JSCompartment *comp, VALUE& dst, const Value &src)
{
dst = src;
}
template<>
void
SetValue(JSCompartment *comp, HeapValue& dst, const Value &src)
{
dst.set(comp, src);
}
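SetValue above is the small dispatch layer that lets one boxing routine serve both kinds of destination: the generic template does a plain assignment into an unbarriered Value, while the HeapValue specialization routes the store through HeapValue::set so the compartment's pre-barrier can fire. A hedged usage sketch (the variables stackSlot and heapSlot are illustrative only; a real HeapValue lives in the GC heap rather than on the stack):

    // Unbarriered destination: plain assignment path.
    Value stackSlot;
    SetValue(cx->compartment, stackSlot, Int32Value(42));

    // Barriered destination: routed through HeapValue::set(comp, src).
    HeapValue heapSlot;   // for illustration only
    SetValue(cx->compartment, heapSlot, Int32Value(42));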
/*
* Box a value from the native stack back into the Value format.
*/
template<typename VALUE>
static inline void
NativeToValue(JSContext* cx, Value& v, JSValueType type, double* slot)
NativeToValue(JSContext* cx, VALUE& v, JSValueType type, double* slot)
{
if (type == JSVAL_TYPE_DOUBLE) {
v = NumberValue(*slot);
SetValue(cx->compartment, v, NumberValue(*slot));
} else if (JS_LIKELY(type <= JSVAL_UPPER_INCL_TYPE_OF_BOXABLE_SET)) {
v.boxNonDoubleFrom(type, (uint64 *)slot);
} else if (type == JSVAL_TYPE_STRORNULL) {
JSString *str = *(JSString **)slot;
v = str ? StringValue(str) : NullValue();
SetValue(cx->compartment, v, str ? StringValue(str) : NullValue());
} else if (type == JSVAL_TYPE_OBJORNULL) {
JSObject *obj = *(JSObject **)slot;
v = obj ? ObjectValue(*obj) : NullValue();
SetValue(cx->compartment, v, obj ? ObjectValue(*obj) : NullValue());
} else {
JS_ASSERT(type == JSVAL_TYPE_BOXED);
JS_STATIC_ASSERT(sizeof(Value) == sizeof(double));
v = *(Value *)slot;
SetValue(cx->compartment, v, *(Value *)slot);
}
#ifdef DEBUG
@ -2974,7 +3004,7 @@ public:
{}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
debug_only_printf(LC_TMTracer, "global%d: ", n);
ValueToNative(*vp, *mTypeMap++, &mGlobal[slot]);
}
@ -3038,7 +3068,7 @@ public:
{}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
debug_only_printf(LC_TMTracer, "global%d=", n);
JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
@ -3840,6 +3870,12 @@ TraceRecorder::get(const Value *p)
return getImpl(p);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::get(const HeapValue *p)
{
return getImpl(p);
}
#ifdef DEBUG
bool
TraceRecorder::isValidFrameObjPtr(void *p)
@ -3896,17 +3932,17 @@ JS_REQUIRES_STACK void
TraceRecorder::checkForGlobalObjectReallocationHelper()
{
debug_only_print0(LC_TMTracer, "globalObj->slots relocated, updating tracker\n");
const Value* src = global_slots;
const Value* dst = globalObj->getRawSlots();
const HeapValue* src = global_slots;
const HeapValue* dst = globalObj->getRawSlots();
jsuint length = globalObj->capacity;
LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
for (jsuint n = 0; n < length; ++n) {
const Value *slot = globalObj->getRawSlot(n, src);
const HeapValue *slot = globalObj->getRawSlot(n, src);
map[n] = tracker.get(slot);
tracker.set(slot, NULL);
}
for (jsuint n = 0; n < length; ++n) {
const Value *slot = globalObj->getRawSlot(n, dst);
const HeapValue *slot = globalObj->getRawSlot(n, dst);
tracker.set(slot, map[n]);
}
global_slots = globalObj->getRawSlots();
@ -3951,12 +3987,12 @@ public:
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
LIns *ins = mRecorder.get(vp);
bool isPromote = IsPromotedInt32(ins);
if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
mRecorder.w.st(mRecorder.get(vp),
EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(vp)));
EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(&vp->get())));
/*
* Aggressively undo speculation so the inner tree will compile
* if this fails.
@ -4039,7 +4075,7 @@ TraceRecorder::adjustCallerTypes(TreeFragment* f)
}
JS_REQUIRES_STACK inline JSValueType
TraceRecorder::determineSlotType(Value* vp)
TraceRecorder::determineSlotType(const Value* vp)
{
if (vp->isNumber()) {
LIns *i = getFromTracker(vp);
@ -4074,8 +4110,8 @@ public:
{}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
*mTypeMap++ = mRecorder.determineSlotType(vp);
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
*mTypeMap++ = mRecorder.determineSlotType(&vp->get());
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
@ -4528,7 +4564,7 @@ class SlotMap : public SlotVisitorBase
SlotInfo()
: vp(NULL), isPromotedInt32(false), lastCheck(TypeCheck_Bad)
{}
SlotInfo(Value* vp, bool isPromotedInt32)
SlotInfo(const Value* vp, bool isPromotedInt32)
: vp(vp), isPromotedInt32(isPromotedInt32), lastCheck(TypeCheck_Bad),
type(getCoercedType(*vp))
{}
@ -4538,7 +4574,7 @@ class SlotMap : public SlotVisitorBase
SlotInfo(Value* vp, JSValueType t)
: vp(vp), isPromotedInt32(t == JSVAL_TYPE_INT32), lastCheck(TypeCheck_Bad), type(t)
{}
void *vp;
const void *vp;
bool isPromotedInt32;
TypeCheckResult lastCheck;
JSValueType type;
@ -4556,9 +4592,9 @@ class SlotMap : public SlotVisitorBase
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot)
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot)
{
addSlot(vp);
addSlot(&vp->get());
}
JS_ALWAYS_INLINE SlotMap::SlotInfo&
@ -4610,7 +4646,7 @@ class SlotMap : public SlotVisitorBase
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
addSlot(Value* vp)
addSlot(const Value* vp)
{
bool isPromotedInt32 = false;
if (vp->isNumber()) {
@ -6137,7 +6173,7 @@ public:
{}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
debug_only_printf(LC_TMTracer, "global%d=", n);
if (!IsEntryTypeCompatible(*vp, *mTypeMap)) {
mOk = false;
@ -6242,7 +6278,7 @@ public:
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
visitGlobalSlot(HeapValue *vp, unsigned n, unsigned slot) {
if (mOk)
checkSlot(*vp, "global", n);
}
@ -6478,6 +6514,8 @@ ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
#endif
JS_ASSERT(f->root == f && f->code());
JS_ASSERT(!cx->compartment->needsBarrier());
if (!ScopeChainCheck(cx, f) ||
!cx->stack.space().ensureEnoughSpaceToEnterTrace(cx)) {
*lrp = NULL;
@ -6932,6 +6970,9 @@ RecordLoopEdge(JSContext* cx, TraceMonitor* tm)
TraceVisStateObj tvso(cx, S_MONITOR);
#endif
if (cx->compartment->needsBarrier())
return MONITOR_NOT_RECORDING;
/* Is the recorder currently active? */
if (tm->recorder) {
tm->recorder->assertInsideLoop();
@ -12036,7 +12077,7 @@ TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, const Shape* shape,
if (obj == globalObj) {
if (!lazilyImportGlobalSlot(slot))
RETURN_STOP("lazy import of global slot failed");
set(&obj->getSlotRef(slot), v_ins);
set(&obj->getSlot(slot), v_ins);
} else {
LIns* slots_ins = NULL;
stobj_set_slot(obj, obj_ins, slot, slots_ins, v, v_ins);
@ -13015,7 +13056,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex)
if (!idx.isPrimitive())
RETURN_STOP_A("non-primitive index");
CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v,
*cx->regs().pc == JSOP_INITELEM));
*cx->regs().pc == JSOP_INITELEM));
} else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) {
// Fast path: assigning to element of typed array.
VMSideExit* branchExit = snapshot(BRANCH_EXIT);
@ -13843,7 +13884,7 @@ TraceRecorder::name(const Value*& vp, LIns*& ins, NameResult& nr)
if (!lazilyImportGlobalSlot(slot))
RETURN_STOP_A("lazy import of global slot failed");
vp = &obj->getSlotRef(slot);
vp = &obj->getSlot(slot);
ins = get(vp);
nr.tracked = true;
return ARECORD_CONTINUE;
@ -16366,6 +16407,9 @@ class AutoRetBlacklist
JS_REQUIRES_STACK TracePointAction
RecordTracePoint(JSContext* cx, TraceMonitor* tm, bool* blacklist, bool execAllowed)
{
if (cx->compartment->needsBarrier())
return TPA_Nothing;
StackFrame* fp = cx->fp();
jsbytecode* pc = cx->regs().pc;
@ -1062,14 +1062,14 @@ class TraceRecorder
Tracker nativeFrameTracker;
/* The start of the global object's slots we assume for the trackers. */
const Value* global_slots;
const HeapValue* global_slots;
/* The number of interpreted calls entered (and not yet left) since recording began. */
unsigned callDepth;
/* The current atom table, mirroring the interpreter loop's variable of the same name. */
JSAtom** atoms;
Value* consts;
HeapValue* consts;
/* An instruction yielding the current script's strict mode code flag. */
nanojit::LIns* strictModeCode_ins;
@ -1185,7 +1185,9 @@ class TraceRecorder
bool isVoidPtrGlobal(const void* p) const;
bool isGlobal(const Value* p) const;
bool isGlobal(const HeapValue* p) const;
ptrdiff_t nativeGlobalSlot(const Value *p) const;
ptrdiff_t nativeGlobalSlot(const HeapValue *p) const;
ptrdiff_t nativeGlobalOffset(const Value* p) const;
JS_REQUIRES_STACK ptrdiff_t nativeStackOffsetImpl(const void* p) const;
JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(const Value* p) const;
@ -1228,6 +1230,7 @@ class TraceRecorder
nanojit::LIns* getFromTracker(const Value* p);
JS_REQUIRES_STACK nanojit::LIns* getImpl(const void* p);
JS_REQUIRES_STACK nanojit::LIns* get(const Value* p);
JS_REQUIRES_STACK nanojit::LIns* get(const HeapValue* p);
JS_REQUIRES_STACK nanojit::LIns* getFrameObjPtr(void* p);
JS_REQUIRES_STACK nanojit::LIns* attemptImport(const Value* p);
JS_REQUIRES_STACK nanojit::LIns* addr(Value* p);
@ -1528,7 +1531,7 @@ class TraceRecorder
JS_REQUIRES_STACK jsatomid getFullIndex(ptrdiff_t pcoff = 0);
JS_REQUIRES_STACK JSValueType determineSlotType(Value* vp);
JS_REQUIRES_STACK JSValueType determineSlotType(const Value* vp);
JS_REQUIRES_STACK RecordingStatus setUpwardTrackedVar(Value* stackVp, const Value& v,
nanojit::LIns* v_ins);
@ -1616,7 +1619,7 @@ class TraceRecorder
* Do slot arithmetic manually to avoid getSlotRef assertions which
* do not need to be satisfied for this purpose.
*/
const Value *vp = globalObj->getRawSlot(slot, globalObj->getRawSlots());
const HeapValue *vp = globalObj->getRawSlot(slot, globalObj->getRawSlots());
/* If this global is definitely being tracked, then the write is unexpected. */
if (tracker.has(vp))
@ -164,8 +164,8 @@ JSObject::allocateArrayBufferSlots(JSContext *cx, uint32 size)
JS_ASSERT(isArrayBuffer() && !hasSlotsArray());
uint32 bytes = size + sizeof(Value);
if (size > sizeof(Value) * ARRAYBUFFER_RESERVED_SLOTS - sizeof(Value) ) {
Value *tmpslots = (Value *)cx->calloc_(bytes);
if (size > sizeof(HeapValue) * ARRAYBUFFER_RESERVED_SLOTS - sizeof(HeapValue) ) {
HeapValue *tmpslots = (HeapValue *)cx->calloc_(bytes);
if (!tmpslots)
return false;
slots = tmpslots;
@ -174,7 +174,7 @@ JSObject::allocateArrayBufferSlots(JSContext *cx, uint32 size)
* |capacity * sizeof(Value)| may underestimate the size by up to
* |sizeof(Value) - 1| bytes.
*/
capacity = bytes / sizeof(Value);
capacity = bytes / sizeof(HeapValue);
} else {
slots = fixedSlots();
memset(slots, 0, bytes);
@ -232,9 +232,13 @@ ArrayBuffer::~ArrayBuffer()
void
ArrayBuffer::obj_trace(JSTracer *trc, JSObject *obj)
{
/*
* If this object changes, it will get marked via the private data barrier,
* so it's safe to leave it Unbarriered.
*/
JSObject *delegate = static_cast<JSObject*>(obj->getPrivate());
if (delegate)
MarkObject(trc, *delegate, "arraybuffer.delegate");
MarkObjectUnbarriered(trc, delegate, "arraybuffer.delegate");
}
static JSProperty * const PROPERTY_FOUND = reinterpret_cast<JSProperty *>(1);
@ -995,9 +999,7 @@ class TypedArrayTemplate
static void
obj_trace(JSTracer *trc, JSObject *obj)
{
JSObject *buffer = static_cast<JSObject*>(getBuffer(obj));
if (buffer)
MarkObject(trc, *buffer, "typedarray.buffer");
MarkValue(trc, obj->getFixedSlotRef(FIELD_BUFFER), "typedarray.buffer");
}
static JSBool
@ -43,6 +43,8 @@
#include "jsapi.h"
#include "jsclass.h"
#include "gc/Barrier.h"
typedef struct JSProperty JSProperty;
namespace js {
@ -48,7 +48,7 @@ using namespace js::gc;
inline HashNumber
DefaultHasher<WatchKey>::hash(const Lookup &key)
{
return DefaultHasher<JSObject *>::hash(key.object) ^ HashId(key.id);
return DefaultHasher<JSObject *>::hash(key.object.get()) ^ HashId(key.id.get());
}
class AutoEntryHolder {
@ -177,7 +177,7 @@ WatchpointMap::triggerWatchpoint(JSContext *cx, JSObject *obj, jsid id, Value *v
bool
WatchpointMap::markAllIteratively(JSTracer *trc)
{
JSRuntime *rt = trc->context->runtime;
JSRuntime *rt = trc->runtime;
if (rt->gcCurrentCompartment) {
WatchpointMap *wpmap = rt->gcCurrentCompartment->watchpointMap;
return wpmap && wpmap->markIteratively(trc);
@ -201,16 +201,16 @@ WatchpointMap::markIteratively(JSTracer *trc)
bool objectIsLive = !IsAboutToBeFinalized(cx, e.key.object);
if (objectIsLive || e.value.held) {
if (!objectIsLive) {
MarkObject(trc, *e.key.object, "held Watchpoint object");
MarkObject(trc, e.key.object, "held Watchpoint object");
marked = true;
}
jsid id = e.key.id;
const HeapId &id = e.key.id;
JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
MarkId(trc, id, "WatchKey::id");
if (e.value.closure && IsAboutToBeFinalized(cx, e.value.closure)) {
MarkObject(trc, *e.value.closure, "Watchpoint::closure");
MarkObject(trc, e.value.closure, "Watchpoint::closure");
marked = true;
}
}
@ -44,6 +44,7 @@
#include "jsprvtd.h"
#include "jsapi.h"
#include "gc/Barrier.h"
#include "js/HashTable.h"
namespace js {
@ -51,13 +52,14 @@ namespace js {
struct WatchKey {
WatchKey() {}
WatchKey(JSObject *obj, jsid id) : object(obj), id(id) {}
JSObject *object;
jsid id;
WatchKey(const WatchKey &key) : object(key.object.get()), id(key.id.get()) {}
HeapPtrObject object;
HeapId id;
};
struct Watchpoint {
JSWatchPointHandler handler;
JSObject *closure;
HeapPtrObject closure;
bool held; /* true if currently running handler */
};
@ -67,7 +69,7 @@ struct DefaultHasher<WatchKey> {
static inline js::HashNumber hash(const Lookup &key);
static bool match(const WatchKey &k, const Lookup &l) {
return k.object == l.object && k.id == l.id;
return k.object == l.object && k.id.get() == l.id.get();
}
};
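Note the pattern in WatchKey above: once the members become HeapPtrObject and HeapId, the struct gains an explicit copy constructor that reads the raw object and id out of the source via .get() and constructs the new barriered fields from those raw values. A hedged sketch of the same idiom for an arbitrary key struct (ExampleKey is an illustrative name, not part of this patch):

    // Illustrative struct with pre-barriered members (not from this patch).
    struct ExampleKey {
        HeapPtrObject object;
        HeapId id;

        ExampleKey(JSObject *obj, jsid i) : object(obj), id(i) { }

        // Copying reads the raw values out of the barriered fields of the
        // source and initializes fresh barriered fields from them.
        ExampleKey(const ExampleKey &other)
          : object(other.object.get()), id(other.id.get()) { }
    };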
@ -80,7 +80,7 @@ WeakMapBase::sweepAll(JSTracer *tracer)
} /* namespace js */
typedef WeakMap<JSObject *, Value> ObjectValueMap;
typedef WeakMap<HeapPtr<JSObject>, HeapValue> ObjectValueMap;
static ObjectValueMap *
GetObjectMap(JSObject *obj)
@ -275,7 +275,7 @@ WeakMap_construct(JSContext *cx, uintN argc, Value *vp)
if (!obj)
return false;
obj->setPrivate(NULL);
obj->initPrivate(NULL);
vp->setObject(*obj);
return true;
@ -320,7 +320,7 @@ js_InitWeakMapClass(JSContext *cx, JSObject *obj)
JSObject *weakMapProto = global->createBlankPrototype(cx, &WeakMapClass);
if (!weakMapProto)
return NULL;
weakMapProto->setPrivate(NULL);
weakMapProto->initPrivate(NULL);
JSFunction *ctor = global->createConstructor(cx, WeakMap_construct, &WeakMapClass,
CLASS_ATOM(cx, WeakMap), 0);
@ -80,59 +80,26 @@ namespace js {
// - There is no AllocPolicy parameter; these are used with our garbage collector, so
// RuntimeAllocPolicy is hard-wired.
//
// - Optional fourth template parameter is a class MarkPolicy, with the following constructor:
//
// - Optional fourth and fifth parameters are the MarkPolicies for the key and value type.
// A MarkPolicy has the constructor:
//
// MarkPolicy(JSTracer *)
//
// and the following member functions:
//
// bool keyMarked(Key &k)
// bool valueMarked(Value &v)
// Return true if k/v has been marked as live by the garbage collector.
// bool isMarked(const Type &x)
// Return true if x has been marked as live by the garbage collector.
//
// bool markEntryIfLive(Key &k, Value &v)
// If a table entry whose key is k should be retained, ensure its key and
// value are marked. Return true if any previously unmarked objects
// became marked.
// bool mark(const Type &x)
// Return false if x is already marked. Otherwise, mark x and return true.
//
// To ensure that the WeakMap's behavior isn't visibly affected by
// garbage collection, this should leave k unmarked only when no key
// matching k could ever be produced after this GC cycle completes ---
// removing entries whose keys this function leaves unmarked should never
// make future lookups fail.
//
// A typical definition of markEntryIfLive would be:
//
// if (keyMarked(k) && !valueMarked(v)) {
// markObject(*v, "WeakMap entry value");
// return true;
// }
// return false;
//
// This meets the above constraint when, for example, Key is JSObject *:
// if k isn't marked, it won't exist once the collection cycle completes,
// and thus can't be supplied as a key.
//
// Note that this may mark entries where keyMarked(k) is not initially
// true. For example, you could have a table whose keys match when the
// values of one of their properties are equal: k1.x === k2.x. An entry
// in such a table could be live even when its key is not marked. The
// markEntryIfLive function for such a table would generally mark both k and v.
//
// void markEntry(Value &v)
// Mark the table entry's value v as reachable by the collector. WeakMap
// uses this function for non-marking tracers: other code using the GC
// heap tracing functions to map the heap for some purpose or other.
// This provides a conservative approximation of the true reachability
// relation of the heap graph.
//
// If omitted, the MarkPolicy parameter defaults to js::DefaultMarkPolicy<Key,
// Value>, a policy template with the obvious definitions for some typical
// If omitted, the MarkPolicy parameter defaults to js::DefaultMarkPolicy<Type>,
// a policy template with the obvious definitions for some typical
// SpiderMonkey type combinations.
// A policy template holding default marking algorithms for common type combinations. This
// provides default types for WeakMap's MarkPolicy template parameter.
template <class Key, class Value> class DefaultMarkPolicy;
template <class Type> class DefaultMarkPolicy;
// Common base class for all WeakMap specializations. The collector uses this to call
// their markIteratively and sweep methods.
@ -188,7 +155,8 @@ class WeakMapBase {
template <class Key, class Value,
class HashPolicy = DefaultHasher<Key>,
class MarkPolicy = DefaultMarkPolicy<Key, Value> >
class KeyMarkPolicy = DefaultMarkPolicy<Key>,
class ValueMarkPolicy = DefaultMarkPolicy<Value> >
class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, public WeakMapBase {
private:
typedef HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy> Base;
@ -206,127 +174,121 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
}
private:
void nonMarkingTrace(JSTracer *tracer) {
MarkPolicy t(tracer);
void nonMarkingTrace(JSTracer *trc) {
ValueMarkPolicy vp(trc);
for (Range r = Base::all(); !r.empty(); r.popFront())
t.markEntry(r.front().value);
vp.mark(r.front().value);
}
bool markIteratively(JSTracer *tracer) {
MarkPolicy t(tracer);
bool markIteratively(JSTracer *trc) {
KeyMarkPolicy kp(trc);
ValueMarkPolicy vp(trc);
bool markedAny = false;
for (Range r = Base::all(); !r.empty(); r.popFront()) {
const Key &k = r.front().key;
const Value &v = r.front().value;
/* If the entry is live, ensure its key and value are marked. */
if (t.markEntryIfLive(r.front().key, r.front().value)) {
/* We revived a value with children, we have to iterate again. */
if (kp.isMarked(k)) {
markedAny |= vp.mark(v);
} else if (kp.overrideKeyMarking(k)) {
// We always mark wrapped natives. This will cause leaks, but WeakMap+CC
// integration is currently busted anyways. When WeakMap+CC integration is
// fixed in Bug 668855, XPC wrapped natives should only be marked during
// non-BLACK marking (ie grey marking).
kp.mark(k);
vp.mark(v);
markedAny = true;
}
JS_ASSERT_IF(t.keyMarked(r.front().key), t.valueMarked(r.front().value));
JS_ASSERT_IF(kp.isMarked(k), vp.isMarked(v));
}
return markedAny;
}
void sweep(JSTracer *tracer) {
MarkPolicy t(tracer);
void sweep(JSTracer *trc) {
KeyMarkPolicy kp(trc);
/* Remove all entries whose keys remain unmarked. */
for (Enum e(*this); !e.empty(); e.popFront()) {
if (!t.keyMarked(e.front().key))
if (!kp.isMarked(e.front().key))
e.removeFront();
}
#if DEBUG
/*
ValueMarkPolicy vp(trc);
/*
* Once we've swept, all remaining edges should stay within the
* known-live part of the graph.
*/
for (Range r = Base::all(); !r.empty(); r.popFront()) {
JS_ASSERT(t.keyMarked(r.front().key));
JS_ASSERT(t.valueMarked(r.front().value));
JS_ASSERT(kp.isMarked(r.front().key));
JS_ASSERT(vp.isMarked(r.front().value));
}
#endif
}
};
// Marking policy for maps from JSObject pointers to js::Values.
//
// We always mark wrapped natives. This will cause leaks, but WeakMap+CC
// integration is currently busted anyways. When WeakMap+CC integration is
// fixed in Bug 668855, XPC wrapped natives should only be marked during
// non-BLACK marking (ie grey marking).
template <>
class DefaultMarkPolicy<JSObject *, Value> {
class DefaultMarkPolicy<HeapValue> {
private:
JSTracer *tracer;
public:
DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
bool keyMarked(JSObject *k) { return !IsAboutToBeFinalized(tracer->context, k); }
bool valueMarked(const Value &v) {
if (v.isMarkable())
return !IsAboutToBeFinalized(tracer->context, v.toGCThing());
bool isMarked(const HeapValue &x) {
if (x.isMarkable())
return !IsAboutToBeFinalized(tracer->context, x);
return true;
}
private:
bool markUnmarkedValue(const Value &v) {
if (valueMarked(v))
bool mark(const HeapValue &x) {
if (isMarked(x))
return false;
js::gc::MarkValue(tracer, v, "WeakMap entry value");
js::gc::MarkValue(tracer, x, "WeakMap entry");
return true;
}
bool overrideKeyMarking(const HeapValue &k) { return false; }
};
// Return true if we should override the GC's default marking
// behavior for this key.
bool overrideKeyMarking(JSObject *k) {
template <>
class DefaultMarkPolicy<HeapPtrObject> {
private:
JSTracer *tracer;
public:
DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
bool isMarked(const HeapPtrObject &x) {
return !IsAboutToBeFinalized(tracer->context, x);
}
bool mark(const HeapPtrObject &x) {
if (isMarked(x))
return false;
js::gc::MarkObject(tracer, x, "WeakMap entry");
return true;
}
bool overrideKeyMarking(const HeapPtrObject &k) {
// We only need to worry about extra marking of keys when
// we're doing a GC marking pass.
if (!IS_GC_MARKING_TRACER(tracer))
return false;
return k->getClass()->ext.isWrappedNative;
}
public:
bool markEntryIfLive(JSObject *k, const Value &v) {
if (keyMarked(k))
return markUnmarkedValue(v);
if (!overrideKeyMarking(k))
return false;
js::gc::MarkObject(tracer, *k, "WeakMap entry wrapper key");
markUnmarkedValue(v);
return true;
}
void markEntry(const Value &v) {
js::gc::MarkValue(tracer, v, "WeakMap entry value");
}
};
template <>
class DefaultMarkPolicy<gc::Cell *, JSObject *> {
protected:
class DefaultMarkPolicy<HeapPtrScript> {
private:
JSTracer *tracer;
public:
DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
bool keyMarked(gc::Cell *k) { return !IsAboutToBeFinalized(tracer->context, k); }
bool valueMarked(JSObject *v) { return !IsAboutToBeFinalized(tracer->context, v); }
bool markEntryIfLive(gc::Cell *k, JSObject *v) {
if (keyMarked(k) && !valueMarked(v)) {
js::gc::MarkObject(tracer, *v, "WeakMap entry value");
return true;
}
return false;
bool isMarked(const HeapPtrScript &x) {
return !IsAboutToBeFinalized(tracer->context, x);
}
void markEntry(JSObject *v) {
js::gc::MarkObject(tracer, *v, "WeakMap entry value");
bool mark(const HeapPtrScript &x) {
if (isMarked(x))
return false;
js::gc::MarkScript(tracer, x, "WeakMap entry");
return true;
}
bool overrideKeyMarking(const HeapPtrScript &k) { return false; }
};
// A MarkPolicy for WeakMaps whose keys and values may be objects in arbitrary
// compartments within a runtime.
//
// With the current GC, the implementation turns out to be identical to the
// default mark policy. We give it a distinct name anyway, in case this ever
// changes.
//
typedef DefaultMarkPolicy<gc::Cell *, JSObject *> CrossCompartmentMarkPolicy;
}
extern JSObject *
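With the MarkPolicy comment block and the three DefaultMarkPolicy specializations above in mind, a custom policy only has to supply the documented constructor plus isMarked, mark, and overrideKeyMarking. The sketch below shows the shape of such a policy for a hypothetical barriered cell type; PrivateThing and MarkPrivateThing are illustrative names, not part of this patch:

    // Illustrative MarkPolicy matching the documented interface.
    struct PrivateThing;   // hypothetical GC thing, for illustration only

    class PrivateThingMarkPolicy {
        JSTracer *tracer;
      public:
        PrivateThingMarkPolicy(JSTracer *t) : tracer(t) { }

        bool isMarked(PrivateThing *const &x) {
            // Same liveness test the default policies use.
            return !IsAboutToBeFinalized(tracer->context, x);
        }

        bool mark(PrivateThing *const &x) {
            if (isMarked(x))
                return false;
            MarkPrivateThing(tracer, x, "WeakMap entry");   // hypothetical marker
            return true;
        }

        // This type never forces its keys to stay alive.
        bool overrideKeyMarking(PrivateThing *const &k) { return false; }
    };

A map using such a policy would name it explicitly in the fourth or fifth template slot, e.g. WeakMap<HeapPtrObject, PrivateThing *, DefaultHasher<HeapPtrObject>, DefaultMarkPolicy<HeapPtrObject>, PrivateThingMarkPolicy>.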
@ -346,7 +346,7 @@ Wrapper::defaultValue(JSContext *cx, JSObject *wrapper, JSType hint, Value *vp)
void
Wrapper::trace(JSTracer *trc, JSObject *wrapper)
{
MarkObject(trc, *wrappedObject(wrapper), "wrappedObject");
MarkValue(trc, wrapper->getReservedSlotRef(JSSLOT_PROXY_PRIVATE), "wrappedObject");
}
JSObject *
@ -844,7 +844,8 @@ CrossCompartmentWrapper::defaultValue(JSContext *cx, JSObject *wrapper, JSType h
void
CrossCompartmentWrapper::trace(JSTracer *trc, JSObject *wrapper)
{
MarkCrossCompartmentObject(trc, *wrappedObject(wrapper), "wrappedObject");
MarkCrossCompartmentValue(trc, wrapper->getReservedSlotRef(JSSLOT_PROXY_PRIVATE),
"wrappedObject");
}
CrossCompartmentWrapper CrossCompartmentWrapper::singleton(0u);
@ -739,7 +739,7 @@ JS_XDRScript(JSXDRState *xdr, JSScript **scriptp)
if (xdr->mode == JSXDR_DECODE) {
JS_ASSERT(!script->compileAndGo);
script->u.globalObject = GetCurrentGlobal(xdr->cx);
script->globalObject = GetCurrentGlobal(xdr->cx);
js_CallNewScriptHook(xdr->cx, script, NULL);
Debugger::onNewScript(xdr->cx, script, NULL);
*scriptp = script;
408
js/src/jsxml.cpp
408
js/src/jsxml.cpp
@ -78,6 +78,7 @@
#include "jsstrinlines.h"
#include "vm/Stack-inl.h"
#include "vm/String-inl.h"
#ifdef DEBUG
#include <string.h> /* for #ifdef DEBUG memset calls */
@ -88,6 +89,19 @@ using namespace js;
using namespace js::gc;
using namespace js::types;
template<class T, class U>
struct IdentityOp
{
typedef JSBool (* compare)(const T *a, const U *b);
};
template<class T>
static JSBool
pointer_match(const T *a, const T *b)
{
return a == b;
}
/*
* NOTES
* - in the js shell, you must use the -x command line option, or call
@ -302,7 +316,7 @@ DEFINE_GETTER(QNameLocalName_getter,
*vp = obj->getQNameLocalNameVal())
static JSBool
qname_identity(JSObject *qna, JSObject *qnb)
qname_identity(JSObject *qna, const JSObject *qnb)
{
JSLinearString *uri1 = qna->getNameURI();
JSLinearString *uri2 = qnb->getNameURI();
@ -839,10 +853,8 @@ QName(JSContext *cx, uintN argc, Value *vp)
* XMLArray library functions.
*/
static JSBool
namespace_identity(const void *a, const void *b)
namespace_identity(const JSObject *nsa, const JSObject *nsb)
{
const JSObject *nsa = (const JSObject *) a;
const JSObject *nsb = (const JSObject *) b;
JSLinearString *prefixa = nsa->getNamePrefix();
JSLinearString *prefixb = nsb->getNamePrefix();
@ -857,32 +869,51 @@ namespace_identity(const void *a, const void *b)
}
static JSBool
attr_identity(const void *a, const void *b)
attr_identity(const JSXML *xmla, const JSXML *xmlb)
{
const JSXML *xmla = (const JSXML *) a;
const JSXML *xmlb = (const JSXML *) b;
return qname_identity(xmla->name, xmlb->name);
}
template<>
void
JSXMLArrayCursor::trace(JSTracer *trc) {
#ifdef DEBUG
size_t index = 0;
#endif
for (JSXMLArrayCursor *cursor = this; cursor; cursor = cursor->next)
js::gc::MarkGCThing(trc, cursor->root, "cursor_root", index++);
JSXMLArrayCursor<JSXML>::trace(JSTracer *trc)
{
for (JSXMLArrayCursor<JSXML> *cursor = this; cursor; cursor = cursor->next)
MarkXML(trc, cursor->root, "cursor_root");
}
template<>
void
JSXMLArrayCursor<JSObject>::trace(JSTracer *trc)
{
for (JSXMLArrayCursor<JSObject> *cursor = this; cursor; cursor = cursor->next)
MarkObject(trc, cursor->root, "cursor_root");
}
template<class T>
static void
XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor *cursor)
XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<T> *cursor)
{
cursor->trace(trc);
}
template<class T>
static HeapPtr<T> *
ReallocateVector(HeapPtr<T> *vector, size_t count)
{
#if JS_BITS_PER_WORD == 32
if (count > ~(size_t)0 / sizeof(HeapPtr<T>))
return NULL;
#endif
size_t size = count * sizeof(HeapPtr<T>);
return (HeapPtr<T> *) OffTheBooks::realloc_(vector, size);
}
/* NB: called with null cx from the GC, via xml_trace => JSXMLArray::trim. */
template<class T>
bool
JSXMLArray::setCapacity(JSContext *cx, uint32 newCapacity)
JSXMLArray<T>::setCapacity(JSContext *cx, uint32 newCapacity)
{
if (newCapacity == 0) {
/* We could let realloc(p, 0) free this, but purify gets confused. */
@ -894,13 +925,8 @@ JSXMLArray::setCapacity(JSContext *cx, uint32 newCapacity)
}
vector = NULL;
} else {
void **tmp;
if (
#if JS_BITS_PER_WORD == 32
(size_t)newCapacity > ~(size_t)0 / sizeof(void *) ||
#endif
!(tmp = (void **) OffTheBooks::realloc_(vector, newCapacity * sizeof(void *)))) {
HeapPtr<T> *tmp = ReallocateVector(vector, newCapacity);
if (!tmp) {
if (cx)
JS_ReportOutOfMemory(cx);
return false;
@ -911,8 +937,9 @@ JSXMLArray::setCapacity(JSContext *cx, uint32 newCapacity)
return true;
}
template<class T>
void
JSXMLArray::trim()
JSXMLArray<T>::trim()
{
if (capacity & JSXML_PRESET_CAPACITY)
return;
@ -920,12 +947,19 @@ JSXMLArray::trim()
setCapacity(NULL, length);
}
template<class T>
void
JSXMLArray::finish(JSContext *cx)
JSXMLArray<T>::finish(JSContext *cx)
{
if (!cx->runtime->gcRunning) {
/* We need to clear these to trigger a write barrier. */
for (uint32 i = 0; i < length; i++)
vector[i].~HeapPtr<T>();
}
cx->free_(vector);
while (JSXMLArrayCursor *cursor = cursors)
while (JSXMLArrayCursor<T> *cursor = cursors)
cursor->disconnect();
#ifdef DEBUG
@ -935,24 +969,18 @@ JSXMLArray::finish(JSContext *cx)
#define XML_NOT_FOUND ((uint32) -1)
template<class T, class U>
static uint32
XMLArrayFindMember(const JSXMLArray *array, void *elt, JSIdentityOp identity)
XMLArrayFindMember(const JSXMLArray<T> *array, U *elt, typename IdentityOp<T, U>::compare identity)
{
void **vector;
HeapPtr<T> *vector;
uint32 i, n;
/* The identity op must not reallocate array->vector. */
vector = array->vector;
if (identity) {
for (i = 0, n = array->length; i < n; i++) {
if (identity(vector[i], elt))
return i;
}
} else {
for (i = 0, n = array->length; i < n; i++) {
if (vector[i] == elt)
return i;
}
for (i = 0, n = array->length; i < n; i++) {
if (identity(vector[i].get(), elt))
return i;
}
return XML_NOT_FOUND;
}
@ -965,12 +993,13 @@ XMLArrayFindMember(const JSXMLArray *array, void *elt, JSIdentityOp identity)
#define LINEAR_THRESHOLD 256
#define LINEAR_INCREMENT 32
template<class T>
static JSBool
XMLArrayAddMember(JSContext *cx, JSXMLArray *array, uint32 index, void *elt)
XMLArrayAddMember(JSContext *cx, JSXMLArray<T> *array, uint32 index, T *elt)
{
uint32 capacity, i;
int log2;
void **vector;
HeapPtr<T> *vector;
if (index >= array->length) {
if (index >= JSXML_CAPACITY(array)) {
@ -982,20 +1011,16 @@ XMLArrayAddMember(JSContext *cx, JSXMLArray *array, uint32 index, void *elt)
JS_CEILING_LOG2(log2, capacity);
capacity = JS_BIT(log2);
}
if (
#if JS_BITS_PER_WORD == 32
(size_t)capacity > ~(size_t)0 / sizeof(void *) ||
#endif
!(vector = (void **)
cx->realloc_(array->vector, capacity * sizeof(void *)))) {
if (!(vector = ReallocateVector(array->vector, capacity))) {
JS_ReportOutOfMemory(cx);
return JS_FALSE;
}
array->capacity = capacity;
array->vector = vector;
for (i = array->length; i < index; i++)
vector[i] = NULL;
vector[i].init(NULL);
}
array->vector[index].init(NULL);
array->length = index + 1;
}
@ -1003,17 +1028,24 @@ XMLArrayAddMember(JSContext *cx, JSXMLArray *array, uint32 index, void *elt)
return JS_TRUE;
}
template<class T>
static JSBool
XMLArrayInsert(JSContext *cx, JSXMLArray *array, uint32 i, uint32 n)
XMLArrayInsert(JSContext *cx, JSXMLArray<T> *array, uint32 i, uint32 n)
{
uint32 j;
JSXMLArrayCursor *cursor;
uint32 j, k;
JSXMLArrayCursor<T> *cursor;
j = array->length;
JS_ASSERT(i <= j);
if (!array->setCapacity(cx, j + n))
return JS_FALSE;
k = j;
while (k != j + n) {
array->vector[k].init(NULL);
k++;
}
array->length = j + n;
JS_ASSERT(n != (uint32)-1);
while (j != i) {
@ -1028,12 +1060,14 @@ XMLArrayInsert(JSContext *cx, JSXMLArray *array, uint32 i, uint32 n)
return JS_TRUE;
}
static void *
XMLArrayDelete(JSContext *cx, JSXMLArray *array, uint32 index, JSBool compress)
template<class T>
static T *
XMLArrayDelete(JSContext *cx, JSXMLArray<T> *array, uint32 index, JSBool compress)
{
uint32 length;
void **vector, *elt;
JSXMLArrayCursor *cursor;
HeapPtr<T> *vector;
T *elt;
JSXMLArrayCursor<T> *cursor;
length = array->length;
if (index >= length)
@ -1042,6 +1076,7 @@ XMLArrayDelete(JSContext *cx, JSXMLArray *array, uint32 index, JSBool compress)
vector = array->vector;
elt = vector[index];
if (compress) {
vector[length - 1].~HeapPtr<T>();
while (++index < length)
vector[index-1] = vector[index];
array->length = length - 1;
@ -1057,21 +1092,25 @@ XMLArrayDelete(JSContext *cx, JSXMLArray *array, uint32 index, JSBool compress)
return elt;
}
template<class T>
static void
XMLArrayTruncate(JSContext *cx, JSXMLArray *array, uint32 length)
XMLArrayTruncate(JSContext *cx, JSXMLArray<T> *array, uint32 length)
{
void **vector;
HeapPtr<T> *vector;
JS_ASSERT(!array->cursors);
if (length >= array->length)
return;
for (uint32 i = length; i < array->length; i++)
array->vector[i].~HeapPtr<T>();
if (length == 0) {
if (array->vector)
cx->free_(array->vector);
vector = NULL;
} else {
vector = (void **) cx->realloc_(array->vector, length * sizeof(void *));
vector = ReallocateVector(array->vector, length);
if (!vector)
return;
}
@ -1082,21 +1121,24 @@ XMLArrayTruncate(JSContext *cx, JSXMLArray *array, uint32 length)
array->vector = vector;
}
#define XMLARRAY_FIND_MEMBER(a,e,f) XMLArrayFindMember(a, (void *)(e), f)
#define XMLARRAY_HAS_MEMBER(a,e,f) (XMLArrayFindMember(a, (void *)(e), f) != \
#define XMLARRAY_FIND_MEMBER(a,e,f) XMLArrayFindMember(a, e, f)
#define XMLARRAY_HAS_MEMBER(a,e,f) (XMLArrayFindMember(a, e, f) != \
XML_NOT_FOUND)
#define XMLARRAY_MEMBER(a,i,t) (((i) < (a)->length) \
? (t *) (a)->vector[i] \
? (a)->vector[i].get() \
: NULL)
#define XMLARRAY_SET_MEMBER(a,i,e) JS_BEGIN_MACRO \
if ((a)->length <= (i)) \
if ((a)->length <= (i)) { \
(a)->length = (i) + 1; \
((a)->vector[i] = (void *)(e)); \
((a)->vector[i].init(e)); \
} else { \
((a)->vector[i] = e); \
} \
JS_END_MACRO
#define XMLARRAY_ADD_MEMBER(x,a,i,e)XMLArrayAddMember(x, a, i, (void *)(e))
#define XMLARRAY_ADD_MEMBER(x,a,i,e)XMLArrayAddMember(x, a, i, e)
#define XMLARRAY_INSERT(x,a,i,n) XMLArrayInsert(x, a, i, n)
#define XMLARRAY_APPEND(x,a,e) XMLARRAY_ADD_MEMBER(x, a, (a)->length, (e))
#define XMLARRAY_DELETE(x,a,i,c,t) ((t *) XMLArrayDelete(x, a, i, c))
#define XMLARRAY_DELETE(x,a,i,c,t) (XMLArrayDelete<t>(x, a, i, c))
#define XMLARRAY_TRUNCATE(x,a,n) XMLArrayTruncate(x, a, n)
/*
@ -1155,8 +1197,24 @@ HAS_NS_AFTER_XML(const jschar *chars)
static const char xml_namespace_str[] = "http://www.w3.org/XML/1998/namespace";
static const char xmlns_namespace_str[] = "http://www.w3.org/2000/xmlns/";
void
JSXML::finalize(JSContext *cx)
{
if (JSXML_HAS_KIDS(this)) {
xml_kids.finish(cx);
if (xml_class == JSXML_CLASS_ELEMENT) {
xml_namespaces.finish(cx);
xml_attrs.finish(cx);
}
}
#ifdef DEBUG_notme
JS_REMOVE_LINK(&links);
#endif
}
static JSObject *
ParseNodeToQName(Parser *parser, ParseNode *pn, JSXMLArray *inScopeNSes, JSBool isAttributeName)
ParseNodeToQName(Parser *parser, ParseNode *pn,
JSXMLArray<JSObject> *inScopeNSes, JSBool isAttributeName)
{
JSContext *cx = parser->context;
JSLinearString *uri, *prefix;
@ -1284,7 +1342,8 @@ ChompXMLWhitespace(JSContext *cx, JSString *str)
}
static JSXML *
ParseNodeToXML(Parser *parser, ParseNode *pn, JSXMLArray *inScopeNSes, uintN flags)
ParseNodeToXML(Parser *parser, ParseNode *pn,
JSXMLArray<JSObject> *inScopeNSes, uintN flags)
{
JSContext *cx = parser->context;
JSXML *xml, *kid, *attr, *attrj;
@ -2173,7 +2232,7 @@ EscapeElementValue(JSContext *cx, StringBuffer &sb, JSString *str, uint32 toSour
/* 13.3.5.4 [[GetNamespace]]([InScopeNamespaces]) */
static JSObject *
GetNamespace(JSContext *cx, JSObject *qn, const JSXMLArray *inScopeNSes)
GetNamespace(JSContext *cx, JSObject *qn, const JSXMLArray<JSObject> *inScopeNSes)
{
JSLinearString *uri, *prefix, *nsprefix;
JSObject *match, *ns;
@ -2254,7 +2313,7 @@ GetNamespace(JSContext *cx, JSObject *qn, const JSXMLArray *inScopeNSes)
}
static JSLinearString *
GeneratePrefix(JSContext *cx, JSLinearString *uri, JSXMLArray *decls)
GeneratePrefix(JSContext *cx, JSLinearString *uri, JSXMLArray<JSObject> *decls)
{
const jschar *cp, *start, *end;
size_t length, newlength, offset;
@ -2370,10 +2429,8 @@ GeneratePrefix(JSContext *cx, JSLinearString *uri, JSXMLArray *decls)
}
static JSBool
namespace_match(const void *a, const void *b)
namespace_match(const JSObject *nsa, const JSObject *nsb)
{
const JSObject *nsa = (const JSObject *) a;
const JSObject *nsb = (const JSObject *) b;
JSLinearString *prefixa, *prefixb = nsb->getNamePrefix();
if (prefixb) {
@ -2387,7 +2444,7 @@ namespace_match(const void *a, const void *b)
#define TO_SOURCE_FLAG 0x80000000
static JSString *
XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray *ancestorNSes,
XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray<JSObject> *ancestorNSes,
uint32 indentLevel, JSBool pretty)
{
JSBool indentKids;
@ -2434,9 +2491,9 @@ XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray *ancestorNSes,
case JSXML_CLASS_LIST:
/* ECMA-357 10.2.2. */
{
JSXMLArrayCursor cursor(&xml->xml_kids);
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
i = 0;
while (JSXML *kid = (JSXML *) cursor.getNext()) {
while (JSXML *kid = cursor.getNext()) {
if (pretty && i != 0) {
if (!sb.append('\n'))
return NULL;
@ -2474,8 +2531,8 @@ XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray *ancestorNSes,
/* Clone in-scope namespaces not in ancestorNSes into decls. */
{
JSXMLArrayCursor cursor(&xml->xml_namespaces);
while ((ns = (JSObject *) cursor.getNext()) != NULL) {
JSXMLArrayCursor<JSObject> cursor(&xml->xml_namespaces);
while ((ns = cursor.getNext()) != NULL) {
if (!IsDeclared(ns))
continue;
if (!XMLARRAY_HAS_MEMBER(ancestorNSes, ns, namespace_identity)) {
@ -2597,8 +2654,8 @@ XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray *ancestorNSes,
/* Step 17(b): append attributes. */
{
JSXMLArrayCursor cursor(&xml->xml_attrs);
while (JSXML *attr = (JSXML *) cursor.getNext()) {
JSXMLArrayCursor<JSXML> cursor(&xml->xml_attrs);
while (JSXML *attr = cursor.getNext()) {
if (!sb.append(' '))
goto out;
ns2 = GetNamespace(cx, attr->name, &ancdecls.array);
@ -2648,8 +2705,8 @@ XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray *ancestorNSes,
/* Step 17(c): append XML namespace declarations. */
{
JSXMLArrayCursor cursor(&decls.array);
while (JSObject *ns3 = (JSObject *) cursor.getNext()) {
JSXMLArrayCursor<JSObject> cursor(&decls.array);
while (JSObject *ns3 = cursor.getNext()) {
JS_ASSERT(IsDeclared(ns3));
if (!sb.append(" xmlns"))
@ -2702,8 +2759,8 @@ XMLToXMLString(JSContext *cx, JSXML *xml, const JSXMLArray *ancestorNSes,
}
{
JSXMLArrayCursor cursor(&xml->xml_kids);
while (JSXML *kid = (JSXML *) cursor.getNext()) {
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
while (JSXML *kid = cursor.getNext()) {
if (pretty && indentKids) {
if (!sb.append('\n'))
goto out;
@ -3067,7 +3124,7 @@ DeepCopy(JSContext *cx, JSXML *xml, JSObject *obj, uintN flags)
* (iii) from's owning object must be locked if not thread-local.
*/
static JSBool
DeepCopySetInLRS(JSContext *cx, JSXMLArray *from, JSXMLArray *to, JSXML *parent,
DeepCopySetInLRS(JSContext *cx, JSXMLArray<JSXML> *from, JSXMLArray<JSXML> *to, JSXML *parent,
uintN flags)
{
uint32 j, n;
@ -3078,9 +3135,9 @@ DeepCopySetInLRS(JSContext *cx, JSXMLArray *from, JSXMLArray *to, JSXML *parent,
if (!to->setCapacity(cx, n))
return JS_FALSE;
JSXMLArrayCursor cursor(from);
JSXMLArrayCursor<JSXML> cursor(from);
j = 0;
while (JSXML *kid = (JSXML *) cursor.getNext()) {
while (JSXML *kid = cursor.getNext()) {
if ((flags & XSF_IGNORE_COMMENTS) &&
kid->xml_class == JSXML_CLASS_COMMENT) {
continue;
@ -3360,11 +3417,11 @@ retry:
*bp = JS_FALSE;
} else {
{
JSXMLArrayCursor cursor(&xml->xml_kids);
JSXMLArrayCursor vcursor(&vxml->xml_kids);
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
JSXMLArrayCursor<JSXML> vcursor(&vxml->xml_kids);
for (;;) {
kid = (JSXML *) cursor.getNext();
vkid = (JSXML *) vcursor.getNext();
kid = cursor.getNext();
vkid = vcursor.getNext();
if (!kid || !vkid) {
*bp = !kid && !vkid;
break;
@ -3600,7 +3657,7 @@ static void
DeleteNamedProperty(JSContext *cx, JSXML *xml, JSObject *nameqn,
JSBool attributes)
{
JSXMLArray *array;
JSXMLArray<JSXML> *array;
uint32 index, deleteCount;
JSXML *kid;
JSXMLNameMatcher matcher;
@ -3658,7 +3715,7 @@ DeleteListElement(JSContext *cx, JSXML *xml, uint32 index)
DeleteNamedProperty(cx, parent, kid->name, JS_TRUE);
} else {
kidIndex = XMLARRAY_FIND_MEMBER(&parent->xml_kids, kid,
NULL);
pointer_match);
JS_ASSERT(kidIndex != XML_NOT_FOUND);
DeleteByIndex(cx, parent, kidIndex);
}
@ -3671,7 +3728,7 @@ DeleteListElement(JSContext *cx, JSXML *xml, uint32 index)
static JSBool
SyncInScopeNamespaces(JSContext *cx, JSXML *xml)
{
JSXMLArray *nsarray;
JSXMLArray<JSObject> *nsarray;
uint32 i, n;
JSObject *ns;
@ -3691,13 +3748,13 @@ SyncInScopeNamespaces(JSContext *cx, JSXML *xml)
static JSBool
GetNamedProperty(JSContext *cx, JSXML *xml, JSObject* nameqn, JSXML *list)
{
JSXMLArray *array;
JSXMLArray<JSXML> *array;
JSXMLNameMatcher matcher;
JSBool attrs;
if (xml->xml_class == JSXML_CLASS_LIST) {
JSXMLArrayCursor cursor(&xml->xml_kids);
while (JSXML *kid = (JSXML *) cursor.getNext()) {
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
while (JSXML *kid = cursor.getNext()) {
if (kid->xml_class == JSXML_CLASS_ELEMENT &&
!GetNamedProperty(cx, kid, nameqn, list)) {
return JS_FALSE;
@ -3713,8 +3770,8 @@ GetNamedProperty(JSContext *cx, JSXML *xml, JSObject* nameqn, JSXML *list)
matcher = MatchElemName;
}
JSXMLArrayCursor cursor(array);
while (JSXML *kid = (JSXML *) cursor.getNext()) {
JSXMLArrayCursor<JSXML> cursor(array);
while (JSXML *kid = cursor.getNext()) {
if (matcher(nameqn, kid)) {
if (!attrs &&
kid->xml_class == JSXML_CLASS_ELEMENT &&
@ -4100,7 +4157,7 @@ PutProperty(JSContext *cx, JSObject *obj, jsid id, JSBool strict, jsval *vp)
JS_ASSERT(parent != xml);
if (parent) {
q = XMLARRAY_FIND_MEMBER(&parent->xml_kids, kid, NULL);
q = XMLARRAY_FIND_MEMBER(&parent->xml_kids, kid, pointer_match);
JS_ASSERT(q != XML_NOT_FOUND);
ok = Replace(cx, parent, q, OBJECT_TO_JSVAL(copyobj));
if (!ok)
@ -4137,7 +4194,7 @@ PutProperty(JSContext *cx, JSObject *obj, jsid id, JSBool strict, jsval *vp)
/* 2(g). */
else if (vxml || JSXML_HAS_VALUE(kid)) {
if (parent) {
q = XMLARRAY_FIND_MEMBER(&parent->xml_kids, kid, NULL);
q = XMLARRAY_FIND_MEMBER(&parent->xml_kids, kid, pointer_match);
JS_ASSERT(q != XML_NOT_FOUND);
ok = Replace(cx, parent, q, *vp);
if (!ok)
@ -4448,9 +4505,9 @@ PutProperty(JSContext *cx, JSObject *obj, jsid id, JSBool strict, jsval *vp)
/* 14. */
if (primitiveAssign) {
JSXMLArrayCursor cursor(&xml->xml_kids);
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
cursor.index = matchIndex;
kid = (JSXML *) cursor.getCurrent();
kid = cursor.getCurrent();
if (JSXML_HAS_KIDS(kid)) {
kid->xml_kids.finish(cx);
kid->xml_kids.init();
@ -4463,7 +4520,7 @@ PutProperty(JSContext *cx, JSObject *obj, jsid id, JSBool strict, jsval *vp)
ok = JS_ConvertValue(cx, *vp, JSTYPE_STRING, vp);
if (ok && !JSVAL_TO_STRING(*vp)->empty()) {
roots[VAL_ROOT] = *vp;
if ((JSXML *) cursor.getCurrent() == kid)
if (cursor.getCurrent() == kid)
ok = Replace(cx, kid, 0, *vp);
}
}
@ -4551,14 +4608,14 @@ static JSBool
HasNamedProperty(JSXML *xml, JSObject *nameqn)
{
JSBool found;
JSXMLArray *array;
JSXMLArray<JSXML> *array;
JSXMLNameMatcher matcher;
uint32 i, n;
if (xml->xml_class == JSXML_CLASS_LIST) {
found = JS_FALSE;
JSXMLArrayCursor cursor(&xml->xml_kids);
while (JSXML *kid = (JSXML *) cursor.getNext()) {
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
while (JSXML *kid = cursor.getNext()) {
found = HasNamedProperty(kid, nameqn);
if (found)
break;
@ -4690,17 +4747,6 @@ HasProperty(JSContext *cx, JSObject *obj, jsval id, JSBool *found)
|
||||
static void
|
||||
xml_finalize(JSContext *cx, JSObject *obj)
|
||||
{
|
||||
JSXML *xml = (JSXML *) obj->getPrivate();
|
||||
if (!xml)
|
||||
return;
|
||||
if (xml->object == obj)
|
||||
xml->object = NULL;
|
||||
}
|
||||
|
||||
static void
|
||||
xml_trace_vector(JSTracer *trc, JSXML **vec, uint32 len)
|
||||
{
|
||||
MarkXMLRange(trc, len, vec, "xml_vector");
|
||||
}
|
||||
|
||||
/*
|
||||
@ -5074,7 +5120,7 @@ xml_enumerate(JSContext *cx, JSObject *obj, JSIterateOp enum_op, Value *statep,
|
||||
{
|
||||
JSXML *xml;
|
||||
uint32 length, index;
|
||||
JSXMLArrayCursor *cursor;
|
||||
JSXMLArrayCursor<JSXML> *cursor;
|
||||
|
||||
xml = (JSXML *)obj->getPrivate();
|
||||
length = JSXML_LENGTH(xml);
|
||||
@ -5085,7 +5131,7 @@ xml_enumerate(JSContext *cx, JSObject *obj, JSIterateOp enum_op, Value *statep,
|
||||
if (length == 0) {
|
||||
statep->setInt32(0);
|
||||
} else {
|
||||
cursor = cx->new_<JSXMLArrayCursor>(&xml->xml_kids);
|
||||
cursor = cx->new_< JSXMLArrayCursor<JSXML> >(&xml->xml_kids);
|
||||
if (!cursor)
|
||||
return JS_FALSE;
|
||||
statep->setPrivate(cursor);
|
||||
@ -5099,7 +5145,7 @@ xml_enumerate(JSContext *cx, JSObject *obj, JSIterateOp enum_op, Value *statep,
|
||||
statep->setNull();
|
||||
break;
|
||||
}
|
||||
cursor = (JSXMLArrayCursor *) statep->toPrivate();
|
||||
cursor = (JSXMLArrayCursor<JSXML> *) statep->toPrivate();
|
||||
if (cursor && cursor->array && (index = cursor->index) < length) {
|
||||
*idp = INT_TO_JSID(index);
|
||||
cursor->index = index + 1;
|
||||
@ -5109,7 +5155,7 @@ xml_enumerate(JSContext *cx, JSObject *obj, JSIterateOp enum_op, Value *statep,
|
||||
|
||||
case JSENUMERATE_DESTROY:
|
||||
if (!statep->isInt32(0)) {
|
||||
cursor = (JSXMLArrayCursor *) statep->toPrivate();
|
||||
cursor = (JSXMLArrayCursor<JSXML> *) statep->toPrivate();
|
||||
if (cursor)
|
||||
cx->delete_(cursor);
|
||||
}
|
||||
@ -5135,8 +5181,12 @@ static void
|
||||
xml_trace(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
JSXML *xml = (JSXML *) obj->getPrivate();
|
||||
    /*
     * This is safe to leave Unbarriered for incremental GC, but we'll need
     * to fix it somehow for generational GC.
     */
|
||||
if (xml)
|
||||
JS_CALL_TRACER(trc, xml, JSTRACE_XML, "private");
|
||||
MarkXMLUnbarriered(trc, xml, "private");
|
||||
}
|
||||
|
||||
static JSBool
|
||||
@ -5645,8 +5695,8 @@ xml_child(JSContext *cx, uintN argc, jsval *vp)
|
||||
if (!list)
|
||||
return JS_FALSE;
|
||||
|
||||
JSXMLArrayCursor cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = (JSXML *) cursor.getNext()) {
|
||||
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = cursor.getNext()) {
|
||||
kidobj = js_GetXMLObject(cx, kid);
|
||||
if (!kidobj)
|
||||
return JS_FALSE;
|
||||
@ -5786,8 +5836,8 @@ xml_contains(JSContext *cx, uintN argc, jsval *vp)
|
||||
value = argc != 0 ? vp[2] : JSVAL_VOID;
|
||||
if (xml->xml_class == JSXML_CLASS_LIST) {
|
||||
eq = JS_FALSE;
|
||||
JSXMLArrayCursor cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = (JSXML *) cursor.getNext()) {
|
||||
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = cursor.getNext()) {
|
||||
kidobj = js_GetXMLObject(cx, kid);
|
||||
if (!kidobj || !js_TestXMLEquality(cx, ObjectValue(*kidobj), value, &eq))
|
||||
return JS_FALSE;
|
||||
@ -5852,8 +5902,8 @@ xml_elements_helper(JSContext *cx, JSObject *obj, JSXML *xml,
|
||||
|
||||
if (xml->xml_class == JSXML_CLASS_LIST) {
|
||||
/* 13.5.4.6 */
|
||||
JSXMLArrayCursor cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = (JSXML *) cursor.getNext()) {
|
||||
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = cursor.getNext()) {
|
||||
if (kid->xml_class == JSXML_CLASS_ELEMENT) {
|
||||
ok = js_EnterLocalRootScope(cx);
|
||||
if (!ok)
|
||||
@ -5992,7 +6042,7 @@ xml_hasSimpleContent(JSContext *cx, uintN argc, jsval *vp)
|
||||
}
|
||||
|
||||
static JSBool
|
||||
FindInScopeNamespaces(JSContext *cx, JSXML *xml, JSXMLArray *nsarray)
|
||||
FindInScopeNamespaces(JSContext *cx, JSXML *xml, JSXMLArray<JSObject> *nsarray)
|
||||
{
|
||||
uint32 length, i, j, n;
|
||||
JSObject *ns, *ns2;
|
||||
@ -6037,7 +6087,7 @@ FindInScopeNamespaces(JSContext *cx, JSXML *xml, JSXMLArray *nsarray)
|
||||
* rval. rval must point to a rooted location.
|
||||
*/
|
||||
static bool
|
||||
NamespacesToJSArray(JSContext *cx, JSXMLArray *array, jsval *rval)
|
||||
NamespacesToJSArray(JSContext *cx, JSXMLArray<JSObject> *array, jsval *rval)
|
||||
{
|
||||
JSObject *arrayobj = NewDenseEmptyArray(cx);
|
||||
if (!arrayobj)
|
||||
@ -6086,7 +6136,7 @@ xml_insertChildAfter(JSContext *cx, uintN argc, jsval *vp)
|
||||
if (!VALUE_IS_XML(arg))
|
||||
return JS_TRUE;
|
||||
kid = (JSXML *) JSVAL_TO_OBJECT(arg)->getPrivate();
|
||||
i = XMLARRAY_FIND_MEMBER(&xml->xml_kids, kid, NULL);
|
||||
i = XMLARRAY_FIND_MEMBER(&xml->xml_kids, kid, pointer_match);
|
||||
if (i == XML_NOT_FOUND)
|
||||
return JS_TRUE;
|
||||
++i;
|
||||
@ -6118,7 +6168,7 @@ xml_insertChildBefore(JSContext *cx, uintN argc, jsval *vp)
|
||||
if (!VALUE_IS_XML(arg))
|
||||
return JS_TRUE;
|
||||
kid = (JSXML *) JSVAL_TO_OBJECT(arg)->getPrivate();
|
||||
i = XMLARRAY_FIND_MEMBER(&xml->xml_kids, kid, NULL);
|
||||
i = XMLARRAY_FIND_MEMBER(&xml->xml_kids, kid, pointer_match);
|
||||
if (i == XML_NOT_FOUND)
|
||||
return JS_TRUE;
|
||||
}
|
||||
@ -6397,8 +6447,8 @@ xml_processingInstructions_helper(JSContext *cx, JSObject *obj, JSXML *xml,
|
||||
|
||||
if (xml->xml_class == JSXML_CLASS_LIST) {
|
||||
/* 13.5.4.17 Step 4 (misnumbered 9 -- Erratum?). */
|
||||
JSXMLArrayCursor cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = (JSXML *) cursor.getNext()) {
|
||||
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = cursor.getNext()) {
|
||||
if (kid->xml_class == JSXML_CLASS_ELEMENT) {
|
||||
ok = js_EnterLocalRootScope(cx);
|
||||
if (!ok)
|
||||
@ -6500,10 +6550,8 @@ xml_propertyIsEnumerable(JSContext *cx, uintN argc, jsval *vp)
|
||||
}
|
||||
|
||||
static JSBool
|
||||
namespace_full_match(const void *a, const void *b)
|
||||
namespace_full_match(const JSObject *nsa, const JSObject *nsb)
|
||||
{
|
||||
const JSObject *nsa = (const JSObject *) a;
|
||||
const JSObject *nsb = (const JSObject *) b;
|
||||
JSLinearString *prefixa = nsa->getNamePrefix();
|
||||
JSLinearString *prefixb;
|
||||
|
||||
@ -6706,7 +6754,7 @@ xml_setName(JSContext *cx, uintN argc, jsval *vp)
|
||||
jsval name;
|
||||
JSObject *nameqn;
|
||||
JSXML *nsowner;
|
||||
JSXMLArray *nsarray;
|
||||
JSXMLArray<JSObject> *nsarray;
|
||||
uint32 i, n;
|
||||
JSObject *ns;
|
||||
|
||||
@ -6769,7 +6817,7 @@ xml_setName(JSContext *cx, uintN argc, jsval *vp)
|
||||
return JS_FALSE;
|
||||
|
||||
/* XXXbe have to test membership to see whether GetNamespace added */
|
||||
if (XMLARRAY_HAS_MEMBER(&nsowner->xml_namespaces, ns, NULL)) {
|
||||
if (XMLARRAY_HAS_MEMBER(&nsowner->xml_namespaces, ns, pointer_match)) {
|
||||
vp[0] = JSVAL_VOID;
|
||||
return JS_TRUE;
|
||||
}
|
||||
@ -6812,8 +6860,9 @@ xml_setName(JSContext *cx, uintN argc, jsval *vp)
|
||||
}
|
||||
|
||||
/* Utility function used within xml_setNamespace */
|
||||
static JSBool qn_match(const void *xml, const void *qn) {
|
||||
return qname_identity(((JSXML *)xml)->name, (JSObject *)qn);
|
||||
static JSBool qn_match(const JSXML *xml, const JSObject *qn)
|
||||
{
|
||||
return qname_identity(xml->name, qn);
|
||||
}
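The typed comparators above (qn_match, namespace_full_match, pointer_match in place of the old (const void *, const void *) JSIdentityOp callbacks) are what templating JSXMLArray buys: the matcher's argument types are checked by the compiler instead of being cast from void *. A stand-alone sketch of the pattern, with made-up XMLNode/QName types standing in for the real ones:

    #include <cstdint>

    // Hypothetical stand-ins for JSXML/JSObject; only the shape of the API matters here.
    struct QName { int id; };
    struct XMLNode { QName *name; };

    static const uint32_t XML_NOT_FOUND = UINT32_MAX;

    // A typed find: the comparator takes typed pointers, so passing a matcher for the
    // wrong element type no longer compiles (with void * callbacks it merely misbehaved).
    template <typename T, typename U>
    uint32_t FindMember(T **vector, uint32_t length, const U *target,
                        bool (*match)(const T *, const U *))
    {
        for (uint32_t i = 0; i < length; i++) {
            if (match(vector[i], target))
                return i;
        }
        return XML_NOT_FOUND;
    }

    // Analogue of qn_match above.
    static bool QNameMatches(const XMLNode *node, const QName *qn)
    {
        return node->name == qn;
    }

Usage would look like FindMember(kids, nkids, qn, QNameMatches); the same shape covers pointer_match, which simply compares the two pointers for equality.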
|
||||
/* ECMA-357 13.4.4.36 */
|
||||
@ -6855,7 +6904,7 @@ xml_setNamespace(JSContext *cx, uintN argc, jsval *vp)
|
||||
xml->parent && xml->parent->xml_class == JSXML_CLASS_ELEMENT &&
|
||||
!qn_match(xml, qn))
|
||||
{
|
||||
JSXMLArray *array = &xml->parent->xml_attrs;
|
||||
JSXMLArray<JSXML> *array = &xml->parent->xml_attrs;
|
||||
uint32 i = XMLArrayFindMember(array, qn, qn_match);
|
||||
if (i != XML_NOT_FOUND)
|
||||
XMLArrayDelete(cx, array, i, JS_TRUE);
|
||||
@ -6953,8 +7002,8 @@ xml_toString_helper(JSContext *cx, JSXML *xml)
|
||||
str = cx->runtime->emptyString;
|
||||
if (!js_EnterLocalRootScope(cx))
|
||||
return NULL;
|
||||
JSXMLArrayCursor cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = (JSXML *) cursor.getNext()) {
|
||||
JSXMLArrayCursor<JSXML> cursor(&xml->xml_kids);
|
||||
while (JSXML *kid = cursor.getNext()) {
|
||||
if (kid->xml_class != JSXML_CLASS_COMMENT &&
|
||||
kid->xml_class != JSXML_CLASS_PROCESSING_INSTRUCTION) {
|
||||
kidstr = xml_toString_helper(cx, kid);
|
||||
@ -7249,19 +7298,20 @@ js_NewXML(JSContext *cx, JSXMLClass xml_class)
|
||||
if (!xml)
|
||||
return NULL;
|
||||
|
||||
xml->object = NULL;
|
||||
xml->object.init(NULL);
|
||||
xml->domnode = NULL;
|
||||
xml->parent = NULL;
|
||||
xml->name = NULL;
|
||||
xml->parent.init(NULL);
|
||||
xml->name.init(NULL);
|
||||
xml->xml_class = xml_class;
|
||||
xml->xml_flags = 0;
|
||||
if (JSXML_CLASS_HAS_VALUE(xml_class)) {
|
||||
xml->xml_value = cx->runtime->emptyString;
|
||||
xml->xml_value.init(cx->runtime->emptyString);
|
||||
} else {
|
||||
xml->xml_value.init(NULL);
|
||||
xml->xml_kids.init();
|
||||
if (xml_class == JSXML_CLASS_LIST) {
|
||||
xml->xml_target = NULL;
|
||||
xml->xml_targetprop = NULL;
|
||||
xml->xml_target.init(NULL);
|
||||
xml->xml_targetprop.init(NULL);
|
||||
} else {
|
||||
xml->xml_namespaces.init();
|
||||
xml->xml_attrs.init();
|
||||
@ -7275,15 +7325,33 @@ js_NewXML(JSContext *cx, JSXMLClass xml_class)
|
||||
return xml;
|
||||
}
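Note that js_NewXML now calls init(NULL) on its HeapPtr fields instead of assigning to them: the memory is freshly allocated, so there is no meaningful old value for the pre-barrier to shade, and running the barrier on garbage would be wrong. A minimal, self-contained model of the snapshot-at-the-beginning discipline these fields follow (Cell, Compartment, and the marking here are simplified stand-ins, not the real engine types):

    #include <cstdio>

    // Simplified stand-ins: real cells live in arenas and marking pushes onto a mark stack.
    struct Compartment {
        bool incrementalMarking = false;           // true while an incremental GC is in progress
        bool needsBarrier() const { return incrementalMarking; }
    };

    struct Cell {
        Compartment *comp;
        bool marked = false;
        explicit Cell(Compartment *c) : comp(c) {}
    };

    static void MarkCell(Cell *cell) { cell->marked = true; }

    // Snapshot-at-the-beginning pre-barrier: shade the value being *overwritten*, so
    // everything reachable when marking started is still found even if the mutator
    // disconnects it mid-GC.
    static void WriteBarrierPre(Cell *old)
    {
        if (old && old->comp->needsBarrier())
            MarkCell(old);
    }

    // Barriered field: assignment barriers the previous value; init() does not,
    // because a freshly allocated slot has no previous value worth shading.
    class HeapCellPtr {
        Cell *value = nullptr;
      public:
        void init(Cell *v) { value = v; }
        HeapCellPtr &operator=(Cell *v) { WriteBarrierPre(value); value = v; return *this; }
        Cell *get() const { return value; }
    };

    int main()
    {
        Compartment comp;
        Cell a(&comp), b(&comp);

        HeapCellPtr field;
        field.init(&a);                            // like xml->parent.init(NULL) above: no barrier

        comp.incrementalMarking = true;            // an incremental GC slice is now in progress
        field = &b;                                // overwriting &a shades it
        std::printf("a marked: %d\n", a.marked);   // prints 1
        return 0;
    }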
|
||||
void
JSXML::writeBarrierPre(JSXML *xml)
{
#ifdef JSGC_INCREMENTAL
    if (!xml)
        return;

    JSCompartment *comp = xml->compartment();
    if (comp->needsBarrier())
        MarkXMLUnbarriered(comp->barrierTracer(), xml, "write barrier");
#endif
}

void
JSXML::writeBarrierPost(JSXML *xml, void *addr)
{
}
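JSXML::writeBarrierPost is deliberately empty: a snapshot-at-the-beginning collector only needs the pre-barrier, and the comment in xml_trace above already flags that generational GC will need more. For comparison, a post-barrier typically records the written location in a remembered set so a minor collection can find old-to-young edges without scanning the whole heap. A generic sketch of that idea (not something this patch implements):

    #include <unordered_set>

    struct Cell { bool inNursery = false; };       // illustrative only

    // Remembered set: slots in tenured objects that may point into the nursery.
    struct StoreBuffer {
        std::unordered_set<Cell **> slots;
        void put(Cell **slot) { slots.insert(slot); }
    };

    static StoreBuffer rememberedSet;

    // Post-barrier: runs after the store and records the slot if the new value is young.
    static void WriteBarrierPost(Cell *newValue, Cell **slot)
    {
        if (newValue && newValue->inNursery)
            rememberedSet.put(slot);
    }

    static void StoreEdge(Cell **slot, Cell *value)
    {
        *slot = value;
        WriteBarrierPost(value, slot);             // runs after the store, hence "post"
    }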
|
||||
void
|
||||
js_TraceXML(JSTracer *trc, JSXML *xml)
|
||||
{
|
||||
if (xml->object)
|
||||
MarkObject(trc, *xml->object, "object");
|
||||
MarkObject(trc, xml->object, "object");
|
||||
if (xml->name)
|
||||
MarkObject(trc, *xml->name, "name");
|
||||
MarkObject(trc, xml->name, "name");
|
||||
if (xml->parent)
|
||||
JS_CALL_TRACER(trc, xml->parent, JSTRACE_XML, "xml_parent");
|
||||
MarkXML(trc, xml->parent, "xml_parent");
|
||||
|
||||
if (JSXML_HAS_VALUE(xml)) {
|
||||
if (xml->xml_value)
|
||||
@ -7291,25 +7359,21 @@ js_TraceXML(JSTracer *trc, JSXML *xml)
|
||||
return;
|
||||
}
|
||||
|
||||
xml_trace_vector(trc,
|
||||
(JSXML **) xml->xml_kids.vector,
|
||||
xml->xml_kids.length);
|
||||
MarkXMLRange(trc, xml->xml_kids.length, xml->xml_kids.vector, "xml_kids");
|
||||
XMLArrayCursorTrace(trc, xml->xml_kids.cursors);
|
||||
|
||||
if (xml->xml_class == JSXML_CLASS_LIST) {
|
||||
if (xml->xml_target)
|
||||
JS_CALL_TRACER(trc, xml->xml_target, JSTRACE_XML, "target");
|
||||
MarkXML(trc, xml->xml_target, "target");
|
||||
if (xml->xml_targetprop)
|
||||
MarkObject(trc, *xml->xml_targetprop, "targetprop");
|
||||
MarkObject(trc, xml->xml_targetprop, "targetprop");
|
||||
} else {
|
||||
MarkObjectRange(trc, xml->xml_namespaces.length,
|
||||
(JSObject **) xml->xml_namespaces.vector,
|
||||
xml->xml_namespaces.vector,
|
||||
"xml_namespaces");
|
||||
XMLArrayCursorTrace(trc, xml->xml_namespaces.cursors);
|
||||
|
||||
xml_trace_vector(trc,
|
||||
(JSXML **) xml->xml_attrs.vector,
|
||||
xml->xml_attrs.length);
|
||||
MarkXMLRange(trc, xml->xml_attrs.length, xml->xml_attrs.vector, "xml_attrs");
|
||||
XMLArrayCursorTrace(trc, xml->xml_attrs.cursors);
|
||||
}
|
||||
}
|
||||
@ -7500,7 +7564,7 @@ namespace js {
|
||||
bool
|
||||
GlobalObject::getFunctionNamespace(JSContext *cx, Value *vp)
|
||||
{
|
||||
Value &v = getSlotRef(FUNCTION_NS);
|
||||
HeapValue &v = getSlotRef(FUNCTION_NS);
|
||||
if (v.isUndefined()) {
|
||||
JSRuntime *rt = cx->runtime;
|
||||
JSLinearString *prefix = rt->atomState.typeAtoms[JSTYPE_FUNCTION];
|
||||
@ -7518,7 +7582,7 @@ GlobalObject::getFunctionNamespace(JSContext *cx, Value *vp)
|
||||
*/
|
||||
obj->clearType();
|
||||
|
||||
v.setObject(*obj);
|
||||
v.set(compartment(), ObjectValue(*obj));
|
||||
}
|
||||
|
||||
*vp = v;
|
||||
@ -7837,12 +7901,12 @@ js_DeleteXMLListElements(JSContext *cx, JSObject *listobj)
|
||||
|
||||
struct JSXMLFilter
|
||||
{
|
||||
JSXML *list;
|
||||
JSXML *result;
|
||||
JSXML *kid;
|
||||
JSXMLArrayCursor cursor;
|
||||
HeapPtr<JSXML> list;
|
||||
HeapPtr<JSXML> result;
|
||||
HeapPtr<JSXML> kid;
|
||||
JSXMLArrayCursor<JSXML> cursor;
|
||||
|
||||
JSXMLFilter(JSXML *list, JSXMLArray *array)
|
||||
JSXMLFilter(JSXML *list, JSXMLArray<JSXML> *array)
|
||||
: list(list), result(NULL), kid(NULL), cursor(array) {}
|
||||
|
||||
~JSXMLFilter() {}
|
||||
@ -7856,11 +7920,11 @@ xmlfilter_trace(JSTracer *trc, JSObject *obj)
|
||||
return;
|
||||
|
||||
JS_ASSERT(filter->list);
|
||||
JS_CALL_TRACER(trc, filter->list, JSTRACE_XML, "list");
|
||||
MarkXML(trc, filter->list, "list");
|
||||
if (filter->result)
|
||||
JS_CALL_TRACER(trc, filter->result, JSTRACE_XML, "result");
|
||||
MarkXML(trc, filter->result, "result");
|
||||
if (filter->kid)
|
||||
JS_CALL_TRACER(trc, filter->kid, JSTRACE_XML, "kid");
|
||||
MarkXML(trc, filter->kid, "kid");
|
||||
|
||||
/*
|
||||
* We do not need to trace the cursor as that would be done when
|
||||
@ -7975,7 +8039,7 @@ js_StepXMLListFilter(JSContext *cx, JSBool initialized)
|
||||
}
|
||||
|
||||
/* Do the iteration. */
|
||||
filter->kid = (JSXML *) filter->cursor.getNext();
|
||||
filter->kid = filter->cursor.getNext();
|
||||
if (!filter->kid) {
|
||||
/*
|
||||
* Do not defer finishing the cursor until the next GC cycle to avoid
|
||||
|
@ -43,6 +43,8 @@
|
||||
#include "jsobj.h"
|
||||
#include "jscell.h"
|
||||
|
||||
#include "gc/Barrier.h"
|
||||
|
||||
extern const char js_AnyName_str[];
|
||||
extern const char js_AttributeName_str[];
|
||||
extern const char js_isXMLName_str[];
|
||||
@ -53,14 +55,16 @@ extern const char js_gt_entity_str[];
|
||||
extern const char js_lt_entity_str[];
|
||||
extern const char js_quot_entity_str[];
|
||||
|
||||
typedef JSBool
|
||||
(* JSIdentityOp)(const void *a, const void *b);
|
||||
template<class T>
|
||||
struct JSXMLArrayCursor;
|
||||
|
||||
struct JSXMLArray {
|
||||
template<class T>
|
||||
struct JSXMLArray
|
||||
{
|
||||
uint32 length;
|
||||
uint32 capacity;
|
||||
void **vector;
|
||||
JSXMLArrayCursor *cursors;
|
||||
js::HeapPtr<T> *vector;
|
||||
JSXMLArrayCursor<T> *cursors;
|
||||
|
||||
void init() {
|
||||
length = capacity = 0;
|
||||
@ -74,15 +78,18 @@ struct JSXMLArray {
|
||||
void trim();
|
||||
};
|
||||
|
||||
template<class T>
|
||||
struct JSXMLArrayCursor
|
||||
{
|
||||
JSXMLArray *array;
|
||||
uint32 index;
|
||||
JSXMLArrayCursor *next;
|
||||
JSXMLArrayCursor **prevp;
|
||||
void *root;
|
||||
typedef js::HeapPtr<T> HeapPtrT;
|
||||
|
||||
JSXMLArrayCursor(JSXMLArray *array)
|
||||
JSXMLArray<T> *array;
|
||||
uint32 index;
|
||||
JSXMLArrayCursor<T> *next;
|
||||
JSXMLArrayCursor<T> **prevp;
|
||||
HeapPtrT root;
|
||||
|
||||
JSXMLArrayCursor(JSXMLArray<T> *array)
|
||||
: array(array), index(0), next(array->cursors), prevp(&array->cursors),
|
||||
root(NULL)
|
||||
{
|
||||
@ -100,15 +107,16 @@ struct JSXMLArrayCursor
|
||||
next->prevp = prevp;
|
||||
*prevp = next;
|
||||
array = NULL;
|
||||
root.~HeapPtrT();
|
||||
}
|
||||
|
||||
void *getNext() {
|
||||
T *getNext() {
|
||||
if (!array || index >= array->length)
|
||||
return NULL;
|
||||
return root = array->vector[index++];
|
||||
}
|
||||
|
||||
void *getCurrent() {
|
||||
T *getCurrent() {
|
||||
if (!array || index >= array->length)
|
||||
return NULL;
|
||||
return root = array->vector[index];
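Besides the type parameter, the important change to JSXMLArrayCursor is that root became a HeapPtr: the element most recently handed out by getNext()/getCurrent() stays visible to the GC (the cursors are traced via XMLArrayCursorTrace), so an incremental slice that runs between iteration steps cannot collect it. A simplified cursor over a plain vector showing the same shape, with an ordinary pointer standing in for HeapPtr:

    #include <vector>
    #include <cstddef>

    template <typename T>
    class ArrayCursor {
        std::vector<T *> *array;
        size_t index;
        T *root;                     // stands in for js::HeapPtr<T>: traced, so the
                                     // current element survives a GC slice mid-iteration
      public:
        explicit ArrayCursor(std::vector<T *> *a) : array(a), index(0), root(nullptr) {}

        T *getNext() {
            if (!array || index >= array->size())
                return nullptr;
            return root = (*array)[index++];
        }

        T *getCurrent() {
            if (!array || index >= array->size())
                return nullptr;
            return root = (*array)[index];
        }
    };

Callers such as the while (JSXML *kid = cursor.getNext()) loops above no longer need the (JSXML *) casts, which accounts for most of the churn in this file.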
@ -146,24 +154,24 @@ typedef enum JSXMLClass {
|
||||
#endif
|
||||
|
||||
typedef struct JSXMLListVar {
|
||||
JSXMLArray kids; /* NB: must come first */
|
||||
JSXML *target;
|
||||
JSObject *targetprop;
|
||||
JSXMLArray<JSXML> kids; /* NB: must come first */
|
||||
js::HeapPtrXML target;
|
||||
js::HeapPtrObject targetprop;
|
||||
} JSXMLListVar;
|
||||
|
||||
typedef struct JSXMLElemVar {
|
||||
JSXMLArray kids; /* NB: must come first */
|
||||
JSXMLArray namespaces;
|
||||
JSXMLArray attrs;
|
||||
JSXMLArray<JSXML> kids; /* NB: must come first */
|
||||
JSXMLArray<JSObject> namespaces;
|
||||
JSXMLArray<JSXML> attrs;
|
||||
} JSXMLElemVar;
|
||||
|
||||
/* union member shorthands */
|
||||
#define xml_kids u.list.kids
|
||||
#define xml_target u.list.target
|
||||
#define xml_targetprop u.list.targetprop
|
||||
#define xml_namespaces u.elem.namespaces
|
||||
#define xml_attrs u.elem.attrs
|
||||
#define xml_value u.value
|
||||
#define xml_kids list.kids
|
||||
#define xml_target list.target
|
||||
#define xml_targetprop list.targetprop
|
||||
#define xml_namespaces elem.namespaces
|
||||
#define xml_attrs elem.attrs
|
||||
#define xml_value value
|
||||
|
||||
/* xml_class-testing macros */
|
||||
#define JSXML_HAS_KIDS(xml) JSXML_CLASS_HAS_KIDS((xml)->xml_class)
|
||||
@ -178,30 +186,26 @@ struct JSXML : js::gc::Cell {
|
||||
JSCList links;
|
||||
uint32 serial;
|
||||
#endif
|
||||
JSObject *object;
|
||||
js::HeapPtrObject object;
|
||||
void *domnode; /* DOM node if mapped info item */
|
||||
JSXML *parent;
|
||||
JSObject *name;
|
||||
js::HeapPtrXML parent;
|
||||
js::HeapPtrObject name;
|
||||
uint32 xml_class; /* discriminates u, below */
|
||||
uint32 xml_flags; /* flags, see below */
|
||||
union {
|
||||
JSXMLListVar list;
|
||||
JSXMLElemVar elem;
|
||||
JSString *value;
|
||||
} u;
|
||||
|
||||
void finalize(JSContext *cx) {
|
||||
if (JSXML_HAS_KIDS(this)) {
|
||||
xml_kids.finish(cx);
|
||||
if (xml_class == JSXML_CLASS_ELEMENT) {
|
||||
xml_namespaces.finish(cx);
|
||||
xml_attrs.finish(cx);
|
||||
}
|
||||
}
|
||||
#ifdef DEBUG_notme
|
||||
JS_REMOVE_LINK(&links);
|
||||
|
||||
JSXMLListVar list;
|
||||
JSXMLElemVar elem;
|
||||
js::HeapPtrString value;
|
||||
|
||||
#if JS_BITS_PER_WORD == 32
|
||||
/* The size of every GC thing must be divisible by the FreeCell size. */
|
||||
void *pad;
|
||||
#endif
|
||||
}
|
||||
|
||||
void finalize(JSContext *cx);
|
||||
|
||||
static void writeBarrierPre(JSXML *xml);
|
||||
static void writeBarrierPost(JSXML *xml, void *addr);
|
||||
};
|
||||
|
||||
/* xml_flags values */
|
||||
|
@ -771,7 +771,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
|
||||
JS_ASSERT(objReg != typeReg);
|
||||
|
||||
FastArrayLoadFails fails;
|
||||
fails.rangeCheck = guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
fails.rangeCheck = guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, BelowOrEqual);
|
||||
|
||||
RegisterID dslotsReg = objReg;
|
||||
@ -1245,7 +1245,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
|
||||
{
|
||||
gc::AllocKind allocKind = templateObject->getAllocKind();
|
||||
|
||||
JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
|
||||
JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind < gc::FINALIZE_OBJECT_LIMIT);
|
||||
int thingSize = (int)gc::Arena::thingSize(allocKind);
|
||||
|
||||
JS_ASSERT(cx->typeInferenceEnabled());
|
||||
@ -1285,7 +1285,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
|
||||
* slots first.
|
||||
*/
|
||||
if (templateObject->isDenseArray()) {
|
||||
JS_ASSERT(!templateObject->initializedLength);
|
||||
JS_ASSERT(!templateObject->initializedLength());
|
||||
addPtr(Imm32(-thingSize + sizeof(JSObject)), result);
|
||||
storePtr(result, Address(result, -(int)sizeof(JSObject) + JSObject::offsetOfSlots()));
|
||||
addPtr(Imm32(-(int)sizeof(JSObject)), result);
|
||||
|
@ -5132,7 +5132,21 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed
|
||||
ScriptAnalysis::NameAccess access =
|
||||
analysis->resolveNameAccess(cx, ATOM_TO_JSID(atom), true);
|
||||
if (access.nesting) {
|
||||
Address address = frame.loadNameAddress(access);
|
||||
/* Use a SavedReg so it isn't clobbered by the stub call. */
|
||||
RegisterID nameReg = frame.allocReg(Registers::SavedRegs).reg();
|
||||
Address address = frame.loadNameAddress(access, nameReg);
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
/* Write barrier. */
|
||||
if (cx->compartment->needsBarrier()) {
|
||||
stubcc.linkExit(masm.jump(), Uses(0));
|
||||
stubcc.leave();
|
||||
stubcc.masm.addPtr(Imm32(address.offset), address.base, Registers::ArgReg1);
|
||||
OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
|
||||
stubcc.rejoin(Changes(0));
|
||||
}
|
||||
#endif
|
||||
frame.storeTo(rhs, address, popGuaranteed);
|
||||
frame.shimmy(1);
|
||||
frame.freeReg(address.base);
|
||||
@ -5160,7 +5174,24 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed
|
||||
!propertyTypes->isOwnProperty(cx, object, true)) {
|
||||
types->addFreeze(cx);
|
||||
uint32 slot = propertyTypes->definiteSlot();
|
||||
RegisterID reg = frame.tempRegForData(lhs);
|
||||
bool isObject = lhs->isTypeKnown();
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
if (cx->compartment->needsBarrier() && propertyTypes->needsBarrier(cx)) {
|
||||
/* Write barrier. */
|
||||
Jump j;
|
||||
if (isObject)
|
||||
j = masm.testGCThing(Address(reg, JSObject::getFixedSlotOffset(slot)));
|
||||
else
|
||||
j = masm.jump();
|
||||
stubcc.linkExit(j, Uses(0));
|
||||
stubcc.leave();
|
||||
stubcc.masm.addPtr(Imm32(JSObject::getFixedSlotOffset(slot)),
|
||||
reg, Registers::ArgReg1);
|
||||
OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
|
||||
stubcc.rejoin(Changes(0));
|
||||
}
|
||||
#endif
|
||||
if (!isObject) {
|
||||
Jump notObject = frame.testObject(Assembler::NotEqual, lhs);
|
||||
stubcc.linkExit(notObject, Uses(2));
|
||||
@ -5168,7 +5199,6 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed
|
||||
stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
|
||||
OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
|
||||
}
|
||||
RegisterID reg = frame.tempRegForData(lhs);
|
||||
frame.storeTo(rhs, Address(reg, JSObject::getFixedSlotOffset(slot)), popGuaranteed);
|
||||
frame.shimmy(1);
|
||||
if (!isObject)
|
||||
@ -5177,6 +5207,14 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
/* Write barrier. */
|
||||
if (cx->compartment->needsBarrier() && (!types || types->propertyNeedsBarrier(cx, id))) {
|
||||
jsop_setprop_slow(atom, usePropCache);
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
JSOp op = JSOp(*PC);
|
||||
|
||||
ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
|
||||
@ -5718,6 +5756,18 @@ mjit::Compiler::iter(uintN flags)
|
||||
Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
|
||||
stubcc.linkExit(overlongChain, Uses(1));
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
    /*
     * Write barrier for stores to the iterator. We only need to take a write
     * barrier if NativeIterator::obj is actually going to change.
     */
    if (cx->compartment->needsBarrier()) {
        Jump j = masm.branchPtr(Assembler::NotEqual,
                                Address(nireg, offsetof(NativeIterator, obj)), reg);
        stubcc.linkExit(j, Uses(1));
    }
#endif
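The guard above is a correctness-preserving elision: under snapshot-at-the-beginning a store only needs a pre-barrier if it removes a reference, and writing back the same pointer removes nothing, so the barrier (and its cost on the hot path) can be skipped whenever NativeIterator::obj does not actually change. The run-time logic, spelled out in plain C++ with stand-in types:

    struct Cell { bool marked = false; };

    static bool barriersActive = false;

    static void PreBarrier(Cell *old)              // see the pre-barrier sketch further up
    {
        if (old && barriersActive)
            old->marked = true;
    }

    struct NativeIteratorModel {                   // stand-in for NativeIterator
        Cell *obj = nullptr;
    };

    static void SetIteratorObject(NativeIteratorModel *ni, Cell *newObj)
    {
        if (ni->obj != newObj)                     // only a real change can drop a reference,
            PreBarrier(ni->obj);                   // so only then does the snapshot need help
        ni->obj = newObj;
    }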
|
||||
/* Found a match with the most recent iterator. Hooray! */
|
||||
|
||||
/* Mark iterator as active. */
|
||||
@ -5963,7 +6013,7 @@ mjit::Compiler::jsop_getgname(uint32 index)
|
||||
*/
|
||||
const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(atom));
|
||||
if (shape && shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) {
|
||||
Value *value = &globalObj->getSlotRef(shape->slot);
|
||||
HeapValue *value = &globalObj->getSlotRef(shape->slot);
|
||||
if (!value->isUndefined() &&
|
||||
!propertyTypes->isOwnProperty(cx, globalObj->getType(cx), true)) {
|
||||
watchGlobalReallocation();
|
||||
@ -6193,8 +6243,18 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara
|
||||
shape->writable() && shape->hasSlot() &&
|
||||
!types->isOwnProperty(cx, globalObj->getType(cx), true)) {
|
||||
watchGlobalReallocation();
|
||||
Value *value = &globalObj->getSlotRef(shape->slot);
|
||||
HeapValue *value = &globalObj->getSlotRef(shape->slot);
|
||||
RegisterID reg = frame.allocReg();
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
/* Write barrier. */
|
||||
if (cx->compartment->needsBarrier() && types->needsBarrier(cx)) {
|
||||
stubcc.linkExit(masm.jump(), Uses(0));
|
||||
stubcc.leave();
|
||||
stubcc.masm.move(ImmPtr(value), Registers::ArgReg1);
|
||||
OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
|
||||
stubcc.rejoin(Changes(0));
|
||||
}
|
||||
#endif
|
||||
masm.move(ImmPtr(value), reg);
|
||||
frame.storeTo(frame.peek(-1), Address(reg), popGuaranteed);
|
||||
frame.shimmy(1);
|
||||
@ -6203,6 +6263,14 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
/* Write barrier. */
|
||||
if (cx->compartment->needsBarrier()) {
|
||||
jsop_setgname_slow(atom, usePropertyCache);
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
#if defined JS_MONOIC
|
||||
FrameEntry *objFe = frame.peek(-2);
|
||||
FrameEntry *fe = frame.peek(-1);
|
||||
@ -6217,6 +6285,7 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara
|
||||
Jump shapeGuard;
|
||||
|
||||
RESERVE_IC_SPACE(masm);
|
||||
|
||||
ic.fastPathStart = masm.label();
|
||||
if (objFe->isConstant()) {
|
||||
JSObject *obj = &objFe->getValue().toObject();
|
||||
|
@ -367,7 +367,7 @@ class Compiler : public BaseCompiler
|
||||
analyze::CrossScriptSSA ssa;
|
||||
|
||||
GlobalObject *globalObj;
|
||||
const Value *globalSlots; /* Original slots pointer. */
|
||||
const HeapValue *globalSlots; /* Original slots pointer. */
|
||||
|
||||
Assembler masm;
|
||||
FrameState frame;
|
||||
|
@ -467,7 +467,7 @@ mjit::Compiler::compileArrayPush(FrameEntry *thisValue, FrameEntry *arg)
|
||||
Int32Key key = Int32Key::FromRegister(lengthReg);
|
||||
|
||||
/* Test for 'length == initializedLength' */
|
||||
Jump initlenGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, Assembler::NotEqual);
|
||||
stubcc.linkExit(initlenGuard, Uses(3));
|
||||
|
||||
@ -481,7 +481,7 @@ mjit::Compiler::compileArrayPush(FrameEntry *thisValue, FrameEntry *arg)
|
||||
|
||||
masm.bumpKey(key, 1);
|
||||
masm.store32(lengthReg, Address(objReg, offsetof(JSObject, privateData)));
|
||||
masm.store32(lengthReg, Address(objReg, offsetof(JSObject, initializedLength)));
|
||||
masm.store32(lengthReg, Address(objReg, JSObject::offsetOfInitializedLength()));
|
||||
|
||||
stubcc.leave();
|
||||
stubcc.masm.move(Imm32(1), Registers::ArgReg1);
|
||||
@ -504,6 +504,12 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool
|
||||
if (thisValue->isConstant())
|
||||
return Compile_InlineAbort;
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
/* Write barrier. */
|
||||
if (cx->compartment->needsBarrier())
|
||||
return Compile_InlineAbort;
|
||||
#endif
|
||||
|
||||
RegisterID objReg = frame.tempRegForData(thisValue);
|
||||
frame.pinReg(objReg);
|
||||
|
||||
@ -533,7 +539,7 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool
|
||||
|
||||
/* Test for 'length == initializedLength' */
|
||||
Int32Key key = Int32Key::FromRegister(lengthReg);
|
||||
Jump initlenGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, Assembler::NotEqual);
|
||||
stubcc.linkExit(initlenGuard, Uses(3));
|
||||
|
||||
@ -569,7 +575,7 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool
|
||||
}
|
||||
|
||||
masm.store32(lengthReg, Address(objReg, offsetof(JSObject, privateData)));
|
||||
masm.store32(lengthReg, Address(objReg, offsetof(JSObject, initializedLength)));
|
||||
masm.store32(lengthReg, Address(objReg, JSObject::offsetOfInitializedLength()));
|
||||
|
||||
if (!isArrayPop)
|
||||
INLINE_STUBCALL(stubs::ArrayShift, REJOIN_NONE);
|
||||
@ -668,13 +674,13 @@ mjit::Compiler::compileArrayConcat(types::TypeSet *thisTypes, types::TypeSet *ar
|
||||
|
||||
RegisterID objReg = frame.tempRegForData(thisValue);
|
||||
masm.load32(Address(objReg, offsetof(JSObject, privateData)), reg);
|
||||
Jump initlenOneGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump initlenOneGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, Assembler::NotEqual);
|
||||
stubcc.linkExit(initlenOneGuard, Uses(3));
|
||||
|
||||
objReg = frame.tempRegForData(argValue);
|
||||
masm.load32(Address(objReg, offsetof(JSObject, privateData)), reg);
|
||||
Jump initlenTwoGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump initlenTwoGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, Assembler::NotEqual);
|
||||
stubcc.linkExit(initlenTwoGuard, Uses(3));
|
||||
|
||||
@ -785,7 +791,7 @@ mjit::Compiler::compileArrayWithArgs(uint32 argc)
|
||||
}
|
||||
|
||||
masm.storePtr(ImmIntPtr(intptr_t(argc)),
|
||||
Address(result, offsetof(JSObject, initializedLength)));
|
||||
Address(result, JSObject::offsetOfInitializedLength()));
|
||||
|
||||
stubcc.leave();
|
||||
|
||||
|
@ -1102,6 +1102,8 @@ mjit::Compiler::jsop_setelem_dense()
|
||||
bool hoisted = loop && id->isType(JSVAL_TYPE_INT32) &&
|
||||
loop->hoistArrayLengthCheck(DENSE_ARRAY, objv, indexv);
|
||||
|
||||
MaybeJump initlenExit;
|
||||
|
||||
if (hoisted) {
|
||||
FrameEntry *slotsFe = loop->invariantArraySlots(objv);
|
||||
slotsReg = frame.tempRegForData(slotsFe);
|
||||
@ -1129,13 +1131,13 @@ mjit::Compiler::jsop_setelem_dense()
|
||||
// Make an OOL path for setting exactly the initialized length.
|
||||
Label syncTarget = stubcc.syncExitAndJump(Uses(3));
|
||||
|
||||
Jump initlenGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, Assembler::BelowOrEqual);
|
||||
stubcc.linkExitDirect(initlenGuard, stubcc.masm.label());
|
||||
|
||||
// Recheck for an exact initialized length. :TODO: would be nice to
|
||||
// reuse the condition bits from the previous test.
|
||||
Jump exactlenGuard = stubcc.masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump exactlenGuard = stubcc.masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
objReg, key, Assembler::NotEqual);
|
||||
exactlenGuard.linkTo(syncTarget, &stubcc.masm);
|
||||
|
||||
@ -1149,7 +1151,7 @@ mjit::Compiler::jsop_setelem_dense()
|
||||
stubcc.masm.bumpKey(key, 1);
|
||||
|
||||
// Update the initialized length.
|
||||
stubcc.masm.storeKey(key, Address(objReg, offsetof(JSObject, initializedLength)));
|
||||
stubcc.masm.storeKey(key, Address(objReg, JSObject::offsetOfInitializedLength()));
|
||||
|
||||
// Update the array length if needed.
|
||||
Jump lengthGuard = stubcc.masm.guardArrayExtent(offsetof(JSObject, privateData),
|
||||
@ -1160,14 +1162,41 @@ mjit::Compiler::jsop_setelem_dense()
|
||||
// Restore the index.
|
||||
stubcc.masm.bumpKey(key, -1);
|
||||
|
||||
// Rejoin with the inline path.
|
||||
Jump initlenExit = stubcc.masm.jump();
|
||||
stubcc.crossJump(initlenExit, masm.label());
|
||||
stubcc.masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
|
||||
|
||||
initlenExit = stubcc.masm.jump();
|
||||
|
||||
masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
|
||||
slotsReg = objReg;
|
||||
}
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
/*
|
||||
* Write barrier.
|
||||
* We skip over the barrier if we incremented initializedLength above,
|
||||
* because in that case the slot we're overwriting was previously
|
||||
* undefined.
|
||||
*/
|
||||
types::TypeSet *types = frame.extra(obj).types;
|
||||
if (cx->compartment->needsBarrier() && (!types || types->propertyNeedsBarrier(cx, JSID_VOID))) {
|
||||
Label barrierStart = stubcc.masm.label();
|
||||
frame.sync(stubcc.masm, Uses(3));
|
||||
stubcc.linkExitDirect(masm.jump(), barrierStart);
|
||||
stubcc.masm.storePtr(slotsReg, FrameAddress(offsetof(VMFrame, scratch)));
|
||||
if (key.isConstant())
|
||||
stubcc.masm.lea(Address(slotsReg, key.index() * sizeof(Value)), Registers::ArgReg1);
|
||||
else
|
||||
stubcc.masm.lea(BaseIndex(slotsReg, key.reg(), masm.JSVAL_SCALE), Registers::ArgReg1);
|
||||
OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
|
||||
stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, scratch)), slotsReg);
|
||||
stubcc.rejoin(Changes(0));
|
||||
}
|
||||
#endif
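The other elision here relies on the old contents of the slot: when the store is growing initializedLength, the slot being written held undefined, which is not a GC thing, so there is nothing for a snapshot-at-the-beginning barrier to shade and the whole out-of-line path can be jumped over. A small sketch of the rule the compiler applies (HypotheticalValue is a stand-in for the engine's Value):

    #include <cassert>

    // Stand-in tagged value: only the "is this a GC thing" question matters here.
    struct HypotheticalValue {
        enum Kind { Undefined, Int32, Object, String } kind = Undefined;
        bool isGCThing() const { return kind == Object || kind == String; }
    };

    static bool barriersActive = false;
    static int barriersTaken = 0;

    static void PreBarrier(const HypotheticalValue &old)
    {
        if (barriersActive && old.isGCThing())
            barriersTaken++;                       // real code: mark the old referent
    }

    // Store into a dense-array slot. 'freshSlot' models the initialized-length case:
    // the caller knows the previous contents were undefined, so no barrier is needed.
    static void StoreElement(HypotheticalValue *slot, const HypotheticalValue &rhs, bool freshSlot)
    {
        if (!freshSlot)
            PreBarrier(*slot);
        *slot = rhs;
    }

    int main()
    {
        barriersActive = true;
        HypotheticalValue slots[2];
        HypotheticalValue obj; obj.kind = HypotheticalValue::Object;

        StoreElement(&slots[0], obj, /*freshSlot=*/true);    // growing: no barrier
        StoreElement(&slots[0], obj, /*freshSlot=*/false);   // overwrite: barrier runs
        assert(barriersTaken == 1);
        return 0;
    }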
|
||||
/* Jump over the write barrier in the initlen case. */
|
||||
if (initlenExit.isSet())
|
||||
stubcc.crossJump(initlenExit.get(), masm.label());
|
||||
|
||||
// Fully store the value. :TODO: don't need to do this in the non-initlen case
|
||||
// if the array is packed and monomorphic.
|
||||
if (key.isConstant())
|
||||
@ -1501,6 +1530,14 @@ mjit::Compiler::jsop_setelem(bool popGuaranteed)
|
||||
return true;
|
||||
}
|
||||
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
// Write barrier.
|
||||
if (cx->compartment->needsBarrier()) {
|
||||
jsop_setelem_slow();
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
SetElementICInfo ic = SetElementICInfo(JSOp(*PC));
|
||||
|
||||
// One by one, check if the most important stack entries have registers,
|
||||
@ -1588,7 +1625,7 @@ mjit::Compiler::jsop_setelem(bool popGuaranteed)
|
||||
stubcc.linkExitDirect(ic.claspGuard, ic.slowPathStart);
|
||||
|
||||
// Guard in range of initialized length.
|
||||
Jump initlenGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
ic.objReg, ic.key, Assembler::BelowOrEqual);
|
||||
stubcc.linkExitDirect(initlenGuard, ic.slowPathStart);
|
||||
|
||||
@ -1746,7 +1783,7 @@ mjit::Compiler::jsop_getelem_dense(bool isPacked)
|
||||
// Guard on the array's initialized length.
|
||||
MaybeJump initlenGuard;
|
||||
if (!hoisted) {
|
||||
initlenGuard = masm.guardArrayExtent(offsetof(JSObject, initializedLength),
|
||||
initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
|
||||
baseReg, key, Assembler::BelowOrEqual);
|
||||
}
|
||||
|
||||
@ -2678,7 +2715,7 @@ mjit::Compiler::jsop_initelem()
|
||||
|
||||
if (cx->typeInferenceEnabled()) {
|
||||
/* Update the initialized length. */
|
||||
masm.store32(Imm32(idx + 1), Address(objReg, offsetof(JSObject, initializedLength)));
|
||||
masm.store32(Imm32(idx + 1), Address(objReg, JSObject::offsetOfInitializedLength()));
|
||||
}
|
||||
|
||||
/* Perform the store. */
|
||||
|
@ -879,18 +879,24 @@ FrameState::syncAndForgetFe(FrameEntry *fe, bool markSynced)
|
||||
}
|
||||
|
||||
inline JSC::MacroAssembler::Address
|
||||
FrameState::loadNameAddress(const analyze::ScriptAnalysis::NameAccess &access)
|
||||
FrameState::loadNameAddress(const analyze::ScriptAnalysis::NameAccess &access, RegisterID reg)
|
||||
{
|
||||
JS_ASSERT(access.script && access.nesting);
|
||||
|
||||
RegisterID reg = allocReg();
|
||||
Value **pbase = access.arg ? &access.nesting->argArray : &access.nesting->varArray;
|
||||
const Value **pbase = access.arg ? &access.nesting->argArray : &access.nesting->varArray;
|
||||
masm.move(ImmPtr(pbase), reg);
|
||||
masm.loadPtr(Address(reg), reg);
|
||||
|
||||
return Address(reg, access.index * sizeof(Value));
|
||||
}
|
||||
|
||||
inline JSC::MacroAssembler::Address
|
||||
FrameState::loadNameAddress(const analyze::ScriptAnalysis::NameAccess &access)
|
||||
{
|
||||
RegisterID reg = allocReg();
|
||||
return loadNameAddress(access, reg);
|
||||
}
|
||||
|
||||
inline void
|
||||
FrameState::forgetLoopReg(FrameEntry *fe)
|
||||
{
|
||||
@ -1142,6 +1148,14 @@ FrameState::testObject(Assembler::Condition cond, FrameEntry *fe)
|
||||
return masm.testObject(cond, tempRegForType(fe));
|
||||
}
|
||||
|
||||
inline JSC::MacroAssembler::Jump
|
||||
FrameState::testGCThing(FrameEntry *fe)
|
||||
{
|
||||
if (shouldAvoidTypeRemat(fe))
|
||||
return masm.testGCThing(addressOf(fe));
|
||||
return masm.testGCThing(tempRegForType(fe));
|
||||
}
|
||||
|
||||
inline JSC::MacroAssembler::Jump
|
||||
FrameState::testDouble(Assembler::Condition cond, FrameEntry *fe)
|
||||
{
|
||||
|
@ -779,6 +779,8 @@ class FrameState
|
||||
*/
|
||||
inline Jump testObject(Assembler::Condition cond, FrameEntry *fe);
|
||||
|
||||
inline Jump testGCThing(FrameEntry *fe);
|
||||
|
||||
/*
|
||||
* Helper function. Tests if a slot's type is primitive. Condition must
|
||||
* be Equal or NotEqual.
|
||||
@ -961,6 +963,8 @@ class FrameState
|
||||
* The compiler owns the result's base register.
|
||||
*/
|
||||
inline Address loadNameAddress(const analyze::ScriptAnalysis::NameAccess &access);
|
||||
inline Address loadNameAddress(const analyze::ScriptAnalysis::NameAccess &access,
|
||||
RegisterID reg);
|
||||
|
||||
private:
|
||||
inline AnyRegisterID allocAndLoadReg(FrameEntry *fe, bool fp, RematInfo::RematType type);
|
||||
|
@ -1325,7 +1325,7 @@ LoopState::restoreInvariants(jsbytecode *pc, Assembler &masm,
|
||||
*/
|
||||
masm.loadPayload(frame.addressOf(entry.u.check.arraySlot), T0);
|
||||
if (entry.kind == InvariantEntry::DENSE_ARRAY_BOUNDS_CHECK)
|
||||
masm.load32(Address(T0, offsetof(JSObject, initializedLength)), T0);
|
||||
masm.load32(Address(T0, JSObject::offsetOfInitializedLength()), T0);
|
||||
else
|
||||
masm.loadPayload(Address(T0, TypedArray::lengthOffset()), T0);
|
||||
|
||||
|
@ -39,6 +39,10 @@
|
||||
#if !defined jsjaeger_h__ && defined JS_METHODJIT
|
||||
#define jsjaeger_h__
|
||||
|
||||
#ifdef JSGC_INCREMENTAL
|
||||
#define JSGC_INCREMENTAL_MJ
|
||||
#endif
|
||||
|
||||
#include "jscntxt.h"
|
||||
#include "jscompartment.h"
|
||||
|
||||
@ -728,7 +732,7 @@ struct InlineFrame
|
||||
{
|
||||
InlineFrame *parent;
|
||||
jsbytecode *parentpc;
|
||||
JSFunction *fun;
|
||||
HeapPtrFunction fun;
|
||||
|
||||
// Total distance between the start of the outer JSStackFrame and the start
|
||||
// of this frame, in multiples of sizeof(Value).
|
||||
|
@ -402,6 +402,14 @@ class NunboxAssembler : public JSC::MacroAssembler
|
||||
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_OBJECT));
|
||||
}
|
||||
|
||||
Jump testGCThing(RegisterID reg) {
|
||||
return branch32(AboveOrEqual, reg, ImmTag(JSVAL_LOWER_INCL_TAG_OF_GCTHING_SET));
|
||||
}
|
||||
|
||||
Jump testGCThing(Address address) {
|
||||
return branch32(AboveOrEqual, tagOf(address), ImmTag(JSVAL_LOWER_INCL_TAG_OF_GCTHING_SET));
|
||||
}
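testGCThing works because the nunbox tag values for GC-backed types form a contiguous block at the top of the tag range, so "is this value a GC thing" compiles down to a single unsigned compare against the lowest tag in that block, mirroring the AboveOrEqual test against JSVAL_LOWER_INCL_TAG_OF_GCTHING_SET above. The same trick in portable C++, with made-up tag values:

    #include <cstdint>
    #include <cassert>

    // Made-up tag ordering; the only property that matters is that GC-thing tags
    // are the largest values.
    enum ValueTag : uint32_t {
        TAG_INT32     = 1,
        TAG_BOOLEAN   = 2,
        TAG_UNDEFINED = 3,
        TAG_STRING    = 4,                         // first GC-thing tag
        TAG_OBJECT    = 5,
    };

    static const uint32_t LOWER_INCL_TAG_OF_GCTHING_SET = TAG_STRING;

    static bool IsGCThingTag(uint32_t tag)
    {
        return tag >= LOWER_INCL_TAG_OF_GCTHING_SET;   // one unsigned compare, no switch
    }

    int main()
    {
        assert(!IsGCThingTag(TAG_INT32));
        assert(IsGCThingTag(TAG_OBJECT));
        return 0;
    }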
|
||||
Jump testDouble(Condition cond, RegisterID reg) {
|
||||
Condition opcond;
|
||||
if (cond == Equal)
|
||||
|
@ -3224,6 +3224,21 @@ SetElementIC::update(VMFrame &f, const Value &objval, const Value &idval)
|
||||
return disable(f.cx, "unsupported object type");
|
||||
}
|
||||
|
||||
bool
|
||||
SetElementIC::shouldUpdate(JSContext *cx)
|
||||
{
|
||||
if (!hit) {
|
||||
hit = true;
|
||||
spew(cx, "ignored", "first hit");
|
||||
return false;
|
||||
}
|
||||
#ifdef JSGC_INCREMENTAL_MJ
|
||||
JS_ASSERT(!cx->compartment->needsBarrier());
|
||||
#endif
|
||||
JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
|
||||
return true;
|
||||
}
|
||||
|
||||
template<JSBool strict>
|
||||
void JS_FASTCALL
|
||||
ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
|
||||
|
@ -370,6 +370,7 @@ struct SetElementIC : public BaseIC {
|
||||
LookupStatus update(VMFrame &f, const Value &objval, const Value &idval);
|
||||
LookupStatus disable(JSContext *cx, const char *reason);
|
||||
LookupStatus error(JSContext *cx);
|
||||
bool shouldUpdate(JSContext *cx);
|
||||
};
|
||||
|
||||
struct PICInfo : public BasePolyIC {
|
||||
|
@ -338,6 +338,16 @@ class PunboxAssembler : public JSC::MacroAssembler
|
||||
return testObject(cond, Registers::ValueReg);
|
||||
}
|
||||
|
||||
Jump testGCThing(RegisterID reg) {
|
||||
return branchPtr(AboveOrEqual, reg, ImmTag(JSVAL_LOWER_INCL_SHIFTED_TAG_OF_GCTHING_SET));
|
||||
}
|
||||
|
||||
Jump testGCThing(Address address) {
|
||||
loadValue(address, Registers::ValueReg);
|
||||
return branchPtr(AboveOrEqual, Registers::ValueReg,
|
||||
ImmTag(JSVAL_LOWER_INCL_SHIFTED_TAG_OF_GCTHING_SET));
|
||||
}
|
||||
|
||||
Jump testDouble(Condition cond, RegisterID reg) {
|
||||
cond = (cond == Equal) ? BelowOrEqual : Above;
|
||||
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_MAX_DOUBLE));
|
||||
|
@ -43,6 +43,7 @@
|
||||
#include "jsobj.h"
|
||||
#include "jslibmath.h"
|
||||
#include "jsiter.h"
|
||||
#include "jsgcmark.h"
|
||||
#include "jsnum.h"
|
||||
#include "jsxml.h"
|
||||
#include "jsbool.h"
|
||||
@ -2548,3 +2549,17 @@ stubs::ConvertToTypedFloat(JSContext *cx, Value *vp)
|
||||
vp->setDouble(d);
|
||||
}
|
||||
}
|
||||
|
||||
void JS_FASTCALL
stubs::WriteBarrier(VMFrame &f, Value *addr)
{
    js::gc::MarkValueUnbarriered(f.cx->compartment->barrierTracer(), *addr, "write barrier");
}

void JS_FASTCALL
stubs::GCThingWriteBarrier(VMFrame &f, Value *addr)
{
    gc::Cell *cell = (gc::Cell *)addr->toGCThing();
    if (cell && !cell->isMarked())
        gc::MarkValueUnbarriered(f.cx->compartment->barrierTracer(), *addr, "write barrier");
}
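These two stubs are the out-of-line targets the compiler wires up above: WriteBarrier shades whatever the Value at addr currently refers to, while GCThingWriteBarrier is used where the caller has already proved the old value is a GC thing and adds a cheap already-marked test before the heavier marking call. A behavioural sketch with simplified types:

    // Simplified stand-ins for Value, Cell and the tracer-based marking calls.
    struct CellModel { bool marked = false; };

    struct ValueModel {
        CellModel *thing = nullptr;                // null means "not a GC thing"
        CellModel *toGCThing() const { return thing; }
    };

    static void MarkValue(const ValueModel &v)     // stands in for MarkValueUnbarriered
    {
        if (v.thing)
            v.thing->marked = true;
    }

    // Like stubs::WriteBarrier: called only when barriers are on; shade the old value
    // unconditionally (marking already ignores values that are not GC things).
    static void WriteBarrierModel(ValueModel *addr)
    {
        MarkValue(*addr);
    }

    // Like stubs::GCThingWriteBarrier: the caller proved the old value is a GC thing,
    // so a quick already-marked check can skip the marking path.
    static void GCThingWriteBarrierModel(ValueModel *addr)
    {
        CellModel *cell = addr->toGCThing();
        if (cell && !cell->marked)
            MarkValue(*addr);
    }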
|
@ -231,6 +231,9 @@ NewDenseUnallocatedArray(VMFrame &f, uint32 length);
|
||||
void JS_FASTCALL ArrayConcatTwoArrays(VMFrame &f);
|
||||
void JS_FASTCALL ArrayShift(VMFrame &f);
|
||||
|
||||
void JS_FASTCALL WriteBarrier(VMFrame &f, Value *addr);
|
||||
void JS_FASTCALL GCThingWriteBarrier(VMFrame &f, Value *addr);
|
||||
|
||||
} /* namespace stubs */
|
||||
|
||||
/*
|
||||
|
@ -160,8 +160,7 @@ class HeapReverser : public JSTracer {
|
||||
|
||||
/* Construct a HeapReverser for |context|'s heap. */
|
||||
HeapReverser(JSContext *cx) : map(cx), work(cx), parent(NULL) {
|
||||
context = cx;
|
||||
callback = traverseEdgeWithThis;
|
||||
JS_TRACER_INIT(this, cx, traverseEdgeWithThis);
|
||||
}
|
||||
|
||||
bool init() { return map.init(); }
|
||||
|
@ -403,11 +403,13 @@ void ValidateWriter::checkAccSet(LOpcode op, LIns *base, int32_t disp, AccSet ac
|
||||
//
|
||||
// base = <JSObject>
|
||||
// ins = ldp.obj<field> base[offsetof(JSObject, <field>)]
|
||||
#define OK_OBJ_FIELD(ldop, field) \
|
||||
#define OK_OBJ_FIELD_OFF(ldop, fieldoff) \
|
||||
((op == (ldop)) && \
|
||||
(disp == offsetof(JSObject, field)) && \
|
||||
(disp == fieldoff) && \
|
||||
couldBeObjectOrString(base))
|
||||
|
||||
#define OK_OBJ_FIELD(ldop, field) OK_OBJ_FIELD_OFF(ldop, offsetof(JSObject, field))
|
||||
|
||||
case ACCSET_OBJ_CLASP:
|
||||
ok = OK_OBJ_FIELD(LIR_ldp, clasp);
|
||||
break;
|
||||
@ -441,7 +443,8 @@ void ValidateWriter::checkAccSet(LOpcode op, LIns *base, int32_t disp, AccSet ac
|
||||
break;
|
||||
|
||||
case ACCSET_OBJ_CAPACITY:
|
||||
ok = OK_OBJ_FIELD(LIR_ldi, capacity) || OK_OBJ_FIELD(LIR_ldi, initializedLength);
|
||||
ok = OK_OBJ_FIELD(LIR_ldi, capacity) ||
|
||||
OK_OBJ_FIELD_OFF(LIR_ldi, (int32_t)JSObject::offsetOfInitializedLength());
|
||||
break;
|
||||
|
||||
case ACCSET_OBJ_SLOTS:
|
||||
|
@ -525,7 +525,7 @@ class Writer
|
||||
}
|
||||
|
||||
nj::LIns *ldiDenseArrayInitializedLength(nj::LIns *array) const {
|
||||
return name(lir->insLoad(nj::LIR_ldi, array, offsetof(JSObject, initializedLength),
|
||||
return name(lir->insLoad(nj::LIR_ldi, array, JSObject::offsetOfInitializedLength(),
|
||||
ACCSET_OBJ_CAPACITY),
|
||||
"capacity");
|
||||
}
|
||||
|
@ -46,10 +46,10 @@
|
||||
namespace js {
|
||||
|
||||
inline void
|
||||
ArgumentsObject::setInitialLength(uint32 length)
|
||||
ArgumentsObject::initInitialLength(uint32 length)
|
||||
{
|
||||
JS_ASSERT(getSlot(INITIAL_LENGTH_SLOT).isUndefined());
|
||||
setSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT));
|
||||
initSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT));
|
||||
JS_ASSERT((getSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32(length));
|
||||
JS_ASSERT(!hasOverriddenLength());
|
||||
}
|
||||
@ -77,11 +77,10 @@ ArgumentsObject::hasOverriddenLength() const
|
||||
}
|
||||
|
||||
inline void
|
||||
ArgumentsObject::setCalleeAndData(JSObject &callee, ArgumentsData *data)
|
||||
ArgumentsObject::initData(ArgumentsData *data)
|
||||
{
|
||||
JS_ASSERT(getSlot(DATA_SLOT).isUndefined());
|
||||
setSlot(DATA_SLOT, PrivateValue(data));
|
||||
data->callee.setObject(callee);
|
||||
initSlot(DATA_SLOT, PrivateValue(data));
|
||||
}
|
||||
|
||||
inline ArgumentsData *
|
||||
@ -100,7 +99,7 @@ ArgumentsObject::element(uint32 i) const
|
||||
inline const js::Value *
|
||||
ArgumentsObject::elements() const
|
||||
{
|
||||
return data()->slots;
|
||||
return Valueify(data()->slots);
|
||||
}
|
||||
|
||||
inline void
|
||||
@ -150,7 +149,7 @@ NormalArgumentsObject::callee() const
|
||||
inline void
|
||||
NormalArgumentsObject::clearCallee()
|
||||
{
|
||||
data()->callee = MagicValue(JS_ARGS_HOLE);
|
||||
data()->callee.set(compartment(), MagicValue(JS_ARGS_HOLE));
|
||||
}
|
||||
|
||||
} // namespace js
|
||||
|
@ -92,13 +92,13 @@ struct ArgumentsData
|
||||
* arguments.callee, or MagicValue(JS_ARGS_HOLE) if arguments.callee has
|
||||
* been modified.
|
||||
*/
|
||||
js::Value callee;
|
||||
HeapValue callee;
|
||||
|
||||
/*
|
||||
* Values of the arguments for this object, or MagicValue(JS_ARGS_HOLE) if
|
||||
* the indexed argument has been modified.
|
||||
*/
|
||||
js::Value slots[1];
|
||||
HeapValue slots[1];
|
||||
};
|
||||
|
||||
/*
|
||||
@ -183,9 +183,9 @@ class ArgumentsObject : public ::JSObject
|
||||
friend struct mjit::ic::GetElementIC;
|
||||
#endif
|
||||
|
||||
void setInitialLength(uint32 length);
|
||||
void initInitialLength(uint32 length);
|
||||
|
||||
void setCalleeAndData(JSObject &callee, ArgumentsData *data);
|
||||
void initData(ArgumentsData *data);
|
||||
|
||||
public:
|
||||
/* Create an arguments object for the given callee function. */
|
||||
|
@ -73,6 +73,13 @@ CallObject::setCallee(JSObject *callee)
|
||||
setFixedSlot(CALLEE_SLOT, js::ObjectOrNullValue(callee));
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::initCallee(JSObject *callee)
|
||||
{
|
||||
JS_ASSERT_IF(callee, callee->isFunction());
|
||||
initFixedSlot(CALLEE_SLOT, js::ObjectOrNullValue(callee));
|
||||
}
|
||||
|
||||
inline JSObject *
|
||||
CallObject::getCallee() const
|
||||
{
|
||||
@ -99,6 +106,13 @@ CallObject::setArguments(const js::Value &v)
|
||||
setFixedSlot(ARGUMENTS_SLOT, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::initArguments(const js::Value &v)
|
||||
{
|
||||
JS_ASSERT(!isForEval());
|
||||
initFixedSlot(ARGUMENTS_SLOT, v);
|
||||
}
|
||||
|
||||
inline const js::Value &
|
||||
CallObject::arg(uintN i) const
|
||||
{
|
||||
@ -113,6 +127,13 @@ CallObject::setArg(uintN i, const js::Value &v)
|
||||
setSlot(RESERVED_SLOTS + i, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::initArgUnchecked(uintN i, const js::Value &v)
|
||||
{
|
||||
JS_ASSERT(i < getCalleeFunction()->nargs);
|
||||
initSlotUnchecked(RESERVED_SLOTS + i, v);
|
||||
}
|
||||
|
||||
inline const js::Value &
|
||||
CallObject::var(uintN i) const
|
||||
{
|
||||
@ -131,29 +152,38 @@ CallObject::setVar(uintN i, const js::Value &v)
|
||||
setSlot(RESERVED_SLOTS + fun->nargs + i, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::initVarUnchecked(uintN i, const js::Value &v)
|
||||
{
|
||||
JSFunction *fun = getCalleeFunction();
|
||||
JS_ASSERT(fun->nargs == fun->script()->bindings.countArgs());
|
||||
JS_ASSERT(i < fun->script()->bindings.countVars());
|
||||
initSlotUnchecked(RESERVED_SLOTS + fun->nargs + i, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::copyValues(uintN nargs, Value *argv, uintN nvars, Value *slots)
|
||||
{
|
||||
JS_ASSERT(numSlots() >= RESERVED_SLOTS + nargs + nvars);
|
||||
copySlotRange(RESERVED_SLOTS, argv, nargs);
|
||||
copySlotRange(RESERVED_SLOTS + nargs, slots, nvars);
|
||||
copySlotRange(RESERVED_SLOTS, argv, nargs, true);
|
||||
copySlotRange(RESERVED_SLOTS + nargs, slots, nvars, true);
|
||||
}
|
||||
|
||||
inline js::Value *
|
||||
inline js::HeapValueArray
|
||||
CallObject::argArray()
|
||||
{
|
||||
js::DebugOnly<JSFunction*> fun = getCalleeFunction();
|
||||
JS_ASSERT(hasContiguousSlots(RESERVED_SLOTS, fun->nargs));
|
||||
return getSlotAddress(RESERVED_SLOTS);
|
||||
return HeapValueArray(getSlotAddress(RESERVED_SLOTS));
|
||||
}
|
||||
|
||||
inline js::Value *
|
||||
inline js::HeapValueArray
|
||||
CallObject::varArray()
|
||||
{
|
||||
JSFunction *fun = getCalleeFunction();
|
||||
JS_ASSERT(hasContiguousSlots(RESERVED_SLOTS + fun->nargs,
|
||||
fun->script()->bindings.countVars()));
|
||||
return getSlotAddress(RESERVED_SLOTS + fun->nargs);
|
||||
return HeapValueArray(getSlotAddress(RESERVED_SLOTS + fun->nargs));
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -83,7 +83,7 @@ CallObject::create(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObje
|
||||
return NULL;
|
||||
|
||||
#ifdef DEBUG
|
||||
for (Shape::Range r = obj->lastProp; !r.empty(); r.popFront()) {
|
||||
for (Shape::Range r = obj->lastProperty(); !r.empty(); r.popFront()) {
|
||||
const Shape &s = r.front();
|
||||
if (s.slot != SHAPE_INVALID_SLOT) {
|
||||
JS_ASSERT(s.slot + 1 == obj->slotSpan());
|
||||
@ -93,7 +93,7 @@ CallObject::create(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObje
|
||||
#endif
|
||||
|
||||
CallObject &callobj = obj->asCall();
|
||||
callobj.setCallee(callee);
|
||||
callobj.initCallee(callee);
|
||||
return &callobj;
|
||||
}
|
||||
|
||||
|
@ -78,28 +78,32 @@ class CallObject : public ::JSObject
|
||||
* invocation, or null if it was created for a strict mode eval frame.
|
||||
*/
|
||||
inline JSObject *getCallee() const;
|
||||
inline JSFunction *getCalleeFunction() const;
|
||||
inline JSFunction *getCalleeFunction() const;
|
||||
inline void setCallee(JSObject *callee);
|
||||
inline void initCallee(JSObject *callee);
|
||||
|
||||
/* Returns the callee's arguments object. */
|
||||
inline const js::Value &getArguments() const;
|
||||
inline void setArguments(const js::Value &v);
|
||||
inline void initArguments(const js::Value &v);
|
||||
|
||||
/* Returns the formal argument at the given index. */
|
||||
inline const js::Value &arg(uintN i) const;
|
||||
inline void setArg(uintN i, const js::Value &v);
|
||||
inline void initArgUnchecked(uintN i, const js::Value &v);
|
||||
|
||||
/* Returns the variable at the given index. */
|
||||
inline const js::Value &var(uintN i) const;
|
||||
inline void setVar(uintN i, const js::Value &v);
|
||||
inline void initVarUnchecked(uintN i, const js::Value &v);
|
||||
|
||||
/*
|
||||
* Get the actual arrays of arguments and variables. Only call if type
|
||||
* inference is enabled, where we ensure that call object variables are in
|
||||
* contiguous slots (see NewCallObject).
|
||||
*/
|
||||
inline js::Value *argArray();
|
||||
inline js::Value *varArray();
|
||||
inline js::HeapValueArray argArray();
|
||||
inline js::HeapValueArray varArray();
|
||||
|
||||
inline void copyValues(uintN nargs, Value *argv, uintN nvars, Value *slots);
|
||||
};
|
||||
|
@ -223,7 +223,7 @@ BreakpointSite::clearTrap(JSContext *cx, BreakpointSiteMap::Enum *e,
|
||||
*closurep = trapClosure;
|
||||
|
||||
trapHandler = NULL;
|
||||
trapClosure.setUndefined();
|
||||
trapClosure = UndefinedValue();
|
||||
if (enabledCount == 0) {
|
||||
*pc = realOpcode;
|
||||
if (!cx->runtime->gcRunning) {
|
||||
@ -483,12 +483,12 @@ Debugger::slowPathOnLeaveFrame(JSContext *cx)
|
||||
bool
|
||||
Debugger::wrapDebuggeeValue(JSContext *cx, Value *vp)
|
||||
{
|
||||
assertSameCompartment(cx, object);
|
||||
assertSameCompartment(cx, object.get());
|
||||
|
||||
if (vp->isObject()) {
|
||||
JSObject *obj = &vp->toObject();
|
||||
|
||||
CellWeakMap::AddPtr p = objects.lookupForAdd(obj);
|
||||
ObjectWeakMap::AddPtr p = objects.lookupForAdd(obj);
|
||||
if (p) {
|
||||
vp->setObject(*p->value);
|
||||
} else {
|
||||
@ -517,7 +517,7 @@ Debugger::wrapDebuggeeValue(JSContext *cx, Value *vp)
|
||||
bool
|
||||
Debugger::unwrapDebuggeeValue(JSContext *cx, Value *vp)
|
||||
{
|
||||
assertSameCompartment(cx, object, *vp);
|
||||
assertSameCompartment(cx, object.get(), *vp);
|
||||
if (vp->isObject()) {
|
||||
JSObject *dobj = &vp->toObject();
|
||||
if (dobj->getClass() != &DebuggerObject_class) {
|
||||
@ -1005,9 +1005,9 @@ Debugger::onSingleStep(JSContext *cx, Value *vp)
|
||||
/*** Debugger JSObjects **************************************************************************/
|
||||
|
||||
void
|
||||
Debugger::markKeysInCompartment(JSTracer *tracer, const CellWeakMap &map, bool scripts)
|
||||
Debugger::markKeysInCompartment(JSTracer *tracer)
|
||||
{
|
||||
JSCompartment *comp = tracer->context->runtime->gcCurrentCompartment;
|
||||
JSCompartment *comp = tracer->runtime->gcCurrentCompartment;
|
||||
JS_ASSERT(comp);
|
||||
|
||||
/*
|
||||
@ -1015,19 +1015,22 @@ Debugger::markKeysInCompartment(JSTracer *tracer, const CellWeakMap &map, bool s
|
||||
* enumerating WeakMap keys. However in this case we need access, so we
|
||||
* make a base-class reference. Range is public in HashMap.
|
||||
*/
|
||||
typedef HashMap<gc::Cell *, JSObject *, DefaultHasher<gc::Cell *>, RuntimeAllocPolicy> Map;
|
||||
const Map &storage = map;
|
||||
for (Map::Range r = storage.all(); !r.empty(); r.popFront()) {
|
||||
gc::Cell *key = r.front().key;
|
||||
if (key->compartment() == comp && IsAboutToBeFinalized(tracer->context, key)) {
|
||||
if (scripts) {
|
||||
js::gc::MarkScript(tracer, static_cast<JSScript *>(key),
|
||||
"cross-compartment WeakMap key");
|
||||
} else {
|
||||
js::gc::MarkObject(tracer, *static_cast<JSObject *>(key),
|
||||
"cross-compartment WeakMap key");
|
||||
}
|
||||
}
|
||||
typedef HashMap<HeapPtrObject, HeapPtrObject, DefaultHasher<HeapPtrObject>, RuntimeAllocPolicy>
|
||||
ObjectMap;
|
||||
const ObjectMap &objStorage = objects;
|
||||
for (ObjectMap::Range r = objStorage.all(); !r.empty(); r.popFront()) {
|
||||
const HeapPtrObject &key = r.front().key;
|
||||
if (key->compartment() == comp && IsAboutToBeFinalized(tracer->context, key))
|
||||
js::gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
|
||||
}
|
||||
|
||||
typedef HashMap<HeapPtrScript, HeapPtrObject, DefaultHasher<HeapPtrScript>, RuntimeAllocPolicy>
|
||||
ScriptMap;
|
||||
const ScriptMap &scriptStorage = scripts;
|
||||
for (ScriptMap::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
|
||||
const HeapPtrScript &key = r.front().key;
|
||||
if (key->compartment() == comp && IsAboutToBeFinalized(tracer->context, key))
|
||||
js::gc::MarkScript(tracer, key, "cross-compartment WeakMap key");
|
||||
}
|
||||
}
|
||||
|
|
||||
void
|
||||
Debugger::markCrossCompartmentDebuggerObjectReferents(JSTracer *tracer)
|
||||
{
|
||||
JSRuntime *rt = tracer->context->runtime;
|
||||
JSRuntime *rt = tracer->runtime;
|
||||
JSCompartment *comp = rt->gcCurrentCompartment;
|
||||
|
||||
/*
|
||||
@ -1066,10 +1069,8 @@ Debugger::markCrossCompartmentDebuggerObjectReferents(JSTracer *tracer)
|
||||
*/
|
||||
for (JSCList *p = &rt->debuggerList; (p = JS_NEXT_LINK(p)) != &rt->debuggerList;) {
|
||||
Debugger *dbg = Debugger::fromLinks(p);
|
||||
if (dbg->object->compartment() != comp) {
|
||||
markKeysInCompartment(tracer, dbg->objects, false);
|
||||
markKeysInCompartment(tracer, dbg->scripts, true);
|
||||
}
|
||||
if (dbg->object->compartment() != comp)
|
||||
dbg->markKeysInCompartment(tracer);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1128,7 +1129,7 @@ Debugger::markAllIteratively(GCMarker *trc)
|
||||
* - it isn't already marked
|
||||
* - it actually has hooks that might be called
|
||||
*/
|
||||
JSObject *dbgobj = dbg->toJSObject();
|
||||
const HeapPtrObject &dbgobj = dbg->toJSObject();
|
||||
if (comp && comp != dbgobj->compartment())
|
||||
continue;
|
||||
|
||||
@ -1138,7 +1139,7 @@ Debugger::markAllIteratively(GCMarker *trc)
|
||||
* obj could be reachable only via its live, enabled
|
||||
* debugger hooks, which may yet be called.
|
||||
*/
|
||||
MarkObject(trc, *dbgobj, "enabled Debugger");
|
||||
MarkObject(trc, dbgobj, "enabled Debugger");
|
||||
markedAny = true;
|
||||
dbgMarked = true;
|
||||
}
|
||||
@ -1151,9 +1152,9 @@ Debugger::markAllIteratively(GCMarker *trc)
|
||||
* The debugger and the script are both live.
|
||||
* Therefore the breakpoint handler is live.
|
||||
*/
|
||||
JSObject *handler = bp->getHandler();
|
||||
const HeapPtrObject &handler = bp->getHandler();
|
||||
if (IsAboutToBeFinalized(cx, handler)) {
|
||||
MarkObject(trc, *bp->getHandler(), "breakpoint handler");
|
||||
MarkObject(trc, bp->getHandler(), "breakpoint handler");
|
||||
markedAny = true;
|
||||
}
|
||||
}
|
||||
@ -1176,7 +1177,7 @@ void
|
||||
Debugger::trace(JSTracer *trc)
|
||||
{
|
||||
if (uncaughtExceptionHook)
|
||||
MarkObject(trc, *uncaughtExceptionHook, "hooks");
|
||||
MarkObject(trc, uncaughtExceptionHook, "hooks");
|
||||
|
||||
/*
|
||||
* Mark Debugger.Frame objects. These are all reachable from JS, because the
|
||||
@ -1187,9 +1188,9 @@ Debugger::trace(JSTracer *trc)
|
||||
* frames.)
|
||||
*/
|
||||
for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
|
||||
JSObject *frameobj = r.front().value;
|
||||
const HeapPtrObject &frameobj = r.front().value;
|
||||
JS_ASSERT(frameobj->getPrivate());
|
||||
MarkObject(trc, *frameobj, "live Debugger.Frame");
|
||||
MarkObject(trc, frameobj, "live Debugger.Frame");
|
||||
}
|
||||
|
||||
/* Trace the referent -> Debugger.Object weak map. */
|
||||
@ -1786,9 +1787,11 @@ GetScriptReferent(JSObject *obj)
|
||||
static void
|
||||
DebuggerScript_trace(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
if (!trc->context->runtime->gcCurrentCompartment) {
|
||||
if (!trc->runtime->gcCurrentCompartment) {
|
||||
/* This comes from a private pointer, so no barrier needed. */
|
||||
if (JSScript *script = GetScriptReferent(obj))
|
||||
MarkScript(trc, script, "Debugger.Script referent");
|
||||
MarkScriptUnbarriered(trc, script, "Debugger.Script referent");
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -1808,15 +1811,15 @@ Class DebuggerScript_class = {
|
||||
JSObject *
|
||||
Debugger::newDebuggerScript(JSContext *cx, JSScript *script)
|
||||
{
|
||||
assertSameCompartment(cx, object);
|
||||
assertSameCompartment(cx, object.get());
|
||||
|
||||
JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_SCRIPT_PROTO).toObject();
|
||||
JS_ASSERT(proto);
|
||||
JSObject *scriptobj = NewNonFunction<WithProto::Given>(cx, &DebuggerScript_class, proto, NULL);
|
||||
if (!scriptobj || !scriptobj->ensureClassReservedSlots(cx))
|
||||
return NULL;
|
||||
scriptobj->setPrivate(script);
|
||||
scriptobj->setReservedSlot(JSSLOT_DEBUGSCRIPT_OWNER, ObjectValue(*object));
|
||||
scriptobj->setPrivate(script);
|
||||
|
||||
return scriptobj;
|
||||
}
|
||||
@ -1824,9 +1827,9 @@ Debugger::newDebuggerScript(JSContext *cx, JSScript *script)
|
||||
JSObject *
|
||||
Debugger::wrapScript(JSContext *cx, JSScript *script)
|
||||
{
|
||||
assertSameCompartment(cx, object);
|
||||
assertSameCompartment(cx, object.get());
|
||||
JS_ASSERT(cx->compartment != script->compartment());
|
||||
CellWeakMap::AddPtr p = scripts.lookupForAdd(script);
|
||||
ScriptWeakMap::AddPtr p = scripts.lookupForAdd(script);
|
||||
if (!p) {
|
||||
JSObject *scriptobj = newDebuggerScript(cx, script);
|
||||
|
||||
@ -2857,9 +2860,13 @@ static JSFunctionSpec DebuggerFrame_methods[] = {
|
||||
static void
|
||||
DebuggerObject_trace(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
if (!trc->context->runtime->gcCurrentCompartment) {
|
||||
if (!trc->runtime->gcCurrentCompartment) {
|
||||
/*
|
||||
* There is a barrier on private pointers, so the Unbarriered marking
|
||||
* is okay.
|
||||
*/
|
||||
if (JSObject *referent = (JSObject *) obj->getPrivate())
|
||||
MarkObject(trc, *referent, "Debugger.Object referent");
|
||||
MarkObjectUnbarriered(trc, referent, "Debugger.Object referent");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -50,6 +50,7 @@
#include "jsweakmap.h"
#include "jswrapper.h"

#include "gc/Barrier.h"
#include "js/HashTable.h"
#include "vm/GlobalObject.h"

@ -81,9 +82,9 @@ class Debugger {

private:
JSCList link; /* See JSRuntime::debuggerList. */
JSObject *object; /* The Debugger object. Strong reference. */
HeapPtrObject object; /* The Debugger object. Strong reference. */
GlobalObjectSet debuggees; /* Debuggee globals. Cross-compartment weak references. */
JSObject *uncaughtExceptionHook; /* Strong reference. */
js::HeapPtrObject uncaughtExceptionHook; /* Strong reference. */
bool enabled;
JSCList breakpoints; /* cyclic list of all js::Breakpoints in this debugger */

@ -100,18 +101,17 @@ class Debugger {
* that way, but since stack frames are not gc-things, the implementation
* has to be different.
*/
typedef HashMap<StackFrame *, JSObject *, DefaultHasher<StackFrame *>, RuntimeAllocPolicy>
typedef HashMap<StackFrame *, HeapPtrObject, DefaultHasher<StackFrame *>, RuntimeAllocPolicy>
FrameMap;
FrameMap frames;

typedef WeakMap<gc::Cell *, JSObject *, DefaultHasher<gc::Cell *>, CrossCompartmentMarkPolicy>
CellWeakMap;

/* The map from debuggee objects to their Debugger.Object instances. */
CellWeakMap objects;
typedef WeakMap<HeapPtrObject, HeapPtrObject> ObjectWeakMap;
ObjectWeakMap objects;

/* An ephemeral map from JSScript* to Debugger.Script instances. */
CellWeakMap scripts;
typedef WeakMap<HeapPtrScript, HeapPtrObject> ScriptWeakMap;
ScriptWeakMap scripts;

bool addDebuggeeGlobal(JSContext *cx, GlobalObject *obj);
void removeDebuggeeGlobal(JSContext *cx, GlobalObject *global,
@ -167,7 +167,7 @@ class Debugger {
static void traceObject(JSTracer *trc, JSObject *obj);
void trace(JSTracer *trc);
static void finalize(JSContext *cx, JSObject *obj);
static void markKeysInCompartment(JSTracer *tracer, const CellWeakMap &map, bool scripts);
void markKeysInCompartment(JSTracer *tracer);

static Class jsclass;

@ -229,7 +229,7 @@ class Debugger {
~Debugger();

bool init(JSContext *cx);
inline JSObject *toJSObject() const;
inline const js::HeapPtrObject &toJSObject() const;
static inline Debugger *fromJSObject(JSObject *obj);
static Debugger *fromChildJSObject(JSObject *obj);

@ -363,7 +363,7 @@ class BreakpointSite {
JSCList breakpoints; /* cyclic list of all js::Breakpoints at this instruction */
size_t enabledCount; /* number of breakpoints in the list that are enabled */
JSTrapHandler trapHandler; /* jsdbgapi trap state */
Value trapClosure;
HeapValue trapClosure;

bool recompile(JSContext *cx, bool forTrap);

@ -408,7 +408,7 @@ class Breakpoint {
Debugger * const debugger;
BreakpointSite * const site;
private:
JSObject *handler;
js::HeapPtrObject handler;
JSCList debuggerLinks;
JSCList siteLinks;

@ -419,7 +419,7 @@ class Breakpoint {
void destroy(JSContext *cx, BreakpointSiteMap::Enum *e = NULL);
Breakpoint *nextInDebugger();
Breakpoint *nextInSite();
JSObject *getHandler() const { return handler; }
const HeapPtrObject &getHandler() const { return handler; }
};

Debugger *
@ -437,7 +437,7 @@ Debugger::firstBreakpoint() const
return Breakpoint::fromDebuggerLinks(JS_NEXT_LINK(&breakpoints));
}

JSObject *
const js::HeapPtrObject &
Debugger::toJSObject() const
{
JS_ASSERT(object);

102
js/src/vm/GlobalObject-inl.h
Normal file
102
js/src/vm/GlobalObject-inl.h
Normal file
@ -0,0 +1,102 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey global object code.
*
* The Initial Developer of the Original Code is
* the Mozilla Foundation.
* Portions created by the Initial Developer are Copyright (C) 2011
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Jeff Walden <jwalden+code@mit.edu> (original author)
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */

#ifndef GlobalObject_inl_h___
#define GlobalObject_inl_h___

namespace js {

inline void
GlobalObject::setFlags(int32 flags)
{
setSlot(FLAGS, Int32Value(flags));
}

inline void
GlobalObject::initFlags(int32 flags)
{
initSlot(FLAGS, Int32Value(flags));
}

inline void
GlobalObject::setDetailsForKey(JSProtoKey key, JSObject *ctor, JSObject *proto)
{
HeapValue &ctorVal = getSlotRef(key);
HeapValue &protoVal = getSlotRef(JSProto_LIMIT + key);
HeapValue &visibleVal = getSlotRef(2 * JSProto_LIMIT + key);
JS_ASSERT(ctorVal.isUndefined());
JS_ASSERT(protoVal.isUndefined());
JS_ASSERT(visibleVal.isUndefined());
ctorVal = ObjectValue(*ctor);
protoVal = ObjectValue(*proto);
visibleVal = ctorVal;
}

inline void
GlobalObject::setObjectClassDetails(JSFunction *ctor, JSObject *proto)
{
setDetailsForKey(JSProto_Object, ctor, proto);
}

inline void
GlobalObject::setFunctionClassDetails(JSFunction *ctor, JSObject *proto)
{
setDetailsForKey(JSProto_Function, ctor, proto);
}

void
GlobalObject::setThrowTypeError(JSFunction *fun)
{
HeapValue &v = getSlotRef(THROWTYPEERROR);
JS_ASSERT(v.isUndefined());
v = ObjectValue(*fun);
}

void
GlobalObject::setOriginalEval(JSObject *evalobj)
{
HeapValue &v = getSlotRef(EVAL);
JS_ASSERT(v.isUndefined());
v = ObjectValue(*evalobj);
}

} // namespace js

#endif
@ -47,9 +47,11 @@

#include "builtin/RegExp.h"
#include "frontend/BytecodeEmitter.h"
#include "vm/GlobalObject-inl.h"

#include "jsobjinlines.h"
#include "vm/RegExpObject-inl.h"
#include "vm/RegExpStatics-inl.h"

using namespace js;

@ -134,7 +136,7 @@ GlobalObject::initFunctionAndObjectClasses(JSContext *cx)
script->noScriptRval = true;
script->code[0] = JSOP_STOP;
script->code[1] = SRC_NULL;
functionProto->setScript(script);
functionProto->initScript(script);
functionProto->getType(cx)->interpretedFunction = functionProto;
script->hasFunction = true;
}
@ -258,8 +260,8 @@ GlobalObject::create(JSContext *cx, Class *clasp)
JSObject *res = RegExpStatics::create(cx, globalObj);
if (!res)
return NULL;
globalObj->setSlot(REGEXP_STATICS, ObjectValue(*res));
globalObj->setFlags(0);
globalObj->initSlot(REGEXP_STATICS, ObjectValue(*res));
globalObj->initFlags(0);

return globalObj;
}
@ -337,7 +339,7 @@ GlobalObject::clear(JSContext *cx)
bool
GlobalObject::isRuntimeCodeGenEnabled(JSContext *cx)
{
Value &v = getSlotRef(RUNTIME_CODEGEN_ENABLED);
HeapValue &v = getSlotRef(RUNTIME_CODEGEN_ENABLED);
if (v.isUndefined()) {
JSSecurityCallbacks *callbacks = JS_GetSecurityCallbacks(cx);

@ -345,8 +347,9 @@ GlobalObject::isRuntimeCodeGenEnabled(JSContext *cx)
* If there are callbacks, make sure that the CSP callback is installed
* and that it permits runtime code generation, then cache the result.
*/
v = BooleanValue((!callbacks || !callbacks->contentSecurityPolicyAllows) ||
callbacks->contentSecurityPolicyAllows(cx));
v.set(compartment(),
BooleanValue((!callbacks || !callbacks->contentSecurityPolicyAllows) ||
callbacks->contentSecurityPolicyAllows(cx)));
}
return !v.isFalse();
}

@ -120,9 +120,8 @@ class GlobalObject : public ::JSObject {

static const int32 FLAGS_CLEARED = 0x1;

void setFlags(int32 flags) {
setSlot(FLAGS, Int32Value(flags));
}
inline void setFlags(int32 flags);
inline void initFlags(int32 flags);

friend JSObject *
::js_InitObjectClass(JSContext *cx, JSObject *obj);
@ -133,37 +132,13 @@ class GlobalObject : public ::JSObject {
JSObject *
initFunctionAndObjectClasses(JSContext *cx);

void setDetailsForKey(JSProtoKey key, JSObject *ctor, JSObject *proto) {
Value &ctorVal = getSlotRef(key);
Value &protoVal = getSlotRef(JSProto_LIMIT + key);
Value &visibleVal = getSlotRef(2 * JSProto_LIMIT + key);
JS_ASSERT(ctorVal.isUndefined());
JS_ASSERT(protoVal.isUndefined());
JS_ASSERT(visibleVal.isUndefined());
ctorVal = ObjectValue(*ctor);
protoVal = ObjectValue(*proto);
visibleVal = ctorVal;
}
inline void setDetailsForKey(JSProtoKey key, JSObject *ctor, JSObject *proto);
inline void setObjectClassDetails(JSFunction *ctor, JSObject *proto);
inline void setFunctionClassDetails(JSFunction *ctor, JSObject *proto);

void setObjectClassDetails(JSFunction *ctor, JSObject *proto) {
setDetailsForKey(JSProto_Object, ctor, proto);
}
inline void setThrowTypeError(JSFunction *fun);

void setFunctionClassDetails(JSFunction *ctor, JSObject *proto) {
setDetailsForKey(JSProto_Function, ctor, proto);
}

void setThrowTypeError(JSFunction *fun) {
Value &v = getSlotRef(THROWTYPEERROR);
JS_ASSERT(v.isUndefined());
v.setObject(*fun);
}

void setOriginalEval(JSObject *evalobj) {
Value &v = getSlotRef(EVAL);
JS_ASSERT(v.isUndefined());
v.setObject(*evalobj);
}
inline void setOriginalEval(JSObject *evalobj);

Value getConstructor(JSProtoKey key) const {
JS_ASSERT(key <= JSProto_LIMIT);
@ -298,7 +273,7 @@ class GlobalObject : public ::JSObject {
}

JSObject *getOrCreateGeneratorPrototype(JSContext *cx) {
Value &v = getSlotRef(GENERATOR_PROTO);
HeapValue &v = getSlotRef(GENERATOR_PROTO);
if (!v.isObject() && !js_InitIteratorClasses(cx, this))
return NULL;
JS_ASSERT(v.toObject().isGenerator());

@ -130,7 +130,7 @@ RegExpObject::purge(JSContext *cx)
{
if (RegExpPrivate *rep = getPrivate()) {
rep->decref(cx);
setPrivate(NULL);
privateData = NULL;
}
}

@ -147,13 +147,14 @@ inline bool
RegExpObject::init(JSContext *cx, JSLinearString *source, RegExpFlag flags)
{
if (nativeEmpty()) {
const js::Shape **shapep = &cx->compartment->initialRegExpShape;
if (!*shapep) {
*shapep = assignInitialShape(cx);
if (!*shapep)
const js::Shape *shape = cx->compartment->initialRegExpShape;
if (!shape) {
shape = assignInitialShape(cx);
if (!shape)
return false;
cx->compartment->initialRegExpShape = shape;
}
setLastProperty(*shapep);
setLastProperty(shape);
JS_ASSERT(!nativeEmpty());
}

@ -177,6 +178,54 @@ RegExpObject::init(JSContext *cx, JSLinearString *source, RegExpFlag flags)
return true;
}

inline void
RegExpObject::setLastIndex(const Value &v)
{
setSlot(LAST_INDEX_SLOT, v);
}

inline void
RegExpObject::setLastIndex(double d)
{
setSlot(LAST_INDEX_SLOT, NumberValue(d));
}

inline void
RegExpObject::zeroLastIndex()
{
setSlot(LAST_INDEX_SLOT, Int32Value(0));
}

inline void
RegExpObject::setSource(JSLinearString *source)
{
setSlot(SOURCE_SLOT, StringValue(source));
}

inline void
RegExpObject::setIgnoreCase(bool enabled)
{
setSlot(IGNORE_CASE_FLAG_SLOT, BooleanValue(enabled));
}

inline void
RegExpObject::setGlobal(bool enabled)
{
setSlot(GLOBAL_FLAG_SLOT, BooleanValue(enabled));
}

inline void
RegExpObject::setMultiline(bool enabled)
{
setSlot(MULTILINE_FLAG_SLOT, BooleanValue(enabled));
}

inline void
RegExpObject::setSticky(bool enabled)
{
setSlot(STICKY_FLAG_SLOT, BooleanValue(enabled));
}

/* RegExpPrivate inlines. */

inline AlreadyIncRefed<RegExpPrivate>

@ -46,6 +46,7 @@
#include "jsstrinlines.h"

#include "vm/RegExpObject-inl.h"
#include "vm/RegExpStatics-inl.h"

#ifdef JS_TRACER
#include "jstracer.h"