Bug 938950 - Don't provide full access to compartment/zone/runtime during Ion compilation, r=jandem.

Brian Hackett 2013-11-18 13:05:31 -07:00
parent c390844165
commit f24489d2d4
40 changed files with 149 additions and 142 deletions
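The diff below routes every runtime, compartment, and zone access made during Ion compilation through new read-only wrapper classes, CompileRuntime, CompileCompartment, and CompileZone, declared in the new jit/CompileWrappers.h (the wrapper sources themselves are not part of this listing). As a reading aid, here is a rough sketch of the shape those wrappers appear to have, reconstructed only from the accessors used in the hunks below; return types, forward declarations, and anything not named in the diff are assumptions, not the actual declarations.

    // Hypothetical reconstruction for orientation only -- not the real
    // jit/CompileWrappers.h. Return types and layout are inferred guesses.
    struct JSRuntime;
    struct JSCompartment;
    struct JSAtomState;

    namespace js {

    class Nursery;
    class StaticStrings;
    class SPSProfiler;

    namespace jit {

    class JitRuntime;
    class JitCompartment;
    class CompileZone;

    // Thin read-only view of a JSRuntime, exposing only what the compiler needs.
    class CompileRuntime
    {
      public:
        static CompileRuntime *get(JSRuntime *rt);

        // addressOf* accessors hand out raw addresses for the code generator to
        // embed, without letting compilation mutate the runtime itself.
        const void *addressOfInterrupt();
        const void *addressOfIonTop();
        const void *addressOfIonStackLimit();

        const JSAtomState &names();
        const StaticStrings &staticStrings();
        const Nursery &gcNursery();
        const JitRuntime *jitRuntime();
        SPSProfiler &spsProfiler();
        bool jitSupportsFloatingPoint();
        bool hadOutOfMemory();
        bool onMainThread();
    };

    // Matching read-only view of a JSCompartment.
    class CompileCompartment
    {
      public:
        static CompileCompartment *get(JSCompartment *comp);

        CompileRuntime *runtime();
        CompileZone *zone();
        const void *addressOfEnumerators();
        JitCompartment *jitCompartment();
    };

    } // namespace jit
    } // namespace js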

@ -128,7 +128,7 @@ struct Zone : public JS::shadow::Zone,
void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);
const bool *AddressOfNeedsBarrier() const {
const bool *addressOfNeedsBarrier() const {
return &needsBarrier_;
}

@ -1934,7 +1934,7 @@ class FunctionCompiler
graph_ = lifo_.new_<MIRGraph>(alloc_);
info_ = lifo_.new_<CompileInfo>(locals_.count(), SequentialExecution);
mirGen_ = lifo_.new_<MIRGenerator>(cx()->compartment(), alloc_, graph_, info_);
mirGen_ = lifo_.new_<MIRGenerator>(CompileCompartment::get(cx()->compartment()), alloc_, graph_, info_);
if (!newBlock(/* pred = */ nullptr, &curBlock_, fn_))
return false;

@ -9214,7 +9214,7 @@ DoTypeOfFallback(JSContext *cx, BaselineFrame *frame, ICTypeOf_Fallback *stub, H
{
FallbackICSpew(cx, stub, "TypeOf");
JSType type = js::TypeOfValue(val);
RootedString string(cx, TypeName(type, cx->runtime()));
RootedString string(cx, TypeName(type, cx->runtime()->atomState));
res.setString(string);

@ -749,7 +749,9 @@ BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc)
SrcNoteLineScanner scanner(script->notes(), script->lineno);
JSRuntime *rt = script->runtimeFromMainThread();
IonContext ictx(rt, script->compartment(), nullptr);
IonContext ictx(CompileRuntime::get(rt),
CompileCompartment::get(script->compartment()),
nullptr);
AutoFlushCache afc("DebugTraps", rt->jitRuntime());
for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {

@ -712,7 +712,7 @@ CodeGenerator::visitIntToString(LIntToString *lir)
masm.branch32(Assembler::AboveOrEqual, input, Imm32(StaticStrings::INT_STATIC_LIMIT),
ool->entry());
masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings().intStaticTable), output);
masm.loadPtr(BaseIndex(output, input, ScalePointer), output);
masm.bind(ool->rejoin());
@ -741,7 +741,7 @@ CodeGenerator::visitDoubleToString(LDoubleToString *lir)
masm.branch32(Assembler::AboveOrEqual, temp, Imm32(StaticStrings::INT_STATIC_LIMIT),
ool->entry());
masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings().intStaticTable), output);
masm.loadPtr(BaseIndex(output, temp, ScalePointer), output);
masm.bind(ool->rejoin());
@ -798,7 +798,6 @@ CodeGenerator::visitLambda(LLambda *lir)
if (!ool)
return false;
JS_ASSERT(gen->compartment == info.fun->compartment());
JS_ASSERT(!info.singletonType);
masm.newGCThing(output, info.fun, ool->entry());
@ -1584,7 +1583,7 @@ CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
if (!addOutOfLineCode(ool))
return false;
Nursery &nursery = GetIonContext()->runtime->gcNursery;
const Nursery &nursery = GetIonContext()->runtime->gcNursery();
if (lir->object()->isConstant()) {
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
@ -1616,7 +1615,7 @@ CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
Nursery &nursery = GetIonContext()->runtime->gcNursery;
const Nursery &nursery = GetIonContext()->runtime->gcNursery();
if (lir->object()->isConstant()) {
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
@ -1645,7 +1644,7 @@ CodeGenerator::visitPostWriteBarrierAllSlots(LPostWriteBarrierAllSlots *lir)
if (!addOutOfLineCode(ool))
return false;
Nursery &nursery = GetIonContext()->runtime->gcNursery;
const Nursery &nursery = GetIonContext()->runtime->gcNursery();
if (lir->object()->isConstant()) {
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
@ -2499,11 +2498,9 @@ CodeGenerator::visitCheckOverRecursed(LCheckOverRecursed *lir)
// Ion may legally place frames very close to the limit. Calling additional
// C functions may then violate the limit without any checking.
JSRuntime *rt = GetIonContext()->runtime;
// Since Ion frames exist on the C stack, the stack limit may be
// dynamically set by JS_SetThreadStackLimit() and JS_SetNativeStackQuota().
uintptr_t *limitAddr = &rt->mainThread.ionStackLimit;
const void *limitAddr = GetIonContext()->runtime->addressOfIonStackLimit();
CheckOverRecursedFailure *ool = new CheckOverRecursedFailure(lir);
if (!addOutOfLineCode(ool))
@ -3866,7 +3863,7 @@ CodeGenerator::visitMathFunctionD(LMathFunctionD *ins)
FloatRegister input = ToFloatRegister(ins->input());
JS_ASSERT(ToFloatRegister(ins->output()) == ReturnFloatReg);
MathCache *mathCache = ins->mir()->cache();
const MathCache *mathCache = ins->mir()->cache();
masm.setupUnalignedABICall(mathCache ? 2 : 1, temp);
if (mathCache) {
@ -4420,7 +4417,7 @@ CodeGenerator::emitConcat(LInstruction *lir, Register lhs, Register rhs, Registe
return false;
ExecutionMode mode = gen->info().executionMode();
IonCode *stringConcatStub = gen->jitCompartment()->stringConcatStub(mode);
IonCode *stringConcatStub = gen->compartment->jitCompartment()->stringConcatStub(mode);
masm.call(stringConcatStub);
masm.branchTestPtr(Assembler::Zero, output, output, ool->entry());
@ -4690,7 +4687,7 @@ CodeGenerator::visitFromCharCode(LFromCharCode *lir)
masm.branch32(Assembler::AboveOrEqual, code, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
ool->entry());
masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings.unitStaticTable), output);
masm.movePtr(ImmPtr(&GetIonContext()->runtime->staticStrings().unitStaticTable), output);
masm.loadPtr(BaseIndex(output, code, ScalePointer), output);
masm.bind(ool->rejoin());
@ -5345,7 +5342,7 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
JS_ASSERT(flags == JSITER_ENUMERATE);
// Fetch the most recent iterator and ensure it's not nullptr.
masm.loadPtr(AbsoluteAddress(&GetIonContext()->runtime->nativeIterCache.last), output);
masm.loadPtr(AbsoluteAddress(GetIonContext()->runtime->addressOfLastCachedNativeIterator()), output);
masm.branchTestPtr(Assembler::Zero, output, output, ool->entry());
// Load NativeIterator.
@ -5406,7 +5403,7 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
masm.or32(Imm32(JSITER_ACTIVE), Address(niTemp, offsetof(NativeIterator, flags)));
// Chain onto the active iterator stack.
masm.loadPtr(AbsoluteAddress(&gen->compartment->enumerators), temp1);
masm.loadPtr(AbsoluteAddress(gen->compartment->addressOfEnumerators()), temp1);
// ni->next = list
masm.storePtr(temp1, Address(niTemp, NativeIterator::offsetOfNext()));
@ -6733,7 +6730,7 @@ CodeGenerator::visitTypeOfV(LTypeOfV *lir)
Register output = ToRegister(lir->output());
Register tag = masm.splitTagForTest(value);
JSRuntime *rt = GetIonContext()->runtime;
const JSAtomState &names = GetIonContext()->runtime->names();
Label done;
OutOfLineTypeOfV *ool = nullptr;
@ -6750,36 +6747,36 @@ CodeGenerator::visitTypeOfV(LTypeOfV *lir)
// it's an object the result is always "object".
Label notObject;
masm.branchTestObject(Assembler::NotEqual, tag, &notObject);
masm.movePtr(ImmGCPtr(rt->atomState.object), output);
masm.movePtr(ImmGCPtr(names.object), output);
masm.jump(&done);
masm.bind(&notObject);
}
Label notNumber;
masm.branchTestNumber(Assembler::NotEqual, tag, &notNumber);
masm.movePtr(ImmGCPtr(rt->atomState.number), output);
masm.movePtr(ImmGCPtr(names.number), output);
masm.jump(&done);
masm.bind(&notNumber);
Label notUndefined;
masm.branchTestUndefined(Assembler::NotEqual, tag, &notUndefined);
masm.movePtr(ImmGCPtr(rt->atomState.undefined), output);
masm.movePtr(ImmGCPtr(names.undefined), output);
masm.jump(&done);
masm.bind(&notUndefined);
Label notNull;
masm.branchTestNull(Assembler::NotEqual, tag, &notNull);
masm.movePtr(ImmGCPtr(rt->atomState.object), output);
masm.movePtr(ImmGCPtr(names.object), output);
masm.jump(&done);
masm.bind(&notNull);
Label notBoolean;
masm.branchTestBoolean(Assembler::NotEqual, tag, &notBoolean);
masm.movePtr(ImmGCPtr(rt->atomState.boolean), output);
masm.movePtr(ImmGCPtr(names.boolean), output);
masm.jump(&done);
masm.bind(&notBoolean);
masm.movePtr(ImmGCPtr(rt->atomState.string), output);
masm.movePtr(ImmGCPtr(names.string), output);
masm.bind(&done);
if (ool)

@ -29,7 +29,7 @@ class CompilerRoot : public CompilerRootNode
: CompilerRootNode(nullptr)
{
if (ptr) {
JS_ASSERT(!UninlinedIsInsideNursery(GetIonContext()->runtime, ptr));
JS_ASSERT(!GetIonContext()->runtime->isInsideNursery(ptr));
setRoot(ptr);
}
}

@ -103,10 +103,10 @@ jit::MaybeGetIonContext()
}
IonContext::IonContext(JSContext *cx, TempAllocator *temp)
: runtime(cx->runtime()),
cx(cx),
compartment(cx->compartment()),
: cx(cx),
temp(temp),
runtime(CompileRuntime::get(cx->runtime())),
compartment(CompileCompartment::get(cx->compartment())),
prev_(CurrentIonContext()),
assemblerCount_(0)
{
@ -114,32 +114,32 @@ IonContext::IonContext(JSContext *cx, TempAllocator *temp)
}
IonContext::IonContext(ExclusiveContext *cx, TempAllocator *temp)
: runtime(cx->runtime_),
cx(nullptr),
compartment(nullptr),
: cx(nullptr),
temp(temp),
runtime(CompileRuntime::get(cx->runtime_)),
compartment(nullptr),
prev_(CurrentIonContext()),
assemblerCount_(0)
{
SetIonContext(this);
}
IonContext::IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp)
: runtime(rt),
cx(nullptr),
IonContext::IonContext(CompileRuntime *rt, CompileCompartment *comp, TempAllocator *temp)
: cx(nullptr),
temp(temp),
runtime(rt),
compartment(comp),
temp(temp),
prev_(CurrentIonContext()),
assemblerCount_(0)
{
SetIonContext(this);
}
IonContext::IonContext(JSRuntime *rt)
: runtime(rt),
cx(nullptr),
compartment(nullptr),
IonContext::IonContext(CompileRuntime *rt)
: cx(nullptr),
temp(nullptr),
runtime(rt),
compartment(nullptr),
prev_(CurrentIonContext()),
assemblerCount_(0)
{
@ -589,14 +589,14 @@ JitCompartment::sweep(FreeOp *fop)
}
IonCode *
JitRuntime::getBailoutTable(const FrameSizeClass &frameClass)
JitRuntime::getBailoutTable(const FrameSizeClass &frameClass) const
{
JS_ASSERT(frameClass != FrameSizeClass::None());
return bailoutTables_[frameClass.classId()];
}
IonCode *
JitRuntime::getVMWrapper(const VMFunction &f)
JitRuntime::getVMWrapper(const VMFunction &f) const
{
JS_ASSERT(functionWrappers_);
JS_ASSERT(functionWrappers_->initialized());
@ -1077,7 +1077,7 @@ IonScript::purgeCaches(Zone *zone)
return;
JSRuntime *rt = zone->runtimeFromMainThread();
IonContext ictx(rt);
IonContext ictx(CompileRuntime::get(rt));
AutoFlushCache afc("purgeCaches", rt->jitRuntime());
for (size_t i = 0; i < numCaches(); i++)
getCacheFromIndex(i).reset();
@ -1134,7 +1134,7 @@ jit::ToggleBarriers(JS::Zone *zone, bool needs)
if (!rt->hasJitRuntime())
return;
IonContext ictx(rt);
IonContext ictx(CompileRuntime::get(rt));
AutoFlushCache afc("ToggleBarriers", rt->jitRuntime());
for (gc::CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
@ -1644,7 +1644,9 @@ IonCompile(JSContext *cx, JSScript *script,
if (!constraints)
return AbortReason_Alloc;
IonBuilder *builder = alloc->new_<IonBuilder>((JSContext *) nullptr, cx->compartment(), temp, graph, constraints,
IonBuilder *builder = alloc->new_<IonBuilder>((JSContext *) nullptr,
CompileCompartment::get(cx->compartment()),
temp, graph, constraints,
&inspector, info, baselineFrame);
if (!builder)
return AbortReason_Alloc;
@ -2366,7 +2368,7 @@ jit::InvalidateAll(FreeOp *fop, Zone *zone)
for (JitActivationIterator iter(fop->runtime()); !iter.done(); ++iter) {
if (iter.activation()->compartment()->zone() == zone) {
IonContext ictx(fop->runtime());
IonContext ictx(CompileRuntime::get(fop->runtime()));
AutoFlushCache afc("InvalidateAll", fop->runtime()->jitRuntime());
IonSpew(IonSpew_Invalidate, "Invalidating all frames for GC");
InvalidateActivation(fop, iter.jitTop(), true);
@ -2612,7 +2614,7 @@ void
AutoFlushCache::updateTop(uintptr_t p, size_t len)
{
IonContext *ictx = MaybeGetIonContext();
JitRuntime *jrt = (ictx != nullptr) ? ictx->runtime->jitRuntime() : nullptr;
JitRuntime *jrt = (ictx != nullptr) ? const_cast<JitRuntime *>(ictx->runtime->jitRuntime()) : nullptr;
if (!jrt || !jrt->flusher())
JSC::ExecutableAllocator::cacheFlush((void*)p, len);
else

@ -15,6 +15,7 @@
#include "jscompartment.h"
#include "jit/CompileInfo.h"
#include "jit/CompileWrappers.h"
namespace js {
namespace jit {
@ -276,14 +277,22 @@ class IonContext
public:
IonContext(JSContext *cx, TempAllocator *temp);
IonContext(ExclusiveContext *cx, TempAllocator *temp);
IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp);
IonContext(JSRuntime *rt);
IonContext(CompileRuntime *rt, CompileCompartment *comp, TempAllocator *temp);
IonContext(CompileRuntime *rt);
~IonContext();
JSRuntime *runtime;
// Running context when executing on the main thread. Not available during
// compilation.
JSContext *cx;
JSCompartment *compartment;
// Allocator for temporary memory during compilation.
TempAllocator *temp;
// Wrappers with information about the current runtime/compartment for use
// during compilation.
CompileRuntime *runtime;
CompileCompartment *compartment;
int getNextAssemblerId() {
return assemblerCount_++;
}
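In practice, a compilation now builds its IonContext from these wrappers instead of raw JSRuntime/JSCompartment pointers, as the WorkerThread and ForkJoin hunks later in this commit do. A minimal sketch, assuming a caller that already holds a JSRuntime *rt, a JSScript *script, and a TempAllocator alloc (variable names here are hypothetical):

    // Illustration only -- mirrors the constructor calls added elsewhere in this commit.
    jit::IonContext ictx(jit::CompileRuntime::get(rt),
                         jit::CompileCompartment::get(script->compartment()),
                         &alloc);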

@ -2109,7 +2109,7 @@ jit::AnalyzeNewScriptProperties(JSContext *cx, HandleFunction fun,
types::CompilerConstraintList *constraints = types::NewCompilerConstraintList(temp);
BaselineInspector inspector(script);
IonBuilder builder(cx, cx->compartment(), &temp, &graph, constraints,
IonBuilder builder(cx, CompileCompartment::get(cx->compartment()), &temp, &graph, constraints,
&inspector, &info, /* baselineFrame = */ nullptr);
if (!builder.build()) {

@ -37,7 +37,7 @@ using namespace js::jit;
using mozilla::DebugOnly;
using mozilla::Maybe;
IonBuilder::IonBuilder(JSContext *analysisContext, JSCompartment *comp, TempAllocator *temp, MIRGraph *graph,
IonBuilder::IonBuilder(JSContext *analysisContext, CompileCompartment *comp, TempAllocator *temp, MIRGraph *graph,
types::CompilerConstraintList *constraints,
BaselineInspector *inspector, CompileInfo *info, BaselineFrame *baselineFrame,
size_t inliningDepth, uint32_t loopDepth)
@ -4967,7 +4967,7 @@ IonBuilder::jsop_call(uint32_t argc, bool constructing)
for (uint32_t i = 0; i < originals.length(); i++) {
JSFunction *fun = &originals[i]->as<JSFunction>();
if (fun->hasScript() && fun->nonLazyScript()->shouldCloneAtCallsite) {
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment, fun, script(), pc)) {
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), fun, script(), pc)) {
fun = clone;
hasClones = true;
}
@ -5028,7 +5028,7 @@ IonBuilder::testShouldDOMCall(types::TypeSet *inTypes,
// property, we can bake in a call to the bottom half of the DOM
// accessor
DOMInstanceClassMatchesProto instanceChecker =
GetDOMCallbacks(compartment->runtimeFromAnyThread())->instanceClassMatchesProto;
compartment->runtime()->DOMcallbacks()->instanceClassMatchesProto;
const JSJitInfo *jinfo = func->jitInfo();
if (jinfo->type != opType)
@ -5940,7 +5940,7 @@ ClassHasEffectlessLookup(const Class *clasp)
}
static bool
ClassHasResolveHook(JSCompartment *comp, const Class *clasp, PropertyName *name)
ClassHasResolveHook(CompileCompartment *comp, const Class *clasp, PropertyName *name)
{
if (clasp->resolve == JS_ResolveStub)
return false;
@ -5951,7 +5951,7 @@ ClassHasResolveHook(JSCompartment *comp, const Class *clasp, PropertyName *name)
}
if (clasp->resolve == (JSResolveOp)fun_resolve)
return FunctionHasResolveHook(comp->runtimeFromAnyThread(), name);
return FunctionHasResolveHook(comp->runtime()->names(), name);
return true;
}
@ -6200,9 +6200,9 @@ IonBuilder::getStaticName(JSObject *staticObject, PropertyName *name, bool *psuc
if (name == names().undefined)
return pushConstant(UndefinedValue());
if (name == names().NaN)
return pushConstant(compartment->runtimeFromAnyThread()->NaNValue);
return pushConstant(compartment->runtime()->NaNValue());
if (name == names().Infinity)
return pushConstant(compartment->runtimeFromAnyThread()->positiveInfinityValue);
return pushConstant(compartment->runtime()->positiveInfinityValue());
}
types::TypeObjectKey *staticType = types::TypeObjectKey::get(staticObject);

@ -205,7 +205,7 @@ class IonBuilder : public MIRGenerator
static int CmpSuccessors(const void *a, const void *b);
public:
IonBuilder(JSContext *analysisContext, JSCompartment *comp, TempAllocator *temp, MIRGraph *graph,
IonBuilder(JSContext *analysisContext, CompileCompartment *comp, TempAllocator *temp, MIRGraph *graph,
types::CompilerConstraintList *constraints,
BaselineInspector *inspector, CompileInfo *info, BaselineFrame *baselineFrame,
size_t inliningDepth = 0, uint32_t loopDepth = 0);
@ -734,7 +734,7 @@ class IonBuilder : public MIRGenerator
return callerBuilder_ != nullptr;
}
JSAtomState &names() { return compartment->runtimeFromAnyThread()->atomState; }
const JSAtomState &names() { return compartment->runtime()->names(); }
private:
bool init();

@ -636,12 +636,10 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
int thingSize = int(gc::Arena::thingSize(allocKind));
Zone *zone = GetIonContext()->compartment->zone();
#ifdef JS_GC_ZEAL
// Don't execute the inline path if gcZeal is active.
branch32(Assembler::NotEqual,
AbsoluteAddress(&GetIonContext()->runtime->gcZeal_), Imm32(0),
AbsoluteAddress(GetIonContext()->runtime->addressOfGCZeal()), Imm32(0),
fail);
#endif
@ -651,7 +649,7 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
jump(fail);
#ifdef JSGC_GENERATIONAL
Nursery &nursery = GetIonContext()->runtime->gcNursery;
const Nursery &nursery = GetIonContext()->runtime->gcNursery();
if (nursery.isEnabled() &&
allocKind <= gc::FINALIZE_OBJECT_LAST &&
initialHeap != gc::TenuredHeap)
@ -668,17 +666,17 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
}
#endif // JSGC_GENERATIONAL
CompileZone *zone = GetIonContext()->compartment->zone();
// Inline FreeSpan::allocate.
// There is always exactly one FreeSpan per allocKind per JSCompartment.
// If a FreeSpan is replaced, its members are updated in the freeLists table,
// which the code below always re-reads.
gc::FreeSpan *list = const_cast<gc::FreeSpan *>
(zone->allocator.arenas.getFreeList(allocKind));
loadPtr(AbsoluteAddress(&list->first), result);
branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result, fail);
loadPtr(AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)), result);
branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(zone->addressOfFreeListLast(allocKind)), result, fail);
addPtr(Imm32(thingSize), result);
storePtr(result, AbsoluteAddress(&list->first));
storePtr(result, AbsoluteAddress(zone->addressOfFreeListFirst(allocKind)));
subPtr(Imm32(thingSize), result);
}
@ -870,7 +868,7 @@ void
MacroAssembler::checkInterruptFlagsPar(const Register &tempReg,
Label *fail)
{
movePtr(ImmPtr(&GetIonContext()->runtime->interrupt), tempReg);
movePtr(ImmPtr(GetIonContext()->runtime->addressOfInterrupt()), tempReg);
load32(Address(tempReg, 0), tempReg);
branchTest32(Assembler::NonZero, tempReg, tempReg, fail);
}

@ -373,10 +373,10 @@ class MacroAssembler : public MacroAssemblerSpecific
}
void loadJSContext(const Register &dest) {
loadPtr(AbsoluteAddress(&GetIonContext()->runtime->mainThread.ionJSContext), dest);
loadPtr(AbsoluteAddress(GetIonContext()->runtime->addressOfJSContext()), dest);
}
void loadJitActivation(const Register &dest) {
loadPtr(AbsoluteAddress(GetIonContext()->runtime->mainThread.addressOfActivation()), dest);
loadPtr(AbsoluteAddress(GetIonContext()->runtime->addressOfActivation()), dest);
}
template<typename T>
@ -627,8 +627,8 @@ class MacroAssembler : public MacroAssemblerSpecific
void branchTestNeedsBarrier(Condition cond, const Register &scratch, Label *label) {
JS_ASSERT(cond == Zero || cond == NonZero);
JS::Zone *zone = GetIonContext()->compartment->zone();
movePtr(ImmPtr(zone->AddressOfNeedsBarrier()), scratch);
CompileZone *zone = GetIonContext()->compartment->zone();
movePtr(ImmPtr(zone->addressOfNeedsBarrier()), scratch);
Address needsBarrierAddr(scratch, 0);
branchTest32(cond, needsBarrierAddr, Imm32(0x1), label);
}
@ -647,7 +647,7 @@ class MacroAssembler : public MacroAssemblerSpecific
Push(PreBarrierReg);
computeEffectiveAddress(address, PreBarrierReg);
JitRuntime *rt = GetIonContext()->runtime->jitRuntime();
const JitRuntime *rt = GetIonContext()->runtime->jitRuntime();
IonCode *preBarrier = (type == MIRType_Shape)
? rt->shapePreBarrier()
: rt->valuePreBarrier();

@ -81,7 +81,7 @@ jit::EnableIonDebugLogging()
void
jit::IonSpewNewFunction(MIRGraph *graph, HandleScript func)
{
if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime)) {
if (GetIonContext()->runtime->onMainThread()) {
ionspewer.beginFunction(graph, func);
return;
}
@ -102,21 +102,21 @@ jit::IonSpewNewFunction(MIRGraph *graph, HandleScript func)
void
jit::IonSpewPass(const char *pass)
{
if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime))
if (GetIonContext()->runtime->onMainThread())
ionspewer.spewPass(pass);
}
void
jit::IonSpewPass(const char *pass, LinearScanAllocator *ra)
{
if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime))
if (GetIonContext()->runtime->onMainThread())
ionspewer.spewPass(pass, ra);
}
void
jit::IonSpewEndFunction()
{
if (!OffThreadIonCompilationEnabled(GetIonContext()->runtime))
if (GetIonContext()->runtime->onMainThread())
ionspewer.endFunction();
}

@ -275,7 +275,7 @@ class JitRuntime
bool handleAccessViolation(JSRuntime *rt, void *faultingAddress);
IonCode *getVMWrapper(const VMFunction &f);
IonCode *getVMWrapper(const VMFunction &f) const;
IonCode *debugTrapHandler(JSContext *cx);
IonCode *getGenericBailoutHandler() const {
@ -290,7 +290,7 @@ class JitRuntime
return bailoutTail_;
}
IonCode *getBailoutTable(const FrameSizeClass &frameClass);
IonCode *getBailoutTable(const FrameSizeClass &frameClass) const;
IonCode *getArgumentsRectifier(ExecutionMode mode) const {
switch (mode) {
@ -424,7 +424,7 @@ class JitCompartment
return rt->execAlloc_;
}
IonCode *stringConcatStub(ExecutionMode mode) {
IonCode *stringConcatStub(ExecutionMode mode) const {
switch (mode) {
case SequentialExecution: return stringConcatStub_;
case ParallelExecution: return parallelStringConcatStub_;

@ -3209,7 +3209,7 @@ LIRGenerator::visitFunctionBoundary(MFunctionBoundary *ins)
return false;
// If slow assertions are enabled, then this node will result in a callVM
// out to a C++ function for the assertions, so we will need a safepoint.
return !GetIonContext()->runtime->spsProfiler.slowAssertionsEnabled() ||
return !GetIonContext()->runtime->spsProfiler().slowAssertionsEnabled() ||
assignSafepoint(lir, ins);
}

@ -188,7 +188,7 @@ IonBuilder::inlineMathFunction(CallInfo &callInfo, MMathFunction::Function funct
if (!IsNumberType(callInfo.getArg(0)->type()))
return InliningStatus_NotInlined;
MathCache *cache = compartment->runtimeFromAnyThread()->maybeGetMathCache();
const MathCache *cache = compartment->runtime()->maybeGetMathCache();
if (!cache)
return InliningStatus_NotInlined;
@ -1266,7 +1266,7 @@ IonBuilder::inlineNewParallelArray(CallInfo &callInfo)
if (targetObj && targetObj->is<JSFunction>())
target = &targetObj->as<JSFunction>();
if (target && target->isInterpreted() && target->nonLazyScript()->shouldCloneAtCallsite) {
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment, target, script(), pc))
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), target, script(), pc))
target = clone;
}
MDefinition *ctor = makeCallsiteClone(
@ -1291,7 +1291,7 @@ IonBuilder::inlineParallelArray(CallInfo &callInfo)
return InliningStatus_NotInlined;
JS_ASSERT(target->nonLazyScript()->shouldCloneAtCallsite);
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment, target, script(), pc))
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), target, script(), pc))
target = clone;
MConstant *ctor = MConstant::New(alloc(), ObjectValue(*target));

@ -1966,8 +1966,7 @@ MTypeOf::foldsTo(TempAllocator &alloc, bool useValueNumbers)
return this;
}
JSRuntime *rt = GetIonContext()->runtime;
return MConstant::New(alloc, StringValue(TypeName(type, rt)));
return MConstant::New(alloc, StringValue(TypeName(type, GetIonContext()->runtime->names())));
}
void

@ -3801,9 +3801,9 @@ class MMathFunction
private:
Function function_;
MathCache *cache_;
const MathCache *cache_;
MMathFunction(MDefinition *input, Function function, MathCache *cache)
MMathFunction(MDefinition *input, Function function, const MathCache *cache)
: MUnaryInstruction(input), function_(function), cache_(cache)
{
setResultType(MIRType_Double);
@ -3816,14 +3816,14 @@ class MMathFunction
// A nullptr cache means this function will neither access nor update the cache.
static MMathFunction *New(TempAllocator &alloc, MDefinition *input, Function function,
MathCache *cache)
const MathCache *cache)
{
return new(alloc) MMathFunction(input, function, cache);
}
Function function() const {
return function_;
}
MathCache *cache() const {
const MathCache *cache() const {
return cache_;
}
TypePolicy *typePolicy() {

@ -33,7 +33,7 @@ class MStart;
class MIRGenerator
{
public:
MIRGenerator(JSCompartment *compartment, TempAllocator *alloc, MIRGraph *graph, CompileInfo *info);
MIRGenerator(CompileCompartment *compartment, TempAllocator *alloc, MIRGraph *graph, CompileInfo *info);
TempAllocator &alloc() {
return *alloc_;
@ -44,10 +44,7 @@ class MIRGenerator
bool ensureBallast() {
return alloc().ensureBallast();
}
JitCompartment *jitCompartment() const {
return compartment->jitCompartment();
}
JitRuntime *jitRuntime() const {
const JitRuntime *jitRuntime() const {
return GetIonContext()->runtime->jitRuntime();
}
CompileInfo &info() {
@ -69,7 +66,7 @@ class MIRGenerator
}
bool instrumentedProfiling() {
return GetIonContext()->runtime->spsProfiler.enabled();
return GetIonContext()->runtime->spsProfiler().enabled();
}
// Whether the main thread is trying to cancel this build.
@ -126,7 +123,7 @@ class MIRGenerator
}
public:
JSCompartment *compartment;
CompileCompartment *compartment;
protected:
CompileInfo *info_;

@ -16,7 +16,7 @@
using namespace js;
using namespace js::jit;
MIRGenerator::MIRGenerator(JSCompartment *compartment,
MIRGenerator::MIRGenerator(CompileCompartment *compartment,
TempAllocator *alloc, MIRGraph *graph, CompileInfo *info)
: compartment(compartment),
info_(info),

@ -1872,7 +1872,7 @@ CodeGeneratorARM::visitInterruptCheck(LInterruptCheck *lir)
if (!ool)
return false;
void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
void *interrupt = (void*)GetIonContext()->runtime->addressOfInterrupt();
masm.load32(AbsoluteAddress(interrupt), lr);
masm.ma_cmp(lr, Imm32(0));
masm.ma_b(ool->entry(), Assembler::NonZero);

@ -3367,7 +3367,7 @@ MacroAssemblerARMCompat::storeTypeTag(ImmTag tag, Register base, Register index,
void
MacroAssemblerARMCompat::linkExitFrame() {
uint8_t *dest = (uint8_t*)&GetIonContext()->runtime->mainThread.ionTop;
uint8_t *dest = (uint8_t*)GetIonContext()->runtime->addressOfIonTop();
movePtr(ImmPtr(dest), ScratchRegister);
ma_str(StackPointer, Operand(ScratchRegister, 0));
}

@ -356,12 +356,8 @@ class Label : public LabelBase
{ }
~Label()
{
#ifdef DEBUG
// Note: the condition is a hack to silence this assert when OOM testing,
// see bug 756614.
if (MaybeGetIonContext() && !OffThreadIonCompilationEnabled(GetIonContext()->runtime))
JS_ASSERT_IF(!GetIonContext()->runtime->hadOutOfMemory, !used());
#endif
if (MaybeGetIonContext())
JS_ASSERT_IF(!GetIonContext()->runtime->hadOutOfMemory(), !used());
}
};

@ -46,7 +46,7 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator *gen, LIRGraph *graph, Mac
pushedArgs_(0),
#endif
lastOsiPointOffset_(0),
sps_(&GetIonContext()->runtime->spsProfiler, &lastPC_),
sps_(&GetIonContext()->runtime->spsProfiler(), &lastPC_),
osrEntryOffset_(0),
skipArgCheckEntryOffset_(0),
frameDepth_(graph->localSlotCount() * sizeof(STACK_SLOT_SIZE) +
@ -623,7 +623,7 @@ CodeGeneratorShared::callVM(const VMFunction &fun, LInstruction *ins, const Regi
// If we're calling a function with an out parameter type of double, make
// sure we have an FPU.
JS_ASSERT_IF(fun.outParam == Type_Double, GetIonContext()->runtime->jitSupportsFloatingPoint);
JS_ASSERT_IF(fun.outParam == Type_Double, GetIonContext()->runtime->jitSupportsFloatingPoint());
#ifdef DEBUG
if (ins->mirRaw()) {

@ -281,7 +281,7 @@ CodeGeneratorX64::visitInterruptCheck(LInterruptCheck *lir)
return false;
masm.branch32(Assembler::NotEqual,
AbsoluteAddress(&GetIonContext()->runtime->interrupt), Imm32(0),
AbsoluteAddress(GetIonContext()->runtime->addressOfInterrupt()), Imm32(0),
ool->entry());
masm.bind(ool->rejoin());
return true;

@ -1228,7 +1228,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
// ThreadData::ionTop of the main thread.
void linkExitFrame() {
storePtr(StackPointer,
AbsoluteAddress(&GetIonContext()->runtime->mainThread.ionTop));
AbsoluteAddress(GetIonContext()->runtime->addressOfIonTop()));
}
void callWithExitFrame(IonCode *target, Register dynStack) {

@ -271,7 +271,7 @@ CodeGeneratorX86::visitInterruptCheck(LInterruptCheck *lir)
if (!ool)
return false;
masm.cmpl(Operand(AbsoluteAddress(&GetIonContext()->runtime->interrupt)), Imm32(0));
masm.cmpl(Operand(AbsoluteAddress(GetIonContext()->runtime->addressOfInterrupt())), Imm32(0));
masm.j(Assembler::NonZero, ool->entry());
masm.bind(ool->rejoin());
return true;

@ -1072,7 +1072,7 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
// Save an exit frame (which must be aligned to the stack pointer) to
// ThreadData::ionTop of the main thread.
void linkExitFrame() {
movl(StackPointer, Operand(AbsoluteAddress(&GetIonContext()->runtime->mainThread.ionTop)));
movl(StackPointer, Operand(AbsoluteAddress(GetIonContext()->runtime->addressOfIonTop())));
}
void callWithExitFrame(IonCode *target, Register dynStack) {

@ -1174,7 +1174,7 @@ typedef struct JSStdName {
static Handle<PropertyName*>
StdNameToPropertyName(JSContext *cx, const JSStdName *stdn)
{
return OFFSET_TO_NAME(cx->runtime(), stdn->atomOffset);
return AtomStateOffsetToName(cx->runtime()->atomState, stdn->atomOffset);
}
/*
@ -1334,7 +1334,7 @@ JS_ResolveStandardClass(JSContext *cx, HandleObject obj, HandleId id, bool *reso
stdnm = nullptr;
for (i = 0; standard_class_atoms[i].init; i++) {
JS_ASSERT(standard_class_atoms[i].clasp);
atom = OFFSET_TO_NAME(rt, standard_class_atoms[i].atomOffset);
atom = AtomStateOffsetToName(rt->atomState, standard_class_atoms[i].atomOffset);
if (idstr == atom) {
stdnm = &standard_class_atoms[i];
break;

@ -163,14 +163,14 @@ AtomHasher::match(const AtomStateEntry &entry, const Lookup &lookup)
}
inline Handle<PropertyName*>
TypeName(JSType type, JSRuntime *rt)
TypeName(JSType type, const JSAtomState &names)
{
JS_ASSERT(type < JSTYPE_LIMIT);
JS_STATIC_ASSERT(offsetof(JSAtomState, undefined) +
JSTYPE_LIMIT * sizeof(FixedHeapPtr<PropertyName>) <=
sizeof(JSAtomState));
JS_STATIC_ASSERT(JSTYPE_VOID == 0);
return (&rt->atomState.undefined)[type];
return (&names.undefined)[type];
}
inline Handle<PropertyName*>

@ -110,10 +110,9 @@ JSCompartment::sweepCallsiteClones()
}
JSFunction *
js::ExistingCloneFunctionAtCallsite(JSCompartment *comp, JSFunction *fun,
js::ExistingCloneFunctionAtCallsite(const CallsiteCloneTable &table, JSFunction *fun,
JSScript *script, jsbytecode *pc)
{
JS_ASSERT(comp->zone()->types.inferenceEnabled);
JS_ASSERT(fun->nonLazyScript()->shouldCloneAtCallsite);
JS_ASSERT(!fun->nonLazyScript()->enclosingStaticScope());
JS_ASSERT(types::UseNewTypeForClone(fun));
@ -124,14 +123,10 @@ js::ExistingCloneFunctionAtCallsite(JSCompartment *comp, JSFunction *fun,
*/
JS_ASSERT(fun->isTenured());
typedef CallsiteCloneKey Key;
typedef CallsiteCloneTable Table;
Table &table = comp->callsiteClones;
if (!table.initialized())
return nullptr;
Table::Ptr p = table.lookup(Key(fun, script, pc - script->code));
CallsiteCloneTable::Ptr p = table.lookup(CallsiteCloneKey(fun, script, pc - script->code));
if (p)
return p->value;
@ -141,7 +136,7 @@ js::ExistingCloneFunctionAtCallsite(JSCompartment *comp, JSFunction *fun,
JSFunction *
js::CloneFunctionAtCallsite(JSContext *cx, HandleFunction fun, HandleScript script, jsbytecode *pc)
{
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(cx->compartment(), fun, script, pc))
if (JSFunction *clone = ExistingCloneFunctionAtCallsite(cx->compartment()->callsiteClones, fun, script, pc))
return clone;
RootedObject parent(cx, fun->environment());

@ -65,7 +65,7 @@ typedef HashMap<CallsiteCloneKey,
SystemAllocPolicy> CallsiteCloneTable;
JSFunction *
ExistingCloneFunctionAtCallsite(JSCompartment *comp, JSFunction *fun,
ExistingCloneFunctionAtCallsite(const CallsiteCloneTable &table, JSFunction *fun,
JSScript *script, jsbytecode *pc);
JSFunction *CloneFunctionAtCallsite(JSContext *cx, HandleFunction fun,

@ -180,7 +180,7 @@ fun_enumerate(JSContext *cx, HandleObject obj)
for (unsigned i = 0; i < ArrayLength(poisonPillProps); i++) {
const uint16_t offset = poisonPillProps[i];
id = NameToId(OFFSET_TO_NAME(cx->runtime(), offset));
id = NameToId(AtomStateOffsetToName(cx->runtime()->atomState, offset));
if (!JSObject::hasProperty(cx, obj, id, &found, 0))
return false;
}
@ -248,15 +248,15 @@ ResolveInterpretedFunctionPrototype(JSContext *cx, HandleObject obj)
}
bool
js::FunctionHasResolveHook(JSRuntime *rt, PropertyName *name)
js::FunctionHasResolveHook(const JSAtomState &atomState, PropertyName *name)
{
if (name == rt->atomState.prototype || name == rt->atomState.length || name == rt->atomState.name)
if (name == atomState.prototype || name == atomState.length || name == atomState.name)
return true;
for (unsigned i = 0; i < ArrayLength(poisonPillProps); i++) {
const uint16_t offset = poisonPillProps[i];
if (name == OFFSET_TO_NAME(rt, offset))
if (name == AtomStateOffsetToName(atomState, offset))
return true;
}
@ -319,7 +319,7 @@ js::fun_resolve(JSContext *cx, HandleObject obj, HandleId id, unsigned flags,
for (unsigned i = 0; i < ArrayLength(poisonPillProps); i++) {
const uint16_t offset = poisonPillProps[i];
if (JSID_IS_ATOM(id, OFFSET_TO_NAME(cx->runtime(), offset))) {
if (JSID_IS_ATOM(id, AtomStateOffsetToName(cx->runtime()->atomState, offset))) {
JS_ASSERT(!IsInternalFunctionObject(fun));
PropertyOp getter;

@ -23,6 +23,8 @@ typedef JSParallelNative ParallelNative;
typedef JSThreadSafeNative ThreadSafeNative;
}
struct JSAtomState;
class JSFunction : public JSObject
{
public:
@ -490,7 +492,7 @@ DefineFunction(JSContext *cx, HandleObject obj, HandleId id, JSNative native,
NewObjectKind newKind = GenericObject);
bool
FunctionHasResolveHook(JSRuntime *rt, PropertyName *name);
FunctionHasResolveHook(const JSAtomState &atomState, PropertyName *name);
extern bool
fun_resolve(JSContext *cx, HandleObject obj, HandleId id,

@ -636,7 +636,7 @@ WorkerThread::handleAsmJSWorkload(WorkerThreadState &state)
state.unlock();
do {
jit::IonContext icx(runtime, asmData->mir->compartment, &asmData->mir->alloc());
jit::IonContext icx(jit::CompileRuntime::get(runtime), asmData->mir->compartment, &asmData->mir->alloc());
int64_t before = PRMJ_Now();
@ -691,7 +691,9 @@ WorkerThread::handleIonWorkload(WorkerThreadState &state)
state.unlock();
{
jit::IonContext ictx(runtime, ionBuilder->script()->compartment(), &ionBuilder->alloc());
jit::IonContext ictx(jit::CompileRuntime::get(runtime),
jit::CompileCompartment::get(ionBuilder->script()->compartment()),
&ionBuilder->alloc());
ionBuilder->setBackgroundCodegen(jit::CompileBackEnd(ionBuilder));
}
state.lock();

@ -231,6 +231,7 @@ if CONFIG['ENABLE_ION']:
'jit/BytecodeAnalysis.cpp',
'jit/C1Spewer.cpp',
'jit/CodeGenerator.cpp',
'jit/CompileWrappers.cpp',
'jit/EdgeCaseAnalysis.cpp',
'jit/EffectiveAddressAnalysis.cpp',
'jit/Ion.cpp',

@ -1482,7 +1482,9 @@ ForkJoinShared::executePortion(PerThreadData *perThread,
// Make a new IonContext for the slice, which is needed if we need to
// re-enter the VM.
IonContext icx(cx_->runtime(), cx_->compartment(), nullptr);
IonContext icx(CompileRuntime::get(cx_->runtime()),
CompileCompartment::get(cx_->compartment()),
nullptr);
JS_ASSERT(slice.bailoutRecord->topScript == nullptr);

@ -457,14 +457,14 @@ static JS_ALWAYS_INLINE JSString *
TypeOfOperation(const Value &v, JSRuntime *rt)
{
JSType type = js::TypeOfValue(v);
return TypeName(type, rt);
return TypeName(type, rt->atomState);
}
static inline JSString *
TypeOfObjectOperation(JSObject *obj, JSRuntime *rt)
{
JSType type = js::TypeOfObject(obj);
return TypeName(type, rt);
return TypeName(type, rt->atomState);
}
static JS_ALWAYS_INLINE bool

@ -466,11 +466,16 @@ struct JSAtomState
#undef PROPERTYNAME_FIELD
};
#define NAME_OFFSET(name) offsetof(JSAtomState, name)
#define OFFSET_TO_NAME(rt,off) (*(js::FixedHeapPtr<js::PropertyName>*)((char*)&(rt)->atomState + (off)))
namespace js {
#define NAME_OFFSET(name) offsetof(JSAtomState, name)
inline HandlePropertyName
AtomStateOffsetToName(const JSAtomState &atomState, size_t offset)
{
return *(js::FixedHeapPtr<js::PropertyName>*)((char*)&atomState + offset);
}
/*
* Encapsulates portions of the runtime/context that are tied to a
* single active thread. Normally, as most JS is single-threaded,