Backed out changeset 6d56dfa4e845 (bug 934502) for SM(ggc) failures.

Ryan VanderMeulen 2015-01-22 23:22:52 -05:00
parent 2e58c3092f
commit a6e3274b16
11 changed files with 184 additions and 195 deletions

View File

@ -2076,6 +2076,10 @@ CodeGenerator::visitStackArgT(LStackArgT *lir)
masm.storeValue(ValueTypeFromMIRType(argType), ToRegister(arg), dest);
else
masm.storeValue(*(arg->toConstant()), dest);
uint32_t slot = StackOffsetToSlot(stack_offset);
MOZ_ASSERT(slot - 1u < graph.totalSlotCount());
masm.propagateOOM(pushedArgumentSlots_.append(slot));
}
void
@ -2088,6 +2092,10 @@ CodeGenerator::visitStackArgV(LStackArgV *lir)
int32_t stack_offset = StackOffsetOfPassedArg(argslot);
masm.storeValue(val, Address(StackPointer, stack_offset));
uint32_t slot = StackOffsetToSlot(stack_offset);
MOZ_ASSERT(slot - 1u < graph.totalSlotCount());
masm.propagateOOM(pushedArgumentSlots_.append(slot));
}
void
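
A side note on the restored assertion: MOZ_ASSERT(slot - 1u < graph.totalSlotCount()) uses unsigned wraparound to check 1 <= slot <= totalSlotCount in a single comparison (slot 0 wraps to UINT32_MAX and fails). A minimal standalone illustration of the idiom, with names that are purely illustrative:

#include <cassert>
#include <cstdint>

// Returns true iff 1 <= slot <= total, using unsigned wraparound:
// when slot == 0, slot - 1u wraps to UINT32_MAX and the comparison fails.
static bool slotInRange(uint32_t slot, uint32_t total)
{
    return slot - 1u < total;
}

int main()
{
    assert(!slotInRange(0, 8));   // slot 0 is never a valid pushed-argument slot
    assert(slotInRange(1, 8));
    assert(slotInRange(8, 8));
    assert(!slotInRange(9, 8));
    return 0;
}
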
@ -2656,6 +2664,8 @@ CodeGenerator::visitCallNative(LCallNative *call)
// Move the StackPointer back to its original location, unwinding the native exit frame.
masm.adjustStack(NativeExitFrameLayout::Size() - unusedStack);
MOZ_ASSERT(masm.framePushed() == initialStack);
dropArguments(call->numStackArgs() + 1);
}
static void
@ -2785,6 +2795,8 @@ CodeGenerator::visitCallDOMNative(LCallDOMNative *call)
// Move the StackPointer back to its original location, unwinding the native exit frame.
masm.adjustStack(IonDOMMethodExitFrameLayout::Size() - unusedStack);
MOZ_ASSERT(masm.framePushed() == initialStack);
dropArguments(call->numStackArgs() + 1);
}
typedef bool (*GetIntrinsicValueFn)(JSContext *cx, HandlePropertyName, MutableHandleValue);
@ -2901,6 +2913,8 @@ CodeGenerator::visitCallGeneric(LCallGeneric *call)
masm.loadValue(Address(StackPointer, unusedStack), JSReturnOperand);
masm.bind(&notPrimitive);
}
dropArguments(call->numStackArgs() + 1);
}
void
@ -2967,6 +2981,8 @@ CodeGenerator::visitCallKnown(LCallKnown *call)
masm.loadValue(Address(StackPointer, unusedStack), JSReturnOperand);
masm.bind(&notPrimitive);
}
dropArguments(call->numStackArgs() + 1);
}
void
@ -3803,6 +3819,11 @@ CodeGenerator::generateBody()
if (counts)
blockCounts->visitInstruction(*iter);
if (iter->safepoint() && pushedArgumentSlots_.length()) {
if (!markArgumentSlots(iter->safepoint()))
return false;
}
#ifdef CHECK_OSIPOINT_REGISTERS
if (iter->safepoint())
resetOsiPointRegs(iter->safepoint());
@ -3831,6 +3852,7 @@ CodeGenerator::generateBody()
#endif
}
MOZ_ASSERT(pushedArgumentSlots_.empty());
return true;
}
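
Taken together, the CodeGenerator hunks above restore a simple lifecycle for outgoing call arguments: visitStackArgT/visitStackArgV record the slot of each stored argument in pushedArgumentSlots_, generateBody copies the live slots into each safepoint via markArgumentSlots, and every visitCall* variant drops them with dropArguments(numStackArgs + 1) once the call has consumed them. A rough standalone model of that flow (these class and method names are hypothetical, not the real CodeGeneratorShared interface):

#include <cassert>
#include <cstdint>
#include <vector>

// Simplified model of the restored bookkeeping: outgoing call arguments are
// recorded as they are stored, copied into any safepoint emitted while they
// are live, and dropped once the call has consumed them.
struct ArgumentSlotTracker
{
    std::vector<uint32_t> pushedSlots;

    void pushArgument(uint32_t slot) { pushedSlots.push_back(slot); }

    // Corresponds to markArgumentSlots: record every live argument slot
    // as a boxed-Value slot in the safepoint.
    void markSafepoint(std::vector<uint32_t> &safepointValueSlots) const {
        for (uint32_t slot : pushedSlots)
            safepointValueSlots.push_back(slot);
    }

    // Corresponds to dropArguments(argc): the call consumed argc entries.
    void dropArguments(size_t argc) {
        assert(argc <= pushedSlots.size());
        pushedSlots.resize(pushedSlots.size() - argc);
    }
};

int main()
{
    ArgumentSlotTracker tracker;
    tracker.pushArgument(16);  // |this|
    tracker.pushArgument(24);  // first actual argument

    std::vector<uint32_t> safepoint;
    tracker.markSafepoint(safepoint);
    assert(safepoint.size() == 2);

    tracker.dropArguments(2);  // numStackArgs() + 1 in the real code
    assert(tracker.pushedSlots.empty());
    return 0;
}
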
@ -7109,7 +7131,6 @@ CodeGenerator::link(JSContext *cx, types::CompilerConstraintList *constraints)
if (warmUpCount > script->getWarmUpCount())
script->incWarmUpCounter(warmUpCount - script->getWarmUpCount());
uint32_t argumentSlots = (gen->info().nargs() + 1) * sizeof(Value);
uint32_t scriptFrameSize = frameClass_ == FrameSizeClass::None()
? frameDepth_
: FrameSizeClass::FromDepth(frameDepth_).frameSize();
@ -7121,7 +7142,7 @@ CodeGenerator::link(JSContext *cx, types::CompilerConstraintList *constraints)
IonScript *ionScript =
IonScript::New(cx, recompileInfo,
graph.totalSlotCount(), argumentSlots, scriptFrameSize,
graph.totalSlotCount(), scriptFrameSize,
snapshots_.listSize(), snapshots_.RVATableSize(),
recovers_.size(), bailouts_.length(), graph.numConstants(),
safepointIndices_.length(), osiIndices_.length(),

View File

@ -738,7 +738,7 @@ IonScript::IonScript()
IonScript *
IonScript::New(JSContext *cx, types::RecompileInfo recompileInfo,
uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
uint32_t frameSlots, uint32_t frameSize,
size_t snapshotsListSize, size_t snapshotsRVATableSize,
size_t recoversSize, size_t bailoutEntries,
size_t constants, size_t safepointIndices,
@ -827,8 +827,6 @@ IonScript::New(JSContext *cx, types::RecompileInfo recompileInfo,
offsetCursor += paddedBackedgeSize;
script->frameSlots_ = frameSlots;
script->argumentSlots_ = argumentSlots;
script->frameSize_ = frameSize;
script->recompileInfo_ = recompileInfo;

View File

@ -225,9 +225,6 @@ struct IonScript
// Number of bytes this function reserves on the stack.
uint32_t frameSlots_;
// Number of bytes used to pass in formal arguments or |this|.
uint32_t argumentSlots_;
// Frame size is the value that can be added to the StackPointer along
// with the frame prefix to get a valid JitFrameLayout.
uint32_t frameSize_;
@ -329,7 +326,7 @@ struct IonScript
IonScript();
static IonScript *New(JSContext *cx, types::RecompileInfo recompileInfo,
uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
uint32_t frameLocals, uint32_t frameSize,
size_t snapshotsListSize, size_t snapshotsRVATableSize,
size_t recoversSize, size_t bailoutEntries,
size_t constants, size_t safepointIndexEntries,
@ -465,9 +462,6 @@ struct IonScript
uint32_t frameSlots() const {
return frameSlots_;
}
uint32_t argumentSlots() const {
return argumentSlots_;
}
uint32_t frameSize() const {
return frameSize_;
}

View File

@ -939,14 +939,6 @@ MarkCalleeToken(JSTracer *trc, CalleeToken token)
}
}
uintptr_t *
JitFrameLayout::slotRef(SafepointSlotEntry where)
{
if (where.stack)
return (uintptr_t *)((uint8_t *)this - where.slot);
return (uintptr_t *)((uint8_t *)argv() + where.slot);
}
#ifdef JS_NUNBOX32
static inline uintptr_t
ReadAllocation(const JitFrameIterator &frame, const LAllocation *a)
@ -955,29 +947,31 @@ ReadAllocation(const JitFrameIterator &frame, const LAllocation *a)
Register reg = a->toGeneralReg()->reg();
return frame.machineState().read(reg);
}
return *frame.jsFrame()->slotRef(SafepointSlotEntry(a));
if (a->isStackSlot()) {
uint32_t slot = a->toStackSlot()->slot();
return *frame.jsFrame()->slotRef(slot);
}
uint32_t index = a->toArgument()->index();
uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
return *reinterpret_cast<uintptr_t *>(argv + index);
}
#endif
static void
MarkExtraActualArguments(JSTracer *trc, const JitFrameIterator &frame)
MarkFrameAndActualArguments(JSTracer *trc, const JitFrameIterator &frame)
{
// Mark any extra actual arguments for an Ion frame. Marking of |this| and
// the formal arguments is taken care of by the frame's safepoint/snapshot.
// The trampoline produced by |generateEnterJit| is pushing |this| on the
// stack, as requested by |setEnterJitData|. Thus, this function is also
// used for marking the |this| value of the top-level frame.
JitFrameLayout *layout = frame.jsFrame();
if (!CalleeTokenIsFunction(layout->calleeToken())) {
MOZ_ASSERT(frame.numActualArgs() == 0);
return;
}
size_t nargs = frame.numActualArgs();
size_t nformals = CalleeTokenToFunction(layout->calleeToken())->nargs();
MOZ_ASSERT_IF(!CalleeTokenIsFunction(layout->calleeToken()), nargs == 0);
// Trace actual arguments. Note + 1 for thisv.
// Trace function arguments. Note + 1 for thisv.
Value *argv = layout->argv();
for (size_t i = nformals + 1; i < nargs + 1; i++)
for (size_t i = 0; i < nargs + 1; i++)
gc::MarkValueRoot(trc, &argv[i], "ion-argv");
}
@ -988,9 +982,16 @@ WriteAllocation(const JitFrameIterator &frame, const LAllocation *a, uintptr_t v
if (a->isGeneralReg()) {
Register reg = a->toGeneralReg()->reg();
frame.machineState().write(reg, value);
} else {
*frame.jsFrame()->slotRef(SafepointSlotEntry(a)) = value;
return;
}
if (a->isStackSlot()) {
uint32_t slot = a->toStackSlot()->slot();
*frame.jsFrame()->slotRef(slot) = value;
return;
}
uint32_t index = a->toArgument()->index();
uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
*reinterpret_cast<uintptr_t *>(argv + index) = value;
}
#endif
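
The rename above also changes what gets traced: the removed MarkExtraActualArguments walked only the actuals beyond the formals (|this| and the formals were covered by the safepoint under the backed-out scheme), whereas the restored MarkFrameAndActualArguments traces |this| plus every actual argument. A small sketch of the two argv index ranges, for illustration only:

#include <cassert>
#include <cstddef>
#include <utility>

// Which argv[] indices get traced, as a half-open range [begin, end).
// argv[0] is |this|; argv[1..nargs] are the actual arguments.
static std::pair<size_t, size_t>
extraActualsRange(size_t nformals, size_t nargs)   // behaviour being backed out
{
    return {nformals + 1, nargs + 1};
}

static std::pair<size_t, size_t>
frameAndActualsRange(size_t nargs)                 // restored behaviour
{
    return {0, nargs + 1};
}

int main()
{
    // A call passing 4 actuals to a function declared with 2 formals.
    auto extras = extraActualsRange(2, 4);
    assert(extras.first == 3 && extras.second == 5);   // only argv[3], argv[4]

    auto all = frameAndActualsRange(4);
    assert(all.first == 0 && all.second == 5);         // |this| + all 4 actuals
    return 0;
}
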
@ -1011,7 +1012,7 @@ MarkIonJSFrame(JSTracer *trc, const JitFrameIterator &frame)
ionScript = frame.ionScriptFromCalleeToken();
}
MarkExtraActualArguments(trc, frame);
MarkFrameAndActualArguments(trc, frame);
const SafepointIndex *si = ionScript->getSafepointIndex(frame.returnAddressToFp());
@ -1019,15 +1020,14 @@ MarkIonJSFrame(JSTracer *trc, const JitFrameIterator &frame)
// Scan through slots which contain pointers (or on punboxing systems,
// actual values).
SafepointSlotEntry entry;
while (safepoint.getGcSlot(&entry)) {
uintptr_t *ref = layout->slotRef(entry);
uint32_t slot;
while (safepoint.getGcSlot(&slot)) {
uintptr_t *ref = layout->slotRef(slot);
gc::MarkGCThingRoot(trc, reinterpret_cast<void **>(ref), "ion-gc-slot");
}
while (safepoint.getValueSlot(&entry)) {
Value *v = (Value *)layout->slotRef(entry);
while (safepoint.getValueSlot(&slot)) {
Value *v = (Value *)layout->slotRef(slot);
gc::MarkValueRoot(trc, v, "ion-gc-slot");
}
@ -1070,7 +1070,7 @@ MarkBailoutFrame(JSTracer *trc, const JitFrameIterator &frame)
// We have to mark the list of actual arguments, as only formal arguments
// are represented in the Snapshot.
MarkExtraActualArguments(trc, frame);
MarkFrameAndActualArguments(trc, frame);
// Under a bailout, we do not have a Safepoint to only iterate over GC-things.
// Thus we use a SnapshotIterator to trace all the locations which would be
@ -1128,16 +1128,16 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
}
// Skip to the right place in the safepoint
SafepointSlotEntry entry;
while (safepoint.getGcSlot(&entry));
while (safepoint.getValueSlot(&entry));
uint32_t slot;
while (safepoint.getGcSlot(&slot));
while (safepoint.getValueSlot(&slot));
#ifdef JS_NUNBOX32
LAllocation type, payload;
while (safepoint.getNunboxSlot(&type, &payload));
#endif
while (safepoint.getSlotsOrElementsSlot(&entry)) {
HeapSlot **slots = reinterpret_cast<HeapSlot **>(layout->slotRef(entry));
while (safepoint.getSlotsOrElementsSlot(&slot)) {
HeapSlot **slots = reinterpret_cast<HeapSlot **>(layout->slotRef(slot));
nursery.forwardBufferPointer(slots);
}
}

View File

@ -13,7 +13,6 @@
#include "jsfun.h"
#include "jit/JitFrameIterator.h"
#include "jit/Safepoints.h"
namespace js {
namespace jit {
@ -401,10 +400,11 @@ class JitFrameLayout : public CommonFrameLayout
return numActualArgs_;
}
// Computes a reference to a stack or argument slot, where a slot is a
// distance from the base frame pointer, as would be used for LStackSlot
// or LArgument.
uintptr_t *slotRef(SafepointSlotEntry where);
// Computes a reference to a slot, where a slot is a distance from the base
// frame pointer (as would be used for LStackSlot).
uintptr_t *slotRef(uint32_t slot) {
return (uintptr_t *)((uint8_t *)this - slot);
}
static inline size_t Size() {
return sizeof(JitFrameLayout);
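
With SafepointSlotEntry gone, slotRef is again an inline helper that handles only stack slots: a slot is a byte distance below the frame layout (the stack grows down), while argument Values are reached separately through argv(), as in the ReadAllocation/WriteAllocation hunks earlier. A self-contained sketch of the two addressing conventions, using a plain byte buffer in place of a real JitFrameLayout:

#include <cassert>
#include <cstdint>
#include <cstring>

int main()
{
    // Fake frame memory: higher indices model higher addresses. The frame
    // layout sits in the middle, spilled stack slots live below it (the stack
    // grows down), and the passed argument Values live above it.
    uint8_t memory[256] = {};
    uint8_t *framePtr = memory + 128;   // stand-in for (uint8_t *)jsFrame()
    uint8_t *argvBase = framePtr + 32;  // stand-in for jsFrame()->argv()

    // slotRef(slot): a stack slot is a byte distance below the frame layout.
    uint32_t slot = 16;
    uintptr_t spilled = 0x1234;
    std::memcpy(framePtr - slot, &spilled, sizeof(spilled));

    // An LArgument index is a byte offset into the Values above argv.
    uint32_t argIndex = 8;
    uintptr_t arg = 0x5678;
    std::memcpy(argvBase + argIndex, &arg, sizeof(arg));

    uintptr_t readBack = 0;
    std::memcpy(&readBack, framePtr - slot, sizeof(readBack));
    assert(readBack == 0x1234);
    std::memcpy(&readBack, argvBase + argIndex, sizeof(readBack));
    assert(readBack == 0x5678);
    return 0;
}
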

View File

@ -164,7 +164,6 @@ class LAllocation : public TempObject
bool isMemory() const {
return isStackSlot() || isArgument();
}
inline uint32_t memorySlot() const;
inline LUse *toUse();
inline const LUse *toUse() const;
inline const LGeneralReg *toGeneralReg() const;
@ -361,22 +360,15 @@ class LStackSlot : public LAllocation
class LArgument : public LAllocation
{
public:
explicit LArgument(uint32_t index)
explicit LArgument(int32_t index)
: LAllocation(ARGUMENT_SLOT, index)
{ }
uint32_t index() const {
int32_t index() const {
return data();
}
};
inline uint32_t
LAllocation::memorySlot() const
{
MOZ_ASSERT(isMemory());
return isStackSlot() ? toStackSlot()->slot() : toArgument()->index();
}
// Represents storage for a definition.
class LDefinition
{
@ -1240,22 +1232,6 @@ class LSnapshot : public TempObject
void rewriteRecoveredInput(LUse input);
};
struct SafepointSlotEntry {
// Flag indicating whether this is a slot in the stack or argument space.
uint32_t stack:1;
// Byte offset of the slot, as in LStackSlot or LArgument.
uint32_t slot:31;
SafepointSlotEntry() { }
SafepointSlotEntry(bool stack, uint32_t slot)
: stack(stack), slot(slot)
{ }
explicit SafepointSlotEntry(const LAllocation *a)
: stack(a->isStackSlot()), slot(a->memorySlot())
{ }
};
struct SafepointNunboxEntry {
uint32_t typeVreg;
LAllocation type;
@ -1269,11 +1245,10 @@ struct SafepointNunboxEntry {
class LSafepoint : public TempObject
{
typedef SafepointSlotEntry SlotEntry;
typedef SafepointNunboxEntry NunboxEntry;
public:
typedef Vector<SlotEntry, 0, JitAllocPolicy> SlotList;
typedef Vector<uint32_t, 0, JitAllocPolicy> SlotList;
typedef Vector<NunboxEntry, 0, JitAllocPolicy> NunboxList;
private:
@ -1308,14 +1283,14 @@ class LSafepoint : public TempObject
// Assembler buffer displacement to OSI point's call location.
uint32_t osiCallPointOffset_;
// List of slots which have gcthing pointers.
// List of stack slots which have gcthing pointers.
SlotList gcSlots_;
// List of slots which have Values.
// List of stack slots which have Values.
SlotList valueSlots_;
#ifdef JS_NUNBOX32
// List of registers (in liveRegs) and slots which contain pieces of Values.
// List of registers (in liveRegs) and stack slots which contain pieces of Values.
NunboxList nunboxParts_;
#elif JS_PUNBOX64
// The subset of liveRegs which have Values.
@ -1325,7 +1300,7 @@ class LSafepoint : public TempObject
// The subset of liveRegs which contains pointers to slots/elements.
GeneralRegisterSet slotsOrElementsRegs_;
// List of slots which have slots/elements pointers.
// List of stack slots which have slots/elements pointers.
SlotList slotsOrElementsSlots_;
public:
@ -1372,8 +1347,8 @@ class LSafepoint : public TempObject
GeneralRegisterSet gcRegs() const {
return gcRegs_;
}
bool addGcSlot(bool stack, uint32_t slot) {
bool result = gcSlots_.append(SlotEntry(stack, slot));
bool addGcSlot(uint32_t slot) {
bool result = gcSlots_.append(slot);
if (result)
assertInvariants();
return result;
@ -1392,15 +1367,15 @@ class LSafepoint : public TempObject
slotsOrElementsRegs_.addUnchecked(reg);
assertInvariants();
}
bool addSlotsOrElementsSlot(bool stack, uint32_t slot) {
bool result = slotsOrElementsSlots_.append(SlotEntry(stack, slot));
bool addSlotsOrElementsSlot(uint32_t slot) {
bool result = slotsOrElementsSlots_.append(slot);
if (result)
assertInvariants();
return result;
}
bool addSlotsOrElementsPointer(LAllocation alloc) {
if (alloc.isMemory())
return addSlotsOrElementsSlot(alloc.isStackSlot(), alloc.memorySlot());
if (alloc.isStackSlot())
return addSlotsOrElementsSlot(alloc.toStackSlot()->slot());
MOZ_ASSERT(alloc.isRegister());
addSlotsOrElementsRegister(alloc.toRegister().gpr());
assertInvariants();
@ -1409,17 +1384,19 @@ class LSafepoint : public TempObject
bool hasSlotsOrElementsPointer(LAllocation alloc) const {
if (alloc.isRegister())
return slotsOrElementsRegs().has(alloc.toRegister().gpr());
for (size_t i = 0; i < slotsOrElementsSlots_.length(); i++) {
const SlotEntry &entry = slotsOrElementsSlots_[i];
if (entry.stack == alloc.isStackSlot() && entry.slot == alloc.memorySlot())
return true;
if (alloc.isStackSlot()) {
for (size_t i = 0; i < slotsOrElementsSlots_.length(); i++) {
if (slotsOrElementsSlots_[i] == alloc.toStackSlot()->slot())
return true;
}
return false;
}
return false;
}
bool addGcPointer(LAllocation alloc) {
if (alloc.isMemory())
return addGcSlot(alloc.isStackSlot(), alloc.memorySlot());
if (alloc.isStackSlot())
return addGcSlot(alloc.toStackSlot()->slot());
if (alloc.isRegister())
addGcRegister(alloc.toRegister().gpr());
assertInvariants();
@ -1429,16 +1406,19 @@ class LSafepoint : public TempObject
bool hasGcPointer(LAllocation alloc) const {
if (alloc.isRegister())
return gcRegs().has(alloc.toRegister().gpr());
MOZ_ASSERT(alloc.isMemory());
for (size_t i = 0; i < gcSlots_.length(); i++) {
if (gcSlots_[i].stack == alloc.isStackSlot() && gcSlots_[i].slot == alloc.memorySlot())
return true;
if (alloc.isStackSlot()) {
for (size_t i = 0; i < gcSlots_.length(); i++) {
if (gcSlots_[i] == alloc.toStackSlot()->slot())
return true;
}
return false;
}
return false;
MOZ_ASSERT(alloc.isArgument());
return true;
}
bool addValueSlot(bool stack, uint32_t slot) {
bool result = valueSlots_.append(SlotEntry(stack, slot));
bool addValueSlot(uint32_t slot) {
bool result = valueSlots_.append(slot);
if (result)
assertInvariants();
return result;
@ -1447,9 +1427,9 @@ class LSafepoint : public TempObject
return valueSlots_;
}
bool hasValueSlot(bool stack, uint32_t slot) const {
bool hasValueSlot(uint32_t slot) const {
for (size_t i = 0; i < valueSlots_.length(); i++) {
if (valueSlots_[i].stack == stack && valueSlots_[i].slot == slot)
if (valueSlots_[i] == slot)
return true;
}
return false;
@ -1514,7 +1494,9 @@ class LSafepoint : public TempObject
#ifdef DEBUG
bool hasNunboxPayload(LAllocation payload) const {
if (payload.isMemory() && hasValueSlot(payload.isStackSlot(), payload.memorySlot()))
if (payload.isArgument())
return true;
if (payload.isStackSlot() && hasValueSlot(payload.toStackSlot()->slot()))
return true;
for (size_t i = 0; i < nunboxParts_.length(); i++) {
if (nunboxParts_[i].payload == payload)
@ -1545,15 +1527,25 @@ class LSafepoint : public TempObject
addValueRegister(reg);
return true;
}
if (hasValueSlot(alloc.isStackSlot(), alloc.memorySlot()))
return true;
return addValueSlot(alloc.isStackSlot(), alloc.memorySlot());
if (alloc.isStackSlot()) {
uint32_t slot = alloc.toStackSlot()->slot();
for (size_t i = 0; i < valueSlots().length(); i++) {
if (valueSlots()[i] == slot)
return true;
}
return addValueSlot(slot);
}
MOZ_ASSERT(alloc.isArgument());
return true;
}
bool hasBoxedValue(LAllocation alloc) const {
if (alloc.isRegister())
return valueRegs().has(alloc.toRegister().gpr());
return hasValueSlot(alloc.isStackSlot(), alloc.memorySlot());
if (alloc.isStackSlot())
return hasValueSlot(alloc.toStackSlot()->slot());
MOZ_ASSERT(alloc.isArgument());
return true;
}
#endif // JS_PUNBOX64
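
After the LIR.h changes above, every LSafepoint slot list is once more a flat vector of uint32_t stack offsets, membership tests are linear scans, and argument allocations are assumed to be traced elsewhere (hasGcPointer/hasBoxedValue simply return true for them). A rough model of the resulting list operations, not the real LSafepoint API:

#include <cassert>
#include <cstdint>
#include <vector>

// Simplified stand-in for the restored LSafepoint slot lists: each list is a
// plain vector of byte offsets from the frame pointer, and membership tests
// are linear scans (safepoints hold only a handful of entries).
struct SlotListSketch
{
    std::vector<uint32_t> slots;

    bool has(uint32_t slot) const {
        for (uint32_t s : slots) {
            if (s == slot)
                return true;
        }
        return false;
    }

    // Mirrors the add-if-missing pattern used by addBoxedValue for stack slots.
    bool addIfMissing(uint32_t slot) {
        if (has(slot))
            return true;
        slots.push_back(slot);   // the real code propagates append() OOM here
        return true;
    }
};

int main()
{
    SlotListSketch valueSlots;
    valueSlots.addIfMissing(24);
    valueSlots.addIfMissing(24);            // duplicate is not appended again
    assert(valueSlots.slots.size() == 1);
    assert(valueSlots.has(24) && !valueSlots.has(32));
    return 0;
}
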

View File

@ -484,21 +484,6 @@ LinearScanAllocator::isSpilledAt(LiveInterval *interval, CodePosition pos)
bool
LinearScanAllocator::populateSafepoints()
{
// Populate all safepoints with this/argument slots. These are never changed
// by the allocator and are not necessarily populated by the code below.
size_t nargs = graph.getBlock(0)->mir()->info().nargs();
for (size_t i = 0; i < graph.numSafepoints(); i++) {
LSafepoint *safepoint = graph.getSafepoint(i)->safepoint();
if (!safepoint->addValueSlot(/* stack = */ false, THIS_FRAME_ARGSLOT * sizeof(Value)))
return false;
for (size_t j = 0; j < nargs; j++) {
if (!safepoint->addValueSlot(/* stack = */ false, (j + 1) * sizeof(Value)))
return false;
}
}
size_t firstSafepoint = 0;
for (uint32_t i = 0; i < vregs.numVirtualRegisters(); i++) {
@ -548,7 +533,7 @@ LinearScanAllocator::populateSafepoints()
safepoint->addSlotsOrElementsRegister(a->toGeneralReg()->reg());
if (isSpilledAt(interval, inputOf(ins))) {
if (!safepoint->addSlotsOrElementsSlot(true, reg->canonicalSpillSlot()))
if (!safepoint->addSlotsOrElementsSlot(reg->canonicalSpillSlot()))
return false;
}
} else if (!IsNunbox(reg)) {
@ -573,12 +558,12 @@ LinearScanAllocator::populateSafepoints()
if (isSpilledAt(interval, inputOf(ins))) {
#ifdef JS_PUNBOX64
if (reg->type() == LDefinition::BOX) {
if (!safepoint->addValueSlot(true, reg->canonicalSpillSlot()))
if (!safepoint->addValueSlot(reg->canonicalSpillSlot()))
return false;
} else
#endif
{
if (!safepoint->addGcSlot(true, reg->canonicalSpillSlot()))
if (!safepoint->addGcSlot(reg->canonicalSpillSlot()))
return false;
}
}
@ -613,7 +598,7 @@ LinearScanAllocator::populateSafepoints()
// contiguously, so simply keep track of the base slot.
uint32_t payloadSlot = payload->canonicalSpillSlot();
uint32_t slot = BaseOfNunboxSlot(LDefinition::PAYLOAD, payloadSlot);
if (!safepoint->addValueSlot(true, slot))
if (!safepoint->addValueSlot(slot))
return false;
}
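
The block removed at the top of populateSafepoints belonged to the backed-out scheme, under which every safepoint was pre-seeded with a boxed-Value entry for |this| and for each formal argument at Value-sized offsets into the argument space. A sketch of the offsets that block produced, assuming sizeof(Value) is 8 and THIS_FRAME_ARGSLOT is 0 (the latter is an assumption here):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Compute the argument-space byte offsets the removed code added as value
// slots: one for |this| and one per formal argument.
static std::vector<uint32_t>
argumentValueSlots(size_t nargs)
{
    const uint32_t valueSize = 8;                // sizeof(Value), taken as 8 here
    std::vector<uint32_t> slots;
    slots.push_back(0 * valueSize);              // |this| (THIS_FRAME_ARGSLOT assumed 0)
    for (size_t j = 0; j < nargs; j++)
        slots.push_back((j + 1) * valueSize);    // formal argument j
    return slots;
}

int main()
{
    std::vector<uint32_t> slots = argumentValueSlots(2);
    assert(slots.size() == 3);
    assert(slots[0] == 0 && slots[1] == 8 && slots[2] == 16);
    return 0;
}
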

View File

@ -17,15 +17,14 @@ using namespace jit;
using mozilla::FloorLog2;
SafepointWriter::SafepointWriter(uint32_t slotCount, uint32_t argumentCount)
: frameSlots_((slotCount / sizeof(intptr_t)) + 1), // Stack slot counts are inclusive.
argumentSlots_(argumentCount / sizeof(intptr_t))
SafepointWriter::SafepointWriter(uint32_t slotCount)
: frameSlots_(slotCount / sizeof(intptr_t))
{ }
bool
SafepointWriter::init(TempAllocator &alloc)
{
return frameSlots_.init(alloc) && argumentSlots_.init(alloc);
return frameSlots_.init(alloc);
}
uint32_t
@ -130,34 +129,26 @@ SafepointWriter::writeGcRegs(LSafepoint *safepoint)
}
static void
WriteBitset(const BitSet &set, CompactBufferWriter &stream)
MapSlotsToBitset(BitSet &set, CompactBufferWriter &stream, uint32_t nslots, uint32_t *slots)
{
set.clear();
for (uint32_t i = 0; i < nslots; i++) {
// Slots are represented at a distance from |fp|. We divide by the
// pointer size, since we only care about pointer-sized/aligned slots
// here. Since the stack grows down, this means slots start at index 1,
// so we subtract 1 to pack the bitset.
MOZ_ASSERT(slots[i] % sizeof(intptr_t) == 0);
MOZ_ASSERT(slots[i] / sizeof(intptr_t) > 0);
set.insert(slots[i] / sizeof(intptr_t) - 1);
}
size_t count = set.rawLength();
const uint32_t *words = set.raw();
for (size_t i = 0; i < count; i++)
stream.writeUnsigned(words[i]);
}
static void
MapSlotsToBitset(BitSet &stackSet, BitSet &argumentSet,
CompactBufferWriter &stream, const LSafepoint::SlotList &slots)
{
stackSet.clear();
argumentSet.clear();
for (uint32_t i = 0; i < slots.length(); i++) {
// Slots are represented at a distance from |fp|. We divide by the
// pointer size, since we only care about pointer-sized/aligned slots
// here.
MOZ_ASSERT(slots[i].slot % sizeof(intptr_t) == 0);
size_t index = slots[i].slot / sizeof(intptr_t);
(slots[i].stack ? stackSet : argumentSet).insert(index);
}
WriteBitset(stackSet, stream);
WriteBitset(argumentSet, stream);
}
void
SafepointWriter::writeGcSlots(LSafepoint *safepoint)
{
@ -168,7 +159,10 @@ SafepointWriter::writeGcSlots(LSafepoint *safepoint)
JitSpew(JitSpew_Safepoints, " gc slot: %d", slots[i]);
#endif
MapSlotsToBitset(frameSlots_, argumentSlots_, stream_, slots);
MapSlotsToBitset(frameSlots_,
stream_,
slots.length(),
slots.begin());
}
void
@ -179,12 +173,10 @@ SafepointWriter::writeSlotsOrElementsSlots(LSafepoint *safepoint)
stream_.writeUnsigned(slots.length());
for (uint32_t i = 0; i < slots.length(); i++) {
if (!slots[i].stack)
MOZ_CRASH();
#ifdef DEBUG
JitSpew(JitSpew_Safepoints, " slots/elements slot: %d", slots[i].slot);
JitSpew(JitSpew_Safepoints, " slots/elements slot: %d", slots[i]);
#endif
stream_.writeUnsigned(slots[i].slot);
stream_.writeUnsigned(slots[i]);
}
}
@ -198,7 +190,7 @@ SafepointWriter::writeValueSlots(LSafepoint *safepoint)
JitSpew(JitSpew_Safepoints, " gc value: %d", slots[i]);
#endif
MapSlotsToBitset(frameSlots_, argumentSlots_, stream_, slots);
MapSlotsToBitset(frameSlots_, stream_, slots.length(), slots.begin());
}
#if defined(DEBUG) && defined(JS_NUNBOX32)
@ -392,8 +384,7 @@ SafepointWriter::endEntry()
SafepointReader::SafepointReader(IonScript *script, const SafepointIndex *si)
: stream_(script->safepoints() + si->safepointOffset(),
script->safepoints() + script->safepointsSize()),
frameSlots_((script->frameSlots() / sizeof(intptr_t)) + 1), // Stack slot counts are inclusive.
argumentSlots_(script->argumentSlots() / sizeof(intptr_t))
frameSlots_(script->frameSlots() / sizeof(intptr_t))
{
osiCallPointOffset_ = stream_.readUnsigned();
@ -434,23 +425,15 @@ SafepointReader::advanceFromGcRegs()
{
currentSlotChunk_ = 0;
nextSlotChunkNumber_ = 0;
currentSlotsAreStack_ = true;
}
bool
SafepointReader::getSlotFromBitmap(SafepointSlotEntry *entry)
SafepointReader::getSlotFromBitmap(uint32_t *slot)
{
while (currentSlotChunk_ == 0) {
// Are there any more chunks to read?
if (currentSlotsAreStack_) {
if (nextSlotChunkNumber_ == BitSet::RawLengthForBits(frameSlots_)) {
nextSlotChunkNumber_ = 0;
currentSlotsAreStack_ = false;
continue;
}
} else if (nextSlotChunkNumber_ == BitSet::RawLengthForBits(argumentSlots_)) {
if (nextSlotChunkNumber_ == BitSet::RawLengthForBits(frameSlots_))
return false;
}
// Yes, read the next chunk.
currentSlotChunk_ = stream_.readUnsigned();
@ -462,17 +445,17 @@ SafepointReader::getSlotFromBitmap(SafepointSlotEntry *entry)
uint32_t bit = FloorLog2(currentSlotChunk_);
currentSlotChunk_ &= ~(1 << bit);
// Return the slot, and re-scale it by the pointer size, reversing the
// transformation in MapSlotsToBitset.
entry->stack = currentSlotsAreStack_;
entry->slot = (((nextSlotChunkNumber_ - 1) * BitSet::BitsPerWord) + bit) * sizeof(intptr_t);
// Return the slot, taking care to add 1 back in since it was subtracted
// when added in the original bitset, and re-scale it by the pointer size,
// reversing the transformation in MapSlotsToBitset.
*slot = (((nextSlotChunkNumber_ - 1) * BitSet::BitsPerWord) + bit + 1) * sizeof(intptr_t);
return true;
}
bool
SafepointReader::getGcSlot(SafepointSlotEntry *entry)
SafepointReader::getGcSlot(uint32_t *slot)
{
if (getSlotFromBitmap(entry))
if (getSlotFromBitmap(slot))
return true;
advanceFromGcSlots();
return false;
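
The writer and reader hunks above restore a single frame-slot bitset: MapSlotsToBitset divides each byte offset by the pointer size and subtracts one (the smallest valid offset is one pointer below fp), and getSlotFromBitmap reverses the transformation when decoding. A simplified, self-contained round trip of that encoding (it scans bits in ascending order instead of using FloorLog2, and none of these helpers come from the tree):

#include <cassert>
#include <cstdint>
#include <vector>

static const uint32_t BitsPerWord = 32;

// Pack a pointer-aligned byte offset (measured down from fp, so >= one
// pointer) into a word-array bitset at bit offset/sizeof(intptr_t) - 1.
static void packSlot(std::vector<uint32_t> &words, uint32_t byteOffset)
{
    assert(byteOffset % sizeof(intptr_t) == 0 && byteOffset / sizeof(intptr_t) > 0);
    uint32_t bit = byteOffset / sizeof(intptr_t) - 1;
    if (bit / BitsPerWord >= words.size())
        words.resize(bit / BitsPerWord + 1, 0);
    words[bit / BitsPerWord] |= uint32_t(1) << (bit % BitsPerWord);
}

// Reverse the transformation: add 1 back and re-scale by the pointer size.
static std::vector<uint32_t> unpackSlots(const std::vector<uint32_t> &words)
{
    std::vector<uint32_t> offsets;
    for (uint32_t chunk = 0; chunk < words.size(); chunk++) {
        for (uint32_t bit = 0; bit < BitsPerWord; bit++) {
            if (words[chunk] & (uint32_t(1) << bit))
                offsets.push_back((chunk * BitsPerWord + bit + 1) * sizeof(intptr_t));
        }
    }
    return offsets;
}

int main()
{
    std::vector<uint32_t> words;
    packSlot(words, 1 * sizeof(intptr_t));
    packSlot(words, 5 * sizeof(intptr_t));
    packSlot(words, 40 * sizeof(intptr_t));

    std::vector<uint32_t> offsets = unpackSlots(words);
    assert(offsets.size() == 3);
    assert(offsets[0] == 1 * sizeof(intptr_t));
    assert(offsets[1] == 5 * sizeof(intptr_t));
    assert(offsets[2] == 40 * sizeof(intptr_t));
    return 0;
}
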
@ -484,13 +467,12 @@ SafepointReader::advanceFromGcSlots()
// No, reset the counter.
currentSlotChunk_ = 0;
nextSlotChunkNumber_ = 0;
currentSlotsAreStack_ = true;
}
bool
SafepointReader::getValueSlot(SafepointSlotEntry *entry)
SafepointReader::getValueSlot(uint32_t *slot)
{
if (getSlotFromBitmap(entry))
if (getSlotFromBitmap(slot))
return true;
advanceFromValueSlots();
return false;
@ -549,11 +531,10 @@ SafepointReader::advanceFromNunboxSlots()
}
bool
SafepointReader::getSlotsOrElementsSlot(SafepointSlotEntry *entry)
SafepointReader::getSlotsOrElementsSlot(uint32_t *slot)
{
if (!slotsOrElementsSlotsRemaining_--)
return false;
entry->stack = true;
entry->slot = stream_.readUnsigned();
*slot = stream_.readUnsigned();
return true;
}

View File

@ -14,9 +14,7 @@
namespace js {
namespace jit {
struct SafepointSlotEntry;
struct SafepointNunboxEntry;
class LAllocation;
class LSafepoint;
@ -26,10 +24,9 @@ class SafepointWriter
{
CompactBufferWriter stream_;
BitSet frameSlots_;
BitSet argumentSlots_;
public:
explicit SafepointWriter(uint32_t slotCount, uint32_t argumentCount);
explicit SafepointWriter(uint32_t slotCount);
bool init(TempAllocator &alloc);
private:
@ -64,9 +61,7 @@ class SafepointReader
{
CompactBufferReader stream_;
uint32_t frameSlots_;
uint32_t argumentSlots_;
uint32_t currentSlotChunk_;
bool currentSlotsAreStack_;
uint32_t nextSlotChunkNumber_;
uint32_t osiCallPointOffset_;
GeneralRegisterSet gcSpills_;
@ -82,7 +77,7 @@ class SafepointReader
void advanceFromGcSlots();
void advanceFromValueSlots();
void advanceFromNunboxSlots();
bool getSlotFromBitmap(SafepointSlotEntry *entry);
bool getSlotFromBitmap(uint32_t *slot);
public:
SafepointReader(IonScript *script, const SafepointIndex *si);
@ -110,17 +105,17 @@ class SafepointReader
uint32_t osiReturnPointOffset() const;
// Returns true if a slot was read, false if there are no more slots.
bool getGcSlot(SafepointSlotEntry *entry);
bool getGcSlot(uint32_t *slot);
// Returns true if a slot was read, false if there are no more value slots.
bool getValueSlot(SafepointSlotEntry *entry);
bool getValueSlot(uint32_t *slot);
// Returns true if a nunbox slot was read, false if there are no more
// nunbox slots.
bool getNunboxSlot(LAllocation *type, LAllocation *payload);
// Returns true if a slot was read, false if there are no more slots.
bool getSlotsOrElementsSlot(SafepointSlotEntry *entry);
bool getSlotsOrElementsSlot(uint32_t *slot);
};
} // namespace jit

View File

@ -52,7 +52,7 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator *gen, LIRGraph *graph, Mac
pushedArgs_(0),
#endif
lastOsiPointOffset_(0),
safepoints_(graph->totalSlotCount(), (gen->info().nargs() + 1) * sizeof(Value)),
safepoints_(graph->totalSlotCount()),
nativeToBytecodeMap_(nullptr),
nativeToBytecodeMapSize_(0),
nativeToBytecodeTableOffset_(0),
@ -269,6 +269,7 @@ ToStackIndex(LAllocation *a)
MOZ_ASSERT(a->toStackSlot()->slot() >= 1);
return a->toStackSlot()->slot();
}
MOZ_ASSERT(-int32_t(sizeof(JitFrameLayout)) <= a->toArgument()->index());
return -int32_t(sizeof(JitFrameLayout) + a->toArgument()->index());
}
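
For context on the added assertion: ToStackIndex folds both allocation kinds into one signed index space, keeping the positive byte distance for stack slots and encoding argument allocations as negative offsets that reach past the JitFrameLayout header. A parametric sketch of that encoding, where frameHeaderSize merely stands in for sizeof(JitFrameLayout):

#include <cassert>
#include <cstdint>

// Positive indices: byte distance of a spilled slot below fp.
static int32_t stackSlotIndex(uint32_t slot)
{
    assert(slot >= 1);
    return int32_t(slot);
}

// Negative indices: past the frame header into the passed-argument area.
static int32_t argumentIndex(uint32_t frameHeaderSize, uint32_t argByteOffset)
{
    return -int32_t(frameHeaderSize + argByteOffset);
}

int main()
{
    const uint32_t frameHeaderSize = 32;            // assumed, for illustration
    assert(stackSlotIndex(16) == 16);
    assert(argumentIndex(frameHeaderSize, 8) == -40);
    // Reversing the encoding recovers the byte offset into the argument area.
    assert(uint32_t(-argumentIndex(frameHeaderSize, 8)) - frameHeaderSize == 8);
    return 0;
}
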
@ -1205,6 +1206,22 @@ CodeGeneratorShared::emitPreBarrier(Address address)
masm.patchableCallPreBarrier(address, MIRType_Value);
}
void
CodeGeneratorShared::dropArguments(unsigned argc)
{
pushedArgumentSlots_.shrinkBy(argc);
}
bool
CodeGeneratorShared::markArgumentSlots(LSafepoint *safepoint)
{
for (size_t i = 0; i < pushedArgumentSlots_.length(); i++) {
if (!safepoint->addValueSlot(pushedArgumentSlots_[i]))
return false;
}
return true;
}
Label *
CodeGeneratorShared::labelForBackedgeWithImplicitCheck(MBasicBlock *mir)
{

View File

@ -85,6 +85,9 @@ class CodeGeneratorShared : public LElementVisitor
// Vector of information about generated polymorphic inline caches.
js::Vector<uint32_t, 0, SystemAllocPolicy> cacheList_;
// List of stack slots that have been pushed as arguments to an MCall.
js::Vector<uint32_t, 0, SystemAllocPolicy> pushedArgumentSlots_;
// Patchable backedges generated for loops.
Vector<PatchableBackedgeInfo, 0, SystemAllocPolicy> patchableBackedges_;
@ -145,6 +148,9 @@ class CodeGeneratorShared : public LElementVisitor
typedef js::Vector<SafepointIndex, 8, SystemAllocPolicy> SafepointIndices;
bool markArgumentSlots(LSafepoint *safepoint);
void dropArguments(unsigned argc);
protected:
#ifdef CHECK_OSIPOINT_REGISTERS
// See js_JitOptions.checkOsiPointRegisters. We set this here to avoid