Backed out changeset 45805d1b90d4.

This commit is contained in:
David Anderson 2010-10-18 11:24:27 -07:00
parent f7ead7aafa
commit c86b6fb598
15 changed files with 468 additions and 648 deletions

View File

@ -49,73 +49,10 @@
#include "assembler/moco/MocoStubs.h"
#include "methodjit/MethodJIT.h"
#include "methodjit/MachineRegs.h"
#include "CodeGenIncludes.h"
namespace js {
namespace mjit {
/*
 * Option-like wrapper around a machine register: either it holds a
 * RegisterID or it is empty. reg() asserts that a register is present.
 * The implicit RegisterID constructor/assignment lets a plain register
 * be passed wherever a MaybeRegisterID is expected.
 */
class MaybeRegisterID {
    typedef JSC::MacroAssembler::RegisterID RegisterID;

  public:
    // Empty: no register tracked. reg_ is given a harmless placeholder.
    MaybeRegisterID()
      : reg_(Registers::ReturnReg), hasReg_(false)
    { }

    // Wrap a concrete register (intentionally implicit).
    MaybeRegisterID(RegisterID reg)
      : reg_(reg), hasReg_(true)
    { }

    inline bool isSet() const { return hasReg_; }
    inline RegisterID reg() const { JS_ASSERT(hasReg_); return reg_; }
    inline void setReg(const RegisterID r) { hasReg_ = true; reg_ = r; }

    MaybeRegisterID & operator =(const MaybeRegisterID &other) {
        hasReg_ = other.hasReg_;
        reg_ = other.reg_;
        return *this;
    }
    MaybeRegisterID & operator =(RegisterID r) {
        setReg(r);
        return *this;
    }

  private:
    RegisterID reg_;
    bool hasReg_;
};
// Represents an int32 property name in generated code, which must be either
// a RegisterID or a constant value.
struct Int32Key {
    typedef JSC::MacroAssembler::RegisterID RegisterID;

    MaybeRegisterID reg_;
    int32 index_;

    Int32Key() : index_(0) { }

    // Build a key whose value lives in |reg|.
    static Int32Key FromRegister(RegisterID reg) {
        Int32Key k;
        k.reg_ = reg;
        return k;
    }

    // Build a key with the compile-time constant |index|.
    static Int32Key FromConstant(int32 index) {
        Int32Key k;
        k.index_ = index;
        return k;
    }

    // A key is constant exactly when no register was recorded.
    bool isConstant() const { return !reg_.isSet(); }

    int32 index() const {
        JS_ASSERT(isConstant());
        return index_;
    }

    RegisterID reg() const { return reg_.reg(); }
};
class MaybeJump {
typedef JSC::MacroAssembler::Jump Jump;
public:
@ -135,6 +72,8 @@ class MaybeJump {
bool set;
};
//#define JS_METHODJIT_PROFILE_STUBS
struct FrameAddress : JSC::MacroAssembler::Address
{
FrameAddress(int32 offset)
@ -149,7 +88,7 @@ struct ImmIntPtr : public JSC::MacroAssembler::ImmPtr
{ }
};
class Assembler : public ValueAssembler
class BaseAssembler : public JSC::MacroAssembler
{
struct CallPatch {
CallPatch(Call cl, void *fun)
@ -177,7 +116,7 @@ class Assembler : public ValueAssembler
// This callLabel is to record the Label exactly after the call.
Label callLabel;
#endif
Assembler()
BaseAssembler()
: callPatches(SystemAllocPolicy())
{
startLabel = label();
@ -186,6 +125,15 @@ class Assembler : public ValueAssembler
/* Total number of floating-point registers. */
static const uint32 TotalFPRegisters = FPRegisters::TotalFPRegisters;
/*
* JSFrameReg is used to home the current JSStackFrame*.
*/
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
static const RegisterID JSFrameReg = JSC::X86Registers::ebx;
#elif defined(JS_CPU_ARM)
static const RegisterID JSFrameReg = JSC::ARMRegisters::r11;
#endif
/* Register pair storing returned type/data for calls. */
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::X86Registers::ecx;
@ -197,6 +145,14 @@ static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegiste
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegisters::r1;
#endif
bool addressUsesRegister(Address address, RegisterID reg) {
return address.base == reg;
}
bool addressUsesRegister(BaseIndex address, RegisterID reg) {
return (address.base == reg) || (address.index == reg);
}
size_t distanceOf(Label l) {
return differenceBetween(startLabel, l);
}
@ -314,6 +270,13 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegiste
}
Call wrapCall(void *pfun) {
#ifdef JS_METHODJIT_PROFILE_STUBS
push(Registers::ArgReg0);
push(Registers::ArgReg1);
call(JS_FUNC_TO_DATA_PTR(void *, mjit::ProfileStubCall));
pop(Registers::ArgReg1);
pop(Registers::ArgReg0);
#endif
#if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
push(Registers::ArgReg1);
push(Registers::ArgReg0);
@ -372,61 +335,6 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegiste
linker.link(patch.call, JSC::FunctionPtr(patch.fun));
}
}
// Pair of guard jumps emitted by fastArrayLoad(): taken respectively when
// the index is out of the array's capacity range, or when the loaded slot
// holds a hole. Callers link both to a slow path.
struct FastArrayLoadFails {
Jump rangeCheck;
Jump holeCheck;
};
// Load a jsval from an array slot, given a key. |objReg| is clobbered.
// On the fast path, the value's type ends up in |typeReg| and its payload
// in |dataReg|. Returns the two failure jumps (range check and hole check)
// for the caller to link to a slow path.
FastArrayLoadFails fastArrayLoad(RegisterID objReg, const Int32Key &key,
RegisterID typeReg, RegisterID dataReg) {
// objReg is reused as the dslots pointer below, so it must not alias
// the type output register.
JS_ASSERT(objReg != typeReg);
FastArrayLoadFails fails;
Address capacity(objReg, offsetof(JSObject, capacity));
// Check that the id is within range.
if (key.isConstant()) {
JS_ASSERT(key.index() >= 0);
fails.rangeCheck = branch32(LessThanOrEqual, payloadOf(capacity), Imm32(key.index()));
} else {
fails.rangeCheck = branch32(LessThanOrEqual, payloadOf(capacity), key.reg());
}
// Overwrite objReg with the slots pointer; the object pointer is no
// longer needed past this point.
RegisterID dslotsReg = objReg;
loadPtr(Address(objReg, offsetof(JSObject, slots)), dslotsReg);
// Load the slot out of the array.
if (key.isConstant()) {
Address slot(objReg, key.index() * sizeof(Value));
fails.holeCheck = fastArrayLoadSlot(slot, typeReg, dataReg);
} else {
BaseIndex slot(objReg, key.reg(), JSVAL_SCALE);
fails.holeCheck = fastArrayLoadSlot(slot, typeReg, dataReg);
}
return fails;
}
// Load the object's class pointer (JSObject::clasp) into |destReg|.
void loadObjClass(RegisterID objReg, RegisterID destReg) {
loadPtr(Address(objReg, offsetof(JSObject, clasp)), destReg);
}
// Compare an already-loaded class pointer in |claspReg| against |clasp|;
// returns the jump taken when |cond| holds.
Jump testClass(Condition cond, RegisterID claspReg, js::Class *clasp) {
return branchPtr(cond, claspReg, ImmPtr(clasp));
}
// Compare the class of the object in |objReg| (loaded from memory, not a
// register) against |clasp|; returns the jump taken when |cond| holds.
Jump testObjClass(Condition cond, RegisterID objReg, js::Class *clasp) {
return branchPtr(cond, Address(objReg, offsetof(JSObject, clasp)), ImmPtr(clasp));
}
// Rematerialize the payload described by |remat| into |reg|: load it
// from memory if that is where it lives, otherwise move it from its
// current register.
void rematPayload(const StateRemat &remat, RegisterID reg) {
    if (!remat.inMemory()) {
        move(remat.reg(), reg);
        return;
    }
    loadPayload(remat.address(), reg);
}
};
/* Return f<true> if the script is strict mode code, f<false> otherwise. */
@ -435,9 +343,10 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegiste
f<true>, f<false>))
/* Save some typing. */
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = Assembler::JSReturnReg_Type;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = Assembler::JSReturnReg_Data;
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = Assembler::JSParamReg_Argc;
static const JSC::MacroAssembler::RegisterID JSFrameReg = BaseAssembler::JSFrameReg;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = BaseAssembler::JSReturnReg_Type;
static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = BaseAssembler::JSReturnReg_Data;
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = BaseAssembler::JSParamReg_Argc;
struct FrameFlagsAddress : JSC::MacroAssembler::Address
{

View File

@ -48,7 +48,6 @@
#else
# error "Neither JS_NUNBOX32 nor JS_PUNBOX64 is defined."
#endif
#include "BaseAssembler.h"
/* Get a label for assertion purposes. Prevent #ifdef clutter. */
#ifdef DEBUG

View File

@ -526,9 +526,10 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
for (size_t i = 0; i < pics.length(); i++) {
pics[i].copySimpleMembersTo(scriptPICs[i]);
scriptPICs[i].fastPathStart = fullCode.locationOf(pics[i].fastPathStart);
scriptPICs[i].fastPathRejoin = fullCode.locationOf(pics[i].fastPathRejoin);
scriptPICs[i].storeBack = fullCode.locationOf(pics[i].storeBack);
scriptPICs[i].slowPathStart = stubCode.locationOf(pics[i].slowPathStart);
scriptPICs[i].slowPathCall = stubCode.locationOf(pics[i].slowPathCall);
scriptPICs[i].callReturn = uint16((uint8*)stubCode.locationOf(pics[i].callReturn).executableAddress() -
(uint8*)scriptPICs[i].slowPathStart.executableAddress());
scriptPICs[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
masm.distanceOf(pics[i].fastPathStart);
JS_ASSERT(scriptPICs[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
@ -552,7 +553,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
}
new (&scriptPICs[i].execPools) ic::PICInfo::ExecPoolVector(SystemAllocPolicy());
scriptPICs[i].reset();
stubCode.patch(pics[i].paramAddr, &scriptPICs[i]);
stubCode.patch(pics[i].addrLabel, &scriptPICs[i]);
}
}
#endif /* JS_POLYIC */
@ -2466,7 +2467,7 @@ mjit::Compiler::passMICAddress(MICGenInfo &mic)
void
mjit::Compiler::passPICAddress(PICGenInfo &pic)
{
pic.paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
pic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
}
bool
@ -2541,7 +2542,7 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::GetProp);
pic.callReturn = stubcc.call(ic::GetProp);
/* Load dslots. */
#if defined JS_NUNBOX32
@ -2565,25 +2566,25 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
Label inlineValueLoadLabel =
masm.loadValueAsComponents(slot, shapeReg, objReg);
#endif
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
/* Assert correctness of hardcoded offsets. */
RETURN_IF_OOM(false);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
#elif defined JS_PUNBOX64
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel);
JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel));
pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueLoadLabel);
JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueLoadLabel));
JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
#endif
@ -2643,7 +2644,7 @@ mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID obj
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::GetElem);
pic.callReturn = stubcc.call(ic::GetElem);
/* Load dslots. */
#if defined JS_NUNBOX32
@ -2664,23 +2665,23 @@ mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID obj
Label inlineValueOffsetLabel =
masm.loadValueAsComponents(slot, shapeReg, objReg);
#endif
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
pic.objReg = objReg;
pic.idReg = idReg;
RETURN_IF_OOM(false);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETELEM_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETELEM_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETELEM_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineAtomOffsetLabel) == GETELEM_INLINE_ATOM_OFFSET);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineAtomJump) == GETELEM_INLINE_ATOM_JUMP);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel) == GETELEM_INLINE_SHAPE_OFFSET);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETELEM_INLINE_SHAPE_JUMP);
#elif defined JS_PUNBOX64
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
pic.labels.getelem.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
JS_ASSERT(pic.labels.getelem.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
pic.labels.getelem.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel);
JS_ASSERT(pic.labels.getelem.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel));
@ -2688,8 +2689,8 @@ mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID obj
pic.labels.getelem.inlineAtomOffset = masm.differenceBetween(pic.shapeGuard, inlineAtomOffsetLabel);
JS_ASSERT(pic.labels.getelem.inlineAtomOffset == masm.differenceBetween(pic.shapeGuard, inlineAtomOffsetLabel));
pic.labels.getelem.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueOffsetLabel);
JS_ASSERT(pic.labels.getelem.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueOffsetLabel));
pic.labels.getelem.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueOffsetLabel);
JS_ASSERT(pic.labels.getelem.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueOffsetLabel));
JS_ASSERT(masm.differenceBetween(inlineShapeOffsetLabel, dbgInlineShapeJump) == GETELEM_INLINE_SHAPE_JUMP);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineAtomJump) ==
@ -2771,7 +2772,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
/* Slow path. */
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::CallProp);
pic.callReturn = stubcc.call(ic::CallProp);
/* Adjust the frame. None of this will generate code. */
frame.pop();
@ -2799,26 +2800,26 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
Label inlineValueLoadLabel =
masm.loadValueAsComponents(slot, shapeReg, objReg);
#endif
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
/* Assert correctness of hardcoded offsets. */
RETURN_IF_OOM(false);
JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgInlineTypeGuard) == GETPROP_INLINE_TYPE_GUARD);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
#elif defined JS_PUNBOX64
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel);
JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel));
pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueLoadLabel);
JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueLoadLabel));
JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
#endif
@ -2918,7 +2919,7 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::CallProp);
pic.callReturn = stubcc.call(ic::CallProp);
/* Load dslots. */
#if defined JS_NUNBOX32
@ -2942,14 +2943,14 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
masm.loadValueAsComponents(slot, shapeReg, objReg);
#endif
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
pic.objReg = objReg;
/*
* 1) Dup the |this| object.
* 2) Push the property value onto the stack.
* 3) Move the value below the dup'd |this|, uncopying it. This could
* generate code, thus the fastPathRejoin label being prior. This is safe
* generate code, thus the storeBack label being prior. This is safe
* as a stack transition, because JSOP_CALLPROP has JOF_TMPSLOT. It is
* also safe for correctness, because if we know the LHS is an object, it
* is the resulting vp[1].
@ -2964,20 +2965,20 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
*/
RETURN_IF_OOM(false);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETPROP_DATA_LOAD);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
#elif defined JS_PUNBOX64
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel);
JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel));
pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueLoadLabel);
JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueLoadLabel));
JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
#endif
@ -3086,7 +3087,7 @@ mjit::Compiler::jsop_setprop(JSAtom *atom)
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::SetProp);
pic.callReturn = stubcc.call(ic::SetProp);
}
/* Load dslots. */
@ -3105,7 +3106,7 @@ mjit::Compiler::jsop_setprop(JSAtom *atom)
masm.storeValue(vr, slot);
#endif
DBGLABEL(dbgAfterValueStore);
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
frame.freeReg(objReg);
frame.freeReg(shapeReg);
@ -3122,26 +3123,26 @@ mjit::Compiler::jsop_setprop(JSAtom *atom)
RETURN_IF_OOM(false);
#if defined JS_PUNBOX64
pic.labels.setprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
pic.labels.setprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
pic.labels.setprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel);
JS_ASSERT(masm.differenceBetween(inlineShapeOffsetLabel, dbgInlineShapeJump) == SETPROP_INLINE_SHAPE_JUMP);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore) == SETPROP_INLINE_STORE_VALUE);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore) == SETPROP_INLINE_STORE_VALUE);
#elif defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel) == SETPROP_INLINE_SHAPE_OFFSET);
JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == SETPROP_INLINE_SHAPE_JUMP);
if (vr.isConstant()) {
if (vr.isConstant) {
/* Constants are offset inside the opcode by 4. */
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_CONST_TYPE);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore)-4 == SETPROP_INLINE_STORE_CONST_DATA);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslots) == SETPROP_DSLOTS_BEFORE_CONSTANT);
} else if (vr.isTypeKnown()) {
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_KTYPE_TYPE);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore) == SETPROP_INLINE_STORE_KTYPE_DATA);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslots) == SETPROP_DSLOTS_BEFORE_KTYPE);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_CONST_TYPE);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore)-4 == SETPROP_INLINE_STORE_CONST_DATA);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslots) == SETPROP_DSLOTS_BEFORE_CONSTANT);
} else if (vr.u.s.isTypeKnown) {
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_KTYPE_TYPE);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore) == SETPROP_INLINE_STORE_KTYPE_DATA);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslots) == SETPROP_DSLOTS_BEFORE_KTYPE);
} else {
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgInlineStoreType) == SETPROP_INLINE_STORE_DYN_TYPE);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore) == SETPROP_INLINE_STORE_DYN_DATA);
JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslots) == SETPROP_DSLOTS_BEFORE_DYNAMIC);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgInlineStoreType) == SETPROP_INLINE_STORE_DYN_TYPE);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore) == SETPROP_INLINE_STORE_DYN_DATA);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslots) == SETPROP_DSLOTS_BEFORE_DYNAMIC);
}
#endif
@ -3168,10 +3169,10 @@ mjit::Compiler::jsop_name(JSAtom *atom)
pic.slowPathStart = stubcc.linkExit(j, Uses(0));
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::Name);
pic.callReturn = stubcc.call(ic::Name);
}
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
frame.pushRegs(pic.shapeReg, pic.objReg);
JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgJumpOffset) == SCOPENAME_JUMP_OFFSET);
@ -3210,10 +3211,10 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
pic.slowPathStart = stubcc.linkExit(j, Uses(1));
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::XName);
pic.callReturn = stubcc.call(ic::XName);
}
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
frame.pop();
frame.pushRegs(pic.shapeReg, pic.objReg);
@ -3253,10 +3254,10 @@ mjit::Compiler::jsop_bindname(uint32 index)
pic.slowPathStart = stubcc.linkExit(j, Uses(0));
stubcc.leave();
passPICAddress(pic);
pic.slowPathCall = stubcc.call(ic::BindName);
pic.callReturn = stubcc.call(ic::BindName);
}
pic.fastPathRejoin = masm.label();
pic.storeBack = masm.label();
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
frame.freeReg(pic.shapeReg);
@ -3749,7 +3750,8 @@ mjit::Compiler::iterNext()
frame.unpinReg(reg);
/* Test clasp */
Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
Jump notFast = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
stubcc.linkExit(notFast, Uses(1));
/* Get private from iter obj. */
@ -3803,7 +3805,8 @@ mjit::Compiler::iterMore()
frame.unpinReg(reg);
/* Test clasp */
Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
Jump notFast = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
stubcc.linkExitForBranch(notFast);
/* Get private from iter obj. */
@ -3848,7 +3851,8 @@ mjit::Compiler::iterEnd()
frame.unpinReg(reg);
/* Test clasp */
Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
Jump notIterator = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
stubcc.linkExit(notIterator, Uses(1));
/* Get private from iter obj. :FIXME: X64 */

View File

@ -138,19 +138,15 @@ class Compiler : public BaseCompiler
};
#if defined JS_POLYIC
// Labels shared by every inline cache generated by the compiler: where the
// inline fast path starts and rejoins, where the out-of-line slow path
// begins, the slow-path stub call, and the patchable pointer load through
// which the runtime IC struct's address is supplied (patched in
// finishThisUp, written by passPICAddress).
struct BaseICInfo {
Label fastPathStart;
Label fastPathRejoin;
Label slowPathStart;
Call slowPathCall;
DataLabelPtr paramAddr;
};
struct PICGenInfo : public BaseICInfo {
struct PICGenInfo {
PICGenInfo(ic::PICInfo::Kind kind) : kind(kind)
{ }
ic::PICInfo::Kind kind;
Label fastPathStart;
Label storeBack;
Label typeCheck;
Label slowPathStart;
DataLabelPtr addrLabel;
RegisterID shapeReg;
RegisterID objReg;
RegisterID idReg;
@ -159,24 +155,25 @@ class Compiler : public BaseCompiler
JSAtom *atom;
StateRemat objRemat;
StateRemat idRemat;
Call callReturn;
bool hasTypeCheck;
ValueRemat vr;
# if defined JS_CPU_X64
ic::PICLabels labels;
# endif
void copySimpleMembersTo(ic::PICInfo &ic) const {
ic.kind = kind;
ic.shapeReg = shapeReg;
ic.objReg = objReg;
ic.atom = atom;
if (ic.isSet()) {
ic.u.vr = vr;
} else if (ic.isGet()) {
ic.u.get.idReg = idReg;
ic.u.get.typeReg = typeReg;
ic.u.get.hasTypeCheck = hasTypeCheck;
ic.setObjRemat(objRemat);
void copySimpleMembersTo(ic::PICInfo &pi) const {
pi.kind = kind;
pi.shapeReg = shapeReg;
pi.objReg = objReg;
pi.atom = atom;
if (kind == ic::PICInfo::SET) {
pi.u.vr = vr;
} else if (kind != ic::PICInfo::NAME) {
pi.u.get.idReg = idReg;
pi.u.get.typeReg = typeReg;
pi.u.get.hasTypeCheck = hasTypeCheck;
pi.u.get.objRemat = objRemat.offset;
}
}

View File

@ -1201,7 +1201,9 @@ mjit::Compiler::jsop_setelem()
/* obj.isDenseArray() */
RegisterID objReg = frame.copyDataIntoReg(obj);
Jump guardDense = masm.testObjClass(Assembler::NotEqual, objReg, &js_ArrayClass);
Jump guardDense = masm.branchPtr(Assembler::NotEqual,
Address(objReg, offsetof(JSObject, clasp)),
ImmPtr(&js_ArrayClass));
stubcc.linkExit(guardDense, Uses(3));
/* guard within capacity */
@ -1346,18 +1348,54 @@ mjit::Compiler::jsop_getelem_dense(FrameEntry *obj, FrameEntry *id, RegisterID o
MaybeRegisterID &idReg, RegisterID tmpReg)
{
/* Note: idReg is only valid if id is not a constant. */
Jump guardDense = masm.testObjClass(Assembler::NotEqual, objReg, &js_ArrayClass);
Jump guardDense = masm.branchPtr(Assembler::NotEqual,
Address(objReg, offsetof(JSObject, clasp)),
ImmPtr(&js_ArrayClass));
stubcc.linkExit(guardDense, Uses(2));
Int32Key key = idReg.isSet()
? Int32Key::FromRegister(idReg.reg())
: Int32Key::FromConstant(id->getValue().toInt32());
/* Guard within capacity. */
Jump inRange;
Address capacity(objReg, offsetof(JSObject, capacity));
if (id->isConstant()) {
inRange = masm.branch32(Assembler::LessThanOrEqual, capacity,
Imm32(id->getValue().toInt32()));
} else {
inRange = masm.branch32(Assembler::AboveOrEqual, idReg.reg(), capacity);
}
stubcc.linkExit(inRange, Uses(2));
Assembler::FastArrayLoadFails fails =
masm.fastArrayLoad(objReg, key, tmpReg, objReg);
/* load dslots */
masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
stubcc.linkExit(fails.rangeCheck, Uses(2));
stubcc.linkExit(fails.holeCheck, Uses(2));
/* guard within capacity */
if (id->isConstant()) {
/* guard not a hole */
Address slot(objReg, id->getValue().toInt32() * sizeof(Value));
#if defined JS_NUNBOX32
masm.loadTypeTag(slot, tmpReg);
Jump notHole = masm.branchPtr(Assembler::Equal, tmpReg, ImmType(JSVAL_TYPE_MAGIC));
masm.loadPayload(slot, objReg);
#elif defined JS_PUNBOX64
masm.loadValueAsComponents(slot, tmpReg, objReg);
Jump notHole = masm.branchPtr(Assembler::Equal, tmpReg, ImmType(JSVAL_TYPE_MAGIC));
#endif
stubcc.linkExit(notHole, Uses(2));
} else {
/* guard not a hole */
BaseIndex slot(objReg, idReg.reg(), Assembler::JSVAL_SCALE);
#if defined JS_NUNBOX32
masm.loadTypeTag(slot, tmpReg);
Jump notHole = masm.branchPtr(Assembler::Equal, tmpReg, ImmType(JSVAL_TYPE_MAGIC));
masm.loadPayload(slot, objReg);
#elif defined JS_PUNBOX64
masm.loadValueAsComponents(slot, tmpReg, objReg);
Jump notHole = masm.branchPtr(Assembler::Equal, tmpReg, ImmType(JSVAL_TYPE_MAGIC));
#endif
stubcc.linkExit(notHole, Uses(2));
}
/* Postcondition: type must be in tmpReg, data must be in objReg. */
/* Note: linkExits will be hooked up to a leave() after this method completes. */
}
bool

View File

@ -925,12 +925,16 @@ FrameState::dataRematInfo(const FrameEntry *fe) const
{
if (fe->isCopy())
fe = fe->copyOf();
if (fe->data.inRegister())
return StateRemat::FromRegister(fe->data.reg());
JS_ASSERT(fe->data.synced());
return StateRemat::FromAddress(addressOf(fe));
StateRemat remat;
if (fe->data.inRegister()) {
remat.reg = fe->data.reg();
remat.inReg = true;
} else {
JS_ASSERT(fe->data.synced());
remat.offset = addressOf(fe).offset;
remat.inReg = false;
}
return remat;
}
inline void

View File

@ -1305,39 +1305,32 @@ FrameState::shift(int32 n)
// Pin |fe|'s backing registers so they cannot spill, and fill |vr| with a
// description of where the value lives (constant, known-type register, or
// type+data register pair).
void
FrameState::pinEntry(FrameEntry *fe, ValueRemat &vr)
{
// NOTE(review): the two `if (fe->isConstant())` chains below appear to be
// two variants of this function concatenated by a stripped diff (one uses
// ValueRemat factory methods, the other assigns vr.isConstant/vr.u fields
// directly). Only one variant can be the real body -- reconcile against
// the repository before relying on this text.
if (fe->isConstant()) {
vr = ValueRemat::FromConstant(fe->getValue());
} else {
// Pin the type register so it can't spill.
MaybeRegisterID maybePinnedType = maybePinType(fe);
// Get and pin the data register.
RegisterID dataReg = tempRegForData(fe);
pinReg(dataReg);
if (fe->isTypeKnown()) {
vr = ValueRemat::FromKnownType(fe->getKnownType(), dataReg);
} else {
// The type might not be loaded yet, so unpin for simplicity.
maybeUnpinReg(maybePinnedType);
vr = ValueRemat::FromRegisters(tempRegForType(fe), dataReg);
pinReg(vr.typeReg());
}
}
// Set these bits last, since allocation could have caused a sync.
vr.isDataSynced = fe->data.synced();
vr.isTypeSynced = fe->type.synced();
// NOTE(review): second variant begins here (diff residue, see above).
if (fe->isConstant()) {
vr.isConstant = true;
vr.u.v = Jsvalify(fe->getValue());
} else {
vr.isConstant = false;
vr.u.s.isTypeKnown = fe->isTypeKnown();
if (vr.u.s.isTypeKnown) {
vr.u.s.type.knownType = fe->getKnownType();
} else {
vr.u.s.type.reg = tempRegForType(fe);
pinReg(vr.u.s.type.reg);
}
vr.u.s.data = tempRegForData(fe);
pinReg(vr.u.s.data);
}
}
// Release the register pins taken by pinEntry(); constants pinned nothing.
void
FrameState::unpinEntry(const ValueRemat &vr)
{
// NOTE(review): two variants of this body (method-based vr.isConstant()
// vs. field-based vr.isConstant) appear concatenated here and the braces
// do not balance -- this is stripped-diff residue; reconcile against the
// repository before relying on this text.
if (!vr.isConstant()) {
if (!vr.isTypeKnown())
unpinReg(vr.typeReg());
unpinReg(vr.dataReg());
if (!vr.isConstant) {
if (!vr.u.s.isTypeKnown)
unpinReg(vr.u.s.type.reg);
unpinReg(vr.u.s.data);
}
}
@ -1348,17 +1341,17 @@ FrameState::ensureValueSynced(Assembler &masm, FrameEntry *fe, const ValueRemat
if (!vr.isDataSynced || !vr.isTypeSynced)
masm.storeValue(vr, addressOf(fe));
#elif defined JS_NUNBOX32
if (vr.isConstant()) {
if (vr.isConstant) {
if (!vr.isDataSynced || !vr.isTypeSynced)
masm.storeValue(vr.value(), addressOf(fe));
masm.storeValue(Valueify(vr.u.v), addressOf(fe));
} else {
if (!vr.isDataSynced)
masm.storePayload(vr.dataReg(), addressOf(fe));
masm.storePayload(vr.u.s.data, addressOf(fe));
if (!vr.isTypeSynced) {
if (vr.isTypeKnown())
masm.storeTypeTag(ImmType(vr.knownType()), addressOf(fe));
if (vr.u.s.isTypeKnown)
masm.storeTypeTag(ImmType(vr.u.s.type.knownType), addressOf(fe));
else
masm.storeTypeTag(vr.typeReg(), addressOf(fe));
masm.storeTypeTag(vr.u.s.type.reg, addressOf(fe));
}
}
#endif
@ -1586,30 +1579,3 @@ FrameState::allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAllo
unpinReg(backingRight->data.reg());
}
MaybeRegisterID
FrameState::maybePinData(FrameEntry *fe)
{
if (fe->data.inRegister()) {
pinReg(fe->data.reg());
return fe->data.reg();
}
return MaybeRegisterID();
}
MaybeRegisterID
FrameState::maybePinType(FrameEntry *fe)
{
if (fe->type.inRegister()) {
pinReg(fe->type.reg());
return fe->type.reg();
}
return MaybeRegisterID();
}
void
FrameState::maybeUnpinReg(MaybeRegisterID reg)
{
if (reg.isSet())
unpinReg(reg.reg());
}

View File

@ -50,6 +50,15 @@
namespace js {
namespace mjit {
struct StateRemat {
typedef JSC::MacroAssembler::RegisterID RegisterID;
union {
RegisterID reg : 31;
uint32 offset : 31;
};
bool inReg : 1;
};
struct Uses {
explicit Uses(uint32 nuses)
: nuses(nuses)
@ -64,6 +73,38 @@ struct Changes {
uint32 nchanges;
};
class MaybeRegisterID {
typedef JSC::MacroAssembler::RegisterID RegisterID;
public:
MaybeRegisterID()
: reg_(Registers::ReturnReg), set(false)
{ }
MaybeRegisterID(RegisterID reg)
: reg_(reg), set(true)
{ }
inline RegisterID reg() const { JS_ASSERT(set); return reg_; }
inline void setReg(const RegisterID r) { reg_ = r; set = true; }
inline bool isSet() const { return set; }
MaybeRegisterID & operator =(const MaybeRegisterID &other) {
set = other.set;
reg_ = other.reg_;
return *this;
}
MaybeRegisterID & operator =(RegisterID r) {
setReg(r);
return *this;
}
private:
RegisterID reg_;
bool set;
};
/*
* The FrameState keeps track of values on the frame during compilation.
* The compiler can query FrameState for information about arguments, locals,
@ -677,11 +718,6 @@ class FrameState
*/
inline void unpinKilledReg(RegisterID reg);
/* Pins a data or type register if one exists. */
MaybeRegisterID maybePinData(FrameEntry *fe);
MaybeRegisterID maybePinType(FrameEntry *fe);
void maybeUnpinReg(MaybeRegisterID reg);
/*
* Dups the top item on the stack.
*/

View File

@ -51,20 +51,13 @@ struct Registers {
typedef JSC::MacroAssembler::RegisterID RegisterID;
// Homed and scratch registers for working with Values on x64.
// TODO: Eliminate scratch register (requires rewriting register allocation mechanism)
#if defined(JS_CPU_X64)
static const RegisterID TypeMaskReg = JSC::X86Registers::r13;
static const RegisterID PayloadMaskReg = JSC::X86Registers::r14;
static const RegisterID ValueReg = JSC::X86Registers::r15;
#endif
// Register that homes the current JSStackFrame.
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
static const RegisterID JSFrameReg = JSC::X86Registers::ebx;
#elif defined(JS_CPU_ARM)
static const RegisterID JSFrameReg = JSC::ARMRegisters::r11;
#endif
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
static const RegisterID ReturnReg = JSC::X86Registers::eax;
# if defined(JS_CPU_X86) || defined(_MSC_VER)
@ -363,8 +356,6 @@ struct FPRegisters {
uint32 freeFPMask;
};
static const JSC::MacroAssembler::RegisterID JSFrameReg = Registers::JSFrameReg;
} /* namespace mjit */
} /* namespace js */

View File

@ -385,7 +385,9 @@ class CallCompiler : public BaseCompiler
RegisterID t0 = tempRegs.takeAnyReg();
/* Guard that it's actually a function object. */
Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, &js_FunctionClass);
Jump claspGuard = masm.branchPtr(Assembler::NotEqual,
Address(ic.funObjReg, offsetof(JSObject, clasp)),
ImmPtr(&js_FunctionClass));
/* Guard that it's the same function. */
JSFunction *fun = obj->getFunctionPrivate();

View File

@ -41,7 +41,7 @@
#if !defined jsjaeger_assembler_h__ && defined JS_METHODJIT && defined JS_NUNBOX32
#define jsjaeger_assembler_h__
#include "assembler/assembler/MacroAssembler.h"
#include "methodjit/BaseAssembler.h"
#include "methodjit/RematInfo.h"
namespace js {
@ -69,7 +69,7 @@ struct ImmPayload : JSC::MacroAssembler::Imm32
{ }
};
class NunboxAssembler : public JSC::MacroAssembler
class Assembler : public BaseAssembler
{
static const uint32 PAYLOAD_OFFSET = 0;
static const uint32 TAG_OFFSET = 4;
@ -139,14 +139,6 @@ class NunboxAssembler : public JSC::MacroAssembler
store32(imm, payloadOf(address));
}
bool addressUsesRegister(BaseIndex address, RegisterID reg) {
return (address.base == reg) || (address.index == reg);
}
bool addressUsesRegister(Address address, RegisterID reg) {
return address.base == reg;
}
/* Loads type first, then payload, returning label after type load. */
template <typename T>
Label loadValueAsComponents(T address, RegisterID type, RegisterID payload) {
@ -193,15 +185,15 @@ class NunboxAssembler : public JSC::MacroAssembler
template <typename T>
Label storeValue(const ValueRemat &vr, T address) {
if (vr.isConstant()) {
return storeValue(vr.value(), address);
if (vr.isConstant) {
return storeValue(Valueify(vr.u.v), address);
} else {
if (vr.isTypeKnown())
storeTypeTag(ImmType(vr.knownType()), address);
if (vr.u.s.isTypeKnown)
storeTypeTag(ImmType(vr.u.s.type.knownType), address);
else
storeTypeTag(vr.typeReg(), address);
storeTypeTag(vr.u.s.type.reg, address);
Label l = label();
storePayload(vr.dataReg(), address);
storePayload(vr.u.s.data, address);
return l;
}
}
@ -215,103 +207,93 @@ class NunboxAssembler : public JSC::MacroAssembler
loadPtr(priv, to);
}
Jump testNull(Condition cond, RegisterID reg) {
Jump testNull(Assembler::Condition cond, RegisterID reg) {
return branch32(cond, reg, ImmTag(JSVAL_TAG_NULL));
}
Jump testNull(Condition cond, Address address) {
Jump testNull(Assembler::Condition cond, Address address) {
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_NULL));
}
Jump testUndefined(Condition cond, RegisterID reg) {
Jump testUndefined(Assembler::Condition cond, RegisterID reg) {
return branch32(cond, reg, ImmTag(JSVAL_TAG_UNDEFINED));
}
Jump testUndefined(Condition cond, Address address) {
Jump testUndefined(Assembler::Condition cond, Address address) {
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_UNDEFINED));
}
Jump testInt32(Condition cond, RegisterID reg) {
Jump testInt32(Assembler::Condition cond, RegisterID reg) {
return branch32(cond, reg, ImmTag(JSVAL_TAG_INT32));
}
Jump testInt32(Condition cond, Address address) {
Jump testInt32(Assembler::Condition cond, Address address) {
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_INT32));
}
Jump testNumber(Condition cond, RegisterID reg) {
cond = (cond == Equal) ? BelowOrEqual : Above;
Jump testNumber(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
return branch32(cond, reg, ImmTag(JSVAL_TAG_INT32));
}
Jump testNumber(Condition cond, Address address) {
cond = (cond == Equal) ? BelowOrEqual : Above;
Jump testNumber(Assembler::Condition cond, Address address) {
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_INT32));
}
Jump testPrimitive(Condition cond, RegisterID reg) {
cond = (cond == NotEqual) ? AboveOrEqual : Below;
Jump testPrimitive(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::NotEqual) ? Assembler::AboveOrEqual : Assembler::Below;
return branch32(cond, reg, ImmTag(JSVAL_TAG_OBJECT));
}
Jump testPrimitive(Condition cond, Address address) {
cond = (cond == NotEqual) ? AboveOrEqual : Below;
Jump testPrimitive(Assembler::Condition cond, Address address) {
cond = (cond == Assembler::NotEqual) ? Assembler::AboveOrEqual : Assembler::Below;
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_OBJECT));
}
Jump testObject(Condition cond, RegisterID reg) {
Jump testObject(Assembler::Condition cond, RegisterID reg) {
return branch32(cond, reg, ImmTag(JSVAL_TAG_OBJECT));
}
Jump testObject(Condition cond, Address address) {
Jump testObject(Assembler::Condition cond, Address address) {
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_OBJECT));
}
Jump testDouble(Condition cond, RegisterID reg) {
Condition opcond;
if (cond == Equal)
opcond = Below;
Jump testDouble(Assembler::Condition cond, RegisterID reg) {
Assembler::Condition opcond;
if (cond == Assembler::Equal)
opcond = Assembler::Below;
else
opcond = AboveOrEqual;
opcond = Assembler::AboveOrEqual;
return branch32(opcond, reg, ImmTag(JSVAL_TAG_CLEAR));
}
Jump testDouble(Condition cond, Address address) {
Condition opcond;
if (cond == Equal)
opcond = Below;
Jump testDouble(Assembler::Condition cond, Address address) {
Assembler::Condition opcond;
if (cond == Assembler::Equal)
opcond = Assembler::Below;
else
opcond = AboveOrEqual;
opcond = Assembler::AboveOrEqual;
return branch32(opcond, tagOf(address), ImmTag(JSVAL_TAG_CLEAR));
}
Jump testBoolean(Condition cond, RegisterID reg) {
Jump testBoolean(Assembler::Condition cond, RegisterID reg) {
return branch32(cond, reg, ImmTag(JSVAL_TAG_BOOLEAN));
}
Jump testBoolean(Condition cond, Address address) {
Jump testBoolean(Assembler::Condition cond, Address address) {
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_BOOLEAN));
}
Jump testString(Condition cond, RegisterID reg) {
Jump testString(Assembler::Condition cond, RegisterID reg) {
return branch32(cond, reg, ImmTag(JSVAL_TAG_STRING));
}
Jump testString(Condition cond, Address address) {
Jump testString(Assembler::Condition cond, Address address) {
return branch32(cond, tagOf(address), ImmTag(JSVAL_TAG_STRING));
}
template <typename T>
Jump fastArrayLoadSlot(T address, RegisterID typeReg, RegisterID dataReg) {
loadTypeTag(address, typeReg);
Jump notHole = branch32(Equal, typeReg, ImmType(JSVAL_TYPE_MAGIC));
loadPayload(address, dataReg);
return notHole;
}
};
typedef NunboxAssembler ValueAssembler;
} /* namespace mjit */
} /* namespace js */

View File

@ -121,7 +121,9 @@ class PICStubCompiler : public BaseCompiler
JITCode jitCode(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
CodeBlock codeBlock(jitCode);
RepatchBuffer repatcher(&codeBlock);
repatcher.relink(pic.slowPathCall, FunctionPtr(stub));
ReturnAddressPtr retPtr(pic.slowPathStart.callAtOffset(pic.callReturn).executableAddress());
MacroAssemblerCodePtr target(stub);
repatcher.relinkCallerToTrampoline(retPtr, target);
return true;
}
@ -161,9 +163,9 @@ class SetPropCompiler : public PICStubCompiler
static int32 dslotsLoadOffset(ic::PICInfo &pic) {
#if defined JS_NUNBOX32
if (pic.u.vr.isConstant())
if (pic.u.vr.isConstant)
return SETPROP_DSLOTS_BEFORE_CONSTANT;
if (pic.u.vr.isTypeKnown())
if (pic.u.vr.u.s.isTypeKnown)
return SETPROP_DSLOTS_BEFORE_KTYPE;
return SETPROP_DSLOTS_BEFORE_DYNAMIC;
#elif defined JS_PUNBOX64
@ -173,9 +175,9 @@ class SetPropCompiler : public PICStubCompiler
#if defined JS_NUNBOX32
inline int32 inlineTypeOffset() {
if (pic.u.vr.isConstant())
if (pic.u.vr.isConstant)
return SETPROP_INLINE_STORE_CONST_TYPE;
if (pic.u.vr.isTypeKnown())
if (pic.u.vr.u.s.isTypeKnown)
return SETPROP_INLINE_STORE_KTYPE_TYPE;
return SETPROP_INLINE_STORE_DYN_TYPE;
}
@ -183,9 +185,9 @@ class SetPropCompiler : public PICStubCompiler
#if defined JS_NUNBOX32
inline int32 inlineDataOffset() {
if (pic.u.vr.isConstant())
if (pic.u.vr.isConstant)
return SETPROP_INLINE_STORE_CONST_DATA;
if (pic.u.vr.isTypeKnown())
if (pic.u.vr.u.s.isTypeKnown)
return SETPROP_INLINE_STORE_KTYPE_DATA;
return SETPROP_INLINE_STORE_DYN_DATA;
}
@ -234,7 +236,7 @@ class SetPropCompiler : public PICStubCompiler
static void reset(ic::PICInfo &pic)
{
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset(pic)));
repatcher.repatchLEAToLoadPtr(pic.storeBack.instructionAtOffset(dslotsLoadOffset(pic)));
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
pic.shapeGuard + inlineShapeOffset(pic)),
int32(JSObjectMap::INVALID_SHAPE));
@ -243,8 +245,9 @@ class SetPropCompiler : public PICStubCompiler
pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
repatcher.relink(pic.slowPathCall, target);
ReturnAddressPtr retPtr(pic.slowPathStart.callAtOffset(pic.callReturn).executableAddress());
MacroAssemblerCodePtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
repatcher.relinkCallerToTrampoline(retPtr, target);
}
bool patchInline(const Shape *shape, bool inlineSlot)
@ -257,7 +260,7 @@ class SetPropCompiler : public PICStubCompiler
int32 offset;
if (inlineSlot) {
JSC::CodeLocationInstruction istr;
istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset());
istr = pic.storeBack.instructionAtOffset(dslotsLoadOffset());
repatcher.repatchLoadPtrToLEA(istr);
//
@ -278,10 +281,10 @@ class SetPropCompiler : public PICStubCompiler
uint32 shapeOffs = pic.shapeGuard + inlineShapeOffset();
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(shapeOffs), obj->shape());
#if defined JS_NUNBOX32
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(inlineTypeOffset()), offset + 4);
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(inlineDataOffset()), offset);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(inlineTypeOffset()), offset + 4);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(inlineDataOffset()), offset);
#elif defined JS_PUNBOX64
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(SETPROP_INLINE_STORE_VALUE), offset);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(SETPROP_INLINE_STORE_VALUE), offset);
#endif
pic.inlinePathPatched = true;
@ -371,11 +374,11 @@ class SetPropCompiler : public PICStubCompiler
*/
JS_ASSERT(shape->isMethod());
JSObject *funobj = &shape->methodObject();
if (pic.u.vr.isConstant()) {
JS_ASSERT(funobj == &pic.u.vr.value().toObject());
if (pic.u.vr.isConstant) {
JS_ASSERT(funobj == &Valueify(pic.u.vr.u.v).toObject());
} else {
Jump mismatchedFunction =
masm.branchPtr(Assembler::NotEqual, pic.u.vr.dataReg(), ImmPtr(funobj));
masm.branchPtr(Assembler::NotEqual, pic.u.vr.u.s.data, ImmPtr(funobj));
if (!slowExits.append(mismatchedFunction))
return false;
}
@ -493,9 +496,9 @@ class SetPropCompiler : public PICStubCompiler
buffer.link(slowExit.get(), pic.slowPathStart);
for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
buffer.link(*pj, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
if (skipOver.isSet())
buffer.link(skipOver.get(), pic.fastPathRejoin);
buffer.link(skipOver.get(), pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generate setprop stub %p %d %d at %p\n",
(void*)&pic,
@ -746,7 +749,7 @@ class GetPropCompiler : public PICStubCompiler
static void reset(ic::PICInfo &pic)
{
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoad(pic)));
repatcher.repatchLEAToLoadPtr(pic.storeBack.instructionAtOffset(dslotsLoad(pic)));
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
pic.shapeGuard + inlineShapeOffset(pic)),
int32(JSObjectMap::INVALID_SHAPE));
@ -759,6 +762,7 @@ class GetPropCompiler : public PICStubCompiler
}
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
ReturnAddressPtr retPtr(pic.slowPathStart.callAtOffset(pic.callReturn).executableAddress());
VoidStubPIC stub;
switch (pic.kind) {
@ -773,15 +777,16 @@ class GetPropCompiler : public PICStubCompiler
return;
}
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
repatcher.relink(pic.slowPathCall, target);
MacroAssemblerCodePtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
repatcher.relinkCallerToTrampoline(retPtr, target);
}
bool generateArgsLengthStub()
{
Assembler masm;
Jump notArgs = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass());
Address clasp(pic.objReg, offsetof(JSObject, clasp));
Jump notArgs = masm.branchPtr(Assembler::NotEqual, clasp, ImmPtr(obj->getClass()));
masm.loadPtr(Address(pic.objReg, offsetof(JSObject, slots)), pic.objReg);
masm.load32(Address(pic.objReg, JSObject::JSSLOT_ARGS_LENGTH * sizeof(Value)),
@ -799,7 +804,7 @@ class GetPropCompiler : public PICStubCompiler
buffer.link(notArgs, pic.slowPathStart);
buffer.link(overridden, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel start = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generate args length stub at %p\n",
@ -817,9 +822,10 @@ class GetPropCompiler : public PICStubCompiler
{
Assembler masm;
masm.loadObjClass(pic.objReg, pic.shapeReg);
Jump isDense = masm.testClass(Assembler::Equal, pic.shapeReg, &js_ArrayClass);
Jump notArray = masm.testClass(Assembler::NotEqual, pic.shapeReg, &js_SlowArrayClass);
masm.loadPtr(Address(pic.objReg, offsetof(JSObject, clasp)), pic.shapeReg);
Jump isDense = masm.branchPtr(Assembler::Equal, pic.shapeReg, ImmPtr(&js_ArrayClass));
Jump notArray = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
ImmPtr(&js_SlowArrayClass));
isDense.linkTo(masm.label(), &masm);
masm.load32(Address(pic.objReg, offsetof(JSObject, privateData)), pic.objReg);
@ -833,7 +839,7 @@ class GetPropCompiler : public PICStubCompiler
buffer.link(notArray, pic.slowPathStart);
buffer.link(oob, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel start = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
@ -920,7 +926,7 @@ class GetPropCompiler : public PICStubCompiler
buffer.link(notString, pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
buffer.link(shapeMismatch, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generate string call stub at %p\n",
@ -956,7 +962,7 @@ class GetPropCompiler : public PICStubCompiler
return false;
buffer.link(notString, pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel start = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generate string length stub at %p\n",
@ -980,7 +986,7 @@ class GetPropCompiler : public PICStubCompiler
int32 offset;
if (!holder->hasSlotsArray()) {
JSC::CodeLocationInstruction istr;
istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoad());
istr = pic.storeBack.instructionAtOffset(dslotsLoad());
repatcher.repatchLoadPtrToLEA(istr);
//
@ -1001,10 +1007,10 @@ class GetPropCompiler : public PICStubCompiler
uint32 shapeOffs = pic.shapeGuard + inlineShapeOffset();
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(shapeOffs), obj->shape());
#if defined JS_NUNBOX32
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(GETPROP_TYPE_LOAD), offset + 4);
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(GETPROP_DATA_LOAD), offset);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(GETPROP_TYPE_LOAD), offset + 4);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(GETPROP_DATA_LOAD), offset);
#elif defined JS_PUNBOX64
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(pic.labels.getprop.inlineValueOffset), offset);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(pic.labels.getprop.inlineValueOffset), offset);
#endif
pic.inlinePathPatched = true;
@ -1019,7 +1025,10 @@ class GetPropCompiler : public PICStubCompiler
Assembler masm;
if (pic.objNeedsRemat()) {
masm.rematPayload(pic.objRemat(), pic.objReg);
if (pic.objRemat() >= sizeof(JSStackFrame))
masm.loadPayload(Address(JSFrameReg, pic.objRemat()), pic.objReg);
else
masm.move(RegisterID(pic.objRemat()), pic.objReg);
pic.u.get.objNeedsRemat = false;
}
@ -1028,8 +1037,9 @@ class GetPropCompiler : public PICStubCompiler
Jump argsLenGuard;
if (obj->isDenseArray()) {
start = masm.label();
shapeGuard = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass());
shapeGuard = masm.branchPtr(Assembler::NotEqual,
Address(pic.objReg, offsetof(JSObject, clasp)),
ImmPtr(obj->getClass()));
/*
* No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case:
* the IC is disabled after a dense array hit, so no patching can occur.
@ -1115,7 +1125,7 @@ class GetPropCompiler : public PICStubCompiler
buffer.link(*pj, pic.slowPathStart);
// The final exit jumps to the store-back in the inline stub.
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
@ -1282,7 +1292,7 @@ class GetElemCompiler : public PICStubCompiler
JS_ASSERT(pic.kind == ic::PICInfo::GETELEM);
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoad(pic)));
repatcher.repatchLEAToLoadPtr(pic.storeBack.instructionAtOffset(dslotsLoad(pic)));
/* Only the shape needs to be patched to fail -- atom jump will never be taken. */
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
@ -1294,9 +1304,10 @@ class GetElemCompiler : public PICStubCompiler
pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
ReturnAddressPtr retPtr(pic.slowPathStart.callAtOffset(pic.callReturn).executableAddress());
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::GetElem));
repatcher.relink(pic.slowPathCall, target);
MacroAssemblerCodePtr target(JS_FUNC_TO_DATA_PTR(void *, ic::GetElem));
repatcher.relinkCallerToTrampoline(retPtr, target);
}
bool patchInline(JSObject *holder, const Shape *shape)
@ -1306,7 +1317,7 @@ class GetElemCompiler : public PICStubCompiler
int32 offset;
if (!holder->hasSlotsArray()) {
JSC::CodeLocationInstruction istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoad());
JSC::CodeLocationInstruction istr = pic.storeBack.instructionAtOffset(dslotsLoad());
repatcher.repatchLoadPtrToLEA(istr);
//
@ -1328,10 +1339,10 @@ class GetElemCompiler : public PICStubCompiler
uint32 idOffset = pic.shapeGuard + inlineAtomOffset();
repatcher.repatch(pic.fastPathStart.dataLabelPtrAtOffset(idOffset), id);
#if defined JS_NUNBOX32
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(GETELEM_TYPE_LOAD), offset + 4);
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(GETELEM_DATA_LOAD), offset);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(GETELEM_TYPE_LOAD), offset + 4);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(GETELEM_DATA_LOAD), offset);
#elif defined JS_PUNBOX64
repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(pic.labels.getelem.inlineValueOffset), offset);
repatcher.repatch(pic.storeBack.dataLabel32AtOffset(pic.labels.getelem.inlineValueOffset), offset);
#endif
pic.inlinePathPatched = true;
@ -1370,10 +1381,21 @@ class GetElemCompiler : public PICStubCompiler
Assembler masm;
if (pic.objNeedsRemat()) {
masm.rematPayload(pic.objRemat(), pic.objReg);
if (pic.objRemat() >= sizeof(JSStackFrame))
masm.loadPayload(Address(JSFrameReg, pic.objRemat()), pic.objReg);
else
masm.move(RegisterID(pic.objRemat()), pic.objReg);
pic.u.get.objNeedsRemat = false;
}
if (pic.idNeedsRemat()) {
if (pic.idRemat() >= sizeof(JSStackFrame))
masm.loadPayload(Address(JSFrameReg, pic.idRemat()), pic.u.get.idReg);
else
masm.move(RegisterID(pic.idRemat()), pic.u.get.idReg);
pic.u.get.idNeedsRemat = false;
}
if (pic.shapeNeedsRemat()) {
masm.loadShape(pic.objReg, pic.shapeReg);
pic.shapeRegHasBaseShape = true;
@ -1456,7 +1478,7 @@ class GetElemCompiler : public PICStubCompiler
buffer.link(*pj, pic.slowPathStart);
// The final exit jumps to the store-back in the inline stub.
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
#if DEBUG
char *chars = js_DeflateString(cx, id->chars(), id->length());
@ -1561,9 +1583,10 @@ class ScopeNameCompiler : public PICStubCompiler
pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
ReturnAddressPtr retPtr(pic.slowPathStart.callAtOffset(pic.callReturn).executableAddress());
VoidStubPIC stub = (pic.kind == ic::PICInfo::NAME) ? ic::Name : ic::XName;
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
repatcher.relink(pic.slowPathCall, target);
MacroAssemblerCodePtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
repatcher.relinkCallerToTrampoline(retPtr, target);
}
typedef Vector<Jump, 8, ContextAllocPolicy> JumpList;
@ -1658,7 +1681,7 @@ class ScopeNameCompiler : public PICStubCompiler
return false;
buffer.link(failJump, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
spew("NAME stub", "global");
@ -1762,7 +1785,7 @@ class ScopeNameCompiler : public PICStubCompiler
return false;
buffer.link(failJump, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());
@ -1898,8 +1921,9 @@ class BindNameCompiler : public PICStubCompiler
repatcher.relink(pic.shapeGuard + inlineJumpOffset(pic), pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
repatcher2.relink(pic.slowPathCall, target);
ReturnAddressPtr retPtr(pic.slowPathStart.callAtOffset(pic.callReturn).executableAddress());
MacroAssemblerCodePtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
repatcher.relinkCallerToTrampoline(retPtr, target);
}
bool generateStub(JSObject *obj)
@ -1948,7 +1972,7 @@ class BindNameCompiler : public PICStubCompiler
return false;
buffer.link(failJump, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
buffer.link(done, pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());

View File

@ -187,57 +187,7 @@ union PICLabels {
};
#endif
struct BaseIC {
// Address of inline fast-path.
JSC::CodeLocationLabel fastPathStart;
// Address to rejoin to the fast-path.
JSC::CodeLocationLabel fastPathRejoin;
// Start of the slow path.
JSC::CodeLocationLabel slowPathStart;
// Slow path stub call.
JSC::CodeLocationCall slowPathCall;
// Address of the start of the last generated stub, if any.
JSC::CodeLocationLabel lastStubStart;
typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
// ExecutablePools that IC stubs were generated into.
ExecPoolVector execPools;
// Return the start address of the last path in this PIC, which is the
// inline path if no stubs have been generated yet.
JSC::CodeLocationLabel lastPathStart() {
return stubsGenerated > 0 ? lastStubStart : fastPathStart;
}
// Whether or not the callsite has been hit at least once.
bool hit : 1;
// Number of stubs generated.
uint32 stubsGenerated : 5;
// Release ExecutablePools referred to by this PIC.
void releasePools() {
for (JSC::ExecutablePool **pExecPool = execPools.begin();
pExecPool != execPools.end();
++pExecPool) {
(*pExecPool)->release();
}
}
void reset() {
hit = false;
stubsGenerated = 0;
releasePools();
execPools.clear();
}
};
struct PICInfo : public BaseIC {
struct PICInfo {
typedef JSC::MacroAssembler::RegisterID RegisterID;
// Operation this is a PIC for.
@ -257,6 +207,7 @@ struct PICInfo : public BaseIC {
};
union {
// This struct comes out to 93 bits with GCC.
struct {
RegisterID typeReg : 5; // reg used for checking type
bool hasTypeCheck : 1; // type check and reg are present
@ -265,9 +216,11 @@ struct PICInfo : public BaseIC {
int32 typeCheckOffset;
// Remat info for the object reg.
int32 objRemat : MIN_STATE_REMAT_BITS;
uint32 objRemat : 20;
bool objNeedsRemat : 1;
RegisterID idReg : 5; // only used in GETELEM PICs.
uint32 idRemat : 20;
bool idNeedsRemat : 1;
} get;
ValueRemat vr;
} u;
@ -284,18 +237,25 @@ struct PICInfo : public BaseIC {
bool shapeRegHasBaseShape : 1;
// State flags.
bool hit : 1; // this PIC has been executed
bool inlinePathPatched : 1; // inline path has been patched
RegisterID shapeReg : 5; // also the out type reg
RegisterID objReg : 5; // also the out data reg
// Number of stubs generated.
uint32 stubsGenerated : 5;
// Offset from start of fast path to initial shape guard.
uint32 shapeGuard;
inline bool isSet() const {
// Return address of slow path call, as an offset from slowPathStart.
uint32 callReturn;
inline bool isSet() {
return kind == SET || kind == SETMETHOD;
}
inline bool isGet() const {
inline bool isGet() {
return kind == GET || kind == CALL || kind == GETELEM;
}
inline RegisterID typeReg() {
@ -306,14 +266,22 @@ struct PICInfo : public BaseIC {
JS_ASSERT(isGet());
return u.get.hasTypeCheck;
}
inline const StateRemat objRemat() const {
inline uint32 objRemat() {
JS_ASSERT(isGet());
return StateRemat::FromInt32(u.get.objRemat);
return u.get.objRemat;
}
inline uint32 idRemat() {
JS_ASSERT(isGet());
return u.get.idRemat;
}
inline bool objNeedsRemat() {
JS_ASSERT(isGet());
return u.get.objNeedsRemat;
}
inline bool idNeedsRemat() {
JS_ASSERT(isGet());
return u.get.idNeedsRemat;
}
inline bool shapeNeedsRemat() {
return !shapeRegHasBaseShape;
}
@ -322,12 +290,6 @@ struct PICInfo : public BaseIC {
return !hasTypeCheck();
}
inline void setObjRemat(const StateRemat &sr) {
JS_ASSERT(isGet());
u.get.objRemat = sr.toInt32();
JS_ASSERT(u.get.objRemat == sr.toInt32());
}
#if defined JS_CPU_X64
// Required labels for platform-specific patching.
PICLabels labels;
@ -336,19 +298,56 @@ struct PICInfo : public BaseIC {
// Index into the script's atom table.
JSAtom *atom;
// Address of inline fast-path.
JSC::CodeLocationLabel fastPathStart;
// Address of store back at the end of the inline fast-path.
JSC::CodeLocationLabel storeBack;
// Offset from callReturn to the start of the slow case.
JSC::CodeLocationLabel slowPathStart;
// Address of the start of the last generated stub, if any.
JSC::CodeLocationLabel lastStubStart;
typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
// ExecutablePools that PIC stubs were generated into.
ExecPoolVector execPools;
// Return the start address of the last path in this PIC, which is the
// inline path if no stubs have been generated yet.
JSC::CodeLocationLabel lastPathStart() {
return stubsGenerated > 0 ? lastStubStart : fastPathStart;
}
bool shouldGenerate() {
return stubsGenerated < MAX_PIC_STUBS || !inlinePathPatched;
}
// Release ExecutablePools referred to by this PIC.
void releasePools() {
for (JSC::ExecutablePool **pExecPool = execPools.begin();
pExecPool != execPools.end();
++pExecPool)
{
(*pExecPool)->release();
}
}
// Reset the data members to the state of a fresh PIC before any patching
// or stub generation was done.
void reset() {
hit = false;
inlinePathPatched = false;
if (kind == GET || kind == CALL || kind == GETELEM)
if (kind == GET || kind == CALL || kind == GETELEM) {
u.get.objNeedsRemat = false;
}
secondShapeGuard = 0;
shapeRegHasBaseShape = true;
BaseIC::reset();
stubsGenerated = 0;
releasePools();
execPools.clear();
}
};

View File

@ -40,7 +40,7 @@
#if !defined jsjaeger_assembler64_h__ && defined JS_METHODJIT && defined JS_PUNBOX64
#define jsjaeger_assembler64_h__
#include "assembler/assembler/MacroAssembler.h"
#include "methodjit/BaseAssembler.h"
#include "methodjit/MachineRegs.h"
#include "methodjit/RematInfo.h"
@ -76,7 +76,7 @@ struct ImmPayload : Imm64
{ }
};
class PunboxAssembler : public JSC::MacroAssembler
class Assembler : public BaseAssembler
{
static const uint32 PAYLOAD_OFFSET = 0;
@ -206,12 +206,12 @@ class PunboxAssembler : public JSC::MacroAssembler
template <typename T>
void storeValue(const ValueRemat &vr, T address) {
if (vr.isConstant())
storeValue(vr.value(), address);
else if (vr.isTypeKnown())
storeValueFromComponents(ImmType(vr.knownType()), vr.dataReg(), address);
if (vr.isConstant)
storeValue(Valueify(vr.u.v), address);
else if (vr.u.s.isTypeKnown)
storeValueFromComponents(ImmType(vr.u.s.type.knownType), vr.u.s.data, address);
else
storeValueFromComponents(vr.typeReg(), vr.dataReg(), address);
storeValueFromComponents(vr.u.s.type.reg, vr.u.s.data, address);
}
void loadPrivate(Address privAddr, RegisterID to) {
@ -224,102 +224,94 @@ class PunboxAssembler : public JSC::MacroAssembler
loadPtr(priv, to);
}
Jump testNull(Condition cond, RegisterID reg) {
Jump testNull(Assembler::Condition cond, RegisterID reg) {
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_NULL));
}
Jump testNull(Condition cond, Address address) {
Jump testNull(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return testNull(cond, Registers::ValueReg);
}
Jump testUndefined(Condition cond, RegisterID reg) {
Jump testUndefined(Assembler::Condition cond, RegisterID reg) {
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_UNDEFINED));
}
Jump testUndefined(Condition cond, Address address) {
Jump testUndefined(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return testUndefined(cond, Registers::ValueReg);
}
Jump testInt32(Condition cond, RegisterID reg) {
Jump testInt32(Assembler::Condition cond, RegisterID reg) {
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_INT32));
}
Jump testInt32(Condition cond, Address address) {
Jump testInt32(Assembler::Condition cond, Address address) {
loadTypeTag(address, Registers::ValueReg);
return testInt32(cond, Registers::ValueReg);
}
Jump testNumber(Condition cond, RegisterID reg) {
cond = (cond == Equal) ? Below : AboveOrEqual;
Jump testNumber(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::Below : Assembler::AboveOrEqual;
return branchPtr(cond, reg,
ImmTag(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_NUMBER_SET));
}
Jump testNumber(Condition cond, Address address) {
Jump testNumber(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return testNumber(cond, Registers::ValueReg);
}
Jump testPrimitive(Condition cond, RegisterID reg) {
cond = (cond == Equal) ? Below : AboveOrEqual;
Jump testPrimitive(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::Below : Assembler::AboveOrEqual;
return branchPtr(cond, reg,
ImmTag(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_PRIMITIVE_SET));
}
Jump testPrimitive(Condition cond, Address address) {
Jump testPrimitive(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return testPrimitive(cond, Registers::ValueReg);
}
Jump testObject(Condition cond, RegisterID reg) {
cond = (cond == Equal) ? AboveOrEqual : Below;
Jump testObject(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::AboveOrEqual : Assembler::Below;
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_OBJECT));
}
Jump testObject(Condition cond, Address address) {
Jump testObject(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return testObject(cond, Registers::ValueReg);
}
Jump testDouble(Condition cond, RegisterID reg) {
cond = (cond == Equal) ? BelowOrEqual : Above;
Jump testDouble(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_MAX_DOUBLE));
}
Jump testDouble(Condition cond, Address address) {
Jump testDouble(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return testDouble(cond, Registers::ValueReg);
}
Jump testBoolean(Condition cond, RegisterID reg) {
Jump testBoolean(Assembler::Condition cond, RegisterID reg) {
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_BOOLEAN));
}
Jump testBoolean(Condition cond, Address address) {
Jump testBoolean(Assembler::Condition cond, Address address) {
loadTypeTag(address, Registers::ValueReg);
return testBoolean(cond, Registers::ValueReg);
}
Jump testString(Condition cond, RegisterID reg) {
Jump testString(Assembler::Condition cond, RegisterID reg) {
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_STRING));
}
Jump testString(Condition cond, Address address) {
Jump testString(Assembler::Condition cond, Address address) {
loadTypeTag(address, Registers::ValueReg);
return testString(cond, Registers::ValueReg);
}
template <typename T>
Jump fastArrayLoadSlot(T address, RegisterID typeReg, RegisterID dataReg) {
loadValueAsComponents(address, typeReg, dataReg);
return branchPtr(Equal, typeReg, ImmType(JSVAL_TYPE_MAGIC));
}
};
typedef PunboxAssembler ValueAssembler;
} /* namespace mjit */
} /* namespace js */

View File

@ -41,154 +41,34 @@
#define jsjaeger_remat_h__
#include "jscntxt.h"
#include "MachineRegs.h"
#include "assembler/assembler/MacroAssembler.h"
namespace js {
namespace mjit {
// Lightweight, union-able components of FrameEntry.
struct StateRemat {
typedef JSC::MacroAssembler::RegisterID RegisterID;
typedef JSC::MacroAssembler::Address Address;
static const int32 CONSTANT = -int(UINT16_LIMIT * sizeof(Value));
// This union encodes the fastest rematerialization of a non-constant
// value. The |offset| field can be used to recover information
// without this struct's helpers:
// 1) A value in (CONSTANT, 0) is an argument slot.
// 2) A value in [0, fp) is a register ID.
// 3) A value in [fp, inf) is a local slot.
union {
RegisterID reg_;
int32 offset_;
};
static StateRemat FromInt32(int32 i32) {
StateRemat sr;
sr.offset_ = i32;
return sr;
}
static StateRemat FromRegister(RegisterID reg) {
StateRemat sr;
sr.reg_ = reg;
JS_ASSERT(sr.inRegister());
return sr;
}
static StateRemat FromAddress(Address address) {
JS_ASSERT(address.base == JSFrameReg);
StateRemat sr;
sr.offset_ = address.offset;
JS_ASSERT(sr.inMemory());
return sr;
}
// Minimum number of bits needed to compactly store the int32
// representation in a struct or union. This prevents bloating the IC
// structs by an extra 8 bytes in some cases. 16 bits are needed to encode
// the largest local:
// ((UINT16_LIMIT - 1) * sizeof(Value) + sizeof(JSStackFrame),
// And an extra bit for the sign on arguments.
#define MIN_STATE_REMAT_BITS 17
bool isConstant() const { return offset_ == CONSTANT; }
bool inRegister() const { return offset_ >= 0 &&
offset_ <= int32(JSC::MacroAssembler::TotalRegisters); }
bool inMemory() const { return offset_ >= int32(sizeof(JSStackFrame)); }
int32 toInt32() const { return offset_; }
Address address() const {
JS_ASSERT(inMemory());
return Address(JSFrameReg, offset_);
}
RegisterID reg() const {
JS_ASSERT(inRegister());
return reg_;
}
};
/* Lightweight version of FrameEntry. */
struct ValueRemat {
typedef JSC::MacroAssembler::RegisterID RegisterID;
union {
struct {
union {
int32 typeRemat_;
JSValueType knownType_;
RegisterID reg;
JSValueType knownType;
} type;
int32 dataRemat_ : MIN_STATE_REMAT_BITS;
bool isTypeKnown_ : 1;
RegisterID data : 5;
bool isTypeKnown : 1;
} s;
jsval v_;
jsval v;
} u;
bool isConstant_ : 1;
bool isDataSynced : 1;
bool isTypeSynced : 1;
bool isConstant : 1;
bool isDataSynced : 1;
bool isTypeSynced : 1;
static ValueRemat FromConstant(const Value &v) {
ValueRemat vr;
vr.isConstant_ = true;
vr.u.v_ = Jsvalify(v);
return vr;
}
static ValueRemat FromKnownType(JSValueType type, RegisterID dataReg) {
ValueRemat vr;
vr.isConstant_ = false;
vr.u.s.type.knownType_ = type;
vr.u.s.isTypeKnown_ = true;
vr.u.s.dataRemat_ = StateRemat::FromRegister(dataReg).toInt32();
// Assert bitfields are okay.
JS_ASSERT(vr.dataReg() == dataReg);
return vr;
}
static ValueRemat FromRegisters(RegisterID typeReg, RegisterID dataReg) {
ValueRemat vr;
vr.isConstant_ = false;
vr.u.s.isTypeKnown_ = false;
vr.u.s.type.typeRemat_ = StateRemat::FromRegister(typeReg).toInt32();
vr.u.s.dataRemat_ = StateRemat::FromRegister(dataReg).toInt32();
// Assert bitfields are okay.
JS_ASSERT(vr.dataReg() == dataReg);
JS_ASSERT(vr.typeReg() == typeReg);
return vr;
RegisterID dataReg() {
JS_ASSERT(!isConstant);
return u.s.data;
}
RegisterID dataReg() const {
JS_ASSERT(!isConstant());
return dataRemat().reg();
}
RegisterID typeReg() const {
JS_ASSERT(!isTypeKnown());
return typeRemat().reg();
}
bool isConstant() const { return isConstant_; }
bool isTypeKnown() const { return isConstant() || u.s.isTypeKnown_; }
StateRemat dataRemat() const {
JS_ASSERT(!isConstant());
return StateRemat::FromInt32(u.s.dataRemat_);
}
StateRemat typeRemat() const {
JS_ASSERT(!isTypeKnown());
return StateRemat::FromInt32(u.s.type.typeRemat_);
}
Value value() const {
JS_ASSERT(isConstant());
return Valueify(u.v_);
}
JSValueType knownType() const {
JS_ASSERT(isTypeKnown());
if (isConstant()) {
const Value v = value();
if (v.isDouble())
return JSVAL_TYPE_DOUBLE;
return v.extractNonDoubleType();
}
return u.s.type.knownType_;
RegisterID typeReg() {
JS_ASSERT(!isConstant && !u.s.isTypeKnown);
return u.s.type.reg;
}
};
@ -274,8 +154,5 @@ struct RematInfo {
SyncState sync_;
};
} /* namespace mjit */
} /* namespace js */
#endif