Funnel all write-backs to the stack and the global frame through TraceRecorder::writeBack() so we can intercept and manipulate them in one central location.

This commit is contained in:
Andreas Gal 2008-08-15 14:47:49 -07:00
parent 87f65aaf6b
commit a91faa3c28
2 changed files with 19 additions and 13 deletions

View File

@@ -1084,28 +1084,33 @@ TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
     return true;
 }
/*
 * Write back a value onto the stack or global frames.
 *
 * i      - LIR instruction computing the value to store.
 * base   - base-address instruction; per the callers in set(), this is gp_ins
 *          for global slots or lirbuf->sp for stack slots.
 * offset - byte offset from base at which the value is stored.
 *
 * Returns the LIR store instruction, which set() caches in nativeFrameTracker.
 */
LIns*
TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
{
/* Sink all type casts targeting the stack into the side exit by simply storing the original
   (uncasted) value. Each guard generates the side exit map based on the types of the
   last stores to every stack location, so it's safe to not perform them on-trace. */
if (isPromoteInt(i))
i = ::demote(lir, i);
return lir->insStorei(i, base, offset);
}
 /* Update the tracker, then issue a write back store. */
 void
 TraceRecorder::set(jsval* p, LIns* i, bool initializing)
 {
     JS_ASSERT(initializing || tracker.has(p));
     tracker.set(p, i);
-    /* Sink all type casts targeting the stack into the side exit by simply storing the original
-       (uncasted) value. Each guard generates the side exit map based on the types of the
-       last stores to every stack location, so its safe to not perform them on-trace. */
-    if (isPromoteInt(i))
-        i = ::demote(lir, i);
     /* If we are writing to this location for the first time, calculate the offset into the
        native frame manually, otherwise just look up the last load or store associated with
        the same source address (p) and use the same offset/base. */
     LIns* x;
     if ((x = nativeFrameTracker.get(p)) == NULL) {
-        if (isGlobal(p)) {
-            x = lir->insStorei(i, gp_ins, nativeGlobalOffset(p));
-        } else {
-            ptrdiff_t offset = nativeStackOffset(p);
-            x = lir->insStorei(i, lirbuf->sp, -treeInfo->nativeStackBase + offset);
-        }
+        if (isGlobal(p))
+            x = writeBack(i, gp_ins, nativeGlobalOffset(p));
+        else
+            x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
         nativeFrameTracker.set(p, x);
     } else {
 #define ASSERT_VALID_CACHE_HIT(base, offset) \
@@ -1116,11 +1121,11 @@ TraceRecorder::set(jsval* p, LIns* i, bool initializing)
         if (x->isop(LIR_st) || x->isop(LIR_stq)) {
             ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->oprnd3()->constval());
-            lir->insStorei(i, x->oprnd2(), x->oprnd3()->constval());
+            writeBack(i, x->oprnd2(), x->oprnd3()->constval());
         } else {
             JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
             ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->immdisp());
-            lir->insStorei(i, x->oprnd2(), x->immdisp());
+            writeBack(i, x->oprnd2(), x->immdisp());
         }
     }
 #undef ASSERT_VALID_CACHE_HIT

View File

@@ -234,6 +234,7 @@ class TraceRecorder {
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
     nanojit::LIns* get(jsval* p);
+    nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
     void set(jsval* p, nanojit::LIns* l, bool initializing = false);
     bool checkType(jsval& v, uint8 type, bool& recompile);