Bug 1552154 part 2 - Stop using pc-to-native map for OSR into Baseline JIT. r=tcampbell

Differential Revision: https://phabricator.services.mozilla.com/D40945

--HG--
extra : moz-landing-system : lando
Jan de Mooij 2019-08-10 10:20:27 +00:00
parent d870ef9328
commit d69bf865df
4 changed files with 99 additions and 35 deletions
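At a high level, this patch stops consulting the pc-to-native map (PCMappingSlotInfo / nativeCodeForPC) when the Baseline Interpreter performs OSR into Baseline JIT code, and instead records a dedicated table of (pcOffset, nativeOffset) pairs at JSOP_LOOPENTRY ops that is binary-searched by bytecode offset. The following sketch illustrates that lookup idea with simplified stand-in types (a std::vector instead of the trailing-array storage the real BaselineScript uses); it is an illustration of the technique, not the SpiderMonkey implementation.

#include <algorithm>
#include <cstdint>
#include <vector>

// Simplified stand-ins for the real types; for illustration only.
struct OSREntry {
  uint32_t pcOffset;      // bytecode offset of a JSOP_LOOPENTRY op
  uint32_t nativeOffset;  // offset of the matching Baseline JIT code
};

// Entries are recorded in bytecode order during compilation, so the table is
// already sorted by pcOffset and a binary search suffices at OSR time.
uint8_t* NativeCodeForOSREntry(const std::vector<OSREntry>& entries,
                               uint8_t* methodBase, uint32_t pcOffset) {
  auto it = std::lower_bound(
      entries.begin(), entries.end(), pcOffset,
      [](const OSREntry& e, uint32_t offset) { return e.pcOffset < offset; });
  if (it == entries.end() || it->pcOffset != pcOffset) {
    return nullptr;  // no OSR entry recorded for this bytecode offset
  }
  return methodBase + it->nativeOffset;
}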


@@ -292,13 +292,13 @@ MethodStatus BaselineCompiler::compile() {
}
UniquePtr<BaselineScript> baselineScript(
BaselineScript::New(script, warmUpCheckPrologueOffset_.offset(),
profilerEnterFrameToggleOffset_.offset(),
profilerExitFrameToggleOffset_.offset(),
handler.retAddrEntries().length(),
pcMappingIndexEntries.length(), pcEntries.length(),
script->resumeOffsets().size(),
traceLoggerToggleOffsets_.length()),
BaselineScript::New(
script, warmUpCheckPrologueOffset_.offset(),
profilerEnterFrameToggleOffset_.offset(),
profilerExitFrameToggleOffset_.offset(),
handler.retAddrEntries().length(), handler.osrEntries().length(),
pcMappingIndexEntries.length(), pcEntries.length(),
script->resumeOffsets().size(), traceLoggerToggleOffsets_.length()),
JS::DeletePolicy<BaselineScript>(cx->runtime()));
if (!baselineScript) {
ReportOutOfMemory(cx);
@@ -319,6 +319,7 @@ MethodStatus BaselineCompiler::compile() {
baselineScript->copyPCMappingEntries(pcEntries);
baselineScript->copyRetAddrEntries(handler.retAddrEntries().begin());
baselineScript->copyOSREntries(handler.osrEntries().begin());
// If profiler instrumentation is enabled, toggle instrumentation on.
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
@@ -1321,26 +1322,38 @@ bool BaselineCodeGen<Handler>::emitInterruptCheck() {
template <>
bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
// Emit no warm-up counter increments or bailouts if Ion is not
// enabled, or if the script will never be Ion-compileable
frame.assertSyncedStack();
// Record native code offset for OSR from Baseline Interpreter into Baseline
// JIT code. This is right before the warm-up check in the Baseline JIT code,
// to make sure we can immediately enter Ion if the script is warm enough or
// if --ion-eager is used.
JSScript* script = handler.script();
jsbytecode* pc = handler.pc();
if (JSOp(*pc) == JSOP_LOOPENTRY) {
uint32_t pcOffset = script->pcToOffset(pc);
uint32_t nativeOffset = masm.currentOffset();
if (!handler.osrEntries().emplaceBack(pcOffset, nativeOffset)) {
ReportOutOfMemory(cx);
return false;
}
}
// Emit no warm-up counter increments if Ion is not enabled or if the script
// will never be Ion-compileable.
if (!handler.maybeIonCompileable()) {
return true;
}
frame.assertSyncedStack();
Register scriptReg = R2.scratchReg();
Register countReg = R0.scratchReg();
Address warmUpCounterAddr(scriptReg, JSScript::offsetOfWarmUpCounter());
JSScript* script = handler.script();
masm.movePtr(ImmGCPtr(script), scriptReg);
masm.load32(warmUpCounterAddr, countReg);
masm.add32(Imm32(1), countReg);
masm.store32(countReg, warmUpCounterAddr);
jsbytecode* pc = handler.pc();
if (JSOp(*pc) == JSOP_LOOPENTRY) {
// If this is a loop inside a catch or finally block, increment the warmup
// counter but don't attempt OSR (Ion only compiles the try block).
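The recording logic above relies on one property worth calling out: the compiler visits JSOP_LOOPENTRY ops in bytecode order, so handler.osrEntries() ends up sorted by pcOffset with no extra sorting step, which is what makes the binary-search lookup in nativeCodeForOSREntry (further down) valid. A hypothetical, self-contained recorder that makes that invariant explicit (stand-in types, not the real BaselineCompilerHandler):

#include <cassert>
#include <cstdint>
#include <vector>

struct OSREntry {
  uint32_t pcOffset;
  uint32_t nativeOffset;
};

// Hypothetical recorder: asserts that entries arrive in strictly increasing
// bytecode order, i.e. the table stays sorted and can be binary-searched.
class OSREntryRecorder {
  std::vector<OSREntry> entries_;

 public:
  bool record(uint32_t pcOffset, uint32_t nativeOffset) {
    assert(entries_.empty() || entries_.back().pcOffset < pcOffset);
    entries_.push_back({pcOffset, nativeOffset});
    return true;  // the real code instead reports OOM if the append fails
  }
  const std::vector<OSREntry>& entries() const { return entries_; }
};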


@@ -535,6 +535,12 @@ class BaselineCompilerHandler {
#endif
FixedList<Label> labels_;
RetAddrEntryVector retAddrEntries_;
// Native code offsets for OSR at JSOP_LOOPENTRY ops.
using OSREntryVector =
Vector<BaselineScript::OSREntry, 16, SystemAllocPolicy>;
OSREntryVector osrEntries_;
JSScript* script_;
jsbytecode* pc_;
@@ -594,6 +600,7 @@ class BaselineCompilerHandler {
BytecodeAnalysis& analysis() { return analysis_; }
RetAddrEntryVector& retAddrEntries() { return retAddrEntries_; }
OSREntryVector& osrEntries() { return osrEntries_; }
MOZ_MUST_USE bool recordCallRetAddr(JSContext* cx, RetAddrEntry::Kind kind,
uint32_t retOffset);


@@ -418,16 +418,9 @@ bool jit::BaselineCompileFromBaselineInterpreter(JSContext* cx,
case Method_Compiled: {
if (*pc == JSOP_LOOPENTRY) {
PCMappingSlotInfo slotInfo;
BaselineScript* baselineScript = script->baselineScript();
*res = baselineScript->nativeCodeForPC(script, pc, &slotInfo);
MOZ_ASSERT(slotInfo.isStackSynced());
if (frame->isDebuggee()) {
// Skip the debug trap emitted by emitInterpreterLoop because the
// Baseline Interpreter already handled it for the current op.
MOZ_RELEASE_ASSERT(baselineScript->hasDebugInstrumentation());
*res += MacroAssembler::ToggledCallSize(*res);
}
uint32_t pcOffset = script->pcToOffset(pc);
*res = baselineScript->nativeCodeForOSREntry(pcOffset);
} else {
*res = script->baselineScript()->warmUpCheckPrologueAddr();
}
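Because each entry is now recorded at a fixed point in the emitted code, the interpreter-to-JIT OSR path above no longer needs PCMappingSlotInfo or the debuggee ToggledCallSize adjustment; it only has to pick between two targets. A condensed sketch of that choice, with stubbed stand-in methods rather than the real SpiderMonkey signatures:

#include <cstdint>

// Stand-in for illustration; the real class is BaselineScript.
struct FakeBaselineScript {
  // Would binary-search the OSR entry table; stubbed out here.
  uint8_t* nativeCodeForOSREntry(uint32_t /*pcOffset*/) { return nullptr; }
  // Would return the address of the warm-up check prologue; stubbed out here.
  uint8_t* warmUpCheckPrologueAddr() { return nullptr; }
};

// Once Baseline compilation succeeded, pick the native address to resume at:
// at a JSOP_LOOPENTRY op, jump straight to the code recorded for that
// bytecode offset; otherwise re-enter through the warm-up check prologue.
uint8_t* OsrTarget(FakeBaselineScript* baseline, bool atLoopEntry,
                   uint32_t pcOffset) {
  return atLoopEntry ? baseline->nativeCodeForOSREntry(pcOffset)
                     : baseline->warmUpCheckPrologueAddr();
}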
@@ -439,14 +432,18 @@ bool jit::BaselineCompileFromBaselineInterpreter(JSContext* cx,
MOZ_CRASH("Unexpected status");
}
BaselineScript* BaselineScript::New(
JSScript* jsscript, uint32_t warmUpCheckPrologueOffset,
uint32_t profilerEnterToggleOffset, uint32_t profilerExitToggleOffset,
size_t retAddrEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
size_t resumeEntries, size_t traceLoggerToggleOffsetEntries) {
BaselineScript* BaselineScript::New(JSScript* jsscript,
uint32_t warmUpCheckPrologueOffset,
uint32_t profilerEnterToggleOffset,
uint32_t profilerExitToggleOffset,
size_t retAddrEntries, size_t osrEntries,
size_t pcMappingIndexEntries,
size_t pcMappingSize, size_t resumeEntries,
size_t traceLoggerToggleOffsetEntries) {
static const unsigned DataAlignment = sizeof(uintptr_t);
size_t retAddrEntriesSize = retAddrEntries * sizeof(RetAddrEntry);
size_t osrEntriesSize = osrEntries * sizeof(BaselineScript::OSREntry);
size_t pcMappingIndexEntriesSize =
pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
size_t resumeEntriesSize = resumeEntries * sizeof(uintptr_t);
@@ -454,13 +451,14 @@ BaselineScript* BaselineScript::New(
size_t paddedRetAddrEntriesSize =
AlignBytes(retAddrEntriesSize, DataAlignment);
size_t paddedOSREntriesSize = AlignBytes(osrEntriesSize, DataAlignment);
size_t paddedPCMappingIndexEntriesSize =
AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
size_t paddedResumeEntriesSize = AlignBytes(resumeEntriesSize, DataAlignment);
size_t paddedTLEntriesSize = AlignBytes(tlEntriesSize, DataAlignment);
size_t allocBytes = paddedRetAddrEntriesSize +
size_t allocBytes = paddedRetAddrEntriesSize + paddedOSREntriesSize +
paddedPCMappingIndexEntriesSize + paddedPCMappingSize +
paddedResumeEntriesSize + paddedTLEntriesSize;
@@ -481,6 +479,10 @@ BaselineScript* BaselineScript::New(
script->retAddrEntries_ = retAddrEntries;
offsetCursor += paddedRetAddrEntriesSize;
script->osrEntriesOffset_ = offsetCursor;
script->osrEntries_ = osrEntries;
offsetCursor += paddedOSREntriesSize;
script->pcMappingIndexOffset_ = offsetCursor;
script->pcMappingIndexEntries_ = pcMappingIndexEntries;
offsetCursor += paddedPCMappingIndexEntriesSize;
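The allocation changes above follow BaselineScript's existing layout: one heap block holds the object header followed by several pointer-aligned, variable-length arrays, each located through a byte offset kept in the header, and the patch threads one more array (the OSR entries) through that scheme. A stripped-down, self-contained sketch of the pattern, with illustrative names rather than the real BaselineScript::New:

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <new>

struct Entry {
  uint32_t pcOffset;
  uint32_t nativeOffset;
};

// Round |bytes| up to a multiple of |alignment| (a power of two).
static size_t AlignBytes(size_t bytes, size_t alignment) {
  return (bytes + alignment - 1) & ~(alignment - 1);
}

struct Blob {
  uint32_t entriesOffset = 0;  // byte offset of the trailing Entry array
  uint32_t numEntries = 0;

  Entry* entries() {
    return reinterpret_cast<Entry*>(reinterpret_cast<uint8_t*>(this) +
                                    entriesOffset);
  }

  // One allocation: aligned header followed by an aligned Entry array.
  static Blob* New(size_t count) {
    const size_t alignment = sizeof(uintptr_t);
    size_t headerSize = AlignBytes(sizeof(Blob), alignment);
    size_t entriesSize = AlignBytes(count * sizeof(Entry), alignment);
    void* raw = calloc(1, headerSize + entriesSize);
    if (!raw) {
      return nullptr;
    }
    Blob* blob = new (raw) Blob();
    blob->entriesOffset = uint32_t(headerSize);
    blob->numEntries = uint32_t(count);
    return blob;
  }
};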
@@ -575,11 +577,12 @@ const RetAddrEntry& BaselineScript::retAddrEntryFromReturnOffset(
return entries[loc];
}
static bool ComputeBinarySearchMid(mozilla::Span<RetAddrEntry> entries,
template <typename Entry>
static bool ComputeBinarySearchMid(mozilla::Span<Entry> entries,
uint32_t pcOffset, size_t* loc) {
return BinarySearchIf(
entries.data(), 0, entries.size(),
[pcOffset](const RetAddrEntry& entry) {
[pcOffset](const Entry& entry) {
uint32_t entryOffset = entry.pcOffset();
if (pcOffset < entryOffset) {
return -1;
@@ -666,6 +669,17 @@ const RetAddrEntry& BaselineScript::retAddrEntryFromReturnAddress(
return retAddrEntryFromReturnOffset(offset);
}
uint8_t* BaselineScript::nativeCodeForOSREntry(uint32_t pcOffset) {
mozilla::Span<OSREntry> entries = osrEntries();
size_t mid;
if (!ComputeBinarySearchMid(entries, pcOffset, &mid)) {
return nullptr;
}
uint32_t nativeOffset = entries[mid].nativeOffset();
return method_->raw() + nativeOffset;
}
void BaselineScript::computeResumeNativeOffsets(JSScript* script) {
// Translate pcOffset to BaselineScript native address. This may return
// nullptr if compiler decided code was unreachable.
@@ -688,6 +702,10 @@ void BaselineScript::copyRetAddrEntries(const RetAddrEntry* entries) {
std::copy_n(entries, retAddrEntries().size(), retAddrEntries().data());
}
void BaselineScript::copyOSREntries(const OSREntry* entries) {
std::copy_n(entries, osrEntries().size(), osrEntries().data());
}
void BaselineScript::copyPCMappingEntries(const CompactBufferWriter& entries) {
MOZ_ASSERT(entries.length() > 0);
MOZ_ASSERT(entries.length() == pcMappingSize_);
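Putting the pieces of this file together: the handler accumulates OSR entries while emitting code, BaselineScript::New sizes the trailing array from handler.osrEntries().length(), and copyOSREntries copies the finished vector into it with a single std::copy_n. A minimal, runnable sketch of that order of operations, with plain vectors standing in for the trailing array:

#include <algorithm>
#include <cstdint>
#include <vector>

struct Entry {
  uint32_t pcOffset;
  uint32_t nativeOffset;
};

// 1. Accumulate entries while "compiling" (two made-up loop heads here).
// 2. Size the destination from the accumulated length.
// 3. Copy the finished entries over in one std::copy_n call, as
//    copyOSREntries does for the BaselineScript's trailing array.
int main() {
  std::vector<Entry> accumulated = {{12, 0x40}, {57, 0x9c}};  // step 1
  std::vector<Entry> destination(accumulated.size());         // step 2
  std::copy_n(accumulated.data(), accumulated.size(),         // step 3
              destination.data());
  return destination.size() == accumulated.size() ? 0 : 1;
}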


@@ -226,6 +226,9 @@ struct BaselineScript final {
uint32_t retAddrEntriesOffset_ = 0;
uint32_t retAddrEntries_ = 0;
uint32_t osrEntriesOffset_ = 0;
uint32_t osrEntries_ = 0;
uint32_t pcMappingIndexOffset_ = 0;
uint32_t pcMappingIndexEntries_ = 0;
@@ -254,6 +257,19 @@ struct BaselineScript final {
PROFILER_INSTRUMENTATION_ON = 1 << 1,
};
// Native code offset for OSR from Baseline Interpreter into Baseline JIT at
// JSOP_LOOPENTRY ops.
class OSREntry {
uint32_t pcOffset_;
uint32_t nativeOffset_;
public:
OSREntry(uint32_t pcOffset, uint32_t nativeOffset)
: pcOffset_(pcOffset), nativeOffset_(nativeOffset) {}
uint32_t pcOffset() const { return pcOffset_; }
uint32_t nativeOffset() const { return nativeOffset_; }
};
private:
uint8_t flags_ = 0;
@@ -281,6 +297,10 @@ struct BaselineScript final {
return mozilla::MakeSpan(
offsetToPointer<RetAddrEntry>(retAddrEntriesOffset_), retAddrEntries_);
}
mozilla::Span<OSREntry> osrEntries() const {
return mozilla::MakeSpan(offsetToPointer<OSREntry>(osrEntriesOffset_),
osrEntries_);
}
#ifdef JS_TRACE_LOGGING
mozilla::Span<uint32_t> traceLoggerToggleOffsets() const {
@@ -298,11 +318,14 @@ struct BaselineScript final {
}
public:
static BaselineScript* New(
JSScript* jsscript, uint32_t warmUpCheckPrologueOffset,
uint32_t profilerEnterToggleOffset, uint32_t profilerExitToggleOffset,
size_t retAddrEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
size_t resumeEntries, size_t traceLoggerToggleOffsetEntries);
static BaselineScript* New(JSScript* jsscript,
uint32_t warmUpCheckPrologueOffset,
uint32_t profilerEnterToggleOffset,
uint32_t profilerExitToggleOffset,
size_t retAddrEntries, size_t osrEntries,
size_t pcMappingIndexEntries, size_t pcMappingSize,
size_t resumeEntries,
size_t traceLoggerToggleOffsetEntries);
static void Trace(JSTracer* trc, BaselineScript* script);
static void Destroy(FreeOp* fop, BaselineScript* script);
@@ -352,7 +375,10 @@ struct BaselineScript final {
const RetAddrEntry& retAddrEntryFromReturnOffset(CodeOffset returnOffset);
const RetAddrEntry& retAddrEntryFromReturnAddress(uint8_t* returnAddr);
uint8_t* nativeCodeForOSREntry(uint32_t pcOffset);
void copyRetAddrEntries(const RetAddrEntry* entries);
void copyOSREntries(const OSREntry* entries);
// Copy resumeOffsets list from |script| and convert the pcOffsets
// to native addresses in the Baseline code.