Bug 1288944 - Baldr: move the JSContext* and memory* into TlsData (r=jolesen)
MozReview-Commit-ID: 9jzBImzbncw

--HG--
extra : rebase_source : dbdf716602ccbf9c3c7f401930cc9ee2c56b6319
parent 49ef4fdbc1
commit 72a0b7533b
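
The patch below moves the JSContext* and the memory base out of per-module global data and into the per-instance wasm::TlsData block, and shrinks the C++-to-wasm entry call from three arguments to two (global data is now reached through the TLS pointer). As a consolidated sketch assembled from the hunks that follow (declarations taken from the diff, comments paraphrased):

    struct TlsData
    {
        JSContext* cx;        // JSContext that contains this TLS data
        Instance* instance;   // Instance that contains this TLS data
        uint8_t* globalData;  // global data for this Instance
        uint8_t* memoryBase;  // base of the default memory, or null
        void* stackLimit;     // stack limit for the current thread
    };

    // Entry trampolines now take the TLS pointer as their second and last argument.
    typedef int32_t (*ExportFuncPtr)(ExportArg* args, TlsData* tls);
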
@@ -213,15 +213,15 @@ FrameIterator::lineOrBytecode() const
 static const unsigned PushedRetAddr = 0;
 static const unsigned PostStorePrePopFP = 0;
 # endif
-static const unsigned PushedFP = 20;
-static const unsigned StoredFP = 27;
+static const unsigned PushedFP = 16;
+static const unsigned StoredFP = 23;
 #elif defined(JS_CODEGEN_X86)
 # if defined(DEBUG)
 static const unsigned PushedRetAddr = 0;
 static const unsigned PostStorePrePopFP = 0;
 # endif
-static const unsigned PushedFP = 14;
-static const unsigned StoredFP = 17;
+static const unsigned PushedFP = 11;
+static const unsigned StoredFP = 14;
 #elif defined(JS_CODEGEN_ARM)
 static const unsigned PushedRetAddr = 4;
 static const unsigned PushedFP = 20;
@@ -284,7 +284,7 @@ GenerateProfilingPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason
         PushRetAddr(masm);
         MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - offsets->begin);
 
-        masm.loadWasmActivation(scratch);
+        masm.loadWasmActivationFromTls(scratch);
         masm.push(Address(scratch, WasmActivation::offsetOfFP()));
         MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - offsets->begin);
 
@@ -313,7 +313,7 @@ GenerateProfilingEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason
     if (framePushed)
         masm.addToStackPtr(Imm32(framePushed));
 
-    masm.loadWasmActivation(scratch);
+    masm.loadWasmActivationFromTls(scratch);
 
     if (reason != ExitReason::None) {
         masm.store32(Imm32(int32_t(ExitReason::None)),
@@ -85,18 +85,6 @@ class SigIdSet
 
 ExclusiveData<SigIdSet> sigIdSet;
 
-JSContext**
-Instance::addressOfContextPtr() const
-{
-    return (JSContext**)(codeSegment().globalData() + ContextPtrGlobalDataOffset);
-}
-
-uint8_t**
-Instance::addressOfMemoryBase() const
-{
-    return (uint8_t**)(codeSegment().globalData() + HeapGlobalDataOffset);
-}
-
 void**
 Instance::addressOfTableBase(size_t tableIndex) const
 {
@@ -293,9 +281,10 @@ Instance::Instance(JSContext* cx,
     MOZ_ASSERT(funcImports.length() == metadata().funcImports.length());
     MOZ_ASSERT(tables_.length() == metadata().tables.length());
 
-    *addressOfContextPtr() = cx;
-
+    tlsData_.cx = cx;
     tlsData_.instance = this;
+    tlsData_.globalData = code_->segment().globalData();
+    tlsData_.memoryBase = memory ? memory->buffer().dataPointerEither().unwrap() : nullptr;
     tlsData_.stackLimit = *(void**)cx->stackLimitAddressForJitCode(StackForUntrustedScript);
 
     for (size_t i = 0; i < metadata().funcImports.length(); i++) {
@@ -340,9 +329,6 @@ Instance::Instance(JSContext* cx,
         }
     }
 
-    if (memory)
-        *addressOfMemoryBase() = memory->buffer().dataPointerEither().unwrap();
-
     for (size_t i = 0; i < tables_.length(); i++)
         *addressOfTableBase(i) = tables_[i]->array();
 }
@@ -400,7 +386,7 @@ SharedMem<uint8_t*>
 Instance::memoryBase() const
 {
     MOZ_ASSERT(metadata().usesMemory());
-    MOZ_ASSERT(*addressOfMemoryBase() == memory_->buffer().dataPointerEither());
+    MOZ_ASSERT(tlsData_.memoryBase == memory_->buffer().dataPointerEither());
     return memory_->buffer().dataPointerEither();
 }
 
@@ -519,7 +505,7 @@ Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args)
 
         // Call the per-exported-function trampoline created by GenerateEntry.
         auto funcPtr = JS_DATA_TO_FUNC_PTR(ExportFuncPtr, codeBase() + func.entryOffset());
-        if (!CALL_GENERATED_3(funcPtr, exportArgs.begin(), codeSegment().globalData(), tlsData()))
+        if (!CALL_GENERATED_2(funcPtr, exportArgs.begin(), &tlsData_))
             return false;
     }
 
@@ -50,16 +50,10 @@ class Instance
     TlsData tlsData_;
 
    // Internal helpers:
-    JSContext** addressOfContextPtr() const;
-    Instance** addressOfInstancePtr() const;
-    uint8_t** addressOfMemoryBase() const;
     void** addressOfTableBase(size_t tableIndex) const;
     const void** addressOfSigId(const SigIdDesc& sigId) const;
     FuncImportExit& funcImportToExit(const FuncImport& fi);
 
-    // Get this instance's TLS data pointer for the current thread.
-    TlsData* tlsData() { return &tlsData_; }
-
     // Import call slow paths which are called directly from wasm code.
     friend void* AddressOf(SymbolicAddress, ExclusiveContext*);
     static int32_t callImport_void(Instance*, int32_t, int32_t, uint64_t*);
@@ -81,7 +75,7 @@ class Instance
     bool init(JSContext* cx);
     void trace(JSTracer* trc);
 
-    JSContext* cx() const { return *addressOfContextPtr(); }
+    JSContext* cx() const { return tlsData_.cx; }
     JSCompartment* compartment() const { return compartment_; }
     Code& code() { return *code_; }
     const Code& code() const { return *code_; }
@@ -108,8 +108,6 @@ wasm::GenerateEntry(MacroAssembler& masm, const FuncExport& fe, bool usesHeap)
     masm.push(lr);
 #elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
     masm.push(ra);
-#elif defined(JS_CODEGEN_X86)
-    static const unsigned EntryFrameSize = sizeof(void*);
 #endif
 
     // Save all caller non-volatile registers before we clobber them here and in
@@ -118,53 +116,42 @@ wasm::GenerateEntry(MacroAssembler& masm, const FuncExport& fe, bool usesHeap)
     masm.PushRegsInMask(NonVolatileRegs);
     MOZ_ASSERT(masm.framePushed() == FramePushedAfterSave);
 
-    // ARM and MIPS/MIPS64 have a globally-pinned GlobalReg (x64 uses RIP-relative
-    // addressing, x86 uses immediates in effective addresses). For the
-    // AsmJSGlobalRegBias addition, see Assembler-(mips,arm).h.
-#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
-    masm.movePtr(IntArgReg1, GlobalReg);
-    masm.addPtr(Imm32(AsmJSGlobalRegBias), GlobalReg);
-#endif
-
-    // ARM, MIPS/MIPS64 and x64 have a globally-pinned HeapReg (x86 uses immediates in
-    // effective addresses). Loading the heap register depends on the global
-    // register already having been loaded.
-    if (usesHeap)
-        masm.loadAsmJSHeapRegisterFromGlobalData();
-
-    // Put the per-thread, per-module TLS pointer into WasmTlsReg.
-    // This is the third argument in the ExportFuncPtr prototype.
-#if defined(JS_CODEGEN_X86)
-    masm.loadPtr(
-        Address(masm.getStackPointer(), EntryFrameSize + masm.framePushed() + 2 * sizeof(void*)),
-        WasmTlsReg);
-#else
-    masm.movePtr(IntArgReg2, WasmTlsReg);
-#endif
-    // Make sure the TLS pointer is not clobbered by the following code.
-    MOZ_ASSERT(WasmTlsReg != ABINonArgReg0, "TLS pointer can't be scratch reg");
-    MOZ_ASSERT(WasmTlsReg != ABINonArgReg1, "TLS pointer can't be scratch reg");
-
-    // Put the 'argv' argument into a non-argument/return register so that we
-    // can use 'argv' while we fill in the arguments for the asm.js callee.
-    // Also, save 'argv' on the stack so that we can recover it after the call.
-    // Use a second non-argument/return register as temporary scratch.
+    // Put the 'argv' argument into a non-argument/return/TLS register so that
+    // we can use 'argv' while we fill in the arguments for the asm.js callee.
     Register argv = ABINonArgReturnReg0;
     Register scratch = ABINonArgReturnReg1;
 
-#if defined(JS_CODEGEN_X86)
-    masm.loadPtr(Address(masm.getStackPointer(), EntryFrameSize + masm.framePushed()), argv);
-#else
-    masm.movePtr(IntArgReg0, argv);
-#endif
+    // Read the arguments of wasm::ExportFuncPtr according to the native ABI.
+    // The entry stub's frame is only 1 word, not the usual 2 for AsmJSFrame.
+    const unsigned argBase = sizeof(void*) + masm.framePushed();
+    ABIArgGenerator abi;
+    ABIArg arg;
+
+    // arg 1: ExportArg*
+    arg = abi.next(MIRType::Pointer);
+    if (arg.kind() == ABIArg::GPR)
+        masm.movePtr(arg.gpr(), argv);
+    else
+        masm.loadPtr(Address(masm.getStackPointer(), argBase + arg.offsetFromArgBase()), argv);
+
+    // Arg 2: TlsData*
+    arg = abi.next(MIRType::Pointer);
+    if (arg.kind() == ABIArg::GPR)
+        masm.movePtr(arg.gpr(), WasmTlsReg);
+    else
+        masm.loadPtr(Address(masm.getStackPointer(), argBase + arg.offsetFromArgBase()), WasmTlsReg);
+
+    // Setup pinned registers that are assumed throughout wasm code.
+    masm.loadWasmPinnedRegsFromTls();
+
+    // Save 'argv' on the stack so that we can recover it after the call. Use
+    // a second non-argument/return register as temporary scratch.
     masm.Push(argv);
 
-    // Save the stack pointer to the saved non-volatile registers. We will use
-    // this on two paths: normal return and exceptional return. Since
-    // loadWasmActivation uses GlobalReg, we must do this after loading
-    // GlobalReg.
+    // Save the stack pointer in the WasmActivation right before dynamically
+    // aligning the stack so that it may be recovered on return or throw.
     MOZ_ASSERT(masm.framePushed() == FramePushedForEntrySP);
-    masm.loadWasmActivation(scratch);
+    masm.loadWasmActivationFromTls(scratch);
     masm.storeStackPtr(Address(scratch, WasmActivation::offsetOfEntrySP()));
 
     // Dynamically align the stack since ABIStackAlignment is not necessarily
@@ -281,7 +268,7 @@ wasm::GenerateEntry(MacroAssembler& masm, const FuncExport& fe, bool usesHeap)
     masm.call(CallSiteDesc(CallSiteDesc::Relative), fe.funcIndex());
 
     // Recover the stack pointer value before dynamic alignment.
-    masm.loadWasmActivation(scratch);
+    masm.loadWasmActivationFromTls(scratch);
     masm.loadStackPtr(Address(scratch, WasmActivation::offsetOfEntrySP()));
     masm.setFramePushed(FramePushedForEntrySP);
 
@@ -558,17 +545,26 @@ wasm::GenerateInterpExit(MacroAssembler& masm, const FuncImport& fi, uint32_t fu
         MOZ_CRASH("Limit");
     }
 
+    // The native ABI preserves the TLS, heap and global registers since they
+    // are non-volatile.
+    MOZ_ASSERT(NonVolatileRegs.has(WasmTlsReg));
+#if defined(JS_CODEGEN_X64) || \
+    defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
+    defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
+    MOZ_ASSERT(NonVolatileRegs.has(HeapReg));
+#endif
+#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
+    defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
+    MOZ_ASSERT(NonVolatileRegs.has(GlobalReg));
+#endif
+
     GenerateExitEpilogue(masm, framePushed, ExitReason::ImportInterp, &offsets);
 
     offsets.end = masm.currentOffset();
     return offsets;
 }
 
-#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
-static const unsigned MaybeSavedGlobalReg = sizeof(void*);
-#else
-static const unsigned MaybeSavedGlobalReg = 0;
-#endif
+static const unsigned SavedTlsReg = sizeof(void*);
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into a compatible JIT function,
@@ -589,7 +585,7 @@ wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi, bool usesHeap)
     static_assert(AsmJSStackAlignment >= JitStackAlignment, "subsumes");
     unsigned sizeOfRetAddr = sizeof(void*);
     unsigned jitFrameBytes = 3 * sizeof(void*) + (1 + sig.args().length()) * sizeof(Value);
-    unsigned totalJitFrameBytes = sizeOfRetAddr + jitFrameBytes + MaybeSavedGlobalReg;
+    unsigned totalJitFrameBytes = sizeOfRetAddr + jitFrameBytes + SavedTlsReg;
     unsigned jitFramePushed = StackDecrementForCall(masm, JitStackAlignment, totalJitFrameBytes) -
                               sizeOfRetAddr;
 
@@ -644,15 +640,11 @@ wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi, bool usesHeap)
     argOffset += sig.args().length() * sizeof(Value);
     MOZ_ASSERT(argOffset == jitFrameBytes);
 
-    // 6. Jit code will clobber all registers, even non-volatiles. GlobalReg and
-    // HeapReg are removed from the general register set for asm.js code, so
-    // these will not have been saved by the caller like all other registers,
-    // so they must be explicitly preserved. Only save GlobalReg since
-    // HeapReg can be reloaded (from global data) after the call.
-#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
-    static_assert(MaybeSavedGlobalReg == sizeof(void*), "stack frame accounting");
-    masm.storePtr(GlobalReg, Address(masm.getStackPointer(), jitFrameBytes));
-#endif
+    // 6. Jit code will clobber all registers, even non-volatiles. WasmTlsReg
+    // must be kept live for the benefit of the epilogue, so push it on the
+    // stack so that it can be restored before the epilogue.
+    static_assert(SavedTlsReg == sizeof(void*), "stack frame accounting");
+    masm.storePtr(WasmTlsReg, Address(masm.getStackPointer(), jitFrameBytes));
 
     {
         // Enable Activation.
@@ -712,12 +704,6 @@ wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi, bool usesHeap)
         masm.store8(Imm32(0), Address(act, JitActivation::offsetOfActiveUint8()));
     }
 
-    // Reload the global register since JIT code can clobber any register.
-#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
-    static_assert(MaybeSavedGlobalReg == sizeof(void*), "stack frame accounting");
-    masm.loadPtr(Address(masm.getStackPointer(), jitFrameBytes), GlobalReg);
-#endif
-
     // As explained above, the frame was aligned for the JIT ABI such that
     // (sp + sizeof(void*)) % JitStackAlignment == 0
     // But now we possibly want to call one of several different C++ functions,
@@ -764,10 +750,12 @@ wasm::GenerateJitExit(MacroAssembler& masm, const FuncImport& fi, bool usesHeap)
     Label done;
     masm.bind(&done);
 
-    // Ion code does not respect system callee-saved register conventions so
-    // reload the heap register.
-    if (usesHeap)
-        masm.loadAsmJSHeapRegisterFromGlobalData();
+    // Ion code does not respect the system ABI's callee-saved register
+    // conventions so reload any assumed-non-volatile registers. Note that the
+    // reserveStack(sizeOfRetAddr) above means that the stack pointer is at a
+    // different offset than when WasmTlsReg was stored.
+    masm.loadPtr(Address(masm.getStackPointer(), jitFrameBytes + sizeOfRetAddr), WasmTlsReg);
+    masm.loadWasmPinnedRegsFromTls();
 
     GenerateExitEpilogue(masm, masm.framePushed(), ExitReason::ImportJit, &offsets);
 
@@ -849,7 +837,7 @@ GenerateStackOverflow(MacroAssembler& masm)
     // the non-profiling case (there is no return path from this point) and, in
     // the profiling case, it is already correct.
     Register activation = ABINonArgReturnReg0;
-    masm.loadWasmActivation(activation);
+    masm.loadWasmActivationFromTls(activation);
     masm.storePtr(masm.getStackPointer(), Address(activation, WasmActivation::offsetOfFP()));
 
     // Prepare the stack for calling C++.
@@ -919,7 +907,7 @@ GenerateThrow(MacroAssembler& masm)
     // maintain the invariant that fp is either null or pointing to a valid
     // frame.
     Register scratch = ABINonArgReturnReg0;
-    masm.loadWasmActivation(scratch);
+    masm.loadWasmActivationFromSymbolicAddress(scratch);
     masm.storePtr(ImmWord(0), Address(scratch, WasmActivation::offsetOfFP()));
 
     masm.setFramePushed(FramePushedForEntrySP);
@@ -990,7 +978,7 @@ wasm::GenerateInterruptStub(MacroAssembler& masm)
     Register scratch = ABINonArgReturnReg0;
 
     // Store resumePC into the reserved space.
-    masm.loadWasmActivation(scratch);
+    masm.loadWasmActivationFromSymbolicAddress(scratch);
     masm.loadPtr(Address(scratch, WasmActivation::offsetOfResumePC()), scratch);
     masm.storePtr(scratch, Address(masm.getStackPointer(), masm.framePushed() + sizeof(void*)));
 
@@ -1028,7 +1016,7 @@ wasm::GenerateInterruptStub(MacroAssembler& masm)
     masm.ma_and(StackPointer, StackPointer, Imm32(~(ABIStackAlignment - 1)));
 
     // Store resumePC into the reserved space.
-    masm.loadWasmActivation(IntArgReg0);
+    masm.loadWasmActivationFromSymbolicAddress(IntArgReg0);
     masm.loadPtr(Address(IntArgReg0, WasmActivation::offsetOfResumePC()), IntArgReg1);
     masm.storePtr(IntArgReg1, Address(s0, masm.framePushed()));
 
@@ -1072,7 +1060,7 @@ wasm::GenerateInterruptStub(MacroAssembler& masm)
     masm.ma_and(Imm32(~7), sp, sp);
 
     // Store resumePC into the return PC stack slot.
-    masm.loadWasmActivation(IntArgReg0);
+    masm.loadWasmActivationFromSymbolicAddress(IntArgReg0);
     masm.loadPtr(Address(IntArgReg0, WasmActivation::offsetOfResumePC()), IntArgReg1);
     masm.storePtr(IntArgReg1, Address(r6, 14 * sizeof(uint32_t*)));
 
@@ -1044,16 +1044,25 @@ struct ExportArg
 //
 struct TlsData
 {
+    // Pointer to the JSContext that contains this TLS data.
+    JSContext* cx;
+
     // Pointer to the Instance that contains this TLS data.
     Instance* instance;
 
+    // Pointer to the global data for this Instance.
+    uint8_t* globalData;
+
+    // Pointer to the base of the default memory (or null if there is none).
+    uint8_t* memoryBase;
+
     // Stack limit for the current thread. This limit is checked against the
     // stack pointer in the prologue of functions that allocate stack space. See
     // `CodeGenerator::generateWasm`.
     void* stackLimit;
 };
 
-typedef int32_t (*ExportFuncPtr)(ExportArg* args, uint8_t* global, TlsData* tls);
+typedef int32_t (*ExportFuncPtr)(ExportArg* args, TlsData* tls);
 
 // Constants:
 
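
Generated code reaches each TlsData field through the pinned WasmTlsReg with an offsetof load; the macro-assembler hunks further down all follow this pattern, for example the ARM flavor of loadWasmPinnedRegsFromTls:

    loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, memoryBase)), HeapReg);
    loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, globalData)), GlobalReg);
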
@@ -1066,9 +1075,7 @@ static const uint64_t Uint32Range = uint64_t(UINT32_MAX) + 1;
 static const uint64_t MappedSize = 2 * Uint32Range + PageSize;
 #endif
 
-static const unsigned ContextPtrGlobalDataOffset = 0;
-static const unsigned HeapGlobalDataOffset = ContextPtrGlobalDataOffset + sizeof(void*);
-static const unsigned NaN64GlobalDataOffset = HeapGlobalDataOffset + sizeof(void*);
+static const unsigned NaN64GlobalDataOffset = 0;
 static const unsigned NaN32GlobalDataOffset = NaN64GlobalDataOffset + sizeof(double);
 static const unsigned InitialGlobalDataBytes = NaN32GlobalDataOffset + sizeof(float);
 
@@ -1370,8 +1370,12 @@ class MacroAssembler : public MacroAssemblerSpecific
     void loadJitActivation(Register dest) {
         loadPtr(AbsoluteAddress(GetJitContext()->runtime->addressOfActivation()), dest);
     }
-    void loadWasmActivation(Register dest) {
-        loadWasmGlobalPtr(wasm::ContextPtrGlobalDataOffset, dest);
+    void loadWasmActivationFromTls(Register dest) {
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, cx)), dest);
+        loadPtr(Address(dest, JSContext::offsetOfWasmActivation()), dest);
+    }
+    void loadWasmActivationFromSymbolicAddress(Register dest) {
+        movePtr(wasm::SymbolicAddress::Context, dest);
         loadPtr(Address(dest, JSContext::offsetOfWasmActivation()), dest);
     }
 
@@ -1449,8 +1449,10 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
     void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
         loadPtr(Address(GlobalReg, globalDataOffset - AsmJSGlobalRegBias), dest);
     }
-    void loadAsmJSHeapRegisterFromGlobalData() {
-        loadWasmGlobalPtr(wasm::HeapGlobalDataOffset, HeapReg);
+    void loadWasmPinnedRegsFromTls() {
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, memoryBase)), HeapReg);
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, globalData)), GlobalReg);
+        ma_add(Imm32(AsmJSGlobalRegBias), GlobalReg);
     }
 
     // Instrumentation for entering and leaving the profiler.
@@ -2308,9 +2308,10 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
     void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
         loadPtr(Address(GlobalReg, globalDataOffset - AsmJSGlobalRegBias), dest);
     }
-    void loadAsmJSHeapRegisterFromGlobalData() {
-        loadWasmGlobalPtr(wasm::HeapGlobalDataOffset, HeapReg);
-        loadWasmGlobalPtr(wasm::HeapGlobalDataOffset + 8, HeapLenReg);
+    void loadWasmPinnedRegsFromTls() {
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, memoryBase)), HeapReg);
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, globalData)), GlobalReg);
+        adds32(Imm32(AsmJSGlobalRegBias), GlobalReg);
     }
 
     // Overwrites the payload bits of a dest register containing a Value.
@@ -997,13 +997,13 @@ class MacroAssemblerMIPSCompat : public MacroAssemblerMIPS
     void moveFloat32(FloatRegister src, FloatRegister dest) {
         as_movs(dest, src);
     }
 
     void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
         loadPtr(Address(GlobalReg, globalDataOffset - AsmJSGlobalRegBias), dest);
     }
-    void loadAsmJSHeapRegisterFromGlobalData() {
-        MOZ_ASSERT(Imm16::IsInSignedRange(wasm::HeapGlobalDataOffset - AsmJSGlobalRegBias));
-        loadWasmGlobalPtr(wasm::HeapGlobalDataOffset, HeapReg);
+    void loadWasmPinnedRegsFromTls() {
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, memoryBase)), HeapReg);
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, globalData)), GlobalReg);
+        ma_addu(GlobalReg, Imm32(AsmJSGlobalRegBias));
     }
 
     // Instrumentation for entering and leaving the profiler.
@@ -1003,9 +1003,10 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64
     void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
         loadPtr(Address(GlobalReg, globalDataOffset - AsmJSGlobalRegBias), dest);
     }
-    void loadAsmJSHeapRegisterFromGlobalData() {
-        MOZ_ASSERT(Imm16::IsInSignedRange(wasm::HeapGlobalDataOffset - AsmJSGlobalRegBias));
-        loadWasmGlobalPtr(wasm::HeapGlobalDataOffset, HeapReg);
+    void loadWasmPinnedRegsFromTls() {
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, memoryBase)), HeapReg);
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, globalData)), GlobalReg);
+        ma_addu(GlobalReg, Imm32(AsmJSGlobalRegBias));
     }
 
     // Instrumentation for entering and leaving the profiler.
@@ -406,8 +406,9 @@ class MacroAssemblerNone : public Assembler
     void buildFakeExitFrame(Register, uint32_t*) { MOZ_CRASH(); }
     bool buildOOLFakeExitFrame(void*) { MOZ_CRASH(); }
     void loadWasmGlobalPtr(uint32_t, Register) { MOZ_CRASH(); }
-    void loadWasmActivation(Register) { MOZ_CRASH(); }
-    void loadAsmJSHeapRegisterFromGlobalData() { MOZ_CRASH(); }
+    void loadWasmActivationFromTls(Register) { MOZ_CRASH(); }
+    void loadWasmActivationFromSymbolicAddress(Register) { MOZ_CRASH(); }
+    void loadWasmPinnedRegsFromTls() { MOZ_CRASH(); }
 
     void setPrinter(Sprinter*) { MOZ_CRASH(); }
     Operand ToPayload(Operand base) { MOZ_CRASH(); }
@@ -829,14 +829,6 @@ class Assembler : public AssemblerX86Shared
         return CodeOffset(masm.leaq_rip(dest.encoding()).offset());
     }
 
-    void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
-        CodeOffset label = loadRipRelativeInt64(dest);
-        append(wasm::GlobalAccess(label, globalDataOffset));
-    }
-    void loadAsmJSHeapRegisterFromGlobalData() {
-        loadWasmGlobalPtr(wasm::HeapGlobalDataOffset, HeapReg);
-    }
-
     void cmpq(Register rhs, Register lhs) {
         masm.cmpq_rr(rhs.encoding(), lhs.encoding());
     }
@@ -883,6 +883,14 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
     void wasmTruncateFloat32ToUInt64(FloatRegister input, Register output, Label* oolEntry,
                                      Label* oolRejoin, FloatRegister tempDouble);
 
+    void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
+        CodeOffset label = loadRipRelativeInt64(dest);
+        append(wasm::GlobalAccess(label, globalDataOffset));
+    }
+    void loadWasmPinnedRegsFromTls() {
+        loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, memoryBase)), HeapReg);
+    }
+
   public:
     Condition testInt32Truthy(bool truthy, const ValueOperand& operand) {
         test32(operand.valueReg(), operand.valueReg());
@@ -934,14 +934,6 @@ class Assembler : public AssemblerX86Shared
         return CodeOffset(masm.currentOffset());
     }
 
-    void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
-        CodeOffset label = movlWithPatch(PatchedAbsoluteAddress(), dest);
-        append(wasm::GlobalAccess(label, globalDataOffset));
-    }
-    void loadAsmJSHeapRegisterFromGlobalData() {
-        // x86 doesn't have a pinned heap register.
-    }
-
     static bool canUseInSingleByteInstruction(Register reg) {
         return X86Encoding::HasSubregL(reg.encoding());
     }
@@ -839,6 +839,14 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
 
     inline void ensureDouble(const ValueOperand& source, FloatRegister dest, Label* failure);
 
+    void loadWasmGlobalPtr(uint32_t globalDataOffset, Register dest) {
+        CodeOffset label = movlWithPatch(PatchedAbsoluteAddress(), dest);
+        append(wasm::GlobalAccess(label, globalDataOffset));
+    }
+    void loadWasmPinnedRegsFromTls() {
+        // x86 doesn't have any pinned registers.
+    }
+
   public:
     // Used from within an Exit frame to handle a pending exception.
     void handleFailureWithHandlerTail(void* handler);