Bug 1288222 - Baldr: match signature types structurally (r=bbouvier)

MozReview-Commit-ID: 7Noq2TBkKmB

--HG--
extra : rebase_source : 89201ad63cdd369437b7c2c251d6c9e65e9c2ece
This commit is contained in:
Luke Wagner 2016-07-21 21:19:49 -05:00
parent fc7d4e3d39
commit 04fe291d25
30 changed files with 615 additions and 280 deletions

View File

@ -1543,10 +1543,10 @@ class MOZ_STACK_CLASS ModuleValidator
class NamedSig
{
PropertyName* name_;
const DeclaredSig* sig_;
const SigWithId* sig_;
public:
NamedSig(PropertyName* name, const DeclaredSig& sig)
NamedSig(PropertyName* name, const SigWithId& sig)
: name_(name), sig_(&sig)
{}
PropertyName* name() const {
@ -1570,7 +1570,7 @@ class MOZ_STACK_CLASS ModuleValidator
}
};
typedef HashMap<NamedSig, uint32_t, NamedSig> ImportMap;
typedef HashMap<const DeclaredSig*, uint32_t, SigHashPolicy> SigMap;
typedef HashMap<const SigWithId*, uint32_t, SigHashPolicy> SigMap;
typedef HashMap<PropertyName*, Global*> GlobalMap;
typedef HashMap<PropertyName*, MathBuiltin> MathNameMap;
typedef HashMap<PropertyName*, AsmJSAtomicsBuiltinFunction> AtomicsNameMap;

View File

@ -1737,7 +1737,7 @@ class BaseCompiler
void beginFunction() {
JitSpew(JitSpew_Codegen, "# Emitting wasm baseline code");
wasm::GenerateFunctionPrologue(masm, localSize_, mg_.funcSigIndex(func_.index()),
wasm::GenerateFunctionPrologue(masm, localSize_, mg_.funcSigs[func_.index()]->id,
&compileResults_.offsets());
MOZ_ASSERT(masm.framePushed() == uint32_t(localSize_));
@ -2041,7 +2041,7 @@ class BaseCompiler
// Precondition: sync()
void funcPtrCall(const Sig& sig, uint32_t sigIndex, uint32_t length, uint32_t globalDataOffset,
void funcPtrCall(const SigWithId& sig, uint32_t length, uint32_t globalDataOffset,
Stk& indexVal, const FunctionCall& call)
{
Register ptrReg = WasmTableCallPtrReg;
@ -2054,7 +2054,17 @@ class BaseCompiler
} else {
masm.branch32(Assembler::Condition::AboveOrEqual, ptrReg, Imm32(length),
wasm::JumpTarget::OutOfBounds);
masm.move32(Imm32(sigIndex), WasmTableCallSigReg);
}
switch (sig.id.kind()) {
case SigIdDesc::Kind::Global:
masm.loadWasmGlobalPtr(sig.id.globalDataOffset(), WasmTableCallSigReg);
break;
case SigIdDesc::Kind::Immediate:
masm.move32(Imm32(sig.id.immediate()), WasmTableCallSigReg);
break;
case SigIdDesc::Kind::None:
break;
}
{
@ -5143,7 +5153,7 @@ BaseCompiler::emitCallIndirect(uint32_t callOffset)
Nothing callee_;
const Sig& sig = mg_.sigs[sigIndex];
const SigWithId& sig = mg_.sigs[sigIndex];
if (deadCode_) {
return skipCall(sig.args()) && iter_.readCallIndirectCallee(&callee_) &&
@ -5175,7 +5185,7 @@ BaseCompiler::emitCallIndirect(uint32_t callOffset)
? mg_.tables[mg_.asmJSSigToTableIndex[sigIndex]]
: mg_.tables[0];
funcPtrCall(sig, sigIndex, table.initial, table.globalDataOffset, callee, baselineCall);
funcPtrCall(sig, table.initial, table.globalDataOffset, callee, baselineCall);
endCall(baselineCall);

View File

@ -249,53 +249,17 @@ CodeSegment::~CodeSegment()
DeallocateExecutableMemory(bytes_, totalLength(), gc::SystemPageSize());
}
static size_t
SerializedSigSize(const Sig& sig)
{
return sizeof(ExprType) +
SerializedPodVectorSize(sig.args());
}
static uint8_t*
SerializeSig(uint8_t* cursor, const Sig& sig)
{
cursor = WriteScalar<ExprType>(cursor, sig.ret());
cursor = SerializePodVector(cursor, sig.args());
return cursor;
}
static const uint8_t*
DeserializeSig(const uint8_t* cursor, Sig* sig)
{
ExprType ret;
cursor = ReadScalar<ExprType>(cursor, &ret);
ValTypeVector args;
cursor = DeserializePodVector(cursor, &args);
if (!cursor)
return nullptr;
*sig = Sig(Move(args), ret);
return cursor;
}
static size_t
SizeOfSigExcludingThis(const Sig& sig, MallocSizeOf mallocSizeOf)
{
return sig.args().sizeOfExcludingThis(mallocSizeOf);
}
size_t
FuncExport::serializedSize() const
{
return SerializedSigSize(sig_) +
return sig_.serializedSize() +
sizeof(pod);
}
uint8_t*
FuncExport::serialize(uint8_t* cursor) const
{
cursor = SerializeSig(cursor, sig_);
cursor = sig_.serialize(cursor);
cursor = WriteBytes(cursor, &pod, sizeof(pod));
return cursor;
}
@ -303,7 +267,7 @@ FuncExport::serialize(uint8_t* cursor) const
const uint8_t*
FuncExport::deserialize(const uint8_t* cursor)
{
(cursor = DeserializeSig(cursor, &sig_)) &&
(cursor = sig_.deserialize(cursor)) &&
(cursor = ReadBytes(cursor, &pod, sizeof(pod)));
return cursor;
}
@ -311,20 +275,20 @@ FuncExport::deserialize(const uint8_t* cursor)
size_t
FuncExport::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
return SizeOfSigExcludingThis(sig_, mallocSizeOf);
return sig_.sizeOfExcludingThis(mallocSizeOf);
}
size_t
FuncImport::serializedSize() const
{
return SerializedSigSize(sig_) +
return sig_.serializedSize() +
sizeof(pod);
}
uint8_t*
FuncImport::serialize(uint8_t* cursor) const
{
cursor = SerializeSig(cursor, sig_);
cursor = sig_.serialize(cursor);
cursor = WriteBytes(cursor, &pod, sizeof(pod));
return cursor;
}
@ -332,7 +296,7 @@ FuncImport::serialize(uint8_t* cursor) const
const uint8_t*
FuncImport::deserialize(const uint8_t* cursor)
{
(cursor = DeserializeSig(cursor, &sig_)) &&
(cursor = sig_.deserialize(cursor)) &&
(cursor = ReadBytes(cursor, &pod, sizeof(pod)));
return cursor;
}
@ -340,7 +304,7 @@ FuncImport::deserialize(const uint8_t* cursor)
size_t
FuncImport::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
return SizeOfSigExcludingThis(sig_, mallocSizeOf);
return sig_.sizeOfExcludingThis(mallocSizeOf);
}
CodeRange::CodeRange(Kind kind, Offsets offsets)
@ -452,6 +416,7 @@ Metadata::serializedSize() const
return sizeof(pod()) +
SerializedVectorSize(funcImports) +
SerializedVectorSize(funcExports) +
SerializedVectorSize(sigIds) +
SerializedPodVectorSize(tables) +
SerializedPodVectorSize(memoryAccesses) +
SerializedPodVectorSize(boundsChecks) +
@ -469,6 +434,7 @@ Metadata::serialize(uint8_t* cursor) const
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
cursor = SerializeVector(cursor, funcImports);
cursor = SerializeVector(cursor, funcExports);
cursor = SerializeVector(cursor, sigIds);
cursor = SerializePodVector(cursor, tables);
cursor = SerializePodVector(cursor, memoryAccesses);
cursor = SerializePodVector(cursor, boundsChecks);
@ -487,6 +453,7 @@ Metadata::deserialize(const uint8_t* cursor)
(cursor = ReadBytes(cursor, &pod(), sizeof(pod()))) &&
(cursor = DeserializeVector(cursor, &funcImports)) &&
(cursor = DeserializeVector(cursor, &funcExports)) &&
(cursor = DeserializeVector(cursor, &sigIds)) &&
(cursor = DeserializePodVector(cursor, &tables)) &&
(cursor = DeserializePodVector(cursor, &memoryAccesses)) &&
(cursor = DeserializePodVector(cursor, &boundsChecks)) &&
@ -504,6 +471,7 @@ Metadata::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
return SizeOfVectorExcludingThis(funcImports, mallocSizeOf) +
SizeOfVectorExcludingThis(funcExports, mallocSizeOf) +
SizeOfVectorExcludingThis(sigIds, mallocSizeOf) +
tables.sizeOfExcludingThis(mallocSizeOf) +
memoryAccesses.sizeOfExcludingThis(mallocSizeOf) +
boundsChecks.sizeOfExcludingThis(mallocSizeOf) +

View File

@ -466,6 +466,7 @@ struct Metadata : ShareableBase<Metadata>, MetadataCacheablePod
FuncImportVector funcImports;
FuncExportVector funcExports;
SigWithIdVector sigIds;
TableDescVector tables;
MemoryAccessVector memoryAccesses;
BoundsCheckVector boundsChecks;

View File

@ -550,7 +550,7 @@ DecodeTypeSection(Decoder& d, ModuleGeneratorData* init)
}
static bool
DecodeSignatureIndex(Decoder& d, const ModuleGeneratorData& init, const DeclaredSig** sig)
DecodeSignatureIndex(Decoder& d, const ModuleGeneratorData& init, const SigWithId** sig)
{
uint32_t sigIndex;
if (!d.readVarU32(&sigIndex))
@ -723,7 +723,7 @@ static bool
DecodeImport(Decoder& d, bool newFormat, ModuleGeneratorData* init, ImportVector* imports)
{
if (!newFormat) {
const DeclaredSig* sig = nullptr;
const SigWithId* sig = nullptr;
if (!DecodeSignatureIndex(d, *init, &sig))
return false;
@ -764,7 +764,7 @@ DecodeImport(Decoder& d, bool newFormat, ModuleGeneratorData* init, ImportVector
switch (DefinitionKind(importKind)) {
case DefinitionKind::Function: {
const DeclaredSig* sig = nullptr;
const SigWithId* sig = nullptr;
if (!DecodeSignatureIndex(d, *init, &sig))
return false;
if (!CheckTypeForJS(d, *sig))
@ -1067,7 +1067,7 @@ DecodeFunctionBody(Decoder& d, ModuleGenerator& mg, uint32_t funcIndex)
return false;
ValTypeVector locals;
const DeclaredSig& sig = mg.funcSig(funcIndex);
const Sig& sig = mg.funcSig(funcIndex);
if (!locals.appendAll(sig.args()))
return false;
@ -1120,7 +1120,7 @@ DecodeStartSection(Decoder& d, ModuleGenerator& mg)
if (startFuncIndex >= mg.numFuncSigs())
return Fail(d, "unknown start function");
const DeclaredSig& sig = mg.funcSig(startFuncIndex);
const Sig& sig = mg.funcSig(startFuncIndex);
if (sig.ret() != ExprType::Void)
return Fail(d, "start function must not return anything");

View File

@ -331,7 +331,7 @@ GenerateProfilingEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason
// Specifically, ToggleProfiling patches all callsites to either call the
// profiling or non-profiling entry point.
void
wasm::GenerateFunctionPrologue(MacroAssembler& masm, unsigned framePushed, uint32_t sigIndex,
wasm::GenerateFunctionPrologue(MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
FuncOffsets* offsets)
{
#if defined(JS_CODEGEN_ARM)
@ -349,8 +349,21 @@ wasm::GenerateFunctionPrologue(MacroAssembler& masm, unsigned framePushed, uint3
// Generate table entry thunk:
masm.haltingAlign(CodeAlignment);
offsets->tableEntry = masm.currentOffset();
masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg, Imm32(sigIndex),
JumpTarget::BadIndirectCall);
switch (sigId.kind()) {
case SigIdDesc::Kind::Global: {
Register scratch = WasmTableCallPtrReg; // clobbered by the indirect call
masm.loadWasmGlobalPtr(sigId.globalDataOffset(), scratch);
masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg, scratch,
JumpTarget::BadIndirectCall);
break;
}
case SigIdDesc::Kind::Immediate:
masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg, Imm32(sigId.immediate()),
JumpTarget::BadIndirectCall);
break;
case SigIdDesc::Kind::None:
break;
}
offsets->tableProfilingJump = masm.nopPatchableToNearJump().offset();
// Generate normal prologue:

View File

@ -33,6 +33,7 @@ namespace wasm {
class CallSite;
class CodeRange;
class Instance;
class SigIdDesc;
struct CallThunk;
struct FuncOffsets;
struct Metadata;
@ -111,7 +112,7 @@ void
GenerateExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
ProfilingOffsets* offsets);
void
GenerateFunctionPrologue(jit::MacroAssembler& masm, unsigned framePushed, uint32_t sigIndex,
GenerateFunctionPrologue(jit::MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
FuncOffsets* offsets);
void
GenerateFunctionEpilogue(jit::MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets);

View File

@ -152,6 +152,26 @@ ModuleGenerator::init(UniqueModuleGeneratorData shared, CompileArgs&& args,
if (!allocateGlobalBytes(sizeof(void*), sizeof(void*), &table.globalDataOffset))
return false;
}
for (uint32_t i = 0; i < numSigs_; i++) {
SigWithId& sig = shared_->sigs[i];
if (SigIdDesc::isGlobal(sig)) {
uint32_t globalDataOffset;
if (!allocateGlobalBytes(sizeof(void*), sizeof(void*), &globalDataOffset))
return false;
sig.id = SigIdDesc::global(sig, globalDataOffset);
Sig copy;
if (!copy.clone(sig))
return false;
if (!metadata_->sigIds.emplaceBack(Move(copy), sig.id))
return false;
} else {
sig.id = SigIdDesc::immediate(sig);
}
}
} else {
MOZ_ASSERT(shared_->sigs.length() == MaxSigs);
MOZ_ASSERT(shared_->tables.length() == MaxTables);
@ -341,9 +361,6 @@ ModuleGenerator::finishTask(IonCompileTask* task)
return true;
}
typedef Vector<Offsets, 0, SystemAllocPolicy> OffsetVector;
typedef Vector<ProfilingOffsets, 0, SystemAllocPolicy> ProfilingOffsetVector;
bool
ModuleGenerator::finishFuncExports()
{
@ -376,6 +393,9 @@ ModuleGenerator::finishFuncExports()
return true;
}
typedef Vector<Offsets, 0, SystemAllocPolicy> OffsetVector;
typedef Vector<ProfilingOffsets, 0, SystemAllocPolicy> ProfilingOffsetVector;
bool
ModuleGenerator::finishCodegen()
{
@ -616,7 +636,7 @@ ModuleGenerator::initSig(uint32_t sigIndex, Sig&& sig)
shared_->sigs[sigIndex] = Move(sig);
}
const DeclaredSig&
const SigWithId&
ModuleGenerator::sig(uint32_t index) const
{
MOZ_ASSERT(index < numSigs_);
@ -650,7 +670,7 @@ ModuleGenerator::bumpMinMemoryLength(uint32_t newMinMemoryLength)
shared_->minMemoryLength = newMinMemoryLength;
}
const DeclaredSig&
const SigWithId&
ModuleGenerator::funcSig(uint32_t funcIndex) const
{
MOZ_ASSERT(shared_->funcSigs[funcIndex]);

View File

@ -38,11 +38,11 @@ class FunctionGenerator;
struct FuncImportGenDesc
{
const DeclaredSig* sig;
const SigWithId* sig;
uint32_t globalDataOffset;
FuncImportGenDesc() : sig(nullptr), globalDataOffset(0) {}
explicit FuncImportGenDesc(const DeclaredSig* sig) : sig(sig), globalDataOffset(0) {}
explicit FuncImportGenDesc(const SigWithId* sig) : sig(sig), globalDataOffset(0) {}
};
typedef Vector<FuncImportGenDesc, 0, SystemAllocPolicy> FuncImportGenDescVector;
@ -55,17 +55,13 @@ struct ModuleGeneratorData
mozilla::Atomic<uint32_t> minMemoryLength;
uint32_t maxMemoryLength;
DeclaredSigVector sigs;
DeclaredSigPtrVector funcSigs;
SigWithIdVector sigs;
SigWithIdPtrVector funcSigs;
FuncImportGenDescVector funcImports;
GlobalDescVector globals;
TableDescVector tables;
Uint32Vector asmJSSigToTableIndex;
uint32_t funcSigIndex(uint32_t funcIndex) const {
return funcSigs[funcIndex] - sigs.begin();
}
explicit ModuleGeneratorData(SignalUsage usesSignal, ModuleKind kind = ModuleKind::Wasm)
: kind(kind),
usesSignal(usesSignal),
@ -161,11 +157,11 @@ class MOZ_STACK_CLASS ModuleGenerator
// Signatures:
uint32_t numSigs() const { return numSigs_; }
const DeclaredSig& sig(uint32_t sigIndex) const;
const SigWithId& sig(uint32_t sigIndex) const;
// Function declarations:
uint32_t numFuncSigs() const { return shared_->funcSigs.length(); }
const DeclaredSig& funcSig(uint32_t funcIndex) const;
const SigWithId& funcSig(uint32_t funcIndex) const;
// Globals:
MOZ_MUST_USE bool allocateGlobal(ValType type, bool isConst, uint32_t* index);

View File

@ -41,6 +41,59 @@ using namespace js::wasm;
using mozilla::BinarySearch;
using mozilla::Swap;
// Process-wide interning set mapping each structurally-distinct Sig to a
// unique heap-allocated Sig*, so that pointer equality implies structural
// equality of signatures. Entries are manually reference-counted: each
// Instance that registers a Sig holds one reference until destruction.
class SigIdSet
{
    typedef HashMap<const Sig*, uint32_t, SigHashPolicy, SystemAllocPolicy> Map;
    Map map_;

  public:
    ~SigIdSet() {
        // Once the last runtime is gone, every Instance must have released
        // its references, leaving the set empty.
        MOZ_ASSERT_IF(!JSRuntime::hasLiveRuntimes(), !map_.initialized() || map_.empty());
    }

    // Lazily initialize the underlying hash map; reports OOM on failure.
    bool ensureInitialized(JSContext* cx) {
        if (!map_.initialized() && !map_.init()) {
            ReportOutOfMemory(cx);
            return false;
        }
        return true;
    }

    // Store in *sigId the canonical pointer for 'sig', cloning the Sig into
    // the set on first use, and bump that entry's reference count.
    bool allocateSigId(JSContext* cx, const Sig& sig, const void** sigId) {
        Map::AddPtr p = map_.lookupForAdd(sig);
        if (p) {
            // A present entry always has at least one live reference.
            MOZ_ASSERT(p->value() > 0);
            p->value()++;
            *sigId = p->key();
            return true;
        }

        UniquePtr<Sig> clone = MakeUnique<Sig>();
        if (!clone || !clone->clone(sig) || !map_.add(p, clone.get(), 1)) {
            ReportOutOfMemory(cx);
            return false;
        }

        // Ownership of the clone has passed to the map. The pointer doubles
        // as the id, so its low bit must be clear to distinguish it from
        // bit-packed immediate ids.
        *sigId = clone.release();
        MOZ_ASSERT(!(uintptr_t(*sigId) & SigIdDesc::ImmediateBit));
        return true;
    }

    // Drop one reference to 'sigId'; delete the canonical Sig and remove the
    // entry when the count reaches zero.
    void deallocateSigId(const Sig& sig, const void* sigId) {
        Map::Ptr p = map_.lookup(sig);
        MOZ_RELEASE_ASSERT(p && p->key() == sigId && p->value() > 0);

        p->value()--;
        if (!p->value()) {
            js_delete(p->key());
            map_.remove(p);
        }
    }
};
ExclusiveData<SigIdSet> sigIdSet;
uint8_t**
Instance::addressOfMemoryBase() const
{
@ -54,6 +107,13 @@ Instance::addressOfTableBase(size_t tableIndex) const
return (void**)(codeSegment_->globalData() + metadata_->tables[tableIndex].globalDataOffset);
}
// Locate the global-data slot reserved for this global-kind signature id.
const void**
Instance::addressOfSigId(const SigIdDesc& sigId) const
{
    uint32_t offset = sigId.globalDataOffset();
    MOZ_ASSERT(offset >= InitialGlobalDataBytes);
    return (const void**)(codeSegment_->globalData() + offset);
}
FuncImportExit&
Instance::funcImportToExit(const FuncImport& fi)
{
@ -388,6 +448,27 @@ Instance::Instance(UniqueCodeSegment codeSegment,
*addressOfTableBase(i) = tables_[i]->array();
}
// Register this instance's signatures in the process-wide SigIdSet and store
// the resulting unique ids into this instance's global data, where the
// table-entry prologues read them for indirect-call signature checks.
bool
Instance::init(JSContext* cx)
{
    if (metadata_->sigIds.empty())
        return true;

    ExclusiveData<SigIdSet>::Guard guard = sigIdSet.lock();

    if (!guard->ensureInitialized(cx))
        return false;

    for (const SigWithId& sigWithId : metadata_->sigIds) {
        const void* uniqueId;
        if (!guard->allocateSigId(cx, sigWithId, &uniqueId))
            return false;

        *addressOfSigId(sigWithId.id) = uniqueId;
    }

    return true;
}
Instance::~Instance()
{
for (unsigned i = 0; i < metadata_->funcImports.length(); i++) {
@ -395,6 +476,15 @@ Instance::~Instance()
if (exit.baselineScript)
exit.baselineScript->removeDependentWasmImport(*this, i);
}
if (!metadata_->sigIds.empty()) {
ExclusiveData<SigIdSet>::Guard lockedSigIdSet = sigIdSet.lock();
for (const SigWithId& sig : metadata_->sigIds) {
if (const void* sigId = *addressOfSigId(sig.id))
lockedSigIdSet->deallocateSigId(sig, sigId);
}
}
}
void

View File

@ -55,6 +55,7 @@ class Instance
// Internal helpers:
uint8_t** addressOfMemoryBase() const;
void** addressOfTableBase(size_t tableIndex) const;
const void** addressOfSigId(const SigIdDesc& sigId) const;
FuncImportExit& funcImportToExit(const FuncImport& fi);
MOZ_MUST_USE bool toggleProfiling(JSContext* cx);
@ -80,6 +81,7 @@ class Instance
SharedTableVector&& tables,
Handle<FunctionVector> funcImports);
~Instance();
bool init(JSContext* cx);
void trace(JSTracer* trc);
const CodeSegment& codeSegment() const { return *codeSegment_; }

View File

@ -901,10 +901,11 @@ class FunctionCompiler
MInstruction* ptrFun = MAsmJSLoadFuncPtr::New(alloc(), maskedIndex, globalDataOffset);
curBlock_->add(ptrFun);
callee = MAsmJSCall::Callee(ptrFun);
MOZ_ASSERT(mg_.sigs[sigIndex].id.kind() == SigIdDesc::Kind::None);
} else {
MInstruction* ptrFun = MAsmJSLoadFuncPtr::New(alloc(), index, length, globalDataOffset);
curBlock_->add(ptrFun);
callee = MAsmJSCall::Callee(ptrFun, sigIndex);
callee = MAsmJSCall::Callee(ptrFun, mg_.sigs[sigIndex].id);
}
return callPrivate(callee, args, mg_.sigs[sigIndex].ret(), def);
@ -3384,10 +3385,10 @@ wasm::IonCompileFunction(IonCompileTask* task)
if (!lir)
return false;
uint32_t sigIndex = task->mg().funcSigIndex(func.index());
SigIdDesc sigId = task->mg().funcSigs[func.index()]->id;
CodeGenerator codegen(&mir, lir, &results.masm());
if (!codegen.generateWasm(sigIndex, &results.offsets()))
if (!codegen.generateWasm(sigId, &results.offsets()))
return false;
}

View File

@ -37,16 +37,16 @@ typedef jit::ABIArgIter<ValTypeVector> ABIArgValTypeIter;
class FuncBytes
{
Bytes bytes_;
uint32_t index_;
const DeclaredSig& sig_;
uint32_t lineOrBytecode_;
Uint32Vector callSiteLineNums_;
Bytes bytes_;
uint32_t index_;
const SigWithId& sig_;
uint32_t lineOrBytecode_;
Uint32Vector callSiteLineNums_;
public:
FuncBytes(Bytes&& bytes,
uint32_t index,
const DeclaredSig& sig,
const SigWithId& sig,
uint32_t lineOrBytecode,
Uint32Vector&& callSiteLineNums)
: bytes_(Move(bytes)),
@ -59,7 +59,7 @@ class FuncBytes
Bytes& bytes() { return bytes_; }
const Bytes& bytes() const { return bytes_; }
uint32_t index() const { return index_; }
const DeclaredSig& sig() const { return sig_; }
const SigWithId& sig() const { return sig_; }
uint32_t lineOrBytecode() const { return lineOrBytecode_; }
const Uint32Vector& callSiteLineNums() const { return callSiteLineNums_; }
};

View File

@ -442,8 +442,8 @@ Module::instantiateMemory(JSContext* cx, MutableHandleWasmMemoryObject memory) c
}
bool
Module::instantiateTable(JSContext* cx, const CodeSegment& codeSegment,
HandleWasmTableObject tableImport, SharedTableVector* tables) const
Module::instantiateTable(JSContext* cx, HandleWasmTableObject tableImport,
SharedTableVector* tables) const
{
for (const TableDesc& tableDesc : metadata_->tables) {
SharedTable table;
@ -551,7 +551,7 @@ Module::instantiate(JSContext* cx,
return false;
SharedTableVector tables;
if (!instantiateTable(cx, *codeSegment, tableImport, &tables))
if (!instantiateTable(cx, tableImport, &tables))
return false;
// To support viewing the source of an instance (Instance::createText), the
@ -586,6 +586,9 @@ Module::instantiate(JSContext* cx,
instanceObj->init(Move(instance));
}
if (!instanceObj->instance().init(cx))
return false;
// Create the export object.
RootedObject exportObj(cx);

View File

@ -186,8 +186,8 @@ class Module : public RefCounted<Module>
const SharedBytes bytecode_;
bool instantiateMemory(JSContext* cx, MutableHandleWasmMemoryObject memory) const;
bool instantiateTable(JSContext* cx, const CodeSegment& codeSegment,
HandleWasmTableObject tableImport, SharedTableVector* tables) const;
bool instantiateTable(JSContext* cx, HandleWasmTableObject tableImport,
SharedTableVector* tables) const;
bool initElems(JSContext* cx, HandleWasmInstanceObject instanceObj,
HandleWasmTableObject tableObj) const;

View File

@ -330,6 +330,154 @@ GetCPUID()
#endif
}
typedef uint32_t ImmediateType; // for 32/64 consistency
static const unsigned sImmediateBits = sizeof(ImmediateType) * 8 - 1; // -1 for ImmediateBit
static const unsigned sReturnBit = 1;
static const unsigned sLengthBits = 4;
static const unsigned sTypeBits = 2;
static const unsigned sMaxTypes = (sImmediateBits - sReturnBit - sLengthBits) / sTypeBits;
// A ValType can be bit-packed when its (1-based) code fits in sTypeBits.
static bool
IsImmediateType(ValType vt)
{
    uint32_t code = uint32_t(vt);
    MOZ_ASSERT(code > 0);
    return code - 1 < (1 << sTypeBits);
}
// Void is always representable in an immediate; any other ExprType defers to
// the ValType overload.
static bool
IsImmediateType(ExprType et)
{
    if (et == ExprType::Void)
        return true;
    return IsImmediateType(NonVoidToValType(et));
}
/* static */ bool
SigIdDesc::isGlobal(const Sig& sig)
{
unsigned numTypes = (sig.ret() == ExprType::Void ? 0 : 1) +
(sig.args().length());
if (numTypes > sMaxTypes)
return true;
if (!IsImmediateType(sig.ret()))
return true;
for (ValType v : sig.args()) {
if (!IsImmediateType(v))
return true;
}
return false;
}
// Build a global-kind id whose bits record the global-data offset where the
// instance stores the canonical Sig pointer at instantiation.
/* static */ SigIdDesc
SigIdDesc::global(const Sig& sig, uint32_t globalDataOffset)
{
    MOZ_ASSERT(isGlobal(sig));
    SigIdDesc desc(Kind::Global, globalDataOffset);
    return desc;
}
// Encode the number-of-types field: the raw count, which is guaranteed by the
// isGlobal() check to fit in sLengthBits.
static ImmediateType
LengthToBits(uint32_t length)
{
    static_assert(sMaxTypes <= ((1 << sLengthBits) - 1), "fits");
    MOZ_ASSERT(length <= sMaxTypes);
    return ImmediateType(length);
}
// Encode one type field: map the 1-based ValType code onto 0..3 so that it
// fits in sTypeBits.
static ImmediateType
TypeToBits(ValType type)
{
    static_assert(3 <= ((1 << sTypeBits) - 1), "fits");
    uint32_t code = uint32_t(type);
    MOZ_ASSERT(code >= 1 && code <= 4);
    return code - 1;
}
// Serialized form of a Sig: the return ExprType scalar followed by the POD
// vector of argument types.
size_t
Sig::serializedSize() const
{
    return sizeof(ret_) +
           SerializedPodVectorSize(args_);
}

uint8_t*
Sig::serialize(uint8_t* cursor) const
{
    cursor = WriteScalar<ExprType>(cursor, ret_);
    cursor = SerializePodVector(cursor, args_);
    return cursor;
}

const uint8_t*
Sig::deserialize(const uint8_t* cursor)
{
    // The && chain short-circuits on a null cursor, so a failed first read
    // propagates null without attempting the second.
    (cursor = ReadScalar<ExprType>(cursor, &ret_)) &&
    (cursor = DeserializePodVector(cursor, &args_));
    return cursor;
}

size_t
Sig::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
    // Only the argument vector owns heap memory; ret_ is stored inline.
    return args_.sizeOfExcludingThis(mallocSizeOf);
}
// Bit-pack a small signature into an immediate id. Layout, LSB first:
//   bit 0:              ImmediateBit (always set; distinguishes immediates
//                       from pointer-valued global ids)
//   1 bit:              has-return flag
//   sTypeBits:          return type (present only when has-return is set)
//   sLengthBits:        number of argument types
//   sTypeBits per arg:  the argument types, in order
/* static */ SigIdDesc
SigIdDesc::immediate(const Sig& sig)
{
    ImmediateType immediate = ImmediateBit;
    uint32_t shift = 1;

    if (sig.ret() != ExprType::Void) {
        immediate |= (1 << shift);
        shift += sReturnBit;

        immediate |= TypeToBits(NonVoidToValType(sig.ret())) << shift;
        shift += sTypeBits;
    } else {
        // No return: only the flag bit is consumed.
        shift += sReturnBit;
    }

    immediate |= LengthToBits(sig.args().length()) << shift;
    shift += sLengthBits;

    for (ValType argType : sig.args()) {
        immediate |= TypeToBits(argType) << shift;
        shift += sTypeBits;
    }

    // isGlobal() guarantees that everything packed fits in sImmediateBits.
    MOZ_ASSERT(shift <= sImmediateBits);
    return SigIdDesc(Kind::Immediate, immediate);
}
// SigWithId serializes as the base Sig followed by the raw bytes of its
// SigIdDesc.
size_t
SigWithId::serializedSize() const
{
    return Sig::serializedSize() +
           sizeof(id);
}

uint8_t*
SigWithId::serialize(uint8_t* cursor) const
{
    cursor = Sig::serialize(cursor);
    cursor = WriteBytes(cursor, &id, sizeof(id));
    return cursor;
}

const uint8_t*
SigWithId::deserialize(const uint8_t* cursor)
{
    // Short-circuits on a null cursor from a failed base-class read.
    (cursor = Sig::deserialize(cursor)) &&
    (cursor = ReadBytes(cursor, &id, sizeof(id)));
    return cursor;
}

size_t
SigWithId::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
    // The id is stored inline; only the base Sig owns heap memory.
    return Sig::sizeOfExcludingThis(mallocSizeOf);
}
Assumptions::Assumptions(JS::BuildIdCharVector&& buildId)
: usesSignal(),
cpuId(GetCPUID()),

View File

@ -402,6 +402,8 @@ class Sig
bool operator!=(const Sig& rhs) const {
return !(*this == rhs);
}
WASM_DECLARE_SERIALIZABLE(Sig)
};
struct SigHashPolicy
@ -411,6 +413,61 @@ struct SigHashPolicy
static bool match(const Sig* lhs, Lookup rhs) { return *lhs == rhs; }
};
// SigIdDesc describes a signature id that can be used by call_indirect and
// table-entry prologues to check whether the caller's and callee's signatures
// *structurally* match. To handle the general case, a Sig is
// allocated and stored in a process-wide hash table, so that pointer equality
// implies structural equality. As an optimization for the 99% case where the
// Sig has a small number of parameters, the Sig is bit-packed into a uint32
// immediate value so that integer equality implies structural equality. Both
// cases can be handled with a single comparison by always setting the LSB for
// the immediates (the LSB is necessarily 0 for allocated Sig pointers due to
// alignment).
class SigIdDesc
{
  public:
    // None: no signature check; Immediate: bits_ holds the packed signature;
    // Global: bits_ holds a global-data offset where the canonical Sig
    // pointer is stored at instantiation.
    enum class Kind { None, Immediate, Global };

    // Set in every immediate id; clear in every (aligned) Sig pointer, so one
    // comparison distinguishes the two encodings.
    static const uintptr_t ImmediateBit = 0x1;

  private:
    Kind kind_;
    // Interpretation depends on kind_; see Kind above.
    size_t bits_;

    SigIdDesc(Kind kind, size_t bits) : kind_(kind), bits_(bits) {}

  public:
    Kind kind() const { return kind_; }

    // True when 'sig' is too large/complex to bit-pack as an immediate.
    static bool isGlobal(const Sig& sig);

    // Default-constructed ids perform no check (Kind::None).
    SigIdDesc() : kind_(Kind::None), bits_(0) {}
    static SigIdDesc global(const Sig& sig, uint32_t globalDataOffset);
    static SigIdDesc immediate(const Sig& sig);

    bool isGlobal() const { return kind_ == Kind::Global; }
    // Accessors assert the kind matches the requested encoding.
    size_t immediate() const { MOZ_ASSERT(kind_ == Kind::Immediate); return bits_; }
    uint32_t globalDataOffset() const { MOZ_ASSERT(kind_ == Kind::Global); return bits_; }
};
// SigWithId pairs a Sig with SigIdDesc, describing either how to compile code
// that compares this signature's id or, at instantiation what signature ids to
// allocate in the global hash and where to put them.
struct SigWithId : Sig
{
    // Id used for structural signature comparison; default-constructed to
    // Kind::None until assigned.
    SigIdDesc id;

    SigWithId() = default;
    explicit SigWithId(Sig&& sig, SigIdDesc id) : Sig(Move(sig)), id(id) {}

    // Assigning a bare Sig replaces only the signature part, leaving 'id'
    // untouched.
    void operator=(Sig&& rhs) { Sig::operator=(Move(rhs)); }

    WASM_DECLARE_SERIALIZABLE(SigWithId)
};
typedef Vector<SigWithId, 0, SystemAllocPolicy> SigWithIdVector;
typedef Vector<const SigWithId*, 0, SystemAllocPolicy> SigWithIdPtrVector;
// A GlobalDesc describes a single global variable. Currently, globals are only
// exposed through asm.js.
@ -426,22 +483,6 @@ struct GlobalDesc
typedef Vector<GlobalDesc, 0, SystemAllocPolicy> GlobalDescVector;
// A "declared" signature is a Sig object that is created and owned by the
// ModuleGenerator. These signature objects are read-only and have the same
// lifetime as the ModuleGenerator. This type is useful since some uses of Sig
// need this extended lifetime and want to statically distinguish from the
// common stack-allocated Sig objects that get passed around.
struct DeclaredSig : Sig
{
DeclaredSig() = default;
explicit DeclaredSig(Sig&& sig) : Sig(Move(sig)) {}
void operator=(Sig&& rhs) { Sig::operator=(Move(rhs)); }
};
typedef Vector<DeclaredSig, 0, SystemAllocPolicy> DeclaredSigVector;
typedef Vector<const DeclaredSig*, 0, SystemAllocPolicy> DeclaredSigPtrVector;
// The (,Profiling,Func)Offsets classes are used to record the offsets of
// different key points in a CodeRange during compilation.

View File

@ -58,21 +58,21 @@
(assert_return (invoke "callt" (i32.const 0)) (i32.const 1))
(assert_return (invoke "callt" (i32.const 1)) (i32.const 2))
(assert_return (invoke "callt" (i32.const 2)) (i32.const 3))
(assert_trap (invoke "callt" (i32.const 3)) "indirect call signature mismatch")
(assert_trap (invoke "callt" (i32.const 4)) "indirect call signature mismatch")
(assert_return (invoke "callt" (i32.const 3)) (i32.const 4))
(assert_return (invoke "callt" (i32.const 4)) (i32.const 5))
(assert_return (invoke "callt" (i32.const 5)) (i32.const 1))
(assert_return (invoke "callt" (i32.const 6)) (i32.const 3))
(assert_trap (invoke "callt" (i32.const 7)) "undefined table index 7")
(assert_trap (invoke "callt" (i32.const 100)) "undefined table index 100")
(assert_trap (invoke "callt" (i32.const -1)) "undefined table index -1")
(assert_trap (invoke "callu" (i32.const 0)) "indirect call signature mismatch")
(assert_trap (invoke "callu" (i32.const 1)) "indirect call signature mismatch")
(assert_trap (invoke "callu" (i32.const 2)) "indirect call signature mismatch")
(assert_return (invoke "callu" (i32.const 0)) (i32.const 1))
(assert_return (invoke "callu" (i32.const 1)) (i32.const 2))
(assert_return (invoke "callu" (i32.const 2)) (i32.const 3))
(assert_return (invoke "callu" (i32.const 3)) (i32.const 4))
(assert_return (invoke "callu" (i32.const 4)) (i32.const 5))
(assert_trap (invoke "callu" (i32.const 5)) "indirect call signature mismatch")
(assert_trap (invoke "callu" (i32.const 6)) "indirect call signature mismatch")
(assert_return (invoke "callu" (i32.const 5)) (i32.const 1))
(assert_return (invoke "callu" (i32.const 6)) (i32.const 3))
(assert_trap (invoke "callu" (i32.const 7)) "undefined table index 7")
(assert_trap (invoke "callu" (i32.const -1)) "undefined table index -1")

View File

@ -0,0 +1,154 @@
// |jit-test| --no-baseline
// Turn off baseline since it messes up the GC finalization assertions by
// adding spurious edges to the GC graph.
load(libdir + 'wasm.js');
load(libdir + 'asserts.js');
const Module = WebAssembly.Module;
const Instance = WebAssembly.Instance;
const Table = WebAssembly.Table;
// Explicitly opt into the new binary format for imports and exports until it
// is used by default everywhere.
const textToBinary = str => wasmTextToBinary(str, 'new-format');
const evalText = (str, imports) => new Instance(new Module(textToBinary(str)), imports);
var caller = `(type $v2i (func (result i32))) (func $call (param $i i32) (result i32) (call_indirect $v2i (get_local $i))) (export "call" $call)`
var callee = i => `(func $f${i} (type $v2i) (result i32) (i32.const ${i}))`;
// A table should not hold exported functions alive and exported functions
// should not hold their originating table alive. Live exported functions should
// hold instances alive. Nothing should hold the export object alive.
resetFinalizeCount();
var i = evalText(`(module (table (resizable 2)) (export "tbl" table) (elem 0 $f0) ${callee(0)} ${caller})`);
var e = i.exports;
var t = e.tbl;
var f = t.get(0);
assertEq(f(), e.call(0));
assertErrorMessage(() => e.call(1), Error, /bad wasm indirect call/);
assertErrorMessage(() => e.call(2), Error, /out-of-range/);
assertEq(finalizeCount(), 0);
i.edge = makeFinalizeObserver();
e.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
f.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
f = null;
gc();
assertEq(finalizeCount(), 1);
f = t.get(0);
f.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 1);
i.exports = null;
e = null;
gc();
assertEq(finalizeCount(), 2);
t = null;
gc();
assertEq(finalizeCount(), 3);
i = null;
gc();
assertEq(finalizeCount(), 3);
assertEq(f(), 0);
f = null;
gc();
assertEq(finalizeCount(), 5);
// A table should hold the instance of any of its elements alive.
resetFinalizeCount();
var i = evalText(`(module (table (resizable 1)) (export "tbl" table) (elem 0 $f0) ${callee(0)} ${caller})`);
var e = i.exports;
var t = e.tbl;
var f = t.get(0);
i.edge = makeFinalizeObserver();
e.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
f.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
i.exports = null;
e = null;
gc();
assertEq(finalizeCount(), 1);
f = null;
gc();
assertEq(finalizeCount(), 2);
i = null;
gc();
assertEq(finalizeCount(), 2);
t = null;
gc();
assertEq(finalizeCount(), 4);
// The bad-indirect-call stub should (currently, could be changed later) keep
// the instance containing that stub alive.
resetFinalizeCount();
var i = evalText(`(module (table (resizable 2)) (export "tbl" table) ${caller})`);
var e = i.exports;
var t = e.tbl;
i.edge = makeFinalizeObserver();
e.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
i.exports = null;
e = null;
gc();
assertEq(finalizeCount(), 1);
i = null;
gc();
assertEq(finalizeCount(), 1);
t = null;
gc();
assertEq(finalizeCount(), 3);
// Before initialization, a table is not bound to any instance.
resetFinalizeCount();
var i = evalText(`(module (func $f0 (result i32) (i32.const 0)) (export "f0" $f0))`);
// This Table is created via the JS API and never populated, so it has no
// instance to keep alive.
var t = new Table({initial:4});
i.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
// Instance and table die independently of one another.
i = null;
gc();
assertEq(finalizeCount(), 1);
t = null;
gc();
assertEq(finalizeCount(), 2);
// When a Table is created (uninitialized) and then first assigned, it keeps the
// first element's Instance alive (as above).
resetFinalizeCount();
var i = evalText(`(module (func $f (result i32) (i32.const 42)) (export "f" $f))`);
var f = i.exports.f;
var t = new Table({initial:1});
i.edge = makeFinalizeObserver();
f.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
// First assignment: this is what binds the table to f's instance.
t.set(0, f);
assertEq(t.get(0), f);
assertEq(t.get(0)(), 42);
gc();
assertEq(finalizeCount(), 0);
// The function object and export object die, but the element still works:
// the table keeps the underlying code/instance alive.
f = null;
i.exports = null;
gc();
assertEq(finalizeCount(), 1);
assertEq(t.get(0)(), 42);
// A freshly extracted function object is finalized once unreferenced.
t.get(0).edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 2);
// The instance survives dropping its own JS object: the table pins it.
i = null;
gc();
assertEq(finalizeCount(), 2);
// Clearing the element does not (currently) unbind the instance — the
// count stays put until the table itself dies.
t.set(0, null);
assertEq(t.get(0), null);
gc();
assertEq(finalizeCount(), 2);
// Dropping the table releases the table and the instance (+2).
t = null;
gc();
assertEq(finalizeCount(), 4);

View File

@ -1,8 +1,4 @@
// |jit-test| --no-baseline
// Turn off baseline and since it messes up the GC finalization assertions by
// adding spurious edges to the GC graph.
// |jit-test| test-also-wasm-baseline
load(libdir + 'wasm.js');
load(libdir + 'asserts.js');
@ -60,138 +56,19 @@ assertEq(call(1), 1);
assertEq(tbl.get(0)(), 0);
assertEq(tbl.get(1)(), 1);
// A table should not hold exported functions alive and exported functions
// should not hold their originating table alive. Live exported functions should
// hold instances alive. Nothing should hold the export object alive.
resetFinalizeCount();
var i = evalText(`(module (table (resizable 2)) (export "tbl" table) (elem 0 $f0) ${callee(0)} ${caller})`);
var e = i.exports;
var t = e.tbl;
var f = t.get(0);
assertEq(f(), e.call(0));
assertErrorMessage(() => e.call(1), Error, /bad wasm indirect call/);
assertErrorMessage(() => e.call(2), Error, /out-of-range/);
assertEq(finalizeCount(), 0);
i.edge = makeFinalizeObserver();
e.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
f.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
f = null;
gc();
assertEq(finalizeCount(), 1);
f = t.get(0);
f.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 1);
i.exports = null;
e = null;
gc();
assertEq(finalizeCount(), 2);
t = null;
gc();
assertEq(finalizeCount(), 3);
i = null;
gc();
assertEq(finalizeCount(), 3);
assertEq(f(), 0);
f = null;
gc();
assertEq(finalizeCount(), 5);
// Call signatures are matched structurally:
// A table should hold the instance of any of its elements alive.
resetFinalizeCount();
var i = evalText(`(module (table (resizable 1)) (export "tbl" table) (elem 0 $f0) ${callee(0)} ${caller})`);
var e = i.exports;
var t = e.tbl;
var f = t.get(0);
i.edge = makeFinalizeObserver();
e.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
f.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
i.exports = null;
e = null;
gc();
assertEq(finalizeCount(), 1);
f = null;
gc();
assertEq(finalizeCount(), 2);
i = null;
gc();
assertEq(finalizeCount(), 2);
t = null;
gc();
assertEq(finalizeCount(), 4);
// The bad-indirect-call stub should (currently, could be changed later) keep
// the instance containing that stub alive.
resetFinalizeCount();
var i = evalText(`(module (table (resizable 2)) (export "tbl" table) ${caller})`);
var e = i.exports;
var t = e.tbl;
i.edge = makeFinalizeObserver();
e.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
i.exports = null;
e = null;
gc();
assertEq(finalizeCount(), 1);
i = null;
gc();
assertEq(finalizeCount(), 1);
t = null;
gc();
assertEq(finalizeCount(), 3);
// Before initialization, a table is not bound to any instance.
resetFinalizeCount();
var i = evalText(`(module (func $f0 (result i32) (i32.const 0)) (export "f0" $f0))`);
var t = new Table({initial:4});
i.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 0);
i = null;
gc();
assertEq(finalizeCount(), 1);
t = null;
gc();
assertEq(finalizeCount(), 2);
// When a Table is created (uninitialized) and then first assigned, it keeps the
// first element's Instance alive (as above).
resetFinalizeCount();
var i = evalText(`(module (func $f (result i32) (i32.const 42)) (export "f" $f))`);
var f = i.exports.f;
var t = new Table({initial:1});
i.edge = makeFinalizeObserver();
f.edge = makeFinalizeObserver();
t.edge = makeFinalizeObserver();
t.set(0, f);
assertEq(t.get(0), f);
assertEq(t.get(0)(), 42);
gc();
assertEq(finalizeCount(), 0);
f = null;
i.exports = null;
gc();
assertEq(finalizeCount(), 1);
assertEq(t.get(0)(), 42);
t.get(0).edge = makeFinalizeObserver();
gc();
assertEq(finalizeCount(), 2);
i = null;
gc();
assertEq(finalizeCount(), 2);
t.set(0, null);
assertEq(t.get(0), null);
gc();
assertEq(finalizeCount(), 2);
t = null;
gc();
assertEq(finalizeCount(), 4);
// $v2i1 and $v2i2 are two distinct type declarations with the same structure
// (() -> i32). call_indirect is annotated with $v2i1, so a call to $a (typed
// $v2i1) or $b (typed $v2i2) must succeed — signature types are matched
// structurally, not by declaration index. $c's type $i2v ((i32) -> nil) is
// structurally different, so calling element 2 must trap.
var call = evalText(`(module
    (type $v2i1 (func (result i32)))
    (type $v2i2 (func (result i32)))
    (type $i2v (func (param i32)))
    (table $a $b $c)
    (func $a (type $v2i1) (result i32) (i32.const 0))
    (func $b (type $v2i2) (result i32) (i32.const 1))
    (func $c (type $i2v) (param i32))
    (func $call (param i32) (result i32) (call_indirect $v2i1 (get_local 0)))
    (export "call" $call)
)`).exports.call;
assertEq(call(0), 0);
assertEq(call(1), 1);
assertErrorMessage(() => call(2), Error, /bad wasm indirect call/);

View File

@ -8972,11 +8972,11 @@ CodeGenerator::visitRest(LRest* lir)
}
bool
CodeGenerator::generateWasm(uint32_t sigIndex, wasm::FuncOffsets* offsets)
CodeGenerator::generateWasm(wasm::SigIdDesc sigId, wasm::FuncOffsets* offsets)
{
JitSpew(JitSpew_Codegen, "# Emitting asm.js code");
wasm::GenerateFunctionPrologue(masm, frameSize(), sigIndex, offsets);
wasm::GenerateFunctionPrologue(masm, frameSize(), sigId, offsets);
// Overflow checks are omitted by CodeGenerator in some cases (leaf
// functions with small framePushed). Perform overflow-checking after

View File

@ -66,7 +66,7 @@ class CodeGenerator final : public CodeGeneratorSpecific
public:
MOZ_MUST_USE bool generate();
MOZ_MUST_USE bool generateWasm(uint32_t sigIndex, wasm::FuncOffsets *offsets);
MOZ_MUST_USE bool generateWasm(wasm::SigIdDesc sigId, wasm::FuncOffsets *offsets);
MOZ_MUST_USE bool link(JSContext* cx, CompilerConstraintList* constraints);
MOZ_MUST_USE bool linkSharedStubs(JSContext* cx);

View File

@ -13475,14 +13475,14 @@ class MAsmJSCall final
class Callee {
public:
enum Which { Internal, Dynamic, Builtin };
static const uint32_t NoSigIndex = UINT32_MAX;
private:
Which which_;
union {
union U {
U() {}
uint32_t internal_;
struct {
MDefinition* callee_;
uint32_t sigIndex_;
wasm::SigIdDesc sigId_;
} dynamic;
wasm::SymbolicAddress builtin_;
} u;
@ -13491,9 +13491,11 @@ class MAsmJSCall final
explicit Callee(uint32_t callee) : which_(Internal) {
u.internal_ = callee;
}
explicit Callee(MDefinition* callee, uint32_t sigIndex = NoSigIndex) : which_(Dynamic) {
explicit Callee(MDefinition* callee, wasm::SigIdDesc sigId = wasm::SigIdDesc())
: which_(Dynamic)
{
u.dynamic.callee_ = callee;
u.dynamic.sigIndex_ = sigIndex;
u.dynamic.sigId_ = sigId;
}
explicit Callee(wasm::SymbolicAddress callee) : which_(Builtin) {
u.builtin_ = callee;
@ -13509,13 +13511,9 @@ class MAsmJSCall final
MOZ_ASSERT(which_ == Dynamic);
return u.dynamic.callee_;
}
bool dynamicHasSigIndex() const {
wasm::SigIdDesc dynamicSigId() const {
MOZ_ASSERT(which_ == Dynamic);
return u.dynamic.sigIndex_ != NoSigIndex;
}
uint32_t dynamicSigIndex() const {
MOZ_ASSERT(dynamicHasSigIndex());
return u.dynamic.sigIndex_;
return u.dynamic.sigId_;
}
wasm::SymbolicAddress builtin() const {
MOZ_ASSERT(which_ == Builtin);

View File

@ -883,7 +883,8 @@ class MacroAssembler : public MacroAssemblerSpecific
// ===============================================================
// Branch functions
inline void branch32(Condition cond, Register lhs, Register rhs, Label* label) PER_SHARED_ARCH;
template <class L>
inline void branch32(Condition cond, Register lhs, Register rhs, L label) PER_SHARED_ARCH;
template <class L>
inline void branch32(Condition cond, Register lhs, Imm32 rhs, L label) PER_SHARED_ARCH;
inline void branch32(Condition cond, Register length, const RegisterOrInt32Constant& key,

View File

@ -652,8 +652,9 @@ MacroAssembler::popcnt32(Register input, Register output, Register tmp)
// ===============================================================
// Branch functions
template <class L>
void
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, Label* label)
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, L label)
{
ma_cmp(lhs, rhs);
ma_b(label, cond);

View File

@ -651,8 +651,9 @@ MacroAssembler::ctz32(Register src, Register dest, bool knownNotZero)
// ===============================================================
// Branch functions
template <class L>
void
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, Label* label)
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, L label)
{
cmp32(lhs, rhs);
B(label, cond);

View File

@ -400,8 +400,9 @@ MacroAssembler::ctz32(Register src, Register dest, bool knownNotZero)
// ===============================================================
// Branch functions
template <class L>
void
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, Label* label)
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, L label)
{
ma_b(lhs, rhs, label, cond);
}

View File

@ -1510,19 +1510,30 @@ CodeGeneratorShared::emitAsmJSCall(LAsmJSCall* ins)
MAsmJSCall::Callee callee = mir->callee();
switch (callee.which()) {
case MAsmJSCall::Callee::Internal:
case MAsmJSCall::Callee::Internal: {
masm.call(mir->desc(), callee.internal());
break;
}
case MAsmJSCall::Callee::Dynamic: {
if (callee.dynamicHasSigIndex())
masm.move32(Imm32(callee.dynamicSigIndex()), WasmTableCallSigReg);
wasm::SigIdDesc sigId = callee.dynamicSigId();
switch (sigId.kind()) {
case wasm::SigIdDesc::Kind::Global:
masm.loadWasmGlobalPtr(sigId.globalDataOffset(), WasmTableCallSigReg);
break;
case wasm::SigIdDesc::Kind::Immediate:
masm.move32(Imm32(sigId.immediate()), WasmTableCallSigReg);
break;
case wasm::SigIdDesc::Kind::None:
break;
}
MOZ_ASSERT(WasmTableCallPtrReg == ToRegister(ins->getOperand(mir->dynamicCalleeOperandIndex())));
masm.call(mir->desc(), WasmTableCallPtrReg);
break;
}
case MAsmJSCall::Callee::Builtin:
case MAsmJSCall::Callee::Builtin: {
masm.call(callee.builtin());
break;
}
}
if (mir->spIncrement())

View File

@ -442,8 +442,9 @@ MacroAssembler::rshift32Arithmetic(Imm32 shift, Register srcDest)
// ===============================================================
// Branch instructions
template <class L>
void
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, Label* label)
MacroAssembler::branch32(Condition cond, Register lhs, Register rhs, L label)
{
cmp32(lhs, rhs);
j(cond, label);

View File

@ -106,10 +106,6 @@ class Simulator;
#endif
} // namespace jit
namespace wasm {
class Module;
} // namespace wasm
/*
* A FreeOp can do one thing: free memory. For convenience, it has delete_
* convenience methods that also call destructors.