Backed out 5 changesets (bug 1546138) for bustage at js/src/jit-test/tests/wasm/funcref.js for upcoming beta

Backed out changeset 13e26dbd7cc7 (bug 1546138)
Backed out changeset edf39b4a6ec1 (bug 1546138)
Backed out changeset b60f1ed65b1a (bug 1546138)
Backed out changeset da9544b976b1 (bug 1546138)
Backed out changeset 77be2a536573 (bug 1546138)

--HG--
extra : rebase_source : 7469677612ed4684d340da11776e8e412dd4995b
This commit is contained in:
Coroiu Cristina 2019-05-13 08:38:11 +03:00
parent 097caf634b
commit c139f7f6fe
35 changed files with 721 additions and 744 deletions

View File

@ -1,142 +0,0 @@
// Tests for the wasm `funcref` reference type: static validation rules
// (funcref <: anyref subtyping), runtime semantics (globals, blocks, select,
// call_indirect signature checks), and the JS API boundary (only WebAssembly
// exported functions or null may flow into funcref slots).
const {Module,Instance,Global,RuntimeError} = WebAssembly;
const badWasmFunc = /can only pass WebAssembly exported functions to funcref/;
const typeErr = /type mismatch/;
// Validation:
// funcref may flow into anyref (upcast ok), anyref may not flow into funcref.
wasmEvalText(`(module (func (local anyref funcref) (local.set 0 (local.get 1))))`);
wasmEvalText(`(module (func (local funcref funcref) (local.set 0 (local.get 1))))`);
wasmEvalText(`(module (func (local funcref) (local.set 0 (ref.null))))`);
wasmFailValidateText(`(module (func (local funcref anyref) (local.set 0 (local.get 1))))`, typeErr);
wasmEvalText(`(module (global (mut funcref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`);
wasmEvalText(`(module (global (mut anyref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`);
wasmFailValidateText(`(module (global (mut funcref) (ref.null)) (func (param anyref) (global.set 0 (local.get 0))))`, typeErr);
wasmEvalText(`(module (func (param funcref)) (func (param funcref) (call 0 (local.get 0))))`);
wasmEvalText(`(module (func (param anyref)) (func (param funcref) (call 0 (local.get 0))))`);
wasmFailValidateText(`(module (func (param funcref)) (func (param anyref) (call 0 (local.get 0))))`, typeErr);
wasmEvalText(`(module (func (param funcref) (result funcref) (block funcref (local.get 0) (br 0))))`);
wasmEvalText(`(module (func (param funcref) (result anyref) (block anyref (local.get 0) (br 0))))`);
wasmFailValidateText(`(module (func (param anyref) (result anyref) (block funcref (local.get 0) (br 0))))`, typeErr);
wasmEvalText(`(module (func (param funcref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
wasmEvalText(`(module (func (param anyref funcref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
wasmEvalText(`(module (func (param funcref anyref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
wasmFailValidateText(`(module (func (param anyref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr);
wasmFailValidateText(`(module (func (param funcref anyref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr);
// Runtime:
// Three distinct exported functions from the same module, used to check that
// funcref values preserve identity through globals, blocks, and select.
var m = new Module(wasmTextToBinary(`(module (func (export "wasmFun")))`));
const wasmFun1 = new Instance(m).exports.wasmFun;
const wasmFun2 = new Instance(m).exports.wasmFun;
const wasmFun3 = new Instance(m).exports.wasmFun;
var run = wasmEvalText(`(module
(global (mut funcref) (ref.null))
(func (param $x funcref) (param $test i32) (result funcref)
local.get $x
global.get 0
local.get $test
select
)
(func (export "run") (param $a funcref) (param $b funcref) (param $c funcref) (param $test1 i32) (param $test2 i32) (result funcref)
local.get $a
global.set 0
block funcref
local.get $b
local.get $test1
br_if 0
drop
local.get $c
end
local.get $test2
call 0
)
)`).exports.run;
assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, false), wasmFun1);
assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, false), wasmFun1);
assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, true), wasmFun2);
assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, true), wasmFun3);
// call_indirect must trap unless the callee's signature matches exactly;
// funcref vs anyref in a signature is a mismatch at runtime too.
var run = wasmEvalText(`(module
(type $t0 (func (param anyref) (result anyref)))
(type $t1 (func (param funcref) (result anyref)))
(type $t2 (func (param anyref) (result funcref)))
(type $t3 (func (param funcref funcref) (result funcref)))
(func $f0 (type $t0) ref.null)
(func $f1 (type $t1) ref.null)
(func $f2 (type $t2) ref.null)
(func $f3 (type $t3) ref.null)
(table funcref (elem $f0 $f1 $f2 $f3))
(func (export "run") (param i32 i32) (result anyref)
block $b3 block $b2 block $b1 block $b0
local.get 0
br_table $b0 $b1 $b2 $b3
end $b0
ref.null
local.get 1
call_indirect $t0
return
end $b1
ref.null
local.get 1
call_indirect $t1
return
end $b2
ref.null
local.get 1
call_indirect $t2
return
end $b3
ref.null
ref.null
local.get 1
call_indirect $t3
return
)
)`).exports.run;
for (var i = 0; i < 4; i++) {
  for (var j = 0; j < 4; j++) {
    if (i == j)
      assertEq(run(i, j), null);
    else
      assertErrorMessage(() => run(i, j), RuntimeError, /indirect call signature mismatch/);
  }
}
// JS API:
// Only exported wasm functions and null cross the boundary as funcref;
// plain JS functions (and undefined) are rejected with a TypeError.
const wasmFun = wasmEvalText(`(module (func (export "x")))`).exports.x;
var run = wasmEvalText(`(module (func (export "run") (param funcref) (result funcref) (local.get 0)))`).exports.run;
assertEq(run(wasmFun), wasmFun);
assertEq(run(null), null);
assertErrorMessage(() => run(() => {}), TypeError, badWasmFunc);
var importReturnValue;
var importFun = () => importReturnValue;
var run = wasmEvalText(`(module (func (import "" "i") (result funcref)) (func (export "run") (result funcref) (call 0)))`, {'':{i:importFun}}).exports.run;
importReturnValue = wasmFun;
assertEq(run(), wasmFun);
importReturnValue = null;
assertEq(run(), null);
importReturnValue = undefined;
assertErrorMessage(() => run(), TypeError, badWasmFunc);
importReturnValue = () => {};
assertErrorMessage(() => run(), TypeError, badWasmFunc);
// WebAssembly.Global of type funcref: same coercion rules apply on set.
var g = new Global({value:'funcref', mutable:true}, wasmFun);
assertEq(g.value, wasmFun);
g.value = null;
assertEq(g.value, null);
assertErrorMessage(() => g.value = () => {}, TypeError, badWasmFunc);
var g = new Global({value:'funcref', mutable:true}, null);
assertEq(g.value, null);
g.value = wasmFun;
assertEq(g.value, wasmFun);
assertErrorMessage(() => new Global({value:'funcref'}, () => {}), TypeError, badWasmFunc);

View File

@ -5,8 +5,6 @@ const Memory = WebAssembly.Memory;
const LinkError = WebAssembly.LinkError;
const RuntimeError = WebAssembly.RuntimeError;
const badFuncRefError = /can only pass WebAssembly exported functions to funcref/;
var callee = i => `(func $f${i} (result i32) (i32.const ${i}))`;
wasmFailValidateText(`(module (elem (i32.const 0) $f0) ${callee(0)})`, /elem segment requires a table section/);
@ -124,8 +122,8 @@ assertEq(e4.call(2), 13);
var asmjsFun = (function() { "use asm"; function f() {} return f })();
assertEq(isAsmJSFunction(asmjsFun), isAsmJSCompilationAvailable());
assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, badFuncRefError);
assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, badFuncRefError);
assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, /can only assign WebAssembly exported functions/);
assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, /bad initializer to funcref table/);
var m = new Module(wasmTextToBinary(`(module
(type $i2i (func (param i32) (result i32)))

View File

@ -179,6 +179,7 @@ static inline const MDefinition* GetObject(const MDefinition* ins) {
case MDefinition::Opcode::WasmLoadGlobalCell:
case MDefinition::Opcode::WasmStoreGlobalVar:
case MDefinition::Opcode::WasmStoreGlobalCell:
case MDefinition::Opcode::WasmLoadRef:
case MDefinition::Opcode::WasmStoreRef:
case MDefinition::Opcode::ArrayJoin:
case MDefinition::Opcode::ArraySlice:

View File

@ -7421,8 +7421,7 @@ void CodeGenerator::emitWasmCallBase(LWasmCallBase<Defs>* lir) {
break;
case wasm::CalleeDesc::BuiltinInstanceMethod:
masm.wasmCallBuiltinInstanceMethod(desc, mir->instanceArg(),
callee.builtin(),
mir->builtinMethodFailureMode());
callee.builtin());
switchRealm = false;
break;
}
@ -7531,6 +7530,10 @@ void CodeGenerator::visitWasmDerivedPointer(LWasmDerivedPointer* ins) {
masm.addPtr(Imm32(int32_t(ins->offset())), ToRegister(ins->output()));
}
// Load a (possibly null) reference from the memory cell whose address is in
// the input register into the output register.
void CodeGenerator::visitWasmLoadRef(LWasmLoadRef* lir) {
  Register cell = ToRegister(lir->ptr());
  Register dest = ToRegister(lir->output());
  masm.loadPtr(Address(cell, 0), dest);
}
void CodeGenerator::visitWasmStoreRef(LWasmStoreRef* ins) {
Register tls = ToRegister(ins->tls());
Register valueAddr = ToRegister(ins->valueAddr());
@ -13915,7 +13918,6 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
case wasm::ValType::I64:
case wasm::ValType::Ref:
case wasm::ValType::AnyRef:
case wasm::ValType::FuncRef:
// Don't forget to trace GC type arguments in TraceJitExitFrames
// when they're enabled.
MOZ_CRASH("unexpected argument type when calling from ion to wasm");
@ -13974,7 +13976,6 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
break;
case wasm::ExprType::Ref:
case wasm::ExprType::AnyRef:
case wasm::ExprType::FuncRef:
case wasm::ExprType::I64:
// Don't forget to trace GC type return value in TraceJitExitFrames
// when they're enabled.
@ -14015,6 +14016,11 @@ void CodeGenerator::visitWasmNullConstant(LWasmNullConstant* lir) {
masm.xorPtr(ToRegister(lir->output()), ToRegister(lir->output()));
}
// Materialize a boolean: 1 if the input pointer equals null (0), else 0.
void CodeGenerator::visitIsNullPointer(LIsNullPointer* lir) {
  Register input = ToRegister(lir->value());
  Register dest = ToRegister(lir->output());
  masm.cmpPtrSet(Assembler::Equal, input, ImmWord(0), dest);
}
void CodeGenerator::visitWasmCompareAndSelect(LWasmCompareAndSelect* ins) {
bool cmpIs32bit = ins->compareType() == MCompare::Compare_Int32 ||
ins->compareType() == MCompare::Compare_UInt32;

View File

@ -4350,6 +4350,11 @@ void LIRGenerator::visitWasmDerivedPointer(MWasmDerivedPointer* ins) {
define(new (alloc()) LWasmDerivedPointer(base), ins);
}
// Lower MWasmLoadRef: the cell address is consumed at-start, so the output
// may reuse the input register.
void LIRGenerator::visitWasmLoadRef(MWasmLoadRef* ins) {
  auto* lir =
      new (alloc()) LWasmLoadRef(useRegisterAtStart(ins->getOperand(0)));
  define(lir, ins);
}
void LIRGenerator::visitWasmStoreRef(MWasmStoreRef* ins) {
LAllocation tls = useRegister(ins->tls());
LAllocation valueAddr = useFixed(ins->valueAddr(), PreBarrierReg);
@ -4717,6 +4722,11 @@ void LIRGenerator::visitWasmNullConstant(MWasmNullConstant* ins) {
define(new (alloc()) LWasmNullConstant(), ins);
}
// Lower MIsNullPointer: the pointer is consumed at-start, so the boolean
// result may reuse its register.
void LIRGenerator::visitIsNullPointer(MIsNullPointer* ins) {
  auto* lir =
      new (alloc()) LIsNullPointer(useRegisterAtStart(ins->getOperand(0)));
  define(lir, ins);
}
void LIRGenerator::visitWasmFloatConstant(MWasmFloatConstant* ins) {
switch (ins->type()) {
case MIRType::Double:

View File

@ -4272,7 +4272,6 @@ IonBuilder::InliningResult IonBuilder::inlineWasmCall(CallInfo& callInfo,
break;
case wasm::ValType::I64:
case wasm::ValType::AnyRef:
case wasm::ValType::FuncRef:
case wasm::ValType::Ref:
MOZ_CRASH("impossible per above check");
case wasm::ValType::NullRef:

View File

@ -5451,9 +5451,8 @@ MWasmCall* MWasmCall::New(TempAllocator& alloc, const wasm::CallSiteDesc& desc,
MWasmCall* MWasmCall::NewBuiltinInstanceMethodCall(
TempAllocator& alloc, const wasm::CallSiteDesc& desc,
const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode,
const ABIArg& instanceArg, const Args& args, MIRType resultType,
uint32_t stackArgAreaSizeUnaligned) {
const wasm::SymbolicAddress builtin, const ABIArg& instanceArg,
const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned) {
auto callee = wasm::CalleeDesc::builtinInstanceMethod(builtin);
MWasmCall* call = MWasmCall::New(alloc, desc, callee, args, resultType,
stackArgAreaSizeUnaligned, nullptr);
@ -5463,7 +5462,6 @@ MWasmCall* MWasmCall::NewBuiltinInstanceMethodCall(
MOZ_ASSERT(instanceArg != ABIArg());
call->instanceArg_ = instanceArg;
call->builtinMethodFailureMode_ = failureMode;
return call;
}

View File

@ -1570,6 +1570,29 @@ class MWasmNullConstant : public MNullaryInstruction {
ALLOW_CLONE(MWasmNullConstant)
};
// MIR node testing whether a Pointer-typed value is null; produces a Boolean.
// Pure (AliasSet::None) and movable, so it is eligible for GVN and hoisting.
class MIsNullPointer : public MUnaryInstruction, public NoTypePolicy::Data {
  explicit MIsNullPointer(MDefinition* value)
      : MUnaryInstruction(classOpcode, value) {
    // Raw pointers only; boxed values are not expected here.
    MOZ_ASSERT(value->type() == MIRType::Pointer);
    setResultType(MIRType::Boolean);
    setMovable();
  }

 public:
  INSTRUCTION_HEADER(IsNullPointer);

  static MIsNullPointer* New(TempAllocator& alloc, MDefinition* value) {
    return new (alloc) MIsNullPointer(value);
  }

  // Congruent to another instruction iff the operands match (pure compute).
  bool congruentTo(const MDefinition* ins) const override {
    return congruentIfOperandsEqual(ins);
  }

  AliasSet getAliasSet() const override { return AliasSet::None(); }

  ALLOW_CLONE(MIsNullPointer)
};
// Floating-point value as created by wasm. Just a constant value, used to
// effectively inhibite all the MIR optimizations. This uses the same LIR nodes
// as a MConstant of the same type would.
@ -11775,6 +11798,31 @@ class MWasmDerivedPointer : public MUnaryInstruction,
ALLOW_CLONE(MWasmDerivedPointer)
};
// MIR node loading a wasm reference (RefOrNull) through a Pointer-typed
// address. The creator supplies the alias-set flag — presumably to
// distinguish the kind of cell being read (global cell vs. other storage);
// confirm against the call sites. Movability is likewise caller-controlled:
// a non-movable load must not be hoisted past stores to the same cell.
class MWasmLoadRef : public MUnaryInstruction, public NoTypePolicy::Data {
  AliasSet::Flag aliasSet_;  // memory category this load reads from

  explicit MWasmLoadRef(MDefinition* valueAddr, AliasSet::Flag aliasSet,
                        bool isMovable = true)
      : MUnaryInstruction(classOpcode, valueAddr), aliasSet_(aliasSet) {
    MOZ_ASSERT(valueAddr->type() == MIRType::Pointer);
    setResultType(MIRType::RefOrNull);
    if (isMovable) {
      setMovable();
    }
  }

 public:
  INSTRUCTION_HEADER(WasmLoadRef)
  TRIVIAL_NEW_WRAPPERS

  // Two loads are congruent iff they read the same address (same operand).
  bool congruentTo(const MDefinition* ins) const override {
    return congruentIfOperandsEqual(ins);
  }

  AliasSet getAliasSet() const override { return AliasSet::Load(aliasSet_); }

  ALLOW_CLONE(MWasmLoadRef)
};
class MWasmStoreRef : public MAryInstruction<3>, public NoTypePolicy::Data {
AliasSet::Flag aliasSet_;
@ -11849,7 +11897,6 @@ class MWasmStackArg : public MUnaryInstruction, public NoTypePolicy::Data {
class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
wasm::CallSiteDesc desc_;
wasm::CalleeDesc callee_;
wasm::FailureMode builtinMethodFailureMode_;
FixedList<AnyRegister> argRegs_;
uint32_t stackArgAreaSizeUnaligned_;
ABIArg instanceArg_;
@ -11859,7 +11906,6 @@ class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
: MVariadicInstruction(classOpcode),
desc_(desc),
callee_(callee),
builtinMethodFailureMode_(wasm::FailureMode::Infallible),
stackArgAreaSizeUnaligned_(stackArgAreaSizeUnaligned) {}
public:
@ -11879,9 +11925,8 @@ class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
static MWasmCall* NewBuiltinInstanceMethodCall(
TempAllocator& alloc, const wasm::CallSiteDesc& desc,
const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode,
const ABIArg& instanceArg, const Args& args, MIRType resultType,
uint32_t stackArgAreaSizeUnaligned);
const wasm::SymbolicAddress builtin, const ABIArg& instanceArg,
const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned);
size_t numArgs() const { return argRegs_.length(); }
AnyRegister registerForArg(size_t index) const {
@ -11890,10 +11935,6 @@ class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
}
const wasm::CallSiteDesc& desc() const { return desc_; }
const wasm::CalleeDesc& callee() const { return callee_; }
wasm::FailureMode builtinMethodFailureMode() const {
MOZ_ASSERT(callee_.which() == wasm::CalleeDesc::BuiltinInstanceMethod);
return builtinMethodFailureMode_;
}
uint32_t stackArgAreaSizeUnaligned() const {
return stackArgAreaSizeUnaligned_;
}

View File

@ -3191,7 +3191,7 @@ CodeOffset MacroAssembler::wasmCallImport(const wasm::CallSiteDesc& desc,
CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod(
const wasm::CallSiteDesc& desc, const ABIArg& instanceArg,
wasm::SymbolicAddress builtin, wasm::FailureMode failureMode) {
wasm::SymbolicAddress builtin) {
MOZ_ASSERT(instanceArg != ABIArg());
if (instanceArg.kind() == ABIArg::GPR) {
@ -3207,31 +3207,7 @@ CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod(
MOZ_CRASH("Unknown abi passing style for pointer");
}
CodeOffset ret = call(desc, builtin);
if (failureMode != wasm::FailureMode::Infallible) {
Label noTrap;
switch (failureMode) {
case wasm::FailureMode::Infallible:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE();
case wasm::FailureMode::FailOnNegI32:
branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
break;
case wasm::FailureMode::FailOnNullPtr:
branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap);
break;
case wasm::FailureMode::FailOnInvalidRef:
branchPtr(Assembler::NotEqual, ReturnReg,
ImmWord(uintptr_t(wasm::AnyRef::invalid().forCompiledCode())),
&noTrap);
break;
}
wasmTrap(wasm::Trap::ThrowReported,
wasm::BytecodeOffset(desc.lineOrBytecode()));
bind(&noTrap);
}
return ret;
return call(desc, builtin);
}
CodeOffset MacroAssembler::wasmCallIndirect(const wasm::CallSiteDesc& desc,

View File

@ -1927,8 +1927,7 @@ class MacroAssembler : public MacroAssemblerSpecific {
// (TLS & pinned regs are non-volatile registers in the system ABI).
CodeOffset wasmCallBuiltinInstanceMethod(const wasm::CallSiteDesc& desc,
const ABIArg& instanceArg,
wasm::SymbolicAddress builtin,
wasm::FailureMode failureMode);
wasm::SymbolicAddress builtin);
// As enterFakeExitFrame(), but using register conventions appropriate for
// wasm stubs.

View File

@ -6682,6 +6682,17 @@ class LWasmDerivedPointer : public LInstructionHelper<1, 1, 0> {
size_t offset() { return mirRaw()->toWasmDerivedPointer()->offset(); }
};
// LIR: load a reference from the cell addressed by the single pointer
// operand; one register definition, one operand, no temps.
class LWasmLoadRef : public LInstructionHelper<1, 1, 0> {
 public:
  LIR_HEADER(WasmLoadRef);

  explicit LWasmLoadRef(const LAllocation& ptr)
      : LInstructionHelper(classOpcode) {
    setOperand(0, ptr);
  }

  MWasmLoadRef* mir() const { return mirRaw()->toWasmLoadRef(); }
  // Address of the cell holding the reference.
  const LAllocation* ptr() { return getOperand(0); }
};
class LWasmStoreRef : public LInstructionHelper<0, 3, 1> {
public:
LIR_HEADER(WasmStoreRef);
@ -6771,6 +6782,17 @@ class LWasmNullConstant : public LInstructionHelper<1, 0, 0> {
explicit LWasmNullConstant() : LInstructionHelper(classOpcode) {}
};
// LIR: compare the pointer operand against null and define a boolean result;
// one definition, one operand, no temps.
class LIsNullPointer : public LInstructionHelper<1, 1, 0> {
 public:
  LIR_HEADER(IsNullPointer);

  explicit LIsNullPointer(const LAllocation& value)
      : LInstructionHelper(classOpcode) {
    setOperand(0, value);
  }

  MIsNullPointer* mir() const { return mirRaw()->toIsNullPointer(); }
  // Pointer being tested for null.
  const LAllocation* value() { return getOperand(0); }
};
template <size_t Defs>
class LWasmCallBase : public LVariadicInstruction<Defs, 0> {
using Base = LVariadicInstruction<Defs, 0>;

View File

@ -401,6 +401,7 @@ MSG_DEF(JSMSG_WASM_DROPPED_ELEM_SEG, 0, JSEXN_WASMRUNTIMEERROR, "use of droppe
MSG_DEF(JSMSG_WASM_DEREF_NULL, 0, JSEXN_WASMRUNTIMEERROR, "dereferencing null pointer")
MSG_DEF(JSMSG_WASM_BAD_RANGE , 2, JSEXN_RANGEERR, "bad {0} {1}")
MSG_DEF(JSMSG_WASM_BAD_GROW, 1, JSEXN_RANGEERR, "failed to grow {0}")
MSG_DEF(JSMSG_WASM_BAD_TBL_GROW_INIT, 1, JSEXN_TYPEERR, "bad initializer to {0} table")
MSG_DEF(JSMSG_WASM_TABLE_OUT_OF_BOUNDS, 0, JSEXN_RANGEERR, "table index out of bounds")
MSG_DEF(JSMSG_WASM_BAD_UINT32, 2, JSEXN_TYPEERR, "bad {0} {1}")
MSG_DEF(JSMSG_WASM_BAD_BUF_ARG, 0, JSEXN_TYPEERR, "first argument must be an ArrayBuffer or typed array object")
@ -411,7 +412,7 @@ MSG_DEF(JSMSG_WASM_BAD_ELEMENT, 0, JSEXN_TYPEERR, "\"element\" proper
MSG_DEF(JSMSG_WASM_BAD_ELEMENT_GENERALIZED, 0, JSEXN_TYPEERR, "\"element\" property of table descriptor must be \"funcref\" or \"anyref\"")
MSG_DEF(JSMSG_WASM_BAD_IMPORT_ARG, 0, JSEXN_TYPEERR, "second argument must be an object")
MSG_DEF(JSMSG_WASM_BAD_IMPORT_FIELD, 1, JSEXN_TYPEERR, "import object field '{0}' is not an Object")
MSG_DEF(JSMSG_WASM_BAD_FUNCREF_VALUE, 0, JSEXN_TYPEERR, "can only pass WebAssembly exported functions to funcref")
MSG_DEF(JSMSG_WASM_BAD_TABLE_VALUE, 0, JSEXN_TYPEERR, "can only assign WebAssembly exported functions to Table")
MSG_DEF(JSMSG_WASM_BAD_I64_TYPE, 0, JSEXN_TYPEERR, "cannot pass i64 to or from JS")
MSG_DEF(JSMSG_WASM_BAD_GLOBAL_TYPE, 0, JSEXN_TYPEERR, "bad type for a WebAssembly.Global")
MSG_DEF(JSMSG_WASM_NO_TRANSFER, 0, JSEXN_TYPEERR, "cannot transfer WebAssembly/asm.js ArrayBuffer")

View File

@ -2019,7 +2019,7 @@ class MOZ_STACK_CLASS JS_HAZ_ROOTED ModuleValidator
}
env_.asmJSSigToTableIndex[sigIndex] = env_.tables.length();
if (!env_.tables.emplaceBack(TableKind::AsmJS, Limits(mask + 1))) {
if (!env_.tables.emplaceBack(TableKind::TypedFunction, Limits(mask + 1))) {
return false;
}
@ -6562,8 +6562,7 @@ static bool ValidateGlobalVariable(JSContext* cx, const AsmJSGlobal& global,
}
case ValType::Ref:
case ValType::NullRef:
case ValType::AnyRef:
case ValType::FuncRef: {
case ValType::AnyRef: {
MOZ_CRASH("not available in asm.js");
}
}

View File

@ -127,11 +127,9 @@ class AstValType {
}
}
#ifdef ENABLE_WASM_GC
bool isNarrowType() const {
bool isRefType() const {
return code() == ValType::AnyRef || code() == ValType::Ref;
}
#endif
bool isValid() const { return !(which_ == IsValType && !type_.isValid()); }

View File

@ -1056,7 +1056,6 @@ void BaseLocalIter::settle() {
case ValType::F32:
case ValType::F64:
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
// TODO/AnyRef-boxing: With boxed immediates and strings, the
// debugger must be made aware that AnyRef != Pointer.
@ -2784,7 +2783,6 @@ class BaseCompiler final : public BaseCompilerInterface {
case ExprType::I64:
needI64(joinRegI64_);
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
case ExprType::NullRef:
case ExprType::Ref:
@ -2802,7 +2800,6 @@ class BaseCompiler final : public BaseCompilerInterface {
case ExprType::I64:
freeI64(joinRegI64_);
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
case ExprType::NullRef:
case ExprType::Ref:
@ -2828,7 +2825,6 @@ class BaseCompiler final : public BaseCompilerInterface {
break;
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
needRef(joinRegPtr_);
break;
@ -2853,7 +2849,6 @@ class BaseCompiler final : public BaseCompilerInterface {
break;
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
freeRef(joinRegPtr_);
break;
@ -3783,7 +3778,6 @@ class BaseCompiler final : public BaseCompilerInterface {
}
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterRef || k == Stk::ConstRef ||
@ -3822,7 +3816,6 @@ class BaseCompiler final : public BaseCompilerInterface {
return Some(AnyReg(joinRegF64_));
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
MOZ_ASSERT(isAvailableRef(joinRegPtr_));
needRef(joinRegPtr_);
@ -4246,7 +4239,6 @@ class BaseCompiler final : public BaseCompilerInterface {
masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress);
break;
case ExprType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
masm.storePtr(RegPtr(ReturnReg), resultsAddress);
break;
@ -4277,7 +4269,6 @@ class BaseCompiler final : public BaseCompilerInterface {
masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg));
break;
case ExprType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
masm.loadPtr(resultsAddress, RegPtr(ReturnReg));
break;
@ -4590,7 +4581,6 @@ class BaseCompiler final : public BaseCompilerInterface {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
ABIArg argLoc = call->abi.next(MIRType::RefOrNull);
if (argLoc.kind() == ABIArg::Stack) {
@ -4648,15 +4638,14 @@ class BaseCompiler final : public BaseCompilerInterface {
return callSymbolic(builtin, call);
}
CodeOffset builtinInstanceMethodCall(const SymbolicAddressSignature& builtin,
CodeOffset builtinInstanceMethodCall(SymbolicAddress builtin,
const ABIArg& instanceArg,
const FunctionCall& call) {
// Builtin method calls assume the TLS register has been set.
masm.loadWasmTlsRegFromFrame();
CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Symbolic);
return masm.wasmCallBuiltinInstanceMethod(
desc, instanceArg, builtin.identity, builtin.failureMode);
return masm.wasmCallBuiltinInstanceMethod(desc, instanceArg, builtin);
}
//////////////////////////////////////////////////////////////////////
@ -8564,7 +8553,6 @@ void BaseCompiler::doReturn(ExprType type, bool popStack) {
}
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef: {
RegPtr rv = popRef(RegPtr(ReturnReg));
returnCleanup(popStack);
@ -9007,7 +8995,6 @@ bool BaseCompiler::emitGetLocal() {
pushLocalF32(slot);
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
pushLocalRef(slot);
break;
@ -9072,7 +9059,6 @@ bool BaseCompiler::emitSetOrTeeLocal(uint32_t slot) {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr rv = popRef();
syncLocal(slot);
@ -9138,12 +9124,12 @@ bool BaseCompiler::emitGetGlobal() {
pushF64(value.f64());
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
pushRef(intptr_t(value.ref().forCompiledCode()));
break;
case ValType::NullRef:
MOZ_CRASH("NullRef not expressible");
pushRef(intptr_t(value.ref()));
break;
case ValType::AnyRef:
pushRef(intptr_t(value.anyref().forCompiledCode()));
break;
default:
MOZ_CRASH("Global constant type");
}
@ -9180,7 +9166,6 @@ bool BaseCompiler::emitGetGlobal() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr rv = needRef();
ScratchI32 tmp(*this);
@ -9240,7 +9225,6 @@ bool BaseCompiler::emitSetGlobal() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr valueAddr(PreBarrierReg);
needRef(valueAddr);
@ -9648,7 +9632,6 @@ bool BaseCompiler::emitSelect() {
}
case ValType::Ref:
case ValType::NullRef:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr r, rs;
pop2xRef(&r, &rs);
@ -9802,7 +9785,7 @@ bool BaseCompiler::emitInstanceCall(uint32_t lineOrBytecode,
passArg(t, peek(numNonInstanceArgs - i), &baselineCall);
}
CodeOffset raOffset =
builtinInstanceMethodCall(builtin, instanceArg, baselineCall);
builtinInstanceMethodCall(builtin.identity, instanceArg, baselineCall);
if (!createStackMap("emitInstanceCall", raOffset)) {
return false;
}
@ -10164,6 +10147,7 @@ bool BaseCompiler::emitWait(ValType type, uint32_t byteSize) {
return true;
}
// Returns -1 on trap, otherwise nonnegative result.
switch (type.code()) {
case ValType::I32:
if (!emitInstanceCall(lineOrBytecode, SASigWaitI32)) {
@ -10179,6 +10163,11 @@ bool BaseCompiler::emitWait(ValType type, uint32_t byteSize) {
MOZ_CRASH();
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
@ -10195,7 +10184,17 @@ bool BaseCompiler::emitWake() {
return true;
}
return emitInstanceCall(lineOrBytecode, SASigWake);
// Returns -1 on trap, otherwise nonnegative result.
if (!emitInstanceCall(lineOrBytecode, SASigWake)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
#ifdef ENABLE_WASM_BULKMEM_OPS
@ -10214,6 +10213,7 @@ bool BaseCompiler::emitMemOrTableCopy(bool isMem) {
return true;
}
// Returns -1 on trap, otherwise 0.
if (isMem) {
MOZ_ASSERT(srcMemOrTableIndex == 0);
MOZ_ASSERT(dstMemOrTableIndex == 0);
@ -10230,6 +10230,11 @@ bool BaseCompiler::emitMemOrTableCopy(bool isMem) {
}
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
@ -10246,11 +10251,21 @@ bool BaseCompiler::emitDataOrElemDrop(bool isData) {
}
// Despite the cast to int32_t, the callee regards the value as unsigned.
//
// Returns -1 on trap, otherwise 0.
pushI32(int32_t(segIndex));
const SymbolicAddressSignature& callee =
isData ? SASigDataDrop : SASigElemDrop;
if (!emitInstanceCall(lineOrBytecode, callee, /*pushReturnedValue=*/false)) {
return false;
}
return emitInstanceCall(lineOrBytecode,
isData ? SASigDataDrop : SASigElemDrop,
/*pushReturnedValue=*/false);
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
bool BaseCompiler::emitMemFill() {
@ -10265,8 +10280,18 @@ bool BaseCompiler::emitMemFill() {
return true;
}
return emitInstanceCall(lineOrBytecode, SASigMemFill,
/*pushReturnedValue=*/false);
// Returns -1 on trap, otherwise 0.
if (!emitInstanceCall(lineOrBytecode, SASigMemFill,
/*pushReturnedValue=*/false)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
bool BaseCompiler::emitMemOrTableInit(bool isMem) {
@ -10284,6 +10309,7 @@ bool BaseCompiler::emitMemOrTableInit(bool isMem) {
return true;
}
// Returns -1 on trap, otherwise 0.
pushI32(int32_t(segIndex));
if (isMem) {
if (!emitInstanceCall(lineOrBytecode, SASigMemInit,
@ -10298,6 +10324,11 @@ bool BaseCompiler::emitMemOrTableInit(bool isMem) {
}
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
#endif
@ -10317,9 +10348,20 @@ bool BaseCompiler::emitTableFill() {
}
// fill(start:u32, val:ref, len:u32, table:u32) -> u32
//
// Returns -1 on trap, otherwise 0.
pushI32(tableIndex);
return emitInstanceCall(lineOrBytecode, SASigTableFill,
/*pushReturnedValue=*/false);
if (!emitInstanceCall(lineOrBytecode, SASigTableFill,
/*pushReturnedValue=*/false)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
MOZ_MUST_USE
@ -10333,12 +10375,21 @@ bool BaseCompiler::emitTableGet() {
if (deadCode_) {
return true;
}
// get(index:u32, table:u32) -> uintptr_t(AnyRef)
// get(index:u32, table:u32) -> void*
//
// Returns nullptr for error, otherwise a pointer to a nonmoveable memory
// location that holds the anyref value.
pushI32(tableIndex);
if (!emitInstanceCall(lineOrBytecode, SASigTableGet,
/*pushReturnedValue=*/false)) {
return false;
}
Label noTrap;
masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap);
trap(Trap::ThrowReported);
masm.bind(&noTrap);
masm.loadPtr(Address(ReturnReg, 0), ReturnReg);
// Push the resulting anyref back on the eval stack. NOTE: needRef() must
// not kill the value in the register.
@ -10362,6 +10413,8 @@ bool BaseCompiler::emitTableGrow() {
return true;
}
// grow(initValue:anyref, delta:u32, table:u32) -> u32
//
// infallible.
pushI32(tableIndex);
return emitInstanceCall(lineOrBytecode, SASigTableGrow);
}
@ -10378,9 +10431,18 @@ bool BaseCompiler::emitTableSet() {
return true;
}
// set(index:u32, value:ref, table:u32) -> i32
//
// Returns -1 on range error, otherwise 0 (which is then ignored).
pushI32(tableIndex);
return emitInstanceCall(lineOrBytecode, SASigTableSet,
/*pushReturnedValue=*/false);
if (!emitInstanceCall(lineOrBytecode, SASigTableSet,
/*pushReturnedValue=*/false)) {
return false;
}
Label noTrap;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
trap(Trap::ThrowReported);
masm.bind(&noTrap);
return true;
}
MOZ_MUST_USE
@ -10394,6 +10456,8 @@ bool BaseCompiler::emitTableSize() {
return true;
}
// size(table:u32) -> u32
//
// infallible.
pushI32(tableIndex);
return emitInstanceCall(lineOrBytecode, SASigTableSize);
}
@ -10423,6 +10487,13 @@ bool BaseCompiler::emitStructNew() {
return false;
}
// Null pointer check.
Label ok;
masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
// As many arguments as there are fields.
MOZ_ASSERT(args.length() == structType.fields_.length());
@ -10474,7 +10545,6 @@ bool BaseCompiler::emitStructNew() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr value = popRef();
masm.storePtr(value, Address(rdata, offs));
@ -10592,7 +10662,6 @@ bool BaseCompiler::emitStructGet() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr r = needRef();
masm.loadPtr(Address(rp, offs), r);
@ -10654,7 +10723,6 @@ bool BaseCompiler::emitStructSet() {
rd = popF64();
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
rr = popRef();
break;
@ -10698,7 +10766,6 @@ bool BaseCompiler::emitStructSet() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
masm.computeEffectiveAddress(Address(rp, offs), valueAddr);
// emitBarrieredStore consumes valueAddr
@ -10735,10 +10802,6 @@ bool BaseCompiler::emitStructNarrow() {
return true;
}
// Currently not supported by struct.narrow validation.
MOZ_ASSERT(inputType != ValType::FuncRef);
MOZ_ASSERT(outputType != ValType::FuncRef);
// AnyRef -> AnyRef is a no-op, just leave the value on the stack.
if (inputType == ValType::AnyRef && outputType == ValType::AnyRef) {
@ -10752,6 +10815,8 @@ bool BaseCompiler::emitStructNarrow() {
bool mustUnboxAnyref = inputType == ValType::AnyRef;
// Dynamic downcast (ref T) -> (ref U), leaves rp or null
//
// Infallible.
const StructType& outputStruct =
env_.types[outputType.refTypeIndex()].structType();

View File

@ -56,138 +56,94 @@ static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;
#define _RoN MIRType::RefOrNull
#define _VOID MIRType::None
#define _END MIRType::None
#define _Infallible FailureMode::Infallible
#define _FailOnNegI32 FailureMode::FailOnNegI32
#define _FailOnNullPtr FailureMode::FailOnNullPtr
#define _FailOnInvalidRef FailureMode::FailOnInvalidRef
namespace js {
namespace wasm {
const SymbolicAddressSignature SASigSinD = {
SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::SinD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCosD = {
SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::CosD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTanD = {
SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::TanD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigASinD = {
SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::ASinD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigACosD = {
SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::ACosD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigATanD = {
SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::ATanD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilD = {
SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::CeilD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilF = {
SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}};
SymbolicAddress::CeilF, _F32, 1, {_F32, _END}};
const SymbolicAddressSignature SASigFloorD = {
SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::FloorD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigFloorF = {
SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}};
SymbolicAddress::FloorF, _F32, 1, {_F32, _END}};
const SymbolicAddressSignature SASigTruncD = {
SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::TruncD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTruncF = {
SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}};
SymbolicAddress::TruncF, _F32, 1, {_F32, _END}};
const SymbolicAddressSignature SASigNearbyIntD = {
SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::NearbyIntD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigNearbyIntF = {
SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}};
SymbolicAddress::NearbyIntF, _F32, 1, {_F32, _END}};
const SymbolicAddressSignature SASigExpD = {
SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::ExpD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigLogD = {
SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}};
SymbolicAddress::LogD, _F64, 1, {_F64, _END}};
const SymbolicAddressSignature SASigPowD = {
SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}};
SymbolicAddress::PowD, _F64, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigATan2D = {
SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}};
SymbolicAddress::ATan2D, _F64, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigMemoryGrow = {
SymbolicAddress::MemoryGrow, _I32, _Infallible, 2, {_PTR, _I32, _END}};
SymbolicAddress::MemoryGrow, _I32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemorySize = {
SymbolicAddress::MemorySize, _I32, _Infallible, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigWaitI32 = {SymbolicAddress::WaitI32,
_I32,
_FailOnNegI32,
4,
{_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64 = {SymbolicAddress::WaitI64,
_I32,
_FailOnNegI32,
4,
{_PTR, _I32, _I64, _I64, _END}};
SymbolicAddress::MemorySize, _I32, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigWaitI32 = {
SymbolicAddress::WaitI32, _I32, 4, {_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64 = {
SymbolicAddress::WaitI64, _I32, 4, {_PTR, _I32, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWake = {
SymbolicAddress::Wake, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemCopy = {SymbolicAddress::MemCopy,
_VOID,
_FailOnNegI32,
4,
{_PTR, _I32, _I32, _I32, _END}};
SymbolicAddress::Wake, _I32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemCopy = {
SymbolicAddress::MemCopy, _I32, 4, {_PTR, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigDataDrop = {
SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFill = {SymbolicAddress::MemFill,
_VOID,
_FailOnNegI32,
4,
{_PTR, _I32, _I32, _I32, _END}};
SymbolicAddress::DataDrop, _I32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFill = {
SymbolicAddress::MemFill, _I32, 4, {_PTR, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemInit = {
SymbolicAddress::MemInit,
_VOID,
_FailOnNegI32,
5,
{_PTR, _I32, _I32, _I32, _I32, _END}};
SymbolicAddress::MemInit, _I32, 5, {_PTR, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableCopy = {
SymbolicAddress::TableCopy,
_VOID,
_FailOnNegI32,
_I32,
6,
{_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigElemDrop = {
SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
SymbolicAddress::ElemDrop, _I32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigTableFill = {
SymbolicAddress::TableFill,
_VOID,
_FailOnNegI32,
5,
{_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {SymbolicAddress::TableGet,
_RoN,
_FailOnInvalidRef,
3,
{_PTR, _I32, _I32, _END}};
SymbolicAddress::TableFill, _I32, 5, {_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {
SymbolicAddress::TableGet, _PTR, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGrow = {
SymbolicAddress::TableGrow,
_I32,
_Infallible,
4,
{_PTR, _RoN, _I32, _I32, _END}};
SymbolicAddress::TableGrow, _I32, 4, {_PTR, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableInit = {
SymbolicAddress::TableInit,
_VOID,
_FailOnNegI32,
_I32,
6,
{_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {SymbolicAddress::TableSet,
_VOID,
_FailOnNegI32,
4,
{_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {
SymbolicAddress::TableSet, _I32, 4, {_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSize = {
SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}};
SymbolicAddress::TableSize, _I32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigPostBarrier = {
SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}};
SymbolicAddress::PostBarrier, _VOID, 2, {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrierFiltering = {
SymbolicAddress::PostBarrierFiltering,
_VOID,
_Infallible,
2,
{_PTR, _PTR, _END}};
SymbolicAddress::PostBarrierFiltering, _VOID, 2, {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigStructNew = {
SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _I32, _END}};
SymbolicAddress::StructNew, _RoN, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigStructNarrow = {
SymbolicAddress::StructNarrow,
_RoN,
_Infallible,
4,
{_PTR, _I32, _I32, _RoN, _END}};
SymbolicAddress::StructNarrow, _RoN, 4, {_PTR, _I32, _I32, _RoN, _END}};
} // namespace wasm
} // namespace js
@ -200,9 +156,6 @@ const SymbolicAddressSignature SASigStructNarrow = {
#undef _RoN
#undef _VOID
#undef _END
#undef _Infallible
#undef _FailOnNegI32
#undef _FailOnNullPtr
// ============================================================================
// WebAssembly builtin C++ functions called from wasm code to implement internal
@ -666,9 +619,6 @@ void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
case SymbolicAddress::CallImport_F64:
*abiType = Args_General4;
return FuncCast(Instance::callImport_f64, *abiType);
case SymbolicAddress::CallImport_FuncRef:
*abiType = Args_General4;
return FuncCast(Instance::callImport_funcref, *abiType);
case SymbolicAddress::CallImport_AnyRef:
*abiType = Args_General4;
return FuncCast(Instance::callImport_anyref, *abiType);
@ -887,7 +837,6 @@ bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
case SymbolicAddress::CallImport_F64:
case SymbolicAddress::CallImport_FuncRef:
case SymbolicAddress::CallImport_AnyRef:
case SymbolicAddress::CoerceInPlace_ToInt32: // GenerateImportJitExit
case SymbolicAddress::CoerceInPlace_ToNumber:

View File

@ -49,7 +49,7 @@ enum class TypeCode {
F64 = 0x7c, // SLEB128(-0x04)
// A function pointer with any signature
FuncRef = 0x70, // SLEB128(-0x10)
AnyFunc = 0x70, // SLEB128(-0x10)
// A reference to any type.
AnyRef = 0x6f,

View File

@ -436,9 +436,9 @@ size_t global_tlsOffset(const GlobalDesc* global) {
// TableDesc
size_t table_tlsOffset(const TableDesc* table) {
MOZ_RELEASE_ASSERT(
table->kind == TableKind::FuncRef || table->kind == TableKind::AsmJS,
"cranelift doesn't support AnyRef tables yet.");
MOZ_RELEASE_ASSERT(table->kind == TableKind::AnyFunction ||
table->kind == TableKind::TypedFunction,
"cranelift doesn't support AnyRef tables yet.");
return globalToTlsOffset(table->globalDataOffset);
}

View File

@ -1261,7 +1261,6 @@ static const char* ThunkedNativeToDescription(SymbolicAddress func) {
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
case SymbolicAddress::CallImport_F64:
case SymbolicAddress::CallImport_FuncRef:
case SymbolicAddress::CallImport_AnyRef:
case SymbolicAddress::CoerceInPlace_ToInt32:
case SymbolicAddress::CoerceInPlace_ToNumber:

View File

@ -367,10 +367,10 @@ bool ModuleGenerator::init(Metadata* maybeAsmJSMetadata) {
}
for (const ElemSegment* seg : env_->elemSegments) {
TableKind kind = !seg->active() ? TableKind::FuncRef
TableKind kind = !seg->active() ? TableKind::AnyFunction
: env_->tables[seg->tableIndex].kind;
switch (kind) {
case TableKind::FuncRef:
case TableKind::AnyFunction:
for (uint32_t funcIndex : seg->elemFuncIndices) {
if (funcIndex == NullFuncIndex) {
continue;
@ -378,7 +378,7 @@ bool ModuleGenerator::init(Metadata* maybeAsmJSMetadata) {
addOrMerge(ExportedFunc(funcIndex, false));
}
break;
case TableKind::AsmJS:
case TableKind::TypedFunction:
// asm.js functions are not exported.
break;
case TableKind::AnyRef:

View File

@ -131,7 +131,6 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
case ValType::F64:
args[i].set(JS::CanonicalizedDoubleValue(*(double*)&argv[i]));
break;
case ValType::FuncRef:
case ValType::AnyRef: {
args[i].set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)&argv[i])));
break;
@ -220,7 +219,7 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
type = TypeSet::DoubleType();
break;
case ValType::Ref:
case ValType::FuncRef:
MOZ_CRASH("case guarded above");
case ValType::AnyRef:
MOZ_CRASH("case guarded above");
case ValType::I64:
@ -306,32 +305,12 @@ Instance::callImport_anyref(Instance* instance, int32_t funcImportIndex,
if (!BoxAnyRef(cx, rval, &result)) {
return false;
}
static_assert(sizeof(argv[0]) >= sizeof(void*), "fits");
*(void**)argv = result.get().forCompiledCode();
return true;
}
/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_funcref(Instance* instance, int32_t funcImportIndex,
int32_t argc, uint64_t* argv) {
JSContext* cx = TlsContext.get();
RootedValue rval(cx);
if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
return false;
}
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, rval, &fun)) {
return false;
}
*(void**)argv = fun;
return true;
}
/* static */ uint32_t Instance::memoryGrow_i32(Instance* instance,
uint32_t delta) {
MOZ_ASSERT(SASigMemoryGrow.failureMode == FailureMode::Infallible);
/* static */ uint32_t /* infallible */
Instance::memoryGrow_i32(Instance* instance, uint32_t delta) {
MOZ_ASSERT(!instance->isAsmJS());
JSContext* cx = TlsContext.get();
@ -346,9 +325,8 @@ Instance::callImport_funcref(Instance* instance, int32_t funcImportIndex,
return ret;
}
/* static */ uint32_t Instance::memorySize_i32(Instance* instance) {
MOZ_ASSERT(SASigMemorySize.failureMode == FailureMode::Infallible);
/* static */ uint32_t /* infallible */
Instance::memorySize_i32(Instance* instance) {
// This invariant must hold when running Wasm code. Assert it here so we can
// write tests for cross-realm calls.
MOZ_ASSERT(TlsContext.get()->realm() == instance->realm());
@ -396,22 +374,20 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
}
}
/* static */ int32_t Instance::wait_i32(Instance* instance, uint32_t byteOffset,
int32_t value, int64_t timeout_ns) {
MOZ_ASSERT(SASigWaitI32.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */
Instance::wait_i32(Instance* instance, uint32_t byteOffset, int32_t value,
int64_t timeout_ns) {
return PerformWait<int32_t>(instance, byteOffset, value, timeout_ns);
}
/* static */ int32_t Instance::wait_i64(Instance* instance, uint32_t byteOffset,
int64_t value, int64_t timeout_ns) {
MOZ_ASSERT(SASigWaitI64.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */
Instance::wait_i64(Instance* instance, uint32_t byteOffset, int64_t value,
int64_t timeout_ns) {
return PerformWait<int64_t>(instance, byteOffset, value, timeout_ns);
}
/* static */ int32_t Instance::wake(Instance* instance, uint32_t byteOffset,
int32_t count) {
MOZ_ASSERT(SASigWake.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; nonnegative for ok */
Instance::wake(Instance* instance, uint32_t byteOffset, int32_t count) {
JSContext* cx = TlsContext.get();
// The alignment guard is not in the wasm spec as of 2017-11-02, but is
@ -442,11 +418,9 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
return int32_t(woken);
}
/* static */ int32_t Instance::memCopy(Instance* instance,
uint32_t dstByteOffset,
uint32_t srcByteOffset, uint32_t len) {
MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::memCopy(Instance* instance, uint32_t dstByteOffset,
uint32_t srcByteOffset, uint32_t len) {
WasmMemoryObject* mem = instance->memory();
uint32_t memLen = mem->volatileMemoryLength();
@ -512,9 +486,8 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
return -1;
}
/* static */ int32_t Instance::dataDrop(Instance* instance, uint32_t segIndex) {
MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::dataDrop(Instance* instance, uint32_t segIndex) {
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
"ensured by validation");
@ -532,10 +505,9 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
return 0;
}
/* static */ int32_t Instance::memFill(Instance* instance, uint32_t byteOffset,
uint32_t value, uint32_t len) {
MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::memFill(Instance* instance, uint32_t byteOffset, uint32_t value,
uint32_t len) {
WasmMemoryObject* mem = instance->memory();
uint32_t memLen = mem->volatileMemoryLength();
@ -586,11 +558,9 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
return -1;
}
/* static */ int32_t Instance::memInit(Instance* instance, uint32_t dstOffset,
uint32_t srcOffset, uint32_t len,
uint32_t segIndex) {
MOZ_ASSERT(SASigMemInit.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::memInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
uint32_t len, uint32_t segIndex) {
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
"ensured by validation");
@ -666,12 +636,10 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
return -1;
}
/* static */ int32_t Instance::tableCopy(Instance* instance, uint32_t dstOffset,
uint32_t srcOffset, uint32_t len,
uint32_t dstTableIndex,
uint32_t srcTableIndex) {
MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableCopy(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
uint32_t len, uint32_t dstTableIndex,
uint32_t srcTableIndex) {
const SharedTable& srcTable = instance->tables()[srcTableIndex];
uint32_t srcTableLen = srcTable->length();
@ -745,9 +713,8 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
return -1;
}
/* static */ int32_t Instance::elemDrop(Instance* instance, uint32_t segIndex) {
MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::elemDrop(Instance* instance, uint32_t segIndex) {
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
"ensured by validation");
@ -804,23 +771,20 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
calleeInstanceObj->getExportedFunctionCodeRange(fun, calleeTier);
void* code = calleeInstance.codeBase(calleeTier) +
calleeCodeRange.funcTableEntry();
table.setFuncRef(dstOffset + i, code, &calleeInstance);
table.setAnyFunc(dstOffset + i, code, &calleeInstance);
continue;
}
}
void* code = codeBaseTier +
codeRanges[funcToCodeRange[funcIndex]].funcTableEntry();
table.setFuncRef(dstOffset + i, code, this);
table.setAnyFunc(dstOffset + i, code, this);
}
}
}
/* static */ int32_t Instance::tableInit(Instance* instance, uint32_t dstOffset,
uint32_t srcOffset, uint32_t len,
uint32_t segIndex,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableInit.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
uint32_t len, uint32_t segIndex, uint32_t tableIndex) {
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
"ensured by validation");
@ -839,7 +803,7 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
// Element segments cannot currently contain arbitrary values, and anyref
// tables cannot be initialized from segments.
MOZ_ASSERT(table.kind() == TableKind::FuncRef);
MOZ_ASSERT(table.kind() == TableKind::AnyFunction);
// We are proposing to copy
//
@ -887,11 +851,9 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
return -1;
}
/* static */ int32_t Instance::tableFill(Instance* instance, uint32_t start,
void* value, uint32_t len,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableFill.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableFill(Instance* instance, uint32_t start, void* value,
uint32_t len, uint32_t tableIndex) {
Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
@ -935,23 +897,31 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
return -1;
}
/* static */ void* Instance::tableGet(Instance* instance, uint32_t index,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableGet.failureMode == FailureMode::FailOnInvalidRef);
// The return convention for tableGet() is awkward but avoids a situation where
// Ion code has to hold a value that may or may not be a pointer to GC'd
// storage, or where Ion has to pass in a pointer to storage where a return
// value can be written.
//
// Note carefully that the pointer that is returned may not be valid past
// operations that change the size of the table or cause GC work; it is strictly
// to be used to retrieve the return value.
/* static */ void* /* nullptr to signal trap; pointer to table location
otherwise */
Instance::tableGet(Instance* instance, uint32_t index, uint32_t tableIndex) {
const Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
if (index >= table.length()) {
JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
return AnyRef::invalid().forCompiledCode();
return nullptr;
}
return table.getAnyRef(index).forCompiledCode();
return const_cast<void*>(table.getShortlivedAnyRefLocForCompiledCode(index));
}
/* static */ uint32_t Instance::tableGrow(Instance* instance, void* initValue,
uint32_t delta, uint32_t tableIndex) {
MOZ_ASSERT(SASigTableGrow.failureMode == FailureMode::Infallible);
/* static */ uint32_t /* infallible */
Instance::tableGrow(Instance* instance, void* initValue, uint32_t delta,
uint32_t tableIndex) {
RootedAnyRef obj(TlsContext.get(), AnyRef::fromCompiledCode(initValue));
Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
@ -965,10 +935,9 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
return oldSize;
}
/* static */ int32_t Instance::tableSet(Instance* instance, uint32_t index,
void* value, uint32_t tableIndex) {
MOZ_ASSERT(SASigTableSet.failureMode == FailureMode::FailOnNegI32);
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableSet(Instance* instance, uint32_t index, void* value,
uint32_t tableIndex) {
Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
if (index >= table.length()) {
@ -980,23 +949,20 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
return 0;
}
/* static */ uint32_t Instance::tableSize(Instance* instance,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableSize.failureMode == FailureMode::Infallible);
/* static */ uint32_t /* infallible */
Instance::tableSize(Instance* instance, uint32_t tableIndex) {
Table& table = *instance->tables()[tableIndex];
return table.length();
}
/* static */ void Instance::postBarrier(Instance* instance,
gc::Cell** location) {
MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible);
/* static */ void /* infallible */
Instance::postBarrier(Instance* instance, gc::Cell** location) {
MOZ_ASSERT(location);
TlsContext.get()->runtime()->gc.storeBuffer().putCell(location);
}
/* static */ void Instance::postBarrierFiltering(Instance* instance,
gc::Cell** location) {
MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible);
/* static */ void /* infallible */
Instance::postBarrierFiltering(Instance* instance, gc::Cell** location) {
MOZ_ASSERT(location);
if (*location == nullptr || !gc::IsInsideNursery(*location)) {
return;
@ -1010,19 +976,16 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
// When we fail to allocate we return a nullptr; the wasm side must check this
// and propagate it as an error.
/* static */ void* Instance::structNew(Instance* instance, uint32_t typeIndex) {
MOZ_ASSERT(SASigStructNew.failureMode == FailureMode::FailOnNullPtr);
/* static */ void* /* null on OOM, otherwise a pointer */
Instance::structNew(Instance* instance, uint32_t typeIndex) {
JSContext* cx = TlsContext.get();
Rooted<TypeDescr*> typeDescr(cx, instance->structTypeDescrs_[typeIndex]);
return TypedObject::createZeroed(cx, typeDescr);
}
/* static */ void* Instance::structNarrow(Instance* instance,
uint32_t mustUnboxAnyref,
uint32_t outputTypeIndex,
void* maybeNullPtr) {
MOZ_ASSERT(SASigStructNarrow.failureMode == FailureMode::Infallible);
/* static */ void* /* infallible */
Instance::structNarrow(Instance* instance, uint32_t mustUnboxAnyref,
uint32_t outputTypeIndex, void* maybeNullPtr) {
JSContext* cx = TlsContext.get();
Rooted<TypedObject*> obj(cx);
@ -1096,7 +1059,7 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
// Either the written location is in the global data section in the
// WasmInstanceObject, or the Cell of a WasmGlobalObject:
//
// - WasmInstanceObjects are always tenured and u.ref_ may point to a
// - WasmInstanceObjects are always tenured and u.ref_/anyref_ may point to a
// nursery object, so we need a post-barrier since the global data of an
// instance is effectively a field of the WasmInstanceObject.
//
@ -1125,22 +1088,33 @@ void CopyValPostBarriered(uint8_t* dst, const Val& src) {
memcpy(dst, &x, sizeof(x));
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
MOZ_ASSERT(*(void**)dst == nullptr,
"should be null so no need for a pre-barrier");
AnyRef x = src.ref();
memcpy(dst, x.asJSObjectAddress(), sizeof(*x.asJSObjectAddress()));
AnyRef x = src.anyref();
memcpy(dst, x.asJSObjectAddress(), sizeof(x));
if (!x.isNull()) {
JSObject::writeBarrierPost((JSObject**)dst, nullptr, x.asJSObject());
}
break;
}
case ValType::Ref: {
MOZ_ASSERT(*(JSObject**)dst == nullptr,
"should be null so no need for a pre-barrier");
JSObject* x = src.ref();
memcpy(dst, &x, sizeof(x));
if (x) {
JSObject::writeBarrierPost((JSObject**)dst, nullptr, x);
}
break;
}
case ValType::NullRef: {
break;
}
default: {
MOZ_CRASH("unexpected Val type");
}
}
@ -1428,13 +1402,13 @@ void Instance::tracePrivate(JSTracer* trc) {
}
for (const GlobalDesc& global : code().metadata().globals) {
// Indirect reference globals get traced by the owning WebAssembly.Global.
// Indirect anyref global get traced by the owning WebAssembly.Global.
if (!global.type().isReference() || global.isConstant() ||
global.isIndirect()) {
continue;
}
GCPtrObject* obj = (GCPtrObject*)(globalData() + global.offset());
TraceNullableEdge(trc, obj, "wasm reference-typed global");
TraceNullableEdge(trc, obj, "wasm ref/anyref global");
}
TraceNullableEdge(trc, &memory_, "wasm buffer");
@ -1682,7 +1656,7 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
}
ASSERT_ANYREF_IS_JSOBJECT;
Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> refs(cx);
Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> anyrefs(cx);
DebugCodegen(DebugChannel::Function, "wasm-function[%d]; arguments ",
funcIndex);
@ -1692,6 +1666,7 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
switch (funcType->arg(i).code()) {
case ValType::I32:
if (!ToInt32(cx, v, (int32_t*)&exportArgs[i])) {
DebugCodegen(DebugChannel::Function, "call to ToInt32 failed!\n");
return false;
}
DebugCodegen(DebugChannel::Function, "i32(%d) ",
@ -1701,6 +1676,8 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
MOZ_CRASH("unexpected i64 flowing into callExport");
case ValType::F32:
if (!RoundFloat32(cx, v, (float*)&exportArgs[i])) {
DebugCodegen(DebugChannel::Function,
"call to RoundFloat32 failed!\n");
return false;
}
DebugCodegen(DebugChannel::Function, "f32(%f) ",
@ -1708,6 +1685,7 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
break;
case ValType::F64:
if (!ToNumber(cx, v, (double*)&exportArgs[i])) {
DebugCodegen(DebugChannel::Function, "call to ToNumber failed!\n");
return false;
}
DebugCodegen(DebugChannel::Function, "f64(%lf) ",
@ -1715,32 +1693,20 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
break;
case ValType::Ref:
MOZ_CRASH("temporarily unsupported Ref type in callExport");
case ValType::FuncRef: {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, v, &fun)) {
return false;
}
// Store in rooted array until no more GC is possible.
ASSERT_ANYREF_IS_JSOBJECT;
if (!refs.emplaceBack(fun)) {
return false;
}
DebugCodegen(DebugChannel::Function, "ptr(#%d) ",
int(refs.length() - 1));
break;
}
case ValType::AnyRef: {
RootedAnyRef ar(cx, AnyRef::null());
if (!BoxAnyRef(cx, v, &ar)) {
DebugCodegen(DebugChannel::Function, "call to BoxAnyRef failed!\n");
return false;
}
// Store in rooted array until no more GC is possible.
// We'll copy the value into the arguments array just before the call;
// for now tuck the value away in a rooted array.
ASSERT_ANYREF_IS_JSOBJECT;
if (!refs.emplaceBack(ar.get().asJSObject())) {
if (!anyrefs.emplaceBack(ar.get().asJSObject())) {
return false;
}
DebugCodegen(DebugChannel::Function, "ptr(#%d) ",
int(refs.length() - 1));
int(anyrefs.length() - 1));
break;
}
case ValType::NullRef: {
@ -1752,18 +1718,18 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
DebugCodegen(DebugChannel::Function, "\n");
// Copy over reference values from the rooted array, if any.
if (refs.length() > 0) {
if (anyrefs.length() > 0) {
DebugCodegen(DebugChannel::Function, "; ");
size_t nextRef = 0;
for (size_t i = 0; i < funcType->args().length(); ++i) {
if (funcType->arg(i).isReference()) {
ASSERT_ANYREF_IS_JSOBJECT;
*(void**)&exportArgs[i] = (void*)refs[nextRef++];
*(void**)&exportArgs[i] = (void*)anyrefs[nextRef++];
DebugCodegen(DebugChannel::Function, "ptr(#%d) = %p ", int(nextRef - 1),
*(void**)&exportArgs[i]);
}
}
refs.clear();
anyrefs.clear();
}
{
@ -1817,7 +1783,6 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
break;
case ExprType::Ref:
MOZ_CRASH("temporarily unsupported Ref type in callExport");
case ExprType::FuncRef:
case ExprType::AnyRef:
args.rval().set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)retAddr)));
DebugCodegen(DebugChannel::Function, "ptr(%p)", *(void**)retAddr);

View File

@ -182,7 +182,6 @@ class Instance {
static int32_t callImport_i64(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_f64(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_anyref(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_funcref(Instance*, int32_t, int32_t, uint64_t*);
static uint32_t memoryGrow_i32(Instance* instance, uint32_t delta);
static uint32_t memorySize_i32(Instance* instance);
static int32_t wait_i32(Instance* instance, uint32_t byteOffset,

View File

@ -183,7 +183,6 @@ class FunctionCompiler {
ins = MConstant::New(alloc(), DoubleValue(0.0), MIRType::Double);
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
ins = MWasmNullConstant::New(alloc());
break;
@ -681,6 +680,66 @@ class FunctionCompiler {
return ins;
}
bool checkI32NegativeMeansFailedResult(MDefinition* value) {
if (inDeadCode()) {
return true;
}
auto* zero = constant(Int32Value(0), MIRType::Int32);
auto* cond = compare(value, zero, JSOP_LT, MCompare::Compare_Int32);
MBasicBlock* failBlock;
if (!newBlock(curBlock_, &failBlock)) {
return false;
}
MBasicBlock* okBlock;
if (!newBlock(curBlock_, &okBlock)) {
return false;
}
curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock));
failBlock->end(
MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset()));
curBlock_ = okBlock;
return true;
}
bool checkPointerNullMeansFailedResult(MDefinition* value) {
if (inDeadCode()) {
return true;
}
auto* cond = MIsNullPointer::New(alloc(), value);
curBlock_->add(cond);
MBasicBlock* failBlock;
if (!newBlock(curBlock_, &failBlock)) {
return false;
}
MBasicBlock* okBlock;
if (!newBlock(curBlock_, &okBlock)) {
return false;
}
curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock));
failBlock->end(
MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset()));
curBlock_ = okBlock;
return true;
}
MDefinition* derefTableElementPointer(MDefinition* base) {
// Table element storage may be moved by GC operations, so reads from that
// storage are not movable.
MWasmLoadRef* load =
MWasmLoadRef::New(alloc(), base, AliasSet::WasmTableElement,
/*isMovable=*/false);
curBlock_->add(load);
return load;
}
MDefinition* load(MDefinition* base, MemoryAccessDesc* access,
ValType result) {
if (inDeadCode()) {
@ -1098,8 +1157,6 @@ class FunctionCompiler {
return true;
}
MOZ_ASSERT(builtin.failureMode == FailureMode::Infallible);
CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic);
auto callee = CalleeDesc::builtin(builtin.identity);
auto* ins =
@ -1117,27 +1174,22 @@ class FunctionCompiler {
bool builtinInstanceMethodCall(const SymbolicAddressSignature& builtin,
uint32_t lineOrBytecode,
const CallCompileState& call,
MDefinition** def = nullptr) {
MOZ_ASSERT_IF(!def, builtin.retType == MIRType::None);
MDefinition** def) {
if (inDeadCode()) {
if (def) {
*def = nullptr;
}
*def = nullptr;
return true;
}
CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic);
auto* ins = MWasmCall::NewBuiltinInstanceMethodCall(
alloc(), desc, builtin.identity, builtin.failureMode, call.instanceArg_,
call.regArgs_, builtin.retType, StackArgAreaSizeUnaligned(builtin));
alloc(), desc, builtin.identity, call.instanceArg_, call.regArgs_,
builtin.retType, StackArgAreaSizeUnaligned(builtin));
if (!ins) {
return false;
}
curBlock_->add(ins);
if (def) {
*def = ins;
}
*def = ins;
return true;
}
@ -2131,9 +2183,8 @@ static bool EmitGetGlobal(FunctionCompiler& f) {
case ValType::F64:
result = f.constant(value.f64());
break;
case ValType::FuncRef:
case ValType::AnyRef:
MOZ_ASSERT(value.ref().isNull());
MOZ_ASSERT(value.anyref().isNull());
result = f.nullRefConstant();
break;
default:
@ -2172,7 +2223,8 @@ static bool EmitSetGlobal(FunctionCompiler& f) {
return false;
}
f.finishCall(&args);
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args)) {
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
}
@ -2796,6 +2848,10 @@ static bool EmitWait(FunctionCompiler& f, ValType type, uint32_t byteSize) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
f.iter().setResult(ret);
return true;
}
@ -2839,6 +2895,10 @@ static bool EmitWake(FunctionCompiler& f) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
f.iter().setResult(ret);
return true;
}
@ -2914,7 +2974,16 @@ static bool EmitMemOrTableCopy(FunctionCompiler& f, bool isMem) {
return false;
}
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
}
static bool EmitDataOrElemDrop(FunctionCompiler& f, bool isData) {
@ -2946,7 +3015,16 @@ static bool EmitDataOrElemDrop(FunctionCompiler& f, bool isData) {
return false;
}
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
}
static bool EmitMemFill(FunctionCompiler& f) {
@ -2981,7 +3059,16 @@ static bool EmitMemFill(FunctionCompiler& f) {
return false;
}
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
}
static bool EmitMemOrTableInit(FunctionCompiler& f, bool isMem) {
@ -3033,7 +3120,16 @@ static bool EmitMemOrTableInit(FunctionCompiler& f, bool isMem) {
return false;
}
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
}
#endif // ENABLE_WASM_BULKMEM_OPS
@ -3083,7 +3179,16 @@ static bool EmitTableFill(FunctionCompiler& f) {
return false;
}
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
}
static bool EmitTableGet(FunctionCompiler& f) {
@ -3124,8 +3229,16 @@ static bool EmitTableGet(FunctionCompiler& f) {
// The return value here is either null, denoting an error, or a short-lived
// pointer to a location containing a possibly-null ref.
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
MDefinition* result;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &result)) {
return false;
}
if (!f.checkPointerNullMeansFailedResult(result)) {
return false;
}
MDefinition* ret = f.derefTableElementPointer(result);
if (!ret) {
return false;
}
@ -3224,7 +3337,14 @@ static bool EmitTableSet(FunctionCompiler& f) {
return false;
}
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
}
static bool EmitTableSize(FunctionCompiler& f) {

View File

@ -178,20 +178,12 @@ static bool ToWebAssemblyValue(JSContext* cx, ValType targetType, HandleValue v,
val.set(Val(d));
return true;
}
case ValType::FuncRef: {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, v, &fun)) {
return false;
}
val.set(Val(ValType::FuncRef, AnyRef::fromJSObject(fun)));
return true;
}
case ValType::AnyRef: {
RootedAnyRef tmp(cx, AnyRef::null());
if (!BoxAnyRef(cx, v, &tmp)) {
return false;
}
val.set(Val(ValType::AnyRef, tmp));
val.set(Val(tmp));
return true;
}
case ValType::Ref:
@ -211,9 +203,8 @@ static Value ToJSValue(const Val& val) {
return DoubleValue(JS::CanonicalizeNaN(double(val.f32())));
case ValType::F64:
return DoubleValue(JS::CanonicalizeNaN(val.f64()));
case ValType::FuncRef:
case ValType::AnyRef:
return UnboxAnyRef(val.ref());
return UnboxAnyRef(val.anyref());
case ValType::Ref:
case ValType::NullRef:
case ValType::I64:
@ -1545,29 +1536,6 @@ bool wasm::IsWasmExportedFunction(JSFunction* fun) {
return fun->kind() == JSFunction::Wasm;
}
bool wasm::CheckFuncRefValue(JSContext* cx, HandleValue v,
MutableHandleFunction fun) {
if (v.isNull()) {
MOZ_ASSERT(!fun);
return true;
}
if (v.isObject()) {
JSObject& obj = v.toObject();
if (obj.is<JSFunction>()) {
JSFunction* f = &obj.as<JSFunction>();
if (IsWasmExportedFunction(f)) {
fun.set(f);
return true;
}
}
}
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_FUNCREF_VALUE);
return false;
}
Instance& wasm::ExportedFunctionToInstance(JSFunction* fun) {
return ExportedFunctionToInstanceObject(fun)->instance();
}
@ -2038,7 +2006,7 @@ bool WasmTableObject::construct(JSContext* cx, unsigned argc, Value* vp) {
TableKind tableKind;
if (StringEqualsAscii(elementLinearStr, "anyfunc") ||
StringEqualsAscii(elementLinearStr, "funcref")) {
tableKind = TableKind::FuncRef;
tableKind = TableKind::AnyFunction;
#ifdef ENABLE_WASM_REFTYPES
} else if (StringEqualsAscii(elementLinearStr, "anyref")) {
if (!HasReftypesSupport(cx)) {
@ -2127,8 +2095,8 @@ bool WasmTableObject::getImpl(JSContext* cx, const CallArgs& args) {
}
switch (table.kind()) {
case TableKind::FuncRef: {
const FunctionTableElem& elem = table.getFuncRef(index);
case TableKind::AnyFunction: {
const FunctionTableElem& elem = table.getAnyFunc(index);
if (!elem.code) {
args.rval().setNull();
return true;
@ -2166,13 +2134,6 @@ bool WasmTableObject::get(JSContext* cx, unsigned argc, Value* vp) {
static void TableFunctionFill(JSContext* cx, Table* table, HandleFunction value,
uint32_t index, uint32_t limit) {
if (!value) {
while (index < limit) {
table->setNull(index++);
}
return;
}
RootedWasmInstanceObject instanceObj(cx,
ExportedFunctionToInstanceObject(value));
uint32_t funcIndex = ExportedFunctionToFuncIndex(value);
@ -2190,10 +2151,24 @@ static void TableFunctionFill(JSContext* cx, Table* table, HandleFunction value,
metadata.codeRange(metadata.lookupFuncExport(funcIndex));
void* code = instance.codeBase(tier) + codeRange.funcTableEntry();
while (index < limit) {
table->setFuncRef(index++, code, &instance);
table->setAnyFunc(index++, code, &instance);
}
}
static bool IsWasmExportedFunction(const Value& v, MutableHandleFunction f) {
if (!v.isObject()) {
return false;
}
JSObject& obj = v.toObject();
if (!obj.is<JSFunction>() || !IsWasmExportedFunction(&obj.as<JSFunction>())) {
return false;
}
f.set(&obj.as<JSFunction>());
return true;
}
/* static */
bool WasmTableObject::setImpl(JSContext* cx, const CallArgs& args) {
RootedWasmTableObject tableObj(
@ -2211,14 +2186,21 @@ bool WasmTableObject::setImpl(JSContext* cx, const CallArgs& args) {
RootedValue fillValue(cx, args[1]);
switch (table.kind()) {
case TableKind::FuncRef: {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, fillValue, &fun)) {
case TableKind::AnyFunction: {
RootedFunction value(cx);
if (!IsWasmExportedFunction(fillValue, &value) && !fillValue.isNull()) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_TABLE_VALUE);
return false;
}
MOZ_ASSERT(index < MaxTableLength);
static_assert(MaxTableLength < UINT32_MAX, "Invariant");
TableFunctionFill(cx, &table, fun, index, index + 1);
if (value) {
MOZ_ASSERT(index < MaxTableLength);
static_assert(MaxTableLength < UINT32_MAX, "Invariant");
TableFunctionFill(cx, &table, value, index, index + 1);
} else {
table.setNull(index);
}
break;
}
case TableKind::AnyRef: {
@ -2278,20 +2260,21 @@ bool WasmTableObject::growImpl(JSContext* cx, const CallArgs& args) {
static_assert(MaxTableLength < UINT32_MAX, "Invariant");
switch (table->table().kind()) {
case TableKind::FuncRef: {
case TableKind::AnyFunction: {
RootedFunction value(cx);
if (fillValue.isNull()) {
#ifdef DEBUG
for (uint32_t index = oldLength; index < oldLength + delta; index++) {
MOZ_ASSERT(table->table().getFuncRef(index).code == nullptr);
MOZ_ASSERT(table->table().getAnyFunc(index).code == nullptr);
}
#endif
} else {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, fillValue, &fun)) {
return false;
}
TableFunctionFill(cx, &table->table(), fun, oldLength,
} else if (IsWasmExportedFunction(fillValue, &value)) {
TableFunctionFill(cx, &table->table(), value, oldLength,
oldLength + delta);
} else {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_TBL_GROW_INIT, "funcref");
return false;
}
break;
}
@ -2370,14 +2353,14 @@ void WasmGlobalObject::trace(JSTracer* trc, JSObject* obj) {
return;
}
switch (global->type().code()) {
case ValType::FuncRef:
case ValType::AnyRef:
if (!global->cell()->ref.isNull()) {
if (!global->cell()->anyref.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
TraceManuallyBarrieredEdge(trc, global->cell()->ref.asJSObjectAddress(),
"wasm reference-typed global");
TraceManuallyBarrieredEdge(trc,
global->cell()->anyref.asJSObjectAddress(),
"wasm anyref global");
}
break;
case ValType::I32:
@ -2439,22 +2422,22 @@ WasmGlobalObject* WasmGlobalObject::create(JSContext* cx, HandleVal hval,
case ValType::F64:
cell->f64 = val.f64();
break;
case ValType::FuncRef:
case ValType::NullRef:
MOZ_ASSERT(!cell->ref, "value should be null already");
break;
case ValType::AnyRef:
MOZ_ASSERT(cell->ref.isNull(), "no prebarriers needed");
cell->ref = val.ref();
if (!cell->ref.isNull()) {
MOZ_ASSERT(cell->anyref.isNull(), "no prebarriers needed");
cell->anyref = val.anyref();
if (!cell->anyref.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(), nullptr,
cell->ref.asJSObject());
JSObject::writeBarrierPost(&cell->anyref, nullptr,
cell->anyref.asJSObject());
}
break;
case ValType::Ref:
MOZ_CRASH("Ref NYI");
case ValType::NullRef:
MOZ_CRASH("NullRef not expressible");
}
obj->initReservedSlot(TYPE_SLOT,
@ -2521,9 +2504,6 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
} else if (StringEqualsAscii(typeLinearStr, "f64")) {
globalType = ValType::F64;
#ifdef ENABLE_WASM_REFTYPES
} else if (HasReftypesSupport(cx) &&
StringEqualsAscii(typeLinearStr, "funcref")) {
globalType = ValType::FuncRef;
} else if (HasReftypesSupport(cx) &&
StringEqualsAscii(typeLinearStr, "anyref")) {
globalType = ValType::AnyRef;
@ -2553,11 +2533,8 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
case ValType::F64:
globalVal = Val(double(0.0));
break;
case ValType::FuncRef:
globalVal = Val(ValType::FuncRef, AnyRef::null());
break;
case ValType::AnyRef:
globalVal = Val(ValType::AnyRef, AnyRef::null());
globalVal = Val(AnyRef::null());
break;
case ValType::Ref:
MOZ_CRASH("Ref NYI");
@ -2568,7 +2545,7 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
// Override with non-undefined value, if provided.
RootedValue valueVal(cx, args.get(1));
if (!valueVal.isUndefined() ||
(args.length() >= 2 && globalType.isReference())) {
(args.length() >= 2 && globalType == ValType::AnyRef)) {
if (!ToWebAssemblyValue(cx, globalType, valueVal, &globalVal)) {
return false;
}
@ -2593,7 +2570,6 @@ bool WasmGlobalObject::valueGetterImpl(JSContext* cx, const CallArgs& args) {
case ValType::I32:
case ValType::F32:
case ValType::F64:
case ValType::FuncRef:
case ValType::AnyRef:
args.rval().set(args.thisv().toObject().as<WasmGlobalObject>().value(cx));
return true;
@ -2651,18 +2627,17 @@ bool WasmGlobalObject::valueSetterImpl(JSContext* cx, const CallArgs& args) {
case ValType::F64:
cell->f64 = val.get().f64();
break;
case ValType::FuncRef:
case ValType::AnyRef: {
AnyRef prevPtr = cell->ref;
AnyRef prevPtr = cell->anyref;
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
JSObject::writeBarrierPre(prevPtr.asJSObject());
cell->ref = val.get().ref();
if (!cell->ref.isNull()) {
JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(),
cell->anyref = val.get().anyref();
if (!cell->anyref.isNull()) {
JSObject::writeBarrierPost(cell->anyref.asJSObjectAddress(),
prevPtr.asJSObject(),
cell->ref.asJSObject());
cell->anyref.asJSObject());
}
break;
}
@ -2718,11 +2693,8 @@ void WasmGlobalObject::val(MutableHandleVal outval) const {
case ValType::F64:
outval.set(Val(cell->f64));
return;
case ValType::FuncRef:
outval.set(Val(ValType::FuncRef, cell->ref));
return;
case ValType::AnyRef:
outval.set(Val(ValType::AnyRef, cell->ref));
outval.set(Val(cell->anyref));
return;
case ValType::Ref:
MOZ_CRASH("Ref NYI");

View File

@ -102,14 +102,13 @@ MOZ_MUST_USE bool DeserializeModule(JSContext* cx, const Bytes& serialized,
// functions for extracting the instance and func-index of a wasm function
// can be used for both wasm and asm.js, however.
bool IsWasmExportedFunction(JSFunction* fun);
bool CheckFuncRefValue(JSContext* cx, HandleValue v, MutableHandleFunction fun);
extern bool IsWasmExportedFunction(JSFunction* fun);
Instance& ExportedFunctionToInstance(JSFunction* fun);
WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun);
uint32_t ExportedFunctionToFuncIndex(JSFunction* fun);
extern Instance& ExportedFunctionToInstance(JSFunction* fun);
extern WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun);
extern uint32_t ExportedFunctionToFuncIndex(JSFunction* fun);
bool IsSharedWasmMemoryObject(JSObject* obj);
extern bool IsSharedWasmMemoryObject(JSObject* obj);
} // namespace wasm
@ -177,7 +176,8 @@ class WasmGlobalObject : public NativeObject {
int64_t i64;
float f32;
double f64;
wasm::AnyRef ref;
JSObject* ref; // Note, this breaks an abstraction boundary
wasm::AnyRef anyref;
Cell() : i64(0) {}
~Cell() {}
};

View File

@ -1228,7 +1228,6 @@ static bool MakeStructField(JSContext* cx, const ValType& v, bool isMutable,
t = GlobalObject::getOrCreateReferenceTypeDescr(
cx, cx->global(), ReferenceType::TYPE_OBJECT);
break;
case ValType::FuncRef:
case ValType::AnyRef:
t = GlobalObject::getOrCreateReferenceTypeDescr(
cx, cx->global(), ReferenceType::TYPE_WASM_ANYREF);

View File

@ -46,7 +46,6 @@ class StackType {
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::Ref:
case TypeCode::NullRef:
case TypeCode::Limit:
@ -65,7 +64,6 @@ class StackType {
F64 = uint8_t(ValType::F64),
AnyRef = uint8_t(ValType::AnyRef),
FuncRef = uint8_t(ValType::FuncRef),
Ref = uint8_t(ValType::Ref),
NullRef = uint8_t(ValType::NullRef),
@ -85,16 +83,24 @@ class StackType {
Code code() const { return Code(UnpackTypeCodeType(tc_)); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
bool isReference() const { return IsReferenceType(tc_); }
bool isReference() const {
TypeCode tc = UnpackTypeCodeType(tc_);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::NullRef;
}
bool operator==(const StackType& that) const { return tc_ == that.tc_; }
bool operator!=(const StackType& that) const { return tc_ != that.tc_; }
bool operator==(Code that) const {
MOZ_ASSERT(that != Code::Ref);
return code() == that;
}
bool operator!=(Code that) const { return !(*this == that); }
};
@ -759,7 +765,6 @@ inline bool OpIter<Policy>::readBlockType(ExprType* type) {
case uint8_t(ExprType::F64):
known = true;
break;
case uint8_t(ExprType::FuncRef):
case uint8_t(ExprType::AnyRef):
#ifdef ENABLE_WASM_REFTYPES
known = true;
@ -1559,7 +1564,7 @@ inline bool OpIter<Policy>::readCallIndirect(uint32_t* funcTypeIndex,
}
return fail("table index out of range for call_indirect");
}
if (env_.tables[*tableIndex].kind != TableKind::FuncRef) {
if (env_.tables[*tableIndex].kind != TableKind::AnyFunction) {
return fail("indirect calls must go through a table of 'funcref'");
}
@ -1950,7 +1955,7 @@ inline bool OpIter<Policy>::readMemOrTableInit(bool isMem, uint32_t* segIndex,
// Element segments must carry functions exclusively and funcref is not
// yet a subtype of anyref.
if (env_.tables[*dstTableIndex].kind != TableKind::FuncRef) {
if (env_.tables[*dstTableIndex].kind != TableKind::AnyFunction) {
return fail("only tables of 'funcref' may have element segments");
}
if (*segIndex >= env_.elemSegments.length()) {

View File

@ -312,7 +312,6 @@ static void StoreABIReturn(MacroAssembler& masm, const FuncExport& fe,
masm.storeDouble(ReturnDoubleReg, Address(argv, 0));
break;
case ExprType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
masm.storePtr(ReturnReg, Address(argv, 0));
break;
@ -901,9 +900,10 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
break;
}
case ExprType::Ref:
case ExprType::FuncRef:
MOZ_CRASH("return ref in jitentry NYI");
break;
case ExprType::AnyRef:
MOZ_CRASH("returning reference in jitentry NYI");
MOZ_CRASH("return anyref in jitentry NYI");
break;
case ExprType::I64:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
@ -1151,7 +1151,6 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
GenPrintF64(DebugChannel::Function, masm, ReturnDoubleReg);
break;
case wasm::ExprType::Ref:
case wasm::ExprType::FuncRef:
case wasm::ExprType::AnyRef:
case wasm::ExprType::I64:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
@ -1553,14 +1552,6 @@ static bool GenerateImportInterpExit(MacroAssembler& masm, const FuncImport& fi,
break;
case ExprType::Ref:
MOZ_CRASH("No Ref support here yet");
case ExprType::FuncRef:
masm.call(SymbolicAddress::CallImport_FuncRef);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.loadPtr(argv, ReturnReg);
GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ",
funcImportIndex);
GenPrintPtr(DebugChannel::Import, masm, ReturnReg);
break;
case ExprType::AnyRef:
masm.call(SymbolicAddress::CallImport_AnyRef);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
@ -1762,9 +1753,10 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
GenPrintF64(DebugChannel::Import, masm, ReturnDoubleReg);
break;
case ExprType::Ref:
case ExprType::FuncRef:
MOZ_CRASH("ref returned by import (jit exit) NYI");
break;
case ExprType::AnyRef:
MOZ_CRASH("reference returned by import (jit exit) NYI");
MOZ_CRASH("anyref returned by import (jit exit) NYI");
break;
case ExprType::NullRef:
MOZ_CRASH("NullRef not expressible");

View File

@ -30,7 +30,7 @@ using namespace js::wasm;
using mozilla::CheckedInt;
Table::Table(JSContext* cx, const TableDesc& desc,
HandleWasmTableObject maybeObject, UniqueFuncRefArray functions)
HandleWasmTableObject maybeObject, UniqueAnyFuncArray functions)
: maybeObject_(maybeObject),
observers_(cx->zone()),
functions_(std::move(functions)),
@ -55,9 +55,9 @@ Table::Table(JSContext* cx, const TableDesc& desc,
SharedTable Table::create(JSContext* cx, const TableDesc& desc,
HandleWasmTableObject maybeObject) {
switch (desc.kind) {
case TableKind::FuncRef:
case TableKind::AsmJS: {
UniqueFuncRefArray functions(
case TableKind::AnyFunction:
case TableKind::TypedFunction: {
UniqueAnyFuncArray functions(
cx->pod_calloc<FunctionTableElem>(desc.limits.initial));
if (!functions) {
return nullptr;
@ -90,7 +90,7 @@ void Table::tracePrivate(JSTracer* trc) {
}
switch (kind_) {
case TableKind::FuncRef: {
case TableKind::AnyFunction: {
for (uint32_t i = 0; i < length_; i++) {
if (functions_[i].tls) {
functions_[i].tls->instance->trace(trc);
@ -104,7 +104,7 @@ void Table::tracePrivate(JSTracer* trc) {
objects_.trace(trc);
break;
}
case TableKind::AsmJS: {
case TableKind::TypedFunction: {
#ifdef DEBUG
for (uint32_t i = 0; i < length_; i++) {
MOZ_ASSERT(!functions_[i].tls);
@ -135,7 +135,7 @@ uint8_t* Table::functionBase() const {
return (uint8_t*)functions_.get();
}
const FunctionTableElem& Table::getFuncRef(uint32_t index) const {
const FunctionTableElem& Table::getAnyFunc(uint32_t index) const {
MOZ_ASSERT(isFunction());
return functions_[index];
}
@ -148,7 +148,13 @@ AnyRef Table::getAnyRef(uint32_t index) const {
return AnyRef::fromJSObject(objects_[index]);
}
void Table::setFuncRef(uint32_t index, void* code, const Instance* instance) {
const void* Table::getShortlivedAnyRefLocForCompiledCode(uint32_t index) const {
MOZ_ASSERT(!isFunction());
return const_cast<HeapPtr<JSObject*>&>(objects_[index])
.unsafeUnbarrieredForTracing();
}
void Table::setAnyFunc(uint32_t index, void* code, const Instance* instance) {
MOZ_ASSERT(isFunction());
FunctionTableElem& elem = functions_[index];
@ -157,13 +163,13 @@ void Table::setFuncRef(uint32_t index, void* code, const Instance* instance) {
}
switch (kind_) {
case TableKind::FuncRef:
case TableKind::AnyFunction:
elem.code = code;
elem.tls = instance->tlsData();
MOZ_ASSERT(elem.tls->instance->objectUnbarriered()->isTenured(),
"no writeBarrierPost (Table::set)");
break;
case TableKind::AsmJS:
case TableKind::TypedFunction:
elem.code = code;
elem.tls = nullptr;
break;
@ -182,7 +188,7 @@ void Table::setAnyRef(uint32_t index, AnyRef new_obj) {
void Table::setNull(uint32_t index) {
switch (kind_) {
case TableKind::FuncRef: {
case TableKind::AnyFunction: {
FunctionTableElem& elem = functions_[index];
if (elem.tls) {
JSObject::writeBarrierPre(elem.tls->instance->objectUnbarriered());
@ -196,7 +202,7 @@ void Table::setNull(uint32_t index) {
setAnyRef(index, AnyRef::null());
break;
}
case TableKind::AsmJS: {
case TableKind::TypedFunction: {
MOZ_CRASH("Should not happen");
}
}
@ -204,7 +210,7 @@ void Table::setNull(uint32_t index) {
void Table::copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex) {
switch (kind_) {
case TableKind::FuncRef: {
case TableKind::AnyFunction: {
FunctionTableElem& dst = functions_[dstIndex];
if (dst.tls) {
JSObject::writeBarrierPre(dst.tls->instance->objectUnbarriered());
@ -227,7 +233,7 @@ void Table::copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex) {
setAnyRef(dstIndex, srcTable.getAnyRef(srcIndex));
break;
}
case TableKind::AsmJS: {
case TableKind::TypedFunction: {
MOZ_CRASH("Bad table type");
}
}
@ -258,7 +264,7 @@ uint32_t Table::grow(uint32_t delta, JSContext* cx) {
cx->runtime(); // Use JSRuntime's MallocProvider to avoid throwing.
switch (kind_) {
case TableKind::FuncRef: {
case TableKind::AnyFunction: {
// Note that realloc does not release functions_'s pointee on failure
// which is exactly what we need here.
FunctionTableElem* newFunctions = rt->pod_realloc<FunctionTableElem>(
@ -279,7 +285,7 @@ uint32_t Table::grow(uint32_t delta, JSContext* cx) {
}
break;
}
case TableKind::AsmJS: {
case TableKind::TypedFunction: {
MOZ_CRASH("Bad table type");
}
}

View File

@ -29,7 +29,7 @@ namespace wasm {
// stateful objects exposed to WebAssembly. asm.js also uses Tables to represent
// its homogeneous function-pointer tables.
//
// A table of FuncRef holds FunctionTableElems, which are (instance*,index)
// A table of AnyFunction holds FunctionTableElems, which are (instance*,index)
// pairs, where the instance must be traced.
//
// A table of AnyRef holds JSObject pointers, which must be traced.
@ -44,11 +44,11 @@ class Table : public ShareableBase<Table> {
using InstanceSet = JS::WeakCache<GCHashSet<
WeakHeapPtrWasmInstanceObject,
MovableCellHasher<WeakHeapPtrWasmInstanceObject>, SystemAllocPolicy>>;
using UniqueFuncRefArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
using UniqueAnyFuncArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
WeakHeapPtrWasmTableObject maybeObject_;
InstanceSet observers_;
UniqueFuncRefArray functions_; // either functions_ has data
UniqueAnyFuncArray functions_; // either functions_ has data
TableAnyRefVector objects_; // or objects_, but not both
const TableKind kind_;
uint32_t length_;
@ -57,7 +57,7 @@ class Table : public ShareableBase<Table> {
template <class>
friend struct js::MallocProvider;
Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject,
UniqueFuncRefArray functions);
UniqueAnyFuncArray functions);
Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject,
TableAnyRefVector&& objects);
@ -70,8 +70,9 @@ class Table : public ShareableBase<Table> {
void trace(JSTracer* trc);
TableKind kind() const { return kind_; }
bool isTypedFunction() const { return kind_ == TableKind::TypedFunction; }
bool isFunction() const {
return kind_ == TableKind::FuncRef || kind_ == TableKind::AsmJS;
return kind_ == TableKind::AnyFunction || kind_ == TableKind::TypedFunction;
}
uint32_t length() const { return length_; }
Maybe<uint32_t> maximum() const { return maximum_; }
@ -79,13 +80,14 @@ class Table : public ShareableBase<Table> {
// Only for function values. Raw pointer to the table.
uint8_t* functionBase() const;
// get/setFuncRef is allowed only on table-of-funcref.
// get/setAnyFunc is allowed only on table-of-funcref.
// get/setAnyRef is allowed only on table-of-anyref.
// setNull is allowed on either.
const FunctionTableElem& getFuncRef(uint32_t index) const;
void setFuncRef(uint32_t index, void* code, const Instance* instance);
const FunctionTableElem& getAnyFunc(uint32_t index) const;
void setAnyFunc(uint32_t index, void* code, const Instance* instance);
AnyRef getAnyRef(uint32_t index) const;
const void* getShortlivedAnyRefLocForCompiledCode(uint32_t index) const;
void setAnyRef(uint32_t index, AnyRef);
void setNull(uint32_t index);

View File

@ -90,6 +90,7 @@ class WasmToken {
Field,
Float,
Func,
FuncRef,
#ifdef ENABLE_WASM_GC
GcFeatureOptIn,
#endif
@ -375,6 +376,7 @@ class WasmToken {
case Field:
case Float:
case Func:
case FuncRef:
#ifdef ENABLE_WASM_GC
case GcFeatureOptIn:
#endif
@ -952,7 +954,7 @@ WasmToken WasmTokenStream::next() {
return WasmToken(WasmToken::Align, begin, cur_);
}
if (consume(u"anyfunc")) {
return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_);
return WasmToken(WasmToken::FuncRef, begin, cur_);
}
if (consume(u"anyref")) {
return WasmToken(WasmToken::ValueType, ValType::AnyRef, begin, cur_);
@ -1035,7 +1037,7 @@ WasmToken WasmTokenStream::next() {
}
if (consume(u"funcref")) {
return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_);
return WasmToken(WasmToken::FuncRef, begin, cur_);
}
if (consume(u"func")) {
@ -3971,7 +3973,7 @@ static AstExpr* ParseStructNarrow(WasmParseContext& c, bool inParens) {
return nullptr;
}
if (!inputType.isNarrowType()) {
if (!inputType.isRefType()) {
c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error);
return nullptr;
}
@ -3981,7 +3983,7 @@ static AstExpr* ParseStructNarrow(WasmParseContext& c, bool inParens) {
return nullptr;
}
if (!outputType.isNarrowType()) {
if (!outputType.isRefType()) {
c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error);
return nullptr;
}
@ -4717,19 +4719,20 @@ static bool ParseGlobalType(WasmParseContext& c, AstValType* type,
static bool ParseElemType(WasmParseContext& c, TableKind* tableKind) {
WasmToken token;
if (c.ts.getIf(WasmToken::ValueType, &token)) {
if (token.valueType() == ValType::FuncRef) {
*tableKind = TableKind::FuncRef;
return true;
}
if (c.ts.getIf(WasmToken::FuncRef, &token)) {
*tableKind = TableKind::AnyFunction;
return true;
}
#ifdef ENABLE_WASM_REFTYPES
if (token.valueType() == ValType::AnyRef) {
*tableKind = TableKind::AnyRef;
return true;
}
#endif
if (c.ts.getIf(WasmToken::ValueType, &token) &&
token.valueType() == ValType::AnyRef) {
*tableKind = TableKind::AnyRef;
return true;
}
c.ts.generateError(token, "'funcref' or 'anyref' required", c.error);
#else
c.ts.generateError(token, "'funcref' required", c.error);
#endif
return false;
}
@ -6855,8 +6858,8 @@ static bool EncodeLimits(Encoder& e, const Limits& limits) {
static bool EncodeTableLimits(Encoder& e, const Limits& limits,
TableKind tableKind) {
switch (tableKind) {
case TableKind::FuncRef:
if (!e.writeVarU32(uint32_t(TypeCode::FuncRef))) {
case TableKind::AnyFunction:
if (!e.writeVarU32(uint32_t(TypeCode::AnyFunc))) {
return false;
}
break;
@ -7277,7 +7280,7 @@ static bool EncodeElemSegment(Encoder& e, AstElemSegment& segment) {
}
if (segment.isPassive()) {
if (!e.writeFixedU8(uint8_t(TypeCode::FuncRef))) {
if (!e.writeFixedU8(uint8_t(TypeCode::AnyFunc))) {
return false;
}
}

View File

@ -78,10 +78,11 @@ Val::Val(const LitVal& val) {
u.f64_ = val.f64();
return;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
u.ref_ = val.ref();
return;
case ValType::AnyRef:
u.anyref_ = val.anyref();
return;
case ValType::NullRef:
break;
}
@ -89,12 +90,16 @@ Val::Val(const LitVal& val) {
}
void Val::trace(JSTracer* trc) {
if (type_.isValid() && type_.isReference() && !u.ref_.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
TraceManuallyBarrieredEdge(trc, u.ref_.asJSObjectAddress(),
"wasm reference-typed global");
if (type_.isValid()) {
if (type_.isRef() && u.ref_) {
TraceManuallyBarrieredEdge(trc, &u.ref_, "wasm ref/anyref global");
} else if (type_ == ValType::AnyRef && !u.anyref_.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
TraceManuallyBarrieredEdge(trc, u.anyref_.asJSObjectAddress(),
"wasm ref/anyref global");
}
}
}
@ -268,7 +273,6 @@ static bool IsImmediateType(ValType vt) {
case ValType::I64:
case ValType::F32:
case ValType::F64:
case ValType::FuncRef:
case ValType::AnyRef:
return true;
case ValType::NullRef:
@ -289,10 +293,8 @@ static unsigned EncodeImmediateType(ValType vt) {
return 2;
case ValType::F64:
return 3;
case ValType::FuncRef:
return 4;
case ValType::AnyRef:
return 5;
return 4;
case ValType::NullRef:
case ValType::Ref:
break;
@ -722,7 +724,6 @@ void DebugFrame::updateReturnJSValue() {
case ExprType::Ref:
cachedReturnJSValue_ = ObjectOrNullValue((JSObject*)resultRef_);
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
cachedReturnJSValue_ = UnboxAnyRef(resultAnyRef_);
break;

View File

@ -255,12 +255,6 @@ static inline uint32_t UnpackTypeCodeIndex(PackedTypeCode ptc) {
return uint32_t(ptc) >> 8;
}
static inline bool IsReferenceType(PackedTypeCode ptc) {
TypeCode tc = UnpackTypeCodeType(ptc);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::FuncRef || tc == TypeCode::NullRef;
}
// The ExprType represents the type of a WebAssembly expression or return value
// and may either be a ValType or void.
//
@ -281,7 +275,6 @@ class ExprType {
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::NullRef:
case TypeCode::Ref:
case TypeCode::BlockVoid:
@ -302,7 +295,6 @@ class ExprType {
F32 = uint8_t(TypeCode::F32),
F64 = uint8_t(TypeCode::F64),
AnyRef = uint8_t(TypeCode::AnyRef),
FuncRef = uint8_t(TypeCode::FuncRef),
NullRef = uint8_t(TypeCode::NullRef),
Ref = uint8_t(TypeCode::Ref),
@ -329,23 +321,32 @@ class ExprType {
explicit inline ExprType(const ValType& t);
PackedTypeCode packed() const { return tc_; }
PackedTypeCode* packedPtr() { return &tc_; }
Code code() const { return Code(UnpackTypeCodeType(tc_)); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isValid() const { return IsValid(tc_); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
bool isReference() const { return IsReferenceType(tc_); }
bool isReference() const {
TypeCode tc = UnpackTypeCodeType(tc_);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::NullRef;
}
bool operator==(const ExprType& that) const { return tc_ == that.tc_; }
bool operator!=(const ExprType& that) const { return tc_ != that.tc_; }
bool operator==(Code that) const {
MOZ_ASSERT(that != Code::Ref);
return code() == that;
}
bool operator!=(Code that) const { return !(*this == that); }
};
@ -363,7 +364,6 @@ class ValType {
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::NullRef:
case TypeCode::Ref:
return true;
@ -381,7 +381,6 @@ class ValType {
F64 = uint8_t(TypeCode::F64),
AnyRef = uint8_t(TypeCode::AnyRef),
FuncRef = uint8_t(TypeCode::FuncRef),
NullRef = uint8_t(TypeCode::NullRef),
Ref = uint8_t(TypeCode::Ref),
};
@ -432,19 +431,27 @@ class ValType {
Code code() const { return Code(UnpackTypeCodeType(tc_)); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isValid() const { return IsValid(tc_); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
bool isReference() const { return IsReferenceType(tc_); }
bool isReference() const {
TypeCode tc = UnpackTypeCodeType(tc_);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::NullRef;
}
bool operator==(const ValType& that) const { return tc_ == that.tc_; }
bool operator!=(const ValType& that) const { return tc_ != that.tc_; }
bool operator==(Code that) const {
MOZ_ASSERT(that != Code::Ref);
return code() == that;
}
bool operator!=(Code that) const { return !(*this == that); }
};
@ -464,7 +471,6 @@ static inline unsigned SizeOf(ValType vt) {
case ValType::F64:
return 8;
case ValType::AnyRef:
case ValType::FuncRef:
case ValType::NullRef:
case ValType::Ref:
return sizeof(intptr_t);
@ -484,7 +490,6 @@ static inline jit::MIRType ToMIRType(ValType vt) {
return jit::MIRType::Double;
case ValType::Ref:
case ValType::AnyRef:
case ValType::FuncRef:
case ValType::NullRef:
return jit::MIRType::RefOrNull;
}
@ -522,8 +527,6 @@ static inline const char* ToCString(ExprType type) {
return "f64";
case ExprType::AnyRef:
return "anyref";
case ExprType::FuncRef:
return "funcref";
case ExprType::NullRef:
return "nullref";
case ExprType::Ref:
@ -568,16 +571,11 @@ static inline const char* ToCString(ValType type) {
class AnyRef {
JSObject* value_;
explicit AnyRef() : value_((JSObject*)-1) {}
explicit AnyRef(JSObject* p) : value_(p) {
MOZ_ASSERT(((uintptr_t)p & 0x03) == 0);
}
public:
// An invalid AnyRef cannot arise naturally from wasm and so can be used as
// a sentinel value to indicate failure from an AnyRef-returning function.
static AnyRef invalid() { return AnyRef(); }
// Given a void* that comes from compiled wasm code, turn it into AnyRef.
static AnyRef fromCompiledCode(void* p) { return AnyRef((JSObject*)p); }
@ -704,7 +702,8 @@ class LitVal {
uint64_t i64_;
float f32_;
double f64_;
AnyRef ref_;
JSObject* ref_; // Note, this breaks an abstraction boundary
AnyRef anyref_;
} u;
public:
@ -716,11 +715,17 @@ class LitVal {
explicit LitVal(float f32) : type_(ValType::F32) { u.f32_ = f32; }
explicit LitVal(double f64) : type_(ValType::F64) { u.f64_ = f64; }
explicit LitVal(ValType type, AnyRef any) : type_(type) {
MOZ_ASSERT(type.isReference());
explicit LitVal(AnyRef any) : type_(ValType::AnyRef) {
MOZ_ASSERT(any.isNull(),
"use Val for non-nullptr ref types to get tracing");
u.ref_ = any;
u.anyref_ = any;
}
explicit LitVal(ValType refType, JSObject* ref) : type_(refType) {
MOZ_ASSERT(refType.isRef());
MOZ_ASSERT(ref == nullptr,
"use Val for non-nullptr ref types to get tracing");
u.ref_ = ref;
}
ValType type() const { return type_; }
@ -742,10 +747,14 @@ class LitVal {
MOZ_ASSERT(type_ == ValType::F64);
return u.f64_;
}
AnyRef ref() const {
MOZ_ASSERT(type_.isReference());
JSObject* ref() const {
MOZ_ASSERT(type_.isRef());
return u.ref_;
}
AnyRef anyref() const {
MOZ_ASSERT(type_ == ValType::AnyRef);
return u.anyref_;
}
};
// A Val is a LitVal that can contain (non-null) pointers to GC things. All Vals
@ -761,9 +770,9 @@ class MOZ_NON_PARAM Val : public LitVal {
explicit Val(uint64_t i64) : LitVal(i64) {}
explicit Val(float f32) : LitVal(f32) {}
explicit Val(double f64) : LitVal(f64) {}
explicit Val(ValType type, AnyRef val) : LitVal(type, AnyRef::null()) {
MOZ_ASSERT(type.isReference());
u.ref_ = val;
explicit Val(AnyRef val) : LitVal(AnyRef::null()) { u.anyref_ = val; }
explicit Val(ValType type, JSObject* obj) : LitVal(type, (JSObject*)nullptr) {
u.ref_ = obj;
}
void trace(JSTracer* trc);
};
@ -1854,7 +1863,6 @@ enum class SymbolicAddress {
CallImport_I32,
CallImport_I64,
CallImport_F64,
CallImport_FuncRef,
CallImport_AnyRef,
CoerceInPlace_ToInt32,
CoerceInPlace_ToNumber,
@ -1905,18 +1913,6 @@ enum class SymbolicAddress {
Limit
};
// The FailureMode indicates whether, immediately after a call to a builtin
// returns, the return value should be checked against an error condition
// (and if so, which one) which signals that the C++ calle has already
// reported an error and thus wasm needs to wasmTrap(Trap::ThrowReported).
enum class FailureMode : uint8_t {
Infallible,
FailOnNegI32,
FailOnNullPtr,
FailOnInvalidRef
};
// SymbolicAddressSignature carries type information for a function referred
// to by a SymbolicAddress. In order that |argTypes| can be written out as a
// static initialiser, it has to have fixed length. At present
@ -1932,8 +1928,6 @@ struct SymbolicAddressSignature {
const SymbolicAddress identity;
// The return type, or MIRType::None to denote 'void'.
const jit::MIRType retType;
// The failure mode, which is checked by masm.wasmCallBuiltinInstanceMethod.
const FailureMode failureMode;
// The number of arguments, 0 .. SymbolicAddressSignatureMaxArgs only.
const uint8_t numArgs;
// The argument types; SymbolicAddressSignatureMaxArgs + 1 guard, which
@ -1972,13 +1966,10 @@ struct Limits {
};
// TableDesc describes a table as well as the offset of the table's base pointer
// in global memory. The TableKind determines the representation:
// - AnyRef: a wasm anyref word (wasm::AnyRef)
// - FuncRef: a two-word FunctionTableElem (wasm indirect call ABI)
// - AsmJS: a two-word FunctionTableElem (asm.js ABI)
// Eventually there should be a single unified AnyRef representation.
// in global memory. Currently, wasm only has "any function" and asm.js only
// "typed function".
enum class TableKind { AnyRef, FuncRef, AsmJS };
enum class TableKind { AnyFunction, AnyRef, TypedFunction };
struct TableDesc {
TableKind kind;
@ -2115,8 +2106,8 @@ struct TableTls {
void* functionBase;
};
// Table element for TableKind::FuncRef which carries both the code pointer and
// an instance pointer.
// Table elements for TableKind::AnyFunctions carry both the code pointer and an
// instance pointer.
struct FunctionTableElem {
// The code to call when calling this element. The table ABI is the system

View File

@ -1316,7 +1316,6 @@ static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
case ValType::Ref:
offset = layout.addReference(ReferenceType::TYPE_OBJECT);
break;
case ValType::FuncRef:
case ValType::AnyRef:
offset = layout.addReference(ReferenceType::TYPE_WASM_ANYREF);
break;
@ -1563,8 +1562,8 @@ static bool DecodeTableTypeAndLimits(Decoder& d, bool gcTypesEnabled,
}
TableKind tableKind;
if (elementType == uint8_t(TypeCode::FuncRef)) {
tableKind = TableKind::FuncRef;
if (elementType == uint8_t(TypeCode::AnyFunc)) {
tableKind = TableKind::AnyFunction;
#ifdef ENABLE_WASM_REFTYPES
} else if (elementType == uint8_t(TypeCode::AnyRef)) {
tableKind = TableKind::AnyRef;
@ -1603,7 +1602,6 @@ static bool GlobalIsJSCompatible(Decoder& d, ValType type, bool isMutable) {
case ValType::F32:
case ValType::F64:
case ValType::I64:
case ValType::FuncRef:
case ValType::AnyRef:
break;
#ifdef WASM_PRIVATE_REFTYPES
@ -1939,8 +1937,14 @@ static bool DecodeInitializerExpression(Decoder& d, ModuleEnvironment* env,
return d.fail(
"type mismatch: initializer type and expected type don't match");
}
MOZ_ASSERT_IF(expected.isRef(), env->gcTypesEnabled());
*init = InitExpr(LitVal(expected, AnyRef::null()));
if (expected == ValType::AnyRef) {
*init = InitExpr(LitVal(AnyRef::null()));
} else {
if (!env->gcTypesEnabled()) {
return d.fail("unexpected initializer expression");
}
*init = InitExpr(LitVal(expected, nullptr));
}
break;
}
case uint16_t(Op::GetGlobal): {
@ -2267,7 +2271,7 @@ static bool DecodeElemSection(Decoder& d, ModuleEnvironment* env) {
// segments, there really is no segment index, and we should never
// touch the field.
tableIndex = (uint32_t)-1;
} else if (env->tables[tableIndex].kind != TableKind::FuncRef) {
} else if (env->tables[tableIndex].kind != TableKind::AnyFunction) {
return d.fail("only tables of 'funcref' may have element segments");
}
@ -2288,7 +2292,7 @@ static bool DecodeElemSection(Decoder& d, ModuleEnvironment* env) {
if (!d.readFixedU8(&form)) {
return d.fail("expected type form");
}
if (form != uint8_t(TypeCode::FuncRef)) {
if (form != uint8_t(TypeCode::AnyFunc)) {
return d.fail(
"passive segments can only contain function references");
}

View File

@ -611,7 +611,6 @@ class Decoder {
*type = ValType::Code(code);
return true;
#ifdef ENABLE_WASM_REFTYPES
case uint8_t(ValType::FuncRef):
case uint8_t(ValType::AnyRef):
*type = ValType::Code(code);
return true;