Bug 1577508 - Allow WebAssembly blocks to return multiple values r=luke
Differential Revision: https://phabricator.services.mozilla.com/D43977

--HG--
extra : moz-landing-system : lando
parent 99657d8d0b
commit b736935a6c
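Editor's orientation note: the heart of this patch is that a block's result is no longer described by a single ExprType (one value or Void) but by a ResultType that can name zero, one, or eventually several value types, and that compilers receive branch/end operands as value vectors. The following is a hypothetical, much-simplified standalone stand-in for wasm::ResultType (the names ValTypeSketch and ResultTypeSketch are invented), meant only to illustrate the empty()/length()/operator[] shape used throughout the diff below.

  // Editor's sketch, not SpiderMonkey code.
  #include <cassert>
  #include <cstddef>
  #include <vector>

  enum class ValTypeSketch { I32, I64, F32, F64, AnyRef };

  class ResultTypeSketch {
    std::vector<ValTypeSketch> types_;
   public:
    static ResultTypeSketch Empty() { return ResultTypeSketch{}; }
    static ResultTypeSketch Single(ValTypeSketch t) {
      ResultTypeSketch r;
      r.types_.push_back(t);
      return r;
    }
    bool empty() const { return types_.empty(); }
    size_t length() const { return types_.size(); }
    ValTypeSketch operator[](size_t i) const { return types_[i]; }
  };

  int main() {
    ResultTypeSketch none = ResultTypeSketch::Empty();     // old "Void"
    ResultTypeSketch one = ResultTypeSketch::Single(ValTypeSketch::I32);
    assert(none.empty() && one.length() == 1);
    return 0;
  }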
@@ -230,10 +230,19 @@ assertErrorMessage(() => wasmEval(moduleWithSections([
nameSection([moduleNameSubsection('hi')])])
).f(), RuntimeError, /unreachable/);

-// Diagnose nonstandard block signature types.
-for (var bad of [0xff, 0, 1, 0x3f])
+// Diagnose invalid block signature types.
+for (var bad of [0xff, 1, 0x3f])
assertErrorMessage(() => wasmEval(moduleWithSections([sigSection([v2vSig]), declSection([0]), bodySection([funcBody({locals:[], body:[BlockCode, bad, EndCode]})])])), CompileError, /invalid .*block type/);

+if (wasmMultiValueEnabled()) {
+// In this test module, 0 denotes a void-to-void block type.
+let binary = moduleWithSections([sigSection([v2vSig]), declSection([0]), bodySection([funcBody({locals:[], body:[BlockCode, 0, EndCode]})])]);
+assertEq(WebAssembly.validate(binary), true);
+} else {
+const bad = 0;
+assertErrorMessage(() => wasmEval(moduleWithSections([sigSection([v2vSig]), declSection([0]), bodySection([funcBody({locals:[], body:[BlockCode, bad, EndCode]})])])), CompileError, /invalid .*block type/);
+}
+
// Ensure all invalid opcodes rejected
for (let op of undefinedOpcodes) {
let binary = moduleWithSections([v2vSigSection, declSection([0]), bodySection([funcBody({locals:[], body:[op]})])]);
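Editor's aside on what the test above probes: after this patch a block type byte may be 0x40 (empty), another single-byte negative SLEB128 value type, or the first byte of a non-negative SLEB128 function-type index (the multi-value case). That is why 0 validates once multi-value is enabled (the module's sole type, ()->(), sits at index 0), while 1 and 0x3f keep failing (no such type) and 0xff is not a valid single-byte encoding. The classifier below is a hypothetical standalone sketch, not SpiderMonkey code; it only restates that three-way split.

  // Hypothetical sketch of the three block-type encodings (not engine code).
  #include <cstdint>
  #include <cstdio>

  enum class BlockTypeKind { Empty, SingleValueType, FuncTypeIndex };

  // SLEB128 sign/continuation conventions for the first byte of a block type.
  constexpr uint8_t SignBit = 0x40;
  constexpr uint8_t SignMask = 0xc0;  // continuation bit | sign bit

  BlockTypeKind ClassifyFirstBlockTypeByte(uint8_t b) {
    if (b == 0x40) {
      return BlockTypeKind::Empty;            // TypeCode::BlockVoid
    }
    if ((b & SignMask) == SignBit) {
      return BlockTypeKind::SingleValueType;  // e.g. 0x7f = i32
    }
    return BlockTypeKind::FuncTypeIndex;      // multi-value: index into the type section
  }

  int main() {
    printf("0x40 -> %d, 0x7f -> %d, 0x00 -> %d\n",
           int(ClassifyFirstBlockTypeByte(0x40)),
           int(ClassifyFirstBlockTypeByte(0x7f)),
           int(ClassifyFirstBlockTypeByte(0x00)));
    return 0;
  }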
@@ -18,7 +18,7 @@ const invalidRefBlockType = funcBody({locals:[], body:[
0x42,
EndCode,
]});
-checkInvalid(invalidRefBlockType, /invalid inline block type/);
+checkInvalid(invalidRefBlockType, /ref/);

const invalidTooBigRefType = funcBody({locals:[], body:[
BlockCode,
@@ -26,4 +26,4 @@ const invalidTooBigRefType = funcBody({locals:[], body:[
varU32(1000000),
EndCode,
]});
-checkInvalid(invalidTooBigRefType, /invalid inline block type/);
+checkInvalid(invalidTooBigRefType, /ref/);
@@ -202,7 +202,7 @@ wasmFailValidateText(`
(br_table 1 0 (i32.const 15))
)
)
-)`, /br_table operand must be subtype of all target types/);
+)`, /br_table targets must all have the same arity/);

wasmFailValidateText(`
(module
@@ -212,7 +212,7 @@ wasmFailValidateText(`
(br_table 1 0 (i32.const 15))
)
)
-)`, /br_table operand must be subtype of all target types/);
+)`, /br_table targets must all have the same arity/);

wasmValidateText(`
(module
@@ -2441,11 +2441,21 @@ class BaseCompiler final : public BaseCompilerInterface {
deadThenBranch(false) {}
};

+class NothingVector {
+Nothing unused_;
+
+public:
+bool resize(size_t length) { return true; }
+Nothing& operator[](size_t) { return unused_; }
+Nothing& back() { return unused_; }
+};
+
struct BaseCompilePolicy {
// The baseline compiler tracks values on a stack of its own -- it
// needs to scan that stack for spilling -- and thus has no need
// for the values maintained by the iterator.
using Value = Nothing;
+using ValueVector = NothingVector;

// The baseline compiler uses the iterator's control stack, attaching
// its own control information.
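Editor's aside: the odd-looking NothingVector above exists because the shared op iterator is parameterized over a compiler policy and only touches Policy::ValueVector through resize(), operator[] and back(); a compiler that keeps no per-value state can therefore plug in a vector that stores nothing at all. The snippet below is a standalone, hypothetical illustration of that shape (readTwoOperands is invented, not an OpIter method).

  #include <cassert>
  #include <cstddef>

  struct Nothing {};

  class NothingVector {
    Nothing unused_;
   public:
    bool resize(size_t) { return true; }
    Nothing& operator[](size_t) { return unused_; }
    Nothing& back() { return unused_; }
  };

  // Stand-in for the iterator side: it only needs the vector-ish operations.
  template <typename ValueVector>
  bool readTwoOperands(ValueVector* values) {
    if (!values->resize(2)) {
      return false;
    }
    (*values)[0] = {};    // "record" the first operand
    values->back() = {};  // and the second
    return true;
  }

  int main() {
    NothingVector nothings;  // validation / baseline: no storage at all
    assert(readTwoOperands(&nothings));
    return 0;
  }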
@@ -2777,89 +2787,105 @@ class BaseCompiler final : public BaseCompilerInterface {
}
}

-void maybeReserveJoinRegI(ExprType type) {
-switch (type.code()) {
-case ExprType::I32:
+void maybeReserveJoinRegI(ResultType type) {
+if (type.empty()) {
+return;
+}
+MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
+switch (type[0].code()) {
+case ValType::I32:
needI32(joinRegI32_);
break;
-case ExprType::I64:
+case ValType::I64:
needI64(joinRegI64_);
break;
-case ExprType::FuncRef:
-case ExprType::AnyRef:
-case ExprType::NullRef:
-case ExprType::Ref:
+case ValType::F32:
+case ValType::F64:
+break;
+case ValType::FuncRef:
+case ValType::AnyRef:
+case ValType::NullRef:
+case ValType::Ref:
needRef(joinRegPtr_);
break;
default:;
}
}

-void maybeUnreserveJoinRegI(ExprType type) {
-switch (type.code()) {
-case ExprType::I32:
+void maybeUnreserveJoinRegI(ResultType type) {
+if (type.empty()) {
+return;
+}
+MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
+switch (type[0].code()) {
+case ValType::I32:
freeI32(joinRegI32_);
break;
-case ExprType::I64:
+case ValType::I64:
freeI64(joinRegI64_);
break;
-case ExprType::FuncRef:
-case ExprType::AnyRef:
-case ExprType::NullRef:
-case ExprType::Ref:
+case ValType::F32:
+case ValType::F64:
+break;
+case ValType::FuncRef:
+case ValType::AnyRef:
+case ValType::NullRef:
+case ValType::Ref:
freeRef(joinRegPtr_);
break;
default:;
}
}

-void maybeReserveJoinReg(ExprType type) {
-switch (type.code()) {
-case ExprType::I32:
+void maybeReserveJoinReg(ResultType type) {
+if (type.empty()) {
+return;
+}
+MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
+switch (type[0].code()) {
+case ValType::I32:
needI32(joinRegI32_);
break;
-case ExprType::I64:
+case ValType::I64:
needI64(joinRegI64_);
break;
-case ExprType::F32:
+case ValType::F32:
needF32(joinRegF32_);
break;
-case ExprType::F64:
+case ValType::F64:
needF64(joinRegF64_);
break;
-case ExprType::Ref:
-case ExprType::NullRef:
-case ExprType::FuncRef:
-case ExprType::AnyRef:
+case ValType::Ref:
+case ValType::NullRef:
+case ValType::FuncRef:
+case ValType::AnyRef:
needRef(joinRegPtr_);
break;
default:
break;
}
}

-void maybeUnreserveJoinReg(ExprType type) {
-switch (type.code()) {
-case ExprType::I32:
+void maybeUnreserveJoinReg(ResultType type) {
+if (type.empty()) {
+return;
+}
+MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
+switch (type[0].code()) {
+case ValType::I32:
freeI32(joinRegI32_);
break;
-case ExprType::I64:
+case ValType::I64:
freeI64(joinRegI64_);
break;
-case ExprType::F32:
+case ValType::F32:
freeF32(joinRegF32_);
break;
-case ExprType::F64:
+case ValType::F64:
freeF64(joinRegF64_);
break;
-case ExprType::Ref:
-case ExprType::NullRef:
-case ExprType::FuncRef:
-case ExprType::AnyRef:
+case ValType::Ref:
+case ValType::NullRef:
+case ValType::FuncRef:
+case ValType::AnyRef:
freeRef(joinRegPtr_);
break;
default:
break;
}
}

@@ -3753,48 +3779,47 @@ class BaseCompiler final : public BaseCompilerInterface {
// popping of the stack we can just use the JoinReg as it will
// become available in that process.

-MOZ_MUST_USE Maybe<AnyReg> popJoinRegUnlessVoid(ExprType type) {
-switch (type.code()) {
-case ExprType::Void: {
-return Nothing();
-}
-case ExprType::I32: {
+MOZ_MUST_USE Maybe<AnyReg> popJoinRegUnlessVoid(ResultType type) {
+if (type.empty()) {
+return Nothing();
+}
+MOZ_ASSERT(type.length() == 1, "multi-value return unimplemented");
+switch (type[0].code()) {
+case ValType::I32: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterI32 || k == Stk::ConstI32 ||
k == Stk::MemI32 || k == Stk::LocalI32);
return Some(AnyReg(popI32(joinRegI32_)));
}
-case ExprType::I64: {
+case ValType::I64: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterI64 || k == Stk::ConstI64 ||
k == Stk::MemI64 || k == Stk::LocalI64);
return Some(AnyReg(popI64(joinRegI64_)));
}
-case ExprType::F64: {
+case ValType::F64: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterF64 || k == Stk::ConstF64 ||
k == Stk::MemF64 || k == Stk::LocalF64);
return Some(AnyReg(popF64(joinRegF64_)));
}
-case ExprType::F32: {
+case ValType::F32: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterF32 || k == Stk::ConstF32 ||
k == Stk::MemF32 || k == Stk::LocalF32);
return Some(AnyReg(popF32(joinRegF32_)));
}
-case ExprType::Ref:
-case ExprType::NullRef:
-case ExprType::FuncRef:
-case ExprType::AnyRef: {
+case ValType::Ref:
+case ValType::NullRef:
+case ValType::FuncRef:
+case ValType::AnyRef: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterRef || k == Stk::ConstRef ||
k == Stk::MemRef || k == Stk::LocalRef);
return Some(AnyReg(popRef(joinRegPtr_)));
}
-default: {
-MOZ_CRASH("Compiler bug: unexpected expression type");
-}
}
+MOZ_CRASH("Compiler bug: unexpected expression type");
}

// If we ever start not sync-ing on entry to Block (but instead try to sync
@@ -3803,36 +3828,37 @@ class BaseCompiler final : public BaseCompilerInterface {
// joinreg in the contexts it's being used, so some other solution will need
// to be found.

-MOZ_MUST_USE Maybe<AnyReg> captureJoinRegUnlessVoid(ExprType type) {
-switch (type.code()) {
-case ExprType::I32:
+MOZ_MUST_USE Maybe<AnyReg> captureJoinRegUnlessVoid(ResultType type) {
+if (type.empty()) {
+return Nothing();
+}
+MOZ_ASSERT(type.length() == 1, "multi-value return unimplemented");
+switch (type[0].code()) {
+case ValType::I32:
MOZ_ASSERT(isAvailableI32(joinRegI32_));
needI32(joinRegI32_);
return Some(AnyReg(joinRegI32_));
-case ExprType::I64:
+case ValType::I64:
MOZ_ASSERT(isAvailableI64(joinRegI64_));
needI64(joinRegI64_);
return Some(AnyReg(joinRegI64_));
-case ExprType::F32:
+case ValType::F32:
MOZ_ASSERT(isAvailableF32(joinRegF32_));
needF32(joinRegF32_);
return Some(AnyReg(joinRegF32_));
-case ExprType::F64:
+case ValType::F64:
MOZ_ASSERT(isAvailableF64(joinRegF64_));
needF64(joinRegF64_);
return Some(AnyReg(joinRegF64_));
-case ExprType::Ref:
-case ExprType::NullRef:
-case ExprType::FuncRef:
-case ExprType::AnyRef:
+case ValType::Ref:
+case ValType::NullRef:
+case ValType::FuncRef:
+case ValType::AnyRef:
MOZ_ASSERT(isAvailableRef(joinRegPtr_));
needRef(joinRegPtr_);
return Some(AnyReg(joinRegPtr_));
-case ExprType::Void:
-return Nothing();
-default:
-MOZ_CRASH("Compiler bug: unexpected type");
}
+MOZ_CRASH("Compiler bug: unexpected type");
}

void pushJoinRegUnlessVoid(const Maybe<AnyReg>& r) {
@@ -6571,23 +6597,22 @@ class BaseCompiler final : public BaseCompilerInterface {
const StackHeight stackHeight; // The value to pop to along the taken edge,
// unless !hasPop()
const bool invertBranch; // If true, invert the sense of the branch
-const ExprType
-resultType; // The result propagated along the edges, or Void
+const ResultType resultType; // The result propagated along the edges

explicit BranchState(Label* label)
: label(label),
stackHeight(StackHeight::Invalid()),
invertBranch(false),
-resultType(ExprType::Void) {}
+resultType(ResultType::Empty()) {}

BranchState(Label* label, bool invertBranch)
: label(label),
stackHeight(StackHeight::Invalid()),
invertBranch(invertBranch),
-resultType(ExprType::Void) {}
+resultType(ResultType::Empty()) {}

BranchState(Label* label, StackHeight stackHeight, bool invertBranch,
-ExprType resultType)
+ResultType resultType)
: label(label),
stackHeight(stackHeight),
invertBranch(invertBranch),
@@ -6719,9 +6744,10 @@ class BaseCompiler final : public BaseCompilerInterface {
template <bool isSetLocal>
MOZ_MUST_USE bool emitSetOrTeeLocal(uint32_t slot);

-void endBlock(ExprType type);
+void endBlock(ResultType type);
+void endLoop(ResultType type);
void endIfThen();
-void endIfThenElse(ExprType type);
+void endIfThenElse(ResultType type);

void doReturn(bool popStack);
void pushReturnValueOfCall(const FunctionCall& call, ValType type);
@@ -8134,7 +8160,7 @@ bool BaseCompiler::emitBlock() {
return true;
}

-void BaseCompiler::endBlock(ExprType type) {
+void BaseCompiler::endBlock(ResultType type) {
Control& block = controlItem();

// Save the value.
@@ -8251,10 +8277,10 @@ void BaseCompiler::endIfThen() {
}

bool BaseCompiler::emitElse() {
-ExprType thenType;
-Nothing unused_thenValue;
+ResultType thenType;
+NothingVector unused_thenValues;

-if (!iter_.readElse(&thenType, &unused_thenValue)) {
+if (!iter_.readElse(&thenType, &unused_thenValues)) {
return false;
}

@@ -8295,7 +8321,7 @@ bool BaseCompiler::emitElse() {
return true;
}

-void BaseCompiler::endIfThenElse(ExprType type) {
+void BaseCompiler::endIfThenElse(ResultType type) {
Control& ifThenElse = controlItem();

// The expression type is not a reliable guide to what we'll find
@@ -8339,9 +8365,9 @@ void BaseCompiler::endIfThenElse(ResultType type) {

bool BaseCompiler::emitEnd() {
LabelKind kind;
-ExprType type;
-Nothing unused_value;
-if (!iter_.readEnd(&kind, &type, &unused_value)) {
+ResultType type;
+NothingVector unused_values;
+if (!iter_.readEnd(&kind, &type, &unused_values)) {
return false;
}

@@ -8374,9 +8400,9 @@ bool BaseCompiler::emitEnd() {

bool BaseCompiler::emitBr() {
uint32_t relativeDepth;
-ExprType type;
-Nothing unused_value;
-if (!iter_.readBr(&relativeDepth, &type, &unused_value)) {
+ResultType type;
+NothingVector unused_values;
+if (!iter_.readBr(&relativeDepth, &type, &unused_values)) {
return false;
}

@@ -8407,9 +8433,10 @@ bool BaseCompiler::emitBr() {

bool BaseCompiler::emitBrIf() {
uint32_t relativeDepth;
-ExprType type;
-Nothing unused_value, unused_condition;
-if (!iter_.readBrIf(&relativeDepth, &type, &unused_value,
+ResultType type;
+NothingVector unused_values;
+Nothing unused_condition;
+if (!iter_.readBrIf(&relativeDepth, &type, &unused_values,
&unused_condition)) {
return false;
}
@@ -8432,10 +8459,17 @@ bool BaseCompiler::emitBrIf() {
bool BaseCompiler::emitBrTable() {
Uint32Vector depths;
uint32_t defaultDepth;
-ExprType branchValueType;
-Nothing unused_value, unused_index;
-if (!iter_.readBrTable(&depths, &defaultDepth, &branchValueType,
-&unused_value, &unused_index)) {
+ResultType type;
+NothingVector unused_values;
+Nothing unused_index;
+// N.B., `type' gets set to the type of the default branch target. In the
+// presence of subtyping, it could be that the different branch targets have
+// different types. Here we rely on the assumption that the value
+// representations (e.g. Stk value types) of all branch target types are the
+// same, in the baseline compiler. Notably, this means that all Ref types
+// should be represented the same.
+if (!iter_.readBrTable(&depths, &defaultDepth, &type, &unused_values,
+&unused_index)) {
return false;
}

@@ -8444,14 +8478,14 @@ bool BaseCompiler::emitBrTable() {
}

// Don't use joinReg for rc
-maybeReserveJoinRegI(branchValueType);
+maybeReserveJoinRegI(type);

// Table switch value always on top.
RegI32 rc = popI32();

-maybeUnreserveJoinRegI(branchValueType);
+maybeUnreserveJoinRegI(type);

-Maybe<AnyReg> r = popJoinRegUnlessVoid(branchValueType);
+Maybe<AnyReg> r = popJoinRegUnlessVoid(type);

Label dispatchCode;
masm.branch32(Assembler::Below, rc, Imm32(depths.length()), &dispatchCode);
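Editor's aside on the register discipline visible in emitBrTable above: the branch operand travels in a dedicated join register, which must be kept reserved while the i32 switch index is popped (so the two never collide) and released again before the operand itself is popped into it. The mock below is a hypothetical, heavily simplified standalone illustration of that reserve/pop/unreserve/pop-join ordering; it is not the baseline compiler's allocator.

  #include <cassert>
  #include <set>

  // Toy allocator: registers are just small integers.
  struct ToyRegs {
    std::set<int> free_{0, 1, 2, 3};
    int take() { int r = *free_.begin(); free_.erase(free_.begin()); return r; }
    void need(int r) { assert(free_.count(r)); free_.erase(r); }  // reserve
    void release(int r) { free_.insert(r); }
  };

  int main() {
    ToyRegs regs;
    const int joinReg = 0;   // register the branch operand must end up in

    regs.need(joinReg);      // maybeReserveJoinRegI: keep it out of the pool
    int rc = regs.take();    // pop the br_table index into some other register
    assert(rc != joinReg);
    regs.release(joinReg);   // maybeUnreserveJoinRegI
    regs.need(joinReg);      // popJoinRegUnlessVoid: operand lands in joinReg

    regs.release(rc);
    regs.release(joinReg);
    return 0;
  }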
@@ -8565,8 +8599,8 @@ void BaseCompiler::doReturn(bool popStack) {
}

bool BaseCompiler::emitReturn() {
-Nothing unused_value;
-if (!iter_.readReturn(&unused_value)) {
+NothingVector unused_values;
+if (!iter_.readReturn(&unused_values)) {
return false;
}

@@ -8653,7 +8687,7 @@ bool BaseCompiler::emitCall() {
uint32_t lineOrBytecode = readCallSiteLineOrBytecode();

uint32_t funcIndex;
-BaseOpIter::ValueVector args_;
+NothingVector args_;
if (!iter_.readCall(&funcIndex, &args_)) {
return false;
}
@@ -8707,7 +8741,7 @@ bool BaseCompiler::emitCallIndirect() {
uint32_t funcTypeIndex;
uint32_t tableIndex;
Nothing callee_;
-BaseOpIter::ValueVector args_;
+NothingVector args_;
if (!iter_.readCallIndirect(&funcTypeIndex, &tableIndex, &callee_, &args_)) {
return false;
}
@@ -8812,7 +8846,6 @@ bool BaseCompiler::emitUnaryMathBuiltinCall(SymbolicAddress callee,

popValueStackBy(numArgs);

-// We know retType isn't ExprType::Void here, so there's no need to check it.
pushReturnValueOfCall(baselineCall, retType);

return true;
@@ -10441,7 +10474,7 @@ bool BaseCompiler::emitStructNew() {
uint32_t lineOrBytecode = readCallSiteLineOrBytecode();

uint32_t typeIndex;
-BaseOpIter::ValueVector args;
+NothingVector args;
if (!iter_.readStructNew(&typeIndex, &args)) {
return false;
}
@@ -10462,10 +10495,6 @@ bool BaseCompiler::emitStructNew() {
return false;
}

-// As many arguments as there are fields.
-
-MOZ_ASSERT(args.length() == structType.fields_.length());
-
// Optimization opportunity: Iterate backward to pop arguments off the
// stack. This will generate more instructions than we want, since we
// really only need to pop the stack once at the end, not for every element,
@@ -42,7 +42,13 @@ enum class SectionId {
GcFeatureOptIn = 42 // Arbitrary, but fits in 7 bits
};

+// WebAssembly type encodings are all single-byte negative SLEB128s, hence:
+// forall tc:TypeCode. ((tc & SLEB128SignMask) == SLEB128SignBit
+static const uint8_t SLEB128SignMask = 0xc0;
+static const uint8_t SLEB128SignBit = 0x40;
+
enum class TypeCode {
+
I32 = 0x7f, // SLEB128(-0x01)
I64 = 0x7e, // SLEB128(-0x02)
F32 = 0x7d, // SLEB128(-0x03)
@@ -63,7 +69,7 @@ enum class TypeCode {
// Type constructor for structure types - unofficial
Struct = 0x50, // SLEB128(-0x30)

-// Special code representing the block signature ()->()
+// The 'empty' case of blocktype.
BlockVoid = 0x40, // SLEB128(-0x40)

// Type designator for null - unofficial, will not appear in the binary format
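Editor's check of the new comment above: every value-type code is a single-byte negative SLEB128, so it has the sign bit set and the continuation bit clear, which is exactly what (code & SLEB128SignMask) == SLEB128SignBit tests. The standalone sketch below only restates the constants quoted in this hunk; it is not part of the patch.

  #include <cstdint>

  constexpr uint8_t SLEB128SignMask = 0xc0;
  constexpr uint8_t SLEB128SignBit = 0x40;

  static_assert((0x7f & SLEB128SignMask) == SLEB128SignBit, "i32");
  static_assert((0x7e & SLEB128SignMask) == SLEB128SignBit, "i64");
  static_assert((0x7d & SLEB128SignMask) == SLEB128SignBit, "f32");
  static_assert((0x40 & SLEB128SignMask) == SLEB128SignBit, "empty block type");
  static_assert((0x00 & SLEB128SignMask) != SLEB128SignBit, "type index 0 is not a value type");

  int main() { return 0; }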
@@ -43,10 +43,12 @@ using mozilla::Some;
namespace {

typedef Vector<MBasicBlock*, 8, SystemAllocPolicy> BlockVector;
+typedef Vector<MDefinition*, 8, SystemAllocPolicy> DefVector;

struct IonCompilePolicy {
// We store SSA definitions in the value stack.
typedef MDefinition* Value;
+typedef DefVector ValueVector;

// We store loop headers and then/else blocks in the control flow stack.
typedef MBasicBlock* ControlItem;
@@ -1151,23 +1153,18 @@ class FunctionCompiler {

inline bool inDeadCode() const { return curBlock_ == nullptr; }

-void returnExpr(MDefinition* operand) {
+void returnValues(const DefVector& values) {
if (inDeadCode()) {
return;
}

-MWasmReturn* ins = MWasmReturn::New(alloc(), operand);
-curBlock_->end(ins);
-curBlock_ = nullptr;
-}
+MOZ_ASSERT(values.length() <= 1, "until multi-return");

-void returnVoid() {
-if (inDeadCode()) {
-return;
+if (values.empty()) {
+curBlock_->end(MWasmReturnVoid::New(alloc()));
+} else {
+curBlock_->end(MWasmReturn::New(alloc(), values[0]));
}
-
-MWasmReturnVoid* ins = MWasmReturnVoid::New(alloc());
-curBlock_->end(ins);
curBlock_ = nullptr;
}

@@ -1183,39 +1180,42 @@ class FunctionCompiler {
}

private:
-static bool hasPushed(MBasicBlock* block) {
-uint32_t numPushed = block->stackDepth() - block->info().firstStackSlot();
-MOZ_ASSERT(numPushed == 0 || numPushed == 1);
-return numPushed;
+static uint32_t numPushed(MBasicBlock* block) {
+return block->stackDepth() - block->info().firstStackSlot();
}

public:
-void pushDef(MDefinition* def) {
+void pushDefs(const DefVector& defs) {
if (inDeadCode()) {
return;
}
-MOZ_ASSERT(!hasPushed(curBlock_));
-if (def && def->type() != MIRType::None) {
+MOZ_ASSERT(numPushed(curBlock_) == 0);
+for (MDefinition* def : defs) {
+MOZ_ASSERT(def->type() != MIRType::None);
curBlock_->push(def);
}
}

-MDefinition* popDefIfPushed() {
-if (!hasPushed(curBlock_)) {
-return nullptr;
+bool popPushedDefs(DefVector* defs) {
+size_t n = numPushed(curBlock_);
+if (!defs->resizeUninitialized(n)) {
+return false;
}
-MDefinition* def = curBlock_->pop();
-MOZ_ASSERT(def->type() != MIRType::Value);
-return def;
+for (; n > 0; n--) {
+MDefinition* def = curBlock_->pop();
+MOZ_ASSERT(def->type() != MIRType::Value);
+(*defs)[n - 1] = def;
+}
+return true;
}

private:
-void addJoinPredecessor(MDefinition* def, MBasicBlock** joinPred) {
+void addJoinPredecessor(const DefVector& defs, MBasicBlock** joinPred) {
*joinPred = curBlock_;
if (inDeadCode()) {
return;
}
-pushDef(def);
+pushDefs(defs);
}

public:
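Editor's aside on the pushDefs/popPushedDefs pairing above: definitions flowing into a control-flow join are pushed onto the block's expression stack in order and popped back off in reverse, so the output vector ends up in the original order. The standalone mock below (hypothetical types, std::vector standing in for the MIR block stack) only demonstrates that round trip.

  #include <cassert>
  #include <cstddef>
  #include <vector>

  using Def = int;                      // stand-in for MDefinition*
  using DefVector = std::vector<Def>;

  struct MockBlock {
    std::vector<Def> stack;
    size_t numPushed() const { return stack.size(); }
  };

  void pushDefs(MockBlock& block, const DefVector& defs) {
    for (Def def : defs) {
      block.stack.push_back(def);
    }
  }

  bool popPushedDefs(MockBlock& block, DefVector* defs) {
    size_t n = block.numPushed();
    defs->resize(n);
    for (; n > 0; n--) {
      (*defs)[n - 1] = block.stack.back();  // popped in reverse order
      block.stack.pop_back();
    }
    return true;
  }

  int main() {
    MockBlock block;
    pushDefs(block, {1, 2, 3});
    DefVector out;
    popPushedDefs(block, &out);
    assert((out == DefVector{1, 2, 3}));    // original order is preserved
    return 0;
  }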
@@ -1241,15 +1241,15 @@ class FunctionCompiler {
}

bool switchToElse(MBasicBlock* elseBlock, MBasicBlock** thenJoinPred) {
-MDefinition* ifDef;
-if (!finishBlock(&ifDef)) {
+DefVector values;
+if (!finishBlock(&values)) {
return false;
}

if (!elseBlock) {
*thenJoinPred = nullptr;
} else {
-addJoinPredecessor(ifDef, thenJoinPred);
+addJoinPredecessor(values, thenJoinPred);
}

curBlock_ = elseBlock;
mirGraph().moveBlockToEnd(curBlock_);
@@ -1258,47 +1258,44 @@ class FunctionCompiler {
return startBlock();
}

-bool joinIfElse(MBasicBlock* thenJoinPred, MDefinition** def) {
-MDefinition* elseDef;
-if (!finishBlock(&elseDef)) {
+bool joinIfElse(MBasicBlock* thenJoinPred, DefVector* defs) {
+DefVector values;
+if (!finishBlock(&values)) {
return false;
}

if (!thenJoinPred && inDeadCode()) {
-*def = nullptr;
-} else {
-MBasicBlock* elseJoinPred;
-addJoinPredecessor(elseDef, &elseJoinPred);
-
-mozilla::Array<MBasicBlock*, 2> blocks;
-size_t numJoinPreds = 0;
-if (thenJoinPred) {
-blocks[numJoinPreds++] = thenJoinPred;
-}
-if (elseJoinPred) {
-blocks[numJoinPreds++] = elseJoinPred;
-}
-
-if (numJoinPreds == 0) {
-*def = nullptr;
-return true;
-}
-
-MBasicBlock* join;
-if (!goToNewBlock(blocks[0], &join)) {
-return false;
-}
-for (size_t i = 1; i < numJoinPreds; ++i) {
-if (!goToExistingBlock(blocks[i], join)) {
-return false;
-}
-}
-
-curBlock_ = join;
-*def = popDefIfPushed();
-return true;
-}
-
+return true;
+}
+
+MBasicBlock* elseJoinPred;
+addJoinPredecessor(values, &elseJoinPred);
+
+mozilla::Array<MBasicBlock*, 2> blocks;
+size_t numJoinPreds = 0;
+if (thenJoinPred) {
+blocks[numJoinPreds++] = thenJoinPred;
+}
+if (elseJoinPred) {
+blocks[numJoinPreds++] = elseJoinPred;
+}
+
+if (numJoinPreds == 0) {
+return true;
+}
+
+MBasicBlock* join;
+if (!goToNewBlock(blocks[0], &join)) {
+return false;
+}
+for (size_t i = 1; i < numJoinPreds; ++i) {
+if (!goToExistingBlock(blocks[i], join)) {
+return false;
+}
+}
+
+curBlock_ = join;
+return popPushedDefs(defs);
}

bool startBlock() {
@@ -1308,10 +1305,10 @@ class FunctionCompiler {
return true;
}

-bool finishBlock(MDefinition** def) {
+bool finishBlock(DefVector* defs) {
MOZ_ASSERT(blockDepth_);
uint32_t topLabel = --blockDepth_;
-return bindBranches(topLabel, def);
+return bindBranches(topLabel, defs);
}

bool startLoop(MBasicBlock** loopHeader) {
@@ -1401,7 +1398,7 @@ class FunctionCompiler {
}

public:
-bool closeLoop(MBasicBlock* loopHeader, MDefinition** loopResult) {
+bool closeLoop(MBasicBlock* loopHeader, DefVector* loopResults) {
MOZ_ASSERT(blockDepth_ >= 1);
MOZ_ASSERT(loopDepth_);

@@ -1413,7 +1410,6 @@ class FunctionCompiler {
blockPatches_[headerLabel].empty());
blockDepth_--;
loopDepth_--;
-*loopResult = nullptr;
return true;
}

@@ -1428,7 +1424,7 @@ class FunctionCompiler {
// branches as forward jumps to a single backward jump. This is
// unfortunate but the optimizer is able to fold these into single jumps
// to backedges.
-MDefinition* _;
+DefVector _;
if (!bindBranches(headerLabel, &_)) {
return false;
}
@@ -1437,7 +1433,7 @@ class FunctionCompiler {

if (curBlock_) {
// We're on the loop backedge block, created by bindBranches.
-if (hasPushed(curBlock_)) {
+for (size_t i = 0, n = numPushed(curBlock_); i != n; i++) {
curBlock_->pop();
}

@@ -1462,8 +1458,7 @@ class FunctionCompiler {
}

blockDepth_ -= 1;
-*loopResult = inDeadCode() ? nullptr : popDefIfPushed();
-return true;
+return inDeadCode() || popPushedDefs(loopResults);
}

bool addControlFlowPatch(MControlInstruction* ins, uint32_t relative,
@@ -1479,7 +1474,7 @@ class FunctionCompiler {
return blockPatches_[absolute].append(ControlFlowPatch(ins, index));
}

-bool br(uint32_t relativeDepth, MDefinition* maybeValue) {
+bool br(uint32_t relativeDepth, const DefVector& values) {
if (inDeadCode()) {
return true;
}
@@ -1489,14 +1484,14 @@ class FunctionCompiler {
return false;
}

-pushDef(maybeValue);
+pushDefs(values);

curBlock_->end(jump);
curBlock_ = nullptr;
return true;
}

-bool brIf(uint32_t relativeDepth, MDefinition* maybeValue,
+bool brIf(uint32_t relativeDepth, const DefVector& values,
MDefinition* condition) {
if (inDeadCode()) {
return true;
@@ -1512,7 +1507,7 @@ class FunctionCompiler {
return false;
}

-pushDef(maybeValue);
+pushDefs(values);

curBlock_->end(test);
curBlock_ = joinBlock;
@@ -1520,7 +1515,7 @@ class FunctionCompiler {
}

bool brTable(MDefinition* operand, uint32_t defaultDepth,
-const Uint32Vector& depths, MDefinition* maybeValue) {
+const Uint32Vector& depths, const DefVector& values) {
if (inDeadCode()) {
return true;
}
@@ -1573,7 +1568,7 @@ class FunctionCompiler {
}
}

-pushDef(maybeValue);
+pushDefs(values);

curBlock_->end(table);
curBlock_ = nullptr;
@@ -1621,10 +1616,9 @@ class FunctionCompiler {
return next->addPredecessor(alloc(), prev);
}

-bool bindBranches(uint32_t absolute, MDefinition** def) {
+bool bindBranches(uint32_t absolute, DefVector* defs) {
if (absolute >= blockPatches_.length() || blockPatches_[absolute].empty()) {
-*def = inDeadCode() ? nullptr : popDefIfPushed();
-return true;
+return inDeadCode() || popPushedDefs(defs);
}

ControlFlowPatchVector& patches = blockPatches_[absolute];
@@ -1664,7 +1658,9 @@ class FunctionCompiler {

curBlock_ = join;

-*def = popDefIfPushed();
+if (!popPushedDefs(defs)) {
+return false;
+}

patches.clear();
return true;
@@ -1789,15 +1785,13 @@ static bool EmitIf(FunctionCompiler& f) {
}

static bool EmitElse(FunctionCompiler& f) {
-ExprType thenType;
-MDefinition* thenValue;
-if (!f.iter().readElse(&thenType, &thenValue)) {
+ResultType thenType;
+DefVector thenValues;
+if (!f.iter().readElse(&thenType, &thenValues)) {
return false;
}

-if (!IsVoid(thenType)) {
-f.pushDef(thenValue);
-}
+f.pushDefs(thenValues);

if (!f.switchToElse(f.iter().controlItem(), &f.iter().controlItem())) {
return false;
@@ -1808,40 +1802,33 @@ static bool EmitElse(FunctionCompiler& f) {

static bool EmitEnd(FunctionCompiler& f) {
LabelKind kind;
-ExprType type;
-MDefinition* value;
-if (!f.iter().readEnd(&kind, &type, &value)) {
+ResultType type;
+DefVector preJoinDefs;
+if (!f.iter().readEnd(&kind, &type, &preJoinDefs)) {
return false;
}

MBasicBlock* block = f.iter().controlItem();

f.iter().popEnd();

-if (!IsVoid(type)) {
-f.pushDef(value);
-}
+f.pushDefs(preJoinDefs);

-MDefinition* def = nullptr;
+DefVector postJoinDefs;
switch (kind) {
case LabelKind::Body:
MOZ_ASSERT(f.iter().controlStackEmpty());
-if (!f.finishBlock(&def)) {
+if (!f.finishBlock(&postJoinDefs)) {
return false;
}
-if (f.inDeadCode() || IsVoid(type)) {
-f.returnVoid();
-} else {
-f.returnExpr(def);
-}
+f.returnValues(postJoinDefs);
return f.iter().readFunctionEnd(f.iter().end());
case LabelKind::Block:
-if (!f.finishBlock(&def)) {
+if (!f.finishBlock(&postJoinDefs)) {
return false;
}
break;
case LabelKind::Loop:
-if (!f.closeLoop(block, &def)) {
+if (!f.closeLoop(block, &postJoinDefs)) {
return false;
}
break;
@@ -1852,76 +1839,54 @@ static bool EmitEnd(FunctionCompiler& f) {
return false;
}

-if (!f.joinIfElse(block, &def)) {
+if (!f.joinIfElse(block, &postJoinDefs)) {
return false;
}
break;
case LabelKind::Else:
-if (!f.joinIfElse(block, &def)) {
+if (!f.joinIfElse(block, &postJoinDefs)) {
return false;
}
break;
}

-if (!IsVoid(type)) {
-MOZ_ASSERT_IF(!f.inDeadCode(), def);
-f.iter().setResult(def);
-}
+MOZ_ASSERT_IF(!f.inDeadCode(), postJoinDefs.length() == type.length());
+f.iter().setResults(postJoinDefs.length(), postJoinDefs);

return true;
}

static bool EmitBr(FunctionCompiler& f) {
uint32_t relativeDepth;
-ExprType type;
-MDefinition* value;
-if (!f.iter().readBr(&relativeDepth, &type, &value)) {
+ResultType type;
+DefVector values;
+if (!f.iter().readBr(&relativeDepth, &type, &values)) {
return false;
}

-if (IsVoid(type)) {
-if (!f.br(relativeDepth, nullptr)) {
-return false;
-}
-} else {
-if (!f.br(relativeDepth, value)) {
-return false;
-}
-}
-
-return true;
+return f.br(relativeDepth, values);
}

static bool EmitBrIf(FunctionCompiler& f) {
uint32_t relativeDepth;
-ExprType type;
-MDefinition* value;
+ResultType type;
+DefVector values;
MDefinition* condition;
-if (!f.iter().readBrIf(&relativeDepth, &type, &value, &condition)) {
+if (!f.iter().readBrIf(&relativeDepth, &type, &values, &condition)) {
return false;
}

-if (IsVoid(type)) {
-if (!f.brIf(relativeDepth, nullptr, condition)) {
-return false;
-}
-} else {
-if (!f.brIf(relativeDepth, value, condition)) {
-return false;
-}
-}
-
-return true;
+return f.brIf(relativeDepth, values, condition);
}

static bool EmitBrTable(FunctionCompiler& f) {
Uint32Vector depths;
uint32_t defaultDepth;
-ExprType branchValueType;
-MDefinition* branchValue;
+ResultType branchValueType;
+DefVector branchValues;
MDefinition* index;
if (!f.iter().readBrTable(&depths, &defaultDepth, &branchValueType,
-&branchValue, &index)) {
+&branchValues, &index)) {
return false;
}

@@ -1937,24 +1902,19 @@ static bool EmitBrTable(FunctionCompiler& f) {
}

if (allSameDepth) {
-return f.br(defaultDepth, branchValue);
+return f.br(defaultDepth, branchValues);
}

-return f.brTable(index, defaultDepth, depths, branchValue);
+return f.brTable(index, defaultDepth, depths, branchValues);
}

static bool EmitReturn(FunctionCompiler& f) {
-MDefinition* value;
-if (!f.iter().readReturn(&value)) {
+DefVector values;
+if (!f.iter().readReturn(&values)) {
return false;
}

-if (f.funcType().results().length() == 0) {
-f.returnVoid();
-return true;
-}
-
-f.returnExpr(value);
+f.returnValues(values);
return true;
}

@@ -1967,8 +1927,6 @@ static bool EmitUnreachable(FunctionCompiler& f) {
return true;
}

-typedef IonOpIter::ValueVector DefVector;
-
static bool EmitCallArgs(FunctionCompiler& f, const FuncType& funcType,
const DefVector& args, CallCompileState* call) {
for (size_t i = 0, n = funcType.args().length(); i < n; ++i) {
File diff suppressed because it is too large
@@ -6268,12 +6268,23 @@ static bool EncodeExprList(Encoder& e, const AstExprVector& v) {
return true;
}

+static bool EncodeBlockType(Encoder& e, AstExprType& t) {
+ExprType type = t.type();
+static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
+MOZ_ASSERT(size_t(type.code()) < size_t(TypeCode::Limit));
+if (type.isRef()) {
+return e.writeFixedU8(uint8_t(ExprType::Ref)) &&
+e.writeVarU32(type.refTypeIndex());
+}
+return e.writeFixedU8(uint8_t(type.code()));
+}
+
static bool EncodeBlock(Encoder& e, AstBlock& b) {
if (!e.writeOp(b.op())) {
return false;
}

-if (!e.writeBlockType(b.type().type())) {
+if (!EncodeBlockType(e, b.type())) {
return false;
}

@@ -6458,7 +6469,7 @@ static bool EncodeIf(Encoder& e, AstIf& i) {
return false;
}

-if (!e.writeBlockType(i.type().type())) {
+if (!EncodeBlockType(e, i.type())) {
return false;
}

@@ -227,21 +227,6 @@ uint8_t* FuncType::serialize(uint8_t* cursor) const {
return cursor;
}

-namespace js {
-namespace wasm {
-
-// ExprType is not POD while ReadScalar requires POD, so specialize.
-template <>
-inline const uint8_t* ReadScalar<ExprType>(const uint8_t* src, ExprType* dst) {
-static_assert(sizeof(PackedTypeCode) == sizeof(ExprType),
-"ExprType must carry only a PackedTypeCode");
-memcpy(dst->packedPtr(), src, sizeof(PackedTypeCode));
-return src + sizeof(*dst);
-}
-
-} // namespace wasm
-} // namespace js
-
const uint8_t* FuncType::deserialize(const uint8_t* cursor) {
cursor = DeserializePodVector(cursor, &results_);
if (!cursor) {
@@ -213,26 +213,28 @@ static_assert(std::is_pod<PackedTypeCode>::value,
"must be POD to be simply serialized/deserialized");

const uint32_t NoTypeCode = 0xFF; // Only use these
-const uint32_t NoRefTypeIndex = 0xFFFFFF; // with PackedTypeCode
-
-static inline PackedTypeCode InvalidPackedTypeCode() {
-return PackedTypeCode((NoRefTypeIndex << 8) | NoTypeCode);
-}
-
-static inline PackedTypeCode PackTypeCode(TypeCode tc) {
-MOZ_ASSERT(uint32_t(tc) <= 0xFF);
-MOZ_ASSERT(tc != TypeCode::Ref);
-return PackedTypeCode((NoRefTypeIndex << 8) | uint32_t(tc));
-}
+const uint32_t NoRefTypeIndex = 0x3FFFFF; // with PackedTypeCode

static inline PackedTypeCode PackTypeCode(TypeCode tc, uint32_t refTypeIndex) {
MOZ_ASSERT(uint32_t(tc) <= 0xFF);
MOZ_ASSERT_IF(tc != TypeCode::Ref, refTypeIndex == NoRefTypeIndex);
MOZ_ASSERT_IF(tc == TypeCode::Ref, refTypeIndex <= MaxTypes);
-static_assert(MaxTypes < (1 << (32 - 8)), "enough bits");
+// A PackedTypeCode should be representable in a single word, so in the
+// smallest case, 32 bits. However sometimes 2 bits of the word may be taken
+// by a pointer tag; for that reason, limit to 30 bits; and then there's the
+// 8-bit typecode, so 22 bits left for the type index.
+static_assert(MaxTypes < (1 << (30 - 8)), "enough bits");
return PackedTypeCode((refTypeIndex << 8) | uint32_t(tc));
}

+static inline PackedTypeCode PackTypeCode(TypeCode tc) {
+return PackTypeCode(tc, NoRefTypeIndex);
+}
+
+static inline PackedTypeCode InvalidPackedTypeCode() {
+return PackedTypeCode(NoTypeCode);
+}
+
static inline PackedTypeCode PackedTypeCodeFromBits(uint32_t bits) {
return PackTypeCode(TypeCode(bits & 255), bits >> 8);
}
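Editor's aside on the packing described in the comment above: 8 bits of type code in the low byte, the reference type index in the bits above it, with the whole value kept under 30 bits so two tag bits stay free when the word is stored in a tagged pointer. The standalone sketch below is hypothetical (it is not the real PackedTypeCode, and 0x6e is just an invented example code), but the arithmetic mirrors the constants in this hunk.

  #include <cassert>
  #include <cstdint>

  constexpr uint32_t NoTypeCodeSketch = 0xff;
  constexpr uint32_t NoRefTypeIndexSketch = 0x3fffff;  // 22 bits, all ones

  constexpr uint32_t Pack(uint8_t typeCode, uint32_t refTypeIndex) {
    return (refTypeIndex << 8) | typeCode;
  }
  constexpr uint8_t UnpackCode(uint32_t bits) { return bits & 0xff; }
  constexpr uint32_t UnpackIndex(uint32_t bits) { return bits >> 8; }

  static_assert(Pack(NoTypeCodeSketch, NoRefTypeIndexSketch) < (1u << 30),
                "even the 'invalid' encoding fits in 30 bits");

  int main() {
    uint32_t p = Pack(0x6e /* hypothetical ref type code */, 17);
    assert(UnpackCode(p) == 0x6e && UnpackIndex(p) == 17);  // round trip
    return 0;
  }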
@@ -444,15 +444,24 @@ bool wasm::DecodeValidatedLocalEntries(Decoder& d, ValTypeVector* locals) {

// Function body validation.

+class NothingVector {
+Nothing unused_;
+
+public:
+bool resize(size_t length) { return true; }
+Nothing& operator[](size_t) { return unused_; }
+Nothing& back() { return unused_; }
+};
+
struct ValidatingPolicy {
typedef Nothing Value;
+typedef NothingVector ValueVector;
typedef Nothing ControlItem;
};

typedef OpIter<ValidatingPolicy> ValidatingOpIter;

static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
// FIXME(1401675): Replace with BlockType.
uint32_t funcIndex,
const ValTypeVector& locals,
const uint8_t* bodyEnd, Decoder* d) {
@@ -473,12 +482,13 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
}

Nothing nothing;
+NothingVector nothings;
+ResultType unusedType;

switch (op.b0) {
case uint16_t(Op::End): {
LabelKind unusedKind;
-ExprType unusedType;
-if (!iter.readEnd(&unusedKind, &unusedType, &nothing)) {
+if (!iter.readEnd(&unusedKind, &unusedType, &nothings)) {
return false;
}
iter.popEnd();
@@ -493,12 +503,12 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
CHECK(iter.readDrop());
case uint16_t(Op::Call): {
uint32_t unusedIndex;
-ValidatingOpIter::ValueVector unusedArgs;
+NothingVector unusedArgs;
CHECK(iter.readCall(&unusedIndex, &unusedArgs));
}
case uint16_t(Op::CallIndirect): {
uint32_t unusedIndex, unusedIndex2;
-ValidatingOpIter::ValueVector unusedArgs;
+NothingVector unusedArgs;
CHECK(iter.readCallIndirect(&unusedIndex, &unusedIndex2, &nothing,
&unusedArgs));
}
@@ -573,10 +583,8 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
CHECK(iter.readLoop());
case uint16_t(Op::If):
CHECK(iter.readIf(&nothing));
-case uint16_t(Op::Else): {
-ExprType type;
-CHECK(iter.readElse(&type, &nothing));
-}
+case uint16_t(Op::Else):
+CHECK(iter.readElse(&unusedType, &nothings));
case uint16_t(Op::I32Clz):
case uint16_t(Op::I32Ctz):
case uint16_t(Op::I32Popcnt):
@@ -815,23 +823,20 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
CHECK(iter.readMemorySize());
case uint16_t(Op::Br): {
uint32_t unusedDepth;
-ExprType unusedType;
-CHECK(iter.readBr(&unusedDepth, &unusedType, &nothing));
+CHECK(iter.readBr(&unusedDepth, &unusedType, &nothings));
}
case uint16_t(Op::BrIf): {
uint32_t unusedDepth;
-ExprType unusedType;
-CHECK(iter.readBrIf(&unusedDepth, &unusedType, &nothing, &nothing));
+CHECK(iter.readBrIf(&unusedDepth, &unusedType, &nothings, &nothing));
}
case uint16_t(Op::BrTable): {
Uint32Vector unusedDepths;
uint32_t unusedDefault;
-ExprType unusedType;
CHECK(iter.readBrTable(&unusedDepths, &unusedDefault, &unusedType,
-&nothing, &nothing));
+&nothings, &nothing));
}
case uint16_t(Op::Return):
-CHECK(iter.readReturn(&nothing));
+CHECK(iter.readReturn(&nothings));
case uint16_t(Op::Unreachable):
CHECK(iter.readUnreachable());
case uint16_t(Op::MiscPrefix): {
@@ -958,7 +963,7 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
return iter.unrecognizedOpcode(&op);
}
uint32_t unusedUint;
-ValidatingOpIter::ValueVector unusedArgs;
+NothingVector unusedArgs;
CHECK(iter.readStructNew(&unusedUint, &unusedArgs));
}
case uint32_t(MiscOp::StructGet): {
@@ -1210,10 +1215,8 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
bool wasm::ValidateFunctionBody(const ModuleEnvironment& env,
uint32_t funcIndex, uint32_t bodySize,
Decoder& d) {
-const FuncType& funcType = *env.funcTypes[funcIndex];
-
ValTypeVector locals;
-if (!locals.appendAll(funcType.args())) {
+if (!locals.appendAll(env.funcTypes[funcIndex]->args())) {
return false;
}

@@ -396,15 +396,6 @@ class Encoder {
}
return writeFixedU8(uint8_t(type.code()));
}
-MOZ_MUST_USE bool writeBlockType(ExprType type) {
-static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
-MOZ_ASSERT(size_t(type.code()) < size_t(TypeCode::Limit));
-if (type.isRef()) {
-return writeFixedU8(uint8_t(ExprType::Ref)) &&
-writeVarU32(type.refTypeIndex());
-}
-return writeFixedU8(uint8_t(type.code()));
-}
MOZ_MUST_USE bool writeOp(Op op) {
static_assert(size_t(Op::Limit) == 256, "fits");
MOZ_ASSERT(size_t(op) < size_t(Op::Limit));
@@ -626,6 +617,16 @@ class Decoder {
const uint8_t* begin() const { return beg_; }
const uint8_t* end() const { return end_; }

+// Peek at the next byte, if it exists, without advancing the position.
+
+bool peekByte(uint8_t* byte) {
+if (done()) {
+return false;
+}
+*byte = *cur_;
+return true;
+}
+
// Fixed-size encoding operations simply copy the literal bytes (without
// attempting to align).
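Editor's aside: the peekByte helper added above is what lets a block-type reader look at the next byte and decide between the one-byte value-type path and the signed-LEB type-index path before consuming anything. Below is a standalone mock with the same semantics (simplified types; MockDecoder is invented, not the real Decoder).

  #include <cassert>
  #include <cstdint>

  struct MockDecoder {
    const uint8_t* cur;
    const uint8_t* end;
    bool done() const { return cur == end; }
    bool peekByte(uint8_t* byte) {
      if (done()) {
        return false;
      }
      *byte = *cur;  // note: cur is not advanced
      return true;
    }
  };

  int main() {
    const uint8_t body[] = {0x40};
    MockDecoder d{body, body + sizeof(body)};
    uint8_t b = 0;
    assert(d.peekByte(&b) && b == 0x40);
    assert(d.cur == body);  // still pointing at the same byte
    return 0;
  }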
@@ -708,20 +709,6 @@ class Decoder {
}
return true;
}
-MOZ_MUST_USE bool readBlockType(uint8_t* code, uint32_t* refTypeIndex) {
-static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
-if (!readFixedU8(code)) {
-return false;
-}
-if (*code == uint8_t(TypeCode::Ref)) {
-if (!readVarU32(refTypeIndex)) {
-return false;
-}
-} else {
-*refTypeIndex = NoRefTypeIndex;
-}
-return true;
-}
MOZ_MUST_USE bool readOp(OpBytes* op) {
static_assert(size_t(Op::Limit) == 256, "fits");
uint8_t u8;