Backed out changeset bf0900c02d78 (bug 1577508) for build bustages on a CLOSED TREE

Andreea Pavel 2019-10-03 12:43:21 +03:00
parent c468ba3c9f
commit ab6c3faf7b
12 changed files with 668 additions and 992 deletions

View File

@ -230,19 +230,10 @@ assertErrorMessage(() => wasmEval(moduleWithSections([
nameSection([moduleNameSubsection('hi')])])
).f(), RuntimeError, /unreachable/);
// Diagnose invalid block signature types.
for (var bad of [0xff, 1, 0x3f])
// Diagnose nonstandard block signature types.
for (var bad of [0xff, 0, 1, 0x3f])
assertErrorMessage(() => wasmEval(moduleWithSections([sigSection([v2vSig]), declSection([0]), bodySection([funcBody({locals:[], body:[BlockCode, bad, EndCode]})])])), CompileError, /invalid .*block type/);
if (wasmMultiValueEnabled()) {
// In this test module, 0 denotes a void-to-void block type.
let binary = moduleWithSections([sigSection([v2vSig]), declSection([0]), bodySection([funcBody({locals:[], body:[BlockCode, 0, EndCode]})])]);
assertEq(WebAssembly.validate(binary), true);
} else {
const bad = 0;
assertErrorMessage(() => wasmEval(moduleWithSections([sigSection([v2vSig]), declSection([0]), bodySection([funcBody({locals:[], body:[BlockCode, bad, EndCode]})])])), CompileError, /invalid .*block type/);
}
// Ensure all invalid opcodes rejected
for (let op of undefinedOpcodes) {
let binary = moduleWithSections([v2vSigSection, declSection([0]), bodySection([funcBody({locals:[], body:[op]})])]);
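For reference, the single inline block-type byte exercised at the top of this hunk can be classified with a small standalone C++ sketch. DescribeBlockTypeByte is a hypothetical helper, not SpiderMonkey code; the byte values come from the TypeCode constants later in this commit. With multi-value, a byte with the SLEB128 sign bit clear reads as a small non-negative function-type index, which is why 0 validates against a module whose type 0 is ()->(); 1 stays in the failing list, presumably because the module defines no type index 1.

#include <cstdint>
#include <cstdio>

const char* DescribeBlockTypeByte(uint8_t b, bool multiValueEnabled) {
  switch (b) {
    case 0x40: return "empty block type, ()->()";
    case 0x7f: return "single result, i32";
    case 0x7e: return "single result, i64";
    case 0x7d: return "single result, f32";
    case 0x7c: return "single result, f64";
    default:
      // With multi-value, bits 6 and 7 clear means a non-negative single-byte
      // SLEB128, i.e. an index into the type section (0 in the test above).
      if (multiValueEnabled && (b & 0xc0) == 0) return "function type index";
      return "invalid block type";
  }
}

int main() {
  std::printf("%s\n", DescribeBlockTypeByte(0x40, true));                         // ()->()
  std::printf("%s\n", DescribeBlockTypeByte(0x00, /*multiValueEnabled=*/true));   // type index 0
  std::printf("%s\n", DescribeBlockTypeByte(0x00, /*multiValueEnabled=*/false));  // invalid
  std::printf("%s\n", DescribeBlockTypeByte(0xff, /*multiValueEnabled=*/true));   // invalid either way
  return 0;
}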

View File

@ -18,7 +18,7 @@ const invalidRefBlockType = funcBody({locals:[], body:[
0x42,
EndCode,
]});
checkInvalid(invalidRefBlockType, /ref/);
checkInvalid(invalidRefBlockType, /invalid inline block type/);
const invalidTooBigRefType = funcBody({locals:[], body:[
BlockCode,
@ -26,4 +26,4 @@ const invalidTooBigRefType = funcBody({locals:[], body:[
varU32(1000000),
EndCode,
]});
checkInvalid(invalidTooBigRefType, /ref/);
checkInvalid(invalidTooBigRefType, /invalid inline block type/);

View File

@ -202,7 +202,7 @@ wasmFailValidateText(`
(br_table 1 0 (i32.const 15))
)
)
)`, /br_table targets must all have the same arity/);
)`, /br_table operand must be subtype of all target types/);
wasmFailValidateText(`
(module
@ -212,7 +212,7 @@ wasmFailValidateText(`
(br_table 1 0 (i32.const 15))
)
)
)`, /br_table targets must all have the same arity/);
)`, /br_table operand must be subtype of all target types/);
wasmValidateText(`
(module

View File

@ -2441,21 +2441,11 @@ class BaseCompiler final : public BaseCompilerInterface {
deadThenBranch(false) {}
};
class NothingVector {
Nothing unused_;
public:
bool resize(size_t length) { return true; }
Nothing& operator[](size_t) { return unused_; }
Nothing& back() { return unused_; }
};
struct BaseCompilePolicy {
// The baseline compiler tracks values on a stack of its own -- it
// needs to scan that stack for spilling -- and thus has no need
// for the values maintained by the iterator.
using Value = Nothing;
using ValueVector = NothingVector;
// The baseline compiler uses the iterator's control stack, attaching
// its own control information.
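The comments above describe the shape of the design: OpIter is parameterized on a compile policy, and the baseline compiler's policy makes the iterator's values weightless because operands live on the baseline compiler's own stack. A reduced toy of that pattern (ToyOpIter and ToyBaselinePolicy are illustrative names, not the real OpIter interface):

#include <cstdint>

struct Nothing {};  // the placeholder value type used by the baseline policy

template <typename Policy>
class ToyOpIter {
 public:
  using Value = typename Policy::Value;

  // Validate an i32.const and "produce" its value in whatever form the
  // policy asks for; for Nothing this is pure bookkeeping.
  bool readI32Const(int32_t bits, Value* result) {
    (void)bits;         // a real iterator would decode and record this
    *result = Value{};  // Ion's policy would hand back an MDefinition* here
    return true;
  }
};

struct ToyBaselinePolicy {
  using Value = Nothing;  // see the comment above: no values from the iterator
};

int main() {
  ToyOpIter<ToyBaselinePolicy> iter;
  Nothing unused;
  return iter.readI32Const(42, &unused) ? 0 : 1;
}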
@ -2787,105 +2777,89 @@ class BaseCompiler final : public BaseCompilerInterface {
}
}
void maybeReserveJoinRegI(ResultType type) {
if (type.empty()) {
return;
}
MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
switch (type[0].code()) {
case ValType::I32:
void maybeReserveJoinRegI(ExprType type) {
switch (type.code()) {
case ExprType::I32:
needI32(joinRegI32_);
break;
case ValType::I64:
case ExprType::I64:
needI64(joinRegI64_);
break;
case ValType::F32:
case ValType::F64:
break;
case ValType::FuncRef:
case ValType::AnyRef:
case ValType::NullRef:
case ValType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
case ExprType::NullRef:
case ExprType::Ref:
needRef(joinRegPtr_);
break;
default:;
}
}
void maybeUnreserveJoinRegI(ResultType type) {
if (type.empty()) {
return;
}
MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
switch (type[0].code()) {
case ValType::I32:
void maybeUnreserveJoinRegI(ExprType type) {
switch (type.code()) {
case ExprType::I32:
freeI32(joinRegI32_);
break;
case ValType::I64:
case ExprType::I64:
freeI64(joinRegI64_);
break;
case ValType::F32:
case ValType::F64:
break;
case ValType::FuncRef:
case ValType::AnyRef:
case ValType::NullRef:
case ValType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
case ExprType::NullRef:
case ExprType::Ref:
freeRef(joinRegPtr_);
break;
default:;
}
}
void maybeReserveJoinReg(ResultType type) {
if (type.empty()) {
return;
}
MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
switch (type[0].code()) {
case ValType::I32:
void maybeReserveJoinReg(ExprType type) {
switch (type.code()) {
case ExprType::I32:
needI32(joinRegI32_);
break;
case ValType::I64:
case ExprType::I64:
needI64(joinRegI64_);
break;
case ValType::F32:
case ExprType::F32:
needF32(joinRegF32_);
break;
case ValType::F64:
case ExprType::F64:
needF64(joinRegF64_);
break;
case ValType::Ref:
case ValType::NullRef:
case ValType::FuncRef:
case ValType::AnyRef:
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
needRef(joinRegPtr_);
break;
default:
break;
}
}
void maybeUnreserveJoinReg(ResultType type) {
if (type.empty()) {
return;
}
MOZ_ASSERT(type.length() == 1, "multi-value joins unimplemented");
switch (type[0].code()) {
case ValType::I32:
void maybeUnreserveJoinReg(ExprType type) {
switch (type.code()) {
case ExprType::I32:
freeI32(joinRegI32_);
break;
case ValType::I64:
case ExprType::I64:
freeI64(joinRegI64_);
break;
case ValType::F32:
case ExprType::F32:
freeF32(joinRegF32_);
break;
case ValType::F64:
case ExprType::F64:
freeF64(joinRegF64_);
break;
case ValType::Ref:
case ValType::NullRef:
case ValType::FuncRef:
case ValType::AnyRef:
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
freeRef(joinRegPtr_);
break;
default:
break;
}
}
@ -3779,47 +3753,48 @@ class BaseCompiler final : public BaseCompilerInterface {
// popping of the stack we can just use the JoinReg as it will
// become available in that process.
MOZ_MUST_USE Maybe<AnyReg> popJoinRegUnlessVoid(ResultType type) {
if (type.empty()) {
return Nothing();
}
MOZ_ASSERT(type.length() == 1, "multi-value return unimplemented");
switch (type[0].code()) {
case ValType::I32: {
MOZ_MUST_USE Maybe<AnyReg> popJoinRegUnlessVoid(ExprType type) {
switch (type.code()) {
case ExprType::Void: {
return Nothing();
}
case ExprType::I32: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterI32 || k == Stk::ConstI32 ||
k == Stk::MemI32 || k == Stk::LocalI32);
return Some(AnyReg(popI32(joinRegI32_)));
}
case ValType::I64: {
case ExprType::I64: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterI64 || k == Stk::ConstI64 ||
k == Stk::MemI64 || k == Stk::LocalI64);
return Some(AnyReg(popI64(joinRegI64_)));
}
case ValType::F64: {
case ExprType::F64: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterF64 || k == Stk::ConstF64 ||
k == Stk::MemF64 || k == Stk::LocalF64);
return Some(AnyReg(popF64(joinRegF64_)));
}
case ValType::F32: {
case ExprType::F32: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterF32 || k == Stk::ConstF32 ||
k == Stk::MemF32 || k == Stk::LocalF32);
return Some(AnyReg(popF32(joinRegF32_)));
}
case ValType::Ref:
case ValType::NullRef:
case ValType::FuncRef:
case ValType::AnyRef: {
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterRef || k == Stk::ConstRef ||
k == Stk::MemRef || k == Stk::LocalRef);
return Some(AnyReg(popRef(joinRegPtr_)));
}
default: {
MOZ_CRASH("Compiler bug: unexpected expression type");
}
}
// MOZ_CRASH("Compiler bug: unexpected expression type");
}
// If we ever start not sync-ing on entry to Block (but instead try to sync
@ -3828,37 +3803,36 @@ class BaseCompiler final : public BaseCompilerInterface {
// joinreg in the contexts it's being used, so some other solution will need
// to be found.
MOZ_MUST_USE Maybe<AnyReg> captureJoinRegUnlessVoid(ResultType type) {
if (type.empty()) {
return Nothing();
}
MOZ_ASSERT(type.length() == 1, "multi-value return unimplemented");
switch (type[0].code()) {
case ValType::I32:
MOZ_MUST_USE Maybe<AnyReg> captureJoinRegUnlessVoid(ExprType type) {
switch (type.code()) {
case ExprType::I32:
MOZ_ASSERT(isAvailableI32(joinRegI32_));
needI32(joinRegI32_);
return Some(AnyReg(joinRegI32_));
case ValType::I64:
case ExprType::I64:
MOZ_ASSERT(isAvailableI64(joinRegI64_));
needI64(joinRegI64_);
return Some(AnyReg(joinRegI64_));
case ValType::F32:
case ExprType::F32:
MOZ_ASSERT(isAvailableF32(joinRegF32_));
needF32(joinRegF32_);
return Some(AnyReg(joinRegF32_));
case ValType::F64:
case ExprType::F64:
MOZ_ASSERT(isAvailableF64(joinRegF64_));
needF64(joinRegF64_);
return Some(AnyReg(joinRegF64_));
case ValType::Ref:
case ValType::NullRef:
case ValType::FuncRef:
case ValType::AnyRef:
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
MOZ_ASSERT(isAvailableRef(joinRegPtr_));
needRef(joinRegPtr_);
return Some(AnyReg(joinRegPtr_));
case ExprType::Void:
return Nothing();
default:
MOZ_CRASH("Compiler bug: unexpected type");
}
// MOZ_CRASH("Compiler bug: unexpected type");
}
void pushJoinRegUnlessVoid(const Maybe<AnyReg>& r) {
@ -6597,22 +6571,23 @@ class BaseCompiler final : public BaseCompilerInterface {
const StackHeight stackHeight; // The value to pop to along the taken edge,
// unless !hasPop()
const bool invertBranch; // If true, invert the sense of the branch
const ResultType resultType; // The result propagated along the edges
const ExprType
resultType; // The result propagated along the edges, or Void
explicit BranchState(Label* label)
: label(label),
stackHeight(StackHeight::Invalid()),
invertBranch(false),
resultType(ResultType::Empty()) {}
resultType(ExprType::Void) {}
BranchState(Label* label, bool invertBranch)
: label(label),
stackHeight(StackHeight::Invalid()),
invertBranch(invertBranch),
resultType(ResultType::Empty()) {}
resultType(ExprType::Void) {}
BranchState(Label* label, StackHeight stackHeight, bool invertBranch,
ResultType resultType)
ExprType resultType)
: label(label),
stackHeight(stackHeight),
invertBranch(invertBranch),
@ -6744,10 +6719,9 @@ class BaseCompiler final : public BaseCompilerInterface {
template <bool isSetLocal>
MOZ_MUST_USE bool emitSetOrTeeLocal(uint32_t slot);
void endBlock(ResultType type);
void endLoop(ResultType type);
void endBlock(ExprType type);
void endIfThen();
void endIfThenElse(ResultType type);
void endIfThenElse(ExprType type);
void doReturn(bool popStack);
void pushReturnValueOfCall(const FunctionCall& call, ValType type);
@ -8160,7 +8134,7 @@ bool BaseCompiler::emitBlock() {
return true;
}
void BaseCompiler::endBlock(ResultType type) {
void BaseCompiler::endBlock(ExprType type) {
Control& block = controlItem();
// Save the value.
@ -8277,10 +8251,10 @@ void BaseCompiler::endIfThen() {
}
bool BaseCompiler::emitElse() {
ResultType thenType;
NothingVector unused_thenValues;
ExprType thenType;
Nothing unused_thenValue;
if (!iter_.readElse(&thenType, &unused_thenValues)) {
if (!iter_.readElse(&thenType, &unused_thenValue)) {
return false;
}
@ -8321,7 +8295,7 @@ bool BaseCompiler::emitElse() {
return true;
}
void BaseCompiler::endIfThenElse(ResultType type) {
void BaseCompiler::endIfThenElse(ExprType type) {
Control& ifThenElse = controlItem();
// The expression type is not a reliable guide to what we'll find
@ -8365,9 +8339,9 @@ void BaseCompiler::endIfThenElse(ResultType type) {
bool BaseCompiler::emitEnd() {
LabelKind kind;
ResultType type;
NothingVector unused_values;
if (!iter_.readEnd(&kind, &type, &unused_values)) {
ExprType type;
Nothing unused_value;
if (!iter_.readEnd(&kind, &type, &unused_value)) {
return false;
}
@ -8400,9 +8374,9 @@ bool BaseCompiler::emitEnd() {
bool BaseCompiler::emitBr() {
uint32_t relativeDepth;
ResultType type;
NothingVector unused_values;
if (!iter_.readBr(&relativeDepth, &type, &unused_values)) {
ExprType type;
Nothing unused_value;
if (!iter_.readBr(&relativeDepth, &type, &unused_value)) {
return false;
}
@ -8433,10 +8407,9 @@ bool BaseCompiler::emitBr() {
bool BaseCompiler::emitBrIf() {
uint32_t relativeDepth;
ResultType type;
NothingVector unused_values;
Nothing unused_condition;
if (!iter_.readBrIf(&relativeDepth, &type, &unused_values,
ExprType type;
Nothing unused_value, unused_condition;
if (!iter_.readBrIf(&relativeDepth, &type, &unused_value,
&unused_condition)) {
return false;
}
@ -8459,17 +8432,10 @@ bool BaseCompiler::emitBrIf() {
bool BaseCompiler::emitBrTable() {
Uint32Vector depths;
uint32_t defaultDepth;
ResultType type;
NothingVector unused_values;
Nothing unused_index;
// N.B., `type' gets set to the type of the default branch target. In the
// presence of subtyping, it could be that the different branch targets have
// different types. Here we rely on the assumption that the value
// representations (e.g. Stk value types) of all branch target types are the
// same, in the baseline compiler. Notably, this means that all Ref types
// should be represented the same.
if (!iter_.readBrTable(&depths, &defaultDepth, &type, &unused_values,
&unused_index)) {
ExprType branchValueType;
Nothing unused_value, unused_index;
if (!iter_.readBrTable(&depths, &defaultDepth, &branchValueType,
&unused_value, &unused_index)) {
return false;
}
@ -8478,14 +8444,14 @@ bool BaseCompiler::emitBrTable() {
}
// Don't use joinReg for rc
maybeReserveJoinRegI(type);
maybeReserveJoinRegI(branchValueType);
// Table switch value always on top.
RegI32 rc = popI32();
maybeUnreserveJoinRegI(type);
maybeUnreserveJoinRegI(branchValueType);
Maybe<AnyReg> r = popJoinRegUnlessVoid(type);
Maybe<AnyReg> r = popJoinRegUnlessVoid(branchValueType);
Label dispatchCode;
masm.branch32(Assembler::Below, rc, Imm32(depths.length()), &dispatchCode);
@ -8599,8 +8565,8 @@ void BaseCompiler::doReturn(bool popStack) {
}
bool BaseCompiler::emitReturn() {
NothingVector unused_values;
if (!iter_.readReturn(&unused_values)) {
Nothing unused_value;
if (!iter_.readReturn(&unused_value)) {
return false;
}
@ -8687,7 +8653,7 @@ bool BaseCompiler::emitCall() {
uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
uint32_t funcIndex;
NothingVector args_;
BaseOpIter::ValueVector args_;
if (!iter_.readCall(&funcIndex, &args_)) {
return false;
}
@ -8741,7 +8707,7 @@ bool BaseCompiler::emitCallIndirect() {
uint32_t funcTypeIndex;
uint32_t tableIndex;
Nothing callee_;
NothingVector args_;
BaseOpIter::ValueVector args_;
if (!iter_.readCallIndirect(&funcTypeIndex, &tableIndex, &callee_, &args_)) {
return false;
}
@ -8846,6 +8812,7 @@ bool BaseCompiler::emitUnaryMathBuiltinCall(SymbolicAddress callee,
popValueStackBy(numArgs);
// We know retType isn't ExprType::Void here, so there's no need to check it.
pushReturnValueOfCall(baselineCall, retType);
return true;
@ -10474,7 +10441,7 @@ bool BaseCompiler::emitStructNew() {
uint32_t lineOrBytecode = readCallSiteLineOrBytecode();
uint32_t typeIndex;
NothingVector args;
BaseOpIter::ValueVector args;
if (!iter_.readStructNew(&typeIndex, &args)) {
return false;
}
@ -10495,6 +10462,10 @@ bool BaseCompiler::emitStructNew() {
return false;
}
// As many arguments as there are fields.
MOZ_ASSERT(args.length() == structType.fields_.length());
// Optimization opportunity: Iterate backward to pop arguments off the
// stack. This will generate more instructions than we want, since we
// really only need to pop the stack once at the end, not for every element,

View File

@ -42,13 +42,7 @@ enum class SectionId {
GcFeatureOptIn = 42 // Arbitrary, but fits in 7 bits
};
// WebAssembly type encodings are all single-byte negative SLEB128s, hence:
// forall tc:TypeCode. ((tc & SLEB128SignMask) == SLEB128SignBit)
static const uint8_t SLEB128SignMask = 0xc0;
static const uint8_t SLEB128SignBit = 0x40;
enum class TypeCode {
I32 = 0x7f, // SLEB128(-0x01)
I64 = 0x7e, // SLEB128(-0x02)
F32 = 0x7d, // SLEB128(-0x03)
@ -69,7 +63,7 @@ enum class TypeCode {
// Type constructor for structure types - unofficial
Struct = 0x50, // SLEB128(-0x30)
// The 'empty' case of blocktype.
// Special code representing the block signature ()->()
BlockVoid = 0x40, // SLEB128(-0x40)
// Type designator for null - unofficial, will not appear in the binary format
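The sign-bit claim in the SLEB128 comment near the top of this file can be spot-checked against the codes the enum lists; each has bit 7 clear and the 0x40 sign bit set. A standalone check:

#include <cassert>
#include <cstdint>
#include <initializer_list>

int main() {
  const unsigned SLEB128SignMask = 0xc0;
  const unsigned SLEB128SignBit = 0x40;
  // I32, I64, F32, Struct, BlockVoid from the enum above.
  for (unsigned tc : {0x7fu, 0x7eu, 0x7du, 0x50u, 0x40u}) {
    assert((tc & SLEB128SignMask) == SLEB128SignBit);
  }
  return 0;
}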

View File

@ -43,12 +43,10 @@ using mozilla::Some;
namespace {
typedef Vector<MBasicBlock*, 8, SystemAllocPolicy> BlockVector;
typedef Vector<MDefinition*, 8, SystemAllocPolicy> DefVector;
struct IonCompilePolicy {
// We store SSA definitions in the value stack.
typedef MDefinition* Value;
typedef DefVector ValueVector;
// We store loop headers and then/else blocks in the control flow stack.
typedef MBasicBlock* ControlItem;
@ -1153,18 +1151,23 @@ class FunctionCompiler {
inline bool inDeadCode() const { return curBlock_ == nullptr; }
void returnValues(const DefVector& values) {
void returnExpr(MDefinition* operand) {
if (inDeadCode()) {
return;
}
MOZ_ASSERT(values.length() <= 1, "until multi-return");
MWasmReturn* ins = MWasmReturn::New(alloc(), operand);
curBlock_->end(ins);
curBlock_ = nullptr;
}
if (values.empty()) {
curBlock_->end(MWasmReturnVoid::New(alloc()));
} else {
curBlock_->end(MWasmReturn::New(alloc(), values[0]));
void returnVoid() {
if (inDeadCode()) {
return;
}
MWasmReturnVoid* ins = MWasmReturnVoid::New(alloc());
curBlock_->end(ins);
curBlock_ = nullptr;
}
@ -1180,42 +1183,39 @@ class FunctionCompiler {
}
private:
static uint32_t numPushed(MBasicBlock* block) {
return block->stackDepth() - block->info().firstStackSlot();
static bool hasPushed(MBasicBlock* block) {
uint32_t numPushed = block->stackDepth() - block->info().firstStackSlot();
MOZ_ASSERT(numPushed == 0 || numPushed == 1);
return numPushed;
}
public:
void pushDefs(const DefVector& defs) {
void pushDef(MDefinition* def) {
if (inDeadCode()) {
return;
}
MOZ_ASSERT(numPushed(curBlock_) == 0);
for (MDefinition* def : defs) {
MOZ_ASSERT(def->type() != MIRType::None);
MOZ_ASSERT(!hasPushed(curBlock_));
if (def && def->type() != MIRType::None) {
curBlock_->push(def);
}
}
bool popPushedDefs(DefVector* defs) {
size_t n = numPushed(curBlock_);
if (!defs->resizeUninitialized(n)) {
return false;
MDefinition* popDefIfPushed() {
if (!hasPushed(curBlock_)) {
return nullptr;
}
for (; n > 0; n--) {
MDefinition* def = curBlock_->pop();
MOZ_ASSERT(def->type() != MIRType::Value);
(*defs)[n - 1] = def;
}
return true;
MDefinition* def = curBlock_->pop();
MOZ_ASSERT(def->type() != MIRType::Value);
return def;
}
private:
void addJoinPredecessor(const DefVector& defs, MBasicBlock** joinPred) {
void addJoinPredecessor(MDefinition* def, MBasicBlock** joinPred) {
*joinPred = curBlock_;
if (inDeadCode()) {
return;
}
pushDefs(defs);
pushDef(def);
}
public:
@ -1241,15 +1241,15 @@ class FunctionCompiler {
}
bool switchToElse(MBasicBlock* elseBlock, MBasicBlock** thenJoinPred) {
DefVector values;
if (!finishBlock(&values)) {
MDefinition* ifDef;
if (!finishBlock(&ifDef)) {
return false;
}
if (!elseBlock) {
*thenJoinPred = nullptr;
} else {
addJoinPredecessor(values, thenJoinPred);
addJoinPredecessor(ifDef, thenJoinPred);
curBlock_ = elseBlock;
mirGraph().moveBlockToEnd(curBlock_);
@ -1258,44 +1258,47 @@ class FunctionCompiler {
return startBlock();
}
bool joinIfElse(MBasicBlock* thenJoinPred, DefVector* defs) {
DefVector values;
if (!finishBlock(&values)) {
bool joinIfElse(MBasicBlock* thenJoinPred, MDefinition** def) {
MDefinition* elseDef;
if (!finishBlock(&elseDef)) {
return false;
}
if (!thenJoinPred && inDeadCode()) {
return true;
}
*def = nullptr;
} else {
MBasicBlock* elseJoinPred;
addJoinPredecessor(elseDef, &elseJoinPred);
MBasicBlock* elseJoinPred;
addJoinPredecessor(values, &elseJoinPred);
mozilla::Array<MBasicBlock*, 2> blocks;
size_t numJoinPreds = 0;
if (thenJoinPred) {
blocks[numJoinPreds++] = thenJoinPred;
}
if (elseJoinPred) {
blocks[numJoinPreds++] = elseJoinPred;
}
mozilla::Array<MBasicBlock*, 2> blocks;
size_t numJoinPreds = 0;
if (thenJoinPred) {
blocks[numJoinPreds++] = thenJoinPred;
}
if (elseJoinPred) {
blocks[numJoinPreds++] = elseJoinPred;
}
if (numJoinPreds == 0) {
*def = nullptr;
return true;
}
if (numJoinPreds == 0) {
return true;
}
MBasicBlock* join;
if (!goToNewBlock(blocks[0], &join)) {
return false;
}
for (size_t i = 1; i < numJoinPreds; ++i) {
if (!goToExistingBlock(blocks[i], join)) {
MBasicBlock* join;
if (!goToNewBlock(blocks[0], &join)) {
return false;
}
for (size_t i = 1; i < numJoinPreds; ++i) {
if (!goToExistingBlock(blocks[i], join)) {
return false;
}
}
curBlock_ = join;
*def = popDefIfPushed();
}
curBlock_ = join;
return popPushedDefs(defs);
return true;
}
bool startBlock() {
@ -1305,10 +1308,10 @@ class FunctionCompiler {
return true;
}
bool finishBlock(DefVector* defs) {
bool finishBlock(MDefinition** def) {
MOZ_ASSERT(blockDepth_);
uint32_t topLabel = --blockDepth_;
return bindBranches(topLabel, defs);
return bindBranches(topLabel, def);
}
bool startLoop(MBasicBlock** loopHeader) {
@ -1398,7 +1401,7 @@ class FunctionCompiler {
}
public:
bool closeLoop(MBasicBlock* loopHeader, DefVector* loopResults) {
bool closeLoop(MBasicBlock* loopHeader, MDefinition** loopResult) {
MOZ_ASSERT(blockDepth_ >= 1);
MOZ_ASSERT(loopDepth_);
@ -1410,6 +1413,7 @@ class FunctionCompiler {
blockPatches_[headerLabel].empty());
blockDepth_--;
loopDepth_--;
*loopResult = nullptr;
return true;
}
@ -1424,7 +1428,7 @@ class FunctionCompiler {
// branches as forward jumps to a single backward jump. This is
// unfortunate but the optimizer is able to fold these into single jumps
// to backedges.
DefVector _;
MDefinition* _;
if (!bindBranches(headerLabel, &_)) {
return false;
}
@ -1433,7 +1437,7 @@ class FunctionCompiler {
if (curBlock_) {
// We're on the loop backedge block, created by bindBranches.
for (size_t i = 0, n = numPushed(curBlock_); i != n; i++) {
if (hasPushed(curBlock_)) {
curBlock_->pop();
}
@ -1458,7 +1462,8 @@ class FunctionCompiler {
}
blockDepth_ -= 1;
return inDeadCode() || popPushedDefs(loopResults);
*loopResult = inDeadCode() ? nullptr : popDefIfPushed();
return true;
}
bool addControlFlowPatch(MControlInstruction* ins, uint32_t relative,
@ -1474,7 +1479,7 @@ class FunctionCompiler {
return blockPatches_[absolute].append(ControlFlowPatch(ins, index));
}
bool br(uint32_t relativeDepth, const DefVector& values) {
bool br(uint32_t relativeDepth, MDefinition* maybeValue) {
if (inDeadCode()) {
return true;
}
@ -1484,14 +1489,14 @@ class FunctionCompiler {
return false;
}
pushDefs(values);
pushDef(maybeValue);
curBlock_->end(jump);
curBlock_ = nullptr;
return true;
}
bool brIf(uint32_t relativeDepth, const DefVector& values,
bool brIf(uint32_t relativeDepth, MDefinition* maybeValue,
MDefinition* condition) {
if (inDeadCode()) {
return true;
@ -1507,7 +1512,7 @@ class FunctionCompiler {
return false;
}
pushDefs(values);
pushDef(maybeValue);
curBlock_->end(test);
curBlock_ = joinBlock;
@ -1515,7 +1520,7 @@ class FunctionCompiler {
}
bool brTable(MDefinition* operand, uint32_t defaultDepth,
const Uint32Vector& depths, const DefVector& values) {
const Uint32Vector& depths, MDefinition* maybeValue) {
if (inDeadCode()) {
return true;
}
@ -1568,7 +1573,7 @@ class FunctionCompiler {
}
}
pushDefs(values);
pushDef(maybeValue);
curBlock_->end(table);
curBlock_ = nullptr;
@ -1616,9 +1621,10 @@ class FunctionCompiler {
return next->addPredecessor(alloc(), prev);
}
bool bindBranches(uint32_t absolute, DefVector* defs) {
bool bindBranches(uint32_t absolute, MDefinition** def) {
if (absolute >= blockPatches_.length() || blockPatches_[absolute].empty()) {
return inDeadCode() || popPushedDefs(defs);
*def = inDeadCode() ? nullptr : popDefIfPushed();
return true;
}
ControlFlowPatchVector& patches = blockPatches_[absolute];
@ -1658,9 +1664,7 @@ class FunctionCompiler {
curBlock_ = join;
if (!popPushedDefs(defs)) {
return false;
}
*def = popDefIfPushed();
patches.clear();
return true;
@ -1785,13 +1789,15 @@ static bool EmitIf(FunctionCompiler& f) {
}
static bool EmitElse(FunctionCompiler& f) {
ResultType thenType;
DefVector thenValues;
if (!f.iter().readElse(&thenType, &thenValues)) {
ExprType thenType;
MDefinition* thenValue;
if (!f.iter().readElse(&thenType, &thenValue)) {
return false;
}
f.pushDefs(thenValues);
if (!IsVoid(thenType)) {
f.pushDef(thenValue);
}
if (!f.switchToElse(f.iter().controlItem(), &f.iter().controlItem())) {
return false;
@ -1802,33 +1808,40 @@ static bool EmitElse(FunctionCompiler& f) {
static bool EmitEnd(FunctionCompiler& f) {
LabelKind kind;
ResultType type;
DefVector preJoinDefs;
if (!f.iter().readEnd(&kind, &type, &preJoinDefs)) {
ExprType type;
MDefinition* value;
if (!f.iter().readEnd(&kind, &type, &value)) {
return false;
}
MBasicBlock* block = f.iter().controlItem();
f.iter().popEnd();
f.pushDefs(preJoinDefs);
if (!IsVoid(type)) {
f.pushDef(value);
}
DefVector postJoinDefs;
MDefinition* def = nullptr;
switch (kind) {
case LabelKind::Body:
MOZ_ASSERT(f.iter().controlStackEmpty());
if (!f.finishBlock(&postJoinDefs)) {
if (!f.finishBlock(&def)) {
return false;
}
f.returnValues(postJoinDefs);
if (f.inDeadCode() || IsVoid(type)) {
f.returnVoid();
} else {
f.returnExpr(def);
}
return f.iter().readFunctionEnd(f.iter().end());
case LabelKind::Block:
if (!f.finishBlock(&postJoinDefs)) {
if (!f.finishBlock(&def)) {
return false;
}
break;
case LabelKind::Loop:
if (!f.closeLoop(block, &postJoinDefs)) {
if (!f.closeLoop(block, &def)) {
return false;
}
break;
@ -1839,54 +1852,76 @@ static bool EmitEnd(FunctionCompiler& f) {
return false;
}
if (!f.joinIfElse(block, &postJoinDefs)) {
if (!f.joinIfElse(block, &def)) {
return false;
}
break;
case LabelKind::Else:
if (!f.joinIfElse(block, &postJoinDefs)) {
if (!f.joinIfElse(block, &def)) {
return false;
}
break;
}
MOZ_ASSERT_IF(!f.inDeadCode(), postJoinDefs.length() == type.length());
f.iter().setResults(postJoinDefs.length(), postJoinDefs);
if (!IsVoid(type)) {
MOZ_ASSERT_IF(!f.inDeadCode(), def);
f.iter().setResult(def);
}
return true;
}
static bool EmitBr(FunctionCompiler& f) {
uint32_t relativeDepth;
ResultType type;
DefVector values;
if (!f.iter().readBr(&relativeDepth, &type, &values)) {
ExprType type;
MDefinition* value;
if (!f.iter().readBr(&relativeDepth, &type, &value)) {
return false;
}
return f.br(relativeDepth, values);
if (IsVoid(type)) {
if (!f.br(relativeDepth, nullptr)) {
return false;
}
} else {
if (!f.br(relativeDepth, value)) {
return false;
}
}
return true;
}
static bool EmitBrIf(FunctionCompiler& f) {
uint32_t relativeDepth;
ResultType type;
DefVector values;
ExprType type;
MDefinition* value;
MDefinition* condition;
if (!f.iter().readBrIf(&relativeDepth, &type, &values, &condition)) {
if (!f.iter().readBrIf(&relativeDepth, &type, &value, &condition)) {
return false;
}
return f.brIf(relativeDepth, values, condition);
if (IsVoid(type)) {
if (!f.brIf(relativeDepth, nullptr, condition)) {
return false;
}
} else {
if (!f.brIf(relativeDepth, value, condition)) {
return false;
}
}
return true;
}
static bool EmitBrTable(FunctionCompiler& f) {
Uint32Vector depths;
uint32_t defaultDepth;
ResultType branchValueType;
DefVector branchValues;
ExprType branchValueType;
MDefinition* branchValue;
MDefinition* index;
if (!f.iter().readBrTable(&depths, &defaultDepth, &branchValueType,
&branchValues, &index)) {
&branchValue, &index)) {
return false;
}
@ -1902,19 +1937,24 @@ static bool EmitBrTable(FunctionCompiler& f) {
}
if (allSameDepth) {
return f.br(defaultDepth, branchValues);
return f.br(defaultDepth, branchValue);
}
return f.brTable(index, defaultDepth, depths, branchValues);
return f.brTable(index, defaultDepth, depths, branchValue);
}
static bool EmitReturn(FunctionCompiler& f) {
DefVector values;
if (!f.iter().readReturn(&values)) {
MDefinition* value;
if (!f.iter().readReturn(&value)) {
return false;
}
f.returnValues(values);
if (f.funcType().results().length() == 0) {
f.returnVoid();
return true;
}
f.returnExpr(value);
return true;
}
@ -1927,6 +1967,8 @@ static bool EmitUnreachable(FunctionCompiler& f) {
return true;
}
typedef IonOpIter::ValueVector DefVector;
static bool EmitCallArgs(FunctionCompiler& f, const FuncType& funcType,
const DefVector& args, CallCompileState* call) {
for (size_t i = 0, n = funcType.args().length(); i < n; ++i) {
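The hasPushed/pushDef/popDefIfPushed trio restored above keeps the invariant that a block contributes at most one definition to its join. A reduced toy model of that bookkeeping (ToyBlock is illustrative only, with a plain int standing in for MDefinition*):

#include <cassert>
#include <cstddef>
#include <vector>

struct ToyBlock {
  std::vector<int> stack;  // stands in for MBasicBlock's value stack
  size_t firstStackSlot = 0;

  bool hasPushed() const {
    size_t numPushed = stack.size() - firstStackSlot;
    assert(numPushed == 0 || numPushed == 1);  // the invariant asserted above
    return numPushed != 0;
  }
  void pushDef(int def) {
    assert(!hasPushed());
    stack.push_back(def);
  }
  int popDefIfPushed() {  // -1 plays the role of "nothing was pushed"
    if (!hasPushed()) return -1;
    int def = stack.back();
    stack.pop_back();
    return def;
  }
};

int main() {
  ToyBlock b;
  b.pushDef(42);                     // the block's single result flows to the join
  assert(b.popDefIfPushed() == 42);  // the join pops it back
  assert(b.popDefIfPushed() == -1);  // a void block pushes nothing
  return 0;
}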

File diff suppressed because it is too large

View File

@ -6178,23 +6178,12 @@ static bool EncodeExprList(Encoder& e, const AstExprVector& v) {
return true;
}
static bool EncodeBlockType(Encoder& e, AstExprType& t) {
ExprType type = t.type();
static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
MOZ_ASSERT(size_t(type.code()) < size_t(TypeCode::Limit));
if (type.isRef()) {
return e.writeFixedU8(uint8_t(ExprType::Ref)) &&
e.writeVarU32(type.refTypeIndex());
}
return e.writeFixedU8(uint8_t(type.code()));
}
static bool EncodeBlock(Encoder& e, AstBlock& b) {
if (!e.writeOp(b.op())) {
return false;
}
if (!EncodeBlockType(e, b.type())) {
if (!e.writeBlockType(b.type().type())) {
return false;
}
@ -6379,7 +6368,7 @@ static bool EncodeIf(Encoder& e, AstIf& i) {
return false;
}
if (!EncodeBlockType(e, i.type())) {
if (!e.writeBlockType(i.type().type())) {
return false;
}

View File

@ -227,6 +227,21 @@ uint8_t* FuncType::serialize(uint8_t* cursor) const {
return cursor;
}
namespace js {
namespace wasm {
// ExprType is not POD while ReadScalar requires POD, so specialize.
template <>
inline const uint8_t* ReadScalar<ExprType>(const uint8_t* src, ExprType* dst) {
static_assert(sizeof(PackedTypeCode) == sizeof(ExprType),
"ExprType must carry only a PackedTypeCode");
memcpy(dst->packedPtr(), src, sizeof(PackedTypeCode));
return src + sizeof(*dst);
}
} // namespace wasm
} // namespace js
const uint8_t* FuncType::deserialize(const uint8_t* cursor) {
cursor = DeserializePodVector(cursor, &results_);
if (!cursor) {
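The specialization above is needed because ExprType wraps a PackedTypeCode and is not POD. The generic ReadScalar it overrides is presumably a memcpy-and-advance helper of roughly this shape (an assumed sketch, named ReadScalarSketch here, not copied from the tree):

#include <cstdint>
#include <cstring>
#include <type_traits>

template <class T>
inline const uint8_t* ReadScalarSketch(const uint8_t* src, T* dst) {
  static_assert(std::is_trivially_copyable<T>::value, "POD-style read only");
  std::memcpy(dst, src, sizeof(*dst));
  return src + sizeof(*dst);
}

int main() {
  const uint8_t bytes[4] = {0x2a, 0, 0, 0};
  uint32_t v = 0;
  ReadScalarSketch(bytes, &v);  // v == 42 on a little-endian host
  return 0;
}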

View File

@ -213,28 +213,26 @@ static_assert(std::is_pod<PackedTypeCode>::value,
"must be POD to be simply serialized/deserialized");
const uint32_t NoTypeCode = 0xFF; // Only use these
const uint32_t NoRefTypeIndex = 0x3FFFFF; // with PackedTypeCode
const uint32_t NoRefTypeIndex = 0xFFFFFF; // with PackedTypeCode
static inline PackedTypeCode InvalidPackedTypeCode() {
return PackedTypeCode((NoRefTypeIndex << 8) | NoTypeCode);
}
static inline PackedTypeCode PackTypeCode(TypeCode tc) {
MOZ_ASSERT(uint32_t(tc) <= 0xFF);
MOZ_ASSERT(tc != TypeCode::Ref);
return PackedTypeCode((NoRefTypeIndex << 8) | uint32_t(tc));
}
static inline PackedTypeCode PackTypeCode(TypeCode tc, uint32_t refTypeIndex) {
MOZ_ASSERT(uint32_t(tc) <= 0xFF);
MOZ_ASSERT_IF(tc != TypeCode::Ref, refTypeIndex == NoRefTypeIndex);
MOZ_ASSERT_IF(tc == TypeCode::Ref, refTypeIndex <= MaxTypes);
// A PackedTypeCode should be representable in a single word, so in the
// smallest case, 32 bits. However sometimes 2 bits of the word may be taken
// by a pointer tag; for that reason, limit to 30 bits; and then there's the
// 8-bit typecode, so 22 bits left for the type index.
static_assert(MaxTypes < (1 << (30 - 8)), "enough bits");
static_assert(MaxTypes < (1 << (32 - 8)), "enough bits");
return PackedTypeCode((refTypeIndex << 8) | uint32_t(tc));
}
static inline PackedTypeCode PackTypeCode(TypeCode tc) {
return PackTypeCode(tc, NoRefTypeIndex);
}
static inline PackedTypeCode InvalidPackedTypeCode() {
return PackedTypeCode(NoTypeCode);
}
static inline PackedTypeCode PackedTypeCodeFromBits(uint32_t bits) {
return PackTypeCode(TypeCode(bits & 255), bits >> 8);
}
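For orientation, a toy version of the layout that PackTypeCode and PackedTypeCodeFromBits implement above: the low 8 bits carry the type code and the remaining bits carry the reference type index, with an all-ones sentinel when there is none. ToyTypeCode and Pack are illustrative names, and the numeric value chosen for Ref below is a placeholder, not the real TypeCode::Ref.

#include <cassert>
#include <cstdint>

enum class ToyTypeCode : uint8_t { I32 = 0x7f, Ref = 0x6e };  // Ref value is made up

constexpr uint32_t kNoRefTypeIndex = 0xFFFFFF;

constexpr uint32_t Pack(ToyTypeCode tc, uint32_t refTypeIndex = kNoRefTypeIndex) {
  return (refTypeIndex << 8) | uint32_t(tc);  // index in the high bits, code in the low byte
}

int main() {
  uint32_t packed = Pack(ToyTypeCode::Ref, /*refTypeIndex=*/7);
  assert(ToyTypeCode(packed & 0xff) == ToyTypeCode::Ref);  // unpack the code
  assert((packed >> 8) == 7);                              // unpack the index
  assert((Pack(ToyTypeCode::I32) >> 8) == kNoRefTypeIndex);
  return 0;
}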

View File

@ -444,24 +444,15 @@ bool wasm::DecodeValidatedLocalEntries(Decoder& d, ValTypeVector* locals) {
// Function body validation.
class NothingVector {
Nothing unused_;
public:
bool resize(size_t length) { return true; }
Nothing& operator[](size_t) { return unused_; }
Nothing& back() { return unused_; }
};
struct ValidatingPolicy {
typedef Nothing Value;
typedef NothingVector ValueVector;
typedef Nothing ControlItem;
};
typedef OpIter<ValidatingPolicy> ValidatingOpIter;
static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
// FIXME(1401675): Replace with BlockType.
uint32_t funcIndex,
const ValTypeVector& locals,
const uint8_t* bodyEnd, Decoder* d) {
@ -482,13 +473,12 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
}
Nothing nothing;
NothingVector nothings;
ResultType unusedType;
switch (op.b0) {
case uint16_t(Op::End): {
LabelKind unusedKind;
if (!iter.readEnd(&unusedKind, &unusedType, &nothings)) {
ExprType unusedType;
if (!iter.readEnd(&unusedKind, &unusedType, &nothing)) {
return false;
}
iter.popEnd();
@ -503,12 +493,12 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
CHECK(iter.readDrop());
case uint16_t(Op::Call): {
uint32_t unusedIndex;
NothingVector unusedArgs;
ValidatingOpIter::ValueVector unusedArgs;
CHECK(iter.readCall(&unusedIndex, &unusedArgs));
}
case uint16_t(Op::CallIndirect): {
uint32_t unusedIndex, unusedIndex2;
NothingVector unusedArgs;
ValidatingOpIter::ValueVector unusedArgs;
CHECK(iter.readCallIndirect(&unusedIndex, &unusedIndex2, &nothing,
&unusedArgs));
}
@ -583,8 +573,10 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
CHECK(iter.readLoop());
case uint16_t(Op::If):
CHECK(iter.readIf(&nothing));
case uint16_t(Op::Else):
CHECK(iter.readElse(&unusedType, &nothings));
case uint16_t(Op::Else): {
ExprType type;
CHECK(iter.readElse(&type, &nothing));
}
case uint16_t(Op::I32Clz):
case uint16_t(Op::I32Ctz):
case uint16_t(Op::I32Popcnt):
@ -823,20 +815,23 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
CHECK(iter.readMemorySize());
case uint16_t(Op::Br): {
uint32_t unusedDepth;
CHECK(iter.readBr(&unusedDepth, &unusedType, &nothings));
ExprType unusedType;
CHECK(iter.readBr(&unusedDepth, &unusedType, &nothing));
}
case uint16_t(Op::BrIf): {
uint32_t unusedDepth;
CHECK(iter.readBrIf(&unusedDepth, &unusedType, &nothings, &nothing));
ExprType unusedType;
CHECK(iter.readBrIf(&unusedDepth, &unusedType, &nothing, &nothing));
}
case uint16_t(Op::BrTable): {
Uint32Vector unusedDepths;
uint32_t unusedDefault;
ExprType unusedType;
CHECK(iter.readBrTable(&unusedDepths, &unusedDefault, &unusedType,
&nothings, &nothing));
&nothing, &nothing));
}
case uint16_t(Op::Return):
CHECK(iter.readReturn(&nothings));
CHECK(iter.readReturn(&nothing));
case uint16_t(Op::Unreachable):
CHECK(iter.readUnreachable());
case uint16_t(Op::MiscPrefix): {
@ -963,7 +958,7 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
return iter.unrecognizedOpcode(&op);
}
uint32_t unusedUint;
NothingVector unusedArgs;
ValidatingOpIter::ValueVector unusedArgs;
CHECK(iter.readStructNew(&unusedUint, &unusedArgs));
}
case uint32_t(MiscOp::StructGet): {
@ -1215,8 +1210,10 @@ static bool DecodeFunctionBodyExprs(const ModuleEnvironment& env,
bool wasm::ValidateFunctionBody(const ModuleEnvironment& env,
uint32_t funcIndex, uint32_t bodySize,
Decoder& d) {
const FuncType& funcType = *env.funcTypes[funcIndex];
ValTypeVector locals;
if (!locals.appendAll(env.funcTypes[funcIndex]->args())) {
if (!locals.appendAll(funcType.args())) {
return false;
}

View File

@ -396,6 +396,15 @@ class Encoder {
}
return writeFixedU8(uint8_t(type.code()));
}
MOZ_MUST_USE bool writeBlockType(ExprType type) {
static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
MOZ_ASSERT(size_t(type.code()) < size_t(TypeCode::Limit));
if (type.isRef()) {
return writeFixedU8(uint8_t(ExprType::Ref)) &&
writeVarU32(type.refTypeIndex());
}
return writeFixedU8(uint8_t(type.code()));
}
MOZ_MUST_USE bool writeOp(Op op) {
static_assert(size_t(Op::Limit) == 256, "fits");
MOZ_ASSERT(size_t(op) < size_t(Op::Limit));
@ -617,16 +626,6 @@ class Decoder {
const uint8_t* begin() const { return beg_; }
const uint8_t* end() const { return end_; }
// Peek at the next byte, if it exists, without advancing the position.
bool peekByte(uint8_t* byte) {
if (done()) {
return false;
}
*byte = *cur_;
return true;
}
// Fixed-size encoding operations simply copy the literal bytes (without
// attempting to align).
@ -709,6 +708,20 @@ class Decoder {
}
return true;
}
MOZ_MUST_USE bool readBlockType(uint8_t* code, uint32_t* refTypeIndex) {
static_assert(size_t(TypeCode::Limit) <= UINT8_MAX, "fits");
if (!readFixedU8(code)) {
return false;
}
if (*code == uint8_t(TypeCode::Ref)) {
if (!readVarU32(refTypeIndex)) {
return false;
}
} else {
*refTypeIndex = NoRefTypeIndex;
}
return true;
}
MOZ_MUST_USE bool readOp(OpBytes* op) {
static_assert(size_t(Op::Limit) == 256, "fits");
uint8_t u8;
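writeBlockType and readBlockType above give a block type one of two wire shapes: a single code byte, or the Ref code byte followed by a varU32 type index. Below is a standalone toy of that shape; kRefCode is a placeholder (the real value of TypeCode::Ref is not shown in this diff), and the toy writeVarU32 is ordinary unsigned LEB128. The three-byte index for 1000000 is the same quantity the invalidTooBigRefType test near the top of this commit encodes with varU32(1000000).

#include <cassert>
#include <cstdint>
#include <vector>

constexpr uint8_t kRefCode = 0x6e;   // placeholder for uint8_t(TypeCode::Ref)
constexpr uint8_t kVoidCode = 0x40;  // BlockVoid, from the constants file above

static void writeVarU32(std::vector<uint8_t>& out, uint32_t v) {
  do {
    uint8_t byte = v & 0x7f;
    v >>= 7;
    if (v) byte |= 0x80;  // continuation bit
    out.push_back(byte);
  } while (v);
}

int main() {
  std::vector<uint8_t> bytes;
  bytes.push_back(kVoidCode);         // ()->() block type: one byte
  bytes.push_back(kRefCode);          // (ref T) block type: code byte ...
  writeVarU32(bytes, 1000000);        // ... followed by the type index
  assert(bytes.size() == 1 + 1 + 3);  // 1000000 needs three LEB128 bytes
  return 0;
}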