Bug 1427710 part 1 - Make TokenKind an enum class. r=jandem

Rofael Aleezada 2018-01-12 15:16:12 +01:00
parent f1dcc7b8db
commit ccff779855
7 changed files with 642 additions and 618 deletions

File diff suppressed because it is too large.

@ -1158,7 +1158,7 @@ class GeneralParser
bool checkBindingIdentifier(PropertyName* ident,
uint32_t offset,
YieldHandling yieldHandling,
TokenKind hint = TOK_LIMIT);
TokenKind hint = TokenKind::TOK_LIMIT);
PropertyName* labelOrIdentifierReference(YieldHandling yieldHandling);
@ -1232,7 +1232,8 @@ class GeneralParser
PropertyName* bindingIdentifier(YieldHandling yieldHandling);
bool checkLabelOrIdentifierReference(PropertyName* ident, uint32_t offset,
YieldHandling yieldHandling, TokenKind hint = TOK_LIMIT);
YieldHandling yieldHandling,
TokenKind hint = TokenKind::TOK_LIMIT);
Node statementList(YieldHandling yieldHandling);

@ -10,71 +10,71 @@
#define vm_ReservedWords_h
#define FOR_EACH_JAVASCRIPT_RESERVED_WORD(macro) \
macro(false, false_, TOK_FALSE) \
macro(true, true_, TOK_TRUE) \
macro(null, null, TOK_NULL) \
macro(false, false_, TokenKind::TOK_FALSE) \
macro(true, true_, TokenKind::TOK_TRUE) \
macro(null, null, TokenKind::TOK_NULL) \
\
/* Keywords. */ \
macro(break, break_, TOK_BREAK) \
macro(case, case_, TOK_CASE) \
macro(catch, catch_, TOK_CATCH) \
macro(const, const_, TOK_CONST) \
macro(continue, continue_, TOK_CONTINUE) \
macro(debugger, debugger, TOK_DEBUGGER) \
macro(default, default_, TOK_DEFAULT) \
macro(delete, delete_, TOK_DELETE) \
macro(do, do_, TOK_DO) \
macro(else, else_, TOK_ELSE) \
macro(finally, finally_, TOK_FINALLY) \
macro(for, for_, TOK_FOR) \
macro(function, function, TOK_FUNCTION) \
macro(if, if_, TOK_IF) \
macro(in, in, TOK_IN) \
macro(instanceof, instanceof, TOK_INSTANCEOF) \
macro(new, new_, TOK_NEW) \
macro(return, return_, TOK_RETURN) \
macro(switch, switch_, TOK_SWITCH) \
macro(this, this_, TOK_THIS) \
macro(throw, throw_, TOK_THROW) \
macro(try, try_, TOK_TRY) \
macro(typeof, typeof_, TOK_TYPEOF) \
macro(var, var, TOK_VAR) \
macro(void, void_, TOK_VOID) \
macro(while, while_, TOK_WHILE) \
macro(with, with, TOK_WITH) \
macro(import, import, TOK_IMPORT) \
macro(export, export_, TOK_EXPORT) \
macro(class, class_, TOK_CLASS) \
macro(extends, extends, TOK_EXTENDS) \
macro(super, super, TOK_SUPER) \
macro(break, break_, TokenKind::TOK_BREAK) \
macro(case, case_, TokenKind::TOK_CASE) \
macro(catch, catch_, TokenKind::TOK_CATCH) \
macro(const, const_, TokenKind::TOK_CONST) \
macro(continue, continue_, TokenKind::TOK_CONTINUE) \
macro(debugger, debugger, TokenKind::TOK_DEBUGGER) \
macro(default, default_, TokenKind::TOK_DEFAULT) \
macro(delete, delete_, TokenKind::TOK_DELETE) \
macro(do, do_, TokenKind::TOK_DO) \
macro(else, else_, TokenKind::TOK_ELSE) \
macro(finally, finally_, TokenKind::TOK_FINALLY) \
macro(for, for_, TokenKind::TOK_FOR) \
macro(function, function, TokenKind::TOK_FUNCTION) \
macro(if, if_, TokenKind::TOK_IF) \
macro(in, in, TokenKind::TOK_IN) \
macro(instanceof, instanceof, TokenKind::TOK_INSTANCEOF) \
macro(new, new_, TokenKind::TOK_NEW) \
macro(return, return_, TokenKind::TOK_RETURN) \
macro(switch, switch_, TokenKind::TOK_SWITCH) \
macro(this, this_, TokenKind::TOK_THIS) \
macro(throw, throw_, TokenKind::TOK_THROW) \
macro(try, try_, TokenKind::TOK_TRY) \
macro(typeof, typeof_, TokenKind::TOK_TYPEOF) \
macro(var, var, TokenKind::TOK_VAR) \
macro(void, void_, TokenKind::TOK_VOID) \
macro(while, while_, TokenKind::TOK_WHILE) \
macro(with, with, TokenKind::TOK_WITH) \
macro(import, import, TokenKind::TOK_IMPORT) \
macro(export, export_, TokenKind::TOK_EXPORT) \
macro(class, class_, TokenKind::TOK_CLASS) \
macro(extends, extends, TokenKind::TOK_EXTENDS) \
macro(super, super, TokenKind::TOK_SUPER) \
\
/* Future reserved words. */ \
macro(enum, enum_, TOK_ENUM) \
macro(enum, enum_, TokenKind::TOK_ENUM) \
\
/* Future reserved words, but only in strict mode. */ \
macro(implements, implements, TOK_IMPLEMENTS) \
macro(interface, interface, TOK_INTERFACE) \
macro(package, package, TOK_PACKAGE) \
macro(private, private_, TOK_PRIVATE) \
macro(protected, protected_, TOK_PROTECTED) \
macro(public, public_, TOK_PUBLIC) \
macro(implements, implements, TokenKind::TOK_IMPLEMENTS) \
macro(interface, interface, TokenKind::TOK_INTERFACE) \
macro(package, package, TokenKind::TOK_PACKAGE) \
macro(private, private_, TokenKind::TOK_PRIVATE) \
macro(protected, protected_, TokenKind::TOK_PROTECTED) \
macro(public, public_, TokenKind::TOK_PUBLIC) \
\
/* Contextual keywords. */ \
macro(as, as, TOK_AS) \
macro(async, async, TOK_ASYNC) \
macro(await, await, TOK_AWAIT) \
macro(from, from, TOK_FROM) \
macro(get, get, TOK_GET) \
macro(let, let, TOK_LET) \
macro(of, of, TOK_OF) \
macro(set, set, TOK_SET) \
macro(static, static_, TOK_STATIC) \
macro(target, target, TOK_TARGET) \
macro(as, as, TokenKind::TOK_AS) \
macro(async, async, TokenKind::TOK_ASYNC) \
macro(await, await, TokenKind::TOK_AWAIT) \
macro(from, from, TokenKind::TOK_FROM) \
macro(get, get, TokenKind::TOK_GET) \
macro(let, let, TokenKind::TOK_LET) \
macro(of, of, TokenKind::TOK_OF) \
macro(set, set, TokenKind::TOK_SET) \
macro(static, static_, TokenKind::TOK_STATIC) \
macro(target, target, TokenKind::TOK_TARGET) \
/* \
* Yield is a token inside function*. Outside of a function*, it is a \
* future reserved word in strict mode, but a keyword in JS1.7 even \
* when strict. Punt logic to parser. \
*/ \
macro(yield, yield, TOK_YIELD)
macro(yield, yield, TokenKind::TOK_YIELD)
#endif /* vm_ReservedWords_h */
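
Note: the TokenKind:: qualifier on the third macro argument above is needed because consumers paste that argument verbatim into contexts such as case labels (see the EMIT_CASE macros in TokenStream.cpp further down), where an unqualified TOK_FALSE no longer names anything once the enum is scoped. A trimmed, hypothetical consumer illustrating the idea (demo names only, not the real list):

    #include <cstdio>

    enum class DemoTokenKind { TOK_FALSE, TOK_TRUE, TOK_NULL };

    // Three-entry stand-in for FOR_EACH_JAVASCRIPT_RESERVED_WORD: the word, a
    // C++-safe identifier, and the fully qualified token kind.
    #define FOR_EACH_DEMO_RESERVED_WORD(macro)          \
        macro(false, false_, DemoTokenKind::TOK_FALSE)  \
        macro(true,  true_,  DemoTokenKind::TOK_TRUE)   \
        macro(null,  null,   DemoTokenKind::TOK_NULL)

    // The case label is the pasted third argument, so it must already be scoped.
    #define EMIT_CASE(word, name, type) case type: return #word;

    const char* DemoReservedWordToCharZ(DemoTokenKind tt) {
        switch (tt) {
            FOR_EACH_DEMO_RESERVED_WORD(EMIT_CASE)
        }
        return nullptr;
    }
    #undef EMIT_CASE

    int main() {
        std::puts(DemoReservedWordToCharZ(DemoTokenKind::TOK_NULL));  // prints "null"
    }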

@ -235,7 +235,7 @@ namespace frontend {
// Values of this type are used to index into arrays such as isExprEnding[],
// so the first value must be zero.
enum TokenKind {
enum class TokenKind {
#define EMIT_ENUM(name, desc) TOK_##name,
#define EMIT_ENUM_RANGE(name, value) TOK_##name = TOK_##value,
FOR_EACH_TOKEN_KIND_WITH_RANGE(EMIT_ENUM, EMIT_ENUM_RANGE)
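
Note: the EMIT_ENUM / EMIT_ENUM_RANGE macro bodies themselves are untouched; the patch only changes how the generated enumerators are spelled at use sites. A minimal hypothetical expansion, to make that concrete:

    // Hypothetical three-token list standing in for FOR_EACH_TOKEN_KIND.
    #define FOR_EACH_DEMO_TOKEN(macro) \
        macro(SEMI, ";")               \
        macro(COMMA, ",")              \
        macro(NAME, "identifier")

    enum class DemoTokenKind {
    #define EMIT_ENUM(name, desc) TOK_##name,
        FOR_EACH_DEMO_TOKEN(EMIT_ENUM)
    #undef EMIT_ENUM
        TOK_LIMIT  // sentinel; the first generated enumerator is still zero
    };

    // Inside the enum the generated names look the same as before; outside,
    // every use now needs the scope, e.g. DemoTokenKind::TOK_SEMI.
    static const DemoTokenKind kFirstDemoKind = DemoTokenKind::TOK_SEMI;
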
@ -247,63 +247,63 @@ enum TokenKind {
inline bool
TokenKindIsBinaryOp(TokenKind tt)
{
return TOK_BINOP_FIRST <= tt && tt <= TOK_BINOP_LAST;
return TokenKind::TOK_BINOP_FIRST <= tt && tt <= TokenKind::TOK_BINOP_LAST;
}
inline bool
TokenKindIsEquality(TokenKind tt)
{
return TOK_EQUALITY_START <= tt && tt <= TOK_EQUALITY_LAST;
return TokenKind::TOK_EQUALITY_START <= tt && tt <= TokenKind::TOK_EQUALITY_LAST;
}
inline bool
TokenKindIsRelational(TokenKind tt)
{
return TOK_RELOP_START <= tt && tt <= TOK_RELOP_LAST;
return TokenKind::TOK_RELOP_START <= tt && tt <= TokenKind::TOK_RELOP_LAST;
}
inline bool
TokenKindIsShift(TokenKind tt)
{
return TOK_SHIFTOP_START <= tt && tt <= TOK_SHIFTOP_LAST;
return TokenKind::TOK_SHIFTOP_START <= tt && tt <= TokenKind::TOK_SHIFTOP_LAST;
}
inline bool
TokenKindIsAssignment(TokenKind tt)
{
return TOK_ASSIGNMENT_START <= tt && tt <= TOK_ASSIGNMENT_LAST;
return TokenKind::TOK_ASSIGNMENT_START <= tt && tt <= TokenKind::TOK_ASSIGNMENT_LAST;
}
inline MOZ_MUST_USE bool
TokenKindIsKeyword(TokenKind tt)
{
return (TOK_KEYWORD_FIRST <= tt && tt <= TOK_KEYWORD_LAST) ||
(TOK_KEYWORD_BINOP_FIRST <= tt && tt <= TOK_KEYWORD_BINOP_LAST) ||
(TOK_KEYWORD_UNOP_FIRST <= tt && tt <= TOK_KEYWORD_UNOP_LAST);
return (TokenKind::TOK_KEYWORD_FIRST <= tt && tt <= TokenKind::TOK_KEYWORD_LAST) ||
(TokenKind::TOK_KEYWORD_BINOP_FIRST <= tt && tt <= TokenKind::TOK_KEYWORD_BINOP_LAST) ||
(TokenKind::TOK_KEYWORD_UNOP_FIRST <= tt && tt <= TokenKind::TOK_KEYWORD_UNOP_LAST);
}
inline MOZ_MUST_USE bool
TokenKindIsContextualKeyword(TokenKind tt)
{
return TOK_CONTEXTUAL_KEYWORD_FIRST <= tt && tt <= TOK_CONTEXTUAL_KEYWORD_LAST;
return TokenKind::TOK_CONTEXTUAL_KEYWORD_FIRST <= tt && tt <= TokenKind::TOK_CONTEXTUAL_KEYWORD_LAST;
}
inline MOZ_MUST_USE bool
TokenKindIsFutureReservedWord(TokenKind tt)
{
return TOK_FUTURE_RESERVED_KEYWORD_FIRST <= tt && tt <= TOK_FUTURE_RESERVED_KEYWORD_LAST;
return TokenKind::TOK_FUTURE_RESERVED_KEYWORD_FIRST <= tt && tt <= TokenKind::TOK_FUTURE_RESERVED_KEYWORD_LAST;
}
inline MOZ_MUST_USE bool
TokenKindIsStrictReservedWord(TokenKind tt)
{
return TOK_STRICT_RESERVED_KEYWORD_FIRST <= tt && tt <= TOK_STRICT_RESERVED_KEYWORD_LAST;
return TokenKind::TOK_STRICT_RESERVED_KEYWORD_FIRST <= tt && tt <= TokenKind::TOK_STRICT_RESERVED_KEYWORD_LAST;
}
inline MOZ_MUST_USE bool
TokenKindIsReservedWordLiteral(TokenKind tt)
{
return TOK_RESERVED_WORD_LITERAL_FIRST <= tt && tt <= TOK_RESERVED_WORD_LITERAL_LAST;
return TokenKind::TOK_RESERVED_WORD_LITERAL_FIRST <= tt && tt <= TokenKind::TOK_RESERVED_WORD_LITERAL_LAST;
}
inline MOZ_MUST_USE bool
@ -317,7 +317,7 @@ TokenKindIsReservedWord(TokenKind tt)
inline MOZ_MUST_USE bool
TokenKindIsPossibleIdentifier(TokenKind tt)
{
return tt == TOK_NAME ||
return tt == TokenKind::TOK_NAME ||
TokenKindIsContextualKeyword(tt) ||
TokenKindIsStrictReservedWord(tt);
}
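
Note: the range predicates in this file survive the change with nothing more than the added qualification, because C++ defines the relational operators for two operands of the same scoped enumeration type; no size_t() casts are needed for comparisons. A standalone sketch of the same shape, with made-up names:

    enum class DemoTokenKind {
        TOK_ADD,
        TOK_SUB,
        TOK_MUL,
        // EMIT_ENUM_RANGE-style aliases; earlier enumerators may still be
        // referenced unqualified inside the enum-specifier itself.
        TOK_BINOP_FIRST = TOK_ADD,
        TOK_BINOP_LAST = TOK_MUL
    };

    // Same shape as TokenKindIsBinaryOp: <= is defined for same-type scoped enums.
    constexpr bool DemoTokenKindIsBinaryOp(DemoTokenKind tt) {
        return DemoTokenKind::TOK_BINOP_FIRST <= tt && tt <= DemoTokenKind::TOK_BINOP_LAST;
    }

    static_assert(DemoTokenKindIsBinaryOp(DemoTokenKind::TOK_SUB), "SUB is a binary op");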

@ -210,7 +210,7 @@ ReservedWordTokenKind(PropertyName* str)
if (const ReservedWordInfo* rw = FindReservedWord(str))
return rw->tokentype;
return TOK_NAME;
return TokenKind::TOK_NAME;
}
const char*
@ -225,7 +225,7 @@ ReservedWordToCharZ(PropertyName* str)
const char*
ReservedWordToCharZ(TokenKind tt)
{
MOZ_ASSERT(tt != TOK_NAME);
MOZ_ASSERT(tt != TokenKind::TOK_NAME);
switch (tt) {
#define EMIT_CASE(word, name, type) case type: return js_##word##_str;
FOR_EACH_JAVASCRIPT_RESERVED_WORD(EMIT_CASE)
@ -239,7 +239,7 @@ ReservedWordToCharZ(TokenKind tt)
PropertyName*
TokenStreamAnyChars::reservedWordToPropertyName(TokenKind tt) const
{
MOZ_ASSERT(tt != TOK_NAME);
MOZ_ASSERT(tt != TokenKind::TOK_NAME);
switch (tt) {
#define EMIT_CASE(word, name, type) case type: return cx->names().name;
FOR_EACH_JAVASCRIPT_RESERVED_WORD(EMIT_CASE)
@ -426,12 +426,12 @@ TokenStreamAnyChars::TokenStreamAnyChars(JSContext* cx, const ReadOnlyCompileOpt
// See Parser::assignExpr() for an explanation of isExprEnding[].
PodArrayZero(isExprEnding);
isExprEnding[TOK_COMMA] = 1;
isExprEnding[TOK_SEMI] = 1;
isExprEnding[TOK_COLON] = 1;
isExprEnding[TOK_RP] = 1;
isExprEnding[TOK_RB] = 1;
isExprEnding[TOK_RC] = 1;
isExprEnding[size_t(TokenKind::TOK_COMMA)] = 1;
isExprEnding[size_t(TokenKind::TOK_SEMI)] = 1;
isExprEnding[size_t(TokenKind::TOK_COLON)] = 1;
isExprEnding[size_t(TokenKind::TOK_RP)] = 1;
isExprEnding[size_t(TokenKind::TOK_RB)] = 1;
isExprEnding[size_t(TokenKind::TOK_RC)] = 1;
}
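
Note: the size_t() casts added above are the direct cost of the enum-class conversion: an unscoped enumerator converts implicitly to an integer index, a scoped one does not. A hypothetical side-by-side, assuming a one-entry table for brevity:

    #include <cstddef>
    #include <cstdint>

    enum PlainKind        { PLAIN_COMMA, PLAIN_LIMIT };
    enum class ScopedKind { TOK_COMMA,   TOK_LIMIT   };

    uint8_t plainTable[PLAIN_LIMIT];                     // implicit conversion: fine
    uint8_t scopedTable[size_t(ScopedKind::TOK_LIMIT)];  // explicit cast now required

    void markCommaAsExprEnding() {
        plainTable[PLAIN_COMMA] = 1;                     // compiled before the patch
        // scopedTable[ScopedKind::TOK_COMMA] = 1;       // error: no implicit conversion
        scopedTable[size_t(ScopedKind::TOK_COMMA)] = 1;  // the pattern used above
    }
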
template<typename CharT>
@ -796,7 +796,7 @@ TokenStreamAnyChars::fillExcludingContext(ErrorMetadata* err, uint32_t offset)
bool
TokenStreamAnyChars::hasTokenizationStarted() const
{
return isCurrentTokenType(TOK_EOF) && !isEOF();
return isCurrentTokenType(TokenKind::TOK_EOF) && !isEOF();
}
void
@ -1252,9 +1252,9 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::newToken(ptrdiff_t adjust)
static bool
IsTokenSane(Token* tp)
{
// Nb: TOK_EOL should never be used in an actual Token; it should only be
// returned as a TokenKind from peekTokenSameLine().
if (tp->type >= TOK_LIMIT || tp->type == TOK_EOL)
// Nb: TokenKind::TOK_EOL should never be used in an actual Token;
// it should only be returned as a TokenKind from peekTokenSameLine().
if (tp->type >= TokenKind::TOK_LIMIT || tp->type == TokenKind::TOK_EOL)
return false;
if (tp->pos.end < tp->pos.begin)
@ -1356,13 +1356,13 @@ enum FirstCharKind {
// of the tokens seen in practice.
//
// We represent the 'OneChar' kind with any positive value less than
// TOK_LIMIT. This representation lets us associate each one-char token
// char16_t with a TokenKind and thus avoid a subsequent char16_t-to-TokenKind
// conversion.
// TokenKind::TOK_LIMIT. This representation lets us associate
// each one-char token char16_t with a TokenKind and thus avoid
// a subsequent char16_t-to-TokenKind conversion.
OneChar_Min = 0,
OneChar_Max = TOK_LIMIT - 1,
OneChar_Max = size_t(TokenKind::TOK_LIMIT) - 1,
Space = TOK_LIMIT,
Space = size_t(TokenKind::TOK_LIMIT),
Ident,
Dec,
String,
@ -1385,9 +1385,17 @@ enum FirstCharKind {
// Space: 9, 11, 12, 32: '\t', '\v', '\f', ' '
// EOL: 10, 13: '\n', '\r'
//
#define T_COMMA TOK_COMMA
#define T_COLON TOK_COLON
#define T_BITNOT TOK_BITNOT
#define T_COMMA size_t(TokenKind::TOK_COMMA)
#define T_COLON size_t(TokenKind::TOK_COLON)
#define T_BITNOT size_t(TokenKind::TOK_BITNOT)
#define T_LP size_t(TokenKind::TOK_LP)
#define T_RP size_t(TokenKind::TOK_RP)
#define T_SEMI size_t(TokenKind::TOK_SEMI)
#define T_HOOK size_t(TokenKind::TOK_HOOK)
#define T_LB size_t(TokenKind::TOK_LB)
#define T_RB size_t(TokenKind::TOK_RB)
#define T_LC size_t(TokenKind::TOK_LC)
#define T_RC size_t(TokenKind::TOK_RC)
#define Templat String
#define _______ Other
static const uint8_t firstCharKinds[] = {
@ -1396,19 +1404,27 @@ static const uint8_t firstCharKinds[] = {
/* 10+ */ EOL, Space, Space, EOL, _______, _______, _______, _______, _______, _______,
/* 20+ */ _______, _______, _______, _______, _______, _______, _______, _______, _______, _______,
/* 30+ */ _______, _______, Space, _______, String, _______, Ident, _______, _______, String,
/* 40+ */ TOK_LP, TOK_RP, _______, _______, T_COMMA,_______, _______, _______,BasePrefix, Dec,
/* 50+ */ Dec, Dec, Dec, Dec, Dec, Dec, Dec, Dec, T_COLON,TOK_SEMI,
/* 60+ */ _______, _______, _______,TOK_HOOK, _______, Ident, Ident, Ident, Ident, Ident,
/* 40+ */ T_LP, T_RP, _______, _______, T_COMMA,_______, _______, _______,BasePrefix, Dec,
/* 50+ */ Dec, Dec, Dec, Dec, Dec, Dec, Dec, Dec, T_COLON, T_SEMI,
/* 60+ */ _______, _______, _______, T_HOOK, _______, Ident, Ident, Ident, Ident, Ident,
/* 70+ */ Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident,
/* 80+ */ Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident,
/* 90+ */ Ident, TOK_LB, _______, TOK_RB, _______, Ident, Templat, Ident, Ident, Ident,
/* 90+ */ Ident, T_LB, _______, T_RB, _______, Ident, Templat, Ident, Ident, Ident,
/* 100+ */ Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident,
/* 110+ */ Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident, Ident,
/* 120+ */ Ident, Ident, Ident, TOK_LC, _______, TOK_RC,T_BITNOT, _______
/* 120+ */ Ident, Ident, Ident, T_LC, _______, T_RC,T_BITNOT, _______
};
#undef T_COMMA
#undef T_COLON
#undef T_BITNOT
#undef T_LP
#undef T_RP
#undef T_SEMI
#undef T_HOOK
#undef T_LB
#undef T_RB
#undef T_LC
#undef T_RC
#undef Templat
#undef _______
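
Note: the new T_LP .. T_RC defines exist for the same reason: the firstCharKinds entries are plain uint8_t, and while an unscoped TOK_LP could sit in that initializer directly, a scoped enumerator has to be cast first. Roughly, with a made-up two-entry table:

    #include <cstddef>
    #include <cstdint>

    enum class DemoTokenKind : uint8_t { TOK_LP, TOK_RP, TOK_LIMIT };

    // Pre-patch style (only compiles for an unscoped enum):
    //   static const uint8_t kinds[] = { TOK_LP, TOK_RP };
    // Post-patch style: cast each scoped enumerator to its integer value.
    #define T_LP size_t(DemoTokenKind::TOK_LP)
    #define T_RP size_t(DemoTokenKind::TOK_RP)

    static const uint8_t demoFirstCharKinds[] = { T_LP, T_RP };

    #undef T_LP
    #undef T_RP
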
@ -1440,7 +1456,7 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
retry:
if (MOZ_UNLIKELY(!userbuf.hasRawChars())) {
tp = newToken(0);
tp->type = TOK_EOF;
tp->type = TokenKind::TOK_EOF;
anyCharsAccess().flags.isEOF = true;
goto out;
}
@ -1580,7 +1596,7 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
JSAtom* atom = atomizeChars(anyCharsAccess().cx, chars, length);
if (!atom)
goto error;
tp->type = TOK_NAME;
tp->type = TokenKind::TOK_NAME;
tp->setName(atom->asPropertyName());
goto out;
}
@ -1653,7 +1669,7 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
goto error;
}
}
tp->type = TOK_NUMBER;
tp->type = TokenKind::TOK_NUMBER;
tp->setNumber(dval, decimalPoint);
goto out;
}
@ -1768,7 +1784,7 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
goto error;
}
tp->type = TOK_NUMBER;
tp->type = TokenKind::TOK_NUMBER;
tp->setNumber(dval, NoDecimal);
goto out;
}
@ -1788,28 +1804,28 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
}
if (c == '.') {
if (matchChar('.')) {
tp->type = TOK_TRIPLEDOT;
tp->type = TokenKind::TOK_TRIPLEDOT;
goto out;
}
}
ungetCharIgnoreEOL(c);
tp->type = TOK_DOT;
tp->type = TokenKind::TOK_DOT;
goto out;
case '=':
if (matchChar('='))
tp->type = matchChar('=') ? TOK_STRICTEQ : TOK_EQ;
tp->type = matchChar('=') ? TokenKind::TOK_STRICTEQ : TokenKind::TOK_EQ;
else if (matchChar('>'))
tp->type = TOK_ARROW;
tp->type = TokenKind::TOK_ARROW;
else
tp->type = TOK_ASSIGN;
tp->type = TokenKind::TOK_ASSIGN;
goto out;
case '+':
if (matchChar('+'))
tp->type = TOK_INC;
tp->type = TokenKind::TOK_INC;
else
tp->type = matchChar('=') ? TOK_ADDASSIGN : TOK_ADD;
tp->type = matchChar('=') ? TokenKind::TOK_ADDASSIGN : TokenKind::TOK_ADD;
goto out;
case '\\': {
@ -1824,31 +1840,31 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
case '|':
if (matchChar('|'))
tp->type = TOK_OR;
tp->type = TokenKind::TOK_OR;
#ifdef ENABLE_PIPELINE_OPERATOR
else if (matchChar('>'))
tp->type = TOK_PIPELINE;
tp->type = TokenKind::TOK_PIPELINE;
#endif
else
tp->type = matchChar('=') ? TOK_BITORASSIGN : TOK_BITOR;
tp->type = matchChar('=') ? TokenKind::TOK_BITORASSIGN : TokenKind::TOK_BITOR;
goto out;
case '^':
tp->type = matchChar('=') ? TOK_BITXORASSIGN : TOK_BITXOR;
tp->type = matchChar('=') ? TokenKind::TOK_BITXORASSIGN : TokenKind::TOK_BITXOR;
goto out;
case '&':
if (matchChar('&'))
tp->type = TOK_AND;
tp->type = TokenKind::TOK_AND;
else
tp->type = matchChar('=') ? TOK_BITANDASSIGN : TOK_BITAND;
tp->type = matchChar('=') ? TokenKind::TOK_BITANDASSIGN : TokenKind::TOK_BITAND;
goto out;
case '!':
if (matchChar('='))
tp->type = matchChar('=') ? TOK_STRICTNE : TOK_NE;
tp->type = matchChar('=') ? TokenKind::TOK_STRICTNE : TokenKind::TOK_NE;
else
tp->type = TOK_NOT;
tp->type = TokenKind::TOK_NOT;
goto out;
case '<':
@ -1864,28 +1880,28 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
}
}
if (matchChar('<')) {
tp->type = matchChar('=') ? TOK_LSHASSIGN : TOK_LSH;
tp->type = matchChar('=') ? TokenKind::TOK_LSHASSIGN : TokenKind::TOK_LSH;
} else {
tp->type = matchChar('=') ? TOK_LE : TOK_LT;
tp->type = matchChar('=') ? TokenKind::TOK_LE : TokenKind::TOK_LT;
}
goto out;
case '>':
if (matchChar('>')) {
if (matchChar('>'))
tp->type = matchChar('=') ? TOK_URSHASSIGN : TOK_URSH;
tp->type = matchChar('=') ? TokenKind::TOK_URSHASSIGN : TokenKind::TOK_URSH;
else
tp->type = matchChar('=') ? TOK_RSHASSIGN : TOK_RSH;
tp->type = matchChar('=') ? TokenKind::TOK_RSHASSIGN : TokenKind::TOK_RSH;
} else {
tp->type = matchChar('=') ? TOK_GE : TOK_GT;
tp->type = matchChar('=') ? TokenKind::TOK_GE : TokenKind::TOK_GT;
}
goto out;
case '*':
if (matchChar('*'))
tp->type = matchChar('=') ? TOK_POWASSIGN : TOK_POW;
tp->type = matchChar('=') ? TokenKind::TOK_POWASSIGN : TokenKind::TOK_POW;
else
tp->type = matchChar('=') ? TOK_MULASSIGN : TOK_MUL;
tp->type = matchChar('=') ? TokenKind::TOK_MULASSIGN : TokenKind::TOK_MUL;
goto out;
case '/':
@ -2005,16 +2021,16 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
consumeKnownChar(c);
goto error;
}
tp->type = TOK_REGEXP;
tp->type = TokenKind::TOK_REGEXP;
tp->setRegExpFlags(reflags);
goto out;
}
tp->type = matchChar('=') ? TOK_DIVASSIGN : TOK_DIV;
tp->type = matchChar('=') ? TokenKind::TOK_DIVASSIGN : TokenKind::TOK_DIV;
goto out;
case '%':
tp->type = matchChar('=') ? TOK_MODASSIGN : TOK_MOD;
tp->type = matchChar('=') ? TokenKind::TOK_MODASSIGN : TokenKind::TOK_MOD;
goto out;
case '-':
@ -2030,9 +2046,9 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getTokenInternal(TokenKind* ttp, Mod
goto skipline;
}
tp->type = TOK_DEC;
tp->type = TokenKind::TOK_DEC;
} else {
tp->type = matchChar('=') ? TOK_SUBASSIGN : TOK_SUB;
tp->type = matchChar('=') ? TokenKind::TOK_SUBASSIGN : TokenKind::TOK_SUB;
}
goto out;
@ -2320,12 +2336,12 @@ TokenStreamSpecific<CharT, AnyCharsAccess>::getStringOrTemplateToken(int untilCh
return false;
if (!parsingTemplate) {
(*tp)->type = TOK_STRING;
(*tp)->type = TokenKind::TOK_STRING;
} else {
if (c == '$' && nc == '{')
(*tp)->type = TOK_TEMPLATE_HEAD;
(*tp)->type = TokenKind::TOK_TEMPLATE_HEAD;
else
(*tp)->type = TOK_NO_SUBS_TEMPLATE;
(*tp)->type = TokenKind::TOK_NO_SUBS_TEMPLATE;
}
(*tp)->setAtom(atom);
@ -2336,11 +2352,11 @@ const char*
TokenKindToDesc(TokenKind tt)
{
switch (tt) {
#define EMIT_CASE(name, desc) case TOK_##name: return desc;
#define EMIT_CASE(name, desc) case TokenKind::TOK_##name: return desc;
FOR_EACH_TOKEN_KIND(EMIT_CASE)
#undef EMIT_CASE
case TOK_LIMIT:
MOZ_ASSERT_UNREACHABLE("TOK_LIMIT should not be passed.");
case TokenKind::TOK_LIMIT:
MOZ_ASSERT_UNREACHABLE("TokenKind::TOK_LIMIT should not be passed.");
break;
}
@ -2352,10 +2368,10 @@ const char*
TokenKindToString(TokenKind tt)
{
switch (tt) {
#define EMIT_CASE(name, desc) case TOK_##name: return "TOK_" #name;
#define EMIT_CASE(name, desc) case TokenKind::TOK_##name: return "TokenKind::TOK_" #name;
FOR_EACH_TOKEN_KIND(EMIT_CASE)
#undef EMIT_CASE
case TOK_LIMIT: break;
case TokenKind::TOK_LIMIT: break;
}
return "<bad TokenKind>";

@ -313,25 +313,25 @@ struct Token
// Mutators
void setName(PropertyName* name) {
MOZ_ASSERT(type == TOK_NAME);
MOZ_ASSERT(type == TokenKind::TOK_NAME);
u.name = name;
}
void setAtom(JSAtom* atom) {
MOZ_ASSERT(type == TOK_STRING ||
type == TOK_TEMPLATE_HEAD ||
type == TOK_NO_SUBS_TEMPLATE);
MOZ_ASSERT(type == TokenKind::TOK_STRING ||
type == TokenKind::TOK_TEMPLATE_HEAD ||
type == TokenKind::TOK_NO_SUBS_TEMPLATE);
u.atom = atom;
}
void setRegExpFlags(RegExpFlag flags) {
MOZ_ASSERT(type == TOK_REGEXP);
MOZ_ASSERT(type == TokenKind::TOK_REGEXP);
MOZ_ASSERT((flags & AllFlags) == flags);
u.reflags = flags;
}
void setNumber(double n, DecimalPoint decimalPoint) {
MOZ_ASSERT(type == TOK_NUMBER);
MOZ_ASSERT(type == TokenKind::TOK_NUMBER);
u.number.value = n;
u.number.decimalPoint = decimalPoint;
}
@ -339,30 +339,30 @@ struct Token
// Type-safe accessors
PropertyName* name() const {
MOZ_ASSERT(type == TOK_NAME);
MOZ_ASSERT(type == TokenKind::TOK_NAME);
return u.name->JSAtom::asPropertyName(); // poor-man's type verification
}
JSAtom* atom() const {
MOZ_ASSERT(type == TOK_STRING ||
type == TOK_TEMPLATE_HEAD ||
type == TOK_NO_SUBS_TEMPLATE);
MOZ_ASSERT(type == TokenKind::TOK_STRING ||
type == TokenKind::TOK_TEMPLATE_HEAD ||
type == TokenKind::TOK_NO_SUBS_TEMPLATE);
return u.atom;
}
RegExpFlag regExpFlags() const {
MOZ_ASSERT(type == TOK_REGEXP);
MOZ_ASSERT(type == TokenKind::TOK_REGEXP);
MOZ_ASSERT((u.reflags & AllFlags) == u.reflags);
return u.reflags;
}
double number() const {
MOZ_ASSERT(type == TOK_NUMBER);
MOZ_ASSERT(type == TokenKind::TOK_NUMBER);
return u.number.value;
}
DecimalPoint decimalPoint() const {
MOZ_ASSERT(type == TOK_NUMBER);
MOZ_ASSERT(type == TokenKind::TOK_NUMBER);
return u.number.decimalPoint;
}
};
@ -490,7 +490,7 @@ class TokenStreamAnyChars
public:
PropertyName* currentName() const {
if (isCurrentTokenType(TOK_NAME))
if (isCurrentTokenType(TokenKind::TOK_NAME))
return currentToken().name();
MOZ_ASSERT(TokenKindIsPossibleIdentifierName(currentToken().type));
@ -498,7 +498,7 @@ class TokenStreamAnyChars
}
bool currentNameHasEscapes() const {
if (isCurrentTokenType(TOK_NAME)) {
if (isCurrentTokenType(TokenKind::TOK_NAME)) {
TokenPos pos = currentToken().pos;
return (pos.end - pos.begin) != currentToken().name()->length();
}
@ -508,7 +508,7 @@ class TokenStreamAnyChars
}
PropertyName* nextName() const {
if (nextToken().type != TOK_NAME)
if (nextToken().type != TokenKind::TOK_NAME)
return nextToken().name();
MOZ_ASSERT(TokenKindIsPossibleIdentifierName(nextToken().type));
@ -565,7 +565,7 @@ class TokenStreamAnyChars
// Token after yield expression without operand already has
// NoneIsOperand exception.
MOZ_ASSERT(modifierException == OperandIsNone);
MOZ_ASSERT(next.type != TOK_DIV,
MOZ_ASSERT(next.type != TokenKind::TOK_DIV,
"next token requires contextual specifier to be parsed unambiguously");
// Do not update modifierException.
@ -576,12 +576,12 @@ class TokenStreamAnyChars
switch (modifierException) {
case NoneIsOperand:
MOZ_ASSERT(next.modifier == Operand);
MOZ_ASSERT(next.type != TOK_DIV,
MOZ_ASSERT(next.type != TokenKind::TOK_DIV,
"next token requires contextual specifier to be parsed unambiguously");
break;
case OperandIsNone:
MOZ_ASSERT(next.modifier == None);
MOZ_ASSERT(next.type != TOK_DIV && next.type != TOK_REGEXP,
MOZ_ASSERT(next.type != TokenKind::TOK_DIV && next.type != TokenKind::TOK_REGEXP,
"next token requires contextual specifier to be parsed unambiguously");
break;
default:
@ -770,7 +770,7 @@ class TokenStreamAnyChars
const char* filename_; // input filename or null
UniqueTwoByteChars displayURL_; // the user's requested source URL or null
UniqueTwoByteChars sourceMapURL_; // source map's filename or null
uint8_t isExprEnding[TOK_LIMIT];// which tokens definitely terminate exprs?
uint8_t isExprEnding[size_t(TokenKind::TOK_LIMIT)];// which tokens definitely terminate exprs?
JSContext* const cx;
bool mutedErrors;
StrictModeGetter* strictModeGetter; // used to test for strict mode
@ -1116,11 +1116,11 @@ class MOZ_STACK_CLASS TokenStreamSpecific
JSAtom* getRawTemplateStringAtom() {
TokenStreamAnyChars& anyChars = anyCharsAccess();
MOZ_ASSERT(anyChars.currentToken().type == TOK_TEMPLATE_HEAD ||
anyChars.currentToken().type == TOK_NO_SUBS_TEMPLATE);
MOZ_ASSERT(anyChars.currentToken().type == TokenKind::TOK_TEMPLATE_HEAD ||
anyChars.currentToken().type == TokenKind::TOK_NO_SUBS_TEMPLATE);
const CharT* cur = userbuf.rawCharPtrAt(anyChars.currentToken().pos.begin + 1);
const CharT* end;
if (anyChars.currentToken().type == TOK_TEMPLATE_HEAD) {
if (anyChars.currentToken().type == TokenKind::TOK_TEMPLATE_HEAD) {
// Of the form |`...${| or |}...${|
end = userbuf.rawCharPtrAt(anyChars.currentToken().pos.end - 2);
} else {
@ -1181,7 +1181,7 @@ class MOZ_STACK_CLASS TokenStreamSpecific
anyChars.lookahead--;
anyChars.cursor = (anyChars.cursor + 1) & ntokensMask;
TokenKind tt = anyChars.currentToken().type;
MOZ_ASSERT(tt != TOK_EOL);
MOZ_ASSERT(tt != TokenKind::TOK_EOL);
verifyConsistentModifier(modifier, anyChars.currentToken());
*ttp = tt;
return true;
@ -1275,7 +1275,7 @@ class MOZ_STACK_CLASS TokenStreamSpecific
const auto& srcCoords = anyChars.srcCoords;
*ttp = srcCoords.lineNum(curr.pos.end) == srcCoords.lineNum(next.pos.begin)
? next.type
: TOK_EOL;
: TokenKind::TOK_EOL;
return true;
}
@ -1305,7 +1305,7 @@ class MOZ_STACK_CLASS TokenStreamSpecific
if (!peekToken(&tt))
return false;
*endsExpr = anyCharsAccess().isExprEnding[tt];
*endsExpr = anyCharsAccess().isExprEnding[size_t(tt)];
if (*endsExpr) {
// If the next token ends an overall Expression, we'll parse this
// Expression without ever invoking Parser::orExpr(). But we need

@ -766,7 +766,7 @@ GetToken(AsmJSParser& parser, TokenKind* tkp)
while (true) {
if (!ts.getToken(&tk, TokenStream::Operand))
return false;
if (tk != TOK_SEMI)
if (tk != TokenKind::TOK_SEMI)
break;
}
*tkp = tk;
@ -781,9 +781,9 @@ PeekToken(AsmJSParser& parser, TokenKind* tkp)
while (true) {
if (!ts.peekToken(&tk, TokenStream::Operand))
return false;
if (tk != TOK_SEMI)
if (tk != TokenKind::TOK_SEMI)
break;
ts.consumeKnownToken(TOK_SEMI, TokenStream::Operand);
ts.consumeKnownToken(TokenKind::TOK_SEMI, TokenStream::Operand);
}
*tkp = tk;
return true;
@ -795,7 +795,7 @@ ParseVarOrConstStatement(AsmJSParser& parser, ParseNode** var)
TokenKind tk;
if (!PeekToken(parser, &tk))
return false;
if (tk != TOK_VAR && tk != TOK_CONST) {
if (tk != TokenKind::TOK_VAR && tk != TokenKind::TOK_CONST) {
*var = nullptr;
return true;
}
@ -3848,7 +3848,7 @@ CheckModuleProcessingDirectives(ModuleValidator& m)
auto& ts = m.parser().tokenStream;
while (true) {
bool matched;
if (!ts.matchToken(&matched, TOK_STRING, TokenStream::Operand))
if (!ts.matchToken(&matched, TokenKind::TOK_STRING, TokenStream::Operand))
return false;
if (!matched)
return true;
@ -3859,7 +3859,7 @@ CheckModuleProcessingDirectives(ModuleValidator& m)
TokenKind tt;
if (!ts.getToken(&tt))
return false;
if (tt != TOK_SEMI)
if (tt != TokenKind::TOK_SEMI)
return m.failCurrentOffset("expected semicolon after string literal");
}
}
@ -7178,7 +7178,7 @@ ParseFunction(ModuleValidator& m, ParseNode** fnOut, unsigned* line)
{
auto& tokenStream = m.tokenStream();
tokenStream.consumeKnownToken(TOK_FUNCTION, TokenStream::Operand);
tokenStream.consumeKnownToken(TokenKind::TOK_FUNCTION, TokenStream::Operand);
auto& anyChars = tokenStream.anyCharsAccess();
uint32_t toStringStart = anyChars.currentToken().pos.begin;
@ -7187,7 +7187,7 @@ ParseFunction(ModuleValidator& m, ParseNode** fnOut, unsigned* line)
TokenKind tk;
if (!tokenStream.getToken(&tk, TokenStream::Operand))
return false;
if (tk == TOK_MUL)
if (tk == TokenKind::TOK_MUL)
return m.failCurrentOffset("unexpected generator function");
if (!TokenKindIsPossibleIdentifier(tk))
return false; // The regular parser will throw a SyntaxError, no need to m.fail.
@ -7307,7 +7307,7 @@ CheckFunctions(ModuleValidator& m)
if (!PeekToken(m.parser(), &tk))
return false;
if (tk != TOK_FUNCTION)
if (tk != TokenKind::TOK_FUNCTION)
break;
if (!CheckFunction(m))
@ -7441,8 +7441,8 @@ CheckModuleReturn(ModuleValidator& m)
if (!GetToken(m.parser(), &tk))
return false;
auto& ts = m.parser().tokenStream;
if (tk != TOK_RETURN) {
return m.failCurrentOffset((tk == TOK_RC || tk == TOK_EOF)
if (tk != TokenKind::TOK_RETURN) {
return m.failCurrentOffset((tk == TokenKind::TOK_RC || tk == TokenKind::TOK_EOF)
? "expecting return statement"
: "invalid asm.js. statement");
}
@ -7474,7 +7474,7 @@ CheckModuleEnd(ModuleValidator &m)
if (!GetToken(m.parser(), &tk))
return false;
if (tk != TOK_EOF && tk != TOK_RC)
if (tk != TokenKind::TOK_EOF && tk != TokenKind::TOK_RC)
return m.failCurrentOffset("top-level export (return) must be the last statement");
m.parser().tokenStream.anyCharsAccess().ungetToken();