Bug 1652216 - Update jsparagus to fix private field handling. r=yulia

Differential Revision: https://phabricator.services.mozilla.com/D83643
This commit is contained in:
Tooru Fujisawa 2020-07-16 00:13:18 +00:00
parent e398ca4d46
commit 916bf4d67b
25 changed files with 111363 additions and 9507 deletions

View File

@ -30,7 +30,7 @@ rev = "61dcc364ac0d6d0816ab88a494bbf20d824b009b"
[source."https://github.com/mozilla-spidermonkey/jsparagus"]
git = "https://github.com/mozilla-spidermonkey/jsparagus"
replace-with = "vendored-sources"
rev = "b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
rev = "e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
[source."https://github.com/kvark/spirv_cross"]
branch = "wgpu3"

16
Cargo.lock generated
View File

@ -2356,7 +2356,7 @@ dependencies = [
[[package]]
name = "jsparagus"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"jsparagus-ast",
"jsparagus-emitter",
@ -2370,7 +2370,7 @@ dependencies = [
[[package]]
name = "jsparagus-ast"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"bumpalo",
"indexmap",
@ -2379,7 +2379,7 @@ dependencies = [
[[package]]
name = "jsparagus-emitter"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"bumpalo",
"byteorder",
@ -2392,7 +2392,7 @@ dependencies = [
[[package]]
name = "jsparagus-generated-parser"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"bumpalo",
"jsparagus-ast",
@ -2402,12 +2402,12 @@ dependencies = [
[[package]]
name = "jsparagus-json-log"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
[[package]]
name = "jsparagus-parser"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"arrayvec",
"bumpalo",
@ -2419,7 +2419,7 @@ dependencies = [
[[package]]
name = "jsparagus-scope"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"indexmap",
"jsparagus-ast",
@ -2429,7 +2429,7 @@ dependencies = [
[[package]]
name = "jsparagus-stencil"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b987c8f4d4bc878dd79c58ad3917c0db3f7341b3#b987c8f4d4bc878dd79c58ad3917c0db3f7341b3"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=e75748bec6ff2ebc9c0a7e66a2e824441b6787dd#e75748bec6ff2ebc9c0a7e66a2e824441b6787dd"
dependencies = [
"jsparagus-ast",
]

View File

@ -12,12 +12,12 @@ log = "0.4"
# Disable regex feature for code size.
env_logger = {version = "0.6", default-features = false}
# For non-jsparagus developers.
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "b987c8f4d4bc878dd79c58ad3917c0db3f7341b3" }
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "e75748bec6ff2ebc9c0a7e66a2e824441b6787dd" }
# For local development, replace above with
# jsparagus = { path = "{path to jsparagus}" }
[build-dependencies]
# For non-jsparagus developers.
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "b987c8f4d4bc878dd79c58ad3917c0db3f7341b3" }
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "e75748bec6ff2ebc9c0a7e66a2e824441b6787dd" }
# For local development, replace above with
# jsparagus = { path = "{path to jsparagus}" }

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"6691713bda864bda400c97783df5f4bff9202bc61179db79ac01557d6d1a5038","ast.json":"8bee02b419d90de0203ba7378aedecd5c94a447128b11231970119c9ecfecb77","generate_ast.py":"0c24431d9c07af42d7d17739c2e21465964151562437cfca093ceddde898bc93","src/arena.rs":"03ef07c963556160a6f1a85fd901833d7322f8a5f265c20d3e3543432dd2a96d","src/associated_data.rs":"aad369f45eca0506632d3c6aa2f487debb81d0dbc5a73eda46ee97478d51eaf3","src/dump_generated.rs":"a1368d31a8e3d286f230058e4a007ce7cd94240f2e59c0cc31c8a56756b1c1b3","src/lib.rs":"b35553bedec9f6d88cc5194592f857dc13669559cbc8b206048c35299c4f86be","src/source_atom_set.rs":"1e6fb4be166c9fe4cf63ae6475f1abb12cb3dfa268250328a60f483a09e1481c","src/source_location.rs":"3832440ecec6de726262837072810410bddb45c075288386509511c153f6afd9","src/source_location_accessor_generated.rs":"4a016e4860a2b6b9b7637dff10c634255f6c1942231eb75224261679944fa169","src/source_slice_list.rs":"c82413b3081e091a3c4ce5d2c3624e54ecbeb0bb9952f10d373d10faf589955a","src/type_id_generated.rs":"c70cbdc0f84c2fd7e84fa6ef1614be22f63e52ffcbf2f93714c189dce3dad557","src/types_generated.rs":"965e0619315156b9fd918f26f6317aa98970b535a7df7442f163b23ee5eaf641","src/visit_generated.rs":"d09510127771be0c3a20c95ed48a4e4d387144e6cb08b02783a81739539111ed"},"package":null}
{"files":{"Cargo.toml":"6691713bda864bda400c97783df5f4bff9202bc61179db79ac01557d6d1a5038","ast.json":"0bf1dfe7ec726f0c26cec875961a3072a7b8d7600ef6d301915a2684ed51a1ad","generate_ast.py":"0c24431d9c07af42d7d17739c2e21465964151562437cfca093ceddde898bc93","src/arena.rs":"03ef07c963556160a6f1a85fd901833d7322f8a5f265c20d3e3543432dd2a96d","src/associated_data.rs":"aad369f45eca0506632d3c6aa2f487debb81d0dbc5a73eda46ee97478d51eaf3","src/dump_generated.rs":"8ca0736952ee41fc932807e8450c6eb68e081b3fd5f7a0f701cae8b35dc0c13e","src/lib.rs":"b35553bedec9f6d88cc5194592f857dc13669559cbc8b206048c35299c4f86be","src/source_atom_set.rs":"1e6fb4be166c9fe4cf63ae6475f1abb12cb3dfa268250328a60f483a09e1481c","src/source_location.rs":"3832440ecec6de726262837072810410bddb45c075288386509511c153f6afd9","src/source_location_accessor_generated.rs":"2669efcc5447229429f8fab6123bbd9dec8ed4c69232992af05aca3a59f1c710","src/source_slice_list.rs":"c82413b3081e091a3c4ce5d2c3624e54ecbeb0bb9952f10d373d10faf589955a","src/type_id_generated.rs":"a1e88f0d9d97d61339d0bedd0f6a8472bd39ea13968531ebce7140ca47edbaeb","src/types_generated.rs":"5e4f2386e76d2e9d2a60fd799430b2d4d628be61fb41200acbd5102403011d0f","src/visit_generated.rs":"630e6a8d1c32148a413d568ad0e6915ebe39a068b2563d1860af74428547801e"},"package":null}

View File

@ -289,11 +289,15 @@
"StaticMemberExpressionTail": {
"property": "IdentifierName"
},
"PrivateFieldExpressionTail": {
"field": "PrivateIdentifier"
},
"CallExpressionTail": {
"arguments": "Arguments"
},
"ComputedMemberExpression": "ComputedMemberExpression",
"StaticMemberExpression": "StaticMemberExpression",
"PrivateFieldExpression": "PrivateFieldExpression",
"CallExpression": "CallExpression"
},
"PropertyName": {
@ -400,6 +404,7 @@
"MemberAssignmentTarget": {
"_type": "enum",
"ComputedMemberAssignmentTarget": "ComputedMemberAssignmentTarget",
"PrivateFieldAssignmentTarget": "PrivateFieldAssignmentTarget",
"StaticMemberAssignmentTarget": "StaticMemberAssignmentTarget"
},
"ComputedMemberAssignmentTarget": {
@ -407,6 +412,11 @@
"object": "ExpressionOrSuper",
"expression": "Box<Expression>"
},
"PrivateFieldAssignmentTarget": {
"_type": "struct",
"object": "ExpressionOrSuper",
"field": "PrivateIdentifier"
},
"StaticMemberAssignmentTarget": {
"_type": "struct",
"object": "ExpressionOrSuper",
@ -633,7 +643,7 @@
},
"PrivateFieldExpression": {
"_type": "struct",
"object": "Box<Expression>",
"object": "ExpressionOrSuper",
"field": "PrivateIdentifier"
},
"TemplateExpressionElement": {

View File

@ -834,6 +834,13 @@ impl<'alloc> ASTDump for OptionalChain<'alloc> {
property.dump_with_atoms_at(out, atoms, slices, depth + 1);
write!(out, ")").expect("failed to dump");
}
OptionalChain::PrivateFieldExpressionTail { field, .. } => {
write!(out, "(PrivateFieldExpressionTail").expect("failed to dump");
write!(out, " ").expect("failed to dump");
write!(out, "field=").expect("failed to dump");
field.dump_with_atoms_at(out, atoms, slices, depth + 1);
write!(out, ")").expect("failed to dump");
}
OptionalChain::CallExpressionTail { arguments, .. } => {
write!(out, "(CallExpressionTail").expect("failed to dump");
write!(out, " ").expect("failed to dump");
@ -847,6 +854,9 @@ impl<'alloc> ASTDump for OptionalChain<'alloc> {
OptionalChain::StaticMemberExpression(ast) => {
ast.dump_with_atoms_at(out, atoms, slices, depth);
}
OptionalChain::PrivateFieldExpression(ast) => {
ast.dump_with_atoms_at(out, atoms, slices, depth);
}
OptionalChain::CallExpression(ast) => {
ast.dump_with_atoms_at(out, atoms, slices, depth);
}
@ -1165,6 +1175,9 @@ impl<'alloc> ASTDump for MemberAssignmentTarget<'alloc> {
MemberAssignmentTarget::ComputedMemberAssignmentTarget(ast) => {
ast.dump_with_atoms_at(out, atoms, slices, depth);
}
MemberAssignmentTarget::PrivateFieldAssignmentTarget(ast) => {
ast.dump_with_atoms_at(out, atoms, slices, depth);
}
MemberAssignmentTarget::StaticMemberAssignmentTarget(ast) => {
ast.dump_with_atoms_at(out, atoms, slices, depth);
}
@ -1187,6 +1200,21 @@ impl<'alloc> ASTDump for ComputedMemberAssignmentTarget<'alloc> {
}
}
impl<'alloc> ASTDump for PrivateFieldAssignmentTarget<'alloc> {
fn dump_with_atoms_at<W>(&self, out: &mut W, atoms: &SourceAtomSet, slices: &SourceSliceList, depth: usize)
where W: io::Write
{
write!(out, "(PrivateFieldAssignmentTarget").expect("failed to dump");
newline(out, depth + 1);
write!(out, "object=").expect("failed to dump");
self.object.dump_with_atoms_at(out, atoms, slices, depth + 1);
newline(out, depth + 1);
write!(out, "field=").expect("failed to dump");
self.field.dump_with_atoms_at(out, atoms, slices, depth + 1);
write!(out, ")").expect("failed to dump");
}
}
impl<'alloc> ASTDump for StaticMemberAssignmentTarget<'alloc> {
fn dump_with_atoms_at<W>(&self, out: &mut W, atoms: &SourceAtomSet, slices: &SourceSliceList, depth: usize)
where W: io::Write

View File

@ -1254,6 +1254,7 @@ impl<'alloc> SourceLocationAccessor for MemberAssignmentTarget<'alloc> {
fn set_loc(&mut self, start: SourceLocation, end: SourceLocation) {
match self {
MemberAssignmentTarget::ComputedMemberAssignmentTarget(content) => { content.set_loc(start, end) }
MemberAssignmentTarget::PrivateFieldAssignmentTarget(content) => { content.set_loc(start, end) }
MemberAssignmentTarget::StaticMemberAssignmentTarget(content) => { content.set_loc(start, end) }
}
}
@ -1261,6 +1262,7 @@ impl<'alloc> SourceLocationAccessor for MemberAssignmentTarget<'alloc> {
fn get_loc(&self) -> SourceLocation {
match self {
MemberAssignmentTarget::ComputedMemberAssignmentTarget(content) => { content.get_loc() }
MemberAssignmentTarget::PrivateFieldAssignmentTarget(content) => { content.get_loc() }
MemberAssignmentTarget::StaticMemberAssignmentTarget(content) => { content.get_loc() }
}
}
@ -1431,12 +1433,17 @@ impl<'alloc> SourceLocationAccessor for OptionalChain<'alloc> {
loc.start = start.start;
loc.end = end.end;
}
OptionalChain::PrivateFieldExpressionTail { mut loc, .. } => {
loc.start = start.start;
loc.end = end.end;
}
OptionalChain::CallExpressionTail { mut loc, .. } => {
loc.start = start.start;
loc.end = end.end;
}
OptionalChain::ComputedMemberExpression(content) => { content.set_loc(start, end) }
OptionalChain::StaticMemberExpression(content) => { content.set_loc(start, end) }
OptionalChain::PrivateFieldExpression(content) => { content.set_loc(start, end) }
OptionalChain::CallExpression(content) => { content.set_loc(start, end) }
}
}
@ -1449,11 +1456,15 @@ impl<'alloc> SourceLocationAccessor for OptionalChain<'alloc> {
OptionalChain::StaticMemberExpressionTail { loc, .. } => {
*loc
}
OptionalChain::PrivateFieldExpressionTail { loc, .. } => {
*loc
}
OptionalChain::CallExpressionTail { loc, .. } => {
*loc
}
OptionalChain::ComputedMemberExpression(content) => { content.get_loc() }
OptionalChain::StaticMemberExpression(content) => { content.get_loc() }
OptionalChain::PrivateFieldExpression(content) => { content.get_loc() }
OptionalChain::CallExpression(content) => { content.get_loc() }
}
}
@ -1475,6 +1486,17 @@ impl<'alloc> SourceLocationAccessor for Parameter<'alloc> {
}
}
impl<'alloc> SourceLocationAccessor for PrivateFieldAssignmentTarget<'alloc> {
fn set_loc(&mut self, start: SourceLocation, end: SourceLocation) {
self.loc.start = start.start;
self.loc.end = end.end;
}
fn get_loc(&self) -> SourceLocation {
self.loc
}
}
impl<'alloc> SourceLocationAccessor for PrivateFieldExpression<'alloc> {
fn set_loc(&mut self, start: SourceLocation, end: SourceLocation) {
self.loc.start = start.start;

View File

@ -43,6 +43,7 @@ pub enum NodeTypeId {
ExpressionOrSuper,
MemberAssignmentTarget,
ComputedMemberAssignmentTarget,
PrivateFieldAssignmentTarget,
StaticMemberAssignmentTarget,
ArrayBinding,
ObjectBinding,
@ -343,6 +344,12 @@ impl<'alloc> NodeTypeIdAccessor for ComputedMemberAssignmentTarget<'alloc> {
}
}
impl<'alloc> NodeTypeIdAccessor for PrivateFieldAssignmentTarget<'alloc> {
fn get_type_id(&self) -> NodeTypeId {
NodeTypeId::PrivateFieldAssignmentTarget
}
}
impl<'alloc> NodeTypeIdAccessor for StaticMemberAssignmentTarget<'alloc> {
fn get_type_id(&self) -> NodeTypeId {
NodeTypeId::StaticMemberAssignmentTarget

View File

@ -472,12 +472,17 @@ pub enum OptionalChain<'alloc> {
property: IdentifierName,
loc: SourceLocation,
},
PrivateFieldExpressionTail {
field: PrivateIdentifier,
loc: SourceLocation,
},
CallExpressionTail {
arguments: Arguments<'alloc>,
loc: SourceLocation,
},
ComputedMemberExpression(ComputedMemberExpression<'alloc>),
StaticMemberExpression(StaticMemberExpression<'alloc>),
PrivateFieldExpression(PrivateFieldExpression<'alloc>),
CallExpression(CallExpression<'alloc>),
}
@ -610,6 +615,7 @@ pub enum ExpressionOrSuper<'alloc> {
#[derive(Debug, PartialEq)]
pub enum MemberAssignmentTarget<'alloc> {
ComputedMemberAssignmentTarget(ComputedMemberAssignmentTarget<'alloc>),
PrivateFieldAssignmentTarget(PrivateFieldAssignmentTarget<'alloc>),
StaticMemberAssignmentTarget(StaticMemberAssignmentTarget<'alloc>),
}
@ -620,6 +626,13 @@ pub struct ComputedMemberAssignmentTarget<'alloc> {
pub loc: SourceLocation,
}
#[derive(Debug, PartialEq)]
pub struct PrivateFieldAssignmentTarget<'alloc> {
pub object: ExpressionOrSuper<'alloc>,
pub field: PrivateIdentifier,
pub loc: SourceLocation,
}
#[derive(Debug, PartialEq)]
pub struct StaticMemberAssignmentTarget<'alloc> {
pub object: ExpressionOrSuper<'alloc>,
@ -927,7 +940,7 @@ pub struct StaticMemberExpression<'alloc> {
#[derive(Debug, PartialEq)]
pub struct PrivateFieldExpression<'alloc> {
pub object: arena::Box<'alloc, Expression<'alloc>>,
pub object: ExpressionOrSuper<'alloc>,
pub field: PrivateIdentifier,
pub loc: SourceLocation,
}

View File

@ -2363,6 +2363,11 @@ pub trait Pass<'alloc> {
property,
)
}
OptionalChain::PrivateFieldExpressionTail { field, .. } => {
self.visit_enum_optional_chain_variant_private_field_expression_tail(
field,
)
}
OptionalChain::CallExpressionTail { arguments, .. } => {
self.visit_enum_optional_chain_variant_call_expression_tail(
arguments,
@ -2374,6 +2379,9 @@ pub trait Pass<'alloc> {
OptionalChain::StaticMemberExpression(ast) => {
self.visit_enum_optional_chain_variant_static_member_expression(ast)
}
OptionalChain::PrivateFieldExpression(ast) => {
self.visit_enum_optional_chain_variant_private_field_expression(ast)
}
OptionalChain::CallExpression(ast) => {
self.visit_enum_optional_chain_variant_call_expression(ast)
}
@ -2437,6 +2445,31 @@ pub trait Pass<'alloc> {
) {
}
fn visit_enum_optional_chain_variant_private_field_expression_tail(
&mut self,
field: &'alloc PrivateIdentifier,
) {
self.enter_enum_optional_chain_variant_private_field_expression_tail(
field,
);
self.visit_private_identifier(field);
self.leave_enum_optional_chain_variant_private_field_expression_tail(
field,
);
}
fn enter_enum_optional_chain_variant_private_field_expression_tail(
&mut self,
field: &'alloc PrivateIdentifier,
) {
}
fn leave_enum_optional_chain_variant_private_field_expression_tail(
&mut self,
field: &'alloc PrivateIdentifier,
) {
}
fn visit_enum_optional_chain_variant_call_expression_tail(
&mut self,
arguments: &'alloc Arguments<'alloc>,
@ -2504,6 +2537,27 @@ pub trait Pass<'alloc> {
) {
}
fn visit_enum_optional_chain_variant_private_field_expression(
&mut self,
ast: &'alloc PrivateFieldExpression<'alloc>,
) {
self.enter_enum_optional_chain_variant_private_field_expression(ast);
self.visit_private_field_expression(ast);
self.leave_enum_optional_chain_variant_private_field_expression(ast);
}
fn enter_enum_optional_chain_variant_private_field_expression(
&mut self,
ast: &'alloc PrivateFieldExpression<'alloc>,
) {
}
fn leave_enum_optional_chain_variant_private_field_expression(
&mut self,
ast: &'alloc PrivateFieldExpression<'alloc>,
) {
}
fn visit_enum_optional_chain_variant_call_expression(
&mut self,
ast: &'alloc CallExpression<'alloc>,
@ -3670,6 +3724,9 @@ pub trait Pass<'alloc> {
MemberAssignmentTarget::ComputedMemberAssignmentTarget(ast) => {
self.visit_enum_member_assignment_target_variant_computed_member_assignment_target(ast)
}
MemberAssignmentTarget::PrivateFieldAssignmentTarget(ast) => {
self.visit_enum_member_assignment_target_variant_private_field_assignment_target(ast)
}
MemberAssignmentTarget::StaticMemberAssignmentTarget(ast) => {
self.visit_enum_member_assignment_target_variant_static_member_assignment_target(ast)
}
@ -3704,6 +3761,27 @@ pub trait Pass<'alloc> {
) {
}
fn visit_enum_member_assignment_target_variant_private_field_assignment_target(
&mut self,
ast: &'alloc PrivateFieldAssignmentTarget<'alloc>,
) {
self.enter_enum_member_assignment_target_variant_private_field_assignment_target(ast);
self.visit_private_field_assignment_target(ast);
self.leave_enum_member_assignment_target_variant_private_field_assignment_target(ast);
}
fn enter_enum_member_assignment_target_variant_private_field_assignment_target(
&mut self,
ast: &'alloc PrivateFieldAssignmentTarget<'alloc>,
) {
}
fn leave_enum_member_assignment_target_variant_private_field_assignment_target(
&mut self,
ast: &'alloc PrivateFieldAssignmentTarget<'alloc>,
) {
}
fn visit_enum_member_assignment_target_variant_static_member_assignment_target(
&mut self,
ast: &'alloc StaticMemberAssignmentTarget<'alloc>,
@ -3738,6 +3816,19 @@ pub trait Pass<'alloc> {
fn leave_computed_member_assignment_target(&mut self, ast: &'alloc ComputedMemberAssignmentTarget<'alloc>) {
}
fn visit_private_field_assignment_target(&mut self, ast: &'alloc PrivateFieldAssignmentTarget<'alloc>) {
self.enter_private_field_assignment_target(ast);
self.visit_expression_or_super(&ast.object);
self.visit_private_identifier(&ast.field);
self.leave_private_field_assignment_target(ast);
}
fn enter_private_field_assignment_target(&mut self, ast: &'alloc PrivateFieldAssignmentTarget<'alloc>) {
}
fn leave_private_field_assignment_target(&mut self, ast: &'alloc PrivateFieldAssignmentTarget<'alloc>) {
}
fn visit_static_member_assignment_target(&mut self, ast: &'alloc StaticMemberAssignmentTarget<'alloc>) {
self.enter_static_member_assignment_target(ast);
self.visit_expression_or_super(&ast.object);
@ -4885,7 +4976,7 @@ pub trait Pass<'alloc> {
fn visit_private_field_expression(&mut self, ast: &'alloc PrivateFieldExpression<'alloc>) {
self.enter_private_field_expression(ast);
self.visit_expression(&ast.object);
self.visit_expression_or_super(&ast.object);
self.visit_private_identifier(&ast.field);
self.leave_private_field_expression(ast);
}

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"553be3c198fe555913bbeb7473b24e0e1fff12e48890a2e399b311df8a97c814","src/ast_builder.rs":"8b10743ebbc3390d1158cb44ff5c87fbb653d62521f3e1274565d0061eebe095","src/context_stack.rs":"29331d03cd4c8ee9283cb426ebe893b7ba6ad6d8a69016399c4d92a81cb1363b","src/declaration_kind.rs":"fdfda2fe408cce1c637d17fee0813160619450472c6de9befc36ebeed892cc3c","src/early_error_checker.rs":"150a106a8f0901b72ae40581f0c12f785983514cbc9042404ed6cf4315693d60","src/early_errors.rs":"8674454af7ac5efe51eb6a8e2abe088aad5560e0a0bd88a3eae66c90f1527149","src/error.rs":"507e4dd9c66720f3da2db135c3024392d8aaac5ccdb90c7f7463ccb2eff7efa8","src/lib.rs":"b74105a84c4a141b880439f9ec724f7dc08224342be08a73490ac2c01410af08","src/parser_tables_generated.rs":"405a20fc84c79ebe84daae5f46c5c27f86958292991ac6420a86131c2e1031d8","src/stack_value_generated.rs":"d8696a671368e2565d589922e3a46d20667ed3e17e29953e69b970470e9639ee","src/token.rs":"479f4cb97d2e6bc654a70634f3809817cc73eaf749c845643beb3556b9ead383","src/traits/mod.rs":"ba74c71f7218027f8188247bc64df243117613fbc9893d40799402ef1e6dbf59"},"package":null}
{"files":{"Cargo.toml":"553be3c198fe555913bbeb7473b24e0e1fff12e48890a2e399b311df8a97c814","src/ast_builder.rs":"ff3cd595b70e8359b4ce9ce78b066890a78572fee923fcd8cadd029b8fbc4d7a","src/context_stack.rs":"29331d03cd4c8ee9283cb426ebe893b7ba6ad6d8a69016399c4d92a81cb1363b","src/declaration_kind.rs":"fdfda2fe408cce1c637d17fee0813160619450472c6de9befc36ebeed892cc3c","src/early_error_checker.rs":"150a106a8f0901b72ae40581f0c12f785983514cbc9042404ed6cf4315693d60","src/early_errors.rs":"8674454af7ac5efe51eb6a8e2abe088aad5560e0a0bd88a3eae66c90f1527149","src/error.rs":"507e4dd9c66720f3da2db135c3024392d8aaac5ccdb90c7f7463ccb2eff7efa8","src/lib.rs":"b74105a84c4a141b880439f9ec724f7dc08224342be08a73490ac2c01410af08","src/parser_tables_generated.rs":"403e630c54088a55f2d348e7a817716bb54ef52f96f102d198e9aa7ffa5ea1bc","src/stack_value_generated.rs":"ce8567634ff2bb818593f56c0589b4ba2d508704db943eb0778d79dfd19cce36","src/token.rs":"479f4cb97d2e6bc654a70634f3809817cc73eaf749c845643beb3556b9ead383","src/traits/mod.rs":"ba74c71f7218027f8188247bc64df243117613fbc9893d40799402ef1e6dbf59"},"package":null}

View File

@ -1233,6 +1233,21 @@ impl<'alloc> AstBuilder<'alloc> {
})
}
// OptionalChain : `?.` PrivateIdentifier
pub fn optional_private_field_member_expr_tail(
&self,
start_token: arena::Box<'alloc, Token>,
private_identifier: arena::Box<'alloc, Token>,
) -> arena::Box<'alloc, Expression<'alloc>> {
let private_identifier_loc = private_identifier.loc;
self.alloc_with(|| {
Expression::OptionalChain(OptionalChain::PrivateFieldExpressionTail {
field: self.private_identifier(private_identifier),
loc: SourceLocation::from_parts(start_token.loc, private_identifier_loc),
})
})
}
// OptionalChain : `?.` Arguments
pub fn optional_call_expr_tail(
&self,
@ -1293,6 +1308,25 @@ impl<'alloc> AstBuilder<'alloc> {
})
}
// OptionalChain : OptionalChain `.` PrivateIdentifier
pub fn optional_private_field_member_expr(
&self,
object: arena::Box<'alloc, Expression<'alloc>>,
private_identifier: arena::Box<'alloc, Token>,
) -> arena::Box<'alloc, Expression<'alloc>> {
let object_loc = object.get_loc();
let private_identifier_loc = private_identifier.loc;
self.alloc_with(|| {
Expression::OptionalChain(OptionalChain::PrivateFieldExpression(
PrivateFieldExpression {
object: ExpressionOrSuper::Expression(object),
field: self.private_identifier(private_identifier),
loc: SourceLocation::from_parts(object_loc, private_identifier_loc),
},
))
})
}
// OptionalChain : OptionalChain Arguments
pub fn optional_call_expr(
&self,
@ -1388,7 +1422,7 @@ impl<'alloc> AstBuilder<'alloc> {
self.alloc_with(|| {
Expression::MemberExpression(MemberExpression::PrivateFieldExpression(
PrivateFieldExpression {
object,
object: ExpressionOrSuper::Expression(object),
field: self.private_identifier(private_identifier),
loc: SourceLocation::from_parts(object_loc, field_loc),
},
@ -2130,6 +2164,13 @@ impl<'alloc> AstBuilder<'alloc> {
},
),
),
Expression::MemberExpression(MemberExpression::PrivateFieldExpression(
PrivateFieldExpression { object, field, loc },
)) => SimpleAssignmentTarget::MemberAssignmentTarget(
MemberAssignmentTarget::PrivateFieldAssignmentTarget(
PrivateFieldAssignmentTarget { object, field, loc },
),
),
// Static Semantics: AssignmentTargetType
// https://tc39.es/ecma262/#sec-static-semantics-static-semantics-assignmenttargettype

File diff suppressed because it is too large Load Diff

View File

@ -84,6 +84,7 @@ pub enum StackValue<'alloc> {
ObjectProperty(arena::Box<'alloc, ObjectProperty<'alloc>>),
OptionalChain(arena::Box<'alloc, OptionalChain<'alloc>>),
Parameter(arena::Box<'alloc, Parameter<'alloc>>),
PrivateFieldAssignmentTarget(arena::Box<'alloc, PrivateFieldAssignmentTarget<'alloc>>),
PrivateFieldExpression(arena::Box<'alloc, PrivateFieldExpression<'alloc>>),
PrivateIdentifier(arena::Box<'alloc, PrivateIdentifier>),
Program(arena::Box<'alloc, Program<'alloc>>),
@ -801,6 +802,15 @@ impl<'alloc> StackValueItem<'alloc> for Parameter<'alloc> {
}
}
impl<'alloc> StackValueItem<'alloc> for PrivateFieldAssignmentTarget<'alloc> {
fn to_ast(sv: StackValue<'alloc>) -> AstResult<'alloc, Self> {
match sv {
StackValue::PrivateFieldAssignmentTarget(v) => Ok(v),
_ => Err(format!("StackValue expected PrivateFieldAssignmentTarget, got {:?}", sv)),
}
}
}
impl<'alloc> StackValueItem<'alloc> for PrivateFieldExpression<'alloc> {
fn to_ast(sv: StackValue<'alloc>) -> AstResult<'alloc, Self> {
match sv {
@ -1625,6 +1635,13 @@ impl<'alloc> TryIntoStack<'alloc> for arena::Box<'alloc, Parameter<'alloc>> {
}
}
impl<'alloc> TryIntoStack<'alloc> for arena::Box<'alloc, PrivateFieldAssignmentTarget<'alloc>> {
type Error = Infallible;
fn try_into_stack(self) -> Result<StackValue<'alloc>, Infallible> {
Ok(StackValue::PrivateFieldAssignmentTarget(self))
}
}
impl<'alloc> TryIntoStack<'alloc> for arena::Box<'alloc, PrivateFieldExpression<'alloc>> {
type Error = Infallible;
fn try_into_stack(self) -> Result<StackValue<'alloc>, Infallible> {

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"500dc18629fd32dd7019a7967535d6fc53bf94dc7e6c305be46f4040d47cac9e","src/builder.rs":"685b6c95b929c6af9e4441ff47a8f836c16a7c7b68ffcd1311fe03116eb2a8d5","src/data.rs":"a3cf1e7b1a96a619bcb8bd87f39bbd44dda0fc554c70a5d4d45b70eb03e69401","src/free_name_tracker.rs":"30c55b85d61bbcc43ce4ade4bdf93d01369b80e486589df89c3175445a491b2a","src/lib.rs":"16b1fe6659f977a547df15922b2864b81c4d452ad198409db4374e88c6df68bc","src/pass.rs":"2ae0c72e428932c9f1c5e6b37043e93fcf686dde0b9f26b60d1a710bcd73852d"},"package":null}
{"files":{"Cargo.toml":"500dc18629fd32dd7019a7967535d6fc53bf94dc7e6c305be46f4040d47cac9e","src/builder.rs":"cccbb9f6d0a33bfb13b9362590e2266b172786beb784338eeca9cdbf2b936d13","src/data.rs":"a3cf1e7b1a96a619bcb8bd87f39bbd44dda0fc554c70a5d4d45b70eb03e69401","src/free_name_tracker.rs":"30c55b85d61bbcc43ce4ade4bdf93d01369b80e486589df89c3175445a491b2a","src/lib.rs":"16b1fe6659f977a547df15922b2864b81c4d452ad198409db4374e88c6df68bc","src/pass.rs":"2ae0c72e428932c9f1c5e6b37043e93fcf686dde0b9f26b60d1a710bcd73852d"},"package":null}

View File

@ -345,7 +345,10 @@ struct PossiblyAnnexBFunction {
name: SourceAtomSetIndex,
owner_scope_index: ScopeIndex,
binding_index: BindingIndex,
stencil_index: ScriptStencilIndex,
/// Index of the script in the list of `functions` in the
/// `FunctionScriptStencilBuilder`.
script_index: ScriptStencilIndex,
}
#[derive(Debug)]
@ -365,14 +368,14 @@ impl PossiblyAnnexBFunctionList {
name: SourceAtomSetIndex,
owner_scope_index: ScopeIndex,
binding_index: BindingIndex,
stencil_index: ScriptStencilIndex,
script_index: ScriptStencilIndex,
) {
if let Some(functions) = self.functions.get_mut(&name) {
functions.push(PossiblyAnnexBFunction {
name,
owner_scope_index,
binding_index,
stencil_index,
script_index,
});
return;
}
@ -382,7 +385,7 @@ impl PossiblyAnnexBFunctionList {
name,
owner_scope_index,
binding_index,
stencil_index,
script_index,
});
self.functions.insert(name, functions);
}
@ -406,7 +409,7 @@ impl PossiblyAnnexBFunctionList {
_ => panic!("unexpected scope pointed by Annex B function"),
}
function_declaration_properties.mark_annex_b(fun.stencil_index);
function_declaration_properties.mark_annex_b(fun.script_index);
}
}
}
@ -1037,10 +1040,16 @@ struct FunctionParametersScopeBuilder {
has_parameter_expressions: bool,
scope_index: ScopeIndex,
/// Index of the script in the list of `functions` in the
/// `FunctionScriptStencilBuilder`.
script_index: ScriptStencilIndex,
has_direct_eval: bool,
}
impl FunctionParametersScopeBuilder {
fn new(scope_index: ScopeIndex, is_arrow: bool) -> Self {
fn new(scope_index: ScopeIndex, is_arrow: bool, script_index: ScriptStencilIndex) -> Self {
let mut base = BaseScopeBuilder::new();
if !is_arrow {
@ -1073,6 +1082,8 @@ impl FunctionParametersScopeBuilder {
simple_parameter_list: true,
has_parameter_expressions: false,
scope_index,
script_index,
has_direct_eval: false,
}
}
@ -1394,6 +1405,7 @@ impl FunctionParametersScopeBuilder {
self.non_positional_parameter_names.len(),
function_max_var_names_count,
enclosing,
self.script_index,
);
// FunctionDeclarationInstantiation ( func, argumentsList )
@ -1509,6 +1521,10 @@ impl FunctionParametersScopeBuilder {
else {
debug_assert!(has_extra_body_var_scope);
// In non-strict mode code, direct `eval` can extend function's
// scope.
let function_has_extensible_scope = !self.strict && self.has_direct_eval;
// Step 28.a. NOTE: A separate Environment Record is needed to
// ensure that closures created by expressions in the
// formal parameter list do not have visibility of
@ -1520,6 +1536,7 @@ impl FunctionParametersScopeBuilder {
// varEnv.
let mut data = VarScopeData::new(
body_scope_builder.var_names.len(),
function_has_extensible_scope,
/* encloding= */ self.scope_index,
);
@ -1891,6 +1908,19 @@ impl ScopeBuilderStack {
panic!("There should be at least one scope on the stack");
}
fn maybe_innermost_function_parameters<'a>(
&'a mut self,
) -> Option<&'a mut FunctionParametersScopeBuilder> {
for builder in self.stack.iter_mut().rev() {
match builder {
ScopeBuilder::FunctionParameters(builder) => return Some(builder),
_ => {}
}
}
None
}
fn innermost_lexical<'a>(&'a mut self) -> &'a mut ScopeBuilder {
// FIXME: If there's no other case, merge with innermost.
self.innermost()
@ -1909,6 +1939,10 @@ impl ScopeBuilderStack {
.get_scope_index()
}
fn current_scope_index_or_empty_global(&self) -> ScopeIndex {
self.current_scope_index()
}
fn push_global(&mut self, builder: GlobalScopeBuilder) {
self.stack.push(ScopeBuilder::Global(builder))
}
@ -2060,7 +2094,12 @@ impl FunctionScriptStencilBuilder {
///
/// This creates `ScriptStencil` for the function, and adds it to
/// enclosing function if exists.
fn enter<T>(&mut self, fun: &T, syntax_kind: FunctionSyntaxKind) -> ScriptStencilIndex
fn enter<T>(
&mut self,
fun: &T,
syntax_kind: FunctionSyntaxKind,
enclosing_scope_index: ScopeIndex,
) -> ScriptStencilIndex
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
@ -2084,6 +2123,7 @@ impl FunctionScriptStencilBuilder {
syntax_kind.is_generator(),
syntax_kind.is_async(),
FunctionFlags::interpreted(syntax_kind),
enclosing_scope_index,
);
let index = self.functions.push(function_stencil);
self.function_stencil_indices.insert(fun, index);
@ -2114,6 +2154,15 @@ impl FunctionScriptStencilBuilder {
self.function_stack.pop();
}
/// Returns the current function's index.
/// Panics if no current function is found.
fn current_index(&self) -> ScriptStencilIndex {
*self
.function_stack
.last()
.expect("should be inside function")
}
/// Returns a immutable reference to the innermost function. None otherwise.
fn maybe_current<'a>(&'a self) -> Option<&'a ScriptStencil> {
let maybe_index = self.function_stack.last();
@ -2411,6 +2460,7 @@ impl ScopeDataMapBuilder {
let fun_index = self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::function_declaration(is_generator, is_async),
self.builder_stack.current_scope_index_or_empty_global(),
);
match self.builder_stack.innermost_lexical() {
@ -2451,6 +2501,7 @@ impl ScopeDataMapBuilder {
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::function_expression(is_generator, is_async),
self.builder_stack.current_scope_index_or_empty_global(),
);
}
@ -2473,8 +2524,11 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
self.function_stencil_builder
.enter(fun, FunctionSyntaxKind::method(is_generator, is_async));
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::method(is_generator, is_async),
self.builder_stack.current_scope_index_or_empty_global(),
);
}
pub fn after_method<T>(&mut self, fun: &T)
@ -2488,8 +2542,11 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
self.function_stencil_builder
.enter(fun, FunctionSyntaxKind::getter());
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::getter(),
self.builder_stack.current_scope_index_or_empty_global(),
);
}
pub fn on_getter_parameter<T>(&mut self, param: &T)
@ -2511,8 +2568,11 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
self.function_stencil_builder
.enter(fun, FunctionSyntaxKind::setter());
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::setter(),
self.builder_stack.current_scope_index_or_empty_global(),
);
}
pub fn before_setter_parameter<T>(&mut self, param: &T)
@ -2538,8 +2598,11 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
self.function_stencil_builder
.enter(params, FunctionSyntaxKind::arrow(is_async));
self.function_stencil_builder.enter(
params,
FunctionSyntaxKind::arrow(is_async),
self.builder_stack.current_scope_index_or_empty_global(),
);
}
pub fn after_arrow_function<T>(&mut self, body: &T)
@ -2559,7 +2622,11 @@ impl ScopeDataMapBuilder {
let is_arrow = self.function_stencil_builder.current().is_arrow_function();
let builder = FunctionParametersScopeBuilder::new(index, is_arrow);
let builder = FunctionParametersScopeBuilder::new(
index,
is_arrow,
self.function_stencil_builder.current_index(),
);
self.non_global.insert(params, index);
self.builder_stack.push_function_parameters(builder);
@ -2777,6 +2844,20 @@ impl ScopeDataMapBuilder {
self.scopes
.populate(lexical_scope_index, scope_data_set.lexical);
}
#[allow(dead_code)]
pub fn on_direct_eval(&mut self) {
if let Some(parameter_scope_builder) =
self.builder_stack.maybe_innermost_function_parameters()
{
parameter_scope_builder.has_direct_eval = true;
}
self.builder_stack
.innermost()
.base_mut()
.bindings_accessed_dynamically = true;
}
}
pub struct ScopeDataMapAndScriptStencilList {

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"f5439990185662ab31de12c07ef0c842438e3207bdfecc4fa6a9e4d4bea8a0d3","src/bytecode_offset.rs":"2aa7ba8c3cfbbd832092e65b599ab1c5a28d784ccc65d9e351bba656421b9a69","src/copy/AsyncFunctionResolveKind.h":"3851ecbb4728257595dd6e900749d1d8e02558574c00424a7ff0e3ca007fa6ec","src/copy/BytecodeFormatFlags.h":"f495a25e113a92071514d17483fbd821c9e9b24b164cae1c6b5ad2dd7491a945","src/copy/CheckIsObjectKind.h":"8f0e112396d966c9221a743d353f62671e04cdace7dd49a59898d94ba0f621b7","src/copy/FunctionFlags.h":"e2578c5747f298d03d81fa2b248e4b36800ac8d42d9d6ef801ebb1bc13bc3960","src/copy/FunctionPrefixKind.h":"f540a5c646a519b2d61aa27e4be865e08a31438def00ad5ba4ba2982ad1f2275","src/copy/GeneratorAndAsyncKind.h":"301668ce705970a51abfa94f89fd5db29ef5f129525110860e9e9bf7586ef187","src/copy/GeneratorResumeKind.h":"9e3cd9dc9c7f50937c6c45d73ec092dbfd92c4b56818ae6d1504bcd77078d0a6","src/copy/Opcodes.h":"6663f2ae6251b341352c0f75bea2a2c27babd555768e74727ecf524b1e83563e","src/copy/SourceNotes.h":"1e467f4e63d6f40a428e257fecd210bd14664516adf75a45cb17ab02ccd65fd8","src/copy/StencilEnums.h":"e5a1db4af868fd65591ed97594f7aa9a4cde79194da0cabd62b34e950b3b10b4","src/copy/Symbol.h":"603985e8c92c94e021baf3a0114dd384035eda60827016d310f1507c8396a45e","src/copy/ThrowMsgKind.h":"da805756961d81a2b50aeb391a02fd59a0aa39a9e3eb6aae21b423b15875ab30","src/env_coord.rs":"0be36a1bd307f5586affe0f3046d8b2ab2f5382b41b7b7bfb364b97d16a7c410","src/frame_slot.rs":"b20c81d67c572f20d06d493b211cd3eaa0432a8294541583643b82df3af2f813","src/function.rs":"b841ba6f7ecee3a38a136ef9902fd1d4a3f6b0aa96d1e8d8340e7f26dead75d9","src/gcthings.rs":"e2cb24050e30f423d0fe7bdbf2295854c4b8d3d76a202d47ea4c4e5586c4db58","src/lib.rs":"b003e085344277d2987ef492dc513048e8ec83217850a22ba7ca06ac01bc9b5c","src/opcode.rs":"aabbeae9df11564d3275414497ff99499f3d297f6d062351180f77cb23e588a0","src/opcode_info.rs":"a27c6d5602f5ecdcc882a0167614bc7a7754d958124941b4c1c0cdc2b0a894f1","src/regexp.rs":"7436cf545b990bec7dcc51ff28d67deaca9d4ce894468fdad0dd44b2
5c571cf2","src/result.rs":"58a64e0619c4ba4c6b7d8834208698a8f1639ab1771f7ae22272f81fe3611d63","src/scope.rs":"f3898f6ce9dbe45f0d346dbc93fcd801c74445aa154640bb6071c964c4758cdc","src/scope_notes.rs":"9947ba5aba3097321c76adcb5648a478e4a67e088fdc1e01511e51c4ad41a9f3","src/script.rs":"77138bdd47afafb4e70125face41244bc2b3f681c2f41458833d26146c27b3ab"},"package":null}
{"files":{"Cargo.toml":"f5439990185662ab31de12c07ef0c842438e3207bdfecc4fa6a9e4d4bea8a0d3","src/bytecode_offset.rs":"2aa7ba8c3cfbbd832092e65b599ab1c5a28d784ccc65d9e351bba656421b9a69","src/copy/AsyncFunctionResolveKind.h":"3851ecbb4728257595dd6e900749d1d8e02558574c00424a7ff0e3ca007fa6ec","src/copy/BytecodeFormatFlags.h":"f495a25e113a92071514d17483fbd821c9e9b24b164cae1c6b5ad2dd7491a945","src/copy/CheckIsObjectKind.h":"8f0e112396d966c9221a743d353f62671e04cdace7dd49a59898d94ba0f621b7","src/copy/FunctionFlags.h":"e2578c5747f298d03d81fa2b248e4b36800ac8d42d9d6ef801ebb1bc13bc3960","src/copy/FunctionPrefixKind.h":"f540a5c646a519b2d61aa27e4be865e08a31438def00ad5ba4ba2982ad1f2275","src/copy/GeneratorAndAsyncKind.h":"301668ce705970a51abfa94f89fd5db29ef5f129525110860e9e9bf7586ef187","src/copy/GeneratorResumeKind.h":"9e3cd9dc9c7f50937c6c45d73ec092dbfd92c4b56818ae6d1504bcd77078d0a6","src/copy/Opcodes.h":"6663f2ae6251b341352c0f75bea2a2c27babd555768e74727ecf524b1e83563e","src/copy/SourceNotes.h":"1e467f4e63d6f40a428e257fecd210bd14664516adf75a45cb17ab02ccd65fd8","src/copy/StencilEnums.h":"e5a1db4af868fd65591ed97594f7aa9a4cde79194da0cabd62b34e950b3b10b4","src/copy/Symbol.h":"603985e8c92c94e021baf3a0114dd384035eda60827016d310f1507c8396a45e","src/copy/ThrowMsgKind.h":"da805756961d81a2b50aeb391a02fd59a0aa39a9e3eb6aae21b423b15875ab30","src/env_coord.rs":"0be36a1bd307f5586affe0f3046d8b2ab2f5382b41b7b7bfb364b97d16a7c410","src/frame_slot.rs":"b20c81d67c572f20d06d493b211cd3eaa0432a8294541583643b82df3af2f813","src/function.rs":"b841ba6f7ecee3a38a136ef9902fd1d4a3f6b0aa96d1e8d8340e7f26dead75d9","src/gcthings.rs":"e2cb24050e30f423d0fe7bdbf2295854c4b8d3d76a202d47ea4c4e5586c4db58","src/lib.rs":"b003e085344277d2987ef492dc513048e8ec83217850a22ba7ca06ac01bc9b5c","src/opcode.rs":"aabbeae9df11564d3275414497ff99499f3d297f6d062351180f77cb23e588a0","src/opcode_info.rs":"a27c6d5602f5ecdcc882a0167614bc7a7754d958124941b4c1c0cdc2b0a894f1","src/regexp.rs":"7436cf545b990bec7dcc51ff28d67deaca9d4ce894468fdad0dd44b2
5c571cf2","src/result.rs":"58a64e0619c4ba4c6b7d8834208698a8f1639ab1771f7ae22272f81fe3611d63","src/scope.rs":"0e6b287226d92b7cdb914dc2513865ceac8fc0bdd49d4a1806652cd28c466044","src/scope_notes.rs":"9947ba5aba3097321c76adcb5648a478e4a67e088fdc1e01511e51c4ad41a9f3","src/script.rs":"52f1e97bcf9d1d7c11bb6e42b52f0a2a4ced1059e8c0c0296e1dccb31de646c0"},"package":null}

View File

@ -248,6 +248,8 @@ pub struct VarScopeData {
/// and VarScope::Data.nextFrameSlot is calculated there.
pub first_frame_slot: FrameSlot,
pub function_has_extensible_scope: bool,
/// ScopeIndex of the enclosing scope.
///
/// A parameter for ScopeCreationData::create.
@ -255,13 +257,18 @@ pub struct VarScopeData {
}
impl VarScopeData {
pub fn new(var_count: usize, enclosing: ScopeIndex) -> Self {
pub fn new(
var_count: usize,
function_has_extensible_scope: bool,
enclosing: ScopeIndex,
) -> Self {
let capacity = var_count;
Self {
base: BaseScopeData::new(capacity),
// Set to the correct value in EmitterScopeStack::enter_lexical.
first_frame_slot: FrameSlot::new(0),
function_has_extensible_scope,
enclosing,
}
}
@ -446,10 +453,10 @@ pub struct FunctionScopeData {
/// use Vec of Option<BindingName>, instead of BindingName like others.
pub base: BaseScopeData<Option<BindingName>>,
has_parameter_exprs: bool,
pub has_parameter_exprs: bool,
non_positional_formal_start: usize,
var_start: usize,
pub non_positional_formal_start: usize,
pub var_start: usize,
/// The first frame slot of this scope.
///
@ -464,6 +471,8 @@ pub struct FunctionScopeData {
///
/// A parameter for ScopeCreationData::create.
pub enclosing: ScopeIndex,
pub function_index: ScriptStencilIndex,
}
impl FunctionScopeData {
@ -473,6 +482,7 @@ impl FunctionScopeData {
non_positional_formal_start: usize,
max_var_count: usize,
enclosing: ScopeIndex,
function_index: ScriptStencilIndex,
) -> Self {
let capacity = positional_parameter_count + non_positional_formal_start + max_var_count;
@ -484,6 +494,7 @@ impl FunctionScopeData {
// Set to the correct value in EmitterScopeStack::enter_function.
first_frame_slot: FrameSlot::new(0),
enclosing,
function_index,
}
}
}

View File

@ -3,6 +3,7 @@
use crate::frame_slot::FrameSlot;
use crate::function::FunctionFlags;
use crate::gcthings::GCThing;
use crate::scope::ScopeIndex;
use crate::scope_notes::ScopeNote;
use ast::source_atom_set::SourceAtomSetIndex;
@ -262,6 +263,8 @@ pub struct ScriptStencil {
pub fun_name: Option<SourceAtomSetIndex>,
pub fun_nargs: u16,
pub fun_flags: FunctionFlags,
pub lazy_function_enclosing_scope_index: Option<ScopeIndex>,
}
impl ScriptStencil {
@ -278,6 +281,7 @@ impl ScriptStencil {
fun_name: None,
fun_nargs: 0,
fun_flags: FunctionFlags::empty(),
lazy_function_enclosing_scope_index: None,
}
}
@ -287,6 +291,7 @@ impl ScriptStencil {
is_generator: bool,
is_async: bool,
fun_flags: FunctionFlags,
lazy_function_enclosing_scope_index: ScopeIndex,
) -> Self {
let mut flags = ImmutableScriptFlagsEnum::IsFunction as u32;
if is_generator {
@ -304,6 +309,7 @@ impl ScriptStencil {
fun_name,
fun_nargs: 0,
fun_flags,
lazy_function_enclosing_scope_index: Some(lazy_function_enclosing_scope_index),
}
}

File diff suppressed because one or more lines are too long

View File

@ -307,6 +307,8 @@ OptionalChain[Yield, Await] :
=> optional_computed_member_expr_tail($0, $2, $3)
`?.` IdentifierName
=> optional_static_member_expr_tail($0, $1)
`?.` PrivateIdentifier
=> optional_private_field_member_expr_tail($0, $1)
`?.` Arguments[?Yield, ?Await]
=> optional_call_expr_tail($0, $1)
`?.` TemplateLiteral[?Yield, ?Await, +Tagged]
@ -315,6 +317,8 @@ OptionalChain[Yield, Await] :
=> optional_computed_member_expr($0, $2, $3)
OptionalChain[?Yield, ?Await] `.` IdentifierName
=> optional_static_member_expr($0, $2)
OptionalChain[?Yield, ?Await] `.` PrivateIdentifier
=> optional_private_field_member_expr($0, $2)
OptionalChain[?Yield, ?Await] Arguments[?Yield, ?Await]
=> optional_call_expr($0, $1)
OptionalChain[?Yield, ?Await] TemplateLiteral[?Yield, ?Await, +Tagged]

View File

@ -152,6 +152,15 @@ class Action:
the new indexes"""
return self
def fold_by_destination(self, actions: typing.List[Action]) -> typing.List[Action]:
"""If after rewriting state indexes, multiple condition are reaching the same
destination state, we attempt to fold them by destination. Not
implementing this function can lead to the introduction of inconsistent
states, as the conditions might be redundant. """
# By default do nothing.
return actions
def __eq__(self, other: object) -> bool:
if self.__class__ != other.__class__:
return False
@ -191,7 +200,7 @@ class Replay(Action):
this does not Shift a term given as argument as the replay action should
always be garanteed and that we want to maximize the sharing of code when
possible."""
__slots__ = ['replay_dest']
__slots__ = ['replay_steps']
replay_steps: typing.Tuple[StateId, ...]
@ -404,6 +413,15 @@ class FilterStates(Action):
states = list(state_map[s] for s in self.states)
return FilterStates(states)
def fold_by_destination(self, actions: typing.List[Action]) -> typing.List[Action]:
states: typing.List[StateId] = []
for a in actions:
if not isinstance(a, FilterStates):
# Do nothing in case the state is inconsistent.
return actions
states.extend(a.states)
return [FilterStates(states)]
def __str__(self) -> str:
return "FilterStates({})".format(self.states)

View File

@ -19,19 +19,25 @@ def shifted_path_to(pt: ParseTable, n: int, right_of: Path) -> typing.Iterator[P
state = right_of[0].src
assert isinstance(state, int)
for edge in pt.states[state].backedges:
assert pt.term_is_shifted(edge.term)
if isinstance(edge.term, Action) and edge.term.update_stack():
# Some Action such as Unwind and Replay are actions which are
# forking the execution state from the parse stable state.
# While computing the shifted_path_to, we only iterate over the
# parse table states.
continue
assert pt.term_is_shifted(edge.term)
if pt.term_is_stacked(edge.term):
s_n = n - 1
if n == 0:
continue
else:
s_n = n
if n == 0 and not pt.assume_inconsistent:
# If the parse table is no longer inconsistent, then there is
# no point on walking back on actions as they are expected to
# be resolved. Thus we cannot have the restrictions issue that
# we have on inconsistent parse tables.
continue
from_edge = Edge(edge.src, edge.term)
for path in shifted_path_to(pt, s_n, [from_edge] + right_of):
yield path

View File

@ -11,6 +11,7 @@ def state_43_actions(parser, lexer):
replay = [StateTermValue(0, Nt(InitNt(goal=Nt('grammar'))), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_86_actions(parser, lexer)
return
def state_44_actions(parser, lexer):
@ -20,6 +21,7 @@ def state_44_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_defs'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_84_actions(parser, lexer)
return
def state_45_actions(parser, lexer):
@ -29,6 +31,7 @@ def state_45_actions(parser, lexer):
replay = [StateTermValue(0, Nt('token_defs'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_85_actions(parser, lexer)
return
def state_46_actions(parser, lexer):
@ -38,6 +41,7 @@ def state_46_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_defs'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_84_actions(parser, lexer)
return
def state_47_actions(parser, lexer):
@ -47,6 +51,7 @@ def state_47_actions(parser, lexer):
replay = [StateTermValue(0, Nt('token_defs'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_85_actions(parser, lexer)
return
def state_48_actions(parser, lexer):
@ -56,6 +61,7 @@ def state_48_actions(parser, lexer):
replay = [StateTermValue(0, Nt(InitNt(goal=Nt('grammar'))), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_86_actions(parser, lexer)
return
def state_49_actions(parser, lexer):
@ -65,6 +71,7 @@ def state_49_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-4:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_50_actions(parser, lexer):
@ -74,6 +81,7 @@ def state_50_actions(parser, lexer):
replay = [StateTermValue(0, Nt('prods'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_98_actions(parser, lexer)
return
def state_51_actions(parser, lexer):
@ -83,6 +91,7 @@ def state_51_actions(parser, lexer):
replay = [StateTermValue(0, Nt('terms'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_101_actions(parser, lexer)
return
def state_52_actions(parser, lexer):
@ -92,6 +101,7 @@ def state_52_actions(parser, lexer):
replay = [StateTermValue(0, Nt('symbol'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_95_actions(parser, lexer)
return
def state_53_actions(parser, lexer):
@ -101,6 +111,7 @@ def state_53_actions(parser, lexer):
replay = [StateTermValue(0, Nt('symbol'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_95_actions(parser, lexer)
return
def state_54_actions(parser, lexer):
@ -110,6 +121,7 @@ def state_54_actions(parser, lexer):
replay = [StateTermValue(0, Nt('prods'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_98_actions(parser, lexer)
return
def state_55_actions(parser, lexer):
@ -119,6 +131,7 @@ def state_55_actions(parser, lexer):
replay = [StateTermValue(0, Nt('token_def'), value, False)]
del parser.stack[-4:]
parser.shift_list(replay, lexer)
state_104_actions(parser, lexer)
return
def state_56_actions(parser, lexer):
@ -128,6 +141,7 @@ def state_56_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-5:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_57_actions(parser, lexer):
@ -137,6 +151,7 @@ def state_57_actions(parser, lexer):
replay = [StateTermValue(0, Nt('prods'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_98_actions(parser, lexer)
return
def state_58_actions(parser, lexer):
@ -146,6 +161,7 @@ def state_58_actions(parser, lexer):
replay = [StateTermValue(0, Nt('prod'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_105_actions(parser, lexer)
return
def state_59_actions(parser, lexer):
@ -155,6 +171,7 @@ def state_59_actions(parser, lexer):
replay = [StateTermValue(0, Nt('terms'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_101_actions(parser, lexer)
return
def state_60_actions(parser, lexer):
@ -164,6 +181,7 @@ def state_60_actions(parser, lexer):
replay = [StateTermValue(0, Nt('term'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_118_actions(parser, lexer)
return
def state_61_actions(parser, lexer):
@ -173,6 +191,7 @@ def state_61_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-5:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_62_actions(parser, lexer):
@ -182,6 +201,7 @@ def state_62_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-5:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_63_actions(parser, lexer):
@ -191,6 +211,7 @@ def state_63_actions(parser, lexer):
replay = [StateTermValue(0, Nt('token_def'), value, False)]
del parser.stack[-5:]
parser.shift_list(replay, lexer)
state_104_actions(parser, lexer)
return
def state_64_actions(parser, lexer):
@ -200,6 +221,7 @@ def state_64_actions(parser, lexer):
replay = [StateTermValue(0, Nt('prod'), value, False)]
del parser.stack[-3:]
parser.shift_list(replay, lexer)
state_105_actions(parser, lexer)
return
def state_65_actions(parser, lexer):
@ -209,6 +231,7 @@ def state_65_actions(parser, lexer):
replay = [StateTermValue(0, Nt('reducer'), value, False)]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_106_actions(parser, lexer)
return
def state_66_actions(parser, lexer):
@ -218,6 +241,7 @@ def state_66_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_114_actions(parser, lexer)
return
def state_67_actions(parser, lexer):
@ -227,6 +251,7 @@ def state_67_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_114_actions(parser, lexer)
return
def state_68_actions(parser, lexer):
@ -236,6 +261,7 @@ def state_68_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-6:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_69_actions(parser, lexer):
@ -245,6 +271,7 @@ def state_69_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-6:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_70_actions(parser, lexer):
@ -254,6 +281,7 @@ def state_70_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-6:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_71_actions(parser, lexer):
@ -263,6 +291,7 @@ def state_71_actions(parser, lexer):
replay = [StateTermValue(0, Nt('nt_def'), value, False)]
del parser.stack[-7:]
parser.shift_list(replay, lexer)
state_112_actions(parser, lexer)
return
def state_72_actions(parser, lexer):
@ -272,6 +301,7 @@ def state_72_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr'), value, False)]
del parser.stack[-3:]
parser.shift_list(replay, lexer)
state_114_actions(parser, lexer)
return
def state_73_actions(parser, lexer):
@ -281,6 +311,7 @@ def state_73_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr_args'), value, False)]
del parser.stack[-1:]
parser.shift_list(replay, lexer)
state_115_actions(parser, lexer)
return
def state_74_actions(parser, lexer):
@ -290,6 +321,7 @@ def state_74_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr'), value, False)]
del parser.stack[-4:]
parser.shift_list(replay, lexer)
state_114_actions(parser, lexer)
return
def state_75_actions(parser, lexer):
@ -299,6 +331,7 @@ def state_75_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr'), value, False)]
del parser.stack[-4:]
parser.shift_list(replay, lexer)
state_114_actions(parser, lexer)
return
def state_76_actions(parser, lexer):
@ -308,6 +341,7 @@ def state_76_actions(parser, lexer):
replay = [StateTermValue(0, Nt('expr_args'), value, False)]
del parser.stack[-3:]
parser.shift_list(replay, lexer)
state_115_actions(parser, lexer)
return
def state_77_actions(parser, lexer):
@ -318,6 +352,7 @@ def state_77_actions(parser, lexer):
replay = replay + parser.stack[-1:]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_116_actions(parser, lexer)
return
def state_78_actions(parser, lexer):
@ -328,6 +363,7 @@ def state_78_actions(parser, lexer):
replay = replay + parser.stack[-1:]
del parser.stack[-3:]
parser.shift_list(replay, lexer)
state_116_actions(parser, lexer)
return
def state_79_actions(parser, lexer):
@ -338,180 +374,513 @@ def state_79_actions(parser, lexer):
replay = replay + parser.stack[-1:]
del parser.stack[-2:]
parser.shift_list(replay, lexer)
state_118_actions(parser, lexer)
return
def state_80_actions(parser, lexer):
value = None
parser.replay_action(23)
top = parser.stack.pop()
top = StateTermValue(23, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_81_actions(parser, lexer):
value = None
parser.replay_action(12)
top = parser.stack.pop()
top = StateTermValue(12, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_82_actions(parser, lexer):
value = None
parser.replay_action(13)
top = parser.stack.pop()
top = StateTermValue(13, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_83_actions(parser, lexer):
value = None
parser.replay_action(11)
top = parser.stack.pop()
top = StateTermValue(11, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_84_actions(parser, lexer):
value = None
if parser.top_state() in [10]:
state_81_actions(parser, lexer)
return
if parser.top_state() in [11]:
state_82_actions(parser, lexer)
return
def state_85_actions(parser, lexer):
value = None
if parser.top_state() in [10]:
state_83_actions(parser, lexer)
return
def state_86_actions(parser, lexer):
value = None
if parser.top_state() in [10]:
state_80_actions(parser, lexer)
return
def state_87_actions(parser, lexer):
value = None
parser.replay_action(44)
state_44_actions(parser, lexer)
return
def state_88_actions(parser, lexer):
value = None
parser.replay_action(46)
state_46_actions(parser, lexer)
return
def state_89_actions(parser, lexer):
value = None
parser.replay_action(4)
top = parser.stack.pop()
top = StateTermValue(4, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_90_actions(parser, lexer):
value = None
parser.replay_action(5)
top = parser.stack.pop()
top = StateTermValue(5, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_91_actions(parser, lexer):
value = None
parser.replay_action(6)
top = parser.stack.pop()
top = StateTermValue(6, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_92_actions(parser, lexer):
value = None
parser.replay_action(7)
top = parser.stack.pop()
top = StateTermValue(7, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_93_actions(parser, lexer):
value = None
parser.replay_action(8)
top = parser.stack.pop()
top = StateTermValue(8, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_94_actions(parser, lexer):
value = None
parser.replay_action(9)
top = parser.stack.pop()
top = StateTermValue(9, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_95_actions(parser, lexer):
value = None
if parser.top_state() in [0, 1, 2, 3, 4, 5, 6, 7, 8]:
state_94_actions(parser, lexer)
return
def state_96_actions(parser, lexer):
value = None
parser.replay_action(45)
state_45_actions(parser, lexer)
return
def state_97_actions(parser, lexer):
value = None
parser.replay_action(47)
state_47_actions(parser, lexer)
return
def state_98_actions(parser, lexer):
value = None
if parser.top_state() in [0]:
state_89_actions(parser, lexer)
return
if parser.top_state() in [1]:
state_90_actions(parser, lexer)
return
if parser.top_state() in [2]:
state_91_actions(parser, lexer)
return
if parser.top_state() in [3]:
state_92_actions(parser, lexer)
return
def state_99_actions(parser, lexer):
value = None
parser.replay_action(50)
state_50_actions(parser, lexer)
return
def state_100_actions(parser, lexer):
value = None
parser.replay_action(57)
state_57_actions(parser, lexer)
return
def state_101_actions(parser, lexer):
value = None
if parser.top_state() in [0, 1, 2, 3, 4, 5, 6, 7]:
state_93_actions(parser, lexer)
return
def state_102_actions(parser, lexer):
value = None
parser.replay_action(51)
state_51_actions(parser, lexer)
return
def state_103_actions(parser, lexer):
value = None
parser.replay_action(59)
state_59_actions(parser, lexer)
return
def state_104_actions(parser, lexer):
value = None
if parser.top_state() in [10]:
state_96_actions(parser, lexer)
return
if parser.top_state() in [11]:
state_97_actions(parser, lexer)
return
def state_105_actions(parser, lexer):
value = None
if parser.top_state() in [0, 1, 2, 3]:
state_99_actions(parser, lexer)
return
if parser.top_state() in [4, 5, 6, 7]:
state_100_actions(parser, lexer)
return
def state_106_actions(parser, lexer):
value = None
if parser.top_state() in [8]:
state_107_actions(parser, lexer)
return
def state_107_actions(parser, lexer):
value = None
parser.replay_action(38)
top = parser.stack.pop()
top = StateTermValue(38, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_108_actions(parser, lexer):
value = None
parser.replay_action(65)
state_65_actions(parser, lexer)
return
def state_109_actions(parser, lexer):
value = None
parser.replay_action(73)
state_73_actions(parser, lexer)
return
def state_110_actions(parser, lexer):
value = None
parser.replay_action(42)
top = parser.stack.pop()
top = StateTermValue(42, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_111_actions(parser, lexer):
value = None
parser.replay_action(76)
state_76_actions(parser, lexer)
return
def state_112_actions(parser, lexer):
value = None
if parser.top_state() in [10, 11]:
state_87_actions(parser, lexer)
return
if parser.top_state() in [12, 13]:
state_88_actions(parser, lexer)
return
def state_113_actions(parser, lexer):
value = None
parser.replay_action(41)
top = parser.stack.pop()
top = StateTermValue(41, top.term, top.value, top.new_line)
parser.stack.append(top)
return
def state_114_actions(parser, lexer):
value = None
if parser.top_state() in [15]:
state_108_actions(parser, lexer)
return
if parser.top_state() in [14]:
state_109_actions(parser, lexer)
return
if parser.top_state() in [16]:
state_110_actions(parser, lexer)
return
if parser.top_state() in [17]:
state_111_actions(parser, lexer)
return
def state_115_actions(parser, lexer):
value = None
if parser.top_state() in [14]:
state_113_actions(parser, lexer)
return
def state_116_actions(parser, lexer):
value = None
if parser.top_state() in [10]:
state_117_actions(parser, lexer)
return
def state_117_actions(parser, lexer):
value = None
parser.replay_action(43)
state_43_actions(parser, lexer)
return
def state_118_actions(parser, lexer):
    # Generated epsilon action: dispatch on the state currently on top of
    # the parser stack; fall through (no-op) when nothing matches.
    value = None
    if parser.top_state() in (0, 1, 2, 3, 4, 5, 6, 7):
        state_102_actions(parser, lexer)
        return
    elif parser.top_state() == 8:
        state_103_actions(parser, lexer)
        return
actions = [
# 0.
{'nt': 2, 'COMMENT': 3, 'goal': 4, 'token': 6, 'var': 7, Nt('grammar'): 43, Nt('nt_defs'): 1, Nt('nt_def'): 44, Nt('token_defs'): 5, Nt('token_def'): 45, Nt(InitNt(goal=Nt('grammar'))): 8},
{'}': 49, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 4, Nt('prod'): 50, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 1.
{End(): 77, 'goal': 4, 'COMMENT': 3, 'nt': 2, Nt('nt_def'): 46},
{'}': 61, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 5, Nt('prod'): 50, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 2.
{'IDENT': 10},
{'}': 62, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 6, Nt('prod'): 50, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 3.
{'nt': 11, 'goal': 12},
{'}': 69, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 7, Nt('prod'): 50, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 4.
{'nt': 13},
{'}': 56, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 5.
{'nt': 2, 'COMMENT': 3, 'goal': 4, 'token': 6, 'var': 7, Nt('nt_defs'): 14, Nt('nt_def'): 44, Nt('token_def'): 47},
{'}': 68, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 6.
{'IDENT': 15},
{'}': 70, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 7.
{'token': 16},
{'}': 71, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 8, Nt('term'): 51, Nt('symbol'): 9},
# 8.
{End(): 48},
{';': 58, 'IDENT': 52, 'STR': 53, '=>': 15, Nt('term'): 59, Nt('symbol'): 9, Nt('reducer'): 38},
# 9.
{},
{'=>': 79, 'STR': 79, 'IDENT': 79, ';': 79, '?': 60, Nt('reducer'): 79, Nt('symbol'): 79, Nt('term'): 79},
# 10.
{'{': 17},
{'nt': 18, 'COMMENT': 19, 'goal': 20, 'token': 21, 'var': 22, Nt('grammar'): 43, Nt('nt_defs'): 12, Nt('nt_def'): 44, Nt('token_defs'): 11, Nt('token_def'): 45, Nt(InitNt(goal=Nt('grammar'))): 23},
# 11.
{'IDENT': 18},
{'nt': 18, 'COMMENT': 19, 'goal': 20, 'token': 21, 'var': 22, Nt('nt_defs'): 13, Nt('nt_def'): 44, Nt('token_def'): 47},
# 12.
{'nt': 19},
{End(): 77, 'goal': 20, 'COMMENT': 19, 'nt': 18, Nt('nt_def'): 46},
# 13.
{'IDENT': 20},
{End(): 78, 'goal': 20, 'COMMENT': 19, 'nt': 18, Nt('nt_def'): 46},
# 14.
{End(): 78, 'goal': 4, 'COMMENT': 3, 'nt': 2, Nt('nt_def'): 46},
{')': 72, 'MATCH': 66, 'IDENT': 39, 'Some': 40, 'None': 67, Nt('expr_args'): 41, Nt('expr'): 73},
# 15.
{'=': 21},
{'MATCH': 66, 'IDENT': 39, 'Some': 40, 'None': 67, Nt('expr'): 65},
# 16.
{'IDENT': 22},
{'MATCH': 66, 'IDENT': 39, 'Some': 40, 'None': 67, Nt('expr'): 42},
# 17.
{'}': 49, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 23, Nt('prod'): 50, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{'MATCH': 66, 'IDENT': 39, 'Some': 40, 'None': 67, Nt('expr'): 76},
# 18.
{'{': 26},
{'IDENT': 25},
# 19.
{'IDENT': 27},
{'nt': 26, 'goal': 27},
# 20.
{'{': 28},
{'nt': 28},
# 21.
{'STR': 29},
{'IDENT': 29},
# 22.
{';': 55},
{'token': 30},
# 23.
{'}': 56, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{End(): 48},
# 24.
{';': 58, 'IDENT': 52, 'STR': 53, '=>': 31, Nt('term'): 59, Nt('symbol'): 25, Nt('reducer'): 30},
{},
# 25.
{'=>': 79, 'STR': 79, 'IDENT': 79, ';': 79, '?': 60, Nt('reducer'): 79, Nt('symbol'): 79, Nt('term'): 79},
{'{': 0},
# 26.
{'}': 61, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 32, Nt('prod'): 50, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{'IDENT': 31},
# 27.
{'{': 33},
{'nt': 32},
# 28.
{'}': 62, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 34, Nt('prod'): 50, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{'IDENT': 33},
# 29.
{';': 63},
{'=': 34},
# 30.
{';': 64},
{'IDENT': 35},
# 31.
{'MATCH': 66, 'IDENT': 35, 'Some': 36, 'None': 67, Nt('expr'): 65},
{'{': 1},
# 32.
{'}': 68, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{'IDENT': 36},
# 33.
{'}': 69, 'IDENT': 52, 'STR': 53, 'COMMENT': 54, Nt('prods'): 37, Nt('prod'): 50, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{'{': 2},
# 34.
{'}': 70, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{'STR': 37},
# 35.
{'(': 38},
{';': 55},
# 36.
{'(': 39},
{'{': 3},
# 37.
{'}': 71, 'IDENT': 52, 'STR': 53, Nt('prod'): 57, Nt('terms'): 24, Nt('term'): 51, Nt('symbol'): 25},
{';': 63},
# 38.
{')': 72, 'MATCH': 66, 'IDENT': 35, 'Some': 36, 'None': 67, Nt('expr_args'): 40, Nt('expr'): 73},
{';': 64},
# 39.
{'MATCH': 66, 'IDENT': 35, 'Some': 36, 'None': 67, Nt('expr'): 41},
{'(': 14},
# 40.
{')': 74, ',': 42},
{'(': 16},
# 41.
{')': 75},
{')': 74, ',': 17},
# 42.
{'MATCH': 66, 'IDENT': 35, 'Some': 36, 'None': 67, Nt('expr'): 76},
{')': 75},
# 43.
@ -661,6 +1030,162 @@ actions = [
state_79_actions,
# 80.
state_80_actions,
# 81.
state_81_actions,
# 82.
state_82_actions,
# 83.
state_83_actions,
# 84.
state_84_actions,
# 85.
state_85_actions,
# 86.
state_86_actions,
# 87.
state_87_actions,
# 88.
state_88_actions,
# 89.
state_89_actions,
# 90.
state_90_actions,
# 91.
state_91_actions,
# 92.
state_92_actions,
# 93.
state_93_actions,
# 94.
state_94_actions,
# 95.
state_95_actions,
# 96.
state_96_actions,
# 97.
state_97_actions,
# 98.
state_98_actions,
# 99.
state_99_actions,
# 100.
state_100_actions,
# 101.
state_101_actions,
# 102.
state_102_actions,
# 103.
state_103_actions,
# 104.
state_104_actions,
# 105.
state_105_actions,
# 106.
state_106_actions,
# 107.
state_107_actions,
# 108.
state_108_actions,
# 109.
state_109_actions,
# 110.
state_110_actions,
# 111.
state_111_actions,
# 112.
state_112_actions,
# 113.
state_113_actions,
# 114.
state_114_actions,
# 115.
state_115_actions,
# 116.
state_116_actions,
# 117.
state_117_actions,
# 118.
state_118_actions,
]
error_codes = [
@ -669,9 +1194,12 @@ error_codes = [
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None,
]
goal_nt_to_init_state = {'grammar': 0}
goal_nt_to_init_state = {'grammar': 10}
class DefaultMethods:
def nt_defs_single(self, x0):

View File

@ -11,7 +11,7 @@ import itertools
from . import types
from .utils import consume, keep_until, split
from .ordered import OrderedSet, OrderedFrozenSet
from .actions import Action, FilterFlag
from .actions import Action, Replay, Reduce, FilterStates, Seq
from .grammar import End, ErrorSymbol, InitNt, Nt
from .rewrites import CanonicalGrammar
from .lr0 import LR0Generator, Term
@ -20,6 +20,9 @@ from .aps import APS, Edge, Path
# StateAndTransitions objects are indexed using a StateId which is an integer.
StateId = int
# Action or ordered sequence of action which have to be performed.
DelayedAction = typing.Union[Action, typing.Tuple[Action, ...]]
class StateAndTransitions:
"""This is one state of the parse table, which has transitions based on
@ -41,6 +44,10 @@ class StateAndTransitions:
# i.e. how we got to this state.
locations: OrderedFrozenSet[str]
# Ordered set of Actions which are pushed to the next state after a
# conflict.
delayed_actions: OrderedFrozenSet[DelayedAction]
# Outgoing edges taken when shifting terminals.
terminals: typing.Dict[str, StateId]
@ -53,10 +60,6 @@ class StateAndTransitions:
# List of epsilon transitions with associated actions.
epsilon: typing.List[typing.Tuple[Action, StateId]]
# Ordered set of Actions which are pushed to the next state after a
# conflict.
delayed_actions: OrderedFrozenSet[Action]
# Set of edges that lead to this state.
backedges: OrderedSet[Edge]
@ -73,7 +76,7 @@ class StateAndTransitions:
self,
index: StateId,
locations: OrderedFrozenSet[str],
delayed_actions: OrderedFrozenSet[Action] = OrderedFrozenSet()
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet()
) -> None:
assert isinstance(locations, OrderedFrozenSet)
assert isinstance(delayed_actions, OrderedFrozenSet)
@ -182,8 +185,15 @@ class StateAndTransitions:
self.errors = {
k: state_map[s] for k, s in self.errors.items()
}
# Multiple actions might jump to the same target, attempt to fold these
# conditions based on having the same target.
epsilon_by_dest = collections.defaultdict(list)
for k, s in self.epsilon:
epsilon_by_dest[state_map[s]].append(k.rewrite_state_indexes(state_map))
self.epsilon = [
(k.rewrite_state_indexes(state_map), state_map[s]) for k, s in self.epsilon
(k, s)
for s, ks in epsilon_by_dest.items()
for k in ks[0].fold_by_destination(ks)
]
self.backedges = OrderedSet(
Edge(state_map[edge.src], apply_on_term(edge.term))
@ -351,10 +361,15 @@ class ParseTable:
# Optimize by removing unused states.
self.remove_all_unreachable_state(verbose, progress)
# TODO: Statically compute replayed terms. (maybe?)
# Replace reduce actions by programmatic stack manipulation.
self.lower_reduce_actions(verbose, progress)
# Fold paths which have the same ending.
self.fold_identical_endings(verbose, progress)
# Group state with similar non-terminal edges close-by, to improve the
# generated Rust code by grouping matched state numbers.
self.group_nonterminal_states(verbose, progress)
# Split shift states from epsilon states.
self.group_epsilon_states(verbose, progress)
# self.group_epsilon_states(verbose, progress)
def save(self, filename: os.PathLike) -> None:
with open(filename, 'wb') as f:
@ -382,6 +397,7 @@ class ParseTable:
for s in self.states:
if s is not None:
s.rewrite_state_indexes(state_map)
self.assert_state_invariants(s)
self.named_goals = [
(nt, state_map[s]) for nt, s in self.named_goals
]
@ -397,7 +413,7 @@ class ParseTable:
def new_state(
self,
locations: OrderedFrozenSet[str],
delayed_actions: OrderedFrozenSet[Action] = OrderedFrozenSet()
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet()
) -> typing.Tuple[bool, StateAndTransitions]:
"""Get or create state with an LR0 location and delayed actions. Returns a tuple
where the first element is whether the element is newly created, and
@ -414,7 +430,7 @@ class ParseTable:
def get_state(
self,
locations: OrderedFrozenSet[str],
delayed_actions: OrderedFrozenSet[Action] = OrderedFrozenSet()
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet()
) -> StateAndTransitions:
"""Like new_state(), but only returns the state without returning whether it is
newly created or not."""
@ -538,6 +554,8 @@ class ParseTable:
for e in src.backedges:
assert e.term is not None
assert self.states[e.src][e.term] == src.index
if not self.assume_inconsistent:
assert not src.is_inconsistent()
def remove_unreachable_states(
self,
@ -636,7 +654,7 @@ class ParseTable:
self.debug_dump()
def term_is_shifted(self, term: typing.Optional[Term]) -> bool:
return not (isinstance(term, Action) and term.update_stack())
return not isinstance(term, Action) or term.follow_edge()
def is_valid_path(
self,
@ -1126,7 +1144,7 @@ class ParseTable:
# Compute the unique name, based on the locations and actions
# which are delayed.
locations: OrderedSet[str] = OrderedSet()
delayed: OrderedSet[Action] = OrderedSet()
delayed: OrderedSet[DelayedAction] = OrderedSet()
new_shift_map: typing.DefaultDict[
Term,
typing.List[typing.Tuple[StateAndTransitions, typing.List[Edge]]]
@ -1314,6 +1332,138 @@ class ParseTable:
self.states = [s for s in self.states if s is not None]
self.rewrite_reordered_state_indexes()
def lower_reduce_actions(self, verbose: bool, progress: bool) -> None:
    """Replace each Reduce action by its explicit programmatic equivalent.

    Every state whose single epsilon action reduces is rewritten into an
    Unwind edge that leads to a FilterStates state, whose out-edges replay
    the already-shifted states (Replay actions) toward the destinations the
    LR automaton would have reached implicitly.
    """
    # Remove Reduce actions and replace them by the programmatic
    # equivalent.
    #
    # This transformation preserves the stack manipulations of the parse
    # table. It only changes it from being implicitly executed by the LR
    # parser, to being explicitly executed with actions.
    #
    # This transformation converts the hard-to-predict load of the shift
    # table into a branch prediction which is potentially easier to
    # predict.
    #
    # A side-effect of this transformation is that it removes the need for
    # replaying non-terminals, thus the backends could safely ignore the
    # ability of the shift function from handling non-terminals.
    if verbose or progress:
        print("Lower Reduce actions.")
    maybe_unreachable_set: OrderedSet[StateId] = OrderedSet()

    def transform() -> typing.Iterator[None]:
        # One pass over all states; yields once per rewritten state so
        # consume() can drive the progress bar.
        for s in self.states:
            term, _ = next(iter(s.epsilon), (None, None))
            if self.term_is_shifted(term):
                continue
            assert len(s.epsilon) == 1
            yield  # progress bar.
            reduce_state = s
            if verbose:
                print("Inlining shift-operation for state {}".format(str(reduce_state)))
            # The reduced_aps should contain all reduced path of the single
            # Reduce action which is present on this state. However, as
            # state of the graph are shared, some reduced paths might follow
            # the same path and reach the same state.
            #
            # This code collect for each replayed path, the tops of the
            # stack on top of which these states are replayed.
            aps = APS.start(s.index)
            states_by_replay_term = collections.defaultdict(list)
            # print("Start:\n{}".format(aps.string(name="\titer_aps")))
            # print(s.stable_str(self.states))
            for reduced_aps in aps.shift_next(self):
                # As long as we have elements to replay, we should only
                # have a single path for each reduced path. If the next
                # state contains an action, then we stop here.
                iter_aps = reduced_aps
                next_is_action = self.states[iter_aps.state].epsilon != []
                has_replay = iter_aps.replay != []
                assert next_is_action is False and has_replay is True
                while (not next_is_action) and has_replay:
                    # print("Step {}:\n{}".format(len(iter_aps.history),
                    #                             iter_aps.string(name="\titer_aps")))
                    next_aps = list(iter_aps.shift_next(self))
                    if len(next_aps) == 0:
                        # Note, this might happen as we are adding
                        # lookahead tokens from any successor, we might not
                        # always have a way to replay all tokens, in such
                        # case an error should be produced, but in the mean
                        # time, let's use the shift function as usual.
                        break
                    assert len(next_aps) == 1
                    iter_aps = next_aps[0]
                    next_is_action = self.states[iter_aps.state].epsilon != []
                    has_replay = iter_aps.replay != []
                # print("End at {}:\n{}".format(len(iter_aps.history),
                #                               iter_aps.string(name="\titer_aps")))
                replay_list = [e.src for e in iter_aps.shift]
                assert len(replay_list) >= 2
                replay_term = Replay(replay_list[1:])
                states_by_replay_term[replay_term].append(replay_list[0])

            # Create FilterStates actions.
            filter_by_replay_term = {
                replay_term: FilterStates(states)
                for replay_term, states in states_by_replay_term.items()
            }

            # Convert the Reduce action to an Unwind action.
            reduce_term, _ = next(iter(s.epsilon))
            if isinstance(reduce_term, Reduce):
                unwind_term: Action = reduce_term.unwind
            else:
                assert isinstance(reduce_term, Seq)
                assert isinstance(reduce_term.actions[-1], Reduce)
                unwind_term = Seq(list(reduce_term.actions[:-1]) + [reduce_term.actions[-1].unwind])

            # Remove the old Reduce edge if still present.
            # print("Before:\n{}".format(reduce_state.stable_str(self.states)))
            self.remove_edge(reduce_state, reduce_term, maybe_unreachable_set)

            # Add Unwind action.
            # print("After:\n")
            locations = reduce_state.locations
            delayed: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet(filter_by_replay_term.items())
            is_new, filter_state = self.new_state(locations, delayed)
            self.add_edge(reduce_state, unwind_term, filter_state.index)
            if not is_new:
                # The filter state already existed, so its FilterStates and
                # Replay edges were wired up when it was first created; just
                # sanity-check that they are all present.
                for replay_term, filter_term in filter_by_replay_term.items():
                    assert filter_term in filter_state
                    replay_state = self.states[filter_state[filter_term]]
                    assert replay_term in replay_state
                    # print(replay_state.stable_str(self.states))
                # print(filter_state.stable_str(self.states))
                # print(reduce_state.stable_str(self.states))
                continue

            for replay_term, filter_term in filter_by_replay_term.items():
                dest_idx = replay_term.replay_steps[-1]
                dest = self.states[dest_idx]
                # Add FilterStates action from the filter_state to the replay_state.
                locations = dest.locations
                delayed = OrderedFrozenSet(itertools.chain(dest.delayed_actions, [replay_term]))
                is_new, replay_state = self.new_state(locations, delayed)
                self.add_edge(filter_state, filter_term, replay_state.index)
                assert (not is_new) == (replay_term in replay_state)
                # Add Replay actions from the replay_state to the destination.
                if is_new:
                    dest_idx = replay_term.replay_steps[-1]
                    self.add_edge(replay_state, replay_term, dest_idx)
                # print(replay_state.stable_str(self.states))
                assert not replay_state.is_inconsistent()
            # print(filter_state.stable_str(self.states))
            # print(reduce_state.stable_str(self.states))
            assert not reduce_state.is_inconsistent()
            assert not filter_state.is_inconsistent()

    consume(transform(), progress)
def fold_identical_endings(self, verbose: bool, progress: bool) -> None:
# If 2 states have the same outgoing edges, then we can merge the 2
# states into a single state, and rewrite all the backedges leading to
@ -1337,7 +1487,7 @@ class ParseTable:
src = self.states[edge.src]
old_dest = src[edge_term]
# print("replace {} -- {} --> {}, by {} -- {} --> {}"
# .format(src.index, term, src[term], src.index, term, ref.index))
# .format(src.index, edge_term, src[edge_term], src.index, edge_term, ref.index))
self.replace_edge(src, edge_term, ref.index, maybe_unreachable)
state_map[old_dest] = ref.index
hit = True
@ -1378,6 +1528,28 @@ class ParseTable:
self.states.extend(from_act_action_states)
self.rewrite_reordered_state_indexes()
def group_nonterminal_states(self, verbose: bool, progress: bool) -> None:
    # This function is used to reduce the range of FilterStates values,
    # such that the Rust compiler can compile FilterStates match statements
    # to a table-switch.
    #
    # Count how often each non-terminal appears as an outgoing edge, then
    # sort states so that: action (epsilon) states land last, states with
    # no non-terminal edge just before them, and the remaining states are
    # clustered by which frequent non-terminals they carry.
    nt_freq = collections.Counter(
        nt for state in self.states for nt in state.nonterminals)
    nts_by_freq, _ = zip(*nt_freq.most_common())

    def sort_key(state: "StateAndTransitions") -> float:
        if len(state.epsilon) != 0:
            return 4.0
        if len(state.nonterminals) == 0:
            return 2.0
        # Binary-weighted score: sharing the most frequent non-terminals
        # dominates the ordering; negate so higher scores sort first.
        weight = 1.0
        score = 0.0
        for nt in nts_by_freq:
            if nt in state:
                score += weight
            weight /= 2.0
        return -score

    self.states.sort(key=sort_key)
    self.rewrite_reordered_state_indexes()
def count_shift_states(self) -> int:
    # Number of live states that carry no epsilon (action) transitions,
    # i.e. plain shift states.
    total = 0
    for state in self.states:
        if state is not None and len(state.epsilon) == 0:
            total += 1
    return total