Mirror of https://github.com/rocky/python-uncompyle6.git
Synced 2024-11-23 13:09:49 +00:00

Synch with decompyle3 code a little bit

parent e03f4cfe43
commit 41d1ba31f3
@@ -431,35 +431,39 @@ class Python37BaseParser(PythonParser):
                 "BUILD_TUPLE",
                 "BUILD_TUPLE_UNPACK",
             ):
-                v = token.attr
+                collection_size = token.attr

                 is_LOAD_CLOSURE = False
                 if opname_base == "BUILD_TUPLE":
                     # If is part of a "load_closure", then it is not part of a
                     # "list".
                     is_LOAD_CLOSURE = True
-                    for j in range(v):
+                    for j in range(collection_size):
                         if tokens[i - j - 1].kind != "LOAD_CLOSURE":
                             is_LOAD_CLOSURE = False
                             break
                     if is_LOAD_CLOSURE:
-                        rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * v), opname)
+                        rule = "load_closure ::= %s%s" % (
+                            ("LOAD_CLOSURE " * collection_size),
+                            opname,
+                        )
                         self.add_unique_rule(rule, opname, token.attr, customize)
-                if not is_LOAD_CLOSURE or v == 0:
+                if not is_LOAD_CLOSURE or collection_size == 0:
                     # We do this complicated test to speed up parsing of
                     # pathelogically long literals, especially those over 1024.
-                    build_count = token.attr
-                    thousands = build_count // 1024
-                    thirty32s = (build_count // 32) % 32
+                    thousands = collection_size // 1024
+                    thirty32s = (collection_size // 32) % 32
                     if thirty32s > 0:
                         rule = "expr32 ::=%s" % (" expr" * 32)
-                        self.add_unique_rule(rule, opname_base, build_count, customize)
+                        self.add_unique_rule(
+                            rule, opname_base, collection_size, customize
+                        )
                         pass
                     if thousands > 0:
                         self.add_unique_rule(
                             "expr1024 ::=%s" % (" expr32" * 32),
                             opname_base,
-                            build_count,
+                            collection_size,
                             customize,
                         )
                         pass
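The load_closure branch above simply strings together one LOAD_CLOSURE per element in front of the collection opcode. A small, hypothetical illustration (the count and opname are made-up values, not taken from this commit) of the rule string it generates:

    # Hypothetical values, purely to show the shape of the generated grammar rule.
    collection_size = 2            # token.attr of a BUILD_TUPLE_2 built from closure cells
    opname = "BUILD_TUPLE_2"
    rule = "load_closure ::= %s%s" % (("LOAD_CLOSURE " * collection_size), opname)
    print(rule)
    # load_closure ::= LOAD_CLOSURE LOAD_CLOSURE BUILD_TUPLE_2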
@@ -468,7 +472,7 @@ class Python37BaseParser(PythonParser):
                         ("%s ::= " % collection)
                         + "expr1024 " * thousands
                         + "expr32 " * thirty32s
-                        + "expr " * (build_count % 32)
+                        + "expr " * (collection_size % 32)
                         + opname
                     )
                     self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
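The thousands/thirty32s arithmetic above chunks a long literal into expr1024 and expr32 groups, so a pathologically long literal does not expand into one production with thousands of "expr" symbols. A hypothetical worked example (the size and the "tuple" nonterminal name are illustrative; the real collection name is derived from opname):

    # Illustrative only: how a large BUILD_TUPLE count decomposes into chunks.
    collection_size = 1100
    thousands = collection_size // 1024        # 1
    thirty32s = (collection_size // 32) % 32   # 34 % 32 == 2
    leftover = collection_size % 32            # 12, and 1*1024 + 2*32 + 12 == 1100
    rule = (
        "tuple ::= "
        + "expr1024 " * thousands
        + "expr32 " * thirty32s
        + "expr " * leftover
        + "BUILD_TUPLE_1100"
    )
    # -> "tuple ::= expr1024 expr32 expr32 expr expr ... BUILD_TUPLE_1100"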
@@ -478,8 +482,8 @@ class Python37BaseParser(PythonParser):
                 if token.attr == 2:
                     self.add_unique_rules(
                         [
-                            "expr ::= build_slice2",
-                            "build_slice2 ::= expr expr BUILD_SLICE_2",
+                            "expr ::= slice2",
+                            "slice2 ::= expr expr BUILD_SLICE_2",
                         ],
                         customize,
                     )
@@ -489,8 +493,8 @@ class Python37BaseParser(PythonParser):
                     )
                     self.add_unique_rules(
                         [
-                            "expr ::= build_slice3",
-                            "build_slice3 ::= expr expr expr BUILD_SLICE_3",
+                            "expr ::= slice3",
+                            "slice3 ::= expr expr expr BUILD_SLICE_3",
                         ],
                         customize,
                     )
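The slice2/slice3 nonterminals map onto the two- and three-argument BUILD_SLICE opcodes. If it helps to see where those come from, the standard dis module shows them (sketch assumes CPython 3.7, the bytecode this parser handles):

    import dis

    # a[1:2]   -> ... LOAD_CONST 1, LOAD_CONST 2, BUILD_SLICE 2, BINARY_SUBSCR
    dis.dis(compile("a[1:2]", "<example>", "eval"))
    # a[1:2:3] -> ... LOAD_CONST 1, LOAD_CONST 2, LOAD_CONST 3, BUILD_SLICE 3, BINARY_SUBSCR
    dis.dis(compile("a[1:2:3]", "<example>", "eval"))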
@@ -524,6 +528,7 @@ class Python37BaseParser(PythonParser):

                 if opname == "CALL_FUNCTION" and token.attr == 1:
                     rule = """
                      expr ::= dict_comp
                      dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
                                    GET_ITER CALL_FUNCTION_1
                     classdefdeco1 ::= expr classdefdeco2 CALL_FUNCTION_1
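As I understand it, the dict_comp rule mirrors how CPython 3.7 compiles a dictionary comprehension: the comprehension body is a code object that MAKE_FUNCTION wraps and CALL_FUNCTION 1 then calls with the iterator, and uncompyle6's scanner specializes the LOAD_CONST tokens into names like LOAD_DICTCOMP and LOAD_STR. A minimal way to look at that bytecode (assuming CPython 3.7):

    import dis

    # Shows roughly: LOAD_CONST <code object <dictcomp>>, LOAD_CONST '<dictcomp>',
    # MAKE_FUNCTION 0, LOAD_NAME x, GET_ITER, CALL_FUNCTION 1, RETURN_VALUE
    dis.dis(compile("{k: k for k in x}", "<example>", "eval"))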
@@ -563,6 +568,7 @@ class Python37BaseParser(PythonParser):
                         + "expr " * nak
                         + opname
                     )

                     self.add_unique_rule(rule, opname, token.attr, customize)

                 elif opname == "CONTINUE":
@@ -1252,20 +1258,13 @@ class Python37BaseParser(PythonParser):
         try:
             if fn:
                 return fn(self, lhs, n, rule, ast, tokens, first, last)
-        except:
+        except Exception:
             import sys, traceback

             print(
-                ("Exception in %s %s\n"
-                + "rule: %s\n"
-                + "offsets %s .. %s")
-                % (
-                    fn.__name__,
-                    sys.exc_info()[1],
-                    rule2str(rule),
-                    tokens[first].offset,
-                    tokens[last].offset,
-                )
+                f"Exception in {fn.__name__} {sys.exc_info()[1]}\n"
+                + f"rule: {rule2str(rule)}\n"
+                + f"offsets {tokens[first].offset} .. {tokens[last].offset}"
             )
             print(traceback.print_tb(sys.exc_info()[2], -1))
             raise ParserError(tokens[last], tokens[last].off2int(), self.debug["rules"])