Mirror of https://github.com/rocky/python-uncompyle6.git, synced 2024-11-23 05:00:07 +00:00
Two bug fixes and a refactor

1. parse2.py: for a try/except inside a loop that ends in a (virtual) continue, treat CONTINUE like the JUMP_ABSOLUTE it really is.
2. When taking a method off of a constant, parentheses need to be added around the constant.

Also, some refactoring of the global-handling code.
parent 0724dc1c0e
commit 5fe8303184
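A minimal sketch of the two source patterns involved (both are exercised by the test cases added below; `__nonzero__` is the Python 2 spelling, `__bool__` in Python 3):

# Bug 2: a method called on a constant must be deparsed with parentheses;
# without them the tokenizer reads "1." as the start of a float literal.
#
#   1.__nonzero__()      # SyntaxError
#   (1).__nonzero__()    # OK
(1).__nonzero__() == 1

# Bug 1: when a try/except body ends a loop iteration, the scanner marks the
# jump back to the loop start as a (virtual) CONTINUE token, which is really
# a JUMP_ABSOLUTE; the grammar now accepts it there (jmp_abs ::= CONTINUE).
for i in (1, 2):
    try:
        x = 1
    except ValueError:
        y = 2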
@@ -113,10 +113,6 @@ check-bytecode-2.4:
check-bytecode-2.5:
	$(PYTHON) test_pythonlib.py --bytecode-2.5

#: Check deparsing Python 2.6
check-bytecode-2.6:
	$(PYTHON) test_pythonlib.py --bytecode-2.6

#: Get grammar coverage for Python 2.5
grammar-coverage-2.5:
	-rm $(COVER_DIR)/spark-grammar-25.cover
@@ -173,7 +169,7 @@ grammar-coverage-3.5:
	SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-35.cover $(PYTHON) test_pyenvlib.py --3.5.3

#: Check deparsing Python 2.6
pcheck-bytecode-2.6:
check-bytecode-2.6:
	$(PYTHON) test_pythonlib.py --bytecode-2.6 --weak-verify

#: Check deparsing Python 2.7
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -19,3 +19,6 @@ x[0:3] += 1, 2, 3

# "is not" in a chained compare
x[0] is not x[1] is not x[2]

# Method is called on a constant, so we need parentheses around it
(1).__nonzero__() == 1
@@ -39,3 +39,24 @@ except SystemExit:
    x = 2
except:
    x = 3

# Try except in loops

for i in (1, 2):
    try:
        x = 1
    except ValueError:
        y = 2

for badarg in (2, 3):
    try:
        pass
    except TypeError:
        pass
    except ValueError:
        if badarg:
            pass
        else:
            y = 3
    else:
        y = 4
@@ -162,6 +162,7 @@ class Python2Parser(PythonParser):

        jmp_abs ::= JUMP_ABSOLUTE
        jmp_abs ::= JUMP_BACK
        jmp_abs ::= CONTINUE
        '''

    def p_generator_exp2(self, args):
@@ -106,35 +106,36 @@ TABLE_DIRECT = {
        (1, 'unary_op'),
        (0, 'expr') ),

    'unary_not': ( 'not %c',
    'unary_not': ( 'not %c',
        (0, 'expr' ) ),
    'unary_convert': ( '`%c`',
        (0, 'expr' ), ),
    'get_iter': ( 'iter(%c)',
        (0, 'expr'), ),
    'slice0': ( '%c[:]',
    'slice0': ( '%c[:]',
        (0, 'expr'), ),
    'slice1': ( '%c[%p:]',
    'slice1': ( '%c[%p:]',
        (0, 'expr'),
        (1, 100) ),
    'slice2': ( '%c[:%p]',
    'slice2': ( '%c[:%p]',
        (0, 'expr'),
        (1, 100) ),
    'slice3': ( '%c[%p:%p]',
    'slice3': ( '%c[%p:%p]',
        (0, 'expr'),
        (1, 100), (2, 100) ),

    'IMPORT_FROM': ( '%{pattr}', ),
    'load_attr': ( '%c.%[1]{pattr}', 0),
    'LOAD_FAST': ( '%{pattr}', ),
    'LOAD_NAME': ( '%{pattr}', ),
    'load_attr': ( '%c.%[1]{pattr}',
        (0, 'expr')),
    'LOAD_FAST': ( '%{pattr}', ),
    'LOAD_NAME': ( '%{pattr}', ),
    'LOAD_CLASSNAME': ( '%{pattr}', ),
    'LOAD_GLOBAL': ( '%{pattr}', ),
    'LOAD_DEREF': ( '%{pattr}', ),
    'LOAD_LOCALS': ( 'locals()', ),
    'LOAD_GLOBAL': ( '%{pattr}', ),
    'LOAD_DEREF': ( '%{pattr}', ),
    'LOAD_LOCALS': ( 'locals()', ),
    'LOAD_ASSERT': ( '%{pattr}', ),
    'DELETE_FAST': ( '%|del %{pattr}\n', ),
    'DELETE_NAME': ( '%|del %{pattr}\n', ),
    'DELETE_FAST': ( '%|del %{pattr}\n', ),
    'DELETE_NAME': ( '%|del %{pattr}\n', ),
    'DELETE_GLOBAL': ( '%|del %{pattr}\n', ),
    'delete_subscr': ( '%|del %c[%c]\n', 0, 1,),
    'subscript': ( '%c[%p]',
@@ -143,18 +144,24 @@ TABLE_DIRECT = {
    'subscript2': ( '%c[%c]',
        (0, 'expr'),
        (1, 'expr') ),
    'store_subscr': ( '%c[%c]', 0, 1),
    'STORE_FAST': ( '%{pattr}', ),
    'STORE_NAME': ( '%{pattr}', ),
    'STORE_GLOBAL': ( '%{pattr}', ),
    'STORE_DEREF': ( '%{pattr}', ),
    'unpack': ( '%C%,', (1, maxint, ', ') ),
    'store_subscr': ( '%c[%c]', 0, 1),
    'STORE_FAST': ( '%{pattr}', ),
    'STORE_NAME': ( '%{pattr}', ),
    'STORE_GLOBAL': ( '%{pattr}', ),
    'STORE_DEREF': ( '%{pattr}', ),
    'unpack': ( '%C%,', (1, maxint, ', ') ),

    # This nonterminal we create on the fly in semantic routines
    'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),
    'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),

    'unpack_list': ( '[%C]', (1, maxint, ', ') ),
    'build_tuple2': ( '%P', (0, -1, ', ', 100) ),
    # This nonterminal we create on the fly in semantic routines
    'load_attr': ( '(%c).%[1]{pattr}',
        (0, 'expr')),

    'unpack_list': ( '[%C]',
        (1, maxint, ', ') ),
    'build_tuple2': ( '%P',
        (0, -1, ', ', 100) ),

    'list_iter': ( '%c', 0 ),
    'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
@@ -1,4 +1,7 @@
import sys

from uncompyle6.parsers.astnode import AST

from uncompyle6 import PYTHON3
if PYTHON3:
    minint = -sys.maxsize-1
@@ -7,6 +10,53 @@ else:
    minint = -sys.maxint-1
    maxint = sys.maxint

read_write_global_ops = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'))
read_global_ops = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL'))

# FIXME: this and find_globals could be parameterized with one of the
# above global ops
def find_all_globals(node, globs):
    """Search AST node to find variable names that are global."""
    for n in node:
        if isinstance(n, AST):
            globs = find_all_globals(n, globs)
        elif n.kind in read_write_global_ops:
            globs.add(n.pattr)
    return globs

def find_globals(node, globs):
    """search AST node to find variable names that need a 'global' added."""
    for n in node:
        if isinstance(n, AST):
            globs = find_globals(n, globs)
        elif n.kind in read_global_ops:
            globs.add(n.pattr)
    return globs

# def find_globals(node, globs, global_ops=mkfunc_globals):
#     """Find globals in this statement."""
#     for n in node:
#         # print("XXX", n.kind, global_ops)
#         if isinstance(n, AST):
#             # FIXME: do I need a caser for n.kind="mkfunc"?
#             if n.kind in ("conditional_lambda", "return_lambda"):
#                 globs = find_globals(n, globs, mklambda_globals)
#             else:
#                 globs = find_globals(n, globs, global_ops)
#         elif n.kind in frozenset(global_ops):
#             globs.add(n.pattr)
#     return globs

def find_none(node):
    for n in node:
        if isinstance(n, AST):
            if n not in ('return_stmt', 'return_if_stmt'):
                if find_none(n):
                    return True
        elif n.kind == 'LOAD_CONST' and n.pattr is None:
            return True
    return False

def print_docstring(self, indent, docstring):
    try:
        if docstring.find('"""') == -1:
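A usage sketch for the helpers consolidated here (the caller and its `write` callback are hypothetical; the helper names, signatures, and docstrings are taken from the diff above):

from uncompyle6.semantics.helper import find_all_globals, find_globals

def emit_global_decls(ast, write):
    # Names that need a 'global' statement in the deparsed function body.
    globs = find_globals(ast, set())
    for name in sorted(globs):
        write('global %s\n' % name)

# find_all_globals also matches LOAD_GLOBAL, so for the same node it returns
# a superset of what find_globals returns.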
@@ -9,7 +9,9 @@ from uncompyle6.parsers.astnode import AST
from uncompyle6 import PYTHON3
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.parser import ParserError as ParserError2
from uncompyle6.semantics.helper import print_docstring
from uncompyle6.semantics.helper import (
    print_docstring, find_all_globals, find_globals, find_none
)

if PYTHON3:
    from itertools import zip_longest
@@ -18,42 +20,6 @@ else:

from uncompyle6.show import maybe_show_ast_param_default

def find_all_globals(node, globs):
    """Find globals including LOAD_GLOBALs in this AST node."""
    for n in node:
        if isinstance(n, AST):
            globs = find_all_globals(n, globs)
        elif n.kind in frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL')):
            globs.add(n.pattr)
    return globs

mkfunc_globals = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'))
mklambda_globals = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL'))

def find_globals(node, globs, global_ops=mkfunc_globals):
    """Find globals in this statement."""
    for n in node:
        # print("XXX", n.kind, global_ops)
        if isinstance(n, AST):
            # FIXME: do I need a caser for n.kind="mkfunc"?
            if n.kind in ("conditional_lambda", "return_lambda"):
                globs = find_globals(n, globs, mklambda_globals)
            else:
                globs = find_globals(n, globs, global_ops)
        elif n.kind in frozenset(global_ops):
            globs.add(n.pattr)
    return globs

def find_none(node):
    for n in node:
        if isinstance(n, AST):
            if n not in ('return_stmt', 'return_if_stmt'):
                if find_none(n):
                    return True
        elif n.kind == 'LOAD_CONST' and n.pattr is None:
            return True
    return False

# FIXME: DRY the below code...

def make_function3_annotate(self, node, is_lambda, nested=1,
@@ -123,10 +123,11 @@ from uncompyle6.scanner import Code, get_scanner
import uncompyle6.parser as python_parser
from uncompyle6.semantics.make_function import (
    make_function2, make_function3, make_function3_annotate,
    find_globals)
)
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.semantics.check_ast import checker
from uncompyle6.semantics.helper import print_docstring
from uncompyle6.semantics.helper import (
    print_docstring, find_globals)
from uncompyle6.scanners.tok import Token

from uncompyle6.semantics.consts import (
@@ -1877,6 +1878,11 @@ class SourceWalker(GenericASTTraversal, object):

    n_unpack_w_parens = n_unpack

    def n_load_attr(self, node):
        if node[0] == 'LOAD_CONST':
            node.kind = 'load_attr_w_parens'
        self.default(node)

    def n_assign(self, node):
        # A horrible hack for Python 3.0 .. 3.2
        if 3.0 <= self.version <= 3.2 and len(node) == 2: