Mirror of https://github.com/rocky/python-uncompyle6.git (synced 2024-11-22 20:49:48 +00:00)

Commit dcc9d1a571: Fix spelling via "codespell"
Parent commit: e9120eab45
@@ -27,7 +27,7 @@ def test_if_in_for():
 fjt = scan.find_jump_targets(False)

 ## FIXME: the data below is wrong.
-## we get different results currenty as well.
+## we get different results currently as well.
 ## We need to probably fix both the code
 ## and the test below
 # assert {15: [3], 69: [66], 63: [18]} == fjt
@@ -67,7 +67,7 @@ def are_instructions_equal(i1, i2):
 Determine if two instructions are approximately equal,
 ignoring certain fields which we allow to differ, namely:

-* code objects are ignore (should probaby be checked) due to address
+* code objects are ignore (should probably be checked) due to address
 * line numbers

 :param i1: left instruction to compare
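As a reading aid, the docstring above describes a fuzzy comparison; a minimal sketch of such a check (the helper name and the opname/argval field names are assumptions, not the test's actual code) might look like:

    from xdis import iscode

    def roughly_equal(i1, i2):
        # Opcodes must match exactly.
        if i1.opname != i2.opname:
            return False
        # Code-object operands embed memory addresses, so only require that
        # both operands are code objects rather than comparing them directly.
        if iscode(i1.argval) and iscode(i2.argval):
            return True
        # Line numbers are deliberately not compared.
        return i1.argval == i2.argval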
setup.py
@@ -5,8 +5,8 @@ import sys
 """Setup script for the 'uncompyle6' distribution."""

 SYS_VERSION = sys.version_info[0:2]
-if not ((2, 4) <= SYS_VERSION < (3, 12)):
-    mess = "Python Release 2.6 .. 3.11 are supported in this code branch."
+if not ((2, 4) <= SYS_VERSION < (3, 13)):
+    mess = "Python Release 2.6 .. 3.12 are supported in this code branch."
 if (2, 4) <= SYS_VERSION <= (2, 7):
 mess += (
 "\nFor your Python, version %s, use the python-2.4 code/branch."
@@ -1,6 +1,6 @@
 """
 test_prettyprint.py -- source test pattern for tesing the prettyprint
-funcionality of decompyle
+functionality of decompyle

 This source is part of the decompyle test suite.

@@ -23,7 +23,7 @@ Disassemble/Tokenize FILE with in the way that is done to
 assist uncompyle6 in parsing the instruction stream. For example
 instructions with variable-length arguments like CALL_FUNCTION and
 BUILD_LIST have argument counts appended to the instruction name, and
-COME_FROM psuedo instructions are inserted into the instruction stream.
+COME_FROM pseudo instructions are inserted into the instruction stream.
 Bit flag values encoded in an operand are expanding, EXTENDED_ARG
 value are folded into the following instruction operand.

@@ -66,7 +66,7 @@ def decompile(
 """
 ingests and deparses a given code block 'co'

-if `bytecode_version` is None, use the current Python intepreter
+if `bytecode_version` is None, use the current Python interpreter
 version.

 Caller is responsible for closing `out` and `mapstream`
@@ -221,7 +221,7 @@ class PythonParser(GenericASTBuilder):

 This appears in CALL_FUNCTION or CALL_METHOD (PyPy) tokens
 """
-# Low byte indicates number of positional paramters,
+# Low byte indicates number of positional parameters,
 # high byte number of keyword parameters
 assert token.kind.startswith("CALL_FUNCTION") or token.kind.startswith("CALL_METHOD")
 args_pos = token.attr & 0xFF
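As a concrete reading of the comment above, the operand can be split like this (a sketch of the byte layout, not the parser's own code):

    def split_call_operand(attr):
        # Low byte: positional argument count; next byte: keyword argument count.
        args_pos = attr & 0xFF
        args_kw = (attr >> 8) & 0xFF
        return args_pos, args_kw

    # Example: an operand of 0x0203 means 3 positional and 2 keyword arguments.
    assert split_call_operand(0x0203) == (3, 2)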
@@ -600,12 +600,12 @@ class PythonParser(GenericASTBuilder):
 compare ::= compare_single
 compare_single ::= expr expr COMPARE_OP

-# A compare_chained is two comparisions, as in: x <= y <= z
+# A compare_chained is two comparisons, as in: x <= y <= z
 compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP
 _come_froms
 compare_chained_right ::= expr COMPARE_OP JUMP_FORWARD

-# Non-null kvlist items are broken out in the indiviual grammars
+# Non-null kvlist items are broken out in the individual grammars
 kvlist ::=

 # Positional arguments in make_function
@@ -307,7 +307,7 @@ class Python26Parser(Python2Parser):

 and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP

-# A "compare_chained" is two comparisions like x <= y <= z
+# A "compare_chained" is two comparisons like x <= y <= z
 compare_chained ::= expr compared_chained_middle ROT_TWO
 COME_FROM POP_TOP _come_froms
 compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP
@@ -466,7 +466,7 @@ class Python26Parser(Python2Parser):
 ja_attr = ast[4].attr
 return tokens[last].offset != ja_attr
 elif lhs == "try_except":
-# We need to distingush "try_except" from "tryelsestmt"; we do that
+# We need to distinguish "try_except" from "tryelsestmt"; we do that
 # by checking the jump before the END_FINALLY
 # If we have:
 # insn
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2022 Rocky Bernstein
+# Copyright (c) 2015-2023 Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 # Copyright (c) 1999 John Aycock
@@ -634,7 +634,7 @@ class Python3Parser(PythonParser):
 self.add_unique_rule(rule, token.kind, uniq_param, customize)

 def add_make_function_rule(self, rule, opname, attr, customize):
-"""Python 3.3 added a an addtional LOAD_STR before MAKE_FUNCTION and
+"""Python 3.3 added a an additional LOAD_STR before MAKE_FUNCTION and
 this has an effect on many rules.
 """
 if self.version >= (3, 3):
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017, 2022 Rocky Bernstein
+# Copyright (c) 2016-2017, 2022-2023 Rocky Bernstein
 """
 spark grammar differences over Python 3.1 for Python 3.0.
 """
@@ -31,8 +31,8 @@ class Python30Parser(Python31Parser):

 # In many ways Python 3.0 code generation is more like Python 2.6 than
 # it is 2.7 or 3.1. So we have a number of 2.6ish (and before) rules below
-# Specifically POP_TOP is more prevelant since there is no POP_JUMP_IF_...
-# instructions
+# Specifically POP_TOP is more prevalant since there is no POP_JUMP_IF_...
+# instructions.

 _ifstmts_jump ::= c_stmts JUMP_FORWARD _come_froms POP_TOP COME_FROM
 _ifstmts_jump ::= c_stmts COME_FROM POP_TOP
@@ -208,7 +208,7 @@ class Python30Parser(Python31Parser):
 come_froms POP_TOP POP_BLOCK COME_FROM_LOOP


-# A "compare_chained" is two comparisions like x <= y <= z
+# A "compare_chained" is two comparisons like x <= y <= z
 compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP
 jmp_false compared_chained_middle _come_froms
 compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017, 2019, 2021 Rocky Bernstein
+# Copyright (c) 2016-2017, 2019, 2021, 2023 Rocky Bernstein
 """
 spark grammar differences over Python 3.4 for Python 3.5.
 """
@@ -258,7 +258,7 @@ class Python35Parser(Python34Parser):
 ('pos_arg ' * args_pos) +
 ('kwarg ' * args_kw) + kw + token.kind)

-# Note: semantic actions make use of the fact of wheter "args_pos"
+# Note: semantic actions make use of the fact of whether "args_pos"
 # zero or not in creating a template rule.
 self.add_unique_rule(rule, token.kind, args_pos, customize)
 else:
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2020, 2022 Rocky Bernstein
+# Copyright (c) 2016-2020, 2022-2023 Rocky Bernstein
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -58,7 +58,7 @@ class Python36Parser(Python35Parser):
 come_froms JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP

 # 3.6 due to jump optimization, we sometimes add RETURN_END_IF where
-# RETURN_VALUE is meant. Specifcally this can happen in
+# RETURN_VALUE is meant. Specifically, this can happen in
 # ifelsestmt -> ...else_suite _. suite_stmts... (last) stmt
 return ::= return_expr RETURN_END_IF
 return ::= return_expr RETURN_VALUE COME_FROM
@@ -404,7 +404,7 @@ class Python36Parser(Python35Parser):
 JUMP_LOOP COME_FROM
 POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP

-# FIXME this is a workaround for probalby some bug in the Earley parser
+# FIXME this is a workaround for probably some bug in the Earley parser
 # if we use get_aiter, then list_comp_async doesn't match, and I don't
 # understand why.
 expr_get_aiter ::= expr GET_AITER
@@ -224,11 +224,11 @@ class Python37Parser(Python37BaseParser):
 compare ::= compare_single
 compare_single ::= expr expr COMPARE_OP

-# A compare_chained is two comparisions like x <= y <= z
+# A compare_chained is two comparisons like x <= y <= z
 compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP _come_froms
 compare_chained_right ::= expr COMPARE_OP JUMP_FORWARD

-# Non-null kvlist items are broken out in the indiviual grammars
+# Non-null kvlist items are broken out in the individual grammars
 kvlist ::=

 # Positional arguments in make_function
@@ -1144,7 +1144,7 @@ class Python37Parser(Python37BaseParser):
 come_froms JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP

 # 3.6 due to jump optimization, we sometimes add RETURN_END_IF where
-# RETURN_VALUE is meant. Specifcally this can happen in
+# RETURN_VALUE is meant. Specifically this can happen in
 # ifelsestmt -> ...else_suite _. suite_stmts... (last) stmt
 return ::= return_expr RETURN_END_IF
 return ::= return_expr RETURN_VALUE COME_FROM
@@ -1377,7 +1377,7 @@ class Python37Parser(Python37BaseParser):
 JUMP_BACK COME_FROM
 POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP

-# FIXME this is a workaround for probalby some bug in the Earley parser
+# FIXME this is a workaround for probably some bug in the Earley parser
 # if we use get_aiter, then list_comp_async doesn't match, and I don't
 # understand why.
 expr_get_aiter ::= expr GET_AITER
@@ -38,7 +38,7 @@ class Python37BaseParser(PythonParser):
 return "%s_0" % (token.kind)

 def add_make_function_rule(self, rule, opname, attr, customize):
-"""Python 3.3 added a an addtional LOAD_STR before MAKE_FUNCTION and
+"""Python 3.3 added a an additional LOAD_STR before MAKE_FUNCTION and
 this has an effect on many rules.
 """
 new_rule = rule % "LOAD_STR "
@@ -1,4 +1,4 @@
-# Copyright (c) 2017-2020, 2022 Rocky Bernstein
+# Copyright (c) 2017-2020, 2022-2023 Rocky Bernstein
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -599,7 +599,7 @@ def parse_fn_counts_30_35(argc: int) -> Tuple[int, int, int]:
 In Python 3.0 to 3.5 MAKE_CLOSURE and MAKE_FUNCTION encode
 arguments counts of positional, default + named, and annotation
 arguments a particular kind of encoding where each of
-the entry a a packe byted value of the lower 24 bits
+the entry a a packed byted value of the lower 24 bits
 of ``argc``. The high bits of argc may have come from
 an EXTENDED_ARG instruction. Here, we unpack the values
 from the ``argc`` int and return a triple of the
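A rough sketch of the unpacking the docstring describes, assuming one byte each for the positional-default and keyword-default counts and the remaining high bits for annotations (the project's real function also has to cope with EXTENDED_ARG spillover):

    def unpack_fn_counts(argc):
        pos_args = argc & 0xFF                  # positional default count
        name_pair_args = (argc >> 8) & 0xFF     # keyword-only default pairs
        annotate_args = (argc >> 16) & 0x7FFF   # annotation count
        return pos_args, name_pair_args, annotate_args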
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2022 by Rocky Bernstein
+# Copyright (c) 2015-2023 by Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 #
@@ -55,7 +55,7 @@ class Scanner2(Scanner):
 self.load_asserts = set([])

 # Create opcode classification sets
-# Note: super initilization above initializes self.opc
+# Note: super initialization above initializes self.opc

 # Ops that start SETUP_ ... We will COME_FROM with these names
 # Some blocks and END_ statements. And they can start
@@ -430,7 +430,7 @@ class Scanner2(Scanner):

 # EXTENDED_ARG doesn't appear in instructions,
 # but is instead the next opcode folded into it, and has the offset
-# of the EXTENDED_ARG. Therefor in self.offset2nist_index we'll find
+# of the EXTENDED_ARG. Therefore in self.offset2nist_index we'll find
 # the instruction at the previous EXTENDED_ARG offset which is 3
 # bytes back.
 if j is None and offset > self.opc.ARG_MAX_VALUE:
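For orientation, in the 3-byte bytecode this scanner handles (pre-3.6), EXTENDED_ARG supplies the high 16 bits of the next instruction's operand; a small illustration (not project code):

    def fold_extended_arg(ext_arg, next_oparg):
        # Pre-3.6 bytecode: operands are 16 bits; EXTENDED_ARG holds the high bits.
        return (ext_arg << 16) | next_oparg

    # EXTENDED_ARG 1 followed by JUMP_ABSOLUTE 34 really targets offset 65570.
    assert fold_extended_arg(1, 34) == 65570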
@@ -925,7 +925,7 @@ class Scanner2(Scanner):
 # Is it an "and" inside an "if" or "while" block
 if op == self.opc.PJIF:

-# Search for other POP_JUMP_IF_...'s targetting the
+# Search for other POP_JUMP_IF_...'s targeting the
 # same target, of the current POP_JUMP_... instruction,
 # starting from current offset, and filter everything inside inner 'or'
 # jumps and mid-line ifs
@@ -1024,7 +1024,7 @@ class Scanner2(Scanner):
 ):
 self.fixed_jumps[offset] = rtarget
 else:
-# note test for < 2.7 might be superflous although informative
+# note test for < 2.7 might be superfluous although informative
 # for 2.7 a different branch is taken and the below code is handled
 # under: elif op in self.pop_jump_if_or_pop
 # below
@@ -1114,7 +1114,7 @@ class Scanner2(Scanner):
 if code_pre_rtarget in self.jump_forward:
 if_end = self.get_target(pre_rtarget)

-# Is this a loop and not an "if" statment?
+# Is this a loop and not an "if" statement?
 if (if_end < pre_rtarget) and (pre[if_end] in self.setup_loop_targets):

 if if_end > start:
@@ -1337,9 +1337,9 @@ class Scanner2(Scanner):

 # FIXME FIXME FIXME
 # All the conditions are horrible, and I am not sure I
-# undestand fully what's going l
+# understand fully what's going l
 # We REALLY REALLY need a better way to handle control flow
-# Expecially for < 2.7
+# Especially for < 2.7
 if label is not None and label != -1:
 if self.version[:2] == (2, 7):
 # FIXME: rocky: I think we need something like this...
@@ -62,7 +62,7 @@ class Scanner3(Scanner):
 super(Scanner3, self).__init__(version, show_asm, is_pypy)

 # Create opcode classification sets
-# Note: super initilization above initializes self.opc
+# Note: super initialization above initializes self.opc

 # For ops that start SETUP_ ... we will add COME_FROM with these names
 # at the their targets.
@@ -228,7 +228,7 @@ class Scanner3(Scanner):
 assert count <= i

 if collection_type == "CONST_DICT":
-# constant dictonaries work via BUILD_CONST_KEY_MAP and
+# constant dictionaries work via BUILD_CONST_KEY_MAP and
 # handle the values() like sets and lists.
 # However the keys() are an LOAD_CONST of the keys.
 # adjust offset to account for this
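The bytecode shape the comment refers to can be seen directly with the standard dis module (illustrative; CPython 3.6+ pushes the values, then one LOAD_CONST with the tuple of keys, then BUILD_CONST_KEY_MAP):

    import dis

    # {'a': 1, 'b': 2} typically compiles to:
    #   LOAD_CONST 1
    #   LOAD_CONST 2
    #   LOAD_CONST ('a', 'b')    <- the keys as a single constant tuple
    #   BUILD_CONST_KEY_MAP 2
    dis.dis(compile("{'a': 1, 'b': 2}", "<example>", "eval"))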
@@ -1130,7 +1130,7 @@ class Scanner3(Scanner):

 # Is it an "and" inside an "if" or "while" block
 if op == self.opc.POP_JUMP_IF_FALSE:
-# Search for another POP_JUMP_IF_FALSE targetting the same op,
+# Search for another POP_JUMP_IF_FALSE targeting the same op,
 # in current statement, starting from current offset, and filter
 # everything inside inner 'or' jumps and midline ifs
 match = self.rem_or(
@@ -1337,7 +1337,7 @@ class Scanner3(Scanner):
 self.not_continue.add(pre_rtarget)
 elif code[pre_rtarget] in rtarget_break:
 self.structs.append({"type": "if-then", "start": start, "end": rtarget})
-# It is important to distingish if this return is inside some sort
+# It is important to distinguish if this return is inside some sort
 # except block return
 jump_prev = prev_op[offset]
 if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2019, 2021-2022 by Rocky Bernstein
+# Copyright (c) 2016-2019, 2021-2023 by Rocky Bernstein
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -51,7 +51,7 @@ class Scanner37(Scanner37Base):
 assert count <= i

 if collection_type == "CONST_DICT":
-# constant dictonaries work via BUILD_CONST_KEY_MAP and
+# constant dictionaries work via BUILD_CONST_KEY_MAP and
 # handle the values() like sets and lists.
 # However the keys() are an LOAD_CONST of the keys.
 # adjust offset to account for this
@@ -21,7 +21,7 @@ def checker(ast, in_loop, errors):
 if ast.kind in ("aug_assign1", "aug_assign2") and ast[0][0] == "and":
 text = str(ast)
 error_text = (
-"\n# improper augmented assigment (e.g. +=, *=, ...):\n#\t"
+"\n# improper augmented assignment (e.g. +=, *=, ...):\n#\t"
 + "\n# ".join(text.split("\n"))
 + "\n"
 )
@@ -1,4 +1,4 @@
-# Copyright (c) 2017-2022 by Rocky Bernstein
+# Copyright (c) 2017-2023 by Rocky Bernstein
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -22,7 +22,7 @@ minint = -sys.maxsize - 1
 maxint = sys.maxsize


-# Operator precidence See
+# Operator precedence See
 # https://docs.python.org/2/reference/expressions.html#operator-precedence
 # or
 # https://docs.python.org/3/reference/expressions.html#operator-precedence
@@ -37,11 +37,11 @@ maxint = sys.maxsize
 # various templates we use odd values. Avoiding equal-precedent comparisons
 # avoids ambiguity what to do when the precedence is equal.

-# The precidence of a key below applies the key, a node, and the its
-# *parent*. A node however sometimes sets the precidence for its
-# children. For example, "call" has precidence 2 so we don't get
+# The precedence of a key below applies the key, a node, and the its
+# *parent*. A node however sometimes sets the precedence for its
+# children. For example, "call" has precedence 2 so we don't get
 # additional the additional parenthesis of: ".. op (call())". However
-# for call's children, it parameters, we set the the precidence high,
+# for call's children, it parameters, we set the the precedence high,
 # say to 100, to make sure we avoid additional prenthesis in
 # call((.. op ..)).

@@ -428,7 +428,7 @@ TABLE_DIRECT = {

 "expr_stmt": (
 "%|%p\n",
-# When a statment contains only a named_expr (:=)
+# When a statement contains only a named_expr (:=)
 # the named_expr should have parenthesis around it.
 (0, "expr", PRECEDENCE["named_expr"] - 1)
 ),
@@ -226,7 +226,7 @@ def customize_for_version3(self, version):
 assert node[0] == "expr"
 if node[0][0] == "get_iter":
 # Skip over yield_from.expr.get_iter which adds an
-# extra iter(). Maybe we can do in tranformation phase instead?
+# extra iter(). Maybe we can do in transformation phase instead?
 template = ("yield from %c", (0, "expr"))
 self.template_engine(template, node[0][0])
 else:
@@ -318,7 +318,7 @@ def customize_for_version3(self, version):
 # FIXME: the real situation is that when derived from
 # function_def_annotate we the name has been filled in.
 # But when derived from funcdefdeco it hasn't Would like a better
-# way to distinquish.
+# way to distinguish.
 if self.f.getvalue()[-4:] == "def ":
 self.write(get_code_name(code_node.attr))

@@ -1347,7 +1347,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
 selectedText = text[start:finish]

 # Compute offsets relative to the beginning of the
-# line rather than the beinning of the text
+# line rather than the beginning of the text.
 try:
 lineStart = text[:start].rindex("\n") + 1
 except ValueError:
@@ -1355,7 +1355,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
 adjustedStart = start - lineStart

 # If selected text is greater than a single line
-# just show the first line plus elipses.
+# just show the first line plus ellipsis.
 lines = selectedText.split("\n")
 if len(lines) > 1:
 adjustedEnd = len(lines[0]) - adjustedStart
@@ -1428,7 +1428,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
 p = node.parent
 orig_parent = p
 # If we can get different text, use that as the parent,
-# otherwise we'll use the immeditate parent
+# otherwise we'll use the immediatate parent.
 while p and (
 hasattr(p, "parent") and p.start == node.start and p.finish == node.finish
 ):
@@ -1778,7 +1778,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
 n_set = n_tuple = n_build_set = n_list

 def template_engine(self, entry, startnode):
-"""The format template interpetation engine. See the comment at the
+"""The format template interpretation engine. See the comment at the
 beginning of this module for the how we interpret format
 specifications such as %c, %C, and so on.
 """
@@ -1961,7 +1961,7 @@ class FragmentsWalker(pysource.SourceWalker, object):

 # FIXME figure out how to get these cases to be table driven.
 # 2. subroutine calls. It the last op is the call and for purposes of printing
-# we don't need to print anything special there. However it encompases the
+# we don't need to print anything special there. However it encompasses the
 # entire string of the node fn(...)
 if startnode.kind == "call":
 last_node = startnode[-1]
@@ -2166,7 +2166,7 @@ def code_deparse_around_offset(
 return deparsed


-# Deprecated. Here still for compatability
+# Deprecated. Here still for compatibility
 def deparse_code_around_offset(
 name,
 offset,
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 Rocky Bernstein
+# Copyright (c) 2022-2023 Rocky Bernstein
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -43,7 +43,7 @@ def escape_string(s, quotes=('"', "'", '"""', "'''")):
 s = s.replace(orig, replace)
 return "%s%s%s" % (quote, s, quote)

-# FIXME: this and find_globals could be paramaterized with one of the
+# FIXME: this and find_globals could be parameterized with one of the
 # above global ops
 def find_all_globals(node, globs):
 """Search Syntax Tree node to find variable names that are global."""
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2022 by Rocky Bernstein
+# Copyright (c) 2015-2023 by Rocky Bernstein
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 #
 # This program is free software: you can redistribute it and/or modify
@@ -31,7 +31,7 @@ from xdis import iscode

 def make_function1(self, node, is_lambda, nested=1, code_node=None):
 """
-Dump function defintion, doc string, and function body.
+Dump function definition, doc string, and function body.
 This code is specialied for Python 2.
 """

@@ -40,7 +40,7 @@ def make_function1(self, node, is_lambda, nested=1, code_node=None):
 - handle defaults
 - handle format tuple parameters
 """
-# if formal parameter is a tuple, the paramater name
+# if formal parameter is a tuple, the parameter name
 # starts with a dot (eg. '.1', '.2')
 args = tree[0]
 del tree[0]
@@ -34,7 +34,7 @@ from uncompyle6.show import maybe_show_tree_param_default

 def make_function2(self, node, is_lambda, nested=1, code_node=None):
 """
-Dump function defintion, doc string, and function body.
+Dump function definition, doc string, and function body.
 This code is specialied for Python 2.
 """

@@ -37,7 +37,7 @@ def make_function3_annotate(
 self, node, is_lambda, nested=1, code_node=None, annotate_last=-1
 ):
 """
-Dump function defintion, doc string, and function
+Dump function definition, doc string, and function
 body. This code is specialized for Python 3"""

 def build_param(ast, name, default):
@@ -310,7 +310,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
 # the object on the stack, for keyword-only parameters
 # * parameter annotation objects
 # * a tuple listing the parameter names for the annotations
-# (only if there are ony annotation objects)
+# (only if there are only annotation objects)
 # * the code associated with the function (at TOS1)
 # * the qualified name of the function (at TOS)

@@ -91,7 +91,7 @@ Python.
 # the second item is the nonterminal name and the precedence is given last.
 #
 # %C evaluate/travers children recursively, with sibling children separated by the
-# given string. It needs a 3-tuple: a starting node, the maximimum
+# given string. It needs a 3-tuple: a starting node, the maximum
 # value of an end node, and a string to be inserted between sibling children
 #
 # %, Append ',' if last %C only printed one item. This is mostly for tuples
@@ -99,12 +99,12 @@ Python.
 # other tuples. The specifier takes no arguments
 #
 # %P same as %C but sets operator precedence. Its argument is a 4-tuple:
-# the node low and high indices, the separator, a string the precidence
+# the node low and high indices, the separator, a string the precedence
 # value, an integer.
 #
 # %D Same as `%C` this is for left-recursive lists like kwargs where goes
 # to epsilon at the beginning. It needs a 3-tuple: a starting node, the
-# maximimum value of an end node, and a string to be inserted between
+# maximum value of an end node, and a string to be inserted between
 # sibling children. If we were to use `%C` an extra separator with an
 # epsilon would appear at the beginning.
 #
@@ -119,7 +119,7 @@ Python.
 # %[N]{EXPR} Python eval(EXPR) in context of node[N]. Takes no arguments
 #
 # %[N]{%X} evaluate/recurse on child node[N], using specifier %X.
-# %X can be one of the above, e.g. %c, %p, etc. Takes the arguemnts
+# %X can be one of the above, e.g. %c, %p, etc. Takes the arguments
 # that the specifier uses.
 #
 # %% literal '%'. Takes no arguments.
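To make the %C and %P specifiers above concrete, a hypothetical table entry (the nonterminal names and values are invented; only the tuple shapes follow the documentation above):

    EXAMPLE_TABLE = {
        # %C takes (low, high, separator)
        "list_items": ("[%C]", (0, -1, ", ")),
        # %P takes (low, high, separator, precedence)
        "call_args": ("(%P)", (0, -1, ", ", 100)),
    }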
@@ -214,22 +214,22 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
 of both the syntax tree and language we should produce.

 `out' is IO-like file pointer to where the output should go. It
-whould have a getvalue() method.
+would have a getvalue() method.

 `scanner' is a method to call when we need to scan tokens. Sometimes
 in producing output we will run across further tokens that need
-to be scaned.
+to be scanned.

 If `showast' is True, we print the syntax tree.

 `compile_mode' is is either 'exec' or 'single'. It is the compile
 mode that was used to create the Syntax Tree and specifies a
-gramar variant within a Python version to use.
+grammar variant within a Python version to use.

 `is_pypy` should be True if the Syntax Tree was generated for PyPy.

 `linestarts` is a dictionary of line number to bytecode offset. This
-can sometimes assist in determinte which kind of source-code construct
+can sometimes assist in determining which kind of source-code construct
 to use when there is ambiguity.

 """
@@ -680,7 +680,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
 pass

 def template_engine(self, entry, startnode):
-"""The format template interpetation engine. See the comment at the
+"""The format template interpretation engine. See the comment at the
 beginning of this module for the how we interpret format
 specifications such as %c, %C, and so on.
 """
@@ -970,7 +970,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
 # within the function definition
 assert node[1] == "store"
 # if lhs is not a UNPACK_TUPLE (or equiv.),
-# add parenteses to make this a tuple
+# add parentheses to make this a tuple
 # if node[1][0] not in ('unpack', 'unpack_list'):
 result = self.traverse(node[1])
 if not (result.startswith("(") and result.endswith(")")):
@@ -263,7 +263,7 @@ class TreeTransform(GenericASTTraversal, object):
 # if elif elif
 def n_ifelsestmt(self, node, preprocess=False):
 """
-Transformation involving if..else statments.
+Transformation involving if..else statements.
 For example


@@ -185,7 +185,7 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2, verify, name=""):
 # print dir(code_obj1)
 if isinstance(code_obj1, object):
 # new style classes (Python 2.2)
-# assume _both_ code objects to be new stle classes
+# assume _both_ code objects to be new style classes
 assert dir(code_obj1) == dir(code_obj2)
 else:
 # old style classes
@@ -205,7 +205,7 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2, verify, name=""):
 # should be faster and more sophisticated
 # if this compare fails, we use the old routine to
 # find out, what exactly is nor equal
-# if this compare succeds, simply return
+# if this compare succeeds, simply return
 # return
 pass
