python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Commit: lint

.gitignore (vendored)
@@ -2,6 +2,7 @@
 *.pyo
 *_dis
 *~
+.mypy_cache
 /.cache
 /.eggs
 /.hypothesis
@@ -10,7 +11,6 @@
 /.pytest_cache
 /.python-version
 /.tox
-.mypy_cache
 /.venv*
 /README
 /__pkginfo__.pyc
@@ -20,6 +20,7 @@
 /tmp
 /uncompyle6.egg-info
 /unpyc
+/venv
 ChangeLog
 __pycache__
 build
@@ -216,7 +216,7 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
     print("Output directory: ", target_dir)
     try:
         _, _, failed_files, failed_verify = main(
-            src_dir, target_dir, files, [], do_verify=opts["do_verify"]
+            src_dir, target_dir, files, []
         )
         if failed_files != 0:
             sys.exit(2)
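
Note: with do_verify dropped from this call, a driver script would invoke main() roughly as below. This is a minimal sketch rather than code from the commit; the directory and file names are placeholders, and only the argument shape and the four-element return value come from the hunks in this diff.

    import sys

    from uncompyle6.main import main

    src_dir = "bytecode-dir"        # placeholder input directory of .pyc files
    target_dir = "/tmp/decompiled"  # placeholder output directory
    files = ["example.pyc"]         # placeholder list of bytecode files

    tot_files, okay_files, failed_files, verify_failed_files = main(
        src_dir, target_dir, files, []
    )
    if failed_files != 0:
        sys.exit(2)
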
@@ -90,7 +90,7 @@ def decompile(
     run_pypy_str = "PyPy " if IS_PYPY else ""
     sys_version_lines = sys.version.split("\n")
     if source_encoding:
-        write("# -*- coding: %s -*-" % source_encoding)
+        write(f"# -*- coding: {source_encoding} -*-")
     write(
         "# uncompyle6 version %s\n"
         "# %sPython bytecode version base %s%s\n# Decompiled from: %sPython %s"
@@ -104,9 +104,9 @@ def decompile(
         )
     )
     if co.co_filename:
-        write("# Embedded file name: %s" % co.co_filename)
+        write(f"# Embedded file name: {co.co_filename}")
     if timestamp:
-        write("# Compiled at: %s" % datetime.datetime.fromtimestamp(timestamp))
+        write(f"# Compiled at: {datetime.datetime.fromtimestamp(timestamp)}")
     if source_size:
         write("# Size of source mod 2**32: %d bytes" % source_size)
 
@@ -129,13 +129,14 @@ def decompile(
             version=bytecode_version,
             code_objects=code_objects,
             is_pypy=is_pypy,
+            debug_opts=debug_opts,
         )
         header_count = 3 + len(sys_version_lines)
         linemap = [
             (line_no, deparsed.source_linemap[line_no] + header_count)
             for line_no in sorted(deparsed.source_linemap.keys())
         ]
-        mapstream.write("\n\n# %s\n" % linemap)
+        mapstream.write(f"\n\n# {linemap}\n")
     else:
         if do_fragments:
             deparse_fn = code_deparse_fragments
@@ -163,11 +164,11 @@ def compile_file(source_path: str) -> str:
         basename = source_path
 
     if hasattr(sys, "pypy_version_info"):
-        bytecode_path = "%s-pypy%s.pyc" % (basename, version_tuple_to_str())
+        bytecode_path = f"{basename}-pypy{version_tuple_to_str()}.pyc"
     else:
-        bytecode_path = "%s-%s.pyc" % (basename, version_tuple_to_str())
+        bytecode_path = f"{basename}-{version_tuple_to_str()}.pyc"
 
-    print("compiling %s to %s" % (source_path, bytecode_path))
+    print(f"compiling {source_path} to {bytecode_path}")
     py_compile.compile(source_path, bytecode_path, "exec")
     return bytecode_path
 
@@ -232,7 +233,6 @@ def decompile_file(
             compile_mode="exec",
         )
     ]
-    co = None
     return deparsed
 
 
@@ -245,7 +245,6 @@ def main(
     outfile=None,
     showasm: Optional[str] = None,
     showast={},
-    do_verify=False,
     showgrammar=False,
     source_encoding=None,
     raise_on_error=False,
@@ -274,7 +273,7 @@ def main(
         infile = os.path.join(in_base, filename)
         # print("XXX", infile)
         if not os.path.exists(infile):
-            sys.stderr.write("File '%s' doesn't exist. Skipped\n" % infile)
+            sys.stderr.write(f"File '{infile}' doesn't exist. Skipped\n")
             continue
 
         if do_linemaps:
@@ -322,13 +321,13 @@ def main(
                 ):
                     if e[0] != last_mod:
                         line = "=" * len(e[0])
-                        outstream.write("%s\n%s\n%s\n" % (line, e[0], line))
+                        outstream.write(f"{line}\n{e[0]}\n{line}\n")
                         last_mod = e[0]
                     info = offsets[e]
-                    extractInfo = d.extract_node_info(info)
-                    outstream.write("%s" % info.node.format().strip() + "\n")
-                    outstream.write(extractInfo.selectedLine + "\n")
-                    outstream.write(extractInfo.markerLine + "\n\n")
+                    extract_info = d.extract_node_info(info)
+                    outstream.write(f"{info.node.format().strip()}" + "\n")
+                    outstream.write(extract_info.selectedLine + "\n")
+                    outstream.write(extract_info.markerLine + "\n\n")
                     pass
                 pass
                 tot_files += 1
@@ -349,14 +348,14 @@ def main(
             if str(e).startswith("Unsupported Python"):
                 sys.stdout.write("\n")
                 sys.stderr.write(
-                    "\n# Unsupported bytecode in file %s\n# %s\n" % (infile, e)
+                    f"\n# Unsupported bytecode in file {infile}\n# {e}\n"
                 )
             else:
                 if outfile:
                     outstream.close()
                     os.remove(outfile)
                 sys.stdout.write("\n")
-                sys.stderr.write("\nLast file: %s " % (infile))
+                sys.stderr.write(f"\nLast file: {infile} ")
                 raise
 
         # except:
@@ -376,7 +375,7 @@ def main(
                 okay_files += 1
                 if not current_outfile:
                     mess = "\n# okay decompiling"
-                    # mem_usage = __memUsage()
+                    # mem_usage = __mem_usage()
                     print(mess, infile)
             if current_outfile:
                 sys.stdout.write(
@@ -384,7 +383,6 @@ def main(
                     % (
                         infile,
                         status_msg(
-                            do_verify,
                             tot_files,
                             okay_files,
                             failed_files,
@@ -405,14 +403,14 @@ def main(
             except Exception:
                 pass
             pass
-    return (tot_files, okay_files, failed_files, verify_failed_files)
+    return tot_files, okay_files, failed_files, verify_failed_files
 
 
 # ---- main ----
 
 if sys.platform.startswith("linux") and os.uname()[2][:2] in ["2.", "3.", "4."]:
 
-    def __memUsage():
+    def __mem_usage():
         mi = open("/proc/self/stat", "r")
         mu = mi.readline().split()[22]
         mi.close()
@@ -420,11 +418,11 @@ if sys.platform.startswith("linux") and os.uname()[2][:2] in ["2.", "3.", "4."]:
 
 else:
 
-    def __memUsage():
+    def __mem_usage():
         return ""
 
 
-def status_msg(do_verify, tot_files, okay_files, failed_files, verify_failed_files):
+def status_msg(tot_files, okay_files, failed_files, verify_failed_files):
     if tot_files == 1:
         if failed_files:
             return "\n# decompile failed"
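
Note: the __mem_usage helper above reads field 23 of /proc/self/stat (index 22 after a whitespace split), which proc(5) documents as vsize, the process's virtual memory size in bytes. A rough standalone equivalent, assuming Linux and accepting the same naive split as the original:

    def mem_usage_bytes() -> int:
        # /proc/self/stat is a single line; field 23 (index 22) is vsize in bytes.
        with open("/proc/self/stat") as f:
            return int(f.readline().split()[22])
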
@@ -1,5 +1,6 @@
 # Copyright (c) 2018, 2024 by Rocky Bernstein
 #
+
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
@@ -13,7 +14,10 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from uncompyle6.semantics.fragments import FragmentsWalker, code_deparse as fragments_code_deparse
+from uncompyle6.semantics.fragments import (
+    FragmentsWalker,
+    code_deparse as fragments_code_deparse,
+)
 from uncompyle6.semantics.pysource import SourceWalker, code_deparse
 
 
@@ -25,9 +29,9 @@ class LineMapWalker(SourceWalker):
         self.current_line_number = 1
 
     def write(self, *data):
-        """Augment write routine to keep track of current line"""
+        """Augment write routine to keep track of current line."""
         for line in data:
-            ## print("XXX write: '%s'" % l)
+            # print(f"XXX write: '{line}'")
             for i in str(line):
                 if i == "\n":
                     self.current_line_number += 1
@@ -39,7 +43,7 @@ class LineMapWalker(SourceWalker):
     # Note n_expr needs treatment too
 
     def default(self, node):
-        """Augment write default routine to record line number changes"""
+        """Augment default-write routine to record line number changes."""
        if hasattr(node, "linestart"):
            if node.linestart:
                self.source_linemap[self.current_line_number] = node.linestart
@@ -85,7 +89,7 @@ def code_deparse_with_fragments_and_map(*args, **kwargs):
 if __name__ == "__main__":
 
     def deparse_test(co):
-        "This is a docstring"
+        """This is a docstring"""
         deparsed = code_deparse_with_map(co)
         a = 1
         b = 2
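
Note: a quick way to exercise the walker changed above, along the lines of the __main__ stub, is to deparse a small function and inspect source_linemap, the attribute that main.py reads. A sketch, assuming the module is importable as uncompyle6.semantics.linemap and that the running interpreter produces bytecode uncompyle6 can decompile:

    from uncompyle6.semantics.linemap import code_deparse_with_map

    def sample(a, b):
        c = a + b
        return c * 2

    deparsed = code_deparse_with_map(sample.__code__)
    # Keys are line numbers in the generated text; values are the line numbers
    # recorded in the bytecode (node.linestart in LineMapWalker.default).
    for out_line, src_line in sorted(deparsed.source_linemap.items()):
        print(out_line, "->", src_line)
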
@@ -141,17 +141,25 @@ from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.scanner import Code, get_scanner
 from uncompyle6.scanners.tok import Token
 from uncompyle6.semantics.check_ast import checker
-from uncompyle6.semantics.consts import (ASSIGN_TUPLE_PARAM,
-                                         INDENT_PER_LEVEL, LINE_LENGTH, MAP,
-                                         MAP_DIRECT, NAME_MODULE, NONE, PASS,
-                                         PRECEDENCE, RETURN_LOCALS,
-                                         RETURN_NONE, TAB, TABLE_R, escape)
+from uncompyle6.semantics.consts import (
+    ASSIGN_TUPLE_PARAM,
+    INDENT_PER_LEVEL,
+    LINE_LENGTH,
+    MAP,
+    MAP_DIRECT,
+    NAME_MODULE,
+    NONE,
+    PASS,
+    PRECEDENCE,
+    RETURN_LOCALS,
+    RETURN_NONE,
+    TAB,
+    TABLE_R,
+    escape,
+)
 from uncompyle6.semantics.customize import customize_for_version
 from uncompyle6.semantics.gencomp import ComprehensionMixin
-from uncompyle6.semantics.helper import (
-    find_globals_and_nonlocals,
-    print_docstring
-)
+from uncompyle6.semantics.helper import find_globals_and_nonlocals, print_docstring
 from uncompyle6.semantics.make_function1 import make_function1
 from uncompyle6.semantics.make_function2 import make_function2
 from uncompyle6.semantics.make_function3 import make_function3
@@ -162,9 +170,11 @@ from uncompyle6.semantics.transform import TreeTransform, is_docstring
 from uncompyle6.show import maybe_show_tree
 from uncompyle6.util import better_repr
 
-DEFAULT_DEBUG_OPTS = {"asm": False, "tree": False, "grammar": False}
 
-def unicode(x): return x
+def unicode(x):
+    return x
 
+
 from io import StringIO
+
 PARSER_DEFAULT_DEBUG = {
@@ -200,6 +210,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
     Class to traverses a Parse Tree of the bytecode instruction built from parsing to produce some sort of source text.
     The Parse tree may be turned an Abstract Syntax tree as an intermediate step.
     """
+
     stacked_params = ("f", "indent", "is_lambda", "_globals")
 
     def __init__(
@@ -288,7 +299,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         self.in_format_string = None
 
         # hide_internal suppresses displaying the additional instructions that sometimes
-        # exist in code but but were not written in the source code.
+        # exist in code but were not written in the source code.
         # An example is:
         # __module__ = __name__
         self.hide_internal = True
@@ -355,7 +366,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         indent += " "
         i = 0
         for node in ast:
-
             if hasattr(node, "__repr1__"):
                 if enumerate_children:
                     child = self.str_with_template1(node, indent, i)
@@ -375,9 +385,9 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             i += 1
         return rv
 
-    def indent_if_source_nl(self, line_number, indent):
+    def indent_if_source_nl(self, line_number: int, indent: int):
         if line_number != self.line_number:
-            self.write("\n" + self.indent + INDENT_PER_LEVEL[:-1])
+            self.write("\n" + indent + INDENT_PER_LEVEL[:-1])
         return self.line_number
 
     f = property(
@@ -685,7 +695,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
 
     def template_engine(self, entry, startnode):
         """The format template interpretation engine. See the comment at the
-        beginning of this module for the how we interpret format
+        beginning of this module for how we interpret format
         specifications such as %c, %C, and so on.
         """
 
@@ -729,20 +739,31 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                 if isinstance(index[1], str):
                     # if node[index[0]] != index[1]:
                     #    from trepan.api import debug; debug()
-                    assert node[index[0]] == index[1], (
-                        "at %s[%d], expected '%s' node; got '%s'"
-                        % (node.kind, arg, index[1], node[index[0]].kind,)
+                    assert (
+                        node[index[0]] == index[1]
+                    ), "at %s[%d], expected '%s' node; got '%s'" % (
+                        node.kind,
+                        arg,
+                        index[1],
+                        node[index[0]].kind,
                     )
                 else:
-                    assert node[index[0]] in index[1], (
-                        "at %s[%d], expected to be in '%s' node; got '%s'"
-                        % (node.kind, arg, index[1], node[index[0]].kind,)
+                    assert (
+                        node[index[0]] in index[1]
+                    ), "at %s[%d], expected to be in '%s' node; got '%s'" % (
+                        node.kind,
+                        arg,
+                        index[1],
+                        node[index[0]].kind,
                     )
 
                 index = index[0]
-                assert isinstance(index, int), (
-                    "at %s[%d], %s should be int or tuple"
-                    % (node.kind, arg, type(index),)
+                assert isinstance(
+                    index, int
+                ), "at %s[%d], %s should be int or tuple" % (
+                    node.kind,
+                    arg,
+                    type(index),
                 )
 
                 try:
@@ -765,14 +786,22 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             if len(tup) == 3:
                 (index, nonterm_name, self.prec) = tup
                 if isinstance(tup[1], str):
-                    assert node[index] == nonterm_name, (
-                        "at %s[%d], expected '%s' node; got '%s'"
-                        % (node.kind, arg, nonterm_name, node[index].kind,)
+                    assert (
+                        node[index] == nonterm_name
+                    ), "at %s[%d], expected '%s' node; got '%s'" % (
+                        node.kind,
+                        arg,
+                        nonterm_name,
+                        node[index].kind,
                     )
                 else:
-                    assert node[tup[0]] in tup[1], (
-                        "at %s[%d], expected to be in '%s' node; got '%s'"
-                        % (node.kind, arg, index[1], node[index[0]].kind,)
+                    assert (
+                        node[tup[0]] in tup[1]
+                    ), "at %s[%d], expected to be in '%s' node; got '%s'" % (
+                        node.kind,
+                        arg,
+                        index[1],
+                        node[index[0]].kind,
                     )
 
             else:
@@ -885,52 +914,51 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                 "CALL_FUNCTION_VAR_KW",
                 "CALL_FUNCTION_KW",
             ):
-
                 # FIXME: handle everything in customize.
                 # Right now, some of this is here, and some in that.
 
                 if v == 0:
-                    str = "%c(%C" # '%C' is a dummy here ...
-                    p2 = (0, 0, None) # .. because of the None in this
+                    template_str = "%c(%C" # '%C' is a dummy here ...
+                    p2 = (0, 0, None) # because of the None in this
                 else:
-                    str = "%c(%C, "
+                    template_str = "%c(%C, "
                     p2 = (1, -2, ", ")
                 if op == "CALL_FUNCTION_VAR":
                     # Python 3.5 only puts optional args (the VAR part)
                     # the lowest down the stack
                     if self.version == (3, 5):
-                        if str == "%c(%C, ":
+                        if template_str == "%c(%C, ":
                             entry = ("%c(*%C, %c)", 0, p2, -2)
-                        elif str == "%c(%C":
+                        elif template_str == "%c(%C":
                             entry = ("%c(*%C)", 0, (1, 100, ""))
                     elif self.version == (3, 4):
                         # CALL_FUNCTION_VAR's top element of the stack contains
                         # the variable argument list
                         if v == 0:
-                            str = "%c(*%c)"
-                            entry = (str, 0, -2)
+                            template_str = "%c(*%c)"
+                            entry = (template_str, 0, -2)
                         else:
-                            str = "%c(%C, *%c)"
-                            entry = (str, 0, p2, -2)
+                            template_str = "%c(%C, *%c)"
+                            entry = (template_str, 0, p2, -2)
                     else:
-                        str += "*%c)"
-                        entry = (str, 0, p2, -2)
+                        template_str += "*%c)"
+                        entry = (template_str, 0, p2, -2)
                 elif op == "CALL_FUNCTION_KW":
-                    str += "**%c)"
-                    entry = (str, 0, p2, -2)
+                    template_str += "**%c)"
+                    entry = (template_str, 0, p2, -2)
                 elif op == "CALL_FUNCTION_VAR_KW":
-                    str += "*%c, **%c)"
+                    template_str += "*%c, **%c)"
                     # Python 3.5 only puts optional args (the VAR part)
                     # the lowest down the stack
                     na = v & 0xFF # positional parameters
                     if self.version == (3, 5) and na == 0:
                         if p2[2]:
                             p2 = (2, -2, ", ")
-                        entry = (str, 0, p2, 1, -2)
+                        entry = (template_str, 0, p2, 1, -2)
                     else:
                         if p2[2]:
                             p2 = (1, -3, ", ")
-                        entry = (str, 0, p2, -3, -2)
+                        entry = (template_str, 0, p2, -3, -2)
                     pass
                 else:
                     assert False, "Unhandled CALL_FUNCTION %s" % op
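
Note: a hedged reading of the entries built above, based on the surrounding asserts rather than anything new in this commit: each template entry is a tuple whose first element is a format string, with one following argument per directive, where %c names a single child (an int index, or an (index, nonterminal-name) pair) and %C takes a (start, stop, separator) triple describing a slice of children. For example, the Python 3.4 CALL_FUNCTION_VAR case with positional arguments assembles:

    entry = (
        "%c(%C, *%c)",   # output template
        0,               # %c -> child 0, the expression being called
        (1, -2, ", "),   # %C -> children 1 through -2, joined with ", "
        -2,              # %c -> child -2, the variable-argument expression
    )
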
@@ -1014,7 +1042,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             if ast[0] == "sstmt":
                 ast[0] = ast[0][0]
             first_stmt = ast[0]
-        except:
+        except Exception:
             pass
 
         try:
@@ -1023,7 +1051,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                 del ast[0]
                 first_stmt = ast[0]
                 pass
-        except:
+        except Exception:
             pass
 
         have_qualname = False
@@ -1035,17 +1063,15 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         if self.version < (3, 0):
             # Should we ditch this in favor of the "else" case?
             qualname = ".".join(self.classes)
-            QUAL_NAME = SyntaxTree(
+            qual_name_tree = SyntaxTree(
                 "assign",
                 [
                     SyntaxTree("expr", [Token("LOAD_CONST", pattr=qualname)]),
-                    SyntaxTree(
-                        "store", [Token("STORE_NAME", pattr="__qualname__")]
-                    ),
+                    SyntaxTree("store", [Token("STORE_NAME", pattr="__qualname__")]),
                 ],
             )
             # FIXME: is this right now that we've redone the grammar?
-            have_qualname = ast[0] == QUAL_NAME
+            have_qualname = ast[0] == qual_name_tree
         else:
             # Python 3.4+ has constants like 'cmp_to_key.<locals>.K'
             # which are not simple classes like the < 3 case.
@@ -1057,7 +1083,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                 and first_stmt[1][0] == Token("STORE_NAME", pattr="__qualname__")
             ):
                 have_qualname = True
-        except:
+        except Exception:
             pass
 
         if have_qualname:
@@ -1078,7 +1104,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             try:
                 # FIXME: Is there an extra [0]?
                 docstring = ast[i][0][0][0][0].pattr
-            except:
+            except Exception:
                 docstring = code.co_consts[0]
             if print_docstring(self, indent, docstring):
                 self.println()
@@ -1104,7 +1130,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         # else:
         # print stmt[-1]
 
-
         globals, nonlocals = find_globals_and_nonlocals(
             ast, set(), set(), code, self.version
         )
@@ -1148,7 +1173,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         else:
             self.customize(customize)
             self.text = self.traverse(ast, is_lambda=is_lambda)
-        # In a formatted string using "lambda', we should not add "\n".
+        # In a formatted string using "lambda", we should not add "\n".
         # For example in:
         # f'{(lambda x:x)("8")!r}'
         # Adding a "\n" after "lambda x: x" will give an error message:
@@ -1167,7 +1192,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         noneInNames=False,
         is_top_level_module=False,
     ):
-
         # FIXME: DRY with fragments.py
 
         # assert isinstance(tokens[0], Token)
@@ -1298,7 +1322,7 @@ def code_deparse(
         is_top_level_module=is_top_level_module,
     )
 
-    #### XXX workaround for profiling
+    # XXX workaround for profiling
     if deparsed.ast is None:
         return None
 
@@ -1406,7 +1430,7 @@ def deparse_code2str(
 if __name__ == "__main__":
 
     def deparse_test(co):
-        "This is a docstring"
+        """This is a docstring"""
         s = deparse_code2str(co)
         # s = deparse_code2str(co, debug_opts={"asm": "after", "tree": {'before': False, 'after': False}})
         print(s)
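
Note: the deparse_test stub above reduces to the following minimal usage sketch, assuming the running interpreter emits bytecode that uncompyle6 supports:

    from uncompyle6.semantics.pysource import deparse_code2str

    def add(a, b):
        return a + b

    # Decompile the function's code object back into source text.
    print(deparse_code2str(add.__code__))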