Mirror of https://github.com/rocky/python-uncompyle6.git (synced 2025-08-04 01:09:52 +08:00)

Merge branch 'master' into python-3.3-to-3.5
@@ -1,4 +1,4 @@
-# Copyright (C) 2018, 2020-2021 Rocky Bernstein <rocky@gnu.org>
+# Copyright (C) 2018, 2020-2021 2024 Rocky Bernstein <rocky@gnu.org>
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -32,9 +32,11 @@
 # 3.3 | pip | 10.0.1 |
 # 3.4 | pip | 19.1.1 |

+import os.path as osp
+
 # Things that change more often go here.
 copyright = """
-Copyright (C) 2015-2021 Rocky Bernstein <rb@dustyfeet.com>.
+Copyright (C) 2015-2021, 2024 Rocky Bernstein <rb@dustyfeet.com>.
 """

 classifiers = [
@@ -75,7 +77,7 @@ entry_points = {
     ]
 }
 ftp_url = None
-install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.8, < 6.2.0"]
+install_requires = ["click", "spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.8, < 6.2.0"]

 license = "GPL3"
 mailing_list = "python-debugger@googlegroups.com"
@@ -88,21 +90,18 @@ web = "https://github.com/rocky/python-uncompyle6/"
 zip_safe = True


-import os.path
-
-
 def get_srcdir():
-    filename = os.path.normcase(os.path.dirname(os.path.abspath(__file__)))
-    return os.path.realpath(filename)
+    filename = osp.normcase(osp.dirname(osp.abspath(__file__)))
+    return osp.realpath(filename)


 srcdir = get_srcdir()


 def read(*rnames):
-    return open(os.path.join(srcdir, *rnames)).read()
+    return open(osp.join(srcdir, *rnames)).read()


-# Get info from files; set: long_description and __version__
+# Get info from files; set: long_description and VERSION
 long_description = read("README.rst") + "\n"
 exec(read("uncompyle6/version.py"))

admin-tools/merge-for-2.4.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
+#/bin/bash
+cd $(dirname ${BASH_SOURCE[0]})
+if . ./setup-python-2.4.sh; then
+    git merge python-3.0-to-3.2
+fi

admin-tools/merge-for-3.0.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
+#/bin/bash
+cd $(dirname ${BASH_SOURCE[0]})
+if . ./setup-python-3.0.sh; then
+    git merge python-3.3-to-3.5
+fi

admin-tools/merge-for-3.3.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
+#/bin/bash
+cd $(dirname ${BASH_SOURCE[0]})
+if . ./setup-python-3.3.sh; then
+    git merge master
+fi

@@ -1,9 +1,21 @@
 #!/usr/bin/env python
-# Mode: -*- python -*-
 #
-# Copyright (c) 2015-2016, 2018, 2020, 2022-2023 by Rocky Bernstein <rb@dustyfeet.com>
+# Copyright (c) 2015-2016, 2018, 2020, 2022-2024
+# by Rocky Bernstein <rb@dustyfeet.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
-from __future__ import print_function

 import getopt
 import os
@@ -51,15 +63,14 @@ PATTERNS = ("*.pyc", "*.pyo")


 def main():
-    Usage_short = (
-        """usage: %s FILE...
+    usage_short = (
+        f"""usage: {program} FILE...
 Type -h for for full help."""
-        % program
     )

     if len(sys.argv) == 1:
         print("No file(s) given", file=sys.stderr)
-        print(Usage_short, file=sys.stderr)
+        print(usage_short, file=sys.stderr)
         sys.exit(1)

     try:
@@ -67,7 +78,7 @@ Type -h for for full help."""
             sys.argv[1:], "hVU", ["help", "version", "uncompyle6"]
         )
     except getopt.GetoptError as e:
-        print("%s: %s" % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
+        print(f"{os.path.basename(sys.argv[0])}: {e}", file=sys.stderr)
         sys.exit(-1)

     for opt, val in opts:
@@ -75,18 +86,18 @@ Type -h for for full help."""
             print(__doc__)
             sys.exit(1)
         elif opt in ("-V", "--version"):
-            print("%s %s" % (program, __version__))
+            print(f"{program} {__version__}")
             sys.exit(0)
         else:
             print(opt)
-            print(Usage_short, file=sys.stderr)
+            print(usage_short, file=sys.stderr)
             sys.exit(1)

     for file in files:
         if os.path.exists(files[0]):
             disassemble_file(file, sys.stdout)
         else:
-            print("Can't read %s - skipping" % files[0], file=sys.stderr)
+            print(f"Can't read {files[0]} - skipping", file=sys.stderr)
             pass
         pass
     return

@@ -1,15 +1,19 @@
 #!/usr/bin/env python
 # Mode: -*- python -*-
 #
-# Copyright (c) 2015-2017, 2019-2020, 2023 by Rocky Bernstein
+# Copyright (c) 2015-2017, 2019-2020, 2023-2024
+# by Rocky Bernstein
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 #
 from __future__ import print_function

-import getopt
 import os
 import sys
 import time
+from typing import List

+import click
+from xdis.version_info import version_tuple_to_str

 from uncompyle6 import verify
 from uncompyle6.main import main, status_msg
@@ -17,172 +21,163 @@ from uncompyle6.version import __version__

 program = "uncompyle6"

-__doc__ = """
-Usage:
-  %s [OPTIONS]... [ FILE | DIR]...
-  %s [--help | -h | --V | --version]
-
-Examples:
-  %s foo.pyc bar.pyc       # decompile foo.pyc, bar.pyc to stdout
-  %s -o . foo.pyc bar.pyc  # decompile to ./foo.pyc_dis and ./bar.pyc_dis
-  %s -o /tmp /usr/lib/python1.5 # decompile whole library
-
-Options:
-  -o <path>     output decompiled files to this path:
-                if multiple input files are decompiled, the common prefix
-                is stripped from these names and the remainder appended to
-                <path>
-                uncompyle6 -o /tmp bla/fasel.pyc bla/foo.pyc
-                  -> /tmp/fasel.pyc_dis, /tmp/foo.pyc_dis
-                uncompyle6 -o /tmp bla/fasel.pyc bar/foo.pyc
-                  -> /tmp/bla/fasel.pyc_dis, /tmp/bar/foo.pyc_dis
-                uncompyle6 -o /tmp /usr/lib/python1.5
-                  -> /tmp/smtplib.pyc_dis ... /tmp/lib-tk/FixTk.pyc_dis
-  --compile | -c <python-file>
-                attempts a decompilation after compiling <python-file>
-  -d            print timestamps
-  -p <integer>  use <integer> number of processes
-  -r            recurse directories looking for .pyc and .pyo files
-  --fragments   use fragments deparser
-  --verify      compare generated source with input byte-code
-  --verify-run  compile generated source, run it and check exit code
-  --syntax-verify compile generated source
-  --linemaps    generated line number correspondencies between byte-code
-                and generated source output
-  --encoding <encoding>
-                use <encoding> in generated source according to pep-0263
-  --help        show this message
-
-Debugging Options:
-  --asm | -a            include byte-code (disables --verify)
-  --grammar | -g        show matching grammar
-  --tree={before|after}
-  -t {before|after}     include syntax before (or after) tree transformation
-                        (disables --verify)
-  --tree++ | -T         add template rules to --tree=before when possible
-
-Extensions of generated files:
-  '.pyc_dis' '.pyo_dis'   successfully decompiled (and verified if --verify)
-  + '_unverified'         successfully decompile but --verify failed
-  + '_failed'             decompile failed (contact author for enhancement)
-""" % (
-    (program,) * 5
-)
-
-program = "uncompyle6"

 def usage():
     print(__doc__)
     sys.exit(1)


-def main_bin():
-    if not (
-        sys.version_info[0:2]
-        in (
-            (2, 4),
-            (2, 5),
-            (2, 6),
-            (2, 7),
-            (3, 0),
-            (3, 1),
-            (3, 2),
-            (3, 3),
-            (3, 4),
-            (3, 5),
-            (3, 6),
-            (3, 7),
-            (3, 8),
-            (3, 9),
-            (3, 10),
-            (3, 11),
+# __doc__ = """
+# Usage:
+#   %s [OPTIONS]... [ FILE | DIR]...
+#   %s [--help | -h | --V | --version]
+>>>>>>> master
+# Examples:
+#   %s foo.pyc bar.pyc       # decompile foo.pyc, bar.pyc to stdout
+#   %s -o . foo.pyc bar.pyc  # decompile to ./foo.pyc_dis and ./bar.pyc_dis
+#   %s -o /tmp /usr/lib/python1.5 # decompile whole library
+# Options:
+#   -o <path>     output decompiled files to this path:
+#                 if multiple input files are decompiled, the common prefix
+#                 is stripped from these names and the remainder appended to
+#                 <path>
+#                 uncompyle6 -o /tmp bla/fasel.pyc bla/foo.pyc
+#                   -> /tmp/fasel.pyc_dis, /tmp/foo.pyc_dis
+#                 uncompyle6 -o /tmp bla/fasel.pyc bar/foo.pyc
+#                   -> /tmp/bla/fasel.pyc_dis, /tmp/bar/foo.pyc_dis
+#                 uncompyle6 -o /tmp /usr/lib/python1.5
+#                   -> /tmp/smtplib.pyc_dis ... /tmp/lib-tk/FixTk.pyc_dis
+#   --compile | -c <python-file>
+#                 attempts a decompilation after compiling <python-file>
+#   -d            print timestamps
+#   -p <integer>  use <integer> number of processes
+#   -r            recurse directories looking for .pyc and .pyo files
+#   --fragments   use fragments deparser
+#   --verify      compare generated source with input byte-code
+#   --verify-run  compile generated source, run it and check exit code
+#   --syntax-verify compile generated source
+#   --linemaps    generated line number correspondencies between byte-code
+#                 and generated source output
+#   --encoding <encoding>
+#                 use <encoding> in generated source according to pep-0263
+#   --help        show this message
+
+# Debugging Options:
+#   --asm | -a            include byte-code (disables --verify)
+#   --grammar | -g        show matching grammar
+#   --tree={before|after}
+#   -t {before|after}     include syntax before (or after) tree transformation
+#                         (disables --verify)
+#   --tree++ | -T         add template rules to --tree=before when possible
+
+# Extensions of generated files:
+#   '.pyc_dis' '.pyo_dis'   successfully decompiled (and verified if --verify)
+#   + '_unverified'         successfully decompile but --verify failed
+#   + '_failed'             decompile failed (contact author for enhancement)
+# """ % (
+#     (program,) * 5
+# )
+
+
+@click.command()
+@click.option(
+    "--asm++/--no-asm++",
+    "-A",
+    "asm_plus",
+    default=False,
+    help="show xdis assembler and tokenized assembler",
+)
+@click.option("--asm/--no-asm", "-a", default=False)
+@click.option("--grammar/--no-grammar", "-g", "show_grammar", default=False)
+@click.option("--tree/--no-tree", "-t", default=False)
+@click.option(
+    "--tree++/--no-tree++",
+    "-T",
+    "tree_plus",
+    default=False,
+    help="show parse tree and Abstract Syntax Tree",
+)
+@click.option(
+    "--linemaps/--no-linemaps",
+    default=False,
+    help="show line number correspondencies between byte-code "
+    "and generated source output",
+)
+@click.option(
+    "--verify",
+    type=click.Choice(["run", "syntax"]),
+    default=None,
+)
+@click.option(
+    "--recurse/--no-recurse",
+    "-r",
+    "recurse_dirs",
+    default=False,
+)
+@click.option(
+    "--output",
+    "-o",
+    "outfile",
+    type=click.Path(
+        exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
+    ),
+    required=False,
+)
+@click.version_option(version=__version__)
+@click.option(
+    "--start-offset",
+    "start_offset",
+    default=0,
+    help="start decomplation at offset; default is 0 or the starting offset.",
+)
+@click.version_option(version=__version__)
+@click.option(
+    "--stop-offset",
+    "stop_offset",
+    default=-1,
+    help="stop decomplation when seeing an offset greater or equal to this; default is "
+    "-1 which indicates no stopping point.",
+)
+@click.argument("files", nargs=-1, type=click.Path(readable=True), required=True)
+def main_bin(
+    asm: bool,
+    asm_plus: bool,
+    show_grammar,
+    tree: bool,
+    tree_plus: bool,
+    linemaps: bool,
+    verify,
+    recurse_dirs: bool,
+    outfile,
+    start_offset: int,
+    stop_offset: int,
+    files,
+):
+    """
+    Cross Python bytecode decompiler for Python bytecode up to Python 3.8.
+    """

+    version_tuple = sys.version_info[0:2]
+    if version_tuple < (3, 7):
+        print(
+            f"Error: This version of the {program} runs from Python 3.7 or greater."
+            f"You need another branch of this code for Python before 3.7."
+            f""" \n\tYou have version: {version_tuple_to_str()}."""
         )
-    ):
-        print("Error: %s requires Python 2.4-3.11" % program, file=sys.stderr)
         sys.exit(-1)
-    recurse_dirs = False
     numproc = 0
-    outfile = "-"
     out_base = None
-    source_paths = []
+    out_base = None
+    source_paths: List[str] = []
     timestamp = False
     timestampfmt = "# %Y.%m.%d %H:%M:%S %Z"
+    pyc_paths = files

-    try:
-        opts, pyc_paths = getopt.getopt(
-            sys.argv[1:],
-            "hac:gtTdrVo:p:",
-            "help asm compile= grammar linemaps recurse "
-            "timestamp tree= tree+ "
-            "fragments verify verify-run version "
-            "syntax-verify "
-            "showgrammar encoding=".split(" "),
-        )
-    except getopt.GetoptError as e:
-        print("%s: %s" % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
-        sys.exit(-1)
-
-    options = {
-        "showasm": None
-    }
-    for opt, val in opts:
-        if opt in ("-h", "--help"):
-            print(__doc__)
-            sys.exit(0)
-        elif opt in ("-V", "--version"):
-            print("%s %s" % (program, __version__))
-            sys.exit(0)
-        elif opt == "--verify":
-            options["do_verify"] = "strong"
-        elif opt == "--syntax-verify":
-            options["do_verify"] = "weak"
-        elif opt == "--fragments":
-            options["do_fragments"] = True
-        elif opt == "--verify-run":
-            options["do_verify"] = "verify-run"
-        elif opt == "--linemaps":
-            options["do_linemaps"] = True
-        elif opt in ("--asm", "-a"):
-            if options["showasm"] == None:
-                options["showasm"] = "after"
-            else:
-                options["showasm"] = "both"
-            options["do_verify"] = None
-        elif opt in ("--tree", "-t"):
-            if "showast" not in options:
-                options["showast"] = {}
-            if val == "before":
-                options["showast"][val] = True
-            elif val == "after":
-                options["showast"][val] = True
-            else:
-                options["showast"]["before"] = True
-            options["do_verify"] = None
-        elif opt in ("--tree+", "-T"):
-            if "showast" not in options:
-                options["showast"] = {}
-            options["showast"]["after"] = True
-            options["showast"]["before"] = True
-            options["do_verify"] = None
-        elif opt in ("--grammar", "-g"):
-            options["showgrammar"] = True
-        elif opt == "-o":
-            outfile = val
-        elif opt in ("--timestamp", "-d"):
-            timestamp = True
-        elif opt in ("--compile", "-c"):
-            source_paths.append(val)
-        elif opt == "-p":
-            numproc = int(val)
-        elif opt in ("--recurse", "-r"):
-            recurse_dirs = True
-        elif opt == "--encoding":
-            options["source_encoding"] = val
-        else:
-            print(opt, file=sys.stderr)
-            usage()

-    # expand directory if specified
+    # Expand directory if "recurse" was specified.
     if recurse_dirs:
         expanded_files = []
         for f in pyc_paths:
@@ -216,15 +211,32 @@ def main_bin():
         out_base = outfile
         outfile = None

+    # A second -a turns show_asm="after" into show_asm="before"
+    if asm_plus or asm:
+        asm_opt = "both" if asm_plus else "after"
+    else:
+        asm_opt = None

     if timestamp:
         print(time.strftime(timestampfmt))

     if numproc <= 1:
+        show_ast = {"before": tree or tree_plus, "after": tree_plus}
         try:
             result = main(
-                src_base, out_base, pyc_paths, source_paths, outfile, **options
+                src_base,
+                out_base,
+                pyc_paths,
+                source_paths,
+                outfile,
+                showasm=asm_opt,
+                showgrammar=show_grammar,
+                showast=show_ast,
+                do_verify=verify,
+                do_linemaps=linemaps,
+                start_offset=start_offset,
+                stop_offset=stop_offset,
             )
-            result = [options.get("do_verify", None)] + list(result)
             if len(pyc_paths) > 1:
                 mess = status_msg(*result)
                 print("# " + mess)

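Note on the hunk above: option parsing in main_bin() now goes through click decorators, and the parsed values reach uncompyle6.main.main() as explicit keyword arguments instead of an **options dict. As a rough sketch only (positional arguments mirror the call shown in the hunk; the literal values are made-up examples, not project defaults), an equivalent programmatic call would look like:

    from uncompyle6.main import main

    # Decompile ./foo.pyc to stdout, stopping at bytecode offset 100 (illustrative values only).
    main(
        ".", None, ["foo.pyc"], [], None,
        showasm=None,
        showgrammar=False,
        showast={"before": False, "after": False},
        do_verify=None,
        do_linemaps=False,
        start_offset=0,
        stop_offset=100,
    )
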
@@ -15,8 +15,10 @@

 import datetime
 import os
+import os.path as osp
 import py_compile
 import sys
+import tempfile

 from xdis import iscode
 from xdis.load import load_module
@@ -37,9 +39,9 @@ def _get_outstream(outfile):
     """
     Return an opened output file descriptor for ``outfile``.
     """
-    dir_name = os.path.dirname(outfile)
+    dir_name = osp.dirname(outfile)
     failed_file = outfile + "_failed"
-    if os.path.exists(failed_file):
+    if osp.exists(failed_file):
         os.remove(failed_file)
     try:
         os.makedirs(dir_name)
@@ -59,11 +61,13 @@ def decompile(
     source_encoding=None,
     code_objects={},
     source_size=None,
-    is_pypy=False,
+    is_pypy: bool = False,
     magic_int=None,
     mapstream=None,
     do_fragments=False,
     compile_mode="exec",
+    start_offset: int = 0,
+    stop_offset: int = -1,
 ):
     """
     ingests and deparses a given code block 'co'
@@ -131,6 +135,7 @@ def decompile(
             debug_opts=debug_opts,
         )
         header_count = 3 + len(sys_version_lines)
+        if deparsed is not None:
             linemap = [
                 (line_no, deparsed.source_linemap[line_no] + header_count)
                 for line_no in sorted(deparsed.source_linemap.keys())
@@ -148,8 +153,11 @@ def decompile(
             is_pypy=is_pypy,
             debug_opts=debug_opts,
             compile_mode=compile_mode,
+            start_offset=start_offset,
+            stop_offset=stop_offset,
         )
         pass
+        real_out.write("\n")
         return deparsed
     except pysource.SourceWalkerError as e:
         # deparsing failed
@@ -173,7 +181,7 @@ def compile_file(source_path):


 def decompile_file(
-    filename,
+    filename: str,
     outstream=None,
     showasm=None,
     showast={},
@@ -181,6 +189,8 @@ def decompile_file(
     source_encoding=None,
     mapstream=None,
     do_fragments=False,
+    start_offset=0,
+    stop_offset=-1,
 ):
     """
     decompile Python byte-code file (.pyc). Return objects to
@@ -210,6 +220,8 @@ def decompile_file(
                 is_pypy=is_pypy,
                 magic_int=magic_int,
                 mapstream=mapstream,
+                start_offset=start_offset,
+                stop_offset=stop_offset,
             ),
         )
     else:
@@ -230,6 +242,8 @@ def decompile_file(
                 mapstream=mapstream,
                 do_fragments=do_fragments,
                 compile_mode="exec",
+                start_offset=start_offset,
+                stop_offset=stop_offset,
             )
         ]
     return deparsed
@@ -244,10 +258,13 @@ def main(
     outfile=None,
     showasm=None,
     showast={},
-    showgrammar=False,
+    do_verify = None,
+    showgrammar: bool = False,
     source_encoding=None,
     do_linemaps=False,
     do_fragments=False,
+    start_offset: int = 0,
+    stop_offset: int = -1,
 ):
     """
     in_base  base directory for input files
@@ -260,7 +277,8 @@ def main(
       - files below out_base  out_base=...
      - stdout  out_base=None, outfile=None
     """
-    tot_files = okay_files = failed_files = verify_failed_files = 0
+    tot_files = okay_files = failed_files = 0
+    verify_failed_files = 0 if do_verify else 0
     current_outfile = outfile
     linemap_stream = None

@@ -268,9 +286,9 @@ def main(
         compiled_files.append(compile_file(source_path))

     for filename in compiled_files:
-        infile = os.path.join(in_base, filename)
+        infile = osp.join(in_base, filename)
         # print("XXX", infile)
-        if not os.path.exists(infile):
+        if not osp.exists(infile):
             sys.stderr.write("File '%s' doesn't exist. Skipped\n" % infile)
             continue

@@ -283,14 +301,19 @@ def main(
         if outfile:  # outfile was given as parameter
             outstream = _get_outstream(outfile)
         elif out_base is None:
+            out_base = tempfile.mkdtemp(prefix="py-dis-")
+            if do_verify and filename.endswith(".pyc"):
+                current_outfile = osp.join(out_base, filename[0:-1])
+                outstream = open(current_outfile, "w")
+            else:
                 outstream = sys.stdout
             if do_linemaps:
                 linemap_stream = sys.stdout
         else:
             if filename.endswith(".pyc"):
-                current_outfile = os.path.join(out_base, filename[0:-1])
+                current_outfile = osp.join(out_base, filename[0:-1])
             else:
-                current_outfile = os.path.join(out_base, filename) + "_dis"
+                current_outfile = osp.join(out_base, filename) + "_dis"
                 pass
             pass

@@ -298,9 +321,9 @@ def main(

         # print(current_outfile, file=sys.stderr)

-        # Try to uncompile the input file
+        # Try to decompile the input file.
         try:
-            deparsed = decompile_file(
+            deparsed_objects = decompile_file(
                 infile,
                 outstream,
                 showasm,
@@ -309,11 +332,13 @@ def main(
                 source_encoding,
                 linemap_stream,
                 do_fragments,
+                start_offset,
+                stop_offset,
             )
             if do_fragments:
-                for d in deparsed:
+                for deparsed_object in deparsed_objects:
                     last_mod = None
-                    offsets = d.offsets
+                    offsets = deparsed_object.offsets
                     for e in sorted(
                         [k for k in offsets.keys() if isinstance(k[1], int)]
                     ):
@@ -322,11 +347,48 @@ def main(
                             outstream.write("%s\n%s\n%s\n" % (line, e[0], line))
                             last_mod = e[0]
                         info = offsets[e]
-                        extract_info = d.extract_node_info(info)
+                        extract_info = deparse_object.extract_node_info(info)
                         outstream.write("%s" % info.node.format().strip() + "\n")
                         outstream.write(extract_info.selectedLine + "\n")
                         outstream.write(extract_info.markerLine + "\n\n")
                         pass

+            if do_verify:
+                for deparsed_object in deparsed_objects:
+                    deparsed_object.f.close()
+                    if PYTHON_VERSION_TRIPLE[:2] != deparsed_object.version[:2]:
+                        sys.stdout.write(
+                            f"\n# skipping running {deparsed_object.f.name}; it is"
+                            f"{version_tuple_to_str(deparsed_object.version, end=2)}, "
+                            "and we are "
+                            f"{version_tuple_to_str(PYTHON_VERSION_TRIPLE, end=2)}\n"
+                        )
+                    else:
+                        check_type = "syntax check"
+                        if do_verify == "run":
+                            check_type = "run"
+                            result = subprocess.run(
+                                [sys.executable, deparsed_object.f.name],
+                                capture_output=True,
+                            )
+                            valid = result.returncode == 0
+                            output = result.stdout.decode()
+                            if output:
+                                print(output)
+                                pass
+                            if not valid:
+                                print(result.stderr.decode())
+
+                        else:
+                            valid = syntax_check(deparsed_object.f.name)
+
+                        if not valid:
+                            verify_failed_files += 1
+                            sys.stderr.write(
+                                f"\n# {check_type} failed on file {deparsed_object.f.name}\n"
+                            )
+
+                        # sys.stderr.write(f"Ran {deparsed_object.f.name}\n")
             pass
             tot_files += 1
         except (ValueError, SyntaxError, ParserError, pysource.SourceWalkerError) as e:

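Note on the do_verify branch added above: each generated source file is closed and, when "run" verification was requested, re-executed with the running interpreter; a non-zero exit status counts the file as failed. A minimal standalone sketch of that check (the helper name is hypothetical, not part of the module):

    import subprocess
    import sys

    def run_check(generated_path):
        # Re-run the decompiled source; exit status 0 counts as verified.
        result = subprocess.run([sys.executable, generated_path], capture_output=True)
        if result.stdout:
            print(result.stdout.decode())
        if result.returncode != 0:
            print(result.stderr.decode())
        return result.returncode == 0
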
@@ -22,8 +22,10 @@ scanners, e.g. for Python 2.7 or 3.4.
 """

 import sys

 from array import array
 from collections import namedtuple
+from types import ModuleType

 import xdis
 from xdis import (
@@ -98,6 +100,10 @@ class Code(object):
     """

     def __init__(self, co, scanner, classname=None, show_asm=None):
+        # Full initialization is given below, but for linters
+        # well set up some initial values.
+        self.co_code = None  # Really either bytes for >= 3.0 and string in < 3.0
+
         for i in dir(co):
             if i.startswith("co_"):
                 setattr(self, i, getattr(co, i))
@@ -430,7 +436,7 @@ class Scanner:
         """
         try:
             None in instr
-        except:
+        except Exception:
             instr = [instr]

         first = self.offset2inst_index[start]
@@ -623,12 +629,11 @@ def get_scanner(version, is_pypy=False, show_asm=None):
    # If version is a string, turn that into the corresponding float.
    if isinstance(version, str):
        if version not in canonic_python_version:
-            raise RuntimeError("Unknown Python version in xdis %s" % version)
+            raise RuntimeError(f"Unknown Python version in xdis {version}")
        canonic_version = canonic_python_version[version]
        if canonic_version not in CANONIC2VERSION:
            raise RuntimeError(
-                "Unsupported Python version %s (canonic %s)"
-                % (version, canonic_version)
+                f"Unsupported Python version {version} (canonic {canonic_version})"
            )
        version = CANONIC2VERSION[canonic_version]

@@ -2035,6 +2035,8 @@ def code_deparse(
     code_objects={},
     compile_mode="exec",
     walker=FragmentsWalker,
+    start_offset: int = 0,
+    stop_offset: int = -1,
 ):
     """
     Convert the code object co into a python source fragment.
@@ -2069,6 +2071,22 @@ def code_deparse(
     tokens, customize = scanner.ingest(co, code_objects=code_objects, show_asm=show_asm)

     tokens, customize = scanner.ingest(co)

+    if start_offset > 0:
+        for i, t in enumerate(tokens):
+            # If t.offset is a string, we want to skip this.
+            if isinstance(t.offset, int) and t.offset >= start_offset:
+                tokens = tokens[i:]
+                break
+
+    if stop_offset > -1:
+        for i, t in enumerate(tokens):
+            # In contrast to the test for start_offset If t.offset is
+            # a string, we want to extract the integer offset value.
+            if t.off2int() >= stop_offset:
+                tokens = tokens[:i]
+                break
+
     maybe_show_asm(show_asm, tokens)

     debug_parser = dict(PARSER_DEFAULT_DEBUG)

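The start_offset/stop_offset handling added above simply windows the ingested token stream by bytecode offset before parsing begins. A self-contained sketch of the same idea (Tok is a stand-in for the scanner's token type, and this version filters rather than slicing at the first matching index as the real code does):

    from collections import namedtuple

    Tok = namedtuple("Tok", "kind offset")

    def window_tokens(tokens, start_offset=0, stop_offset=-1):
        # Keep only tokens whose bytecode offset falls in [start_offset, stop_offset).
        if start_offset > 0:
            tokens = [t for t in tokens if t.offset >= start_offset]
        if stop_offset > -1:
            tokens = [t for t in tokens if t.offset < stop_offset]
        return tokens

    toks = [Tok("LOAD_CONST", 0), Tok("BINARY_ADD", 8), Tok("RETURN_VALUE", 20)]
    print(window_tokens(toks, start_offset=4, stop_offset=20))  # only BINARY_ADD remains
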
@@ -130,6 +130,8 @@ Python.
 # evaluating the escape code.

 import sys
+from io import StringIO
+from typing import Optional

 from spark_parser import GenericASTTraversal
 from xdis import COMPILER_FLAG_BIT, iscode
@@ -158,7 +160,11 @@ from uncompyle6.semantics.consts import (
 )
 from uncompyle6.semantics.customize import customize_for_version
 from uncompyle6.semantics.gencomp import ComprehensionMixin
-from uncompyle6.semantics.helper import find_globals_and_nonlocals, print_docstring
+from uncompyle6.semantics.helper import (
+    find_globals_and_nonlocals,
+    is_lambda_mode,
+    print_docstring,
+)
 from uncompyle6.semantics.make_function1 import make_function1
 from uncompyle6.semantics.make_function2 import make_function2
 from uncompyle6.semantics.make_function3 import make_function3
@@ -174,8 +180,6 @@ def unicode(x):
     return x


-from io import StringIO
-
 PARSER_DEFAULT_DEBUG = {
     "rules": False,
     "transition": False,
@@ -206,7 +210,8 @@ class SourceWalkerError(Exception):

 class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
     """
-    Class to traverses a Parse Tree of the bytecode instruction built from parsing to produce some sort of source text.
+    Class to traverses a Parse Tree of the bytecode instruction built from parsing to
+    produce some sort of source text.
     The Parse tree may be turned an Abstract Syntax tree as an intermediate step.
     """

@@ -214,7 +219,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):

     def __init__(
         self,
-        version,
+        version: tuple,
         out,
         scanner,
         showast=TREE_DEFAULT_DEBUG,
@@ -224,7 +229,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         linestarts={},
         tolerate_errors=False,
     ):
-        """`version' is the Python version (a float) of the Python dialect
+        """`version' is the Python version of the Python dialect
         of both the syntax tree and language we should produce.

         `out' is IO-like file pointer to where the output should go. It
@@ -236,9 +241,12 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):

         If `showast' is True, we print the syntax tree.

-        `compile_mode' is is either 'exec' or 'single'. It is the compile
-        mode that was used to create the Syntax Tree and specifies a
-        grammar variant within a Python version to use.
+        `compile_mode` is is either `exec`, `single` or `lambda`.
+        For `lambda`, the grammar that can be used in lambda
+        expressions is used. Otherwise, it is the compile mode that
+        was used to create the Syntax Tree and specifies a grammar
+        variant within a Python version to use.

         `is_pypy` should be True if the Syntax Tree was generated for PyPy.

@@ -263,10 +271,8 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         self.currentclass = None
         self.classes = []
         self.debug_parser = dict(debug_parser)
-        # Initialize p_lambda on demand
         self.line_number = 1
         self.linemap = {}
-        self.p_lambda = None
         self.params = params
         self.param_stack = []
         self.ERROR = None
@@ -277,11 +283,15 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         self.pending_newlines = 0
         self.linestarts = linestarts
         self.treeTransform = TreeTransform(version=self.version, show_ast=showast)

         # FIXME: have p.insts update in a better way
         # modularity is broken here
         self.insts = scanner.insts
         self.offset2inst_index = scanner.offset2inst_index

+        # Initialize p_lambda on demand
+        self.p_lambda = None
+
         # This is in Python 2.6 on. It changes the way
         # strings get interpreted. See n_LOAD_CONST
         self.FUTURE_UNICODE_LITERALS = False
@@ -309,12 +319,13 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         customize_for_version(self, is_pypy, version)
         return

-    def maybe_show_tree(self, ast, phase):
+    def maybe_show_tree(self, tree, phase):
         if self.showast.get("before", False):
             self.println(
                 """
 ---- end before transform
 """
+                + " "
             )
         if self.showast.get("after", False):
             self.println(
@@ -324,7 +335,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                 + " "
             )
         if self.showast.get(phase, False):
-            maybe_show_tree(self, ast)
+            maybe_show_tree(self, tree)

     def str_with_template(self, ast):
         stream = sys.stdout
@@ -384,9 +395,9 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             i += 1
         return rv

-    def indent_if_source_nl(self, line_number: int, indent: int):
+    def indent_if_source_nl(self, line_number: int, indent_spaces: str):
         if line_number != self.line_number:
-            self.write("\n" + indent + INDENT_PER_LEVEL[:-1])
+            self.write("\n" + indent_spaces + INDENT_PER_LEVEL[:-1])
         return self.line_number

     f = property(
@@ -508,19 +519,19 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
     def pp_tuple(self, tup):
         """Pretty print a tuple"""
         last_line = self.f.getvalue().split("\n")[-1]
-        l = len(last_line) + 1
-        indent = " " * l
+        ll = len(last_line) + 1
+        indent = " " * ll
         self.write("(")
         sep = ""
         for item in tup:
             self.write(sep)
-            l += len(sep)
+            ll += len(sep)
             s = better_repr(item, self.version)
-            l += len(s)
+            ll += len(s)
             self.write(s)
             sep = ","
-            if l > LINE_LENGTH:
-                l = 0
+            if ll > LINE_LENGTH:
+                ll = 0
                 sep += "\n" + indent
             else:
                 sep += " "
@@ -564,6 +575,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):

     def print_super_classes3(self, node):
         n = len(node) - 1
+        j = 0
         if node.kind != "expr":
             if node == "kwarg":
                 self.template_engine(("(%[0]{attr}=%c)", 1), node)
@@ -601,9 +613,9 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             self.write("(")
             if kwargs:
                 # Last arg is tuple of keyword values: omit
-                l = n - 1
+                m = n - 1
             else:
-                l = n
+                m = n

             if kwargs:
                 # 3.6+ does this
@@ -615,7 +627,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                     j += 1

                 j = 0
-                while i < l:
+                while i < m:
                     self.write(sep)
                     value = self.traverse(node[i])
                     self.write("%s=%s" % (kwargs[j], value))
@@ -623,7 +635,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                     j += 1
                     i += 1
             else:
-                while i < l:
+                while i < m:
                     value = self.traverse(node[i])
                     i += 1
                     self.write(sep, value)
@@ -699,9 +711,10 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         """

         # print("-----")
-        # print(startnode)
+        # print(startnode.kind)
         # print(entry[0])
         # print('======')

         fmt = entry[0]
         arg = 1
         i = 0
@@ -795,13 +808,9 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                         node[index].kind,
                     )
                 else:
-                    assert (
-                        node[tup[0]] in tup[1]
-                    ), "at %s[%d], expected to be in '%s' node; got '%s'" % (
-                        node.kind,
-                        arg,
-                        index[1],
-                        node[index[0]].kind,
+                    assert node[tup[0]] in tup[1], (
+                        f"at {node.kind}[{tup[0]}], expected to be in '{tup[1]}' "
+                        f"node; got '{node[tup[0]].kind}'"
                     )

             else:
@@ -870,7 +879,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
             d = node.__dict__
             try:
                 self.write(eval(expr, d, d))
-            except:
+            except Exception:
                 raise
             m = escape.search(fmt, i)
         self.write(fmt[i:])
@@ -1094,8 +1103,8 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         # if docstring exists, dump it
         if code.co_consts and code.co_consts[0] is not None and len(ast) > 0:
             do_doc = False
-            if is_docstring(ast[0], self.version, code.co_consts):
             i = 0
+            if is_docstring(ast[0], self.version, code.co_consts):
                 do_doc = True
             elif len(ast) > 1 and is_docstring(ast[1], self.version, code.co_consts):
                 i = 1
@@ -1191,7 +1200,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         is_lambda=False,
         noneInNames=False,
         is_top_level_module=False,
-    ):
+    ) -> GenericASTTraversal:
         # FIXME: DRY with fragments.py

         # assert isinstance(tokens[0], Token)
@@ -1243,7 +1252,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         # Build a parse tree from a tokenized and massaged disassembly.
         try:
             # FIXME: have p.insts update in a better way
-            # modularity is broken here
+            # Modularity is broken here.
             p_insts = self.p.insts
             self.p.insts = self.scanner.insts
             self.p.offset2inst_index = self.scanner.offset2inst_index
@@ -1256,6 +1265,7 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
         checker(ast, False, self.ast_errors)

         self.customize(customize)
+
         transform_tree = self.treeTransform.transform(ast, code)

         self.maybe_show_tree(ast, phase="before")
@@ -1271,13 +1281,15 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
 def code_deparse(
     co,
     out=sys.stdout,
-    version=None,
+    version: Optional[tuple] = None,
     debug_opts=DEFAULT_DEBUG_OPTS,
     code_objects={},
     compile_mode="exec",
     is_pypy=IS_PYPY,
     walker=SourceWalker,
-):
+    start_offset: int = 0,
+    stop_offset: int = -1,
+) -> Optional[SourceWalker]:
     """
     ingests and deparses a given code block 'co'. If version is None,
     we will use the current Python interpreter version.
@@ -1285,6 +1297,9 @@ def code_deparse(

     assert iscode(co)

+    if out is None:
+        out = sys.stdout
+
     if version is None:
         version = PYTHON_VERSION_TRIPLE

@@ -1295,6 +1310,21 @@ def code_deparse(
         co, code_objects=code_objects, show_asm=debug_opts["asm"]
     )

+    if start_offset > 0:
+        for i, t in enumerate(tokens):
+            # If t.offset is a string, we want to skip this.
+            if isinstance(t.offset, int) and t.offset >= start_offset:
+                tokens = tokens[i:]
+                break
+
+    if stop_offset > -1:
+        for i, t in enumerate(tokens):
+            # In contrast to the test for start_offset If t.offset is
+            # a string, we want to extract the integer offset value.
+            if t.off2int() >= stop_offset:
+                tokens = tokens[:i]
+                break
+
     debug_parser = debug_opts.get("grammar", dict(PARSER_DEFAULT_DEBUG))

     # Build Syntax Tree from disassembly.
@@ -1318,7 +1348,7 @@ def code_deparse(
         tokens,
         customize,
         co,
-        is_lambda=(compile_mode == "lambda"),
+        is_lambda=is_lambda_mode(compile_mode),
         is_top_level_module=is_top_level_module,
     )

@@ -1327,7 +1357,7 @@ def code_deparse(
         return None

     # FIXME use a lookup table here.
-    if compile_mode == "lambda":
+    if is_lambda_mode(compile_mode):
         expected_start = "lambda_start"
     elif compile_mode == "eval":
         expected_start = "expr_start"
@@ -1340,6 +1370,7 @@ def code_deparse(
         expected_start = None
     else:
         expected_start = None
+
     if expected_start:
         assert (
             deparsed.ast == expected_start
@@ -1386,7 +1417,7 @@ def code_deparse(
         deparsed.ast,
         name=co.co_name,
         customize=customize,
-        is_lambda=compile_mode == "lambda",
+        is_lambda=is_lambda_mode(compile_mode),
         debug_opts=debug_opts,
     )

@@ -1414,9 +1445,12 @@ def deparse_code2str(
     compile_mode="exec",
     is_pypy=IS_PYPY,
     walker=SourceWalker,
-):
-    """Return the deparsed text for a Python code object. `out` is where any intermediate
-    output for assembly or tree output will be sent.
+    start_offset: int = 0,
+    stop_offset: int = -1,
+) -> str:
+    """
+    Return the deparsed text for a Python code object. `out` is where
+    any intermediate output for assembly or tree output will be sent.
     """
     return code_deparse(
         code,

@@ -1,4 +1,4 @@
|
|||||||
# Copyright (c) 2019-2023 by Rocky Bernstein
|
# Copyright (c) 2019-2024 by Rocky Bernstein
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU General Public License as published by
|
# it under the terms of the GNU General Public License as published by
|
||||||
@@ -13,14 +13,16 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from uncompyle6.show import maybe_show_tree
|
|
||||||
from copy import copy
|
from copy import copy
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException
|
from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException
|
||||||
|
|
||||||
from uncompyle6.semantics.helper import find_code_node
|
|
||||||
from uncompyle6.parsers.treenode import SyntaxTree
|
from uncompyle6.parsers.treenode import SyntaxTree
|
||||||
from uncompyle6.scanners.tok import NoneToken, Token
|
from uncompyle6.scanners.tok import NoneToken, Token
|
||||||
from uncompyle6.semantics.consts import RETURN_NONE, ASSIGN_DOC_STRING
|
from uncompyle6.semantics.consts import ASSIGN_DOC_STRING, RETURN_NONE
|
||||||
|
from uncompyle6.semantics.helper import find_code_node
|
||||||
|
from uncompyle6.show import maybe_show_tree
|
||||||
|
|
||||||
|
|
||||||
def is_docstring(node, version, co_consts):
|
def is_docstring(node, version, co_consts):
|
||||||
@@ -55,27 +57,34 @@ def is_docstring(node, version, co_consts):
     return node == ASSIGN_DOC_STRING(co_consts[0], doc_load)


-def is_not_docstring(call_stmt_node):
+def is_not_docstring(call_stmt_node) -> bool:
     try:
         return (
             call_stmt_node == "call_stmt"
             and call_stmt_node[0][0] == "LOAD_STR"
             and call_stmt_node[1] == "POP_TOP"
         )
-    except:
+    except Exception:
         return False


 class TreeTransform(GenericASTTraversal, object):
-    def __init__(self, version, show_ast=None, is_pypy=False):
+    def __init__(
+        self,
+        version: tuple,
+        is_pypy=False,
+        show_ast: Optional[dict] = None,
+    ):
         self.version = version
         self.showast = show_ast
         self.is_pypy = is_pypy
         return

-    def maybe_show_tree(self, ast):
-        if isinstance(self.showast, dict) and self.showast:
-            maybe_show_tree(self, ast)
+    def maybe_show_tree(self, tree):
+        if isinstance(self.showast, dict) and (
+            self.showast.get("before") or self.showast.get("after")
+        ):
+            maybe_show_tree(self, tree)

     def preorder(self, node=None):
         """Walk the tree in roughly 'preorder' (a bit of a lie explained below).
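With the reworked constructor, `show_ast` is expected to be a dict and `maybe_show_tree()` only fires when its "before" or "after" entry is truthy. A small sketch of how this might be driven; the module path and the concrete values are assumptions:

    from uncompyle6.semantics.transform import TreeTransform

    tree_transform = TreeTransform(
        version=(3, 8),
        is_pypy=False,
        show_ast={"before": False, "after": True},  # keys checked by maybe_show_tree()
    )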
@@ -119,12 +128,10 @@ class TreeTransform(GenericASTTraversal, object):

         mkfunc_pattr = node[-1].pattr
         if isinstance(mkfunc_pattr, tuple):
+            assert isinstance(mkfunc_pattr, tuple)
             assert len(mkfunc_pattr) == 4 and isinstance(mkfunc_pattr, int)

-        if (
-            len(code.co_consts) > 0
-            and isinstance(code.co_consts[0], str)
-        ):
+        if len(code.co_consts) > 0 and isinstance(code.co_consts[0], str):
             docstring_node = SyntaxTree(
                 "docstring", [Token("LOAD_STR", has_arg=True, pattr=code.co_consts[0])]
             )
@@ -148,7 +155,11 @@ class TreeTransform(GenericASTTraversal, object):

             if ifstmts_jump == "_ifstmts_jumpl" and ifstmts_jump[0] == "_ifstmts_jump":
                 ifstmts_jump = ifstmts_jump[0]
-            elif ifstmts_jump not in ("_ifstmts_jump", "_ifstmts_jumpl", "ifstmts_jumpl"):
+            elif ifstmts_jump not in (
+                "_ifstmts_jump",
+                "_ifstmts_jumpl",
+                "ifstmts_jumpl",
+            ):
                 return node
             stmts = ifstmts_jump[0]
         else:
@@ -208,10 +219,11 @@ class TreeTransform(GenericASTTraversal, object):
                 kind = "assert2not"

             LOAD_ASSERT = call[0].first_child()
-            if LOAD_ASSERT not in ( "LOAD_ASSERT", "LOAD_GLOBAL"):
+            if LOAD_ASSERT not in ("LOAD_ASSERT", "LOAD_GLOBAL"):
                 return node
             if isinstance(call[1], SyntaxTree):
                 expr = call[1][0]
+                assert_expr.transformed_by = "n_ifstmt"
                 node = SyntaxTree(
                     kind,
                     [
@@ -221,8 +233,8 @@ class TreeTransform(GenericASTTraversal, object):
                         expr,
                         RAISE_VARARGS_1,
                     ],
+                    transformed_by="n_ifstmt",
                 )
-                node.transformed_by = "n_ifstmt"
                 pass
             pass
         else:
@@ -250,9 +262,10 @@ class TreeTransform(GenericASTTraversal, object):

             LOAD_ASSERT = expr[0]
             node = SyntaxTree(
-                kind, [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
+                kind,
+                [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1],
+                transformed_by="n_ifstmt",
             )
-            node.transformed_by = ("n_ifstmt",)
                 pass
             pass
         return node
@@ -289,7 +302,12 @@ class TreeTransform(GenericASTTraversal, object):

         len_n = len(n)
         # Sometimes stmt is reduced away and n[0] can be a single reduction like continue -> CONTINUE.
-        if len_n == 1 and isinstance(n[0], SyntaxTree) and len(n[0]) == 1 and n[0] == "stmt":
+        if (
+            len_n == 1
+            and isinstance(n[0], SyntaxTree)
+            and len(n[0]) == 1
+            and n[0] == "stmt"
+        ):
             n = n[0][0]
         elif len_n == 0:
             return node
@@ -407,22 +425,26 @@ class TreeTransform(GenericASTTraversal, object):
         list_for_node.transformed_by = ("n_list_for",)
         return list_for_node

+    def n_negated_testtrue(self, node):
+        assert node[0] == "testtrue"
+        test_node = node[0][0]
+        test_node.transformed_by = "n_negated_testtrue"
+        return test_node
+
     def n_stmts(self, node):
         if node.first_child() == "SETUP_ANNOTATIONS":
             prev = node[0][0]
             new_stmts = [node[0]]
             for i, sstmt in enumerate(node[1:]):
                 ann_assign = sstmt[0]
-                if (
-                    ann_assign == "ann_assign"
-                    and prev == "assign"
-                ):
+                if ann_assign == "ann_assign" and prev == "assign":
                     annotate_var = ann_assign[-2]
                     if annotate_var.attr == prev[-1][0].attr:
                         node[i].kind = "deleted " + node[i].kind
                         del new_stmts[-1]
                         ann_assign_init = SyntaxTree(
-                            "ann_assign_init", [ann_assign[0], copy(prev[0]), annotate_var]
+                            "ann_assign_init",
+                            [ann_assign[0], copy(prev[0]), annotate_var],
                         )
                         if sstmt[0] == "ann_assign":
                             sstmt[0] = ann_assign_init
@@ -441,26 +463,28 @@ class TreeTransform(GenericASTTraversal, object):
             node = self.preorder(node)
             return node

-    def transform(self, ast, code):
-        self.maybe_show_tree(ast)
-        self.ast = copy(ast)
+    def transform(self, parse_tree: GenericASTTraversal, code) -> GenericASTTraversal:
+        self.maybe_show_tree(parse_tree)
+        self.ast = copy(parse_tree)
+        del parse_tree
         self.ast = self.traverse(self.ast, is_lambda=False)
+        n = len(self.ast)

         try:
             # Disambiguate a string (expression) which appears as a "call_stmt" at
             # the beginning of a function versus a docstring. Seems pretty academic,
             # but this is Python.
-            call_stmt = ast[0][0]
+            call_stmt = self.ast[0][0]
             if is_not_docstring(call_stmt):
                 call_stmt.kind = "string_at_beginning"
                 call_stmt.transformed_by = "transform"
                 pass
-        except:
+        except Exception:
             pass

         try:
-            for i in range(len(self.ast)):
-                sstmt = ast[i]
+            for i in range(n):
+                sstmt = self.ast[i]
                 if len(sstmt) == 1 and sstmt == "sstmt":
                     self.ast[i] = self.ast[i][0]

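A hedged sketch of driving the retyped `transform()` method; the variable names are illustrative only:

    # parse_tree: the GenericASTTraversal tree produced by the parser
    # code: the code object that was parsed
    transformed_tree = tree_transform.transform(parse_tree, code)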
@@ -486,7 +510,7 @@ class TreeTransform(GenericASTTraversal, object):
             if self.ast[-1] == RETURN_NONE:
                 self.ast.pop()  # remove last node
                 # todo: if empty, add 'pass'
-        except:
+        except Exception:
             pass

         return self.ast