python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Commit: Merge branch 'master' into python-2.4
(file name not shown; Travis CI configuration)
@@ -1,12 +1,7 @@
 language: python
 
 python:
-  - '2.7' # this is a cheat here because travis doesn't do 2.4-2.6
+  - 2.7 # this is a cheat here because travis doesn't do 2.4-2.6
 
-matrix:
-  include:
-    - python: '2.7'
-      dist: xenial # required for Python >= 3.7 (travis-ci/travis-ci#9069)
-
 install:
   - pip install -e .
Makefile (2 lines changed)
@@ -49,7 +49,7 @@ check-2.4 check-2.5:
 2.6 5.0 5.3 5.6 5.8:
 
 #:PyPy pypy3-2.4.0 Python 3:
-pypy-3.2 2.4:
+7.1 pypy-3.2 2.4:
 	$(MAKE) -C test $@
 
 #: Run py.test tests
NEWS.md (2 lines changed)
@@ -1,4 +1,4 @@
-4.1.0 2019-10-12 Stony Brook Ride
+3.5.0 2019-10-12 Stony Brook Ride
 =================================
 
 - Fix fragment bugs
(file name not shown; package metadata with install_requires)
@@ -58,7 +58,7 @@ entry_points = {
 ]}
 ftp_url = None
 install_requires = ["spark-parser >= 1.8.9, < 1.9.0",
-                    "xdis >= 4.1.0, < 4.2.0"]
+                    "xdis >= 4.1.2, < 4.2.0"]
 
 license = "GPL3"
 mailing_list = "python-debugger@googlegroups.com"
(file name not shown; release how-to notes)
@@ -63,6 +63,7 @@ Goto https://github.com/rocky/python-uncompyle6/releases
 
 # Upload single package and look at Rst Formating
 
+    $ twine check dist/uncompyle6-${VERSION}*
     $ twine upload dist/uncompyle6-${VERSION}-py3.3.egg
 
 # Upload rest of versions
(file name not shown; a sourced shell script setting PYVERSIONS)
@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
     echo "This script should be *sourced* rather than run directly through bash"
     exit 1
 fi
-export PYVERSIONS='3.6.9 3.7.4 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.10 3.5.7'
+export PYVERSIONS='3.6.9 3.7.5 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.10 3.5.7'
(file name not shown; the grammar test, run under pytest)
@@ -3,48 +3,52 @@ from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY # , PYTHON_VERSION
 from uncompyle6.parser import get_python_parser, python_parser
 from uncompyle6.scanner import get_scanner
 
-def test_grammar():
+
+def test_grammar():
     def check_tokens(tokens, opcode_set):
         remain_tokens = set(tokens) - opcode_set
-        remain_tokens = set([re.sub(r'_\d+$','', t) for t in remain_tokens])
-        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
-        remain_tokens = set([re.sub('LOAD_CODE$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub(r"_\d+$", "", t) for t in remain_tokens])
+        remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens])
+        remain_tokens = set([re.sub("LOAD_CODE$", "", t) for t in remain_tokens])
         remain_tokens = set(remain_tokens) - opcode_set
-        assert remain_tokens == set([]), \
-            "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())
+        assert remain_tokens == set([]), "Remaining tokens %s\n====\n%s" % (
+            remain_tokens,
+            p.dump_grammar(),
+        )
 
     p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
-    (lhs, rhs, tokens,
-     right_recursive, dup_rhs) = p.check_sets()
+    (lhs, rhs, tokens, right_recursive, dup_rhs) = p.check_sets()
 
     # We have custom rules that create the below
-    expect_lhs = set(['pos_arg', 'attribute'])
+    expect_lhs = set(["pos_arg", "attribute"])
     if PYTHON_VERSION < 3.8:
-        expect_lhs.add('get_iter')
+        expect_lhs.add("get_iter")
+    else:
+        expect_lhs.add("async_with_as_stmt")
+        expect_lhs.add("async_with_stmt")
 
-    unused_rhs = set(['list', 'mkfunc',
-                      'mklambda',
-                      'unpack',])
+    unused_rhs = set(["list", "mkfunc", "mklambda", "unpack"])
 
-    expect_right_recursive = set([('designList',
-                                   ('store', 'DUP_TOP', 'designList'))])
+    expect_right_recursive = set([("designList", ("store", "DUP_TOP", "designList"))])
 
     if PYTHON_VERSION < 3.7:
-        unused_rhs.add('call')
+        unused_rhs.add("call")
 
     if PYTHON_VERSION > 2.6:
-        expect_lhs.add('kvlist')
-        expect_lhs.add('kv3')
-        unused_rhs.add('dict')
+        expect_lhs.add("kvlist")
+        expect_lhs.add("kv3")
+        unused_rhs.add("dict")
 
     if PYTHON3:
-        expect_lhs.add('load_genexpr')
+        expect_lhs.add("load_genexpr")
 
-        unused_rhs = unused_rhs.union(set("""
+        unused_rhs = unused_rhs.union(
+            set(
+                """
             except_pop_except generator_exp
-            """.split()))
+            """.split()
+            )
+        )
     if PYTHON_VERSION >= 3.0:
         expect_lhs.add("annotate_arg")
         expect_lhs.add("annotate_tuple")
@@ -53,17 +57,19 @@ def test_grammar():
         unused_rhs.add("classdefdeco1")
         unused_rhs.add("tryelsestmtl")
     if PYTHON_VERSION >= 3.5:
-        expect_right_recursive.add((('l_stmts',
-                                     ('lastl_stmt', 'come_froms', 'l_stmts'))))
+        expect_right_recursive.add(
+            (("l_stmts", ("lastl_stmt", "come_froms", "l_stmts")))
+        )
         pass
     elif 3.0 < PYTHON_VERSION < 3.3:
-        expect_right_recursive.add((('l_stmts',
-                                     ('lastl_stmt', 'COME_FROM', 'l_stmts'))))
+        expect_right_recursive.add(
+            (("l_stmts", ("lastl_stmt", "COME_FROM", "l_stmts")))
+        )
         pass
         pass
         pass
     else:
-        expect_lhs.add('kwarg')
+        expect_lhs.add("kwarg")
 
     assert expect_lhs == set(lhs)
 
@@ -73,9 +79,16 @@ def test_grammar():
 
     assert expect_right_recursive == right_recursive
 
-    expect_dup_rhs = frozenset([('COME_FROM',), ('CONTINUE',), ('JUMP_ABSOLUTE',),
-                                ('LOAD_CONST',),
-                                ('JUMP_BACK',), ('JUMP_FORWARD',)])
+    expect_dup_rhs = frozenset(
+        [
+            ("COME_FROM",),
+            ("CONTINUE",),
+            ("JUMP_ABSOLUTE",),
+            ("LOAD_CONST",),
+            ("JUMP_BACK",),
+            ("JUMP_FORWARD",),
+        ]
+    )
     reduced_dup_rhs = dict((k, dup_rhs[k]) for k in dup_rhs if k not in expect_dup_rhs)
     for k in reduced_dup_rhs:
         print(k, reduced_dup_rhs[k])
@@ -92,22 +105,33 @@ def test_grammar():
         LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_STR LOAD_CODE
         LAMBDA_MARKER
         RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
-        """.split())
+        """.split()
+    )
     if 2.6 <= PYTHON_VERSION <= 2.7:
         opcode_set = set(s.opc.opname).union(ignore_set)
         if PYTHON_VERSION == 2.6:
             opcode_set.add("THEN")
         check_tokens(tokens, opcode_set)
     elif PYTHON_VERSION == 3.4:
-        ignore_set.add('LOAD_CLASSNAME')
-        ignore_set.add('STORE_LOCALS')
+        ignore_set.add("LOAD_CLASSNAME")
+        ignore_set.add("STORE_LOCALS")
         opcode_set = set(s.opc.opname).union(ignore_set)
         check_tokens(tokens, opcode_set)
 
 
 def test_dup_rule():
     import inspect
-    python_parser(PYTHON_VERSION, inspect.currentframe().f_code,
-                  is_pypy=IS_PYPY,
-                  parser_debug={
-                      'dups': True, 'transition': False, 'reduce': False,
-                      'rules': False, 'errorstack': None, 'context': True})
+    python_parser(
+        PYTHON_VERSION,
+        inspect.currentframe().f_code,
+        is_pypy=IS_PYPY,
+        parser_debug={
+            "dups": True,
+            "transition": False,
+            "reduce": False,
+            "rules": False,
+            "errorstack": None,
+            "context": True,
+        },
+    )
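Not part of the commit: a minimal sketch of the normalization that check_tokens performs above. Grammar tokens such as CALL_FUNCTION_1 or COME_FROM_2 carry numeric or _CONT suffixes; stripping them should leave only names in the scanner's opcode set.

import re

tokens = {"CALL_FUNCTION_1", "COME_FROM_2", "JUMP_BACK_CONT", "LOAD_CODE"}
normalized = set(re.sub(r"_\d+$", "", t) for t in tokens)      # drop numeric suffix
normalized = set(re.sub("_CONT$", "", t) for t in normalized)  # drop _CONT suffix
# normalized is now {"CALL_FUNCTION", "COME_FROM", "JUMP_BACK", "LOAD_CODE"}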
(file name not shown; test Makefile with check-bytecode targets)
@@ -100,7 +100,7 @@ check-bytecode-3:
 	--bytecode-3.1 --bytecode-3.2 --bytecode-3.3 \
 	--bytecode-3.4 --bytecode-3.5 --bytecode-3.6 \
 	--bytecode-3.7 \
-	--bytecode-pypy3.2
+	--bytecode-pypy3.2 --bytecode-pypy3.6 --bytecode-3.8
 
 #: Check deparsing on selected bytecode 3.x
 check-bytecode-3-short:
@@ -338,7 +338,9 @@ pypy-3.2 2.4:
 	$(PYTHON) test_pythonlib.py --bytecode-pypy3.2 --verify
 
 #: PyPy 5.0.x with Python 3.6 ...
+check-bytecode-pypy3.6: 7.1
 7.1:
+	$(PYTHON) test_pythonlib.py --bytecode-pypy3.6-run --verify-run
 	$(PYTHON) test_pythonlib.py --bytecode-pypy3.6 --verify
 
 
Binary files added (contents not shown):
  BIN  test/bytecode_3.8/01_for_continue.pyc
  BIN  test/bytecode_pypy3.5/00_assign.pyc
  BIN  test/bytecode_pypy3.5/00_import.pyc
  BIN  test/bytecode_pypy3.5/11_classbug.pyc
  BIN  test/bytecode_pypy3.6/00_import.pyc
  BIN  test/bytecode_pypy3.6/04_class_kwargs.pyc
  BIN  test/bytecode_pypy3.6/11_classbug.pyc
  BIN  test/bytecode_pypy3.6_run/00_assign.pyc
  BIN  test/bytecode_pypy3.6_run/00_docstring.pyc
  BIN  test/bytecode_pypy3.6_run/01_fstring.pyc
test/simple_source/bug38/01_extra_iter.py (new file, 7 lines)
@@ -0,0 +1,7 @@
+# Adapted from From 3.3 urllib/parse.py
+qs = "https://travis-ci.org/rocky/python-uncompyle6/builds/605260823?utm_medium=notification&utm_source=email"
+expect = ['https://travis-ci.org/rocky/python-uncompyle6/builds/605260823?utm_medium=notification', 'utm_source=email']
+
+# Should visually see that we don't add an extra iter() which is not technically wrong, just
+# unnecessary.
+assert expect == [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
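Not part of the commit: the nested comprehension in the new test splits first on '&', then on ';'. Since the URL above contains no ';', the inner split is a no-op and the expected value is just the '&' parts, as a small check shows:

qs = "a=1&b=2"
assert [s2 for s1 in qs.split('&') for s2 in s1.split(';')] == ['a=1', 'b=2']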
test/simple_source/bug38/01_for_continue.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+# Bug is turning a JUMP_BACK for a CONTINUE so for has no JUMP_BACK.
+# Also there is no POP_BLOCK since there isn't anything in the loop.
+# In the future when we have better control flow, we might redo all of this.
+for i in range(2):
+    pass
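Not part of the commit: the bytecode pattern those comments describe can be inspected with the stdlib disassembler. CPython emits a backward JUMP_ABSOLUTE at the end of the loop body; uncompyle6's scanner renames such backward jumps to JUMP_BACK (or CONTINUE), which is the distinction the bug is about.

import dis

def f():
    for i in range(2):
        pass

dis.dis(f)  # look for the backward JUMP_ABSOLUTE closing the loop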
(file name not shown; test driver with a TEST_VERSIONS table)
@@ -42,6 +42,7 @@ TEST_VERSIONS = (
     "pypy3.5-5.7.1-beta",
     "pypy3.5-5.9.0",
     "pypy3.5-6.0.0",
+    "pypy3.6-7.1.0",
     "native",
 ) + tuple(python_versions)
 
(file name not shown; disassembly module with disco() and disassemble_file())
@@ -32,12 +32,11 @@ want to run on earlier Python versions.
 import sys
 from collections import deque
 
-import uncompyle6
 
 from xdis.code import iscode
 from xdis.load import check_object_path, load_module
 from uncompyle6.scanner import get_scanner
 
 
 def disco(version, co, out=None, is_pypy=False):
     """
     diassembles and deparses a given code block 'co'
@@ -47,9 +46,9 @@ def disco(version, co, out=None, is_pypy=False):
 
     # store final output stream for case of error
     real_out = out or sys.stdout
-    real_out.write('# Python %s\n' % version)
+    real_out.write("# Python %s\n" % version)
     if co.co_filename:
-        real_out.write('# Embedded file name: %s\n' % co.co_filename)
+        real_out.write("# Embedded file name: %s\n" % co.co_filename)
 
     scanner = get_scanner(version, is_pypy=is_pypy)
 
@@ -60,8 +59,8 @@ def disco(version, co, out=None, is_pypy=False):
 def disco_loop(disasm, queue, real_out):
     while len(queue) > 0:
         co = queue.popleft()
-        if co.co_name != '<module>':
-            real_out.write('\n# %s line %d of %s\n' %
+        if co.co_name != "<module>":
+            real_out.write("\n# %s line %d of %s\n" %
                            (co.co_name, co.co_firstlineno, co.co_filename))
         tokens, customize = disasm(co)
         for t in tokens:
@@ -73,6 +72,7 @@ def disco_loop(disasm, queue, real_out):
             pass
         pass
 
+
 # def disassemble_fp(fp, outstream=None):
 #     """
 #     disassemble Python byte-code from an open file
@@ -86,6 +86,7 @@ def disco_loop(disasm, queue, real_out):
 #         disco(version, co, outstream, is_pypy=is_pypy)
 #     co = None
 
+
 def disassemble_file(filename, outstream=None):
     """
     disassemble Python byte-code file (.pyc)
@@ -94,8 +95,7 @@ def disassemble_file(filename, outstream=None):
     try to find the corresponding compiled object.
     """
     filename = check_object_path(filename)
-    (version, timestamp, magic_int, co, is_pypy,
-     source_size) = load_module(filename)
+    (version, timestamp, magic_int, co, is_pypy, source_size) = load_module(filename)
     if type(co) == list:
         for con in co:
             disco(version, con, outstream)
@@ -103,6 +103,7 @@ def disassemble_file(filename, outstream=None):
         disco(version, co, outstream, is_pypy=is_pypy)
     co = None
 
+
 def _test():
     """Simple test program to disassemble a file."""
     argc = len(sys.argv)
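Not part of the commit: a minimal usage sketch of the module above, assuming it is uncompyle6.disas as in this repository.

from uncompyle6.disas import disassemble_file

# Writes a deparse-oriented disassembly of foo.pyc to stdout; pass a
# file-like object as the second argument to capture the output instead.
disassemble_file("foo.pyc")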
(file name not shown; the Python2Parser grammar)
@@ -1,4 +1,4 @@
-#  Copyright (c) 2015-2018 Rocky Bernstein
+#  Copyright (c) 2015-2019 Rocky Bernstein
 #  Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 #
 #  Copyright (c) 1999 John Aycock
@@ -29,10 +29,10 @@ from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
 from uncompyle6.parsers.treenode import SyntaxTree
 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 
-class Python2Parser(PythonParser):
+
+class Python2Parser(PythonParser):
     def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
-        super(Python2Parser, self).__init__(SyntaxTree, 'stmts', debug=debug_parser)
+        super(Python2Parser, self).__init__(SyntaxTree, "stmts", debug=debug_parser)
         self.new_rules = set()
 
     def p_print2(self, args):
@@ -50,7 +50,7 @@ class Python2Parser(PythonParser):
         """
 
     def p_print_to(self, args):
-        '''
+        """
         stmt ::= print_to
         stmt ::= print_to_nl
         stmt ::= print_nl_to
@@ -60,10 +60,10 @@ class Python2Parser(PythonParser):
         print_to_items ::= print_to_items print_to_item
         print_to_items ::= print_to_item
         print_to_item ::= DUP_TOP expr ROT_TWO PRINT_ITEM_TO
-        '''
+        """
 
     def p_grammar(self, args):
-        '''
+        """
         sstmt ::= stmt
         sstmt ::= return RETURN_LAST
 
@@ -174,12 +174,12 @@ class Python2Parser(PythonParser):
         jmp_abs ::= JUMP_ABSOLUTE
         jmp_abs ::= JUMP_BACK
         jmp_abs ::= CONTINUE
-        '''
+        """
 
     def p_generator_exp2(self, args):
-        '''
+        """
        generator_exp ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
-        '''
+        """
 
     def p_expr2(self, args):
         """
@@ -250,25 +250,41 @@ class Python2Parser(PythonParser):
         this.
         """
 
-        if 'PyPy' in customize:
+        if "PyPy" in customize:
             # PyPy-specific customizations
-            self.addRule("""
+            self.addRule(
+                """
                          stmt ::= assign3_pypy
                          stmt ::= assign2_pypy
                          assign3_pypy ::= expr expr expr store store store
                          assign2_pypy ::= expr expr store store
                          list_comp ::= expr BUILD_LIST_FROM_ARG for_iter store list_iter
                                        JUMP_BACK
-                         """, nop_func)
+                         """,
+                nop_func,
+            )
 
         # For a rough break out on the first word.  This may
         # include instructions that don't need customization,
         # but we'll do a finer check after the rough breakout.
         customize_instruction_basenames = frozenset(
-            ('BUILD', 'CALL', 'CONTINUE', 'DELETE',
-             'DUP', 'EXEC', 'GET', 'JUMP',
-             'LOAD', 'LOOKUP', 'MAKE', 'SETUP',
-             'RAISE', 'UNPACK'))
+            (
+                "BUILD",
+                "CALL",
+                "CONTINUE",
+                "DELETE",
+                "DUP",
+                "EXEC",
+                "GET",
+                "JUMP",
+                "LOAD",
+                "LOOKUP",
+                "MAKE",
+                "SETUP",
+                "RAISE",
+                "UNPACK",
+            )
+        )
 
         # Opcode names in the custom_seen_ops set have rules that get added
         # unconditionally and the rules are constant.  So they need to be done
@@ -282,139 +298,191 @@ class Python2Parser(PythonParser):
 
             # Do a quick breakout before testing potentially
             # each of the dozen or so instruction in if elif.
-            if (opname[:opname.find('_')] not in customize_instruction_basenames
-                    or opname in custom_seen_ops):
+            if (
+                opname[: opname.find("_")] not in customize_instruction_basenames
+                or opname in custom_seen_ops
+            ):
                 continue
 
-            opname_base = opname[:opname.rfind('_')]
+            opname_base = opname[: opname.rfind("_")]
 
             # The order of opname listed is roughly sorted below
-            if opname_base in ('BUILD_LIST', 'BUILD_SET', 'BUILD_TUPLE'):
+            if opname_base in ("BUILD_LIST", "BUILD_SET", "BUILD_TUPLE"):
                 # We do this complicated test to speed up parsing of
                 # pathelogically long literals, especially those over 1024.
                 build_count = token.attr
-                thousands = (build_count//1024)
-                thirty32s = ((build_count//32) % 32)
+                thousands = build_count // 1024
+                thirty32s = (build_count // 32) % 32
                 if thirty32s > 0:
-                    rule = "expr32 ::=%s" % (' expr' * 32)
+                    rule = "expr32 ::=%s" % (" expr" * 32)
                     self.add_unique_rule(rule, opname_base, build_count, customize)
                 if thousands > 0:
-                    self.add_unique_rule("expr1024 ::=%s" % (' expr32' * 32),
-                                         opname_base, build_count, customize)
-                collection = opname_base[opname_base.find('_')+1:].lower()
-                rule = (('%s ::= ' % collection) + 'expr1024 '*thousands +
-                        'expr32 '*thirty32s + 'expr '*(build_count % 32) + opname)
-                self.add_unique_rules([
-                    "expr ::= %s" % collection,
-                    rule], customize)
+                    self.add_unique_rule(
+                        "expr1024 ::=%s" % (" expr32" * 32),
+                        opname_base,
+                        build_count,
+                        customize,
+                    )
+                collection = opname_base[opname_base.find("_") + 1 :].lower()
+                rule = (
+                    ("%s ::= " % collection)
+                    + "expr1024 " * thousands
+                    + "expr32 " * thirty32s
+                    + "expr " * (build_count % 32)
+                    + opname
+                )
+                self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
                 continue
-            elif opname_base == 'BUILD_MAP':
-                if opname == 'BUILD_MAP_n':
+            elif opname_base == "BUILD_MAP":
+                if opname == "BUILD_MAP_n":
                     # PyPy sometimes has no count. Sigh.
-                    self.add_unique_rules([
-                        'kvlist_n ::= kvlist_n kv3',
-                        'kvlist_n ::=',
-                        'dict ::= BUILD_MAP_n kvlist_n',
-                    ], customize)
+                    self.add_unique_rules(
+                        [
+                            "kvlist_n ::= kvlist_n kv3",
+                            "kvlist_n ::=",
+                            "dict ::= BUILD_MAP_n kvlist_n",
+                        ],
+                        customize,
+                    )
                     if self.version >= 2.7:
                         self.add_unique_rule(
-                            'dict_comp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store '
-                            'comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST',
-                            'dict_comp_func', 0, customize)
+                            "dict_comp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store "
+                            "comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST",
+                            "dict_comp_func",
+                            0,
+                            customize,
+                        )
 
                 else:
-                    kvlist_n = ' kv3' * token.attr
+                    kvlist_n = " kv3" * token.attr
                     rule = "dict ::= %s%s" % (opname, kvlist_n)
                     self.addRule(rule, nop_func)
                 continue
-            elif opname_base == 'BUILD_SLICE':
+            elif opname_base == "BUILD_SLICE":
                 slice_num = token.attr
                 if slice_num == 2:
-                    self.add_unique_rules([
-                        'expr ::= build_slice2',
-                        'build_slice2 ::= expr expr BUILD_SLICE_2'
-                    ], customize)
+                    self.add_unique_rules(
+                        [
+                            "expr ::= build_slice2",
+                            "build_slice2 ::= expr expr BUILD_SLICE_2",
+                        ],
+                        customize,
+                    )
                 else:
-                    assert slice_num == 3, ("BUILD_SLICE value must be 2 or 3; is %s" %
-                                            slice_num)
-                    self.add_unique_rules([
-                        'expr ::= build_slice3',
-                        'build_slice3 ::= expr expr expr BUILD_SLICE_3',
-                    ], customize)
+                    assert slice_num == 3, (
+                        "BUILD_SLICE value must be 2 or 3; is %s" % slice_num
+                    )
+                    self.add_unique_rules(
+                        [
+                            "expr ::= build_slice3",
+                            "build_slice3 ::= expr expr expr BUILD_SLICE_3",
+                        ],
+                        customize,
+                    )
                 continue
-            elif opname_base in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
-                                 'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
+            elif opname_base in (
+                "CALL_FUNCTION",
+                "CALL_FUNCTION_VAR",
+                "CALL_FUNCTION_VAR_KW",
+                "CALL_FUNCTION_KW",
+            ):
 
                 args_pos, args_kw = self.get_pos_kw(token)
 
                 # number of apply equiv arguments:
-                nak = ( len(opname_base)-len('CALL_FUNCTION') ) // 3
-                rule = 'call ::= expr ' + 'expr '*args_pos + 'kwarg '*args_kw \
-                       + 'expr ' * nak + opname
-            elif opname_base == 'CALL_METHOD':
+                nak = (len(opname_base) - len("CALL_FUNCTION")) // 3
+                rule = (
+                    "call ::= expr "
+                    + "expr " * args_pos
+                    + "kwarg " * args_kw
+                    + "expr " * nak
+                    + opname
+                )
+            elif opname_base == "CALL_METHOD":
                 # PyPy only - DRY with parse3
 
                 args_pos, args_kw = self.get_pos_kw(token)
 
                 # number of apply equiv arguments:
-                nak = ( len(opname_base)-len('CALL_METHOD') ) // 3
-                rule = 'call ::= expr ' + 'expr '*args_pos + 'kwarg '*args_kw \
-                       + 'expr ' * nak + opname
-            elif opname == 'CONTINUE_LOOP':
-                self.addRule('continue ::= CONTINUE_LOOP', nop_func)
+                nak = (len(opname_base) - len("CALL_METHOD")) // 3
+                rule = (
+                    "call ::= expr "
+                    + "expr " * args_pos
+                    + "kwarg " * args_kw
+                    + "expr " * nak
+                    + opname
+                )
+            elif opname == "CONTINUE_LOOP":
+                self.addRule("continue ::= CONTINUE_LOOP", nop_func)
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'DELETE_ATTR':
-                self.addRule('del_stmt ::= expr DELETE_ATTR', nop_func)
+            elif opname == "DELETE_ATTR":
+                self.addRule("del_stmt ::= expr DELETE_ATTR", nop_func)
                 custom_seen_ops.add(opname)
                 continue
-            elif opname.startswith('DELETE_SLICE'):
-                self.addRule("""
+            elif opname.startswith("DELETE_SLICE"):
+                self.addRule(
+                    """
                     del_expr ::= expr
                     del_stmt ::= del_expr DELETE_SLICE+0
                     del_stmt ::= del_expr del_expr DELETE_SLICE+1
                     del_stmt ::= del_expr del_expr DELETE_SLICE+2
                     del_stmt ::= del_expr del_expr del_expr DELETE_SLICE+3
-                    """, nop_func)
+                    """,
+                    nop_func,
+                )
                 custom_seen_ops.add(opname)
-                self.check_reduce['del_expr'] = 'AST'
+                self.check_reduce["del_expr"] = "AST"
                 continue
-            elif opname == 'DELETE_DEREF':
-                self.addRule("""
+            elif opname == "DELETE_DEREF":
+                self.addRule(
+                    """
                     stmt ::= del_deref_stmt
                     del_deref_stmt ::= DELETE_DEREF
-                    """, nop_func)
+                    """,
+                    nop_func,
+                )
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'DELETE_SUBSCR':
-                self.addRule("""
+            elif opname == "DELETE_SUBSCR":
+                self.addRule(
+                    """
                     del_stmt ::= delete_subscript
                     delete_subscript ::= expr expr DELETE_SUBSCR
-                    """, nop_func)
-                self.check_reduce['delete_subscript'] = 'AST'
+                    """,
+                    nop_func,
+                )
+                self.check_reduce["delete_subscript"] = "AST"
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'GET_ITER':
-                self.addRule("""
+            elif opname == "GET_ITER":
+                self.addRule(
+                    """
                     expr ::= get_iter
                     attribute ::= expr GET_ITER
-                    """, nop_func)
+                    """,
+                    nop_func,
+                )
                 custom_seen_ops.add(opname)
                 continue
-            elif opname_base in ('DUP_TOPX', 'RAISE_VARARGS'):
+            elif opname_base in ("DUP_TOPX", "RAISE_VARARGS"):
                 # FIXME: remove these conditions if they are not needed.
                 # no longer need to add a rule
                 continue
-            elif opname == 'EXEC_STMT':
-                self.addRule("""
+            elif opname == "EXEC_STMT":
+                self.addRule(
+                    """
                     stmt ::= exec_stmt
                     exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
                     exec_stmt ::= expr exprlist EXEC_STMT
                    exprlist ::= expr+
-                    """, nop_func)
+                    """,
+                    nop_func,
+                )
                 continue
-            elif opname == 'JUMP_IF_NOT_DEBUG':
-                self.addRule("""
+            elif opname == "JUMP_IF_NOT_DEBUG":
+                self.addRule(
+                    """
                     jmp_true_false ::= POP_JUMP_IF_TRUE
                     jmp_true_false ::= POP_JUMP_IF_FALSE
                     stmt ::= assert_pypy
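Not part of the commit: a worked example of the BUILD_LIST/BUILD_SET/BUILD_TUPLE customization above. The generated rule decomposes a long literal into chunks of 1024 and 32 so the parser is not handed thousands of expr symbols at once.

build_count = 1100                      # e.g. a BUILD_LIST_1100 token.attr
thousands = build_count // 1024         # 1
thirty32s = (build_count // 32) % 32    # 34 % 32 == 2
rule = (
    "list ::= "
    + "expr1024 " * thousands
    + "expr32 " * thirty32s
    + "expr " * (build_count % 32)      # 12 leftover exprs
    + "BUILD_LIST_1100"
)
# 1*1024 + 2*32 + 12 == 1100 exprs in all, matched through the helper
# rules expr32 ::= expr * 32 and expr1024 ::= expr32 * 32.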
@@ -424,107 +492,152 @@ class Python2Parser(PythonParser):
                     assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true_false
                                      LOAD_ASSERT expr CALL_FUNCTION_1
                                      RAISE_VARARGS_1 COME_FROM
-                    """, nop_func)
+                    """,
+                    nop_func,
+                )
                 continue
-            elif opname == 'LOAD_ATTR':
-                self.addRule("""
+            elif opname == "LOAD_ATTR":
+                self.addRule(
+                    """
                     expr ::= attribute
                     attribute ::= expr LOAD_ATTR
-                    """, nop_func)
+                    """,
+                    nop_func,
+                )
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'LOAD_LISTCOMP':
+            elif opname == "LOAD_LISTCOMP":
                 self.addRule("expr ::= listcomp", nop_func)
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'LOAD_SETCOMP':
-                self.add_unique_rules([
-                    "expr ::= set_comp",
-                    "set_comp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1"
-                ], customize)
+            elif opname == "LOAD_SETCOMP":
+                self.add_unique_rules(
+                    [
+                        "expr ::= set_comp",
+                        "set_comp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1",
+                    ],
+                    customize,
+                )
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'LOOKUP_METHOD':
+            elif opname == "LOOKUP_METHOD":
                 # A PyPy speciality - DRY with parse3
-                self.addRule("""
+                self.addRule(
+                    """
                     expr ::= attribute
                     attribute ::= expr LOOKUP_METHOD
                     """,
-                             nop_func)
+                    nop_func,
+                )
                 custom_seen_ops.add(opname)
                 continue
-            elif opname_base == 'MAKE_FUNCTION':
-                if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
-                    self.addRule('mklambda ::= %s LOAD_LAMBDA %s' %
-                                 ('pos_arg ' * token.attr, opname), nop_func)
-                rule = 'mkfunc ::= %s LOAD_CODE %s' % ('expr ' * token.attr, opname)
-            elif opname_base == 'MAKE_CLOSURE':
+            elif opname_base == "MAKE_FUNCTION":
+                if i > 0 and tokens[i - 1] == "LOAD_LAMBDA":
+                    self.addRule(
+                        "mklambda ::= %s LOAD_LAMBDA %s"
+                        % ("pos_arg " * token.attr, opname),
+                        nop_func,
+                    )
+                rule = "mkfunc ::= %s LOAD_CODE %s" % ("expr " * token.attr, opname)
+            elif opname_base == "MAKE_CLOSURE":
                 # FIXME: use add_unique_rules to tidy this up.
-                if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
-                    self.addRule('mklambda ::= %s load_closure LOAD_LAMBDA %s' %
-                                 ('expr ' * token.attr, opname), nop_func)
+                if i > 0 and tokens[i - 1] == "LOAD_LAMBDA":
+                    self.addRule(
+                        "mklambda ::= %s load_closure LOAD_LAMBDA %s"
+                        % ("expr " * token.attr, opname),
+                        nop_func,
+                    )
                 if i > 0:
-                    prev_tok = tokens[i-1]
-                    if prev_tok == 'LOAD_GENEXPR':
-                        self.add_unique_rules([
-                            ('generator_exp ::= %s load_closure LOAD_GENEXPR %s expr'
-                             ' GET_ITER CALL_FUNCTION_1' %
-                             ('expr ' * token.attr, opname))], customize)
+                    prev_tok = tokens[i - 1]
+                    if prev_tok == "LOAD_GENEXPR":
+                        self.add_unique_rules(
+                            [
+                                (
+                                    "generator_exp ::= %s load_closure LOAD_GENEXPR %s expr"
+                                    " GET_ITER CALL_FUNCTION_1"
+                                    % ("expr " * token.attr, opname)
+                                )
+                            ],
+                            customize,
+                        )
                     pass
-                self.add_unique_rules([
-                    ('mkfunc ::= %s load_closure LOAD_CODE %s' %
-                     ('expr ' * token.attr, opname))], customize)
+                self.add_unique_rules(
+                    [
+                        (
+                            "mkfunc ::= %s load_closure LOAD_CODE %s"
+                            % ("expr " * token.attr, opname)
+                        )
+                    ],
+                    customize,
+                )
 
                 if self.version >= 2.7:
                     if i > 0:
-                        prev_tok = tokens[i-1]
-                        if prev_tok == 'LOAD_DICTCOMP':
-                            self.add_unique_rules([
-                                ('dict_comp ::= %s load_closure LOAD_DICTCOMP %s expr'
-                                 ' GET_ITER CALL_FUNCTION_1' %
-                                 ('expr ' * token.attr, opname))], customize)
-                        elif prev_tok == 'LOAD_SETCOMP':
-                            self.add_unique_rules([
-                                "expr ::= set_comp",
-                                ('set_comp ::= %s load_closure LOAD_SETCOMP %s expr'
-                                 ' GET_ITER CALL_FUNCTION_1' %
-                                 ('expr ' * token.attr, opname))
-                            ], customize)
+                        prev_tok = tokens[i - 1]
+                        if prev_tok == "LOAD_DICTCOMP":
+                            self.add_unique_rules(
+                                [
+                                    (
+                                        "dict_comp ::= %s load_closure LOAD_DICTCOMP %s expr"
+                                        " GET_ITER CALL_FUNCTION_1"
+                                        % ("expr " * token.attr, opname)
+                                    )
+                                ],
+                                customize,
+                            )
+                        elif prev_tok == "LOAD_SETCOMP":
+                            self.add_unique_rules(
+                                [
+                                    "expr ::= set_comp",
+                                    (
+                                        "set_comp ::= %s load_closure LOAD_SETCOMP %s expr"
+                                        " GET_ITER CALL_FUNCTION_1"
+                                        % ("expr " * token.attr, opname)
+                                    ),
+                                ],
+                                customize,
+                            )
                         pass
                     pass
                 continue
-            elif opname == 'SETUP_EXCEPT':
-                if 'PyPy' in customize:
-                    self.add_unique_rules([
-                        "stmt ::= try_except_pypy",
-                        "try_except_pypy ::= SETUP_EXCEPT suite_stmts_opt except_handler_pypy",
-                        "except_handler_pypy ::= COME_FROM except_stmts END_FINALLY COME_FROM"
-                    ], customize)
+            elif opname == "SETUP_EXCEPT":
+                if "PyPy" in customize:
+                    self.add_unique_rules(
+                        [
+                            "stmt ::= try_except_pypy",
+                            "try_except_pypy ::= SETUP_EXCEPT suite_stmts_opt except_handler_pypy",
+                            "except_handler_pypy ::= COME_FROM except_stmts END_FINALLY COME_FROM",
+                        ],
+                        customize,
+                    )
                 custom_seen_ops.add(opname)
                 continue
-            elif opname == 'SETUP_FINALLY':
-                if 'PyPy' in customize:
-                    self.addRule("""
+            elif opname == "SETUP_FINALLY":
+                if "PyPy" in customize:
+                    self.addRule(
+                        """
                         stmt ::= tryfinallystmt_pypy
                         tryfinallystmt_pypy ::= SETUP_FINALLY suite_stmts_opt COME_FROM_FINALLY
-                        suite_stmts_opt END_FINALLY""", nop_func)
+                        suite_stmts_opt END_FINALLY""",
+                        nop_func,
+                    )
 
                 custom_seen_ops.add(opname)
                 continue
-            elif opname_base in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
+            elif opname_base in ("UNPACK_TUPLE", "UNPACK_SEQUENCE"):
                 custom_seen_ops.add(opname)
-                rule = 'unpack ::= ' + opname + ' store' * token.attr
-            elif opname_base == 'UNPACK_LIST':
+                rule = "unpack ::= " + opname + " store" * token.attr
+            elif opname_base == "UNPACK_LIST":
                 custom_seen_ops.add(opname)
-                rule = 'unpack_list ::= ' + opname + ' store' * token.attr
+                rule = "unpack_list ::= " + opname + " store" * token.attr
             else:
                 continue
             self.addRule(rule, nop_func)
             pass
 
-        self.check_reduce['raise_stmt1'] = 'tokens'
-        self.check_reduce['aug_assign2'] = 'AST'
-        self.check_reduce['or'] = 'AST'
+        self.check_reduce["raise_stmt1"] = "tokens"
+        self.check_reduce["aug_assign2"] = "AST"
+        self.check_reduce["or"] = "AST"
         # self.check_reduce['_stmts'] = 'AST'
 
         # Dead code testing...
@@ -539,24 +652,30 @@ class Python2Parser(PythonParser):
         # Dead code testing...
         # if lhs == 'while1elsestmt':
         #    from trepan.api import debug; debug()
-        if lhs in ('aug_assign1', 'aug_assign2') and ast[0] and ast[0][0] in ('and', 'or'):
+        if (
+            lhs in ("aug_assign1", "aug_assign2")
+            and ast[0]
+            and ast[0][0] in ("and", "or")
+        ):
             return True
-        elif lhs in ('raise_stmt1',):
+        elif lhs in ("raise_stmt1",):
             # We will assume 'LOAD_ASSERT' will be handled by an assert grammar rule
-            return (tokens[first] == 'LOAD_ASSERT' and (last >= len(tokens)))
-        elif rule == ('or', ('expr', 'jmp_true', 'expr', '\\e_come_from_opt')):
+            return tokens[first] == "LOAD_ASSERT" and (last >= len(tokens))
+        elif rule == ("or", ("expr", "jmp_true", "expr", "\\e_come_from_opt")):
             expr2 = ast[2]
-            return expr2 == 'expr' and expr2[0] == 'LOAD_ASSERT'
-        elif lhs in ('delete_subscript', 'del_expr'):
+            return expr2 == "expr" and expr2[0] == "LOAD_ASSERT"
+        elif lhs in ("delete_subscript", "del_expr"):
             op = ast[0][0]
-            return op.kind in ('and', 'or')
+            return op.kind in ("and", "or")
 
         return False
 
 
 class Python2ParserSingle(Python2Parser, PythonParserSingle):
     pass
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     # Check grammar
     p = Python2Parser()
     p.check_grammar()
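Not part of the commit: a worked example of the "number of apply equiv arguments" (nak) computation in the CALL_FUNCTION customization earlier in this file. As the code comments concede, it is a little hoaky: each _VAR or _KW suffix adds one extra stack argument, and integer-dividing the extra name length by 3 counts those suffixes.

opname_base = "CALL_FUNCTION_VAR_KW"                   # hypothetical example
nak = (len(opname_base) - len("CALL_FUNCTION")) // 3   # (20 - 13) // 3 == 2
# With args_pos == 1 and args_kw == 1 (CPython operand 257 encodes one
# positional plus one keyword argument), the rule assembled above becomes:
#   call ::= expr expr kwarg expr expr CALL_FUNCTION_VAR_KW_257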
(file name not shown; the Python3Parser grammar)
@@ -432,7 +432,7 @@ class Python3Parser(PythonParser):
         else:
             return "%s_0" % (token.kind)
 
-    def custom_build_class_rule(self, opname, i, token, tokens, customize):
+    def custom_build_class_rule(self, opname, i, token, tokens, customize, is_pypy):
         """
         # Should the first rule be somehow folded into the 2nd one?
         build_class ::= LOAD_BUILD_CLASS mkfunc
@@ -485,10 +485,18 @@ class Python3Parser(PythonParser):
             call_function = call_fn_tok.kind
             if call_function.startswith("CALL_FUNCTION_KW"):
                 self.addRule("classdef ::= build_class_kw store", nop_func)
-                rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %sLOAD_CONST %s" % (
-                    "expr " * (call_fn_tok.attr - 1),
-                    call_function,
-                )
+                if is_pypy:
+                    args_pos, args_kw = self.get_pos_kw(call_fn_tok)
+                    rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %s%s%s" % (
+                        "expr " * (args_pos - 1),
+                        "kwarg " * (args_kw),
+                        call_function,
+                    )
+                else:
+                    rule = (
+                        "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %sLOAD_CONST %s"
+                        % ("expr " * (call_fn_tok.attr - 1), call_function)
+                    )
             else:
                 call_function = self.call_fn_name(call_fn_tok)
                 rule = "build_class ::= LOAD_BUILD_CLASS mkfunc %s%s" % (
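Not part of the commit, and with hypothetical numbers: what the new PyPy branch above builds. If get_pos_kw() returned args_pos=3 and args_kw=1 for the CALL_FUNCTION_KW token:

rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %s%s%s" % (
    "expr " * (3 - 1),      # args_pos - 1; one positional is matched by mkfunc
    "kwarg " * 1,           # args_kw
    "CALL_FUNCTION_KW_4",   # hypothetical token kind
)
# -> "build_class_kw ::= LOAD_BUILD_CLASS mkfunc expr expr kwarg CALL_FUNCTION_KW_4"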
@@ -498,7 +506,7 @@ class Python3Parser(PythonParser):
         self.addRule(rule, nop_func)
         return
 
-    def custom_classfunc_rule(self, opname, token, customize, next_token):
+    def custom_classfunc_rule(self, opname, token, customize, next_token, is_pypy):
         """
         call ::= expr {expr}^n CALL_FUNCTION_n
         call ::= expr {expr}^n CALL_FUNCTION_VAR_n
@@ -516,11 +524,21 @@ class Python3Parser(PythonParser):
         # Yes, this computation based on instruction name is a little bit hoaky.
         nak = (len(opname) - len("CALL_FUNCTION")) // 3
 
-        token.kind = self.call_fn_name(token)
         uniq_param = args_kw + args_pos
 
         # Note: 3.5+ have subclassed this method; so we don't handle
         # 'CALL_FUNCTION_VAR' or 'CALL_FUNCTION_EX' here.
+        if is_pypy and self.version >= 3.6:
+            if token == "CALL_FUNCTION":
+                token.kind = self.call_fn_name(token)
+                rule = (
+                    "call ::= expr "
+                    + ("pos_arg " * args_pos)
+                    + ("kwarg " * args_kw)
+                    + token.kind
+                )
+        else:
+            token.kind = self.call_fn_name(token)
             rule = (
                 "call ::= expr "
                 + ("pos_arg " * args_pos)
@@ -545,7 +563,12 @@ class Python3Parser(PythonParser):
         this has an effect on many rules.
         """
         if self.version >= 3.3:
-            new_rule = rule % (("LOAD_STR ") * 1)
+            if PYTHON3 or not self.is_pypy:
+                load_op = "LOAD_STR "
+            else:
+                load_op = "LOAD_CONST "
+
+            new_rule = rule % ((load_op) * 1)
         else:
             new_rule = rule % (("LOAD_STR ") * 0)
         self.add_unique_rule(new_rule, opname, attr, customize)
@@ -573,7 +596,7 @@ class Python3Parser(PythonParser):
 
         """
 
-        is_pypy = False
+        self.is_pypy = False
 
         # For a rough break out on the first word.  This may
         # include instructions that don't need customization,
@@ -618,7 +641,7 @@ class Python3Parser(PythonParser):
         # a specific instruction seen.
 
         if "PyPy" in customize:
-            is_pypy = True
+            self.is_pypy = True
             self.addRule(
                 """
                 stmt ::= assign3_pypy
@@ -823,7 +846,9 @@ class Python3Parser(PythonParser):
                 """
             self.addRule(rule, nop_func)
 
-            self.custom_classfunc_rule(opname, token, customize, tokens[i + 1])
+            self.custom_classfunc_rule(
+                opname, token, customize, tokens[i + 1], self.is_pypy
+            )
             # Note: don't add to custom_ops_processed.
 
         elif opname_base == "CALL_METHOD":
@@ -882,21 +907,30 @@ class Python3Parser(PythonParser):
             self.addRule(
                 """
                 stmt ::= assert_pypy
-                stmt ::= assert2_pypy", nop_func)
+                stmt ::= assert_not_pypy
+                stmt ::= assert2_pypy
+                stmt ::= assert2_not_pypy
                 assert_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true
                                 LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
+                assert_not_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_false
+                                    LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
                 assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true
                                  LOAD_ASSERT expr CALL_FUNCTION_1
                                  RAISE_VARARGS_1 COME_FROM
                 assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true
                                  LOAD_ASSERT expr CALL_FUNCTION_1
-                                 RAISE_VARARGS_1 COME_FROM,
+                                 RAISE_VARARGS_1 COME_FROM
+                assert2_not_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_false
+                                     LOAD_ASSERT expr CALL_FUNCTION_1
+                                     RAISE_VARARGS_1 COME_FROM
                 """,
                 nop_func,
             )
             custom_ops_processed.add(opname)
         elif opname == "LOAD_BUILD_CLASS":
-            self.custom_build_class_rule(opname, i, token, tokens, customize)
+            self.custom_build_class_rule(
+                opname, i, token, tokens, customize, self.is_pypy
+            )
             # Note: don't add to custom_ops_processed.
         elif opname == "LOAD_CLASSDEREF":
             # Python 3.4+
@@ -969,7 +1003,7 @@ class Python3Parser(PythonParser):
                 j = 1
             else:
                 j = 2
-            if is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
+            if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
                 rule_pat = "mklambda ::= %sload_closure LOAD_LAMBDA %%s%s" % (
                     "pos_arg " * args_pos,
                     opname,
@@ -984,7 +1018,7 @@ class Python3Parser(PythonParser):
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if has_get_iter_call_function1:
-                if is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
+                if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
                     # In the tokens we saw:
                     #   LOAD_LISTCOMP LOAD_CONST MAKE_FUNCTION (>= 3.3) or
                     #   LOAD_LISTCOMP MAKE_FUNCTION (< 3.3) or
@@ -998,7 +1032,7 @@ class Python3Parser(PythonParser):
                     self.add_make_function_rule(
                         rule_pat, opname, token.attr, customize
                     )
-                if is_pypy or (i >= j and tokens[i - j] == "LOAD_SETCOMP"):
+                if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_SETCOMP"):
                     rule_pat = (
                         "set_comp ::= %sload_closure LOAD_SETCOMP %%s%s expr "
                         "GET_ITER CALL_FUNCTION_1"
@@ -1007,7 +1041,7 @@ class Python3Parser(PythonParser):
                     self.add_make_function_rule(
                         rule_pat, opname, token.attr, customize
                     )
-                if is_pypy or (i >= j and tokens[i - j] == "LOAD_DICTCOMP"):
+                if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_DICTCOMP"):
                     self.add_unique_rule(
                         "dict_comp ::= %sload_closure LOAD_DICTCOMP %s "
                         "expr GET_ITER CALL_FUNCTION_1"
@@ -1053,17 +1087,24 @@ class Python3Parser(PythonParser):
             )
 
         elif self.version >= 3.4:
+            if PYTHON3 or not self.is_pypy:
+                load_op = "LOAD_STR"
+            else:
+                load_op = "LOAD_CONST"
+
             if annotate_args > 0:
-                rule = "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE LOAD_STR %s" % (
+                rule = "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure %s %s %s" % (
                     "pos_arg " * args_pos,
                     kwargs_str,
                     "annotate_arg " * (annotate_args - 1),
+                    load_op,
                     opname,
                 )
             else:
-                rule = "mkfunc ::= %s%s load_closure LOAD_CODE LOAD_STR %s" % (
+                rule = "mkfunc ::= %s%s load_closure LOAD_CODE %s %s" % (
                     "pos_arg " * args_pos,
                     kwargs_str,
+                    load_op,
                     opname,
                 )
 
@@ -1121,6 +1162,14 @@ class Python3Parser(PythonParser):
                     opname,
                 )
                 self.add_unique_rule(rule, opname, token.attr, customize)
+            if not PYTHON3 and self.is_pypy:
+                rule = "mkfunc ::= %s%s%s%s" % (
+                    "expr " * stack_count,
+                    "load_closure " * closure,
+                    "LOAD_CODE LOAD_CONST ",
+                    opname,
+                )
+                self.add_unique_rule(rule, opname, token.attr, customize)
 
             if has_get_iter_call_function1:
                 rule_pat = (
@@ -1137,7 +1186,7 @@ class Python3Parser(PythonParser):
                     self.add_make_function_rule(
                         rule_pat, opname, token.attr, customize
                     )
-                if is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LISTCOMP"):
+                if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LISTCOMP"):
                     if self.version >= 3.6:
                         # 3.6+ sometimes bundles all of the
                         # 'exprs' in the rule above into a
@@ -1158,7 +1207,7 @@ class Python3Parser(PythonParser):
                         rule_pat, opname, token.attr, customize
                     )
 
-            if is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
+            if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
                 rule_pat = "mklambda ::= %s%sLOAD_LAMBDA %%s%s" % (
                     ("pos_arg " * args_pos),
                     ("kwarg " * args_kw),
@@ -1186,7 +1235,7 @@ class Python3Parser(PythonParser):
                 )
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
-            if is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
+            if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
                 # In the tokens we saw:
                 #   LOAD_LISTCOMP LOAD_CONST MAKE_FUNCTION (>= 3.3) or
                 #   LOAD_LISTCOMP MAKE_FUNCTION (< 3.3) or
@@ -1201,7 +1250,7 @@ class Python3Parser(PythonParser):
                 )
 
                 # FIXME: Fold test into add_make_function_rule
-                if is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
+                if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
                     rule_pat = "mklambda ::= %s%sLOAD_LAMBDA %%s%s" % (
                         ("pos_arg " * args_pos),
                        ("kwarg " * args_kw),
@@ -143,9 +143,15 @@ class Python35Parser(Python34Parser):
        super(Python35Parser, self).customize_grammar_rules(tokens, customize)
        for i, token in enumerate(tokens):
            opname = token.kind
+            if opname == 'LOAD_ASSERT':
+                if 'PyPy' in customize:
+                    rules_str = """
+                    stmt ::= JUMP_IF_NOT_DEBUG stmts COME_FROM
+                    """
+                    self.add_unique_doc_rules(rules_str, customize)
            # FIXME: I suspect this is wrong for 3.6 and 3.5, but
            # I haven't verified what the 3.7ish fix is
-            if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
+            elif opname == 'BUILD_MAP_UNPACK_WITH_CALL':
                if self.version < 3.7:
                    self.addRule("expr ::= unmapexpr", nop_func)
                    nargs = token.attr % 256
@@ -187,13 +187,7 @@ class Python36Parser(Python35Parser):
        for i, token in enumerate(tokens):
            opname = token.kind

-            if opname == 'LOAD_ASSERT':
-                if 'PyPy' in customize:
-                    rules_str = """
-                    stmt ::= JUMP_IF_NOT_DEBUG stmts COME_FROM
-                    """
-                    self.add_unique_doc_rules(rules_str, customize)
-            elif opname == 'FORMAT_VALUE':
+            if opname == 'FORMAT_VALUE':
                rules_str = """
                    expr             ::= formatted_value1
                    formatted_value1 ::= expr FORMAT_VALUE
@@ -315,7 +309,7 @@ class Python36Parser(Python35Parser):
            pass
        return

-    def custom_classfunc_rule(self, opname, token, customize, next_token):
+    def custom_classfunc_rule(self, opname, token, customize, next_token, is_pypy):

        args_pos, args_kw = self.get_pos_kw(token)

@@ -337,9 +331,13 @@ class Python36Parser(Python35Parser):
        self.add_unique_rule('expr ::= async_call', token.kind, uniq_param, customize)

        if opname.startswith('CALL_FUNCTION_KW'):
+            if is_pypy:
+                # PYPY doesn't follow CPython 3.6 CALL_FUNCTION_KW conventions
+                super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token, is_pypy)
+            else:
                self.addRule("expr ::= call_kw36", nop_func)
                values = 'expr ' * token.attr
-                rule = "call_kw36 ::= expr %s LOAD_CONST %s" % (values, opname)
+                rule = "call_kw36 ::= expr {values} LOAD_CONST {opname}".format(**locals())
                self.add_unique_rule(rule, token.kind, token.attr, customize)
        elif opname == 'CALL_FUNCTION_EX_KW':
            # Note: this doesn't exist in 3.7 and later
|
|||||||
""", nop_func)
|
""", nop_func)
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token)
|
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token, is_pypy)
|
||||||
|
|
||||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||||
invalid = super(Python36Parser,
|
invalid = super(Python36Parser,
|
||||||
|
@@ -92,6 +92,7 @@ class Python38Parser(Python37Parser):
                for38             ::= expr get_iter store for_block JUMP_BACK
                for38             ::= expr for_iter store for_block JUMP_BACK
                for38             ::= expr for_iter store for_block JUMP_BACK POP_BLOCK
+               for38             ::= expr for_iter store for_block

                forelsestmt38     ::= expr for_iter store for_block POP_BLOCK else_suite
                forelselaststmt38 ::= expr for_iter store for_block POP_BLOCK else_suitec
@@ -27,8 +27,7 @@ import sys
from uncompyle6 import PYTHON3, IS_PYPY, PYTHON_VERSION
from uncompyle6.scanners.tok import Token
import xdis
-from xdis.bytecode import (
-    Bytecode, instruction_size, extended_arg_val, next_offset)
+from xdis.bytecode import Bytecode, instruction_size, extended_arg_val, next_offset
from xdis.magics import canonic_python_version
from xdis.util import code2num

@@ -39,15 +38,38 @@ else:

# The byte code versions we support.
# Note: these all have to be floats
-PYTHON_VERSIONS = frozenset((1.0, 1.1, 1.3, 1.4, 1.5, 1.6,
-                             2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
-                             3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8))
+PYTHON_VERSIONS = frozenset(
+    (
+        1.0,
+        1.1,
+        1.3,
+        1.4,
+        1.5,
+        1.6,
+        2.1,
+        2.2,
+        2.3,
+        2.4,
+        2.5,
+        2.6,
+        2.7,
+        3.0,
+        3.1,
+        3.2,
+        3.3,
+        3.4,
+        3.5,
+        3.6,
+        3.7,
+        3.8,
+    )
+)

CANONIC2VERSION = dict((canonic_python_version[str(v)], v) for v in PYTHON_VERSIONS)

# Magic changed mid version for Python 3.5.2. Compatibility was added for
# the older 3.5 interpreter magic.
-CANONIC2VERSION['3.5.2'] = 3.5
+CANONIC2VERSION["3.5.2"] = 3.5


# FIXME: DRY
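The CANONIC2VERSION table inverts xdis's canonic_python_version mapping so a scanner can be looked up from a canonic version string. A minimal sketch of the shape of that data; the stand-in dict below only approximates what xdis actually ships, so treat the literals as assumptions:

    canonic_python_version = {"2.7": "2.7", "3.5": "3.5", "3.5.2": "3.5.2"}  # stand-in for xdis data
    PYTHON_VERSIONS = frozenset((2.7, 3.5))
    CANONIC2VERSION = dict((canonic_python_version[str(v)], v) for v in PYTHON_VERSIONS)
    CANONIC2VERSION["3.5.2"] = 3.5  # the mid-3.5 magic change maps to the same scanner
    assert CANONIC2VERSION["3.5.2"] == CANONIC2VERSION["3.5"] == 3.5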
@@ -57,24 +79,28 @@ if PYTHON3:

    def long(l):
        return l


else:
    L65536 = long(65536)  # NOQA


class Code(object):
-    '''
+    """
    Class for representing code-objects.

    This is similar to the original code object, but additionally
    the diassembled code is stored in the attribute '_tokens'.
-    '''
+    """

    def __init__(self, co, scanner, classname=None):
        for i in dir(co):
-            if i.startswith('co_'):
+            if i.startswith("co_"):
                setattr(self, i, getattr(co, i))
        self._tokens, self._customize = scanner.ingest(co, classname)

-class Scanner(object):

+
+class Scanner(object):
    def __init__(self, version, show_asm=None, is_pypy=False):
        self.version = version
        self.show_asm = show_asm
@@ -102,7 +128,7 @@ class Scanner(object):
        """
        # FIXME: remove this when all subsidiary functions have been removed.
        # We should be able to get everything from the self.insts list.
-        self.code = array('B', co.co_code)
+        self.code = array("B", co.co_code)

        bytecode = Bytecode(co, self.opc)
        self.build_prev_op()
@@ -130,7 +156,7 @@ class Scanner(object):
        # 'List-map' which shows line number of current op and offset of
        # first op on following line, given offset of op as index
        lines = []
-        LineTuple = namedtuple('LineTuple', ['l_no', 'next'])
+        LineTuple = namedtuple("LineTuple", ["l_no", "next"])

        # Iterate through available linestarts, and fill
        # the data for all code offsets encountered until
@@ -173,14 +199,14 @@ class Scanner(object):
        goes forward.
        """
        opname = self.get_inst(offset).opname
-        if opname == 'JUMP_FORWARD':
+        if opname == "JUMP_FORWARD":
            return True
-        if opname != 'JUMP_ABSOLUTE':
+        if opname != "JUMP_ABSOLUTE":
            return False
        return offset < self.get_target(offset)

    def prev_offset(self, offset):
-        return self.insts[self.offset2inst_index[offset]-1].offset
+        return self.insts[self.offset2inst_index[offset] - 1].offset

    def get_inst(self, offset):
        # Instructions can get moved as a result of EXTENDED_ARGS removal.
@@ -207,7 +233,7 @@ class Scanner(object):
        return target

    def get_argument(self, pos):
-        arg = self.code[pos+1] + self.code[pos+2] * 256
+        arg = self.code[pos + 1] + self.code[pos + 2] * 256
        return arg

    def next_offset(self, op, offset):
@@ -218,9 +244,9 @@ class Scanner(object):
            op = self.code[i]
            if op in self.JUMP_OPS:
                dest = self.get_target(i, op)
-                print('%i\t%s\t%i' % (i, self.opname[op], dest))
+                print("%i\t%s\t%i" % (i, self.opname[op], dest))
            else:
-                print('%i\t%s\t' % (i, self.opname[op]))
+                print("%i\t%s\t" % (i, self.opname[op]))

    def first_instr(self, start, end, instr, target=None, exact=True):
        """
@@ -234,11 +260,9 @@ class Scanner(object):
        Return index to it or None if not found.
        """
        code = self.code
-        assert(start >= 0 and end <= len(code))
+        assert start >= 0 and end <= len(code)

-        try:
-            None in instr
-        except:
+        if not isinstance(instr, list):
            instr = [instr]

        result_offset = None
@@ -276,9 +300,7 @@ class Scanner(object):
        if not (start >= 0 and end <= len(code)):
            return None

-        try:
-            None in instr
-        except:
+        if not isinstance(instr, list):
            instr = [instr]

        result_offset = None
@@ -289,7 +311,7 @@ class Scanner(object):
            op = code[offset]

            if op == self.opc.EXTENDED_ARG:
-                arg = code2num(code, offset+1) | extended_arg
+                arg = code2num(code, offset + 1) | extended_arg
                extended_arg = extended_arg_val(self.opc, arg)
                continue

@@ -367,7 +389,7 @@ class Scanner(object):
        """

        code = self.code
-        assert(start >= 0 and end <= len(code))
+        assert start >= 0 and end <= len(code)

        try:
            None in instr
@@ -381,7 +403,7 @@ class Scanner(object):
            op = code[offset]

            if op == self.opc.EXTENDED_ARG:
-                arg = code2num(code, offset+1) | extended_arg
+                arg = code2num(code, offset + 1) | extended_arg
                extended_arg = extended_arg_val(self.opc, arg)
                continue

@@ -425,8 +447,11 @@ class Scanner(object):
        last_was_extarg = False
        n = len(instructions)
        for i, inst in enumerate(instructions):
-            if (inst.opname == 'EXTENDED_ARG'
-                    and i+1 < n and instructions[i+1].opname != 'MAKE_FUNCTION'):
+            if (
+                inst.opname == "EXTENDED_ARG"
+                and i + 1 < n
+                and instructions[i + 1].opname != "MAKE_FUNCTION"
+            ):
                last_was_extarg = True
                starts_line = inst.starts_line
                is_jump_target = inst.is_jump_target
@@ -437,13 +462,15 @@ class Scanner(object):
                # j = self.stmts.index(inst.offset)
                # self.lines[j] = offset

-                new_inst = inst._replace(starts_line=starts_line,
+                new_inst = inst._replace(
+                    starts_line=starts_line,
                    is_jump_target=is_jump_target,
-                    offset=offset)
+                    offset=offset,
+                )
                inst = new_inst
                if i < n:
                    new_prev = self.prev_op[instructions[i].offset]
-                    j = instructions[i+1].offset
+                    j = instructions[i + 1].offset
                    old_prev = self.prev_op[j]
                    while self.prev_op[j] == old_prev and j < n:
                        self.prev_op[j] = new_prev
@@ -465,9 +492,12 @@ class Scanner(object):
        for i in ifs:
            # For each offset, if line number of current and next op
            # is the same
-            if self.lines[i].l_no == self.lines[i+3].l_no:
+            if self.lines[i].l_no == self.lines[i + 3].l_no:
                # Skip last op on line if it is some sort of POP_JUMP.
-                if self.code[self.prev[self.lines[i].next]] in (self.opc.PJIT, self.opc.PJIF):
+                if self.code[self.prev[self.lines[i].next]] in (
+                    self.opc.PJIT,
+                    self.opc.PJIF,
+                ):
                    continue
                filtered.append(i)
        return filtered
@@ -477,8 +507,8 @@ class Scanner(object):

    def restrict_to_parent(self, target, parent):
        """Restrict target to parent structure boundaries."""
-        if not (parent['start'] < target < parent['end']):
-            target = parent['end']
+        if not (parent["start"] < target < parent["end"]):
+            target = parent["end"]
        return target

    def setTokenClass(self, tokenClass):
@@ -486,6 +516,7 @@ class Scanner(object):
        self.Token = tokenClass
        return self.Token


def parse_fn_counts(argc):
    return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)

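parse_fn_counts unpacks the MAKE_FUNCTION operand used by Python 3.0-3.5, where the count of positional defaults sits in the low byte, keyword-only defaults in the next byte, and annotations in the bits above that. A quick worked example:

    def parse_fn_counts(argc):
        return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)

    # 1 annotation, 2 keyword-only defaults, 3 positional defaults,
    # packed as 0x00010203:
    assert parse_fn_counts(0x00010203) == (3, 2, 1)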
@@ -498,8 +529,10 @@ def get_scanner(version, is_pypy=False, show_asm=None):
        raise RuntimeError("Unknown Python version in xdis %s" % version)
    canonic_version = canonic_python_version[version]
    if canonic_version not in CANONIC2VERSION:
-        raise RuntimeError("Unsupported Python version %s (canonic %s)"
-                           % (version, canonic_version))
+        raise RuntimeError(
+            "Unsupported Python version %s (canonic %s)"
+            % (version, canonic_version)
+        )
    version = CANONIC2VERSION[canonic_version]

    # Pick up appropriate scanner
@@ -507,24 +540,34 @@ def get_scanner(version, is_pypy=False, show_asm=None):
        v_str = "%s" % (int(version * 10))
        try:
            import importlib

            if is_pypy:
                scan = importlib.import_module("uncompyle6.scanners.pypy%s" % v_str)
            else:
                scan = importlib.import_module("uncompyle6.scanners.scanner%s" % v_str)
-            if False: print(scan) # Avoid unused scan
+            if False:
+                print(scan)  # Avoid unused scan
        except ImportError:
            if is_pypy:
-                exec("import uncompyle6.scanners.pypy%s as scan" % v_str,
-                     locals(), globals())
+                exec(
+                    "import uncompyle6.scanners.pypy%s as scan" % v_str,
+                    locals(),
+                    globals(),
+                )
            else:
-                exec("import uncompyle6.scanners.scanner%s as scan" % v_str,
-                     locals(), globals())
+                exec(
+                    "import uncompyle6.scanners.scanner%s as scan" % v_str,
+                    locals(),
+                    globals(),
+                )
        if is_pypy:
-            scanner = eval("scan.ScannerPyPy%s(show_asm=show_asm)" % v_str,
-                           locals(), globals())
+            scanner = eval(
+                "scan.ScannerPyPy%s(show_asm=show_asm)" % v_str, locals(), globals()
+            )
        else:
-            scanner = eval("scan.Scanner%s(show_asm=show_asm)" % v_str,
-                           locals(), globals())
+            scanner = eval(
+                "scan.Scanner%s(show_asm=show_asm)" % v_str, locals(), globals()
+            )
    else:
        raise RuntimeError("Unsupported Python version %s" % version)
    return scanner
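The try/except in get_scanner is a compatibility shim: importlib exists only on Python 2.7/3.1 and later, so on the very old interpreters this package still supports, the module is imported via exec instead. A stripped-down sketch of the same pattern, written for Python 3 for brevity (the module name is a placeholder):

    def load_by_name(mod_name):
        try:
            import importlib
            return importlib.import_module(mod_name)
        except ImportError:
            # pre-importlib interpreters: fall back to exec'ing an import
            namespace = {}
            exec("import %s as scan" % mod_name, namespace)
            return namespace["scan"]

    mod = load_by_name("collections")  # any importable module name works here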
@@ -532,8 +575,9 @@ def get_scanner(version, is_pypy=False, show_asm=None):

if __name__ == "__main__":
    import inspect, uncompyle6

    co = inspect.currentframe().f_code
    # scanner = get_scanner('2.7.13', True)
    # scanner = get_scanner(sys.version[:5], False)
    scanner = get_scanner(uncompyle6.PYTHON_VERSION, IS_PYPY, True)
-    tokens, customize = scanner.ingest(co, {}, show_asm='after')
+    tokens, customize = scanner.ingest(co, {}, show_asm="after")
@@ -268,7 +268,10 @@ class Scanner3(Scanner):
                # There is a an implied JUMP_IF_TRUE that we are not testing for (yet?) here
                assert_can_follow = inst.opname == "POP_TOP" and i + 1 < n
            else:
-                assert_can_follow = inst.opname == "POP_JUMP_IF_TRUE" and i + 1 < n
+                assert_can_follow = (
+                    inst.opname in ("POP_JUMP_IF_TRUE", "POP_JUMP_IF_FALSE")
+                    and i + 1 < n
+                )
            if assert_can_follow:
                next_inst = self.insts[i + 1]
                if (
@@ -278,9 +281,7 @@ class Scanner3(Scanner):
                ):
                    raise_idx = self.offset2inst_index[self.prev_op[inst.argval]]
                    raise_inst = self.insts[raise_idx]
-                    if raise_inst.opname.startswith(
-                        "RAISE_VARARGS"
-                    ):
+                    if raise_inst.opname.startswith("RAISE_VARARGS"):
                        self.load_asserts.add(next_inst.offset)
                    pass
                pass
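The detection above keys off the bytecode shape CPython emits for an assert statement: a conditional jump over a raise. Roughly, `assert x` compiles to a POP_JUMP_IF_TRUE past a LOAD_GLOBAL AssertionError / RAISE_VARARGS 1 pair, and the scanner marks that LOAD as LOAD_ASSERT only when the jump's target is preceded by the RAISE_VARARGS. You can inspect this yourself:

    import dis

    def f(x):
        assert x

    # Look for POP_JUMP_IF_TRUE followed by LOAD_GLOBAL AssertionError
    # and RAISE_VARARGS 1 in the output (exact offsets vary by version).
    dis.dis(f)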
@@ -436,11 +437,16 @@ class Scanner3(Scanner):
                else:
                    opname = "%s_%d" % (opname, pos_args)

-            elif self.is_pypy and opname == "JUMP_IF_NOT_DEBUG":
+            elif self.is_pypy and opname in ("JUMP_IF_NOT_DEBUG", "CALL_FUNCTION"):
+                if opname == "JUMP_IF_NOT_DEBUG":
                    # The value in the dict is in special cases in semantic actions, such
                    # as JUMP_IF_NOT_DEBUG. The value is not used in these cases, so we put
                    # in arbitrary value 0.
                    customize[opname] = 0
+                elif self.version >= 3.6 and argval > 255:
+                    opname = "CALL_FUNCTION_KW"
+                pass

            elif opname == "UNPACK_EX":
                # FIXME: try with scanner and parser by
                # changing argval
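The argval > 255 test works because PyPy keeps the pre-3.6 CALL_FUNCTION operand packing, in which the low byte counts positional arguments and the next byte counts keyword pairs; any keyword argument therefore pushes the operand past 255. Sketch:

    argval = 0x0102                    # old-style packing: 2 positional, 1 keyword pair
    pos_args = argval & 0xFF           # -> 2
    kw_pairs = (argval >> 8) & 0xFF    # -> 1
    assert argval > 255 and kw_pairs > 0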
@@ -28,8 +28,15 @@ def customize_for_version(self, is_pypy, version):
        # PyPy changes
        #######################
        TABLE_DIRECT.update({
-            'assert_pypy':      ( '%|assert %c\n' , 1 ),
-            'assert2_pypy':     ( '%|assert %c, %c\n' , 1, 4 ),
+            'assert_pypy':      ( '%|assert %c\n' , (1, 'assert_expr') ),
+            # This is as a result of an if transoration
+            'assert0_pypy':     ( '%|assert %c\n' , (0, 'assert_expr') ),
+
+            'assert_not_pypy':  ( '%|assert not %c\n' , (1, 'assert_exp') ),
+            'assert2_not_pypy': ( '%|assert not %c, %c\n' , (1, 'assert_exp'),
+                                  (4, 'expr') ),
+            'assert2_pypy':     ( '%|assert %c, %c\n' , (1, 'assert_expr'),
+                                  (4, 'expr') ),
            'try_except_pypy':   ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
            'tryfinallystmt_pypy': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 3 ),
            'assign3_pypy':     ( '%|%c, %c, %c = %c, %c, %c\n', 5, 4, 3, 0, 1, 2 ),
@@ -495,7 +495,10 @@ def customize_for_version36(self, version):
            # bytecode, the escaping of the braces has been
            # removed. So we need to put back the braces escaping in
            # reconstructing the source.
-            assert expr[0] == 'LOAD_STR'
+            assert (
+                expr[0] == "LOAD_STR" or
+                expr[0] == "LOAD_CONST" and isinstance(expr[0].attr, unicode)
+            )
            value = value.replace("{", "{{").replace("}", "}}")

            # Remove leading quotes
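Why the re-escaping is needed: in source code, literal braces inside an f-string are written doubled, but the constant stored in the bytecode holds the already-unescaped text, so the decompiler must double them again before splicing the text back into an f"..." literal. A small illustration:

    value = "a { b }"   # literal text as recovered from the code object
    escaped = value.replace("{", "{{").replace("}", "}}")
    assert escaped == "a {{ b }}"   # now safe inside a reconstructed f-string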
@@ -189,6 +189,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
        self.hide_internal = False
        self.offsets = {}
        self.last_finish = -1
+        self.is_pypy = is_pypy

        # FIXME: is there a better way?
        global MAP_DIRECT_FRAGMENT
@@ -1463,25 +1464,80 @@ class FragmentsWalker(pysource.SourceWalker, object):
        # as a custom rule
        start = len(self.f.getvalue())
        n = len(node) - 1

+        if node.kind != "expr":
+            if node == "kwarg":
+                self.template_engine(("(%[0]{attr}=%c)", 1), node)
+                return
+
+            kwargs = None
            assert node[n].kind.startswith("CALL_FUNCTION")

+            if node[n].kind.startswith("CALL_FUNCTION_KW"):
+                if self.is_pypy:
+                    # FIXME: this doesn't handle positional and keyword args
+                    # properly. Need to do something more like that below
+                    # in the non-PYPY 3.6 case.
+                    self.template_engine(('(%[0]{attr}=%c)', 1), node[n-1])
+                    return
+                else:
+                    kwargs = node[n - 1].attr
+
+                assert isinstance(kwargs, tuple)
+                i = n - (len(kwargs) + 1)
+                j = 1 + n - node[n].attr
+            else:
+                i = start = n - 2
+                for i in range(start, 0, -1):
+                    if not node[i].kind in ["expr", "call", "LOAD_CLASSNAME"]:
+                        break
+                    pass
+
+                if i == start:
+                    return
+                i += 2

            for i in range(n - 2, 0, -1):
                if not node[i].kind in ["expr", "LOAD_CLASSNAME"]:
                    break
                pass

-            if i == n - 2:
-                return
-            self.write("(")
            line_separator = ", "
            sep = ""
            i += 1
-            while i < n:
+            self.write("(")
+            if kwargs:
+                # 3.6+ does this
+                while j < i:
+                    self.write(sep)
+                    value = self.traverse(node[j])
+                    self.write("%s" % value)
+                    sep = line_separator
+                    j += 1
+
+                j = 0
+                while i < l:
+                    self.write(sep)
+                    value = self.traverse(node[i])
+                    self.write("%s=%s" % (kwargs[j], value))
+                    sep = line_separator
+                    j += 1
+                    i += 1
+            else:
+                while i < l:
                    value = self.traverse(node[i])
-                    self.node_append(sep, value, node[i])
                    i += 1
                    self.write(sep, value)
                    sep = line_separator
+                    pass
+                pass
+        else:
+            if self.version >= 3.6 and node[0] == "LOAD_CONST":
+                return
+            value = self.traverse(node[0])
+            self.write("(")
+            self.write(value)
+            pass

        self.write(")")
        self.set_pos_info(node, start, len(self.f.getvalue()))
@@ -239,7 +239,9 @@ class SourceWalker(GenericASTTraversal, object):
            is_pypy=is_pypy,
        )

-        self.treeTransform = TreeTransform(version, showast)
+        self.treeTransform = TreeTransform(version=version,
+                                           show_ast=showast,
+                                           is_pypy=is_pypy)
        self.debug_parser = dict(debug_parser)
        self.showast = showast
        self.params = params
@@ -1564,8 +1566,15 @@ class SourceWalker(GenericASTTraversal, object):
            assert node[n].kind.startswith("CALL_FUNCTION")

            if node[n].kind.startswith("CALL_FUNCTION_KW"):
-                # 3.6+ starts doing this
+                if self.is_pypy:
+                    # FIXME: this doesn't handle positional and keyword args
+                    # properly. Need to do something more like that below
+                    # in the non-PYPY 3.6 case.
+                    self.template_engine(('(%[0]{attr}=%c)', 1), node[n-1])
+                    return
+                else:
                    kwargs = node[n - 1].attr

                assert isinstance(kwargs, tuple)
                i = n - (len(kwargs) + 1)
                j = 1 + n - node[n].attr
@@ -1750,56 +1759,85 @@ class SourceWalker(GenericASTTraversal, object):
            else:
                kv_node = node[1:]
        else:
-            assert node[-1].kind.startswith("kvlist")
-            kv_node = node[-1]
+            indent = self.indent + "  "
+            line_number = self.line_number
+            sep = ''
+            opname = node[-1].kind
+            if self.is_pypy and self.version >= 3.5:
+                if opname.startswith('BUILD_CONST_KEY_MAP'):
+                    keys = node[-2].attr
+                    # FIXME: DRY this and the above
+                    for i in range(len(keys)):
+                        key = keys[i]
+                        value = self.traverse(node[i], indent='')
+                        self.write(sep, key, ': ', value)
+                        sep = ", "
+                        if line_number != self.line_number:
+                            sep += "\n" + self.indent + "  "
+                            line_number = self.line_number
+                            pass
+                        pass
+                    pass
+                else:
+                    if opname.startswith('kvlist'):
+                        list_node = node[0]
+                    else:
+                        list_node = node
+
+                    assert list_node[-1].kind.startswith('BUILD_MAP')
+                    for i in range(0, len(list_node)-1, 2):
+                        key = self.traverse(list_node[i], indent='')
+                        value = self.traverse(list_node[i+1], indent='')
+                        self.write(sep, key, ': ', value)
+                        sep = ", "
+                        if line_number != self.line_number:
+                            sep += "\n" + self.indent + "  "
+                            line_number = self.line_number
+                            pass
+                        pass
+                    pass
+            elif opname.startswith('kvlist'):
+                kv_node = node[-1]
                first_time = True
                for kv in kv_node:
-                    assert kv in ("kv", "kv2", "kv3")
+                    assert kv in ('kv', 'kv2', 'kv3')

                    # kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
                    # kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
                    # kv3 ::= expr expr STORE_MAP

                    # FIXME: DRY this and the above
-                    indent = self.indent + "  "
-                    if kv == "kv":
+                    if kv == 'kv':
                        self.write(sep)
-                        name = self.traverse(kv[-2], indent="")
+                        name = self.traverse(kv[-2], indent='')
                        if first_time:
                            line_number = self.indent_if_source_nl(line_number, indent)
                            first_time = False
                            pass
                        line_number = self.line_number
-                        self.write(name, ": ")
-                        value = self.traverse(
-                            kv[1], indent=self.indent + (len(name) + 2) * " "
-                        )
-                    elif kv == "kv2":
+                        self.write(name, ': ')
+                        value = self.traverse(kv[1], indent=self.indent+(len(name)+2)*' ')
+                    elif kv == 'kv2':
                        self.write(sep)
-                        name = self.traverse(kv[1], indent="")
+                        name = self.traverse(kv[1], indent='')
                        if first_time:
                            line_number = self.indent_if_source_nl(line_number, indent)
                            first_time = False
                            pass
                        line_number = self.line_number
-                        self.write(name, ": ")
-                        value = self.traverse(
-                            kv[-3], indent=self.indent + (len(name) + 2) * " "
-                        )
-                    elif kv == "kv3":
+                        self.write(name, ': ')
+                        value = self.traverse(kv[-3], indent=self.indent+(len(name)+2)*' ')
+                    elif kv == 'kv3':
                        self.write(sep)
-                        name = self.traverse(kv[-2], indent="")
+                        name = self.traverse(kv[-2], indent='')
                        if first_time:
                            line_number = self.indent_if_source_nl(line_number, indent)
                            first_time = False
                            pass
                        line_number = self.line_number
-                        self.write(name, ": ")
+                        self.write(name, ': ')
                        line_number = self.line_number
-                        value = self.traverse(
-                            kv[0], indent=self.indent + (len(name) + 2) * " "
-                        )
+                        value = self.traverse(kv[0], indent=self.indent+(len(name)+2)*' ')
                    pass
                self.write(value)
                sep = ", "
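On the BUILD_CONST_KEY_MAP branch above: since Python 3.6 a dict display with constant keys pushes all the values first and then a single tuple of keys, so the walker pairs keys[i] with the i-th value child. Illustrative shape (the value strings stand in for traversed child nodes):

    # {'a': x, 'b': y} on 3.6+ compiles roughly to:
    #   LOAD_NAME           x
    #   LOAD_NAME           y
    #   LOAD_CONST          ('a', 'b')
    #   BUILD_CONST_KEY_MAP 2
    keys = ('a', 'b')
    values = ['x', 'y']
    pairs = ', '.join('%s: %s' % (k, v) for k, v in zip(keys, values))
    print('{' + pairs + '}')   # -> {a: x, b: y}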
@@ -1808,6 +1846,7 @@ class SourceWalker(GenericASTTraversal, object):
                        line_number = self.line_number
                    pass
                pass
+
            pass
        if sep.startswith(",\n"):
            self.write(sep[1:])
@@ -30,9 +30,11 @@ def is_docstring(node):


class TreeTransform(GenericASTTraversal, object):
-    def __init__(self, version, show_ast=None):
+    def __init__(self, version, show_ast=None,
+                 is_pypy=False):
        self.version = version
        self.showast = show_ast
+        self.is_pypy = is_pypy
        return

    def maybe_show_tree(self, ast):
@@ -133,6 +135,9 @@ class TreeTransform(GenericASTTraversal, object):
        # becomes:
        #   assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
        if jump_cond == "jmp_true":
+            if self.is_pypy:
+                kind = "assert0_pypy"
+            else:
                kind = "assert"
        else:
            assert jump_cond == "jmp_false"
@@ -230,6 +235,15 @@ class TreeTransform(GenericASTTraversal, object):

    n_ifelsestmtc = n_ifelsestmtl = n_ifelsestmt

+    def n_list_for(self, list_for_node):
+        expr = list_for_node[0]
+        if (expr == "expr" and expr[0] == "get_iter"):
+            # Remove extraneous get_iter() inside the "for" of a comprehension
+            assert expr[0][0] == "expr"
+            list_for_node[0] = expr[0][0]
+            list_for_node.transformed_by="n_list_for",
+        return list_for_node
+
    def traverse(self, node, is_lambda=False):
        node = self.preorder(node)
        return node
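The n_list_for transform rewrites a parse subtree, not bytecode: when the iterable slot of a list_for holds expr -> get_iter -> expr, the wrapper is dropped so the comprehension prints as "for x in seq" rather than "for x in iter(seq)". A toy sketch of the same idea, using tuples as stand-ins for the real parse-tree node objects:

    def strip_get_iter(list_for):
        outer = list_for[1]                  # ("expr", [child])
        child = outer[1][0]
        if outer[0] == "expr" and child[0] == "get_iter":
            list_for[1] = child[1][0]        # hoist the inner ("expr", ...) node up
        return list_for

    inner = ("expr", ["seq"])
    list_for = ["list_for", ("expr", [("get_iter", [inner])]), "store", "list_iter"]
    strip_get_iter(list_for)
    assert list_for[1] is inner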