mirror of https://github.com/rocky/python-uncompyle6.git
synced 2025-08-03 16:59:52 +08:00

Compare commits: release-py ... release-py
58 Commits
SHA1:
7272ac4a60, 45c8d62e68, 096563cf91, 7fd21aa227, 82bc294995, 9d3e4a6660,
7dfade1195, 1df5aa0ef9, c06ba45991, 7fab91eb4e, 7659277c5c, 761eee7ae7,
c2181e3235, 3695921364, d14193f219, 94251cd294, a9515c7aab, e5f3d803a8,
e5ae70bea8, 189605ea2c, 4c74bf1d9d, 600cee26d9, c087bd785e, 80b68af2d3,
24ccc16701, 69714fb65a, b94f98f8f7, f05b092983, 61466808f5, 76a66c3460,
91224b2382, e76f1f107f, 2f8e063a99, 15533c5e38, 6511cc4dd4, de25c5f003,
fdf97a1cc0, 24011bb0da, 8880568045, 4f61321c91, 269f4f2e1b, aab951280b,
f1e48fb60a, c0022ed5b7, 41a50b5e46, 0154c87d63, c9c70103aa, a18dc340ce,
037648577f, 3c7d460036, cd62e54c88, ef9ccc3a8c, c397bf6bda, 0aa41058a6,
27f67e6fca, 6fcf49b214, 49661b222e, c481d97866
Makefile | 1
@@ -38,6 +38,7 @@ check-3.0 check-3.1 check-3.2 check-3.6:
	$(MAKE) -C test $@

check-3.7: pytest
	$(MAKE) -C test check

#:Tests for Python 2.4-2.5 (don't have pytest)
check-2.4 check-2.5:
NEWS | 11
@@ -1,3 +1,14 @@
uncompyle6 3.2.1 2018-06-04 MF

- Python 1.4 and 1.5 bug fixes

uncompyle6 3.2.0 2018-05-19 Rocket Scientist

- Add rudimentary 1.4 support (still a bit buggy)
- add --tree+ option to show formatting rule, when it is constant
- Python 2.7.15candidate1 support (via xdis)
- bug fixes, especially for 3.7 (but 2.7 and 3.6 and others as well)

uncompyle6 3.1.3 2018-04-16

- Add some Python 3.7 rules, such as for handling LOAD_METHOD (not complete)
@@ -56,7 +56,7 @@ entry_points = {
]}
ftp_url = None
install_requires = ['spark-parser >= 1.8.5, < 1.9.0',
                    'xdis >= 3.7.0, < 3.8.0']
                    'xdis >= 3.8.2, < 3.9.0']
license = 'GPL3'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
@@ -1,5 +1,5 @@
#!/bin/bash
PYTHON_VERSION=3.6.4
PYTHON_VERSION=3.6.5

# FIXME put some of the below in a common routine
function finish {
@@ -47,7 +47,7 @@ install:

  # Upgrade to the latest version of pip to avoid it displaying warnings
  # about it being out of date.
  - "pip install --disable-pip-version-check --user --upgrade pip"
  - "%PYTHON%\\python.exe -m pip install --disable-pip-version-check --user --upgrade pip"

  # Install the build dependencies of the project. If some dependencies contain
  # compiled extensions and are not provided as pre-built wheel packages,
@@ -6,6 +6,7 @@ machine:

dependencies:
  override:
    - pip install --upgrade setuptools
    - pip install -e .
    - pip install pytest==3.2.5 hypothesis
test:
@@ -1,5 +1,6 @@
PHONY=check clean dist distclean test test-unit test-functional rmChangeLog clean_pyc nosetests \
	check-bytecode-1.5 check-bytecode-1 check-bytecode-2 check-bytecode-3 \
	check-bytecode-1 check-bytecode-1.4 check-bytecode-1.5 \
	check-bytecode-2 check-bytecode-3 \
	check-bytecode-2.2 check-byteocde-2.3 check-bytecode-2.4 \
	check-short check-2.6 check-2.7 check-3.0 check-3.1 check-3.2 check-3.3 \
	check-3.4 check-3.5 check-3.6 check-3.7 check-5.6 5.6 5.8 \
@@ -76,7 +77,7 @@ check-disasm:
	$(PYTHON) dis-compare.py

#: Check deparsing bytecode 1.x only
check-bytecode-1: check-bytecode-1.5
check-bytecode-1: check-bytecode-1.4 check-bytecode-1.5

#: Check deparsing bytecode 2.x only
check-bytecode-2:
@@ -93,11 +94,17 @@ check-bytecode-3:
#: Check deparsing bytecode that works running Python 2 and Python 3
check-bytecode: check-bytecode-3
	$(PYTHON) test_pythonlib.py \
	--bytecode-1.4 --bytecode-1.5 \
	--bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
	--bytecode-2.1 --bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
	--bytecode-2.5 --bytecode-2.6 --bytecode-2.7 \
	--bytecode-pypy2.7


#: Check deparsing bytecode 1.4 only
check-bytecode-1.4:
	$(PYTHON) test_pythonlib.py --bytecode-1.4

#: Check deparsing bytecode 1.5 only
check-bytecode-1.5:
	$(PYTHON) test_pythonlib.py --bytecode-1.5
@@ -267,7 +274,7 @@ check-3.4-ok:
2.6:

#: PyPy 5.0.x with Python 2.7 ...
pypy-2.7 5.0 5.3:
pypy-2.7 5.0 5.3 6.0:
	$(PYTHON) test_pythonlib.py --bytecode-pypy2.7 --verify

#: PyPy 2.4.x with Python 3.2 ...
BIN  test/bytecode_1.4/01_print-1.4.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/02_continue.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/bisect.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/cmp.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/cmpcache.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/dbhash.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/emacs.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/glob.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_class.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_del.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_docstring.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_empty.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_exec.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_global.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_globals.pyc  (new file; binary not shown)
BIN  test/bytecode_1.4/test_single_stmt.pyc  (new file; binary not shown)
BIN  test/bytecode_1.5/00_unpack_list.pyc  (new file; binary not shown)
BIN  (binary file not shown)
BIN  (binary file not shown)
BIN  test/bytecode_2.6_run/01_ifelse_listcomp.pyc  (new file; binary not shown)
BIN  test/bytecode_2.6_run/02_ifelse_lambda.pyc  (new file; binary not shown)
BIN  test/bytecode_2.6_run/03_complex_and.pyc  (new file; binary not shown)
BIN  test/bytecode_2.7.5/01_while1.pyc  (new file; binary not shown)
BIN  (binary file not shown)
BIN  test/bytecode_2.7/03_for_try_raise.pyc  (new file; binary not shown)
BIN  test/bytecode_2.7/04_while1_while1.pyc  (new file; binary not shown)
BIN  test/bytecode_2.7/05_try_else.pyc-notyet  (new file; binary not shown)
BIN  test/bytecode_2.7_run/01_ifelse_listcomp.pyc  (new file; binary not shown)
BIN  test/bytecode_2.7_run/02_ifelse_lambda.pyc  (new file; binary not shown)
BIN  test/bytecode_2.7_run/05_dict_comp.pyc  (new file; binary not shown)
BIN  (binary file not shown)
BIN  test/bytecode_3.1_run/05_dict_comp.pyc  (new file; binary not shown)
BIN  (binary file not shown)
BIN  (binary file not shown)
BIN  test/bytecode_3.5_run/02_ifelse_lambda.pyc  (new file; binary not shown)
BIN  (binary file not shown)
BIN  (binary file not shown)
BIN  (binary file not shown)
BIN  test/bytecode_3.6/04_while1_while1.pyc  (new file; binary not shown)
BIN  test/bytecode_3.6_run/02_ifelse_lambda.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/02_ifelse_lambda.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/02_kwargs.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/03_async_await.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/03_double_star_unpack.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/03_if_elif.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/03_weird26.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/03_while-if-break.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/04_class_kwargs.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/04_importlist.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/04_raise.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/04_try_finally.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/04_withas.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/05_36lambda.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7/05_call_function_kw2.pyc  (new file; binary not shown)
BIN  test/bytecode_3.7_run/01_fstring.pyc  (new file; binary not shown)
@@ -2,7 +2,8 @@
# lambda's have to be more or less on a line

f = lambda x: 1 if x<2 else 3
f(5)
assert f(3) == 3
assert f(1) == 1

# If that wasn't enough ...
# Python will create dead code
@@ -10,10 +11,18 @@ f(5)
# not to include the else expression

g = lambda: 1 if True else 3
g()
assert g() == 1

h = lambda: 1 if False else 3
h()
assert h() == 3

# From 2.7 test_builtin
lambda c: 'a' <= c <= 'z', 'Hello World'
i = lambda c: 'a' <= c <= 'z', 'Hello World'
assert i[0]('a') == True
assert i[0]('A') == False

# Issue #170. Bug is needing a "conditional_not_lambda" grammar rule
# in addition to the "conditional_lambda" rule
j = lambda a: False if not a else True
assert j(True) == True
assert j(False) == False
test/simple_source/bug14/00_unpack_list.py | 3 (new file)
@@ -0,0 +1,3 @@
# Python 1.4 tzparse.py, but also appears in 1.5

[tzname, delta] = __file__
test/simple_source/bug14/01_print.py | 6 (new file)
@@ -0,0 +1,6 @@
# Python 1.4 aifc.py
# Something weird about the final "print" and PRINT_NL_CONT followed by PRINT_NL
def _readmark(self, markers):
    if self._markers: print 'marker',
    else: print 'markers',
    print 'instead of', markers
test/simple_source/bug14/02_continue.py | 11 (new file)
@@ -0,0 +1,11 @@
# Python 1.4 cgi.py
# Bug was in "continue" detection.
# 1.4 doesn't have lnotab and our CONTINUE detection is off.
def parse_multipart(params, pdict):
    while params:
        if params.has_key('name'):
            params = None
        else:
            continue

    return None
test/simple_source/bug26/01_ifelse_listcomp.py | 4 (new file)
@@ -0,0 +1,4 @@
# Bug from issue #171: parsing "if x if a else y" inside a list comprehension on 2.7
# This is RUNNABLE!
assert [False, True, True, True, True] == [False if not a else True for a in range(5)]
assert [True, False, False, False, False] == [False if a else True for a in range(5)]
test/simple_source/bug26/03_complex_and.py | 17 (new file)
@@ -0,0 +1,17 @@
# From 2.6 test_datetime.py
# Bug is in parsing (x is 0 or x is 1) and (y is 5 or y is 2)
# correctly.

# This code is RUNNABLE!
result = []
for y in (1, 2, 10):
    x = cmp(1, y)
    if (x is 0 or x is 1) and (y is 5 or y is 2):
        expected = 10
    elif y is 2:
        expected = 2
    else:
        expected = 3
    result.append(expected)

assert result == [10, 2, 3]
test/simple_source/bug27+/03_for_try_raise.py | 12 (new file)
@@ -0,0 +1,12 @@
# Code in 2.7 needing rule:
# try_except ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK except_handler
# Generally we need a COME_FROM. But not in the situation below.

for package in [1,2]:
    try:
        pass
    except IndexError:
        with __file__ as f:
            pass
    except:
        raise
test/simple_source/bug27+/05_try_else.py | 11 (new file)
@@ -0,0 +1,11 @@
# From Python 2.7 test_ziplib.py
# Bug is distinguishing try from try/else.
def testAFakeZlib(self):
    try:
        self.doTest()
    except ImportError:
        if self.compression != 3:
            self.fail("expected test to not raise ImportError")
    else:
        if self.compression != 4:
            self.fail("expected test to raise ImportError")
test/simple_source/bug275/01_while1.py | 9 (new file)
@@ -0,0 +1,9 @@
# Issue #173. Bug is that 2.7.5 omits POP_BLOCK in
# in later 2.7 grammar.
# while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
# while1stmt ::= SETUP_LOOP l_stmts_opt CONTINUE COME_FROM
# which is included in later code generation
ms=0
if ms==1:
    while 1:
        pass
@@ -1,4 +1,7 @@
# Bug was in dictionary comprehension involving "if not"
# Issue #162
#
# This code is RUNNABLE!
def x(s):
    return {k: v
            for (k, v) in s
@@ -19,10 +19,15 @@ while 1:
while 1:
    if __name__:
        while 1:
            if y:
                if __name__:
                    break
                raise RuntimeError
            elif __file__:
                x = 2
            else:
                raise RuntimeError

# Degenerate case. Note: we can't run because this causes an infinite loop.
# Suggested in issue #172
while 1:
    pass
test/stdlib/compile-file-1x.py | 22 (new executable file)
@@ -0,0 +1,22 @@
#!/usr/bin/env python
"""byte compiles a Python 1.x program"""
import sys
if len(sys.argv) != 2:
    print("Usage: compile-file.py *python-file*")
    sys.exit(1)
source = sys.argv[1]

# assert source.endswith('.py')
basename = source[:-3]

# We do this crazy way to support Python 1.4 which
# doesn't support version_info.
PY_VERSION = sys.version[:3]

bytecode = "%s-%s.pyc" % (basename, PY_VERSION)

import py_compile
print("# compiling %s to %s" % (source, bytecode))
py_compile.compile(source, bytecode)
# import os
# os.system("../bin/uncompyle6 %s" % bytecode)
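The commented-out lines at the end of the script hint at the intended follow-up step. A minimal sketch of that step, with the bytecode file name and the relative path to the uncompyle6 driver assumed for illustration only:

# Hypothetical follow-up, mirroring the commented-out lines above: hand the
# version-suffixed bytecode to the uncompyle6 command-line driver.
import os

bytecode = "hello-1.4.pyc"                    # example name from the scheme above
os.system("../bin/uncompyle6 %s" % bytecode)  # path assumed relative to test/stdlib/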
@@ -1,4 +1,5 @@
#!/usr/bin/env python
"""byte compiles a Python program after version 2.2 or so. Also see compile_file_1x.py"""
import sys
if len(sys.argv) != 2:
    print("Usage: compile-file.py *python-file*")
@@ -56,6 +56,8 @@ case $PYVERSION in
    2.6)
        SKIP_TESTS=(
            [test_compile.py]=1  # Intermittent - sometimes works and sometimes doesn't
            [test_grammar.py]=1  # Need real flow control. "and" inside "or"
                                 # "and" inside ifelse need to simultaneously work
            [test_grp.py]=1      # Long test - might work Control flow?
            [test_opcodes.py]=1
            [test_pwd.py]=1      # Long test - might work? Control flow?
@@ -73,12 +75,24 @@ case $PYVERSION in
            # .pyenv/versions/2.6.9/lib/python2.6/sre_parse.pyc
            # .pyenv/versions/2.6.9/lib/python2.6/tabnanny.pyc
            # .pyenv/versions/2.6.9/lib/python2.6/tarfile.pyc

            # Not getting set by batch below?
            [test_pprint.py]=1

        )
        if (( batch )) ; then
            # Fails in crontab environment?
            # Figure out what's up here
            SKIP_TESTS[test_aifc.py]=1
            SKIP_TESTS[test_array.py]=1

            # SyntaxError: Non-ASCII character '\xdd' in file test_base64.py on line 153, but no encoding declared; see http://www.python.org/peps/pep-0263.html for details
            SKIP_TESTS[test_base64.py]=1

            # output indicates expected == output, but this fails anyway.
            # Maybe the underlying encoding is subtly different so it
            # looks the same?
            SKIP_TESTS[test_pprint.py]=1
        fi
        ;;
    2.7)
@@ -109,12 +123,17 @@ case $PYVERSION in
            [test_unicode.py]=1     # Too long to run 11 seconds
            [test_xpickle.py]=1     # Runs ok but takes 72 seconds
            [test_zipfile64.py]=1   # Runs ok but takes 204 seconds
            [test_zipimport.py]=1   # We can't distinguish try from try/else yet
        )
        if (( batch )) ; then
            # Fails in crontab environment?
            # Figure out what's up here
            SKIP_TESTS[test_array.py]=1
            SKIP_TESTS[test_ast.py]=1
            SKIP_TESTS[test_audioop.py]=1

            # SyntaxError: Non-ASCII character '\xdd' in file test_base64.py on line 153, but no encoding declared; see http://www.python.org/peps/pep-0263.html for details
            SKIP_TESTS[test_base64.py]=1
        fi
        ;;
    3.5)
@@ -171,7 +190,7 @@ if [[ -n $1 ]] ; then
        SKIP_TESTS=()
    fi
else
    files=test_a*.py
    files=test_*.py
fi

typeset -i ALL_FILES_STARTTIME=$(date +%s)
@@ -76,7 +76,7 @@ for vers in (2.7, 3.4, 3.5, 3.6):
        test_options[key] = (os.path.join(src_dir, pythonlib), PYOC, key, vers)
        pass

for vers in (1.5,
for vers in (1.4, 1.5,
             2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
             3.0, 3.1, 3.2, 3.3,
             3.4, 3.5, 3.6, 3.7, 'pypy3.2', 'pypy2.7'):
@@ -45,6 +45,7 @@ Debugging Options:
  --asm -a       include byte-code (disables --verify)
  --grammar -g   show matching grammar
  --tree -t      include syntax tree (disables --verify)
  --tree++       add template rules to --tree when possible

Extensions of generated files:
  '.pyc_dis' '.pyo_dis'  successfully decompiled (and verified if --verify)
@@ -60,7 +61,7 @@ from uncompyle6.version import VERSION

def usage():
    print("""usage:
   %s [--verify | --weak-verify ] [--asm] [--tree] [--grammar] [-o <path>] FILE|DIR...
   %s [--verify | --weak-verify ] [--asm] [--tree[+]] [--grammar] [-o <path>] FILE|DIR...
   %s [--help | -h | --version | -V]
""" % (program, program))
    sys.exit(1)
@@ -84,8 +85,10 @@ def main_bin():

    try:
        opts, files = getopt.getopt(sys.argv[1:], 'hagtdrVo:c:p:',
                                    'help asm grammar linemaps recurse timestamp tree '
                                    'fragments verify verify-run version weak-verify '
                                    'help asm grammar linemaps recurse '
                                    'timestamp tree tree+ '
                                    'fragments verify verify-run version '
                                    'weak-verify '
                                    'showgrammar'.split(' '))
    except getopt.GetoptError(e):
        sys.stderr.write('%s: %s\n' % (os.path.basename(sys.argv[0]), e))
@@ -115,6 +118,9 @@ def main_bin():
        elif opt in ('--tree', '-t'):
            options['showast'] = True
            options['do_verify'] = None
        elif opt in ('--tree+',):
            options['showast'] = 'Full'
            options['do_verify'] = None
        elif opt in ('--grammar', '-g'):
            options['showgrammar'] = True
        elif opt == '-o':
@@ -618,18 +618,25 @@ def get_python_parser(
    # a lazy way of doing the import?

    if version < 3.0:
        if version == 1.5:
            import uncompyle6.parsers.parse15 as parse15
            if compile_mode == 'exec':
                p = parse15.Python15Parser(debug_parser)
            else:
                p = parse15.Python15ParserSingle(debug_parser)
        elif version == 2.1:
            import uncompyle6.parsers.parse21 as parse21
            if compile_mode == 'exec':
                p = parse21.Python21Parser(debug_parser)
            else:
                p = parse21.Python21ParserSingle(debug_parser)
        if version < 2.2:
            if version == 1.4:
                import uncompyle6.parsers.parse14 as parse14
                if compile_mode == 'exec':
                    p = parse14.Python14Parser(debug_parser)
                else:
                    p = parse14.Python14ParserSingle(debug_parser)
            elif version == 1.5:
                import uncompyle6.parsers.parse15 as parse15
                if compile_mode == 'exec':
                    p = parse15.Python15Parser(debug_parser)
                else:
                    p = parse15.Python15ParserSingle(debug_parser)
            elif version == 2.1:
                import uncompyle6.parsers.parse21 as parse21
                if compile_mode == 'exec':
                    p = parse21.Python21Parser(debug_parser)
                else:
                    p = parse21.Python21ParserSingle(debug_parser)
            elif version == 2.2:
                import uncompyle6.parsers.parse22 as parse22
                if compile_mode == 'exec':
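For orientation, a sketch of how the dispatcher above might be exercised once the 1.4 branch exists. Calling it with just a version assumes the remaining parameters (debug settings, compile mode) have defaults, which this hunk does not show, so treat the call form as an assumption:

# Sketch only: route to the new 1.4 grammar through the dispatcher above.
from uncompyle6.parser import get_python_parser

p = get_python_parser(1.4)      # assumed defaults for the other arguments
print(type(p).__name__)         # expected to be a Python14Parser variant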
uncompyle6/parsers/parse14.py | 70 (new file)
@@ -0,0 +1,70 @@
# Copyright (c) 2018 Rocky Bernstein

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle
from uncompyle6.parsers.parse15 import Python15Parser

class Python14Parser(Python15Parser):

    def p_misc14(self, args):
        """
        # Nothing here yet, but will need to add UNARY_CALL, BINARY_CALL,
        # RAISE_EXCEPTION, BUILD_FUNCTION, UNPACK_ARG, UNPACK_VARARG, LOAD_LOCAL,
        # SET_FUNC_ARGS, and RESERVE_FAST

        # FIXME: should check that this is indeed around __doc__
        # Possibly not strictly needed
        stmt ::= doc_junk
        doc_junk ::= LOAD_CONST POP_TOP

        # Not sure why later Pythons omit the COME_FROM
        jb_pop14 ::= JUMP_BACK COME_FROM POP_TOP

        whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt
                          jb_pop14
                          POP_BLOCK else_suitel COME_FROM

        print_items_nl_stmt ::= expr PRINT_ITEM_CONT print_items_opt PRINT_NEWLINE_CONT


        # 1.4 doesn't have linenotab, and although this shouldn't
        # be a show stopper, our CONTINUE detection is off here.
        continue ::= JUMP_BACK
        """

    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
        super(Python14Parser, self).__init__(debug_parser)
        self.customized = {}

    def customize_grammar_rules(self, tokens, customize):
        super(Python14Parser, self).customize_grammar_rules(tokens, customize)
        self.remove_rules("""
        whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt
                          jb_pop
                          POP_BLOCK else_suitel COME_FROM
        """)
        self.check_reduce['doc_junk'] = 'tokens'


    def reduce_is_invalid(self, rule, ast, tokens, first, last):
        invalid = super(Python14Parser,
                        self).reduce_is_invalid(rule, ast,
                                                tokens, first, last)
        if invalid or tokens is None:
            return invalid
        if rule[0] == 'doc_junk':
            return not isinstance(tokens[first].pattr, str)



class Python14ParserSingle(Python14Parser, PythonParserSingle):
    pass

if __name__ == '__main__':
    # Check grammar
    p = Python14Parser()
    p.check_grammar()
    p.dump_grammar()

# local variables:
# tab-width: 4
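The doc_junk veto above boils down to a type check on the constant being popped. A small self-contained illustration; FakeToken is a stand-in, not uncompyle6's real Token class:

# Stand-in illustration of the doc_junk check: the reduction is valid only
# when the LOAD_CONST being discarded holds a string, i.e. it looks like a
# stray docstring constant rather than some other value left on the stack.
class FakeToken(object):
    def __init__(self, kind, pattr):
        self.kind, self.pattr = kind, pattr

def doc_junk_is_invalid(tokens, first):
    # mirrors: return not isinstance(tokens[first].pattr, str)
    return not isinstance(tokens[first].pattr, str)

assert not doc_junk_is_invalid([FakeToken('LOAD_CONST', 'module docstring')], 0)
assert doc_junk_is_invalid([FakeToken('LOAD_CONST', 42)], 0)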
@@ -2,7 +2,7 @@
# Copyright (c) 2000-2002 by hartmut Goebel <hartmut@goebel.noris.de>

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle
from uncompyle6.parser import PythonParserSingle, nop_func
from uncompyle6.parsers.parse21 import Python21Parser

class Python15Parser(Python21Parser):
@@ -23,7 +23,18 @@ class Python15Parser(Python21Parser):
        importlist ::= IMPORT_FROM
        """

class Python15ParserSingle(Python21Parser, PythonParserSingle):
    def customize_grammar_rules(self, tokens, customize):
        super(Python15Parser, self).customize_grammar_rules(tokens, customize)
        for i, token in enumerate(tokens):
            opname = token.kind
            opname_base = opname[:opname.rfind('_')]

            if opname_base == 'UNPACK_LIST':
                self.addRule("store ::= unpack_list", nop_func)


class Python15ParserSingle(Python15Parser, PythonParserSingle):
    pass

if __name__ == '__main__':
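The UNPACK_LIST customization above only adds `store ::= unpack_list`; the `unpack_list` rule text itself is generated from the opcode's argument by the Python 2 parser further down in this diff. A sketch of the combined effect for a two-target unpack (the token values are illustrative):

# Rule text generated for a two-target list unpack, per the template in
# Python2Parser.customize_grammar_rules below.
opname, attr = 'UNPACK_LIST_2', 2
rule = 'unpack_list ::= ' + opname + ' store' * attr
assert rule == 'unpack_list ::= UNPACK_LIST_2 store store'
# Python 1.5 then also allows this unpack wherever a plain store is expected:
#     store ::= unpack_list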
@@ -272,12 +272,12 @@ class Python2Parser(PythonParser):
                 'LOAD', 'LOOKUP', 'MAKE', 'SETUP',
                 'RAISE', 'UNPACK'))

        # Opcode names in the custom_ops_seen set have rules that get added
        # Opcode names in the custom_seen_ops set have rules that get added
        # unconditionally and the rules are constant. So they need to be done
        # only once and if we see the opcode a second we don't have to consider
        # adding more rules.
        #
        custom_ops_seen = set()
        custom_seen_ops = set()

        for i, token in enumerate(tokens):
            opname = token.kind
@@ -285,14 +285,13 @@ class Python2Parser(PythonParser):
            # Do a quick breakout before testing potentially
            # each of the dozen or so instruction in if elif.
            if (opname[:opname.find('_')] not in customize_instruction_basenames
                    or opname in custom_ops_seen):
                    or opname in custom_seen_ops):
                continue

            opname_base = opname[:opname.rfind('_')]

            # The order of opname listed is roughly sorted below
            if opname_base in ('BUILD_LIST', 'BUILD_SET', 'BUILD_TUPLE'):
                v = token.attr
                collection = opname_base[opname_base.find('_')+1:].lower()
                rule = '%s ::= %s%s' % (collection, (token.attr * 'expr '), opname)
                self.add_unique_rules([
@@ -353,32 +352,32 @@ class Python2Parser(PythonParser):
                       + 'expr ' * nak + opname
            elif opname == 'CONTINUE_LOOP':
                self.addRule('continue ::= CONTINUE_LOOP', nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'DELETE_ATTR':
                self.addRule('del_stmt ::= expr DELETE_ATTR', nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'DELETE_DEREF':
                self.addRule("""
                    stmt ::= del_deref_stmt
                    del_deref_stmt ::= DELETE_DEREF
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'DELETE_SUBSCR':
                self.addRule("""
                    del_stmt ::= delete_subscr
                    delete_subscr ::= expr expr DELETE_SUBSCR
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'GET_ITER':
                self.addRule("""
                    expr ::= get_iter
                    attribute ::= expr GET_ITER
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname_base in ('DUP_TOPX', 'RAISE_VARARGS'):
                # FIXME: remove these conditions if they are not needed.
@@ -393,7 +392,6 @@ class Python2Parser(PythonParser):
                    """, nop_func)
                continue
            elif opname == 'JUMP_IF_NOT_DEBUG':
                v = token.attr
                self.addRule("""
                    jmp_true_false ::= POP_JUMP_IF_TRUE
                    jmp_true_false ::= POP_JUMP_IF_FALSE
@@ -411,18 +409,18 @@ class Python2Parser(PythonParser):
                    expr ::= attribute
                    attribute ::= expr LOAD_ATTR
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'LOAD_LISTCOMP':
                self.addRule("expr ::= listcomp", nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'LOAD_SETCOMP':
                self.add_unique_rules([
                    "expr ::= set_comp",
                    "set_comp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1"
                    ], customize)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'LOOKUP_METHOD':
                # A PyPy speciality - DRY with parse3
@@ -431,7 +429,7 @@ class Python2Parser(PythonParser):
                    attribute ::= expr LOOKUP_METHOD
                    """,
                    nop_func)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname_base == 'MAKE_FUNCTION':
                if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
@@ -480,7 +478,7 @@ class Python2Parser(PythonParser):
                    "try_except_pypy ::= SETUP_EXCEPT suite_stmts_opt except_handler_pypy",
                    "except_handler_pypy ::= COME_FROM except_stmts END_FINALLY COME_FROM"
                    ], customize)
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname == 'SETUP_FINALLY':
                if 'PyPy' in customize:
@@ -489,13 +487,13 @@ class Python2Parser(PythonParser):
                        tryfinallystmt_pypy ::= SETUP_FINALLY suite_stmts_opt COME_FROM_FINALLY
                                                suite_stmts_opt END_FINALLY""", nop_func)

                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                continue
            elif opname_base in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                rule = 'unpack ::= ' + opname + ' store' * token.attr
            elif opname_base == 'UNPACK_LIST':
                custom_ops_seen.add(opname)
                custom_seen_ops.add(opname)
                rule = 'unpack_list ::= ' + opname + ' store' * token.attr
            else:
                continue
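The rename from custom_ops_seen to custom_seen_ops above is cosmetic, but the pattern it labels is worth spelling out: rules whose text never varies are added once per opcode name, guarded by a seen-set. A simplified, self-contained sketch (the rule table here is hypothetical, not uncompyle6's API):

# Simplified sketch of the add-once pattern above: constant rules are guarded
# by custom_seen_ops so repeated occurrences of an opcode add nothing new.
def add_custom_rules(opnames, add_rule):
    custom_seen_ops = set()
    for opname in opnames:
        if opname in custom_seen_ops:
            continue
        if opname == 'DELETE_ATTR':
            add_rule('del_stmt ::= expr DELETE_ATTR')
            custom_seen_ops.add(opname)

rules = []
add_custom_rules(['DELETE_ATTR', 'LOAD_FAST', 'DELETE_ATTR'], rules.append)
assert rules == ['del_stmt ::= expr DELETE_ATTR']   # added once, not twice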
@@ -1,4 +1,4 @@
# Copyright (c) 2016-2017 Rocky Bernstein
# Copyright (c) 2016-2018 Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <hartmut@goebel.noris.de>
# Copyright (c) 1999 John Aycock

@@ -32,9 +32,6 @@ class Python23Parser(Python24Parser):
        while1stmt ::= _while1test l_stmts_opt JUMP_BACK
                       POP_TOP POP_BLOCK COME_FROM

        while1stmt ::= _while1test l_stmts_opt JUMP_BACK
                       POP_TOP POP_BLOCK

        while1stmt ::= _while1test l_stmts_opt JUMP_BACK
                       COME_FROM POP_TOP POP_BLOCK COME_FROM

@@ -83,8 +83,12 @@ class Python25Parser(Python26Parser):
        setupwithas ::= DUP_TOP LOAD_ATTR ROT_TWO LOAD_ATTR CALL_FUNCTION_0 setup_finally
        stmt ::= classdefdeco
        stmt ::= conditional_lambda
        stmt ::= conditional_not_lambda
        conditional_lambda ::= expr jmp_false_then expr return_if_lambda
                               return_stmt_lambda LAMBDA_MARKER
        conditional_not_lambda
            ::= expr jmp_true_then expr return_if_lambda
                return_stmt_lambda LAMBDA_MARKER
        """)
        super(Python25Parser, self).customize_grammar_rules(tokens, customize)
        if self.version == 2.5:
@@ -1,4 +1,4 @@
# Copyright (c) 2017 Rocky Bernstein
# Copyright (c) 2017-2018 Rocky Bernstein
"""
spark grammar differences over Python2 for Python 2.6.
"""
@@ -264,7 +264,11 @@ class Python26Parser(Python2Parser):
        dict ::= BUILD_MAP kvlist
        kvlist ::= kvlist kv3

        conditional ::= expr jmp_false expr jf_cf_pop expr come_from_opt
        # Note: preserve positions 0 2 and 4 for semantic actions
        conditional_not ::= expr jmp_true expr jf_cf_pop expr COME_FROM
        conditional ::= expr jmp_false expr jf_cf_pop expr come_from_opt
        expr ::= conditional_not

        and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP

        # compare_chained is like x <= y <= z
@@ -289,8 +293,12 @@ class Python26Parser(Python2Parser):

        return_if_lambda ::= RETURN_END_IF_LAMBDA POP_TOP
        stmt ::= conditional_lambda
        stmt ::= conditional_not_lambda
        conditional_lambda ::= expr jmp_false_then expr return_if_lambda
                               return_stmt_lambda LAMBDA_MARKER
        conditional_not_lambda ::=
            expr jmp_true_then expr return_if_lambda
            return_stmt_lambda LAMBDA_MARKER

        # conditional_true are for conditions which always evaluate true
        # There is dead or non-optional remnants of the condition code though,
@@ -315,7 +323,9 @@ class Python26Parser(Python2Parser):
                             WITH_CLEANUP END_FINALLY
        """)
        super(Python26Parser, self).customize_grammar_rules(tokens, customize)
        self.check_reduce['and'] = 'AST'
        if self.version >= 2.6:
            self.check_reduce['and'] = 'AST'
            self.check_reduce['assert_expr_and'] = 'AST'
        self.check_reduce['list_for'] = 'AST'
        self.check_reduce['try_except'] = 'tokens'
        self.check_reduce['tryelsestmt'] = 'AST'
@@ -326,17 +336,33 @@ class Python26Parser(Python2Parser):
                                                        tokens, first, last)
        if invalid or tokens is None:
            return invalid
        if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
        if rule in (
                ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')),
                ('and', ('expr', 'jmp_false', 'expr', 'come_from_opt')),
                ('assert_expr_and', ('assert_expr', 'jmp_false', 'expr'))
                ):

            # FIXME: workaround profiling bug
            if ast[1] is None:
                return False

            # For now, we won't let the 2nd 'expr' be a "conditional_not"
            # However in < 2.6 where we don't have if/else expression it *can*
            # be.
            if self.version >= 2.6 and ast[2][0] == 'conditional_not':
                return True

            test_index = last
            while tokens[test_index].kind == 'COME_FROM':
                test_index += 1
            if tokens[test_index].kind.startswith('JUMP_IF'):
                return False

            # Test that jmp_false jumps to the end of "and"
            # or that it jumps to the same place as the end of "and"
            jmp_false = ast[1][0]
            jmp_target = jmp_false.offset + jmp_false.attr + 3
            return not (jmp_target == tokens[last].offset or
            return not (jmp_target == tokens[test_index].offset or
                        tokens[last].pattr == jmp_false.pattr)
        elif rule == (
                'list_for',
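The `+ 3` in the jump-target arithmetic above reflects the bytecode layout assumed here: before Python 3.6 an instruction with an operand occupies three bytes, and the conditional jump feeding jmp_false in 2.6 carries a relative delta. A worked example with made-up numbers:

# Worked example of the "and" check above (offsets and delta are made up).
# Absolute target of a relative jump = jump offset + 3 (the instruction's own
# size) + the encoded delta.
jmp_offset, jmp_delta = 10, 8
jmp_target = jmp_offset + jmp_delta + 3
assert jmp_target == 21
# The "and" reduction is rejected unless this target coincides with the token
# that ends the "and" (or both jump to the same place).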
@@ -33,6 +33,9 @@ class Python27Parser(Python2Parser):
        set_comp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
                          JUMP_BACK RETURN_VALUE RETURN_LAST

        comp_iter ::= comp_if_not
        comp_if_not ::= expr jmp_true comp_iter

        comp_body ::= dict_comp_body
        comp_body ::= set_comp_body
        comp_for ::= expr for_iter store comp_iter JUMP_BACK
@@ -48,6 +51,10 @@ class Python27Parser(Python2Parser):

    def p_try27(self, args):
        """
        # If the last except is a "raise" we might not have a final COME_FROM
        try_except ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
                       except_handler

        tryfinallystmt ::= SETUP_FINALLY suite_stmts_opt
                           POP_BLOCK LOAD_CONST
                           COME_FROM_FINALLY suite_stmts_opt END_FINALLY
@@ -136,6 +143,14 @@ class Python27Parser(Python2Parser):
        whilestmt ::= SETUP_LOOP testexpr returns
                      _come_froms POP_BLOCK COME_FROM


        # 2.7.5 (and before to 2.7.0?)
        while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
        while1stmt ::= SETUP_LOOP l_stmts_opt CONTINUE COME_FROM
        while1stmt ::= SETUP_LOOP returns COME_FROM
        while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK
                           else_suitel COME_FROM

        while1stmt ::= SETUP_LOOP returns bp_come_from
        while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM
        whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK _come_froms
@@ -158,8 +173,15 @@ class Python27Parser(Python2Parser):
        # Common with 2.6
        return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM
        stmt ::= conditional_lambda
        stmt ::= conditional_not_lambda
        conditional_lambda ::= expr jmp_false expr return_if_lambda
                               return_stmt_lambda LAMBDA_MARKER
        conditional_not_lambda
            ::= expr jmp_true expr return_if_lambda
                return_stmt_lambda LAMBDA_MARKER

        expr ::= conditional_not
        conditional_not ::= expr jmp_true expr _jump expr COME_FROM

        kv3 ::= expr expr STORE_MAP
        """
@@ -167,14 +189,18 @@ class Python27Parser(Python2Parser):
    def customize_grammar_rules(self, tokens, customize):
        # 2.7 changes COME_FROM to COME_FROM_FINALLY
        self.remove_rules("""
        while1stmt ::= SETUP_LOOP l_stmts JUMP_BACK COME_FROM
        while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK else_suite COME_FROM
        tryfinallystmt ::= SETUP_FINALLY suite_stmts_opt POP_BLOCK LOAD_CONST COME_FROM suite_stmts_opt END_FINALLY
        tryfinallystmt ::= SETUP_FINALLY suite_stmts_opt
                           POP_BLOCK LOAD_CONST COME_FROM suite_stmts_opt
                           END_FINALLY
        """)
        super(Python27Parser, self).customize_grammar_rules(tokens, customize)
        self.check_reduce['and'] = 'AST'
        # self.check_reduce['or'] = 'AST'
        self.check_reduce['raise_stmt1'] = 'AST'
        # self.check_reduce['conditional_true'] = 'AST'
        self.check_reduce['list_if_not'] = 'AST'
        self.check_reduce['list_if'] = 'AST'
        self.check_reduce['conditional_true'] = 'AST'
        return

    def reduce_is_invalid(self, rule, ast, tokens, first, last):
@@ -192,6 +218,21 @@ class Python27Parser(Python2Parser):
                        tokens[last].pattr == jmp_false.pattr)
        elif rule[0] == ('raise_stmt1'):
            return ast[0] == 'expr' and ast[0][0] == 'or'
        elif rule == ('list_if_not', ('expr', 'jmp_true', 'list_iter')):
            jump_inst = ast[1][0]
            jump_offset = jump_inst.attr
            return jump_offset > jump_inst.offset and jump_offset < tokens[last].offset
        elif rule == ('list_if', ('expr', 'jmp_false', 'list_iter')):
            jump_inst = ast[1][0]
            jump_offset = jump_inst.attr
            return jump_offset > jump_inst.offset and jump_offset < tokens[last].offset
        elif rule == ('or', ('expr', 'jmp_true', 'expr', '\\e_come_from_opt')):
            # Test that jmp_true doesn't jump inside the middle of the "or"
            # or that it jumps to the same place as the end of "and"
            jmp_true = ast[1][0]
            jmp_target = jmp_true.offset + jmp_true.attr + 3
            return not (jmp_target == tokens[last].offset or
                        tokens[last].pattr == jmp_true.pattr)
        # elif rule[0] == ('conditional_true'):
        #     # FIXME: the below is a hack: we check expr for
        #     # nodes that could have possibly been a Boolean.
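A worked example of the new list_if / list_if_not veto above, with made-up numbers. The reading assumed here is that these conditional jumps in 2.7 list comprehensions carry an absolute target in attr, so the reduction is rejected when that target lands strictly inside the span being reduced rather than at or past its last token:

# Worked example of the list_if / list_if_not check (values are made up).
jump_inst_offset, jump_target, last_offset = 6, 20, 30
rejected = jump_target > jump_inst_offset and jump_target < last_offset
assert rejected   # 20 falls inside (6, 30), so this candidate parse is dropped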
@@ -335,6 +335,17 @@ class Python3Parser(PythonParser):

    def p_stmt3(self, args):
        """
        stmt ::= conditional_lambda
        stmt ::= conditional_not_lambda
        conditional_lambda ::= expr jmp_false expr return_if_lambda
                               return_stmt_lambda LAMBDA_MARKER
        conditional_not_lambda
            ::= expr jmp_true expr return_if_lambda
                return_stmt_lambda LAMBDA_MARKER

        return_stmt_lambda ::= ret_expr RETURN_VALUE_LAMBDA
        return_if_lambda ::= RETURN_END_IF_LAMBDA

        stmt ::= return_closure
        return_closure ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST

@@ -477,9 +488,7 @@ class Python3Parser(PythonParser):
        self.addRule(rule, nop_func)
        return

    def custom_classfunc_rule(self, opname, token, customize,
                              possible_class_decorator,
                              seen_GET_AWAITABLE_YIELD_FROM, next_token):
    def custom_classfunc_rule(self, opname, token, customize, next_token):
        """
        call ::= expr {expr}^n CALL_FUNCTION_n
        call ::= expr {expr}^n CALL_FUNCTION_VAR_n
@@ -509,7 +518,7 @@ class Python3Parser(PythonParser):

        self.add_unique_rule(rule, token.kind, uniq_param, customize)

        if possible_class_decorator:
        if 'LOAD_BUILD_CLASS' in self.seen_ops:
            if (next_token == 'CALL_FUNCTION' and next_token.attr == 1
                    and args_pos > 1):
                rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d'
@@ -557,9 +566,9 @@ class Python3Parser(PythonParser):
        customize_instruction_basenames = frozenset(
            ('BUILD', 'CALL', 'CONTINUE', 'DELETE', 'GET',
             'JUMP', 'LOAD', 'LOOKUP', 'MAKE',
             'RAISE', 'UNPACK'))
             'RETURN', 'RAISE', 'UNPACK'))

        # Opcode names in the custom_ops_seen set have rules that get added
        # Opcode names in the custom_ops_processed set have rules that get added
        # unconditionally and the rules are constant. So they need to be done
        # only once and if we see the opcode a second we don't have to consider
        # adding more rules.
@@ -567,18 +576,13 @@ class Python3Parser(PythonParser):
        # Note: BUILD_TUPLE_UNPACK_WITH_CALL gets considered by
        # default because it starts with BUILD. So we'll set to ignore it from
        # the start.
        custom_ops_seen = set(('BUILD_TUPLE_UNPACK_WITH_CALL',))

        # In contrast to custom_ops_seen, seen_xxx rules here are part of some
        # other rule; so if we see them a second time we still have to loop
        # over customization
        seen_LOAD_BUILD_CLASS = False
        seen_GET_AWAITABLE_YIELD_FROM = False

        # This is used in parse36.py as well as here
        self.seen_LOAD_DICTCOMP = False
        self.seen_LOAD_SETCOMP = False
        custom_ops_processed = set(('BUILD_TUPLE_UNPACK_WITH_CALL',))

        # A set of instruction operation names that exist in the token stream.
        # We use this to customize the grammar that we create.
        # 2.6-compatible set comprehensions
        self.seen_ops = frozenset([t.kind for t in tokens])
        self.seen_op_basenames = frozenset([opname[:opname.rfind('_')] for opname in self.seen_ops])

        # Loop over instructions adding custom grammar rules based on
        # a specific instruction seen.
@@ -588,16 +592,22 @@ class Python3Parser(PythonParser):
            self.addRule("""
                stmt ::= assign3_pypy
                stmt ::= assign2_pypy
                assign3_pypy ::= expr expr expr store store store
                assign2_pypy ::= expr expr store store
                return_if_lambda ::= RETURN_END_IF_LAMBDA
                stmt ::= conditional_lambda
                assign3_pypy ::= expr expr expr store store store
                assign2_pypy ::= expr expr store store
                return_if_lambda ::= RETURN_END_IF_LAMBDA
                stmt ::= conditional_lambda
                stmt ::= conditional_not_lambda
                conditional_lambda ::= expr jmp_false expr return_if_lambda
                                       return_lambda LAMBDA_MARKER
                conditional_not_lambda
                    ::= expr jmp_true expr return_if_lambda
                        return_lambda LAMBDA_MARKER
                """, nop_func)

        has_get_iter_call_function1 = False
        n = len(tokens)

        # Determine if we have an iteration CALL_FUNCTION_1.
        has_get_iter_call_function1 = False
        max_branches = 0
        for i, token in enumerate(tokens):
            if token == 'GET_ITER' and i < n-2 and self.call_fn_name(tokens[i+1]) == 'CALL_FUNCTION_1':
@@ -606,7 +616,6 @@ class Python3Parser(PythonParser):
            elif (token == 'GET_AWAITABLE' and i < n-3
                    and tokens[i+1] == 'LOAD_CONST' and tokens[i+2] == 'YIELD_FROM'):
                max_branches += 1
                seen_GET_AWAITABLE_YIELD_FROM = True
            if max_branches > 2:
                break

@@ -616,7 +625,7 @@ class Python3Parser(PythonParser):
            # Do a quick breakout before testing potentially
            # each of the dozen or so instruction in if elif.
            if (opname[:opname.find('_')] not in customize_instruction_basenames
                    or opname in custom_ops_seen):
                    or opname in custom_ops_processed):
                continue

            opname_base = opname[:opname.rfind('_')]
@@ -659,7 +668,7 @@ class Python3Parser(PythonParser):
                # FIXME: really we need a combination of dict_entry-like things.
                # It just so happens the most common case is not to mix
                # dictionary comprehensions with dictionary, elements
                if self.seen_LOAD_DICTCOMP:
                if 'LOAD_DICTCOMP' in self.seen_ops:
                    rule = 'dict ::= %s%s' % ('dict_comp ' * token.attr, opname)
                    self.addRule(rule, nop_func)
                rule = """
@@ -738,9 +747,9 @@ class Python3Parser(PythonParser):
                """
                self.addRule(rule, nop_func)

                self.custom_classfunc_rule(opname, token, customize,
                                           seen_LOAD_BUILD_CLASS,
                                           seen_GET_AWAITABLE_YIELD_FROM, tokens[i+1])
                self.custom_classfunc_rule(opname, token, customize, tokens[i+1])
                # Note: don't add to custom_ops_processed.

            elif opname_base == 'CALL_METHOD':
                # PyPy only - DRY with parse2

@@ -755,31 +764,31 @@ class Python3Parser(PythonParser):
                self.add_unique_rule(rule, opname, token.attr, customize)
            elif opname == 'CONTINUE':
                self.addRule('continue ::= CONTINUE', nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'CONTINUE_LOOP':
                self.addRule('continue ::= CONTINUE_LOOP', nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'DELETE_ATTR':
                self.addRule('del_stmt ::= expr DELETE_ATTR', nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'DELETE_DEREF':
                self.addRule("""
                    stmt ::= del_deref_stmt
                    del_deref_stmt ::= DELETE_DEREF
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'DELETE_SUBSCR':
                self.addRule("""
                    del_stmt ::= delete_subscr
                    delete_subscr ::= expr expr DELETE_SUBSCR
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'GET_ITER':
                self.addRule("""
                    expr ::= get_iter
                    attribute ::= expr GET_ITER
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'JUMP_IF_NOT_DEBUG':
                v = token.attr
                self.addRule("""
@@ -794,40 +803,42 @@ class Python3Parser(PythonParser):
                    LOAD_ASSERT expr CALL_FUNCTION_1
                    RAISE_VARARGS_1 COME_FROM,
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'LOAD_BUILD_CLASS':
                seen_LOAD_BUILD_CLASS = True
                self.custom_build_class_rule(opname, i, token, tokens, customize)
                # Note: don't add to custom_ops_processed.
            elif opname == 'LOAD_CLASSDEREF':
                # Python 3.4+
                self.addRule("expr ::= LOAD_CLASSDEREF", nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'LOAD_CLASSNAME':
                self.addRule("expr ::= LOAD_CLASSNAME", nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'LOAD_DICTCOMP':
                self.seen_LOAD_DICTCOMP = True
                if has_get_iter_call_function1:
                    rule_pat = ("dict_comp ::= LOAD_DICTCOMP %sMAKE_FUNCTION_0 expr "
                                "GET_ITER CALL_FUNCTION_1")
                    self.add_make_function_rule(rule_pat, opname, token.attr, customize)
                # listcomp is a custom Python3 rule
                pass
                custom_ops_processed.add(opname)
            elif opname == 'LOAD_ATTR':
                self.addRule("""
                    expr ::= attribute
                    attribute ::= expr LOAD_ATTR
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'LOAD_LISTCOMP':
                self.add_unique_rule("expr ::= listcomp", opname, token.attr, customize)
                custom_ops_processed.add(opname)
            elif opname == 'LOAD_SETCOMP':
                self.seen_LOAD_SETCOMP = True
                # Should this be generalized and put under MAKE_FUNCTION?
                if has_get_iter_call_function1:
                    self.addRule("expr ::= set_comp", nop_func)
                    rule_pat = ("set_comp ::= LOAD_SETCOMP %sMAKE_FUNCTION_0 expr "
                                "GET_ITER CALL_FUNCTION_1")
                    self.add_make_function_rule(rule_pat, opname, token.attr, customize)
                    pass
                custom_ops_processed.add(opname)
            elif opname == 'LOOKUP_METHOD':
                # A PyPy speciality - DRY with parse3
                self.addRule("""
@@ -835,12 +846,12 @@ class Python3Parser(PythonParser):
                    attribute ::= expr LOOKUP_METHOD
                    """,
                    nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname.startswith('MAKE_CLOSURE'):
                # DRY with MAKE_FUNCTION
                # Note: this probably doesn't handle kwargs properly

                if opname == 'MAKE_CLOSURE_0' and self.seen_LOAD_DICTCOMP:
                if opname == 'MAKE_CLOSURE_0' and 'LOAD_DICTCOMP' in self.seen_ops:
                    # Is there something general going on here?
                    # Note that 3.6+ doesn't do this, but we'll remove
                    # this rule in parse36.py
@@ -1074,25 +1085,25 @@ class Python3Parser(PythonParser):
                self.addRule("""
                    return_lambda ::= ret_expr RETURN_VALUE_LAMBDA
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'RAISE_VARARGS_0':
                self.addRule("""
                    stmt ::= raise_stmt0
                    raise_stmt0 ::= RAISE_VARARGS_0
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'RAISE_VARARGS_1':
                self.addRule("""
                    stmt ::= raise_stmt1
                    raise_stmt1 ::= expr RAISE_VARARGS_1
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname == 'RAISE_VARARGS_2':
                self.addRule("""
                    stmt ::= raise_stmt2
                    raise_stmt2 ::= expr expr RAISE_VARARGS_2
                    """, nop_func)
                custom_ops_seen.add(opname)
                custom_ops_processed.add(opname)
            elif opname_base in ('UNPACK_EX',):
                before_count, after_count = token.attr
                rule = 'unpack ::= ' + opname + ' store' * (before_count + after_count + 1)
@@ -1103,6 +1114,10 @@ class Python3Parser(PythonParser):
            elif opname_base == 'UNPACK_LIST':
                rule = 'unpack_list ::= ' + opname + ' store' * token.attr
                self.addRule(rule, nop_func)
                custom_ops_processed.add(opname)
                pass
            pass

        self.check_reduce['aug_assign1'] = 'AST'
        self.check_reduce['aug_assign2'] = 'AST'
        self.check_reduce['while1stmt'] = 'noAST'
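The bookkeeping change above replaces ad-hoc seen_XXX booleans with one pass that records every opcode name (and basename) present in the token stream, so "did we see X?" becomes a set-membership test. A tiny sketch with token kinds as plain strings:

# Sketch of the seen_ops bookkeeping above, with token kinds as plain strings.
tokens = ['LOAD_BUILD_CLASS', 'LOAD_CONST', 'MAKE_FUNCTION_0']
seen_ops = frozenset(tokens)
seen_op_basenames = frozenset(op[:op.rfind('_')] for op in seen_ops)

assert 'LOAD_BUILD_CLASS' in seen_ops        # replaces seen_LOAD_BUILD_CLASS = True
assert 'MAKE_FUNCTION' in seen_op_basenames  # basename strips the trailing _<n>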
@@ -15,6 +15,9 @@ class Python35Parser(Python34Parser):

    def p_35on(self, args):
        """

        pb_ja ::= POP_BLOCK JUMP_ABSOLUTE

        # The number of canned instructions in new statements is mind boggling.
        # I'm sure by the time Python 4 comes around these will be turned
        # into special opcodes
@@ -89,7 +92,7 @@ class Python35Parser(Python34Parser):
                           LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
                           POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
                           JUMP_ABSOLUTE END_FINALLY COME_FROM
                           for_block POP_BLOCK JUMP_ABSOLUTE
                           for_block pb_ja
                           else_suite COME_FROM_LOOP


@@ -140,17 +143,20 @@ class Python35Parser(Python34Parser):
        super(Python35Parser, self).customize_grammar_rules(tokens, customize)
        for i, token in enumerate(tokens):
            opname = token.kind
            # FIXME: I suspect this is wrong for 3.6 and 3.5, but
            # I haven't verified what the 3.7ish fix is
            if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
                self.addRule("expr ::= unmapexpr", nop_func)
                nargs = token.attr % 256
                map_unpack_n = "map_unpack_%s" % nargs
                rule = map_unpack_n + ' ::= ' + 'expr ' * (nargs)
                self.addRule(rule, nop_func)
                rule = "unmapexpr ::= %s %s" % (map_unpack_n, opname)
                self.addRule(rule, nop_func)
                call_token = tokens[i+1]
                rule = 'call ::= expr unmapexpr ' + call_token.kind
                self.addRule(rule, nop_func)
                if self.version < 3.7:
                    self.addRule("expr ::= unmapexpr", nop_func)
                    nargs = token.attr % 256
                    map_unpack_n = "map_unpack_%s" % nargs
                    rule = map_unpack_n + ' ::= ' + 'expr ' * (nargs)
                    self.addRule(rule, nop_func)
                    rule = "unmapexpr ::= %s %s" % (map_unpack_n, opname)
                    self.addRule(rule, nop_func)
                    call_token = tokens[i+1]
                    rule = 'call ::= expr unmapexpr ' + call_token.kind
                    self.addRule(rule, nop_func)
            elif opname == 'BEFORE_ASYNC_WITH':
                # Some Python 3.5+ async additions
                rules_str = """
@@ -199,10 +205,7 @@ class Python35Parser(Python34Parser):
            pass
        return

    def custom_classfunc_rule(self, opname, token, customize,
                              seen_LOAD_BUILD_CLASS,
                              seen_GET_AWAITABLE_YIELD_FROM,
                              *args):
    def custom_classfunc_rule(self, opname, token, customize, *args):
        args_pos, args_kw = self.get_pos_kw(token)

        # Additional exprs for * and ** args:
@@ -213,7 +216,7 @@ class Python35Parser(Python34Parser):
        nak = ( len(opname)-len('CALL_FUNCTION') ) // 3
        uniq_param = args_kw + args_pos

        if seen_GET_AWAITABLE_YIELD_FROM:
        if frozenset(('GET_AWAITABLE', 'YIELD_FROM')).issubset(self.seen_ops):
            rule = ('async_call ::= expr ' +
                    ('pos_arg ' * args_pos) +
                    ('kwarg ' * args_kw) +
@@ -242,10 +245,7 @@ class Python35Parser(Python34Parser):
            # zero or not in creating a template rule.
            self.add_unique_rule(rule, token.kind, args_pos, customize)
        else:
            super(Python35Parser, self).custom_classfunc_rule(opname, token, customize,
                                                              seen_LOAD_BUILD_CLASS,
                                                              seen_GET_AWAITABLE_YIELD_FROM,
                                                              *args)
            super(Python35Parser, self).custom_classfunc_rule(opname, token, customize, *args)

class Python35ParserSingle(Python35Parser, PythonParserSingle):
    pass
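To make the BUILD_MAP_UNPACK_WITH_CALL branch above concrete, here is what it generates for a hypothetical token with attr 2; the name of the call opcode that follows is illustrative only, it is simply whatever tokens[i+1].kind happens to be:

# Rules produced by the 3.5 branch above for attr == 2 (call opcode name is
# an illustrative placeholder).
opname, attr, call_kind = 'BUILD_MAP_UNPACK_WITH_CALL', 2, 'CALL_FUNCTION_VAR_KW_0'
nargs = attr % 256
map_unpack_n = "map_unpack_%s" % nargs
rules = [
    map_unpack_n + ' ::= ' + 'expr ' * nargs,
    "unmapexpr ::= %s %s" % (map_unpack_n, opname),
    'call ::= expr unmapexpr ' + call_kind,
]
assert rules == ['map_unpack_2 ::= expr expr ',
                 'unmapexpr ::= map_unpack_2 BUILD_MAP_UNPACK_WITH_CALL',
                 'call ::= expr unmapexpr CALL_FUNCTION_VAR_KW_0']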
@@ -35,13 +35,6 @@ class Python36Parser(Python35Parser):
# 3.6 redoes how return_closure works. FIXME: Isolate to LOAD_CLOSURE
return_closure ::= LOAD_CLOSURE DUP_TOP STORE_NAME RETURN_VALUE RETURN_LAST

stmt ::= conditional_lambda
conditional_lambda ::= expr jmp_false expr return_if_lambda
return_stmt_lambda LAMBDA_MARKER
return_stmt_lambda ::= ret_expr RETURN_VALUE_LAMBDA
return_if_lambda ::= RETURN_END_IF_LAMBDA

for_block ::= l_stmts_opt come_from_loops JUMP_BACK
come_from_loops ::= COME_FROM_LOOP*

@@ -55,6 +48,30 @@ class Python36Parser(Python35Parser):
jf_cf ::= JUMP_FORWARD COME_FROM
conditional ::= expr jmp_false expr jf_cf expr COME_FROM

async_for_stmt ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
JUMP_ABSOLUTE END_FINALLY COME_FROM
for_block POP_BLOCK
COME_FROM_LOOP

async_forelse_stmt ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
JUMP_ABSOLUTE END_FINALLY COME_FROM
for_block POP_BLOCK
else_suite COME_FROM_LOOP

# Adds a COME_FROM_ASYNC_WITH over 3.5
# FIXME: remove corresponding rule for 3.5?

@@ -91,9 +108,32 @@ class Python36Parser(Python35Parser):
"""

def customize_grammar_rules(self, tokens, customize):
# self.remove_rules("""
# """)
super(Python36Parser, self).customize_grammar_rules(tokens, customize)
self.remove_rules("""
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST MAKE_CLOSURE_0 expr GET_ITER CALL_FUNCTION_1
async_for_stmt ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK jump_except COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
JUMP_ABSOLUTE END_FINALLY COME_FROM
for_block POP_BLOCK JUMP_ABSOLUTE
COME_FROM_LOOP
async_forelse_stmt ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
JUMP_ABSOLUTE END_FINALLY COME_FROM
for_block pb_ja
else_suite COME_FROM_LOOP
""")
self.check_reduce['call_kw'] = 'AST'

@@ -107,7 +147,7 @@ class Python36Parser(Python35Parser):
"""
self.add_unique_doc_rules(rules_str, customize)
elif opname == 'MAKE_FUNCTION_8':
if self.seen_LOAD_DICTCOMP:
if 'LOAD_DICTCOMP' in self.seen_ops:
# Is there something general going on here?
rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST
@@ -115,7 +155,7 @@ class Python36Parser(Python35Parser):
GET_ITER CALL_FUNCTION_1
"""
self.addRule(rule, nop_func)
elif self.seen_LOAD_SETCOMP:
elif 'LOAD_SETCOMP' in self.seen_ops:
rule = """
set_comp ::= load_closure LOAD_SETCOMP LOAD_CONST
MAKE_FUNCTION_8 expr
@@ -166,9 +206,7 @@ class Python36Parser(Python35Parser):
self.add_unique_doc_rules(rules_str, customize)
elif opname.startswith('BUILD_MAP_UNPACK_WITH_CALL'):
v = token.attr
rule = ('build_map_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
'expr32 ' * int((v//32) % 32) +
'expr ' * (v % 32) + opname)
rule = 'build_map_unpack_with_call ::= %s%s' % ('expr ' * v, opname)
self.addRule(rule, nop_func)
elif opname.startswith('BUILD_TUPLE_UNPACK_WITH_CALL'):
v = token.attr
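
The BUILD_MAP_UNPACK_WITH_CALL customization above drops the old expr1024/expr32/expr bucketing in favor of simply repeating expr once per operand. A hedged sketch of the resulting rule string, assuming the customized token name carries an _<count> suffix as the startswith() test suggests:

```python
def build_map_unpack_rule(opname, attr):
    # e.g. ('BUILD_MAP_UNPACK_WITH_CALL_3', 3) ->
    # 'build_map_unpack_with_call ::= expr expr expr BUILD_MAP_UNPACK_WITH_CALL_3'
    return 'build_map_unpack_with_call ::= %s%s' % ('expr ' * attr, opname)

print(build_map_unpack_rule('BUILD_MAP_UNPACK_WITH_CALL_3', 3))
```
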
@@ -191,9 +229,7 @@ class Python36Parser(Python35Parser):
"""
self.addRule(rules_str, nop_func)

def custom_classfunc_rule(self, opname, token, customize,
possible_class_decorator,
seen_GET_AWAITABLE_YIELD_FROM, next_token):
def custom_classfunc_rule(self, opname, token, customize, next_token):

args_pos, args_kw = self.get_pos_kw(token)

@@ -205,7 +241,7 @@ class Python36Parser(Python35Parser):
nak = ( len(opname)-len('CALL_FUNCTION') ) // 3
uniq_param = args_kw + args_pos

if seen_GET_AWAITABLE_YIELD_FROM:
if frozenset(('GET_AWAITABLE', 'YIELD_FROM')).issubset(self.seen_ops):
rule = ('async_call ::= expr ' +
('pos_arg ' * args_pos) +
('kwarg ' * args_kw) +
@@ -220,51 +256,70 @@ class Python36Parser(Python35Parser):
rule = "call_kw36 ::= expr %s LOAD_CONST %s" % (values, opname)
self.add_unique_rule(rule, token.kind, token.attr, customize)
elif opname == 'CALL_FUNCTION_EX_KW':
self.addRule("""expr ::= call_ex_kw
expr ::= call_ex_kw2
expr ::= call_ex_kw3
expr ::= call_ex_kw4

call_ex_kw ::= expr expr build_map_unpack_with_call
CALL_FUNCTION_EX_KW
call_ex_kw2 ::= expr
build_tuple_unpack_with_call
build_map_unpack_with_call
CALL_FUNCTION_EX_KW
call_ex_kw3 ::= expr
build_tuple_unpack_with_call
expr
CALL_FUNCTION_EX_KW
# Note: this doesn't exist in 3.7 and later
self.addRule("""expr ::= call_ex_kw4
call_ex_kw4 ::= expr
expr
expr
CALL_FUNCTION_EX_KW
""",
nop_func)
if 'BUILD_MAP_UNPACK_WITH_CALL' in self.seen_op_basenames:
self.addRule("""expr ::= call_ex_kw
call_ex_kw ::= expr expr build_map_unpack_with_call
CALL_FUNCTION_EX_KW
""", nop_func)
if 'BUILD_TUPLE_UNPACK_WITH_CALL' in self.seen_op_basenames:
# FIXME: should this be parameterized by EX value?
self.addRule("""expr ::= call_ex_kw3
call_ex_kw3 ::= expr
build_tuple_unpack_with_call
expr
CALL_FUNCTION_EX_KW
""", nop_func)
if 'BUILD_MAP_UNPACK_WITH_CALL' in self.seen_op_basenames:
# FIXME: should this be parameterized by EX value?
self.addRule("""expr ::= call_ex_kw2
call_ex_kw2 ::= expr
build_tuple_unpack_with_call
build_map_unpack_with_call
CALL_FUNCTION_EX_KW
""", nop_func)

elif opname == 'CALL_FUNCTION_EX':
self.addRule("""
expr ::= call_ex
starred ::= expr
call_ex ::= expr starred CALL_FUNCTION_EX
""", nop_func)
if self.version > 3.6:
self.addRule("""
expr ::= call_ex_kw3
if self.version >= 3.6:
if 'BUILD_MAP_UNPACK_WITH_CALL' in self.seen_ops:
self.addRule("""
expr ::= call_ex_kw
call_ex_kw3 ::= expr
build_tuple_unpack_with_call
expr
CALL_FUNCTION_EX
call_ex_kw ::= expr expr
build_map_unpack_with_call CALL_FUNCTION_EX
""", nop_func)
if 'BUILD_TUPLE_UNPACK_WITH_CALL' in self.seen_ops:
self.addRule("""
expr ::= call_ex_kw3
call_ex_kw3 ::= expr
build_tuple_unpack_with_call
%s
CALL_FUNCTION_EX
""" % 'expr '* token.attr, nop_func)
pass

# FIXME: Is this right?
self.addRule("""
expr ::= call_ex_kw4
call_ex_kw4 ::= expr
expr
expr
CALL_FUNCTION_EX
""", nop_func)
pass
else:
super(Python36Parser, self).custom_classfunc_rule(opname, token,
customize,
possible_class_decorator,
seen_GET_AWAITABLE_YIELD_FROM,
next_token)
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token)

def reduce_is_invalid(self, rule, ast, tokens, first, last):
invalid = super(Python36Parser,
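
The call_ex_kw* rules above cover the CALL_FUNCTION_EX call shapes that CPython 3.6 emits, and they are now only registered when the matching BUILD_*_UNPACK_WITH_CALL opcodes were actually seen. A quick way to check which unpack opcodes a given call shape produces is to disassemble small examples; the comments describe what CPython 3.6 typically emits (later versions compile these calls differently):

```python
import dis

def ex_kw(f, a, k):
    return f(*a, **k)        # usually just CALL_FUNCTION_EX

def ex_kw3(f, x, a, k):
    return f(x, *a, **k)     # adds BUILD_TUPLE_UNPACK_WITH_CALL on 3.6

def ex_kw_maps(f, k1, k2):
    return f(**k1, **k2)     # adds BUILD_MAP_UNPACK_WITH_CALL on 3.6

for fn in (ex_kw, ex_kw3, ex_kw_maps):
    print('---', fn.__name__)
    dis.dis(fn)
```
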
@@ -32,12 +32,36 @@ class Python37Parser(Python36Parser):
stmt ::= import37
import37 ::= import POP_TOP

async_for_stmt ::= SETUP_LOOP expr
GET_AITER
SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
for_block
COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP POP_BLOCK
COME_FROM_LOOP

async_forelse_stmt ::= SETUP_LOOP expr
GET_AITER
SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
END_FINALLY COME_FROM
for_block
COME_FROM
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP POP_BLOCK
else_suite COME_FROM_LOOP

# Is there a pattern here?
attributes ::= IMPORT_FROM ROT_TWO POP_TOP IMPORT_FROM

# FIXME: generalize and specialize
attribute37 ::= LOAD_FAST LOAD_METHOD
attribute37 ::= LOAD_NAME LOAD_METHOD
attribute37 ::= expr LOAD_METHOD
expr ::= attribute37

# FIXME: generalize and specialize
@@ -45,6 +69,19 @@ class Python37Parser(Python36Parser):
"""

def customize_grammar_rules(self, tokens, customize):
self.remove_rules("""
async_forelse_stmt ::= SETUP_LOOP expr
GET_AITER
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
YIELD_FROM
store
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
JUMP_ABSOLUTE END_FINALLY COME_FROM
for_block POP_BLOCK
else_suite COME_FROM_LOOP
""")
super(Python37Parser, self).customize_grammar_rules(tokens, customize)

class Python37ParserSingle(Python37Parser, PythonParserSingle):
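
The 3.7 grammar above removes the 3.6-era async_for_stmt/async_forelse_stmt rules and substitutes ones without the LOAD_CONST YIELD_FROM pair right after GET_AITER, reflecting how 3.7 changed the bytecode for async for. Disassembling a small coroutine under 3.6 and then under 3.7 shows the difference (this sketch needs Python 3.5+ to run, and both interpreters to see the contrast):

```python
import dis
import sys

async def consume(aiter):
    async for item in aiter:
        print(item)

print(sys.version)
dis.dis(consume)   # compare the instructions following GET_AITER across versions
```
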
@@ -39,7 +39,7 @@ else:

# The byte code versions we support.
# Note: these all have to be floats
PYTHON_VERSIONS = frozenset((1.5,
PYTHON_VERSIONS = frozenset((1.4, 1.5,
2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7))

@@ -121,7 +121,10 @@ class Scanner(object):

# Offset: lineno pairs, only for offsets which start line.
# Locally we use list for more convenient iteration using indices
linestarts = list(self.opc.findlinestarts(code_obj))
if self.version > 1.4:
linestarts = list(self.opc.findlinestarts(code_obj))
else:
linestarts = [[0, 1]]
self.linestarts = dict(linestarts)

# 'List-map' which shows line number of current op and offset of
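
The scanner change above guards the line-number table lookup: bytecode older than 1.5 has no usable line-number information here, so everything is attributed to line 1. A standalone analogue using the stdlib dis module instead of xdis:

```python
import dis

def offset_to_line(code_obj, has_line_table=True):
    # Mirrors the fallback above: with no line-number table, pretend the
    # whole code object is on line 1.
    if has_line_table:
        linestarts = list(dis.findlinestarts(code_obj))
    else:
        linestarts = [(0, 1)]
    return dict(linestarts)

print(offset_to_line(offset_to_line.__code__))
```
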
36
uncompyle6/scanners/scanner14.py
Normal file
@@ -0,0 +1,36 @@
# Copyright (c) 2018 by Rocky Bernstein
"""
Python 1.4 bytecode decompiler massaging.

This massages tokenized 1.4 bytecode to make it more amenable for
grammar parsing.
"""

import uncompyle6.scanners.scanner15 as scan
# from uncompyle6.scanners.scanner26 import ingest as ingest26

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_14
JUMP_OPS = opcode_14.JUMP_OPS

# We base this off of 1.5 instead of the other way around
# because we cleaned things up this way.
# The history is that 2.7 support is the cleanest,
# then from that we got 2.6 and so on.
class Scanner14(scan.Scanner15):
def __init__(self, show_asm=False):
scan.Scanner15.__init__(self, show_asm)
self.opc = opcode_14
self.opname = opcode_14.opname
self.version = 1.4
self.genexpr_name = '<generator expression>'
return

# def ingest22(self, co, classname=None, code_objects={}, show_asm=None):
# tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
# tokens = [t for t in tokens if t.kind != 'SET_LINENO']

# # for t in tokens:
# # print(t)
#
# return tokens, customize
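
The new Scanner14 is deliberately thin: it reuses the 1.5 scanner and only swaps in xdis's 1.4 opcode table. A hedged usage sketch, assuming uncompyle6 and a recent xdis are installed and that uncompyle6.scanner.get_scanner dispatches on the float version 1.4 the way it does for the other supported versions:

```python
from uncompyle6.scanner import get_scanner

scanner = get_scanner(1.4)                 # expected to return a Scanner14
print(type(scanner).__name__, scanner.version)

# Tokens for the grammar would then come from scanner.ingest(code_obj), where
# code_obj is a Python 1.4 code object loaded with xdis (e.g. from a 1.4 .pyc).
```
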
@@ -1,6 +1,6 @@
# Copyright (c) 2016-2017 by Rocky Bernstein
# Copyright (c) 2016-2018 by Rocky Bernstein
"""
Python 1.5 bytecode decompiler scanner.
Python 1.5 bytecode decompiler massaging.

This massages tokenized 1.5 bytecode to make it more amenable for
grammar parsing.
@@ -25,3 +25,17 @@ class Scanner15(scan.Scanner21):
self.version = 1.5
self.genexpr_name = '<generator expression>'
return

def ingest(self, co, classname=None, code_objects={}, show_asm=None):
"""
Pick out tokens from an uncompyle6 code object, and transform them,
returning a list of uncompyle6 Token's.

The transformations are made to assist the deparsing grammar.
"""
tokens, customize = scan.Scanner21.ingest(self, co, classname, code_objects, show_asm)
for t in tokens:
if t.op == self.opc.UNPACK_LIST:
t.kind = 'UNPACK_LIST_%d' % t.attr
pass
return tokens, customize
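
The new Scanner15.ingest() specializes UNPACK_LIST tokens by folding the operand count into the token kind, so the grammar can treat UNPACK_LIST_2, UNPACK_LIST_3, ... as distinct terminals. A toy illustration of the same transformation (the Token type and opcode number here are stand-ins, not uncompyle6's):

```python
from collections import namedtuple

Token = namedtuple('Token', 'kind op attr')
UNPACK_LIST = 92   # hypothetical opcode number, for illustration only

def specialize_unpack_list(tokens):
    out = []
    for t in tokens:
        if t.op == UNPACK_LIST:
            # Fold the count into the name: UNPACK_LIST with attr 2 -> UNPACK_LIST_2
            t = t._replace(kind='UNPACK_LIST_%d' % t.attr)
        out.append(t)
    return out

print(specialize_unpack_list([Token('UNPACK_LIST', UNPACK_LIST, 2)]))
```
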
@@ -1,6 +1,6 @@
# Copyright (c) 2016-2017 by Rocky Bernstein
# Copyright (c) 2016-2018 by Rocky Bernstein
"""
Python 2.1 bytecode scanner/deparser
Python 2.1 bytecode massaging.

This massages tokenized 2.1 bytecode to make it more amenable for
grammar parsing.

@@ -1,6 +1,6 @@
# Copyright (c) 2016-2017 by Rocky Bernstein
# Copyright (c) 2016-2018 by Rocky Bernstein
"""
Python 2.2 bytecode ingester.
Python 2.2 bytecode massaging.

This massages tokenized 2.2 bytecode to make it more amenable for
grammar parsing.

@@ -1,6 +1,6 @@
# Copyright (c) 2016-2017 by Rocky Bernstein
# Copyright (c) 2016-2018 by Rocky Bernstein
"""
Python 2.3 bytecode scanner/deparser
Python 2.3 bytecode massaging.

This massages tokenized 2.3 bytecode to make it more amenable for
grammar parsing.

@@ -1,6 +1,6 @@
# Copyright (c) 2016-2017 by Rocky Bernstein
"""
Python 2.4 bytecode scanner/deparser
Python 2.4 bytecode massaging.

This massages tokenized 2.7 bytecode to make it more amenable for
grammar parsing.

@@ -1,6 +1,6 @@
# Copyright (c) 2015-2017 by Rocky Bernstein
# Copyright (c) 2015-2018 by Rocky Bernstein
"""
Python 2.5 bytecode scanner/deparser
Python 2.5 bytecode massaging.

This overlaps Python's 2.5's dis module, but it can be run from
Python 3 and other versions of Python. Also, we save token

@@ -161,6 +161,7 @@ class Scanner3(Scanner):
self.varargs_ops = frozenset(varargs_ops)
# FIXME: remove the above in favor of:
# self.varargs_ops = frozenset(self.opc.hasvargs)
return

def ingest(self, co, classname=None, code_objects={}, show_asm=None):
"""
@@ -207,9 +207,14 @@ TABLE_DIRECT = {
'conditional': ( '%p if %p else %p', (2, 27), (0, 27), (4, 27) ),
'conditional_true': ( '%p if 1 else %p', (0, 27), (2, 27) ),
'ret_cond': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27) ),
'conditionalnot': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27) ),
'conditional_not': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27) ),
'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27) ),
'conditional_lambda': ( '%c if %c else %c', 2, 0, 4),
'conditional_lambda':
( '%c if %c else %c',
(2, 'expr'), 0, 4 ),
'conditional_not_lambda':
( '%c if not %c else %c',
(2, 'expr'), 0, 4 ),

'compare_single': ( '%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19) ),
'compare_chained': ( '%p %p', (0, 29), (1, 30)),
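
TABLE_DIRECT entries map a nonterminal to a format template plus the child positions (and, in the newer entries, an expected node type or a precedence bound) that fill each %c/%p slot. A toy rendering of the conditional_lambda entry above, ignoring type checks and precedence:

```python
def render_conditional_lambda(children_text):
    # '%c if %c else %c' filled from children at positions 2, 0 and 4,
    # i.e. "<then> if <condition> else <else>".
    template, positions = '%c if %c else %c', (2, 0, 4)
    out = template
    for pos in positions:
        out = out.replace('%c', children_text[pos], 1)
    return out

# Pretend the children of a conditional_lambda node deparse to these strings:
kids = ['cond', '<jmp_false>', 'x', '<return_if_lambda>', 'y', '<LAMBDA_MARKER>']
print(render_conditional_lambda(kids))   # x if cond else y
```
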
@@ -325,67 +330,65 @@ MAP = {
# tend to have parenthesis around them. Things at the bottom
# of the list will tend not to have parenthesis around them.
PRECEDENCE = {
'list': 0,
'dict': 0,
'unary_convert': 0,
'dict_comp': 0,
'set_comp': 0,
'set_comp_expr': 0,
'list_comp': 0,
'generator_exp': 0,
'list': 0,
'dict': 0,
'unary_convert': 0,
'dict_comp': 0,
'set_comp': 0,
'set_comp_expr': 0,
'list_comp': 0,
'generator_exp': 0,

'attribute': 2,
'subscript': 2,
'subscript2': 2,
'slice0': 2,
'slice1': 2,
'slice2': 2,
'slice3': 2,
'buildslice2': 2,
'buildslice3': 2,
'call': 2,
'attribute': 2,
'subscript': 2,
'subscript2': 2,
'slice0': 2,
'slice1': 2,
'slice2': 2,
'slice3': 2,
'buildslice2': 2,
'buildslice3': 2,
'call': 2,

'BINARY_POWER': 4,
'BINARY_POWER': 4,

'unary_expr': 6,
'unary_expr': 6,

'BINARY_MULTIPLY': 8,
'BINARY_DIVIDE': 8,
'BINARY_TRUE_DIVIDE': 8,
'BINARY_FLOOR_DIVIDE': 8,
'BINARY_MODULO': 8,
'BINARY_MULTIPLY': 8,
'BINARY_DIVIDE': 8,
'BINARY_TRUE_DIVIDE': 8,
'BINARY_FLOOR_DIVIDE': 8,
'BINARY_MODULO': 8,

'BINARY_ADD': 10,
'BINARY_SUBTRACT': 10,
'BINARY_ADD': 10,
'BINARY_SUBTRACT': 10,

'BINARY_LSHIFT': 12,
'BINARY_RSHIFT': 12,
'BINARY_LSHIFT': 12,
'BINARY_RSHIFT': 12,

'BINARY_AND': 14,
'BINARY_AND': 14,
'BINARY_XOR': 16,
'BINARY_OR': 18,

'BINARY_XOR': 16,
'compare': 20,
'unary_not': 22,
'and': 24,
'ret_and': 24,

'BINARY_OR': 18,
'or': 26,
'ret_or': 26,

'compare': 20,
'conditional': 28,
'conditional_lamdba': 28,
'conditional_not_lamdba': 28,
'conditionalnot': 28,
'ret_cond': 28,
'ret_cond_not': 28,

'unary_not': 22,
'_mklambda': 30,

'and': 24,
'ret_and': 24,

'or': 26,
'ret_or': 26,

'conditional': 28,
'conditional_lamdba': 28,
'conditionalnot': 28,
'ret_cond': 28,
'ret_cond_not': 28,

'_mklambda': 30,
'yield': 101,
'yield_from': 101
'yield': 101,
'yield_from': 101
}

ASSIGN_TUPLE_PARAM = lambda param_name: \
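
The PRECEDENCE table assigns larger numbers to constructs that bind more loosely; when such a node is rendered inside a slot whose %p bound is smaller, the deparser wraps it in parentheses. A toy reading of that rule using a few of the values above:

```python
PRECEDENCE = {'or': 26, 'and': 24, 'compare': 20, 'BINARY_ADD': 10, 'call': 2}

def maybe_parenthesize(kind, text, surrounding_bound):
    # Parenthesize a child that binds more loosely than its surroundings allow.
    if PRECEDENCE.get(kind, 0) > surrounding_bound:
        return '(%s)' % text
    return text

# compare_single renders its operands with a %p bound of 19 (see TABLE_DIRECT),
# so an 'or' operand gets parentheses while an addition does not:
print(maybe_parenthesize('or', 'a or b', 19) + ' < c')         # (a or b) < c
print(maybe_parenthesize('BINARY_ADD', 'a + b', 19) + ' < c')  # a + b < c
```
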
Some files were not shown because too many files have changed in this diff.