Mirror of https://github.com/rocky/python-uncompyle6.git
Compare commits: release-py ... release-py (95 commits)
1462a8beb0
f877e65919
155031a7c4
c81b40b43b
78898ed187
ef03d78c4d
7fc7e083c3
d41a858f80
6dd0ad0810
9368b63a2f
da06d83a87
6fb5808ff0
0c3db340fa
925b6667d7
b8547346b7
0aa7a7c223
cf5445c202
bc8c38ee58
4cd81dab61
4f4b628842
ff50a7f37b
85a49aec2f
9f2c7352e7
e9cf370e11
90ac8a463d
0e64111195
fd84325e4f
ddc00edd42
4259963859
4905cc6bb0
f008b8f411
2e81ee5d2e
50e59a37c1
5c8f93b735
88ef4baca8
6ab711baab
9e05750537
5c662b334e
56b2e17e30
94038151f4
b9281c79be
51dec051df
f5ac06013f
06bbacef45
bfdc6529a0
1ed389ce61
19f2e1277b
48b251273a
6290311143
947d619c77
908d313204
38dffa3290
c8747cc899
8a705a70f5
92d562e600
c91b5e1164
f8fd474b55
30756c52b2
74e280171b
df0207eb90
07a8ae9541
5d6872bcd1
c03cdf1a49
01ad91b519
ccd42077c1
ca3f822c81
bc5f43ab05
1896c40202
8278c72a6f
3b0a5aab16
a74890d388
5d24367ef6
1da2118e13
cad1325a90
61534ceed5
9f66694056
2bdfd76635
01f2f6578b
67e8f5d1a7
02b1554da3
fac5d31f34
d42858cae4
2a76013ed5
9e815d8d79
def9e1676b
35fcb1edf1
ffbce9cb77
dc1971f559
cef61904c7
778f92b6f4
a8260edded
c662cb1df2
681bbd616b
e809ade6e1
d080b4402d
11  .gitignore (vendored)
@@ -1,11 +1,15 @@
*.pyo
*.pyc
*.pyo
*_dis
*~
/.cache
/.eggs
/.hypothesis
/.idea
/.pytest_cache
/.python-version
/.tox
/.venv*
/README
/__pkginfo__.pyc
/dist
@@ -14,9 +18,6 @@
/tmp
/uncompyle6.egg-info
/unpyc
ChangeLog
__pycache__
build
/.venv*
/.idea
/.hypothesis
ChangeLog

@@ -57,6 +57,19 @@ disassembler called `pydisasm`.

### Semantic equivalence vs. exact source code

Consider how Python compiles something like "(x*y) + 5". Early on,
Python creates an "abstract syntax tree" (AST) for this. It is
"abstract" in the sense that unimportant, redundant, or unnecessary
items have been removed. Here, that means any notion that you wrote
"x*y" in parentheses is lost, since in this context the parentheses are
unneeded. Also lost is the fact that the multiplication didn't have
spaces around it while the addition did. It should not come as a
surprise, then, that the bytecode derived from the AST also has
no notion of such possible variation. Generally this kind of thing
isn't noticed, since the Python community has laid out a very rigid set
of formatting guidelines and has largely beaten the community into
compliance.
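This loss of surface detail is easy to see with the standard library alone; the sketch below (plain CPython 3, nothing project-specific) shows that the parenthesized and unparenthesized spellings produce the same AST and the same bytecode:

```python
import ast

# The parentheses and the spacing around "*" are purely lexical details;
# neither survives into the abstract syntax tree.
tree_a = ast.dump(ast.parse("(x*y) + 5", mode="eval"))
tree_b = ast.dump(ast.parse("x * y + 5", mode="eval"))
assert tree_a == tree_b

# The bytecode compiled from that AST is likewise identical on CPython 3.
code_a = compile("(x*y) + 5", "<example>", "eval")
code_b = compile("x * y + 5", "<example>", "eval")
assert code_a.co_code == code_b.co_code
```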
|
Almost all versions of Python can perform some sort of code
improvement that can't be undone. In earlier versions of Python it is
rare; in later Python versions, it is more common.
@@ -66,7 +79,7 @@ If the code emitted is semantically equivalent, then this isn't a bug.

For example the code might be

```python
if a:
    if b:
        x = 1
@@ -74,7 +87,7 @@ if a:

and we might produce:

```python
if a and b:
    x = 1
```
@@ -87,24 +100,35 @@ else:

```

may come out as `elif`, or vice versa.


As mentioned in the README, it is possible that Python changes what
you write to be more efficient. For example, for:

```python
if True:
    x = 5
```

Python will generate code like:

```python
x = 5
```

Even more extreme, if your code is:

```python
if False:
    x = 1
    y = 2
    # ...
```

Python will eliminate the entire "if" statement.

So just because the text isn't the same does not
necessarily mean there's a bug.
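When you suspect one of these optimizations rather than a decompilation bug, comparing disassemblies with the standard `dis` module usually settles it. A minimal sketch (the exact instructions printed and the folding behavior vary across CPython versions):

```python
import dis

# On recent CPython 3.x the constant test is folded away, so "if True:"
# disassembles to the same instructions as the bare assignment, and the
# body of "if False:" never appears at all.
dis.dis(compile("if True:\n    x = 5\n", "<example>", "exec"))
dis.dis(compile("x = 5\n", "<example>", "exec"))
dis.dis(compile("if False:\n    x = 1\n    y = 2\n", "<example>", "exec"))
```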
58  NEWS
@@ -1,3 +1,43 @@
uncompyle6 3.1.0 2018-03-21 Equinox

- Add code_deparse_with_offset() fragment function.
- Correct parameter call in fragment deparse_code()
- Lots of 3.6, 3.x, and 2.7 bug fixes.
  About 5% of 3.6 files fail to parse now, but
  the semantics still leave much to be desired.

uncompyle6 3.0.1 2018-02-17

- All Python 2.6.9 standard library files weakly verify
- Many 3.6 fixes. 84% of the first 200 standard library files weakly compile.
  One more big push is needed to get the remaining ones to compile
- Many decompilation fixes for other Python versions
- Add more to the test framework
- Add more tests that target previously existing bugs more completely
- Sync recent license changes in metadata

uncompyle6 3.0.0 2018-02-17

- deparse_code() and lookalikes from the various semantic actions are
  now deprecated. Instead use the new API code_deparse(), which makes the
  version optional and bundles debug options into a dictionary.
- License changed to GPL3.
- Many Python 3.6 fixes, especially around handling EXTENDED_ARG.
  Due to the reduction in operand size for JUMPs, there are many
  more EXTENDED_ARG instructions which can be the targets
  of jumps, and that messes up the peephole-like analysis that is
  done for control flow, since we don't have something better in place.
- Code has been reorganized to be more instruction-namedtuple based where it
  had been more bytecode-array based. There was, and still is, code that has
  magic numbers to advance instructions or to pick out operands.
- Bug fixes in numerous other Python versions
- Instruction display improved
- Keep global statements in fixed order (from wangym5106)

A bit more work is still needed for 3.6, especially in the area of
function calls and definitions.

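The 3.0.0 entry above describes the new code_deparse() entry point only in outline. Below is a minimal sketch of a call; the module path and keyword names are assumptions inferred from this NEWS text rather than checked against the release, so treat it as illustrative only:

```python
import sys

# Assumed import location; earlier releases exposed deparse_code() instead.
from uncompyle6.semantics.pysource import code_deparse

def decompile_code(co):
    # The bytecode version is now optional, and the debugging switches travel
    # in a single dictionary (the keys below are assumptions).
    return code_deparse(co, out=sys.stdout,
                        debug_opts={"asm": False, "tree": False, "grammar": False})

decompile_code(decompile_code.__code__)
```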
uncompyle6 2.16.0 2018-02-17

- API additions:
@@ -6,7 +46,7 @@ uncompyle6 2.16.0 2018-02-17
|
||||
- Better 2.7 end_if and COME_FROM determination
|
||||
- Fix up 3.6+ CALL_FUNCTION_EX
|
||||
- Misc pydisasm fixes
|
||||
- Wierd comprehension bug seen via new loctraceback
|
||||
- Weird comprehension bug seen via new loctraceback
|
||||
- Fix Python 3.5+ CALL_FUNCTION_VAR and BUILD_LIST_UNPACK in call; with this
|
||||
we can can handle 3.5+ f(a, b, *c, *d, *e) now
|
||||
|
||||
@@ -59,7 +99,7 @@ Decompilation bug fixes, mostly 3.6 and pre 2.7
|
||||
- limit pypy customization to pypy
|
||||
- Add addr fields in COME_FROMS
|
||||
- Allow use of full instructions in parser reduction routines
|
||||
- Reduce grammar in Pythion 3 by specialization more to specific
|
||||
- Reduce grammar in Python 3 by specialization more to specific
|
||||
Python versions
|
||||
- Match Python AST names more closely when possible
|
||||
|
||||
@@ -77,7 +117,7 @@ uncompyle6 2.14.0 2017-11-26 johnnybamazing
|
||||
and remove used grammar rules
|
||||
- Fix a number of bytecode decompile problems
|
||||
(many more remain)
|
||||
- Add stdlib/runtests.sh for even more rigourous testing
|
||||
- Add stdlib/runtests.sh for even more rigorous testing
|
||||
|
||||
uncompyle6 2.13.3 2017-11-13
|
||||
|
||||
@@ -101,7 +141,7 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
|
||||
added to assist here. Ignoring errors may be okay because the fragment parser often just needs,
|
||||
well, *fragments*.
|
||||
- Distinguish RETURN_VALUE from RETURN_END_IF in exception bodies better in 3.6
|
||||
- bug in 3.x language changes: import queue va import Queue
|
||||
- bug in 3.x language changes: import queue via import Queue
|
||||
- reinstate some bytecode tests since decompiling has gotten better
|
||||
- Revise how to report a bug
|
||||
|
||||
@@ -136,8 +176,8 @@ uncompyle6 2.11.4 2017-08-15
|
||||
|
||||
* scanner and parser now allow 3-part version string lookups,
|
||||
e.g. 2.7.1 We allow a float here, but if passed a string like '2.7'. or
|
||||
* unpin 3.5.1. xdis 3.5.4 has been releasd and fixes the problems we had. Use that.
|
||||
* some routnes here moved to xdis. Use the xdis version
|
||||
* unpin 3.5.1. xdis 3.5.4 has been release and fixes the problems we had. Use that.
|
||||
* some routines here moved to xdis. Use the xdis version
|
||||
* README.rst: Link typo Name is trepan2 now not trepan
|
||||
* xdis-forced change adjust for COMPARE_OP "is-not" in
|
||||
semanatic routines. We need "is not".
|
||||
@@ -233,9 +273,9 @@ uncompyle6 2.9.8 2016-12-16
|
||||
- fix bug in --verify option
|
||||
- DRY (a little) control-flow detection
|
||||
- fix syntax in tuples with one element
|
||||
- if AST rule inheritence in Python 2.5
|
||||
- if AST rule inheritance in Python 2.5
|
||||
- NAME_MODULE removal for Python <= 2.4
|
||||
- verifycall fixes for Python <= 2.4
|
||||
- verify call fixes for Python <= 2.4
|
||||
- more Python lint
|
||||
|
||||
uncompyle6 2.9.7 2016-12-16
|
||||
@@ -271,7 +311,7 @@ uncompyle6 2.9.6 2016-11-20
|
||||
in the results.
|
||||
- better control flow debugging output
|
||||
- Python 2 and 3 detect structure code is more similar
|
||||
- Handle Docstrings with embedded tiple quotes (""")
|
||||
- Handle Docstrings with embedded triple quotes (""")
|
||||
|
||||
uncompyle6 2.9.5 2016-11-13
|
||||
|
||||
|
@@ -35,8 +35,6 @@ classifiers = ['Development Status :: 5 - Production/Stable',
|
||||
'Programming Language :: Python :: 2.5',
|
||||
'Programming Language :: Python :: 2.6',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3.1',
|
||||
'Programming Language :: Python :: 3.2',
|
||||
'Programming Language :: Python :: 3.3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
@@ -56,8 +54,8 @@ entry_points = {
|
||||
]}
|
||||
ftp_url = None
|
||||
install_requires = ['spark-parser >= 1.8.5, < 1.9.0',
|
||||
'xdis >= 3.6.9, < 3.7.0']
|
||||
license = 'MIT'
|
||||
'xdis >= 3.7.0, < 3.8.0']
|
||||
license = 'GPL3'
|
||||
mailing_list = 'python-debugger@googlegroups.com'
|
||||
modname = 'uncompyle6'
|
||||
py_modules = None
|
||||
|
@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
|
||||
echo "This script should be *sourced* rather than run directly through bash"
|
||||
exit 1
|
||||
fi
|
||||
export PYVERSIONS='3.5.3 3.6.3 2.6.9 3.3.6 2.7.14 3.4.2'
|
||||
export PYVERSIONS='3.5.5 3.6.4 2.6.9 3.3.7 2.7.14 3.4.8'
|
||||
|
28  admin-tools/run-pyenvlib-test-all.sh (Executable file)
@@ -0,0 +1,28 @@
|
||||
#!/bin/bash
|
||||
# Runs test_pyenvlib.py on all Python versions in $PYVERSIONS, using master.
|
||||
function finish {
|
||||
cd $owd
|
||||
}
|
||||
|
||||
# FIXME put some of the below in a common routine
|
||||
owd=$(pwd)
|
||||
trap finish EXIT
|
||||
|
||||
cd $(dirname ${BASH_SOURCE[0]})
|
||||
if ! source ./pyenv-newer-versions ; then
|
||||
exit $?
|
||||
fi
|
||||
if ! source ./setup-master.sh ; then
|
||||
exit $?
|
||||
fi
|
||||
cd ../test
|
||||
for version in $PYVERSIONS; do
|
||||
if ! pyenv local $version ; then
|
||||
exit $?
|
||||
fi
|
||||
echo "====== Running test_pyenvlib.py on $version ====="
|
||||
if ! python ./test_pyenvlib.py --weak-verify --max 800 --${version} ; then
|
||||
exit $?
|
||||
fi
|
||||
echo "------ Done test_pyenvlib.py on $version -----"
|
||||
done
|
@@ -1,5 +1,5 @@
|
||||
#!/bin/bash
|
||||
PYTHON_VERSION=3.6.3
|
||||
PYTHON_VERSION=3.6.4
|
||||
|
||||
# FIXME put some of the below in a common routine
|
||||
function finish {
|
||||
|
@@ -53,6 +53,7 @@ check-3.4: check-bytecode check-3.4-ok check-2.7-ok
|
||||
#: Run working tests from Python 3.5
|
||||
check-3.5: check-bytecode
|
||||
$(PYTHON) test_pythonlib.py --bytecode-3.5 --weak-verify $(COMPILE)
|
||||
$(PYTHON) test_pythonlib.py --bytecode-3.5-run --verify-run
|
||||
|
||||
#: Run working tests from Python 3.6
|
||||
check-3.6: check-bytecode
|
||||
|
BIN  test/bytecode_2.5/08_if_while_else.pyc (Normal file; binary not shown)
BIN  test/bytecode_2.6/02_while1_if_and.pyc (Normal file; binary not shown)
BIN  test/bytecode_2.6/08_distbug.pyc (Normal file; binary not shown)
BIN  test/bytecode_2.7/01_rel_import.pyc (Normal file; binary not shown)
Binary file not shown.
BIN  test/bytecode_2.7_run/02_assert.pyc (Normal file; binary not shown)
BIN  test/bytecode_3.1/05_dict_comp.pyc (Normal file; binary not shown)
Binary file not shown.
Binary file not shown.
BIN  test/bytecode_3.3_run/05_nonlocal.pyc (Normal file; binary not shown)
BIN  test/bytecode_3.4/05_while1_if_continue.pyc (Normal file; binary not shown)
Binary file not shown.
BIN  test/bytecode_3.5_run/05_dict_comp.pyc (Normal file; binary not shown)
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN  test/bytecode_3.6/04_class_kwargs.pyc (Normal file; binary not shown)
Binary file not shown.
BIN  test/bytecode_3.6/05_36lambda.pyc (Normal file; binary not shown)
Binary file not shown.
BIN  test/bytecode_3.6/05_if_and_comp.pyc (Normal file; binary not shown)
BIN  test/bytecode_3.6/05_while_and_if.pyc (Normal file; binary not shown)
Binary file not shown.
BIN  test/bytecode_3.6_run/01_fstring.pyc (Normal file; binary not shown)
BIN  test/bytecode_3.6_run/03_try_return_except.pyc (Normal file; binary not shown)
65  test/run-and-email.sh (Executable file)
@@ -0,0 +1,65 @@
|
||||
#!/bin/bash
|
||||
|
||||
function displaytime {
|
||||
printf "ran in "
|
||||
local T=$1
|
||||
local D=$((T/60/60/24))
|
||||
local H=$((T/60/60%24))
|
||||
local M=$((T/60%60))
|
||||
local S=$((T%60))
|
||||
(( $D > 0 )) && printf '%d days ' $D
|
||||
(( $H > 0 )) && printf '%d hours ' $H
|
||||
(( $M > 0 )) && printf '%d minutes ' $M
|
||||
(( $D > 0 || $H > 0 || $M > 0 )) && printf 'and '
|
||||
printf '%d seconds\n' $S
|
||||
}
|
||||
|
||||
PYVERSION=${PYVERSION:-"3.5.5 2.7.14 3.4.8 2.6.9"}
|
||||
# PYVERSION=${PYVERSION:-"3.5.5"}
|
||||
|
||||
USER=${USER:-rocky}
|
||||
EMAIL=${EMAIL:-rb@dustyfeet.com}
|
||||
MAX_TESTS=${MAX_TESTS:-800}
|
||||
typeset -i RUN_STARTTIME=$(date +%s)
|
||||
|
||||
for VERSION in $PYVERSION ; do
|
||||
typeset -i rc=0
|
||||
LOGFILE=/tmp/pyenvlib-$VERSION-$$.log
|
||||
|
||||
if [[ $VERSION == '3.5.5' ]] ; then
|
||||
MAX_TESTS=224
|
||||
else
|
||||
MAX_TESTS=800
|
||||
fi
|
||||
|
||||
if ! pyenv local $VERSION ; then
|
||||
rc=1
|
||||
else
|
||||
echo Python Version $(pyenv local) > $LOGFILE
|
||||
echo "" >> $LOGFILE
|
||||
typeset -i ALL_FILES_STARTTIME=$(date +%s)
|
||||
python ./test_pyenvlib.py --max ${MAX_TESTS} --weak-verify --$VERSION >>$LOGFILE 2>&1
|
||||
rc=$?
|
||||
|
||||
echo Python Version $(pyenv local) >> $LOGFILE
|
||||
echo "" >>LOGFILE
|
||||
|
||||
typeset -i ALL_FILES_ENDTIME=$(date +%s)
|
||||
(( time_diff = ALL_FILES_ENDTIME - ALL_FILES_STARTTIME))
|
||||
displaytime $time_diff >> $LOGFILE
|
||||
fi
|
||||
|
||||
SUBJECT_PREFIX="pyenv weak verify (max $MAX_TESTS) for"
|
||||
if ((rc == 0)); then
|
||||
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX $VERSION ok" ${USER}@localhost
|
||||
else
|
||||
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX $VERSION not ok" ${USER}@localhost
|
||||
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX $VERSION not ok" ${EMAIL}
|
||||
fi
|
||||
rm .python-version
|
||||
done
|
||||
|
||||
typeset -i RUN_ENDTIME=$(date +%s)
|
||||
(( time_diff = RUN_ENDTIME - RUN_STARTTIME))
|
||||
elapsed_time=$(displaytime $time_diff)
|
||||
echo "Run complete $elapsed_time for versions $PYVERSION" | mail -s "pyenv weak verify in $elapsed_time" ${EMAIL}
|
13  test/simple_source/bug25/08_if_while_else.py (Normal file)
@@ -0,0 +1,13 @@
|
||||
# From 2.3 Queue.py
|
||||
# Bug was adding COME_FROM from while
|
||||
# confusing the else
|
||||
def put(item, block=True, timeout=None):
|
||||
if block:
|
||||
if timeout:
|
||||
while True:
|
||||
if item:
|
||||
block = 1
|
||||
else:
|
||||
block = 5
|
||||
elif item:
|
||||
block = False
|
19  test/simple_source/bug26/02_while1_if_and.py (Normal file)
@@ -0,0 +1,19 @@
|
||||
# From 2.6.9 ftplib.py
|
||||
# Bug was handling "if" with "and" inside while1
|
||||
def getmultiline(line):
|
||||
if line[3]:
|
||||
while 1:
|
||||
if line[2] and line[5]:
|
||||
break
|
||||
return
|
||||
|
||||
# From 2.6.9 refactor.py
|
||||
def _detect_future_features(tp):
|
||||
while True:
|
||||
if tp == 6:
|
||||
while tp == 7:
|
||||
if tp != 11:
|
||||
break
|
||||
else:
|
||||
break
|
||||
return
|
@@ -21,5 +21,33 @@ def call(*args):
|
||||
except KeyError:
|
||||
return 2
|
||||
except TypeError:
|
||||
# Unhashable argument
|
||||
return 3
|
||||
|
||||
|
||||
# From 2.6.9 pdb.py
|
||||
# Here we have a "try/except" inside a "try/except/else and we can't
|
||||
# distinguish which COME_FROM comes from which "try".
|
||||
|
||||
def do_jump(self, arg):
|
||||
try:
|
||||
arg(1)
|
||||
except ValueError:
|
||||
arg(2)
|
||||
else:
|
||||
try:
|
||||
arg(3)
|
||||
except ValueError:
|
||||
arg(4)
|
||||
|
||||
# From 2.6.9 smtpd.py
|
||||
# Bug was that the for can cause multiple COME_FROMs at the
|
||||
# end of the try block
|
||||
def _deliver(self, s, mailfrom, rcpttos):
|
||||
try:
|
||||
mailfrom(1)
|
||||
except RuntimeError:
|
||||
mailfrom(2)
|
||||
except IndexError:
|
||||
for r in s:
|
||||
mailfrom()
|
||||
return
|
||||
|
13  test/simple_source/bug26/08_distbug.py (Normal file)
@@ -0,0 +1,13 @@
|
||||
# From 2.3.7 dis.py. Bug ranged from 2.2 to 2.6.
|
||||
# bug was in "while". uncompyle6 doesn't
|
||||
# add in a COME_FROM after the while. Maybe it should?
|
||||
|
||||
def distb(tb=None):
|
||||
"""Disassemble a traceback (default: last traceback)."""
|
||||
if tb is None:
|
||||
try:
|
||||
tb = sys.last_traceback
|
||||
except AttributeError:
|
||||
raise RuntimeError, "no last traceback to disassemble"
|
||||
while tb.tb_next: tb = tb.tb_next
|
||||
disassemble(tb.tb_frame.f_code, tb.tb_lasti)
|
17  test/simple_source/bug27+/02_assert.py (Normal file)
@@ -0,0 +1,17 @@
|
||||
# From 2.7 test_argparse.py
|
||||
# Bug was turning assert into an "or raise" statement
|
||||
def __call__(arg, dest):
|
||||
try:
|
||||
assert arg == 'spam', 'dest: %s' % dest
|
||||
except:
|
||||
raise
|
||||
|
||||
__call__('spam', __file__)
|
||||
|
||||
# From python 2.7.14 lib2to3/refactor.py
|
||||
# Bug was mangling assert turning if into "or"
|
||||
def refactor_doctest(clipped, new):
|
||||
assert clipped, clipped
|
||||
if not new:
|
||||
new += u"\n"
|
||||
return
|
@@ -4,3 +4,14 @@ def __new__(cls, encode, decode, streamreader=None, streamwriter=None,
|
||||
incrementalencoder=None, incrementaldecoder=None, name=None,
|
||||
*, _is_text_encoding=None):
|
||||
return
|
||||
|
||||
# From 3.3 _pyio.py. A closure is created here.
|
||||
# This changes how the default params are found
|
||||
class StringIO(object):
|
||||
def __init__(self, initial_value="", newline="\n"):
|
||||
super(StringIO, self).__init__()
|
||||
|
||||
# No closure created here
|
||||
class StringIO2(object):
|
||||
def __init__(self, initial_value="", newline="\n"):
|
||||
return 5
|
||||
|
12  test/simple_source/bug33/05_nonlocal.py (Normal file)
@@ -0,0 +1,12 @@
|
||||
# From Python 3.6 functools.py
|
||||
# Bug was in detecting "nonlocal" access
|
||||
def not_bug():
|
||||
cache_token = 5
|
||||
|
||||
def register():
|
||||
nonlocal cache_token
|
||||
return cache_token == 5
|
||||
|
||||
return register()
|
||||
|
||||
assert not_bug()
|
37  test/simple_source/bug34/05_while1_if_continue.py (Normal file)
@@ -0,0 +1,37 @@
|
||||
# Bug in Python 3.4 text_file.py
|
||||
# Bug is handling: while true ... if ... continue
|
||||
def readline(b):
|
||||
a = 1
|
||||
while True:
|
||||
if b:
|
||||
if b[0]:
|
||||
a = 2
|
||||
b = None
|
||||
continue
|
||||
b = None
|
||||
a = 5
|
||||
|
||||
return a
|
||||
|
||||
assert readline(None) == 1
|
||||
assert readline([2]) == 2
|
||||
|
||||
def readline2(self):
|
||||
while True:
|
||||
line = 5
|
||||
if self[0]:
|
||||
if self:
|
||||
self[0] = 1
|
||||
continue
|
||||
|
||||
return line + self[0]
|
||||
|
||||
# From 3.4.4 connection.py
|
||||
def PipeClient(address):
|
||||
while 1:
|
||||
try:
|
||||
address += 1
|
||||
except OSError as e:
|
||||
raise e
|
||||
else:
|
||||
raise
|
@@ -1,49 +0,0 @@
|
||||
# Bug was found in 3.6 _osx_support.py in if/elif needing
|
||||
# EXTENDED_ARGS which are the targets of jumps.
|
||||
def get_platform_osx(_config_vars, osname, release, machine, sys, re):
|
||||
"""Filter values for get_platform()"""
|
||||
|
||||
macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
|
||||
macrelease = release or 10
|
||||
macver = macver or macrelease
|
||||
|
||||
if macver:
|
||||
release = macver
|
||||
osname = "macosx"
|
||||
|
||||
cflags = _config_vars.get('CFLAGS', _config_vars.get('CFLAGS', ''))
|
||||
if macrelease:
|
||||
try:
|
||||
macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
|
||||
except ValueError:
|
||||
macrelease = (10, 0)
|
||||
else:
|
||||
macrelease = (10, 0)
|
||||
|
||||
if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
|
||||
machine = 'fat'
|
||||
|
||||
archs = re.findall(r'-arch\s+(\S+)', cflags)
|
||||
archs = tuple(sorted(set(archs)))
|
||||
|
||||
if len(archs) == 1:
|
||||
machine = archs[0]
|
||||
elif archs == ('i386', 'ppc'):
|
||||
machine = 'fat'
|
||||
elif archs == ('i386', 'x86_64'):
|
||||
machine = 'intel'
|
||||
elif archs == ('i386', 'ppc', 'x86_64'):
|
||||
machine = 'fat3'
|
||||
elif archs == ('ppc64', 'x86_64'):
|
||||
machine = 'fat64'
|
||||
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
|
||||
machine = 'universal'
|
||||
else:
|
||||
raise ValueError(
|
||||
"Don't know machine value for archs=%r" % (archs,))
|
||||
|
||||
elif machine == 'i386':
|
||||
if sys.maxsize >= 2**32:
|
||||
machine = 'x86_64'
|
||||
|
||||
return (osname, release, machine)
|
@@ -1,5 +1,30 @@
|
||||
# Python 3.6's changes for calling functions.
|
||||
# See https://github.com/rocky/python-uncompyle6/issues/58
|
||||
# CALL_FUNCTION_EX takes 2 to 3 arguments on the stack: the function, the tuple of positional arguments,
|
||||
# and optionally the dict of keyword arguments if bit 0 of oparg is 1.
|
||||
a(*[])
|
||||
|
||||
# CALL_FUNCTION_EX takes 2 to 3 arguments on the stack:
|
||||
# * the function,
|
||||
# * the tuple of positional arguments, and optionally
|
||||
# * the dict of keyword arguments if bit 0 of oparg is 1.
|
||||
from foo import f, dialect, args, kwds, reader
|
||||
|
||||
f(*[])
|
||||
|
||||
# From Python 3.6 csv.py
|
||||
# (f, dialect) are positional arg tuples, *args, is by itself, i.e.
|
||||
# no tuple.
|
||||
x = reader(f, dialect, *args, **kwds)
|
||||
|
||||
# From 3.6 functools.py
|
||||
# Below there is a load_closure instruction added
|
||||
def cmp_to_key(mycmp):
|
||||
class K(object):
|
||||
def __ge__():
|
||||
return mycmp()
|
||||
return
|
||||
|
||||
# In this situation though, there is no load_closure
|
||||
def cmp2_to_key(mycmp):
|
||||
class K2(object):
|
||||
def __ge__():
|
||||
return 5
|
||||
return
|
||||
|
@@ -1,5 +1,18 @@
|
||||
# Self-checking 3.6+ string interpolation tests
|
||||
|
||||
var1 = 'x'
|
||||
var2 = 'y'
|
||||
print(f'interpolate {var1} strings {var2!r} {var2!s} py36')
|
||||
print(f'{abc}0')
|
||||
print(f'{abc}{abc!s}')
|
||||
abc = 'def'
|
||||
assert (f'interpolate {var1} strings {var2!r} {var2!s} py36' ==
|
||||
"interpolate x strings 'y' y py36")
|
||||
assert 'def0' == f'{abc}0'
|
||||
assert 'defdef' == f'{abc}{abc!s}'
|
||||
|
||||
# From 3.6 functools.py
|
||||
# Bug was handling format operator strings.
|
||||
|
||||
k, v = "1", ["2"]
|
||||
x = f"{k}={v!r}"
|
||||
y = f"functools.{x}({', '.join(v)})"
|
||||
assert x == "1=['2']"
|
||||
assert y == "functools.1=['2'](2)"
|
||||
|
@@ -1,5 +1,13 @@
|
||||
# From 3.6 _markupbase _parse_doctype_subset()
|
||||
def bug(self, j):
|
||||
def bug(self, j, a, b):
|
||||
self.parse_comment(j, report=0)
|
||||
self.parse_comment(j, report=1, foo=2)
|
||||
self.parse_comment(a, b, report=3)
|
||||
|
||||
# From 3.6 fnmatch.py
|
||||
# Bug was precedence parentheses around the decorator
|
||||
|
||||
import functools
|
||||
@functools.lru_cache(maxsize=256, typed=True)
|
||||
def _compile_pattern(pat):
|
||||
pass
|
||||
|
30  test/simple_source/bug36/03_try_return_except.py (Normal file)
@@ -0,0 +1,30 @@
|
||||
# From Python 3.6 bdb.py
|
||||
# Bug was handling try's with returns
|
||||
# END_FINALLY in 3.6 starts disappearing
|
||||
|
||||
def effective(possibles):
|
||||
for b in possibles:
|
||||
try:
|
||||
return 1
|
||||
except:
|
||||
return 2
|
||||
return 3
|
||||
|
||||
assert effective([5]) == 1
|
||||
assert effective([]) == 3
|
||||
|
||||
def effective2(possibles):
|
||||
b = 0
|
||||
for b in possibles:
|
||||
try:
|
||||
if b >= 5:
|
||||
b = 5
|
||||
else:
|
||||
return 2
|
||||
except:
|
||||
return 3
|
||||
return b
|
||||
|
||||
assert effective2([5]) == 5
|
||||
assert effective2([]) == 0
|
||||
assert effective2(['a']) == 3
|
17  test/simple_source/bug36/04_class_kwargs.py (Normal file)
@@ -0,0 +1,17 @@
|
||||
# From 3.6 test_abc.py
|
||||
# Bug was Receiver() class definition
|
||||
import abc
|
||||
import unittest
|
||||
class TestABCWithInitSubclass(unittest.TestCase):
|
||||
def test_works_with_init_subclass(self):
|
||||
class ReceivesClassKwargs:
|
||||
def __init_subclass__(cls, **kwargs):
|
||||
super().__init_subclass__()
|
||||
class Receiver(ReceivesClassKwargs, abc.ABC, x=1, y=2, z=3):
|
||||
pass
|
||||
|
||||
def test_abstractmethod_integration(self):
|
||||
for abstractthing in [abc.abstractmethod]:
|
||||
class C(metaclass=abc.ABCMeta):
|
||||
@abstractthing
|
||||
def foo(self): pass # abstract
|
@@ -18,3 +18,16 @@ def getvalue1(self):
|
||||
finally:
|
||||
pass
|
||||
return 2
|
||||
|
||||
# From Python 3.6 asynchat.py
|
||||
# Bug is handling "as why" in the face of a return.
|
||||
# uncompyle6 shows removal of "why" after the return.
|
||||
def handle_read(self):
|
||||
try:
|
||||
data = 5
|
||||
except ZeroDivisionError:
|
||||
return
|
||||
except OSError as why:
|
||||
return why
|
||||
|
||||
return data
|
||||
|
20  test/simple_source/bug36/05_36lambda.py (Normal file)
@@ -0,0 +1,20 @@
|
||||
# From Python 3.6 hmac.py
|
||||
# needed to change mklambda rule
|
||||
def __init__(self, msg = None, digestmod = None):
|
||||
self.digest_cons = lambda d='': digestmod.new(d)
|
||||
|
||||
# From Python 3.6 functools.py
|
||||
# Bug was handling lambda for MAKE_FUNCTION_8 (closure)
|
||||
# vs to MAKE_FUNCTION_9 (pos_args + closure)
|
||||
def bug():
|
||||
def register(cls, func=None):
|
||||
return lambda f: register(cls, f)
|
||||
|
||||
# From Python 3.6 configparser.py
|
||||
def items(self, d, section=5, raw=False, vars=None):
|
||||
if vars:
|
||||
for key, value in vars.items():
|
||||
d[self.optionxform(key)] = value
|
||||
d = lambda option: self._interpolation.before_get(self,
|
||||
section, option, d[option], d)
|
||||
return
|
@@ -6,3 +6,12 @@ def a85decode(b, *, foldspaces=False, adobe=False, ignorechars=b' \t\n\r\v'):
|
||||
_UNSET = object()
|
||||
def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
|
||||
return
|
||||
|
||||
# From 3.6 compileall.py. Bug is making sure default values are in quotes
|
||||
def compile_command(source, filename="<input>", symbol="single"):
|
||||
return
|
||||
|
||||
# From 3.6 _pyio.py. Bug was in getting order of metaclass=abc.ABCMeta right
|
||||
import abc
|
||||
class IOBase(metaclass=abc.ABCMeta):
|
||||
pass
|
||||
|
12  test/simple_source/bug36/05_if_and_comp.py (Normal file)
@@ -0,0 +1,12 @@
|
||||
# From 3.6 base64.py
|
||||
# Bug was handling "and" condition in the presense of POP_JUMP_IF_FALSE
|
||||
# locations
|
||||
def _85encode(foldnuls, words):
|
||||
return ['z' if foldnuls and word
|
||||
else 'y'
|
||||
for word in words]
|
||||
|
||||
# From Python 3.6 enum.py
|
||||
|
||||
def __new__(metacls, cls, bases, classdict):
|
||||
{k: classdict[k] for k in classdict._member_names}
|
13  test/simple_source/bug36/05_while_and_if.py (Normal file)
@@ -0,0 +1,13 @@
|
||||
# From Python 3.6 getopt.py
|
||||
# Bug showing that "while" can have several "COME_FROMS" before loop end
|
||||
# NOTE: uncompyle6 still gets the "if"s wrong.
|
||||
def getopt(args):
|
||||
while args and args[0] and args[0] != '-':
|
||||
if args[0] == '--':
|
||||
break
|
||||
if args[0]:
|
||||
opts = 5
|
||||
else:
|
||||
opts = 6
|
||||
|
||||
return opts
|
8  test/simple_source/comprehension/05_dict_comp.py (Normal file)
@@ -0,0 +1,8 @@
|
||||
# Issue #162
|
||||
def x(s):
|
||||
return {k: v
|
||||
for (k, v) in s
|
||||
if not k.startswith('_')
|
||||
}
|
||||
|
||||
assert x((('_foo', None),)) == {}
|
2  test/simple_source/stmts/01_rel_import.py (Normal file)
@@ -0,0 +1,2 @@
|
||||
# Tests relative imports
|
||||
from . import bogus
|
@@ -6,3 +6,16 @@ def PipeClient(address):
|
||||
else:
|
||||
raise
|
||||
return
|
||||
|
||||
# From 2.6.9 sre.py
|
||||
# Bug was parsing inner while1. Our massaging adds a COME_FROM
|
||||
# possibly in the wrong place. When control flow is
|
||||
# redone possibly all of this mess will disappear.
|
||||
def _parse(source, state, this, group, char):
|
||||
while 1:
|
||||
if this:
|
||||
while 1:
|
||||
raise RuntimeError
|
||||
else:
|
||||
raise IndexError
|
||||
return
|
||||
|
20  test/stdlib/run-and-email.sh (Executable file)
@@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
USER=${USER:-rocky}
|
||||
EMAIL=${EMAIL:-rb@dustyfeet.com}
|
||||
SUBJECT_PREFIX="stdlib unit testing for"
|
||||
for VERSION in 2.7.14 2.6.9 ; do
|
||||
typeset -i rc=0
|
||||
LOGFILE=/tmp/runtests-$VERSION-$$.log
|
||||
if ! pyenv local $VERSION ; then
|
||||
rc=1
|
||||
else
|
||||
/bin/bash ./runtests.sh >$LOGFILE 2>&1
|
||||
rc=$?
|
||||
fi
|
||||
if ((rc == 0)); then
|
||||
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX $VERSION ok" ${USER}@localhost
|
||||
else
|
||||
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX $VERSION not ok" ${USER}@localhost
|
||||
tail -v $LOGFILE | mail -s "$SUBJECT_PREFIX $VERSION not ok" $EMAIL
|
||||
fi
|
||||
done
|
@@ -48,11 +48,16 @@ case $PYVERSION in
|
||||
;;
|
||||
2.6)
|
||||
SKIP_TESTS=(
|
||||
[test_compile.py]=1 # Intermittent - sometimes works and sometimes doesn't
|
||||
[test_grp.py]=1 # Long test - might work Control flow?
|
||||
[test_opcodes.py]=1
|
||||
[test_pwd.py]=1 # Long test - might work? Control flow?
|
||||
[test_re.py]=1 # Probably Control flow?
|
||||
[test_queue.py]=1 # Control flow?
|
||||
[test_strftime.py]=1
|
||||
[test_trace.py]=1 # Line numbers are expected to be different
|
||||
[test_zipfile64.py]=1 # Skip Long test
|
||||
[test_zlib.py]=1 # Look at
|
||||
# .pyenv/versions/2.6.9/lib/python2.6/lib2to3/refactor.pyc
|
||||
# .pyenv/versions/2.6.9/lib/python2.6/pyclbr.pyc
|
||||
# .pyenv/versions/2.6.9/lib/python2.6/quopri.pyc -- look at ishex, is short
|
||||
@@ -65,6 +70,11 @@ case $PYVERSION in
|
||||
;;
|
||||
2.7)
|
||||
SKIP_TESTS=(
|
||||
# These are ok, but our test machine POWER has problems
|
||||
# so we skip..
|
||||
[test_httplib.py]=1 # Ok, but POWER has problems with this
|
||||
[test_pdb.py]=1 # Ok, but POWER has problems with this
|
||||
|
||||
[test_curses.py]=1 # Possibly fails on its own but not detected
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_doctest.py]=1 # Fails on its own
|
||||
@@ -87,9 +97,23 @@ case $PYVERSION in
|
||||
[test_zipfile64.py]=1 # Runs ok but takes 204 seconds
|
||||
)
|
||||
;;
|
||||
3.5)
|
||||
SKIP_TESTS=(
|
||||
[test_decorators.py]=1 # Control flow wrt "if elif"
|
||||
)
|
||||
;;
|
||||
3.6)
|
||||
SKIP_TESTS=(
|
||||
[test_contains.py]=1 # Code "while False: yield None" is optimized away in compilation
|
||||
[test_decorators.py]=1 # Control flow wrt "if elif"
|
||||
[test_pow.py]=1 # Control flow wrt "continue"
|
||||
)
|
||||
;;
|
||||
*)
|
||||
SKIP_TESTS=( [test_aepack.py]=1 [audiotests.py]=1
|
||||
SKIP_TESTS=( [test_aepack.py]=1
|
||||
[audiotests.py]=1
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_generators.py]=1 # I think string formatting of docstrings gets in the way. Not sure
|
||||
)
|
||||
;;
|
||||
esac
|
||||
@@ -169,7 +193,7 @@ typeset -i ALL_FILES_ENDTIME=$(date +%s)
|
||||
|
||||
(( time_diff = ALL_FILES_ENDTIME - ALL_FILES_STARTTIME))
|
||||
|
||||
printf "Ran $i tests in "
|
||||
printf "Ran $i unit-test files in "
|
||||
displaytime $time_diff
|
||||
|
||||
exit $allerrs
|
||||
|
@@ -116,8 +116,10 @@ def do_tests(src_dir, patterns, target_dir, start_with=None,
|
||||
files = files[:max_files]
|
||||
|
||||
print(time.ctime())
|
||||
main.main(src_dir, target_dir, files, [], do_verify=do_verify)
|
||||
(tot_files, okay_files, failed_files,
|
||||
verify_failed_files) = main.main(src_dir, target_dir, files, [], do_verify=do_verify)
|
||||
print(time.ctime())
|
||||
return verify_failed_files + failed_files
|
||||
|
||||
if __name__ == '__main__':
|
||||
import getopt, sys
|
||||
@@ -161,15 +163,19 @@ if __name__ == '__main__':
|
||||
'/tmp/spark-grammar-%s.cover' % vers
|
||||
)
|
||||
|
||||
failed = 0
|
||||
for src_dir, pattern, target_dir in test_dirs:
|
||||
if os.path.exists(src_dir):
|
||||
target_dir = os.path.join(target_base, target_dir)
|
||||
if os.path.exists(target_dir):
|
||||
shutil.rmtree(target_dir, ignore_errors=1)
|
||||
do_tests(src_dir, pattern, target_dir, start_with,
|
||||
do_verify, test_options['max='])
|
||||
failed += do_tests(src_dir, pattern, target_dir, start_with,
|
||||
do_verify, test_options['max='])
|
||||
else:
|
||||
print("### Path %s doesn't exist; skipping" % src_dir)
|
||||
pass
|
||||
pass
|
||||
sys.exit(failed)
|
||||
|
||||
# python 1.5:
|
||||
|
||||
|
@@ -38,7 +38,7 @@ def _get_outstream(outfile):
|
||||
os.makedirs(dir)
|
||||
except OSError:
|
||||
pass
|
||||
return open(outfile, 'w')
|
||||
return open(outfile, 'wb')
|
||||
|
||||
def decompile(
|
||||
bytecode_version, co, out=None, showasm=None, showast=False,
|
||||
|
@@ -429,9 +429,6 @@ class PythonParser(GenericASTBuilder):
|
||||
|
||||
for_block ::= l_stmts_opt _come_froms JUMP_BACK
|
||||
|
||||
for ::= SETUP_LOOP expr for_iter store
|
||||
for_block POP_BLOCK _come_froms
|
||||
|
||||
forelsestmt ::= SETUP_LOOP expr for_iter store
|
||||
for_block POP_BLOCK else_suite _come_froms
|
||||
|
||||
|
@@ -1,8 +1,21 @@
|
||||
# Copyright (c) 2015-2017 Rocky Bernstein
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
#
|
||||
# Copyright (c) 1999 John Aycock
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
A spark grammar for Python 2.x.
|
||||
Base grammar for Python 2.x.
|
||||
|
||||
However instead of terminal symbols being the usual ASCII text,
|
||||
e.g. 5, myvariable, "for", etc. they are CPython Bytecode tokens,
|
||||
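The docstring above makes the point that grammar terminals here are CPython bytecode opcode names rather than ordinary source tokens. A small stdlib-only illustration (run it on CPython 3.7 or earlier to see SETUP_LOOP, which later versions dropped):

```python
import dis

# The opcode names printed here (SETUP_LOOP, FOR_ITER, POP_BLOCK, ...) are
# the kind of terminal symbols that appear in rules such as:
#   for ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK _come_froms
def loop():
    for i in (1, 2, 3):
        print(i)

dis.dis(loop)
```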
@@ -80,6 +93,9 @@ class Python2Parser(PythonParser):
|
||||
raise_stmt2 ::= expr expr RAISE_VARARGS_2
|
||||
raise_stmt3 ::= expr expr expr RAISE_VARARGS_3
|
||||
|
||||
for ::= SETUP_LOOP expr for_iter store
|
||||
for_block POP_BLOCK _come_froms
|
||||
|
||||
del_stmt ::= expr DELETE_SLICE+0
|
||||
del_stmt ::= expr expr DELETE_SLICE+1
|
||||
del_stmt ::= expr expr DELETE_SLICE+2
|
||||
@@ -173,6 +189,7 @@ class Python2Parser(PythonParser):
|
||||
def p_expr2(self, args):
|
||||
"""
|
||||
expr ::= LOAD_LOCALS
|
||||
expr ::= LOAD_ASSERT
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
@@ -306,9 +323,9 @@ class Python2Parser(PythonParser):
|
||||
], customize)
|
||||
if self.version >= 2.7:
|
||||
self.add_unique_rule(
|
||||
'dictcomp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store '
|
||||
'dict_comp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store '
|
||||
'comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST',
|
||||
'dictcomp_func', 0, customize)
|
||||
'dict_comp_func', 0, customize)
|
||||
|
||||
else:
|
||||
kvlist_n = "kvlist_%s" % token.attr
|
||||
@@ -503,6 +520,7 @@ class Python2Parser(PythonParser):
|
||||
|
||||
self.check_reduce['aug_assign1'] = 'AST'
|
||||
self.check_reduce['aug_assign2'] = 'AST'
|
||||
self.check_reduce['or'] = 'AST'
|
||||
# self.check_reduce['_stmts'] = 'AST'
|
||||
|
||||
# Dead code testing...
|
||||
@@ -518,8 +536,11 @@ class Python2Parser(PythonParser):
|
||||
# if lhs == 'while1elsestmt':
|
||||
# from trepan.api import debug; debug()
|
||||
|
||||
if lhs in ('aug_assign1', 'aug_assign2') and ast[0] and ast[0][0] == 'and':
|
||||
if lhs in ('aug_assign1', 'aug_assign2') and ast[0] and ast[0][0] in ('and', 'or'):
|
||||
return True
|
||||
if rule == ('or', ('expr', 'jmp_true', 'expr', '\\e_come_from_opt')):
|
||||
expr2 = ast[2]
|
||||
return expr2 == 'expr' and expr2[0] == 'LOAD_ASSERT'
|
||||
return False
|
||||
|
||||
class Python2ParserSingle(Python2Parser, PythonParserSingle):
|
||||
|
@@ -18,6 +18,10 @@ class Python22Parser(Python23Parser):
|
||||
COME_FROM POP_TOP COME_FROM
|
||||
list_for ::= expr for_iter store list_iter CONTINUE JUMP_FORWARD
|
||||
COME_FROM POP_TOP COME_FROM
|
||||
|
||||
# Some versions of Python 2.2 have been found to generate
|
||||
# PRINT_ITEM_CONT for PRINT_ITEM
|
||||
print_items_stmt ::= expr PRINT_ITEM_CONT print_items_opt
|
||||
'''
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
|
@@ -32,6 +32,9 @@ class Python23Parser(Python24Parser):
|
||||
while1stmt ::= _while1test l_stmts_opt JUMP_BACK
|
||||
POP_TOP POP_BLOCK COME_FROM
|
||||
|
||||
while1stmt ::= _while1test l_stmts_opt JUMP_BACK
|
||||
POP_TOP POP_BLOCK
|
||||
|
||||
while1stmt ::= _while1test l_stmts_opt JUMP_BACK
|
||||
COME_FROM POP_TOP POP_BLOCK COME_FROM
|
||||
|
||||
|
@@ -33,8 +33,8 @@ class Python24Parser(Python25Parser):
|
||||
import_cont ::= filler LOAD_CONST alias
|
||||
|
||||
# Python 2.5+ omits POP_TOP POP_BLOCK
|
||||
while1stmt ::= SETUP_LOOP l_stmts JUMP_BACK POP_TOP POP_BLOCK COME_FROM
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_TOP POP_BLOCK COME_FROM
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_TOP POP_BLOCK
|
||||
|
||||
# Python 2.5+:
|
||||
# call_stmt ::= expr POP_TOP
|
||||
|
@@ -22,7 +22,7 @@ class Python26Parser(Python2Parser):
|
||||
JUMP_IF_FALSE POP_TOP POP_TOP store POP_TOP
|
||||
|
||||
except_handler ::= JUMP_FORWARD COME_FROM except_stmts
|
||||
come_from_pop END_FINALLY come_froms
|
||||
come_froms_pop END_FINALLY come_froms
|
||||
|
||||
except_handler ::= JUMP_FORWARD COME_FROM except_stmts END_FINALLY
|
||||
come_froms
|
||||
@@ -77,8 +77,7 @@ class Python26Parser(Python2Parser):
|
||||
jb_cont ::= JUMP_BACK
|
||||
jb_cont ::= CONTINUE
|
||||
|
||||
jb_cf_pop ::= JUMP_BACK come_froms POP_TOP
|
||||
jb_cf_pop ::= JUMP_BACK POP_TOP
|
||||
jb_cf_pop ::= come_from_opt JUMP_BACK _come_froms POP_TOP
|
||||
ja_cf_pop ::= JUMP_ABSOLUTE come_froms POP_TOP
|
||||
jf_cf_pop ::= JUMP_FORWARD come_froms POP_TOP
|
||||
|
||||
@@ -115,8 +114,7 @@ class Python26Parser(Python2Parser):
|
||||
|
||||
# Semantic actions want else_suitel to be at index 3
|
||||
ifelsestmtl ::= testexpr c_stmts_opt cf_jb_cf_pop else_suitel
|
||||
|
||||
ifelsestmtc ::= testexpr c_stmts_opt ja_cf_pop else_suitec
|
||||
ifelsestmtc ::= testexpr c_stmts_opt ja_cf_pop else_suitec
|
||||
|
||||
# Semantic actions want suite_stmts_opt to be at index 3
|
||||
withstmt ::= expr setupwith SETUP_FINALLY suite_stmts_opt
|
||||
@@ -135,10 +133,15 @@ class Python26Parser(Python2Parser):
|
||||
setup_finally ::= STORE_FAST SETUP_FINALLY LOAD_FAST DELETE_FAST
|
||||
setup_finally ::= STORE_NAME SETUP_FINALLY LOAD_NAME DELETE_NAME
|
||||
|
||||
while1stmt ::= SETUP_LOOP l_stmts JUMP_BACK COME_FROM
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt come_from_opt JUMP_BACK _come_froms
|
||||
|
||||
# Sometimes JUMP_BACK is misclassified as CONTINUE.
|
||||
# workaround until we have better control flow in place
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt CONTINUE _come_froms
|
||||
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_pop POP_BLOCK _come_froms
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_cf_pop bp_come_from
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_cf_pop POP_BLOCK
|
||||
whilestmt ::= SETUP_LOOP testexpr returns POP_BLOCK COME_FROM
|
||||
|
||||
whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_pop POP_BLOCK
|
||||
@@ -149,13 +152,11 @@ class Python26Parser(Python2Parser):
|
||||
return ::= ret_expr RETURN_VALUE POP_TOP
|
||||
return_if_stmt ::= ret_expr RETURN_END_IF POP_TOP
|
||||
|
||||
iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK come_from_pop
|
||||
iflaststmtl ::= testexpr c_stmts_opt jb_cf_pop
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_from_pop
|
||||
|
||||
lastc_stmt ::= iflaststmt COME_FROM
|
||||
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
|
||||
|
||||
ifstmt ::= testexpr_then _ifstmts_jump
|
||||
|
||||
# Semantic actions want the else to be at position 3
|
||||
@@ -314,7 +315,7 @@ class Python26Parser(Python2Parser):
|
||||
self.check_reduce['and'] = 'AST'
|
||||
self.check_reduce['list_for'] = 'AST'
|
||||
self.check_reduce['try_except'] = 'tokens'
|
||||
self.check_reduce['tryelsestmt'] = 'tokens'
|
||||
self.check_reduce['tryelsestmt'] = 'AST'
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python26Parser,
|
||||
@@ -364,7 +365,27 @@ class Python26Parser(Python2Parser):
|
||||
return (tokens[last-3].kind not in frozenset(('JUMP_FORWARD', 'RETURN_VALUE'))
|
||||
or (tokens[last-3] == 'JUMP_FORWARD' and tokens[last-3].attr != 2))
|
||||
elif rule[0] == 'tryelsestmt':
|
||||
|
||||
# We need to distinguish try_except from tryelsestmt, and we do that
|
||||
# by making sure that the jump before the except handler jumps to
|
||||
# code somewhere before the end of the construct.
|
||||
# This AST method is slower, but the token-only based approach
|
||||
# didn't work as it failed with a "try" embedded inside a "try/else"
|
||||
# since we can't detect COME_FROM boundaries.
|
||||
|
||||
if ast[3] == 'except_handler':
|
||||
except_handler = ast[3]
|
||||
if except_handler[0] == 'JUMP_FORWARD':
|
||||
else_start = int(except_handler[0].pattr)
|
||||
if last == len(tokens):
|
||||
last -= 1
|
||||
if tokens[last] == 'COME_FROM' and isinstance:
|
||||
last_offset = int(tokens[last].offset.split('_')[0])
|
||||
return else_start >= last_offset
|
||||
|
||||
|
||||
# The above test apparently isn't good enough, so we have additional
|
||||
# checks to distinguish try_except from tryelsestmt, and we do that
|
||||
# by checking the jump before the END_FINALLY
|
||||
# If we have:
|
||||
# insn
|
||||
|
@@ -19,24 +19,29 @@ class Python27Parser(Python2Parser):
|
||||
lc_body ::= expr LIST_APPEND
|
||||
for_iter ::= GET_ITER COME_FROM FOR_ITER
|
||||
|
||||
stmt ::= setcomp_func
|
||||
stmt ::= set_comp_func
|
||||
|
||||
|
||||
# Dictionary and set comprehensions were added in Python 2.7
|
||||
expr ::= dict_comp
|
||||
dict_comp ::= LOAD_DICTCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
|
||||
|
||||
stmt ::= dictcomp_func
|
||||
dictcomp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
|
||||
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
stmt ::= dict_comp_func
|
||||
dict_comp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
|
||||
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
set_comp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
comp_body ::= dict_comp_body
|
||||
comp_body ::= set_comp_body
|
||||
comp_for ::= expr for_iter store comp_iter JUMP_BACK
|
||||
|
||||
comp_iter ::= comp_if
|
||||
comp_iter ::= comp_if_not
|
||||
comp_if_not ::= expr jmp_true comp_iter
|
||||
comp_iter ::= comp_body
|
||||
|
||||
dict_comp_body ::= expr expr MAP_ADD
|
||||
set_comp_body ::= expr SET_ADD
|
||||
|
||||
@@ -170,6 +175,7 @@ class Python27Parser(Python2Parser):
|
||||
""")
|
||||
super(Python27Parser, self).customize_grammar_rules(tokens, customize)
|
||||
self.check_reduce['and'] = 'AST'
|
||||
self.check_reduce['raise_stmt1'] = 'AST'
|
||||
# self.check_reduce['conditional_true'] = 'AST'
|
||||
return
|
||||
|
||||
@@ -186,6 +192,8 @@ class Python27Parser(Python2Parser):
|
||||
jmp_target = jmp_false.offset + jmp_false.attr + 3
|
||||
return not (jmp_target == tokens[last].offset or
|
||||
tokens[last].pattr == jmp_false.pattr)
|
||||
elif rule[0] == ('raise_stmt1'):
|
||||
return ast[0] == 'expr' and ast[0][0] == 'or'
|
||||
# elif rule[0] == ('conditional_true'):
|
||||
# # FIXME: the below is a hack: we check expr for
|
||||
# # nodes that could have possibly been a been a Boolean.
|
||||
|
@@ -3,7 +3,18 @@
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
# Copyright (c) 1999 John Aycock
|
||||
#
|
||||
# See LICENSE for license
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
A spark grammar for Python 3.x.
|
||||
|
||||
@@ -51,13 +62,13 @@ class Python3Parser(PythonParser):
|
||||
jb_or_c ::= JUMP_BACK
|
||||
jb_or_c ::= CONTINUE
|
||||
|
||||
stmt ::= setcomp_func
|
||||
stmt ::= set_comp_func
|
||||
|
||||
setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
set_comp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
|
||||
COME_FROM JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
set_comp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER store comp_iter
|
||||
COME_FROM JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
comp_body ::= dict_comp_body
|
||||
comp_body ::= set_comp_body
|
||||
@@ -67,14 +78,19 @@ class Python3Parser(PythonParser):
|
||||
# See also common Python p_list_comprehension
|
||||
"""
|
||||
|
||||
def p_dictcomp3(self, args):
|
||||
def p_dict_comp3(self, args):
|
||||
""""
|
||||
expr ::= dict_comp
|
||||
stmt ::= dictcomp_func
|
||||
dictcomp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
|
||||
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
dict_comp ::= LOAD_DICTCOMP LOAD_CONST MAKE_FUNCTION_0 expr
|
||||
GET_ITER CALL_FUNCTION_1
|
||||
stmt ::= dict_comp_func
|
||||
dict_comp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
|
||||
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
dict_comp ::= LOAD_DICTCOMP LOAD_CONST MAKE_FUNCTION_0 expr
|
||||
GET_ITER CALL_FUNCTION_1
|
||||
|
||||
comp_iter ::= comp_if
|
||||
comp_iter ::= comp_if_not
|
||||
comp_if_not ::= expr jmp_true comp_iter
|
||||
comp_iter ::= comp_body
|
||||
"""
|
||||
|
||||
def p_grammar(self, args):
|
||||
@@ -225,10 +241,10 @@ class Python3Parser(PythonParser):
|
||||
except_suite ::= returns
|
||||
|
||||
except_cond1 ::= DUP_TOP expr COMPARE_OP
|
||||
jmp_false POP_TOP POP_TOP POP_TOP
|
||||
jmp_false POP_TOP POP_TOP POP_TOP
|
||||
|
||||
except_cond2 ::= DUP_TOP expr COMPARE_OP
|
||||
jmp_false POP_TOP store POP_TOP
|
||||
jmp_false POP_TOP store POP_TOP
|
||||
|
||||
except ::= POP_TOP POP_TOP POP_TOP c_stmts_opt POP_EXCEPT _jump
|
||||
except ::= POP_TOP POP_TOP POP_TOP returns
|
||||
@@ -617,9 +633,9 @@ class Python3Parser(PythonParser):
|
||||
kvlist_n = "kvlist_%s" % token.attr
|
||||
if opname == 'BUILD_MAP_n':
|
||||
# PyPy sometimes has no count. Sigh.
|
||||
rule = ('dictcomp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store '
|
||||
rule = ('dict_comp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store '
|
||||
'comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST')
|
||||
self.add_unique_rule(rule, 'dictomp_func', 1, customize)
|
||||
self.add_unique_rule(rule, 'dict_comp_func', 1, customize)
|
||||
|
||||
kvlist_n = 'kvlist_n'
|
||||
rule = 'kvlist_n ::= kvlist_n kv3'
|
||||
@@ -871,6 +887,20 @@ class Python3Parser(PythonParser):
|
||||
# before.
|
||||
args_pos, args_kw, annotate_args, closure = token.attr
|
||||
stack_count = args_pos + args_kw + annotate_args
|
||||
if closure:
|
||||
if args_pos:
|
||||
rule = ('mklambda ::= %s%s%s%s' %
|
||||
('expr ' * stack_count,
|
||||
'load_closure ' * closure,
|
||||
'BUILD_TUPLE_1 LOAD_LAMBDA LOAD_CONST ',
|
||||
opname))
|
||||
else:
|
||||
rule = ('mklambda ::= %s%s%s' %
|
||||
('load_closure ' * closure,
|
||||
'LOAD_LAMBDA LOAD_CONST ',
|
||||
opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
|
||||
rule = ('mkfunc ::= %s%s%s%s' %
|
||||
('expr ' * stack_count,
|
||||
'load_closure ' * closure,
|
||||
@@ -1061,7 +1091,7 @@ class Python3Parser(PythonParser):
|
||||
if tokens[last] in ('JUMP_BACK', 'CONTINUE'):
|
||||
# These indicate inside a loop, but token[last]
|
||||
# should not be in a loop.
|
||||
# FIXME: Not quite righte: refine by using target
|
||||
# FIXME: Not quite right: refine by using target
|
||||
return True
|
||||
|
||||
# if SETUP_LOOP target spans the else part, then this is
|
||||
@@ -1071,7 +1101,7 @@ class Python3Parser(PythonParser):
|
||||
last += 1
|
||||
if last == n:
|
||||
return False
|
||||
return tokens[first].attr >= tokens[last].offset
|
||||
return tokens[first].attr > tokens[last].offset
|
||||
elif lhs == 'while1stmt':
|
||||
|
||||
# If there is a fall-through to the COME_FROM_LOOP, then this is
|
||||
|
@@ -34,16 +34,23 @@ class Python34Parser(Python33Parser):
|
||||
# Seems to be needed starting 3.4.4 or so
|
||||
while1stmt ::= SETUP_LOOP l_stmts
|
||||
COME_FROM JUMP_BACK POP_BLOCK COME_FROM_LOOP
|
||||
while1stmt ::= SETUP_LOOP l_stmts
|
||||
POP_BLOCK COME_FROM_LOOP
|
||||
|
||||
# FIXME the below masks a bug in not detecting COME_FROM_LOOP
|
||||
# grammar rules with COME_FROM -> COME_FROM_LOOP already exist
|
||||
whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK
|
||||
else_suitel COME_FROM
|
||||
|
||||
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK POP_BLOCK else_suitel
|
||||
COME_FROM_LOOP
|
||||
|
||||
# Python 3.4+ optimizes the trailing two JUMPS away
|
||||
|
||||
# Is this 3.4 only?
|
||||
yield_from ::= expr GET_ITER LOAD_CONST YIELD_FROM
|
||||
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_ABSOLUTE JUMP_FORWARD COME_FROM
|
||||
"""
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
|
@@ -19,6 +19,7 @@ spark grammar differences over Python 3.5 for Python 3.6.
|
||||
from uncompyle6.parser import PythonParserSingle, nop_func
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6.parsers.parse35 import Python35Parser
|
||||
from uncompyle6.scanners.tok import Token
|
||||
|
||||
class Python36Parser(Python35Parser):
|
||||
|
||||
@@ -31,9 +32,12 @@ class Python36Parser(Python35Parser):
|
||||
"""
|
||||
sstmt ::= sstmt RETURN_LAST
|
||||
|
||||
# 3.6 redoes how return_closure works
|
||||
# 3.6 redoes how return_closure works. FIXME: Isolate to LOAD_CLOSURE
|
||||
return_closure ::= LOAD_CLOSURE DUP_TOP STORE_NAME RETURN_VALUE RETURN_LAST
|
||||
|
||||
# Is there something general going on here? FIXME: Isolate to LOAD_DICTCOMP
|
||||
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST MAKE_FUNCTION_8 expr GET_ITER CALL_FUNCTION_1
|
||||
|
||||
stmt ::= conditional_lambda
|
||||
conditional_lambda ::= expr jmp_false expr return_if_lambda
|
||||
return_stmt_lambda LAMBDA_MARKER
|
||||
@@ -45,11 +49,14 @@ class Python36Parser(Python35Parser):
|
||||
come_from_loops ::= COME_FROM_LOOP*
|
||||
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt
|
||||
JUMP_BACK COME_FROM POP_BLOCK COME_FROM_LOOP
|
||||
JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP
|
||||
|
||||
# This might be valid in < 3.6
|
||||
and ::= expr jmp_false expr
|
||||
|
||||
jf_cf ::= JUMP_FORWARD COME_FROM
|
||||
conditional ::= expr jmp_false expr jf_cf expr COME_FROM
|
||||
|
||||
# Adds a COME_FROM_ASYNC_WITH over 3.5
|
||||
# FIXME: remove corresponding rule for 3.5?
|
||||
|
||||
@@ -67,16 +74,28 @@ class Python36Parser(Python35Parser):
|
||||
stmt ::= try_except36
|
||||
try_except36 ::= SETUP_EXCEPT returns except_handler36
|
||||
opt_come_from_except
|
||||
try_except36 ::= SETUP_EXCEPT suite_stmts
|
||||
|
||||
# 3.6 omits END_FINALLY sometimes
|
||||
except_handler36 ::= COME_FROM_EXCEPT except_stmts
|
||||
except_handler ::= jmp_abs COME_FROM_EXCEPT except_stmts
|
||||
|
||||
stmt ::= tryfinally36
|
||||
tryfinally36 ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts
|
||||
tryfinally36 ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts_opt END_FINALLY
|
||||
except_suite_finalize ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts_opt END_FINALLY _jump
|
||||
"""
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
super(Python36Parser, self).customize_grammar_rules(tokens, customize)
|
||||
self.remove_rules("""
|
||||
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST MAKE_CLOSURE_0 expr GET_ITER CALL_FUNCTION_1
|
||||
""")
|
||||
self.check_reduce['call_kw'] = 'AST'
|
||||
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.kind
|
||||
|
||||
@@ -186,6 +205,8 @@ class Python36Parser(Python35Parser):
self.addRule("""expr ::= call_ex_kw
expr ::= call_ex_kw2
expr ::= call_ex_kw3
expr ::= call_ex_kw4

call_ex_kw ::= expr expr build_map_unpack_with_call
CALL_FUNCTION_EX_KW
call_ex_kw2 ::= expr
@@ -193,6 +214,10 @@ class Python36Parser(Python35Parser):
build_map_unpack_with_call
CALL_FUNCTION_EX_KW
call_ex_kw3 ::= expr
build_tuple_unpack_with_call
expr
CALL_FUNCTION_EX_KW
call_ex_kw4 ::= expr
expr
expr
CALL_FUNCTION_EX_KW
@@ -212,6 +237,21 @@ class Python36Parser(Python35Parser):
seen_GET_AWAITABLE_YIELD_FROM,
next_token)

def reduce_is_invalid(self, rule, ast, tokens, first, last):
invalid = super(Python36Parser,
self).reduce_is_invalid(rule, ast,
tokens, first, last)
if invalid:
return invalid
if rule[0] == 'call_kw':
# Make sure we don't derive call_kw
nt = ast[0]
while not isinstance(nt, Token):
if nt[0] == 'call_kw':
return True
nt = nt[0]

return False
class Python36ParserSingle(Python36Parser, PythonParserSingle):
pass
@@ -41,6 +41,7 @@ else:
from collections import namedtuple

from array import array
from copy import copy

from xdis.code import iscode
from xdis.bytecode import (
@@ -58,6 +59,7 @@ class Scanner2(Scanner):
# This is the 2.5+ default
# For <2.5 it is <generator expression>
self.genexpr_name = '<genexpr>'
self.load_asserts = set([])

@staticmethod
def unmangle_name(name, classname):
@@ -143,6 +145,9 @@ class Scanner2(Scanner):
# 'LOAD_ASSERT' is used in assert statements.
self.load_asserts = set()
for i in self.op_range(0, codelen):

self.offset2inst_index[inst.offset] = i

# We need to detect the difference between:
# raise AssertionError
# and
@@ -163,7 +168,9 @@ class Scanner2(Scanner):

# Get jump targets
# Format: {target offset: [jump offsets]}
load_asserts_save = copy(self.load_asserts)
jump_targets = self.find_jump_targets(show_asm)
self.load_asserts = load_asserts_save
# print("XXX2", jump_targets)

last_stmt = self.next_stmt[0]
@@ -305,7 +312,7 @@ class Scanner2(Scanner):
j = self.offset2inst_index[offset]
target_index = self.offset2inst_index[target]
is_continue = (self.insts[target_index-1].opname == 'SETUP_LOOP'
and self.insts[j+1].opname == 'JUMP_FORWARD') and False
and self.insts[j+1].opname == 'JUMP_FORWARD')
if is_continue:
op_name = 'CONTINUE'
if (offset in self.stmts and
@@ -1131,7 +1138,13 @@ class Scanner2(Scanner):
source = self.setup_loops[label]
else:
source = offset
targets[label] = targets.get(label, []) + [source]
# FIXME: The grammar for 2.6 and before doesn't
# handle COME_FROM's from a loop inside if's
# It probably should.
if (self.version > 2.6 or
self.code[source] != self.opc.SETUP_LOOP or
self.code[label] != self.opc.JUMP_FORWARD):
targets[label] = targets.get(label, []) + [source]
pass
pass
pass
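The scanner change above saves and restores `self.load_asserts` around `find_jump_targets()` because the same `LOAD_GLOBAL AssertionError` instruction can come either from an `assert` statement or from an explicit `raise AssertionError`, and only the former should be renamed to the pseudo-token LOAD_ASSERT. A minimal sketch of why the two have to be told apart, using plain CPython `dis` rather than uncompyle6's scanner (output shown is roughly what CPython 3.6 produces; other versions differ in detail):

```python
import dis

def f(x):
    assert x, "boom"          # conditional jump, then LOAD_GLOBAL AssertionError

def g(x):
    raise AssertionError(x)   # same AssertionError load, but raised unconditionally

dis.dis(f)   # ... POP_JUMP_IF_TRUE ... LOAD_GLOBAL AssertionError ... RAISE_VARARGS 1
dis.dis(g)   # LOAD_GLOBAL AssertionError ... CALL_FUNCTION 1 ... RAISE_VARARGS 1
```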
@@ -184,7 +184,7 @@ TABLE_DIRECT = {

'comp_iter': ( '%c', 0 ),
'comp_if': ( ' if %c%c', 0, 2 ),
'comp_ifnot': ( ' if not %p%c', (0, 22), 2 ),
'comp_if_not': ( ' if not %p%c', (0, 22), 2 ),
'comp_body': ( '', ), # ignore when recusing
'set_comp_body': ( '%c', 0 ),
'gen_comp_body': ( '%c', 0 ),
@@ -317,12 +317,17 @@ MAP = {
# See https://docs.python.org/2/reference/expressions.html
# or https://docs.python.org/3/reference/expressions.html
# for a list.

# Things at the top of this list below with low-value precidence will
# tend to have parenthesis around them. Things at the bottom
# of the list will tend not to have parenthesis around them.
PRECEDENCE = {
'list': 0,
'dict': 0,
'unary_convert': 0,
'dict_comp': 0,
'set_comp': 0,
'set_comp_expr': 0,
'list_comp': 0,
'generator_exp': 0,

@@ -376,8 +381,9 @@ PRECEDENCE = {
'ret_cond_not': 28,

'_mklambda': 30,
'yield': 101,
'yield_from': 101
'call_kw': 100, # 100 seems to to be module/function precidence
'yield': 101,
'yield_from': 101
}

ASSIGN_TUPLE_PARAM = lambda param_name: \
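The TABLE_DIRECT entries being renamed above are templates for the semantic pass: a format string plus child indexes, where `%c` means "recursively render the child at this index". A toy, much-reduced model of that expansion (hypothetical helper, not pysource's actual `engine()`, which also knows `%p`, `%|`, `%+`, `%-` and friends):

```python
# Toy expansion of a TABLE_DIRECT-style entry, assuming %c = "render child i".
def expand(template_spec, children):
    fmt, *indexes = template_spec
    out, it, i = '', iter(indexes), 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt) and fmt[i + 1] == 'c':
            out += children[next(it)]   # substitute the indexed child's rendering
            i += 2
        else:
            out += fmt[i]
            i += 1
    return out

# 'comp_if': (' if %c%c', 0, 2) renders child 0 (the test) and child 2 (the
# rest of the comprehension iteration):
print(expand((' if %c%c', 0, 2), ['x > 0', '<jump>', ' for x in xs']))
# -> ' if x > 0 for x in xs'
```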
@@ -357,8 +357,10 @@ def customize_for_version(self, is_pypy, version):
'tryfinally36': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n',
(1, 'returns'), 3 ),
'fstring_expr': ( "{%c%{conversion}}", 0),
'fstring_single': ( "f'{%c%{conversion}}'", 0),
'fstring_multi': ( "f'%c'", 0),
# FIXME: the below assumes the format strings
# don't have ''' in them. Fix this properly
'fstring_single': ( "f'''{%c%{conversion}}'''", 0),
'fstring_multi': ( "f'''%c'''", 0),
'func_args36': ( "%c(**", 0),
'try_except36': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
'unpack_list': ( '*%c', (0, 'list') ),
@@ -455,6 +457,33 @@ def customize_for_version(self, is_pypy, version):
self.n_call_ex_kw2 = call_ex_kw2

def call_ex_kw3(node):
"""Handle CALL_FUNCTION_EX 1 (have KW) but without
BUILD_MAP_UNPACK_WITH_CALL"""
self.preorder(node[0])
self.write('(')
args = node[1][0]
if args == 'expr':
args = args[0]
if args == 'tuple':
if self.call36_tuple(args) > 0:
self.write(', ')
pass
pass

self.write('*')
self.preorder(node[1][1])
self.write(', ')

kwargs = node[2]
if kwargs == 'expr':
kwargs = kwargs[0]
self.write('**')
self.preorder(kwargs)
self.write(')')
self.prune()
self.n_call_ex_kw3 = call_ex_kw3

def call_ex_kw4(node):
"""Handle CALL_FUNCTION_EX 2 (have KW) but without
BUILD_{MAP,TUPLE}_UNPACK_WITH_CALL"""
self.preorder(node[0])
@@ -478,8 +507,7 @@ def customize_for_version(self, is_pypy, version):
self.preorder(kwargs)
self.write(')')
self.prune()
self.n_call_ex_kw3 = call_ex_kw3

self.n_call_ex_kw4 = call_ex_kw4

def call36_tuple(node):
"""
@@ -577,6 +605,14 @@ def customize_for_version(self, is_pypy, version):

FSTRING_CONVERSION_MAP = {1: '!s', 2: '!r', 3: '!a'}

def n_formatted_value(node):
if node[0] == 'LOAD_CONST':
self.write(node[0].attr)
self.prune()
else:
self.default(node)
self.n_formatted_value = n_formatted_value

def f_conversion(node):
node.conversion = FSTRING_CONVERSION_MAP.get(node.data[1].attr, '')

@@ -610,7 +646,7 @@ def customize_for_version(self, is_pypy, version):
num_posargs = len(node) - (num_kwargs + 1)
n = len(node)
assert n >= len(keys)+1, \
'not enough parameters keyword-tuple values'
'not enough parameters keyword-tuple values'
# try:
# assert n >= len(keys)+1, \
# 'not enough parameters keyword-tuple values'
@@ -665,7 +701,7 @@ def customize_for_version(self, is_pypy, version):
self.prune()
return
self.n_return_closure = return_closure
pass # version > 3.6
pass # version > 3.4
pass # version > 3.0
pass # version >= 3.6
pass # version >= 3.4
pass # version >= 3.0
return
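The `call_ex_kw*` handlers added above cover the different shapes Python 3.6 gives an extended call: whether BUILD_TUPLE_UNPACK_WITH_CALL / BUILD_MAP_UNPACK_WITH_CALL appear depends on whether fixed arguments have to be merged with the unpacked ones before CALL_FUNCTION_EX. The simplest and the merged shapes can be seen with plain `dis` on CPython 3.6 (illustration only, not uncompyle6 code; output is version dependent):

```python
import dis

# f(*args, **kwargs): nothing to merge, so 3.6 compiles this straight to
# LOAD_NAME f; LOAD_NAME args; LOAD_NAME kwargs; CALL_FUNCTION_EX 1
dis.dis(compile("f(*args, **kwargs)", "<example>", "eval"))

# f(1, *args, x=2, **kwargs): the fixed pieces are folded into the unpacked
# ones first, which is where BUILD_TUPLE_UNPACK_WITH_CALL and
# BUILD_MAP_UNPACK_WITH_CALL show up in the bytecode.
dis.dis(compile("f(1, *args, x=2, **kwargs)", "<example>", "eval"))
```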
@@ -63,9 +63,10 @@ The node position 0 will be associated with "import".

# FIXME: DRY code with pysource

import re, sys
import re

from xdis.code import iscode
from xdis.magics import sysinfo2float
from uncompyle6.semantics import pysource
from uncompyle6 import parser
from uncompyle6.scanner import Token, Code, get_scanner
@@ -679,7 +680,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
# skip over stmt return ret_expr
ast = ast[0][0][0]
store = None
if ast in ['setcomp_func', 'dictcomp_func']:
if ast in ['set_comp_func', 'dict_comp_func']:
# Offset 0: BUILD_SET should have the span
# of '{'
self.gen_source(ast, code_name, {})
@@ -724,8 +725,8 @@ class FragmentsWalker(pysource.SourceWalker, object):
pass

# Python 2.7+ starts including set_comp_body
# Python 3.5+ starts including setcomp_func
assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
# Python 3.5+ starts including set_comp_func
assert n.kind in ('lc_body', 'comp_body', 'set_comp_func', 'set_comp_body'), ast
assert store, "Couldn't find store in list/set comprehension"

old_name = self.name
@@ -871,8 +872,8 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.prune()

# FIXME: Not sure if below is general. Also, add dict_comp_func.
# 'setcomp_func': ("%|lambda %c: {%c for %c in %c%c}\n", 1, 3, 3, 1, 4)
def n_setcomp_func(self, node):
# 'set_comp_func': ("%|lambda %c: {%c for %c in %c%c}\n", 1, 3, 3, 1, 4)
def n_set_comp_func(self, node):
setcomp_start = len(self.f.getvalue())
self.write(self.indent, "lambda ")
param_node = node[1]
@@ -1746,8 +1747,12 @@ def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,
'ast': showast,
'grammar': showgrammar
}
return code_deparse(co, out, version, debug_opts, code_objects, compile_mode,
is_pypy, walker)
return code_deparse(co, out,
version=version,
debug_opts=debug_opts,
code_objects=code_objects,
compile_mode=compile_mode,
is_pypy=is_pypy, walker=walker)

def code_deparse(co, out=StringIO(), version=None, is_pypy=None,
debug_opts=DEFAULT_DEBUG_OPTS,
@@ -1775,7 +1780,7 @@ def code_deparse(co, out=StringIO(), version=None, is_pypy=None,
assert iscode(co)

if version is None:
version = float(sys.version[0:3])
version = sysinfo2float()
if is_pypy is None:
is_pypy = IS_PYPY

@@ -1809,7 +1814,11 @@ def code_deparse(co, out=StringIO(), version=None, is_pypy=None,

# convert leading '__doc__ = "..." into doc string
assert deparsed.ast == 'stmts'
deparsed.mod_globs = pysource.find_globals(deparsed.ast, set())
(deparsed.mod_globs,
nonlocals) = (pysource
.find_globals_and_nonlocals(deparsed.ast,
set(), set(),
co, version))

# Just when you think we've forgotten about what we
# were supposed to to: Generate source from AST!
@@ -1848,15 +1857,22 @@ def find_gt(a, x):
return a[i]
raise ValueError

def deparse_code_around_offset(name, offset, version, co, out=StringIO(),
showasm=False, showast=False,
showgrammar=False, is_pypy=False):
def code_deparse_around_offset(name, offset, co, out=StringIO(),
version=None, is_pypy=None,
debug_opts=DEFAULT_DEBUG_OPTS):
"""
Like deparse_code(), but given a function/module name and
offset, finds the node closest to offset. If offset is not an instruction boundary,
we raise an IndexError.
"""
deparsed = deparse_code(version, co, out, showasm, showast, showgrammar, is_pypy)
assert iscode(co)

if version is None:
version = sysinfo2float()
if is_pypy is None:
is_pypy = IS_PYPY

deparsed = code_deparse(co, out, version, is_pypy, debug_opts)
if (name, offset) in deparsed.offsets.keys():
# This is the easy case
return deparsed
@@ -1869,6 +1885,17 @@ def deparse_code_around_offset(name, offset, version, co, out=StringIO(),
deparsed.offsets[name, offset] = deparsed.offsets[name, found_offset]
return deparsed

# Deprecated. Here still for compatability
def deparse_code_around_offset(name, offset, version, co, out=StringIO(),
showasm=False, showast=False,
showgrammar=False, is_pypy=False):
debug_opts = {
'asm': showasm,
'ast': showast,
'grammar': showgrammar
}
return code_deparse(name, offset, co, out, version, is_pypy,
debug_opts)

def op_at_code_loc(code, loc, opc):
"""Return the instruction name at code[loc] using
@@ -1938,14 +1965,7 @@ def deparsed_find(tup, deparsed, code):
# return

# def deparse_test_around(offset, name, co, is_pypy=IS_PYPY):
# sys_version = sys.version_info[0] + (sys.version_info[1] / 10.0)
# walk = deparse_code_around_offset(name, offset, sys_version, co, showasm=False, showast=False,
# showgrammar=False, is_pypy=IS_PYPY)
# deparsed = deparse_code_around_offset(name, offset, sys_version, co,
# showasm=False,
# showast=False,
# showgrammar=False,
# is_pypy=IS_PYPY)
# deparsed = code_deparse_around_offset(name, offset, co)
# print("deparsed source")
# print(deparsed.text, "\n")
# print('------------------------')
@@ -1972,13 +1992,14 @@ def deparsed_find(tup, deparsed, code):
# return

# def get_code_for_fn(fn):
# if hasattr(fn, 'func_code'):
# return fn.func_code
# return fn.__code__

# def test():
# import os, sys

# def div_test(a, b, c):
# return a / b / c

# def gcd(a, b):
# if a > b:
# (a, b) = (b, a)
@@ -1992,9 +2013,10 @@ def deparsed_find(tup, deparsed, code):

# # check_args(['3', '5'])
# # deparse_test(get_code_for_fn(gcd))
# deparse_test(get_code_for_fn(div_test))
# # deparse_test(get_code_for_fn(test))
# # deparse_test(get_code_for_fn(FragmentsWalker.fixup_offsets))
# # deparse_test(get_code_for_fn(FragmentsWalker.n_list))
# print('=' * 30)
# deparse_test_around(408, 'n_list', get_code_for_fn(FragmentsWalker.n_list))
# # deparse_test_around(408, 'n_list', get_code_for_fn(FragmentsWalker.n_build_list))
# # deparse_test(inspect.currentframe().f_code)
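The old fragments entry point survives only as a deprecated shim above; new callers go through the keyword-argument style `code_deparse_around_offset()`. A hedged usage sketch of the new API (the `offsets` mapping keyed by `(name, offset)` follows the code shown above; everything else here is illustrative, not taken from the project's docs):

```python
from io import StringIO
from uncompyle6.semantics.fragments import code_deparse_around_offset

def fragment_at(fn, offset):
    # Deparse a function's code object and return the source fragment that the
    # given bytecode offset maps to, using the (name, offset) index the walker builds.
    co = fn.__code__
    deparsed = code_deparse_around_offset(co.co_name, offset, co, out=StringIO())
    return deparsed.offsets[co.co_name, offset]
```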
@@ -13,6 +13,9 @@ else:
read_write_global_ops = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'))
read_global_ops = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL'))

# NOTE: we also need to check that the variable name is a free variable, not a cell variable.
nonglobal_ops = frozenset(('STORE_DEREF', 'DELETE_DEREF'))

# FIXME: this and find_globals could be paramaterized with one of the
# above global ops
def find_all_globals(node, globs):
@@ -24,15 +27,22 @@ def find_all_globals(node, globs):
globs.add(n.pattr)
return globs

def find_globals(node, globs):
def find_globals_and_nonlocals(node, globs, nonlocals, code, version):
"""search a node of parse tree to find variable names that need a
'global' added."""
either 'global' or 'nonlocal' statements added."""
for n in node:
if isinstance(n, AST):
globs = find_globals(n, globs)
globs, nonlocals = find_globals_and_nonlocals(n, globs, nonlocals,
code, version)
elif n.kind in read_global_ops:
globs.add(n.pattr)
return globs
elif (version >= 3.0
and n.kind in nonglobal_ops
and n.pattr in code.co_freevars
and n.pattr != code.co_name
and code.co_name != '<lambda>'):
nonlocals.add(n.pattr)
return globs, nonlocals

# def find_globals(node, globs, global_ops=mkfunc_globals):
# """Find globals in this statement."""
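The renamed helper works because `global` and `nonlocal` declarations leave distinct traces in bytecode: assigning a declared global compiles to STORE_GLOBAL, while assigning a declared nonlocal compiles to STORE_DEREF against a name in the inner code object's `co_freevars`. A small CPython `dis` illustration (not uncompyle6 code; `nonlocal` is Python 3 only):

```python
import dis

counter = 0

def uses_global():
    global counter
    counter = 1            # STORE_GLOBAL counter

def outer():
    x = 0
    def inner():
        nonlocal x
        x = 1              # STORE_DEREF x; 'x' appears in inner.__code__.co_freevars
    return inner

dis.dis(uses_global)
dis.dis(outer())           # disassemble the returned inner closure
```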
@@ -21,7 +21,7 @@ from uncompyle6.scanner import Code
from uncompyle6.parsers.astnode import AST
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.semantics.helper import (
print_docstring, find_all_globals, find_globals, find_none
print_docstring, find_all_globals, find_globals_and_nonlocals, find_none
)

@@ -265,8 +265,12 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
assert ast == 'stmts'

all_globals = find_all_globals(ast, set())
for g in sorted((all_globals & self.mod_globs) | find_globals(ast, set())):
globals, nonlocals = find_globals_and_nonlocals(ast, set(), set(),
code, self.version)
for g in sorted((all_globals & self.mod_globs) | globals):
self.println(self.indent, 'global ', g)
for nl in sorted(nonlocals):
self.println(self.indent, 'nonlocal ', nl)
self.mod_globs -= all_globals
has_none = 'None' in code.co_names
rn = has_none and not find_none(ast)
@@ -419,7 +423,17 @@ def make_function2(self, node, is_lambda, nested=1, codeNode=None):
assert ast == 'stmts'

all_globals = find_all_globals(ast, set())
for g in sorted((all_globals & self.mod_globs) | find_globals(ast, set())):

globals, nonlocals = find_globals_and_nonlocals(ast, set(), set(),
code, self.version)

# Python 2 doesn't support the "nonlocal" statement
try:
assert self.version >= 3.0 or not nonlocals
except:
from trepan.api import debug; debug()

for g in sorted((all_globals & self.mod_globs) | globals):
self.println(self.indent, 'global ', g)
self.mod_globs -= all_globals
has_none = 'None' in code.co_names
@@ -456,8 +470,11 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):

# MAKE_CLOSURE adds an additional closure slot

# Thank you, Python: such a well-thought out system that has
# changed and continues to change many times.
# In Python 3.6 stack entries change again. I understand
# 3.7 changes some of those changes. Yes, it is hard to follow
# and I am sure I haven't been able to keep up.

# Thank you, Python.
def build_param(ast, name, default):
"""build parameters:
@@ -482,10 +499,23 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
assert node[-1].kind.startswith('MAKE_')

# Python 3.3+ adds a qualified name at TOS (-1)
# moving down the LOAD_LAMBDA instruction
if 3.0 <= self.version <= 3.2:
lambda_index = -2
elif 3.03 <= self.version:
lambda_index = -3
else:
lambda_index = None

args_node = node[-1]
if isinstance(args_node.attr, tuple):
if self.version <= 3.3 and len(node) > 2 and node[-3] != 'LOAD_LAMBDA':
# positional args are after kwargs
pos_args, kw_args, annotate_argc = args_node.attr
# FIXME: there is probably a better way to classify this.
if (self.version <= 3.3 and len(node) > 2 and
node[lambda_index] != 'LOAD_LAMBDA' and
(node[0].kind.startswith('kwarg') or node[-4].kind != 'load_closure')):
# args are after kwargs; kwargs are bundled as one node
defparams = node[1:args_node.attr[0]+1]
else:
# args are before kwargs; kwags as bundled as one node
@@ -502,7 +532,7 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
expr_node = node[0]
if (expr_node[0] == 'LOAD_CONST' and
isinstance(expr_node[0].attr, tuple)):
defparams = list(expr_node[0].attr)
defparams = [repr(a) for a in expr_node[0].attr]
elif expr_node[0] in frozenset(('list', 'tuple', 'dict', 'set')):
defparams = [self.traverse(n, indent='') for n in expr_node[0][:-1]]
else:
@@ -524,19 +554,19 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
code = codeNode.attr

assert iscode(code)
code = Code(code, self.scanner, self.currentclass)
scanner_code = Code(code, self.scanner, self.currentclass)

# add defaults values to parameter names
argc = code.co_argcount
paramnames = list(code.co_varnames[:argc])
paramnames = list(scanner_code.co_varnames[:argc])

# defaults are for last n parameters, thus reverse
if not 3.0 <= self.version <= 3.1 or self.version >= 3.6:
paramnames.reverse(); defparams.reverse()

try:
ast = self.build_ast(code._tokens,
code._customize,
ast = self.build_ast(scanner_code._tokens,
scanner_code._customize,
is_lambda = is_lambda,
noneInNames = ('None' in code.co_names))
except ParserError, p:
@@ -729,15 +759,22 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
# docstring exists, dump it
print_docstring(self, self.indent, code.co_consts[0])

code._tokens = None # save memory
scanner_code._tokens = None # save memory
assert ast == 'stmts'

all_globals = find_all_globals(ast, set())
for g in sorted((all_globals & self.mod_globs) | find_globals(ast, set())):
globals, nonlocals = find_globals_and_nonlocals(ast, set(),
set(), code, self.version)

for g in sorted((all_globals & self.mod_globs) | globals):
self.println(self.indent, 'global ', g)

for nl in sorted(nonlocals):
self.println(self.indent, 'nonlocal ', nl)

self.mod_globs -= all_globals
has_none = 'None' in code.co_names
rn = has_none and not find_none(ast)
self.gen_source(ast, code.co_name, code._customize, is_lambda=is_lambda,
self.gen_source(ast, code.co_name, scanner_code._customize, is_lambda=is_lambda,
returnNone=rn)
code._tokens = None; code._customize = None # save memory
scanner_code._tokens = None; scanner_code._customize = None # save memory
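The `lambda_index` bookkeeping introduced above exists because the code object feeding MAKE_FUNCTION sits at a different stack depth across versions: from Python 3.3 on, the compiler also pushes a qualified-name string just before MAKE_FUNCTION, so the LOAD_LAMBDA/code-object constant moves one slot further down. A quick way to see the difference with plain `dis` (illustration only; exact operands vary by version):

```python
import dis

# On Python 3.0-3.2 a lambda compiles roughly to:
#     LOAD_CONST <code object <lambda>>; MAKE_FUNCTION 0
# On Python 3.3+ an extra qualified name is pushed first:
#     LOAD_CONST <code object <lambda>>; LOAD_CONST '<lambda>'; MAKE_FUNCTION 0
dis.dis(compile("f = lambda x: x + 1", "<example>", "exec"))
```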
@@ -141,7 +141,7 @@ from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.semantics.check_ast import checker
from uncompyle6.semantics.customize import customize_for_version
from uncompyle6.semantics.helper import (
print_docstring, find_globals, flatten_list)
print_docstring, find_globals_and_nonlocals, flatten_list)
from uncompyle6.scanners.tok import Token

from uncompyle6.semantics.consts import (
@@ -536,278 +536,8 @@ class SourceWalker(GenericASTTraversal, object):
pass
self.n_unmapexpr = unmapexpr

if version >= 3.6:
########################
# Python 3.6+ Additions
#######################

TABLE_DIRECT.update({
'fstring_expr': ( "{%c%{conversion}}", 0),
'fstring_single': ( "f'{%c%{conversion}}'", 0),
'fstring_multi': ( "f'%c'", 0),
'func_args36': ( "%c(**", 0),
'try_except36': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
'unpack_list': ( '*%c', (0, 'list') ),
'starred': ( '*%c', (0, 'expr') ),
'call_ex' : (
'%c(%c)',
(0, 'expr'), 1),
'call_ex_kw' : (
'%c(%c)',
(0, 'expr'), 2),

})

TABLE_R.update({
'CALL_FUNCTION_EX': ('%c(*%P)', 0, (1, 2, ', ', 100)),
# Not quite right
'CALL_FUNCTION_EX_KW': ('%c(**%C)', 0, (2, 3, ',')),
})
def build_unpack_tuple_with_call(node):

if node[0] == 'expr':
tup = node[0][0]
else:
tup = node[0]
pass
assert tup == 'tuple'
self.call36_tuple(tup)

buwc = node[-1]
assert buwc.kind.startswith('BUILD_TUPLE_UNPACK_WITH_CALL')
for n in node[1:-1]:
self.f.write(', *')
self.preorder(n)
pass
self.prune()
return
self.n_build_tuple_unpack_with_call = build_unpack_tuple_with_call

def build_unpack_map_with_call(node):
n = node[0]
if n == 'expr':
n = n[0]
if n == 'dict':
self.call36_dict(n)
first = 1
sep = ', **'
else:
first = 0
sep = '**'
for n in node[first:-1]:
self.f.write(sep)
self.preorder(n)
sep = ', **'
pass
self.prune()
return
self.n_build_map_unpack_with_call = build_unpack_map_with_call

def call_ex_kw2(node):
"""Handle CALL_FUNCTION_EX 2 (have KW) but with
BUILD_{MAP,TUPLE}_UNPACK_WITH_CALL"""

# This is weird shit. Thanks Python!
self.preorder(node[0])
self.write('(')

assert node[1] == 'build_tuple_unpack_with_call'
btuwc = node[1]
tup = btuwc[0]
if tup == 'expr':
tup = tup[0]
assert tup == 'tuple'
self.call36_tuple(tup)
assert node[2] == 'build_map_unpack_with_call'

self.write(', ')
d = node[2][0]
if d == 'expr':
d = d[0]
assert d == 'dict'
self.call36_dict(d)

args = btuwc[1]
self.write(', *')
self.preorder(args)

self.write(', **')
star_star_args = node[2][1]
if star_star_args == 'expr':
star_star_args = star_star_args[0]
self.preorder(star_star_args)
self.write(')')
self.prune()
self.n_call_ex_kw2 = call_ex_kw2

def call_ex_kw3(node):
"""Handle CALL_FUNCTION_EX 2 (have KW) but without
BUILD_{MAP,TUPLE}_UNPACK_WITH_CALL"""
self.preorder(node[0])
self.write('(')
args = node[1][0]
if args == 'tuple':
if self.call36_tuple(args) > 0:
self.write(', ')
pass
pass
else:
self.write('*')
self.preorder(args)
self.write(', ')
pass

kwargs = node[2]
if kwargs == 'expr':
kwargs = kwargs[0]
self.write('**')
self.preorder(kwargs)
self.write(')')
self.prune()
self.n_call_ex_kw3 = call_ex_kw3
def call36_tuple(node):
"""
A tuple used in a call, these are like normal tuples but they
don't have the enclosing parenthesis.
"""
assert node == 'tuple'
# Note: don't iterate over last element which is a
# BUILD_TUPLE...
flat_elems = flatten_list(node[:-1])

self.indent_more(INDENT_PER_LEVEL)
sep = ''

for elem in flat_elems:
if elem in ('ROT_THREE', 'EXTENDED_ARG'):
continue
assert elem == 'expr'
line_number = self.line_number
value = self.traverse(elem)
if line_number != self.line_number:
sep += '\n' + self.indent + INDENT_PER_LEVEL[:-1]
self.write(sep, value)
sep = ', '

self.indent_less(INDENT_PER_LEVEL)
return len(flat_elems)
self.call36_tuple = call36_tuple

def call36_dict(node):
"""
A dict used in a call_ex_kw2, which are a dictionary items expressed
in a call. This should format to:
a=1, b=2
In other words, no braces, no quotes around keys and ":" becomes
"=".

We will source-code use line breaks to guide us when to break.
"""
p = self.prec
self.prec = 100

self.indent_more(INDENT_PER_LEVEL)
sep = INDENT_PER_LEVEL[:-1]
line_number = self.line_number

assert node[0].kind.startswith('kvlist')
# Python 3.5+ style key/value list in dict
kv_node = node[0]
l = list(kv_node)
i = 0
# Respect line breaks from source
while i < len(l):
self.write(sep)
name = self.traverse(l[i], indent='')
# Strip off beginning and trailing quotes in name
name = name[1:-1]
if i > 0:
line_number = self.indent_if_source_nl(line_number,
self.indent + INDENT_PER_LEVEL[:-1])
line_number = self.line_number
self.write(name, '=')
value = self.traverse(l[i+1], indent=self.indent+(len(name)+2)*' ')
self.write(value)
sep = ","
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
line_number = self.line_number
i += 2
pass
self.prec = p
self.indent_less(INDENT_PER_LEVEL)
return
self.call36_dict = call36_dict

FSTRING_CONVERSION_MAP = {1: '!s', 2: '!r', 3: '!a'}

def f_conversion(node):
node.conversion = FSTRING_CONVERSION_MAP.get(node.data[1].attr, '')

def fstring_expr(node):
f_conversion(node)
self.default(node)
self.n_fstring_expr = fstring_expr

def fstring_single(node):
f_conversion(node)
self.default(node)
self.n_fstring_single = fstring_single

# def kwargs_only_36(node):
# keys = node[-1].attr
# num_kwargs = len(keys)
# values = node[:num_kwargs]
# for i, (key, value) in enumerate(zip(keys, values)):
# self.write(key + '=')
# self.preorder(value)
# if i < num_kwargs:
# self.write(',')
# self.prune()
# return
# self.n_kwargs_only_36 = kwargs_only_36

def kwargs_36(node):
self.write('(')
keys = node[-1].attr
num_kwargs = len(keys)
num_posargs = len(node) - (num_kwargs + 1)
n = len(node)
assert n >= len(keys)+2
sep = ''
# FIXME: adjust output for line breaks?
for i in range(num_posargs):
self.write(sep)
self.preorder(node[i])
sep = ', '

i = num_posargs
j = 0
# FIXME: adjust output for line breaks?
while i < n-1:
self.write(sep)
self.write(keys[j] + '=')
self.preorder(node[i])
i += 1
j += 1
self.write(')')
self.prune()
return
self.n_kwargs_36 = kwargs_36
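FSTRING_CONVERSION_MAP above maps the low bits of FORMAT_VALUE's operand back to the `!s`, `!r` and `!a` conversion markers that appeared in the original f-string. That operand encoding can be checked directly with `dis` on CPython 3.6+ (illustration only, not uncompyle6 code):

```python
import dis

# The FORMAT_VALUE argument encodes the conversion: 0 none, 1 !s, 2 !r, 3 !a
dis.dis(compile("f'{x!r}'", "<example>", "eval"))   # FORMAT_VALUE 2 (repr)
dis.dis(compile("f'{x}'", "<example>", "eval"))     # FORMAT_VALUE 0
```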
def return_closure(node):
# Nothing should be output here
self.prune()
return
self.n_return_closure = return_closure
pass # version > 3.6
pass # version > 3.4
pass # version > 3.0
pass # version >= 3.4
pass # version >= 3.0
return

f = property(lambda s: s.params['f'],
@@ -851,6 +581,7 @@ class SourceWalker(GenericASTTraversal, object):
self.pending_newlines = 0
self.params = {
'_globals': {},
'_nonlocals': {}, # Python 3 has nonlocal
'f': StringIO(),
'indent': indent,
'is_lambda': is_lambda,
@@ -1294,7 +1025,7 @@ class SourceWalker(GenericASTTraversal, object):
def n_import_from(self, node):
relative_path_index = 0
if self.version >= 2.5:
if node[relative_path_index].attr > 0:
if node[relative_path_index].pattr > 0:
node[2].pattr = ('.' * node[relative_path_index].pattr) + node[2].pattr
if self.version > 2.7:
if isinstance(node[1].pattr, tuple):
@@ -1504,7 +1235,7 @@ class SourceWalker(GenericASTTraversal, object):
n = n[3]
elif n == 'comp_if':
n = n[2]
elif n == 'comp_ifnot':
elif n == 'comp_if_not':
n = n[2]

assert n == 'comp_body', n
@@ -1542,6 +1273,7 @@ class SourceWalker(GenericASTTraversal, object):
self.comprehension_walk(node, iter_index=4)
self.write('}')
self.prune()
n_dict_comp = n_set_comp

def comprehension_walk3(self, node, iter_index, code_index=-5):
"""Non-closure-based comprehensions the way they are done in Python3.
@@ -1566,7 +1298,7 @@ class SourceWalker(GenericASTTraversal, object):
ast = ast[0]

store = None
if ast in ['setcomp_func', 'dictcomp_func']:
if ast in ['set_comp_func', 'dict_comp_func']:
for k in ast:
if k == 'comp_iter':
n = k
@@ -1597,8 +1329,8 @@ class SourceWalker(GenericASTTraversal, object):
if n[2] == 'store':
store = n[2]
n = n[3]
elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_ifnot'):
have_not = n in ('list_if_not', 'comp_ifnot')
elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
have_not = n in ('list_if_not', 'comp_if_not')
if_node = n[0]
if n[1] == 'store':
store = n[1]
@@ -1607,8 +1339,8 @@ class SourceWalker(GenericASTTraversal, object):
pass

# Python 2.7+ starts including set_comp_body
# Python 3.5+ starts including setcomp_func
assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
# Python 3.5+ starts including set_comp_func
assert n.kind in ('lc_body', 'comp_body', 'set_comp_func', 'set_comp_body'), ast
assert store, "Couldn't find store in list/set comprehension"

# A problem created with later Python code generation is that there
@@ -1740,8 +1472,6 @@ class SourceWalker(GenericASTTraversal, object):
self.write(']')
self.prune()

n_dict_comp = n_set_comp

def setcomprehension_walk3(self, node, collection_index):
"""Set comprehensions the way they are done in Python3.
They're more other comprehensions, e.g. set comprehensions
@@ -1793,6 +1523,13 @@ class SourceWalker(GenericASTTraversal, object):
# class definition ('class X(A,B,C):')
cclass = self.currentclass

# Pick out various needed bits of information
# * class_name - the name of the class
# * subclass_info - the parameters to the class e.g.
#   class Foo(bar, baz)
#   -----------
# * subclass_code - the code for the subclass body
subclass_info = None
if self.version > 3.0:
if node == 'classdefdeco2':
if self.version >= 3.6:
@@ -1801,17 +1538,31 @@ class SourceWalker(GenericASTTraversal, object):
class_name = node[2][0].pattr
else:
class_name = node[1][2].pattr
buildclass = node
build_class = node
else:
build_class = node[0]
if self.version >= 3.6:
class_name = node[0][1][0].attr.co_name
buildclass = node[0]
if build_class == 'build_class_kw':
mkfunc = build_class[1]
assert mkfunc == 'mkfunc'
subclass_info = build_class
if hasattr(mkfunc[0], 'attr') and iscode(mkfunc[0].attr):
subclass_code = mkfunc[0].attr
else:
assert mkfunc[0] == 'load_closure'
subclass_code = mkfunc[1].attr
assert iscode(subclass_code)
if build_class[1][0] == 'load_closure':
code_node = build_class[1][1]
else:
code_node = build_class[1][0]
class_name = code_node.attr.co_name
else:
class_name = node[1][0].pattr
buildclass = node[0]
build_class = node[0]

assert 'mkfunc' == buildclass[1]
mkfunc = buildclass[1]
assert 'mkfunc' == build_class[1]
mkfunc = build_class[1]
if mkfunc[0] == 'kwargs':
if 3.0 <= self.version <= 3.2:
for n in mkfunc:
@@ -1833,9 +1584,9 @@ class SourceWalker(GenericASTTraversal, object):
subclass_info = node
else:
subclass_info = node[0]
elif buildclass[1][0] == 'load_closure':
elif build_class[1][0] == 'load_closure':
# Python 3 with closures not functions
load_closure = buildclass[1]
load_closure = build_class[1]
if hasattr(load_closure[-3], 'attr'):
# Python 3.3 classes with closures work like this.
# Note have to test before 3.2 case because
@@ -1846,34 +1597,35 @@ class SourceWalker(GenericASTTraversal, object):
subclass_code = load_closure[-2].attr
else:
raise 'Internal Error n_classdef: cannot find class body'
if hasattr(buildclass[3], '__len__'):
subclass_info = buildclass[3]
elif hasattr(buildclass[2], '__len__'):
subclass_info = buildclass[2]
if hasattr(build_class[3], '__len__'):
if not subclass_info:
subclass_info = build_class[3]
elif hasattr(build_class[2], '__len__'):
subclass_info = build_class[2]
else:
raise 'Internal Error n_classdef: cannot superclass name'
elif self.version >= 3.6 and node == 'classdefdeco2':
subclass_info = node
subclass_code = buildclass[1][0].attr
else:
subclass_code = buildclass[1][0].attr
subclass_code = build_class[1][0].attr
elif not subclass_info:
subclass_code = build_class[1][0].attr
subclass_info = node[0]
else:
if node == 'classdefdeco2':
buildclass = node
build_class = node
else:
buildclass = node[0]
build_list = buildclass[1][0]
if hasattr(buildclass[-3][0], 'attr'):
subclass_code = buildclass[-3][0].attr
class_name = buildclass[0].pattr
elif (buildclass[-3] == 'mkfunc' and
build_class = node[0]
build_list = build_class[1][0]
if hasattr(build_class[-3][0], 'attr'):
subclass_code = build_class[-3][0].attr
class_name = build_class[0].pattr
elif (build_class[-3] == 'mkfunc' and
node == 'classdefdeco2' and
buildclass[-3][0] == 'load_closure'):
subclass_code = buildclass[-3][1].attr
class_name = buildclass[-3][0][0].pattr
build_class[-3][0] == 'load_closure'):
subclass_code = build_class[-3][1].attr
class_name = build_class[-3][0][0].pattr
elif hasattr(node[0][0], 'pattr'):
subclass_code = buildclass[-3][1].attr
subclass_code = build_class[-3][1].attr
class_name = node[0][0].pattr
else:
raise 'Internal Error n_classdef: cannot find class name'
@@ -1930,43 +1682,64 @@ class SourceWalker(GenericASTTraversal, object):
def print_super_classes3(self, node):
n = len(node)-1
if node.kind != 'expr':
assert node[n].kind.startswith('CALL_FUNCTION')

kwargs = None
# 3.6+ starts having this
if node[n].kind.startswith('CALL_FUNCTION_KW'):
# 3.6+ starts does this
kwargs = node[n-1].attr
assert isinstance(kwargs, tuple)
assert node[n].kind.startswith('CALL_FUNCTION')
for i in range(n-2, 0, -1):
if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
break
pass
i = n - (len(kwargs)+1)
j = 1 + n - node[n].attr
else:
for i in range(n-2, 0, -1):
if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
break
pass

if i == n-2:
return
i += 2

if i == n-2:
return
line_separator = ', '
sep = ''
self.write('(')
j = 0
i += 2
if kwargs:
# Last arg is tuple of keyword values: omit
l = n - 1
else:
l = n
while i < l:
value = self.traverse(node[i])
i += 1
self.write(sep, value)
# 3.6+ may have this
if kwargs:
self.write("=%s" % kwargs[j])

if kwargs:
# 3.6+ does this
while j < i:
self.write(sep)
value = self.traverse(node[j])
self.write("%s" % value)
sep = line_separator
j += 1
sep = line_separator
pass

j = 0
while i < l:
self.write(sep)
value = self.traverse(node[i])
self.write("%s=%s" % (kwargs[j], value))
sep = line_separator
j += 1
i += 1
else:
while i < l:
value = self.traverse(node[i])
i += 1
self.write(sep, value)
sep = line_separator
pass
pass
else:
self.write('(')
if self.version >= 3.6 and node[0] == 'LOAD_CONST':
return
value = self.traverse(node[0])
self.write('(')
self.write(value)
pass
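The reworked `print_super_classes3()` above handles the Python 3.6 encoding of class headers that pass keyword arguments (most commonly a metaclass): the keyword names arrive as a constant tuple right before CALL_FUNCTION_KW, which is exactly what the `kwargs = node[n-1].attr` lookup recovers. Seen from plain `dis` (illustration only; operand counts vary with the number of bases):

```python
import dis

src = """
class Spam(Base, metaclass=Meta):
    pass
"""
# Under CPython 3.6 the __build_class__ call ends in something like:
#     LOAD_CONST ('metaclass',); CALL_FUNCTION_KW 4
# where the tuple names the keyword arguments of the call.
dis.dis(compile(src, "<example>", "exec"))
```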
@@ -2570,11 +2343,16 @@ class SourceWalker(GenericASTTraversal, object):
# else:
#    print ast[-1][-1]

globals, nonlocals = find_globals_and_nonlocals(ast, set(), set(),
code, self.version)
# Add "global" declaration statements at the top
# of the function
for g in sorted(find_globals(ast, set())):
for g in sorted(globals):
self.println(indent, 'global ', g)

for nl in sorted(nonlocals):
self.println(indent, 'nonlocal ', nl)

old_name = self.name
self.gen_source(ast, code.co_name, code._customize)
self.name = old_name
@@ -2686,8 +2464,12 @@ def deparse_code(version, co, out=sys.stdout, showasm=None, showast=False,
'ast': showast,
'grammar': showgrammar
}
return code_deparse(co, out, version, debug_opts, code_objects, compile_mode,
is_pypy, walker)
return code_deparse(co, out,
version=version,
debug_opts=debug_opts,
code_objects=code_objects,
compile_mode=compile_mode,
is_pypy=is_pypy, walker=walker)

def code_deparse(co, out=sys.stdout, version=None, debug_opts=DEFAULT_DEBUG_OPTS,
code_objects={}, compile_mode='exec', is_pypy=False, walker=SourceWalker):
@@ -2730,7 +2512,11 @@ def code_deparse(co, out=sys.stdout, version=None, debug_opts=DEFAULT_DEBUG_OPTS
# save memory
del tokens

deparsed.mod_globs = find_globals(deparsed.ast, set())
deparsed.mod_globs, nonlocals = find_globals_and_nonlocals(deparsed.ast,
set(), set(),
co, version)

assert not nonlocals

# convert leading '__doc__ = "..." into doc string
try:
@@ -12,4 +12,4 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='3.0.0'
VERSION='3.1.0'