Mirror of https://github.com/rocky/python-uncompyle6.git (synced 2025-08-04 01:09:52 +08:00)

Compare commits: release-2. ... release-2. (73 commits)
Commits in this compare (SHA1; author and date columns were empty in this view):

114fe11e66, b131c20e99, 5db1178b3e, 7ece296f76, 5035d5433b, 78a5b620a7, e851c0d46a,
a760188724, ad345ef94a, d050dd3adb, 9392103998, 707770049f, ec0669367f, 3f40c16587,
66518baed0, 21023fea74, 66741d16ba, e02ebef45d, 99fce6dfd7, 7b8c5e091c, 77caf515ea,
e4c0d56947, 4827b1e994, 2b46e71264, 84c2932bc5, 874b3c9d31, f6a997befc, 136f42a610,
c43e734f37, 2327f0fdfa, 0afcd31bd5, 6f097ff1ca, 8eb1a16f5b, ed9fb64e72, d002c667ae,
e56743cc14, 39814fab8b, 970774ab95, 723fa5dfed, 4d4e59c40b, a92e6c9688, 6c546fe6e1,
9b1dd0f26c, 0ff0c97a95, 3e988be075, eb64a03dfa, 9aa4e2b9ae, c147514e9e, 813229ac45,
f1a947f106, 2f51067a9d, e3f4beeb74, 7d58dcf6dd, bfff1b4e9f, e6761e13bb, c7c0a98982,
eebec48308, da50394841, 13d5cd1a58, 08dcc7d820, 7755563b65, b43cbc050d, db7a26d47d,
92166452c1, 96fa3ef381, 755415c7d8, b168e1de55, 38eed14b41, 2c993f8c32, 65858a4c74,
263c63e009, 813bce4697, a5d2237435
@@ -8,6 +8,7 @@ python:
 - '2.6'
 - '3.3'
 - '3.4'
 - '3.2'

install:
 - pip install -r requirements.txt
ChangeLog (339)
@@ -1,6 +1,343 @@
2017-01-11 rocky <rb@dustyfeet.com>

    * uncompyle6/version.py: Get ready for release 2.10.9

2017-01-11 R. Bernstein <rocky@users.noreply.github.com>

    * : Merge pull request #79 from rocky/revert-78-patch-1 Revert "fix bug : not generate all files when use "-ro""

2017-01-11 R. Bernstein <rocky@users.noreply.github.com>

    * : Merge pull request #78 from jlugjb/patch-1 fix bug : not generate all files when use "-ro"

2017-01-10 rocky <rb@dustyfeet.com>

    * test/simple_source/bug35/03_double_star_unpack.py, uncompyle6/parsers/parse3.py,
      uncompyle6/semantics/pysource.py: Improve BUILD_xxx_UNPACK slightly

2017-01-09 rocky <rb@dustyfeet.com>

    * test/simple_source/bug35/03_async_await.py, uncompyle6/parsers/parse3.py,
      uncompyle6/semantics/pysource.py: Add async_call_function for 3.5+

2017-01-09 rocky <rb@dustyfeet.com>

    * : Reinstate test

2017-01-08 rocky <rb@dustyfeet.com>

    * : Works now

2017-01-08 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse30.py, uncompyle6/scanners/scanner3.py: Python 3.0 decompile bugs

2017-01-08 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py,
      uncompyle6/scanners/scanner30.py: Towards better 3.0 decompilation Sync scanner2 and scanner3 better

2017-01-08 rocky <rb@dustyfeet.com>

    * test/simple_source/bug35/03_while-if-break.py, uncompyle6/parsers/parse35.py,
      uncompyle6/scanner.py, uncompyle6/scanners/scanner3.py: Fix 3.5, 3.6 while true if/break bug

2017-01-08 rocky <rb@dustyfeet.com>

    * uncompyle6/__init__.py, uncompyle6/main.py, uncompyle6/semantics/consts.py,
      uncompyle6/semantics/fragments.py, uncompyle6/semantics/pysource.py: Misc cleanups
      Favor "decompile" over "uncompyle" since "decompile" is in common use
      Reduce size of pysource.py by splitting out constants

2017-01-08 rocky <rb@dustyfeet.com>

    * test/simple_source/bug35/03_async_await.py, uncompyle6/parsers/parse35.py,
      uncompyle6/scanners/scanner3.py, uncompyle6/semantics/pysource.py: Add 3.5+ async with/for ..
      scanner3.py: 3.6 bytecode vs wordcode fix

2017-01-07 rocky <rb@dustyfeet.com>

    * test/simple_source/bug35/03_async_await.py, uncompyle6/parsers/parse35.py,
      uncompyle6/semantics/pysource.py: Start to add 3.5+ await and async

2017-01-07 rocky <rb@dustyfeet.com>

    * test/simple_source/bug31/04_def_annotate.py, uncompyle6/semantics/make_function.py:
      More Python 3 annotation bugs

2017-01-07 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse31.py, uncompyle6/parsers/parse32.py,
      uncompyle6/semantics/make_function.py, uncompyle6/semantics/pysource.py:
      Fix some errors in deparsing Python 3 annotations

2017-01-07 rocky <rb@dustyfeet.com>

    * uncompyle6/semantics/make_function.py: Small Pyhton 3.x annotate bug

2017-01-03 rocky <rb@dustyfeet.com>

    * README.rst: Note what's up with Python 3 decompile quality

2017-01-03 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner3.py: 3.5 continue check is needed on 3.6

2017-01-03 rocky <rb@dustyfeet.com>

    * test/test_pyenvlib.py, uncompyle6/parsers/parse36.py, uncompyle6/scanners/scanner3.py:
      Towards better 3.6 support

2017-01-02 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse35.py, uncompyle6/scanners/scanner3.py: Python 3.5 continue detection bug

2017-01-01 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner3.py: add come_from for setup_finally and setup_except

2017-01-01 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse35.py, uncompyle6/scanners/scanner3.py: Towards fixing Python 3.5 return bugs

2017-01-01 rocky <rb@dustyfeet.com>

    * README.rst: Note how to verify correctness ... with --verify, --weak-verify and cross checking with pycdc

2016-12-31 rocky <rb@dustyfeet.com>

    * ChangeLog, NEWS, uncompyle6/version.py: Get ready for release 2.9.9

2016-12-31 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse26.py: 2.x list_if may have a THEN in it

2016-12-31 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner3.py: Towards fixing a Python 3.3 return/continue bug

2016-12-30 rocky <rb@dustyfeet.com>

    * uncompyle6/main.py: On --verify if we can't unbuffer output, don't

2016-12-29 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py:
      dectect_structure() -> detect_control_flow()

2016-12-29 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py:
      DRY code and emitted Python 3 source * Python 3: break; continue -> break
      * Use variable in detect_structure for pre[rtarget]
      * Make Python 2 and Python 3 detect_structure more alie

2016-12-29 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner3.py: More if/then detection in Python 3.x

2016-12-29 R. Bernstein <rocky@users.noreply.github.com>

    * : Merge pull request #73 from rocky/then-crap Add THEN token to improve Python 2.2-2.6 control flow detection

2016-12-28 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse3.py, uncompyle6/scanners/tok.py: Misc bugs

2016-12-28 rocky <rb@dustyfeet.com>

    * : commit 723fa5dfed5bb198c66741c594e2c277ded88970 Author: rocky <rb@dustyfeet.com>
      Date: Wed Dec 28 18:57:09 2016 -0500

2016-12-28 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse32.py, uncompyle6/parsers/parse33.py:
      Towards fixing a 3.2 while true: ... break bug

2016-12-28 rocky <rb@dustyfeet.com>

    * test/Makefile, uncompyle6/main.py, uncompyle6/parsers/parse26.py, uncompyle6/verify.py:
      Bugs in Python 2.6- "and" and "lambda" handling .. and clean up verify output

2016-12-27 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse26.py, uncompyle6/scanners/scanner2.py,
      uncompyle6/scanners/scanner26.py, uncompyle6/semantics/pysource.py:
      WIP : Add THEN to disambigute from "and"

2016-12-27 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner2.py: Make 2.6 and 2.7 ingest more alike

2016-12-26 rocky <rb@dustyfeet.com>

    * : Update 2.7 bytecode file for last fix

2016-12-26 R. Bernstein <rocky@users.noreply.github.com>

    * : Merge pull request #71 from jiangpengcheng/tupple_bug tupples which contain only 1 element need a comma

2016-12-26 jiangpch <jiangpch@gohighsec.com>

    * uncompyle6/semantics/pysource.py: tupples which contain only 1 element need a comma

2016-12-26 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/parse25.py: fix bug in using python2 AST rules in python 2.5

2016-12-26 rocky <rb@dustyfeet.com>

    * : commit f1a947f106b231fb1480ba301b15e3ceaf78c94f Author: rocky <rb@dustyfeet.com>
      Date: Mon Dec 26 00:43:02 2016 -0500

2016-12-25 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner23.py, uncompyle6/scanners/scanner24.py,
      uncompyle6/semantics/pysource.py, uncompyle6/verify.py:
      Scanner call fixes. NAME_MODULE removal for <=2.4

2016-12-24 rocky <rb@dustyfeet.com>

    * uncompyle6/parsers/astnode.py, uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse26.py,
      uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse36.py, uncompyle6/scanners/scanner15.py,
      uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner21.py,
      uncompyle6/scanners/scanner22.py, uncompyle6/semantics/fragments.py,
      uncompyle6/semantics/pysource.py: Lint

2016-12-24 rocky <rb@dustyfeet.com>

    * uncompyle6/semantics/pysource.py: Remove stray debug hook

2016-12-20 rocky <rb@dustyfeet.com>

    * uncompyle6/semantics/pysource.py: Bang on 3.6 build_map_unpack_with_call
      Probably will fix better in the future.

2016-12-18 rocky <rb@dustyfeet.com>

    * uncompyle6/bin/pydisassemble.py, uncompyle6/bin/uncompile.py, uncompyle6/parsers/parse2.py,
      uncompyle6/parsers/parse25.py, uncompyle6/parsers/parse27.py, uncompyle6/parsers/parse3.py,
      uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py:
      Python flake8 crap Was testing realgud's C-x!8 (goto flake8 warning/error)

2016-12-18 rocky <rb@dustyfeet.com>

    * pytest/.gitignore, test/simple_source/bug25/02_try_else.py, uncompyle6/parsers/parse25.py,
      uncompyle6/parsers/parse26.py: Python 2.5 mistaken try/else

2016-12-17 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner25.py:
      show-asm on python2.5 is optional make scanner2 look a little more like scanner3

2016-12-16 rocky <rb@dustyfeet.com>

    * NEWS: Release 2.9.8 news

2016-12-16 rocky <rb@dustyfeet.com>

    * __pkginfo__.py, uncompyle6/version.py: Get ready for release 2.9.8

2016-12-16 rocky <rb@dustyfeet.com>

    * test/simple_source/bug35/02_build_map_unpack_with_call.py, uncompyle6/parsers/parse3.py,
      uncompyle6/parsers/parse35.py, uncompyle6/parsers/parse36.py, uncompyle6/scanners/scanner3.py,
      uncompyle6/semantics/pysource.py: Start to handle 3.5 build_map_unpack_with_call
      3.6 also started but needs even more work

2016-12-15 rocky <rb@dustyfeet.com>

    * uncompyle6/scanner.py, uncompyle6/scanners/scanner3.py: Some Python 3.6 bytecode->wordcode fixes

2016-12-13 rocky <rb@dustyfeet.com>

    * uncompyle6/semantics/fragments.py: Was passing wrong type

2016-12-11 rocky <rb@dustyfeet.com>

    * uncompyle6/parser.py: option -g: show start-end range when possible

2016-12-11 rocky <rb@dustyfeet.com>

    * uncompyle6/semantics/make_function.py, uncompyle6/verify.py: two misc changes
      - track print_docstring move to help (used in python 3.1)
      - verify: allow RETURN_VALUE to match RETURN_END_IF

2016-12-10 rocky <rb@dustyfeet.com>

    * .travis.yml, test/Makefile: 3.2 needs --weak-verify

2016-12-10 rocky <rb@dustyfeet.com>

    * .travis.yml: Try testing on 3.2

2016-12-10 rocky <rb@dustyfeet.com>

    * __pkginfo__.py, uncompyle6/bin/uncompile.py: Can run in Python 3.1 and Python 3.2

2016-12-10 rocky <rb@dustyfeet.com>

    * test/Makefile, uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner3.py:
      Another python 3 ELSE fixes and ... Makefile: - test python 3.0 bytecode
      - turn full --verify back on Python 3.x

2016-12-10 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/scanner3.py: Another faulty Python3 ELSE tag remove

2016-12-09 rocky <rb@dustyfeet.com>

    * pytest/test_grammar.py: Grammar check: ELSE on RHS is ok.

2016-12-09 rocky <rb@dustyfeet.com>

    * uncompyle6/scanners/tok.py, uncompyle6/verify.py: Back of some of the verification changes

2016-12-09 rocky <rb@dustyfeet.com>

    * : commit a5d2237435ee51e681c73db9e7ea379d56456205 Author: rocky <rb@dustyfeet.com>
      Date: Fri Dec 9 21:10:10 2016 -0500

2016-12-04 rocky <rb@dustyfeet.com>

    * uncompyle6/version.py: Get ready for release 2.9.7
    * ChangeLog, NEWS, uncompyle6/main.py, uncompyle6/parser.py, uncompyle6/parsers/parse26.py,
      uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse34.py, uncompyle6/parsers/parse35.py,
      uncompyle6/scanner.py, uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner23.py,
      uncompyle6/scanners/scanner24.py, uncompyle6/scanners/scanner3.py, uncompyle6/scanners/tok.py,
      uncompyle6/semantics/make_function.py, uncompyle6/semantics/pysource.py, uncompyle6/verify.py,
      uncompyle6/version.py: Get ready for release 2.9.7 Some of the many lint things.
      Linting is kind of stupid though.

2016-11-28 rocky <rb@dustyfeet.com>
NEWS (33)
@@ -1,3 +1,36 @@
uncompyle6 2.9.9 2016-12-16

- Remaining Python 3.5 ops handled
  (this also means more Python 3.6 ops are handled)
- Python 3.5 and 3.6 async and await handled
- Python 3.0 decompilation improved
- Python 3 annotations fixed
- Better control-flow detection
- Code cleanups and misc bug fixes

uncompyle6 2.9.8 2016-12-16

- Better control-flow detection
- pseudo instruction THEN in 2.x to disambiguate if from and
- fix bug in --verify option
- DRY (a little) control-flow detection
- fix syntax in tuples with one element
- if AST rule inheritance in Python 2.5
- NAME_MODULE removal for Python <= 2.4
- verify call fixes for Python <= 2.4
- more Python lint

uncompyle6 2.9.7 2016-12-16

- Start to handle 3.5/3.6 build_map_unpack_with_call
- Some Python 3.6 bytecode to wordcode conversion fixes
- option -g: show start-end range when possible
- track print_docstring move to help (used in python 3.1)
- verify: allow RETURN_VALUE to match RETURN_END_IF
- some 3.2 compatibility
- Better Python 3 control flow detection by adding Pseudo ELSE opcodes

uncompyle6 2.9.6 2016-12-04

- Shorten Python3 grammars with + and *
README.rst (27)
@@ -43,10 +43,10 @@ information.
Requirements
------------

This project requires Python 2.6 or later, PyPy 3-2.4, or PyPy-5.0.1.
Python versions 2.3-2.7 are supported in the python-2.4 branch.
This project requires Python 2.6 or later, PyPy 3-2.4, or PyPy-5.0.1.
Python versions 2.4-2.7 are supported in the python-2.4 branch.
The bytecode files it can read has been tested on Python bytecodes from
versions 2.1-2.7, and 3.2-3.6 and the above-mentioned PyPy versions.
versions 1.5, 2.1-2.7, and 3.2-3.6 and the above-mentioned PyPy versions.

Installation
------------

@@ -93,6 +93,16 @@ For usage help:

    $ uncompyle6 -h

If you want strong verification of the correctness of the
decompilation process, add the `--verify` option. But there are
situations where this will indicate a failure, although the generated
program is semantically equivalent. Using option `--weak-verify` will
tell you if there is something definitely wrong. Generally, large
swaths of code are decompiled correctly, if not the entire program.

You can also cross compare the results with pycdc_ . Since they work
differently, bugs here often aren't in that, and vice versa.
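As a concrete illustration of the verification options just described (the
file name here is only an example), a typical check might look like:

    $ uncompyle6 --verify foo.pyc        # decompile, then verify against the original bytecode
    $ uncompyle6 --weak-verify foo.pyc   # weaker check: reports only definite failures

If either check raises doubts, cross-checking the same file with pycdc_ is a
useful second opinion, since the two decompilers rarely share the same bugs.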
Known Bugs/Restrictions
-----------------------

@@ -128,10 +138,13 @@ on the lower end Python versions which is more difficult for us to
handle since we don't have a Python interpreter for versions 1.5, 1.6,
and 2.0.

Python 3.0 support is weak; Python 3.5 largely works, but still has
some bugs in it. Python 3.6 changes things drastically by using word
codes rather than byte codes. That has been addressed, but then it also
changes function call opcodes and its semantics.
In the Python 3 series, Python support is strongest around 3.4 or
3.3 and drops off as you move further away from those versions. Python
3.5 largely works, but still has some bugs in it and is missing some
opcodes. Python 3.6 changes things drastically by using word codes
rather than byte codes. That has been addressed, but then it also
changes function call opcodes and its semantics and has more problems
with control flow than 3.5 has.

Currently not all Python magic numbers are supported. Specifically in
some versions of Python, notably Python 3.6, the magic number has
@@ -20,6 +20,8 @@ classifiers = ['Development Status :: 5 - Production/Stable',
    'Programming Language :: Python :: 2.5',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3.1',
    'Programming Language :: Python :: 3.2',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
@@ -38,7 +40,7 @@ entry_points={
    ]}
ftp_url = None
install_requires = ['spark-parser >= 1.5.1, < 1.6.0',
                    'xdis >= 3.2.3, < 3.3.0']
                    'xdis >= 3.2.4, < 3.3.0']
license = 'MIT'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
pytest/.gitignore (1, vendored)
@@ -1 +1,2 @@
/.hypothesis
/__pycache__
@@ -41,7 +41,7 @@ def test_grammar():
    """
    JUMP_BACK CONTINUE RETURN_END_IF
    COME_FROM COME_FROM_EXCEPT COME_FROM_LOOP COME_FROM_WITH
    COME_FROM_FINALLY
    COME_FROM_FINALLY ELSE
    LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
    LAMBDA_MARKER RETURN_LAST
    """.split())
@@ -22,9 +22,9 @@ check:
#: Run working tests from Python 2.6 or 2.7
check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check-bytecode-1 check-2.7-ok

#: Run working tests from Python 3.1
#: Run working tests from Python 3.0
check-3.0: check-bytecode
	@echo Python 3.0 testing not done yet
	$(PYTHON) test_pythonlib.py --bytecode-3.0 --weak-verify $(COMPILE)

#: Run working tests from Python 3.1
check-3.1: check-bytecode
@@ -36,11 +36,11 @@ check-3.2: check-bytecode

#: Run working tests from Python 3.3
check-3.3: check-bytecode
	$(PYTHON) test_pythonlib.py --bytecode-3.3 --weak-verify $(COMPILE)
	$(PYTHON) test_pythonlib.py --bytecode-3.3 --verify $(COMPILE)

#: Run working tests from Python 3.4
check-3.4: check-bytecode check-3.4-ok check-2.7-ok
	$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify $(COMPILE)
	$(PYTHON) test_pythonlib.py --bytecode-3.4 --verify $(COMPILE)

#: Run working tests from Python 3.5
check-3.5: check-bytecode
@@ -100,7 +100,7 @@ check-bytecode-2.5:

#: Check deparsing Python 2.6
check-bytecode-2.6:
	$(PYTHON) test_pythonlib.py --bytecode-2.6
	$(PYTHON) test_pythonlib.py --bytecode-2.6 --weak-verify

#: Check deparsing Python 2.7
check-bytecode-2.7:
New binary test files added (binary content not shown):
    test/bytecode_2.5/02_try_else.pyc
    test/bytecode_2.6/04_if_and_bug.pyc
    test/bytecode_3.4/05_while_true_break.pyc
    test/bytecode_3.5/02_build_map_unpack_with_call.pyc
    test/bytecode_3.5/03_async_await.pyc
    test/bytecode_3.5/03_double_star_unpack.pyc
    test/bytecode_3.5/03_while-if-break.pyc
    test/bytecode_3.5/04_def_annotate.pyc
    test/bytecode_3.6/00_assign.pyc
    test/bytecode_3.6/00_docstring.pyc
    test/bytecode_3.6/00_import.pyc
    test/bytecode_3.6/01_matrix_multiply.pyc
    test/bytecode_3.6/02_build_map_unpack_with_call.pyc
    test/bytecode_3.6/03_async_await.pyc
    test/bytecode_3.6/03_while-if-break.pyc
    test/bytecode_3.6/03_while_else.pyc
    test/bytecode_3.6/04_raise.pyc
    test/bytecode_3.6/05_try_finally_pass.pyc
    test/bytecode_3.6/10_if_break_finally.pyc

Seven further changed binary files are not named in this view.
test/simple_source/bug25/02_try_else.py (new file, 12 lines)
@@ -0,0 +1,12 @@
# Python 2.5 bug
# Was turning into tryelse when there in fact is no else.
def options(self, section):
    """Return a list of option names for the given section name."""
    try:
        opts = self._sections[section].copy()
    except KeyError:
        raise NoSectionError(section)
    opts.update(self._defaults)
    if '__name__' in opts:
        del opts['__name__']
    return opts.keys()
@@ -8,3 +8,12 @@ def open(file, mode = "r", buffering = None,
         encoding = None, errors = None,
         newline = None, closefd = True) -> "IOBase":
    return text

def foo(x: 'an argument that defaults to 5' = 5):
    print(x)

def div(a: dict(type=float, help='the dividend'),
        b: dict(type=float, help='the divisor (must be different than 0)')
        ) -> dict(type=float, help='the result of dividing a by b'):
    """Divide a by b"""
    return a / b
@@ -0,0 +1 @@
f(**a, **b)
test/simple_source/bug35/03_async_await.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Python 3.5+ async and await
async def await_test(asyncio):
    reader, writer = await asyncio.open_connection(80)
    await bar()

async def afor_test():

    async for i in [1,2,3]:
        x = i


async def afor_else_test():

    async for i in [1,2,3]:
        x = i
    else:
        z = 4


async def awith_test():
    async with i:
        print(i)

async def awith_as_test():
    async with 1 as i:
        print(i)
test/simple_source/bug35/03_double_star_unpack.py (new file, 9 lines)
@@ -0,0 +1,9 @@
# Bug in Python 3.5 is getting the two star'd arguments right.
def sum(a,b,c,d):
    return a + b + c + d

args=(1,2)
sum(*args, *args)

# FIXME: this is handled incorrectly
# (*c,) = (3,4)
test/simple_source/bug35/03_while-if-break.py (new file, 7 lines)
@@ -0,0 +1,7 @@
# Python 3.5 and 3.6 break inside a
# while True and if / break
def display_date(loop):
    while True:
        if loop.time():
            break
        x = 5
@@ -5,3 +5,8 @@ def some_function():
def some_other_function():
    some_variable, = some_function()
    print(some_variable)

empty_tup = ()
one_item_tup = ("item1", )
one_item_tup_without_parentheses = "item",
many_items_tup = ("item1", "item2", "item3")
@@ -33,7 +33,7 @@ TEST_VERSIONS=('2.3.7', '2.4.6', '2.5.6', '2.6.9',
               '2.7.10', '2.7.11', '2.7.12',
               '3.0.1', '3.1.5', '3.2.6',
               '3.3.5', '3.3.6',
               '3.4.2', '3.5.1')
               '3.4.2', '3.5.1', '3.6.0')

target_base = '/tmp/py-dis/'
lib_prefix = os.path.join(os.environ['HOME'], '.pyenv/versions')
@@ -47,7 +47,10 @@ import uncompyle6.semantics.pysource
import uncompyle6.semantics.fragments

# Export some functions
from uncompyle6.main import uncompyle_file
from uncompyle6.main import decompile_file

# For compaitility
uncompyle_file = decompile_file

# Conventience functions so you can say:
# from uncompyle6 import deparse_code
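The hunk above renames the library's file-level entry point from uncompyle_file
to decompile_file while keeping the old name as an alias. A minimal usage sketch
of the renamed call, based on the decompile_file signature shown later in the
main.py hunk (the file name below is only illustrative):

    import sys
    from uncompyle6 import decompile_file  # new name; uncompyle_file remains as an alias

    # Write the reconstructed source for example.pyc to stdout.
    decompile_file("example.pyc", sys.stdout)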
@@ -62,7 +62,6 @@ Type -h for for full help.""" % program
        print(Usage_short, file=sys.stderr)
        sys.exit(1)

    for file in files:
        if os.path.exists(files[0]):
            disassemble_file(file, sys.stdout, native)
@@ -66,9 +66,9 @@ def usage():

def main_bin():
    if not (sys.version_info[0:2] in ((2, 6), (2, 7),
                                      (3, 2), (3, 3),
                                      (3, 1), (3, 2), (3, 3),
                                      (3, 4), (3, 5), (3, 6))):
        print('Error: %s requires Python 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, or 3.6' % program,
        print('Error: %s requires Python 2.6-2.7, or 3.1-3.6' % program,
              file=sys.stderr)
        sys.exit(-1)

@@ -142,7 +142,6 @@ def main_bin():
    if src_base:
        sb_len = len( os.path.join(src_base, '') )
        files = [f[sb_len:] for f in files]
        del sb_len

    if not files:
        print("No files given", file=sys.stderr)

@@ -225,7 +224,6 @@ def main_bin():
    except (KeyboardInterrupt, OSError):
        pass

    if timestamp:
        print(time.strftime(timestampfmt))
@@ -11,7 +11,7 @@ from uncompyle6.linenumbers import line_number_mapping
|
||||
|
||||
from xdis.load import load_module
|
||||
|
||||
def uncompyle(
|
||||
def decompile(
|
||||
bytecode_version, co, out=None, showasm=None, showast=False,
|
||||
timestamp=None, showgrammar=False, code_objects={},
|
||||
source_size=None, is_pypy=False, magic_int=None):
|
||||
@@ -48,8 +48,10 @@ def uncompyle(
|
||||
# deparsing failed
|
||||
raise pysource.SourceWalkerError(str(e))
|
||||
|
||||
# For compatiblity
|
||||
uncompyle = decompile
|
||||
|
||||
def uncompyle_file(filename, outstream=None, showasm=None, showast=False,
|
||||
def decompile_file(filename, outstream=None, showasm=None, showast=False,
|
||||
showgrammar=False):
|
||||
"""
|
||||
decompile Python byte-code file (.pyc)
|
||||
@@ -62,16 +64,20 @@ def uncompyle_file(filename, outstream=None, showasm=None, showast=False,
|
||||
|
||||
if type(co) == list:
|
||||
for con in co:
|
||||
uncompyle(version, con, outstream, showasm, showast,
|
||||
decompile(version, con, outstream, showasm, showast,
|
||||
timestamp, showgrammar, code_objects=code_objects,
|
||||
is_pypy=is_pypy, magic_int=magic_int)
|
||||
else:
|
||||
uncompyle(version, co, outstream, showasm, showast,
|
||||
decompile(version, co, outstream, showasm, showast,
|
||||
timestamp, showgrammar,
|
||||
code_objects=code_objects, source_size=source_size,
|
||||
is_pypy=is_pypy, magic_int=magic_int)
|
||||
co = None
|
||||
|
||||
# For compatiblity
|
||||
uncompyle_file = decompile_file
|
||||
|
||||
|
||||
# FIXME: combine into an options parameter
|
||||
def main(in_base, out_base, files, codes, outfile=None,
|
||||
showasm=None, showast=False, do_verify=False,
|
||||
@@ -101,12 +107,6 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
|
||||
tot_files = okay_files = failed_files = verify_failed_files = 0
|
||||
|
||||
# for code in codes:
|
||||
# version = sys.version[:3] # "2.5"
|
||||
# with open(code, "r") as f:
|
||||
# co = compile(f.read(), "", "exec")
|
||||
# uncompyle(sys.version[:3], co, sys.stdout, showasm=showasm, showast=showast)
|
||||
|
||||
for filename in files:
|
||||
infile = os.path.join(in_base, filename)
|
||||
if not os.path.exists(infile):
|
||||
@@ -126,8 +126,9 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
prefix = prefix[:-len('.py')]
|
||||
junk, outfile = tempfile.mkstemp(suffix=".py",
|
||||
prefix=prefix)
|
||||
# Unbuffer output
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
# Unbuffer output if possible
|
||||
buffering = -1 if sys.stdout.isatty() else 0
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', buffering)
|
||||
tee = subprocess.Popen(["tee", outfile], stdin=subprocess.PIPE)
|
||||
os.dup2(tee.stdin.fileno(), sys.stdout.fileno())
|
||||
os.dup2(tee.stdin.fileno(), sys.stderr.fileno())
|
||||
@@ -141,7 +142,7 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
|
||||
# Try to uncompile the input file
|
||||
try:
|
||||
uncompyle_file(infile, outstream, showasm, showast, showgrammar)
|
||||
decompile_file(infile, outstream, showasm, showast, showgrammar)
|
||||
tot_files += 1
|
||||
except (ValueError, SyntaxError, ParserError, pysource.SourceWalkerError) as e:
|
||||
sys.stdout.write("\n")
|
||||
@@ -230,11 +231,11 @@ def status_msg(do_verify, tot_files, okay_files, failed_files,
|
||||
verify_failed_files):
|
||||
if tot_files == 1:
|
||||
if failed_files:
|
||||
return "decompile failed"
|
||||
return "\n# decompile failed"
|
||||
elif verify_failed_files:
|
||||
return "decompile verify failed"
|
||||
return "\n# decompile verify failed"
|
||||
else:
|
||||
return "Successfully decompiled file"
|
||||
return "\n# Successfully decompiled file"
|
||||
pass
|
||||
pass
|
||||
mess = "decompiled %i files: %i okay, %i failed" % (tot_files, okay_files, failed_files)
|
||||
|
@@ -73,6 +73,11 @@ class PythonParser(GenericASTBuilder):
|
||||
"""Customized format and print for our kind of tokens
|
||||
which gets called in debugging grammar reduce rules
|
||||
"""
|
||||
def fix(c):
|
||||
s = str(c)
|
||||
i = s.find('_')
|
||||
return s if i == -1 else s[:i]
|
||||
|
||||
prefix = ''
|
||||
if parent and tokens:
|
||||
p_token = tokens[parent]
|
||||
@@ -81,8 +86,11 @@ class PythonParser(GenericASTBuilder):
|
||||
else:
|
||||
prefix = ' '
|
||||
if hasattr(p_token, 'offset'):
|
||||
prefix += "%3s " % str(p_token.offset)
|
||||
prefix += " "
|
||||
prefix += "%3s" % fix(p_token.offset)
|
||||
if len(rule[1]) > 1:
|
||||
prefix += '-%-3s ' % fix(tokens[i-1].offset)
|
||||
else:
|
||||
prefix += ' '
|
||||
else:
|
||||
prefix = ' '
|
||||
|
||||
@@ -264,8 +272,7 @@ class PythonParser(GenericASTBuilder):
|
||||
|
||||
# Zero or more COME_FROMs
|
||||
# loops can have this
|
||||
_come_from ::= _come_from COME_FROM
|
||||
_come_from ::=
|
||||
_come_from ::= COME_FROM*
|
||||
|
||||
# Zero or one COME_FROM
|
||||
# And/or expressions have this
|
||||
|
@@ -28,9 +28,9 @@ class AST(spark_AST):
|
||||
i = 0
|
||||
for node in self:
|
||||
if hasattr(node, '__repr1__'):
|
||||
if enumerate_children:
|
||||
if enumerate_children:
|
||||
child = node.__repr1__(indent, i)
|
||||
else:
|
||||
else:
|
||||
child = node.__repr1__(indent, None)
|
||||
else:
|
||||
inst = node.format(line_prefix='L.')
|
||||
|
@@ -272,7 +272,7 @@ class Python2Parser(PythonParser):
|
||||
continue
|
||||
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
thousands = (v//1024)
|
||||
thirty32s = ((v//32)%32)
|
||||
thirty32s = ((v//32) % 32)
|
||||
if thirty32s > 0:
|
||||
rule = "expr32 ::=%s" % (' expr' * 32)
|
||||
self.add_unique_rule(rule, opname_base, v, customize)
|
||||
@@ -282,7 +282,7 @@ class Python2Parser(PythonParser):
|
||||
opname_base, v, customize)
|
||||
self.seen1024 = True
|
||||
rule = ('build_list ::= ' + 'expr1024 '*thousands +
|
||||
'expr32 '*thirty32s + 'expr '*(v%32) + opname)
|
||||
'expr32 '*thirty32s + 'expr '*(v % 32) + opname)
|
||||
elif opname == 'LOOKUP_METHOD':
|
||||
# A PyPy speciality - DRY with parse3
|
||||
self.add_unique_rule("load_attr ::= expr LOOKUP_METHOD",
|
||||
|
@@ -13,7 +13,7 @@ class Python25Parser(Python26Parser):
|
||||
self.customized = {}
|
||||
|
||||
def p_misc25(self, args):
|
||||
'''
|
||||
"""
|
||||
# If "return_if_stmt" is in a loop, a JUMP_BACK can be emitted. In 2.6 the
|
||||
# JUMP_BACK doesn't appear
|
||||
|
||||
@@ -33,7 +33,33 @@ class Python25Parser(Python26Parser):
|
||||
|
||||
with_cleanup ::= LOAD_FAST DELETE_FAST WITH_CLEANUP END_FINALLY
|
||||
with_cleanup ::= LOAD_NAME DELETE_NAME WITH_CLEANUP END_FINALLY
|
||||
'''
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python25Parser, self).add_custom_rules(tokens, customize)
|
||||
if self.version == 2.5:
|
||||
self.check_reduce['tryelsestmt'] = 'tokens'
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python25Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
lhs = rule[0]
|
||||
if lhs in ('tryelsestmt', ):
|
||||
# The end of the else part of try/else come_from has to come
|
||||
# from an END_FINALLY statement
|
||||
if tokens[last-1].type.startswith('COME_FROM'):
|
||||
end_finally_offset = int(tokens[last-1].pattr)
|
||||
current = first
|
||||
while current < last:
|
||||
offset = tokens[current].offset
|
||||
if offset == end_finally_offset:
|
||||
return tokens[current].type != 'END_FINALLY'
|
||||
current += 1
|
||||
return False
|
||||
|
||||
|
||||
class Python25ParserSingle(Python26Parser, PythonParserSingle):
|
||||
pass
|
||||
|
@@ -42,7 +42,7 @@ class Python26Parser(Python2Parser):
|
||||
try_middle come_froms
|
||||
|
||||
tryelsestmt ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
try_middle else_suite come_froms
|
||||
try_middle else_suite COME_FROM
|
||||
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_FORWARD COME_FROM POP_TOP
|
||||
|
||||
@@ -113,16 +113,10 @@ class Python26Parser(Python2Parser):
|
||||
|
||||
break_stmt ::= BREAK_LOOP JUMP_BACK
|
||||
|
||||
# Semantic actions want the else to be at position 3
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
|
||||
# Semantic actions want else_suitel to be at index 3
|
||||
ifelsestmtl ::= testexpr c_stmts_opt jb_cf_pop else_suitel
|
||||
ifelsestmtc ::= testexpr c_stmts_opt ja_cf_pop else_suitec
|
||||
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
|
||||
|
||||
# Semantic actions want suite_stmts_opt to be at index 3
|
||||
withstmt ::= expr setupwith SETUP_FINALLY suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM WITH_CLEANUP END_FINALLY
|
||||
@@ -159,6 +153,29 @@ class Python26Parser(Python2Parser):
|
||||
|
||||
while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
|
||||
|
||||
ifstmt ::= testexpr_then _ifstmts_jump
|
||||
|
||||
# Semantic actions want the else to be at position 3
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr_then c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
ifelsestmt ::= testexpr_then c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
|
||||
# Semantic actions want else_suitel to be at index 3
|
||||
ifelsestmtl ::= testexpr_then c_stmts_opt jb_cf_pop else_suitel
|
||||
ifelsestmtc ::= testexpr_then c_stmts_opt ja_cf_pop else_suitec
|
||||
|
||||
iflaststmt ::= testexpr_then c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
|
||||
|
||||
testexpr_then ::= testtrue_then
|
||||
testexpr_then ::= testfalse_then
|
||||
testtrue_then ::= expr jmp_true_then
|
||||
testfalse_then ::= expr jmp_false_then
|
||||
|
||||
jmp_false_then ::= JUMP_IF_FALSE THEN POP_TOP
|
||||
jmp_true_then ::= JUMP_IF_TRUE THEN POP_TOP
|
||||
|
||||
# Common with 2.7
|
||||
while1stmt ::= SETUP_LOOP return_stmts bp_come_from
|
||||
while1stmt ::= SETUP_LOOP return_stmts COME_FROM
|
||||
@@ -177,11 +194,11 @@ class Python26Parser(Python2Parser):
|
||||
|
||||
list_iter ::= list_if JUMP_BACK
|
||||
list_iter ::= list_if JUMP_BACK COME_FROM POP_TOP
|
||||
list_compr ::= BUILD_LIST_0 DUP_TOP
|
||||
designator list_iter del_stmt
|
||||
list_compr ::= BUILD_LIST_0 DUP_TOP
|
||||
designator list_iter JUMP_BACK del_stmt
|
||||
lc_body ::= LOAD_NAME expr LIST_APPEND
|
||||
list_compr ::= BUILD_LIST_0 DUP_TOP
|
||||
designator list_iter del_stmt
|
||||
list_compr ::= BUILD_LIST_0 DUP_TOP
|
||||
designator list_iter JUMP_BACK del_stmt
|
||||
lc_body ::= LOAD_NAME expr LIST_APPEND
|
||||
lc_body ::= LOAD_FAST expr LIST_APPEND
|
||||
|
||||
comp_for ::= SETUP_LOOP expr _for designator comp_iter jb_bp_come_from
|
||||
@@ -196,16 +213,16 @@ class Python26Parser(Python2Parser):
|
||||
genexpr_func ::= setup_loop_lf FOR_ITER designator comp_iter JUMP_BACK come_from_pop
|
||||
jb_bp_come_from
|
||||
genexpr ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1 COME_FROM
|
||||
|
||||
list_if ::= list_if ::= expr jmp_false_then list_iter
|
||||
'''
|
||||
|
||||
def p_ret26(self, args):
|
||||
'''
|
||||
ret_and ::= expr jmp_false ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr jmp_true ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr jmp_false expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
ret_cond ::= expr jmp_false expr ret_expr_or_cond
|
||||
ret_cond_not ::= expr jmp_true expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
ret_and ::= expr jmp_false ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr jmp_true ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr jmp_false_then expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
ret_cond ::= expr jmp_false_then expr ret_expr_or_cond
|
||||
ret_cond_not ::= expr jmp_true_then expr RETURN_END_IF POP_TOP ret_expr_or_cond
|
||||
|
||||
return_if_stmt ::= ret_expr RETURN_END_IF POP_TOP
|
||||
return_stmt ::= ret_expr RETURN_VALUE POP_TOP
|
||||
@@ -215,17 +232,37 @@ class Python26Parser(Python2Parser):
|
||||
'''
|
||||
|
||||
def p_except26(self, args):
|
||||
'''
|
||||
"""
|
||||
except_suite ::= c_stmts_opt jmp_abs POP_TOP
|
||||
'''
|
||||
"""
|
||||
|
||||
def p_misc26(self, args):
|
||||
'''
|
||||
"""
|
||||
conditional ::= expr jmp_false expr jf_cf_pop expr come_from_opt
|
||||
and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO COME_FROM POP_TOP _come_from
|
||||
'''
|
||||
|
||||
conditional_lambda ::= expr jmp_false_then return_if_stmt return_stmt LAMBDA_MARKER
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python26Parser, self).add_custom_rules(tokens, customize)
|
||||
self.check_reduce['and'] = 'AST'
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python26Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
|
||||
# Test that jmp_false jumps to the end of "and"
|
||||
# or that it jumps to the same place as the end of "and"
|
||||
jmp_false = ast[1][0]
|
||||
jmp_target = jmp_false.offset + jmp_false.attr + 3
|
||||
return not (jmp_target == tokens[last].offset or
|
||||
tokens[last].pattr == jmp_false.pattr)
|
||||
return False
|
||||
class Python26ParserSingle(Python2Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
@@ -115,8 +115,10 @@ if __name__ == '__main__':
|
||||
""".split()))
|
||||
remain_tokens = set(tokens) - opcode_set
|
||||
import re
|
||||
remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_\d+$', '', t)
|
||||
for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_CONT$', '', t)
|
||||
for t in remain_tokens])
|
||||
remain_tokens = set(remain_tokens) - opcode_set
|
||||
print(remain_tokens)
|
||||
# p.dumpGrammar()
|
||||
|
@@ -138,15 +138,24 @@ class Python3Parser(PythonParser):
|
||||
iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK
|
||||
iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK COME_FROM_LOOP
|
||||
|
||||
# These are used to keep AST indices the same
|
||||
jf_else ::= JUMP_FORWARD ELSE
|
||||
ja_else ::= JUMP_ABSOLUTE ELSE
|
||||
|
||||
# Note: in if/else kinds of statements, we err on the side
|
||||
# of missing "else" clauses. Therefore we include grammar
|
||||
# rules with and without ELSE.
|
||||
|
||||
ifelsestmt ::= testexpr c_stmts_opt JUMP_FORWARD else_suite COME_FROM
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_else else_suite _come_from
|
||||
|
||||
ifelsestmtc ::= testexpr c_stmts_opt JUMP_ABSOLUTE else_suitec
|
||||
ifelsestmtc ::= testexpr c_stmts_opt ja_else else_suitec
|
||||
|
||||
ifelsestmtr ::= testexpr return_if_stmts return_stmts
|
||||
|
||||
ifelsestmtl ::= testexpr c_stmts_opt JUMP_BACK else_suitel
|
||||
|
||||
|
||||
# FIXME: this feels like a hack. Is it just 1 or two
|
||||
# COME_FROMs? the parsed tree for this and even with just the
|
||||
# one COME_FROM for Python 2.7 seems to associate the
|
||||
@@ -366,14 +375,17 @@ class Python3Parser(PythonParser):
|
||||
'''
|
||||
|
||||
def p_expr3(self, args):
|
||||
'''
|
||||
"""
|
||||
conditional ::= expr jmp_false expr jf_else expr COME_FROM
|
||||
conditionalnot ::= expr jmp_true expr jf_else expr COME_FROM
|
||||
|
||||
|
||||
expr ::= LOAD_CLASSNAME
|
||||
|
||||
# Python 3.4+
|
||||
expr ::= LOAD_CLASSDEREF
|
||||
|
||||
# Python3 drops slice0..slice3
|
||||
'''
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def call_fn_name(token):
|
||||
@@ -441,6 +453,15 @@ class Python3Parser(PythonParser):
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type)
|
||||
self.add_unique_rule(rule, token.type, args_pos, customize)
|
||||
if self.version >= 3.5:
|
||||
rule = ('async_call_function ::= expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type +
|
||||
' GET_AWAITABLE LOAD_CONST YIELD_FROM')
|
||||
self.add_unique_rule(rule, token.type, args_pos, customize)
|
||||
self.add_unique_rule('expr ::= async_call_function', token.type, args_pos, customize)
|
||||
|
||||
rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d'
|
||||
% (('expr ' * (args_pos-1)), opname, args_pos))
|
||||
self.add_unique_rule(rule, token.type, args_pos, customize)
|
||||
@@ -464,8 +485,15 @@ class Python3Parser(PythonParser):
|
||||
|
||||
# build_class (see load_build_class)
|
||||
|
||||
build_list ::= {expr}^n BUILD_LIST_n
|
||||
build_list ::= {expr}^n BUILD_TUPLE_n
|
||||
# Even the below say _list, in the semantic rules we
|
||||
# disambiguate tuples, and sets from lists
|
||||
|
||||
build_list ::= {expr}^n BUILD_LIST_n
|
||||
build_list ::= {expr}^n BUILD_TUPLE_n
|
||||
build_list ::= {expr}^n BUILD_SET_n
|
||||
build_list ::= {expr}^n BUILD_LIST_UNPACK_n
|
||||
build_list ::= {expr}^n BUILD_SET_UNPACK_n
|
||||
build_list ::= {expr}^n BUILD_TUPLE_UNPACK_n
|
||||
|
||||
load_closure ::= {LOAD_CLOSURE}^n BUILD_TUPLE_n
|
||||
# call_function (see custom_classfunc_rule)
|
||||
@@ -538,7 +566,8 @@ class Python3Parser(PythonParser):
|
||||
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
v = token.attr
|
||||
rule = ('build_list ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32)%32) + 'expr '*(v%32) + opname)
|
||||
'expr32 ' * int((v//32) % 32) +
|
||||
'expr ' * (v % 32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if opname_base == 'BUILD_TUPLE':
|
||||
rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
|
||||
@@ -578,9 +607,10 @@ class Python3Parser(PythonParser):
|
||||
self.add_unique_rule(rule, 'kvlist_n', 1, customize)
|
||||
rule = "mapexpr ::= BUILD_MAP_n kvlist_n"
|
||||
elif self.version >= 3.5:
|
||||
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = "mapexpr ::= %s %s" % (kvlist_n, opname)
|
||||
if opname != 'BUILD_MAP_WITH_CALL':
|
||||
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = "mapexpr ::= %s %s" % (kvlist_n, opname)
|
||||
else:
|
||||
rule = kvlist_n + ' ::= ' + 'expr expr STORE_MAP ' * token.attr
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
@@ -622,6 +652,27 @@ class Python3Parser(PythonParser):
|
||||
rule = ('mkfunc ::= kwargs %sexpr %s' %
|
||||
('pos_arg ' * args_pos, opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if opname.startswith('MAKE_FUNCTION_A'):
|
||||
# rule = ('mkfunc2 ::= %s%sEXTENDED_ARG %s' %
|
||||
# ('pos_arg ' * (args_pos), 'kwargs ' * (annotate_args-1), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if self.version >= 3.3:
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('call_function ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
else:
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('call_function ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname_base == 'CALL_METHOD':
|
||||
# PyPy only - DRY with parse2
|
||||
|
||||
@@ -682,12 +733,27 @@ class Python3Parser(PythonParser):
|
||||
self.check_reduce['augassign1'] = 'AST'
|
||||
self.check_reduce['augassign2'] = 'AST'
|
||||
self.check_reduce['while1stmt'] = 'noAST'
|
||||
self.check_reduce['annotate_tuple'] = 'noAST'
|
||||
self.check_reduce['kwarg'] = 'noAST'
|
||||
# FIXME: remove parser errors caused by the below
|
||||
# self.check_reduce['while1elsestmt'] = 'noAST'
|
||||
return
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
lhs = rule[0]
|
||||
if lhs in ('augassign1', 'augassign2') and ast[0][0] == 'and':
|
||||
return True
|
||||
elif lhs == 'annotate_tuple':
|
||||
return not isinstance(tokens[first].attr, tuple)
|
||||
elif lhs == 'kwarg':
|
||||
return not isinstance(tokens[first].attr, str)
|
||||
elif lhs == 'while1elsestmt':
|
||||
# if SETUP_LOOP target spans the else part, then this is
|
||||
# not while1else. Also do for whileTrue?
|
||||
last += 1
|
||||
while isinstance(tokens[last].offset, str):
|
||||
last += 1
|
||||
return tokens[first].attr == tokens[last].offset
|
||||
elif lhs == 'while1stmt':
|
||||
if tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK'):
|
||||
# jump_back should be right afer SETUP_LOOP. Test?
|
||||
|
@@ -15,6 +15,11 @@ class Python30Parser(Python3Parser):
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
|
||||
# FIXME: combine with parse3.2
|
||||
whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK
|
||||
COME_FROM_LOOP
|
||||
whileTruestmt ::= SETUP_LOOP return_stmts
|
||||
COME_FROM_LOOP
|
||||
|
||||
# In many ways Python 3.0 code generation is more like Python 2.6 than
|
||||
# it is 2.7 or 3.1. So we have a number of 2.6ish (and before) rules below
|
||||
|
@@ -36,16 +36,8 @@ class Python31Parser(Python32Parser):
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python31Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
if opname.startswith('MAKE_FUNCTION_A'):
|
||||
args_pos, args_kw, annotate_args = token.attr
|
||||
# Check that there are 2 annotated params?
|
||||
# rule = ('mkfunc2 ::= %s%sEXTENDED_ARG %s' %
|
||||
# ('pos_arg ' * (args_pos), 'kwargs ' * (annotate_args-1), opname))
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
return
|
||||
pass
|
||||
|
||||
class Python31ParserSingle(Python31Parser, PythonParserSingle):
|
||||
pass
|
||||
|
@@ -19,6 +19,11 @@ class Python32Parser(Python3Parser):
|
||||
COME_FROM_LOOP
|
||||
whileTruestmt ::= SETUP_LOOP return_stmts
|
||||
COME_FROM_LOOP
|
||||
|
||||
# Python 3.2+ has more loop optimization that removes
|
||||
# JUMP_FORWARD in some cases, and hence we also don't
|
||||
# see COME_FROM
|
||||
_ifstmts_jump ::= c_stmts_opt
|
||||
"""
|
||||
pass
|
||||
|
||||
@@ -41,6 +46,9 @@ class Python32Parser(Python3Parser):
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
pass
|
||||
return
|
||||
pass
|
||||
|
||||
|
||||
class Python32ParserSingle(Python32Parser, PythonParserSingle):
|
||||
|
@@ -21,11 +21,6 @@ class Python33Parser(Python32Parser):
|
||||
iflaststmt ::= testexpr c_stmts_opt33
|
||||
c_stmts_opt33 ::= JUMP_BACK JUMP_ABSOLUTE c_stmts_opt
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_FORWARD _come_from
|
||||
|
||||
# Python 3.3+ has more loop optimization that removes
|
||||
# JUMP_FORWARD in some cases, and hence we also don't
|
||||
# see COME_FROM
|
||||
_ifstmts_jump ::= c_stmts_opt
|
||||
"""
|
||||
|
||||
class Python33ParserSingle(Python33Parser, PythonParserSingle):
|
||||
|
@@ -16,37 +16,122 @@ class Python35Parser(Python34Parser):
|
||||
|
||||
def p_35on(self, args):
|
||||
"""
|
||||
# The number of canned instructions in new statements is mind boggling.
|
||||
# I'm sure by the time Python 4 comes around these will be turned
|
||||
# into special opcodes
|
||||
|
||||
# Python 3.5+ Await statement
|
||||
stmt ::= await_stmt
|
||||
        await_stmt ::= call_function GET_AWAITABLE LOAD_CONST YIELD_FROM POP_TOP

        # Python 3.5+ has WITH_CLEANUP_START/FINISH

        withstmt ::= expr SETUP_WITH exprlist suite_stmts_opt
                     POP_BLOCK LOAD_CONST COME_FROM_WITH
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
        withstmt ::= expr
                     SETUP_WITH exprlist suite_stmts_opt
                     POP_BLOCK LOAD_CONST COME_FROM_WITH
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY

        withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt
                     POP_BLOCK LOAD_CONST COME_FROM_WITH
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
        withstmt ::= expr
                     SETUP_WITH POP_TOP suite_stmts_opt
                     POP_BLOCK LOAD_CONST COME_FROM_WITH
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY

        withasstmt ::= expr
                     SETUP_WITH designator suite_stmts_opt
                     POP_BLOCK LOAD_CONST COME_FROM_WITH
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY

        # Python 3.5+ async additions

        stmt ::= async_with_stmt
        async_with_stmt ::= expr
                     BEFORE_ASYNC_WITH GET_AWAITABLE LOAD_CONST YIELD_FROM
                     SETUP_ASYNC_WITH POP_TOP suite_stmts_opt
                     POP_BLOCK LOAD_CONST
                     WITH_CLEANUP_START
                     GET_AWAITABLE LOAD_CONST YIELD_FROM
                     WITH_CLEANUP_FINISH END_FINALLY

        stmt ::= async_with_as_stmt
        async_with_as_stmt ::= expr
                     BEFORE_ASYNC_WITH GET_AWAITABLE LOAD_CONST YIELD_FROM
                     SETUP_ASYNC_WITH designator suite_stmts_opt
                     POP_BLOCK LOAD_CONST
                     WITH_CLEANUP_START
                     GET_AWAITABLE LOAD_CONST YIELD_FROM
                     WITH_CLEANUP_FINISH END_FINALLY

        stmt ::= async_for_stmt
        async_for_stmt ::= SETUP_LOOP expr
                     GET_AITER
                     LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
                     YIELD_FROM
                     designator
                     POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
                     LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
                     POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
                     JUMP_ABSOLUTE END_FINALLY COME_FROM
                     for_block POP_BLOCK JUMP_ABSOLUTE
                     opt_come_from_loop

        stmt ::= async_forelse_stmt
        async_forelse_stmt ::= SETUP_LOOP expr
                     GET_AITER
                     LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
                     YIELD_FROM
                     designator
                     POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
                     LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
                     POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
                     JUMP_ABSOLUTE END_FINALLY COME_FROM
                     for_block POP_BLOCK JUMP_ABSOLUTE
                     else_suite COME_FROM_LOOP

        withasstmt ::= expr SETUP_WITH designator suite_stmts_opt
                     POP_BLOCK LOAD_CONST COME_FROM_WITH
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY

        inplace_op ::= INPLACE_MATRIX_MULTIPLY
        binary_op ::= BINARY_MATRIX_MULTIPLY

        # Python 3.5+ does jump optimization
        # In < 3.5 the below is a JUMP_FORWARD to a JUMP_ABSOLUTE.
        # In return_stmt, we will need the semantic actions in pysource.py
        # to work out whether to dedent or not based on the presence of
        # RETURN_END_IF vs RETURN_VALUE

        return_if_stmt ::= ret_expr RETURN_END_IF POP_BLOCK

        ifelsestmtc ::= testexpr c_stmts_opt JUMP_FORWARD else_suitec
        ifelsestmtc ::= testexpr c_stmts_opt jf_else else_suitec

        # ifstmt ::= testexpr c_stmts_opt

        iflaststmt ::= testexpr c_stmts_opt JUMP_FORWARD

        # Python 3.3+ also has yield from. 3.5 does it
        # differently than 3.3, 3.4

        yield_from ::= expr GET_YIELD_FROM_ITER LOAD_CONST YIELD_FROM

        _ifstmts_jump ::= c_stmts_opt COME_FROM
        """
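To make the productions above concrete, here is a small hand-written Python 3.5 example (an illustration only, not taken from the repository) whose compiled bytecode exercises the await, async-with and async-for rules:

    import asyncio

    class _NullACM:
        # Minimal async context manager so the example is self-contained.
        async def __aenter__(self):
            return self
        async def __aexit__(self, *exc):
            return False

    async def demo(aiterable):
        await asyncio.sleep(0)          # await_stmt: GET_AWAITABLE, YIELD_FROM
        async with _NullACM() as ctx:   # async_with_as_stmt: BEFORE_ASYNC_WITH, SETUP_ASYNC_WITH, designator
            pass
        async for item in aiterable:    # async_for_stmt: GET_AITER, GET_ANEXT plus the
            print(item)                 # SETUP_EXCEPT machinery around StopAsyncIteration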
    def add_custom_rules(self, tokens, customize):
        super(Python35Parser, self).add_custom_rules(tokens, customize)
        for i, token in enumerate(tokens):
            opname = token.type
            if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
                nargs = token.attr % 256
                map_unpack_n = "map_unpack_%s" % nargs
                rule = map_unpack_n + ' ::= ' + 'expr ' * (nargs)
                self.add_unique_rule(rule, opname, token.attr, customize)
                rule = "unmapexpr ::= %s %s" % (map_unpack_n, opname)
                self.add_unique_rule(rule, opname, token.attr, customize)
                call_token = tokens[i+1]
                if self.version == 3.5:
                    rule = 'call_function ::= expr unmapexpr ' + call_token.type
                    self.add_unique_rule(rule, opname, token.attr, customize)
                    pass
                pass
        return


class Python35ParserSingle(Python35Parser, PythonParserSingle):
    pass
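A hand-made illustration (values assumed, not taken from the diff) of the rule strings the loop above produces when it meets BUILD_MAP_UNPACK_WITH_CALL while decompiling a call such as f(**a, **b):

    opname = 'BUILD_MAP_UNPACK_WITH_CALL'
    attr = 0x202                              # assumed oparg: low byte = 2 mappings to merge
    nargs = attr % 256                        # -> 2
    map_unpack_n = "map_unpack_%s" % nargs    # -> 'map_unpack_2'
    print(map_unpack_n + ' ::= ' + 'expr ' * nargs)
    # map_unpack_2 ::= expr expr
    print("unmapexpr ::= %s %s" % (map_unpack_n, opname))
    # unmapexpr ::= map_unpack_2 BUILD_MAP_UNPACK_WITH_CALL
    # and, on 3.5 only, a call_function rule whose last symbol is whatever
    # CALL_FUNCTION-style token follows next in the token stream.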
@@ -18,6 +18,12 @@ class Python36Parser(Python35Parser):
        """
        fstring_multi ::= fstring_expr_or_strs BUILD_STRING
        fstring_expr_or_strs ::= fstring_expr_or_str+

        func_args36 ::= expr BUILD_TUPLE_0
        call_function ::= func_args36 unmapexpr CALL_FUNCTION_EX

        withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt POP_BLOCK LOAD_CONST
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
        """

    def add_custom_rules(self, tokens, customize):
@@ -46,6 +52,7 @@ class Python36Parser(Python35Parser):
                """ % (fstring_expr_or_str_n, fstring_expr_or_str_n, "fstring_expr_or_str " * v)
            self.add_unique_doc_rules(rules_str, customize)


class Python36ParserSingle(Python36Parser, PythonParserSingle):
    pass

@@ -65,8 +72,8 @@ if __name__ == '__main__':
        """.split()))
    remain_tokens = set(tokens) - opcode_set
    import re
    remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
    remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
    remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
    remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
    remain_tokens = set(remain_tokens) - opcode_set
    print(remain_tokens)
    # print(sorted(p.rule2name.items()))
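For orientation, a hand-written Python 3.6 snippet (an illustration, not part of the diff) showing the bytecode shape the fstring_multi rule describes:

    # f"{x}={y!r}" compiles, per piece, to FORMAT_VALUE for each {...}
    # expression and LOAD_CONST for literal text, followed by one
    #     BUILD_STRING 3
    # that joins the three pieces; fstring_expr_or_strs collects the
    # pieces and fstring_multi reduces them with the trailing BUILD_STRING.
    import dis
    dis.dis(compile('f"{x}={y!r}"', "<s>", "eval"))   # run under 3.6 to see the pattern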
@@ -226,9 +226,9 @@ class Scanner(object):
        for given opcode <op>.
        """
        if op < self.opc.HAVE_ARGUMENT:
            return 1
            return 2 if self.version >= 3.6 else 1
        else:
            return 3
            return 2 if self.version >= 3.6 else 3

    def remove_mid_line_ifs(self, ifs):
        """
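The hunk above reflects 3.6's switch to two-byte "wordcode": every instruction now occupies 2 bytes, where older bytecode used 1 byte for argument-less opcodes and 3 bytes otherwise. A standalone sketch of that sizing rule (HAVE_ARGUMENT assumed to be CPython's value of 90; EXTENDED_ARG prefixes are not counted, just as in the method above):

    def op_size(op, version, have_argument=90):
        # 3.6+ wordcode: fixed two-byte instructions
        if version >= 3.6:
            return 2
        # classic bytecode: 1 byte without an argument, 3 bytes with one
        return 1 if op < have_argument else 3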
@@ -23,5 +23,5 @@ class Scanner15(scan.Scanner21):
        self.opc = opcode_15
        self.opname = opcode_15.opname
        self.version = 1.5
        self.genexpr_name = '<generator expression>';
        self.genexpr_name = '<generator expression>'
        return

@@ -37,7 +37,7 @@ class Scanner2(scan.Scanner):
        self.jump_forward = frozenset([self.opc.JUMP_ABSOLUTE, self.opc.JUMP_FORWARD])
        # This is the 2.5+ default
        # For <2.5 it is <generator expression>
        self.genexpr_name = '<genexpr>';
        self.genexpr_name = '<genexpr>'

    @staticmethod
    def unmangle_name(name, classname):
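Why genexpr_name matters, in a small self-contained check (illustration only, not from the diff): the scanner identifies generator-expression code objects by the name the compiler stored in them, and that name changed across Python versions.

    code_obj = compile("(x for x in [])", "<s>", "eval")
    genexpr_code = next(c for c in code_obj.co_consts if hasattr(c, 'co_name'))
    print(genexpr_code.co_name)   # '<genexpr>' on 2.5+; very old CPythons
                                  # used '<generator expression>'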
@@ -68,7 +68,6 @@ class Scanner2(scan.Scanner):
|
||||
varnames = co.co_varnames
|
||||
return free, names, varnames
|
||||
|
||||
|
||||
def ingest(self, co, classname=None, code_objects={}, show_asm=None):
|
||||
"""
|
||||
Pick out tokens from an uncompyle6 code object, and transform them,
|
||||
@@ -98,14 +97,14 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
customize = {}
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1;
|
||||
customize['PyPy'] = 1
|
||||
|
||||
Token = self.Token # shortcut
|
||||
|
||||
n = self.setup_code(co)
|
||||
codelen = self.setup_code(co)
|
||||
|
||||
self.build_lines_data(co, n)
|
||||
self.build_prev_op(n)
|
||||
self.build_lines_data(co, codelen)
|
||||
self.build_prev_op(codelen)
|
||||
|
||||
free, names, varnames = self.unmangle_code_names(co, classname)
|
||||
self.names = names
|
||||
@@ -114,7 +113,7 @@ class Scanner2(scan.Scanner):
|
||||
# turn 'LOAD_GLOBAL' to 'LOAD_ASSERT'.
|
||||
# 'LOAD_ASSERT' is used in assert statements.
|
||||
self.load_asserts = set()
|
||||
for i in self.op_range(0, n):
|
||||
for i in self.op_range(0, codelen):
|
||||
# We need to detect the difference between:
|
||||
# raise AssertionError
|
||||
# and
|
||||
@@ -139,7 +138,7 @@ class Scanner2(scan.Scanner):
|
||||
last_stmt = self.next_stmt[0]
|
||||
i = self.next_stmt[last_stmt]
|
||||
replace = {}
|
||||
while i < n-1:
|
||||
while i < codelen - 1:
|
||||
if self.lines[last_stmt].next > i:
|
||||
# Distinguish "print ..." from "print ...,"
|
||||
if self.code[last_stmt] == self.opc.PRINT_ITEM:
|
||||
@@ -151,7 +150,7 @@ class Scanner2(scan.Scanner):
|
||||
i = self.next_stmt[i]
|
||||
|
||||
extended_arg = 0
|
||||
for offset in self.op_range(0, n):
|
||||
for offset in self.op_range(0, codelen):
|
||||
if offset in jump_targets:
|
||||
jump_idx = 0
|
||||
# We want to process COME_FROMs to the same offset to be in *descending*
|
||||
@@ -177,6 +176,7 @@ class Scanner2(scan.Scanner):
|
||||
offset="%s_%d" % (offset, jump_idx),
|
||||
has_arg = True))
|
||||
jump_idx += 1
|
||||
pass
|
||||
|
||||
op = self.code[offset]
|
||||
op_name = self.opc.opname[op]
|
||||
@@ -459,7 +459,6 @@ class Scanner2(scan.Scanner):
|
||||
self.ignore_if.add(except_match)
|
||||
return None
|
||||
|
||||
|
||||
self.ignore_if.add(except_match)
|
||||
self.not_continue.add(jmp)
|
||||
return jmp
|
||||
@@ -479,26 +478,27 @@ class Scanner2(scan.Scanner):
|
||||
elif op in self.setup_ops:
|
||||
count_SETUP_ += 1
|
||||
|
||||
def detect_structure(self, offset, op):
|
||||
def detect_control_flow(self, offset, op):
|
||||
"""
|
||||
Detect type of block structures and their boundaries to fix optimized jumps
|
||||
in python2.3+
|
||||
"""
|
||||
|
||||
# TODO: check the struct boundaries more precisely -Dan
|
||||
|
||||
code = self.code
|
||||
|
||||
# Detect parent structure
|
||||
parent = self.structs[0]
|
||||
start = parent['start']
|
||||
end = parent['end']
|
||||
|
||||
# Pick inner-most parent for our offset
|
||||
for struct in self.structs:
|
||||
_start = struct['start']
|
||||
_end = struct['end']
|
||||
if (_start <= offset < _end) and (_start >= start and _end <= end):
|
||||
start = _start
|
||||
end = _end
|
||||
current_start = struct['start']
|
||||
current_end = struct['end']
|
||||
if ((current_start <= offset < current_end)
|
||||
and (current_start >= start and current_end <= end)):
|
||||
start = current_start
|
||||
end = current_end
|
||||
parent = struct
|
||||
|
||||
if op == self.opc.SETUP_LOOP:
|
||||
@@ -677,6 +677,8 @@ class Scanner2(scan.Scanner):
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
return
|
||||
|
||||
jump_if_offset = offset
|
||||
|
||||
start = offset+3
|
||||
pre = self.prev
|
||||
|
||||
@@ -699,6 +701,10 @@ class Scanner2(scan.Scanner):
|
||||
'end': pre[target]})
|
||||
return
|
||||
|
||||
# The op offset just before the target jump offset is important
|
||||
# in making a determination of what we have. Save that.
|
||||
pre_rtarget = pre[rtarget]
|
||||
|
||||
# Is it an "and" inside an "if" or "while" block
|
||||
if op == self.opc.PJIF:
|
||||
|
||||
@@ -709,22 +715,22 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
# If we still have any offsets in set, start working on it
|
||||
if match:
|
||||
if code[pre[rtarget]] in self.jump_forward \
|
||||
and pre[rtarget] not in self.stmts \
|
||||
and self.restrict_to_parent(self.get_target(pre[rtarget]), parent) == rtarget:
|
||||
if code[pre[pre[rtarget]]] == self.opc.JUMP_ABSOLUTE \
|
||||
if code[pre_rtarget] in self.jump_forward \
|
||||
and pre_rtarget not in self.stmts \
|
||||
and self.restrict_to_parent(self.get_target(pre_rtarget), parent) == rtarget:
|
||||
if code[pre[pre_rtarget]] == self.opc.JUMP_ABSOLUTE \
|
||||
and self.remove_mid_line_ifs([offset]) \
|
||||
and target == self.get_target(pre[pre[rtarget]]) \
|
||||
and (pre[pre[rtarget]] not in self.stmts or self.get_target(pre[pre[rtarget]]) > pre[pre[rtarget]])\
|
||||
and 1 == len(self.remove_mid_line_ifs(self.rem_or(start, pre[pre[rtarget]], self.pop_jump_if, target))):
|
||||
and target == self.get_target(pre[pre_rtarget]) \
|
||||
and (pre[pre_rtarget] not in self.stmts or self.get_target(pre[pre_rtarget]) > pre[pre_rtarget])\
|
||||
and 1 == len(self.remove_mid_line_ifs(self.rem_or(start, pre[pre_rtarget], self.pop_jump_if, target))):
|
||||
pass
|
||||
elif code[pre[pre[rtarget]]] == self.opc.RETURN_VALUE \
|
||||
elif code[pre[pre_rtarget]] == self.opc.RETURN_VALUE \
|
||||
and self.remove_mid_line_ifs([offset]) \
|
||||
and 1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start,
|
||||
pre[pre[rtarget]],
|
||||
pre[pre_rtarget],
|
||||
self.pop_jump_if, target)))
|
||||
| set(self.remove_mid_line_ifs(self.rem_or(start, pre[pre[rtarget]],
|
||||
(self.opc.PJIF, self.opc.PJIT, self.opc.JUMP_ABSOLUTE), pre[rtarget], True))))):
|
||||
| set(self.remove_mid_line_ifs(self.rem_or(start, pre[pre_rtarget],
|
||||
(self.opc.PJIF, self.opc.PJIT, self.opc.JUMP_ABSOLUTE), pre_rtarget, True))))):
|
||||
pass
|
||||
else:
|
||||
fix = None
|
||||
@@ -742,7 +748,7 @@ class Scanner2(scan.Scanner):
|
||||
else:
|
||||
if (self.version < 2.7
|
||||
and parent['type'] in ('root', 'for-loop', 'if-then',
|
||||
'if-else', 'try')):
|
||||
'else', 'try')):
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
else:
|
||||
# note test for < 2.7 might be superfluous although informative
|
||||
@@ -757,7 +763,7 @@ class Scanner2(scan.Scanner):
|
||||
else:
|
||||
assert_offset = offset + 3
|
||||
if (assert_offset) in self.load_asserts:
|
||||
if code[pre[rtarget]] == self.opc.RAISE_VARARGS:
|
||||
if code[pre_rtarget] == self.opc.RAISE_VARARGS:
|
||||
return
|
||||
self.load_asserts.remove(assert_offset)
|
||||
|
||||
@@ -766,7 +772,7 @@ class Scanner2(scan.Scanner):
|
||||
pass
|
||||
elif code[next] in self.jump_forward and target == self.get_target(next):
|
||||
if code[pre[next]] == self.opc.PJIF:
|
||||
if code[next] == self.opc.JUMP_FORWARD or target != rtarget or code[pre[pre[rtarget]]] not in (self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE):
|
||||
if code[next] == self.opc.JUMP_FORWARD or target != rtarget or code[pre[pre_rtarget]] not in (self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE):
|
||||
self.fixed_jumps[offset] = pre[next]
|
||||
return
|
||||
elif code[next] == self.opc.JUMP_ABSOLUTE and code[target] in self.jump_forward:
|
||||
@@ -783,20 +789,32 @@ class Scanner2(scan.Scanner):
|
||||
return
|
||||
|
||||
if self.version == 2.7:
|
||||
if code[pre[rtarget]] == self.opc.JUMP_ABSOLUTE and pre[rtarget] in self.stmts \
|
||||
and pre[rtarget] != offset and pre[pre[rtarget]] != offset:
|
||||
if code[pre_rtarget] == self.opc.JUMP_ABSOLUTE and pre_rtarget in self.stmts \
|
||||
and pre_rtarget != offset and pre[pre_rtarget] != offset:
|
||||
if code[rtarget] == self.opc.JUMP_ABSOLUTE and code[rtarget+3] == self.opc.POP_BLOCK:
|
||||
if code[pre[pre[rtarget]]] != self.opc.JUMP_ABSOLUTE:
|
||||
if code[pre[pre_rtarget]] != self.opc.JUMP_ABSOLUTE:
|
||||
pass
|
||||
elif self.get_target(pre[pre[rtarget]]) != target:
|
||||
elif self.get_target(pre[pre_rtarget]) != target:
|
||||
pass
|
||||
else:
|
||||
rtarget = pre[rtarget]
|
||||
rtarget = pre_rtarget
|
||||
else:
|
||||
rtarget = pre[rtarget]
|
||||
rtarget = pre_rtarget
|
||||
|
||||
# Does the "if" jump just beyond a jump op, then this is probably an if statement
|
||||
pre_rtarget = pre[rtarget]
|
||||
# Does the "jump if" jump beyond a jump op?
|
||||
# That is, we have something like:
|
||||
# POP_JUMP_IF_FALSE HERE
|
||||
# ...
|
||||
# JUMP_FORWARD
|
||||
# HERE:
|
||||
#
|
||||
# If so, this can be block inside an "if" statement
|
||||
# or a conditional assignment like:
|
||||
# x = 1 if x else 2
|
||||
#
|
||||
# There are other contexts we may need to consider
|
||||
# like whether the target is "END_FINALLY"
|
||||
# or if the condition jump is to a forward location
|
||||
code_pre_rtarget = code[pre_rtarget]
|
||||
|
||||
if code_pre_rtarget in self.jump_forward:
|
||||
@@ -816,21 +834,87 @@ class Scanner2(scan.Scanner):
|
||||
jump_target = self.get_target(next_offset, next_op)
|
||||
if jump_target in self.setup_loops:
|
||||
self.structs.append({'type': 'while-loop',
|
||||
'start': start - 3,
|
||||
'start': jump_if_offset,
|
||||
'end': jump_target})
|
||||
self.fixed_jumps[start-3] = jump_target
|
||||
self.fixed_jumps[jump_if_offset] = jump_target
|
||||
return
|
||||
|
||||
end = self.restrict_to_parent(if_end, parent)
|
||||
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget})
|
||||
if_then_maybe = None
|
||||
|
||||
if 2.2 <= self.version <= 2.6:
|
||||
# Take the JUMP_IF target. In an "if/then", it will be
|
||||
# a POP_TOP instruction and the instruction before it
|
||||
# will be a JUMP_FORWARD to just after the POP_TOP.
|
||||
# For example:
|
||||
# Good:
|
||||
# 3 JUMP_IF_FALSE 33 'to 39'
|
||||
# ..
|
||||
# 36 JUMP_FORWARD 1 'to 40'
|
||||
# 39 POP_TOP
|
||||
# 40 ...
|
||||
# example:
|
||||
|
||||
# BAD (is an "and"):
|
||||
# 28 JUMP_IF_FALSE 4 'to 35'
|
||||
# ...
|
||||
# 32 JUMP_ABSOLUTE 40 'to 40' # should be 36 or there should
|
||||
# # be a COME_FROM at the pop top
|
||||
# # before 40 to 35
|
||||
# 35 POP_TOP
|
||||
# 36 ...
|
||||
# 39 POP_TOP
|
||||
# 39_0 COME_FROM 3
|
||||
# 40 ...
|
||||
|
||||
if self.opc.opname[code[jump_if_offset]].startswith('JUMP_IF'):
|
||||
jump_if_target = code[jump_if_offset+1]
|
||||
if self.opc.opname[code[jump_if_target + jump_if_offset + 3]] == 'POP_TOP':
|
||||
jump_inst = jump_if_target + jump_if_offset
|
||||
jump_offset = code[jump_inst+1]
|
||||
jump_op = self.opc.opname[code[jump_inst]]
|
||||
if (jump_op == 'JUMP_FORWARD' and jump_offset == 1):
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget})
|
||||
|
||||
self.thens[start] = end
|
||||
elif jump_op == 'JUMP_ABSOLUTE':
|
||||
if_then_maybe = {'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget}
|
||||
|
||||
elif self.version == 2.7:
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start-3,
|
||||
'end': pre_rtarget})
|
||||
|
||||
self.not_continue.add(pre_rtarget)
|
||||
|
||||
if rtarget < end:
|
||||
self.structs.append({'type': 'if-else',
|
||||
# We have an "else" block of some kind.
|
||||
# Is it associated with "if_then_maybe" seen above?
|
||||
# These will be linked in this funny way:
|
||||
|
||||
# 198 JUMP_IF_FALSE 18 'to 219'
|
||||
# 201 POP_TOP
|
||||
# ...
|
||||
# 216 JUMP_ABSOLUTE 256 'to 256'
|
||||
# 219 POP_TOP
|
||||
# ...
|
||||
# 252 JUMP_FORWARD 1 'to 256'
|
||||
# 255 POP_TOP
|
||||
# 256
|
||||
if if_then_maybe and jump_op == 'JUMP_ABSOLUTE':
|
||||
jump_target = self.get_target(jump_inst, code[jump_inst])
|
||||
if self.opc.opname[code[end]] == 'JUMP_FORWARD':
|
||||
end_target = self.get_target(end, code[end])
|
||||
if jump_target == end_target:
|
||||
self.structs.append(if_then_maybe)
|
||||
self.thens[start] = end
|
||||
|
||||
self.structs.append({'type': 'else',
|
||||
'start': rtarget,
|
||||
'end': end})
|
||||
elif code_pre_rtarget == self.opc.RETURN_VALUE:
|
||||
@@ -838,6 +922,7 @@ class Scanner2(scan.Scanner):
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': rtarget})
|
||||
self.thens[start] = rtarget
|
||||
if self.version == 2.7 or code[pre_rtarget+1] != self.opc.JUMP_FORWARD:
|
||||
self.return_end_ifs.add(pre_rtarget)
|
||||
|
||||
@@ -871,11 +956,12 @@ class Scanner2(scan.Scanner):
|
||||
self.ignore_if = set()
|
||||
self.build_statement_indices()
|
||||
|
||||
# Containers filled by detect_structure()
|
||||
# Containers filled by detect_control_flow()
|
||||
self.not_continue = set()
|
||||
self.return_end_ifs = set()
|
||||
self.setup_loop_targets = {} # target given setup_loop offset
|
||||
self.setup_loops = {} # setup_loop offset given target
|
||||
self.thens = {} # JUMP_IF's that separate the 'then' part of an 'if'
|
||||
|
||||
targets = {}
|
||||
for offset in self.op_range(0, n):
|
||||
@@ -883,7 +969,7 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
# Determine structures and fix jumps in Python versions
|
||||
# since 2.3
|
||||
self.detect_structure(offset, op)
|
||||
self.detect_control_flow(offset, op)
|
||||
|
||||
if op_has_argument(op, self.opc):
|
||||
label = self.fixed_jumps.get(offset)
|
||||
@@ -891,8 +977,8 @@ class Scanner2(scan.Scanner):
|
||||
|
||||
if label is None:
|
||||
if op in self.opc.hasjrel and self.opc.opname[op] != 'FOR_ITER':
|
||||
# if (op in self.opc.hasjrel and
|
||||
# (self.version < 2.0 or op != self.opc.FOR_ITER)):
|
||||
# if (op in self.opc.hasjrel and
|
||||
# (self.version < 2.0 or op != self.opc.FOR_ITER)):
|
||||
label = offset + 3 + oparg
|
||||
elif self.version == 2.7 and op in self.opc.hasjabs:
|
||||
if op in (self.opc.JUMP_IF_FALSE_OR_POP,
|
||||
|
@@ -23,5 +23,5 @@ class Scanner21(scan.Scanner22):
|
||||
self.opc = opcode_21
|
||||
self.opname = opcode_21.opname
|
||||
self.version = 2.1
|
||||
self.genexpr_name = '<generator expression>';
|
||||
self.genexpr_name = '<generator expression>'
|
||||
return
|
||||
|
@@ -23,7 +23,7 @@ class Scanner22(scan.Scanner23):
|
||||
self.opc = opcode_22
|
||||
self.opname = opcode_22.opname
|
||||
self.version = 2.2
|
||||
self.genexpr_name = '<generator expression>';
|
||||
self.genexpr_name = '<generator expression>'
|
||||
self.parent_ingest = self.ingest
|
||||
self.ingest = self.ingest22
|
||||
return
|
||||
|
@@ -2,9 +2,8 @@
|
||||
"""
|
||||
Python 2.3 bytecode scanner/deparser
|
||||
|
||||
This overlaps Python's 2.3's dis module, but it can be run from
|
||||
Python 3 and other versions of Python. Also, we save token
|
||||
information for later use in deparsing.
|
||||
This massages tokenized 2.3 bytecode to make it more amenable for
|
||||
grammar parsing.
|
||||
"""
|
||||
|
||||
import uncompyle6.scanners.scanner24 as scan
|
||||
@@ -13,12 +12,12 @@ import uncompyle6.scanners.scanner24 as scan
|
||||
from xdis.opcodes import opcode_23
|
||||
JUMP_OPs = opcode_23.JUMP_OPs
|
||||
|
||||
# We base this off of 2.5 instead of the other way around
|
||||
# We base this off of 2.4 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
# The history is that 2.7 support is the cleanest,
|
||||
# then from that we got 2.6 and so on.
|
||||
class Scanner23(scan.Scanner24):
|
||||
def __init__(self, show_asm):
|
||||
def __init__(self, show_asm=False):
|
||||
scan.Scanner24.__init__(self, show_asm)
|
||||
self.opc = opcode_23
|
||||
self.opname = opcode_23.opname
|
||||
|
@@ -2,9 +2,8 @@
|
||||
"""
|
||||
Python 2.4 bytecode scanner/deparser
|
||||
|
||||
This overlaps Python's 2.4's dis module, but it can be run from
|
||||
Python 3 and other versions of Python. Also, we save token
|
||||
information for later use in deparsing.
|
||||
This massages tokenized 2.7 bytecode to make it more amenable for
|
||||
grammar parsing.
|
||||
"""
|
||||
|
||||
import uncompyle6.scanners.scanner25 as scan
|
||||
@@ -18,7 +17,7 @@ JUMP_OPs = opcode_24.JUMP_OPs
|
||||
# The history is that 2.7 support is the cleanest,
|
||||
# then from that we got 2.6 and so on.
|
||||
class Scanner24(scan.Scanner25):
|
||||
def __init__(self, show_asm):
|
||||
def __init__(self, show_asm=False):
|
||||
scan.Scanner25.__init__(self, show_asm)
|
||||
# These are the only differences in initialization between
|
||||
# 2.4, 2.5 and 2.6
|
||||
|
@@ -18,7 +18,7 @@ JUMP_OPs = opcode_25.JUMP_OPs
|
||||
# The history is that 2.7 support is the cleanest,
|
||||
# then from that we got 2.6 and so on.
|
||||
class Scanner25(scan.Scanner26):
|
||||
def __init__(self, show_asm):
|
||||
def __init__(self, show_asm=False):
|
||||
# There are no differences in initialization between
|
||||
# 2.5 and 2.6
|
||||
self.opc = opcode_25
|
||||
|
@@ -94,35 +94,32 @@ class Scanner26(scan.Scanner2):
|
||||
for instr in bytecode.get_instructions(co):
|
||||
print(instr._disassemble())
|
||||
|
||||
# from xdis.bytecode import Bytecode
|
||||
# bytecode = Bytecode(co, self.opc)
|
||||
# for instr in bytecode.get_instructions(co):
|
||||
# print(instr._disassemble())
|
||||
|
||||
# Container for tokens
|
||||
tokens = []
|
||||
|
||||
customize = {}
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1
|
||||
|
||||
Token = self.Token # shortcut
|
||||
|
||||
n = self.setup_code(co)
|
||||
codelen = self.setup_code(co)
|
||||
|
||||
self.build_lines_data(co, n)
|
||||
self.build_prev_op(n)
|
||||
self.build_lines_data(co, codelen)
|
||||
self.build_prev_op(codelen)
|
||||
|
||||
free, names, varnames = self.unmangle_code_names(co, classname)
|
||||
self.names = names
|
||||
|
||||
codelen = len(self.code)
|
||||
|
||||
# Scan for assertions. Later we will
|
||||
# turn 'LOAD_GLOBAL' to 'LOAD_ASSERT'.
|
||||
# 'LOAD_ASSERT' is used in assert statements.
|
||||
self.load_asserts = set()
|
||||
for i in self.op_range(0, n):
|
||||
# We need to detect the difference between
|
||||
# "raise AssertionError" and
|
||||
# "assert"
|
||||
for i in self.op_range(0, codelen):
|
||||
# We need to detect the difference between:
|
||||
# raise AssertionError
|
||||
# and
|
||||
# assert ...
|
||||
if (self.code[i] == self.opc.JUMP_IF_TRUE and
|
||||
i + 4 < codelen and
|
||||
self.code[i+3] == self.opc.POP_TOP and
|
||||
@@ -167,6 +164,11 @@ class Scanner26(scan.Scanner2):
|
||||
offset="%s_%d" % (offset, jump_idx),
|
||||
has_arg = True))
|
||||
jump_idx += 1
|
||||
elif offset in self.thens:
|
||||
tokens.append(Token(
|
||||
'THEN', None, self.thens[offset],
|
||||
offset="%s_0" % offset,
|
||||
has_arg = True))
|
||||
|
||||
has_arg = (op >= self.opc.HAVE_ARGUMENT)
|
||||
if has_arg:
|
||||
|
@@ -148,7 +148,7 @@ class Scanner3(Scanner):
|
||||
"""
|
||||
|
||||
show_asm = self.show_asm if not show_asm else show_asm
|
||||
# show_asm = 'after'
|
||||
# show_asm = 'both'
|
||||
if show_asm in ('both', 'before'):
|
||||
bytecode = Bytecode(co, self.opc)
|
||||
for instr in bytecode.get_instructions(co):
|
||||
@@ -159,7 +159,7 @@ class Scanner3(Scanner):
|
||||
|
||||
customize = {}
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1;
|
||||
customize['PyPy'] = 1
|
||||
|
||||
self.code = array('B', co.co_code)
|
||||
self.build_lines_data(co)
|
||||
@@ -199,6 +199,7 @@ class Scanner3(Scanner):
|
||||
# Get jump targets
|
||||
# Format: {target offset: [jump offsets]}
|
||||
jump_targets = self.find_jump_targets(show_asm)
|
||||
last_op_was_break = False
|
||||
|
||||
for inst in bytecode:
|
||||
|
||||
@@ -225,6 +226,14 @@ class Scanner3(Scanner):
|
||||
jump_idx += 1
|
||||
pass
|
||||
pass
|
||||
elif inst.offset in self.else_start:
|
||||
end_offset = self.else_start[inst.offset]
|
||||
tokens.append(Token('ELSE',
|
||||
None, repr(end_offset),
|
||||
offset='%s' % (inst.offset),
|
||||
has_arg = True, opc=self.opc))
|
||||
|
||||
pass
|
||||
|
||||
pattr = inst.argrepr
|
||||
opname = inst.opname
|
||||
@@ -313,11 +322,15 @@ class Scanner3(Scanner):
|
||||
if target <= inst.offset:
|
||||
next_opname = self.opname[self.code[inst.offset+3]]
|
||||
if (inst.offset in self.stmts and
|
||||
next_opname not in ('END_FINALLY', 'POP_BLOCK',
|
||||
(self.version != 3.0 or (hasattr(inst, 'linestart'))) and
|
||||
(next_opname not in ('END_FINALLY', 'POP_BLOCK',
|
||||
# Python 3.0 only uses POP_TOP
|
||||
'POP_TOP')
|
||||
and inst.offset not in self.not_continue):
|
||||
opname = 'CONTINUE'
|
||||
'POP_TOP'))):
|
||||
if (self.version >= 3.5 or
|
||||
(inst.offset not in self.not_continue) or
|
||||
(tokens[-1].type == 'RETURN_VALUE')):
|
||||
opname = 'CONTINUE'
|
||||
pass
|
||||
else:
|
||||
opname = 'JUMP_BACK'
|
||||
# FIXME: this is a hack to catch stuff like:
|
||||
@@ -326,15 +339,21 @@ class Scanner3(Scanner):
|
||||
# There are other situations where we don't catch
|
||||
# CONTINUE as well.
|
||||
if tokens[-1].type == 'JUMP_BACK' and tokens[-1].attr <= argval:
|
||||
# intern is used because we are changing the *previous* token
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
|
||||
if tokens[-2].type == 'BREAK_LOOP':
|
||||
del tokens[-1]
|
||||
else:
|
||||
# intern is used because we are changing the *previous* token
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
if last_op_was_break and opname == 'CONTINUE':
|
||||
last_op_was_break = False
|
||||
continue
|
||||
elif op == self.opc.RETURN_VALUE:
|
||||
if inst.offset in self.return_end_ifs:
|
||||
opname = 'RETURN_END_IF'
|
||||
elif inst.offset in self.load_asserts:
|
||||
opname = 'LOAD_ASSERT'
|
||||
|
||||
last_op_was_break = opname == 'BREAK_LOOP'
|
||||
tokens.append(
|
||||
Token(
|
||||
type_ = opname,
|
||||
@@ -424,8 +443,9 @@ class Scanner3(Scanner):
|
||||
self.fixed_jumps = {}
|
||||
self.ignore_if = set()
|
||||
self.build_statement_indices()
|
||||
self.else_start = {}
|
||||
|
||||
# Containers filled by detect_structure()
|
||||
# Containers filled by detect_control_flow()
|
||||
self.not_continue = set()
|
||||
self.return_end_ifs = set()
|
||||
self.setup_loop_targets = {} # target given setup_loop offset
|
||||
@@ -437,16 +457,20 @@ class Scanner3(Scanner):
|
||||
|
||||
# Determine structures and fix jumps in Python versions
|
||||
# since 2.3
|
||||
self.detect_structure(offset, targets)
|
||||
self.detect_control_flow(offset, targets)
|
||||
|
||||
has_arg = (op >= op3.HAVE_ARGUMENT)
|
||||
if has_arg:
|
||||
label = self.fixed_jumps.get(offset)
|
||||
oparg = code[offset+1] + code[offset+2] * 256
|
||||
if self.version >= 3.6:
|
||||
oparg = code[offset+1]
|
||||
else:
|
||||
oparg = code[offset+1] + code[offset+2] * 256
|
||||
next_offset = offset + self.op_size(op)
|
||||
|
||||
if label is None:
|
||||
if op in op3.hasjrel and op != self.opc.FOR_ITER:
|
||||
label = offset + 3 + oparg
|
||||
label = next_offset + oparg
|
||||
elif op in op3.hasjabs:
|
||||
if op in self.jump_if_pop:
|
||||
if oparg > offset:
|
||||
@@ -476,7 +500,7 @@ class Scanner3(Scanner):
|
||||
prelim = self.all_instr(start, end, self.statement_opcodes)
|
||||
|
||||
# Initialize final container with statements with
|
||||
# preliminnary data
|
||||
# preliminary data
|
||||
stmts = self.stmts = set(prelim)
|
||||
|
||||
# Same for opcode sequences
|
||||
@@ -553,12 +577,18 @@ class Scanner3(Scanner):
|
||||
Get target offset for op located at given <offset>.
|
||||
"""
|
||||
op = self.code[offset]
|
||||
target = self.code[offset+1] + self.code[offset+2] * 256
|
||||
if op in op3.hasjrel:
|
||||
target += offset + 3
|
||||
if self.version >= 3.6:
|
||||
target = self.code[offset+1]
|
||||
if op in self.opc.hasjrel:
|
||||
target += offset + 2
|
||||
else:
|
||||
target = self.code[offset+1] + self.code[offset+2] * 256
|
||||
if op in self.opc.hasjrel:
|
||||
target += offset + 3
|
||||
|
||||
return target
|
||||
|
||||
def detect_structure(self, offset, targets):
|
||||
def detect_control_flow(self, offset, targets):
|
||||
"""
|
||||
Detect structures and their boundaries to fix optimized jumps
|
||||
in python2.3+
|
||||
@@ -585,13 +615,15 @@ class Scanner3(Scanner):
|
||||
parent = struct
|
||||
|
||||
if op == self.opc.SETUP_LOOP:
|
||||
|
||||
# We categorize loop types: 'for', 'while', 'while 1' with
|
||||
# possibly suffixes '-loop' and '-else'
|
||||
# Try to find the jump_back instruction of the loop.
|
||||
# It could be a return instruction.
|
||||
|
||||
start = offset+3
|
||||
if self.version <= 3.5:
|
||||
start = offset+3
|
||||
else:
|
||||
start = offset+2
|
||||
target = self.get_target(offset)
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.setup_loop_targets[offset] = target
|
||||
@@ -601,22 +633,35 @@ class Scanner3(Scanner):
|
||||
self.fixed_jumps[offset] = end
|
||||
(line_no, next_line_byte) = self.lines[offset]
|
||||
jump_back = self.last_instr(start, end, self.opc.JUMP_ABSOLUTE,
|
||||
next_line_byte, False)
|
||||
next_line_byte, False)
|
||||
|
||||
if jump_back and jump_back != self.prev_op[end] and self.is_jump_forward(jump_back+3):
|
||||
if (code[self.prev_op[end]] == self.opc.RETURN_VALUE
|
||||
or (code[self.prev_op[end]] == self.opc.POP_BLOCK
|
||||
and code[self.prev_op[self.prev_op[end]]] == self.opc.RETURN_VALUE)):
|
||||
if jump_back:
|
||||
jump_forward_offset = jump_back+3
|
||||
else:
|
||||
jump_forward_offset = None
|
||||
|
||||
return_val_offset1 = self.prev[self.prev[end]]
|
||||
|
||||
if (jump_back and jump_back != self.prev_op[end]
|
||||
and self.is_jump_forward(jump_forward_offset)):
|
||||
if (code[self.prev_op[end]] == self.opc.RETURN_VALUE or
|
||||
(code[self.prev_op[end]] == self.opc.POP_BLOCK
|
||||
and code[return_val_offset1] == self.opc.RETURN_VALUE)):
|
||||
jump_back = None
|
||||
if not jump_back: # loop suite ends in return. wtf right?
|
||||
jump_back = self.last_instr(start, end, self.opc.RETURN_VALUE) + 1
|
||||
if not jump_back:
|
||||
jump_back = self.last_instr(start, end, self.opc.RETURN_VALUE)
|
||||
if not jump_back:
|
||||
return
|
||||
|
||||
jump_back += 2
|
||||
if_offset = None
|
||||
if code[self.prev_op[next_line_byte]] not in POP_JUMP_TF:
|
||||
loop_type = 'for'
|
||||
else:
|
||||
if_offset = self.prev[next_line_byte]
|
||||
if if_offset:
|
||||
loop_type = 'while'
|
||||
self.ignore_if.add(self.prev_op[next_line_byte])
|
||||
self.ignore_if.add(if_offset)
|
||||
else:
|
||||
loop_type = 'for'
|
||||
target = next_line_byte
|
||||
end = jump_back + 3
|
||||
else:
|
||||
@@ -630,6 +675,7 @@ class Scanner3(Scanner):
|
||||
elif target < offset:
|
||||
self.fixed_jumps[offset] = jump_back+4
|
||||
end = jump_back+4
|
||||
|
||||
target = self.get_target(jump_back)
|
||||
|
||||
if code[target] in (self.opc.FOR_ITER, self.opc.GET_ITER):
|
||||
@@ -637,6 +683,7 @@ class Scanner3(Scanner):
|
||||
else:
|
||||
loop_type = 'while'
|
||||
test = self.prev_op[next_line_byte]
|
||||
|
||||
if test == offset:
|
||||
loop_type = 'while 1'
|
||||
elif self.code[test] in op3.hasjabs+op3.hasjrel:
|
||||
@@ -677,38 +724,40 @@ class Scanner3(Scanner):
|
||||
'end': prev_op[target]})
|
||||
return
|
||||
|
||||
# Is it an "and" inside an "if" block
|
||||
# The op offset just before the target jump offset is important
|
||||
# in making a determination of what we have. Save that.
|
||||
pre_rtarget = prev_op[rtarget]
|
||||
|
||||
# Is it an "and" inside an "if" or "while" block
|
||||
if op == self.opc.POP_JUMP_IF_FALSE:
|
||||
|
||||
# Search for another POP_JUMP_IF_FALSE targetting the same op,
|
||||
# in current statement, starting from current offset, and filter
|
||||
# everything inside inner 'or' jumps and midline ifs
|
||||
match = self.rem_or(start, self.next_stmt[offset],
|
||||
self.opc.POP_JUMP_IF_FALSE, target)
|
||||
## We can't remove mid-line ifs because line structures have changed
|
||||
## from restructBytecode().
|
||||
## match = self.remove_mid_line_ifs(match)
|
||||
|
||||
# If we still have any offsets in set, start working on it
|
||||
if match:
|
||||
is_jump_forward = self.is_jump_forward(prev_op[rtarget])
|
||||
if (is_jump_forward and prev_op[rtarget] not in self.stmts and
|
||||
self.restrict_to_parent(self.get_target(prev_op[rtarget]), parent) == rtarget):
|
||||
if (code[prev_op[prev_op[rtarget]]] == self.opc.JUMP_ABSOLUTE
|
||||
is_jump_forward = self.is_jump_forward(pre_rtarget)
|
||||
if (is_jump_forward and pre_rtarget not in self.stmts and
|
||||
self.restrict_to_parent(self.get_target(pre_rtarget), parent) == rtarget):
|
||||
if (code[prev_op[pre_rtarget]] == self.opc.JUMP_ABSOLUTE
|
||||
and self.remove_mid_line_ifs([offset]) and
|
||||
target == self.get_target(prev_op[prev_op[rtarget]]) and
|
||||
(prev_op[prev_op[rtarget]] not in self.stmts or
|
||||
self.get_target(prev_op[prev_op[rtarget]]) > prev_op[prev_op[rtarget]]) and
|
||||
1 == len(self.remove_mid_line_ifs(self.rem_or(start, prev_op[prev_op[rtarget]], POP_JUMP_TF, target)))):
|
||||
target == self.get_target(prev_op[pre_rtarget]) and
|
||||
(prev_op[pre_rtarget] not in self.stmts or
|
||||
self.get_target(prev_op[pre_rtarget]) > prev_op[pre_rtarget]) and
|
||||
1 == len(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget], POP_JUMP_TF, target)))):
|
||||
pass
|
||||
elif (code[prev_op[prev_op[rtarget]]] == self.opc.RETURN_VALUE
|
||||
elif (code[prev_op[pre_rtarget]] == self.opc.RETURN_VALUE
|
||||
and self.remove_mid_line_ifs([offset]) and
|
||||
1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[prev_op[rtarget]],
|
||||
1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget],
|
||||
POP_JUMP_TF, target))) |
|
||||
set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[prev_op[rtarget]],
|
||||
set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget],
|
||||
(self.opc.POP_JUMP_IF_FALSE,
|
||||
self.opc.POP_JUMP_IF_TRUE,
|
||||
self.opc.JUMP_ABSOLUTE),
|
||||
prev_op[rtarget], True)))))):
|
||||
pre_rtarget, True)))))):
|
||||
pass
|
||||
else:
|
||||
fix = None
|
||||
@@ -736,7 +785,7 @@ class Scanner3(Scanner):
|
||||
if code[prev_op[next]] == self.opc.POP_JUMP_IF_FALSE:
|
||||
if (code[next] == self.opc.JUMP_FORWARD
|
||||
or target != rtarget
|
||||
or code[prev_op[prev_op[rtarget]]] not in
|
||||
or code[prev_op[pre_rtarget]] not in
|
||||
(self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE)):
|
||||
self.fixed_jumps[offset] = prev_op[next]
|
||||
return
|
||||
@@ -749,38 +798,62 @@ class Scanner3(Scanner):
|
||||
if offset in self.ignore_if:
|
||||
return
|
||||
|
||||
if (code[prev_op[rtarget]] == self.opc.JUMP_ABSOLUTE and
|
||||
prev_op[rtarget] in self.stmts and
|
||||
prev_op[rtarget] != offset and
|
||||
prev_op[prev_op[rtarget]] != offset and
|
||||
if (code[pre_rtarget] == self.opc.JUMP_ABSOLUTE and
|
||||
pre_rtarget in self.stmts and
|
||||
pre_rtarget != offset and
|
||||
prev_op[pre_rtarget] != offset and
|
||||
not (code[rtarget] == self.opc.JUMP_ABSOLUTE and
|
||||
code[rtarget+3] == self.opc.POP_BLOCK and
|
||||
code[prev_op[prev_op[rtarget]]] != self.opc.JUMP_ABSOLUTE)):
|
||||
rtarget = prev_op[rtarget]
|
||||
code[prev_op[pre_rtarget]] != self.opc.JUMP_ABSOLUTE)):
|
||||
rtarget = pre_rtarget
|
||||
|
||||
# Does the "if" jump just beyond a jump op, then this can be
|
||||
# a block inside an "if" statement
|
||||
if self.is_jump_forward(prev_op[rtarget]):
|
||||
if_end = self.get_target(prev_op[rtarget])
|
||||
# Does the "jump if" jump beyond a jump op?
|
||||
# That is, we have something like:
|
||||
# POP_JUMP_IF_FALSE HERE
|
||||
# ...
|
||||
# JUMP_FORWARD
|
||||
# HERE:
|
||||
#
|
||||
# If so, this can be block inside an "if" statement
|
||||
# or a conditional assignment like:
|
||||
# x = 1 if x else 2
|
||||
#
|
||||
# There are other contexts we may need to consider
|
||||
# like whether the target is "END_FINALLY"
|
||||
# or if the condition jump is to a forward location
|
||||
if self.is_jump_forward(pre_rtarget):
|
||||
if_end = self.get_target(pre_rtarget)
|
||||
|
||||
# Is this a loop and not an "if" statement?
|
||||
if ((if_end < prev_op[rtarget]) and
|
||||
# If the jump target is back, we are looping
|
||||
if (if_end < pre_rtarget and
|
||||
(code[prev_op[if_end]] == self.opc.SETUP_LOOP)):
|
||||
if(if_end > start):
|
||||
if (if_end > start):
|
||||
return
|
||||
|
||||
end = self.restrict_to_parent(if_end, parent)
|
||||
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': prev_op[rtarget]})
|
||||
self.not_continue.add(prev_op[rtarget])
|
||||
'end': pre_rtarget})
|
||||
self.not_continue.add(pre_rtarget)
|
||||
|
||||
if rtarget < end:
|
||||
self.structs.append({'type': 'if-else',
|
||||
if rtarget < end and (
|
||||
code[rtarget] not in (self.opc.END_FINALLY,
|
||||
self.opc.JUMP_ABSOLUTE) and
|
||||
code[prev_op[pre_rtarget]] not in (self.opc.POP_EXCEPT,
|
||||
self.opc.END_FINALLY)):
|
||||
self.structs.append({'type': 'else',
|
||||
'start': rtarget,
|
||||
'end': end})
|
||||
elif code[prev_op[rtarget]] == self.opc.RETURN_VALUE:
|
||||
self.else_start[rtarget] = end
|
||||
elif self.is_jump_back(pre_rtarget):
|
||||
if_end = rtarget
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': pre_rtarget})
|
||||
self.not_continue.add(pre_rtarget)
|
||||
elif code[pre_rtarget] in (self.opc.RETURN_VALUE,
|
||||
self.opc.BREAK_LOOP):
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': rtarget})
|
||||
@@ -810,8 +883,21 @@ class Scanner3(Scanner):
|
||||
return
|
||||
pass
|
||||
pass
|
||||
self.return_end_ifs.add(prev_op[rtarget])
|
||||
if code[pre_rtarget] == self.opc.RETURN_VALUE:
|
||||
self.return_end_ifs.add(pre_rtarget)
|
||||
else:
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
self.not_continue.add(pre_rtarget)
|
||||
|
||||
|
||||
elif op == self.opc.SETUP_EXCEPT:
|
||||
target = self.get_target(offset)
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.fixed_jumps[offset] = end
|
||||
elif op == self.opc.SETUP_FINALLY:
|
||||
target = self.get_target(offset)
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.fixed_jumps[offset] = end
|
||||
elif op in self.jump_if_pop:
|
||||
target = self.get_target(offset)
|
||||
if target > offset:
|
||||
@@ -849,6 +935,16 @@ class Scanner3(Scanner):
|
||||
pass
|
||||
return
|
||||
|
||||
    def is_jump_back(self, offset):
        """
        Return True if the code at offset is some sort of jump back:
        that is, a JUMP_ABSOLUTE whose target is earlier than the
        jump instruction itself.
        """
        if self.code[offset] != self.opc.JUMP_ABSOLUTE:
            return False
        return offset > self.get_target(offset)
|
||||
|
||||
def next_except_jump(self, start):
|
||||
"""
|
||||
Return the next jump that was generated by an except SomeException:
|
||||
@@ -869,7 +965,9 @@ class Scanner3(Scanner):
|
||||
op = self.code[i]
|
||||
if op == self.opc.END_FINALLY:
|
||||
if count_END_FINALLY == count_SETUP_:
|
||||
assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
|
||||
assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE,
|
||||
JUMP_FORWARD,
|
||||
RETURN_VALUE)
|
||||
self.not_continue.add(self.prev_op[i])
|
||||
return self.prev_op[i]
|
||||
count_END_FINALLY += 1
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016, 2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 3.0 bytecode scanner/deparser
|
||||
|
||||
@@ -12,6 +12,8 @@ from __future__ import print_function
|
||||
from xdis.opcodes import opcode_30 as opc
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
|
||||
JUMP_TF = frozenset([opc.JUMP_IF_FALSE, opc.JUMP_IF_TRUE])
|
||||
|
||||
from uncompyle6.scanners.scanner3 import Scanner3
|
||||
class Scanner30(Scanner3):
|
||||
|
||||
@@ -20,6 +22,373 @@ class Scanner30(Scanner3):
|
||||
return
|
||||
pass
|
||||
|
||||
def detect_control_flow(self, offset, targets):
|
||||
"""
|
||||
Detect structures and their boundaries to fix optimized jumps
|
||||
Python 3.0 is more like Python 2.6 than it is Python 3.x.
|
||||
So we have a special routine here.
|
||||
"""
|
||||
|
||||
code = self.code
|
||||
op = code[offset]
|
||||
|
||||
# Detect parent structure
|
||||
parent = self.structs[0]
|
||||
start = parent['start']
|
||||
end = parent['end']
|
||||
|
||||
# Pick inner-most parent for our offset
|
||||
for struct in self.structs:
|
||||
current_start = struct['start']
|
||||
current_end = struct['end']
|
||||
if ((current_start <= offset < current_end)
|
||||
and (current_start >= start and current_end <= end)):
|
||||
start = current_start
|
||||
end = current_end
|
||||
parent = struct
|
||||
|
||||
if op == self.opc.SETUP_LOOP:
|
||||
# We categorize loop types: 'for', 'while', 'while 1' with
|
||||
# possibly suffixes '-loop' and '-else'
|
||||
# Try to find the jump_back instruction of the loop.
|
||||
# It could be a return instruction.
|
||||
|
||||
start = offset+3
|
||||
target = self.get_target(offset)
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.setup_loop_targets[offset] = target
|
||||
self.setup_loops[target] = offset
|
||||
|
||||
if target != end:
|
||||
self.fixed_jumps[offset] = end
|
||||
|
||||
(line_no, next_line_byte) = self.lines[offset]
|
||||
jump_back = self.last_instr(start, end, self.opc.JUMP_ABSOLUTE,
|
||||
next_line_byte, False)
|
||||
|
||||
if jump_back:
|
||||
jump_forward_offset = jump_back+3
|
||||
else:
|
||||
jump_forward_offset = None
|
||||
|
||||
return_val_offset1 = self.prev[self.prev[end]]
|
||||
|
||||
if (jump_back and jump_back != self.prev_op[end]
|
||||
and self.is_jump_forward(jump_forward_offset)):
|
||||
if (code[self.prev_op[end]] == self.opc.RETURN_VALUE or
|
||||
(code[self.prev_op[end]] == self.opc.POP_BLOCK
|
||||
and code[return_val_offset1] == self.opc.RETURN_VALUE)):
|
||||
jump_back = None
|
||||
if not jump_back:
|
||||
# loop suite ends in return
|
||||
jump_back = self.last_instr(start, end, self.opc.RETURN_VALUE)
|
||||
if not jump_back:
|
||||
return
|
||||
|
||||
jump_back += 2
|
||||
if_offset = None
|
||||
if code[self.prev_op[next_line_byte]] not in JUMP_TF:
|
||||
if_offset = self.prev[next_line_byte]
|
||||
if if_offset:
|
||||
loop_type = 'while'
|
||||
self.ignore_if.add(if_offset)
|
||||
else:
|
||||
loop_type = 'for'
|
||||
target = next_line_byte
|
||||
end = jump_back + 3
|
||||
else:
|
||||
if self.get_target(jump_back) >= next_line_byte:
|
||||
jump_back = self.last_instr(start, end, self.opc.JUMP_ABSOLUTE, start, False)
|
||||
if end > jump_back+4 and self.is_jump_forward(end):
|
||||
if self.is_jump_forward(jump_back+4):
|
||||
if self.get_target(jump_back+4) == self.get_target(end):
|
||||
self.fixed_jumps[offset] = jump_back+4
|
||||
end = jump_back+4
|
||||
elif target < offset:
|
||||
self.fixed_jumps[offset] = jump_back+4
|
||||
end = jump_back+4
|
||||
|
||||
target = self.get_target(jump_back)
|
||||
|
||||
if code[target] in (self.opc.FOR_ITER, self.opc.GET_ITER):
|
||||
loop_type = 'for'
|
||||
else:
|
||||
loop_type = 'while'
|
||||
test = self.prev_op[next_line_byte]
|
||||
|
||||
if test == offset:
|
||||
loop_type = 'while 1'
|
||||
elif self.code[test] in opc.hasjabs+opc.hasjrel:
|
||||
self.ignore_if.add(test)
|
||||
test_target = self.get_target(test)
|
||||
if test_target > (jump_back+3):
|
||||
jump_back = test_target
|
||||
self.not_continue.add(jump_back)
|
||||
self.loops.append(target)
|
||||
self.structs.append({'type': loop_type + '-loop',
|
||||
'start': target,
|
||||
'end': jump_back})
|
||||
if jump_back+3 != end:
|
||||
self.structs.append({'type': loop_type + '-else',
|
||||
'start': jump_back+3,
|
||||
'end': end})
|
||||
elif op in JUMP_TF:
|
||||
start = offset + self.op_size(op)
|
||||
target = self.get_target(offset)
|
||||
rtarget = self.restrict_to_parent(target, parent)
|
||||
prev_op = self.prev_op
|
||||
|
||||
# Do not let jump to go out of parent struct bounds
|
||||
if target != rtarget and parent['type'] == 'and/or':
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
return
|
||||
|
||||
# Does this jump to right after another conditional jump that is
|
||||
# not myself? If so, it's part of a larger conditional.
|
||||
# rocky: if we have a conditional jump to the next instruction, then
|
||||
# possibly I am "skipping over" a "pass" or null statement.
|
||||
|
||||
if ((code[prev_op[target]] in self.pop_jump_if_pop) and
|
||||
(target > offset) and prev_op[target] != offset):
|
||||
self.fixed_jumps[offset] = prev_op[target]
|
||||
self.structs.append({'type': 'and/or',
|
||||
'start': start,
|
||||
'end': prev_op[target]})
|
||||
return
|
||||
|
||||
# The op offset just before the target jump offset is important
|
||||
# in making a determination of what we have. Save that.
|
||||
pre_rtarget = prev_op[rtarget]
|
||||
|
||||
# Is it an "and" inside an "if" or "while" block
|
||||
if op == opc.JUMP_IF_FALSE:
|
||||
|
||||
# Search for another JUMP_IF_FALSE targetting the same op,
|
||||
# in current statement, starting from current offset, and filter
|
||||
# everything inside inner 'or' jumps and midline ifs
|
||||
match = self.rem_or(start, self.next_stmt[offset],
|
||||
opc.JUMP_IF_FALSE, target)
|
||||
|
||||
# If we still have any offsets in set, start working on it
|
||||
if match:
|
||||
is_jump_forward = self.is_jump_forward(pre_rtarget)
|
||||
if (is_jump_forward and pre_rtarget not in self.stmts and
|
||||
self.restrict_to_parent(self.get_target(pre_rtarget), parent) == rtarget):
|
||||
if (code[prev_op[pre_rtarget]] == self.opc.JUMP_ABSOLUTE
|
||||
and self.remove_mid_line_ifs([offset]) and
|
||||
target == self.get_target(prev_op[pre_rtarget]) and
|
||||
(prev_op[pre_rtarget] not in self.stmts or
|
||||
self.get_target(prev_op[pre_rtarget]) > prev_op[pre_rtarget]) and
|
||||
1 == len(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget], JUMP_TF, target)))):
|
||||
pass
|
||||
elif (code[prev_op[pre_rtarget]] == self.opc.RETURN_VALUE
|
||||
and self.remove_mid_line_ifs([offset]) and
|
||||
1 == (len(set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget],
|
||||
JUMP_TF, target))) |
|
||||
set(self.remove_mid_line_ifs(self.rem_or(start, prev_op[pre_rtarget],
|
||||
(opc.JUMP_IF_FALSE,
|
||||
opc.JUMP_IF_TRUE,
|
||||
opc.JUMP_ABSOLUTE),
|
||||
pre_rtarget, True)))))):
|
||||
pass
|
||||
else:
|
||||
fix = None
|
||||
jump_ifs = self.all_instr(start, self.next_stmt[offset],
|
||||
opc.JUMP_IF_FALSE)
|
||||
last_jump_good = True
|
||||
for j in jump_ifs:
|
||||
if target == self.get_target(j):
|
||||
if self.lines[j].next == j + 3 and last_jump_good:
|
||||
fix = j
|
||||
break
|
||||
else:
|
||||
last_jump_good = False
|
||||
self.fixed_jumps[offset] = fix or match[-1]
|
||||
return
|
||||
else:
|
||||
self.fixed_jumps[offset] = match[-1]
|
||||
return
|
||||
# op == JUMP_IF_TRUE
|
||||
else:
|
||||
next = self.next_stmt[offset]
|
||||
if prev_op[next] == offset:
|
||||
pass
|
||||
elif self.is_jump_forward(next) and target == self.get_target(next):
|
||||
if code[prev_op[next]] == opc.JUMP_IF_FALSE:
|
||||
if (code[next] == self.opc.JUMP_FORWARD
|
||||
or target != rtarget
|
||||
or code[prev_op[pre_rtarget]] not in
|
||||
(self.opc.JUMP_ABSOLUTE, self.opc.RETURN_VALUE)):
|
||||
self.fixed_jumps[offset] = prev_op[next]
|
||||
return
|
||||
elif (code[next] == self.opc.JUMP_ABSOLUTE and self.is_jump_forward(target) and
|
||||
self.get_target(target) == self.get_target(next)):
|
||||
self.fixed_jumps[offset] = prev_op[next]
|
||||
return
|
||||
|
||||
# Don't add a struct for a while test, it's already taken care of
|
||||
if offset in self.ignore_if:
|
||||
return
|
||||
|
||||
if (code[pre_rtarget] == self.opc.JUMP_ABSOLUTE and
|
||||
pre_rtarget in self.stmts and
|
||||
pre_rtarget != offset and
|
||||
prev_op[pre_rtarget] != offset and
|
||||
not (code[rtarget] == self.opc.JUMP_ABSOLUTE and
|
||||
code[rtarget+3] == self.opc.POP_BLOCK and
|
||||
code[prev_op[pre_rtarget]] != self.opc.JUMP_ABSOLUTE)):
|
||||
rtarget = pre_rtarget
|
||||
|
||||
# Does the "jump if" jump beyond a jump op?
|
||||
# That is, we have something like:
|
||||
# JUMP_IF_FALSE HERE
|
||||
# ...
|
||||
# JUMP_FORWARD
|
||||
# HERE:
|
||||
#
|
||||
# If so, this can be block inside an "if" statement
|
||||
# or a conditional assignment like:
|
||||
# x = 1 if x else 2
|
||||
#
|
||||
# There are other contexts we may need to consider
|
||||
# like whether the target is "END_FINALLY"
|
||||
# or if the condition jump is to a forward location
|
||||
if self.is_jump_forward(pre_rtarget):
|
||||
if_end = self.get_target(pre_rtarget)
|
||||
|
||||
# If the jump target is back, we are looping
|
||||
if (if_end < pre_rtarget and
|
||||
(code[prev_op[if_end]] == self.opc.SETUP_LOOP)):
|
||||
if (if_end > start):
|
||||
return
|
||||
|
||||
end = self.restrict_to_parent(if_end, parent)
|
||||
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': pre_rtarget})
|
||||
self.not_continue.add(pre_rtarget)
|
||||
|
||||
# if rtarget < end and (
|
||||
# code[rtarget] not in (self.opc.END_FINALLY,
|
||||
# self.opc.JUMP_ABSOLUTE) and
|
||||
# code[prev_op[pre_rtarget]] not in (self.opc.POP_EXCEPT,
|
||||
# self.opc.END_FINALLY)):
|
||||
# self.structs.append({'type': 'else',
|
||||
# 'start': rtarget,
|
||||
# 'end': end})
|
||||
# self.else_start[rtarget] = end
|
||||
elif self.is_jump_back(pre_rtarget):
|
||||
if_end = rtarget
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': pre_rtarget})
|
||||
self.not_continue.add(pre_rtarget)
|
||||
elif code[pre_rtarget] in (self.opc.RETURN_VALUE,
|
||||
self.opc.BREAK_LOOP):
|
||||
self.structs.append({'type': 'if-then',
|
||||
'start': start,
|
||||
'end': rtarget})
|
||||
# It is important to distingish if this return is inside some sort
|
||||
# except block return
|
||||
jump_prev = prev_op[offset]
|
||||
if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
|
||||
if self.opc.cmp_op[code[jump_prev+1]] == 'exception match':
|
||||
return
|
||||
if self.version >= 3.5:
|
||||
# Python 3.5 may remove as dead code a JUMP
|
||||
# instruction after a RETURN_VALUE. So we check
|
||||
# based on seeing SETUP_EXCEPT various places.
|
||||
if code[rtarget] == self.opc.SETUP_EXCEPT:
|
||||
return
|
||||
# Check that next instruction after pops and jump is
|
||||
# not from SETUP_EXCEPT
|
||||
next_op = rtarget
|
||||
if code[next_op] == self.opc.POP_BLOCK:
|
||||
next_op += self.op_size(self.code[next_op])
|
||||
if code[next_op] == self.opc.JUMP_ABSOLUTE:
|
||||
next_op += self.op_size(self.code[next_op])
|
||||
if next_op in targets:
|
||||
for try_op in targets[next_op]:
|
||||
come_from_op = code[try_op]
|
||||
if come_from_op == self.opc.SETUP_EXCEPT:
|
||||
return
|
||||
pass
|
||||
pass
|
||||
if code[pre_rtarget] == self.opc.RETURN_VALUE:
|
||||
self.return_end_ifs.add(pre_rtarget)
|
||||
else:
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
self.not_continue.add(pre_rtarget)
|
||||
|
||||
|
||||
elif op == self.opc.SETUP_EXCEPT:
|
||||
target = self.get_target(offset)
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.fixed_jumps[offset] = end
|
||||
elif op == self.opc.SETUP_FINALLY:
|
||||
target = self.get_target(offset)
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.fixed_jumps[offset] = end
|
||||
elif op in self.jump_if_pop:
|
||||
target = self.get_target(offset)
|
||||
if target > offset:
|
||||
unop_target = self.last_instr(offset, target, self.opc.JUMP_FORWARD, target)
|
||||
if unop_target and code[unop_target+3] != self.opc.ROT_TWO:
|
||||
self.fixed_jumps[offset] = unop_target
|
||||
else:
|
||||
self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)
|
||||
pass
|
||||
pass
|
||||
elif self.version >= 3.5:
|
||||
# 3.5+ has Jump optimization which too often causes RETURN_VALUE to get
|
||||
# misclassified as RETURN_END_IF. Handle that here.
|
||||
# In RETURN_VALUE, JUMP_ABSOLUTE, RETURN_VALUE is never RETURN_END_IF
|
||||
if op == self.opc.RETURN_VALUE:
|
||||
if (offset+1 < len(code) and code[offset+1] == self.opc.JUMP_ABSOLUTE and
|
||||
offset in self.return_end_ifs):
|
||||
self.return_end_ifs.remove(offset)
|
||||
pass
|
||||
pass
|
||||
elif op == self.opc.JUMP_FORWARD:
|
||||
# If we have:
|
||||
# JUMP_FORWARD x, [non-jump, insns], RETURN_VALUE, x:
|
||||
# then RETURN_VALUE is not RETURN_END_IF
|
||||
rtarget = self.get_target(offset)
|
||||
rtarget_prev = self.prev[rtarget]
|
||||
if (code[rtarget_prev] == self.opc.RETURN_VALUE and
|
||||
rtarget_prev in self.return_end_ifs):
|
||||
i = rtarget_prev
|
||||
while i != offset:
|
||||
if code[i] in [opc.JUMP_FORWARD, opc.JUMP_ABSOLUTE]:
|
||||
return
|
||||
i = self.prev[i]
|
||||
self.return_end_ifs.remove(rtarget_prev)
|
||||
pass
|
||||
return
|
||||
|
||||
def rem_or(self, start, end, instr, target=None, include_beyond_target=False):
|
||||
"""
|
||||
Find offsets of all requested <instr> between <start> and <end>,
|
||||
optionally <target>ing specified offset, and return list found
|
||||
<instr> offsets which are not within any POP_JUMP_IF_TRUE jumps.
|
||||
"""
|
||||
assert(start>=0 and end<=len(self.code) and start <= end)
|
||||
|
||||
# Find all offsets of requested instructions
|
||||
instr_offsets = self.all_instr(start, end, instr, target, include_beyond_target)
|
||||
# Get all JUMP_IF_TRUE (or) offsets
|
||||
pjit_offsets = self.all_instr(start, end, opc.JUMP_IF_TRUE)
|
||||
filtered = []
|
||||
for pjit_offset in pjit_offsets:
|
||||
pjit_tgt = self.get_target(pjit_offset) - 3
|
||||
for instr_offset in instr_offsets:
|
||||
if instr_offset <= pjit_offset or instr_offset >= pjit_tgt:
|
||||
filtered.append(instr_offset)
|
||||
instr_offsets = filtered
|
||||
filtered = []
|
||||
return instr_offsets
|
||||
|
||||
if __name__ == "__main__":
|
||||
from uncompyle6 import PYTHON_VERSION
|
||||
if PYTHON_VERSION == 3.0:
|
||||
|
@@ -66,7 +66,8 @@ class Token:
        pattr = self.pattr
        if self.opc:
            if self.op in self.opc.hasjrel:
                pattr = "to " + self.pattr
                if not self.pattr.startswith('to '):
                    pattr = "to " + self.pattr
            elif self.op in self.opc.hasjabs:
                self.pattr= str(self.pattr)
                if not self.pattr.startswith('to '):
uncompyle6/semantics/consts.py | 347 lines (new file)
@@ -0,0 +1,347 @@
# Copyright (c) 2017 by Rocky Bernstein
"""Constants used in pysource.py"""

import re, sys

from uncompyle6.parsers.astnode import AST
from uncompyle6 import PYTHON3
from uncompyle6.scanners.tok import Token, NoneToken

if PYTHON3:
    minint = -sys.maxsize-1
    maxint = sys.maxsize
else:
    minint = -sys.maxint-1
    maxint = sys.maxint


LINE_LENGTH = 80

# Some ASTs used for comparing code fragments (like 'return None' at
# the end of functions).

RETURN_LOCALS = AST('return_stmt',
                    [ AST('ret_expr', [AST('expr', [ Token('LOAD_LOCALS') ])]),
                      Token('RETURN_VALUE')])

NONE = AST('expr', [ NoneToken ] )

RETURN_NONE = AST('stmt',
                  [ AST('return_stmt',
                        [ NONE, Token('RETURN_VALUE')]) ])

PASS = AST('stmts',
           [ AST('sstmt',
                 [ AST('stmt',
                       [ AST('passstmt', [])])])])

ASSIGN_DOC_STRING = lambda doc_string: \
  AST('stmt',
      [ AST('assign',
            [ AST('expr', [ Token('LOAD_CONST', pattr=doc_string) ]),
              AST('designator', [ Token('STORE_NAME', pattr='__doc__')])
            ])])

NAME_MODULE = AST('stmt',
                  [ AST('assign',
                        [ AST('expr',
                              [Token('LOAD_NAME', pattr='__name__', offset=0, has_arg=True)]),
                          AST('designator',
                              [ Token('STORE_NAME', pattr='__module__', offset=3, has_arg=True)])
                        ])])

# God intended \t, but Python has decided to use 4 spaces.
# If you want real tabs, use Go.
# TAB = '\t'
TAB = ' ' * 4
INDENT_PER_LEVEL = ' ' # additional indent per pretty-print level
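A rough sketch (not the actual pysource.py logic) of how template ASTs such as RETURN_NONE are typically used: the semantic pass compares the tail of a function body against the template and, on a match, drops the implicit return the compiler appended.

    def strip_implicit_return(body_nodes):
        # body_nodes: list of stmt subtrees making up a function body
        if body_nodes and body_nodes[-1] == RETURN_NONE:
            return body_nodes[:-1]
        return body_nodes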
TABLE_R = {
|
||||
'STORE_ATTR': ( '%c.%[1]{pattr}', 0),
|
||||
# 'STORE_SUBSCR': ( '%c[%c]', 0, 1 ),
|
||||
'DELETE_ATTR': ( '%|del %c.%[-1]{pattr}\n', 0 ),
|
||||
# 'EXEC_STMT': ( '%|exec %c in %[1]C\n', 0, (0,maxint,', ') ),
|
||||
}
|
||||
|
||||
TABLE_R0 = {
|
||||
# 'BUILD_LIST': ( '[%C]', (0,-1,', ') ),
|
||||
# 'BUILD_TUPLE': ( '(%C)', (0,-1,', ') ),
|
||||
# 'CALL_FUNCTION': ( '%c(%P)', 0, (1,-1,', ') ),
|
||||
}
|
||||
TABLE_DIRECT = {
|
||||
'BINARY_ADD': ( '+' ,),
|
||||
'BINARY_SUBTRACT': ( '-' ,),
|
||||
'BINARY_MULTIPLY': ( '*' ,),
|
||||
'BINARY_DIVIDE': ( '/' ,),
|
||||
'BINARY_MATRIX_MULTIPLY': ( '@' ,),
|
||||
'BINARY_TRUE_DIVIDE': ( '/' ,),
|
||||
'BINARY_FLOOR_DIVIDE': ( '//' ,),
|
||||
'BINARY_MODULO': ( '%%',),
|
||||
'BINARY_POWER': ( '**',),
|
||||
'BINARY_LSHIFT': ( '<<',),
|
||||
'BINARY_RSHIFT': ( '>>',),
|
||||
'BINARY_AND': ( '&' ,),
|
||||
'BINARY_OR': ( '|' ,),
|
||||
'BINARY_XOR': ( '^' ,),
|
||||
'INPLACE_ADD': ( '+=' ,),
|
||||
'INPLACE_SUBTRACT': ( '-=' ,),
|
||||
'INPLACE_MULTIPLY': ( '*=' ,),
|
||||
'INPLACE_MATRIX_MULTIPLY': ( '@=' ,),
|
||||
'INPLACE_DIVIDE': ( '/=' ,),
|
||||
'INPLACE_TRUE_DIVIDE': ( '/=' ,),
|
||||
'INPLACE_FLOOR_DIVIDE': ( '//=' ,),
|
||||
'INPLACE_MODULO': ( '%%=',),
|
||||
'INPLACE_POWER': ( '**=',),
|
||||
'INPLACE_LSHIFT': ( '<<=',),
|
||||
'INPLACE_RSHIFT': ( '>>=',),
|
||||
'INPLACE_AND': ( '&=' ,),
|
||||
'INPLACE_OR': ( '|=' ,),
|
||||
'INPLACE_XOR': ( '^=' ,),
|
||||
'binary_expr': ( '%c %c %c', 0, -1, 1 ),
|
||||
|
||||
'UNARY_POSITIVE': ( '+',),
|
||||
'UNARY_NEGATIVE': ( '-',),
|
||||
'UNARY_INVERT': ( '~%c'),
|
||||
'unary_expr': ( '%c%c', 1, 0),
|
||||
|
||||
'unary_not': ( 'not %c', 0 ),
|
||||
'unary_convert': ( '`%c`', 0 ),
|
||||
'get_iter': ( 'iter(%c)', 0 ),
|
||||
'slice0': ( '%c[:]', 0 ),
|
||||
'slice1': ( '%c[%p:]', 0, (1, 100) ),
|
||||
'slice2': ( '%c[:%p]', 0, (1, 100) ),
|
||||
'slice3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),
|
||||
|
||||
'IMPORT_FROM': ( '%{pattr}', ),
|
||||
'load_attr': ( '%c.%[1]{pattr}', 0),
|
||||
'LOAD_FAST': ( '%{pattr}', ),
|
||||
'LOAD_NAME': ( '%{pattr}', ),
|
||||
'LOAD_CLASSNAME': ( '%{pattr}', ),
|
||||
'LOAD_GLOBAL': ( '%{pattr}', ),
|
||||
'LOAD_DEREF': ( '%{pattr}', ),
|
||||
'LOAD_LOCALS': ( 'locals()', ),
|
||||
'LOAD_ASSERT': ( '%{pattr}', ),
|
||||
# 'LOAD_CONST': ( '%{pattr}', ), # handled by n_LOAD_CONST
|
||||
'DELETE_FAST': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_NAME': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_GLOBAL': ( '%|del %{pattr}\n', ),
|
||||
'delete_subscr': ( '%|del %c[%c]\n', 0, 1,),
|
||||
'binary_subscr': ( '%c[%p]', 0, (1, 100)),
|
||||
'binary_subscr2': ( '%c[%p]', 0, (1, 100)),
|
||||
'store_subscr': ( '%c[%c]', 0, 1),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'STORE_NAME': ( '%{pattr}', ),
|
||||
'STORE_GLOBAL': ( '%{pattr}', ),
|
||||
'STORE_DEREF': ( '%{pattr}', ),
|
||||
'unpack': ( '%C%,', (1, maxint, ', ') ),
|
||||
|
||||
# This nonterminal we create on the fly in semantic routines
|
||||
'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),
|
||||
|
||||
'unpack_list': ( '[%C]', (1, maxint, ', ') ),
|
||||
'build_tuple2': ( '%P', (0, -1, ', ', 100) ),
|
||||
|
||||
# 'list_compr': ( '[ %c ]', -2), # handled by n_list_compr
|
||||
'list_iter': ( '%c', 0),
|
||||
'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
|
||||
'list_if': ( ' if %c%c', 0, 2 ),
|
||||
'list_if_not': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'lc_body': ( '', ), # ignore when recursing
|
||||
|
||||
'comp_iter': ( '%c', 0),
|
||||
'comp_if': ( ' if %c%c', 0, 2 ),
|
||||
'comp_ifnot': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'comp_body': ( '', ), # ignore when recursing
|
||||
'set_comp_body': ( '%c', 0 ),
|
||||
'gen_comp_body': ( '%c', 0 ),
|
||||
'dict_comp_body': ( '%c:%c', 1, 0 ),
|
||||
|
||||
'assign': ( '%|%c = %p\n', -1, (0, 200) ),
|
||||
|
||||
# The 2nd parameter should have a = suffix.
|
||||
# There is a rule with a 4th parameter "designator"
|
||||
# which we don't use here.
|
||||
'augassign1': ( '%|%c %c %c\n', 0, 2, 1),
|
||||
|
||||
'augassign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4),
|
||||
'designList': ( '%c = %c', 0, -1 ),
|
||||
'and': ( '%c and %c', 0, 2 ),
|
||||
'ret_and': ( '%c and %c', 0, 2 ),
|
||||
'and2': ( '%c', 3 ),
|
||||
'or': ( '%c or %c', 0, 2 ),
|
||||
'ret_or': ( '%c or %c', 0, 2 ),
|
||||
'conditional': ( '%p if %p else %p', (2, 27), (0, 27), (4, 27)),
|
||||
'ret_cond': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27)),
|
||||
'conditionalnot': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27)),
|
||||
'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27)),
|
||||
'conditional_lambda': ( '(%c if %c else %c)', 2, 0, 3),
|
||||
'return_lambda': ('%c', 0),
|
||||
'compare': ( '%p %[-1]{pattr} %p', (0, 19), (1, 19) ),
|
||||
'cmp_list': ( '%p %p', (0, 20), (1, 19)),
|
||||
'cmp_list1': ( '%[3]{pattr} %p %p', (0, 19), (-2, 19)),
|
||||
'cmp_list2': ( '%[1]{pattr} %p', (0, 19)),
|
||||
# 'classdef': (), # handled by n_classdef()
|
||||
'funcdef': ( '\n\n%|def %c\n', -2), # -2 to handle closures
|
||||
'funcdefdeco': ( '\n\n%c', 0),
|
||||
'mkfuncdeco': ( '%|@%c\n%c', 0, 1),
|
||||
'mkfuncdeco0': ( '%|def %c\n', 0),
|
||||
'classdefdeco': ( '\n\n%c', 0),
|
||||
'classdefdeco1': ( '%|@%c\n%c', 0, 1),
|
||||
'kwarg': ( '%[0]{pattr}=%c', 1),
|
||||
'kwargs': ( '%D', (0, maxint, ', ') ),
|
||||
|
||||
'assert_expr_or': ( '%c or %c', 0, 2 ),
|
||||
'assert_expr_and': ( '%c and %c', 0, 2 ),
|
||||
'print_items_stmt': ( '%|print %c%c,\n', 0, 2), # Python 2 only
|
||||
'print_items_nl_stmt': ( '%|print %c%c\n', 0, 2),
|
||||
'print_item': ( ', %c', 0),
|
||||
'print_nl': ( '%|print\n', ),
|
||||
'print_to': ( '%|print >> %c, %c,\n', 0, 1 ),
|
||||
'print_to_nl': ( '%|print >> %c, %c\n', 0, 1 ),
|
||||
'print_nl_to': ( '%|print >> %c\n', 0 ),
|
||||
'print_to_items': ( '%C', (0, 2, ', ') ),
|
||||
|
||||
'call_stmt': ( '%|%p\n', (0, 200)),
|
||||
'break_stmt': ( '%|break\n', ),
|
||||
'continue_stmt': ( '%|continue\n', ),
|
||||
|
||||
'raise_stmt0': ( '%|raise\n', ),
|
||||
'raise_stmt1': ( '%|raise %c\n', 0),
|
||||
'raise_stmt3': ( '%|raise %c, %c, %c\n', 0, 1, 2),
|
||||
# 'yield': ( 'yield %c', 0),
|
||||
# 'return_stmt': ( '%|return %c\n', 0),
|
||||
|
||||
'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'iflaststmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'iflaststmtl': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'testtrue': ( 'not %p', (0, 22) ),
|
||||
|
||||
'ifelsestmt': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelsestmtc': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelsestmtl': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelifstmt': ( '%|if %c:\n%+%c%-%c', 0, 1, 3 ),
|
||||
'elifelifstmt': ( '%|elif %c:\n%+%c%-%c', 0, 1, 3 ),
|
||||
'elifstmt': ( '%|elif %c:\n%+%c%-', 0, 1 ),
|
||||
'elifelsestmt': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
|
||||
'elifelsestmtr': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 2 ),
|
||||
|
||||
'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
|
||||
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
|
||||
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
|
||||
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
|
||||
'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
|
||||
'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
|
||||
'forstmt': ( '%|for %c in %c:\n%+%c%-\n\n', 3, 1, 4 ),
|
||||
'forelsestmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2),
|
||||
'forelselaststmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-', 3, 1, 4, -2),
|
||||
'forelselaststmtl': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2),
|
||||
'trystmt': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
|
||||
'tryelsestmt': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n', 1, 3, 4 ),
|
||||
'tryelsestmtc': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tryelsestmtl': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tf_trystmt': ( '%c%-%c%+', 1, 3 ),
|
||||
'tf_tryelsestmt': ( '%c%-%c%|else:\n%+%c', 1, 3, 4 ),
|
||||
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
|
||||
'except': ( '%|except:\n%+%c%-', 3 ),
|
||||
'except_cond1': ( '%|except %c:\n', 1 ),
|
||||
'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
|
||||
'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
|
||||
'withstmt': ( '%|with %c:\n%+%c%-', 0, 3),
|
||||
'withasstmt': ( '%|with %c as %c:\n%+%c%-', 0, 2, 3),
|
||||
'passstmt': ( '%|pass\n', ),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'kv': ( '%c: %c', 3, 1 ),
|
||||
'kv2': ( '%c: %c', 1, 2 ),
|
||||
'mapexpr': ( '{%[1]C}', (0, maxint, ', ') ),
|
||||
'importstmt': ( '%|import %c\n', 2),
|
||||
'importfrom': ( '%|from %[2]{pattr} import %c\n', 3 ),
|
||||
'importstar': ( '%|from %[2]{pattr} import *\n', ),
|
||||
}
|
||||
|
||||
|
||||
MAP_DIRECT = (TABLE_DIRECT, )
|
||||
MAP_R0 = (TABLE_R0, -1, 0)
|
||||
MAP_R = (TABLE_R, -1)
|
||||
|
||||
MAP = {
|
||||
'stmt': MAP_R,
|
||||
'call_function': MAP_R,
|
||||
'del_stmt': MAP_R,
|
||||
'designator': MAP_R,
|
||||
'exprlist': MAP_R0,
|
||||
}
|
||||
|
||||
PRECEDENCE = {
|
||||
'build_list': 0,
|
||||
'mapexpr': 0,
|
||||
'unary_convert': 0,
|
||||
'dictcomp': 0,
|
||||
'setcomp': 0,
|
||||
'list_compr': 0,
|
||||
'genexpr': 0,
|
||||
|
||||
'load_attr': 2,
|
||||
'binary_subscr': 2,
|
||||
'binary_subscr2': 2,
|
||||
'slice0': 2,
|
||||
'slice1': 2,
|
||||
'slice2': 2,
|
||||
'slice3': 2,
|
||||
'buildslice2': 2,
|
||||
'buildslice3': 2,
|
||||
'call_function': 2,
|
||||
|
||||
'BINARY_POWER': 4,
|
||||
|
||||
'unary_expr': 6,
|
||||
|
||||
'BINARY_MULTIPLY': 8,
|
||||
'BINARY_DIVIDE': 8,
|
||||
'BINARY_TRUE_DIVIDE': 8,
|
||||
'BINARY_FLOOR_DIVIDE': 8,
|
||||
'BINARY_MODULO': 8,
|
||||
|
||||
'BINARY_ADD': 10,
|
||||
'BINARY_SUBTRACT': 10,
|
||||
|
||||
'BINARY_LSHIFT': 12,
|
||||
'BINARY_RSHIFT': 12,
|
||||
|
||||
'BINARY_AND': 14,
|
||||
|
||||
'BINARY_XOR': 16,
|
||||
|
||||
'BINARY_OR': 18,
|
||||
|
||||
'cmp': 20,
|
||||
|
||||
'unary_not': 22,
|
||||
|
||||
'and': 24,
|
||||
'ret_and': 24,
|
||||
|
||||
'or': 26,
|
||||
'ret_or': 26,
|
||||
|
||||
'conditional': 28,
|
||||
'conditionalnot': 28,
|
||||
'ret_cond': 28,
|
||||
'ret_cond_not': 28,
|
||||
|
||||
'_mklambda': 30,
|
||||
'yield': 101,
|
||||
'yield_from': 101
|
||||
}
|
||||
|
||||
ASSIGN_TUPLE_PARAM = lambda param_name: \
|
||||
AST('expr', [ Token('LOAD_FAST', pattr=param_name) ])
|
||||
|
||||
escape = re.compile(r'''
|
||||
(?P<prefix> [^%]* )
|
||||
% ( \[ (?P<child> -? \d+ ) \] )?
|
||||
((?P<type> [^{] ) |
|
||||
( [{] (?P<expr> [^}]* ) [}] ))
|
||||
''', re.VERBOSE)
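As an aside (not part of the new file), this escape pattern is what the semantic walker uses to split a TABLE_DIRECT template into literal text and %-specifiers; a small sketch run against the 'assign' template defined above:

    # uses the `escape` regex defined just above
    for m in escape.finditer('%|%c = %p\n'):
        print(repr(m.group('prefix')), m.group('type'))
    # ''      '|'  -> emit the current indentation
    # ''      'c'  -> recurse into a child node
    # ' = '   'p'  -> write ' = ', then a child bounded by a precedence limit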
uncompyle6/semantics/fragments.py
@@ -69,17 +69,22 @@ from uncompyle6.show import (
    maybe_show_ast_param_default,
)

from uncompyle6.semantics.pysource import AST, INDENT_PER_LEVEL, NONE, PRECEDENCE, \
     ParserError, TABLE_DIRECT, escape, find_globals, minint, MAP
from uncompyle6.parsers.astnode import AST

from uncompyle6.semantics.pysource import (
    ParserError, find_globals, StringIO)

from uncompyle6.semantics.consts import (
    INDENT_PER_LEVEL, NONE, PRECEDENCE,
    TABLE_DIRECT, escape, minint, MAP
    )

from uncompyle6.semantics.make_function import find_all_globals, find_none

if PYTHON3:
    from itertools import zip_longest
    from io import StringIO
else:
    from itertools import izip_longest as zip_longest
    from StringIO import StringIO


from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
@@ -97,14 +102,14 @@ TABLE_DIRECT_FRAGMENT = {
|
||||
'importstmt': ( '%|import %c%x\n', 2, (2, (0, 1)), ),
|
||||
'importfrom': ( '%|from %[2]{pattr}%x import %c\n', (2, (0, 1)), 3),
|
||||
'importmultiple': ( '%|import%b %c%c\n', 0, 2, 3 ),
|
||||
'list_for': (' for %c%x in %c%c', 2, (2,(1,)), 0, 3 ),
|
||||
'forstmt': ( '%|for%b %c%x in %c:\n%+%c%-\n\n', 0, 3, (3, (2,)), 1, 4 ),
|
||||
'list_for': (' for %c%x in %c%c', 2, (2, (1, )), 0, 3 ),
|
||||
'forstmt': ( '%|for%b %c%x in %c:\n%+%c%-\n\n', 0, 3, (3, (2, )), 1, 4 ),
|
||||
'forelsestmt': (
|
||||
'%|for %c in %c%x:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2,)), 1, 4, -2),
|
||||
'forelselaststmt': (
|
||||
'%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-', 3, (3, (2,)), 1, 4, -2),
|
||||
'forelselaststmtl': (
|
||||
'%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2,)), 1, 4, -2),
|
||||
'%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2, )), 1, 4, -2),
|
||||
|
||||
'whilestmt': ( '%|while%b %c:\n%+%c%-\n\n', 0, 1, 2 ),
|
||||
'whileelsestmt': ( '%|while%b %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 2, -2 ),
|
||||
@@ -163,7 +168,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
                         None)

    def set_pos_info(self, node, start, finish, name=None):
        if name == None: name = self.name
        if name is None: name = self.name
        if hasattr(node, 'offset'):
            self.offsets[name, node.offset] = \
              NodeInfo(node = node, start = start, finish = finish)
@@ -583,7 +588,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.set_pos_info(node[-3], start, len(self.f.getvalue()))
|
||||
start = len(self.f.getvalue())
|
||||
self.preorder(ast[iter_index])
|
||||
self.set_pos_info(iter_index, start, len(self.f.getvalue()))
|
||||
self.set_pos_info(ast[iter_index], start, len(self.f.getvalue()))
|
||||
self.prec = p
|
||||
|
||||
def comprehension_walk3(self, node, iter_index, code_index=-5):
|
||||
@@ -622,7 +627,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
n = ast[iter_index]
|
||||
assert n == 'list_iter'
|
||||
|
||||
## FIXME: I'm not totally sure this is right.
|
||||
# FIXME: I'm not totally sure this is right.
|
||||
|
||||
# find innermost node
|
||||
if_node = None
uncompyle6/semantics/make_function.py
@@ -71,7 +71,11 @@ def make_function3_annotate(self, node, isLambda, nested=1,
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].type.startswith('MAKE_')
|
||||
|
||||
annotate_tuple = node[annotate_last]
|
||||
annotate_tuple = None
|
||||
for annotate_last in range(len(node)-1, -1, -1):
|
||||
if node[annotate_last] == 'annotate_tuple':
|
||||
annotate_tuple = node[annotate_last]
|
||||
break
|
||||
annotate_args = {}
|
||||
|
||||
if (annotate_tuple == 'annotate_tuple'
|
||||
@@ -148,13 +152,19 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
suffix = ''
|
||||
for param in paramnames[:i]:
|
||||
self.write(suffix, param)
|
||||
if param in annotate_args:
|
||||
value, string = annotate_args[param]
|
||||
if string:
|
||||
self.write(': "%s"' % value)
|
||||
else:
|
||||
self.write(': %s' % value)
|
||||
suffix = ', '
|
||||
if param in annotate_tuple[0].attr:
|
||||
p = annotate_tuple[0].attr.index(param)
|
||||
self.write(': ')
|
||||
self.preorder(node[p])
|
||||
if (line_number != self.line_number):
|
||||
suffix = ",\n" + indent
|
||||
line_number = self.line_number
|
||||
# value, string = annotate_args[param]
|
||||
# if string:
|
||||
# self.write(': "%s"' % value)
|
||||
# else:
|
||||
# self.write(': %s' % value)
|
||||
|
||||
suffix = ', ' if i > 0 else ''
|
||||
for n in node:
|
||||
@@ -163,7 +173,10 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
param = paramnames[i]
|
||||
self.write(param)
|
||||
if param in annotate_args:
|
||||
self.write(':"%s' % annotate_args[param])
|
||||
aa = annotate_args[param]
|
||||
if isinstance(aa, tuple):
|
||||
aa = aa[0]
|
||||
self.write(': "%s"' % aa)
|
||||
self.write('=')
|
||||
i += 1
|
||||
self.preorder(n)
|
||||
@@ -189,6 +202,9 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
i = 0
|
||||
for n in node[0]:
|
||||
if n == 'kwarg':
|
||||
if (line_number != self.line_number):
|
||||
self.write("\n" + indent)
|
||||
line_number = self.line_number
|
||||
self.write('%s=' % n[0].pattr)
|
||||
self.preorder(n[1])
|
||||
if i < last:
|
||||
@@ -207,19 +223,24 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
self.write(": ")
|
||||
else:
|
||||
self.write(')')
|
||||
if 'return' in annotate_args:
|
||||
value, string = annotate_args['return']
|
||||
if string:
|
||||
self.write(' -> "%s"' % value)
|
||||
else:
|
||||
self.write(' -> %s' % value)
|
||||
if 'return' in annotate_tuple[0].attr:
|
||||
if (line_number != self.line_number):
|
||||
self.write("\n" + indent)
|
||||
line_number = self.line_number
|
||||
self.write(' -> ')
|
||||
# value, string = annotate_args['return']
|
||||
# if string:
|
||||
# self.write(' -> "%s"' % value)
|
||||
# else:
|
||||
# self.write(' -> %s' % value)
|
||||
self.preorder(node[annotate_last-1])
|
||||
|
||||
self.println(":")
|
||||
|
||||
if (len(code.co_consts) > 0 and
|
||||
code.co_consts[0] is not None and not isLambda): # ugly
|
||||
# docstring exists, dump it
|
||||
self.print_docstring(indent, code.co_consts[0])
|
||||
print_docstring(self, self.indent, code.co_consts[0])
|
||||
|
||||
code._tokens = None # save memory
|
||||
assert ast == 'stmts'
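For orientation, the sort of Python 3 definition this annotation handling is meant to round-trip looks roughly like the following (an illustration, not taken from the test suite): string-valued parameter annotations, a default, and a return annotation.

    def scale(x: "float", factor: "int" = 2) -> "float":
        return x * factor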
|
||||
|
uncompyle6/semantics/pysource.py
@@ -69,21 +69,30 @@ methods implement most of the below.
"""
from __future__ import print_function

import sys, re
import sys

from uncompyle6 import PYTHON3
from xdis.code import iscode
from xdis.util import COMPILER_FLAG_BIT

from uncompyle6.parser import get_python_parser
from uncompyle6.parsers.astnode import AST
from spark_parser import GenericASTTraversal, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.scanner import Code, get_scanner
from uncompyle6.scanners.tok import Token, NoneToken
import uncompyle6.parser as python_parser
from uncompyle6.semantics.make_function import (
    make_function2, make_function3, make_function3_annotate, find_globals)
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.semantics.check_ast import checker
from uncompyle6.semantics.helper import print_docstring
from uncompyle6.scanners.tok import Token

from uncompyle6.semantics.consts import (
    LINE_LENGTH, RETURN_LOCALS, NONE, RETURN_NONE, PASS,
    ASSIGN_DOC_STRING, NAME_MODULE, TAB,
    INDENT_PER_LEVEL, TABLE_R, TABLE_DIRECT, MAP_DIRECT,
    MAP, PRECEDENCE, ASSIGN_TUPLE_PARAM, escape, maxint, minint)


from uncompyle6.show import (
    maybe_show_ast,
@@ -91,344 +100,8 @@ from uncompyle6.show import (
|
||||
|
||||
if PYTHON3:
|
||||
from io import StringIO
|
||||
minint = -sys.maxsize-1
|
||||
maxint = sys.maxsize
|
||||
else:
|
||||
from StringIO import StringIO
|
||||
minint = -sys.maxint-1
|
||||
maxint = sys.maxint
|
||||
|
||||
LINE_LENGTH = 80
|
||||
|
||||
# Some ASTs used for comparing code fragments (like 'return None' at
|
||||
# the end of functions).
|
||||
|
||||
RETURN_LOCALS = AST('return_stmt',
|
||||
[ AST('ret_expr', [AST('expr', [ Token('LOAD_LOCALS') ])]),
|
||||
Token('RETURN_VALUE')])
|
||||
|
||||
NONE = AST('expr', [ NoneToken ] )
|
||||
|
||||
RETURN_NONE = AST('stmt',
|
||||
[ AST('return_stmt',
|
||||
[ NONE, Token('RETURN_VALUE')]) ])
|
||||
|
||||
PASS = AST('stmts',
|
||||
[ AST('sstmt',
|
||||
[ AST('stmt',
|
||||
[ AST('passstmt', [])])])])
|
||||
|
||||
ASSIGN_DOC_STRING = lambda doc_string: \
|
||||
AST('stmt',
|
||||
[ AST('assign',
|
||||
[ AST('expr', [ Token('LOAD_CONST', pattr=doc_string) ]),
|
||||
AST('designator', [ Token('STORE_NAME', pattr='__doc__')])
|
||||
])])
|
||||
|
||||
BUILD_TUPLE_0 = AST('expr',
|
||||
[ AST('build_list',
|
||||
[ Token('BUILD_TUPLE_0') ])])
|
||||
|
||||
NAME_MODULE = AST('stmt',
|
||||
[ AST('assign',
|
||||
[ AST('expr', [Token('LOAD_NAME', pattr='__name__')]),
|
||||
AST('designator', [ Token('STORE_NAME', pattr='__module__')])
|
||||
])])
|
||||
|
||||
# TAB = '\t' # as God intended
|
||||
TAB = ' ' *4 # is less spacy than "\t"
|
||||
INDENT_PER_LEVEL = ' ' # additional indent per pretty-print level
|
||||
|
||||
TABLE_R = {
|
||||
'STORE_ATTR': ( '%c.%[1]{pattr}', 0),
|
||||
# 'STORE_SUBSCR': ( '%c[%c]', 0, 1 ),
|
||||
'DELETE_ATTR': ( '%|del %c.%[-1]{pattr}\n', 0 ),
|
||||
# 'EXEC_STMT': ( '%|exec %c in %[1]C\n', 0, (0,maxint,', ') ),
|
||||
}
|
||||
|
||||
TABLE_R0 = {
|
||||
# 'BUILD_LIST': ( '[%C]', (0,-1,', ') ),
|
||||
# 'BUILD_TUPLE': ( '(%C)', (0,-1,', ') ),
|
||||
# 'CALL_FUNCTION': ( '%c(%P)', 0, (1,-1,', ') ),
|
||||
}
|
||||
TABLE_DIRECT = {
|
||||
'BINARY_ADD': ( '+' ,),
|
||||
'BINARY_SUBTRACT': ( '-' ,),
|
||||
'BINARY_MULTIPLY': ( '*' ,),
|
||||
'BINARY_DIVIDE': ( '/' ,),
|
||||
'BINARY_MATRIX_MULTIPLY': ( '@' ,),
|
||||
'BINARY_TRUE_DIVIDE': ( '/' ,),
|
||||
'BINARY_FLOOR_DIVIDE': ( '//' ,),
|
||||
'BINARY_MODULO': ( '%%',),
|
||||
'BINARY_POWER': ( '**',),
|
||||
'BINARY_LSHIFT': ( '<<',),
|
||||
'BINARY_RSHIFT': ( '>>',),
|
||||
'BINARY_AND': ( '&' ,),
|
||||
'BINARY_OR': ( '|' ,),
|
||||
'BINARY_XOR': ( '^' ,),
|
||||
'INPLACE_ADD': ( '+=' ,),
|
||||
'INPLACE_SUBTRACT': ( '-=' ,),
|
||||
'INPLACE_MULTIPLY': ( '*=' ,),
|
||||
'INPLACE_MATRIX_MULTIPLY': ( '@=' ,),
|
||||
'INPLACE_DIVIDE': ( '/=' ,),
|
||||
'INPLACE_TRUE_DIVIDE': ( '/=' ,),
|
||||
'INPLACE_FLOOR_DIVIDE': ( '//=' ,),
|
||||
'INPLACE_MODULO': ( '%%=',),
|
||||
'INPLACE_POWER': ( '**=',),
|
||||
'INPLACE_LSHIFT': ( '<<=',),
|
||||
'INPLACE_RSHIFT': ( '>>=',),
|
||||
'INPLACE_AND': ( '&=' ,),
|
||||
'INPLACE_OR': ( '|=' ,),
|
||||
'INPLACE_XOR': ( '^=' ,),
|
||||
'binary_expr': ( '%c %c %c', 0, -1, 1 ),
|
||||
|
||||
'UNARY_POSITIVE': ( '+',),
|
||||
'UNARY_NEGATIVE': ( '-',),
|
||||
'UNARY_INVERT': ( '~%c'),
|
||||
'unary_expr': ( '%c%c', 1, 0),
|
||||
|
||||
'unary_not': ( 'not %c', 0 ),
|
||||
'unary_convert': ( '`%c`', 0 ),
|
||||
'get_iter': ( 'iter(%c)', 0 ),
|
||||
'slice0': ( '%c[:]', 0 ),
|
||||
'slice1': ( '%c[%p:]', 0, (1, 100) ),
|
||||
'slice2': ( '%c[:%p]', 0, (1, 100) ),
|
||||
'slice3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),
|
||||
|
||||
'IMPORT_FROM': ( '%{pattr}', ),
|
||||
'load_attr': ( '%c.%[1]{pattr}', 0),
|
||||
'LOAD_FAST': ( '%{pattr}', ),
|
||||
'LOAD_NAME': ( '%{pattr}', ),
|
||||
'LOAD_CLASSNAME': ( '%{pattr}', ),
|
||||
'LOAD_GLOBAL': ( '%{pattr}', ),
|
||||
'LOAD_DEREF': ( '%{pattr}', ),
|
||||
'LOAD_LOCALS': ( 'locals()', ),
|
||||
'LOAD_ASSERT': ( '%{pattr}', ),
|
||||
# 'LOAD_CONST': ( '%{pattr}', ), # handled by n_LOAD_CONST
|
||||
'DELETE_FAST': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_NAME': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_GLOBAL': ( '%|del %{pattr}\n', ),
|
||||
'delete_subscr': ( '%|del %c[%c]\n', 0, 1,),
|
||||
'binary_subscr': ( '%c[%p]', 0, (1, 100)),
|
||||
'binary_subscr2': ( '%c[%p]', 0, (1, 100)),
|
||||
'store_subscr': ( '%c[%c]', 0, 1),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'STORE_NAME': ( '%{pattr}', ),
|
||||
'STORE_GLOBAL': ( '%{pattr}', ),
|
||||
'STORE_DEREF': ( '%{pattr}', ),
|
||||
'unpack': ( '%C%,', (1, maxint, ', ') ),
|
||||
|
||||
# This nonterminal we create on the fly in semantic routines
|
||||
'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),
|
||||
|
||||
'unpack_list': ( '[%C]', (1, maxint, ', ') ),
|
||||
'build_tuple2': ( '%P', (0, -1, ', ', 100) ),
|
||||
|
||||
# 'list_compr': ( '[ %c ]', -2), # handled by n_list_compr
|
||||
'list_iter': ( '%c', 0),
|
||||
'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
|
||||
'list_if': ( ' if %c%c', 0, 2 ),
|
||||
'list_if_not': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'lc_body': ( '', ), # ignore when recursing
|
||||
|
||||
'comp_iter': ( '%c', 0),
|
||||
'comp_if': ( ' if %c%c', 0, 2 ),
|
||||
'comp_ifnot': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'comp_body': ( '', ), # ignore when recursing
|
||||
'set_comp_body': ( '%c', 0 ),
|
||||
'gen_comp_body': ( '%c', 0 ),
|
||||
'dict_comp_body': ( '%c:%c', 1, 0 ),
|
||||
|
||||
'assign': ( '%|%c = %p\n', -1, (0, 200) ),
|
||||
|
||||
# The 2nd parameter should have a = suffix.
|
||||
# There is a rule with a 4th parameter "designator"
|
||||
# which we don't use here.
|
||||
'augassign1': ( '%|%c %c %c\n', 0, 2, 1),
|
||||
|
||||
'augassign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4),
|
||||
'designList': ( '%c = %c', 0, -1 ),
|
||||
'and': ( '%c and %c', 0, 2 ),
|
||||
'ret_and': ( '%c and %c', 0, 2 ),
|
||||
'and2': ( '%c', 3 ),
|
||||
'or': ( '%c or %c', 0, 2 ),
|
||||
'ret_or': ( '%c or %c', 0, 2 ),
|
||||
'conditional': ( '%p if %p else %p', (2, 27), (0, 27), (4, 27)),
|
||||
'ret_cond': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27)),
|
||||
'conditionalnot': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27)),
|
||||
'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27)),
|
||||
'conditional_lambda': ( '(%c if %c else %c)', 2, 0, 3),
|
||||
'return_lambda': ('%c', 0),
|
||||
'compare': ( '%p %[-1]{pattr} %p', (0, 19), (1, 19) ),
|
||||
'cmp_list': ( '%p %p', (0, 20), (1, 19)),
|
||||
'cmp_list1': ( '%[3]{pattr} %p %p', (0, 19), (-2, 19)),
|
||||
'cmp_list2': ( '%[1]{pattr} %p', (0, 19)),
|
||||
# 'classdef': (), # handled by n_classdef()
|
||||
'funcdef': ( '\n\n%|def %c\n', -2), # -2 to handle closures
|
||||
'funcdefdeco': ( '\n\n%c', 0),
|
||||
'mkfuncdeco': ( '%|@%c\n%c', 0, 1),
|
||||
'mkfuncdeco0': ( '%|def %c\n', 0),
|
||||
'classdefdeco': ( '\n\n%c', 0),
|
||||
'classdefdeco1': ( '%|@%c\n%c', 0, 1),
|
||||
'kwarg': ( '%[0]{pattr}=%c', 1),
|
||||
'kwargs': ( '%D', (0, maxint, ', ') ),
|
||||
|
||||
'assert_expr_or': ( '%c or %c', 0, 2 ),
|
||||
'assert_expr_and': ( '%c and %c', 0, 2 ),
|
||||
'print_items_stmt': ( '%|print %c%c,\n', 0, 2), # Python 2 only
|
||||
'print_items_nl_stmt': ( '%|print %c%c\n', 0, 2),
|
||||
'print_item': ( ', %c', 0),
|
||||
'print_nl': ( '%|print\n', ),
|
||||
'print_to': ( '%|print >> %c, %c,\n', 0, 1 ),
|
||||
'print_to_nl': ( '%|print >> %c, %c\n', 0, 1 ),
|
||||
'print_nl_to': ( '%|print >> %c\n', 0 ),
|
||||
'print_to_items': ( '%C', (0, 2, ', ') ),
|
||||
|
||||
'call_stmt': ( '%|%p\n', (0, 200)),
|
||||
'break_stmt': ( '%|break\n', ),
|
||||
'continue_stmt': ( '%|continue\n', ),
|
||||
|
||||
'raise_stmt0': ( '%|raise\n', ),
|
||||
'raise_stmt1': ( '%|raise %c\n', 0),
|
||||
'raise_stmt3': ( '%|raise %c, %c, %c\n', 0, 1, 2),
|
||||
# 'yield': ( 'yield %c', 0),
|
||||
# 'return_stmt': ( '%|return %c\n', 0),
|
||||
|
||||
'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'iflaststmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'iflaststmtl': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'testtrue': ( 'not %p', (0, 22) ),
|
||||
|
||||
'ifelsestmt': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelsestmtc': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelsestmtl': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelifstmt': ( '%|if %c:\n%+%c%-%c', 0, 1, 3 ),
|
||||
'elifelifstmt': ( '%|elif %c:\n%+%c%-%c', 0, 1, 3 ),
|
||||
'elifstmt': ( '%|elif %c:\n%+%c%-', 0, 1 ),
|
||||
'elifelsestmt': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
|
||||
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
|
||||
'elifelsestmtr': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 2 ),
|
||||
|
||||
'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
|
||||
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
|
||||
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
|
||||
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
|
||||
'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
|
||||
'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
|
||||
'forstmt': ( '%|for %c in %c:\n%+%c%-\n\n', 3, 1, 4 ),
|
||||
'forelsestmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2),
|
||||
'forelselaststmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-', 3, 1, 4, -2),
|
||||
'forelselaststmtl': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2),
|
||||
'trystmt': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
|
||||
'tryelsestmt': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n', 1, 3, 4 ),
|
||||
'tryelsestmtc': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tryelsestmtl': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tf_trystmt': ( '%c%-%c%+', 1, 3 ),
|
||||
'tf_tryelsestmt': ( '%c%-%c%|else:\n%+%c', 1, 3, 4 ),
|
||||
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
|
||||
'except': ( '%|except:\n%+%c%-', 3 ),
|
||||
'except_cond1': ( '%|except %c:\n', 1 ),
|
||||
'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
|
||||
'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
|
||||
'withstmt': ( '%|with %c:\n%+%c%-', 0, 3),
|
||||
'withasstmt': ( '%|with %c as %c:\n%+%c%-', 0, 2, 3),
|
||||
'passstmt': ( '%|pass\n', ),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'kv': ( '%c: %c', 3, 1 ),
|
||||
'kv2': ( '%c: %c', 1, 2 ),
|
||||
'mapexpr': ( '{%[1]C}', (0, maxint, ', ') ),
|
||||
'importstmt': ( '%|import %c\n', 2),
|
||||
'importfrom': ( '%|from %[2]{pattr} import %c\n', 3 ),
|
||||
'importstar': ( '%|from %[2]{pattr} import *\n', ),
|
||||
}
|
||||
|
||||
|
||||
MAP_DIRECT = (TABLE_DIRECT, )
|
||||
MAP_R0 = (TABLE_R0, -1, 0)
|
||||
MAP_R = (TABLE_R, -1)
|
||||
|
||||
MAP = {
|
||||
'stmt': MAP_R,
|
||||
'call_function': MAP_R,
|
||||
'del_stmt': MAP_R,
|
||||
'designator': MAP_R,
|
||||
'exprlist': MAP_R0,
|
||||
}
|
||||
|
||||
PRECEDENCE = {
|
||||
'build_list': 0,
|
||||
'mapexpr': 0,
|
||||
'unary_convert': 0,
|
||||
'dictcomp': 0,
|
||||
'setcomp': 0,
|
||||
'list_compr': 0,
|
||||
'genexpr': 0,
|
||||
|
||||
'load_attr': 2,
|
||||
'binary_subscr': 2,
|
||||
'binary_subscr2': 2,
|
||||
'slice0': 2,
|
||||
'slice1': 2,
|
||||
'slice2': 2,
|
||||
'slice3': 2,
|
||||
'buildslice2': 2,
|
||||
'buildslice3': 2,
|
||||
'call_function': 2,
|
||||
|
||||
'BINARY_POWER': 4,
|
||||
|
||||
'unary_expr': 6,
|
||||
|
||||
'BINARY_MULTIPLY': 8,
|
||||
'BINARY_DIVIDE': 8,
|
||||
'BINARY_TRUE_DIVIDE': 8,
|
||||
'BINARY_FLOOR_DIVIDE': 8,
|
||||
'BINARY_MODULO': 8,
|
||||
|
||||
'BINARY_ADD': 10,
|
||||
'BINARY_SUBTRACT': 10,
|
||||
|
||||
'BINARY_LSHIFT': 12,
|
||||
'BINARY_RSHIFT': 12,
|
||||
|
||||
'BINARY_AND': 14,
|
||||
|
||||
'BINARY_XOR': 16,
|
||||
|
||||
'BINARY_OR': 18,
|
||||
|
||||
'cmp': 20,
|
||||
|
||||
'unary_not': 22,
|
||||
|
||||
'and': 24,
|
||||
'ret_and': 24,
|
||||
|
||||
'or': 26,
|
||||
'ret_or': 26,
|
||||
|
||||
'conditional': 28,
|
||||
'conditionalnot': 28,
|
||||
'ret_cond': 28,
|
||||
'ret_cond_not': 28,
|
||||
|
||||
'_mklambda': 30,
|
||||
'yield': 101,
|
||||
'yield_from': 101
|
||||
}
|
||||
|
||||
ASSIGN_TUPLE_PARAM = lambda param_name: \
|
||||
AST('expr', [ Token('LOAD_FAST', pattr=param_name) ])
|
||||
|
||||
escape = re.compile(r'''
|
||||
(?P<prefix> [^%]* )
|
||||
% ( \[ (?P<child> -? \d+ ) \] )?
|
||||
((?P<type> [^{] ) |
|
||||
( [{] (?P<expr> [^}]* ) [}] ))
|
||||
''', re.VERBOSE)
|
||||
|
||||
def is_docstring(node):
|
||||
try:
|
||||
@@ -492,7 +165,6 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.write("\n" + self.indent + INDENT_PER_LEVEL[:-1])
|
||||
return self.line_number
|
||||
|
||||
|
||||
def customize_for_version(self, is_pypy, version):
|
||||
if is_pypy:
|
||||
########################
|
||||
@@ -537,7 +209,6 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
'raise_stmt2': ( '%|raise %c from %c\n', 0, 1),
|
||||
})
|
||||
|
||||
|
||||
if version < 2.0:
|
||||
TABLE_DIRECT.update({
|
||||
'importlist': ( '%C', (0, maxint, ', ') ),
|
||||
@@ -546,6 +217,18 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
TABLE_DIRECT.update({
|
||||
'importlist2': ( '%C', (0, maxint, ', ') ),
|
||||
})
|
||||
if version <= 2.4:
|
||||
global NAME_MODULE
|
||||
NAME_MODULE = AST('stmt',
|
||||
[ AST('assign',
|
||||
[ AST('expr',
|
||||
[Token('LOAD_GLOBAL', pattr='__name__',
|
||||
offset=0, has_arg=True)]),
|
||||
AST('designator',
|
||||
[ Token('STORE_NAME', pattr='__module__',
|
||||
offset=3, has_arg=True)])
|
||||
])])
|
||||
pass
|
||||
if version <= 2.3:
|
||||
TABLE_DIRECT.update({
|
||||
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 4 )
|
||||
@@ -575,7 +258,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
})
|
||||
else:
|
||||
TABLE_DIRECT.update({
|
||||
'except_cond3': ( '%|except %c, %c:\n', 1, 6 ),
|
||||
'except_cond3': ( '%|except %c, %c:\n', 1, 6 ),
|
||||
'testtrue_then': ( 'not %p', (0, 22) ),
|
||||
|
||||
})
|
||||
if 2.4 <= version <= 2.6:
|
||||
TABLE_DIRECT.update({
|
||||
@@ -608,7 +293,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
code = node[-3]
|
||||
|
||||
self.indentMore()
|
||||
annotate_last = -4 if self.version == 3.1 else -5
|
||||
for annotate_last in range(len(node)-1, -1, -1):
|
||||
if node[annotate_last] == 'annotate_tuple':
|
||||
break
|
||||
|
||||
# FIXME: handle and pass full annotate args
|
||||
make_function3_annotate(self, node, isLambda=False,
|
||||
@@ -622,7 +309,6 @@ class SourceWalker(GenericASTTraversal, object):
                self.prune() # stop recursing
            self.n_mkfunc_annotate = n_mkfunc_annotate

        if version >= 3.4:
            ########################
            # Python 3.4+ Additions
@@ -630,14 +316,65 @@ class SourceWalker(GenericASTTraversal, object):
            TABLE_DIRECT.update({
                'LOAD_CLASSDEREF': ( '%{pattr}', ),
            })
            ########################
            # Python 3.5+ Additions
            #######################
            if version >= 3.5:
                TABLE_DIRECT.update({
                    'await_stmt': ( '%|await %c', 0),
                    'async_for_stmt': (
                        '%|async for %c in %c:\n%+%c%-\n\n', 9, 1, 25 ),
                    'async_forelse_stmt': (
                        '%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 9, 1, 25, 28 ),
                    'async_with_stmt': (
                        '%|async with %c:\n%+%c%-', 0, 7),
                    'async_with_as_stmt': (
                        '%|async with %c as %c:\n%+%c%-', 0, 6, 7),

                })
                def n_async_call_function(node):
                    self.f.write('async ')
                    node.type == 'call_function'
                    p = self.prec
                    self.prec = 80
                    self.engine(('%c(%P)', 0, (1, -4, ', ', 100)), node)
                    self.prec = p
                    self.prune()
                self.n_async_call_function = n_async_call_function

                def n_funcdef(node):
                    code_node = node[0][1]
                    if (code_node == 'LOAD_CONST' and iscode(code_node.attr)
                        and code_node.attr.co_flags & COMPILER_FLAG_BIT['COROUTINE']):
                        self.engine(('\n\n%|async def %c\n', -2), node)
                    else:
                        self.engine(('\n\n%|def %c\n', -2), node)
                    self.prune()
                self.n_funcdef = n_funcdef
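The 3.5+ table entries and the coroutine check in n_funcdef above are aimed at source of this shape (an illustrative input, not taken from the diff):

    async def drain(reader, items):
        async with reader as r:            # async_with_as_stmt
            async for item in r:           # async_for_stmt
                items.append(await item)   # await_stmt
        return items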

                def n_unmapexpr(node):
                    last_n = node[0][-1]
                    for n in node[0]:
                        self.preorder(n)
                        if n != last_n:
                            self.f.write(', **')
                            pass
                        pass
                    if version >= 3.6:
                        self.f.write(')')
                    self.prune()
                    pass
                self.n_unmapexpr = n_unmapexpr
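n_unmapexpr joins the pieces of a map unpacking with ', **'; the 3.5+ source forms involved look roughly like this (illustrative, with a hypothetical helper f):

    def f(**kwargs): return kwargs
    d1, d2 = {'a': 1}, {'b': 2}
    merged = {**d1, **d2}     # dict display with double-star unpacking
    f(**d1, **d2)             # call-site unpacking (handled together with the 3.6 entries below)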

            if version >= 3.6:
                ########################
                # Python 3.6+ Additions
                #######################
                TABLE_DIRECT.update({
                    'fstring_expr': ( "{%c%{conversion}}", 0),
                    'fstring_expr': ( "{%c%{conversion}}", 0),
                    'fstring_single': ( "f'{%c%{conversion}}'", 0),
                    'fstring_multi': ( "f'%c'", 0),
                    'func_args36': ( "%c(**", 0),
                })

                FSTRING_CONVERSION_MAP = {1: '!s', 2: '!r', 3: '!a'}
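The fstring entries and FSTRING_CONVERSION_MAP correspond to 3.6 formatted string literals along these lines (illustrative):

    name = 'world'
    single = f'{name!r}'               # fstring_single with conversion '!r'
    multi = f'hello {name!s}, bye'     # fstring_multi built from fstring_expr parts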
@@ -653,7 +390,6 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
def n_fstring_single(node):
|
||||
f_conversion(node)
|
||||
self.default(node)
|
||||
|
||||
self.n_fstring_single = n_fstring_single
|
||||
|
||||
return
|
||||
@@ -896,6 +632,8 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
sep += ' '
|
||||
pass
|
||||
pass
|
||||
if len(tup) == 1:
|
||||
self.write(", ")
|
||||
self.write(')')
|
||||
|
||||
def n_LOAD_CONST(self, node):
|
||||
@@ -1134,7 +872,6 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
assert n == 'lc_body'
|
||||
self.write( '[ ')
|
||||
|
||||
|
||||
if self.version >= 2.7:
|
||||
expr = n[0]
|
||||
list_iter = node[-1]
|
||||
@@ -1322,7 +1059,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
n = ast[iter_index]
|
||||
assert n == 'list_iter', n
|
||||
|
||||
## FIXME: I'm not totally sure this is right.
|
||||
# FIXME: I'm not totally sure this is right.
|
||||
|
||||
# find innermost node
|
||||
if_node = None
|
||||
@@ -1756,23 +1493,29 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.prec = 100
|
||||
lastnode = node.pop()
|
||||
lastnodetype = lastnode.type
|
||||
|
||||
# If this build list is inside a CALL_FUNCTION_VAR,
|
||||
# then the first * has already been printed.
|
||||
# Until I have a better way to check for CALL_FUNCTION_VAR,
|
||||
# will assume that if the text ends in *.
|
||||
last_was_star = self.f.getvalue().endswith('*')
|
||||
|
||||
have_star = False
|
||||
if lastnodetype.startswith('BUILD_LIST'):
|
||||
# 3.5+ has BUILD_LIST_UNPACK
|
||||
if lastnodetype == 'BUILD_LIST_UNPACK':
|
||||
# FIXME: need to handle range of BUILD_LIST_UNPACK
|
||||
have_star = True
|
||||
endchar = ''
|
||||
else:
|
||||
self.write('['); endchar = ']'
|
||||
elif lastnodetype.startswith('BUILD_TUPLE'):
|
||||
self.write('('); endchar = ')'
|
||||
elif lastnodetype.startswith('BUILD_SET'):
|
||||
self.write('{'); endchar = '}'
|
||||
elif lastnodetype.startswith('ROT_TWO'):
|
||||
self.write('('); endchar = ')'
|
||||
if lastnodetype.endswith('UNPACK'):
|
||||
# FIXME: need to handle range of BUILD_LIST_UNPACK
|
||||
have_star = True
|
||||
endchar = ''
|
||||
else:
|
||||
raise 'Internal Error: n_build_list expects list or tuple'
|
||||
if lastnodetype.startswith('BUILD_LIST'):
|
||||
self.write('['); endchar = ']'
|
||||
elif lastnodetype.startswith('BUILD_TUPLE'):
|
||||
self.write('('); endchar = ')'
|
||||
elif lastnodetype.startswith('BUILD_SET'):
|
||||
self.write('{'); endchar = '}'
|
||||
elif lastnodetype.startswith('ROT_TWO'):
|
||||
self.write('('); endchar = ')'
|
||||
else:
|
||||
raise 'Internal Error: n_build_list expects list or tuple'
|
||||
|
||||
flat_elems = []
|
||||
for elem in node:
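For reference, the BUILD_..._UNPACK branches above deal with 3.5+ star-unpacking displays such as (an illustration, not from the test suite):

    a, b = [1, 2], (3, 4)
    as_list = [*a, *b]      # BUILD_LIST_UNPACK
    as_tuple = (*a, *b)     # BUILD_TUPLE_UNPACK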
|
||||
@@ -1799,8 +1542,13 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
sep += '\n' + self.indent + INDENT_PER_LEVEL[:-1]
|
||||
else:
|
||||
if sep != '': sep += ' '
|
||||
if have_star:
|
||||
sep += '*'
|
||||
if not last_was_star:
|
||||
if have_star:
|
||||
sep += '*'
|
||||
pass
|
||||
pass
|
||||
else:
|
||||
last_was_star = False
|
||||
self.write(sep, value)
|
||||
sep = ','
|
||||
if lastnode.attr == 1 and lastnodetype.startswith('BUILD_TUPLE'):
|
||||
@@ -1862,8 +1610,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
beginning of this module for the how we interpret format specifications such as
|
||||
%c, %C, and so on.
|
||||
"""
|
||||
|
||||
# self.println("----> ", startnode.type)
|
||||
# self.println("----> ", startnode.type, ', ', entry[0])
|
||||
fmt = entry[0]
|
||||
arg = 1
|
||||
i = 0
|
||||
@@ -2124,7 +1871,6 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.println()
|
||||
del ast[i]
|
||||
|
||||
|
||||
# the function defining a class normally returns locals(); we
|
||||
# don't want this to show up in the source, thus remove the node
|
||||
if len(ast) > 0 and ast[-1][0] == RETURN_LOCALS:
|
||||
|
uncompyle6/verify.py
@@ -184,16 +184,16 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
        elif member == 'co_code' and not ignore_code:
            if version == 2.3:
                import uncompyle6.scanners.scanner23 as scan
                scanner = scan.Scanner26()
                scanner = scan.Scanner23(show_asm=False)
            elif version == 2.4:
                import uncompyle6.scanners.scanner24 as scan
                scanner = scan.Scanner25()
                scanner = scan.Scanner24(show_asm=False)
            elif version == 2.5:
                import uncompyle6.scanners.scanner25 as scan
                scanner = scan.Scanner25()
                scanner = scan.Scanner25(show_asm=False)
            elif version == 2.6:
                import uncompyle6.scanners.scanner26 as scan
                scanner = scan.Scanner26()
                scanner = scan.Scanner26(show_asm=False)
            elif version == 2.7:
                if is_pypy:
                    import uncompyle6.scanners.pypy27 as scan
@@ -319,6 +319,9 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
|
||||
elif tokens1[i1].type == 'LOAD_NAME' and tokens2[i2].type == 'LOAD_CONST' \
|
||||
and tokens1[i1].pattr == 'None' and tokens2[i2].pattr is None:
|
||||
pass
|
||||
elif tokens1[i1].type == 'RETURN_VALUE' and \
|
||||
tokens2[i2].type == 'RETURN_END_IF':
|
||||
pass
|
||||
else:
|
||||
raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
|
||||
tokens2[i2], tokens1, tokens2)
|
||||
@@ -353,6 +356,9 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
|
||||
if is_pypy:
|
||||
# For PYPY for now we don't care about PYPY_SOURCE_IS_UTF8:
|
||||
flags2 &= ~0x0100 # PYPY_SOURCE_IS_UTF8
|
||||
# We also don't care about COROUTINE or GENERATOR for now
|
||||
flags1 &= ~0x000000a0
|
||||
flags2 &= ~0x000000a0
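    # Aside (not part of the diff): the mask 0x000000a0 above is
    # CO_GENERATOR | CO_COROUTINE (0x20 | 0x80 == 0xa0), so generator and
    # coroutine flags are deliberately ignored in this comparison.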
|
||||
if flags1 != flags2:
|
||||
raise CmpErrorMember(name, 'co_flags',
|
||||
pretty_flags(flags1),
|
||||
@@ -401,7 +407,8 @@ def compare_code_with_srcfile(pyc_filename, src_filename, weak_verify=False):
|
||||
try:
|
||||
code_obj2 = load_file(src_filename)
|
||||
except SyntaxError as e:
|
||||
return str(e)
|
||||
# src_filename can be the first of a group sometimes
|
||||
return str(e).replace(src_filename, pyc_filename)
|
||||
cmp_code_objects(version, is_pypy, code_obj1, code_obj2, ignore_code=weak_verify)
|
||||
return None
|
||||
|
||||
|
uncompyle6/version.py
@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.9.7'
VERSION='2.9.9'
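Since the header advertises dual use, both consumers should see the new number (a sketch; assumes the file lives at uncompyle6/version.py):

    # Python:
    from uncompyle6.version import VERSION
    print(VERSION)                 # '2.9.9' after this change
    # bash:  source uncompyle6/version.py && echo $VERSION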