mirror of
https://github.com/rocky/python-uncompyle6.git
synced 2025-08-03 00:45:53 +08:00
Compare commits
208 Commits
release-2. ... release-py
Commit SHA1 hashes (author and date columns are not shown in this view):

0f489672b9, b7d8cbfaf5, af10f99776, 0cbafa6e3a, 4afaee2a36, daea3c348c, bf45260588, 34a356d237,
d9c1374a59, 2e05137f2b, 267ecda070, 7e89839777, c7f8edd5ef, 6a991833a3, 28ee3f1257, e9588e56e2,
7b2217fda4, 5ca219f3d3, b733a1b036, 4615cda03f, eb92418224, 844221cd43, df8d253f78, 89b42e3696,
22e5a4a283, 61810172d1, 7c299fbf37, da695115b5, f1d9e194fe, e727a437ea, 9a3e11a957, 966a4bc7dc,
658c8b4be7, d4dab54c7b, ad98fae3d4, cbbf64ccd0, 394120bb1a, 7257ba41c5, 9eee4eccd7, cf3c07e047,
d93b7a9eae, 5ebb731c04, d3794ec9af, 2ab7aa2f48, 49fd430505, 2a47f0309f, 3084ac20e9, 9c846c309e,
b4efa62fad, 94d1c6dfd3, 6991a637a2, 52b1f4d2b6, 0ce804ae16, d2502f205e, 2ad40a5648, d1a695b2bd,
47b6a35abc, b1e32c7cc5, 47977b3372, 2a7a166696, ea732acf49, da884487d5, ff73efcf8e, a32c0e68ef,
73857c831b, 4c2ca44818, 3e7add1138, 69fd1b3371, d540146d5a, e9a17010c7, 038692dbf9, 93437152a2,
b952f56c44, ca1679e636, 8d084ed358, a10914a645, 9c0ef9fa63, 449d74af51, f8a40c1949, e10e184eda,
605721c995, 50d875f6a6, 26e8de8532, 89d8a70778, 5566b9ba6c, 1093ef5c5b, dcaca27821, e56ab2dcd5,
d6c45979ba, 4a47822904, 4e9555a7f6, d1c0413b79, 93ec81673b, 0cf5f41fda, 246495febd, 91b86ac156,
26cd91046e, b42c66e091, 364827a2f2, 819458564c, 486f313532, 84fd71b73b, 50687e6317, b35546157f,
7755dddd94, ce1e841255, 68f0f79030, bf195a234f, 87db833f62, 8081decf7c, e5008693a1, 810649799c,
d4be647bce, 4a898ff4c1, cb6925beec, 2665f292c5, 33be34c6fb, 3bbc94847d, 3a8d4e1a12, 87e005a7ba,
5477ca294d, 31c28d0220, 659e28d686, 8a33a583cd, 8a776176e2, 03498963d4, 47dbc57f3d, a06e9bf32e,
7e8f7ba674, 39b9810587, 8cdaac93ab, a9f7a3c6d0, 495bdd7b64, b4ded92822, be9194c223, 45bd8e4058,
bb24df596d, 6acec471e3, 41343c27b7, 9e34654b38, 09eb7f7f78, b9703cf6b4, 792df2a7a7, b4a6c3c319,
4199bc7f61, 91e1d2538f, 6773a66b99, ed6cb9af79, a91cd71667, 6f82ae3642, 4e05c741e3, fdcb90f661,
f416473562, 5856802902, 4f2ae2f603, ea1651d8ca, be769da401, cb3c5e7119, 39e3582e72, a0c090932e,
d1e118afa3, f7da8fd8ab, 3b1dd9d1c4, 91fd1ce732, a46e7cbfa4, d46873c44d, 54e50771ab, f7a910ec66,
160ec0d9cc, e1111e3f50, 65913778a5, cf21fff38b, 29122340e6, 6d6a73eea7, e4a7641927, b24b46d48c,
a65d7dce5b, 718a0a5d34, ea9e3ab3f5, 770e988ff8, 0fa0641974, c13e23cdae, fab4ebb768, 89429339fa,
6ed129bd7a, c4fde6b53e, a7d93e88b4, 9891494142, f8544dfbbe, b00651d428, da8dccbaca, 37272ae827,
7f2bee46b7, c8a4dcf72b, 012ff91cfb, e690ddd50a, 45b7c1948c, e2fb7ca3d2, b3bda76582, ab6d322eca,
1a8a0df107, 0a37709b0a, 98cd1417df, 460069ceaa, 316aa44f23, 7133540c23, 590231741d, a9349b8f3d
.gitignore (vendored) | 2

@@ -16,3 +16,5 @@
/unpyc
__pycache__
build
/.venv*
/.idea
@@ -3,15 +3,10 @@ language: python
sudo: false

python:
- '3.5'
- '2.7.12'
- '2.6'
- '3.3'
- '3.4'
- '3.2'
- '2.7' # this is a cheat here because travis doesn't do 2.4-2.6

install:
- pip install -r requirements.txt
- pip install -e .
- pip install -r requirements-dev.txt

script:
HISTORY.md | 47

@@ -44,7 +44,8 @@ it appears that Hartmut did most of the work to get this code to
accept the full Python language. He added precedence to the table
specifiers, support for multiple versions of Python, the
pretty-printing of docstrings, lists, and hashes. He also wrote test and verification routines of
deparsed bytecode, and used this in an extensive set of tests that he also wrote. He could verify against the entire Python library.
deparsed bytecode, and used this in an extensive set of tests that he also wrote. He says he could verify against the
entire Python library. However I have subsequently found small and relatively obscure bugs in the decompilation code.

decompyle2.2 was packaged for Debian (sarge) by
[Ben Burton around 2002](https://packages.qa.debian.org/d/decompyle.html). As

@@ -65,10 +66,12 @@ code to handle first Python 2.3 and then 2.4 bytecodes. Because of
jump optimization introduced in the CPython bytecode compiler at that
time, various JUMP instructions were classified as going backwards, and
COME FROM instructions were reintroduced. See
RELEASE-2.4-CHANGELOG.txt for more details here. There wasn't a public
[RELEASE-2.4-CHANGELOG.txt](https://github.com/rocky/python-uncompyle6/blob/master/DECOMPYLE-2.4-CHANGELOG.txt)
for more details here. There wasn't a public
release of RELEASE-2.4 and bytecodes other than Python 2.4 weren't
supported. Dan says the Python 2.3 version could verify the entire
python library.
Python library. But given subsequent bugs found, like simply
recognizing complex-number constants in bytecode, decompilation wasn't perfect.

Next we get to ["uncompyle" and
PyPI](https://pypi.python.org/pypi/uncompyle/1.1) and the era of

@@ -95,17 +98,17 @@ so. Then hamled made a few commits earlier on, while Eike Siewertsen
made a few commits later on. But mostly wibiti, and Guenther
Starnberger got the code to where uncompyle2 was around 2012.

In uncompyle2 decompilation of python bytecode 2.5 & 2.6 is done by
In `uncompyle`, decompilation of python bytecode 2.5 & 2.6 is done by
transforming the byte code into a pseudo 2.7 python bytecode and is
based on code from Eloi Vanderbeken.

This project, uncompyle6, abandons that approach for various
This project, `uncompyle6`, abandons that approach for various
reasons. However the main reason is that we need offsets in fragment
deparsing to be exactly the same, and the transformation process can
remove instructions. Adding instructions with pseudo_offsets is
remove instructions. _Adding_ instructions with pseudo offsets is
however okay.

Uncompyle6, however, owes its existence to the fork of uncompyle2 by
`Uncompyle6` however owes its existence to the fork of `uncompyle2` by
Myst herie (Mysterie) whose first commit picks up at
2012. I chose this since it seemed to have been at that time the most
actively, if briefly, worked on. Also starting around 2012 is Dark

@@ -115,9 +118,12 @@ I started working on this late 2015, mostly to add fragment support.
In that, I decided to make this runnable on Python 3.2+ and Python 2.6+
while handling Python bytecodes from Python versions 2.5+ and
3.2+. In doing so, it has been expedient to separate this into three
projects: bytecode loading and disassembly (xdis), parsing and tree
building (spark_parser), and grammar and semantic actions for
decompiling (uncompyle6).
projects:

* bytecode loading and disassembly ([xdis](https://pypi.python.org/pypi/xdis)),
* parsing and tree building ([spark_parser](https://pypi.python.org/pypi/spark_parser)),
* this project - grammar and semantic actions for decompiling
  ([uncompyle6](https://pypi.python.org/pypi/uncompyle6)).

Over the many years, code styles and Python features have

@@ -142,16 +148,19 @@ if the grammar is LR or left recursive.

Another approach that doesn't use grammars is to do something like
simulate execution symbolically and build expression trees off of
stack results. The two important projects that work this way are
[unpyc3](https://code.google.com/p/unpyc3/) and most especially
[pycdc](https://github.com/zrax/pycdc). The latter project is largely
by Michael Hansen and Darryl Pogue. If they supported getting
source-code fragments and I could call it from Python, I'd probably
ditch this and use that. From what I've seen, the code runs blindingly
fast and spans all versions of Python.
stack results. Control flow in that approach still needs to be
handled somewhat ad hoc. The two important projects that work this
way are [unpyc3](https://code.google.com/p/unpyc3/) and most
especially [pycdc](https://github.com/zrax/pycdc). The latter project
is largely by Michael Hansen and Darryl Pogue. If they supported
getting source-code fragments, did a better job in supporting Python
more fully, and had a way I could call it from Python, I probably
would have ditched this and used that. The code runs blindingly fast
and spans all versions of Python, although more recently Python 3
support has been lagging.
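As an aside, here is a toy sketch of what "building expression trees off of stack results" means. This sketch is added here purely for illustration; it is not code from unpyc3, pycdc, or this repository, and it assumes CPython opcodes from before 3.11 (where BINARY_ADD and BINARY_MULTIPLY still exist):

```python
# Symbolically "execute" an expression's bytecode: push source text instead of
# values, and fold binary operators into nested sub-expressions.
import dis

def symbolic_expr(code):
    stack = []
    for ins in dis.get_instructions(code):
        if ins.opname in ('LOAD_NAME', 'LOAD_FAST', 'LOAD_GLOBAL'):
            stack.append(ins.argval)            # a name becomes itself
        elif ins.opname == 'LOAD_CONST':
            stack.append(repr(ins.argval))      # a constant becomes its literal
        elif ins.opname == 'BINARY_ADD':
            rhs, lhs = stack.pop(), stack.pop()
            stack.append('(%s + %s)' % (lhs, rhs))
        elif ins.opname == 'BINARY_MULTIPLY':
            rhs, lhs = stack.pop(), stack.pop()
            stack.append('(%s * %s)' % (lhs, rhs))
        elif ins.opname == 'RETURN_VALUE':
            return stack.pop()
    return stack[-1] if stack else None

print(symbolic_expr(compile('a + b * 3', '<expr>', 'eval')))   # -> (a + (b * 3))
```

Decompilers in that family also have to track jumps to recover loops and conditionals, which is the "ad hoc" control-flow handling mentioned above.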

Tests for the project have been, or are being, culled from all of the
projects mentioned.

NB. If you find mistakes, want corrections, or want your name added (or removed),
please contact me.
NB. If you find mistakes, want corrections, or want your name added
(or removed), please contact me.
HOW-TO-REPORT-A-BUG.md | 68 (new file)

@@ -0,0 +1,68 @@
# How to report a Bug

## The difficulty of the problem

There is no Python decompiler yet, that I know about, that will
decompile everything. This one probably does the best job of *any*
Python decompiler. But it is a constant work in progress: Python keeps
changing, and so does its code generation.

I have found bugs in *every* Python decompiler I have tried. Even
those where authors/maintainers claim that they have used it on the
entire Python standard library. And I don't mean that the program
doesn't come out with the same Python source instructions, but that
the program is *semantically* not equivalent.

So it is likely you'll find a mistranslation in decompiling.

## What to send (minimum requirements)

The basic requirement is pretty simple:

* Python bytecode
* Source text

## What to send (additional helpful information)

Some kind folks also give the invocation they used and the output,
which usually includes an error message produced. This is helpful;
from it I can figure out what OS you are running this on and what
version of *uncompyle6* was used. So if you don't provide the input
command and the output from that, please give:

* _uncompyle6_ version used
* OS that you used this on
* Python interpreter version used
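For example, a report might capture something like the following (the bytecode file name `mymodule.pyc` here is only a hypothetical placeholder):

```console
$ python --version                        # Python interpreter version used
$ uncompyle6 mymodule.pyc > mymodule.py   # the exact invocation, plus any error output it printed
```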
### But I don't *have* the source code!

Sure, I get it. No problem. Python assembly code is shown on parse
errors, so you can simply hand-decompile that. To get a full
disassembly, use pydisasm from the
[xdis](https://pypi.python.org/pypi/xdis) package. Opcodes are
described in the documentation for the
[dis](https://docs.python.org/3.6/library/dis.html) module.
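A sketch of what that might look like (again with a made-up file name):

```console
$ pip install xdis                          # provides the pydisasm command
$ pydisasm mymodule.pyc > mymodule.pydasm   # full disassembly you can read, or attach to the report
```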
### But I don't *have* the source code and am incapable of figuring out how to do a hand disassembly!

Well, you could learn. No one is born into this world knowing how to
disassemble Python bytecode. And as Richard Feynman once said, "What
one fool can learn, so can another."

## Narrowing the problem

I don't need the entire source code base for which one file or module
can't be decompiled. I just need that one file or module. If there are
several files, file a bug report for each file.

Python modules can get quite large, and usually decompilation problems
occur in a single function or maybe the main-line code but not any of
the functions or classes. So please chop down the source code by
removing those parts that do decompile properly.

By doing this, you'll probably have a better sense of what exactly the
problem is. Perhaps you can find the boundary of what decompiles and
what doesn't. That is useful. Or maybe the same file will decompile
properly on a neighboring version of Python. That is helpful too.

In sum, the more you can isolate or narrow the problem, the more
likely the problem will be fixed, and fixed sooner.
@@ -1,6 +1,7 @@
include README.rst
include ChangeLog
include HISTORY.md
include HOW-TO-REPORT-A-BUG.md
include LICENSE
include Makefile
include requirements.txt
Makefile | 4

@@ -37,7 +37,7 @@ check-3.0 check-3.1 check-3.2 check-3.5 check-3.6:
        $(MAKE) -C test $@

#:Tests for Python 2.6 (doesn't have pytest)
check-2.6:
check-2.4 check-2.5 check-2.6:
        $(MAKE) -C test $@

#:PyPy 2.6.1 or PyPy 5.0.1

@@ -59,7 +59,7 @@ clean: clean_pyc

#: Create source (tarball) and wheel distribution
dist:
        $(PYTHON) ./setup.py sdist bdist_wheel
        $(PYTHON) ./setup.py sdist bdist_egg

#: Remove .pyc files
clean_pyc:
NEWS | 43

@@ -1,3 +1,46 @@
uncompyle6 2.11.1 2016-06-18 Fleetwood

- Major improvements in fragment tracking
  * Add nonterminal node in extractInfo
  * tag more offsets in expressions
  * tag array subscripts
  * set YIELD value offset in a <yield> expr
  * fix a long-standing bug in not adjusting final AST when melding other deparse ASTs
- Fixes yet again for make_function node handling; document what's up here
- Fix bug in snowflake Python 3.5 *args kwargs

uncompyle6 2.10.1 2016-06-3 Marylin Frankel

- fix some fragments parsing bugs
- was returning the wrong type sometimes in deparse_code_around_offset()
- capture function name in offsets
- track changes to ifelstrmtr node from pysource into fragments

uncompyle6 2.10.0 2016-05-30 Elaine Gordon

- Add fuzzy offset deparse lookup
- 3.6 bugfixes
- fix EXTENDED_ARGS handling (and in 2.6 and others)
- semantic routine make_function fragments.py
- MAKE_FUNCTION handling
- CALL_FUNCTION_EX handling
- async property on defs
- support for CALL_FUNCTION_KW (moagstar)
- 3.5+ UNMAP_PACK and BUILD_UNMAP_PACK handling
- 3.5 FUNCTION_VAR bug
- 3.x pass statement inside while True
- Improve 3.2 decompilation
- Fixed -o argument processing (Gregrory)
- Reduce scope of LOAD_ASSERT as expr to 3.4+
- "await" statement fixes
- 2.3, 2.4 "if 1 .." fixes
- 3.x annotation fixes

uncompyle6 2.9.11 2016-04-06

- Better support for Python 3.5+ BUILD_MAP_UNPACK
- Start 3.6 CALL_FUNCTION_EX support
- Many decompilation bug fixes. (Many more remain). See ChangeLog

uncompyle6 2.9.10 2016-02-25

- Python grammar rule fixes
README.rst | 31

@@ -11,8 +11,8 @@ Introduction
------------

*uncompyle6* translates Python bytecode back into equivalent Python
source code. It accepts bytecodes from Python version 2.1 to 3.6 or
so, including PyPy bytecode and Dropbox's Python 2.5 bytecode.
source code. It accepts bytecodes from Python version 1.5, and 2.1 to
3.6 or so, including PyPy bytecode and Dropbox's Python 2.5 bytecode.

Why this?
---------

@@ -21,7 +21,8 @@ There were a number of decompyle, uncompile, uncompyle2, uncompyle3
forks around. All of them came basically from the same code base, and
almost all of them were no longer actively maintained. Only one
handled Python 3, and even there, only 3.2 or 3.3 depending on which
code is used. This code pulls these together and moves forward. It
code is used. This code pulls these together and moves forward. This
project has the most complete support for Python 3.3 and above. It
also addresses a number of open issues in the previous forks.

What makes this different from other CPython bytecode decompilers?: its

@@ -46,7 +47,7 @@ Requirements

This project requires Python 2.6 or later, PyPy 3-2.4, or PyPy-5.0.1.
Python versions 2.4-2.7 are supported in the python-2.4 branch.
The bytecode files it can read have been tested on Python bytecodes from
versions 1.5, 2.1-2.7, and 3.2-3.6 and the above-mentioned PyPy versions.
versions 1.5, 2.1-2.7, and 3.0-3.6 and the above-mentioned PyPy versions.

Installation
------------

@@ -55,7 +56,7 @@ This uses setup.py, so it follows the standard Python routine:

::

    pip install -r requirements.txt
    pip install -e .
    pip install -r requirements-dev.txt
    python setup.py install # may need sudo
    # or if you have pyenv:

@@ -112,7 +113,7 @@ with handling control flow. All of the Python decompilers I have looked
at have the same problem. In some cases we can detect an erroneous
decompilation and report that.

About 90% of the decompilation of Python standard library packages in
Over 98% of the decompilation of Python standard library packages in
Python 2.7.12 verifies correctly. Over 99% of Python 2.7 and 3.3-3.5
"weakly" verify. Python 2.6 drops down to 96% weakly verifying.
Other versions drop off in quality too.

@@ -140,11 +141,10 @@ and 2.0.

In the Python 3 series, Python support is strongest around 3.4 or
3.3 and drops off as you move further away from those versions. Python
3.5 largely works, but still has some bugs in it and is missing some
opcodes. Python 3.6 changes things drastically by using word codes
rather than byte codes. That has been addressed, but then it also
changes function call opcodes and its semantics and has more problems
with control flow than 3.5 has.
3.6 changes things drastically by using word codes rather than byte
codes. That has been addressed, but then it also changes function call
opcodes and its semantics and has more problems with control flow than
3.5 has.

Currently not all Python magic numbers are supported. Specifically in
some versions of Python, notably Python 3.6, the magic number has

@@ -158,17 +158,20 @@ We also don't handle PJOrion_ obfuscated code. For that try: PJOrion
Deobfuscator_ to unscramble the bytecode to get valid bytecode before
trying this tool.

Handling pathologically long lists of expressions or statements is
slow.

There is lots to do, so please dig in and help.

See Also
--------

* https://github.com/zrax/pycdc : supports all versions of Python and is written in C++
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique what is used here.
* https://github.com/zrax/pycdc : supports all versions of Python and is written in C++. Support for later Python 3 versions is a bit lacking though.
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than what is used here.
* https://github.com/figment/unpyc3/ : fork of above, but supports Python 3.3 only. Includes some fixes, like supporting function annotations.
* The HISTORY_ file.

* `How to report a bug <https://github.com/rocky/python-uncompyle6/blob/master/HISTORY.md>`_

.. |downloads| image:: https://img.shields.io/pypi/dd/uncompyle6.svg
.. _trepan: https://pypi.python.org/pypi/trepan
.. _HISTORY: https://github.com/rocky/python-uncompyle6/blob/master/HISTORY.md
@@ -33,14 +33,14 @@ classifiers = ['Development Status :: 5 - Production/Stable',
# The rest in alphabetic order
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
author_email = "rb@dustyfeet.com"
entry_points={
entry_points = {
    'console_scripts': [
        'uncompyle6=uncompyle6.bin.uncompile:main_bin',
        'pydisassemble=uncompyle6.bin.pydisassemble:main',
    ]}
ftp_url = None
install_requires = ['spark-parser >= 1.6.0, < 1.7.0',
                    'xdis >= 3.2.4, < 3.3.0']
install_requires = ['spark-parser >= 1.6.1, < 1.7.0',
                    'xdis >= 3.3.1, < 3.4.0']
license = 'MIT'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
appveyor.yml | 78 (new file)

@@ -0,0 +1,78 @@
environment:
  global:
    # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
    # /E:ON and /V:ON options are not enabled in the batch script interpreter
    # See: http://stackoverflow.com/a/13751649/163740
    CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"

  matrix:

    # Pre-installed Python versions, which Appveyor may upgrade to
    # a later point release.
    # See: http://www.appveyor.com/docs/installed-software#python

    # - PYTHON: "C:\\Python27"
    #   PYTHON_VERSION: "2.7.x"
    #   PYTHON_ARCH: "32"

    - PYTHON: "C:\\Python27-x64"
      PYTHON_VERSION: "2.7.x"
      PYTHON_ARCH: "64"

    # - PYTHON: "C:\\Python26"
    #   PYTHON_VERSION: "2.6.x"
    #   PYTHON_ARCH: "32"

    # - PYTHON: "C:\\Python26-x64"
    #   PYTHON_VERSION: "2.6.x"
    #   PYTHON_ARCH: "64"

install:
  # We need wheel installed to build wheels
  - "%PYTHON%\\python.exe -m pip install wheel"

  # Install Python (from the official .msi of http://python.org) and pip when
  # not already installed.
  - ps: if (-not(Test-Path($env:PYTHON))) { & appveyor\install.ps1 }

  # Prepend newly installed Python to the PATH of this build (this cannot be
  # done from inside the powershell script as it would require to restart
  # the parent CMD process).
  - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
  - "SET HOME=."

  # Check that we have the expected version and architecture for Python
  - "python --version"
  - "python -c \"import struct; print(struct.calcsize('P') * 8)\""

  # Upgrade to the latest version of pip to avoid it displaying warnings
  # about it being out of date.
  - "pip install --disable-pip-version-check --user --upgrade pip"

  # Install the build dependencies of the project. If some dependencies contain
  # compiled extensions and are not provided as pre-built wheel packages,
  # pip will build them from source using the MSVC compiler matching the
  # target Python version and architecture
  - "%CMD_IN_ENV% pip install -r requirements.txt"

build_script:
  # Build the compiled extension
  - "%CMD_IN_ENV% python setup.py build"

test_script:
  # Run the project tests
  - "%CMD_IN_ENV% python test/test_pyenvlib.py --native --weak-verify"

after_test:
  # If tests are successful, create binary packages for the project.
  - "%CMD_IN_ENV% python setup.py bdist_wininst"
  - "%CMD_IN_ENV% python setup.py bdist_msi"
  - ps: "ls dist"

artifacts:
  # Archive the generated packages in the ci.appveyor.com build report.
  - path: dist\*

#on_success:
#  - TODO: upload the content of dist/*.whl to a public wheelhouse
#
appveyor/install.ps1 | 229 (new file)

@@ -0,0 +1,229 @@
# Sample script to install Python and pip under Windows
# Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer
# License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/

$MINICONDA_URL = "http://repo.continuum.io/miniconda/"
$BASE_URL = "https://www.python.org/ftp/python/"
$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
$GET_PIP_PATH = "C:\get-pip.py"

$PYTHON_PRERELEASE_REGEX = @"
(?x)
(?<major>\d+)
\.
(?<minor>\d+)
\.
(?<micro>\d+)
(?<prerelease>[a-z]{1,2}\d+)
"@


function Download ($filename, $url) {
    $webclient = New-Object System.Net.WebClient

    $basedir = $pwd.Path + "\"
    $filepath = $basedir + $filename
    if (Test-Path $filename) {
        Write-Host "Reusing" $filepath
        return $filepath
    }

    # Download and retry up to 3 times in case of network transient errors.
    Write-Host "Downloading" $filename "from" $url
    $retry_attempts = 2
    for ($i = 0; $i -lt $retry_attempts; $i++) {
        try {
            $webclient.DownloadFile($url, $filepath)
            break
        }
        Catch [Exception]{
            Start-Sleep 1
        }
    }
    if (Test-Path $filepath) {
        Write-Host "File saved at" $filepath
    } else {
        # Retry once to get the error message if any at the last try
        $webclient.DownloadFile($url, $filepath)
    }
    return $filepath
}


function ParsePythonVersion ($python_version) {
    if ($python_version -match $PYTHON_PRERELEASE_REGEX) {
        return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro,
                $matches.prerelease)
    }
    $version_obj = [version]$python_version
    return ($version_obj.major, $version_obj.minor, $version_obj.build, "")
}


function DownloadPython ($python_version, $platform_suffix) {
    $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version

    if (($major -le 2 -and $micro -eq 0) `
        -or ($major -eq 3 -and $minor -le 2 -and $micro -eq 0) `
        ) {
        $dir = "$major.$minor"
        $python_version = "$major.$minor$prerelease"
    } else {
        $dir = "$major.$minor.$micro"
    }

    if ($prerelease) {
        if (($major -le 2) `
            -or ($major -eq 3 -and $minor -eq 1) `
            -or ($major -eq 3 -and $minor -eq 2) `
            -or ($major -eq 3 -and $minor -eq 3) `
            ) {
            $dir = "$dir/prev"
        }
    }

    if (($major -le 2) -or ($major -le 3 -and $minor -le 4)) {
        $ext = "msi"
        if ($platform_suffix) {
            $platform_suffix = ".$platform_suffix"
        }
    } else {
        $ext = "exe"
        if ($platform_suffix) {
            $platform_suffix = "-$platform_suffix"
        }
    }

    $filename = "python-$python_version$platform_suffix.$ext"
    $url = "$BASE_URL$dir/$filename"
    $filepath = Download $filename $url
    return $filepath
}


function InstallPython ($python_version, $architecture, $python_home) {
    Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
    if (Test-Path $python_home) {
        Write-Host $python_home "already exists, skipping."
        return $false
    }
    if ($architecture -eq "32") {
        $platform_suffix = ""
    } else {
        $platform_suffix = "amd64"
    }
    $installer_path = DownloadPython $python_version $platform_suffix
    $installer_ext = [System.IO.Path]::GetExtension($installer_path)
    Write-Host "Installing $installer_path to $python_home"
    $install_log = $python_home + ".log"
    if ($installer_ext -eq '.msi') {
        InstallPythonMSI $installer_path $python_home $install_log
    } else {
        InstallPythonEXE $installer_path $python_home $install_log
    }
    if (Test-Path $python_home) {
        Write-Host "Python $python_version ($architecture) installation complete"
    } else {
        Write-Host "Failed to install Python in $python_home"
        Get-Content -Path $install_log
        Exit 1
    }
}


function InstallPythonEXE ($exepath, $python_home, $install_log) {
    $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home"
    RunCommand $exepath $install_args
}


function InstallPythonMSI ($msipath, $python_home, $install_log) {
    $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home"
    $uninstall_args = "/qn /x $msipath"
    RunCommand "msiexec.exe" $install_args
    if (-not(Test-Path $python_home)) {
        Write-Host "Python seems to be installed else-where, reinstalling."
        RunCommand "msiexec.exe" $uninstall_args
        RunCommand "msiexec.exe" $install_args
    }
}

function RunCommand ($command, $command_args) {
    Write-Host $command $command_args
    Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru
}


function InstallPip ($python_home) {
    $pip_path = $python_home + "\Scripts\pip.exe"
    $python_path = $python_home + "\python.exe"
    if (-not(Test-Path $pip_path)) {
        Write-Host "Installing pip..."
        $webclient = New-Object System.Net.WebClient
        $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH)
        Write-Host "Executing:" $python_path $GET_PIP_PATH
        & $python_path $GET_PIP_PATH
    } else {
        Write-Host "pip already installed."
    }
}


function DownloadMiniconda ($python_version, $platform_suffix) {
    if ($python_version -eq "3.4") {
        $filename = "Miniconda3-3.5.5-Windows-" + $platform_suffix + ".exe"
    } else {
        $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe"
    }
    $url = $MINICONDA_URL + $filename
    $filepath = Download $filename $url
    return $filepath
}


function InstallMiniconda ($python_version, $architecture, $python_home) {
    Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
    if (Test-Path $python_home) {
        Write-Host $python_home "already exists, skipping."
        return $false
    }
    if ($architecture -eq "32") {
        $platform_suffix = "x86"
    } else {
        $platform_suffix = "x86_64"
    }
    $filepath = DownloadMiniconda $python_version $platform_suffix
    Write-Host "Installing" $filepath "to" $python_home
    $install_log = $python_home + ".log"
    $args = "/S /D=$python_home"
    Write-Host $filepath $args
    Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru
    if (Test-Path $python_home) {
        Write-Host "Python $python_version ($architecture) installation complete"
    } else {
        Write-Host "Failed to install Python in $python_home"
        Get-Content -Path $install_log
        Exit 1
    }
}


function InstallMinicondaPip ($python_home) {
    $pip_path = $python_home + "\Scripts\pip.exe"
    $conda_path = $python_home + "\Scripts\conda.exe"
    if (-not(Test-Path $pip_path)) {
        Write-Host "Installing pip..."
        $args = "install --yes pip"
        Write-Host $conda_path $args
        Start-Process -FilePath "$conda_path" -ArgumentList $args -Wait -Passthru
    } else {
        Write-Host "pip already installed."
    }
}

function main () {
    InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON
    InstallPip $env:PYTHON
}

main
appveyor/run_with_env.cmd | 87 (new file)

@@ -0,0 +1,87 @@
:: To build extensions for 64 bit Python 3, we need to configure environment
:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of:
:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1)
::
:: To build extensions for 64 bit Python 2, we need to configure environment
:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of:
:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0)
::
:: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific
:: environment configurations.
::
:: Note: this script needs to be run with the /E:ON and /V:ON flags for the
:: cmd interpreter, at least for (SDK v7.0)
::
:: More details at:
:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows
:: http://stackoverflow.com/a/13751649/163740
::
:: Author: Olivier Grisel
:: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/
::
:: Notes about batch files for Python people:
::
:: Quotes in values are literally part of the values:
::   SET FOO="bar"
:: FOO is now five characters long: " b a r "
:: If you don't want quotes, don't include them on the right-hand side.
::
:: The CALL lines at the end of this file look redundant, but if you move them
:: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y
:: case, I don't know why.
@ECHO OFF
SET COMMAND_TO_RUN=%*
SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows
SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf

:: Extract the major and minor versions, and allow for the minor version to be
:: more than 9. This requires the version number to have two dots in it.
SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1%
IF "%PYTHON_VERSION:~3,1%" == "." (
    SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1%
) ELSE (
    SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2%
)

:: Based on the Python version, determine what SDK version to use, and whether
:: to set the SDK for 64-bit.
IF %MAJOR_PYTHON_VERSION% == 2 (
    SET WINDOWS_SDK_VERSION="v7.0"
    SET SET_SDK_64=Y
) ELSE (
    IF %MAJOR_PYTHON_VERSION% == 3 (
        SET WINDOWS_SDK_VERSION="v7.1"
        IF %MINOR_PYTHON_VERSION% LEQ 4 (
            SET SET_SDK_64=Y
        ) ELSE (
            SET SET_SDK_64=N
            IF EXIST "%WIN_WDK%" (
                :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/
                REN "%WIN_WDK%" 0wdf
            )
        )
    ) ELSE (
        ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%"
        EXIT 1
    )
)

IF %PYTHON_ARCH% == 64 (
    IF %SET_SDK_64% == Y (
        ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture
        SET DISTUTILS_USE_SDK=1
        SET MSSdk=1
        "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION%
        "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release
        ECHO Executing: %COMMAND_TO_RUN%
        call %COMMAND_TO_RUN% || EXIT 1
    ) ELSE (
        ECHO Using default MSVC build environment for 64 bit architecture
        ECHO Executing: %COMMAND_TO_RUN%
        call %COMMAND_TO_RUN% || EXIT 1
    )
) ELSE (
    ECHO Using default MSVC build environment for 32 bit architecture
    ECHO Executing: %COMMAND_TO_RUN%
    call %COMMAND_TO_RUN% || EXIT 1
)
@@ -6,8 +6,8 @@ machine:

dependencies:
  override:
    - pip install -r requirements.txt
    - pip install -e .
    - pip install -r requirements-dev.txt
test:
  override:
    - python ./setup.py develop && make check-2.7
    - python ./setup.py develop && make check-2.6
pytest/test_CALL_FUNCTION_KW.sh | 6 (new file)

@@ -0,0 +1,6 @@
source ../.venv.3.6/bin/activate
py.test -k test_CALL_FUNCTION_KW
source ../.venv.3.5/bin/activate
py.test -k test_CALL_FUNCTION_KW
source ../.venv.2.7/bin/activate
py.test -k test_CALL_FUNCTION_KW
@@ -1,150 +0,0 @@
# std
import os
# test
import pytest
import hypothesis
from hypothesis import strategies as st
# uncompyle6
from uncompyle6 import PYTHON_VERSION, deparse_code


@st.composite
def expressions(draw):
    # todo : would be nice to generate expressions using hypothesis however
    # this is pretty involved so for now just use a corpus of expressions
    # from which to select.
    return draw(st.sampled_from((
        'abc',
        'len(items)',
        'x + 1',
        'lineno',
        'container',
        'self.attribute',
        'self.method()',
        # These expressions are failing, I think these are control
        # flow problems rather than problems with FORMAT_VALUE,
        # however I need to confirm this...
        #'sorted(items, key=lambda x: x.name)',
        #'func(*args, **kwargs)',
        #'text or default',
        #'43 if life_the_universe and everything else None'
    )))


@st.composite
def format_specifiers(draw):
    """
    Generate a valid format specifier using the rules:

    format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]
    fill ::= <any character>
    align ::= "<" | ">" | "=" | "^"
    sign ::= "+" | "-" | " "
    width ::= integer
    precision ::= integer
    type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"

    See https://docs.python.org/2/library/string.html

    :param draw: Let hypothesis draw from other strategies.

    :return: An example format_specifier.
    """
    alphabet_strategy = st.characters(min_codepoint=ord('a'), max_codepoint=ord('z'))
    fill = draw(st.one_of(alphabet_strategy, st.none()))
    align = draw(st.sampled_from(list('<>=^')))
    fill_align = (fill + align or '') if fill else ''

    type_ = draw(st.sampled_from('bcdeEfFgGnosxX%'))
    can_have_sign = type_ in 'deEfFgGnoxX%'
    can_have_comma = type_ in 'deEfFgG%'
    can_have_precision = type_ in 'fFgG'
    can_have_pound = type_ in 'boxX%'
    can_have_zero = type_ in 'oxX'

    sign = draw(st.sampled_from(list('+- ') + [''])) if can_have_sign else ''
    pound = draw(st.sampled_from(('#', '',))) if can_have_pound else ''
    zero = draw(st.sampled_from(('0', '',))) if can_have_zero else ''

    int_strategy = st.integers(min_value=1, max_value=1000)

    width = draw(st.one_of(int_strategy, st.none()))
    width = str(width) if width is not None else ''

    comma = draw(st.sampled_from((',', '',))) if can_have_comma else ''
    if can_have_precision:
        precision = draw(st.one_of(int_strategy, st.none()))
        precision = '.' + str(precision) if precision else ''
    else:
        precision = ''

    return ''.join((fill_align, sign, pound, zero, width, comma, precision, type_,))


@st.composite
def fstrings(draw):
    """
    Generate a valid f-string.
    See https://www.python.org/dev/peps/pep-0498/#specification

    :param draw: Let hypothesis draw from other strategies.

    :return: A valid f-string.
    """
    character_strategy = st.characters(
        blacklist_characters='\r\n\'\\s{}',
        min_codepoint=1,
        max_codepoint=1000,
    )
    is_raw = draw(st.booleans())
    integer_strategy = st.integers(min_value=0, max_value=3)
    expression_count = draw(integer_strategy)
    content = []
    for _ in range(expression_count):
        expression = draw(expressions())
        conversion = draw(st.sampled_from(('', '!s', '!r', '!a',)))
        has_specifier = draw(st.booleans())
        specifier = ':' + draw(format_specifiers()) if has_specifier else ''
        content.append('{{{}{}}}'.format(expression, conversion, specifier))
    content.append(draw(st.text(character_strategy)))
    content = ''.join(content)
    return "f{}'{}'".format('r' if is_raw else '', content)


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@hypothesis.given(format_specifiers())
def test_format_specifiers(format_specifier):
    """Verify that format_specifiers generates valid specifiers"""
    try:
        exec('"{:' + format_specifier + '}".format(0)')
    except ValueError as e:
        if 'Unknown format code' not in str(e):
            raise


def run_test(text):
    hypothesis.assume(len(text))
    hypothesis.assume("f'{" in text)
    expr = text + '\n'
    code = compile(expr, '<string>', 'single')
    deparsed = deparse_code(PYTHON_VERSION, code, compile_mode='single')
    recompiled = compile(deparsed.text, '<string>', 'single')
    if recompiled != code:
        assert 'dis(' + deparsed.text.strip('\n') + ')' == 'dis(' + expr.strip('\n') + ')'


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@hypothesis.given(fstrings())
def test_uncompyle_fstring(fstring):
    """Verify uncompyling fstring bytecode"""
    run_test(fstring)


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@pytest.mark.parametrize('fstring', [
    "f'{abc}{abc!s}'",
    "f'{abc}0'",
])
def test_uncompyle_direct(fstring):
    """useful for debugging"""
    run_test(fstring)
@@ -40,7 +40,9 @@ def test_grammar():
    ignore_set = set(
        """
        JUMP_BACK CONTINUE RETURN_END_IF
        COME_FROM COME_FROM_EXCEPT COME_FROM_LOOP COME_FROM_WITH
        COME_FROM COME_FROM_EXCEPT
        COME_FROM_EXCEPT_CLAUSE
        COME_FROM_LOOP COME_FROM_WITH
        COME_FROM_FINALLY ELSE
        LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
        LAMBDA_MARKER RETURN_LAST
@@ -2,18 +2,22 @@
from __future__ import print_function
# std
import os
import dis
import difflib
import subprocess
import tempfile
# compatibility
import six
import functools
from StringIO import StringIO
# uncompyle6 / xdis
from uncompyle6 import PYTHON_VERSION, deparse_code
from uncompyle6 import PYTHON_VERSION, IS_PYPY, deparse_code
# TODO : I think we can get xdis to support the dis api (python 3 version) by doing something like this there
from xdis.bytecode import Bytecode
from xdis.main import get_opcode
opc = get_opcode(PYTHON_VERSION, IS_PYPY)
Bytecode = functools.partial(Bytecode, opc=opc)


def _dis_to_text(co):
    return dis.Bytecode(co).dis()
    return Bytecode(co).dis()


def print_diff(original, uncompyled):

@@ -99,9 +103,8 @@ def are_code_objects_equal(co1, co2):

    :return: True if the two code objects are approximately equal, otherwise False.
    """
    # TODO : Use xdis for python2 compatibility
    instructions1 = dis.Bytecode(co1)
    instructions2 = dis.Bytecode(co2)
    instructions1 = Bytecode(co1)
    instructions2 = Bytecode(co2)
    for opcode1, opcode2 in zip(instructions1, instructions2):
        if not are_instructions_equal(opcode1, opcode2):
            return False

@@ -119,7 +122,7 @@ def validate_uncompyle(text, mode='exec'):
    original_text = text

    deparsed = deparse_code(PYTHON_VERSION, original_code,
                            compile_mode=mode, out=six.StringIO())
                            compile_mode=mode, out=StringIO())
    uncompyled_text = deparsed.text
    uncompyled_code = compile(uncompyled_text, '<string>', 'exec')
@@ -1,4 +1,3 @@
pytest
pytest>=3.0.0
flake8
hypothesis
six
@@ -3,7 +3,7 @@ PHONY=check clean dist distclean test test-unit test-functional rmChangeLog clea
GIT2CL ?= git2cl
PYTHON ?= python

PYTHON_VERSION = $(shell $(PYTHON) -V | cut -d ' ' -f 2 | cut -d'.' -f1,2)
PYTHON_VERSION = $(shell $(PYTHON) -V 2>&1 | cut -d ' ' -f 2 | cut -d'.' -f1,2)
NATIVE_CHECK = check-$(PYTHON_VERSION)

# Set COMPILE='--compile' to force compilation before check

@@ -16,11 +16,10 @@ check-short:

# Run all tests
check:
        @$(PYTHON) -V && PYTHON_VERSION=`$(PYTHON) -V 2>&1 | cut -d ' ' -f 2 | cut -d'.' -f1,2`; \
        $(MAKE) check-$$PYTHON_VERSION
        $(MAKE) check-$(PYTHON_VERSION)

#: Run working tests from Python 2.6 or 2.7
check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check-bytecode-1 check-2.7-ok
check-2.4 check-2.5 check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check-bytecode-1 check-native-short

#: Run working tests from Python 3.0
check-3.0: check-bytecode

@@ -36,7 +35,7 @@ check-3.2: check-bytecode

#: Run working tests from Python 3.3
check-3.3: check-bytecode
        $(PYTHON) test_pythonlib.py --bytecode-3.3 --verify $(COMPILE)
        $(PYTHON) test_pythonlib.py --bytecode-3.3 --weak-verify $(COMPILE)

#: Run working tests from Python 3.4
check-3.4: check-bytecode check-3.4-ok check-2.7-ok

@@ -68,7 +67,7 @@ check-bytecode-2:
check-bytecode-3:
        $(PYTHON) test_pythonlib.py --bytecode-3.0 \
        --bytecode-3.1 --bytecode-3.2 --bytecode-3.3 \
        --bytecode-3.4 --bytecode-3.5 --bytecode-pypy3.2
        --bytecode-3.4 --bytecode-3.5 --bytecode-3.6 --bytecode-pypy3.2

#: Check deparsing bytecode that works running Python 2 and Python 3
check-bytecode: check-bytecode-3

@@ -98,29 +97,6 @@ check-bytecode-2.4:
check-bytecode-2.5:
        $(PYTHON) test_pythonlib.py --bytecode-2.5

#: Get grammar coverage for Python 2.5
grammar-coverage-2.5:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-25.cover $(PYTHON) test_pythonlib.py --bytecode-2.5
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-25.cover $(PYTHON) test_pyenvlib.py --2.5.6

#: Get grammar coverage for Python 2.6
grammar-coverage-2.6:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-26.cover $(PYTHON) test_pythonlib.py --bytecode-2.6
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-26.cover $(PYTHON) test_pyenvlib.py --2.6.9

#: Get grammar coverage for Python 2.7
grammar-coverage-2.7:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-27.cover $(PYTHON) test_pythonlib.py --bytecode-2.7
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-27.cover $(PYTHON) test_pyenvlib.py --2.7.13

#: Check deparsing Python 2.6
check-bytecode-2.6:
        $(PYTHON) test_pythonlib.py --bytecode-2.6 --weak-verify

#: Check deparsing Python 2.7
check-bytecode-2.7:
        $(PYTHON) test_pythonlib.py --bytecode-2.7 --verify

#: Check deparsing Python 3.0
check-bytecode-3.0:
        $(PYTHON) test_pythonlib.py --bytecode-3.0

@@ -149,9 +125,33 @@ check-bytecode-3.5:
check-bytecode-3.6:
        $(PYTHON) test_pythonlib.py --bytecode-3.6

#: Get grammar coverage for Python 2.4
grammar-coverage-2.4:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-24.cover $(PYTHON) test_pythonlib.py --bytecode-2.4
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-24.cover $(PYTHON) test_pyenvlib.py --2.4.6

#: Get grammar coverage for Python 2.5
grammar-coverage-2.5:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-25.cover $(PYTHON) test_pythonlib.py --bytecode-2.5
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-25.cover $(PYTHON) test_pyenvlib.py --2.5.6

#: Get grammar coverage for Python 2.6
grammar-coverage-2.6:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-26.cover $(PYTHON) test_pythonlib.py --bytecode-2.6
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-26.cover $(PYTHON) test_pyenvlib.py --2.6.9

#: Get grammar coverage for Python 2.7
grammar-coverage-2.7:
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-27.cover $(PYTHON) test_pythonlib.py --bytecode-2.7
        SPARK_PARSER_COVERAGE=/tmp/spark-grammar-27.cover $(PYTHON) test_pyenvlib.py --2.7.13

#: short tests for bytecodes only for this version of Python
check-native-short:
        $(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --verify $(COMPILE)
        $(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --weak-verify $(COMPILE)

#: Run longer Python 2.6's lib files known to be okay
check-2.4-ok:
        $(PYTHON) test_pythonlib.py --ok-2.4 --verify $(COMPILE)

#: Run longer Python 2.6's lib files known to be okay
check-2.6-ok:
New binary test files (content not shown):

test/bytecode_2.3/03_if1.pyc
test/bytecode_2.4/01_ops.pyc
test/bytecode_2.5/01_ops.pyc
test/bytecode_2.6/03_double_equals.pyc
test/bytecode_2.7/02_complex.pyc
test/bytecode_2.7/02_while1else.pyc
test/bytecode_2.7/03_double_equals.pyc
test/bytecode_2.7/03_if_1_else.pyc
test/bytecode_3.1/02_ifelse_comprehension.pyc
test/bytecode_3.1/03_if_1_else.pyc
test/bytecode_3.1/12_if_while_true_pass.pyc
test/bytecode_3.2/01_delete_deref.pyc
test/bytecode_3.2/01_named_and_kwargs.pyc
test/bytecode_3.2/01_try_except_raise.pyc
test/bytecode_3.2/03_if.pyc
test/bytecode_3.3/02_while1else.pyc
test/bytecode_3.3/12_if_while_true_pass.pyc
test/bytecode_3.5/01_map_unpack.pyc
test/bytecode_3.5/01_named_and_kwargs.pyc
test/bytecode_3.5/02_async_for.pyc
test/bytecode_3.5/02_while1else.pyc
test/bytecode_3.5/03_while-if-break.pyc-notyet
test/bytecode_3.5/04_CALL_FUNCTION_VAR_KW.pyc
test/bytecode_3.5/10_kw+pos_args-bug.pyc
test/bytecode_3.6/01_call_function.pyc
test/bytecode_3.6/01_extended_arg.pyc-notyet
test/bytecode_3.6/04_CALL_FUNCTION_VAR_KW.pyc-notyet
test/bytecode_3.6/11_classbug.pyc

Several other binary files were also changed but are not shown.
7
test/simple_source/bug22/03_if1.py
Normal file
7
test/simple_source/bug22/03_if1.py
Normal file
@@ -0,0 +1,7 @@
|
||||
# From https://github.com/ToontownInfinite /src/otp/avatar/LocalAvatar.py#L364
|
||||
if 1:
|
||||
def jumpLandAnimFix(self, jumpTime):
|
||||
return 5
|
||||
|
||||
def jumpLand(self):
|
||||
return 6
|
6
test/simple_source/bug26/03_double_equals.py
Normal file
6
test/simple_source/bug26/03_double_equals.py
Normal file
@@ -0,0 +1,6 @@
|
||||
# From Python 2.7 parse_starttag HTMLParser.pyc
|
||||
attrvalue = [1,2]
|
||||
while attrvalue:
|
||||
if attrvalue[:1] == 5 or \
|
||||
attrvalue[:1] == 2 == attrvalue[-1:]:
|
||||
attrvalue = 10
|
1
test/simple_source/bug27+/03_if_1_else.py
Normal file
1
test/simple_source/bug27+/03_if_1_else.py
Normal file
@@ -0,0 +1 @@
|
||||
1 if 1 else __file__
|
12
test/simple_source/bug31/02_ifelse_comprehension.py
Normal file
12
test/simple_source/bug31/02_ifelse_comprehension.py
Normal file
@@ -0,0 +1,12 @@
|
||||
# Python 2.7 sqlalchemy-1.013/sql/crud.py
|
||||
def _extend_values_for_multiparams(compiler, stmt, c):
|
||||
c(
|
||||
[
|
||||
(
|
||||
(compiler() if compiler()
|
||||
else compiler())
|
||||
if c in stmt else compiler(),
|
||||
)
|
||||
]
|
||||
for i in enumerate(stmt)
|
||||
)
|
@@ -17,3 +17,14 @@ def div(a: dict(type=float, help='the dividend'),
|
||||
) -> dict(type=float, help='the result of dividing a by b'):
|
||||
"""Divide a by b"""
|
||||
return a / b
|
||||
|
||||
class TestSignatureObject(unittest.TestCase):
|
||||
def test_signature_on_wkwonly(self):
|
||||
def test(*, a:float, b:str) -> int:
|
||||
pass
|
||||
|
||||
class SupportsInt(_Protocol):
|
||||
|
||||
@abstractmethod
|
||||
def __int__(self) -> int:
|
||||
pass
|
||||
|
@@ -18,3 +18,12 @@ def __init__(self, defaults=None, dict_type=_default_dict,
|
||||
default_section=DEFAULTSECT,
|
||||
interpolation=_UNSET):
|
||||
pass
|
||||
|
||||
# Bug found by hypothesis in creating function calls
|
||||
# thanks to moagstar
|
||||
def fn(a, b, d):
|
||||
return (a, b, d)
|
||||
|
||||
b = {'b': 1,
|
||||
'd': 2}
|
||||
fn(a=0, **b)
|
||||
|
9
test/simple_source/bug32/01_try_except_raise.py
Normal file
9
test/simple_source/bug32/01_try_except_raise.py
Normal file
@@ -0,0 +1,9 @@
|
||||
# From 3.2 _abcoll.py
|
||||
def pop(self):
|
||||
it = iter(self)
|
||||
try:
|
||||
value = next(it)
|
||||
except StopIteration:
|
||||
raise KeyError
|
||||
self.discard(value)
|
||||
return value
|
11
test/simple_source/bug32/03_if.py
Normal file
11
test/simple_source/bug32/03_if.py
Normal file
@@ -0,0 +1,11 @@
# From 3.2 shlex.py
def _samefile(os, src, dst):
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))
4   test/simple_source/bug33/01_delete_deref.py   (new file)
@@ -0,0 +1,4 @@
def a():
    del y
    def b():
        return y
16   test/simple_source/bug33/01_if_try_except.py   (new file)
@@ -0,0 +1,16 @@
# From 3.3.5 _osx_support.py
def _get_system_version():
    if __file__ is None:
        try:
            m = 5
        except IOError:
            pass
        else:
            try:
                m = 10
            finally:
                m = 15
            if m is not None:
                m = 20

    return m
16   test/simple_source/bug33/01_try_except.py   (new file)
@@ -0,0 +1,16 @@
# From 3.3.5 _osx_support.py
def _get_system_version():
    if __file__ is None:
        try:
            m = 5
        except IOError:
            pass
        else:
            try:
                m = 10
            finally:
                m = 15
            if m is not None:
                m = 20

    return m
@@ -1,3 +1,6 @@
# From Python 3.3.6 hmac.py
# Problem was getting wrong placement of positional args
digest_cons = lambda d=b'': 5

# Handle single kwarg
lambda *, d=0: None
9   test/simple_source/bug35/01_map_unpack.py   (new file)
@@ -0,0 +1,9 @@
# Python 3.5+ PEP 448 - Additional Unpacking Generalizations for dictionaries
{**{}}
{**{'a': 1, 'b': 2}}
## {**{'x': 1}, **{'y': 2}}
# {'c': 1, {'d': 2}, **{'e': 3}}
[*[]]
{**{0:0 for a in b}}
## {**{}, **{}}
## {**{}, **{}, **{}}
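For readers unfamiliar with PEP 448: when several mappings are unpacked into one dict display, later keys win. A one-line check on any Python 3.5+ interpreter (not part of the test file):

print({**{'a': 1}, **{'a': 2, 'b': 3}})   # {'a': 2, 'b': 3} -- the rightmost 'a' wins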
3   test/simple_source/bug35/02_async_for.py   (new file)
@@ -0,0 +1,3 @@
async def a(b, c):
    async for b in c:
        pass
@@ -24,3 +24,9 @@ async def awith_test():
async def awith_as_test():
    async with 1 as i:
        print(i)

async def f(z):
    await z

async def g(z):
    return await z
@@ -5,3 +5,10 @@ def display_date(loop):
        if loop.time():
            break
    x = 5

    # Another loop to test 3.5 ifelsestmtl grammar rule
    while loop:
        if x:
            True
        else:
            True
8   test/simple_source/bug35/04_CALL_FUNCTION_VAR_KW.py   (new file)
@@ -0,0 +1,8 @@
# sql/schema.py
# Note that kwargs comes before "positional" args
def tometadata(self, metadata, schema, Table, args, name=None):
    table = Table(
        name, metadata, schema=schema,
        *args, **self.kwargs
    )
    return table
5   test/simple_source/bug36/01_call_function.py   (new file)
@@ -0,0 +1,5 @@
# Python 3.6's changes for calling functions.
# See https://github.com/rocky/python-uncompyle6/issues/58
# CALL_FUNCTION_EX takes 2 to 3 arguments on the stack: the function, the tuple of positional arguments,
# and optionally the dict of keyword arguments if bit 0 of oparg is 1.
a(*[])
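A quick way to watch that opcode appear is to disassemble the same call on a CPython 3.6+ interpreter; this is illustrative only, and the exact offsets and surrounding instructions vary by version:

import dis

dis.dis(compile("a(*[])", "<example>", "eval"))
# Roughly, on CPython 3.6:
#   LOAD_NAME            a
#   BUILD_LIST           0
#   CALL_FUNCTION_EX     0    <- bit 0 of oparg clear: no keyword dict on the stack
#   RETURN_VALUE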
2   test/simple_source/bug36/01_extended_arg.py   (new file)
@@ -0,0 +1,2 @@
if __file__:
0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0+0
@@ -8,3 +8,20 @@ def __init__(self, defaults=None, dict_type=_default_dict,
                 default_section=DEFAULTSECT,
                 interpolation=_UNSET):
        pass

# From 3.5 sqlalchemy/orm/__init__.py
# Python 3.5 changes the stack position of where * args are (furthest down the stack)
# Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
def deferred(*columns, **kw):
    return ColumnProperty(deferred=True, *columns, **kw)


# From sqlalchemy/sql/selectable.py
class GenerativeSelect():
    def __init__(self,
                 ClauseList,
                 util,
                 order_by=None):
        self._order_by_clause = ClauseList(
            *util.to_list(order_by),
            _literal_as_text=5)
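To see the difference those comments describe, disassembling one such mixed call on each interpreter is enough. A sketch only: opcode arguments and ordering differ between minor versions.

import dis

dis.dis(compile("ColumnProperty(deferred=True, *columns, **kw)", "<example>", "eval"))
# CPython 3.5: the call compiles to CALL_FUNCTION_VAR_KW, with the *columns tuple pushed
#              below the keyword items on the stack.
# CPython 3.6: the explicit keyword and **kw are merged with BUILD_MAP_UNPACK_WITH_CALL and
#              the call itself is lowered to CALL_FUNCTION_EX.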
1123   test/simple_source/expression/06_huge_list.py   (new file)
File diff suppressed because it is too large.
14   test/simple_source/looping/12_if_while_true_pass.py   (new file)
@@ -0,0 +1,14 @@
# Python 3.3 pyclbr.py
# Note that Python 3 adds a lot of unnecessary "continues"
# and puts that in for "pass"
def _readmodule(g, token, path):
    for tokentype in g:
        if g:
            while True:
                if token:
                    token = 1
                elif token:
                    pass
                elif tokentype:
                    token = 7
            token = 10
8   test/simple_source/stmts/02_while1else.py   (new file)
@@ -0,0 +1,8 @@
# From Python-3.5.2/Lib/multiprocessing/connection.py

def PipeClient(address):
    while 1:
        z = 2
    else:
        raise
    return
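The else: on a loop is the corner this test pokes at: a loop's else clause runs only when the loop finishes without break, so "while 1: ... else:" leaves the else clause effectively unreachable, and the decompiler must neither drop it nor invent one. A standalone refresher of the semantics (independent of the test above):

def drain(n):
    while n:
        n -= 1
    else:
        print("condition became false; no break happened")

def find_even(items):
    for x in items:
        if x % 2 == 0:
            print("found", x)
            break
    else:
        print("no even number; loop ran to completion")

drain(3)            # prints the while/else message
find_even([1, 3])   # prints the for/else message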
@@ -19,10 +19,8 @@ Step 2: Run the test:
  test_pyenvlib --mylib --verify # decompile verify 'mylib'
"""

from __future__ import print_function

from uncompyle6 import main, PYTHON3
import os, time, shutil
import os, time, shutil, sys
from fnmatch import fnmatch

#----- configure this for your needs
@@ -33,7 +31,7 @@ TEST_VERSIONS=('2.3.7', '2.4.6', '2.5.6', '2.6.9',
                '2.7.10', '2.7.11', '2.7.12', '2.7.13',
                '3.0.1', '3.1.5', '3.2.6',
                '3.3.5', '3.3.6',
                '3.4.2', '3.5.1', '3.6.0')
                '3.4.2', '3.5.1', '3.6.0', 'native')

target_base = '/tmp/py-dis/'
lib_prefix = os.path.join(os.environ['HOME'], '.pyenv/versions')
@@ -54,6 +52,11 @@ for vers in TEST_VERSIONS:
        short_vers = vers[0:-2]
        test_options[vers] = (os.path.join(lib_prefix, vers, 'lib_pypy'),
                              PYC, 'python-lib'+short_vers)
    if vers == 'native':
        short_vers = os.path.basename(sys.path[-1])
        test_options[vers] = (sys.path[-1],
                              PYC, short_vers)

    else:
        short_vers = vers[:3]
        test_options[vers] = (os.path.join(lib_prefix, vers, 'lib', 'python'+short_vers),
@@ -27,8 +27,6 @@ Step 2: Run the test:
  test_pythonlib.py --mylib --verify # decompile verify 'mylib'
"""

from __future__ import print_function

import getopt, os, py_compile, sys, shutil, tempfile, time

from uncompyle6 import PYTHON_VERSION
@@ -127,8 +125,10 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
    if opts['do_compile']:
        compiled_version = opts['compiled_version']
        if compiled_version and PYTHON_VERSION != compiled_version:
            print("Not compiling: desired Python version is %s but we are running %s" %
                  (compiled_version, PYTHON_VERSION), file=sys.stderr)
            sys.stderr.write("Not compiling: "
                             "desired Python version is %s "
                             "but we are running %s" %
                             (compiled_version, PYTHON_VERSION))
        else:
            for root, dirs, basenames in os.walk(src_dir):
                file_matches(files, root, basenames, PY)
@@ -146,8 +146,8 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
            file_matches(files, dirname, basenames, obj_patterns)

    if not files:
        print("Didn't come up with any files to test! Try with --compile?",
              file=sys.stderr)
        sys.stderr.write("Didn't come up with any files to test! "
                         "Try with --compile?")
        exit(1)

    os.chdir(cwd)
@@ -161,9 +161,9 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
    except ValueError:
        pass

    print(time.ctime())
    print('Source directory: ', src_dir)
    print('Output directory: ', target_dir)
    print time.ctime()
    print 'Source directory: ', src_dir
    print 'Output directory: ', target_dir
    try:
        _, _, failed_files, failed_verify = \
            main(src_dir, target_dir, files, [],
@@ -236,14 +236,13 @@ if __name__ == '__main__':
        if os.path.isdir(src_dir):
            checked_dirs.append([src_dir, pattern, target_dir])
        else:
            print("Can't find directory %s. Skipping" % src_dir,
                  file=sys.stderr)
            sys.stderr.write("Can't find directory %s. Skipping" % src_dir)
            continue
        last_compile_version = compiled_version
        pass

    if not checked_dirs:
        print("No directories found to check", file=sys.stderr)
        sys.stderr.write("No directories found to check\n")
        sys.exit(1)

    test_opts['compiled_version'] = last_compile_version
@@ -3,7 +3,6 @@
#
# Copyright (c) 2015-2016 by Rocky Bernstein <rb@dustyfeet.com>
#
from __future__ import print_function
import sys, os, getopt

from uncompyle6.disas import disassemble_file
@@ -26,7 +25,7 @@ Options:
  -V | --version show version and stop
  -h | --help show this message

""".format(program)
""" % (program, program)

PATTERNS = ('*.pyc', '*.pyo')

@@ -37,15 +36,15 @@ Type -h for for full help.""" % program
    native = True

    if len(sys.argv) == 1:
        print("No file(s) given", file=sys.stderr)
        print(Usage_short, file=sys.stderr)
        sys.stderr.write("No file(s) given\n")
        sys.stderr.write(Usage_short)
        sys.exit(1)

    try:
        opts, files = getopt.getopt(sys.argv[1:], 'hVU',
                                    ['help', 'version', 'uncompyle6'])
    except getopt.GetoptError as e:
        print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
    except getopt.GetoptError(e):
        sys.stderr.write('%s: %s' % (os.path.basename(sys.argv[0]), e))
        sys.exit(-1)

    for opt, val in opts:
@@ -59,15 +58,14 @@ Type -h for for full help.""" % program
            native = False
        else:
            print(opt)
            print(Usage_short, file=sys.stderr)
            sys.stderr.write(Usage_short)
            sys.exit(1)

    for file in files:
        if os.path.exists(files[0]):
            disassemble_file(file, sys.stdout, native)
        else:
            print("Can't read %s - skipping" % files[0],
                  file=sys.stderr)
            sys.stderr.write("Can't read %s - skipping\n" % files[0])
            pass
        pass
    return
@@ -4,7 +4,6 @@
# Copyright (c) 2015-2016 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
from __future__ import print_function
import sys, os, getopt, time

program, ext = os.path.splitext(os.path.basename(__file__))
@@ -65,11 +64,11 @@ def usage():


def main_bin():
    if not (sys.version_info[0:2] in ((2, 6), (2, 7),
                                      (3, 1), (3, 2), (3, 3),
    if not (sys.version_info[0:2] in ((2, 4), (2, 5), (2, 6), (2, 7),
                                      (3, 2), (3, 3),
                                      (3, 4), (3, 5), (3, 6))):
        print('Error: %s requires Python 2.6-2.7, or 3.1-3.6' % program,
              file=sys.stderr)
        sys.stderr.write('Error: %s requires Python 2.4 2.5 2.6, 2.7, '
                         '3.2, 3.3, 3.4, 3.5, or 3.6' % program)
        sys.exit(-1)

    do_verify = recurse_dirs = False
@@ -84,8 +83,8 @@ def main_bin():
        opts, files = getopt.getopt(sys.argv[1:], 'hagtdrVo:c:p:',
                                    'help asm grammar linemaps recurse timestamp tree '
                                    'verify version showgrammar'.split(' '))
    except getopt.GetoptError as e:
        print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
    except getopt.GetoptError(e):
        sys.stderr.write('%s: %s\n' % (os.path.basename(sys.argv[0]), e))
        sys.exit(-1)

    options = {}
@@ -119,7 +118,7 @@ def main_bin():
        elif opt in ('--recurse', '-r'):
            recurse_dirs = True
        else:
            print(opt, file=sys.stderr)
            sys.stderr.write(opt)
            usage()

    # expand directory if specified
@@ -144,7 +143,7 @@ def main_bin():
        files = [f[sb_len:] for f in files]

    if not files:
        print("No files given", file=sys.stderr)
        sys.stderr.write("No files given\n")
        usage()

    if outfile == '-':
@@ -16,8 +16,6 @@ Second, we need structured instruction information for the
want to run on Python 2.7.
"""

from __future__ import print_function

import sys
from collections import deque

@@ -37,10 +35,9 @@ def disco(version, co, out=None, is_pypy=False):

    # store final output stream for case of error
    real_out = out or sys.stdout
    print('# Python %s' % version, file=real_out)
    real_out.write('# Python %s\n' % version)
    if co.co_filename:
        print('# Embedded file name: %s' % co.co_filename,
              file=real_out)
        real_out.write('# Embedded file name: %s\n' % co.co_filename)

    scanner = get_scanner(version, is_pypy=is_pypy)

@@ -52,16 +49,15 @@ def disco_loop(disasm, queue, real_out):
    while len(queue) > 0:
        co = queue.popleft()
        if co.co_name != '<module>':
            print('\n# %s line %d of %s' %
                  (co.co_name, co.co_firstlineno, co.co_filename),
                  file=real_out)
            real_out.write('\n# %s line %d of %s\n' %
                           (co.co_name, co.co_firstlineno, co.co_filename))
        tokens, customize = disasm(co)
        for t in tokens:
            if iscode(t.pattr):
                queue.append(t.pattr)
            elif iscode(t.attr):
                queue.append(t.attr)
            print(t, file=real_out)
            real_out.write(t)
            pass
        pass
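The disco_loop() change keeps the same traversal idea either way: pop a code object off a deque, emit it, and queue any nested code objects found along the way. A stripped-down sketch of that pattern, walking co_consts directly instead of uncompyle6's token stream (illustrative only, not the project's API):

from collections import deque
from types import CodeType

def walk_code(root):
    """Yield a code object and every code object nested inside it, breadth-first."""
    queue = deque([root])
    while queue:
        co = queue.popleft()
        yield co
        # Nested functions, lambdas, comprehensions, and class bodies live in co_consts.
        queue.extend(c for c in co.co_consts if isinstance(c, CodeType))

for co in walk_code(compile("def f():\n    def g(): pass\n", "<example>", "exec")):
    print(co.co_name)   # prints: <module>, f, g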
@@ -10,7 +10,7 @@ def line_number_mapping(pyc_filename, src_filename):
                     source_size) = load_module(pyc_filename)
    try:
        code2 = load_file(src_filename)
    except SyntaxError as e:
    except SyntaxError, e:
        return str(e)

    queue = deque([code1, code2])
@@ -1,4 +1,3 @@
|
||||
from __future__ import print_function
|
||||
import datetime, os, subprocess, sys, tempfile
|
||||
|
||||
from uncompyle6 import verify, IS_PYPY
|
||||
@@ -22,31 +21,36 @@ def decompile(
|
||||
|
||||
# store final output stream for case of error
|
||||
real_out = out or sys.stdout
|
||||
co_pypy_str = 'PyPy ' if is_pypy else ''
|
||||
run_pypy_str = 'PyPy ' if IS_PYPY else ''
|
||||
print('# uncompyle6 version %s\n'
|
||||
'# %sPython bytecode %s%s\n# Decompiled from: %sPython %s' %
|
||||
(VERSION, co_pypy_str, bytecode_version,
|
||||
" (%d)" % magic_int if magic_int else "",
|
||||
run_pypy_str, '\n# '.join(sys.version.split('\n'))),
|
||||
file=real_out)
|
||||
if co.co_filename:
|
||||
print('# Embedded file name: %s' % co.co_filename,
|
||||
file=real_out)
|
||||
if timestamp:
|
||||
print('# Compiled at: %s' % datetime.datetime.fromtimestamp(timestamp),
|
||||
file=real_out)
|
||||
if source_size:
|
||||
print('# Size of source mod 2**32: %d bytes' % source_size,
|
||||
file=real_out)
|
||||
if is_pypy:
|
||||
co_pypy_str = 'PyPy '
|
||||
else:
|
||||
co_pypy_str = ''
|
||||
|
||||
try:
|
||||
pysource.deparse_code(bytecode_version, co, out, showasm, showast,
|
||||
showgrammar, code_objects=code_objects,
|
||||
is_pypy=is_pypy)
|
||||
except pysource.SourceWalkerError as e:
|
||||
# deparsing failed
|
||||
raise pysource.SourceWalkerError(str(e))
|
||||
if IS_PYPY:
|
||||
run_pypy_str = 'PyPy '
|
||||
else:
|
||||
run_pypy_str = ''
|
||||
|
||||
if magic_int:
|
||||
m = str(magic_int)
|
||||
else:
|
||||
m = ""
|
||||
real_out.write('# uncompyle6 version %s\n'
|
||||
'# %sPython bytecode %s%s\n# Decompiled from: %sPython %s\n' %
|
||||
(VERSION, co_pypy_str, bytecode_version,
|
||||
" (%s)" % m, run_pypy_str,
|
||||
'\n# '.join(sys.version.split('\n'))))
|
||||
if co.co_filename:
|
||||
real_out.write('# Embedded file name: %s\n' % co.co_filename)
|
||||
if timestamp:
|
||||
real_out.write('# Compiled at: %s\n' %
|
||||
datetime.datetime.fromtimestamp(timestamp))
|
||||
if source_size:
|
||||
real_out.write('# Size of source mod 2**32: %d bytes\n' % source_size)
|
||||
|
||||
pysource.deparse_code(bytecode_version, co, out, showasm, showast,
|
||||
showgrammar, code_objects=code_objects,
|
||||
is_pypy=is_pypy)
|
||||
|
||||
# For compatibility
|
||||
uncompyle = decompile
|
||||
@@ -106,6 +110,7 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
return open(outfile, 'w')
|
||||
|
||||
tot_files = okay_files = failed_files = verify_failed_files = 0
|
||||
current_outfile = outfile
|
||||
|
||||
for filename in files:
|
||||
infile = os.path.join(in_base, filename)
|
||||
@@ -127,26 +132,29 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
junk, outfile = tempfile.mkstemp(suffix=".py",
|
||||
prefix=prefix)
|
||||
# Unbuffer output if possible
|
||||
buffering = -1 if sys.stdout.isatty() else 0
|
||||
if sys.stdout.isatty():
|
||||
buffering = -1
|
||||
else:
|
||||
buffering = 0
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', buffering)
|
||||
tee = subprocess.Popen(["tee", outfile], stdin=subprocess.PIPE)
|
||||
os.dup2(tee.stdin.fileno(), sys.stdout.fileno())
|
||||
os.dup2(tee.stdin.fileno(), sys.stderr.fileno())
|
||||
else:
|
||||
if filename.endswith('.pyc'):
|
||||
outfile = os.path.join(out_base, filename[0:-1])
|
||||
current_outfile = os.path.join(out_base, filename[0:-1])
|
||||
else:
|
||||
outfile = os.path.join(out_base, filename) + '_dis'
|
||||
outstream = _get_outstream(outfile)
|
||||
# print(outfile, file=sys.stderr)
|
||||
current_outfile = os.path.join(out_base, filename) + '_dis'
|
||||
outstream = _get_outstream(current_outfile)
|
||||
# print(current_outfile, file=sys.stderr)
|
||||
|
||||
# Try to uncompile the input file
|
||||
try:
|
||||
decompile_file(infile, outstream, showasm, showast, showgrammar)
|
||||
tot_files += 1
|
||||
except (ValueError, SyntaxError, ParserError, pysource.SourceWalkerError) as e:
|
||||
except (ValueError, SyntaxError, ParserError, pysource.SourceWalkerError):
|
||||
sys.stdout.write("\n")
|
||||
sys.stderr.write("\n# file %s\n# %s\n" % (infile, e))
|
||||
sys.stderr.write("# file %s\n" % (infile))
|
||||
failed_files += 1
|
||||
except KeyboardInterrupt:
|
||||
if outfile:
|
||||
@@ -157,16 +165,16 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
raise
|
||||
# except:
|
||||
# failed_files += 1
|
||||
# if outfile:
|
||||
# if current_outfile:
|
||||
# outstream.close()
|
||||
# os.rename(outfile, outfile + '_failed')
|
||||
# os.rename(current_outfile, current_outfile + '_failed')
|
||||
# else:
|
||||
# sys.stderr.write("\n# %s" % sys.exc_info()[1])
|
||||
# sys.stderr.write("\n# Can't uncompile %s\n" % infile)
|
||||
else: # uncompile successful
|
||||
if outfile:
|
||||
if current_outfile:
|
||||
if do_linemaps:
|
||||
mapping = line_number_mapping(infile, outfile)
|
||||
mapping = line_number_mapping(infile, current_outfile)
|
||||
outstream.write("\n\n## Line number correspondences\n")
|
||||
import pprint
|
||||
s = pprint.pformat(mapping, indent=2, width=80)
|
||||
@@ -177,39 +185,43 @@ def main(in_base, out_base, files, codes, outfile=None,
|
||||
if do_verify:
|
||||
weak_verify = do_verify == 'weak'
|
||||
try:
|
||||
msg = verify.compare_code_with_srcfile(infile, outfile, weak_verify=weak_verify)
|
||||
if not outfile:
|
||||
msg = verify.compare_code_with_srcfile(infile, current_outfile, weak_verify=weak_verify)
|
||||
if not current_outfile:
|
||||
if not msg:
|
||||
print('\n# okay decompiling %s' % infile)
|
||||
print '\n# okay decompiling %s' % infile
|
||||
okay_files += 1
|
||||
else:
|
||||
print('\n# %s\n\t%s', infile, msg)
|
||||
except verify.VerifyCmpError as e:
|
||||
print '\n# %s\n\t%s', infile, msg
|
||||
except verify.VerifyCmpError, e:
|
||||
print(e)
|
||||
verify_failed_files += 1
|
||||
os.rename(outfile, outfile + '_unverified')
|
||||
os.rename(current_outfile, current_outfile + '_unverified')
|
||||
sys.stderr.write("### Error Verifying %s\n" % filename)
|
||||
sys.stderr.write(str(e) + "\n")
|
||||
if not outfile:
|
||||
sys.stderr.write("### Error Verifying %s" %
|
||||
filename)
|
||||
sys.stderr.write(e)
|
||||
if raise_on_error:
|
||||
raise
|
||||
pass
|
||||
pass
|
||||
pass
|
||||
elif do_verify:
|
||||
sys.stderr.write("\n### uncompile successful, but no file to compare against\n")
|
||||
sys.stderr.write("\n### uncompile successful, "
|
||||
"but no file to compare against")
|
||||
pass
|
||||
else:
|
||||
okay_files += 1
|
||||
if not outfile:
|
||||
if not current_outfile:
|
||||
mess = '\n# okay decompiling'
|
||||
# mem_usage = __memUsage()
|
||||
print(mess, infile)
|
||||
if outfile:
|
||||
if current_outfile:
|
||||
sys.stdout.write("%s\r" %
|
||||
status_msg(do_verify, tot_files, okay_files, failed_files, verify_failed_files))
|
||||
sys.stdout.flush()
|
||||
if outfile:
|
||||
if current_outfile:
|
||||
sys.stdout.write("\n")
|
||||
sys.stdout.flush()
|
||||
return (tot_files, okay_files, failed_files, verify_failed_files)
|
||||
|
@@ -6,8 +6,6 @@
|
||||
Common uncompyle parser routines.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
from xdis.code import iscode
|
||||
@@ -30,13 +28,19 @@ class PythonParser(GenericASTBuilder):
|
||||
|
||||
def __init__(self, AST, start, debug):
|
||||
super(PythonParser, self).__init__(AST, start, debug)
|
||||
self.collect = frozenset(
|
||||
['stmts', 'except_stmts', '_stmts',
|
||||
'exprlist', 'kvlist', 'kwargs', 'come_froms',
|
||||
# Python < 3
|
||||
'print_items',
|
||||
# PyPy:
|
||||
'kvlist_n'])
|
||||
self.collect = [
|
||||
'stmts', 'except_stmts', '_stmts', 'load_attrs',
|
||||
'exprlist', 'kvlist', 'kwargs', 'come_froms', '_come_from',
|
||||
# Python < 3
|
||||
'print_items',
|
||||
# PyPy:
|
||||
'kvlist_n']
|
||||
|
||||
def ast_first_offset(self, ast):
|
||||
if hasattr(ast, 'offset'):
|
||||
return ast.offset
|
||||
else:
|
||||
return self.ast_first_offset(ast[0])
|
||||
|
||||
def add_unique_rule(self, rule, opname, count, customize):
|
||||
"""Add rule to grammar, but only if it hasn't been added previously
|
||||
@@ -86,7 +90,10 @@ class PythonParser(GenericASTBuilder):
|
||||
def fix(c):
|
||||
s = str(c)
|
||||
i = s.find('_')
|
||||
return s if i == -1 else s[:i]
|
||||
if i == -1:
|
||||
return s
|
||||
else:
|
||||
return s[:i]
|
||||
|
||||
prefix = ''
|
||||
if parent and tokens:
|
||||
@@ -117,7 +124,10 @@ class PythonParser(GenericASTBuilder):
|
||||
err_token = instructions[index]
|
||||
print("Instruction context:")
|
||||
for i in range(start, finish):
|
||||
indent = ' ' if i != index else '-> '
|
||||
if i != index:
|
||||
indent = ' '
|
||||
else:
|
||||
indent = '-> '
|
||||
print("%s%s" % (indent, instructions[i]))
|
||||
raise ParserError(err_token, err_token.offset)
|
||||
|
||||
@@ -395,8 +405,7 @@ class PythonParser(GenericASTBuilder):
|
||||
import_cont ::= LOAD_CONST LOAD_CONST import_as_cont
|
||||
import_as_cont ::= IMPORT_FROM designator
|
||||
|
||||
load_attrs ::= LOAD_ATTR
|
||||
load_attrs ::= load_attrs LOAD_ATTR
|
||||
load_attrs ::= LOAD_ATTR+
|
||||
"""
|
||||
|
||||
def p_list_comprehension(self, args):
|
||||
@@ -503,8 +512,12 @@ class PythonParser(GenericASTBuilder):
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
expr ::= conditionalTrue
|
||||
conditionalTrue ::= expr JUMP_FORWARD expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
|
@@ -12,8 +12,6 @@ If we succeed in creating a parse tree, then we have a Python program
|
||||
that a later phase can turn into a sequence of ASCII text.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
@@ -44,7 +42,8 @@ class Python2Parser(PythonParser):
|
||||
while1stmt ::= SETUP_LOOP l_stmts JUMP_BACK COME_FROM
|
||||
while1stmt ::= SETUP_LOOP l_stmts JUMP_BACK POP_BLOCK COME_FROM
|
||||
|
||||
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK else_suite COME_FROM
|
||||
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK POP_BLOCK else_suite COME_FROM
|
||||
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK else_suite COME_FROM
|
||||
|
||||
exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
|
||||
exec_stmt ::= expr exprlist EXEC_STMT
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 Rocky Bernstein
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <hartmut@goebel.noris.de>
|
||||
# Copyright (c) 1999 John Aycock
|
||||
|
||||
@@ -14,6 +14,17 @@ class Python23Parser(Python24Parser):
|
||||
|
||||
def p_misc23(self, args):
|
||||
'''
|
||||
# Python 2.4 only adds something like the below for if 1:
|
||||
# However we will just treat it as a noop (which of course messes up
|
||||
# simple verify of bytecode.
|
||||
# See also below in reduce_is_invalid where we check that the JUMP_FORWARD
|
||||
# target matches the COME_FROM target
|
||||
stmt ::= if1_stmt
|
||||
if1_stmt ::= JUMP_FORWARD JUMP_IF_FALSE THEN POP_TOP COME_FROM
|
||||
stmts
|
||||
JUMP_FORWARD COME_FROM POP_TOP COME_FROM
|
||||
|
||||
|
||||
# Used to keep semantic positions the same across later versions
|
||||
# of Python
|
||||
_while1test ::= SETUP_LOOP JUMP_FORWARD JUMP_IF_FALSE POP_TOP COME_FROM
|
||||
@@ -33,6 +44,23 @@ class Python23Parser(Python24Parser):
|
||||
lc_body ::= LOAD_FAST expr LIST_APPEND
|
||||
'''
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python23Parser, self).add_custom_rules(tokens, customize)
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python24Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
|
||||
# FIXME: this code never gets called...
|
||||
lhs = rule[0]
|
||||
if lhs == 'nop_stmt':
|
||||
return not int(tokens[first].pattr) == tokens[last].offset
|
||||
|
||||
return False
|
||||
|
||||
class Python23ParserSingle(Python23Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
@@ -14,6 +14,15 @@ class Python24Parser(Python25Parser):
|
||||
|
||||
def p_misc24(self, args):
|
||||
'''
|
||||
# Python 2.4 only adds something like the below for if 1:
|
||||
# However we will just treat it as a noop (which of course messes up
|
||||
# simple verify of bytecode.
|
||||
# See also below in reduce_is_invalid where we check that the JUMP_FORWARD
|
||||
# target matches the COME_FROM target
|
||||
stmt ::= nop_stmt
|
||||
nop_stmt ::= JUMP_FORWARD POP_TOP COME_FROM
|
||||
|
||||
|
||||
# 2.5+ has two LOAD_CONSTs, one for the number '.'s in a relative import
|
||||
# keep positions similar to simplify semantic actions
|
||||
|
||||
@@ -37,6 +46,25 @@ class Python24Parser(Python25Parser):
|
||||
gen_comp_body ::= expr YIELD_VALUE
|
||||
'''
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python24Parser, self).add_custom_rules(tokens, customize)
|
||||
if self.version == 2.4:
|
||||
self.check_reduce['nop_stmt'] = 'tokens'
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python24Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
|
||||
# FIXME: this code never gets called...
|
||||
lhs = rule[0]
|
||||
if lhs == 'nop_stmt':
|
||||
return not int(tokens[first].pattr) == tokens[last].offset
|
||||
|
||||
return False
|
||||
|
||||
class Python24ParserSingle(Python24Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
@@ -157,7 +157,6 @@ class Python26Parser(Python2Parser):
|
||||
# Semantic actions want the else to be at position 3
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr_then c_stmts_opt jf_cf_pop else_suite come_froms
|
||||
ifelsestmt ::= testexpr c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
ifelsestmt ::= testexpr_then c_stmts_opt filler else_suitel come_froms POP_TOP
|
||||
|
||||
# Semantic actions want else_suitel to be at index 3
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 Rocky Bernstein
|
||||
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <hartmut@goebel.noris.de>
|
||||
|
||||
@@ -52,6 +52,8 @@ class Python27Parser(Python2Parser):
|
||||
come_froms ::= come_froms COME_FROM
|
||||
come_froms ::= COME_FROM
|
||||
|
||||
iflaststmtl ::= testexpr c_stmts_opt
|
||||
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_FORWARD come_froms
|
||||
bp_come_from ::= POP_BLOCK COME_FROM
|
||||
|
||||
@@ -96,6 +98,27 @@ class Python27Parser(Python2Parser):
|
||||
while1stmt ::= SETUP_LOOP return_stmts COME_FROM
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python27Parser, self).add_custom_rules(tokens, customize)
|
||||
self.check_reduce['and'] = 'AST'
|
||||
return
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python27Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
|
||||
# Test that jmp_false jumps to the end of "and"
|
||||
# or that it jumps to the same place as the end of "and"
|
||||
jmp_false = ast[1][0]
|
||||
jmp_target = jmp_false.offset + jmp_false.attr + 3
|
||||
return not (jmp_target == tokens[last].offset or
|
||||
tokens[last].pattr == jmp_false.pattr)
|
||||
return False
|
||||
|
||||
|
||||
class Python27ParserSingle(Python27Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2015, 2016 Rocky Bernstein
|
||||
# Copyright (c) 2015-2017 Rocky Bernstein
|
||||
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
# Copyright (c) 1999 John Aycock
|
||||
@@ -15,8 +15,6 @@ If we succeed in creating a parse tree, then we have a Python program
|
||||
that a later phase can turn into a sequence of ASCII text.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
@@ -149,23 +147,25 @@ class Python3Parser(PythonParser):
|
||||
iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK COME_FROM_LOOP
|
||||
|
||||
# These are used to keep AST indices the same
|
||||
jf_else ::= JUMP_FORWARD ELSE
|
||||
ja_else ::= JUMP_ABSOLUTE ELSE
|
||||
jump_forward_else ::= JUMP_FORWARD ELSE
|
||||
jump_absolute_else ::= JUMP_ABSOLUTE ELSE
|
||||
|
||||
# Note: in if/else kinds of statements, we err on the side
|
||||
# of missing "else" clauses. Therefore we include grammar
|
||||
# rules with and without ELSE.
|
||||
|
||||
ifelsestmt ::= testexpr c_stmts_opt JUMP_FORWARD else_suite COME_FROM
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_else else_suite _come_from
|
||||
ifelsestmt ::= testexpr c_stmts_opt JUMP_FORWARD else_suite opt_come_from_except
|
||||
ifelsestmt ::= testexpr c_stmts_opt jump_forward_else else_suite _come_from
|
||||
|
||||
ifelsestmtc ::= testexpr c_stmts_opt JUMP_ABSOLUTE else_suitec
|
||||
ifelsestmtc ::= testexpr c_stmts_opt ja_else else_suitec
|
||||
ifelsestmtc ::= testexpr c_stmts_opt jump_absolute_else else_suitec
|
||||
|
||||
ifelsestmtr ::= testexpr return_if_stmts return_stmts
|
||||
|
||||
ifelsestmtl ::= testexpr c_stmts_opt JUMP_BACK else_suitel
|
||||
ifelsestmtl ::= testexpr c_stmts_opt COME_FROM JUMP_BACK else_suitel
|
||||
ifelsestmtl ::= testexpr c_stmts_opt cf_jump_back else_suitel
|
||||
|
||||
cf_jump_back ::= COME_FROM JUMP_BACK
|
||||
|
||||
# FIXME: this feels like a hack. Is it just 1 or two
|
||||
# COME_FROMs? the parsed tree for this and even with just the
|
||||
@@ -180,14 +180,17 @@ class Python3Parser(PythonParser):
|
||||
POP_BLOCK LOAD_CONST
|
||||
come_from_or_finally suite_stmts_opt END_FINALLY
|
||||
|
||||
tryelsestmt ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
try_middle else_suite come_from_except_clauses
|
||||
|
||||
tryelsestmt ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
try_middle else_suite come_froms
|
||||
|
||||
tryelsestmtc ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
try_middle else_suitec COME_FROM
|
||||
try_middle else_suitec come_from_except_clauses
|
||||
|
||||
tryelsestmtl ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
try_middle else_suitel COME_FROM
|
||||
try_middle else_suitel come_from_except_clauses
|
||||
|
||||
try_middle ::= jmp_abs COME_FROM except_stmts
|
||||
END_FINALLY
|
||||
@@ -254,7 +257,10 @@ class Python3Parser(PythonParser):
|
||||
|
||||
def p_misc3(self, args):
|
||||
"""
|
||||
try_middle ::= JUMP_FORWARD COME_FROM_EXCEPT except_stmts END_FINALLY COME_FROM
|
||||
try_middle ::= JUMP_FORWARD COME_FROM_EXCEPT except_stmts
|
||||
END_FINALLY COME_FROM
|
||||
try_middle ::= JUMP_FORWARD COME_FROM_EXCEPT except_stmts
|
||||
END_FINALLY COME_FROM_EXCEPT_CLAUSE
|
||||
|
||||
for_block ::= l_stmts_opt opt_come_from_loop JUMP_BACK
|
||||
for_block ::= l_stmts
|
||||
@@ -269,12 +275,14 @@ class Python3Parser(PythonParser):
|
||||
stmt ::= funcdef_annotate
|
||||
funcdef_annotate ::= mkfunc_annotate designator
|
||||
|
||||
mkfuncdeco0 ::= mkfunc_annotate
|
||||
|
||||
# This has the annotation value.
|
||||
# LOAD_NAME is used in an annotation type like
|
||||
# int, float, str
|
||||
annotate_arg ::= LOAD_NAME
|
||||
# LOAD_CONST is used in an annotation string
|
||||
annotate_arg ::= LOAD_CONST
|
||||
annotate_arg ::= expr
|
||||
|
||||
# This stores the tuple of parameter names
|
||||
# that have been annotated
|
||||
@@ -285,10 +293,13 @@ class Python3Parser(PythonParser):
|
||||
"""
|
||||
opt_come_from_except ::= COME_FROM_EXCEPT
|
||||
opt_come_from_except ::= come_froms
|
||||
opt_come_from_except ::= come_from_except_clauses
|
||||
|
||||
come_froms ::= come_froms COME_FROM
|
||||
come_froms ::=
|
||||
come_froms ::= COME_FROM*
|
||||
|
||||
come_from_except_clauses ::= COME_FROM_EXCEPT_CLAUSE+
|
||||
|
||||
opt_come_from_loop ::= opt_come_from_loop COME_FROM_LOOP
|
||||
opt_come_from_loop ::= opt_come_from_loop COME_FROM_LOOP
|
||||
opt_come_from_loop ::=
|
||||
|
||||
@@ -321,7 +332,9 @@ class Python3Parser(PythonParser):
|
||||
|
||||
def p_stmt3(self, args):
|
||||
"""
|
||||
stmt ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST
|
||||
stmt ::= return_closure
|
||||
return_closure ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST
|
||||
|
||||
stmt ::= whileTruestmt
|
||||
ifelsestmt ::= testexpr c_stmts_opt JUMP_FORWARD else_suite _come_from
|
||||
"""
|
||||
@@ -372,6 +385,8 @@ class Python3Parser(PythonParser):
|
||||
|
||||
while1stmt ::= SETUP_LOOP l_stmts COME_FROM JUMP_BACK COME_FROM_LOOP
|
||||
|
||||
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK
|
||||
else_suite COME_FROM_LOOP
|
||||
|
||||
# FIXME: investigate - can code really produce a NOP?
|
||||
whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK NOP
|
||||
@@ -393,9 +408,12 @@ class Python3Parser(PythonParser):
|
||||
|
||||
def p_expr3(self, args):
|
||||
"""
|
||||
conditional ::= expr jmp_false expr jf_else expr COME_FROM
|
||||
conditionalnot ::= expr jmp_true expr jf_else expr COME_FROM
|
||||
conditional ::= expr jmp_false expr jump_forward_else expr COME_FROM
|
||||
conditionalnot ::= expr jmp_true expr jump_forward_else expr COME_FROM
|
||||
|
||||
# a JUMP_FORWARD to another JUMP_FORWARD can get turned into
|
||||
# a JUMP_ABSOLUTE with no COME_FROM
|
||||
conditional ::= expr jmp_false expr jump_absolute_else expr
|
||||
|
||||
expr ::= LOAD_CLASSNAME
|
||||
|
||||
@@ -453,9 +471,9 @@ class Python3Parser(PythonParser):
|
||||
def custom_classfunc_rule(self, opname, token, customize):
|
||||
"""
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_n
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_VAR_n POP_TOP
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_VAR_KW_n POP_TOP
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_KW_n POP_TOP
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_VAR_n
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_VAR_KW_n
|
||||
call_function ::= expr {expr}^n CALL_FUNCTION_KW_n
|
||||
|
||||
classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc {expr}^n-1 CALL_FUNCTION_n
|
||||
"""
|
||||
@@ -463,31 +481,64 @@ class Python3Parser(PythonParser):
|
||||
# high byte number of positional parameters
|
||||
args_pos = token.attr & 0xff
|
||||
args_kw = (token.attr >> 8) & 0xff
|
||||
args_kw = (token.attr >> 8) & 0xff
|
||||
# args_ann = (token.attr >> 16) & 0x7FFF
|
||||
|
||||
# Additional exprs for * and ** args:
# 0 if neither
# 1 for CALL_FUNCTION_VAR or CALL_FUNCTION_KW
# 2 for * and ** args (CALL_FUNCTION_VAR_KW).
# Yes, this computation based on instruction name is a little bit hokey.
nak = ( len(opname)-len('CALL_FUNCTION') ) // 3
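Spelled out for the opcode names that actually occur here (ordinary Python, just a check of the arithmetic above, not part of the patch):

for name in ('CALL_FUNCTION', 'CALL_FUNCTION_KW',
             'CALL_FUNCTION_VAR', 'CALL_FUNCTION_VAR_KW'):
    print(name, (len(name) - len('CALL_FUNCTION')) // 3)
# CALL_FUNCTION 0, CALL_FUNCTION_KW 1, CALL_FUNCTION_VAR 1, CALL_FUNCTION_VAR_KW 2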
|
||||
token.type = self.call_fn_name(token)
|
||||
rule = ('call_function ::= expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type)
|
||||
self.add_unique_rule(rule, token.type, args_pos, customize)
|
||||
uniq_param = args_kw + args_pos
|
||||
if self.version == 3.5 and opname.startswith('CALL_FUNCTION_VAR'):
|
||||
# Python 3.5 changes the stack position of *args. KW args come
|
||||
# after *args.
|
||||
# Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
|
||||
if opname.endswith('KW'):
|
||||
kw = 'expr '
|
||||
else:
|
||||
kw = ''
|
||||
rule = ('call_function ::= expr expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) + kw + token.type)
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
if self.version >= 3.6 and opname == 'CALL_FUNCTION_EX_KW':
|
||||
rule = ('call_function36 ::= '
|
||||
'expr build_tuple_unpack_with_call build_map_unpack_with_call '
|
||||
'CALL_FUNCTION_EX_KW_1')
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
rule = 'call_function ::= call_function36'
|
||||
else:
|
||||
rule = ('call_function ::= expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type)
|
||||
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
if self.version >= 3.5:
|
||||
rule = ('async_call_function ::= expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type +
|
||||
' GET_AWAITABLE LOAD_CONST YIELD_FROM')
|
||||
self.add_unique_rule(rule, token.type, args_pos, customize)
|
||||
self.add_unique_rule('expr ::= async_call_function', token.type, args_pos, customize)
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
self.add_unique_rule('expr ::= async_call_function', token.type, uniq_param, customize)
|
||||
|
||||
rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d'
|
||||
% (('expr ' * (args_pos-1)), opname, args_pos))
|
||||
self.add_unique_rule(rule, token.type, args_pos, customize)
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
|
||||
def add_make_function_rule(self, rule, opname, attr, customize):
"""Python 3.3 added an additional LOAD_CONST before MAKE_FUNCTION and
this has an effect on many rules.
"""
new_rule = rule % (('LOAD_CONST ') * (1 if self.version >= 3.3 else 0))
if self.version >= 3.3:
new_rule = rule % (('LOAD_CONST ') * 1)
else:
new_rule = rule % (('LOAD_CONST ') * 0)
self.add_unique_rule(new_rule, opname, attr, customize)
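The extra LOAD_CONST that docstring refers to is the qualified-name constant CPython 3.3+ pushes next to the code object. A quick way to confirm it on your own interpreter (output shape is approximate and varies by version):

import dis

dis.dis(compile("def f(): pass", "<example>", "exec"))
# CPython 3.0-3.2: LOAD_CONST <code f>,                 MAKE_FUNCTION 0, STORE_NAME f
# CPython 3.3+:    LOAD_CONST <code f>, LOAD_CONST 'f', MAKE_FUNCTION 0, STORE_NAME f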
def add_custom_rules(self, tokens, customize):
|
||||
@@ -566,8 +617,9 @@ class Python3Parser(PythonParser):
|
||||
assign2_pypy ::= expr expr designator designator
|
||||
""", nop_func)
|
||||
continue
|
||||
elif opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
|
||||
'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
|
||||
elif (opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
|
||||
'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_EX_KW')
|
||||
or opname.startswith('CALL_FUNCTION_KW')):
|
||||
self.custom_classfunc_rule(opname, token, customize)
|
||||
elif opname == 'LOAD_DICTCOMP':
|
||||
rule_pat = ("dictcomp ::= LOAD_DICTCOMP %sMAKE_FUNCTION_0 expr "
|
||||
@@ -580,6 +632,26 @@ class Python3Parser(PythonParser):
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
elif opname == 'LOAD_BUILD_CLASS':
|
||||
self.custom_build_class_rule(opname, i, token, tokens, customize)
|
||||
elif opname.startswith('BUILD_LIST_UNPACK'):
|
||||
v = token.attr
|
||||
rule = ('build_list_unpack ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32) % 32) +
|
||||
'expr ' * (v % 32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = 'expr ::= build_list_unpack'
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname.startswith('BUILD_TUPLE_UNPACK_WITH_CALL'):
|
||||
v = token.attr
|
||||
rule = ('build_tuple_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32) % 32) +
|
||||
'expr ' * (v % 32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname.startswith('BUILD_MAP_UNPACK_WITH_CALL'):
|
||||
v = token.attr
|
||||
rule = ('build_map_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32) % 32) +
|
||||
'expr ' * (v % 32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
v = token.attr
|
||||
rule = ('build_list ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
@@ -589,13 +661,17 @@ class Python3Parser(PythonParser):
|
||||
if opname_base == 'BUILD_TUPLE':
|
||||
rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
|
||||
rule = ('build_tuple ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32) % 32) +
|
||||
'expr ' * (v % 32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname == 'LOOKUP_METHOD':
|
||||
# A PyPy speciality - DRY with parse2
|
||||
self.add_unique_rule("load_attr ::= expr LOOKUP_METHOD",
|
||||
opname, token.attr, customize)
|
||||
continue
|
||||
elif opname == 'JUMP_IF_NOT_DEBUG':
|
||||
v = token.attr
|
||||
self.add_unique_rule(
|
||||
"stmt ::= assert_pypy", opname, v, customize)
|
||||
self.add_unique_rule(
|
||||
@@ -625,10 +701,20 @@ class Python3Parser(PythonParser):
|
||||
rule = "mapexpr ::= BUILD_MAP_n kvlist_n"
|
||||
elif self.version >= 3.5:
|
||||
if opname != 'BUILD_MAP_WITH_CALL':
|
||||
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = "mapexpr ::= %s %s" % (kvlist_n, opname)
|
||||
|
||||
if opname == 'BUILD_MAP_UNPACK':
|
||||
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = 'dict ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = 'mapexpr ::= ' + 'dict ' * token.attr
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = ('unmap_dict ::= ' +
|
||||
('mapexpr ' * token.attr) +
|
||||
'BUILD_MAP_UNPACK')
|
||||
else:
|
||||
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = "mapexpr ::= %s %s" % (kvlist_n, opname)
|
||||
else:
|
||||
rule = kvlist_n + ' ::= ' + 'expr expr STORE_MAP ' * token.attr
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
@@ -650,12 +736,38 @@ class Python3Parser(PythonParser):
|
||||
rule = 'unpack_list ::= ' + opname + ' designator' * token.attr
|
||||
elif opname_base.startswith('MAKE_FUNCTION'):
|
||||
# DRY with MAKE_CLOSURE
|
||||
args_pos, args_kw, annotate_args = token.attr
|
||||
if self.version >= 3.6:
|
||||
# The semantics of MAKE_FUNCTION in 3.6 are totally different from
|
||||
# before.
|
||||
args_pos, args_kw, annotate_args, closure = token.attr
|
||||
stack_count = args_pos + args_kw + annotate_args
|
||||
rule = ('mkfunc ::= %s%s%s%s' %
|
||||
('expr ' * stack_count,
|
||||
'load_closure ' * closure,
|
||||
'LOAD_CONST ' * 2,
|
||||
opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule_pat = ('mklambda ::= %s%sLOAD_LAMBDA %%s%s' %
|
||||
(('pos_arg '* args_pos),
|
||||
('kwarg '* args_kw),
|
||||
opname))
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
rule_pat = ("listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
|
||||
"GET_ITER CALL_FUNCTION_1" % ('expr ' * args_pos, opname))
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
continue
|
||||
if self.version < 3.6:
|
||||
args_pos, args_kw, annotate_args = token.attr
|
||||
else:
|
||||
args_pos, args_kw, annotate_args, closure = token.attr
|
||||
|
||||
rule_pat = ("genexpr ::= %sload_genexpr %%s%s expr "
|
||||
"GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
rule_pat = ('mklambda ::= %sLOAD_LAMBDA %%s%s' % ('pos_arg '* args_pos, opname))
|
||||
rule_pat = ('mklambda ::= %s%sLOAD_LAMBDA %%s%s' %
|
||||
(('pos_arg '* args_pos),
|
||||
('kwarg '* args_kw),
|
||||
opname))
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
rule_pat = ("listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
|
||||
"GET_ITER CALL_FUNCTION_1" % ('expr ' * args_pos, opname))
|
||||
@@ -676,10 +788,18 @@ class Python3Parser(PythonParser):
|
||||
('pos_arg ' * args_pos, opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if opname.startswith('MAKE_FUNCTION_A'):
|
||||
# rule = ('mkfunc2 ::= %s%sEXTENDED_ARG %s' %
|
||||
# ('pos_arg ' * (args_pos), 'kwargs ' * (annotate_args-1), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if self.version >= 3.6:
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST LOAD_CONST %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('call_function ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST LOAD_CONST %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
if self.version >= 3.3:
|
||||
# Normally we remove EXTENDED_ARG from the opcodes, but in the case of
|
||||
# annotated functions can use the EXTENDED_ARG tuple to signal we have an annotated function.
|
||||
# Yes this is a little hacky
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('call_function ' * (annotate_args-1)), opname))
|
||||
@@ -688,6 +808,7 @@ class Python3Parser(PythonParser):
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
else:
|
||||
# See above comment about use of EXTENDED_ARG
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
@@ -769,7 +890,8 @@ class Python3Parser(PythonParser):
|
||||
elif lhs == 'annotate_tuple':
|
||||
return not isinstance(tokens[first].attr, tuple)
|
||||
elif lhs == 'kwarg':
|
||||
return not isinstance(tokens[first].attr, str)
|
||||
return not (isinstance(tokens[first].attr, unicode) or
|
||||
isinstance(tokens[first].attr, str))
|
||||
elif lhs == 'while1elsestmt':
|
||||
# if SETUP_LOOP target spans the else part, then this is
|
||||
# not while1else. Also do for whileTrue?
|
||||
Some files were not shown because too many files have changed in this diff.