Compare commits


137 Commits

Author SHA1 Message Date
rocky
e56088b566 Need parens in unpack in 2.4ish 2019-07-03 20:04:45 -04:00
rocky
40d2ef3071 Merge branch 'master' into python-2.4 2019-07-03 19:38:47 -04:00
rocky
e39a902e56 Get ready for release 3.3.5 2019-07-03 19:37:29 -04:00
rocky
e2914ed552 More excpet_cond futzing 2019-07-03 19:26:36 -04:00
rocky
5afa14a945 More except_cond futzing 2019-07-03 19:23:23 -04:00
rocky
4f5ad533c3 Reinstate except_cond{2,3} rules 2019-07-03 19:21:39 -04:00
rocky
f425db33b7 except_cond3 needs to be in 2.x 2019-07-03 19:16:09 -04:00
rocky
7f7487206a Reinstate except_cond{2,3} rules 2019-07-03 18:59:24 -04:00
rocky
82d8e0cd47 master merge hell 2019-07-03 18:36:53 -04:00
rocky
1c21e1c9d2 Merge branch 'master' into python-2.4 2019-07-03 18:35:11 -04:00
rocky
68c5b2338f Clearer 3.3 "yeild_from" semantic handling 2019-07-01 12:54:38 -04:00
rocky
e55a0410c9 weak-verify -> syntax-verify. More bytecode tests 2019-07-01 10:23:43 -04:00
R. Bernstein
0fe8961418 Merge pull request #269 from rocky/if-elif-else-more
If elif else more
2019-07-01 09:52:11 -04:00
R. Bernstein
8cd331a32b Merge pull request #273 from rocky/py3-annotation-args
Fix handling py3 annotation args + defparam comma issue
2019-06-30 18:54:58 -04:00
rocky
4c76931807 Update tests related to branch 2019-06-30 18:20:40 -04:00
x0ret
7b7f794913 Fix handling py3 annotation args + defparam comma issue 2019-07-01 01:28:32 +04:30
rocky
50e46531ce Adjust 3.x grammar rules to include annotate args 2019-06-29 23:33:21 -04:00
rocky
cd2072b8e3 Merge branch 'master' into python-2.4 2019-06-29 15:57:22 -04:00
R. Bernstein
67ef34977f Merge pull request #270 from rocky/py3-star-args
Fix issue in 3.x star args function signatures
2019-06-29 15:57:02 -04:00
rocky
32c7b8f23d Add tests for x0ret's recent varrg fixes 2019-06-29 15:50:47 -04:00
x0ret
2f06d1eeb0 Fix issue in 3.x star args function signatures 2019-06-29 21:53:02 +04:30
rocky
999f1fb0f9 Mostly x0ret's while(1)/if fixes ..
plus a potential test
2019-06-29 07:01:45 -04:00
x0ret
76eef9a149 Handle if elif else case for 3.5 2019-06-29 06:57:23 -04:00
rocky
c8b945fb56 Handling if elif else more 2019-06-29 06:57:23 -04:00
rocky
a1e7c16dbe Fix bugs introduced by last commit 2019-06-29 06:19:02 -04:00
rocky
35f14e4357 Small assert message change 2019-06-29 05:08:23 -04:00
rocky
49d1a50354 Merge branch 'master' of github.com:rocky/python-uncompyle6 2019-06-29 04:59:46 -04:00
rocky
0dc19a8fdd Correct 3.4 "yield from" semantic action bug 2019-06-29 04:59:03 -04:00
R. Bernstein
f6aa8b2baf Merge pull request #264 from rocky/ifelif-27
if/elif for 2.5-2.7
2019-06-24 05:50:53 -04:00
rocky
887a006849 if/elif for 2.5-2.7
Specifically simple_source/03_if_elif.py
2019-06-23 21:29:15 -04:00
rocky
e26c7407a0 Small changes to document some of the complexity. 2019-06-23 20:00:00 -04:00
rocky
18bb1bc9e3 Fix Python 2.xisms 2019-06-23 18:15:53 -04:00
rocky
c0e8ce22af Merge branch 'master' into python-2.4 2019-06-23 17:51:21 -04:00
R. Bernstein
69823af553 Merge pull request #262 from rocky/ifelif
reinstate some elif's
2019-06-23 17:27:57 -04:00
x0ret
e96498eaf0 Adjust ifelsestmtr grammer 2019-06-24 01:28:33 +04:30
rocky
9d6d6a355d Start to reinstate elif's 2019-06-21 07:13:05 -04:00
R. Bernstein
04c53c1086 Merge pull request #261 from rocky/load-code
LOAD_CONST -> LOAD_CODE where appropriate
2019-06-21 06:34:43 -04:00
rocky
96866f94a7 Adjust grammar checker to ignore LOAD_CODE 2019-06-19 15:54:16 -04:00
rocky
d371839c99 A few more LOAD_CONST->LOAD_CODE 2019-06-19 15:38:58 -04:00
rocky
24afe072b7 LOAD_CONST -> LOAD_CODE where appropriate 2019-06-19 14:43:07 -04:00
rocky
e2d7f01298 Handle 2-arg asserts in 3.6+ish
Changed files have also been reformatted via the blacken formatter
2019-06-18 22:09:16 -04:00
rocky
72a95e7cce Add back in validate. 2019-06-17 02:00:55 -04:00
rocky
b39112b601 One more deparse_code removal 2019-06-16 22:30:56 -04:00
rocky
3983aa1b92 One more deparse_code removal 2019-06-16 22:30:05 -04:00
rocky
8d85e78960 Merge branch 'master' into python-2.4 2019-06-16 22:00:33 -04:00
rocky
20b513fc81 Merge branch 'master' of github.com:rocky/python-uncompyle6 2019-06-16 21:58:23 -04:00
rocky
d369017122 remove deprecated deparse_code 2019-06-16 21:57:56 -04:00
rocky
d3eca29934 Merge branch 'master' into python-2.4 2019-06-15 10:09:44 -04:00
rocky
6675ea2cd0 Control flow yet again 2019-06-15 10:09:13 -04:00
rocky
4b82806d6c Flow control bites again.
See related appveyor https://ci.appveyor.com/project/rocky/python-decompile3/builds/25301153/job/x0we0dpgb3apgk1v
2019-06-15 07:18:30 -04:00
rocky
f3b72884c6 Merge hell? 2019-06-12 13:09:22 -04:00
rocky
504164fcea Merge branch 'master' into python-2.4 2019-06-12 13:08:30 -04:00
rocky
3c06b82931 Get ready for release 3.3.4 2019-06-12 12:01:31 -04:00
R. Bernstein
c680416f92 Merge pull request #255 from rocky/3.6-store_annotation
Add 3.6 STORE_ANNOTATION
2019-06-12 10:56:27 -04:00
rocky
58c8fe5a66 Oops - forgot to add the test source 2019-06-11 16:09:04 -04:00
rocky
aea1adeb85 Reinstate test 2019-06-11 16:04:29 -04:00
x0ret
c871a4ecc5 Fix subscript in store_annotation + indentation 2019-06-12 00:26:34 +04:30
rocky
cd9eca7bff Formatting change slighty 2019-06-11 14:14:45 -04:00
rocky
002720988c Formatting in < 3.0 is different for name ops 2019-06-11 14:08:50 -04:00
rocky
08f23567a6 Nicer assembly display...
Fewer extraneous quotes and remove pattrs that don't mean anything.
Base more on OP poperties like varargs and NAME_OPS
2019-06-11 12:44:29 -04:00
rocky
43348d7d24 CI testing take 3
This time, for sure!
2019-06-11 11:19:34 -04:00
rocky
164e9d4b5c CI testing take 2 2019-06-11 11:16:45 -04:00
rocky
37e4754268 Fix Improper semantic action format 2019-06-11 11:10:53 -04:00
rocky
c3257a9b79 CI testing - remove Python 2.6 testing and add 3.7 2019-06-11 11:05:50 -04:00
rocky
70b0704967 CI - remove 2.6 testing, add 3.7 testing 2019-06-11 11:03:43 -04:00
rocky
76dcaf9bf0 Tweaks to x0ret's anotation type handling
- match AST names a little better: AnnAssign -> ann_assign...
- localize Annotation type grammar change only when we have it
- Add reduce rule to combine assignment and annotate declaration
- Add annotation-type test from Python 3.6
- Docuemnt what's up with annotation types
2019-06-11 11:02:25 -04:00
x0ret
21fd506fbb Add 3.6 STORE_ANNOTATION 2019-06-11 10:36:55 -04:00
rocky
efe0914814 See above. 2019-06-11 10:35:53 -04:00
rocky
5981c7eae9 Fix LOAD_STR messing up docstring comparision 2019-06-11 10:33:49 -04:00
R. Bernstein
36ef1607af Merge pull request #259 from rocky/annotation-types-final
Fix py3 function signatures + annotations + ordering
2019-06-09 18:34:10 -04:00
rocky
b2d97f9847 Possble use of ','.join to remove "ends_in_comma"? 2019-06-09 18:29:46 -04:00
rocky
24ba5d7f40 One more LOAD_CONST->LOAD_STR remnant and...
We're good to go!

All function signatures seem to be working! YAY!

Credit goes to x0ret
2019-06-09 18:20:05 -04:00
x0ret
eae3f0d77b Fix issue in commas in function signatures 2019-06-10 02:25:19 +04:30
x0ret
a54fba7993 Fix issue in commas in function signatures 2019-06-10 01:42:16 +04:30
rocky
719d2d7232 Correct order of pos vs kwargs in 3.0-3.2 2019-06-09 16:26:08 -04:00
x0ret
e82cabc278 Fix 2 issues in commas in function signatures 2019-06-10 00:29:34 +04:30
rocky
9ab086b207 Add more x0ret tests 2019-06-09 15:19:01 -04:00
x0ret
4022e80d6d Fix py3 function signatures + annotations + ordering 2019-06-09 23:46:33 +04:30
rocky
9811c5bc42 Nicer assembly output 2019-06-09 12:21:45 -04:00
rocky
354796fffd One more LOAD_CONST->LOAD_STR artifact 2019-06-09 11:10:14 -04:00
R. Bernstein
ab696b316a Merge pull request #257 from rocky/annotation-types-3.6
Annotation types 3.6
2019-06-09 10:48:41 -04:00
x0ret
2f99da8199 Fix leading * arg in function signature in 3.6 2019-06-09 19:06:57 +04:30
rocky
fd5f4fa5b8 Nicer LOAD_STR assembly output 2019-06-09 09:53:21 -04:00
R. Bernstein
8e4168674d Merge pull request #252 from rocky/string-const
[WIP] LOAD_CONST->LOAD_STR for Python 3.x
2019-06-09 03:18:07 -04:00
rocky
c8fc6a704c LOAD_CONST->LOAD_STR bugs and 3.4 kwargsonly 2019-06-09 02:18:21 -04:00
rocky
622d6f849c Merge branch 'master' into string-const 2019-06-09 01:20:53 -04:00
rocky
aa21fe0b31 Give up on 3.8 in this branch 2019-06-08 18:57:43 -04:00
rocky
2995acb8d9 Merge branch 'master' into python-2.4 2019-06-08 18:57:08 -04:00
R. Bernstein
10d8aed4c0 Merge pull request #253 from rocky/annotation-types-3.5
Revise annotation type implementation for < 3.6
2019-06-08 18:43:04 -04:00
rocky
86fd5dbf7a 3.3-3.4 pos kwargs ordering 2019-06-08 18:40:50 -04:00
R. Bernstein
9fe1752359 Merge pull request #254 from rocky/origin/annotation-types-3.5
Add kwonly parsing.
2019-06-08 17:59:51 -04:00
x0ret
48ae7a6964 Fix kwonly args annotation handling 2019-06-09 01:38:42 +04:30
rocky
117b4ff4f1 Add kwonly parsing.
* annotation parsing for kwonly args is missing.
* Start filling out runnable tests. More work is needed on tests.
* refresh incorrect bytecode_3.3_run/15_assert.pyc
2019-06-08 15:29:18 -04:00
x0ret
e9002038f8 Revise annotation type implementation for < 3.6 2019-06-08 20:42:43 +04:30
rocky
9d47b99932 Another LOAD_STR/CONST isolation in < 3.0 2019-06-08 11:40:48 -04:00
rocky
59b012df6f localize LOAD_STR change to Python 3 2019-06-08 11:01:58 -04:00
rocky
44d7cbcf6f LOAD_CONST->LOAD_STR for Python 3.x 2019-06-08 02:28:27 -04:00
rocky
9bae73679f Reinstate 3.6. docstring test 2019-06-07 12:32:21 -04:00
rocky
ceebe9ab60 Add x0ret's annotation test on 3.6 2019-06-07 04:56:03 -04:00
R. Bernstein
b7e22b4530 Merge pull request #251 from rocky/annotation-types
x0ret's code in decompile3 for annotation types
2019-06-06 11:26:48 -04:00
x0ret
c7b20edba0 add annotations type test cases 2019-06-06 19:14:03 +04:30
rocky
64e35b09db Small simplification 2019-06-06 09:10:44 -04:00
rocky
3436a3a256 Merge branch 'master' into python-2.4 2019-06-06 08:45:18 -04:00
rocky
a0d4daf5ff Small typo 2019-06-06 08:44:19 -04:00
rocky
afa6a00db8 x0ret's code in decompile3 for annotation types 2019-06-06 06:39:02 -04:00
rocky
d634c5c17a Merge branch 'master' into python-2.4 2019-06-06 02:53:55 -04:00
rocky
d8f0d31475 better name for call generator rule 2019-06-06 02:53:04 -04:00
R. Bernstein
dd76a6f253 Merge pull request #250 from rocky/extra-parenthesis-genexpr-dryer
Extra parenthesis genexpr dryer
2019-06-06 02:02:25 -04:00
rocky
cb40caa73c DRY x0ret's code a little bit. 2019-06-05 20:35:06 -04:00
x0ret
fd59879510 feature #247: handle extra parenthesis in generators 2019-06-05 20:18:05 -04:00
R. Bernstein
c9cae2d09e Merge pull request #246 from rocky/async-await-generator
Bug in 3.5+ generator detection...
2019-06-05 20:16:24 -04:00
rocky
af209dc142 Bug in 3.5+ generator detection...
Also bug in 3.5 code detection for async attribute
2019-06-05 19:08:21 -04:00
rocky
f9fd63d5f5 Merge branch 'master' into python-2.4 2019-06-05 11:38:37 -04:00
R. Bernstein
ad419e0ed9 Merge pull request #243 from rocky/docstrings-again
Some docstring bugs fixed, some remain...
2019-05-31 08:37:41 -04:00
R. Bernstein
ee5c7da790 Merge pull request #244 from x0ret/docstrings-again
Fix unicode docstring again
2019-05-28 15:57:01 -04:00
x0ret
39c12704a8 fix unicode docstring again, handling unicode string in py2, fix docstring indentation 2019-05-28 15:11:44 +04:30
rocky
3b3fc09b60 Reinstate more docstring tests
But 3.{6,7} are stil broken
2019-05-27 20:59:29 -04:00
rocky
f7697ccd7b Some docstring bugs fixed, some remain...
I had broken escaping the tail quote by inadvertently switching from """
by default to '''.

Some additional tests have been added to 00_docstring.py for
this. However...

Unicode decoding is still broken. For now I've added  errors="ignore" to
.decode("utf-8", ...) until a better fix is found. Sigh.
2019-05-27 18:01:08 -04:00
R. Bernstein
e364499bb9 Merge pull request #242 from x0ret/master
Towards supporting unicode
2019-05-27 12:10:09 -04:00
x0ret
9db59f1b80 add support for generated source encoding 2019-05-27 17:19:10 +04:30
x0ret
a5cdb50154 towards supporting unicode: docstring 2019-05-27 17:00:08 +04:30
rocky
792ef5b5b8 Simplfy - TODO fix unicode in docstrings 2019-05-24 11:03:44 -04:00
rocky
123be56e5d Simplify docstrings...
main.py: 2.7ism creaped in.
2019-05-24 10:45:29 -04:00
rocky
7f46d8bb2a Merge branch 'master' into python-2.4 2019-05-24 10:37:51 -04:00
rocky
47ed0795b2 3.x docsting escaping works differently? 2019-05-24 09:53:56 -04:00
rocky
cccf33573b A runnable docstring test...
TODO: fix up the code! so this doesn't throw an assert error!
2019-05-24 02:29:23 -04:00
rocky
3c3e5c82fc Another small tweak 2019-05-21 17:04:09 -04:00
rocky
436260dc9a Small tweak 2019-05-21 17:02:24 -04:00
rocky
8f0674706b Grammar simplification 2019-05-21 16:10:12 -04:00
rocky
01cc184716 dict grammar rule cleanup 2019-05-21 15:09:40 -04:00
rocky
2771cb46ab short option -T for --tree+ 2019-05-21 11:38:43 -04:00
rocky
9ed4326f7e Administrivia 2019-05-21 08:29:03 -04:00
rocky
e3b10b62d7 Remove debug stmt 2019-05-21 07:19:08 -04:00
rocky
59b8f18486 Fix 3.7 list comprehension bug 2019-05-21 07:01:27 -04:00
rocky
bcf6939312 Merge branch 'master' of github.com:rocky/python-uncompyle6 2019-05-20 13:06:33 -04:00
rocky
3b7f49c01d Status area update and ...
Handle bytecode mismatch errors
2019-05-20 13:05:41 -04:00
R. Bernstein
ae976e991a Update README.rst 2019-05-20 09:14:46 -04:00
79 changed files with 2235 additions and 1202 deletions

NEWS.md

@@ -1,21 +1,50 @@
3.3.5 2019-07-03 Pre Independence Day
=====================================
Again, most of the work in this release is thanks to x0ret.
- Handle annotation args in Python 3.x
- Fix vararg and function signatures in 3.x
- Some 3.x < 3.6 while(1)/if fixes - others remain
- Start reinstating else if -> elif
- LOAD_CONST -> LOAD_CODE where appropriate
- option `weak-verify` is now `syntax-verify`
- code cleanups, start using "blacken" to reformat text
3.3.4 2019-06-19 Fleetwood at 65
================================
Most of the work in this release is thanks to x0ret.
- Major work was done by x0ret to correct function signatures and include annotation types
- Handle Python 3.6 STORE_ANNOTATION [#58](https://github.com/rocky/python-uncompyle6/issues/58)
- Friendlier assembly output
- `LOAD_CONST` replaced by `LOAD_STR` where appropriate to simplify parsing and improve clarity
- remove unneeded parenthesis in a generator expression when it is the single argument to the function [#247](https://github.com/rocky/python-uncompyle6/issues/246)
- Bug in noting an async function [#246](https://github.com/rocky/python-uncompyle6/issues/246)
- Handle unicode docstrings and fix docstring bugs [#241](https://github.com/rocky/python-uncompyle6/issues/241)
- Add short option -T as an alternate for --tree+
- Some grammar cleanup
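To make the `LOAD_CONST` -> `LOAD_STR` / `LOAD_CODE` items above concrete: the idea is to specialize a generic constant-load token by the type of its constant, so that grammar rules can refer to string and code-object loads directly. The sketch below is illustrative only and is not uncompyle6's actual scanner code.

    # Hypothetical helper, not the project's scanner: rename LOAD_CONST
    # tokens whose constant is a string or a code object so that grammar
    # rules can match LOAD_STR / LOAD_CODE directly.
    from types import CodeType

    def specialize_token(opname, const):
        if opname == "LOAD_CONST":
            if isinstance(const, str):
                return "LOAD_STR"
            elif isinstance(const, CodeType):
                return "LOAD_CODE"
        return opname
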
3.3.3 2019-05-19 Henry and Lewis
================================
As before, decompilation bugs fixed. The focus has primarily been on
Python 3.7. But with this release, releases will be put on hold, as a
better control-flow detection is worked on . Tis has been needed for a
better control-flow detection is worked on . This has been needed for a
while, and is long overdue. It will probably also take a while to get
done as good as what we have now.
However this work will be done in a new project
[decompyle3](https://github.com/rocky/python-decompile3). In contrast
to _uncompyle6_ the code wil be written assuming a modern Python 3,
to _uncompyle6_ the code will be written assuming a modern Python 3,
e.g. 3.7. It is originally intended to decompile Python version 3.7
and greater.
* A number of Python 3.7+ chained comparisons were fixed
* Revise Python 3.6ish format string handling
* Go over operator precedence, e.g. for AST IfExp
* Go over operator precedence, e.g. for AST `IfExp`
Reported Bug Fixes
------------------
@@ -47,7 +76,7 @@ Lots of decomplation bugs, especially in the 3.x series fixed. Don't worry thoug
* Add annotation return values in 3.6+
* Fix 3.6+ lambda parameter handling decompilation
* Fix 3.7+ chained comparision decompilation
* Fix 3.7+ chained comparison decompilation
* split out semantic-action customization into more separate files
* Add 3.8 try/else
* Fix 2.7 generator decompilation
@@ -79,14 +108,14 @@ Bug Fixes
Pull Requests
----------------
* [#202: Better "assert" statement detemination in Python 2.7](https://github.com/rocky/python-uncompyle6/pull/211)
* [#202: Better "assert" statement determination in Python 2.7](https://github.com/rocky/python-uncompyle6/pull/211)
* [#204: Python 3.7 testing](https://github.com/rocky/python-uncompyle6/pull/204)
* [#205: Run more f-string tests on Python 3.7](https://github.com/rocky/python-uncompyle6/pull/205)
* [#211: support utf-8 chars in Python 3 sourcecode](https://github.com/rocky/python-uncompyle6/pull/202)
3.2.5 2018-12-30 Clearout sale
3.2.5 2018-12-30 Clear-out sale
======================================
- 3.7.2 Remove deprecation warning on regexp string that isn't raw
@@ -151,14 +180,14 @@ Jesus on Friday's New York Times puzzle: "I'm stuck on 2A"
- reduce 3.5, 3.6 control-flow bugs
- reduce ambiguity in rules that lead to long (exponential?) parses
- limit/isolate some 2.6/2.7,3.x grammar rules
- more runtime testing of decompiled code
- more removal of parenthesis around calls via setting precidence
- more run-time testing of decompiled code
- more removal of parenthesis around calls via setting precedence
3.1.0 2018-03-21 Equinox
==============================
- Add code_deparse_with_offset() fragment function.
- Correct paramenter call fragment deparse_code()
- Correct parameter call fragment deparse_code()
- Lots of 3.6, 3.x, and 2.7 bug fixes
About 5% of 3.6 fail parsing now, but the
semantics still leave much to be desired.


@@ -93,8 +93,8 @@ This uses setup.py, so it follows the standard Python routine:
A GNU makefile is also provided so :code:`make install` (possibly as root or
sudo) will do the steps above.
Testing
-------
Running Tests
-------------
::
@@ -122,16 +122,32 @@ For usage help:
$ uncompyle6 -h
If you want strong verification of the correctness of the
decompilation process, add the `--verify` option. But there are
situations where this will indicate a failure, although the generated
program is semantically equivalent. Using option `--weak-verify` will
tell you if there is something definitely wrong. Generally, large
swaths of code are decompiled correctly, if not the entire program.
Verification
------------
You can also cross compare the results with pycdc_ . Since they work
differently, bugs here often aren't in that, and vice versa.
In older versions of Python it was possible to verify bytecode by
decompiling bytecode, and then compiling using the Python interpreter
for that bytecode version. Having done this the bytecode produced
could be compared with the original bytecode. However as Python's code
generation got better, this no longer was feasible.
If you want Python syntax verification of the correctness of the
decompilation process, add the `--syntax-verify` option. However, since
Python syntax changes between versions, you should use this option only
if the bytecode matches the version of the Python interpreter that will
be checking the syntax.
You can also cross compare the results with another python decompiler
like pycdc_ . Since they work differently, bugs here often aren't in
that, and vice versa.
There is an interesting class of these programs that is readily
available and gives stronger verification: those programs that, when
run, test themselves. Our test suite includes these.
And Python comes with another set of programs like this: its test
suite for the standard library. We have some code in `test/stdlib` to
facilitate this kind of checking too.
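As a rough sketch of what `--syntax-verify` amounts to internally, the
snippet below decompiles a code object with the `code_deparse` API used
elsewhere in this change set and asks the running interpreter to
re-compile the result. It checks syntax only, not semantic equivalence,
and it is not the project's actual test driver::

    from io import StringIO
    from uncompyle6 import PYTHON_VERSION, code_deparse

    def syntax_verify(code_object):
        # Decompile the code object back to source text...
        out = StringIO()
        deparsed = code_deparse(code_object, out=out, version=PYTHON_VERSION)
        source = deparsed.text
        # ...and re-compile it. If compile() succeeds, the output is valid
        # syntax for this interpreter; that says nothing about semantic
        # equivalence with the original bytecode.
        compile(source, "<decompiled>", "exec")
        return source

    syntax_verify(compile("x = 1 if True else 2", "<example>", "exec"))
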
Known Bugs/Restrictions
-----------------------
@@ -146,27 +162,6 @@ All of the Python decompilers that I have looked at have problems
decompiling Python's control flow. In some cases we can detect an
erroneous decompilation and report that.
In older versions of Python it was possible to verify bytecode by
decompiling bytecode, and then compiling using the Python interpreter
for that bytecode version. Having done this the bytecode produced
could be compared with the original bytecode. However as Python's code
generation got better, this is no longer feasible.
The verification that we use that doesn't check bytecode for
equivalence but does check to see if the resulting decompiled source
is a valid Python program by running the Python interpreter. Because
the Python language has changed so much, for best results you should
use the same Python version in checking as was used in creating the
bytecode.
There are however an interesting class of these programs that is
readily available give stronger verification: those programs that
when run check some computation, or even better themselves.
And already Python has a set of programs like this: the test suite
for the standard library that comes with Python. We have some
code in `test/stdlib` to facilitate this kind of checking.
Python support is strongest in Python 2 for 2.7 and drops off as you
get further away from that. Support is also probably pretty good for
python 2.3-2.4 since a lot of the goodness of early the version of the
@@ -194,7 +189,7 @@ Between Python 3.5, 3.6 and 3.7 there have been major changes to the
Currently not all Python magic numbers are supported. Specifically in
some versions of Python, notably Python 3.6, the magic number has
changed several times within a version.
**We support only released versions, not candidate versions.** Note however
that the magic of a released version is usually the same as the *last* candidate version prior to release.
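To check which Python release produced a `.pyc`, you can look at its
magic number: the first four bytes of the file. The following is a
minimal sketch; the path is a placeholder, and uncompyle6 itself relies
on xdis rather than code like this::

    import importlib.util
    import struct

    def pyc_magic(path):
        # A .pyc file begins with a 4-byte magic number: a 16-bit
        # little-endian value followed by b"\r\n".
        with open(path, "rb") as f:
            header = f.read(4)
        return header, struct.unpack("<H", header[:2])[0]

    header, magic_int = pyc_magic("example.cpython-36.pyc")  # placeholder path
    if header != importlib.util.MAGIC_NUMBER:
        print("compiled by a different Python release; magic int:", magic_int)
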
@@ -222,7 +217,7 @@ See Also
* https://github.com/zrax/pycdc : purports to support all versions of Python. It is written in C++ and is most accurate for Python versions around 2.7 and 3.3 when the code was more actively developed. Accuracy for more recent versions of Python 3 and early versions of Python are especially lacking. See its `issue tracker <https://github.com/zrax/pycdc/issues>`_ for details. Currently lightly maintained.
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than what is used here. Currently unmaintained.
* https://github.com/figment/unpyc3/ : fork of above, but supports Python 3.3 only. Includes some fixes like supporting function annotations. Currently unmaintained.
* https://github.com/wibiti/uncompyle2 : supports Python 2.7 only, but does that fairly well. There are situtations where `uncompyle6` results are incorrect while `uncompyle2` results are not, but more often uncompyle6 is correct when uncompyle2 is not. Because `uncompyle6` adheres to accuracy over idiomatic Python, `uncompyle2` can produce more natural-looking code when it is correct. Currently `uncompyle2` is lightly maintained. See its issue `tracker <https://github.com/wibiti/uncompyle2/issues>`_ for more details
* https://github.com/wibiti/uncompyle2 : supports Python 2.7 only, but does that fairly well. There are situations where `uncompyle6` results are incorrect while `uncompyle2` results are not, but more often uncompyle6 is correct when uncompyle2 is not. Because `uncompyle6` adheres to accuracy over idiomatic Python, `uncompyle2` can produce more natural-looking code when it is correct. Currently `uncompyle2` is lightly maintained. See its issue `tracker <https://github.com/wibiti/uncompyle2/issues>`_ for more details
* `How to report a bug <https://github.com/rocky/python-uncompyle6/blob/master/HOW-TO-REPORT-A-BUG.md>`_
* The HISTORY_ file.
* https://github.com/rocky/python-xdis : Cross Python version disassembler


@@ -58,7 +58,7 @@ entry_points = {
]}
ftp_url = None
install_requires = ['spark-parser >= 1.8.7, < 1.9.0',
'xdis >= 4.0.1, < 4.1.0']
'xdis >= 4.0.2, < 4.1.0']
license = 'GPL3'
mailing_list = 'python-debugger@googlegroups.com'


@@ -61,7 +61,7 @@ build_script:
test_script:
# Run the project tests
- "%CMD_IN_ENV% python test/test_pyenvlib.py --native --weak-verify"
- "%CMD_IN_ENV% python test/test_pyenvlib.py --native --syntax-verify"
after_test:
# If tests are successful, create binary packages for the project.


@@ -1,78 +0,0 @@
import sys
from uncompyle6 import PYTHON3
if PYTHON3:
from io import StringIO
minint = -sys.maxsize-1
maxint = sys.maxsize
else:
from StringIO import StringIO
minint = -sys.maxint-1
maxint = sys.maxint
from uncompyle6.semantics.helper import print_docstring
class PrintFake:
def __init__(self):
self.pending_newlines = 0
self.f = StringIO()
def write(self, *data):
if (len(data) == 0) or (len(data) == 1 and data[0] == ''):
return
out = ''.join((str(j) for j in data))
n = 0
for i in out:
if i == '\n':
n += 1
if n == len(out):
self.pending_newlines = max(self.pending_newlines, n)
return
elif n:
self.pending_newlines = max(self.pending_newlines, n)
out = out[n:]
break
else:
break
if self.pending_newlines > 0:
self.f.write('\n'*self.pending_newlines)
self.pending_newlines = 0
for i in out[::-1]:
if i == '\n':
self.pending_newlines += 1
else:
break
if self.pending_newlines:
out = out[:-self.pending_newlines]
self.f.write(out)
def println(self, *data):
if data and not(len(data) == 1 and data[0] == ''):
self.write(*data)
self.pending_newlines = max(self.pending_newlines, 1)
return
pass
def test_docstring():
for doc, expect in (
("Now is the time",
' """Now is the time"""'),
("""
Now is the time
""",
''' """
Now is the time
"""''')
# (r'''func placeholder - ' and with ("""\nstring\n """)''',
# """ r'''func placeholder - ' and with (\"\"\"\nstring\n\"\"\")'''"""),
# (r"""func placeholder - ' and with ('''\nstring\n''') and \"\"\"\nstring\n\"\"\" """,
# """ r\"\"\"func placeholder - ' and with ('''\nstring\n''') and \"\"\"\nstring\n\"\"\" \"\"\"""")
):
o = PrintFake()
# print(doc)
# print(expect)
print_docstring(o, ' ', doc)
assert expect == o.f.getvalue()


@@ -9,6 +9,7 @@ def test_grammar():
remain_tokens = set(tokens) - opcode_set
remain_tokens = set([re.sub(r'_\d+$','', t) for t in remain_tokens])
remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
remain_tokens = set([re.sub('LOAD_CODE$','', t) for t in remain_tokens])
remain_tokens = set(remain_tokens) - opcode_set
assert remain_tokens == set([]), \
"Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())
@@ -88,7 +89,7 @@ def test_grammar():
COME_FROM_EXCEPT_CLAUSE
COME_FROM_LOOP COME_FROM_WITH
COME_FROM_FINALLY ELSE
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_STR LOAD_CODE
LAMBDA_MARKER
RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
""".split())


@@ -1,3 +1,4 @@
from uncompyle6 import PYTHON_VERSION
from uncompyle6.scanners.tok import Token
def test_token():
@@ -16,7 +17,7 @@ def test_token():
# Make sure formatting of: LOAD_CONST False. We assume False is the 0th index
# of co_consts.
t = Token('LOAD_CONST', offset=1, attr=False, pattr=False, has_arg=True)
expect = ' 1 LOAD_CONST 0 False'
expect = ' 1 LOAD_CONST False'
assert t.format() == expect
if __name__ == '__main__':


@@ -8,5 +8,5 @@
9 STORE_NAME 2 'b'
12 JUMP_FORWARD 0 'to 15'
15_0 COME_FROM 12 '12'
15 LOAD_CONST 0 None
15 LOAD_CONST None
18 RETURN_VALUE


@@ -4,12 +4,12 @@
3 0 LOAD_NAME 0 'True'
3 POP_JUMP_IF_FALSE 15 'to 15'
4 6 LOAD_CONST 0 1
4 6 LOAD_CONST 1
9 STORE_NAME 1 'b'
12 JUMP_FORWARD 6 'to 21'
6 15 LOAD_CONST 1 2
6 15 LOAD_CONST 2
18 STORE_NAME 2 'd'
21_0 COME_FROM 12 '12'
21 LOAD_CONST 2 None
21 LOAD_CONST None
24 RETURN_VALUE


@@ -1,25 +1,28 @@
# future
from __future__ import print_function
# std
import os
import difflib
import subprocess
import tempfile
from StringIO import StringIO
import functools
# uncompyle6 / xdis
from uncompyle6 import PYTHON_VERSION, IS_PYPY, deparse_code
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY, code_deparse
# TODO : I think we can get xdis to support the dis api (python 3 version) by doing something like this there
from xdis.bytecode import Bytecode
from xdis.main import get_opcode
opc = get_opcode(PYTHON_VERSION, IS_PYPY)
from StringIO import StringIO
Bytecode = functools.partial(Bytecode, opc=opc)
import six
if PYTHON3:
from io import StringIO
else:
from StringIO import StringIO
def _dis_to_text(co):
return Bytecode(co).dis()
try:
import functools
Bytecode = functools.partial(Bytecode, opc=opc)
def _dis_to_text(co):
return Bytecode(co).dis()
except:
pass
def print_diff(original, uncompyled):
"""
@@ -42,11 +45,8 @@ def print_diff(original, uncompyled):
print('\nTo display diff highlighting run:\n pip install BeautifulSoup4')
diff = difflib.HtmlDiff().make_table(*args)
f = tempfile.NamedTemporaryFile(delete=False)
try:
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(str(diff).encode('utf-8'))
finally:
f.close()
try:
print()
@@ -63,7 +63,8 @@ def print_diff(original, uncompyled):
print('\nFor side by side diff install elinks')
diff = difflib.Differ().compare(original_lines, uncompyled_lines)
print('\n'.join(diff))
os.unlink(f.name)
finally:
os.unlink(f.name)
def are_instructions_equal(i1, i2):
@@ -124,11 +125,10 @@ def validate_uncompyle(text, mode='exec'):
original_dis = _dis_to_text(original_code)
original_text = text
deparsed = deparse_code(PYTHON_VERSION, original_code,
compile_mode=mode,
out=StringIO(),
is_pypy=IS_PYPY)
deparsed = code_deparse(original_code,
out=six.StringIO(),
version=PYTHON_VERSION,
compile_mode=mode)
uncompyled_text = deparsed.text
uncompyled_code = compile(uncompyled_text, '<string>', 'exec')


@@ -34,47 +34,47 @@ check-2.4 check-2.5 check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check
#: Run working tests from Python 3.0
check-3.0: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.0-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.0 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.0 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.1
check-3.1: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.1-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.1 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.1 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.2
check-3.2: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.2-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.2 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.2 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.3
check-3.3: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.3-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.3 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.3 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.4
check-3.4: check-bytecode check-3.4-ok check-2.7-ok
$(PYTHON) test_pythonlib.py --bytecode-3.4-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.4 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.5
check-3.5: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.5-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.5 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.5 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.6
check-3.6: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.6-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.6 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.6 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.7
check-3.7: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.7-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.7 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.7 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.8
check-3.8: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.8-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.8 --weak-verify $(COMPILE)
# #: Run working tests from Python 3.8
# check-3.8: check-bytecode
# $(PYTHON) test_pythonlib.py --bytecode-3.8-run --verify-run
# $(PYTHON) test_pythonlib.py --bytecode-3.8 --syntax-verify $(COMPILE)
# FIXME
#: this is called when running under pypy3.5-5.8.0 or pypy2-5.6.0
@@ -98,7 +98,7 @@ check-bytecode-3:
$(PYTHON) test_pythonlib.py --bytecode-3.0 \
--bytecode-3.1 --bytecode-3.2 --bytecode-3.3 \
--bytecode-3.4 --bytecode-3.5 --bytecode-3.6 \
--bytecode-3.7 --bytecode-3.8 \
--bytecode-3.7 \
--bytecode-pypy3.2
#: Check deparsing on selected bytecode 3.x
@@ -177,7 +177,7 @@ grammar-coverage-2.6:
grammar-coverage-2.7:
-rm $(COVER_DIR)/spark-grammar-2.7.cover || true
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-2.7.cover $(PYTHON) test_pythonlib.py --bytecode-2.7
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-2.7.cover $(PYTHON) test_pyenvlib.py --2.7.14 --max=600
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-2.7.cover $(PYTHON) test_pyenvlib.py --2.7.16 --max=600
#: Get grammar coverage for Python 3.0
grammar-coverage-3.0:
@@ -220,66 +220,71 @@ grammar-coverage-3.5:
grammar-coverage-3.6:
rm $(COVER_DIR)/spark-grammar-3.6.cover || /bin/true
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.6.cover $(PYTHON) test_pythonlib.py --bytecode-3.6
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.6.cover $(PYTHON) test_pyenvlib.py --3.6.4 --max=280
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.6.cover $(PYTHON) test_pyenvlib.py --3.6.8 --max=280
#: Get grammar coverage for Python 3.7
grammar-coverage-3.7:
rm $(COVER_DIR)/spark-grammar-3.7.cover || /bin/true
SPARK_PARSER_COVERAGE=$(COVER_DIR)/spark-grammar-3.7.cover $(PYTHON) test_pyenvlib.py --3.7.3 --max=500
#: Check deparsing Python 2.6
check-bytecode-2.6:
$(PYTHON) test_pythonlib.py --bytecode-2.6-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-2.6 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-2.6 --syntax-verify
#: Check deparsing Python 2.7
check-bytecode-2.7:
$(PYTHON) test_pythonlib.py --bytecode-2.7-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-2.7 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-2.7 --syntax-verify
#: Check deparsing Python 3.0
check-bytecode-3.0:
$(PYTHON) test_pythonlib.py --bytecode-3.0-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.0 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.0 --syntax-verify
#: Check deparsing Python 3.1
check-bytecode-3.1:
$(PYTHON) test_pythonlib.py --bytecode-3.1-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.1 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.1 --syntax-verify
#: Check deparsing Python 3.2
check-bytecode-3.2:
$(PYTHON) test_pythonlib.py --bytecode-3.2-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.2 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.2 --syntax-verify
#: Check deparsing Python 3.3
check-bytecode-3.3:
$(PYTHON) test_pythonlib.py --bytecode-3.3-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.3 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.3 --syntax-verify
#: Check deparsing Python 3.4
check-bytecode-3.4:
$(PYTHON) test_pythonlib.py --bytecode-3.4-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.4 --syntax-verify
#: Check deparsing Python 3.5
check-bytecode-3.5:
$(PYTHON) test_pythonlib.py --bytecode-3.5-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.5 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.5 --syntax-verify
#: Check deparsing Python 3.6
check-bytecode-3.6:
$(PYTHON) test_pythonlib.py --bytecode-3.6-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.6 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.6 --syntax-verify
#: Check deparsing Python 3.7
check-bytecode-3.7:
$(PYTHON) test_pythonlib.py --bytecode-3.7-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.7 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.7 --syntax-verify
#: Check deparsing Python 3.8
check-bytecode-3.8:
$(PYTHON) test_pythonlib.py --bytecode-3.8-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.8 --weak-verify
# #: Check deparsing Python 3.8
# check-bytecode-3.8:
# $(PYTHON) test_pythonlib.py --bytecode-3.8-run --verify-run
# $(PYTHON) test_pythonlib.py --bytecode-3.8 --syntax-verify
#: short tests for bytecodes only for this version of Python
check-native-short:
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --syntax-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION)-run --verify-run $(COMPILE)
#: Run longer Python 2.6's lib files known to be okay
@@ -288,19 +293,19 @@ check-2.4-ok:
#: Run longer Python 2.6's lib files known to be okay
check-2.6-ok:
$(PYTHON) test_pythonlib.py --ok-2.6 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-2.6 --syntax-verify $(COMPILE)
#: Run longer Python 2.7's lib files known to be okay
check-2.7-ok:
$(PYTHON) test_pythonlib.py --ok-2.7 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-2.7 --syntax-verify $(COMPILE)
#: Run longer Python 3.2's lib files known to be okay
check-3.2-ok:
$(PYTHON) test_pythonlib.py --ok-3.2 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-3.2 --syntax-verify $(COMPILE)
#: Run longer Python 3.4's lib files known to be okay
check-3.4-ok:
$(PYTHON) test_pythonlib.py --ok-3.4 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-3.4 --syntax-verify $(COMPILE)
#: PyPy of some sort. E.g. [PyPy 5.0.1 with GCC 4.8.4]
# Skip for now

Binary files not shown (24 files).


@@ -1,7 +1,7 @@
#!/bin/bash
# Remake Python grammar statistics
typeset -A ALL_VERS=([2.4]=2.4.6 [2.5]=2.5.6 [2.6]=2.6.9 [2.7]=2.7.14 [3.2]=3.2.6 [3.3]=3.3.6 [3.4]=3.4.8 [3.5]=3.5.5 [3.6]=3.6.4)
typeset -A ALL_VERS=([2.4]=2.4.6 [2.5]=2.5.6 [2.6]=2.6.9 [2.7]=2.7.16 [3.2]=3.2.6 [3.3]=3.3.6 [3.4]=3.4.8 [3.5]=3.5.6 [3.6]=3.6.8, [3.7]=3.7.3)
if (( $# == 0 )); then
echo 1>&2 "usage: $0 two-digit-version"


@@ -42,7 +42,7 @@ for VERSION in $PYVERSION ; do
echo Python Version $(pyenv local) > $LOGFILE
echo "" >> $LOGFILE
typeset -i ALL_FILES_STARTTIME=$(date +%s)
python ./test_pyenvlib.py --max ${MAX_TESTS} --weak-verify --$VERSION >>$LOGFILE 2>&1
python ./test_pyenvlib.py --max ${MAX_TESTS} --syntax-verify --$VERSION >>$LOGFILE 2>&1
rc=$?
echo Python Version $(pyenv local) >> $LOGFILE


@@ -1,13 +1,61 @@
# Python 3 annotations
# Python 3 positional, kwonly, varargs, and annotations. Ick.
def foo(a, b: 'annotating b', c: int) -> float:
print(a + b + c)
# RUNNABLE!
def test1(args_1, c: int, w=4, *varargs: int, **kwargs: 'annotating kwargs') -> tuple:
return (args_1, c, w, kwargs)
def test2(args_1, args_2, c: int, w=4, *varargs: int, **kwargs: 'annotating kwargs'):
return (args_1, args_2, c, w, varargs, kwargs)
def test3(c: int, w=4, *varargs: int, **kwargs: 'annotating kwargs') -> float:
return 5.4
def test4(a: float, c: int, *varargs: int, **kwargs: 'annotating kwargs') -> float:
return 5.4
def test5(a: float, c: int = 5, *varargs: int, **kwargs: 'annotating kwargs') -> float:
return 5.4
def test6(a: float, c: int, test=None):
return (a, c, test)
def test7(*varargs: int, **kwargs):
return (varargs, kwargs)
def test8(x=55, *varargs: int, **kwargs) -> list:
return (x, varargs, kwargs)
def test9(arg_1=55, *varargs: int, y=5, **kwargs):
return x, varargs, int, y, kwargs
def test10(args_1, b: 'annotating b', c: int) -> float:
return 5.4
def test11(*, name):
return args, name
def test12(a, *args, name):
return a, args
pass
def test13(*args, name):
return args, name
def test14(*args, name: int=1, qname):
return args, name, qname
def test15(*args, name='S', fname, qname=4):
return args, name, fname, qname
# From 3.4 /asyncio/streams.py open_connection
_DEFAULT_LIMIT = 5
def test16(host=None, port=None, *,
loop=None, limit=_DEFAULT_LIMIT, **kwds):
return host, port, loop, limit, kwds
# Python 3.1 _pyio.py uses the -> "IOBase" annotation
def open(file, mode = "r", buffering = None,
encoding = None, errors = None,
newline = None, closefd = True) -> "IOBase":
return text
def o(f, mode = "r", buffering = None) -> "IOBase":
return (f, mode, buffering)
def foo1(x: 'an argument that defaults to 5' = 5):
print(x)
@@ -18,13 +66,87 @@ def div(a: dict(type=float, help='the dividend'),
"""Divide a by b"""
return a / b
class TestSignatureObject(unittest.TestCase):
class TestSignatureObject1():
def test_signature_on_wkwonly(self):
def test(*, a:float, b:str) -> int:
def test(*, a:float, b:str, c:str = 'test', **kwargs: int) -> int:
pass
class SupportsInt(_Protocol):
class TestSignatureObject2():
def test_signature_on_wkwonly(self):
def test(*, c='test', a:float, b:str="S", **kwargs: int) -> int:
pass
class TestSignatureObject3():
def test_signature_on_wkwonly(self):
def test(*, c='test', a:float, kwargs:str="S", **b: int) -> int:
pass
class TestSignatureObject4():
def test_signature_on_wkwonly(self):
def test(x=55, *args, c:str='test', a:float, kwargs:str="S", **b: int) -> int:
pass
class TestSignatureObject5():
def test_signature_on_wkwonly(self):
def test(x=55, *args: int, c='test', a:float, kwargs:str="S", **b: int) -> int:
pass
class TestSignatureObject5():
def test_signature_on_wkwonly(self):
def test(x:int=55, *args: (int, str), c='test', a:float, kwargs:str="S", **b: int) -> int:
pass
class TestSignatureObject7():
def test_signature_on_wkwonly(self):
def test(c='test', kwargs:str="S", **b: int) -> int:
pass
class TestSignatureObject8():
def test_signature_on_wkwonly(self):
def test(**b: int) -> int:
pass
class TestSignatureObject9():
def test_signature_on_wkwonly(self):
def test(a, **b: int) -> int:
pass
class SupportsInt():
@abstractmethod
def __int__(self) -> int:
pass
def ann1(args_1, b: 'annotating b', c: int, *varargs: str) -> float:
assert ann1.__annotations__['b'] == 'annotating b'
assert ann1.__annotations__['c'] == int
assert ann1.__annotations__['varargs'] == str
assert ann1.__annotations__['return'] == float
def ann2(args_1, b: int = 5, **kwargs: float) -> float:
assert ann2.__annotations__['b'] == int
assert ann2.__annotations__['kwargs'] == float
assert ann2.__annotations__['return'] == float
assert b == 5
class TestSignatureObject():
def test_signature_on_wkwonly(self):
def test(x:int=55, *args: (int, str), c='test', a:float, kwargs:str="S", **b: int) -> int:
pass
assert test1(1, 5) == (1, 5, 4, {})
assert test1(1, 5, 6, foo='bar') == (1, 5, 6, {'foo': 'bar'})
assert test2(2, 3, 4) == (2, 3, 4, 4, (), {})
assert test3(10, foo='bar') == 5.4
assert test4(9.5, 7, 6, 4, bar='baz') == 5.4
### FIXME: fill in...
assert test6(1.2, 3) == (1.2, 3, None)
assert test6(2.3, 4, 5) == (2.3, 4, 5)
ann1(1, 'test', 5)
ann2(1)
### FIXME: fill in...
assert test12(1, 2, 3, name='hi') == (1, (2, 3)), "a, *args, name"
assert test13(1, 2, 3, name='hi') == ((1, 2, 3), 'hi'), "*args, name"
assert test16('localhost', loop=2, limit=3, a='b') == ('localhost', None, 2, 3, {'a': 'b'})


@@ -0,0 +1,34 @@
# Testing "while 1" versus "while" handling with if/elif/else's
def while_test(a, b, c):
while a != 2:
if b:
a += 1
elif c:
c = 0
else:
break
return a, b, c
def while1_test(a, b, c):
while 1:
if a != 2:
if b:
a = 3
b = 0
elif c:
c = 0
else:
a += b + c
break
return a, b, c
assert while_test(2, 0, 0) == (2, 0, 0), "no while loops"
assert while_test(0, 1, 0) == (2, 1, 0), "two while loops of b branch"
assert while_test(0, 0, 0) == (0, 0, 0), "0 while loops, else branch"
# FIXME: put this in a timer, and try with a=2
assert while1_test(4, 1, 1) == (3, 0, 0), "three while1 loops"
assert while1_test(4, 0, 0) == (4, 0, 0), " one while1 loop"


@@ -0,0 +1,17 @@
# From 3.7.3 asyncio/base_events.py
# We had (still have) screwy logic. Python 3.5 code node detection was off too.
async def create_connection(self):
infos = await self._ensure_resolved()
laddr_infos = await self._ensure_resolved()
for family in infos:
for laddr in laddr_infos:
family = 1
else:
continue
await self.sock_connect()
else:
raise OSError('Multiple exceptions: {}' for exc in family)
return


@@ -1,5 +1,5 @@
# Adapted from Python 3.6 trace.py
# Bug was in handling BUID_TUPLE_UNPACK created via
# Bug was in handling BUILD_TUPLE_UNPACK created via
# *opts.arguments
import argparse
parser = argparse.ArgumentParser()
@@ -7,4 +7,4 @@ parser.add_argument('filename', nargs='?')
parser.add_argument('arguments', nargs=argparse.REMAINDER)
opts = parser.parse_args(["foo", "a", "b"])
argv = opts.filename, *opts.arguments
assert argv == ('foo', 'a', 'b')
assert argv == ('foo', 'a', 'b'), "Reconstruct tuple using '*' and BUILD_TUPLE_UNPACK"


@@ -0,0 +1,37 @@
# This is from Python 3.6's test directory.

"""
Some correct syntax for variable annotation here.
More examples are in test_grammar and test_parser.
"""

from typing import no_type_check, ClassVar

i: int = 1
j: int
x: float = i/10

def f():
    class C: ...
    return C()

f().new_attr: object = object()

class C:
    def __init__(self, x: int) -> None:
        self.x = x

c = C(5)
c.new_attr: int = 10

__annotations__ = {}

@no_type_check
class NTC:
    def meth(self, param: complex) -> None:
        ...

class CV:
    var: ClassVar['CV']

CV.var = CV()


@@ -8,4 +8,7 @@ def x(s):
if not k.startswith('_')
}
assert x((('_foo', None),)) == {}
# Yes, the print() is funny. This is
# to test though a 2-arg assert where
# the 2nd argument is not a string.
assert x((('_foo', None),)) == {}, print("See issue #162")


@@ -11,6 +11,9 @@
def _walk_dir(dir, dfile, ddir=None):
yield from _walk_dir(dir, ddir=dfile)
def ybug(g):
yield from g
# From 3.5.1 _wakrefset.py
#
# 3.5:


@@ -1,10 +1,55 @@
# -*- coding: utf-8 -*-
# uncompyle2 bug was not escaping """ properly
r'''func placeholder - with ("""\nstring\n""")'''
def foo():
r'''func placeholder - ' and with ("""\nstring\n""")'''
def bar():
# RUNNABLE!
r'''func placeholder - with ("""\nstring\n""")'''
def dq0():
assert __doc__ == r'''func placeholder - with ("""\nstring\n""")'''
def dq1():
"""assert that dedent() has no effect on 'text'"""
assert dq1.__doc__ == """assert that dedent() has no effect on 'text'"""
def dq2():
'''assert that dedent() has no effect on 'text\''''
assert dq1.__doc__ == '''assert that dedent() has no effect on 'text\''''
def dq3():
"""assert that dedent() has no effect on 'text\""""
assert dq3.__doc__ == """assert that dedent() has no effect on 'text\""""
def dq4():
"""assert that dedent() has no effect on 'text'"""
assert dq4.__doc__ == """assert that dedent() has no effect on 'text'"""
def dq5():
r'''func placeholder - ' and with ("""\nstring\n""")'''
assert dq5.__doc__ == r'''func placeholder - ' and with ("""\nstring\n""")'''
def dq6():
r"""func placeholder - ' and with ('''\nstring\n''') and \"\"\"\nstring\n\"\"\" """
assert dq6.__doc__ == r"""func placeholder - ' and with ('''\nstring\n''') and \"\"\"\nstring\n\"\"\" """
def dq7():
u""" <----- SEE 'u' HERE
>>> mylen(u"áéíóú")
5
"""
assert dq7.__doc__ == u""" <----- SEE 'u' HERE
>>> mylen(u"áéíóú")
5
"""
def dq8():
u""" <----- SEE 'u' HERE
>>> mylen(u"تست")
5
"""
assert dq8.__doc__ == u""" <----- SEE 'u' HERE
>>> mylen(u"تست")
5
"""
def baz():
"""
@@ -20,3 +65,28 @@ def baz():
>>> t.rundict(m1.__dict__, 'rundict_test_pvt') # None are skipped.
TestResults(failed=0, attempted=8)
"""
assert baz.__doc__ == \
"""
... '''>>> assert 1 == 1
... '''
... \"""
>>> exec test_data in m1.__dict__
>>> exec test_data in m2.__dict__
>>> m1.__dict__.update({"f2": m2._f, "g2": m2.g, "h2": m2.H})
Tests that objects outside m1 are excluded:
\"""
>>> t.rundict(m1.__dict__, 'rundict_test_pvt') # None are skipped.
TestResults(failed=0, attempted=8)
"""
dq0()
dq1()
dq2()
dq3()
dq4()
dq5()
dq6()
dq7()
dq8()
baz()


@@ -136,7 +136,7 @@ if __name__ == '__main__':
test_options_keys = list(test_options.keys())
test_options_keys.sort()
opts, args = getopt.getopt(sys.argv[1:], '',
['start-with=', 'verify', 'verify-run', 'weak-verify',
['start-with=', 'verify', 'verify-run', 'syntax-verify',
'max=', 'coverage', 'all', ] \
+ test_options_keys )
vers = ''
@@ -144,7 +144,7 @@ if __name__ == '__main__':
for opt, val in opts:
if opt == '--verify':
do_verify = 'strong'
elif opt == '--weak-verify':
elif opt == '--syntax-verify':
do_verify = 'weak'
elif opt == '--verify-run':
do_verify = 'verify-run'


@@ -193,7 +193,7 @@ if __name__ == '__main__':
test_options_keys.sort()
opts, args = getopt.getopt(sys.argv[1:], '',
['start-with=', 'verify', 'verify-run',
'weak-verify', 'all',
'syntax-verify', 'all',
'compile', 'coverage',
'no-rm'] \
+ test_options_keys )
@@ -210,7 +210,7 @@ if __name__ == '__main__':
for opt, val in opts:
if opt == '--verify':
test_opts['do_verify'] = 'strong'
elif opt == '--weak-verify':
elif opt == '--syntax-verify':
test_opts['do_verify'] = 'weak'
elif opt == '--verify-run':
test_opts['do_verify'] = 'verify-run'


@@ -51,14 +51,8 @@ import uncompyle6.semantics.fragments
# Export some functions
from uncompyle6.main import decompile_file
# For compatibility
uncompyle_file = decompile_file
# Convenience functions so you can say:
# from uncompyle6 import (code_deparse, deparse_code2str)
code_deparse = uncompyle6.semantics.pysource.code_deparse
deparse_code2str = uncompyle6.semantics.pysource.deparse_code2str
# This is deprecated:
deparse_code = uncompyle6.semantics.pysource.deparse_code
code_deparse = uncompyle6.semantics.pysource.code_deparse
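A small usage sketch for the convenience exports above; it assumes
`deparse_code2str` accepts a code object plus an optional output stream
(mirroring the `code_deparse` calls elsewhere in this change set) and
returns the reconstructed source as a string:

    from io import StringIO
    from uncompyle6 import deparse_code2str

    # Hedged sketch: the keyword arguments used here are assumed, based
    # on the code_deparse calls shown in this diff, not on documentation.
    co = compile("x = 1\nif x:\n    x += 1\n", "<example>", "exec")
    print(deparse_code2str(co, out=StringIO()))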


@@ -37,9 +37,11 @@ Options:
--fragments use fragments deparser
--verify compare generated source with input byte-code
--verify-run compile generated source, run it and check exit code
--weak-verify compile generated source
--syntax-verify compile generated source
--linemaps generated line number correspondencies between byte-code
and generated source output
--encoding <encoding>
use <encoding> in generated source according to pep-0263
--help show this message
Debugging Options:
@@ -80,14 +82,15 @@ def main_bin():
timestampfmt = "# %Y.%m.%d %H:%M:%S %Z"
try:
opts, pyc_paths = getopt.getopt(sys.argv[1:], 'hac:gtdrVo:p:',
opts, pyc_paths = getopt.getopt(sys.argv[1:], 'hac:gtTdrVo:p:',
'help asm compile= grammar linemaps recurse '
'timestamp tree tree+ '
'fragments verify verify-run version '
'weak-verify '
'showgrammar'.split(' '))
except getopt.GetoptError(e):
sys.stderr.write('%s: %s\n' % (os.path.basename(sys.argv[0]), e))
'syntax-verify '
'showgrammar encoding='.split(' '))
except getopt.GetoptError, e:
sys.stderr.write('%s: %s\n' %
(os.path.basename(sys.argv[0]), e))
sys.exit(-1)
options = {}
@@ -100,7 +103,7 @@ def main_bin():
sys.exit(0)
elif opt == '--verify':
options['do_verify'] = 'strong'
elif opt == '--weak-verify':
elif opt == '--syntax-verify':
options['do_verify'] = 'weak'
elif opt == '--fragments':
options['do_fragments'] = True
@@ -114,7 +117,7 @@ def main_bin():
elif opt in ('--tree', '-t'):
options['showast'] = True
options['do_verify'] = None
elif opt in ('--tree+',):
elif opt in ('--tree+', '-T'):
options['showast'] = 'Full'
options['do_verify'] = None
elif opt in ('--grammar', '-g'):
@@ -129,6 +132,8 @@ def main_bin():
numproc = int(val)
elif opt in ('--recurse', '-r'):
recurse_dirs = True
elif opt == '--encoding':
options['source_encoding'] = val
else:
sys.stderr.write(opt)
usage()


@@ -42,7 +42,7 @@ def _get_outstream(outfile):
def decompile(
bytecode_version, co, out=None, showasm=None, showast=False,
timestamp=None, showgrammar=False, code_objects={},
timestamp=None, showgrammar=False, source_encoding=None, code_objects={},
source_size=None, is_pypy=None, magic_int=None,
mapstream=None, do_fragments=False):
"""
@@ -81,6 +81,8 @@ def decompile(
m = ""
sys_version_lines = sys.version.split('\n')
if source_encoding:
write('# -*- coding: %s -*-' % source_encoding)
write('# uncompyle6 version %s\n'
'# %sPython bytecode %s%s\n# Decompiled from: %sPython %s' %
(VERSION, co_pypy_str, bytecode_version,
@@ -147,7 +149,7 @@ def compile_file(source_path):
def decompile_file(filename, outstream=None, showasm=None, showast=False,
showgrammar=False, mapstream=None, do_fragments=False):
showgrammar=False, source_encoding=None, mapstream=None, do_fragments=False):
"""
decompile Python byte-code file (.pyc). Return objects to
all of the deparsed objects found in `filename`.
@@ -163,12 +165,12 @@ def decompile_file(filename, outstream=None, showasm=None, showast=False,
for con in co:
deparsed.append(
decompile(version, con, outstream, showasm, showast,
timestamp, showgrammar, code_objects=code_objects,
timestamp, showgrammar, source_encoding, code_objects=code_objects,
is_pypy=is_pypy, magic_int=magic_int),
mapstream=mapstream)
else:
deparsed = [decompile(version, co, outstream, showasm, showast,
timestamp, showgrammar,
timestamp, showgrammar, source_encoding,
code_objects=code_objects, source_size=source_size,
is_pypy=is_pypy, magic_int=magic_int,
mapstream=mapstream, do_fragments=do_fragments)]
@@ -179,7 +181,7 @@ def decompile_file(filename, outstream=None, showasm=None, showast=False,
# FIXME: combine into an options parameter
def main(in_base, out_base, compiled_files, source_files, outfile=None,
showasm=None, showast=False, do_verify=False,
showgrammar=False, raise_on_error=False,
showgrammar=False, source_encoding=None, raise_on_error=False,
do_linemaps=False, do_fragments=False):
"""
in_base base directory for input files
@@ -250,7 +252,7 @@ def main(in_base, out_base, compiled_files, source_files, outfile=None,
# Try to uncompile the input file
try:
deparsed = decompile_file(infile, outstream, showasm, showast, showgrammar,
linemap_stream, do_fragments)
source_encoding, linemap_stream, do_fragments)
if do_fragments:
for d in deparsed:
last_mod = None
@@ -280,6 +282,19 @@ def main(in_base, out_base, compiled_files, source_files, outfile=None,
sys.stdout.write("\n")
sys.stderr.write("\nLast file: %s " % (infile))
raise
except RuntimeError, e:
sys.stdout.write("\n%s\n" % str(e))
if str(e).startswith('Unsupported Python'):
sys.stdout.write("\n")
sys.stderr.write("\n# Unsupported bytecode in file %s\n# %s\n" % (infile, e))
else:
if outfile:
outstream.close()
os.remove(outfile)
sys.stdout.write("\n")
sys.stderr.write("\nLast file: %s " % (infile))
raise
# except:
# failed_files += 1
# if current_outfile:
@@ -337,9 +352,9 @@ def main(in_base, out_base, compiled_files, source_files, outfile=None,
# mem_usage = __memUsage()
print mess, infile
if current_outfile:
sys.stdout.write("%s\r" %
status_msg(do_verify, tot_files, okay_files, failed_files,
verify_failed_files, do_verify))
sys.stdout.write("%s -- %s\r" %
(infile, status_msg(do_verify, tot_files, okay_files, failed_files,
verify_failed_files, do_verify)))
try:
# FIXME: Something is weird with Pypy here
sys.stdout.flush()
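Given the `source_encoding` parameter threaded through
`decompile_file()` above, a caller might use it roughly as follows; the
`.pyc` path is a placeholder and the call is a sketch based only on the
signature shown in this hunk:

    import sys
    from uncompyle6.main import decompile_file

    # Sketch: source_encoding is passed through to decompile(), which then
    # emits a "# -*- coding: utf-8 -*-" line at the top of the output.
    decompile_file("example.pyc", outstream=sys.stdout, source_encoding="utf-8")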


@@ -496,6 +496,7 @@ class PythonParser(GenericASTBuilder):
def p_expr(self, args):
'''
expr ::= _mklambda
expr ::= LOAD_CODE
expr ::= LOAD_FAST
expr ::= LOAD_NAME
expr ::= LOAD_CONST
@@ -801,7 +802,6 @@ def python_parser(version, co, out=sys.stdout, showasm=False,
if __name__ == '__main__':
def parse_test(co):
from uncompyle6 import PYTHON_VERSION, IS_PYPY
ast = python_parser('2.7.13', co, showasm=True, is_pypy=True)
ast = python_parser(PYTHON_VERSION, co, showasm=True, is_pypy=IS_PYPY)
print(ast)
return


@@ -457,7 +457,7 @@ class Python2Parser(PythonParser):
if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
self.addRule('mklambda ::= %s LOAD_LAMBDA %s' %
('pos_arg ' * token.attr, opname), nop_func)
rule = 'mkfunc ::= %s LOAD_CONST %s' % ('expr ' * token.attr, opname)
rule = 'mkfunc ::= %s LOAD_CODE %s' % ('expr ' * token.attr, opname)
elif opname_base == 'MAKE_CLOSURE':
# FIXME: use add_unique_rules to tidy this up.
if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
@@ -472,7 +472,7 @@ class Python2Parser(PythonParser):
('expr ' * token.attr, opname))], customize)
pass
self.add_unique_rules([
('mkfunc ::= %s load_closure LOAD_CONST %s' %
('mkfunc ::= %s load_closure LOAD_CODE %s' %
('expr ' * token.attr, opname))], customize)
if self.version >= 2.7:


@@ -102,6 +102,8 @@ class Python26Parser(Python2Parser):
def p_stmt26(self, args):
"""
stmt ::= ifelsestmtr
# We use filler as a placeholder to keep nonterminal positions
# the same across different grammars so that the same semantic actions
# can be used
@@ -173,6 +175,9 @@ class Python26Parser(Python2Parser):
iflaststmt ::= testexpr_then c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
# "if"/"else" statement that ends in a RETURN
ifelsestmtr ::= testexpr_then return_if_stmts returns
testexpr_then ::= testtrue_then
testexpr_then ::= testfalse_then
testtrue_then ::= expr jmp_true_then
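
For context, "ifelsestmtr" covers an "if"/"else" in which every branch ends in a return, so the 2.x compiler emits returns instead of a jump past the else-suite. A minimal source of that shape (a hypothetical fragment, not taken from the test suite):

    def pick(flag):
        # Both suites end in RETURN_VALUE, which is what return_if_stmts
        # and the trailing "returns" nonterminal in the rule above match.
        if flag:
            return "yes"
        else:
            return "no"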


@@ -127,6 +127,8 @@ class Python27Parser(Python2Parser):
def p_stmt27(self, args):
"""
stmt ::= ifelsestmtr
# assert condition
assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1
@@ -179,6 +181,9 @@ class Python27Parser(Python2Parser):
ifelsestmtl ::= testexpr c_stmts_opt JUMP_BACK else_suitel
ifelsestmtl ::= testexpr c_stmts_opt CONTINUE else_suitel
# "if"/"else" statement that ends in a RETURN
ifelsestmtr ::= testexpr return_if_stmts COME_FROM returns
# Common with 2.6
return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM
stmt ::= if_expr_lambda

File diff suppressed because it is too large.


@@ -73,7 +73,7 @@ class Python32Parser(Python3Parser):
args_pos, args_kw, annotate_args = token.attr
# Check that there are 2 annotated params?
rule = (('mkfunc_annotate ::= %s%sannotate_tuple '
'LOAD_CONST LOAD_CONST EXTENDED_ARG %s') %
'LOAD_CONST LOAD_CODE EXTENDED_ARG %s') %
(('pos_arg ' * (args_pos)),
('annotate_arg ' * (annotate_args-1)), opname))
self.add_unique_rule(rule, opname, token.attr, customize)
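
As a reminder of why EXTENDED_ARG shows up here: in pre-3.6 bytecode the MAKE_FUNCTION argument packs the annotation count into a third byte, so any annotated signature needs an EXTENDED_ARG prefix (this is my reading of the rule, not something stated in the diff). A hypothetical 3.2-era function that should exercise mkfunc_annotate:

    def area(width: int, height: int) -> int:
        # Two parameter annotations plus the return annotation, together with
        # the tuple of annotated names, are what annotate_tuple / annotate_arg model.
        return width * height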


@@ -47,7 +47,7 @@ class Python34Parser(Python33Parser):
# Python 3.4+ optimizes the trailing two JUMPS away
# Is this 3.4 only?
# This is 3.4 only
yield_from ::= expr GET_ITER LOAD_CONST YIELD_FROM
_ifstmts_jump ::= c_stmts_opt JUMP_ABSOLUTE JUMP_FORWARD COME_FROM
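
The 3.4-only yield_from rule above reflects how 3.3/3.4 compile "yield from": GET_ITER on the delegated expression, a LOAD_CONST None as the initial value sent into the sub-iterator, then YIELD_FROM, hence the literal LOAD_CONST in the rule. A small hypothetical example:

    def chain(first, second):
        # On 3.3/3.4 each line below becomes GET_ITER; LOAD_CONST None; YIELD_FROM.
        yield from first
        yield from second

    assert list(chain("ab", [1, 2])) == ["a", "b", 1, 2]
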
@@ -55,6 +55,7 @@ class Python34Parser(Python33Parser):
def customize_grammar_rules(self, tokens, customize):
self.remove_rules("""
yield_from ::= expr expr YIELD_FROM
# 3.4.2 has this. 3.4.4 may now
# while1stmt ::= SETUP_LOOP l_stmts COME_FROM JUMP_BACK COME_FROM_LOOP
""")


@@ -29,8 +29,15 @@ class Python36Parser(Python35Parser):
def p_36misc(self, args):
"""
sstmt ::= sstmt RETURN_LAST
"""sstmt ::= sstmt RETURN_LAST
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
# ELSE added to the except_suite. With better flow control perhaps we can
# sort this out better.
except_suite ::= c_stmts_opt POP_EXCEPT jump_except ELSE
except_suite_finalize ::= SETUP_FINALLY c_stmts_opt except_var_finalize END_FINALLY
_jump ELSE
# 3.6 redoes how return_closure works. FIXME: Isolate to LOAD_CLOSURE
return_closure ::= LOAD_CLOSURE DUP_TOP STORE_NAME RETURN_VALUE RETURN_LAST
@@ -142,6 +149,7 @@ class Python36Parser(Python35Parser):
COME_FROM_FINALLY
compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD
"""
def customize_grammar_rules(self, tokens, customize):
@@ -201,14 +209,14 @@ class Python36Parser(Python35Parser):
if 'LOAD_DICTCOMP' in self.seen_ops:
# Is there something general going on here?
rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_CONST
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR
MAKE_FUNCTION_8 expr
GET_ITER CALL_FUNCTION_1
"""
self.addRule(rule, nop_func)
elif 'LOAD_SETCOMP' in self.seen_ops:
rule = """
set_comp ::= load_closure LOAD_SETCOMP LOAD_CONST
set_comp ::= load_closure LOAD_SETCOMP LOAD_STR
MAKE_FUNCTION_8 expr
GET_ITER CALL_FUNCTION_1
"""
@@ -263,6 +271,23 @@ class Python36Parser(Python35Parser):
self.addRule(rule, nop_func)
rule = ('starred ::= %s %s' % ('expr ' * v, opname))
self.addRule(rule, nop_func)
elif opname == 'SETUP_ANNOTATIONS':
# 3.6 Variable Annotations PEP 526
# This seems to come before STORE_ANNOTATION, and doesn't
# correspond to direct Python source code.
rule = """
stmt ::= SETUP_ANNOTATIONS
stmt ::= ann_assign_init_value
stmt ::= ann_assign_no_init
ann_assign_init_value ::= expr store store_annotation
ann_assign_no_init ::= store_annotation
store_annotation ::= LOAD_NAME STORE_ANNOTATION
store_annotation ::= subscript STORE_ANNOTATION
"""
self.addRule(rule, nop_func)
# Check to combine assignment + annotation into one statement
self.check_reduce['assign'] = 'token'
elif opname == 'SETUP_WITH':
rules_str = """
withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt COME_FROM_WITH
@@ -288,6 +313,7 @@ class Python36Parser(Python35Parser):
self.addRule(rules_str, nop_func)
pass
pass
return
def custom_classfunc_rule(self, opname, token, customize, next_token):
@@ -387,6 +413,15 @@ class Python36Parser(Python35Parser):
tokens, first, last)
if invalid:
return invalid
if rule[0] == 'assign':
# Try to combine assignment + annotation into one statement
if (len(tokens) >= last + 1 and
tokens[last] == 'LOAD_NAME' and
tokens[last+1] == 'STORE_ANNOTATION' and
tokens[last-1].pattr == tokens[last+1].pattr):
# Will handle as ann_assign_init_value
return True
pass
if rule[0] == 'call_kw':
# Make sure we don't derive call_kw
nt = ast[0]

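The SETUP_ANNOTATIONS / STORE_ANNOTATION rules added above correspond to PEP 526 variable annotations in 3.6, and the check_reduce entry plus the "assign" test later in this file fold an assignment followed by an annotation back into one annotated statement. A minimal hypothetical module that produces both grammar shapes:

    # 3.6 emits SETUP_ANNOTATIONS once per scope, then STORE_ANNOTATION per name.
    count: int = 0     # ann_assign_init_value: expr, store, store_annotation
    limit: int         # ann_assign_no_init: store_annotation only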

@@ -78,6 +78,12 @@ class Python37Parser(Python36Parser):
attribute37 ::= expr LOAD_METHOD
expr ::= attribute37
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
# ELSE added to the except_suite. With better flow control perhaps we can
# sort this out better.
except_suite ::= c_stmts_opt POP_EXCEPT jump_except ELSE
# FIXME: generalize and specialize
call ::= expr CALL_METHOD_0
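
attribute37 and the CALL_METHOD_* rules track CPython 3.7's method-call opcodes, where obj.name(...) compiles to LOAD_METHOD/CALL_METHOD rather than LOAD_ATTR/CALL_FUNCTION. A hypothetical line of the shape these rules reconstruct:

    text = "uncompyle"
    # On 3.7 this is LOAD_METHOD upper; CALL_METHOD 0 -- i.e. expr CALL_METHOD_0.
    shouted = text.upper()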


@@ -287,6 +287,8 @@ class Scanner2(Scanner):
op_name = 'LOAD_DICTCOMP'
elif const.co_name == '<setcomp>':
op_name = 'LOAD_SETCOMP'
else:
op_name = "LOAD_CODE"
# verify() uses 'pattr' for comparison, since 'attr'
# now holds Code(const) and thus can not be used
# for comparison (todo: think about changing this)
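
The scanner change above retags a LOAD_CONST whose constant is a code object (other than a lambda or comprehension body) as LOAD_CODE, which is what the new expr ::= LOAD_CODE and mkfunc ... LOAD_CODE grammar rules key on. One way to see such a constant with only the standard library:

    import dis

    source = "def greet(name):\n    return 'hi ' + name\n"
    module_code = compile(source, "<example>", "exec")
    # The listing shows LOAD_CONST <code object greet ...> followed by
    # MAKE_FUNCTION; that code-object load is what gets renamed to LOAD_CODE.
    dis.dis(module_code)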


@@ -173,6 +173,8 @@ class Scanner26(scan.Scanner2):
op_name = 'LOAD_DICTCOMP'
elif const.co_name == '<setcomp>':
op_name = 'LOAD_SETCOMP'
else:
op_name = "LOAD_CODE"
# verify uses 'pattr' for comparison, since 'attr'
# now holds Code(const) and thus can not be used
# for comparison (todo: think about changing this)

File diff suppressed because it is too large.


@@ -1,4 +1,4 @@
# Copyright (c) 2016-2018 by Rocky Bernstein
# Copyright (c) 2016-2019 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
#
@@ -22,18 +22,29 @@ if PYTHON3:
intern = sys.intern
class Token: # Python 2.4 can't have empty ()
"""
Class representing a byte-code instruction.
A byte-code token is equivalent to Python 3's dis.instruction or
the contents of one line as output by dis.dis().
"""
# FIXME: match Python 3.4's terms:
# linestart = starts_line
# attr = argval
# pattr = argrepr
def __init__(self, opname, attr=None, pattr=None, offset=-1,
linestart=None, op=None, has_arg=None, opc=None):
def __init__(
self,
opname,
attr=None,
pattr=None,
offset=-1,
linestart=None,
op=None,
has_arg=None,
opc=None,
):
self.kind = intern(opname)
self.has_arg = has_arg
self.attr = attr
@@ -46,6 +57,7 @@ class Token: # Python 2.4 can't have empty ()
if opc is None:
from xdis.std import _std_api
self.opc = _std_api.opc
else:
self.opc = opc
@@ -58,7 +70,9 @@ class Token: # Python 2.4 can't have empty ()
""" '==' on kind and "pattr" attributes.
It is okay if offsets and linestarts are different"""
if isinstance(o, Token):
return (self.kind == o.kind) and (self.pattr == o.pattr)
return (self.kind == o.kind) and (
(self.pattr == o.pattr) or self.attr == o.attr
)
else:
# ?? do we need this?
return self.kind == o
@@ -77,47 +91,69 @@ class Token: # Python 2.4 can't have empty ()
# ('%9s %-18s %r' % (self.offset, self.kind, pattr)))
def __str__(self):
return self.format(line_prefix='')
return self.format(line_prefix="")
def format(self, line_prefix=''):
def format(self, line_prefix=""):
if self.linestart:
prefix = '\n%s%4d ' % (line_prefix, self.linestart)
prefix = "\n%s%4d " % (line_prefix, self.linestart)
else:
prefix = ' ' * (6 + len(line_prefix))
offset_opname = '%6s %-17s' % (self.offset, self.kind)
prefix = (" " * (6 + len(line_prefix)))
offset_opname = "%6s %-17s" % (self.offset, self.kind)
if not self.has_arg:
return "%s%s" % (prefix, offset_opname)
if isinstance(self.attr, int):
argstr = "%6d " % self.attr
else:
argstr = ' '*7
argstr = (" " * 7)
name = self.kind
if self.has_arg:
pattr = self.pattr
if self.opc:
if self.op in self.opc.JREL_OPS:
if not self.pattr.startswith('to '):
if not self.pattr.startswith("to "):
pattr = "to " + self.pattr
elif self.op in self.opc.JABS_OPS:
self.pattr = str(self.pattr)
if not self.pattr.startswith('to '):
if not self.pattr.startswith("to "):
pattr = "to " + str(self.pattr)
pass
elif self.op in self.opc.CONST_OPS:
# Compare with pysource n_LOAD_CONST
attr = self.attr
if attr is None:
pattr = None
if name == "LOAD_STR":
pattr = self.attr
elif name == "LOAD_CODE":
return "%s%s%s %s" % (prefix, offset_opname, argstr, pattr)
else:
return "%s%s %r" % (prefix, offset_opname, pattr)
elif self.op in self.opc.hascompare:
if isinstance(self.attr, int):
pattr = self.opc.cmp_op[self.attr]
return "%s%s%s %s" % (prefix, offset_opname, argstr, pattr)
elif self.op in self.opc.hasvargs:
return "%s%s%s" % (prefix, offset_opname, argstr)
elif name == 'LOAD_ASSERT':
return "%s%s %s" % (prefix, offset_opname, pattr)
elif self.op in self.opc.NAME_OPS:
if self.opc.version >= 3.0:
return "%s%s%s %s" % (prefix, offset_opname, argstr, self.attr)
elif name == "EXTENDED_ARG":
return "%s%s%s 0x%x << %s = %s" % (
prefix,
offset_opname,
argstr,
self.attr,
self.opc.EXTENDED_ARG_SHIFT,
pattr,
)
# And so on. See xdis/bytecode.py get_instructions_bytes
pass
elif re.search(r'_\d+$', self.kind):
return "%s%s%s" % (prefix, offset_opname, argstr)
elif re.search(r"_\d+$", self.kind):
return "%s%s%s" % (prefix, offset_opname, argstr)
else:
pattr = ''
return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
pattr = ""
return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
def __hash__(self):
return hash(self.kind)
@@ -125,4 +161,5 @@ class Token: # Python 2.4 can't have empty ()
def __getitem__(self, i):
raise IndexError
NoneToken = Token('LOAD_CONST', offset=-1, attr=None, pattr=None)
NoneToken = Token("LOAD_CONST", offset=-1, attr=None, pattr=None)
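
The new EXTENDED_ARG branch in format() prints the prefix arithmetic explicitly: the prefix's argument is shifted left by opc.EXTENDED_ARG_SHIFT and combined with the next instruction's argument. A standalone sketch of that arithmetic, assuming the 3.6+ wordcode shift of 8 bits (pre-3.6 bytecode shifts by 16):

    EXTENDED_ARG_SHIFT = 8        # assumed wordcode value; not taken from this diff

    prefix_arg = 0x1              # argument carried by the EXTENDED_ARG prefix
    following_arg = 0x2C          # raw argument of the instruction that follows
    effective_arg = (prefix_arg << EXTENDED_ARG_SHIFT) | following_arg
    assert effective_arg == 0x12C # the "0x%x << %s = %s" line prints this relation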


@@ -36,7 +36,6 @@ class AligningWalker(SourceWalker, object):
self.pending_newlines = max(self.pending_newlines, 1)
def write(self, *data):
from trepan.api import debug; debug()
if (len(data) == 1) and data[0] == self.indent:
diff = max(self.pending_newlines,
self.desired_line_number - self.current_line_number)


@@ -128,10 +128,10 @@ PASS = SyntaxTree('stmts',
[ SyntaxTree('stmt',
[ SyntaxTree('pass', [])])])])
ASSIGN_DOC_STRING = lambda doc_string: \
ASSIGN_DOC_STRING = lambda doc_string, doc_load: \
SyntaxTree('stmt',
[ SyntaxTree('assign',
[ SyntaxTree('expr', [ Token('LOAD_CONST', pattr=doc_string) ]),
[ SyntaxTree('expr', [ Token(doc_load, pattr=doc_string, attr=doc_string) ]),
SyntaxTree('store', [ Token('STORE_NAME', pattr='__doc__')])
])])
@@ -221,8 +221,9 @@ TABLE_DIRECT = {
'IMPORT_FROM': ( '%{pattr}', ),
'attribute': ( '%c.%[1]{pattr}',
(0, 'expr')),
'LOAD_FAST': ( '%{pattr}', ),
'LOAD_NAME': ( '%{pattr}', ),
'LOAD_STR': ( '%{pattr}', ),
'LOAD_FAST': ( '%{pattr}', ),
'LOAD_NAME': ( '%{pattr}', ),
'LOAD_CLASSNAME': ( '%{pattr}', ),
'LOAD_GLOBAL': ( '%{pattr}', ),
'LOAD_DEREF': ( '%{pattr}', ),
@@ -317,7 +318,7 @@ TABLE_DIRECT = {
'mkfuncdeco0': ( '%|def %c\n', 0),
'classdefdeco': ( '\n\n%c', 0),
'classdefdeco1': ( '%|@%c\n%c', 0, 1),
'kwarg': ( '%[0]{pattr}=%c', 1),
'kwarg': ( '%[0]{pattr}=%c', 1), # Change when Python 2 does LOAD_STR
'kwargs': ( '%D', (0, maxint, ', ') ),
'kwargs1': ( '%D', (0, maxint, ', ') ),
@@ -349,23 +350,34 @@ TABLE_DIRECT = {
'testtrue': ( 'not %p',
(0, PRECEDENCE['unary_not']) ),
# Generally the args here are 0: (some sort of) "testexpr",
# 1: (some sort of) "cstmts_opt",
# 2 or 3: "else_suite"
# But unfortunately there are irregularities, For example, 2.6- uses "testexpr_then"
# and sometimes "cstmts" instead of "cstmts_opt" happens.
# Down the line we might isolate these into version-specific rules.
'ifelsestmt': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtc': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtl': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
'ifelsestmtr2': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM in position 2
# "elif" forms are not generated by the parser but are created through tree
# transformations. See "n_ifelsestmt".
'ifelifstmt': ( '%|if %c:\n%+%c%-%c', 0, 1, 3 ),
'elifelifstmt': ( '%|elif %c:\n%+%c%-%c', 0, 1, 3 ),
'elifstmt': ( '%|elif %c:\n%+%c%-', 0, 1 ),
'elifelsestmt': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
'ifelsestmtr2': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
'elifelsestmtr': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 2 ),
'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM in position 2
'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
'whileelsestmt2': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -3 ),
'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
# Note: Python 3.8+ changes this
@@ -397,7 +409,9 @@ TABLE_DIRECT = {
'tf_tryelsestmt': ( '%c%-%c%|else:\n%+%c', 1, 3, 4 ),
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
'except': ( '%|except:\n%+%c%-', 3 ),
'except_cond1': ( '%|except %c:\n', 1 ),
'except_cond1': ( '%|except %c:\n', (1, 'expr') ),
'except_cond2': ( '%|except %c as %c:\n',
(1, 'expr'), (5, 'store') ),
'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
# In Python 3.6, this is more complicated in the presence of "returns"
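
Reading aid for the table entries above (roughly, since the exact specifier semantics live in the template engine): %| writes the current indent, %c recurses into the numbered child, and %+/%- raise and lower the indent level. The elif* entries therefore only change the surrounding keywords; the children stay the same test, body, and else-suite positions, and the 'ifelsestmtr' entry lays out text of this shape:

    # Illustrative only: child 0 is the test, child 1 the return-only body,
    # child 2 the else suite.
    def classify(n):
        if n < 0:
            return "negative"
        else:
            return "non-negative"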


@@ -49,11 +49,6 @@ def customize_for_version(self, is_pypy, version):
5, 6, 7, 0, 1, 2 ),
})
if version >= 3.0:
TABLE_DIRECT.update({
# Gotta love Python for its futzing around with syntax like this
'raise_stmt2': ( '%|raise %c from %c\n', 0, 1),
})
if version >= 3.2:
TABLE_DIRECT.update({
'del_deref_stmt': ( '%|del %c\n', 0),
@@ -62,6 +57,10 @@ def customize_for_version(self, is_pypy, version):
from uncompyle6.semantics.customize3 import customize_for_version3
customize_for_version3(self, version)
else: # < 3.0
TABLE_DIRECT.update({
'except_cond3' : ( '%|except %c, %c:\n',
(1, 'expr'), (-2, 'store') )
})
if 2.4 <= version <= 2.6:
TABLE_DIRECT.update({
'comp_for': ( ' for %c in %c', 3, 1 ),
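
except_cond3 renders the Python 2-only comma form of an exception handler, which is why its template now lives under the "< 3.0" branch here, while raise ... from ... moved into the Python 3 table in customize3.py. For comparison (hypothetical sources):

    # Python 2 handler that except_cond3 reproduces (a syntax error on Python 3):
    #     try:
    #         risky()
    #     except ValueError, err:
    #         print err
    #
    # Python 3 form handled by except_cond2, plus the chained raise that the
    # relocated raise_stmt2 template covers:
    def reraise(risky):
        try:
            return risky()
        except ValueError as err:
            raise RuntimeError("wrapped") from err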


@@ -26,8 +26,6 @@ def customize_for_version25(self, version):
# Import style for 2.5+
########################
TABLE_DIRECT.update({
'except_cond3' : ( '%|except %c, %c:\n',
(1, 'expr'), (-2, 'store') ),
'importmultiple': ( '%|import %c%c\n', 2, 3 ),
'import_cont' : ( ', %c', 2 ),
# With/as is allowed as "from future" thing in 2.5


@@ -31,9 +31,31 @@ def customize_for_version26_27(self, version):
if version > 2.6:
TABLE_DIRECT.update({
'except_cond2': ( '%|except %c as %c:\n', 1, 5 ),
# When a generator is a single parameter of a function,
# it doesn't need the surrounding parentheses.
'call_generator': ('%c%P', 0, (1, -1, ', ', 100)),
})
else:
TABLE_DIRECT.update({
'testtrue_then': ( 'not %p', (0, 22) ),
})
def n_call(node):
mapping = self._get_mapping(node)
key = node
for i in mapping[1:]:
key = key[i]
pass
if key.kind == 'CALL_FUNCTION_1':
# A function with one argument. If this is a generator,
# no parenthesis is needed.
args_node = node[-2]
if args_node == 'expr':
n = args_node[0]
if n == 'generator_exp':
node.kind = 'call_generator'
pass
pass
self.default(node)
self.n_call = n_call
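
call_generator exists because a generator expression that is the sole argument of a call needs no parentheses of its own, and reprinting the redundant pair would be noisy (though still legal). A hypothetical call of the shape n_call detects:

    values = [3, 1, 4, 1, 5]
    # CALL_FUNCTION_1 whose single argument is a generator_exp:
    total = sum(v * v for v in values)   # rendered without doubled parentheses
    assert total == 52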


@@ -19,26 +19,38 @@
from uncompyle6.semantics.consts import TABLE_DIRECT
from xdis.code import iscode
from uncompyle6.semantics.helper import gen_function_parens_adjust
from uncompyle6.semantics.make_function import make_function3_annotate
from uncompyle6.semantics.customize35 import customize_for_version35
from uncompyle6.semantics.customize36 import customize_for_version36
from uncompyle6.semantics.customize37 import customize_for_version37
from uncompyle6.semantics.customize38 import customize_for_version38
def customize_for_version3(self, version):
TABLE_DIRECT.update({
'comp_for' : ( ' for %c in %c',
(2, 'store') , (0, 'expr') ),
'conditionalnot' : ( '%c if not %c else %c',
(2, 'expr') , (0, 'expr'), (4, 'expr') ),
'except_cond2' : ( '%|except %c as %c:\n', 1, 5 ),
'function_def_annotate': ( '\n\n%|def %c%c\n', -1, 0),
'importmultiple' : ( '%|import %c%c\n', 2, 3 ),
'import_cont' : ( ', %c', 2 ),
'store_locals' : ( '%|# inspect.currentframe().f_locals = __locals__\n', ),
'withstmt' : ( '%|with %c:\n%+%c%-', 0, 3),
'withasstmt' : ( '%|with %c as (%c):\n%+%c%-', 0, 2, 3),
})
TABLE_DIRECT.update(
{
"comp_for": (" for %c in %c", (2, "store"), (0, "expr")),
"conditionalnot": (
"%c if not %c else %c",
(2, "expr"),
(0, "expr"),
(4, "expr"),
),
"except_cond2": ("%|except %c as %c:\n", 1, 5),
"function_def_annotate": ("\n\n%|def %c%c\n", -1, 0),
# When a generator is a single parameter of a function,
# it doesn't need the surrounding parentheses.
"call_generator": ("%c%P", 0, (1, -1, ", ", 100)),
"importmultiple": ("%|import %c%c\n", 2, 3),
"import_cont": (", %c", 2),
"kwarg": ("%[0]{attr}=%c", 1),
"raise_stmt2": ("%|raise %c from %c\n", 0, 1),
"store_locals": ("%|# inspect.currentframe().f_locals = __locals__\n",),
"withstmt": ("%|with %c:\n%+%c%-", 0, 3),
"withasstmt": ("%|with %c as (%c):\n%+%c%-", 0, 2, 3),
}
)
assert version >= 3.0
@@ -53,102 +65,102 @@ def customize_for_version3(self, version):
# ----------
# * subclass_code - the code for the subclass body
subclass_info = None
if node == 'classdefdeco2':
if node == "classdefdeco2":
if self.version >= 3.6:
class_name = node[1][1].pattr
class_name = node[1][1].attr
elif self.version <= 3.3:
class_name = node[2][0].pattr
class_name = node[2][0].attr
else:
class_name = node[1][2].pattr
class_name = node[1][2].attr
build_class = node
else:
build_class = node[0]
if self.version >= 3.6:
if build_class == 'build_class_kw':
if build_class == "build_class_kw":
mkfunc = build_class[1]
assert mkfunc == 'mkfunc'
assert mkfunc == "mkfunc"
subclass_info = build_class
if hasattr(mkfunc[0], 'attr') and iscode(mkfunc[0].attr):
if hasattr(mkfunc[0], "attr") and iscode(mkfunc[0].attr):
subclass_code = mkfunc[0].attr
else:
assert mkfunc[0] == 'load_closure'
assert mkfunc[0] == "load_closure"
subclass_code = mkfunc[1].attr
assert iscode(subclass_code)
if build_class[1][0] == 'load_closure':
if build_class[1][0] == "load_closure":
code_node = build_class[1][1]
else:
code_node = build_class[1][0]
class_name = code_node.attr.co_name
else:
class_name = node[1][0].pattr
class_name = node[1][0].attr
build_class = node[0]
assert 'mkfunc' == build_class[1]
assert "mkfunc" == build_class[1]
mkfunc = build_class[1]
if mkfunc[0] in ('kwargs', 'no_kwargs'):
if mkfunc[0] in ("kwargs", "no_kwargs"):
if 3.0 <= self.version <= 3.2:
for n in mkfunc:
if hasattr(n, 'attr') and iscode(n.attr):
if hasattr(n, "attr") and iscode(n.attr):
subclass_code = n.attr
break
elif n == 'expr':
elif n == "expr":
subclass_code = n[0].attr
pass
pass
else:
for n in mkfunc:
if hasattr(n, 'attr') and iscode(n.attr):
if hasattr(n, "attr") and iscode(n.attr):
subclass_code = n.attr
break
pass
pass
if node == 'classdefdeco2':
if node == "classdefdeco2":
subclass_info = node
else:
subclass_info = node[0]
elif build_class[1][0] == 'load_closure':
elif build_class[1][0] == "load_closure":
# Python 3 with closures not functions
load_closure = build_class[1]
if hasattr(load_closure[-3], 'attr'):
if hasattr(load_closure[-3], "attr"):
# Python 3.3 classes with closures work like this.
# Note have to test before 3.2 case because
# index -2 also has an attr.
subclass_code = load_closure[-3].attr
elif hasattr(load_closure[-2], 'attr'):
elif hasattr(load_closure[-2], "attr"):
# Python 3.2 works like this
subclass_code = load_closure[-2].attr
else:
raise 'Internal Error n_classdef: cannot find class body'
if hasattr(build_class[3], '__len__'):
raise "Internal Error n_classdef: cannot find class body"
if hasattr(build_class[3], "__len__"):
if not subclass_info:
subclass_info = build_class[3]
elif hasattr(build_class[2], '__len__'):
elif hasattr(build_class[2], "__len__"):
subclass_info = build_class[2]
else:
raise 'Internal Error n_classdef: cannot superclass name'
elif self.version >= 3.6 and node == 'classdefdeco2':
raise "Internal Error n_classdef: cannot superclass name"
elif self.version >= 3.6 and node == "classdefdeco2":
subclass_info = node
subclass_code = build_class[1][0].attr
elif not subclass_info:
if mkfunc[0] in ('no_kwargs', 'kwargs'):
if mkfunc[0] in ("no_kwargs", "kwargs"):
subclass_code = mkfunc[1].attr
else:
subclass_code = mkfunc[0].attr
if node == 'classdefdeco2':
if node == "classdefdeco2":
subclass_info = node
else:
subclass_info = node[0]
if (node == 'classdefdeco2'):
self.write('\n')
if node == "classdefdeco2":
self.write("\n")
else:
self.write('\n\n')
self.write("\n\n")
self.currentclass = str(class_name)
self.write(self.indent, 'class ', self.currentclass)
self.write(self.indent, "class ", self.currentclass)
self.print_super_classes3(subclass_info)
self.println(':')
self.println(":")
# class body
self.indent_more()
@@ -157,11 +169,12 @@ def customize_for_version3(self, version):
self.currentclass = cclass
if len(self.param_stack) > 1:
self.write('\n\n')
self.write("\n\n")
else:
self.write('\n\n\n')
self.write("\n\n\n")
self.prune()
self.n_classdef3 = n_classdef3
if version == 3.0:
@@ -170,42 +183,44 @@ def customize_for_version3(self, version):
# since we pick up the iteration variable some other way and
# we definitely don't include in the source _[dd].
def n_comp_iter(node):
if node[0] == 'expr':
if node[0] == "expr":
n = node[0][0]
if (n == 'LOAD_FAST' and
n.pattr[0:2] == '_['):
if n == "LOAD_FAST" and n.pattr[0:2] == "_[":
self.prune()
pass
pass
# Not this special case, procede as normal...
# Not this special case, proceed as normal...
self.default(node)
self.n_comp_iter = n_comp_iter
if version >= 3.3:
elif version == 3.3:
# FIXME: perhaps this can be folded into the 3.4+ case?
def n_yield_from(node):
self.write('yield from')
self.write(' ')
if 3.3 <= self.version <= 3.4:
self.preorder(node[0][0][0][0])
elif self.version >= 3.5:
self.preorder(node[0])
else:
assert False, "dunno about this python version"
self.prune() # stop recursing
assert node[0] == "expr"
assert node[0][0] == "get_iter"
# Skip over yield_from.expr.get_iter which adds an
# extra iter(). Maybe we can do this in the transformation phase instead?
template = ("yield from %c", (0, "expr"))
self.template_engine(template, node[0][0])
self.prune()
self.n_yield_from = n_yield_from
if 3.2 <= version <= 3.4:
def n_call(node):
mapping = self._get_mapping(node)
key = node
for i in mapping[1:]:
key = key[i]
pass
if key.kind.startswith('CALL_FUNCTION_VAR_KW'):
if key.kind.startswith("CALL_FUNCTION_VAR_KW"):
# We may want to fill this in...
# But it is distinct from CALL_FUNCTION_VAR below
pass
elif key.kind.startswith('CALL_FUNCTION_VAR'):
elif key.kind.startswith("CALL_FUNCTION_VAR"):
# CALL_FUNCTION_VAR's top element of the stack contains
# the variable argument list, then comes
# annotation args, then keyword args.
@@ -219,28 +234,52 @@ def customize_for_version3(self, version):
# kwargs == 0 is handled by the table entry
# Should probably handle it here though.
if nargs == 0:
template = ('%c(*%c, %C)',
0, -2, (1, kwargs+1, ', '))
template = ("%c(*%c, %C)", 0, -2, (1, kwargs + 1, ", "))
else:
template = ('%c(%C, *%c, %C)',
0, (1, nargs+1, ', '),
-2, (-2-kwargs, -2, ', '))
template = (
"%c(%C, *%c, %C)",
0,
(1, nargs + 1, ", "),
-2,
(-2 - kwargs, -2, ", "),
)
self.template_engine(template, node)
self.prune()
else:
gen_function_parens_adjust(key, node)
self.default(node)
self.n_call = n_call
self.n_call = n_call
elif version < 3.2:
def n_call(node):
mapping = self._get_mapping(node)
key = node
for i in mapping[1:]:
key = key[i]
pass
gen_function_parens_adjust(key, node)
self.default(node)
self.n_call = n_call
def n_mkfunc_annotate(node):
if self.version >= 3.3 or node[-2] == 'kwargs':
# Handling EXTENDED_ARG before MAKE_FUNCTION ...
if node[-2] == "EXTENDED_ARG":
i = -1
else:
i = 0
if self.version <= 3.2:
code = node[-2 + i]
elif self.version >= 3.3 or node[-2] == "kwargs":
# LOAD_CONST code object ..
# LOAD_CONST 'x0' if >= 3.3
# EXTENDED_ARG
# MAKE_FUNCTION ..
code = node[-4]
elif node[-3] == 'expr':
code = node[-3 + i]
elif node[-3] == "expr":
code = node[-3][0]
else:
# LOAD_CONST code object ..
@@ -248,42 +287,51 @@ def customize_for_version3(self, version):
code = node[-3]
self.indent_more()
for annotate_last in range(len(node)-1, -1, -1):
if node[annotate_last] == 'annotate_tuple':
for annotate_last in range(len(node) - 1, -1, -1):
if node[annotate_last] == "annotate_tuple":
break
# FIXME: the real situation is that when derived from
# function_def_annotate the name has been filled in.
# But when derived from funcdefdeco it hasn't. Would like a better
# way to distinguish.
if self.f.getvalue()[-4:] == 'def ':
if self.f.getvalue()[-4:] == "def ":
self.write(code.attr.co_name)
# FIXME: handle and pass full annotate args
make_function3_annotate(self, node, is_lambda=False,
code_node=code, annotate_last=annotate_last)
make_function3_annotate(
self, node, is_lambda=False, code_node=code, annotate_last=annotate_last
)
if len(self.param_stack) > 1:
self.write('\n\n')
self.write("\n\n")
else:
self.write('\n\n\n')
self.write("\n\n\n")
self.indent_less()
self.prune() # stop recursing
self.prune() # stop recursing
self.n_mkfunc_annotate = n_mkfunc_annotate
TABLE_DIRECT.update({
'tryelsestmtl3': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-',
(1, 'suite_stmts_opt'),
(3, 'except_handler'),
(5, 'else_suitel') ),
})
TABLE_DIRECT.update(
{
"tryelsestmtl3": (
"%|try:\n%+%c%-%c%|else:\n%+%c%-",
(1, "suite_stmts_opt"),
(3, "except_handler"),
(5, "else_suitel"),
)
}
)
if version >= 3.4:
#######################
# Python 3.4+ Changes #
#######################
TABLE_DIRECT.update({
'LOAD_CLASSDEREF': ( '%{pattr}', ),
})
TABLE_DIRECT.update(
{
"LOAD_CLASSDEREF": ("%{pattr}",),
"yield_from": ("yield from %c", (0, "expr")),
}
)
if version >= 3.5:
customize_for_version35(self, version)
if version >= 3.6:
@@ -293,8 +341,8 @@ def customize_for_version3(self, version):
if version >= 3.8:
customize_for_version38(self, version)
pass # version >= 3.8
pass # 3.7
pass # 3.6
pass # 3.5
pass # 3.4
pass # 3.7
pass # 3.6
pass # 3.5
pass # 3.4
return
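
Much of the n_call handling for 3.2-3.4 above is about CALL_FUNCTION_VAR-style calls, where the unpacked *args tuple sits near the top of the stack and any keyword arguments come after it, so the templates have to interleave plain positional arguments, the starred argument, and keywords back into source order. A hypothetical call of that shape:

    def report(kind, *details, **options):
        return kind, details, options

    extras = ("low", "disk")
    # On 3.2-3.4 this is a CALL_FUNCTION_VAR_KW call; the templates above place
    # the positional argument first, then *extras, then the keyword arguments.
    result = report("warning", *extras, severity=2)
    assert result == ("warning", ("low", "disk"), {"severity": 2})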


@@ -19,7 +19,8 @@ from xdis.code import iscode
from xdis.util import COMPILER_FLAG_BIT
from uncompyle6.semantics.consts import (
INDENT_PER_LEVEL, TABLE_DIRECT)
from uncompyle6.semantics.helper import flatten_list
from uncompyle6.semantics.helper import (
flatten_list, gen_function_parens_adjust)
#######################
# Python 3.5+ Changes #
@@ -112,23 +113,21 @@ def customize_for_version35(self, version):
template = ('*%c)', nargs+1)
self.template_engine(template, node)
self.prune()
else:
gen_function_parens_adjust(key, node)
self.default(node)
self.n_call = n_call
def n_function_def(node):
if self.version >= 3.6:
code_node = node[0][0]
for n in node[0]:
if hasattr(n, 'attr') and iscode(n.attr):
code_node = n
break
pass
pass
else:
code_node = node[0][1]
n0 = node[0]
is_code = False
for i in list(range(len(n0)-2, -1, -1)):
code_node = n0[i]
if hasattr(code_node, 'attr') and iscode(code_node.attr):
is_code = True
break
is_code = hasattr(code_node, 'attr') and iscode(code_node.attr)
if (is_code and
(code_node.attr.co_flags & COMPILER_FLAG_BIT['COROUTINE'])):
self.template_engine(('\n\n%|async def %c\n',
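
The reworked n_function_def scans node[0] for whichever child actually carries the code object and then tests its co_flags for the COROUTINE bit, so "async def" functions get the async keyword back. The same flag test, illustrated with the standard inspect module on 3.5+ (not the walker's own code path):

    import inspect

    async def fetch():
        return 42

    # Essentially the check the walker performs via COMPILER_FLAG_BIT['COROUTINE'].
    assert fetch.__code__.co_flags & inspect.CO_COROUTINE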


@@ -60,6 +60,15 @@ def customize_for_version36(self, version):
'call_ex' : (
'%c(%p)',
(0, 'expr'), (1, 100)),
'store_annotation': (
'%[1]{pattr}: %c',
0
),
'ann_assign_init_value': (
'%|%c = %p\n',
(-1, 'store_annotation'), (0, 'expr', 200)),
'ann_assign_no_init': (
'%|%c\n', (0, 'store_annotation')),
})
@@ -77,7 +86,7 @@ def customize_for_version36(self, version):
self.call36_tuple(n)
first = 1
sep = ', *'
elif n == 'LOAD_CONST':
elif n == 'LOAD_STR':
value = self.format_pos_args(n)
self.f.write(value)
first = 1
@@ -401,7 +410,7 @@ def customize_for_version36(self, version):
self.n_except_suite_finalize = n_except_suite_finalize
def n_formatted_value(node):
if node[0] == 'LOAD_CONST':
if node[0] in ('LOAD_STR', 'LOAD_CONST'):
value = node[0].attr
if isinstance(value, tuple):
self.write(node[0].attr)
@@ -415,7 +424,7 @@ def customize_for_version36(self, version):
def n_formatted_value_attr(node):
f_conversion(node)
fmt_node = node.data[3]
if fmt_node == 'expr' and fmt_node[0] == 'LOAD_CONST':
if fmt_node == 'expr' and fmt_node[0] == 'LOAD_STR':
node.string = escape_format(fmt_node[0].attr)
else:
node.string = fmt_node
@@ -424,7 +433,7 @@ def customize_for_version36(self, version):
def f_conversion(node):
fmt_node = node.data[1]
if fmt_node == 'expr' and fmt_node[0] == 'LOAD_CONST':
if fmt_node == 'expr' and fmt_node[0] == 'LOAD_STR':
data = fmt_node[0].attr
else:
data = fmt_node.attr
@@ -482,11 +491,11 @@ def customize_for_version36(self, version):
else:
# {{ and }} in Python source-code format strings mean
# { and } respectively. But only when *not* part of a
# formatted value. However in the LOAD_CONST
# formatted value. However in the LOAD_STR
# bytecode, the escaping of the braces has been
# removed. So we need to put back the braces escaping in
# reconstructing the source.
assert expr[0] == 'LOAD_CONST'
assert expr[0] == 'LOAD_STR'
value = value.replace("{", "{{").replace("}", "}}")
# Remove leading quotes
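
The formatted_value handlers above read the literal pieces of an f-string from LOAD_STR tokens, and the brace comment matters because the compiler stores "{{" / "}}" unescaped in the constant, so the deparser has to put the doubled braces back. Hypothetical 3.6 sources exercising those paths:

    width = 10
    name = "rocky"
    # FORMAT_VALUE with a conversion and a nested format spec:
    line = f"{name!r:>{width}}"
    # Literal braces are stored as "{literal}" in the constant, so the doubled
    # braces must be restored when regenerating source.
    braced = f"{{literal}} {name}"
    assert braced == "{literal} rocky"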


@@ -424,6 +424,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
pass
self.set_pos_info(node, start, len(self.f.getvalue()))
self.prune()
n_LOAD_STR = n_LOAD_CONST
def n_exec_stmt(self, node):
"""


@@ -99,19 +99,22 @@ def strip_quotes(str):
def print_docstring(self, indent, docstring):
try:
if docstring.find('"""') == -1:
quote = '"""'
else:
quote = '"""'
if docstring.find(quote) >= 0:
if docstring.find("'''") == -1:
quote = "'''"
docstring = docstring.replace("'''", "\\'''")
except:
return False
self.write(indent)
if not PYTHON3 and not isinstance(docstring, str):
# Must be unicode in Python2
self.write('u')
docstring = repr(docstring.expandtabs())[2:-1]
elif PYTHON3 and 2.4 <= self.version <= 2.7:
try:
repr(docstring.expandtabs())[1:-1].encode("ascii")
except UnicodeEncodeError:
self.write('u')
docstring = repr(docstring.expandtabs())[1:-1]
else:
docstring = repr(docstring.expandtabs())[1:-1]
@@ -132,40 +135,42 @@ def print_docstring(self, indent, docstring):
and (docstring[-1] != '"'
or docstring[-2] == '\t')):
self.write('r') # raw string
# restore backslashes unescaped since raw
# Restore backslashes unescaped since raw
docstring = docstring.replace('\t', '\\')
else:
# Escape '"' if it's the last character, so it doesn't
# ruin the ending triple quote
if len(docstring) and docstring[-1] == '"':
docstring = docstring[:-1] + '\\"'
# Restore escaped backslashes
# Escape the last character if it is the same as the
# triple quote character.
quote1 = quote[-1]
if len(docstring) and docstring[-1] == quote1:
docstring = docstring[:-1] + '\\' + quote1
# Escape triple quote when needed
if quote == '"""':
replace_str = '\\"""'
else:
assert quote == "'''"
replace_str = "\\'''"
docstring = docstring.replace(quote, replace_str)
docstring = docstring.replace('\t', '\\\\')
# Escape triple quote when needed
if quote == '""""':
docstring = docstring.replace('"""', '\\"\\"\\"')
lines = docstring.split('\n')
calculate_indent = maxint
for line in lines[1:]:
stripped = line.lstrip()
if len(stripped) > 0:
calculate_indent = min(calculate_indent, len(line) - len(stripped))
calculate_indent = min(calculate_indent, len(lines[-1]) - len(lines[-1].lstrip()))
# Remove indentation (first line is special):
trimmed = [lines[0]]
if calculate_indent < maxint:
trimmed += [line[calculate_indent:] for line in lines[1:]]
self.write(quote)
if len(trimmed) == 0:
if len(lines) == 0:
self.println(quote)
elif len(trimmed) == 1:
self.println(trimmed[0], quote)
elif len(lines) == 1:
self.println(lines[0], quote)
else:
self.println(trimmed[0])
for line in trimmed[1:-1]:
self.println( indent, line )
self.println(indent, trimmed[-1], quote)
self.println(lines[0])
for line in lines[1:-1]:
if line:
self.println( line )
else:
self.println( "\n\n" )
pass
pass
self.println(lines[-1], quote)
return True
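
The rewritten quote selection above prefers triple double quotes, falls back to triple single quotes when the docstring itself contains the double-quote delimiter, and escapes whatever occurrences of the chosen delimiter (or a dangling trailing quote character) remain. Hypothetical docstrings hitting those branches:

    def plain():
        """Ordinary docstring, printed with triple double quotes."""

    def tricky():
        '''Contains """ in its body, so the printer switches to triple single
        quotes and escapes any remaining delimiters.'''
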
@@ -191,6 +196,26 @@ def flatten_list(node):
pass
return flat_elems
# Note: this is only used in Python > 3.0
# Should move this somewhere more specific?
def gen_function_parens_adjust(mapping_key, node):
"""If we can avoid the outer parenthesis
of a generator function, set the node key to
'call_generator' and the caller will do the default
action on that. Otherwise we do nothing.
"""
if mapping_key.kind != 'CALL_FUNCTION_1':
return
args_node = node[-2]
if args_node == 'pos_arg':
assert args_node[0] == 'expr'
n = args_node[0][0]
if n == 'generator_exp':
node.kind = 'call_generator'
pass
return
# if __name__ == '__main__':
# if PYTHON3:


@@ -1,4 +1,4 @@
# Copyright (c) 2015-2018 by Rocky Bernstein
# Copyright (c) 2015-2019 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
# This program is free software: you can redistribute it and/or modify
@@ -67,7 +67,7 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
i = -1
j = annotate_last-1
l = -len(node)
while j >= l and node[j].kind in ('annotate_arg' 'annotate_tuple'):
while j >= l and node[j].kind in ('annotate_arg', 'annotate_tuple'):
annotate_args[annotate_tup[i]] = node[j][0]
i -= 1
j -= 1
@@ -85,6 +85,12 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
annotate_argc = 0
pass
annotate_dict = {}
for name in annotate_args.keys():
n = self.traverse(annotate_args[name], indent='')
annotate_dict[name] = n
if 3.0 <= self.version <= 3.2:
lambda_index = -2
elif 3.03 <= self.version:
@@ -103,7 +109,11 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
# add defaults values to parameter names
argc = code.co_argcount
kwonlyargcount = code.co_kwonlyargcount
paramnames = list(code.co_varnames[:argc])
if kwonlyargcount > 0:
kwargs = list(code.co_varnames[argc:argc+kwonlyargcount])
try:
ast = self.build_ast(code._tokens,
@@ -129,22 +139,14 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
indent = ' ' * l
line_number = self.line_number
if code_has_star_arg(code):
self.write('*%s' % code.co_varnames[argc + kw_pairs])
argc += 1
i = len(paramnames) - len(defparams)
suffix = ''
no_paramnames = len(paramnames[:i]) == 0
for param in paramnames[:i]:
self.write(suffix, param)
suffix = ', '
if param in annotate_tuple[0].attr:
p = [x for x in annotate_tuple[0].attr].index(param)
self.write(': ')
self.preorder(node[p])
if param in annotate_dict:
self.write(': %s' % annotate_dict[param])
if (line_number != self.line_number):
suffix = ",\n" + indent
line_number = self.line_number
@@ -160,7 +162,6 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
suffix = ''
for n in node:
if n == 'pos_arg':
no_paramnames = False
self.write(suffix)
param = paramnames[i]
self.write(param)
@@ -183,60 +184,70 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
suffix = ', '
# self.println(indent, '#flags:\t', int(code.co_flags))
if kw_args + annotate_argc > 0:
if no_paramnames:
if not code_has_star_arg(code):
if argc > 0:
self.write(", *, ")
else:
self.write("*, ")
pass
else:
self.write(", ")
if code_has_star_arg(code):
star_arg = code.co_varnames[argc + kwonlyargcount]
if annotate_dict and star_arg in annotate_dict:
self.write(suffix, '*%s: %s' % (star_arg, annotate_dict[star_arg]))
else:
self.write(suffix, '*%s' % star_arg)
argc += 1
kwargs = node[0]
last = len(kwargs)-1
i = 0
for n in node[0]:
if n == 'kwarg':
if (line_number != self.line_number):
self.write("\n" + indent)
line_number = self.line_number
self.write('%s=' % n[0].pattr)
self.preorder(n[1])
if i < last:
self.write(', ')
i += 1
pass
pass
annotate_args = []
for n in node:
if n == 'annotate_arg':
annotate_args.append(n[0])
elif n == 'annotate_tuple':
t = n[0].attr
if t[-1] == 'return':
t = t[0:-1]
annotate_args = annotate_args[:-1]
pass
last = len(annotate_args) - 1
for i in range(len(annotate_args)):
self.write("%s: " % (t[i]))
self.preorder(annotate_args[i])
if i < last:
self.write(', ')
pass
pass
break
# self.println(indent, '#flags:\t', int(code.co_flags))
ends_in_comma = False
if kwonlyargcount > 0:
if not code_has_star_arg(code):
if argc > 0:
self.write(", *, ")
else:
self.write("*, ")
pass
ends_in_comma = True
else:
if argc > 0:
self.write(", ")
ends_in_comma = True
kw_args = [None] * kwonlyargcount
for n in node:
if n == 'kwargs':
n = n[0]
if n == 'kwarg':
name = eval(n[0].pattr)
idx = kwargs.index(name)
default = self.traverse(n[1], indent='')
if annotate_dict and name in annotate_dict:
kw_args[idx] = '%s: %s=%s' % (name, annotate_dict[name], default)
else:
kw_args[idx] = '%s=%s' % (name, default)
pass
pass
# handling other args
other_kw = [c == None for c in kw_args]
for i, flag in enumerate(other_kw):
if flag:
n = kwargs[i]
if n in annotate_dict:
kw_args[i] = "%s: %s" %(n, annotate_dict[n])
else:
kw_args[i] = "%s" % n
if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])
self.write(', '.join(kw_args))
ends_in_comma = False
else:
if argc == 0:
ends_in_comma = True
if code_has_star_star_arg(code):
if not ends_in_comma:
self.write(', ')
star_star_arg = code.co_varnames[argc + kwonlyargcount]
if annotate_dict and star_star_arg in annotate_dict:
self.write('**%s: %s' % (star_star_arg, annotate_dict[star_star_arg]))
else:
self.write('**%s' % star_star_arg)
if is_lambda:
self.write(": ")
@@ -473,7 +484,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
# Thank you, Python.
def build_param(ast, name, default):
def build_param(ast, name, default, annotation=None):
"""build parameters:
- handle defaults
- handle format tuple parameters
@@ -483,7 +494,10 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
else:
value = self.traverse(default, indent='')
maybe_show_tree_param_default(self.showast, name, value)
result = '%s=%s' % (name, value)
if annotation:
result = '%s: %s=%s' % (name, annotation, value)
else:
result = '%s=%s' % (name, value)
# The below can probably be removed. This is probably
# a holdover from days when LOAD_CONST erroneously
@@ -658,7 +672,11 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
# add defaults values to parameter names
argc = code.co_argcount
kwonlyargcount = code.co_kwonlyargcount
paramnames = list(scanner_code.co_varnames[:argc])
if kwonlyargcount > 0:
kwargs = list(scanner_code.co_varnames[argc:argc+kwonlyargcount])
# defaults are for last n parameters, thus reverse
paramnames.reverse();
@@ -681,21 +699,36 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
else:
kw_pairs = 0
i = len(paramnames) - len(defparams)
# build parameters
params = []
if defparams:
for i, defparam in enumerate(defparams):
params.append(build_param(ast, paramnames[i], defparam))
params.append(build_param(ast, paramnames[i], defparam,
annotate_dict.get(paramnames[i])))
params += paramnames[i+1:]
for param in paramnames[i+1:]:
if param in annotate_dict:
params.append("%s: %s" % (param, annotate_dict[param]))
else:
params.append(param)
else:
params = paramnames
for param in paramnames:
if param in annotate_dict:
params.append("%s: %s" % (param, annotate_dict[param]))
else:
params.append(param)
params.reverse() # back to correct order
if code_has_star_arg(code):
if self.version > 3.0:
params.append('*%s' % code.co_varnames[argc + kw_pairs])
star_arg = code.co_varnames[argc + kwonlyargcount]
if annotate_dict and star_arg in annotate_dict:
params.append('*%s: %s' % (star_arg, annotate_dict[star_arg]))
else:
params.append('*%s' % star_arg)
else:
params.append('*%s' % code.co_varnames[argc])
argc += 1
@@ -724,56 +757,47 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
self.write("(", ", ".join(params))
# self.println(indent, '#flags:\t', int(code.co_flags))
# FIXME: Could we remove ends_in_comma and its tests if we just
# created a parameter list and at the very end did a join on that?
# Unless careful, We might lose line breaks though.
ends_in_comma = False
if kw_args > 0:
if kwonlyargcount > 0:
if not (4 & code.co_flags):
if argc > 0:
self.write(", *, ")
else:
self.write("*, ")
pass
ends_in_comma = True
else:
self.write(", ")
ends_in_comma = True
if argc > 0:
self.write(", ")
ends_in_comma = True
# FIXME: this is not correct for 3.5 or 3.6 (which works differently)
# and 3.7?
if 3.0 <= self.version <= 3.2:
kwargs = node[0]
last = len(kwargs)-1
i = 0
for n in node[0]:
if n == 'kwarg':
self.write('%s=' % n[0].pattr)
self.preorder(n[1])
if i < last:
self.write(', ')
ends_in_comma = True
pass
else:
ends_in_comma = False
pass
i += 1
pass
pass
elif self.version <= 3.5:
# FIXME: this is not quite right for 3.5
for n in node:
if n == 'pos_arg':
continue
elif self.version >= 3.4 and not (n.kind in ('kwargs', 'no_kwargs', 'kwarg')):
continue
else:
self.preorder(n)
ends_in_comma = False
break
if 3.0 <= self.version <= 3.5:
kw_args = [None] * kwonlyargcount
kw_nodes = node[0]
if kw_nodes == "kwargs":
for n in kw_nodes:
name = eval(n[0].pattr)
default = self.traverse(n[1], indent='')
idx = kwargs.index(name)
kw_args[idx] = "%s=%s" % (name, default)
other_kw = [c == None for c in kw_args]
for i, flag in enumerate(other_kw):
if flag:
kw_args[i] = "%s" % kwargs[i]
self.write(', '.join(kw_args))
ends_in_comma = False
elif self.version >= 3.6:
# argc = node[-1].attr
# co = node[-3].attr
# argcount = co.co_argcount
# kwonlyargcount = co.co_kwonlyargcount
free_tup = annotate_dict = kw_dict = default_tup = None
free_tup = ann_dict = kw_dict = default_tup = None
fn_bits = node[-1].attr
index = -4 # Skip over:
# MAKE_FUNCTION,
@@ -783,7 +807,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
free_tup = node[index]
index -= 1
if fn_bits[-2]:
annotate_dict = node[index]
ann_dict = node[index]
index -= 1
if fn_bits[-3]:
kw_dict = node[index]
@@ -795,6 +819,8 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
kw_dict = kw_dict[0]
# FIXME: handle free_tup, annotate_dict, and default_tup
kw_args = [None] * kwonlyargcount
if kw_dict:
assert kw_dict == 'dict'
defaults = [self.traverse(n, indent='') for n in kw_dict[:-2]]
@@ -803,18 +829,42 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
sep = ''
# FIXME: possibly handle line breaks
for i, n in enumerate(names):
self.write(sep)
self.write("%s=%s" % (n, defaults[i]))
sep = ', '
ends_in_comma = False
idx = kwargs.index(n)
if annotate_dict and n in annotate_dict:
t = "%s: %s=%s" % (n, annotate_dict[n], defaults[i])
else:
t = "%s=%s" % (n, defaults[i])
kw_args[idx] = t
pass
pass
# handle others
other_kw = [c == None for c in kw_args]
for i, flag in enumerate(other_kw):
if flag:
n = kwargs[i]
if ann_dict and n in annotate_dict:
kw_args[i] = "%s: %s" %(n, annotate_dict[n])
else:
kw_args[i] = "%s" % n
self.write(', '.join(kw_args))
ends_in_comma = False
pass
else:
if argc == 0:
ends_in_comma = True
if code_has_star_star_arg(code):
if argc > 0 and not ends_in_comma:
if not ends_in_comma:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])
star_star_arg = code.co_varnames[argc + kwonlyargcount]
if annotate_dict and star_star_arg in annotate_dict:
self.write('**%s: %s' % (star_star_arg, annotate_dict[star_star_arg]))
else:
self.write('**%s' % star_star_arg)
if is_lambda:
self.write(": ")
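
Most of the make_function changes are about rebuilding full 3.x signatures: annotate_dict carries per-parameter annotations, kwargs holds the keyword-only names sliced out of co_varnames, and the * and ** parameters may themselves be annotated. A hypothetical signature touching all of those pieces:

    def configure(host: str, port: int = 8080, *extra: str,
                  retries: int = 3, verbose=False, **options: bool) -> None:
        # Annotated positionals (one with a default), an annotated *args,
        # keyword-only parameters with and without annotations and defaults,
        # and an annotated **kwargs: the shapes the rebuilt parameter list
        # has to reproduce in order.
        return None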


@@ -26,7 +26,7 @@ Upper levels of the grammar is a more-or-less conventional grammar for
Python.
"""
# The below is a bit long, but still it is somehwat abbreviated.
# The below is a bit long, but still it is somewhat abbreviated.
# See https://github.com/rocky/python-uncompyle6/wiki/Table-driven-semantic-actions.
# for a more complete explanation, nicely marked up and with examples.
#
@@ -363,7 +363,10 @@ class SourceWalker(GenericASTTraversal, object):
def write(self, *data):
if (len(data) == 0) or (len(data) == 1 and data[0] == ''):
return
out = ''.join((str(j) for j in data))
if not PYTHON3:
out = ''.join((unicode(j) for j in data))
else:
out = ''.join((str(j) for j in data))
n = 0
for i in out:
if i == '\n':
@@ -607,6 +610,11 @@ class SourceWalker(GenericASTTraversal, object):
else:
self.write(repr(data))
else:
if not PYTHON3:
try:
repr(data).encode("ascii")
except UnicodeEncodeError:
self.write('u')
self.write(repr(data))
# LOAD_CONST is a terminal, so stop processing/recursing early
self.prune()
@@ -638,15 +646,48 @@ class SourceWalker(GenericASTTraversal, object):
self.println()
self.prune() # stop recursing
# preprocess is used for handling chains of
# if elif elif
def n_ifelsestmt(self, node, preprocess=False):
"""
Here we turn:
if ...
else
if ..
into:
if ..
elif ...
[else ...]
where appropriate
"""
else_suite = node[3]
n = else_suite[0]
old_stmts = None
if len(n) == 1 == len(n[0]) and n[0] == '_stmts':
n = n[0][0][0]
elif n[0].kind in ('lastc_stmt', 'lastl_stmt'):
if len(n) == 1 == len(n[0]) and n[0] == 'stmt':
n = n[0][0]
elif n[0].kind in ('lastc_stmt', 'lastl_stmt'):
n = n[0]
if n[0].kind in ('ifstmt', 'iflaststmt', 'iflaststmtl', 'ifelsestmtl', 'ifelsestmtc'):
# This seems needed for Python 2.5-2.7
n = n[0]
pass
pass
elif ( len(n) > 1 and 1 == len(n[0]) and n[0] == 'stmt'
and n[1].kind == "stmt" ):
else_suite_stmts = n[0]
if else_suite_stmts[0].kind not in ('ifstmt', 'iflaststmt', 'ifelsestmtl'):
if not preprocess:
self.default(node)
return
old_stmts = n
n = else_suite_stmts[0]
else:
if not preprocess:
self.default(node)
@@ -666,6 +707,18 @@ class SourceWalker(GenericASTTraversal, object):
elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
n.kind = 'elifelsestmt'
if not preprocess:
if old_stmts:
if n.kind == "elifstmt":
trailing_else = SyntaxTree("stmts", old_stmts[1:])
# We use elifelsestmtr because it has 3 nodes
elifelse_stmt = SyntaxTree(
'elifelsestmtr', [n[0], n[1], trailing_else])
node[3] = elifelse_stmt
pass
else:
# Other cases for n.kind may happen here
return
pass
self.default(node)
n_ifelsestmtc = n_ifelsestmtl = n_ifelsestmt
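
n_ifelsestmt above rewrites an else-suite whose first statement is itself an if/else into the elif* nonterminals, and the old_stmts bookkeeping wraps any statements that trail the nested if into a synthesized else. The kind of nesting being flattened back into a chain (hypothetical):

    def sign(n):
        # Compiled as nested if/else; without the transformation this would be
        # printed as an "else:" block containing another "if".
        if n > 0:
            return "positive"
        elif n < 0:
            return "negative"
        else:
            return "zero"
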
@@ -1100,6 +1153,9 @@ class SourceWalker(GenericASTTraversal, object):
comp_store = ast[3]
have_not = False
# Iterate to find the innermost store
# We'll come back to the list iteration below.
while n in ('list_iter', 'comp_iter'):
# iterate one nesting deeper
if self.version == 3.0 and len(n) == 3:
@@ -1109,7 +1165,7 @@ class SourceWalker(GenericASTTraversal, object):
n = n[0]
if n in ('list_for', 'comp_for'):
if n[2] == 'store':
if n[2] == 'store' and not store:
store = n[2]
n = n[3]
elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
@@ -1153,11 +1209,12 @@ class SourceWalker(GenericASTTraversal, object):
self.write(' in ')
self.preorder(node[-3])
# Here is where we handle nested list iterations.
if ast == 'list_comp' and self.version != 3.0:
list_iter = ast[1]
assert list_iter == 'list_iter'
if list_iter == 'list_for':
self.preorder(list_iter[3])
if list_iter[0] == 'list_for':
self.preorder(list_iter[0][3])
self.prec = p
return
pass
@@ -1424,9 +1481,7 @@ class SourceWalker(GenericASTTraversal, object):
n = len(node) - 1
if node.kind != 'expr':
if node == 'kwarg':
self.write('(')
self.template_engine(('%[0]{pattr}=%c', 1), node)
self.write(')')
self.template_engine(('(%[0]{attr}=%c)', 1), node)
return
kwargs = None
@@ -1772,9 +1827,15 @@ class SourceWalker(GenericASTTraversal, object):
self.write(', ')
self.prune()
return
for n in node[1:]:
if n[0].kind == 'unpack':
n[0].kind = 'unpack_w_parens'
# In Python 2.4, unpack is used in (a, b, c) of:
# except RuntimeError, (a, b, c):
if self.version < 2.7:
node.kind = 'unpack_w_parens'
self.default(node)
n_unpack_w_parens = n_unpack
@@ -2097,6 +2158,7 @@ class SourceWalker(GenericASTTraversal, object):
except:
pass
have_qualname = False
if self.version < 3.0:
# Should we ditch this in favor of the "else" case?
@@ -2112,7 +2174,7 @@ class SourceWalker(GenericASTTraversal, object):
# which are not simple classes like the < 3 case.
try:
if (first_stmt[0] == 'assign' and
first_stmt[0][0][0] == 'LOAD_CONST' and
first_stmt[0][0][0] == 'LOAD_STR' and
first_stmt[0][1] == 'store' and
first_stmt[0][1][0] == Token('STORE_NAME', pattr='__qualname__')):
have_qualname = True
@@ -2260,22 +2322,6 @@ DEFAULT_DEBUG_OPTS = {
'grammar': False
}
# This interface is deprecated. Use simpler code_deparse.
def deparse_code(version, co, out=sys.stdout, showasm=None, showast=False,
showgrammar=False, code_objects={}, compile_mode='exec',
is_pypy=IS_PYPY, walker=SourceWalker):
debug_opts = {
'asm': showasm,
'ast': showast,
'grammar': showgrammar
}
return code_deparse(co, out,
version=version,
debug_opts=debug_opts,
code_objects=code_objects,
compile_mode=compile_mode,
is_pypy=is_pypy, walker=walker)
def code_deparse(co, out=sys.stdout, version=None, debug_opts=DEFAULT_DEBUG_OPTS,
code_objects={}, compile_mode='exec', is_pypy=IS_PYPY, walker=SourceWalker):
"""
@@ -2323,13 +2369,28 @@ def code_deparse(co, out=sys.stdout, version=None, debug_opts=DEFAULT_DEBUG_OPTS
assert not nonlocals
if version >= 3.0:
load_op = 'LOAD_STR'
else:
load_op = 'LOAD_CONST'
# convert leading '__doc__ = "..." into doc string
try:
if deparsed.ast[0][0] == ASSIGN_DOC_STRING(co.co_consts[0]):
stmts = deparsed.ast
first_stmt = stmts[0][0]
if version >= 3.6:
if first_stmt[0] == 'SETUP_ANNOTATIONS':
del stmts[0]
assert stmts[0] == 'sstmt'
# Nuke sstmt
first_stmt = stmts[0][0]
pass
pass
if first_stmt == ASSIGN_DOC_STRING(co.co_consts[0], load_op):
print_docstring(deparsed, '', co.co_consts[0])
del deparsed.ast[0]
if deparsed.ast[-1] == RETURN_NONE:
deparsed.ast.pop() # remove last node
del stmts[0]
if stmts[-1] == RETURN_NONE:
stmts.pop() # remove last node
# todo: if empty, add 'pass'
except:
pass
@@ -2361,7 +2422,7 @@ def deparse_code2str(code, out=sys.stdout, version=None,
"""Return the deparsed text for a Python code object. `out` is where any intermediate
output for assembly or tree output will be sent.
"""
return deparse_code(version, code, out, showasm=debug_opts.get('asm', None),
return code_deparse(code, out, version, showasm=debug_opts.get('asm', None),
showast=debug_opts.get('tree', None),
showgrammar=debug_opts.get('grammar', None), code_objects=code_objects,
compile_mode=compile_mode, is_pypy=is_pypy, walker=walker).text
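
With the deprecated deparse_code wrapper gone, code_deparse (and deparse_code2str, which just returns its .text) is the entry point. A minimal usage sketch, assuming this module is importable as uncompyle6.semantics.pysource and that version=None means "bytecode of the running interpreter":

    import sys
    from uncompyle6.semantics.pysource import code_deparse

    co = compile("x = 1\nprint(x + 1)\n", "<example>", "exec")
    # Writes the reconstructed source to "out" and returns the walker; the
    # same text is also available as deparsed.text.
    deparsed = code_deparse(co, out=sys.stdout)
    print(deparsed.text)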


@@ -12,4 +12,4 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='3.3.3' # noqa
VERSION="3.3.5" # noqa