Compare commits


77 Commits
3.3.4 ... 3.4.1

Author SHA1 Message Date
rocky
f0f9676f52 update news 2019-10-02 13:50:02 -04:00
rocky
be610aa6b3 Bump min xdis version...
And testing versions
2019-10-02 13:46:56 -04:00
rocky
1494bb2049 Bump test python versions 2019-10-02 10:59:59 -04:00
rocky
d62dc3daac Get ready for release 3.4.1 2019-10-02 10:45:01 -04:00
rocky
5ad51707e3 Wasn't handling 3-arg %p in fragments.py...
and also fielding errors in code_deparse()
2019-10-02 10:29:49 -04:00
rocky
f28c255804 Add downloads per month 2019-09-25 16:00:57 -04:00
rocky
315965300f Note assert{,2}not is transformation only 2019-09-23 08:32:46 -04:00
rocky
9bd85fe5a0 Correct assert{,2} transforms
Fixes #289
2019-09-23 08:26:16 -04:00
rocky
c6e3168c31 Update an old not-ever-used config 2019-08-24 09:26:41 -04:00
rocky
7969b19c2b Reinstate Generator rule accidentally removed...
I removed the rule when writing how issue #283 was fixed. What was
supposed to be a temporary change, I accidentally committed.
2019-08-24 08:05:50 -04:00
rocky
f8bfde4a8e Get ready for release 3.4.0 2019-08-24 07:58:42 -04:00
R. Bernstein
e2b309fa30 Merge pull request #286 from rocky/tree-transform
Add tree transformation phase...
2019-08-24 06:58:40 -04:00
rocky
1ebfde6927 Add tree transformation phase...
if ... else if ... ->  if ... elif ..
if .. : raise AssertionError -> assert

Add options --tree=before --tree=after  -T expanded to include this

This code ported from decompyle3. x0ret did all the heavy lifting.
2019-08-21 09:00:09 -04:00
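To make the transform above concrete, here is a source-level sketch (illustrative only, not the transformer's own code): the decompiler sees the first form, and after the tree-transformation phase it emits the second.

    n = 3

    # Decompiled control flow before the transform:
    if not isinstance(n, int):
        raise AssertionError

    # What the tree-transformation phase turns it into:
    assert isinstance(n, int)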
rocky
73619de3f5 CircleCI 4th try 2019-08-21 08:57:29 -04:00
rocky
600cd0b8ee CircleCi 3rd try 2019-08-21 08:55:35 -04:00
rocky
90a2ed2c9e CircleCI 2nd try 2019-08-21 08:50:38 -04:00
rocky
8728cb6a99 Fix for recent CircleCI breakage 2019-08-21 08:43:38 -04:00
rocky
8daedaf063 Run black over validate.py 2019-08-20 18:23:52 -04:00
rocky
4a4a20995e Update runtests.sh...
for 2.6 and 2.7. More work is needed. Start count of skipped tests.
2019-08-12 11:24:07 -04:00
rocky
c923ce9afe Tweak runtests.sh so it works on 2.6 2019-08-11 22:00:28 -04:00
R. Bernstein
88901c6901 Merge pull request #285 from trengri/master
Test case for #284
2019-08-11 19:45:22 -04:00
Grigory Trenin
31f7d14eab Test case for #284 2019-08-12 02:09:51 +03:00
rocky
388d1da970 Fixes #284 2nd problem...
while 1 bug for 2.6
2019-08-10 20:39:02 -04:00
rocky
66294d54f7 Comment what's up in last change. 2019-08-10 18:46:56 -04:00
rocky
55783c2712 Fixes #284 2019-08-08 21:48:12 -04:00
rocky
fd580f3c60 Python 2.6 generator rule with 'and' conditional
Fixes #283.
2019-08-05 10:36:08 -04:00
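For context on the rule named above, one plausible shape of source it covers is a generator expression whose condition uses `and` (an illustrative example, not taken from the changeset):

    pairs = [(1, 2), (3, 0), (0, 4)]
    # Generator expression with an `and` in its `if` clause:
    g = (x + y for x, y in pairs if x and y)
    print(list(g))  # [3]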
rocky
a781006ff1 Need spark version 1.8.9 for Python 3.7.4 2019-07-28 21:11:17 -04:00
rocky
0be3d5a530 Bump min spark-parser version
spark-parser 1.8.8 handles Python 3.7.4 and 3.8.

Fixes #281
2019-07-28 20:49:37 -04:00
rocky
bc3cd0102b Use updated python 3.8 from xdis 2019-07-25 07:44:49 -04:00
rocky
6e6d590268 Reformat (via black) make_function.py 2019-07-18 11:01:47 -04:00
rocky
71bdc8dc6a Merge branch 'master' of github.com:rocky/python-uncompyle6 2019-07-18 05:27:05 -04:00
rocky
71735ca7ef Note that CALL_METHOD is used in 3.7+
and not just Pypy anymore
2019-07-18 05:26:33 -04:00
R. Bernstein
9f121ef00c Merge pull request #279 from rocky/build-list-unpack
Fix issue in BUILD_LIST_UNPACK and variants
2019-07-13 17:54:48 -04:00
x0ret
2e01f42f65 Fixes #278 2019-07-14 01:12:42 +04:30
rocky
3f9a862277 Remove dup semantic rule 2019-07-05 06:24:14 -04:00
rocky
cc531cf90a Fix call to code_deparse call in deparse_code2str
Fixes #275
2019-07-05 06:06:14 -04:00
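A minimal sketch of the API this commit touches, modeled on the `test_deparse_code2str` test added later in this diff (the `debug_opts` values are the ones used there; `sample` is a hypothetical function):

    from uncompyle6.semantics.pysource import deparse_code2str

    def sample():
        "A tiny function to round-trip."
        return 42

    # Decompile the code object back to source text; "asm": "after" and
    # "tree": True also dump the disassembly and the parse tree.
    source = deparse_code2str(sample.__code__,
                              debug_opts={"asm": "after", "tree": True})
    print(source)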
rocky
c7124ad9ca RsT :code: before backticks 2019-07-04 10:01:36 -04:00
rocky
44a4aab0a7 Use black to reformat some files 2019-07-04 09:56:50 -04:00
rocky
2a52982d52 Go over NEWS for markdown and spelling 2019-07-04 05:15:56 -04:00
R. Bernstein
56e5e8dcef Add repology package status 2019-07-04 04:25:24 -04:00
rocky
ecab7d7b09 Python 2.4 unpack rule needs adjusting for exceptions 2019-07-03 20:08:23 -04:00
rocky
e39a902e56 Get ready for release 3.3.5 2019-07-03 19:37:29 -04:00
rocky
e2914ed552 More except_cond futzing 2019-07-03 19:26:36 -04:00
rocky
f425db33b7 except_cond3 needs to be in 2.x 2019-07-03 19:16:09 -04:00
rocky
68c5b2338f Clearer 3.3 "yield_from" semantic handling 2019-07-01 12:54:38 -04:00
rocky
e55a0410c9 weak-verify -> syntax-verify. More bytecode tests 2019-07-01 10:23:43 -04:00
R. Bernstein
0fe8961418 Merge pull request #269 from rocky/if-elif-else-more
If elif else more
2019-07-01 09:52:11 -04:00
R. Bernstein
8cd331a32b Merge pull request #273 from rocky/py3-annotation-args
Fix handling py3 annotation args + defparam comma issue
2019-06-30 18:54:58 -04:00
rocky
4c76931807 Update tests related to branch 2019-06-30 18:20:40 -04:00
x0ret
7b7f794913 Fix handling py3 annotation args + defparam comma issue 2019-07-01 01:28:32 +04:30
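The kind of Python 3 signatures this fix targets, taken from the annotation test file added in this changeset (bodies adjusted slightly so the snippet runs on its own):

    def test9(arg_1=55, *varargs: int, y=5, **kwargs):
        return arg_1, varargs, y, kwargs

    def test10(args_1, b: 'annotating b', c: int) -> float:
        return 5.4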
rocky
50e46531ce Adjust 3.x grammar rules to include annotate args 2019-06-29 23:33:21 -04:00
R. Bernstein
67ef34977f Merge pull request #270 from rocky/py3-star-args
Fix issue in 3.x star args function signatures
2019-06-29 15:57:02 -04:00
rocky
32c7b8f23d Add tests for x0ret's recent vararg fixes 2019-06-29 15:50:47 -04:00
x0ret
2f06d1eeb0 Fix issue in 3.x star args function signatures 2019-06-29 21:53:02 +04:30
rocky
999f1fb0f9 Mostly x0ret's while(1)/if fixes ..
plus a potential test
2019-06-29 07:01:45 -04:00
x0ret
76eef9a149 Handle if elif else case for 3.5 2019-06-29 06:57:23 -04:00
rocky
c8b945fb56 Handling if elif else more 2019-06-29 06:57:23 -04:00
rocky
a1e7c16dbe Fix bugs introduced by last commit 2019-06-29 06:19:02 -04:00
rocky
35f14e4357 Small assert message change 2019-06-29 05:08:23 -04:00
rocky
49d1a50354 Merge branch 'master' of github.com:rocky/python-uncompyle6 2019-06-29 04:59:46 -04:00
rocky
0dc19a8fdd Correct 3.4 "yield from" semantic action bug 2019-06-29 04:59:03 -04:00
R. Bernstein
f6aa8b2baf Merge pull request #264 from rocky/ifelif-27
if/elif for 2.5-2.7
2019-06-24 05:50:53 -04:00
rocky
887a006849 if/elif for 2.5-2.7
Specifically simple_source/03_if_elif.py
2019-06-23 21:29:15 -04:00
rocky
e26c7407a0 Small changes to document some of the complexity. 2019-06-23 20:00:00 -04:00
R. Bernstein
69823af553 Merge pull request #262 from rocky/ifelif
reinstate some elif's
2019-06-23 17:27:57 -04:00
x0ret
e96498eaf0 Adjust ifelsestmtr grammar 2019-06-24 01:28:33 +04:30
rocky
9d6d6a355d Start to reinstate elif's 2019-06-21 07:13:05 -04:00
R. Bernstein
04c53c1086 Merge pull request #261 from rocky/load-code
LOAD_CONST -> LOAD_CODE where appropriate
2019-06-21 06:34:43 -04:00
rocky
96866f94a7 Adjust grammar checker to ignore LOAD_CODE 2019-06-19 15:54:16 -04:00
rocky
d371839c99 A few more LOAD_CONST->LOAD_CODE 2019-06-19 15:38:58 -04:00
rocky
24afe072b7 LOAD_CONST -> LOAD_CODE where appropriate 2019-06-19 14:43:07 -04:00
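Background for the `LOAD_CONST` -> `LOAD_CODE` commits above (an illustrative note, not code from the changeset): CPython pushes a nested code object with an ordinary `LOAD_CONST`; uncompyle6 retokenizes that instruction as `LOAD_CODE` so the grammar can tell code-object constants from other constants. The function names below are hypothetical.

    import dis

    def outer():
        def inner():
            return 1
        return inner

    # The disassembly shows LOAD_CONST <code object inner ...> followed by
    # MAKE_FUNCTION; it is that LOAD_CONST which is renamed to LOAD_CODE
    # in uncompyle6's token stream.
    dis.dis(outer)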
rocky
e2d7f01298 Handle 2-arg asserts in 3.6+ish
Changed files have also been reformatted via the blacken formatter
2019-06-18 22:09:16 -04:00
rocky
b39112b601 One more deparse_code removal 2019-06-16 22:30:56 -04:00
rocky
20b513fc81 Merge branch 'master' of github.com:rocky/python-uncompyle6 2019-06-16 21:58:23 -04:00
rocky
d369017122 remove deprecated deparse_code 2019-06-16 21:57:56 -04:00
rocky
6675ea2cd0 Control flow yet again 2019-06-15 10:09:13 -04:00
rocky
4b82806d6c Flow control bites again.
See related appveyor https://ci.appveyor.com/project/rocky/python-decompile3/builds/25301153/job/x0we0dpgb3apgk1v
2019-06-15 07:18:30 -04:00
72 changed files with 3665 additions and 2206 deletions


@@ -27,9 +27,9 @@ jobs:
# VM instead of a container) see https://circleci.com/docs/2.0/executor-types/
# To see the list of pre-built images that CircleCI provides for most common languages see
# https://circleci.com/docs/2.0/circleci-images/
docker:
- image: circleci/build-image:ubuntu-14.04-XXL-upstart-1189-5614f37
command: /sbin/init
machine:
python:
version: 2.7.14
steps:
# Machine Setup
# If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
@@ -42,25 +42,24 @@ jobs:
# This is based on your 1.0 configuration file or project settings
- run:
working_directory: ~/rocky/python-uncompyle6
command: pyenv local 2.7.11 && pyenv rehash && pip install virtualenv && pip install nose && pip install pep8 && pip install six && pyenv rehash
command: pip install virtualenv && pip install nose && pip install pep8 && pyenv rehash
# Dependencies
# This would typically go in either a build or a build-and-test job when using workflows
# Restore the dependency cache
- restore_cache:
keys:
# This branch if available
- v1-dep-{{ .Branch }}-
# Default branch if not
- v1-dep-master-
# Any branch if there are none on the default branch - this should be unnecessary if you have your default branch configured correctly
- v1-dep-
- v2-dependencies-{{ .Branch }}-
# fallback to using the latest cache if no exact match is found
- v2-dependencies-
# This is based on your 1.0 configuration file or project settings
- run: pip install --upgrade setuptools
- run: pip install -e .
- run: pip install pytest==3.2.5 hypothesis==3.0.0
- run: pip install -r requirements-dev.txt
# Save dependency cache
- save_cache:
key: v1-dep-{{ .Branch }}-{{ epoch }}
key: v2-dependencies-{{ .Branch }}-{{ epoch }}
paths:
# This is a broad list of cache paths to include many possible development environments
# You can probably delete some of these entries
@@ -69,14 +68,13 @@ jobs:
- ~/.m2
- ~/.ivy2
- ~/.bundle
- ~/.go_workspace
- ~/.gradle
- ~/.cache/bower
# Test
# This would typically be a build job when using workflows, possibly combined with build
# This is based on your 1.0 configuration file or project settings
- run: python ./setup.py develop && make check-2.7
- run: cd ./test/stdlib && pyenv local 2.7.11 && bash ./runtests.sh 'test_[p-z]*.py'
- run: cd ./test/stdlib && bash ./runtests.sh 'test_[p-z]*.py'
# Teardown
# If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
# Save test results

NEWS.md

@@ -1,15 +1,58 @@
3.3.4 2019-05-19 Fleetwood at 65
3.4.1 2019-10-02
================
- Correct assert{,2} transforms Fixes #289
- Fragment parsing fixes:
* Wasn't handling 3-arg %p
* fielding error in code_deparse()
- Use newer xdis to better track Python 3.8.0
3.4.0 2019-08-24 Totoro
=======================
The main change is to add a tree-transformation phase. This simplifies the
code a little and allows us to turn `if ...: raise AssertionError` into
`assert`, and many `if ..: else if ...` into `if ... elif ..`
Use options `--show=before` and `--show=after` to see the tree before and after the transformation phase.
Most of the heavy lifting for this was done by x0ret.
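A source-level illustration of the `elif` collapse described above (a sketch of the output difference, not the transformer code; the variables are hypothetical):

    a, b = False, True

    # Before the transform, a decompiled chain reads as nested if/else:
    if a:
        x = 1
    else:
        if b:
            x = 2
        else:
            x = 3

    # After the transform, the same tree is emitted as:
    if a:
        x = 1
    elif b:
        x = 2
    else:
        x = 3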
Other changes:
- Fix issue #275, #283 (process to fix this bug is documented on wiki), #284
- blacken more code
- CircleCI adjustments for a changing CircleCi
- Require more recent `xdis` for Python 3.8
- Fix bugs in code using `BUILD_LIST_UNPACK` and variants
3.3.5 2019-07-03 Pre Independence Day
=====================================
Again, most of the work in this release is thanks to x0ret.
- Handle annotation arguments in Python 3.x
- Fix _vararg_ and function signatures in 3.x
- Some 3.x < 3.6 `while 1`/`if` fixes &mdash; others remain
- Start reinstating `else if` -> `elif`
- `LOAD_CONST` -> `LOAD_CODE` where appropriate
- option `--weak-verify` is now `--syntax-verify`
- code cleanups, start using [black](https://github.com/python/black) to reformat text
3.3.4 2019-06-19 Fleetwood at 65
================================
Most of the work in this release is thanks to x0ret.
- Major work was done by x0ret to correct function signatures and include annotation types
- Handle Python 3.6 STORE_ANNOTATION [#58](https://github.com/rocky/python-uncompyle6/issues/58)
- Handle Python 3.6 `STORE_ANNOTATION` [#58](https://github.com/rocky/python-uncompyle6/issues/58)
- Friendlier assembly output
- `LOAD_CONST` replaced by `LOAD_STR` where appropriate to simplify parsing and improve clarity
- remove unneeded parentheses in a generator expression when it is the single argument to the function (see the sketch after this list) [#247](https://github.com/rocky/python-uncompyle6/issues/246)
- Bug in noting an async function [#246](https://github.com/rocky/python-uncompyle6/issues/246)
- Handle unicode docstrings and fix docstring bugs [#241](https://github.com/rocky/python-uncompyle6/issues/241)
- Handle Unicode docstrings and fix docstring bugs [#241](https://github.com/rocky/python-uncompyle6/issues/241)
- Add short option -T as an alternate for --tree+
- Some grammar cleanup
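Sketch for the generator-expression bullet above (illustrative): when the generator expression is the sole argument of a call, the extra parentheses are redundant and are no longer emitted.

    items = [1, 2, 3]
    print(sum((x * x for x in items)))  # older output, with redundant parentheses
    print(sum(x * x for x in items))    # current output; both print 14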
@@ -53,7 +96,7 @@ some span back as far as 2.x
But as before, many more remain in the 3.7 and 3.8 range which will
get addressed in future releases
Pypy 3.6 support was started. Pypy 3.x detection fixed (via xdis)
Pypy 3.6 support was started. Pypy 3.x detection fixed (via `xdis`)
3.3.1 2019-04-19 Good Friday
==========================
@@ -137,7 +180,7 @@ Pull Requests
- Add rudimentary 1.4 support (still a bit buggy)
- add --tree+ option to show formatting rule, when it is constant
- Python 2.7.15candidate1 support (via xdis)
- Python 2.7.15candidate1 support (via `xdis`)
- bug fixes, especially for 3.7 (but 2.7 and 3.6 and others as well)
3.1.3 2018-04-16
@@ -242,24 +285,24 @@ function calls and definitions.
2.15.1 2018-01-27
=====================
- Add --linemap option to give line correspondences
- Add `--linemap` option to give line correspondences
between original source lines and reconstructed line sources.
It is far from perfect, but it is a start
- Add a new class of tests: tests which when decompiled check themselves
- Split off Python version semantic action customizations into its own file
- Fix 2.7 bug in ifelse loop statement
- Handle 3.6+ EXTENDED_ARGs for POP_JUMP_IF... instructions
- Correct 3.6+ calls with kwargs
- Fix 2.7 bug in `if`/`else` loop statement
- Handle 3.6+ `EXTENDED_ARG`s for `POP_JUMP_IF..` instructions
- Correct 3.6+ calls with `kwargs`
- Describe the difficulty of 3.6 in README
2.14.3 2018-01-19
=====================
- Fix bug in 3.5+ await stmt
- Fix bug in 3.5+ `await` statement
- Better version to magic handling; handle 3.5.2 .. 3.5.4 versions
- Improve/correct test_pyenvlib.py status messages
- Fix some 2.7 and 2.6 parser bugs
- Fix whilelse parsing bugs
- Fix `whilelse` parsing bugs
- Correct 2.5- decorator parsing
- grammar for decorators matches AST a little more
- better tests in setup.py for running the right version of Python
@@ -270,15 +313,15 @@ function calls and definitions.
Decompilation bug fixes, mostly 3.6 and pre 2.7
- 3.6 FUNCTION_EX (somewhat)
- 3.6 FUNCTION_EX_KW fixes
- 3.6 MAKE_FUNCTION fixes
- correct 3.5 CALL_FUNCTION_VAR
- 3.6 `FUNCTION_EX` (somewhat)
- 3.6 `FUNCTION_EX_KW` fixes
- 3.6 `MAKE_FUNCTION` fixes
- correct 3.5 `CALL_FUNCTION_VAR`
- stronger 3.x "while 1" testing
- Fix bug in if's with "pass" bodies. Fixes #104
- try/else and try/finally fixes on 2.6-
- limit pypy customization to pypy
- Add addr fields in COME_FROMS
- Add addr fields in `COME_FROM`S
- Allow use of full instructions in parser reduction routines
- Reduce grammar in Python 3 by specialization more to specific
Python versions
@@ -287,11 +330,11 @@ Decompilation bug fixes, mostly 3.6 and pre 2.7
2.14.1 2017-12-10 Dr. Gecko
===================================
- Many decompilation bugfixes
- Many decompilation bug fixes
- Grammar rule reduction and version isolation
- Match higher-level nonterminal names more closely
with Python AST
- Start automated Python stdlib testing - full round trip
- Start automated Python _stdlib_ testing &mdash; full round trip
2.14.0 2017-11-26 johnnybamazing
=========================================
@@ -300,7 +343,7 @@ Decompilation bug fixes, mostly 3.6 and pre 2.7
and remove used grammar rules
- Fix a number of bytecode decompile problems
(many more remain)
- Add stdlib/runtests.sh for even more rigorous testing
- Add `stdlib/runtests.sh` for even more rigorous testing
2.13.3 2017-11-13
=====================
@@ -316,16 +359,16 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
rather than trying to parse the bytecode array. This has largely been done for versions 3.x;
3.0 custom mangling code has been reduced;
some 2.x conversion has been done, but more is desired. This makes it possible to...
- Handle EXTENDED_ARGS better. While relevant to all Python versions it is most noticeable in
version 3.6+ where in switching to wordcodes the size of operands has been reduced from 2**16
to 2**8. JUMP instruction then often need EXTENDED_ARGS.
- Handle `EXTENDED_ARG` better. While relevant to all Python versions, it is most noticeable in
version 3.6+ where, in switching to wordcodes, the size of operands has been reduced from 2^16
to 2^8. `JUMP` instructions then often need `EXTENDED_ARG` (see the worked example after this list).
- Refactor find_jump_targets() to work off of instructions rather than the bytecode array.
- use --weak-verify more and additional fuzzing on verify()
- use `--weak-verify` more and additional fuzzing on verify()
- fragment parser now ignores errors in nested function definitions; a parameter was
added to assist here. Ignoring errors may be okay because the fragment parser often just needs,
well, *fragments*.
- Distinguish RETURN_VALUE from RETURN_END_IF in exception bodies better in 3.6
- bug in 3.x language changes: import queue via import Queue
- Distinguish `RETURN_VALUE` from `RETURN_END_IF` in exception bodies better in 3.6
- bug in 3.x language changes: import queue via `import Queue`
- reinstate some bytecode tests since decompiling has gotten better
- Revise how to report a bug
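Worked example for the `EXTENDED_ARG` bullet above (assuming CPython's 3.6+ wordcode encoding): each instruction carries a single argument byte, so an argument such as 300 needs an `EXTENDED_ARG` prefix contributing the high 8 bits.

    arg = 300
    ext, low = divmod(arg, 256)
    print(ext, low)          # 1 44  -> EXTENDED_ARG 1, then the opcode with arg 44
    print((ext << 8) | low)  # 300   -> how the interpreter reassembles the value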
@@ -345,12 +388,12 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
- Fixes in deparsing lambda expressions
- Improve table-semantics descriptions
- Document hacky customize arg count better (until we can remove it)
- Update to use xdis 3.7.0 or greater
- Update to use `xdis` 3.7.0 or greater
2.12.0 2017-09-26
=====================
- Use xdis 3.6.0 or greater now
- Use `xdis` 3.6.0 or greater now
- Small semantic table cleanups
- Python 3.4's terms named a little better
- Slightly more Python 3.7, but still failing a lot
@@ -364,13 +407,13 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
2.11.4 2017-08-15
=====================
* scanner and parser now allow 3-part version string lookups,
* scanner and parser now allow 3-part version string look ups,
e.g. 2.7.1 We allow a float here, but if passed a string like '2.7'. or
* unpin 3.5.1. xdis 3.5.4 has been release and fixes the problems we had. Use that.
* some routines here moved to xdis. Use the xdis version
* README.rst: Link typo Name is trepan2 now not trepan
* xdis-forced change adjust for COMPARE_OP "is-not" in
semanatic routines. We need "is not".
* unpin 3.5.1. `xdis` 3.5.4 has been released and fixes the problems we had. Use that.
* some routines here moved to `xdis`. Use the `xdis` version
* `README.rst`: Link typo Name is _trepan2_ now not _trepan_
* xdis-forced change adjust for `COMPARE_OP` "is-not" in
semantic routines. We need "is not".
* Some PyPy tolerance in validate testing.
* Some pyston tolerance
@@ -380,15 +423,15 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
Very minor changes
- RsT doc fixes and updates
- use newer xdis, but not too new; 3.5.2 breaks uncompyle6
- use xdis opcode sets
- xdis "exception match" is now "exception-match"
- use newer `xdis`, but not too new; 3.5.2 breaks uncompyle6
- use `xdis` opcode sets
- `xdis` "exception match" is now "exception-match"
2.11.2 2017-07-09
=====================
- Start supporting Pypy 3.5 (5.7.1-beta)
- use xdis 3.5.0's opcode sets and require xdis 3.5.0
- use `xdis` 3.5.0's opcode sets and require `xdis` 3.5.0
- Correct some Python 2.4-2.6 loop detection
- guard against badly formatted bytecode
@@ -396,55 +439,55 @@ Very minor changes
=====================
- Python 3.x annotation and function signature fixes
- Bump xdis version
- Small pysource bug fixes
- Bump `xdis` version
- Small `pysource.py` bug fixes
2.11.0 2017-06-18 Fleetwood
==================================
- Major improvements in fragment tracking
* Add nonterminal node in extractInfo
* Add nonterminal node in `extractInfo()`
* tag more offsets in expressions
* tag array subscripts
* set YIELD value offset in a <yield> expr
* set `YIELD` value offset in a _yield expr_
* fix a long-standing bug in not adjusting final AST when melding other deparse ASTs
- Fixes yet again for make_function node handling; document what's up here
- Fix bug in snowflake Python 3.5 *args kwargs
- Fix bug in snowflake Python 3.5 `*args`, `kwargs`
2.10.1 2017-06-3 Marylin Frankel
========================================
- fix some fragments parsing bugs
- was returning the wrong type sometimes in deparse_code_around_offset()
- was returning the wrong type sometimes in `deparse_code_around_offset()`
- capture function name in offsets
- track changes to ifelstrmtr node from pysource into fragments
- track changes to `ifelstrmtr` node from `pysource.py` into fragments
2.10.0 2017-05-30 Elaine Gordon
=======================================
- Add fuzzy offset deparse look up
- 3.6 bug fixes
- fix EXTENDED_ARGS handling (and in 2.6 and others)
- fix `EXTENDED_ARGS` handling (and in 2.6 and others)
- semantic routine make_function fragments.py
- MAKE_FUNCTION handling
- CALL_FUNCTION_EX handling
- async property on defs
- support for CALL_FUNCTION_KW (moagstar)
- 3.5+ UNMAP_PACK and BUILD_UNMAP_PACK handling
- `MAKE_FUNCTION` handling
- `CALL_FUNCTION_EX` handling
- `async` property on `defs`
- support for `CALL_FUNCTION_KW` (moagstar)
- 3.5+ `UNMAP_PACK` and `BUILD_UNMAP_PACK` handling
- 3.5 FUNCTION_VAR bug
- 3.x pass statement insdie while True
- 3.x pass statement inside `while True`
- Improve 3.2 decompilation
- Fixed -o argument processing (grkov90)
- Fixed `-o` argument processing (grkov90)
- Reduce scope of LOAD_ASSERT as expr to 3.4+
- "await" statement fixes
- `await` statement fixes
- 2.3, 2.4 "if 1 .." fixes
- 3.x annotation fixes
2.9.11 2017-04-06
=====================
- Better support for Python 3.5+ BUILD_MAP_UNPACK
- Start 3.6 CALL_FUNCTION_EX support
- Better support for Python 3.5+ `BUILD_MAP_UNPACK`
- Start 3.6 `CALL_FUNCTION_EX` support
- Many decompilation bug fixes. (Many more remain). See ChangeLog
2.9.10 2017-02-25
@@ -452,7 +495,7 @@ Very minor changes
- Python grammar rule fixes
- Add ability to get grammar coverage on runs
- Handle Python 3.6 opcode BUILD_CONST_KEYMAP
- Handle Python 3.6 opcode `BUILD_CONST_KEYMAP`
2.9.9 2016-12-16
@@ -468,13 +511,13 @@ Very minor changes
====================
- Better control-flow detection
- pseudo instruction THEN in 2.x
- pseudo instruction `THEN` in 2.x
to disambiguate if from and
- fix bug in --verify option
- fix bug in `--verify` option
- DRY (a little) control-flow detection
- fix syntax in tuples with one element
- if AST rule inheritance in Python 2.5
- NAME_MODULE removal for Python <= 2.4
- `NAME_MODULE` removal for Python <= 2.4
- verify call fixes for Python <= 2.4
- more Python lint
@@ -485,9 +528,9 @@ Very minor changes
- Some Python 3.6 bytecode to wordcode conversion fixes
- option -g: show start-end range when possible
- track print_docstring move to help (used in python 3.1)
- verify: allow RETURN_VALUE to match RETURN_END_IF
- verify: allow `RETURN_VALUE` to match `RETURN_END_IF`
- some 3.2 compatibility
- Better Python 3 control flow detection by adding Pseudo ELSE opcodes
- Better Python 3 control flow detection by adding Pseudo `ELSE` opcodes
2.9.6 2016-12-04
====================
@@ -503,7 +546,7 @@ Very minor changes
- Correct MANIFEST.in
- More AST grammar checking
- --linemapping option or linenumbers.line_number_mapping()
- `--linemapping` option or _linenumbers.line_number_mapping()_
Shows correspondence of lines between source
and decompiled source
- Some control flow adjustments in code for 2.x.
@@ -523,7 +566,7 @@ Very minor changes
* improper while 1 else
* docstring indent
* 3.3 default values in lambda expressions
* start 3.0 decompilation (needs newer xdis)
* start 3.0 decompilation (needs newer `xdis`)
- Start grammar misparse checking
@@ -537,12 +580,12 @@ Very minor changes
2.9.3 2016-10-26
====================
Release forced by incompatibility change in xdis 3.2.0.
Release forced by incompatibility change in `xdis` 3.2.0.
- Python 3.1 bugs:
* handle "with ... as"
* handle "with"
* Start handling def (...) -> yy (has bugs still)
* handle `with`... `as`
* handle `with`
* Start handling `def` (...) -> _yy_ (has bugs still)
- DRY Python 3.x via inheritance
- Python 3.6 work (from Daniel Bradburn)
@@ -568,12 +611,12 @@ Release forced by incompatibility change in xdis 3.2.0.
2.9.0 2016-10-09
====================
- Use xdis 3.0.0 protocol load_module.
- Use `xdis` 3.0.0 protocol `load_module`.
this Forces change in requirements.txt and _pkg_info_.py
- Start Python 1.5 decompiling; another round of work is needed to
remove bugs
- Simplify python 2.1 grammar
- Fix bug with -t ... Wasn't showing source text when -t option was given
- Fix bug with `-t` ... Wasn't showing source text when `-t` option was given
- Fix 2.1-2.6 bug in list comprehension
2.8.4 2016-10-08
@@ -582,8 +625,8 @@ Release forced by incompatibility change in xdis 3.2.0.
- Python 3 disassembly bug fixes
- Python 3.6 fstring bug fixes (from moagstar)
- Python 2.1 disassembly
- COME_FROM suffixes added in Python3
- use .py extension in verification disassembly
- `COME_FROM` suffixes added in Python3
- use `.py` extension in verification disassembly
2.8.3 2016-09-11 live from NYC!
=======================================
@@ -628,8 +671,8 @@ control-flow structure detection is done.
- Add Python 2.2 decompilation
- Fix bugs
* PyPy LOOKUP_METHOD bug
* Python 3.6 FORMAT_VALUE handles expressions now
* PyPy `LOOKUP_METHOD` bug
* Python 3.6 `FORMAT_VALUE` handles expressions now
2.8.0 2016-08-03
====================
@@ -679,7 +722,7 @@ control-flow structure detection is done.
====================
- Improve Python 2.6 bytecode deparsing:
stdlib now will deparse something
_stdlib_ now will deparse something
- Better <2.6 vs. 2.7 grammar separation
- Fix some 2.7 deparsing bugs
- Fix bug in installing uncompyle6 script
@@ -742,9 +785,9 @@ control-flow structure detection is done.
2.3.2 2016-05-1
===================
- Add --version option standalone scripts
- Add `--version` option standalone scripts
- Correct License information in package
- expose fns uncompyle_file, load_file, and load_module
- expose functions `uncompyle_file()`, `load_file()`, and `load_module()`
- Start to DRY Python2 and Python3 grammars Separate out 3.2, and 3.5+
specific grammar code
- Fix bug in 3.5+ constant map parsing
@@ -752,12 +795,12 @@ control-flow structure detection is done.
2.3.0, 2.3.1 2016-04-30
=============================
- Require spark_parser >= 1.1.0
- Require `spark_parser` >= 1.1.0
2.2.0 2016-04-30
====================
- Spark is no longer here but pulled separate package spark_parse
- Spark is no longer here but pulled separate package [spark_parser](https://pypi.org/project/spark_parser/)
- Python 3 parsing fixes
- More tests
@@ -767,7 +810,7 @@ control-flow structure detection is done.
- Support single-mode (in addition to exec-mode) compilation
- Start to DRY Python 2 and Python 3 grammars
- Fix bug in if else ternary construct
- Fix bug in uncomplye6 -d and -r options (via lelicopter)
- Fix bug in uncompyle6 `-d` and `-r` options (via lelicopter)
2.1.3 2016-01-02
@@ -775,7 +818,7 @@ control-flow structure detection is done.
- Limited support for decompiling Python 3.5
- Improve Python 3 class deparsing
- Handle MAKE_CLOSURE opcode
- Handle `MAKE_CLOSURE` opcode
- Start to DRY opcode code.
- increase test coverage
- fix misc small bugs and some improvements
@@ -817,5 +860,5 @@ Changes from uncompyle2
SPARK:
add option to show grammar rules applied
allow Python-style # comments in grammar
allow Python-style `#` comments in grammar
Runs on Python 3 and Python 2


@@ -1,4 +1,8 @@
|buildstatus| |Latest Version| |Supported Python Versions|
|buildstatus| |Pypi Installs| |Latest Version| |Supported Python Versions|
|packagestatus|
uncompyle6
==========
@@ -122,17 +126,6 @@ For usage help:
$ uncompyle6 -h
If you want strong verification of the correctness of the
decompilation process, add the `--verify` option. But there are
situations where this will indicate a failure, although the generated
program is semantically equivalent. Using option `--weak-verify` will
tell you if there is something definitely wrong. Generally, large
swaths of code are decompiled correctly, if not the entire program.
You can also cross compare the results with pycdc_ . Since they work
differently, bugs here often aren't in that, and vice versa.
Verification
------------
@@ -140,22 +133,25 @@ In older versions of Python it was possible to verify bytecode by
decompiling bytecode, and then compiling using the Python interpreter
for that bytecode version. Having done this the bytecode produced
could be compared with the original bytecode. However as Python's code
generation got better, this is no longer feasible.
generation got better, this no longer was feasible.
The verification that we use that doesn't check bytecode for
equivalence but does check to see if the resulting decompiled source
is a valid Python program by running the Python interpreter. Because
the Python language has changed so much, for best results you should
use the same Python version in checking as was used in creating the
bytecode.
If you want Python syntax verification of the correctness of the
decompilation process, add the :code:`--syntax-verify` option. However since
Python syntax changes, you should use this option if the bytecode is
the right bytecode for the Python interpreter that will be checking
the syntax.
There are however an interesting class of these programs that is
readily available give stronger verification: those programs that
when run check some computation, or even better themselves.
You can also cross compare the results with another python decompiler
like pycdc_ . Since they work differently, bugs here often aren't in
that, and vice versa.
And already Python has a set of programs like this: the test suite
for the standard library that comes with Python. We have some
code in `test/stdlib` to facilitate this kind of checking.
There is an interesting class of these programs that is readily
available to give stronger verification: those programs that when run
test themselves. Our test suite includes these.
And Python comes with another set of programs like this: its test
suite for the standard library. We have some code in :code:`test/stdlib` to
facilitate this kind of checking too.
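A tiny self-checking program of the kind described above (illustrative); if the decompiled source still passes its own assertion when run, that is evidence the round trip preserved semantics::

    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    assert fib(10) == 55
    print("ok")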
Known Bugs/Restrictions
-----------------------
@@ -185,15 +181,15 @@ In the Python 3 series, Python support is is strongest around 3.4 or
3.0 is weird in that it in some ways resembles 2.6 more than it does
3.1 or 2.7. Python 3.6 changes things drastically by using word codes
rather than byte codes. As a result, the jump offset field in a jump
instruction argument has been reduced. This makes the `EXTENDED_ARG`
instruction argument has been reduced. This makes :code:`EXTENDED_ARG`
instructions more prevalent in jump instructions; previously
they had been rare. Perhaps to compensate for the additional
`EXTENDED_ARG` instructions, additional jump optimization has been
:code:`EXTENDED_ARG` instructions, additional jump optimization has been
added. So in sum handling control flow by ad hoc means as is currently
done is worse.
Between Python 3.5, 3.6 and 3.7 there have been major changes to the
`MAKE_FUNCTION` and `CALL_FUNCTION` instructions.
:code:`MAKE_FUNCTION` and :code:`CALL_FUNCTION` instructions.
Currently not all Python magic numbers are supported. Specifically in
some versions of Python, notably Python 3.6, the magic number has
@@ -225,7 +221,7 @@ See Also
* https://github.com/zrax/pycdc : purports to support all versions of Python. It is written in C++ and is most accurate for Python versions around 2.7 and 3.3 when the code was more actively developed. Accuracy for more recent versions of Python 3 and early versions of Python are especially lacking. See its `issue tracker <https://github.com/zrax/pycdc/issues>`_ for details. Currently lightly maintained.
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than what is used here. Currently unmaintained.
* https://github.com/figment/unpyc3/ : fork of above, but supports Python 3.3 only. Includes some fixes like supporting function annotations. Currently unmaintained.
* https://github.com/wibiti/uncompyle2 : supports Python 2.7 only, but does that fairly well. There are situations where `uncompyle6` results are incorrect while `uncompyle2` results are not, but more often uncompyle6 is correct when uncompyle2 is not. Because `uncompyle6` adheres to accuracy over idiomatic Python, `uncompyle2` can produce more natural-looking code when it is correct. Currently `uncompyle2` is lightly maintained. See its issue `tracker <https://github.com/wibiti/uncompyle2/issues>`_ for more details
* https://github.com/wibiti/uncompyle2 : supports Python 2.7 only, but does that fairly well. There are situations where :code:`uncompyle6` results are incorrect while :code:`uncompyle2` results are not, but more often uncompyle6 is correct when uncompyle2 is not. Because :code:`uncompyle6` adheres to accuracy over idiomatic Python, :code:`uncompyle2` can produce more natural-looking code when it is correct. Currently :code:`uncompyle2` is lightly maintained. See its issue `tracker <https://github.com/wibiti/uncompyle2/issues>`_ for more details
* `How to report a bug <https://github.com/rocky/python-uncompyle6/blob/master/HOW-TO-REPORT-A-BUG.md>`_
* The HISTORY_ file.
* https://github.com/rocky/python-xdis : Cross Python version disassembler
@@ -242,9 +238,12 @@ See Also
.. _this: https://github.com/rocky/python-uncompyle6/wiki/Deparsing-technology-and-its-use-in-exact-location-reporting
.. |buildstatus| image:: https://travis-ci.org/rocky/python-uncompyle6.svg
:target: https://travis-ci.org/rocky/python-uncompyle6
.. |packagestatus| image:: https://repology.org/badge/vertical-allrepos/python:uncompyle6.svg
:target: https://repology.org/project/python:uncompyle6/versions
.. _PJOrion: http://www.koreanrandom.com/forum/topic/15280-pjorion-%D1%80%D0%B5%D0%B4%D0%B0%D0%BA%D1%82%D0%B8%D1%80%D0%BE%D0%B2%D0%B0%D0%BD%D0%B8%D0%B5-%D0%BA%D0%BE%D0%BC%D0%BF%D0%B8%D0%BB%D1%8F%D1%86%D0%B8%D1%8F-%D0%B4%D0%B5%D0%BA%D0%BE%D0%BC%D0%BF%D0%B8%D0%BB%D1%8F%D1%86%D0%B8%D1%8F-%D0%BE%D0%B1%D1%84
.. _Deobfuscator: https://github.com/extremecoders-re/PjOrion-Deobfuscator
.. _Py2EXE: https://en.wikipedia.org/wiki/Py2exe
.. |Supported Python Versions| image:: https://img.shields.io/pypi/pyversions/uncompyle6.svg
.. |Latest Version| image:: https://badge.fury.io/py/uncompyle6.svg
:target: https://badge.fury.io/py/uncompyle6
.. |Pypi Installs| image:: https://pepy.tech/badge/uncompyle6/month


@@ -26,46 +26,46 @@ copyright = """
Copyright (C) 2015-2019 Rocky Bernstein <rb@dustyfeet.com>.
"""
classifiers = ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Libraries :: Python Modules',
classifiers = ["Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.4",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.0",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Software Development :: Debuggers",
"Topic :: Software Development :: Libraries :: Python Modules",
]
# The rest in alphabetic order
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
author_email = "rb@dustyfeet.com"
entry_points = {
'console_scripts': [
'uncompyle6=uncompyle6.bin.uncompile:main_bin',
'pydisassemble=uncompyle6.bin.pydisassemble:main',
"console_scripts": [
"uncompyle6=uncompyle6.bin.uncompile:main_bin",
"pydisassemble=uncompyle6.bin.pydisassemble:main",
]}
ftp_url = None
install_requires = ['spark-parser >= 1.8.7, < 1.9.0',
'xdis >= 4.0.2, < 4.1.0']
install_requires = ["spark-parser >= 1.8.9, < 1.9.0",
"xdis >= 4.0.4, < 4.1.0"]
license = 'GPL3'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
license = "GPL3"
mailing_list = "python-debugger@googlegroups.com"
modname = "uncompyle6"
py_modules = None
short_desc = 'Python cross-version byte-code decompiler'
web = 'https://github.com/rocky/python-uncompyle6/'
short_desc = "Python cross-version byte-code decompiler"
web = "https://github.com/rocky/python-uncompyle6/"
# tracebacks in zip files are funky and not debuggable
zip_safe = True
@@ -82,5 +82,5 @@ def read(*rnames):
return open(os.path.join(srcdir, *rnames)).read()
# Get info from files; set: long_description and VERSION
long_description = ( read("README.rst") + '\n' )
exec(read('uncompyle6/version.py'))
long_description = ( read("README.rst") + "\n" )
exec(read("uncompyle6/version.py"))


@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
echo "This script should be *sourced* rather than run directly through bash"
exit 1
fi
export PYVERSIONS='3.6.8 3.7.3 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.8'
export PYVERSIONS='3.6.9 3.7.4 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.10 3.5.7'


@@ -61,7 +61,7 @@ build_script:
test_script:
# Run the project tests
- "%CMD_IN_ENV% python test/test_pyenvlib.py --native --weak-verify"
- "%CMD_IN_ENV% python test/test_pyenvlib.py --native --syntax-verify"
after_test:
# If tests are successful, create binary packages for the project.


@@ -1,9 +1,12 @@
# std
# test
from uncompyle6 import PYTHON_VERSION, deparse_code
import sys
from uncompyle6 import PYTHON_VERSION, code_deparse
import pytest
pytestmark = pytest.mark.skipif(PYTHON_VERSION <= 2.6,
reason='hypothesis needs 2.7 or later')
pytestmark = pytest.mark.skipif(
PYTHON_VERSION <= 2.6, reason="hypothesis needs 2.7 or later"
)
if PYTHON_VERSION > 2.6:
import hypothesis
@@ -11,29 +14,31 @@ if PYTHON_VERSION > 2.6:
# uncompyle6
@st.composite
def expressions(draw):
# todo : would be nice to generate expressions using hypothesis however
# this is pretty involved so for now just use a corpus of expressions
# from which to select.
return draw(st.sampled_from((
'abc',
'len(items)',
'x + 1',
'lineno',
'container',
'self.attribute',
'self.method()',
# These expressions are failing, I think these are control
# flow problems rather than problems with FORMAT_VALUE,
# however I need to confirm this...
#'sorted(items, key=lambda x: x.name)',
#'func(*args, **kwargs)',
#'text or default',
#'43 if life_the_universe and everything else None'
)))
return draw(
st.sampled_from(
(
"abc",
"len(items)",
"x + 1",
"lineno",
"container",
"self.attribute",
"self.method()",
# These expressions are failing, I think these are control
# flow problems rather than problems with FORMAT_VALUE,
# however I need to confirm this...
#'sorted(items, key=lambda x: x.name)',
#'func(*args, **kwargs)',
#'text or default',
#'43 if life_the_universe and everything else None'
)
)
)
@st.composite
def format_specifiers(draw):
@@ -54,36 +59,37 @@ if PYTHON_VERSION > 2.6:
:return: An example format_specifier.
"""
alphabet_strategy = st.characters(min_codepoint=ord('a'), max_codepoint=ord('z'))
alphabet_strategy = st.characters(
min_codepoint=ord("a"), max_codepoint=ord("z")
)
fill = draw(st.one_of(alphabet_strategy, st.none()))
align = draw(st.sampled_from(list('<>=^')))
fill_align = (fill + align or '') if fill else ''
align = draw(st.sampled_from(list("<>=^")))
fill_align = (fill + align or "") if fill else ""
type_ = draw(st.sampled_from('bcdeEfFgGnosxX%'))
can_have_sign = type_ in 'deEfFgGnoxX%'
can_have_comma = type_ in 'deEfFgG%'
can_have_precision = type_ in 'fFgG'
can_have_pound = type_ in 'boxX%'
can_have_zero = type_ in 'oxX'
type_ = draw(st.sampled_from("bcdeEfFgGnosxX%"))
can_have_sign = type_ in "deEfFgGnoxX%"
can_have_comma = type_ in "deEfFgG%"
can_have_precision = type_ in "fFgG"
can_have_pound = type_ in "boxX%"
can_have_zero = type_ in "oxX"
sign = draw(st.sampled_from(list('+- ') + [''])) if can_have_sign else ''
pound = draw(st.sampled_from(('#', '',))) if can_have_pound else ''
zero = draw(st.sampled_from(('0', '',))) if can_have_zero else ''
sign = draw(st.sampled_from(list("+- ") + [""])) if can_have_sign else ""
pound = draw(st.sampled_from(("#", ""))) if can_have_pound else ""
zero = draw(st.sampled_from(("0", ""))) if can_have_zero else ""
int_strategy = st.integers(min_value=1, max_value=1000)
width = draw(st.one_of(int_strategy, st.none()))
width = str(width) if width is not None else ''
width = str(width) if width is not None else ""
comma = draw(st.sampled_from((',', '',))) if can_have_comma else ''
comma = draw(st.sampled_from((",", ""))) if can_have_comma else ""
if can_have_precision:
precision = draw(st.one_of(int_strategy, st.none()))
precision = '.' + str(precision) if precision else ''
precision = "." + str(precision) if precision else ""
else:
precision = ''
return ''.join((fill_align, sign, pound, zero, width, comma, precision, type_,))
precision = ""
return "".join((fill_align, sign, pound, zero, width, comma, precision, type_))
@st.composite
def fstrings(draw):
@@ -96,9 +102,7 @@ if PYTHON_VERSION > 2.6:
:return: A valid f-string.
"""
character_strategy = st.characters(
blacklist_characters='\r\n\'\\s{}',
min_codepoint=1,
max_codepoint=1000,
blacklist_characters="\r\n'\\s{}", min_codepoint=1, max_codepoint=1000
)
is_raw = draw(st.booleans())
integer_strategy = st.integers(min_value=0, max_value=3)
@@ -106,53 +110,49 @@ if PYTHON_VERSION > 2.6:
content = []
for _ in range(expression_count):
expression = draw(expressions())
conversion = draw(st.sampled_from(('', '!s', '!r', '!a',)))
conversion = draw(st.sampled_from(("", "!s", "!r", "!a")))
has_specifier = draw(st.booleans())
specifier = ':' + draw(format_specifiers()) if has_specifier else ''
content.append('{{{}{}}}'.format(expression, conversion, specifier))
specifier = ":" + draw(format_specifiers()) if has_specifier else ""
content.append("{{{}{}}}".format(expression, conversion, specifier))
content.append(draw(st.text(character_strategy)))
content = ''.join(content)
return "f{}'{}'".format('r' if is_raw else '', content)
content = "".join(content)
return "f{}'{}'".format("r" if is_raw else "", content)
@pytest.mark.skipif(PYTHON_VERSION != 3.6, reason='need Python 3.6')
@pytest.mark.skipif(PYTHON_VERSION != 3.6, reason="need Python 3.6")
@hypothesis.given(format_specifiers())
def test_format_specifiers(format_specifier):
"""Verify that format_specifiers generates valid specifiers"""
try:
exec('"{:' + format_specifier + '}".format(0)')
except ValueError as e:
if 'Unknown format code' not in str(e):
if "Unknown format code" not in str(e):
raise
def run_test(text):
hypothesis.assume(len(text))
hypothesis.assume("f'{" in text)
expr = text + '\n'
code = compile(expr, '<string>', 'single')
deparsed = deparse_code(PYTHON_VERSION, code, compile_mode='single')
recompiled = compile(deparsed.text, '<string>', 'single')
expr = text + "\n"
code = compile(expr, "<string>", "single")
deparsed = code_deparse(code, sys.stdout, PYTHON_VERSION, compile_mode="single")
recompiled = compile(deparsed.text, "<string>", "single")
if recompiled != code:
print(recompiled)
print('================')
print("================")
print(code)
print('----------------')
assert 'dis(' + deparsed.text.strip('\n') + ')' == 'dis(' + expr.strip('\n') + ')'
print("----------------")
assert (
"dis(" + deparsed.text.strip("\n") + ")"
== "dis(" + expr.strip("\n") + ")"
)
@pytest.mark.skipif(PYTHON_VERSION != 3.6, reason='need Python 3.6')
@pytest.mark.skipif(PYTHON_VERSION != 3.6, reason="need Python 3.6")
@hypothesis.given(fstrings())
def test_uncompyle_fstring(fstring):
"""Verify uncompyling fstring bytecode"""
run_test(fstring)
@pytest.mark.skipif(PYTHON_VERSION != 3.6, reason='need Python 3.6+')
@pytest.mark.parametrize('fstring', [
"f'{abc}{abc!s}'",
"f'{abc}0'",
])
@pytest.mark.skipif(PYTHON_VERSION != 3.6, reason="need Python 3.6+")
@pytest.mark.parametrize("fstring", ["f'{abc}{abc!s}'", "f'{abc}0'"])
def test_uncompyle_direct(fstring):
"""useful for debugging"""
run_test(fstring)


@@ -9,6 +9,7 @@ def test_grammar():
remain_tokens = set(tokens) - opcode_set
remain_tokens = set([re.sub(r'_\d+$','', t) for t in remain_tokens])
remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
remain_tokens = set([re.sub('LOAD_CODE$','', t) for t in remain_tokens])
remain_tokens = set(remain_tokens) - opcode_set
assert remain_tokens == set([]), \
"Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())
@@ -88,7 +89,7 @@ def test_grammar():
COME_FROM_EXCEPT_CLAUSE
COME_FROM_LOOP COME_FROM_WITH
COME_FROM_FINALLY ELSE
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_STR
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_STR LOAD_CODE
LAMBDA_MARKER
RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
""".split())


@@ -15,7 +15,7 @@ else:
def iteritems(d):
return d.iteritems()
from uncompyle6.semantics.pysource import SourceWalker as SourceWalker
from uncompyle6.semantics.pysource import (SourceWalker, deparse_code2str)
def test_template_engine():
s = StringIO()
@@ -185,3 +185,11 @@ def test_tables():
assert arg == len(entry), (
"%s[%s] arg %d should be length of entry %d. Full entry: %s" %
(name, k, arg, len(entry), entry))
def test_deparse_code2str():
def deparse_test(co):
"This is a docstring"
s = deparse_code2str(co, debug_opts={"asm": "after", "tree": True})
assert s
return
deparse_test(deparse_test.__code__)


@@ -1,16 +1,20 @@
# future
from __future__ import print_function
# std
import os
import difflib
import subprocess
import tempfile
import functools
# uncompyle6 / xdis
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY, deparse_code
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY, code_deparse
# TODO : I think we can get xdis to support the dis api (python 3 version) by doing something like this there
from xdis.bytecode import Bytecode
from xdis.main import get_opcode
opc = get_opcode(PYTHON_VERSION, IS_PYPY)
Bytecode = functools.partial(Bytecode, opc=opc)
import six
@@ -20,6 +24,7 @@ if PYTHON3:
else:
from StringIO import StringIO
def _dis_to_text(co):
return Bytecode(co).dis()
@@ -33,36 +38,32 @@ def print_diff(original, uncompyled):
:param original: Text describing the original code object.
:param uncompyled: Text describing the uncompyled code object.
"""
original_lines = original.split('\n')
uncompyled_lines = uncompyled.split('\n')
args = original_lines, uncompyled_lines, 'original', 'uncompyled'
original_lines = original.split("\n")
uncompyled_lines = uncompyled.split("\n")
args = original_lines, uncompyled_lines, "original", "uncompyled"
try:
from bs4 import BeautifulSoup
diff = difflib.HtmlDiff().make_file(*args)
diff = BeautifulSoup(diff, "html.parser")
diff.select_one('table[summary="Legends"]').extract()
except ImportError:
print('\nTo display diff highlighting run:\n pip install BeautifulSoup4')
print("\nTo display diff highlighting run:\n pip install BeautifulSoup4")
diff = difflib.HtmlDiff().make_table(*args)
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(str(diff).encode('utf-8'))
f.write(str(diff).encode("utf-8"))
try:
print()
html = subprocess.check_output([
'elinks',
'-dump',
'-no-references',
'-dump-color-mode',
'1',
f.name,
]).decode('utf-8')
html = subprocess.check_output(
["elinks", "-dump", "-no-references", "-dump-color-mode", "1", f.name]
).decode("utf-8")
print(html)
except:
print('\nFor side by side diff install elinks')
print("\nFor side by side diff install elinks")
diff = difflib.Differ().compare(original_lines, uncompyled_lines)
print('\n'.join(diff))
print("\n".join(diff))
finally:
os.unlink(f.name)
@@ -80,18 +81,19 @@ def are_instructions_equal(i1, i2):
:return: True if the two instructions are approximately equal, otherwise False.
"""
result = (1 == 1
result = (
1 == 1
and i1.opname == i2.opname
and i1.opcode == i2.opcode
and i1.arg == i2.arg
# ignore differences due to code objects
# TODO : Better way of ignoring address
and (i1.argval == i2.argval or '<code object' in str(i1.argval))
and (i1.argval == i2.argval or "<code object" in str(i1.argval))
# TODO : Should probably recurse to check code objects
and (i1.argrepr == i2.argrepr or '<code object' in i1.argrepr)
and (i1.argrepr == i2.argrepr or "<code object" in i1.argrepr)
and i1.offset == i2.offset
# ignore differences in line numbers
#and i1.starts_line
# and i1.starts_line
and i1.is_jump_target == i2.is_jump_target
)
return result
@@ -115,22 +117,21 @@ def are_code_objects_equal(co1, co2):
return True
def validate_uncompyle(text, mode='exec'):
def validate_uncompyle(text, mode="exec"):
"""
Validate decompilation of the given source code.
:param text: Source to validate decompilation of.
"""
original_code = compile(text, '<string>', mode)
original_code = compile(text, "<string>", mode)
original_dis = _dis_to_text(original_code)
original_text = text
deparsed = deparse_code(PYTHON_VERSION, original_code,
compile_mode=mode,
out=six.StringIO(),
is_pypy=IS_PYPY)
deparsed = code_deparse(
original_code, out=six.StringIO(), version=PYTHON_VERSION, compile_mode=mode
)
uncompyled_text = deparsed.text
uncompyled_code = compile(uncompyled_text, '<string>', 'exec')
uncompyled_code = compile(uncompyled_text, "<string>", "exec")
if not are_code_objects_equal(uncompyled_code, original_code):
@@ -138,15 +139,17 @@ def validate_uncompyle(text, mode='exec'):
def output(text, dis):
width = 60
return '\n\n'.join([
' SOURCE CODE '.center(width, '#'),
text.strip(),
' BYTECODE '.center(width, '#'),
dis
])
return "\n\n".join(
[
" SOURCE CODE ".center(width, "#"),
text.strip(),
" BYTECODE ".center(width, "#"),
dis,
]
)
original = output(original_text, original_dis)
uncompyled = output(uncompyled_text, uncompyled_dis)
print_diff(original, uncompyled)
assert 'original' == 'uncompyled'
assert "original" == "uncompyled"


@@ -1,2 +1,4 @@
flake8
hypothesis<=3.0.0
six
pytest==3.2.5


@@ -1,6 +1,6 @@
[bdist_rpm]
release = 1
packager = Mysterie <kajusska@gmail.com>
packager = rocky <rb@dustyfeet.com>
doc_files = README
# CHANGES.txt
# USAGE.txt
@@ -8,4 +8,4 @@ doc_files = README
# examples/
[bdist_wheel]
universal=1
# universal=1


@@ -34,47 +34,47 @@ check-2.6 check-2.7: check-bytecode-2 check-bytecode-3 check-bytecode-1 check-na
#: Run working tests from Python 3.0
check-3.0: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.0-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.0 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.0 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.1
check-3.1: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.1-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.1 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.1 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.2
check-3.2: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.2-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.2 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.2 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.3
check-3.3: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.3-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.3 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.3 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.4
check-3.4: check-bytecode check-3.4-ok check-2.7-ok
$(PYTHON) test_pythonlib.py --bytecode-3.4-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.4 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.5
check-3.5: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.5-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.5 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.5 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.6
check-3.6: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.6-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.6 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.6 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.7
check-3.7: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.7-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.7 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.7 --syntax-verify $(COMPILE)
#: Run working tests from Python 3.8
check-3.8: check-bytecode
$(PYTHON) test_pythonlib.py --bytecode-3.8-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.8 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-3.8 --syntax-verify $(COMPILE)
# FIXME
#: this is called when running under pypy3.5-5.8.0 or pypy2-5.6.0
@@ -229,78 +229,78 @@ grammar-coverage-3.7:
#: Check deparsing Python 2.6
check-bytecode-2.6:
$(PYTHON) test_pythonlib.py --bytecode-2.6-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-2.6 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-2.6 --syntax-verify
#: Check deparsing Python 2.7
check-bytecode-2.7:
$(PYTHON) test_pythonlib.py --bytecode-2.7-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-2.7 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-2.7 --syntax-verify
#: Check deparsing Python 3.0
check-bytecode-3.0:
$(PYTHON) test_pythonlib.py --bytecode-3.0-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.0 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.0 --syntax-verify
#: Check deparsing Python 3.1
check-bytecode-3.1:
$(PYTHON) test_pythonlib.py --bytecode-3.1-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.1 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.1 --syntax-verify
#: Check deparsing Python 3.2
check-bytecode-3.2:
$(PYTHON) test_pythonlib.py --bytecode-3.2-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.2 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.2 --syntax-verify
#: Check deparsing Python 3.3
check-bytecode-3.3:
$(PYTHON) test_pythonlib.py --bytecode-3.3-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.3 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.3 --syntax-verify
#: Check deparsing Python 3.4
check-bytecode-3.4:
$(PYTHON) test_pythonlib.py --bytecode-3.4-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.4 --syntax-verify
#: Check deparsing Python 3.5
check-bytecode-3.5:
$(PYTHON) test_pythonlib.py --bytecode-3.5-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.5 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.5 --syntax-verify
#: Check deparsing Python 3.6
check-bytecode-3.6:
$(PYTHON) test_pythonlib.py --bytecode-3.6-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.6 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.6 --syntax-verify
#: Check deparsing Python 3.7
check-bytecode-3.7:
$(PYTHON) test_pythonlib.py --bytecode-3.7-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.7 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.7 --syntax-verify
#: Check deparsing Python 3.8
check-bytecode-3.8:
$(PYTHON) test_pythonlib.py --bytecode-3.8-run --verify-run
$(PYTHON) test_pythonlib.py --bytecode-3.8 --weak-verify
$(PYTHON) test_pythonlib.py --bytecode-3.8 --syntax-verify
#: short tests for bytecodes only for this version of Python
check-native-short:
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --syntax-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION)-run --verify-run $(COMPILE)
#: Run longer Python 2.6's lib files known to be okay
check-2.6-ok:
$(PYTHON) test_pythonlib.py --ok-2.6 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-2.6 --syntax-verify $(COMPILE)
#: Run longer Python 2.7's lib files known to be okay
check-2.7-ok:
$(PYTHON) test_pythonlib.py --ok-2.7 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-2.7 --syntax-verify $(COMPILE)
#: Run longer Python 3.2's lib files known to be okay
check-3.2-ok:
$(PYTHON) test_pythonlib.py --ok-3.2 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-3.2 --syntax-verify $(COMPILE)
#: Run longer Python 3.4's lib files known to be okay
check-3.4-ok:
$(PYTHON) test_pythonlib.py --ok-3.4 --weak-verify $(COMPILE)
$(PYTHON) test_pythonlib.py --ok-3.4 --syntax-verify $(COMPILE)
#: PyPy of some sort. E.g. [PyPy 5.0.1 with GCC 4.8.4]
# Skip for now
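For reference, the --verify, --syntax-verify (formerly --weak-verify), and --verify-run flags used by the targets above correspond to the do_verify values handled by the option parsing in test_pythonlib.py and test_pyenvlib.py shown later in this diff. The mapping below is only an editorial summary written in Python form, not code from the repository:
# Editorial summary of the verification modes used by the targets above.
VERIFY_FLAGS = {
    "--verify": "strong",         # compare the generated source with the input byte-code
    "--syntax-verify": "weak",    # only compile the generated source (the renamed --weak-verify)
    "--verify-run": "verify-run", # compile the generated source, run it, and check the exit code
}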

View File

@@ -2,22 +2,23 @@
""" Trivial helper program to bytecompile and run an uncompile
"""
import os, sys, py_compile
assert len(sys.argv) >= 2
version = sys.version[0:3]
if sys.argv[1] == '--run':
suffix = '_run'
if sys.argv[1] in ("--run", "-r"):
suffix = "_run"
py_source = sys.argv[2:]
else:
suffix = ''
suffix = ""
py_source = sys.argv[1:]
for path in py_source:
short = os.path.basename(path)
if hasattr(sys, 'pypy_version_info'):
cfile = "bytecode_pypy%s%s/%s" % (version, suffix, short) + 'c'
if hasattr(sys, "pypy_version_info"):
cfile = "bytecode_pypy%s%s/%s" % (version, suffix, short) + "c"
else:
cfile = "bytecode_%s%s/%s" % (version, suffix, short) + 'c'
cfile = "bytecode_%s%s/%s" % (version, suffix, short) + "c"
print("byte-compiling %s to %s" % (path, cfile))
py_compile.compile(path, cfile)
if isinstance(version, str) or version >= (2, 6, 0):
os.system("../bin/uncompyle6 -a -t %s" % cfile)
os.system("../bin/uncompyle6 -a -T %s" % cfile)

Binary file not shown.

View File

@@ -42,7 +42,7 @@ for VERSION in $PYVERSION ; do
echo Python Version $(pyenv local) > $LOGFILE
echo "" >> $LOGFILE
typeset -i ALL_FILES_STARTTIME=$(date +%s)
python ./test_pyenvlib.py --max ${MAX_TESTS} --weak-verify --$VERSION >>$LOGFILE 2>&1
python ./test_pyenvlib.py --max ${MAX_TESTS} --syntax-verify --$VERSION >>$LOGFILE 2>&1
rc=$?
echo Python Version $(pyenv local) >> $LOGFILE

View File

@@ -0,0 +1,19 @@
# Issue #284 in Python 2.6
# See https://github.com/rocky/python-uncompyle6/issues/284
# Decompilation failed when return was the last statement
# in the while loop inside the if block
# This code is RUNNABLE!
def f1():
if True:
while True:
return 5
def f2():
if True:
while 1:
return 6
assert f1() == 5 and f2() == 6

View File

@@ -31,9 +31,28 @@ def test9(arg_1=55, *varargs: int, y=5, **kwargs):
def test10(args_1, b: 'annotating b', c: int) -> float:
return 5.4
class IOBase:
def test11(*, name):
return args, name
def test12(a, *args, name):
return a, args
pass
def test13(*args, name):
return args, name
def test14(*args, name: int=1, qname):
return args, name, qname
def test15(*args, name='S', fname, qname=4):
return args, name, fname, qname
# From 3.4 /asyncio/streams.py open_connection
_DEFAULT_LIMIT = 5
def test16(host=None, port=None, *,
loop=None, limit=_DEFAULT_LIMIT, **kwds):
return host, port, loop, limit, kwds
# Python 3.1 _pyio.py uses the -> "IOBase" annotation
def o(f, mode = "r", buffering = None) -> "IOBase":
return (f, mode, buffering)
@@ -109,6 +128,10 @@ def ann2(args_1, b: int = 5, **kwargs: float) -> float:
assert ann2.__annotations__['return'] == float
assert b == 5
class TestSignatureObject():
def test_signature_on_wkwonly(self):
def test(x:int=55, *args: (int, str), c='test', a:float, kwargs:str="S", **b: int) -> int:
pass
assert test1(1, 5) == (1, 5, 4, {})
assert test1(1, 5, 6, foo='bar') == (1, 5, 6, {'foo': 'bar'})
@@ -121,3 +144,9 @@ assert test6(2.3, 4, 5) == (2.3, 4, 5)
ann1(1, 'test', 5)
ann2(1)
### FIXME: fill in...
assert test12(1, 2, 3, name='hi') == (1, (2, 3)), "a, *args, name"
assert test13(1, 2, 3, name='hi') == ((1, 2, 3), 'hi'), "*args, name"
assert test16('localhost', loop=2, limit=3, a='b') == ('localhost', None, 2, 3, {'a': 'b'})

View File

@@ -0,0 +1,34 @@
# Testing "while 1" versus "while" handling with if/elif/else's
def while_test(a, b, c):
while a != 2:
if b:
a += 1
elif c:
c = 0
else:
break
return a, b, c
def while1_test(a, b, c):
while 1:
if a != 2:
if b:
a = 3
b = 0
elif c:
c = 0
else:
a += b + c
break
return a, b, c
assert while_test(2, 0, 0) == (2, 0, 0), "no while loops"
assert while_test(0, 1, 0) == (2, 1, 0), "two while loops of b branch"
assert while_test(0, 0, 0) == (0, 0, 0), "0 while loops, else branch"
# FIXME: put this in a timer, and try with a=2
assert while1_test(4, 1, 1) == (3, 0, 0), "three while1 loops"
assert while1_test(4, 0, 0) == (4, 0, 0), " one while1 loop"

View File

@@ -0,0 +1,17 @@
a = 5
x = [1, 2, 3]
i = [(a,), x]
j = [a, *x]
def f1(a):
return a[0], a[1]
def f2(b):
return len(b), b[0]+5, b[2]
def f3(x, y):
return [1, *x, y]
assert f1(i) == ((5,), x)
assert f2(j) == (4, 10, 2)
assert f3(x, a) == [1, 1, 2, 3, 5]
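The file above exercises BUILD_LIST_UNPACK, and the next one exercises its sibling BUILD_TUPLE_UNPACK. As a hedged aside, on a CPython 3.5-3.8 interpreter (where these opcodes exist) the standard-library dis module shows the opcode directly; nothing here is uncompyle6-specific:
import dis

# "[a, *x]" compiles to BUILD_LIST_UNPACK and "(a, *x)" to BUILD_TUPLE_UNPACK
# on CPython 3.5-3.8; both opcodes were dropped in 3.9.
dis.dis(compile("[a, *x]", "<example>", "eval"))
dis.dis(compile("(a, *x)", "<example>", "eval"))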

View File

@@ -1,5 +1,5 @@
# Adapted from Python 3.6 trace.py
# Bug was in handling BUID_TUPLE_UNPACK created via
# Bug was in handling BUILD_TUPLE_UNPACK created via
# *opts.arguments
import argparse
parser = argparse.ArgumentParser()
@@ -7,4 +7,4 @@ parser.add_argument('filename', nargs='?')
parser.add_argument('arguments', nargs=argparse.REMAINDER)
opts = parser.parse_args(["foo", "a", "b"])
argv = opts.filename, *opts.arguments
assert argv == ('foo', 'a', 'b')
assert argv == ('foo', 'a', 'b'), "Reconstruct tuple using '*' and BUILD_TUPLE_UNPACK"

View File

@@ -0,0 +1,8 @@
# Self-checking test.
# Bug was in if transform not inverting expression
# This file is RUNNABLE!
def test_assert2(c):
if c < 2:
raise SyntaxError('Oops')
test_assert2(5)
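The point of this test is that when the tree transform rewrites the if/raise (see the "assertnot"/"assert2not" templates added in semantics/customize.py later in this diff), it must keep the sense of the test. Below is a small illustration of the inversion involved, written for this summary rather than taken from the project:
# Illustration only: "if c < 2: raise ..." guards the same condition as
# "if not (c >= 2): raise ...". The bug was emitting the un-negated test.
def test_assert2_equivalent(c):
    if not (c >= 2):
        raise SyntaxError('Oops')

test_assert2_equivalent(5)  # passes, just like test_assert2(5) above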

View File

@@ -8,4 +8,7 @@ def x(s):
if not k.startswith('_')
}
assert x((('_foo', None),)) == {}
# Yes, the print() is funny. It is here
# to test a 2-arg assert where
# the 2nd argument is not a string.
assert x((('_foo', None),)) == {}, print("See issue #162")

View File

@@ -11,6 +11,9 @@
def _walk_dir(dir, dfile, ddir=None):
yield from _walk_dir(dir, ddir=dfile)
def ybug(g):
yield from g
# From 3.5.1 _weakrefset.py
#
# 3.5:

View File

@@ -43,10 +43,10 @@ case $PYVERSION in
2.5)
SKIP_TESTS=(
[test_contextlib.py]=1 # Syntax error - look at
[test_dis.py]=1 # We change line numbers - duh!
[test_grammar.py]=1 # Too many stmts. Handle large stmts
[test_grp.py]=1 # Long test - might work Control flow?
[test_pdb.py]=1 # Line-number specific
[test_dis.py]=1 # We change line numbers - duh!
[test_grammar.py]=1 # Too many stmts. Handle large stmts
[test_grp.py]=1 # Long test - might work Control flow?
[test_pdb.py]=1 # Line-number specific
[test_pwd.py]=1 # Long test - might work? Control flow?
[test_queue.py]=1 # Control flow?
[test_re.py]=1 # Probably Control flow?
@@ -56,18 +56,46 @@ case $PYVERSION in
;;
2.6)
SKIP_TESTS=(
[test_aepack.py]=1
[test_aifc.py]=1
[test_array.py]=1
[test_audioop.py]=1
[test_base64.py]=1
[test_bigmem.py]=1
[test_binascii.py]=1
[test_builtin.py]=1
[test_bytes.py]=1
[test_class.py]=1
[test_codeccallbacks.py]=1
[test_codecencodings_cn.py]=1
[test_codecencodings_hk.py]=1
[test_codecencodings_jp.py]=1
[test_codecencodings_kr.py]=1
[test_codecencodings_tw.py]=1
[test_codecencodings_cn.py]=1
[test_codecmaps_hk.py]=1
[test_codecmaps_jp.py]=1
[test_codecmaps_kr.py]=1
[test_codecmaps_tw.py]=1
[test_codecs.py]=1
[test_compile.py]=1 # Intermittent - sometimes works and sometimes doesn't
[test_grammar.py]=1 # Need real flow control. "and" inside "or"
# and "and" inside ifelse need to simultaneously work
[test_cookielib.py]=1
[test_copy.py]=1
[test_decimal.py]=1
[test_descr.py]=1 # Problem in pickle.py?
[test_exceptions.py]=1
[test_extcall.py]=1
[test_float.py]=1
[test_future4.py]=1
[test_generators.py]=1
[test_grp.py]=1 # Long test - might work Control flow?
[test_opcodes.py]=1
[test_pwd.py]=1 # Long test - might work? Control flow?
[test_re.py]=1 # Probably Control flow?
[test_queue.py]=1 # Control flow?
[test_strftime.py]=1
[test_trace.py]=1 # Line numbers are expected to be different
[test_zipfile64.py]=1 # Skip Long test
[test_zlib.py]=1 # Look at
[test_zlib.py]=1 # Takes too long to run (more than 3 minutes 39 seconds)
# .pyenv/versions/2.6.9/lib/python2.6/lib2to3/refactor.pyc
# .pyenv/versions/2.6.9/lib/python2.6/pyclbr.pyc
# .pyenv/versions/2.6.9/lib/python2.6/quopri.pyc -- look at ishex, is short
@@ -103,9 +131,11 @@ case $PYVERSION in
[test_httplib.py]=1 # Ok, but POWER has problems with this
[test_pdb.py]=1 # Ok, but POWER has problems with this
[test_capi.py]=1
[test_curses.py]=1 # Possibly fails on its own but not detected
[test_dis.py]=1 # We change line numbers - duh!
[test_doctest.py]=1 # Fails on its own
[test_exceptions.py]=1
[test_format.py]=1 # control flow. uncompyle2 does not have problems here
[test_generators.py]=1 # control flow. uncompyle2 has problem here too
[test_grammar.py]=1 # Too many stmts. Handle large stmts
@@ -113,6 +143,9 @@ case $PYVERSION in
[test_ioctl.py]=1 # Test takes too long to run
[test_itertools.py]=1 # Fix erroneous reduction to "conditional_true".
# See test/simple_source/bug27+/05_not_unconditional.py
[test_long.py]=1
[test_long_future.py]=1
[test_math.py]=1
[test_memoryio.py]=1 # FIX
[test_multiprocessing.py]=1 # On uncompyle2, takes 24 secs
[test_pep352.py]=1 # ?
@@ -122,9 +155,11 @@ case $PYVERSION in
[test_pty.py]=1
[test_queue.py]=1 # Control flow?
[test_re.py]=1 # Probably Control flow?
[test_runpy.py]=1 # Long and fails on its own
[test_select.py]=1 # Runs okay but takes 11 seconds
[test_socket.py]=1 # Runs ok but takes 22 seconds
[test_subprocess.py]=1 # Runs ok but takes 22 seconds
[test_sys_setprofile.py]=1
[test_sys_settrace.py]=1 # Line numbers are expected to be different
[test_strtod.py]=1 # FIX
[test_traceback.py]=1 # Line numbers change - duh.
@@ -188,6 +223,7 @@ fi
mkdir $TESTDIR || exit $?
cp -r ${PYENV_ROOT}/versions/${PYVERSION}.${MINOR}/lib/python${PYVERSION}/test $TESTDIR
cd $TESTDIR/test
pyenv local $FULLVERSION
export PYTHONPATH=$TESTDIR
# Run tests
@@ -204,10 +240,14 @@ else
fi
typeset -i ALL_FILES_STARTTIME=$(date +%s)
typeset -i skipped=0
for file in $files; do
# AIX bash doesn't grok [[ -v SKIP... ]]
[[ ${SKIP_TESTS[$file]} == 1 ]] && continue
if [[ ${SKIP_TESTS[$file]} == 1 ]] ; then
((skipped++))
continue
fi
# If the test fails *before* decompiling, skip it!
typeset -i STARTTIME=$(date +%s)
@@ -241,7 +281,7 @@ for file in $files; do
fi
(( rc != 0 && allerrs++ ))
if (( STOP_ONERROR && rc )) ; then
echo "** Ran $i tests before failure **"
echo "** Ran $i tests before failure. Skipped $skipped test for known failures. **"
exit $allerrs
fi
done
@@ -251,5 +291,5 @@ typeset -i ALL_FILES_ENDTIME=$(date +%s)
printf "Ran $i unit-test files in "
displaytime $time_diff
echo "Skipped $skipped test for known failures."
exit $allerrs

View File

@@ -28,64 +28,77 @@ from fnmatch import fnmatch
from uncompyle6 import main, PYTHON3
import xdis.magics as magics
#----- configure this for your needs
# ----- configure this for your needs
python_versions = [v for v in magics.python_versions if
re.match('^[0-9.]+$', v)]
python_versions = [v for v in magics.python_versions if re.match("^[0-9.]+$", v)]
# FIXME: we should remove Python versions that we don't support.
# These include Jython, and Python bytecode changes pre release.
TEST_VERSIONS = (
'pypy3-2.4.0', 'pypy-2.6.1',
'pypy-5.0.1', 'pypy-5.3.1', 'pypy3.5-5.7.1-beta',
'pypy3.5-5.9.0', 'pypy3.5-6.0.0',
'native') + tuple(python_versions)
"pypy3-2.4.0",
"pypy-2.6.1",
"pypy-5.0.1",
"pypy-5.3.1",
"pypy3.5-5.7.1-beta",
"pypy3.5-5.9.0",
"pypy3.5-6.0.0",
"native",
) + tuple(python_versions)
target_base = '/tmp/py-dis/'
lib_prefix = os.path.join(os.environ['HOME'], '.pyenv/versions')
target_base = "/tmp/py-dis/"
lib_prefix = os.path.join(os.environ["HOME"], ".pyenv/versions")
PYC = ('*.pyc', )
PYO = ('*.pyo', )
PYOC = ('*.pyc', '*.pyo')
PYC = ("*.pyc",)
PYO = ("*.pyo",)
PYOC = ("*.pyc", "*.pyo")
#-----
# -----
test_options = {
# name: (src_basedir, pattern, output_base_suffix)
'test': ('./test', PYOC, 'test'),
'max=': 200,
}
"test": ("./test", PYOC, "test"),
"max=": 200,
}
for vers in TEST_VERSIONS:
if vers.startswith('pypy'):
if vers.startswith('pypy3.'):
if vers.startswith("pypy"):
if vers.startswith("pypy3."):
short_vers = vers[4:6]
else:
short_vers = vers[0:-2]
test_options[vers] = (os.path.join(lib_prefix, vers, 'lib_pypy'),
PYC, 'python-lib'+short_vers)
test_options[vers] = (
os.path.join(lib_prefix, vers, "lib_pypy"),
PYC,
"python-lib" + short_vers,
)
else:
if vers == 'native':
if vers == "native":
short_vers = os.path.basename(sys.path[-1])
test_options[vers] = (sys.path[-1],
PYC, short_vers)
test_options[vers] = (sys.path[-1], PYC, short_vers)
else:
short_vers = vers[:3]
test_options[vers] = (os.path.join(lib_prefix, vers, 'lib', 'python'+short_vers),
PYC, 'python-lib'+short_vers)
test_options[vers] = (
os.path.join(lib_prefix, vers, "lib", "python" + short_vers),
PYC,
"python-lib" + short_vers,
)
def do_tests(src_dir, patterns, target_dir, start_with=None,
do_verify=False, max_files=200):
def do_tests(
src_dir, patterns, target_dir, start_with=None, do_verify=False, max_files=200
):
def visitor(files, dirname, names):
files.extend(
[os.path.normpath(os.path.join(dirname, n))
for n in names
for pat in patterns
if fnmatch(n, pat)])
[
os.path.normpath(os.path.join(dirname, n))
for n in names
for pat in patterns
if fnmatch(n, pat)
]
)
files = []
cwd = os.getcwd()
@@ -93,10 +106,13 @@ def do_tests(src_dir, patterns, target_dir, start_with=None,
if PYTHON3:
for root, dirname, names in os.walk(os.curdir):
files.extend(
[os.path.normpath(os.path.join(root, n))
for n in names
for pat in patterns
if fnmatch(n, pat)])
[
os.path.normpath(os.path.join(root, n))
for n in names
for pat in patterns
if fnmatch(n, pat)
]
)
pass
pass
else:
@@ -108,26 +124,29 @@ def do_tests(src_dir, patterns, target_dir, start_with=None,
try:
start_with = files.index(start_with)
files = files[start_with:]
print('>>> starting with file', files[0])
print(">>> starting with file", files[0])
except ValueError:
pass
if len(files) > max_files:
files = [file for file in files if not 'site-packages' in file]
files = [file for file in files if not 'test' in file]
files = [file for file in files if not "site-packages" in file]
files = [file for file in files if not "test" in file]
if len(files) > max_files:
# print("Number of files %d - truncating to last 200" % len(files))
print("Number of files %d - truncating to first %s" %
(len(files), max_files))
print(
"Number of files %d - truncating to first %s" % (len(files), max_files)
)
files = files[:max_files]
print(time.ctime())
(tot_files, okay_files, failed_files,
verify_failed_files) = main.main(src_dir, target_dir, files, [], do_verify=do_verify)
(tot_files, okay_files, failed_files, verify_failed_files) = main.main(
src_dir, target_dir, files, [], do_verify=do_verify
)
print(time.ctime())
return verify_failed_files + failed_files
if __name__ == '__main__':
if __name__ == "__main__":
import getopt, sys
do_coverage = do_verify = False
@@ -136,38 +155,46 @@ if __name__ == '__main__':
test_options_keys = list(test_options.keys())
test_options_keys.sort()
opts, args = getopt.getopt(sys.argv[1:], '',
['start-with=', 'verify', 'verify-run', 'weak-verify',
'max=', 'coverage', 'all', ] \
+ test_options_keys )
vers = ''
opts, args = getopt.getopt(
sys.argv[1:],
"",
[
"start-with=",
"verify",
"verify-run",
"syntax-verify",
"max=",
"coverage",
"all",
]
+ test_options_keys,
)
vers = ""
for opt, val in opts:
if opt == '--verify':
do_verify = 'strong'
elif opt == '--weak-verify':
do_verify = 'weak'
elif opt == '--verify-run':
do_verify = 'verify-run'
elif opt == '--coverage':
if opt == "--verify":
do_verify = "strong"
elif opt == "--syntax-verify":
do_verify = "weak"
elif opt == "--verify-run":
do_verify = "verify-run"
elif opt == "--coverage":
do_coverage = True
elif opt == '--start-with':
elif opt == "--start-with":
start_with = val
elif opt[2:] in test_options_keys:
triple = test_options[opt[2:]]
vers = triple[-1]
test_dirs.append(triple)
elif opt == '--max':
test_options['max='] = int(val)
elif opt == '--all':
vers = 'all'
elif opt == "--max":
test_options["max="] = int(val)
elif opt == "--all":
vers = "all"
for val in test_options_keys:
test_dirs.append(test_options[val])
if do_coverage:
os.environ['SPARK_PARSER_COVERAGE'] = (
'/tmp/spark-grammar-%s.cover' % vers
)
os.environ["SPARK_PARSER_COVERAGE"] = "/tmp/spark-grammar-%s.cover" % vers
failed = 0
for src_dir, pattern, target_dir in test_dirs:
@@ -175,8 +202,14 @@ if __name__ == '__main__':
target_dir = os.path.join(target_base, target_dir)
if os.path.exists(target_dir):
shutil.rmtree(target_dir, ignore_errors=1)
failed += do_tests(src_dir, pattern, target_dir, start_with,
do_verify, test_options['max='])
failed += do_tests(
src_dir,
pattern,
target_dir,
start_with,
do_verify,
test_options["max="],
)
else:
print("### Path %s doesn't exist; skipping" % src_dir)
pass
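A hedged sketch of calling do_tests() directly with the signature shown above; the pyenv path is hypothetical, and the argument values simply mirror the PYC pattern, the /tmp/py-dis/ target base, and the "weak" do_verify value that --syntax-verify selects:
# Hypothetical paths; argument shapes follow the definitions above.
failed = do_tests(
    "/home/user/.pyenv/versions/3.6.9/lib/python3.6",  # src_dir
    ("*.pyc",),                                        # patterns (same as PYC)
    "/tmp/py-dis/python-lib3.6",                       # target_dir under target_base
    do_verify="weak",                                  # what --syntax-verify sets
    max_files=50,
)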

View File

@@ -35,69 +35,89 @@ from uncompyle6 import PYTHON_VERSION
from uncompyle6.main import main
from fnmatch import fnmatch
def get_srcdir():
filename = os.path.normcase(os.path.dirname(__file__))
return os.path.realpath(filename)
src_dir = get_srcdir()
#----- configure this for your needs
# ----- configure this for your needs
lib_prefix = '/usr/lib'
#lib_prefix = [src_dir, '/usr/lib/', '/usr/local/lib/']
lib_prefix = "/usr/lib"
# lib_prefix = [src_dir, '/usr/lib/', '/usr/local/lib/']
target_base = tempfile.mkdtemp(prefix='py-dis-')
target_base = tempfile.mkdtemp(prefix="py-dis-")
PY = ('*.py', )
PYC = ('*.pyc', )
PYO = ('*.pyo', )
PYOC = ('*.pyc', '*.pyo')
PY = ("*.py",)
PYC = ("*.pyc",)
PYO = ("*.pyo",)
PYOC = ("*.pyc", "*.pyo")
test_options = {
# name: (src_basedir, pattern, output_base_suffix, python_version)
'test':
('test', PYC, 'test'),
'ok-2.6': (os.path.join(src_dir, 'ok_lib2.6'),
PYOC, 'ok-2.6', 2.6),
'ok-2.7': (os.path.join(src_dir, 'ok_lib2.7'),
PYOC, 'ok-2.7', 2.7),
'ok-3.2': (os.path.join(src_dir, 'ok_lib3.2'),
PYOC, 'ok-3.2', 3.2),
'base-2.7': (os.path.join(src_dir, 'base_tests', 'python2.7'),
PYOC, 'base_2.7', 2.7),
"test": ("test", PYC, "test"),
"ok-2.6": (os.path.join(src_dir, "ok_lib2.6"), PYOC, "ok-2.6", 2.6),
"ok-2.7": (os.path.join(src_dir, "ok_lib2.7"), PYOC, "ok-2.7", 2.7),
"ok-3.2": (os.path.join(src_dir, "ok_lib3.2"), PYOC, "ok-3.2", 3.2),
"base-2.7": (
os.path.join(src_dir, "base_tests", "python2.7"),
PYOC,
"base_2.7",
2.7,
),
}
for vers in (2.7, 3.4, 3.5, 3.6):
for vers in (2.7, 3.4, 3.5, 3.6):
pythonlib = "ok_lib%s" % vers
key = "ok-%s" % vers
test_options[key] = (os.path.join(src_dir, pythonlib), PYOC, key, vers)
pass
for vers in (1.3, 1.4, 1.5,
2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
3.0, 3.1, 3.2, 3.3,
3.4, 3.5, 3.6, 3.7, 3.8, 'pypy3.2', 'pypy2.7', 'pypy3.6'):
for vers in (
1.3,
1.4,
1.5,
2.1,
2.2,
2.3,
2.4,
2.5,
2.6,
2.7,
3.0,
3.1,
3.2,
3.3,
3.4,
3.5,
3.6,
3.7,
3.8,
"pypy3.2",
"pypy2.7",
"pypy3.6",
):
bytecode = "bytecode_%s" % vers
key = "bytecode-%s" % vers
test_options[key] = (bytecode, PYC, bytecode, vers)
test_options[key] = (bytecode, PYC, bytecode, vers)
bytecode = "bytecode_%s_run" % vers
key = "bytecode-%s-run" % vers
test_options[key] = (bytecode, PYC, bytecode, vers)
test_options[key] = (bytecode, PYC, bytecode, vers)
key = "%s" % vers
pythonlib = "python%s" % vers
if isinstance(vers, float) and vers >= 3.0:
pythonlib = os.path.join(pythonlib, '__pycache__')
test_options[key] = (os.path.join(lib_prefix, pythonlib), PYOC, pythonlib, vers)
pythonlib = os.path.join(pythonlib, "__pycache__")
test_options[key] = (os.path.join(lib_prefix, pythonlib), PYOC, pythonlib, vers)
# -----
#-----
def help():
print("""Usage-Examples:
print(
"""Usage-Examples:
# compile, decompyle and verify short tests for Python 2.7:
test_pythonlib.py --bytecode-2.7 --verify --compile
@@ -107,18 +127,21 @@ def help():
# decompile and verify known good python 2.7
test_pythonlib.py --ok-2.7 --verify
""")
"""
)
sys.exit(1)
def do_tests(src_dir, obj_patterns, target_dir, opts):
def file_matches(files, root, basenames, patterns):
files.extend(
[os.path.normpath(os.path.join(root, n))
for n in basenames
for pat in patterns
if fnmatch(n, pat)])
[
os.path.normpath(os.path.join(root, n))
for n in basenames
for pat in patterns
if fnmatch(n, pat)
]
)
files = []
# Change directories so use relative rather than
@@ -127,11 +150,14 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
cwd = os.getcwd()
os.chdir(src_dir)
if opts['do_compile']:
compiled_version = opts['compiled_version']
if opts["do_compile"]:
compiled_version = opts["compiled_version"]
if compiled_version and PYTHON_VERSION != compiled_version:
print("Not compiling: desired Python version is %s but we are running %s" %
(compiled_version, PYTHON_VERSION), file=sys.stderr)
print(
"Not compiling: desired Python version is %s but we are running %s"
% (compiled_version, PYTHON_VERSION),
file=sys.stderr,
)
else:
for root, dirs, basenames in os.walk(src_dir):
file_matches(files, root, basenames, PY)
@@ -143,34 +169,36 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
pass
pass
for root, dirs, basenames in os.walk('.'):
for root, dirs, basenames in os.walk("."):
# Turn root into a relative path
dirname = root[2:] # 2 = len('.') + 1
file_matches(files, dirname, basenames, obj_patterns)
if not files:
print("Didn't come up with any files to test! Try with --compile?",
file=sys.stderr)
print(
"Didn't come up with any files to test! Try with --compile?",
file=sys.stderr,
)
exit(1)
os.chdir(cwd)
files.sort()
if opts['start_with']:
if opts["start_with"]:
try:
start_with = files.index(opts['start_with'])
start_with = files.index(opts["start_with"])
files = files[start_with:]
print('>>> starting with file', files[0])
print(">>> starting with file", files[0])
except ValueError:
pass
print(time.ctime())
print('Source directory: ', src_dir)
print('Output directory: ', target_dir)
print("Source directory: ", src_dir)
print("Output directory: ", target_dir)
try:
_, _, failed_files, failed_verify = \
main(src_dir, target_dir, files, [],
do_verify=opts['do_verify'])
_, _, failed_files, failed_verify = main(
src_dir, target_dir, files, [], do_verify=opts["do_verify"]
)
if failed_files != 0:
sys.exit(2)
elif failed_verify != 0:
@@ -179,71 +207,81 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
except (KeyboardInterrupt, OSError):
print()
sys.exit(1)
if test_opts['rmtree']:
if test_opts["rmtree"]:
parent_dir = os.path.dirname(target_dir)
print("Everything good, removing %s" % parent_dir)
shutil.rmtree(parent_dir)
if __name__ == '__main__':
if __name__ == "__main__":
test_dirs = []
checked_dirs = []
start_with = None
test_options_keys = list(test_options.keys())
test_options_keys.sort()
opts, args = getopt.getopt(sys.argv[1:], '',
['start-with=', 'verify', 'verify-run',
'weak-verify', 'all',
'compile', 'coverage',
'no-rm'] \
+ test_options_keys )
if not opts: help()
opts, args = getopt.getopt(
sys.argv[1:],
"",
[
"start-with=",
"verify",
"verify-run",
"syntax-verify",
"all",
"compile",
"coverage",
"no-rm",
]
+ test_options_keys,
)
if not opts:
help()
test_opts = {
'do_compile': False,
'do_verify': False,
'start_with': None,
'rmtree' : True,
'coverage' : False
}
"do_compile": False,
"do_verify": False,
"start_with": None,
"rmtree": True,
"coverage": False,
}
for opt, val in opts:
if opt == '--verify':
test_opts['do_verify'] = 'strong'
elif opt == '--weak-verify':
test_opts['do_verify'] = 'weak'
elif opt == '--verify-run':
test_opts['do_verify'] = 'verify-run'
elif opt == '--compile':
test_opts['do_compile'] = True
elif opt == '--start-with':
test_opts['start_with'] = val
elif opt == '--no-rm':
test_opts['rmtree'] = False
if opt == "--verify":
test_opts["do_verify"] = "strong"
elif opt == "--syntax-verify":
test_opts["do_verify"] = "weak"
elif opt == "--verify-run":
test_opts["do_verify"] = "verify-run"
elif opt == "--compile":
test_opts["do_compile"] = True
elif opt == "--start-with":
test_opts["start_with"] = val
elif opt == "--no-rm":
test_opts["rmtree"] = False
elif opt[2:] in test_options_keys:
test_dirs.append(test_options[opt[2:]])
elif opt == '--all':
elif opt == "--all":
for val in test_options_keys:
test_dirs.append(test_options[val])
elif opt == '--coverage':
test_opts['coverage'] = True
elif opt == "--coverage":
test_opts["coverage"] = True
else:
help()
pass
pass
if test_opts['coverage']:
os.environ['SPARK_PARSER_COVERAGE'] = (
'/tmp/spark-grammar-python-lib%s.cover' % test_dirs[0][-1]
)
if test_opts["coverage"]:
os.environ["SPARK_PARSER_COVERAGE"] = (
"/tmp/spark-grammar-python-lib%s.cover" % test_dirs[0][-1]
)
last_compile_version = None
for src_dir, pattern, target_dir, compiled_version in test_dirs:
if os.path.isdir(src_dir):
checked_dirs.append([src_dir, pattern, target_dir])
else:
print("Can't find directory %s. Skipping" % src_dir,
file=sys.stderr)
print("Can't find directory %s. Skipping" % src_dir, file=sys.stderr)
continue
last_compile_version = compiled_version
pass
@@ -252,7 +290,7 @@ if __name__ == '__main__':
print("No directories found to check", file=sys.stderr)
sys.exit(1)
test_opts['compiled_version'] = last_compile_version
test_opts["compiled_version"] = last_compile_version
for src_dir, pattern, target_dir in checked_dirs:
target_dir = os.path.join(target_base, target_dir)

View File

@@ -51,14 +51,8 @@ import uncompyle6.semantics.fragments
# Export some functions
from uncompyle6.main import decompile_file
# For compatibility
uncompyle_file = decompile_file
# Convenience functions so you can say:
# from uncompyle6 import (code_deparse, deparse_code2str)
code_deparse = uncompyle6.semantics.pysource.code_deparse
deparse_code2str = uncompyle6.semantics.pysource.deparse_code2str
# This is deprecated:
deparse_code = uncompyle6.semantics.pysource.deparse_code
code_deparse = uncompyle6.semantics.pysource.code_deparse
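Per the comment above, these re-exports are meant to be used as from uncompyle6 import code_deparse, deparse_code2str. A minimal usage sketch, assuming deparse_code2str() accepts a code object and returns the reconstructed source text (its keyword arguments are not shown in this diff, so none are used here):
# Minimal sketch; assumes deparse_code2str(code_object) returns source text.
from uncompyle6 import deparse_code2str

def add(a, b):
    return a + b

print(deparse_code2str(add.__code__))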

View File

@@ -38,7 +38,7 @@ Options:
--fragments use fragments deparser
--verify compare generated source with input byte-code
--verify-run compile generated source, run it and check exit code
--weak-verify compile generated source
--syntax-verify compile generated source
--linemaps generate line number correspondences between byte-code
and generated source output
--encoding <encoding>
@@ -46,10 +46,12 @@ Options:
--help show this message
Debugging Options:
--asm | -a include byte-code (disables --verify)
--grammar | -g show matching grammar
--tree | -t include syntax tree (disables --verify)
--tree++ add template rules to --tree when possible
--asm | -a include byte-code (disables --verify)
--grammar | -g show matching grammar
--tree={before|after}
-t {before|after} include syntax before (or after) tree transformation
(disables --verify)
--tree++ | -T add template rules to --tree=before when possible
Extensions of generated files:
'.pyc_dis' '.pyo_dis' successfully decompiled (and verified if --verify)
@@ -89,9 +91,9 @@ def main_bin():
try:
opts, pyc_paths = getopt.getopt(sys.argv[1:], 'hac:gtTdrVo:p:',
'help asm compile= grammar linemaps recurse '
'timestamp tree tree+ '
'timestamp tree= tree+ '
'fragments verify verify-run version '
'weak-verify '
'syntax-verify '
'showgrammar encoding='.split(' '))
except getopt.GetoptError as e:
print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
@@ -107,7 +109,7 @@ def main_bin():
sys.exit(0)
elif opt == '--verify':
options['do_verify'] = 'strong'
elif opt == '--weak-verify':
elif opt == '--syntax-verify':
options['do_verify'] = 'weak'
elif opt == '--fragments':
options['do_fragments'] = True
@@ -119,10 +121,19 @@ def main_bin():
options['showasm'] = 'after'
options['do_verify'] = None
elif opt in ('--tree', '-t'):
options['showast'] = True
if 'showast' not in options:
options['showast'] = {}
if val == 'before':
options['showast'][val] = True
elif val == 'after':
options['showast'][val] = True
else:
options['showast']['before'] = True
options['do_verify'] = None
elif opt in ('--tree+', '-T'):
options['showast'] = 'Full'
if 'showast' not in options:
options['showast'] = {}
options['showast']['Full'] = True
options['do_verify'] = None
elif opt in ('--grammar', '-g'):
options['showgrammar'] = True

View File

@@ -45,10 +45,21 @@ def _get_outstream(outfile):
return open(outfile, mode='w', encoding='utf-8')
def decompile(
bytecode_version, co, out=None, showasm=None, showast=False,
timestamp=None, showgrammar=False, source_encoding=None, code_objects={},
source_size=None, is_pypy=None, magic_int=None,
mapstream=None, do_fragments=False):
bytecode_version,
co,
out=None,
showasm=None,
showast={},
timestamp=None,
showgrammar=False,
source_encoding=None,
code_objects={},
source_size=None,
is_pypy=None,
magic_int=None,
mapstream=None,
do_fragments=False,
):
"""
ingests and deparses a given code block 'co'
@@ -294,7 +305,7 @@ def main(in_base, out_base, compiled_files, source_files, outfile=None,
# failed_files += 1
# if current_outfile:
# outstream.close()
# os.rename(current_outfile, current_outfile + '_failed')
# os.rename(current_outfile, current_outfile + "_failed")
# else:
# sys.stderr.write("\n# %s" % sys.exc_info()[1])
# sys.stderr.write("\n# Can't uncompile %s\n" % infile)

View File

@@ -498,6 +498,7 @@ class PythonParser(GenericASTBuilder):
def p_expr(self, args):
'''
expr ::= _mklambda
expr ::= LOAD_CODE
expr ::= LOAD_FAST
expr ::= LOAD_NAME
expr ::= LOAD_CONST

View File

@@ -459,7 +459,7 @@ class Python2Parser(PythonParser):
if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
self.addRule('mklambda ::= %s LOAD_LAMBDA %s' %
('pos_arg ' * token.attr, opname), nop_func)
rule = 'mkfunc ::= %s LOAD_CONST %s' % ('expr ' * token.attr, opname)
rule = 'mkfunc ::= %s LOAD_CODE %s' % ('expr ' * token.attr, opname)
elif opname_base == 'MAKE_CLOSURE':
# FIXME: use add_unique_rules to tidy this up.
if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
@@ -474,7 +474,7 @@ class Python2Parser(PythonParser):
('expr ' * token.attr, opname))], customize)
pass
self.add_unique_rules([
('mkfunc ::= %s load_closure LOAD_CONST %s' %
('mkfunc ::= %s load_closure LOAD_CODE %s' %
('expr ' * token.attr, opname))], customize)
if self.version >= 2.7:

View File

@@ -102,6 +102,8 @@ class Python26Parser(Python2Parser):
def p_stmt26(self, args):
"""
stmt ::= ifelsestmtr
# We use filler as a placeholder to keep nonterminal positions
# the same across different grammars so that the same semantic actions
# can be used
@@ -144,6 +146,11 @@ class Python26Parser(Python2Parser):
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_cf_pop POP_BLOCK
whilestmt ::= SETUP_LOOP testexpr returns POP_BLOCK COME_FROM
# In the "whilestmt" below, there isn't a COME_FROM when the
# "while" is the last thing in the module or function.
whilestmt ::= SETUP_LOOP testexpr returns POP_TOP POP_BLOCK
whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_pop POP_BLOCK
else_suitel COME_FROM
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK else_suitel COME_FROM
@@ -173,6 +180,9 @@ class Python26Parser(Python2Parser):
iflaststmt ::= testexpr_then c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_froms POP_TOP
# "if"/"else" statement that ends in a RETURN
ifelsestmtr ::= testexpr_then return_if_stmts returns
testexpr_then ::= testtrue_then
testexpr_then ::= testfalse_then
testtrue_then ::= expr jmp_true_then
@@ -181,7 +191,11 @@ class Python26Parser(Python2Parser):
jmp_false_then ::= JUMP_IF_FALSE THEN POP_TOP
jmp_true_then ::= JUMP_IF_TRUE THEN POP_TOP
while1stmt ::= SETUP_LOOP returns COME_FROM
# In the "while1stmt" below, there sometimes isn't a
# "COME_FROM" when the "while1" is the last thing in the
# module or function.
while1stmt ::= SETUP_LOOP returns come_from_opt
for_block ::= returns _come_froms
"""
@@ -236,6 +250,9 @@ class Python26Parser(Python2Parser):
genexpr_func ::= setup_loop_lf FOR_ITER store comp_iter JUMP_ABSOLUTE come_froms
POP_TOP jb_pop jb_pb_come_from
genexpr_func ::= setup_loop_lf FOR_ITER store comp_iter JUMP_BACK come_froms
POP_TOP jb_pb_come_from
generator_exp ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1 COME_FROM
list_if ::= expr jmp_false_then list_iter
'''

View File

@@ -127,6 +127,8 @@ class Python27Parser(Python2Parser):
def p_stmt27(self, args):
"""
stmt ::= ifelsestmtr
# assert condition
assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1
@@ -179,6 +181,9 @@ class Python27Parser(Python2Parser):
ifelsestmtl ::= testexpr c_stmts_opt JUMP_BACK else_suitel
ifelsestmtl ::= testexpr c_stmts_opt CONTINUE else_suitel
# "if"/"else" statement that ends in a RETURN
ifelsestmtr ::= testexpr return_if_stmts COME_FROM returns
# Common with 2.6
return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM
stmt ::= if_expr_lambda

File diff suppressed because it is too large

View File

@@ -75,7 +75,7 @@ class Python32Parser(Python3Parser):
args_pos, args_kw, annotate_args = token.attr
# Check that there are 2 annotated params?
rule = (('mkfunc_annotate ::= %s%sannotate_tuple '
'LOAD_CONST LOAD_CONST EXTENDED_ARG %s') %
'LOAD_CONST LOAD_CODE EXTENDED_ARG %s') %
(('pos_arg ' * (args_pos)),
('annotate_arg ' * (annotate_args-1)), opname))
self.add_unique_rule(rule, opname, token.attr, customize)

View File

@@ -32,6 +32,14 @@ class Python36Parser(Python35Parser):
def p_36misc(self, args):
"""sstmt ::= sstmt RETURN_LAST
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
# ELSE added to the except_suite. With better flow control perhaps we can
# sort this out better.
except_suite ::= c_stmts_opt POP_EXCEPT jump_except ELSE
except_suite_finalize ::= SETUP_FINALLY c_stmts_opt except_var_finalize END_FINALLY
_jump ELSE
# 3.6 redoes how return_closure works. FIXME: Isolate to LOAD_CLOSURE
return_closure ::= LOAD_CLOSURE DUP_TOP STORE_NAME RETURN_VALUE RETURN_LAST
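The comment above (repeated for the 3.7 parser below) describes long except clauses inside a loop turning a JUMP_BACK into a JUMP_FORWARD to a JUMP_BACK, with an extra ELSE pseudo-token in the except_suite. Roughly the shape of source being described is sketched below; this is an editorial illustration of the pattern, not a reduced test case from the repository:
# Editorial illustration of "a long except clause inside a loop";
# not a reduced test case from this repository.
def drain(items):
    results = []
    for item in items:
        try:
            results.append(int(item))
        except ValueError:
            # a longer except body inside the loop is the situation the
            # grammar comment above is working around
            results.append(0)
            results.append(len(item))
            continue
    return results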

View File

@@ -79,6 +79,12 @@ class Python37Parser(Python36Parser):
attribute37 ::= expr LOAD_METHOD
expr ::= attribute37
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
# ELSE added to the except_suite. With better flow control perhaps we can
# sort this out better.
except_suite ::= c_stmts_opt POP_EXCEPT jump_except ELSE
# FIXME: generalize and specialize
call ::= expr CALL_METHOD_0

View File

@@ -7,6 +7,10 @@ if PYTHON3:
intern = sys.intern
class SyntaxTree(spark_AST):
def __init__(self, *args, **kwargs):
super(SyntaxTree, self).__init__(*args, **kwargs)
self.transformed_by = None
def isNone(self):
"""An SyntaxTree None token. We can't use regular list comparisons
because SyntaxTree token offsets might be different"""
@@ -23,6 +27,11 @@ class SyntaxTree(spark_AST):
if len(self) > 1:
rv += " (%d)" % (len(self))
enumerate_children = True
if self.transformed_by is not None:
if self.transformed_by is True:
rv += " (transformed)"
else:
rv += " (transformed by %s)" % self.transformed_by
rv = indent + rv
indent += ' '
i = 0

View File

@@ -287,6 +287,8 @@ class Scanner2(Scanner):
op_name = 'LOAD_DICTCOMP'
elif const.co_name == '<setcomp>':
op_name = 'LOAD_SETCOMP'
else:
op_name = "LOAD_CODE"
# verify() uses 'pattr' for comparison, since 'attr'
# now holds Code(const) and thus can not be used
# for comparison (todo: think about changing this)
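For context: the else branch added above relabels a LOAD_CONST whose constant is a code object (other than the lambda/comprehension cases already special-cased) as LOAD_CODE, which is what the new mkfunc grammar rules earlier in this diff match on. The underlying LOAD_CONST of a code object can be seen with the standard-library dis module on any interpreter; nothing below is uncompyle6-specific:
import dis

# The LOAD_CONST that pushes f's code object (just before MAKE_FUNCTION)
# is the instruction the scanner above renames to LOAD_CODE.
dis.dis(compile("def f():\n    pass\n", "<example>", "exec"))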

View File

@@ -173,6 +173,8 @@ class Scanner26(scan.Scanner2):
op_name = 'LOAD_DICTCOMP'
elif const.co_name == '<setcomp>':
op_name = 'LOAD_SETCOMP'
else:
op_name = "LOAD_CODE"
# verify uses 'pattr' for comparison, since 'attr'
# now holds Code(const) and thus can not be used
# for comparison (todo: think about changing this)

File diff suppressed because it is too large

View File

@@ -21,19 +21,30 @@ from uncompyle6 import PYTHON3
if PYTHON3:
intern = sys.intern
class Token():
class Token:
"""
Class representing a byte-code instruction.
A byte-code token is equivalent to Python 3's dis.instruction or
the contents of one line as output by dis.dis().
"""
# FIXME: match Python 3.4's terms:
# linestart = starts_line
# attr = argval
# pattr = argrepr
def __init__(self, opname, attr=None, pattr=None, offset=-1,
linestart=None, op=None, has_arg=None, opc=None):
def __init__(
self,
opname,
attr=None,
pattr=None,
offset=-1,
linestart=None,
op=None,
has_arg=None,
opc=None,
):
self.kind = intern(opname)
self.has_arg = has_arg
self.attr = attr
@@ -46,6 +57,7 @@ class Token():
if opc is None:
from xdis.std import _std_api
self.opc = _std_api.opc
else:
self.opc = opc
@@ -58,10 +70,9 @@ class Token():
""" '==' on kind and "pattr" attributes.
It is okay if offsets and linestarts are different"""
if isinstance(o, Token):
return (
(self.kind == o.kind)
and ((self.pattr == o.pattr) or self.attr == o.attr)
)
return (self.kind == o.kind) and (
(self.pattr == o.pattr) or self.attr == o.attr
)
else:
# ?? do we need this?
return self.kind == o
@@ -80,56 +91,67 @@ class Token():
# ('%9s %-18s %r' % (self.offset, self.kind, pattr)))
def __str__(self):
return self.format(line_prefix='')
return self.format(line_prefix="")
def format(self, line_prefix=''):
prefix = ('\n%s%4d ' % (line_prefix, self.linestart)
if self.linestart else (' ' * (6 + len(line_prefix))))
offset_opname = '%6s %-17s' % (self.offset, self.kind)
def format(self, line_prefix=""):
prefix = (
"\n%s%4d " % (line_prefix, self.linestart)
if self.linestart
else (" " * (6 + len(line_prefix)))
)
offset_opname = "%6s %-17s" % (self.offset, self.kind)
if not self.has_arg:
return "%s%s" % (prefix, offset_opname)
argstr = "%6d " % self.attr if isinstance(self.attr, int) else (' '*7)
argstr = "%6d " % self.attr if isinstance(self.attr, int) else (" " * 7)
name = self.kind
if self.has_arg:
pattr = self.pattr
if self.opc:
if self.op in self.opc.JREL_OPS:
if not self.pattr.startswith('to '):
if not self.pattr.startswith("to "):
pattr = "to " + self.pattr
elif self.op in self.opc.JABS_OPS:
self.pattr = str(self.pattr)
if not self.pattr.startswith('to '):
if not self.pattr.startswith("to "):
pattr = "to " + str(self.pattr)
pass
elif self.op in self.opc.CONST_OPS:
if name == 'LOAD_STR':
if name == "LOAD_STR":
pattr = self.attr
elif name == 'LOAD_CODE':
return "%s%s%s %s" % (prefix, offset_opname, argstr, pattr)
elif name == "LOAD_CODE":
return "%s%s%s %s" % (prefix, offset_opname, argstr, pattr)
else:
return "%s%s %r" % (prefix, offset_opname, pattr)
return "%s%s %r" % (prefix, offset_opname, pattr)
elif self.op in self.opc.hascompare:
if isinstance(self.attr, int):
pattr = self.opc.cmp_op[self.attr]
return "%s%s%s %s" % (prefix, offset_opname, argstr, pattr)
return "%s%s%s %s" % (prefix, offset_opname, argstr, pattr)
elif self.op in self.opc.hasvargs:
return "%s%s%s" % (prefix, offset_opname, argstr)
return "%s%s%s" % (prefix, offset_opname, argstr)
elif name == 'LOAD_ASSERT':
return "%s%s %s" % (prefix, offset_opname, pattr)
elif self.op in self.opc.NAME_OPS:
if self.opc.version >= 3.0:
return "%s%s%s %s" % (prefix, offset_opname, argstr, self.attr)
elif name == 'EXTENDED_ARG':
return "%s%s%s 0x%x << %s = %s" % (prefix, offset_opname, argstr, self.attr,
self.opc.EXTENDED_ARG_SHIFT, pattr)
return "%s%s%s %s" % (prefix, offset_opname, argstr, self.attr)
elif name == "EXTENDED_ARG":
return "%s%s%s 0x%x << %s = %s" % (
prefix,
offset_opname,
argstr,
self.attr,
self.opc.EXTENDED_ARG_SHIFT,
pattr,
)
# And so on. See xdis/bytecode.py get_instructions_bytes
pass
elif re.search(r'_\d+$', self.kind):
return "%s%s%s" % (prefix, offset_opname, argstr)
elif re.search(r"_\d+$", self.kind):
return "%s%s%s" % (prefix, offset_opname, argstr)
else:
pattr = ''
return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
pattr = ""
return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
def __hash__(self):
return hash(self.kind)
@@ -137,4 +159,5 @@ class Token():
def __getitem__(self, i):
raise IndexError
NoneToken = Token('LOAD_CONST', offset=-1, attr=None, pattr=None)
NoneToken = Token("LOAD_CONST", offset=-1, attr=None, pattr=None)

View File

@@ -350,23 +350,34 @@ TABLE_DIRECT = {
'testtrue': ( 'not %p',
(0, PRECEDENCE['unary_not']) ),
# Generally the args here are 0: (some sort of) "testexpr",
# 1: (some sort of) "cstmts_opt",
# 2 or 3: "else_suite"
# But unfortunately there are irregularities. For example, 2.6- uses "testexpr_then"
# and sometimes "cstmts" appears instead of "cstmts_opt".
# Down the line we might isolate these into version-specific rules.
'ifelsestmt': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtc': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtl': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
'ifelsestmtr2': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM in position 2
# "elif" forms are not generated by the parser but are created through tree
# transformations. See "n_ifelsestmt".
'ifelifstmt': ( '%|if %c:\n%+%c%-%c', 0, 1, 3 ),
'elifelifstmt': ( '%|elif %c:\n%+%c%-%c', 0, 1, 3 ),
'elifstmt': ( '%|elif %c:\n%+%c%-', 0, 1 ),
'elifelsestmt': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
'ifelsestmtr2': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
'elifelsestmtr': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 2 ),
'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM in position 2
'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
'whileelsestmt2': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -3 ),
'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
# Note: Python 3.8+ changes this
@@ -399,6 +410,8 @@ TABLE_DIRECT = {
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
'except': ( '%|except:\n%+%c%-', 3 ),
'except_cond1': ( '%|except %c:\n', 1 ),
'except_cond2': ( '%|except %c as %c:\n',
(1, 'expr'), (5, 'store') ),
'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
# In Python 3.6, this is more complicated in the presence of "returns"

View File

@@ -16,12 +16,12 @@
"""Isolate Python version-specific semantic actions here.
"""
from uncompyle6.semantics.consts import (
TABLE_R, TABLE_DIRECT)
from uncompyle6.semantics.consts import PRECEDENCE, TABLE_R, TABLE_DIRECT
from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.scanners.tok import Token
def customize_for_version(self, is_pypy, version):
if is_pypy:
########################
@@ -40,94 +40,130 @@ def customize_for_version(self, is_pypy, version):
# Without PyPy
#######################
TABLE_DIRECT.update({
'assert': ( '%|assert %c\n' , 0 ),
'assert2': ( '%|assert %c, %c\n' , 0, 3 ),
'try_except': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
'assign2': ( '%|%c, %c = %c, %c\n',
3, 4, 0, 1 ),
'assign3': ( '%|%c, %c, %c = %c, %c, %c\n',
5, 6, 7, 0, 1, 2 ),
})
if version >= 3.0:
"assert": ("%|assert %c\n", (0, "assert_expr")),
"assert2": ("%|assert %c, %c\n", (0, "assert_expr"), 3),
# Created only via transformation
"assertnot": ("%|assert not %p\n", (0, PRECEDENCE['unary_not'])),
"assert2not": ( "%|assert not %p, %c\n" ,
(0, PRECEDENCE['unary_not']), 3 ),
"assign2": ("%|%c, %c = %c, %c\n", 3, 4, 0, 1),
"assign3": ("%|%c, %c, %c = %c, %c, %c\n", 5, 6, 7, 0, 1, 2),
"try_except": ("%|try:\n%+%c%-%c\n\n", 1, 3),
})
if version >= 3.0:
if version >= 3.2:
TABLE_DIRECT.update({
'del_deref_stmt': ( '%|del %c\n', 0),
'DELETE_DEREF': ( '%{pattr}', 0 ),
})
TABLE_DIRECT.update(
{"del_deref_stmt": ("%|del %c\n", 0), "DELETE_DEREF": ("%{pattr}", 0)}
)
from uncompyle6.semantics.customize3 import customize_for_version3
customize_for_version3(self, version)
else: # < 3.0
TABLE_DIRECT.update(
{"except_cond3": ("%|except %c, %c:\n", (1, "expr"), (-2, "store"))}
)
if 2.4 <= version <= 2.6:
TABLE_DIRECT.update({
'comp_for': ( ' for %c in %c', 3, 1 ),
})
TABLE_DIRECT.update({"comp_for": (" for %c in %c", 3, 1)})
else:
TABLE_DIRECT.update({
'comp_for': ( ' for %c in %c%c', 2, 0, 3 ),
})
TABLE_DIRECT.update({"comp_for": (" for %c in %c%c", 2, 0, 3)})
if version >= 2.5:
from uncompyle6.semantics.customize25 import customize_for_version25
customize_for_version25(self, version)
if version >= 2.6:
from uncompyle6.semantics.customize26_27 import customize_for_version26_27
from uncompyle6.semantics.customize26_27 import (
customize_for_version26_27,
)
customize_for_version26_27(self, version)
pass
else: # < 2.5
global NAME_MODULE
NAME_MODULE = SyntaxTree('stmt',
[ SyntaxTree('assign',
[ SyntaxTree('expr',
[Token('LOAD_GLOBAL', pattr='__name__',
offset=0, has_arg=True)]),
SyntaxTree('store',
[ Token('STORE_NAME', pattr='__module__',
offset=3, has_arg=True)])
])])
TABLE_DIRECT.update({
'importmultiple': ( '%|import %c%c\n', 2, 3),
'import_cont' : ( ', %c', 2),
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-',
(1, 'suite_stmts_opt') ,
(5, 'suite_stmts_opt') )
})
NAME_MODULE = SyntaxTree(
"stmt",
[
SyntaxTree(
"assign",
[
SyntaxTree(
"expr",
[
Token(
"LOAD_GLOBAL",
pattr="__name__",
offset=0,
has_arg=True,
)
],
),
SyntaxTree(
"store",
[
Token(
"STORE_NAME",
pattr="__module__",
offset=3,
has_arg=True,
)
],
),
],
)
],
)
TABLE_DIRECT.update(
{
"importmultiple": ("%|import %c%c\n", 2, 3),
"import_cont": (", %c", 2),
"tryfinallystmt": (
"%|try:\n%+%c%-%|finally:\n%+%c%-",
(1, "suite_stmts_opt"),
(5, "suite_stmts_opt"),
),
}
)
if version == 2.4:
def n_iftrue_stmt24(node):
self.template_engine(('%c', 0), node)
self.template_engine(("%c", 0), node)
self.default(node)
self.prune()
self.n_iftrue_stmt24 = n_iftrue_stmt24
else: # version <= 2.3:
TABLE_DIRECT.update({
'if1_stmt': ( '%|if 1\n%+%c%-', 5 )
})
TABLE_DIRECT.update({"if1_stmt": ("%|if 1\n%+%c%-", 5)})
if version <= 2.1:
TABLE_DIRECT.update({
'importmultiple': ( '%c', 2 ),
# FIXME: not quite right. We have individual imports
# when there is in fact one: "import a, b, ..."
'imports_cont': ( '%C%,', (1, 100, '\n') ),
})
TABLE_DIRECT.update(
{
"importmultiple": ("%c", 2),
# FIXME: not quite right. We have individual imports
# when there is in fact one: "import a, b, ..."
"imports_cont": ("%C%,", (1, 100, "\n")),
}
)
pass
pass
pass # < 2.5
pass # < 2.5
# < 3.0 continues
TABLE_R.update({
'STORE_SLICE+0': ( '%c[:]', 0 ),
'STORE_SLICE+1': ( '%c[%p:]', 0, (1, -1) ),
'STORE_SLICE+2': ( '%c[:%p]', 0, (1, -1) ),
'STORE_SLICE+3': ( '%c[%p:%p]', 0, (1, -1), (2, -1) ),
'DELETE_SLICE+0': ( '%|del %c[:]\n', 0 ),
'DELETE_SLICE+1': ( '%|del %c[%c:]\n', 0, 1 ),
'DELETE_SLICE+2': ( '%|del %c[:%c]\n', 0, 1 ),
'DELETE_SLICE+3': ( '%|del %c[%c:%c]\n', 0, 1, 2 ),
})
TABLE_DIRECT.update({
'raise_stmt2': ( '%|raise %c, %c\n', 0, 1),
})
TABLE_R.update(
{
"STORE_SLICE+0": ("%c[:]", 0),
"STORE_SLICE+1": ("%c[%p:]", 0, (1, -1)),
"STORE_SLICE+2": ("%c[:%p]", 0, (1, -1)),
"STORE_SLICE+3": ("%c[%p:%p]", 0, (1, -1), (2, -1)),
"DELETE_SLICE+0": ("%|del %c[:]\n", 0),
"DELETE_SLICE+1": ("%|del %c[%c:]\n", 0, 1),
"DELETE_SLICE+2": ("%|del %c[:%c]\n", 0, 1),
"DELETE_SLICE+3": ("%|del %c[%c:%c]\n", 0, 1, 2),
}
)
TABLE_DIRECT.update({"raise_stmt2": ("%|raise %c, %c\n", 0, 1)})
# exec as a built-in statement is only in Python 2.x
def n_exec_stmt(node):
@@ -135,17 +171,19 @@ def customize_for_version(self, is_pypy, version):
exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
exec_stmt ::= expr exprlist EXEC_STMT
"""
self.write(self.indent, 'exec ')
self.write(self.indent, "exec ")
self.preorder(node[0])
if not node[1][0].isNone():
sep = ' in '
sep = " in "
for subnode in node[1]:
self.write(sep); sep = ", "
self.write(sep)
sep = ", "
self.preorder(subnode)
self.println()
self.prune() # stop recursing
self.prune() # stop recursing
self.n_exec_smt = n_exec_stmt
pass # < 3.0
pass # < 3.0
return

View File

@@ -26,8 +26,6 @@ def customize_for_version25(self, version):
# Import style for 2.5+
########################
TABLE_DIRECT.update({
'except_cond3' : ( '%|except %c, %c:\n',
(1, 'expr'), (-2, 'store') ),
'importmultiple': ( '%|import %c%c\n', 2, 3 ),
'import_cont' : ( ', %c', 2 ),
# With/as is allowed as "from future" thing in 2.5

View File

@@ -26,27 +26,30 @@ from uncompyle6.semantics.customize36 import customize_for_version36
from uncompyle6.semantics.customize37 import customize_for_version37
from uncompyle6.semantics.customize38 import customize_for_version38
def customize_for_version3(self, version):
TABLE_DIRECT.update({
'comp_for' : ( ' for %c in %c',
(2, 'store') , (0, 'expr') ),
'conditionalnot' : ( '%c if not %c else %c',
(2, 'expr') , (0, 'expr'), (4, 'expr') ),
'except_cond2' : ( '%|except %c as %c:\n', 1, 5 ),
'function_def_annotate': ( '\n\n%|def %c%c\n', -1, 0),
# When a generator is a single parameter of a function,
# it doesn't need the surrounding parenthesis.
'call_generator' : ('%c%P', 0, (1, -1, ', ', 100)),
'importmultiple' : ( '%|import %c%c\n', 2, 3 ),
'import_cont' : ( ', %c', 2 ),
'kwarg' : ( '%[0]{attr}=%c', 1),
'raise_stmt2' : ( '%|raise %c from %c\n', 0, 1),
'store_locals' : ( '%|# inspect.currentframe().f_locals = __locals__\n', ),
'withstmt' : ( '%|with %c:\n%+%c%-', 0, 3),
'withasstmt' : ( '%|with %c as (%c):\n%+%c%-', 0, 2, 3),
})
TABLE_DIRECT.update(
{
"comp_for": (" for %c in %c", (2, "store"), (0, "expr")),
"conditionalnot": (
"%c if not %c else %c",
(2, "expr"),
(0, "expr"),
(4, "expr"),
),
"function_def_annotate": ("\n\n%|def %c%c\n", -1, 0),
# When a generator is a single parameter of a function,
# it doesn't need the surrounding parenthesis.
"call_generator": ("%c%P", 0, (1, -1, ", ", 100)),
"importmultiple": ("%|import %c%c\n", 2, 3),
"import_cont": (", %c", 2),
"kwarg": ("%[0]{attr}=%c", 1),
"raise_stmt2": ("%|raise %c from %c\n", 0, 1),
"store_locals": ("%|# inspect.currentframe().f_locals = __locals__\n",),
"withstmt": ("%|with %c:\n%+%c%-", 0, 3),
"withasstmt": ("%|with %c as (%c):\n%+%c%-", 0, 2, 3),
}
)
assert version >= 3.0
@@ -61,7 +64,7 @@ def customize_for_version3(self, version):
# ----------
# * subclass_code - the code for the subclass body
subclass_info = None
if node == 'classdefdeco2':
if node == "classdefdeco2":
if self.version >= 3.6:
class_name = node[1][1].attr
elif self.version <= 3.3:
@@ -72,17 +75,17 @@ def customize_for_version3(self, version):
else:
build_class = node[0]
if self.version >= 3.6:
if build_class == 'build_class_kw':
if build_class == "build_class_kw":
mkfunc = build_class[1]
assert mkfunc == 'mkfunc'
assert mkfunc == "mkfunc"
subclass_info = build_class
if hasattr(mkfunc[0], 'attr') and iscode(mkfunc[0].attr):
if hasattr(mkfunc[0], "attr") and iscode(mkfunc[0].attr):
subclass_code = mkfunc[0].attr
else:
assert mkfunc[0] == 'load_closure'
assert mkfunc[0] == "load_closure"
subclass_code = mkfunc[1].attr
assert iscode(subclass_code)
if build_class[1][0] == 'load_closure':
if build_class[1][0] == "load_closure":
code_node = build_class[1][1]
else:
code_node = build_class[1][0]
@@ -91,72 +94,72 @@ def customize_for_version3(self, version):
class_name = node[1][0].attr
build_class = node[0]
assert 'mkfunc' == build_class[1]
assert "mkfunc" == build_class[1]
mkfunc = build_class[1]
if mkfunc[0] in ('kwargs', 'no_kwargs'):
if mkfunc[0] in ("kwargs", "no_kwargs"):
if 3.0 <= self.version <= 3.2:
for n in mkfunc:
if hasattr(n, 'attr') and iscode(n.attr):
if hasattr(n, "attr") and iscode(n.attr):
subclass_code = n.attr
break
elif n == 'expr':
elif n == "expr":
subclass_code = n[0].attr
pass
pass
else:
for n in mkfunc:
if hasattr(n, 'attr') and iscode(n.attr):
if hasattr(n, "attr") and iscode(n.attr):
subclass_code = n.attr
break
pass
pass
if node == 'classdefdeco2':
if node == "classdefdeco2":
subclass_info = node
else:
subclass_info = node[0]
elif build_class[1][0] == 'load_closure':
elif build_class[1][0] == "load_closure":
# Python 3 with closures not functions
load_closure = build_class[1]
if hasattr(load_closure[-3], 'attr'):
if hasattr(load_closure[-3], "attr"):
# Python 3.3 classes with closures work like this.
# Note have to test before 3.2 case because
# index -2 also has an attr.
subclass_code = load_closure[-3].attr
elif hasattr(load_closure[-2], 'attr'):
elif hasattr(load_closure[-2], "attr"):
# Python 3.2 works like this
subclass_code = load_closure[-2].attr
else:
raise 'Internal Error n_classdef: cannot find class body'
if hasattr(build_class[3], '__len__'):
raise "Internal Error n_classdef: cannot find class body"
if hasattr(build_class[3], "__len__"):
if not subclass_info:
subclass_info = build_class[3]
elif hasattr(build_class[2], '__len__'):
elif hasattr(build_class[2], "__len__"):
subclass_info = build_class[2]
else:
raise 'Internal Error n_classdef: cannot superclass name'
elif self.version >= 3.6 and node == 'classdefdeco2':
raise "Internal Error n_classdef: cannot superclass name"
elif self.version >= 3.6 and node == "classdefdeco2":
subclass_info = node
subclass_code = build_class[1][0].attr
elif not subclass_info:
if mkfunc[0] in ('no_kwargs', 'kwargs'):
if mkfunc[0] in ("no_kwargs", "kwargs"):
subclass_code = mkfunc[1].attr
else:
subclass_code = mkfunc[0].attr
if node == 'classdefdeco2':
if node == "classdefdeco2":
subclass_info = node
else:
subclass_info = node[0]
if (node == 'classdefdeco2'):
self.write('\n')
if node == "classdefdeco2":
self.write("\n")
else:
self.write('\n\n')
self.write("\n\n")
self.currentclass = str(class_name)
self.write(self.indent, 'class ', self.currentclass)
self.write(self.indent, "class ", self.currentclass)
self.print_super_classes3(subclass_info)
self.println(':')
self.println(":")
# class body
self.indent_more()
@@ -165,11 +168,12 @@ def customize_for_version3(self, version):
self.currentclass = cclass
if len(self.param_stack) > 1:
self.write('\n\n')
self.write("\n\n")
else:
self.write('\n\n\n')
self.write("\n\n\n")
self.prune()
self.n_classdef3 = n_classdef3
if version == 3.0:
@@ -178,28 +182,28 @@ def customize_for_version3(self, version):
# since we pick up the iteration variable some other way and
# we definitely don't include in the source _[dd].
def n_comp_iter(node):
if node[0] == 'expr':
if node[0] == "expr":
n = node[0][0]
if (n == 'LOAD_FAST' and
n.pattr[0:2] == '_['):
if n == "LOAD_FAST" and n.pattr[0:2] == "_[":
self.prune()
pass
pass
# Not this special case, procede as normal...
# Not this special case, proceed as normal...
self.default(node)
self.n_comp_iter = n_comp_iter
if version >= 3.3:
elif version == 3.3:
# FIXME: perhaps this can be folded into the 3.4+ case?
def n_yield_from(node):
self.write('yield from')
self.write(' ')
if 3.3 <= self.version <= 3.4:
self.preorder(node[0][0][0][0])
elif self.version >= 3.5:
self.preorder(node[0])
else:
assert False, "dunno about this python version"
self.prune() # stop recursing
assert node[0] == "expr"
assert node[0][0] == "get_iter"
# Skip over yield_from.expr.get_iter which adds an
# extra iter(). Maybe we can do in tranformation phase instead?
template = ("yield from %c", (0, "expr"))
self.template_engine(template, node[0][0])
self.prune()
self.n_yield_from = n_yield_from
if 3.2 <= version <= 3.4:
@@ -211,11 +215,11 @@ def customize_for_version3(self, version):
for i in mapping[1:]:
key = key[i]
pass
if key.kind.startswith('CALL_FUNCTION_VAR_KW'):
if key.kind.startswith("CALL_FUNCTION_VAR_KW"):
# We may want to fill this in...
# But it is distinct from CALL_FUNCTION_VAR below
pass
elif key.kind.startswith('CALL_FUNCTION_VAR'):
elif key.kind.startswith("CALL_FUNCTION_VAR"):
# CALL_FUNCTION_VAR's top element of the stack contains
# the variable argument list, then comes
# annotation args, then keyword args.
@@ -229,12 +233,15 @@ def customize_for_version3(self, version):
# kwargs == 0 is handled by the table entry
# Should probably handle it here though.
if nargs == 0:
template = ('%c(*%c, %C)',
0, -2, (1, kwargs+1, ', '))
template = ("%c(*%c, %C)", 0, -2, (1, kwargs + 1, ", "))
else:
template = ('%c(%C, *%c, %C)',
0, (1, nargs+1, ', '),
-2, (-2-kwargs, -2, ', '))
template = (
"%c(%C, *%c, %C)",
0,
(1, nargs + 1, ", "),
-2,
(-2 - kwargs, -2, ", "),
)
self.template_engine(template, node)
self.prune()
else:
@@ -243,6 +250,7 @@ def customize_for_version3(self, version):
self.n_call = n_call
elif version < 3.2:
def n_call(node):
mapping = self._get_mapping(node)
key = node
@@ -251,17 +259,23 @@ def customize_for_version3(self, version):
pass
gen_function_parens_adjust(key, node)
self.default(node)
self.n_call = n_call
def n_mkfunc_annotate(node):
# Handling EXTENDED_ARG before MAKE_FUNCTION ...
i = -1 if node[-2] == "EXTENDED_ARG" else 0
if self.version <= 3.2:
code = node[-2 + i]
elif self.version >= 3.3 or node[-2] == "kwargs":
# LOAD_CONST code object ..
# LOAD_CONST 'x0' if >= 3.3
# EXTENDED_ARG
# MAKE_FUNCTION ..
code = node[-4]
code = node[-3 + i]
elif node[-3] == "expr":
code = node[-3][0]
else:
# LOAD_CONST code object ..
@@ -269,42 +283,51 @@ def customize_for_version3(self, version):
code = node[-3]
self.indent_more()
for annotate_last in range(len(node) - 1, -1, -1):
if node[annotate_last] == "annotate_tuple":
break
# FIXME: the real situation is that when derived from
# function_def_annotate the name has been filled in.
# But when derived from funcdefdeco it hasn't. Would like a better
# way to distinguish.
if self.f.getvalue()[-4:] == "def ":
self.write(code.attr.co_name)
# FIXME: handle and pass full annotate args
make_function3_annotate(
self, node, is_lambda=False, code_node=code, annotate_last=annotate_last
)
if len(self.param_stack) > 1:
self.write("\n\n")
else:
self.write("\n\n\n")
self.indent_less()
self.prune()  # stop recursing
self.n_mkfunc_annotate = n_mkfunc_annotate
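As a rough illustration, the sort of annotated definition n_mkfunc_annotate deparses; the example is hypothetical, not taken from the diff:

def scale(x: int, factor: float = 2.0) -> float:
    # The parameter and return annotations end up in the annotate_tuple node
    # that the loop above searches for.
    return x * factor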
TABLE_DIRECT.update(
{
"tryelsestmtl3": (
"%|try:\n%+%c%-%c%|else:\n%+%c%-",
(1, "suite_stmts_opt"),
(3, "except_handler"),
(5, "else_suitel"),
)
}
)
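A sketch of source with the try/except/else shape that the tryelsestmtl3 template above lays out; the function and names are illustrative assumptions:

def first_doubled(items):
    try:
        value = items[0]
    except IndexError:
        value = None
    else:
        value = value * 2
    return value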
if version >= 3.4:
#######################
# Python 3.4+ Changes #
#######################
TABLE_DIRECT.update(
{
"LOAD_CLASSDEREF": ("%{pattr}",),
"yield_from": ("yield from %c", (0, "expr")),
}
)
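For context, a hedged example of 3.4+ source that is believed to produce LOAD_CLASSDEREF, which the %{pattr} entry above prints back by name; the names are illustrative:

def make_config(default):
    class Config:
        # Reading the enclosing cell variable inside a class body compiles
        # to LOAD_CLASSDEREF on Python 3.4+.
        value = default
    return Config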
if version >= 3.5:
customize_for_version35(self, version)
if version >= 3.6:
@@ -314,8 +337,8 @@ def customize_for_version3(self, version):
if version >= 3.8:
customize_for_version38(self, version)
pass # version >= 3.8
pass # 3.7
pass # 3.6
pass # 3.5
pass # 3.4
return

View File

@@ -56,7 +56,65 @@ def customize_for_version35(self, version):
node.kind == 'async_call'
self.prune()
self.n_async_call = async_call
def n_build_list_unpack(node):
"""
prettyprint a list or tuple
"""
p = self.prec
self.prec = 100
lastnode = node.pop()
lastnodetype = lastnode.kind
# If this build list is inside a CALL_FUNCTION_VAR,
# then the first * has already been printed.
# Until I have a better way to check for CALL_FUNCTION_VAR,
# will assume that if the text ends in *.
last_was_star = self.f.getvalue().endswith("*")
if lastnodetype.startswith("BUILD_LIST"):
self.write("[")
endchar = "]"
flat_elems = flatten_list(node)
self.indent_more(INDENT_PER_LEVEL)
sep = ""
for elem in flat_elems:
if elem in ("ROT_THREE", "EXTENDED_ARG"):
continue
assert elem == "expr"
line_number = self.line_number
use_star = True
value = self.traverse(elem)
if value.startswith("("):
assert value.endswith(")")
use_star = False
value = value[1:-1].rstrip(" ") # Remove starting '(' and trailing ')' and additional spaces
if value == "":
pass
else:
if value.endswith(","): # if args has only one item
value = value[:-1]
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
else:
if sep != "":
sep += " "
if not last_was_star and use_star:
sep += "*"
pass
else:
last_was_star = False
self.write(sep, value)
sep = ","
self.write(endchar)
self.indent_less(INDENT_PER_LEVEL)
self.prec = p
self.prune()
return
self.n_build_list_unpack = n_build_list_unpack
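To make the handler concrete, a hedged example of 3.5-era source that is believed to compile to BUILD_LIST_UNPACK and the related call unpacking the code above checks for; exact opcodes vary by Python version:

a = [1, 2]
b = (3, 4)
merged = [*a, *b]    # a list display with unpacking -> BUILD_LIST_UNPACK
print(*a, *b)        # several unpackings in one call also go through the *_UNPACK opcodes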
def n_call(node):
mapping = self._get_mapping(node)

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2015-2019 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -1636,16 +1636,27 @@ class FragmentsWalker(pysource.SourceWalker, object):
finish = len(self.f.getvalue())
self.set_pos_info(node, start, finish)
arg += 1
elif typ == "p":
p = self.prec
tup = entry[arg]
assert isinstance(tup, tuple)
if len(tup) == 3:
(index, nonterm_name, self.prec) = tup
assert node[index] == nonterm_name, (
"at %s[%d], expected '%s' node; got '%s'"
% (node.kind, arg, nonterm_name, node[index].kind)
)
else:
assert len(tup) == 2
(index, self.prec) = entry[arg]
node[index].parent = node
start = len(self.f.getvalue())
self.preorder(node[index])
self.set_pos_info(node, start, len(self.f.getvalue()))
self.prec = p
arg += 1
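# Illustrative (hypothetical) %p specifier arguments of the two shapes accepted above:
#     (0, 27)           2-tuple: child index and the precedence to use while descending
#     (0, "expr", 27)   3-tuple: additionally names the expected nonterminal, which the
#                       assert checks against node[index] before descending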
elif typ == "C":
low, high, sep = entry[arg]
lastC = remaining = len(node[low:high])
start = len(self.f.getvalue())
@@ -1657,7 +1668,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.set_pos_info(node, start, len(self.f.getvalue()))
arg += 1
elif typ == "D":
low, high, sep = entry[arg]
lastC = remaining = len(node[low:high])
for subnode in node[low:high]:
@@ -1670,13 +1681,13 @@ class FragmentsWalker(pysource.SourceWalker, object):
pass
pass
arg += 1
elif typ == "x":
src, dest = entry[arg]
for d in dest:
self.set_pos_info_recurse(node[d], node[src].start, node[src].finish)
pass
arg += 1
elif typ == "P":
p = self.prec
low, high, sep, self.prec = entry[arg]
lastC = remaining = len(node[low:high])
@@ -1689,13 +1700,13 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.prec = p
arg += 1
elif typ == "{":
d = node.__dict__
expr = m.group("expr")
# Line mapping stuff
if (hasattr(node, "linestart") and node.linestart
and hasattr(node, "current_line_number")):
self.source_linemap[self.current_line_number] = node.linestart
# Additional fragment-position stuff
try:
@@ -1766,7 +1777,7 @@ def code_deparse(co, out=StringIO(), version=None, is_pypy=None,
:param out: File like object to write the output to.
:param debug_opts: A dictionary with keys
'asm': value determines whether to show
mangled bytecode disassembly
'ast': value determines whether to show
'grammar': boolean determining whether to show
grammar reduction rules.
@@ -1918,6 +1929,8 @@ def deparsed_find(tup, deparsed, code):
"""
nodeInfo = None
name, last_i = tup
if not hasattr(deparsed, "offsets"):
return None
if (name, last_i) in deparsed.offsets.keys():
nodeInfo = deparsed.offsets[name, last_i]
else:

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,249 @@
# Copyright (c) 2019 by Rocky Bernstein
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from uncompyle6.show import maybe_show_tree
from copy import copy
from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException
from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.scanners.tok import Token
from uncompyle6.semantics.consts import RETURN_NONE
def is_docstring(node):
try:
return node[0][0].kind == "assign" and node[0][0][1][0].pattr == "__doc__"
except:
return False
class TreeTransform(GenericASTTraversal, object):
def __init__(self, version, show_ast=None):
self.version = version
self.showast = show_ast
return
def maybe_show_tree(self, ast):
if isinstance(self.showast, dict) and self.showast:
maybe_show_tree(self, ast)
def preorder(self, node=None):
"""Walk the tree in roughly 'preorder' (a bit of a lie explained below).
For each node with typestring name *name* if the
node has a method called n_*name*, call that before walking
children.
In typical use a node with children can call "preorder" in any
order it wants, which may skip children or order them in ways
other than first to last. In fact, this happens. So in this
sense this function is not strictly preorder.
"""
if node is None:
node = self.ast
try:
name = "n_" + self.typestring(node)
if hasattr(self, name):
func = getattr(self, name)
node = func(node)
except GenericASTTraversalPruningException:
return
for i, kid in enumerate(node):
node[i] = self.preorder(kid)
return node
def n_ifstmt(self, node):
"""Here we check if we can turn an `ifstmt` or 'iflaststmtl` into
some kind of `assert` statement"""
testexpr = node[0]
if testexpr.kind != "testexpr":
return node
if node.kind == "ifstmt":
ifstmts_jump = node[1]
if node[1] != "_ifstmts_jump":
return node
stmts = ifstmts_jump[0]
else:
# iflaststmtl works this way
stmts = node[1]
if stmts in ("c_stmts",) and len(stmts) == 1:
stmt = stmts[0]
raise_stmt = stmt[0]
if raise_stmt == "raise_stmt1" and len(testexpr[0]) == 2:
assert_expr = testexpr[0][0]
assert_expr.kind = "assert_expr"
jump_cond = testexpr[0][1]
expr = raise_stmt[0]
RAISE_VARARGS_1 = raise_stmt[1]
if expr[0] == "call":
# ifstmt
# 0. testexpr
# testtrue (2)
# 0. expr
# 1. _ifstmts_jump (2)
# 0. c_stmts
# stmt
# raise_stmt1 (2)
# 0. expr
# call (3)
# 1. RAISE_VARARGS_1
# becomes:
# assert2 ::= assert_expr jmp_true LOAD_ASSERT expr RAISE_VARARGS_1 COME_FROM
if jump_cond == "jmp_true":
kind = "assert2"
else:
assert jump_cond == "jmp_false"
kind = "assert2not"
call = expr[0]
LOAD_ASSERT = call[0]
expr = call[1][0]
node = SyntaxTree(
kind,
[assert_expr, jump_cond, LOAD_ASSERT, expr, RAISE_VARARGS_1]
)
else:
# ifstmt
# 0. testexpr (2)
# testtrue
# 0. expr
# 1. _ifstmts_jump (2)
# 0. c_stmts
# stmts
# raise_stmt1 (2)
# 0. expr
# LOAD_ASSERT
# 1. RAISE_VARARGS_1
# becomes:
# assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
if jump_cond == "jmp_true":
kind = "assert"
else:
assert jump_cond == "jmp_false"
kind = "assertnot"
LOAD_ASSERT = expr[0]
node = SyntaxTree(
kind,
[assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
)
node.transformed_by="n_ifstmt",
pass
pass
return node
n_iflaststmtl = n_ifstmt
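For orientation, a rough before/after sketch of the source-level effect of the assert rewrite above; the function is hypothetical and the untransformed rendering is approximate:

# Roughly what the untransformed tree renders as:
def check_positive(x):
    if not x > 0:
        raise AssertionError("x must be positive")

# What the rewritten assert2 node lets the walker emit:
def check_positive(x):
    assert x > 0, "x must be positive"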
# preprocess is used for handling chains of
# if elif elif
def n_ifelsestmt(self, node, preprocess=False):
"""
Here we turn:
if ...
else
if ..
into:
if ..
elif ...
[else ...]
where appropriate
"""
else_suite = node[3]
n = else_suite[0]
old_stmts = None
if len(n) == 1 == len(n[0]) and n[0] == "stmt":
n = n[0][0]
elif n[0].kind in ("lastc_stmt", "lastl_stmt"):
n = n[0]
if n[0].kind in (
"ifstmt",
"iflaststmt",
"iflaststmtl",
"ifelsestmtl",
"ifelsestmtc",
):
# This seems needed for Python 2.5-2.7
n = n[0]
pass
pass
elif len(n) > 1 and 1 == len(n[0]) and n[0] == "stmt" and n[1].kind == "stmt":
else_suite_stmts = n[0]
if else_suite_stmts[0].kind not in ("ifstmt", "iflaststmt", "ifelsestmtl"):
return node
old_stmts = n
n = else_suite_stmts[0]
else:
return node
if n.kind in ("ifstmt", "iflaststmt", "iflaststmtl"):
node.kind = "ifelifstmt"
n.kind = "elifstmt"
elif n.kind in ("ifelsestmtr",):
node.kind = "ifelifstmt"
n.kind = "elifelsestmtr"
elif n.kind in ("ifelsestmt", "ifelsestmtc", "ifelsestmtl"):
node.kind = "ifelifstmt"
self.n_ifelsestmt(n, preprocess=True)
if n == "ifelifstmt":
n.kind = "elifelifstmt"
elif n.kind in ("ifelsestmt", "ifelsestmtc", "ifelsestmtl"):
n.kind = "elifelsestmt"
if not preprocess:
if old_stmts:
if n.kind == "elifstmt":
trailing_else = SyntaxTree("stmts", old_stmts[1:])
# We use elifelsestmtr because it has 3 nodes
elifelse_stmt = SyntaxTree(
"elifelsestmtr", [n[0], n[1], trailing_else]
)
node[3] = elifelse_stmt
pass
else:
# Other cases for n.kind may happen here
pass
pass
node.transformed_by = "n_ifelsestmt"
return node
n_ifelsestmtc = n_ifelsestmtl = n_ifelsestmt
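A before/after sketch of the rewrite n_ifelsestmt performs, matching the docstring above; the values are illustrative:

# Nested form the parse tree encodes:
def grade(score):
    if score >= 90:
        return "A"
    else:
        if score >= 80:
            return "B"
        else:
            return "C"

# What the transformed tree lets the walker print:
def grade(score):
    if score >= 90:
        return "A"
    elif score >= 80:
        return "B"
    else:
        return "C"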
def traverse(self, node, is_lambda=False):
node = self.preorder(node)
return node
def transform(self, ast):
self.maybe_show_tree(ast)
self.ast = copy(ast)
self.ast = self.traverse(self.ast, is_lambda=False)
if self.ast[-1] == RETURN_NONE:
self.ast.pop() # remove last node
# todo: if empty, add 'pass'
return self.ast
# Write template_engine
# def template_engine
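A minimal usage sketch, assuming this file lands as uncompyle6.semantics.transform and that the caller already holds a parse tree; the driver below is hypothetical and not part of the diff:

from uncompyle6.semantics.transform import TreeTransform  # module path is an assumption

def apply_transform(parse_tree, version=3.7):
    # Run the transformation pass (if/else-if -> elif, raise AssertionError -> assert)
    # before the semantic walker turns the tree back into source text.
    return TreeTransform(version).transform(parse_tree)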

View File

@@ -26,10 +26,10 @@ def maybe_show_asm(showasm, tokens):
:param tokens: The asm tokens to show.
"""
if showasm:
stream = showasm if hasattr(showasm, "write") else sys.stdout
for t in tokens:
stream.write(str(t))
stream.write("\n")
def maybe_show_tree(walker, ast):
@@ -43,15 +43,16 @@ def maybe_show_tree(walker, ast):
:param ast: The ast to show.
"""
if walker.showast:
if hasattr(walker.showast, "write"):
stream = walker.showast
else:
stream = sys.stdout
if (isinstance(walker.showast, dict) and walker.showast.get("Full", False)
and hasattr(walker, "str_with_template")):
walker.str_with_template(ast)
else:
stream.write(str(ast))
stream.write("\n")
def maybe_show_tree_param_default(show_tree, name, default):
@@ -68,11 +69,11 @@ def maybe_show_tree_param_default(show_tree, name, default):
:param default: The function parameter default.
"""
if show_tree:
stream = show_tree if hasattr(show_tree, "write") else sys.stdout
stream.write("\n")
stream.write("--" + name)
stream.write("\n")
stream.write(str(default))
stream.write("\n")
stream.write("--")
stream.write("\n")

View File

@@ -12,4 +12,4 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION="3.4.1" # noqa