Mirror of https://github.com/rocky/python-uncompyle6.git, synced 2025-08-04 01:09:52 +08:00

Compare commits (109 commits)
Commits (SHA1):

2717a5e302 77dd3b8d50 2cfc60fbd7 daa424cf0c 7b68c9c838 df5df9364c
f1496cad4d a3a15414d3 9874553fb4 d21d93fd84 dbf2729f76 047e27c966
6a81a752a7 44f0ba0efb bc8907e752 4e9d8783d1 47c847644e af2ed31871
49de5b5c9d c8252ca9cb 0441fbc616 1b4335edf1 2e36551c02 fff6f82dd7
7c8f3cc9ec 78a595c8cf cda0154594 d852f23962 065fd13b81 659f37585b
bc18fcf7fa 144f52da8e 9f250b49ee 4abdffecb9 1419acf019 bdc24d7f51
07ec8fa1fb 04c2240d63 96dcdfd744 82ea77c592 4f0e580438 09cc0d775a
2da5a5649f 373916f57c 1c0c54991e 5de5d2357f 6d296f11c9 0ae4acff9e
40c2f2962c dae195e36e 2c503d5a14 eed4c1025b 86c1d12e69 61a367b0ae
dba6d24361 0b111f1568 c666e2dc3d 0a5fcc51d8 ade9f7a182 d41ef3e5dc
ebb0342b38 f17ebf42a9 85dcce4ff2 bc9a16eaac d08d183fc8 0b3d6b8add
5cb46c2ed3 163e47fb49 0cf32f1b70 f0f9676f52 be610aa6b3 1494bb2049
d62dc3daac 5ad51707e3 f28c255804 315965300f 9bd85fe5a0 c6e3168c31
7969b19c2b f8bfde4a8e e2b309fa30 1ebfde6927 73619de3f5 600cd0b8ee
90a2ed2c9e 8728cb6a99 8daedaf063 4a4a20995e c923ce9afe 88901c6901
31f7d14eab 388d1da970 66294d54f7 55783c2712 fd580f3c60 a781006ff1
0be3d5a530 bc3cd0102b 6e6d590268 71bdc8dc6a 71735ca7ef 9f121ef00c
2e01f42f65 3f9a862277 cc531cf90a c7124ad9ca 44a4aab0a7 2a52982d52
56e5e8dcef
@@ -1,11 +1,3 @@
# This configuration was automatically generated from a CircleCI 1.0 config.
# It should include any build commands you had along with commands that CircleCI
# inferred from your project structure. We strongly recommend you read all the
# comments in this file to understand the structure of CircleCI 2.0, as the idiom
# for configuration has changed substantially in 2.0 to allow arbitrary jobs rather
# than the prescribed lifecycle of 1.0. In general, we recommend using this generated
# configuration as a reference rather than using it in production, though in most
# cases it should duplicate the execution of your original 1.0 config.
version: 2
jobs:
  build:
@@ -27,9 +19,9 @@ jobs:
    # VM instead of a container) see https://circleci.com/docs/2.0/executor-types/
    # To see the list of pre-built images that CircleCI provides for most common languages see
    # https://circleci.com/docs/2.0/circleci-images/
    docker:
      - image: circleci/build-image:ubuntu-14.04-XXL-upstart-1189-5614f37
        command: /sbin/init
    machine:
      python:
        version: 2.7.14
    steps:
    # Machine Setup
    # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
@@ -42,25 +34,24 @@ jobs:
    # This is based on your 1.0 configuration file or project settings
    - run:
        working_directory: ~/rocky/python-uncompyle6
        command: pyenv local 2.7.11 && pyenv rehash && pip install virtualenv && pip install nose && pip install pep8 && pip install six && pyenv rehash
        command: pip install virtualenv && pip install nose && pip install pep8 && pyenv rehash
    # Dependencies
    # This would typically go in either a build or a build-and-test job when using workflows
    # Restore the dependency cache
    - restore_cache:
        keys:
        # This branch if available
        - v1-dep-{{ .Branch }}-
        # Default branch if not
        - v1-dep-master-
        # Any branch if there are none on the default branch - this should be unnecessary if you have your default branch configured correctly
        - v1-dep-
        - v2-dependencies-{{ .Branch }}-
        # fallback to using the latest cache if no exact match is found
        - v2-dependencies-

    # This is based on your 1.0 configuration file or project settings
    - run: pip install --upgrade setuptools
    - run: pip install -e .
    - run: pip install pytest==3.2.5 hypothesis==3.0.0
    - run: pip install -r requirements-dev.txt

    # Save dependency cache
    - save_cache:
        key: v1-dep-{{ .Branch }}-{{ epoch }}
        key: v2-dependencies-{{ .Branch }}-{{ epoch }}
        paths:
        # This is a broad list of cache paths to include many possible development environments
        # You can probably delete some of these entries
@@ -69,14 +60,13 @@ jobs:
        - ~/.m2
        - ~/.ivy2
        - ~/.bundle
        - ~/.go_workspace
        - ~/.gradle
        - ~/.cache/bower

    # Test
    # This would typically be a build job when using workflows, possibly combined with build
    # This is based on your 1.0 configuration file or project settings
    - run: python ./setup.py develop && make check-2.7
    - run: cd ./test/stdlib && pyenv local 2.7.11 && bash ./runtests.sh 'test_[p-z]*.py'
    - run: cd ./test/stdlib && bash ./runtests.sh 'test_[p-z]*.py'
    # Teardown
    # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each
    # Save test results
.gitignore (1 line changed, vendored)
@@ -6,6 +6,7 @@
/.eggs
/.hypothesis
/.idea
/.mypy_cache
/.pytest_cache
/.python-version
/.tox
@@ -5,6 +5,7 @@ python:
  - '2.7'
  - '3.4'
  - '3.6'
  - '3.8'

matrix:
  include:
Makefile (8 lines changed)
@@ -47,8 +47,12 @@ check-3.8:
# Skip for now
2.6 5.0 5.3 5.6 5.8:

#:PyPy pypy3-2.4.0 Python 3:
pypy-3.2 2.4:
#:PyPy pypy3-2.4.0 Python 3.6.1:
7.1 pypy-3.2 2.4:
	$(MAKE) -C test $@

#:PyPy pypy3-2.4.0 Python 3.6.9:
7.2:
	$(MAKE) -C test $@

#: Run py.test tests
NEWS.md (224 lines changed)
@@ -1,15 +1,65 @@
3.5.1 2019-10-29 JNC
====================

- Pypy 3.3, 3.5, 3.6, and 3.6.9 support
- Improve 3.0 decompilation
  - no parse errors on stlib bytecode. However accurate translation in
    control-flow and and/or detection needs work
- Remove extraneous iter() in "for" of list comprehension Fixes #272
- "for" block without a POP_BLOCK and confusing JUMP_BACK for CONTINUE. Fixes #293
- Fix unmarshal incompletness detected in Pypy 3.6
- Miscellaneous bugs fixed

3.5.0 2019-10-12 Stony Brook Ride
=================================

- Fix fragment bugs
  * missing RETURN_LAST introduced when adding transformation layer
  * more parent entries on tokens
- Preliminary support for decompiling Python 1.0, 1.1. 1.2 and 1.6
  * Newer xdis version needed

3.4.1 2019-10-02
================

- Correct assert{,2} transforms Fixes #289
- Fragment parsing fixes:
  * Wasn't handling 3-arg %p
  * fielding error in code_deparse()
- Use newer xdis to better track Python 3.8.0


3.4.0 2019-08-24 Totoro
=======================

The main change is to add a tree-transformation phase. This simplifies the
code a little and allows us to turn `if ...: raise AssertionError` into
`assert`, and many `if ..: else if ...` into `if ... elif ..`

Use options `--show=before` and `--show=after` to see the before the tree transformation phase and after the tree transformation phase.
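As an illustration (made-up source for this example, not from the repository), bytecode that previously decompiled as

    if not x:
        raise AssertionError

is rendered by the new transformation pass as

    assert x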

Most of the heavy lifting for this was done by x0ret.

Other changes:

- Fix issue #275, #283 (process to fix this bug is documented on wiki), #284
- blacken more code
- CircleCI adjustments for a changing CircleCi
- Require more recent `xdis` for Python 3.8
- Fix bugs in code using `BUILD_LIST_UNPACK` and variants

3.3.5 2019-07-03 Pre Independence Day
=====================================

Again, most of the work in this is release is thanks to x0ret.

- Handle annotation args in Python 3.x
- Fix vararg and function signatures in 3.x
- Some 3.x < 3.6 while(1)/if fixes - others remain
- Start reinstating else if -> elif
- LOAD_CONST -> LOAD_CODE where appropriate
- option `weak-verify` is now `syntax-verify`
- code cleanups, start using "blacken" to reformat text
- Handle annotation arguments in Python 3.x
- Fix _vararg_ and function signatures in 3.x
- Some 3.x < 3.6 `while` (1)/`if` fixes — others remain
- Start reinstating `else if` -> `elif`
- `LOAD_CONST` -> `LOAD_CODE` where appropriate
- option `--weak-verify` is now `--syntax-verify`
- code cleanups, start using [black](https://github.com/python/black) to reformat text


3.3.4 2019-06-19 Fleetwood at 65
@@ -18,12 +68,12 @@ Again, most of the work in this is release is thanks to x0ret.
Most of the work in this is release is thanks to x0ret.

- Major work was done by x0ret to correct function signatures and include annotation types
- Handle Python 3.6 STORE_ANNOTATION [#58](https://github.com/rocky/python-uncompyle6/issues/58)
- Handle Python 3.6 `STORE_ANNOTATION` [#58](https://github.com/rocky/python-uncompyle6/issues/58)
- Friendlier assembly output
- `LOAD_CONST` replaced by `LOAD_STR` where appropriate to simplify parsing and improve clarity
- remove unneeded parenthesis in a generator expression when it is the single argument to the function [#247](https://github.com/rocky/python-uncompyle6/issues/246)
- Bug in noting an async function [#246](https://github.com/rocky/python-uncompyle6/issues/246)
- Handle unicode docstrings and fix docstring bugs [#241](https://github.com/rocky/python-uncompyle6/issues/241)
- Handle Unicode docstrings and fix docstring bugs [#241](https://github.com/rocky/python-uncompyle6/issues/241)
- Add short option -T as an alternate for --tree+
- Some grammar cleanup

@@ -67,7 +117,7 @@ some span back as far as 2.x
But as before, many more remain in the 3.7 and 3.8 range which will
get addressed in future releases

Pypy 3.6 support was started. Pypy 3.x detection fixed (via xdis)
Pypy 3.6 support was started. Pypy 3.x detection fixed (via `xdis`)

3.3.1 2019-04-19 Good Friday
==========================
@@ -151,7 +201,7 @@ Pull Requests

- Add rudimentary 1.4 support (still a bit buggy)
- add --tree+ option to show formatting rule, when it is constant
- Python 2.7.15candidate1 support (via xdis)
- Python 2.7.15candidate1 support (via `xdis`)
- bug fixes, especially for 3.7 (but 2.7 and 3.6 and others as well)

3.1.3 2018-04-16
@@ -256,24 +306,24 @@ function calls and definitions.
2.15.1 2018-01-27
=====================

- Add --linemap option to give line correspondences
- Add `--linemap` option to give line correspondences
  between original source lines and reconstructed line sources.
  It is far from perfect, but it is a start
- Add a new class of tests: tests which when decompiled check themselves
- Split off Python version semantic action customizations into its own file
- Fix 2.7 bug in ifelse loop statement
- Handle 3.6+ EXTENDED_ARGs for POP_JUMP_IF... instructions
- Correct 3.6+ calls with kwargs
- Fix 2.7 bug in `if`/`else` loop statement
- Handle 3.6+ `EXTENDED_ARG`s for `POP_JUMP_IF..` instructions
- Correct 3.6+ calls with `kwargs`
- Describe the difficulty of 3.6 in README

2.14.3 2018-01-19
=====================

- Fix bug in 3.5+ await stmt
- Fix bug in 3.5+ `await` statement
- Better version to magic handling; handle 3.5.2 .. 3.5.4 versions
- Improve/correct test_pyenvlib.py status messages
- Fix some 2.7 and 2.6 parser bugs
- Fix whilelse parsing bugs
- Fix `whilelse` parsing bugs
- Correct 2.5- decorator parsing
- grammar for decorators matches AST a little more
- better tests in setup.py for running the right version of Python
@@ -284,15 +334,15 @@ function calls and definitions.

Decompilation bug fixes, mostly 3.6 and pre 2.7

- 3.6 FUNCTION_EX (somewhat)
- 3.6 FUNCTION_EX_KW fixes
- 3.6 MAKE_FUNCTION fixes
- correct 3.5 CALL_FUNCTION_VAR
- 3.6 `FUNCTION_EX` (somewhat)
- 3.6 `FUNCTION_EX_KW` fixes
- 3.6 `MAKE_FUNCTION` fixes
- correct 3.5 `CALL_FUNCTION_VAR`
- stronger 3.x "while 1" testing
- Fix bug in if's with "pass" bodies. Fixes #104
- try/else and try/finally fixes on 2.6-
- limit pypy customization to pypy
- Add addr fields in COME_FROMS
- Add addr fields in `COME_FROM`S
- Allow use of full instructions in parser reduction routines
- Reduce grammar in Python 3 by specialization more to specific
  Python versions
@@ -301,11 +351,11 @@ Decompilation bug fixes, mostly 3.6 and pre 2.7
2.14.1 2017-12-10 Dr. Gecko
===================================

- Many decompilation bugfixes
- Many decompilation bug fixes
- Grammar rule reduction and version isolation
- Match higher-level nonterminal names more closely
  with Python AST
- Start automated Python stdlib testing - full round trip
- Start automated Python _stdlib_ testing — full round trip

2.14.0 2017-11-26 johnnybamazing
=========================================
@@ -314,7 +364,7 @@ Decompilation bug fixes, mostly 3.6 and pre 2.7
  and remove used grammar rules
- Fix a number of bytecode decompile problems
  (many more remain)
- Add stdlib/runtests.sh for even more rigorous testing
- Add `stdlib/runtests.sh` for even more rigorous testing

2.13.3 2017-11-13
=====================
@@ -330,16 +380,16 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
  rather trying to parse the bytecode array. This largely been done in for versions 3.x;
  3.0 custom mangling code has been reduced;
  some 2.x conversion has been done, but more is desired. This make it possible to...
- Handle EXTENDED_ARGS better. While relevant to all Python versions it is most noticeable in
  version 3.6+ where in switching to wordcodes the size of operands has been reduced from 2**16
  to 2**8. JUMP instruction then often need EXTENDED_ARGS.
- Handle `EXTENDED_ARGS` better. While relevant to all Python versions it is most noticeable in
  version 3.6+ where in switching to wordcodes the size of operands has been reduced from 2^16
  to 2^8. `JUMP` instruction then often need EXTENDED_ARGS.
- Refactor find_jump_targets() with via working of of instructions rather the bytecode array.
- use --weak-verify more and additional fuzzing on verify()
- use `--weak-verify` more and additional fuzzing on verify()
- fragment parser now ignores errors in nested function definitions; an parameter was
  added to assist here. Ignoring errors may be okay because the fragment parser often just needs,
  well, *fragments*.
- Distinguish RETURN_VALUE from RETURN_END_IF in exception bodies better in 3.6
- bug in 3.x language changes: import queue via import Queue
- Distinguish `RETURN_VALUE` from `RETURN_END_IF` in exception bodies better in 3.6
- bug in 3.x language changes: import queue via `import Queue`
- reinstate some bytecode tests since decompiling has gotten better
- Revise how to report a bug

@@ -359,12 +409,12 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
- Fixes in deparsing lambda expressions
- Improve table-semantics descriptions
- Document hacky customize arg count better (until we can remove it)
- Update to use xdis 3.7.0 or greater
- Update to use `xdis` 3.7.0 or greater

2.12.0 2017-09-26
=====================

- Use xdis 3.6.0 or greater now
- Use `xdis` 3.6.0 or greater now
- Small semantic table cleanups
- Python 3.4's terms a little names better
- Slightly more Python 3.7, but still failing a lot
@@ -378,13 +428,13 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
2.11.4 2017-08-15
=====================

* scanner and parser now allow 3-part version string lookups,
* scanner and parser now allow 3-part version string look ups,
  e.g. 2.7.1 We allow a float here, but if passed a string like '2.7'. or
* unpin 3.5.1. xdis 3.5.4 has been release and fixes the problems we had. Use that.
* some routines here moved to xdis. Use the xdis version
* README.rst: Link typo Name is trepan2 now not trepan
* xdis-forced change adjust for COMPARE_OP "is-not" in
  semanatic routines. We need "is not".
* unpin 3.5.1. `xdis` 3.5.4 has been release and fixes the problems we had. Use that.
* some routines here moved to `xdis`. Use the `xdis` version
* `README.rst`: Link typo Name is _trepan2_ now not _trepan_
* xdis-forced change adjust for `COMPARE_OP` "is-not" in
  semantic routines. We need "is not".
* Some PyPy tolerance in validate testing.
* Some pyston tolerance

@@ -394,15 +444,15 @@ Overall: better 3.6 decompiling and some much needed code refactoring and cleanu
Very minor changes

- RsT doc fixes and updates
- use newer xdis, but not too new; 3.5.2 breaks uncompyle6
- use xdis opcode sets
- xdis "exception match" is now "exception-match"
- use newer `xdis`, but not too new; 3.5.2 breaks uncompyle6
- use `xdis` opcode sets
- `xdis` "exception match" is now "exception-match"

2.11.2 2017-07-09
=====================

- Start supporting Pypy 3.5 (5.7.1-beta)
- use xdis 3.5.0's opcode sets and require xdis 3.5.0
- use `xdis` 3.5.0's opcode sets and require `xdis` 3.5.0
- Correct some Python 2.4-2.6 loop detection
- guard against badly formatted bytecode

@@ -410,55 +460,55 @@ Very minor changes
=====================

- Python 3.x annotation and function signature fixes
- Bump xdis version
- Small pysource bug fixes
- Bump `xdis` version
- Small `pysource.py` bug fixes

2.11.0 2017-06-18 Fleetwood
==================================

- Major improvements in fragment tracking
  * Add nonterminal node in extractInfo
  * Add nonterminal node in `extractInfo()`
  * tag more offsets in expressions
  * tag array subscripts
  * set YIELD value offset in a <yield> expr
  * set `YIELD` value offset in a _yield expr_
  * fix a long-standing bug in not adjusting final AST when melding other deparse ASTs
- Fixes yet again for make_function node handling; document what's up here
- Fix bug in snowflake Python 3.5 *args kwargs
- Fix bug in snowflake Python 3.5 `*args`, `kwargs`

2.10.1 2017-06-3 Marylin Frankel
========================================

- fix some fragments parsing bugs
- was returning the wrong type sometimes in deparse_code_around_offset()
- was returning the wrong type sometimes in `deparse_code_around_offset()`
- capture function name in offsets
- track changes to ifelstrmtr node from pysource into fragments
- track changes to `ifelstrmtr` node from `pysource.py` into fragments

2.10.0 2017-05-30 Elaine Gordon
=======================================

- Add fuzzy offset deparse look up
- 3.6 bug fixes
- fix EXTENDED_ARGS handling (and in 2.6 and others)
- fix `EXTENDED_ARGS` handling (and in 2.6 and others)
- semantic routine make_function fragments.py
- MAKE_FUNCTION handling
- CALL_FUNCTION_EX handling
- async property on defs
- support for CALL_FUNCTION_KW (moagstar)
- 3.5+ UNMAP_PACK and BUILD_UNMAP_PACK handling
- `MAKE_FUNCTION` handling
- `CALL_FUNCTION_EX` handling
- `async` property on `defs`
- support for `CALL_FUNCTION_KW` (moagstar)
- 3.5+ `UNMAP_PACK` and` BUILD_UNMAP_PACK` handling
- 3.5 FUNCTION_VAR bug
- 3.x pass statement insdie while True
- 3.x pass statement inside `while True`
- Improve 3.2 decompilation
- Fixed -o argument processing (grkov90)
- Fixed `-o` argument processing (grkov90)
- Reduce scope of LOAD_ASSERT as expr to 3.4+
- "await" statement fixes
- `await` statement fixes
- 2.3, 2.4 "if 1 .." fixes
- 3.x annotation fixes

2.9.11 2017-04-06
=====================

- Better support for Python 3.5+ BUILD_MAP_UNPACK
- Start 3.6 CALL_FUNCTION_EX support
- Better support for Python 3.5+ `BUILD_MAP_UNPACK`
- Start 3.6 `CALL_FUNCTION_EX` support
- Many decompilation bug fixes. (Many more remain). See ChangeLog

2.9.10 2017-02-25
@@ -466,7 +516,7 @@ Very minor changes

- Python grammar rule fixes
- Add ability to get grammar coverage on runs
- Handle Python 3.6 opcode BUILD_CONST_KEYMAP
- Handle Python 3.6 opcode `BUILD_CONST_KEYMAP`

2.9.9 2016-12-16

@@ -482,13 +532,13 @@ Very minor changes
====================

- Better control-flow detection
- pseudo instruction THEN in 2.x
- pseudo instruction `THEN` in 2.x
  to disambiguate if from and
- fix bug in --verify option
- fix bug in `--verify` option
- DRY (a little) control-flow detection
- fix syntax in tuples with one element
- if AST rule inheritance in Python 2.5
- NAME_MODULE removal for Python <= 2.4
- `NAME_MODULE` removal for Python <= 2.4
- verify call fixes for Python <= 2.4
- more Python lint

@@ -499,9 +549,9 @@ Very minor changes
- Some Python 3.6 bytecode to wordcode conversion fixes
- option -g: show start-end range when possible
- track print_docstring move to help (used in python 3.1)
- verify: allow RETURN_VALUE to match RETURN_END_IF
- verify: allow `RETURN_VALUE` to match `RETURN_END_IF`
- some 3.2 compatibility
- Better Python 3 control flow detection by adding Pseudo ELSE opcodes
- Better Python 3 control flow detection by adding Pseudo `ELSE` opcodes

2.9.6 2016-12-04
====================
@@ -517,7 +567,7 @@ Very minor changes

- Correct MANIFEST.in
- More AST grammar checking
- --linemapping option or linenumbers.line_number_mapping()
- `--linemapping` option or _linenumbers.line_number_mapping()_
  Shows correspondence of lines between source
  and decompiled source
- Some control flow adjustments in code for 2.x.
@@ -537,7 +587,7 @@ Very minor changes
  * improper while 1 else
  * docstring indent
  * 3.3 default values in lambda expressions
  * start 3.0 decompilation (needs newer xdis)
  * start 3.0 decompilation (needs newer `xdis`)
- Start grammar misparse checking


@@ -551,12 +601,12 @@ Very minor changes
2.9.3 2016-10-26
====================

Release forced by incompatibility change in xdis 3.2.0.
Release forced by incompatibility change in` xdis` 3.2.0.

- Python 3.1 bugs:
  * handle "with ... as"
  * handle "with"
  * Start handling def (...) -> yy (has bugs still)
  * handle `with`... `as`
  * handle `with`
  * Start handling `def` (...) -> _yy_ (has bugs still)

- DRY Python 3.x via inheritance
- Python 3.6 work (from Daniel Bradburn)
@@ -582,12 +632,12 @@ Release forced by incompatibility change in xdis 3.2.0.
2.9.0 2016-10-09
====================

- Use xdis 3.0.0 protocol load_module.
- Use `xdis` 3.0.0 protocol `load_module`.
  this Forces change in requirements.txt and _pkg_info_.py
- Start Python 1.5 decompiling; another round of work is needed to
  remove bugs
- Simplify python 2.1 grammar
- Fix bug with -t ... Wasn't showing source text when -t option was given
- Fix bug with `-t` ... Wasn't showing source text when `-t` option was given
- Fix 2.1-2.6 bug in list comprehension

2.8.4 2016-10-08
@@ -596,8 +646,8 @@ Release forced by incompatibility change in xdis 3.2.0.
- Python 3 disassembly bug fixes
- Python 3.6 fstring bug fixes (from moagstar)
- Python 2.1 disassembly
- COME_FROM suffixes added in Python3
- use .py extension in verification disassembly
- `COME_FROM` suffixes added in Python3
- use `.py` extension in verification disassembly

2.8.3 2016-09-11 live from NYC!
=======================================
@@ -642,8 +692,8 @@ control-flow structure detection is done.
- Add Python 2.2 decompilation

- Fix bugs
  * PyPy LOOKUP_METHOD bug
  * Python 3.6 FORMAT_VALUE handles expressions now
  * PyPy `LOOKUP_METHOD` bug
  * Python 3.6 `FORMAT_VALUE` handles expressions now

2.8.0 2016-08-03
====================
@@ -693,7 +743,7 @@ control-flow structure detection is done.
====================

- Improve Python 2.6 bytecode deparsing:
  stdlib now will deparse something
  _stdlib_ now will deparse something
- Better <2.6 vs. 2.7 grammar separation
- Fix some 2.7 deparsing bugs
- Fix bug in installing uncompyle6 script
@@ -756,9 +806,9 @@ control-flow structure detection is done.
2.3.2 2016-05-1
===================

- Add --version option standalone scripts
- Add `--version` option standalone scripts
- Correct License information in package
- expose fns uncompyle_file, load_file, and load_module
- expose functions `uncompyle_file()`, `load_file()`, and `load_module()`
- Start to DRY Python2 and Python3 grammars Separate out 3.2, and 3.5+
  specific grammar code
- Fix bug in 3.5+ constant map parsing
@@ -766,12 +816,12 @@ control-flow structure detection is done.
2.3.0, 2.3.1 2016-04-30
=============================

- Require spark_parser >= 1.1.0
- Require `spark_parser` >= 1.1.0

2.2.0 2016-04-30
====================

- Spark is no longer here but pulled separate package spark_parse
- Spark is no longer here but pulled separate package [spark_parser](https://pypi.org/project/spark_parser/)
- Python 3 parsing fixes
- More tests

@@ -781,7 +831,7 @@ control-flow structure detection is done.
- Support single-mode (in addition to exec-mode) compilation
- Start to DRY Python 2 and Python 3 grammars
- Fix bug in if else ternary construct
- Fix bug in uncomplye6 -d and -r options (via lelicopter)
- Fix bug in uncomplye6 `-d` and `-r` options (via lelicopter)


2.1.3 2016-01-02
@@ -789,7 +839,7 @@ control-flow structure detection is done.

- Limited support for decompiling Python 3.5
- Improve Python 3 class deparsing
- Handle MAKE_CLOSURE opcode
- Handle `MAKE_CLOSURE` opcode
- Start to DRY opcode code.
- increase test coverage
- fix misc small bugs and some improvements
@@ -831,5 +881,5 @@ Changes from uncompyle2

SPARK:
  add option to show grammar rules applied
  allow Python-style # comments in grammar
  allow Python-style `#` comments in grammar
  Runs on Python 3 and Python 2
README.rst (27 lines changed)
@@ -1,4 +1,6 @@
|buildstatus| |Latest Version| |Supported Python Versions|
|buildstatus| |Pypi Installs| |Latest Version| |Supported Python Versions|

|packagestatus|

uncompyle6
==========
@@ -11,7 +13,7 @@ Introduction
------------

*uncompyle6* translates Python bytecode back into equivalent Python
source code. It accepts bytecodes from Python version 1.3 to version
source code. It accepts bytecodes from Python version 1.0 to version
3.8, spanning over 24 years of Python releases. We include Dropbox's
Python 2.5 bytecode and some PyPy bytecode.
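A minimal sketch of driving the decompiler from Python, assuming uncompyle6 is installed; the `deparse_code2str` function and its keyword arguments appear in the test_pysource.py diff further down this page, but the sample function here is made up and the exact output formatting may vary by version:

    # Decompile a live code object back to Python source text.
    from uncompyle6.semantics.pysource import deparse_code2str

    def add(a, b):
        return a + b

    print(deparse_code2str(add.__code__))  # expect something like: return a + b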
||||
@@ -86,9 +88,9 @@ This uses setup.py, so it follows the standard Python routine:
|
||||
|
||||
::
|
||||
|
||||
pip install -e . # set up to run from source tree
|
||||
# Or if you want to install instead
|
||||
python setup.py install # may need sudo
|
||||
$ pip install -e . # set up to run from source tree
|
||||
# Or if you want to install instead
|
||||
$ python setup.py install # may need sudo
|
||||
|
||||
A GNU makefile is also provided so :code:`make install` (possibly as root or
|
||||
sudo) will do the steps above.
|
||||
@@ -132,7 +134,7 @@ could be compared with the original bytecode. However as Python's code
|
||||
generation got better, this no longer was feasible.
|
||||
|
||||
If you want Python syntax verification of the correctness of the
|
||||
decompilation process, add the `--syntax-verify` option. However since
|
||||
decompilation process, add the :code:`--syntax-verify` option. However since
|
||||
Python syntax changes, you should use this option if the bytecode is
|
||||
the right bytecode for the Python interpreter that will be checking
|
||||
the syntax.
|
||||
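In effect, syntax verification re-compiles the decompiled text with the running interpreter. A rough sketch of the idea, not the tool's actual code path; the same compile() round trip appears in validate_uncompyle() in the test diff below:

    # A SyntaxError here means the decompiled output is not valid syntax
    # for the interpreter doing the checking.
    decompiled_source = "x = 1\n"
    compile(decompiled_source, "<string>", "exec")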
@@ -146,7 +148,7 @@ available give stronger verification: those programs that when run
test themselves. Our test suite includes these.

And Python comes with another a set of programs like this: its test
suite for the standard library. We have some code in `test/stdlib` to
suite for the standard library. We have some code in :code:`test/stdlib` to
facilitate this kind of checking too.

Known Bugs/Restrictions
@@ -177,15 +179,15 @@ In the Python 3 series, Python support is is strongest around 3.4 or
3.0 is weird in that it in some ways resembles 2.6 more than it does
3.1 or 2.7. Python 3.6 changes things drastically by using word codes
rather than byte codes. As a result, the jump offset field in a jump
instruction argument has been reduced. This makes the `EXTENDED_ARG`
instruction argument has been reduced. This makes the :code:`EXTENDED_ARG`
instructions are now more prevalent in jump instruction; previously
they had been rare. Perhaps to compensate for the additional
`EXTENDED_ARG` instructions, additional jump optimization has been
:code:`EXTENDED_ARG` instructions, additional jump optimization has been
added. So in sum handling control flow by ad hoc means as is currently
done is worse.
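The effect is easy to observe with the standard dis module. An illustrative sketch, with behavior as on CPython 3.6-3.10 where these conditional jumps take absolute offsets:

    import dis

    # ~200 one-line if statements make the code object large enough that
    # some jump targets exceed 255, forcing EXTENDED_ARG prefixes on the
    # jump instructions.
    src = "x = 0\n" + "".join("if x == %d: x += 1\n" % i for i in range(200))
    code = compile(src, "<string>", "exec")
    print(sum(1 for i in dis.get_instructions(code) if i.opname == "EXTENDED_ARG"))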

Between Python 3.5, 3.6 and 3.7 there have been major changes to the
`MAKE_FUNCTION` and `CALL_FUNCTION` instructions.
:code:`MAKE_FUNCTION` and :code:`CALL_FUNCTION` instructions.

Currently not all Python magic numbers are supported. Specifically in
some versions of Python, notably Python 3.6, the magic number has
@@ -217,7 +219,7 @@ See Also

* https://github.com/zrax/pycdc : purports to support all versions of Python. It is written in C++ and is most accurate for Python versions around 2.7 and 3.3 when the code was more actively developed. Accuracy for more recent versions of Python 3 and early versions of Python are especially lacking. See its `issue tracker <https://github.com/zrax/pycdc/issues>`_ for details. Currently lightly maintained.
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than what is used here. Currently unmaintained.
* https://github.com/figment/unpyc3/ : fork of above, but supports Python 3.3 only. Includes some fixes like supporting function annotations. Currently unmaintained.
* https://github.com/wibiti/uncompyle2 : supports Python 2.7 only, but does that fairly well. There are situations where `uncompyle6` results are incorrect while `uncompyle2` results are not, but more often uncompyle6 is correct when uncompyle2 is not. Because `uncompyle6` adheres to accuracy over idiomatic Python, `uncompyle2` can produce more natural-looking code when it is correct. Currently `uncompyle2` is lightly maintained. See its issue `tracker <https://github.com/wibiti/uncompyle2/issues>`_ for more details
* https://github.com/wibiti/uncompyle2 : supports Python 2.7 only, but does that fairly well. There are situations where :code:`uncompyle6` results are incorrect while :code:`uncompyle2` results are not, but more often uncompyle6 is correct when uncompyle2 is not. Because :code:`uncompyle6` adheres to accuracy over idiomatic Python, :code:`uncompyle2` can produce more natural-looking code when it is correct. Currently :code:`uncompyle2` is lightly maintained. See its issue `tracker <https://github.com/wibiti/uncompyle2/issues>`_ for more details
* `How to report a bug <https://github.com/rocky/python-uncompyle6/blob/master/HOW-TO-REPORT-A-BUG.md>`_
* The HISTORY_ file.
* https://github.com/rocky/python-xdis : Cross Python version disassembler
@@ -234,9 +236,12 @@ See Also
.. _this: https://github.com/rocky/python-uncompyle6/wiki/Deparsing-technology-and-its-use-in-exact-location-reporting
.. |buildstatus| image:: https://travis-ci.org/rocky/python-uncompyle6.svg
   :target: https://travis-ci.org/rocky/python-uncompyle6
.. |packagestatus| image:: https://repology.org/badge/vertical-allrepos/python:uncompyle6.svg
   :target: https://repology.org/project/python:uncompyle6/versions
.. _PJOrion: http://www.koreanrandom.com/forum/topic/15280-pjorion-%D1%80%D0%B5%D0%B4%D0%B0%D0%BA%D1%82%D0%B8%D1%80%D0%BE%D0%B2%D0%B0%D0%BD%D0%B8%D0%B5-%D0%BA%D0%BE%D0%BC%D0%BF%D0%B8%D0%BB%D1%8F%D1%86%D0%B8%D1%8F-%D0%B4%D0%B5%D0%BA%D0%BE%D0%BC%D0%BF%D0%B8%D0%BB%D1%8F%D1%86%D0%B8%D1%8F-%D0%BE%D0%B1%D1%84
.. _Deobfuscator: https://github.com/extremecoders-re/PjOrion-Deobfuscator
.. _Py2EXE: https://en.wikipedia.org/wiki/Py2exe
.. |Supported Python Versions| image:: https://img.shields.io/pypi/pyversions/uncompyle6.svg
.. |Latest Version| image:: https://badge.fury.io/py/uncompyle6.svg
   :target: https://badge.fury.io/py/uncompyle6
.. |Pypi Installs| image:: https://pepy.tech/badge/uncompyle6/month
@@ -26,46 +26,46 @@ copyright = """
Copyright (C) 2015-2019 Rocky Bernstein <rb@dustyfeet.com>.
"""

classifiers = ['Development Status :: 5 - Production/Stable',
               'Intended Audience :: Developers',
               'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
               'Operating System :: OS Independent',
               'Programming Language :: Python',
               'Programming Language :: Python :: 2.4',
               'Programming Language :: Python :: 2.5',
               'Programming Language :: Python :: 2.6',
               'Programming Language :: Python :: 2.7',
               'Programming Language :: Python :: 3.0',
               'Programming Language :: Python :: 3.1',
               'Programming Language :: Python :: 3.2',
               'Programming Language :: Python :: 3.3',
               'Programming Language :: Python :: 3.4',
               'Programming Language :: Python :: 3.5',
               'Programming Language :: Python :: 3.6',
               'Programming Language :: Python :: 3.7',
               'Programming Language :: Python :: 3.8',
               'Topic :: Software Development :: Debuggers',
               'Topic :: Software Development :: Libraries :: Python Modules',
classifiers = ["Development Status :: 5 - Production/Stable",
               "Intended Audience :: Developers",
               "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
               "Operating System :: OS Independent",
               "Programming Language :: Python",
               "Programming Language :: Python :: 2.4",
               "Programming Language :: Python :: 2.5",
               "Programming Language :: Python :: 2.6",
               "Programming Language :: Python :: 2.7",
               "Programming Language :: Python :: 3.0",
               "Programming Language :: Python :: 3.1",
               "Programming Language :: Python :: 3.2",
               "Programming Language :: Python :: 3.3",
               "Programming Language :: Python :: 3.4",
               "Programming Language :: Python :: 3.5",
               "Programming Language :: Python :: 3.6",
               "Programming Language :: Python :: 3.7",
               "Programming Language :: Python :: 3.8",
               "Topic :: Software Development :: Debuggers",
               "Topic :: Software Development :: Libraries :: Python Modules",
               ]

# The rest in alphabetic order
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
author_email = "rb@dustyfeet.com"
entry_points = {
    'console_scripts': [
        'uncompyle6=uncompyle6.bin.uncompile:main_bin',
        'pydisassemble=uncompyle6.bin.pydisassemble:main',
    "console_scripts": [
        "uncompyle6=uncompyle6.bin.uncompile:main_bin",
        "pydisassemble=uncompyle6.bin.pydisassemble:main",
    ]}
ftp_url = None
install_requires = ['spark-parser >= 1.8.7, < 1.9.0',
                    'xdis >= 4.0.2, < 4.1.0']
install_requires = ["spark-parser >= 1.8.9, < 1.9.0",
                    "xdis >= 4.1.3, < 4.2.0"]

license = 'GPL3'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
license = "GPL3"
mailing_list = "python-debugger@googlegroups.com"
modname = "uncompyle6"
py_modules = None
short_desc = 'Python cross-version byte-code decompiler'
web = 'https://github.com/rocky/python-uncompyle6/'
short_desc = "Python cross-version byte-code decompiler"
web = "https://github.com/rocky/python-uncompyle6/"

# tracebacks in zip files are funky and not debuggable
zip_safe = True
@@ -82,5 +82,5 @@ def read(*rnames):
    return open(os.path.join(srcdir, *rnames)).read()

# Get info from files; set: long_description and VERSION
long_description = ( read("README.rst") + '\n' )
exec(read('uncompyle6/version.py'))
long_description = ( read("README.rst") + "\n" )
exec(read("uncompyle6/version.py"))
@@ -56,19 +56,21 @@

    $ . ./admin-tools/make-dist-older.sh
    $ git tag release-python-2.4-$VERSION

    $ twine check dist/uncompyle6-$VERSION*
    $ . ./admin-tools/make-dist-newer.sh

Goto https://github.com/rocky/python-uncompyle6/releases
    $ twine check dist/uncompyle6-$VERSION*

# Upload single package and look at Rst Formating

    $ twine check dist/uncompyle6-${VERSION}*
    $ twine upload dist/uncompyle6-${VERSION}-py3.3.egg

# Upload rest of versions

    $ twine upload dist/uncompyle6-${VERSION}*

Goto https://github.com/rocky/python-uncompyle6/releases

# Push tags:

    $ git push --tags
@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
    echo "This script should be *sourced* rather than run directly through bash"
    exit 1
fi
export PYVERSIONS='3.6.8 3.7.3 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.8'
export PYVERSIONS='3.5.9 3.6.9 2.6.9 3.3.7 2.7.17 3.2.6 3.1.5 3.4.10 3.7.5'
@@ -1,5 +1,5 @@
#!/bin/bash
PYTHON_VERSION=3.6.8
PYTHON_VERSION=3.7.5

# FIXME put some of the below in a common routine
function finish {
@@ -1,50 +1,54 @@
import re
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY # , PYTHON_VERSION
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY  # , PYTHON_VERSION
from uncompyle6.parser import get_python_parser, python_parser
from uncompyle6.scanner import get_scanner

def test_grammar():

def test_grammar():
    def check_tokens(tokens, opcode_set):
        remain_tokens = set(tokens) - opcode_set
        remain_tokens = set([re.sub(r'_\d+$','', t) for t in remain_tokens])
        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
        remain_tokens = set([re.sub('LOAD_CODE$','', t) for t in remain_tokens])
        remain_tokens = set([re.sub(r"_\d+$", "", t) for t in remain_tokens])
        remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens])
        remain_tokens = set([re.sub("LOAD_CODE$", "", t) for t in remain_tokens])
        remain_tokens = set(remain_tokens) - opcode_set
        assert remain_tokens == set([]), \
            "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())
        assert remain_tokens == set([]), "Remaining tokens %s\n====\n%s" % (
            remain_tokens,
            p.dump_grammar(),
        )

    p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
    (lhs, rhs, tokens,
     right_recursive, dup_rhs) = p.check_sets()
    (lhs, rhs, tokens, right_recursive, dup_rhs) = p.check_sets()

    # We have custom rules that create the below
    expect_lhs = set(['pos_arg', 'attribute'])
    expect_lhs = set(["pos_arg", "attribute"])
    if PYTHON_VERSION < 3.8:
        expect_lhs.add('get_iter')
        expect_lhs.add("get_iter")
    else:
        expect_lhs.add("async_with_as_stmt")
        expect_lhs.add("async_with_stmt")

    unused_rhs = set(["list", "mkfunc", "mklambda", "unpack"])

    unused_rhs = set(['list', 'mkfunc',
                      'mklambda',
                      'unpack',])

    expect_right_recursive = set([('designList',
                                   ('store', 'DUP_TOP', 'designList'))])
    expect_right_recursive = set([("designList", ("store", "DUP_TOP", "designList"))])

    if PYTHON_VERSION < 3.7:
        unused_rhs.add('call')
        unused_rhs.add("call")

    if PYTHON_VERSION > 2.6:
        expect_lhs.add('kvlist')
        expect_lhs.add('kv3')
        unused_rhs.add('dict')
        expect_lhs.add("kvlist")
        expect_lhs.add("kv3")
        unused_rhs.add("dict")

    if PYTHON3:
        expect_lhs.add('load_genexpr')
        expect_lhs.add("load_genexpr")

        unused_rhs = unused_rhs.union(set("""
        unused_rhs = unused_rhs.union(
            set(
                """
        except_pop_except generator_exp
        """.split()))
                """.split()
            )
        )
        if PYTHON_VERSION >= 3.0:
            expect_lhs.add("annotate_arg")
            expect_lhs.add("annotate_tuple")
@@ -53,17 +57,19 @@ def test_grammar():
            unused_rhs.add("classdefdeco1")
            unused_rhs.add("tryelsestmtl")
        if PYTHON_VERSION >= 3.5:
            expect_right_recursive.add((('l_stmts',
                                         ('lastl_stmt', 'come_froms', 'l_stmts'))))
            expect_right_recursive.add(
                (("l_stmts", ("lastl_stmt", "come_froms", "l_stmts")))
            )
            pass
        elif 3.0 < PYTHON_VERSION < 3.3:
            expect_right_recursive.add((('l_stmts',
                                         ('lastl_stmt', 'COME_FROM', 'l_stmts'))))
            expect_right_recursive.add(
                (("l_stmts", ("lastl_stmt", "COME_FROM", "l_stmts")))
            )
            pass
        pass
    pass
    else:
        expect_lhs.add('kwarg')
        expect_lhs.add("kwarg")

    assert expect_lhs == set(lhs)

@@ -73,9 +79,16 @@ def test_grammar():

    assert expect_right_recursive == right_recursive

    expect_dup_rhs = frozenset([('COME_FROM',), ('CONTINUE',), ('JUMP_ABSOLUTE',),
                                ('LOAD_CONST',),
                                ('JUMP_BACK',), ('JUMP_FORWARD',)])
    expect_dup_rhs = frozenset(
        [
            ("COME_FROM",),
            ("CONTINUE",),
            ("JUMP_ABSOLUTE",),
            ("LOAD_CONST",),
            ("JUMP_BACK",),
            ("JUMP_FORWARD",),
        ]
    )
    reduced_dup_rhs = dict((k, dup_rhs[k]) for k in dup_rhs if k not in expect_dup_rhs)
    for k in reduced_dup_rhs:
        print(k, reduced_dup_rhs[k])
@@ -83,7 +96,7 @@ def test_grammar():

    s = get_scanner(PYTHON_VERSION, IS_PYPY)
    ignore_set = set(
        """
        """
        JUMP_BACK CONTINUE
        COME_FROM COME_FROM_EXCEPT
        COME_FROM_EXCEPT_CLAUSE
@@ -92,22 +105,33 @@ def test_grammar():
        LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_STR LOAD_CODE
        LAMBDA_MARKER
        RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
        """.split())
        """.split()
    )
    if 2.6 <= PYTHON_VERSION <= 2.7:
        opcode_set = set(s.opc.opname).union(ignore_set)
        if PYTHON_VERSION == 2.6:
            opcode_set.add("THEN")
        check_tokens(tokens, opcode_set)
    elif PYTHON_VERSION == 3.4:
        ignore_set.add('LOAD_CLASSNAME')
        ignore_set.add('STORE_LOCALS')
        ignore_set.add("LOAD_CLASSNAME")
        ignore_set.add("STORE_LOCALS")
        opcode_set = set(s.opc.opname).union(ignore_set)
        check_tokens(tokens, opcode_set)


def test_dup_rule():
    import inspect
    python_parser(PYTHON_VERSION, inspect.currentframe().f_code,
                  is_pypy=IS_PYPY,
                  parser_debug={
                      'dups': True, 'transition': False, 'reduce': False,
                      'rules': False, 'errorstack': None, 'context': True})

    python_parser(
        PYTHON_VERSION,
        inspect.currentframe().f_code,
        is_pypy=IS_PYPY,
        parser_debug={
            "dups": True,
            "transition": False,
            "reduce": False,
            "rules": False,
            "errorstack": None,
            "context": True,
        },
    )
@@ -15,7 +15,7 @@ else:
    def iteritems(d):
        return d.iteritems()

from uncompyle6.semantics.pysource import SourceWalker as SourceWalker
from uncompyle6.semantics.pysource import (SourceWalker, deparse_code2str)

def test_template_engine():
    s = StringIO()
@@ -185,3 +185,11 @@ def test_tables():
    assert arg == len(entry), (
        "%s[%s] arg %d should be length of entry %d. Full entry: %s" %
        (name, k, arg, len(entry), entry))

def test_deparse_code2str():
    def deparse_test(co):
        "This is a docstring"
        s = deparse_code2str(co, debug_opts={"asm": "after", "tree": True})
        assert s
        return
    deparse_test(deparse_test.__code__)
@@ -1,16 +1,20 @@
# future
from __future__ import print_function

# std
import os
import difflib
import subprocess
import tempfile
import functools

# uncompyle6 / xdis
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY, code_deparse

# TODO : I think we can get xdis to support the dis api (python 3 version) by doing something like this there
from xdis.bytecode import Bytecode
from xdis.main import get_opcode

opc = get_opcode(PYTHON_VERSION, IS_PYPY)
Bytecode = functools.partial(Bytecode, opc=opc)
import six
@@ -20,6 +24,7 @@ if PYTHON3:
else:
    from StringIO import StringIO


def _dis_to_text(co):
    return Bytecode(co).dis()

@@ -33,36 +38,32 @@ def print_diff(original, uncompyled):
    :param original: Text describing the original code object.
    :param uncompyled: Text describing the uncompyled code object.
    """
    original_lines = original.split('\n')
    uncompyled_lines = uncompyled.split('\n')
    args = original_lines, uncompyled_lines, 'original', 'uncompyled'
    original_lines = original.split("\n")
    uncompyled_lines = uncompyled.split("\n")
    args = original_lines, uncompyled_lines, "original", "uncompyled"
    try:
        from bs4 import BeautifulSoup

        diff = difflib.HtmlDiff().make_file(*args)
        diff = BeautifulSoup(diff, "html.parser")
        diff.select_one('table[summary="Legends"]').extract()
    except ImportError:
        print('\nTo display diff highlighting run:\n pip install BeautifulSoup4')
        print("\nTo display diff highlighting run:\n pip install BeautifulSoup4")
        diff = difflib.HtmlDiff().make_table(*args)

    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(str(diff).encode('utf-8'))
        f.write(str(diff).encode("utf-8"))

    try:
        print()
        html = subprocess.check_output([
            'elinks',
            '-dump',
            '-no-references',
            '-dump-color-mode',
            '1',
            f.name,
        ]).decode('utf-8')
        html = subprocess.check_output(
            ["elinks", "-dump", "-no-references", "-dump-color-mode", "1", f.name]
        ).decode("utf-8")
        print(html)
    except:
        print('\nFor side by side diff install elinks')
        print("\nFor side by side diff install elinks")
        diff = difflib.Differ().compare(original_lines, uncompyled_lines)
        print('\n'.join(diff))
        print("\n".join(diff))
    finally:
        os.unlink(f.name)

@@ -80,18 +81,19 @@ def are_instructions_equal(i1, i2):

    :return: True if the two instructions are approximately equal, otherwise False.
    """
    result = (1 == 1
    result = (
        1 == 1
        and i1.opname == i2.opname
        and i1.opcode == i2.opcode
        and i1.arg == i2.arg
        # ignore differences due to code objects
        # TODO : Better way of ignoring address
        and (i1.argval == i2.argval or '<code object' in str(i1.argval))
        and (i1.argval == i2.argval or "<code object" in str(i1.argval))
        # TODO : Should probably recurse to check code objects
        and (i1.argrepr == i2.argrepr or '<code object' in i1.argrepr)
        and (i1.argrepr == i2.argrepr or "<code object" in i1.argrepr)
        and i1.offset == i2.offset
        # ignore differences in line numbers
        #and i1.starts_line
        # and i1.starts_line
        and i1.is_jump_target == i2.is_jump_target
    )
    return result
@@ -115,22 +117,21 @@ def are_code_objects_equal(co1, co2):
    return True


def validate_uncompyle(text, mode='exec'):
def validate_uncompyle(text, mode="exec"):
    """
    Validate decompilation of the given source code.

    :param text: Source to validate decompilation of.
    """
    original_code = compile(text, '<string>', mode)
    original_code = compile(text, "<string>", mode)
    original_dis = _dis_to_text(original_code)
    original_text = text

    deparsed = code_deparse(original_code,
                            out=six.StringIO(),
                            version=PYTHON_VERSION,
                            compile_mode=mode)
    deparsed = code_deparse(
        original_code, out=six.StringIO(), version=PYTHON_VERSION, compile_mode=mode
    )
    uncompyled_text = deparsed.text
    uncompyled_code = compile(uncompyled_text, '<string>', 'exec')
    uncompyled_code = compile(uncompyled_text, "<string>", "exec")

    if not are_code_objects_equal(uncompyled_code, original_code):

@@ -138,15 +139,17 @@ def validate_uncompyle(text, mode='exec'):

        def output(text, dis):
            width = 60
            return '\n\n'.join([
                ' SOURCE CODE '.center(width, '#'),
                text.strip(),
                ' BYTECODE '.center(width, '#'),
                dis
            ])
            return "\n\n".join(
                [
                    " SOURCE CODE ".center(width, "#"),
                    text.strip(),
                    " BYTECODE ".center(width, "#"),
                    dis,
                ]
            )

        original = output(original_text, original_dis)
        uncompyled = output(uncompyled_text, uncompyled_dis)
        print_diff(original, uncompyled)

        assert 'original' == 'uncompyled'
        assert "original" == "uncompyled"
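A hypothetical call of the helper above; any short source string works:

    # Round-trips the text through compile -> code_deparse -> compile and
    # asserts the original and decompiled code objects match, printing a
    # side-by-side diff when they do not.
    validate_uncompyle("x = 1\nif x > 0:\n    x += 2\n")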
@@ -1,2 +1,4 @@
flake8
hypothesis<=3.0.0
six
pytest==3.2.5
@@ -1,6 +1,6 @@
[bdist_rpm]
release = 1
packager = Mysterie <kajusska@gmail.com>
packager = rocky <rb@dustyfeet.com>
doc_files = README
# CHANGES.txt
# USAGE.txt
@@ -8,4 +8,4 @@ doc_files = README
# examples/

[bdist_wheel]
universal=1
# universal=1
setup.py (1 line changed)
@@ -32,6 +32,7 @@ setup(
    install_requires = install_requires,
    license = license,
    long_description = long_description,
    long_description_content_type = "text/x-rst",
    name = modname,
    packages = find_packages(),
    py_modules = py_modules,
@@ -1,5 +1,6 @@
PHONY=check clean dist distclean test test-unit test-functional rmChangeLog clean_pyc nosetests \
  check-bytecode-1 check-bytecode-1.3 check-bytecode-1.4 check-bytecode-1.5 \
  check-bytecode-1.0 check-bytecode-1.1 check-bytecode-1.2 check-bytecode-1.3 \
  check-bytecode-1 check-bytecode-1.4 check-bytecode-1.5 check-bytecode-1.6 \
  check-bytecode-2 check-bytecode-3 check-bytecode-3-short \
  check-bytecode-2.2 check-bytecode-2.3 check-bytecode-2.4 \
  check-short check-2.6 check-2.7 check-3.0 check-3.1 check-3.2 check-3.3 \
@@ -85,7 +86,7 @@ check-disasm:
        $(PYTHON) dis-compare.py

#: Check deparsing bytecode 1.x only
check-bytecode-1: check-bytecode-1.4 check-bytecode-1.5
check-bytecode-1: check-bytecode-1.0 check-bytecode-1.1 check-bytecode-1.2 check-bytecode-1.3 check-bytecode-1.4 check-bytecode-1.5 check-bytecode-1.6

#: Check deparsing bytecode 2.x only
check-bytecode-2:
@@ -98,8 +99,8 @@ check-bytecode-3:
        $(PYTHON) test_pythonlib.py --bytecode-3.0 \
           --bytecode-3.1 --bytecode-3.2 --bytecode-3.3 \
           --bytecode-3.4 --bytecode-3.5 --bytecode-3.6 \
           --bytecode-3.7 --bytecode-3.8 \
           --bytecode-pypy3.2
           --bytecode-3.7 \
           --bytecode-pypy3.2 --bytecode-pypy3.6 --bytecode-3.8

#: Check deparsing on selected bytecode 3.x
check-bytecode-3-short:
@@ -109,6 +110,7 @@ check-bytecode-3-short:
#: Check deparsing bytecode on all Python 2 and Python 3 versions
check-bytecode: check-bytecode-3
        $(PYTHON) test_pythonlib.py \
           --bytecode-1.0 --bytecode-1.1 --bytecode-1.2 \
           --bytecode-1.3 --bytecode-1.4 --bytecode-1.5 \
           --bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
           --bytecode-2.1 --bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
@@ -122,6 +124,18 @@ check-bytecode-short: check-bytecode-3-short
           --bytecode-2.6 --bytecode-2.7 --bytecode-pypy2.7


#: Check deparsing bytecode 1.0 only
check-bytecode-1.0:
        $(PYTHON) test_pythonlib.py --bytecode-1.0

#: Check deparsing bytecode 1.1 only
check-bytecode-1.1:
        $(PYTHON) test_pythonlib.py --bytecode-1.1

#: Check deparsing bytecode 1.2 only
check-bytecode-1.2:
        $(PYTHON) test_pythonlib.py --bytecode-1.2

#: Check deparsing bytecode 1.3 only
check-bytecode-1.3:
        $(PYTHON) test_pythonlib.py --bytecode-1.3
@@ -134,6 +148,10 @@ check-bytecode-1.4:
check-bytecode-1.5:
        $(PYTHON) test_pythonlib.py --bytecode-1.5

#: Check deparsing bytecode 1.6 only
check-bytecode-1.6:
        $(PYTHON) test_pythonlib.py --bytecode-1.6

#: Check deparsing Python 2.1
check-bytecode-2.1:
        $(PYTHON) test_pythonlib.py --bytecode-2.1
@@ -314,8 +332,16 @@ pypy-2.7 5.0 5.3 6.0:
pypy-3.2 2.4:
        $(PYTHON) test_pythonlib.py --bytecode-pypy3.2 --verify

#: PyPy 5.0.x with Python 3.6 ...
#: PyPy 7.1.x with Python 3.6.1 ...
check-bytecode-pypy3.6: 7.1
7.1:
        $(PYTHON) test_pythonlib.py --bytecode-pypy3.6-run --verify-run
        $(PYTHON) test_pythonlib.py --bytecode-pypy3.6 --verify

#: PyPy 7.2.x with Python 3.6.9
check-bytecode-pypy3.6: 7.2
7.2:
        $(PYTHON) test_pythonlib.py --bytecode-pypy3.6-run --verify-run
        $(PYTHON) test_pythonlib.py --bytecode-pypy3.6 --verify
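For reference, the new targets above can be exercised individually; a hedged example session (GNU make and the checked-in test_pythonlib.py assumed):

    $ make check-bytecode-1.6      # runs $(PYTHON) test_pythonlib.py --bytecode-1.6
    $ make check-bytecode-1        # chains the 1.0 through 1.6 checks
    $ make check-bytecode-pypy3.6  # make merges the two prerequisite lists, so both 7.1 and 7.2 run
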
@@ -2,22 +2,23 @@
""" Trivial helper program to byte-compile and then decompile a source file.
"""
import os, sys, py_compile

assert len(sys.argv) >= 2
version = sys.version[0:3]
if sys.argv[1] == '--run':
    suffix = '_run'
if sys.argv[1] in ("--run", "-r"):
    suffix = "_run"
    py_source = sys.argv[2:]
else:
    suffix = ''
    suffix = ""
    py_source = sys.argv[1:]

for path in py_source:
    short = os.path.basename(path)
    if hasattr(sys, 'pypy_version_info'):
        cfile = "bytecode_pypy%s%s/%s" % (version, suffix, short) + 'c'
    if hasattr(sys, "pypy_version_info"):
        cfile = "bytecode_pypy%s%s/%s" % (version, suffix, short) + "c"
    else:
        cfile = "bytecode_%s%s/%s" % (version, suffix, short) + 'c'
        cfile = "bytecode_%s%s/%s" % (version, suffix, short) + "c"
    print("byte-compiling %s to %s" % (path, cfile))
    py_compile.compile(path, cfile)
    if isinstance(version, str) or version >= (2, 6, 0):
        os.system("../bin/uncompyle6 -a -t %s" % cfile)
        os.system("../bin/uncompyle6 -a -T %s" % cfile)
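A hedged usage sketch of this helper; its own filename is not shown in the hunk, so compile-run.py is assumed, run under CPython 3.7:

    $ python compile-run.py --run 01_assert2.py
    byte-compiling 01_assert2.py to bytecode_3.7_run/01_assert2.pyc
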
BIN  test/bytecode_1.0/simple_const.pyc (new file; binary not shown)
BIN  test/bytecode_1.0/unpack_assign.pyc (new file; binary not shown)
BIN  test/bytecode_1.1/simple_const.pyc (new file; binary not shown)
BIN  test/bytecode_1.2/simple_const.pyc (new file; binary not shown)
BIN  test/bytecode_1.3/simple_const.pyc (new file; binary not shown)
BIN  test/bytecode_1.6/simple_const.pyc (new file; binary not shown)
BIN  test/bytecode_2.6_run/00_generator.pyc (new file; binary not shown)
BIN  test/bytecode_2.6_run/01_if_while_return.pyc (new file; binary not shown)
Binary file not shown.
Binary file not shown.
BIN  test/bytecode_3.0/03_ifelse.pyc (new file; binary not shown)
Binary file not shown.
Binary file not shown.
BIN  test/bytecode_3.5_run/02_build_list_unpack.pyc (new file; binary not shown)
BIN  test/bytecode_3.6_run/02_build_list_unpack.pyc (new file; binary not shown)
Binary file not shown.
BIN  test/bytecode_3.7_run/01_assert2.pyc (new file; binary not shown)
BIN  test/bytecode_3.7_run/02_build_list_unpack.pyc (new file; binary not shown)
BIN  test/bytecode_3.8/01_for_continue.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.5/00_assign.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.5/00_import.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.5/11_classbug.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.6/00_import.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.6/04_class_kwargs.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.6/11_classbug.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.6_run/00_assign.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.6_run/00_docstring.pyc (new file; binary not shown)
BIN  test/bytecode_pypy3.6_run/01_fstring.pyc (new file; binary not shown)
test/simple_source/bug26/01_if_while_return.py (new file)
@@ -0,0 +1,19 @@
# Issue #284 in Python 2.6
# See https://github.com/rocky/python-uncompyle6/issues/284
# Decompilation failed when return was the last statement
# in the while loop inside the if block

# This code is RUNNABLE!

def f1():
    if True:
        while True:
            return 5

def f2():
    if True:
        while 1:
            return 6


assert f1() == 5 and f2() == 6
@@ -7,3 +7,13 @@ def start_new_thread(function, args, kwargs={}):
        pass
    except:
        args()

# Adapted from 3.0.1 code.py
# Bug is again JUMP_FORWARD elimination compared
# to earlier and later Pythons.
def interact():
    while 1:
        try:
            more = 1
        except KeyboardInterrupt:
            more = 0
@@ -7,3 +7,8 @@ while 1:
        raise RuntimeError
    else:
        raise RuntimeError

# Adapted from 3.0.1 cgi.py
def _parseparam(s, end):
    while end > 0 and s.count(''):
        end = s.find(';')
test/simple_source/bug30/03_ifelse.py (new file)
@@ -0,0 +1,50 @@
# Adapted from 3.0 base64
# Problem was handling if/else which
# needs to be like Python 2.6 (and not like 2.7 or 3.1)
def main(args, f, func, sys):
    """Small main program"""
    if args and args[0] != '-':
        func(f, sys.stdout.buffer)
    else:
        func(sys.stdin.buffer, sys.stdout.buffer)

# From Python 3.0 _markupbase.py.
#
# The problem was in the way "if"s are generated in 3.0, which is sort
# of like a more optimized Python 2.6, with reduced extraneous jumps,
# but still 2.6-ish and not 2.7- or 3.1-ish.
def parse_marked_section(fn, i, rawdata, report=1):
    if report:
        j = 1
        fn(rawdata[i: j])
    return 10

# From 3.0.1 _abcoll.py
# Bug was in genexpr_func which doesn't have a JUMP_BACK, but
# in its gen_comp_body we can use COME_FROM in its place.
# As above, omission of JUMPs is a feature of 3.0 that doesn't
# seem to be in later versions (or earlier ones like 2.6).
def __and__(self, other, Iterable):
    if not isinstance(other, Iterable):
        return NotImplemented
    return self._from_iterable(value for value in other if value in self)

# Adapted from 3.0.1 abc.py
# Bug was in handling multiple COME_FROMs in return_if_stmt
def __instancecheck__(subtype, subclass, cls):
    if subtype:
        if (cls and subclass):
            return False


# Adapted from 3.0.1 abc.py
# Bug was the "jump_absolute_else" rule allowing
# "else" to attach to the wrong place.

def _strptime(locale_time, found_zone, time):
    for tz_values in locale_time:
        if found_zone:
            if (time and found_zone):
                break
        else:
            break
@@ -18,3 +18,13 @@ assert normpath(['.']) == []
assert normpath(['a', 'b', '..']) == ['a']
assert normpath(['a', 'b', '', 'c']) == ['a', 'b', 'c']
assert normpath(['a', 'b', '.', '', 'c', '..']) == ['a', 'b']

# Adapted from 3.0.1 cgi.py
# Bug was in detecting "or" and "and" due to lack of PUSH/POP_IF instructions.
def handle(format, html, text):
    formatter = (format and html) or text
    return formatter

assert handle(False, False, True)
assert not handle(True, False, False)
assert handle(True, True, False)
test/simple_source/bug35/02_build_list_unpack.py (new file)
@@ -0,0 +1,17 @@
a = 5
x = [1, 2, 3]
i = [(a,), x]
j = [a, *x]

def f1(a):
    return a[0], a[1]

def f2(b):
    return len(b), b[0]+5, b[2]

def f3(x, y):
    return [1, *x, y]

assert f1(i) == ((5,), x)
assert f2(j) == (4, 10, 2)
assert f3(x, a) == [1, 1, 2, 3, 5]
test/simple_source/bug37/01_assert2.py (new file)
@@ -0,0 +1,8 @@
# Self-checking test.
# Bug was in if transform not inverting expression
# This file is RUNNABLE!
def test_assert2(c):
    if c < 2:
        raise SyntaxError('Oops')

test_assert2(5)
test/simple_source/bug38/01_extra_iter.py (new file)
@@ -0,0 +1,7 @@
# Adapted from 3.3 urllib/parse.py
qs = "https://travis-ci.org/rocky/python-uncompyle6/builds/605260823?utm_medium=notification&utm_source=email"
expect = ['https://travis-ci.org/rocky/python-uncompyle6/builds/605260823?utm_medium=notification', 'utm_source=email']

# One should see visually that we don't add an extra iter(); it is not
# technically wrong, just unnecessary.
assert expect == [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
test/simple_source/bug38/01_for_continue.py (new file)
@@ -0,0 +1,5 @@
# Bug was turning a JUMP_BACK into a CONTINUE, so the "for" has no JUMP_BACK.
# Also there is no POP_BLOCK since there isn't anything in the loop.
# In the future when we have better control flow, we might redo all of this.
for i in range(2):
    pass
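To see the jump shape those comments describe, the loop can be inspected with the stdlib disassembler; a minimal sketch (output differs by CPython version; 3.8 is assumed):

    import dis

    # The loop body is empty, so per the note above there is no POP_BLOCK,
    # and the backward jump is the only loop plumbing left.
    code = compile("for i in range(2):\n    pass\n", "<example>", "exec")
    dis.dis(code)
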
@@ -43,10 +43,10 @@ case $PYVERSION in
    2.5)
        SKIP_TESTS=(
            [test_contextlib.py]=1 # Syntax error - look at
            [test_dis.py]=1 # We change line numbers - duh!
            [test_grammar.py]=1 # Too many stmts. Handle large stmts
            [test_grp.py]=1 # Long test - might work Control flow?
            [test_pdb.py]=1 # Line-number specific
            [test_dis.py]=1 # We change line numbers - duh!
            [test_grammar.py]=1 # Too many stmts. Handle large stmts
            [test_grp.py]=1 # Long test - might work Control flow?
            [test_pdb.py]=1 # Line-number specific
            [test_pwd.py]=1 # Long test - might work? Control flow?
            [test_queue.py]=1 # Control flow?
            [test_re.py]=1 # Probably Control flow?
@@ -56,18 +56,46 @@ case $PYVERSION in
        ;;
    2.6)
        SKIP_TESTS=(
            [test_aepack.py]=1
            [test_aifc.py]=1
            [test_array.py]=1
            [test_audioop.py]=1
            [test_base64.py]=1
            [test_bigmem.py]=1
            [test_binascii.py]=1
            [test_builtin.py]=1
            [test_bytes.py]=1
            [test_class.py]=1
            [test_codeccallbacks.py]=1
            [test_codecencodings_cn.py]=1
            [test_codecencodings_hk.py]=1
            [test_codecencodings_jp.py]=1
            [test_codecencodings_kr.py]=1
            [test_codecencodings_tw.py]=1
            [test_codecencodings_cn.py]=1
            [test_codecmaps_hk.py]=1
            [test_codecmaps_jp.py]=1
            [test_codecmaps_kr.py]=1
            [test_codecmaps_tw.py]=1
            [test_codecs.py]=1
            [test_compile.py]=1 # Intermittent - sometimes works and sometimes doesn't
            [test_grammar.py]=1 # Need real flow control. "and" inside "or"
                                # and "and" inside ifelse need to simultaneously work
            [test_cookielib.py]=1
            [test_copy.py]=1
            [test_decimal.py]=1
            [test_descr.py]=1 # Problem in pickle.py?
            [test_exceptions.py]=1
            [test_extcall.py]=1
            [test_float.py]=1
            [test_future4.py]=1
            [test_generators.py]=1
            [test_grp.py]=1 # Long test - might work Control flow?
            [test_opcodes.py]=1
            [test_pwd.py]=1 # Long test - might work? Control flow?
            [test_re.py]=1 # Probably Control flow?
            [test_queue.py]=1 # Control flow?
            [test_strftime.py]=1
            [test_trace.py]=1 # Line numbers are expected to be different
            [test_zipfile64.py]=1 # Skip Long test
            [test_zlib.py]=1 # Look at
            [test_zlib.py]=1 # Takes too long to run (more than 3 minutes 39 seconds)
            # .pyenv/versions/2.6.9/lib/python2.6/lib2to3/refactor.pyc
            # .pyenv/versions/2.6.9/lib/python2.6/pyclbr.pyc
            # .pyenv/versions/2.6.9/lib/python2.6/quopri.pyc -- look at ishex, is short
@@ -103,9 +131,11 @@ case $PYVERSION in
            [test_httplib.py]=1 # Ok, but POWER has problems with this
            [test_pdb.py]=1 # Ok, but POWER has problems with this

            [test_capi.py]=1
            [test_curses.py]=1 # Possibly fails on its own but not detected
            [test_dis.py]=1 # We change line numbers - duh!
            [test_doctest.py]=1 # Fails on its own
            [test_exceptions.py]=1
            [test_format.py]=1 # control flow. uncompyle2 does not have problems here
            [test_generators.py]=1 # control flow. uncompyle2 has problem here too
            [test_grammar.py]=1 # Too many stmts. Handle large stmts
@@ -113,6 +143,9 @@ case $PYVERSION in
            [test_ioctl.py]=1 # Test takes too long to run
            [test_itertools.py]=1 # Fix erroneous reduction to "conditional_true".
                                  # See test/simple_source/bug27+/05_not_unconditional.py
            [test_long.py]=1
            [test_long_future.py]=1
            [test_math.py]=1
            [test_memoryio.py]=1 # FIX
            [test_multiprocessing.py]=1 # On uncompyle2, takes 24 secs
            [test_pep352.py]=1 # ?
@@ -122,9 +155,11 @@ case $PYVERSION in
            [test_pty.py]=1
            [test_queue.py]=1 # Control flow?
            [test_re.py]=1 # Probably Control flow?
            [test_runpy.py]=1 # Long and fails on its own
            [test_select.py]=1 # Runs okay but takes 11 seconds
            [test_socket.py]=1 # Runs ok but takes 22 seconds
            [test_subprocess.py]=1 # Runs ok but takes 22 seconds
            [test_sys_setprofile.py]=1
            [test_sys_settrace.py]=1 # Line numbers are expected to be different
            [test_strtod.py]=1 # FIX
            [test_traceback.py]=1 # Line numbers change - duh.
@@ -188,6 +223,7 @@ fi
mkdir $TESTDIR || exit $?
cp -r ${PYENV_ROOT}/versions/${PYVERSION}.${MINOR}/lib/python${PYVERSION}/test $TESTDIR
cd $TESTDIR/test
pyenv local $FULLVERSION
export PYTHONPATH=$TESTDIR

# Run tests
@@ -204,10 +240,14 @@ else
fi

typeset -i ALL_FILES_STARTTIME=$(date +%s)
typeset -i skipped=0

for file in $files; do
    # AIX bash doesn't grok [[ -v SKIP... ]]
    [[ ${SKIP_TESTS[$file]} == 1 ]] && continue
    if [[ ${SKIP_TESTS[$file]} == 1 ]] ; then
        ((skipped++))
        continue
    fi

    # If the file fails *before* decompiling, skip it!
    typeset -i STARTTIME=$(date +%s)
@@ -241,7 +281,7 @@ for file in $files; do
    fi
    (( rc != 0 && allerrs++ ))
    if (( STOP_ONERROR && rc )) ; then
        echo "** Ran $i tests before failure **"
        echo "** Ran $i tests before failure. Skipped $skipped tests for known failures. **"
        exit $allerrs
    fi
done
@@ -251,5 +291,5 @@ typeset -i ALL_FILES_ENDTIME=$(date +%s)

printf "Ran $i unit-test files in "
displaytime $time_diff

echo "Skipped $skipped tests for known failures."
exit $allerrs
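A minimal bash sketch of the skip-bookkeeping pattern above (assumes bash 4+ for associative arrays; the [[ -v ... ]] form is avoided because, as the comment notes, AIX bash does not grok it):

    declare -A SKIP_TESTS=([test_dis.py]=1)
    typeset -i skipped=0
    for file in test_dis.py test_os.py; do
        # Portable form: compare the value instead of testing existence.
        if [[ ${SKIP_TESTS[$file]} == 1 ]] ; then
            ((skipped++))
            continue
        fi
        echo "running $file"
    done
    echo "Skipped $skipped tests for known failures."
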
@@ -28,64 +28,80 @@ from fnmatch import fnmatch
from uncompyle6 import main, PYTHON3
import xdis.magics as magics

#----- configure this for your needs
# ----- configure this for your needs

python_versions = [v for v in magics.python_versions if
                   re.match('^[0-9.]+$', v)]
python_versions = [v for v in magics.python_versions if re.match("^[0-9.]+$", v)]

# FIXME: we should remove Python versions that we don't support.
# These include Jython, and Python versions whose bytecode changed before release.

TEST_VERSIONS = (
    'pypy3-2.4.0', 'pypy-2.6.1',
    'pypy-5.0.1', 'pypy-5.3.1', 'pypy3.5-5.7.1-beta',
    'pypy3.5-5.9.0', 'pypy3.5-6.0.0',
    'native') + tuple(python_versions)
    "pypy3-2.4.0",
    "pypy-2.6.1",
    "pypy-5.0.1",
    "pypy-5.3.1",
    "pypy3.5-5.7.1-beta",
    "pypy3.5-5.9.0",
    "pypy3.5-6.0.0",
    "pypy3.6-7.1.0",
    "pypy3.6-7.1.1",
    "pypy3.6-7.2.0",
    "native",
) + tuple(python_versions)


target_base = '/tmp/py-dis/'
lib_prefix = os.path.join(os.environ['HOME'], '.pyenv/versions')
target_base = "/tmp/py-dis/"
lib_prefix = os.path.join(os.environ["HOME"], ".pyenv/versions")

PYC = ('*.pyc', )
PYO = ('*.pyo', )
PYOC = ('*.pyc', '*.pyo')
PYC = ("*.pyc",)
PYO = ("*.pyo",)
PYOC = ("*.pyc", "*.pyo")

#-----
# -----

test_options = {
    # name: (src_basedir, pattern, output_base_suffix)
    'test': ('./test', PYOC, 'test'),
    'max=': 200,
}
    "test": ("./test", PYOC, "test"),
    "max=": 200,
}

for vers in TEST_VERSIONS:
    if vers.startswith('pypy'):
        if vers.startswith('pypy3.'):
    if vers.startswith("pypy"):
        if vers.startswith("pypy3."):
            short_vers = vers[4:6]
        else:
            short_vers = vers[0:-2]

        test_options[vers] = (os.path.join(lib_prefix, vers, 'lib_pypy'),
                              PYC, 'python-lib'+short_vers)
        test_options[vers] = (
            os.path.join(lib_prefix, vers, "lib_pypy"),
            PYC,
            "python-lib" + short_vers,
        )
    else:
        if vers == 'native':
        if vers == "native":
            short_vers = os.path.basename(sys.path[-1])
            test_options[vers] = (sys.path[-1],
                                  PYC, short_vers)
            test_options[vers] = (sys.path[-1], PYC, short_vers)
        else:
            short_vers = vers[:3]
            test_options[vers] = (os.path.join(lib_prefix, vers, 'lib', 'python'+short_vers),
                                  PYC, 'python-lib'+short_vers)
            test_options[vers] = (
                os.path.join(lib_prefix, vers, "lib", "python" + short_vers),
                PYC,
                "python-lib" + short_vers,
            )

def do_tests(src_dir, patterns, target_dir, start_with=None,
             do_verify=False, max_files=200):

def do_tests(
    src_dir, patterns, target_dir, start_with=None, do_verify=False, max_files=200
):
    def visitor(files, dirname, names):
        files.extend(
            [os.path.normpath(os.path.join(dirname, n))
             for n in names
             for pat in patterns
             if fnmatch(n, pat)])
            [
                os.path.normpath(os.path.join(dirname, n))
                for n in names
                for pat in patterns
                if fnmatch(n, pat)
            ]
        )

    files = []
    cwd = os.getcwd()
@@ -93,10 +109,13 @@ def do_tests(src_dir, patterns, target_dir, start_with=None,
    if PYTHON3:
        for root, dirname, names in os.walk(os.curdir):
            files.extend(
                [os.path.normpath(os.path.join(root, n))
                 for n in names
                 for pat in patterns
                 if fnmatch(n, pat)])
                [
                    os.path.normpath(os.path.join(root, n))
                    for n in names
                    for pat in patterns
                    if fnmatch(n, pat)
                ]
            )
            pass
        pass
    else:
@@ -108,26 +127,29 @@ def do_tests(src_dir, patterns, target_dir, start_with=None,
        try:
            start_with = files.index(start_with)
            files = files[start_with:]
            print('>>> starting with file', files[0])
            print(">>> starting with file", files[0])
        except ValueError:
            pass

    if len(files) > max_files:
        files = [file for file in files if not 'site-packages' in file]
        files = [file for file in files if not 'test' in file]
        files = [file for file in files if not "site-packages" in file]
        files = [file for file in files if not "test" in file]
        if len(files) > max_files:
            # print("Number of files %d - truncating to last 200" % len(files))
            print("Number of files %d - truncating to first %s" %
                  (len(files), max_files))
            print(
                "Number of files %d - truncating to first %s" % (len(files), max_files)
            )
            files = files[:max_files]

    print(time.ctime())
    (tot_files, okay_files, failed_files,
     verify_failed_files) = main.main(src_dir, target_dir, files, [], do_verify=do_verify)
    (tot_files, okay_files, failed_files, verify_failed_files) = main.main(
        src_dir, target_dir, files, [], do_verify=do_verify
    )
    print(time.ctime())
    return verify_failed_files + failed_files

if __name__ == '__main__':

if __name__ == "__main__":
    import getopt, sys

    do_coverage = do_verify = False
@@ -136,38 +158,46 @@ if __name__ == '__main__':

    test_options_keys = list(test_options.keys())
    test_options_keys.sort()
    opts, args = getopt.getopt(sys.argv[1:], '',
                               ['start-with=', 'verify', 'verify-run', 'syntax-verify',
                                'max=', 'coverage', 'all', ] \
                               + test_options_keys )
    vers = ''
    opts, args = getopt.getopt(
        sys.argv[1:],
        "",
        [
            "start-with=",
            "verify",
            "verify-run",
            "syntax-verify",
            "max=",
            "coverage",
            "all",
        ]
        + test_options_keys,
    )
    vers = ""

    for opt, val in opts:
        if opt == '--verify':
            do_verify = 'strong'
        elif opt == '--syntax-verify':
            do_verify = 'weak'
        elif opt == '--verify-run':
            do_verify = 'verify-run'
        elif opt == '--coverage':
        if opt == "--verify":
            do_verify = "strong"
        elif opt == "--syntax-verify":
            do_verify = "weak"
        elif opt == "--verify-run":
            do_verify = "verify-run"
        elif opt == "--coverage":
            do_coverage = True
        elif opt == '--start-with':
        elif opt == "--start-with":
            start_with = val
        elif opt[2:] in test_options_keys:
            triple = test_options[opt[2:]]
            vers = triple[-1]
            test_dirs.append(triple)
        elif opt == '--max':
            test_options['max='] = int(val)
        elif opt == '--all':
            vers = 'all'
        elif opt == "--max":
            test_options["max="] = int(val)
        elif opt == "--all":
            vers = "all"
            for val in test_options_keys:
                test_dirs.append(test_options[val])

    if do_coverage:
        os.environ['SPARK_PARSER_COVERAGE'] = (
            '/tmp/spark-grammar-%s.cover' % vers
        )
        os.environ["SPARK_PARSER_COVERAGE"] = "/tmp/spark-grammar-%s.cover" % vers

    failed = 0
    for src_dir, pattern, target_dir in test_dirs:
@@ -175,8 +205,14 @@ if __name__ == '__main__':
        target_dir = os.path.join(target_base, target_dir)
        if os.path.exists(target_dir):
            shutil.rmtree(target_dir, ignore_errors=1)
        failed += do_tests(src_dir, pattern, target_dir, start_with,
                           do_verify, test_options['max='])
        failed += do_tests(
            src_dir,
            pattern,
            target_dir,
            start_with,
            do_verify,
            test_options["max="],
        )
    else:
        print("### Path %s doesn't exist; skipping" % src_dir)
        pass
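Hedged invocation sketches for this driver, using option names from the getopt list above (a pyenv install under ~/.pyenv/versions is assumed):

    $ python test_pyenvlib.py --native --verify --max=200
    $ python test_pyenvlib.py --pypy3.6-7.2.0 --syntax-verify
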
@@ -35,69 +35,93 @@ from uncompyle6 import PYTHON_VERSION
from uncompyle6.main import main
from fnmatch import fnmatch


def get_srcdir():
    filename = os.path.normcase(os.path.dirname(__file__))
    return os.path.realpath(filename)


src_dir = get_srcdir()


#----- configure this for your needs
# ----- configure this for your needs

lib_prefix = '/usr/lib'
#lib_prefix = [src_dir, '/usr/lib/', '/usr/local/lib/']
lib_prefix = "/usr/lib"
# lib_prefix = [src_dir, '/usr/lib/', '/usr/local/lib/']

target_base = tempfile.mkdtemp(prefix='py-dis-')
target_base = tempfile.mkdtemp(prefix="py-dis-")

PY = ('*.py', )
PYC = ('*.pyc', )
PYO = ('*.pyo', )
PYOC = ('*.pyc', '*.pyo')
PY = ("*.py",)
PYC = ("*.pyc",)
PYO = ("*.pyo",)
PYOC = ("*.pyc", "*.pyo")

test_options = {
    # name: (src_basedir, pattern, output_base_suffix, python_version)
    'test':
        ('test', PYC, 'test'),

    'ok-2.6': (os.path.join(src_dir, 'ok_lib2.6'),
               PYOC, 'ok-2.6', 2.6),

    'ok-2.7': (os.path.join(src_dir, 'ok_lib2.7'),
               PYOC, 'ok-2.7', 2.7),

    'ok-3.2': (os.path.join(src_dir, 'ok_lib3.2'),
               PYOC, 'ok-3.2', 3.2),

    'base-2.7': (os.path.join(src_dir, 'base_tests', 'python2.7'),
                 PYOC, 'base_2.7', 2.7),
    "test": ("test", PYC, "test"),
    "ok-2.6": (os.path.join(src_dir, "ok_lib2.6"), PYOC, "ok-2.6", 2.6),
    "ok-2.7": (os.path.join(src_dir, "ok_lib2.7"), PYOC, "ok-2.7", 2.7),
    "ok-3.2": (os.path.join(src_dir, "ok_lib3.2"), PYOC, "ok-3.2", 3.2),
    "base-2.7": (
        os.path.join(src_dir, "base_tests", "python2.7"),
        PYOC,
        "base_2.7",
        2.7,
    ),
}

for vers in (2.7, 3.4, 3.5, 3.6):
for vers in (2.7, 3.4, 3.5, 3.6):
    pythonlib = "ok_lib%s" % vers
    key = "ok-%s" % vers
    test_options[key] = (os.path.join(src_dir, pythonlib), PYOC, key, vers)
    pass

for vers in (1.3, 1.4, 1.5,
             2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
             3.0, 3.1, 3.2, 3.3,
             3.4, 3.5, 3.6, 3.7, 3.8, 'pypy3.2', 'pypy2.7', 'pypy3.6'):
for vers in (
    1.0,
    1.1,
    1.2,
    1.3,
    1.4,
    1.5,
    1.6,
    2.1,
    2.2,
    2.3,
    2.4,
    2.5,
    2.6,
    2.7,
    3.0,
    3.1,
    3.2,
    3.3,
    3.4,
    3.5,
    3.6,
    3.7,
    3.8,
    "pypy3.2",
    "pypy2.7",
    "pypy3.6",
):
    bytecode = "bytecode_%s" % vers
    key = "bytecode-%s" % vers
    test_options[key] = (bytecode, PYC, bytecode, vers)
    test_options[key] = (bytecode, PYC, bytecode, vers)
    bytecode = "bytecode_%s_run" % vers
    key = "bytecode-%s-run" % vers
    test_options[key] = (bytecode, PYC, bytecode, vers)
    test_options[key] = (bytecode, PYC, bytecode, vers)
    key = "%s" % vers
    pythonlib = "python%s" % vers
    if isinstance(vers, float) and vers >= 3.0:
        pythonlib = os.path.join(pythonlib, '__pycache__')
    test_options[key] = (os.path.join(lib_prefix, pythonlib), PYOC, pythonlib, vers)
        pythonlib = os.path.join(pythonlib, "__pycache__")
    test_options[key] = (os.path.join(lib_prefix, pythonlib), PYOC, pythonlib, vers)

# -----

#-----

def help():
    print("""Usage-Examples:
    print(
        """Usage-Examples:

  # compile, decompyle and verify short tests for Python 2.7:
  test_pythonlib.py --bytecode-2.7 --verify --compile
@@ -107,18 +131,21 @@ def help():

  # decompile and verify known good python 2.7
  test_pythonlib.py --ok-2.7 --verify
""")
"""
    )
    sys.exit(1)


def do_tests(src_dir, obj_patterns, target_dir, opts):

    def file_matches(files, root, basenames, patterns):
        files.extend(
            [os.path.normpath(os.path.join(root, n))
             for n in basenames
             for pat in patterns
             if fnmatch(n, pat)])
            [
                os.path.normpath(os.path.join(root, n))
                for n in basenames
                for pat in patterns
                if fnmatch(n, pat)
            ]
        )

    files = []
    # Change directories so use relative rather than
@@ -127,11 +154,14 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
    cwd = os.getcwd()
    os.chdir(src_dir)

    if opts['do_compile']:
        compiled_version = opts['compiled_version']
    if opts["do_compile"]:
        compiled_version = opts["compiled_version"]
        if compiled_version and PYTHON_VERSION != compiled_version:
            print("Not compiling: desired Python version is %s but we are running %s" %
                  (compiled_version, PYTHON_VERSION), file=sys.stderr)
            print(
                "Not compiling: desired Python version is %s but we are running %s"
                % (compiled_version, PYTHON_VERSION),
                file=sys.stderr,
            )
        else:
            for root, dirs, basenames in os.walk(src_dir):
                file_matches(files, root, basenames, PY)
@@ -143,34 +173,36 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
            pass
        pass

    for root, dirs, basenames in os.walk('.'):
    for root, dirs, basenames in os.walk("."):
        # Turn root into a relative path
        dirname = root[2:]  # 2 = len('.') + 1
        file_matches(files, dirname, basenames, obj_patterns)

    if not files:
        print("Didn't come up with any files to test! Try with --compile?",
              file=sys.stderr)
        print(
            "Didn't come up with any files to test! Try with --compile?",
            file=sys.stderr,
        )
        exit(1)

    os.chdir(cwd)
    files.sort()

    if opts['start_with']:
    if opts["start_with"]:
        try:
            start_with = files.index(opts['start_with'])
            start_with = files.index(opts["start_with"])
            files = files[start_with:]
            print('>>> starting with file', files[0])
            print(">>> starting with file", files[0])
        except ValueError:
            pass

    print(time.ctime())
    print('Source directory: ', src_dir)
    print('Output directory: ', target_dir)
    print("Source directory: ", src_dir)
    print("Output directory: ", target_dir)
    try:
        _, _, failed_files, failed_verify = \
            main(src_dir, target_dir, files, [],
                 do_verify=opts['do_verify'])
        _, _, failed_files, failed_verify = main(
            src_dir, target_dir, files, [], do_verify=opts["do_verify"]
        )
        if failed_files != 0:
            sys.exit(2)
        elif failed_verify != 0:
@@ -179,71 +211,81 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
    except (KeyboardInterrupt, OSError):
        print()
        sys.exit(1)
    if test_opts['rmtree']:
    if test_opts["rmtree"]:
        parent_dir = os.path.dirname(target_dir)
        print("Everything good, removing %s" % parent_dir)
        shutil.rmtree(parent_dir)

if __name__ == '__main__':

if __name__ == "__main__":
    test_dirs = []
    checked_dirs = []
    start_with = None

    test_options_keys = list(test_options.keys())
    test_options_keys.sort()
    opts, args = getopt.getopt(sys.argv[1:], '',
                               ['start-with=', 'verify', 'verify-run',
                                'syntax-verify', 'all',
                                'compile', 'coverage',
                                'no-rm'] \
                               + test_options_keys )
    if not opts: help()
    opts, args = getopt.getopt(
        sys.argv[1:],
        "",
        [
            "start-with=",
            "verify",
            "verify-run",
            "syntax-verify",
            "all",
            "compile",
            "coverage",
            "no-rm",
        ]
        + test_options_keys,
    )
    if not opts:
        help()

    test_opts = {
        'do_compile': False,
        'do_verify': False,
        'start_with': None,
        'rmtree' : True,
        'coverage' : False
    }
        "do_compile": False,
        "do_verify": False,
        "start_with": None,
        "rmtree": True,
        "coverage": False,
    }

    for opt, val in opts:
        if opt == '--verify':
            test_opts['do_verify'] = 'strong'
        elif opt == '--syntax-verify':
            test_opts['do_verify'] = 'weak'
        elif opt == '--verify-run':
            test_opts['do_verify'] = 'verify-run'
        elif opt == '--compile':
            test_opts['do_compile'] = True
        elif opt == '--start-with':
            test_opts['start_with'] = val
        elif opt == '--no-rm':
            test_opts['rmtree'] = False
        if opt == "--verify":
            test_opts["do_verify"] = "strong"
        elif opt == "--syntax-verify":
            test_opts["do_verify"] = "weak"
        elif opt == "--verify-run":
            test_opts["do_verify"] = "verify-run"
        elif opt == "--compile":
            test_opts["do_compile"] = True
        elif opt == "--start-with":
            test_opts["start_with"] = val
        elif opt == "--no-rm":
            test_opts["rmtree"] = False
        elif opt[2:] in test_options_keys:
            test_dirs.append(test_options[opt[2:]])
        elif opt == '--all':
        elif opt == "--all":
            for val in test_options_keys:
                test_dirs.append(test_options[val])
        elif opt == '--coverage':
            test_opts['coverage'] = True
        elif opt == "--coverage":
            test_opts["coverage"] = True
        else:
            help()
            pass
        pass

    if test_opts['coverage']:
        os.environ['SPARK_PARSER_COVERAGE'] = (
            '/tmp/spark-grammar-python-lib%s.cover' % test_dirs[0][-1]
        )
    if test_opts["coverage"]:
        os.environ["SPARK_PARSER_COVERAGE"] = (
            "/tmp/spark-grammar-python-lib%s.cover" % test_dirs[0][-1]
        )

    last_compile_version = None
    for src_dir, pattern, target_dir, compiled_version in test_dirs:
        if os.path.isdir(src_dir):
            checked_dirs.append([src_dir, pattern, target_dir])
        else:
            print("Can't find directory %s. Skipping" % src_dir,
                  file=sys.stderr)
            print("Can't find directory %s. Skipping" % src_dir, file=sys.stderr)
            continue
        last_compile_version = compiled_version
        pass
@@ -252,7 +294,7 @@ if __name__ == '__main__':
        print("No directories found to check", file=sys.stderr)
        sys.exit(1)

    test_opts['compiled_version'] = last_compile_version
    test_opts["compiled_version"] = last_compile_version

    for src_dir, pattern, target_dir in checked_dirs:
        target_dir = os.path.join(target_base, target_dir)
@@ -46,10 +46,12 @@ Options:
  --help                show this message

Debugging Options:
  --asm     | -a        include byte-code (disables --verify)
  --grammar | -g        show matching grammar
  --tree    | -t        include syntax tree (disables --verify)
  --tree++              add template rules to --tree when possible
  --asm     | -a        include byte-code (disables --verify)
  --grammar | -g        show matching grammar
  --tree={before|after}
  -t {before|after}     include syntax before (or after) tree transformation
                        (disables --verify)
  --tree++ | -T         add template rules to --tree=before when possible

Extensions of generated files:
  '.pyc_dis' '.pyo_dis'   successfully decompiled (and verified if --verify)
@@ -89,7 +91,7 @@ def main_bin():
    try:
        opts, pyc_paths = getopt.getopt(sys.argv[1:], 'hac:gtTdrVo:p:',
                                        'help asm compile= grammar linemaps recurse '
                                        'timestamp tree tree+ '
                                        'timestamp tree= tree+ '
                                        'fragments verify verify-run version '
                                        'syntax-verify '
                                        'showgrammar encoding='.split(' '))
@@ -119,10 +121,19 @@ def main_bin():
            options['showasm'] = 'after'
            options['do_verify'] = None
        elif opt in ('--tree', '-t'):
            options['showast'] = True
            if 'showast' not in options:
                options['showast'] = {}
            if val == 'before':
                options['showast'][val] = True
            elif val == 'after':
                options['showast'][val] = True
            else:
                options['showast']['before'] = True
            options['do_verify'] = None
        elif opt in ('--tree+', '-T'):
            options['showast'] = 'Full'
            if 'showast' not in options:
                options['showast'] = {}
            options['showast']['Full'] = True
            options['do_verify'] = None
        elif opt in ('--grammar', '-g'):
            options['showgrammar'] = True
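Hedged command-line sketches of the reworked --tree handling (assumes the installed uncompyle6 console script):

    $ uncompyle6 --tree=before foo.pyc   # parse tree before transformation
    $ uncompyle6 --tree=after foo.pyc    # parse tree after transformation
    $ uncompyle6 -T foo.pyc              # add template rules when possible
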
@@ -34,12 +34,11 @@ from __future__ import print_function
import sys
from collections import deque

import uncompyle6

from xdis.code import iscode
from xdis.load import check_object_path, load_module
from uncompyle6.scanner import get_scanner


def disco(version, co, out=None, is_pypy=False):
    """
    disassembles and deparses a given code block 'co'
@@ -49,10 +48,9 @@ def disco(version, co, out=None, is_pypy=False):

    # store final output stream for case of error
    real_out = out or sys.stdout
    print('# Python %s' % version, file=real_out)
    print("# Python %s" % version, file=real_out)
    if co.co_filename:
        print('# Embedded file name: %s' % co.co_filename,
              file=real_out)
        print("# Embedded file name: %s" % co.co_filename, file=real_out)

    scanner = get_scanner(version, is_pypy=is_pypy)

@@ -63,10 +61,12 @@ def disco(version, co, out=None, is_pypy=False):
def disco_loop(disasm, queue, real_out):
    while len(queue) > 0:
        co = queue.popleft()
        if co.co_name != '<module>':
            print('\n# %s line %d of %s' %
                  (co.co_name, co.co_firstlineno, co.co_filename),
                  file=real_out)
        if co.co_name != "<module>":
            print(
                "\n# %s line %d of %s"
                % (co.co_name, co.co_firstlineno, co.co_filename),
                file=real_out,
            )
        tokens, customize = disasm(co)
        for t in tokens:
            if iscode(t.pattr):
@@ -77,6 +77,7 @@ def disco_loop(disasm, queue, real_out):
            pass
        pass


# def disassemble_fp(fp, outstream=None):
#     """
#     disassemble Python byte-code from an open file
@@ -90,6 +91,7 @@ def disco_loop(disasm, queue, real_out):
#             disco(version, co, outstream, is_pypy=is_pypy)
#         co = None


def disassemble_file(filename, outstream=None):
    """
    disassemble Python byte-code file (.pyc)
@@ -98,8 +100,7 @@ def disassemble_file(filename, outstream=None):
    try to find the corresponding compiled object.
    """
    filename = check_object_path(filename)
    (version, timestamp, magic_int, co, is_pypy,
     source_size) = load_module(filename)
    (version, timestamp, magic_int, co, is_pypy, source_size) = load_module(filename)
    if type(co) == list:
        for con in co:
            disco(version, con, outstream)
@@ -107,6 +108,7 @@ def disassemble_file(filename, outstream=None):
        disco(version, co, outstream, is_pypy=is_pypy)
    co = None


def _test():
    """Simple test program to disassemble a file."""
    argc = len(sys.argv)
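A minimal sketch of driving disassemble_file() programmatically (assuming this module is importable as uncompyle6.disas; adjust the import to wherever it lives):

    import sys
    from uncompyle6.disas import disassemble_file

    # Prints "# Python <version>" and the token-level disassembly to stdout.
    disassemble_file("foo.pyc", sys.stdout)
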
@@ -45,10 +45,21 @@ def _get_outstream(outfile):
    return open(outfile, mode='w', encoding='utf-8')

def decompile(
        bytecode_version, co, out=None, showasm=None, showast=False,
        timestamp=None, showgrammar=False, source_encoding=None, code_objects={},
        source_size=None, is_pypy=None, magic_int=None,
        mapstream=None, do_fragments=False):
    bytecode_version,
    co,
    out=None,
    showasm=None,
    showast={},
    timestamp=None,
    showgrammar=False,
    source_encoding=None,
    code_objects={},
    source_size=None,
    is_pypy=None,
    magic_int=None,
    mapstream=None,
    do_fragments=False,
):
    """
    ingests and deparses a given code block 'co'

@@ -294,7 +305,7 @@ def main(in_base, out_base, compiled_files, source_files, outfile=None,
            #     failed_files += 1
            #     if current_outfile:
            #         outstream.close()
            #         os.rename(current_outfile, current_outfile + '_failed')
            #         os.rename(current_outfile, current_outfile + "_failed")
            #     else:
            #         sys.stderr.write("\n# %s" % sys.exc_info()[1])
            #         sys.stderr.write("\n# Can't uncompile %s\n" % infile)
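A hedged sketch of calling decompile() with the signature shown above (assumes it stays importable from uncompyle6.main; the version-string slice is just an approximation for illustration):

    import sys
    from uncompyle6.main import decompile

    # Compile a toy module and deparse it back to source on stdout.
    co = compile("x = 1\nprint(x)\n", "<example>", "exec")
    decompile(float(sys.version[:3]), co, out=sys.stdout)
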
@@ -628,12 +628,30 @@ def get_python_parser(

    if version < 3.0:
        if version < 2.2:
            if version == 1.0:
                import uncompyle6.parsers.parse10 as parse10
                if compile_mode == 'exec':
                    p = parse10.Python10Parser(debug_parser)
                else:
                    p = parse10.Python10ParserSingle(debug_parser)
            elif version == 1.1:
                import uncompyle6.parsers.parse11 as parse11
                if compile_mode == 'exec':
                    p = parse11.Python11Parser(debug_parser)
                else:
                    p = parse11.Python11ParserSingle(debug_parser)
            elif version == 1.2:
                import uncompyle6.parsers.parse12 as parse12
                if compile_mode == 'exec':
                    p = parse12.Python12Parser(debug_parser)
                else:
                    p = parse12.Python12ParserSingle(debug_parser)
            elif version == 1.3:
                import uncompyle6.parsers.parse13 as parse13
                if compile_mode == 'exec':
                    p = parse13.Python14Parser(debug_parser)
                    p = parse13.Python13Parser(debug_parser)
                else:
                    p = parse13.Python14ParserSingle(debug_parser)
                    p = parse13.Python13ParserSingle(debug_parser)
            elif version == 1.4:
                import uncompyle6.parsers.parse14 as parse14
                if compile_mode == 'exec':
@@ -646,6 +664,12 @@ def get_python_parser(
                    p = parse15.Python15Parser(debug_parser)
                else:
                    p = parse15.Python15ParserSingle(debug_parser)
            elif version == 1.6:
                import uncompyle6.parsers.parse16 as parse16
                if compile_mode == 'exec':
                    p = parse16.Python16Parser(debug_parser)
                else:
                    p = parse16.Python16ParserSingle(debug_parser)
            elif version == 2.1:
                import uncompyle6.parsers.parse21 as parse21
                if compile_mode == 'exec':
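A hedged sketch of reaching the new early-Python branches above (assumes get_python_parser keeps this call shape):

    from uncompyle6.parser import get_python_parser

    p = get_python_parser(1.6, compile_mode="exec")
    print(type(p).__name__)  # expected: Python16Parser
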
uncompyle6/parsers/parse10.py (new file)
@@ -0,0 +1,25 @@
# Copyright (c) 2019 Rocky Bernstein

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle
from uncompyle6.parsers.parse11 import Python11Parser


class Python10Parser(Python11Parser):
    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
        super(Python10Parser, self).__init__(debug_parser)
        self.customized = {}


class Python10ParserSingle(Python10Parser, PythonParserSingle):
    pass


if __name__ == "__main__":
    # Check grammar
    p = Python10Parser()
    p.check_grammar()
    p.dump_grammar()

# local variables:
# tab-width: 4
uncompyle6/parsers/parse11.py (new file)
@@ -0,0 +1,25 @@
# Copyright (c) 2019 Rocky Bernstein

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle
from uncompyle6.parsers.parse12 import Python12Parser


class Python11Parser(Python12Parser):
    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
        super(Python11Parser, self).__init__(debug_parser)
        self.customized = {}


class Python11ParserSingle(Python11Parser, PythonParserSingle):
    pass


if __name__ == "__main__":
    # Check grammar
    p = Python11Parser()
    p.check_grammar()
    p.dump_grammar()

# local variables:
# tab-width: 4
uncompyle6/parsers/parse12.py (new file)
@@ -0,0 +1,25 @@
# Copyright (c) 2019 Rocky Bernstein

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle
from uncompyle6.parsers.parse13 import Python13Parser


class Python12Parser(Python13Parser):
    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
        super(Python12Parser, self).__init__(debug_parser)
        self.customized = {}


class Python12ParserSingle(Python12Parser, PythonParserSingle):
    pass


if __name__ == "__main__":
    # Check grammar
    p = Python12Parser()
    p.check_grammar()
    p.dump_grammar()

# local variables:
# tab-width: 4
uncompyle6/parsers/parse16.py (new file)
@@ -0,0 +1,46 @@
# Copyright (c) 2019 Rocky Bernstein

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.parser import PythonParserSingle, nop_func
from uncompyle6.parsers.parse21 import Python21Parser

class Python16Parser(Python21Parser):

    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
        super(Python16Parser, self).__init__(debug_parser)
        self.customized = {}

    def p_import16(self, args):
        """
        import ::= filler IMPORT_NAME STORE_FAST
        import ::= filler IMPORT_NAME STORE_NAME

        import_from ::= filler IMPORT_NAME importlist
        import_from ::= filler filler IMPORT_NAME importlist POP_TOP

        importlist ::= importlist IMPORT_FROM
        importlist ::= IMPORT_FROM
        """

    def customize_grammar_rules(self, tokens, customize):
        super(Python16Parser, self).customize_grammar_rules(tokens, customize)
        for i, token in enumerate(tokens):
            opname = token.kind
            opname_base = opname[:opname.rfind('_')]

            if opname_base == 'UNPACK_LIST':
                self.addRule("store ::= unpack_list", nop_func)



class Python16ParserSingle(Python16Parser, PythonParserSingle):
    pass

if __name__ == '__main__':
    # Check grammar
    p = Python16Parser()
    p.check_grammar()
    p.dump_grammar()

# local variables:
# tab-width: 4
@@ -1,4 +1,4 @@
#  Copyright (c) 2015-2018 Rocky Bernstein
#  Copyright (c) 2015-2019 Rocky Bernstein
#  Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
#  Copyright (c) 1999 John Aycock
@@ -31,10 +31,10 @@ from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
from uncompyle6.parsers.treenode import SyntaxTree
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG

class Python2Parser(PythonParser):


class Python2Parser(PythonParser):
    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
        super(Python2Parser, self).__init__(SyntaxTree, 'stmts', debug=debug_parser)
        super(Python2Parser, self).__init__(SyntaxTree, "stmts", debug=debug_parser)
        self.new_rules = set()

    def p_print2(self, args):
@@ -52,7 +52,7 @@ class Python2Parser(PythonParser):
        """

    def p_print_to(self, args):
        '''
        """
        stmt ::= print_to
        stmt ::= print_to_nl
        stmt ::= print_nl_to
@@ -62,10 +62,10 @@ class Python2Parser(PythonParser):
        print_to_items ::= print_to_items print_to_item
        print_to_items ::= print_to_item
        print_to_item ::= DUP_TOP expr ROT_TWO PRINT_ITEM_TO
        '''
        """

    def p_grammar(self, args):
        '''
        """
        sstmt ::= stmt
        sstmt ::= return RETURN_LAST

@@ -176,12 +176,12 @@ class Python2Parser(PythonParser):
        jmp_abs ::= JUMP_ABSOLUTE
        jmp_abs ::= JUMP_BACK
        jmp_abs ::= CONTINUE
        '''
        """

    def p_generator_exp2(self, args):
        '''
        """
        generator_exp ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
        '''
        """

    def p_expr2(self, args):
        """
@@ -252,25 +252,41 @@ class Python2Parser(PythonParser):
        this.
        """

        if 'PyPy' in customize:
        if "PyPy" in customize:
            # PyPy-specific customizations
            self.addRule("""
            self.addRule(
                """
                stmt ::= assign3_pypy
                stmt ::= assign2_pypy
                assign3_pypy ::= expr expr expr store store store
                assign2_pypy ::= expr expr store store
                list_comp ::= expr BUILD_LIST_FROM_ARG for_iter store list_iter
                              JUMP_BACK
                """, nop_func)
                """,
                nop_func,
            )

        # For a rough break out on the first word. This may
        # include instructions that don't need customization,
        # but we'll do a finer check after the rough breakout.
        customize_instruction_basenames = frozenset(
            ('BUILD', 'CALL', 'CONTINUE', 'DELETE',
             'DUP', 'EXEC', 'GET', 'JUMP',
             'LOAD', 'LOOKUP', 'MAKE', 'SETUP',
             'RAISE', 'UNPACK'))
            (
                "BUILD",
                "CALL",
                "CONTINUE",
                "DELETE",
                "DUP",
                "EXEC",
                "GET",
                "JUMP",
                "LOAD",
                "LOOKUP",
                "MAKE",
                "SETUP",
                "RAISE",
                "UNPACK",
            )
        )

        # Opcode names in the custom_seen_ops set have rules that get added
        # unconditionally and the rules are constant. So they need to be done
@@ -284,139 +300,191 @@ class Python2Parser(PythonParser):

            # Do a quick breakout before testing potentially
            # each of the dozen or so instructions in the if/elif below.
            if (opname[:opname.find('_')] not in customize_instruction_basenames
                    or opname in custom_seen_ops):
            if (
                opname[: opname.find("_")] not in customize_instruction_basenames
                or opname in custom_seen_ops
            ):
                continue

            opname_base = opname[:opname.rfind('_')]
            opname_base = opname[: opname.rfind("_")]

            # The order of opname listed is roughly sorted below
            if opname_base in ('BUILD_LIST', 'BUILD_SET', 'BUILD_TUPLE'):
            if opname_base in ("BUILD_LIST", "BUILD_SET", "BUILD_TUPLE"):
                # We do this complicated test to speed up parsing of
                # pathologically long literals, especially those over 1024.
                build_count = token.attr
                thousands = (build_count//1024)
                thirty32s = ((build_count//32) % 32)
                thousands = build_count // 1024
                thirty32s = (build_count // 32) % 32
                if thirty32s > 0:
                    rule = "expr32 ::=%s" % (' expr' * 32)
                    rule = "expr32 ::=%s" % (" expr" * 32)
                    self.add_unique_rule(rule, opname_base, build_count, customize)
                if thousands > 0:
                    self.add_unique_rule("expr1024 ::=%s" % (' expr32' * 32),
                                         opname_base, build_count, customize)
                collection = opname_base[opname_base.find('_')+1:].lower()
                rule = (('%s ::= ' % collection) + 'expr1024 '*thousands +
                        'expr32 '*thirty32s + 'expr '*(build_count % 32) + opname)
                self.add_unique_rules([
                    "expr ::= %s" % collection,
                    rule], customize)
                    self.add_unique_rule(
                        "expr1024 ::=%s" % (" expr32" * 32),
                        opname_base,
                        build_count,
                        customize,
                    )
                collection = opname_base[opname_base.find("_") + 1 :].lower()
                rule = (
                    ("%s ::= " % collection)
                    + "expr1024 " * thousands
                    + "expr32 " * thirty32s
                    + "expr " * (build_count % 32)
                    + opname
                )
                self.add_unique_rules(["expr ::= %s" % collection, rule], customize)
                continue
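Worked arithmetic for the literal breakout above, as plain Python: for a hypothetical BUILD_LIST_1234 the grammar needs 1 expr1024, 6 expr32 and 18 bare expr, instead of 1234 bare exprs:

    build_count = 1234
    thousands = build_count // 1024        # 1
    thirty32s = (build_count // 32) % 32   # 6
    remainder = build_count % 32           # 18
    assert 1024 * thousands + 32 * thirty32s + remainder == build_count
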
elif opname_base == 'BUILD_MAP':
|
||||
if opname == 'BUILD_MAP_n':
|
||||
elif opname_base == "BUILD_MAP":
|
||||
if opname == "BUILD_MAP_n":
|
||||
# PyPy sometimes has no count. Sigh.
|
||||
self.add_unique_rules([
|
||||
'kvlist_n ::= kvlist_n kv3',
|
||||
'kvlist_n ::=',
|
||||
'dict ::= BUILD_MAP_n kvlist_n',
|
||||
], customize)
|
||||
self.add_unique_rules(
|
||||
[
|
||||
"kvlist_n ::= kvlist_n kv3",
|
||||
"kvlist_n ::=",
|
||||
"dict ::= BUILD_MAP_n kvlist_n",
|
||||
],
|
||||
customize,
|
||||
)
|
||||
if self.version >= 2.7:
|
||||
self.add_unique_rule(
|
||||
'dict_comp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store '
|
||||
'comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST',
|
||||
'dict_comp_func', 0, customize)
|
||||
"dict_comp_func ::= BUILD_MAP_n LOAD_FAST FOR_ITER store "
|
||||
"comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST",
|
||||
"dict_comp_func",
|
||||
0,
|
||||
customize,
|
||||
)
|
||||
|
||||
else:
|
||||
kvlist_n = ' kv3' * token.attr
|
||||
kvlist_n = " kv3" * token.attr
|
||||
rule = "dict ::= %s%s" % (opname, kvlist_n)
|
||||
self.addRule(rule, nop_func)
|
||||
continue
|
||||
elif opname_base == 'BUILD_SLICE':
|
||||
slice_num = token.attr
|
||||
elif opname_base == "BUILD_SLICE":
|
||||
slice_num = token.attr
|
||||
if slice_num == 2:
|
||||
self.add_unique_rules([
|
||||
'expr ::= build_slice2',
|
||||
'build_slice2 ::= expr expr BUILD_SLICE_2'
|
||||
], customize)
|
||||
self.add_unique_rules(
|
||||
[
|
||||
"expr ::= build_slice2",
|
||||
"build_slice2 ::= expr expr BUILD_SLICE_2",
|
||||
],
|
||||
customize,
|
||||
)
|
||||
else:
|
||||
assert slice_num == 3, ("BUILD_SLICE value must be 2 or 3; is %s" %
|
||||
slice_num)
|
||||
self.add_unique_rules([
|
||||
'expr ::= build_slice3',
|
||||
'build_slice3 ::= expr expr expr BUILD_SLICE_3',
|
||||
], customize)
|
||||
assert slice_num == 3, (
|
||||
"BUILD_SLICE value must be 2 or 3; is %s" % slice_num
|
||||
)
|
||||
self.add_unique_rules(
|
||||
[
|
||||
"expr ::= build_slice3",
|
||||
"build_slice3 ::= expr expr expr BUILD_SLICE_3",
|
||||
],
|
||||
customize,
|
||||
)
|
||||
continue
|
||||
elif opname_base in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
|
||||
'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
|
||||
elif opname_base in (
|
||||
"CALL_FUNCTION",
|
||||
"CALL_FUNCTION_VAR",
|
||||
"CALL_FUNCTION_VAR_KW",
|
||||
"CALL_FUNCTION_KW",
|
||||
):
|
||||
|
||||
args_pos, args_kw = self.get_pos_kw(token)
|
||||
|
||||
# number of apply equiv arguments:
|
||||
nak = ( len(opname_base)-len('CALL_FUNCTION') ) // 3
|
||||
rule = 'call ::= expr ' + 'expr '*args_pos + 'kwarg '*args_kw \
|
||||
+ 'expr ' * nak + opname
|
||||
elif opname_base == 'CALL_METHOD':
|
||||
nak = (len(opname_base) - len("CALL_FUNCTION")) // 3
|
||||
rule = (
|
||||
"call ::= expr "
|
||||
+ "expr " * args_pos
|
||||
+ "kwarg " * args_kw
|
||||
+ "expr " * nak
|
||||
+ opname
|
||||
)
|
||||
elif opname_base == "CALL_METHOD":
|
||||
# PyPy only - DRY with parse3
|
||||
|
||||
args_pos, args_kw = self.get_pos_kw(token)
|
||||
|
||||
# number of apply equiv arguments:
|
||||
nak = ( len(opname_base)-len('CALL_METHOD') ) // 3
|
||||
rule = 'call ::= expr ' + 'expr '*args_pos + 'kwarg '*args_kw \
|
||||
+ 'expr ' * nak + opname
|
||||
elif opname == 'CONTINUE_LOOP':
|
||||
self.addRule('continue ::= CONTINUE_LOOP', nop_func)
|
||||
nak = (len(opname_base) - len("CALL_METHOD")) // 3
|
||||
rule = (
|
||||
"call ::= expr "
|
||||
+ "expr " * args_pos
|
||||
+ "kwarg " * args_kw
|
||||
+ "expr " * nak
|
||||
+ opname
|
||||
)
|
||||
elif opname == "CONTINUE_LOOP":
|
||||
self.addRule("continue ::= CONTINUE_LOOP", nop_func)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'DELETE_ATTR':
|
||||
self.addRule('del_stmt ::= expr DELETE_ATTR', nop_func)
|
||||
elif opname == "DELETE_ATTR":
|
||||
self.addRule("del_stmt ::= expr DELETE_ATTR", nop_func)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname.startswith('DELETE_SLICE'):
|
||||
self.addRule("""
|
||||
elif opname.startswith("DELETE_SLICE"):
|
||||
self.addRule(
|
||||
"""
|
||||
del_expr ::= expr
|
||||
del_stmt ::= del_expr DELETE_SLICE+0
|
||||
del_stmt ::= del_expr del_expr DELETE_SLICE+1
|
||||
del_stmt ::= del_expr del_expr DELETE_SLICE+2
|
||||
del_stmt ::= del_expr del_expr del_expr DELETE_SLICE+3
|
||||
""", nop_func)
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
self.check_reduce['del_expr'] = 'AST'
|
||||
self.check_reduce["del_expr"] = "AST"
|
||||
continue
|
||||
elif opname == 'DELETE_DEREF':
|
||||
self.addRule("""
|
||||
elif opname == "DELETE_DEREF":
|
||||
self.addRule(
|
||||
"""
|
||||
stmt ::= del_deref_stmt
|
||||
del_deref_stmt ::= DELETE_DEREF
|
||||
""", nop_func)
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'DELETE_SUBSCR':
|
||||
self.addRule("""
|
||||
elif opname == "DELETE_SUBSCR":
|
||||
self.addRule(
|
||||
"""
|
||||
del_stmt ::= delete_subscript
|
||||
delete_subscript ::= expr expr DELETE_SUBSCR
|
||||
""", nop_func)
|
||||
self.check_reduce['delete_subscript'] = 'AST'
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
self.check_reduce["delete_subscript"] = "AST"
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'GET_ITER':
|
||||
self.addRule("""
|
||||
elif opname == "GET_ITER":
|
||||
self.addRule(
|
||||
"""
|
||||
expr ::= get_iter
|
||||
attribute ::= expr GET_ITER
|
||||
""", nop_func)
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname_base in ('DUP_TOPX', 'RAISE_VARARGS'):
|
||||
elif opname_base in ("DUP_TOPX", "RAISE_VARARGS"):
|
||||
# FIXME: remove these conditions if they are not needed.
|
||||
# no longer need to add a rule
|
||||
continue
|
||||
elif opname == 'EXEC_STMT':
|
||||
self.addRule("""
|
||||
elif opname == "EXEC_STMT":
|
||||
self.addRule(
|
||||
"""
|
||||
stmt ::= exec_stmt
|
||||
exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
|
||||
exec_stmt ::= expr exprlist EXEC_STMT
|
||||
exprlist ::= expr+
|
||||
""", nop_func)
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
continue
|
elif opname == 'JUMP_IF_NOT_DEBUG':
self.addRule("""
elif opname == "JUMP_IF_NOT_DEBUG":
self.addRule(
"""
jmp_true_false ::= POP_JUMP_IF_TRUE
jmp_true_false ::= POP_JUMP_IF_FALSE
stmt ::= assert_pypy
@@ -426,107 +494,152 @@ class Python2Parser(PythonParser):
assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true_false
LOAD_ASSERT expr CALL_FUNCTION_1
RAISE_VARARGS_1 COME_FROM
""", nop_func)
""",
nop_func,
)
continue
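# Context for the rules above (my understanding of the PyPy opcode, stated
# as an assumption): JUMP_IF_NOT_DEBUG jumps over the assert body when the
# interpreter runs with optimizations enabled, i.e. when __debug__ is false,
# so PyPy bytecode for "assert x, msg" begins with it rather than with the
# test sequence CPython emits.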
||||
elif opname == 'LOAD_ATTR':
|
||||
self.addRule("""
|
||||
elif opname == "LOAD_ATTR":
|
||||
self.addRule(
|
||||
"""
|
||||
expr ::= attribute
|
||||
attribute ::= expr LOAD_ATTR
|
||||
""", nop_func)
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'LOAD_LISTCOMP':
|
||||
elif opname == "LOAD_LISTCOMP":
|
||||
self.addRule("expr ::= listcomp", nop_func)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'LOAD_SETCOMP':
|
||||
self.add_unique_rules([
|
||||
"expr ::= set_comp",
|
||||
"set_comp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1"
|
||||
], customize)
|
||||
elif opname == "LOAD_SETCOMP":
|
||||
self.add_unique_rules(
|
||||
[
|
||||
"expr ::= set_comp",
|
||||
"set_comp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1",
|
||||
],
|
||||
customize,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'LOOKUP_METHOD':
|
||||
elif opname == "LOOKUP_METHOD":
|
||||
# A PyPy speciality - DRY with parse3
|
||||
self.addRule("""
|
||||
self.addRule(
|
||||
"""
|
||||
expr ::= attribute
|
||||
attribute ::= expr LOOKUP_METHOD
|
||||
""",
|
||||
nop_func)
|
||||
nop_func,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname_base == 'MAKE_FUNCTION':
|
||||
if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
|
||||
self.addRule('mklambda ::= %s LOAD_LAMBDA %s' %
|
||||
('pos_arg ' * token.attr, opname), nop_func)
|
||||
rule = 'mkfunc ::= %s LOAD_CODE %s' % ('expr ' * token.attr, opname)
|
||||
elif opname_base == 'MAKE_CLOSURE':
|
||||
elif opname_base == "MAKE_FUNCTION":
|
||||
if i > 0 and tokens[i - 1] == "LOAD_LAMBDA":
|
||||
self.addRule(
|
||||
"mklambda ::= %s LOAD_LAMBDA %s"
|
||||
% ("pos_arg " * token.attr, opname),
|
||||
nop_func,
|
||||
)
|
||||
rule = "mkfunc ::= %s LOAD_CODE %s" % ("expr " * token.attr, opname)
|
||||
elif opname_base == "MAKE_CLOSURE":
|
||||
# FIXME: use add_unique_rules to tidy this up.
|
||||
if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
|
||||
self.addRule('mklambda ::= %s load_closure LOAD_LAMBDA %s' %
|
||||
('expr ' * token.attr, opname), nop_func)
|
||||
if i > 0 and tokens[i - 1] == "LOAD_LAMBDA":
|
||||
self.addRule(
|
||||
"mklambda ::= %s load_closure LOAD_LAMBDA %s"
|
||||
% ("expr " * token.attr, opname),
|
||||
nop_func,
|
||||
)
|
||||
if i > 0:
|
||||
prev_tok = tokens[i-1]
|
||||
if prev_tok == 'LOAD_GENEXPR':
|
||||
self.add_unique_rules([
|
||||
('generator_exp ::= %s load_closure LOAD_GENEXPR %s expr'
|
||||
' GET_ITER CALL_FUNCTION_1' %
|
||||
('expr ' * token.attr, opname))], customize)
|
||||
prev_tok = tokens[i - 1]
|
||||
if prev_tok == "LOAD_GENEXPR":
|
||||
self.add_unique_rules(
|
||||
[
|
||||
(
|
||||
"generator_exp ::= %s load_closure LOAD_GENEXPR %s expr"
|
||||
" GET_ITER CALL_FUNCTION_1"
|
||||
% ("expr " * token.attr, opname)
|
||||
)
|
||||
],
|
||||
customize,
|
||||
)
|
||||
pass
|
||||
self.add_unique_rules([
|
||||
('mkfunc ::= %s load_closure LOAD_CODE %s' %
|
||||
('expr ' * token.attr, opname))], customize)
|
||||
self.add_unique_rules(
|
||||
[
|
||||
(
|
||||
"mkfunc ::= %s load_closure LOAD_CODE %s"
|
||||
% ("expr " * token.attr, opname)
|
||||
)
|
||||
],
|
||||
customize,
|
||||
)
|
||||
|
||||
if self.version >= 2.7:
|
||||
if i > 0:
|
||||
prev_tok = tokens[i-1]
|
||||
if prev_tok == 'LOAD_DICTCOMP':
|
||||
self.add_unique_rules([
|
||||
('dict_comp ::= %s load_closure LOAD_DICTCOMP %s expr'
|
||||
' GET_ITER CALL_FUNCTION_1' %
|
||||
('expr ' * token.attr, opname))], customize)
|
||||
elif prev_tok == 'LOAD_SETCOMP':
|
||||
self.add_unique_rules([
|
||||
"expr ::= set_comp",
|
||||
('set_comp ::= %s load_closure LOAD_SETCOMP %s expr'
|
||||
' GET_ITER CALL_FUNCTION_1' %
|
||||
('expr ' * token.attr, opname))
|
||||
], customize)
|
||||
prev_tok = tokens[i - 1]
|
||||
if prev_tok == "LOAD_DICTCOMP":
|
||||
self.add_unique_rules(
|
||||
[
|
||||
(
|
||||
"dict_comp ::= %s load_closure LOAD_DICTCOMP %s expr"
|
||||
" GET_ITER CALL_FUNCTION_1"
|
||||
% ("expr " * token.attr, opname)
|
||||
)
|
||||
],
|
||||
customize,
|
||||
)
|
||||
elif prev_tok == "LOAD_SETCOMP":
|
||||
self.add_unique_rules(
|
||||
[
|
||||
"expr ::= set_comp",
|
||||
(
|
||||
"set_comp ::= %s load_closure LOAD_SETCOMP %s expr"
|
||||
" GET_ITER CALL_FUNCTION_1"
|
||||
% ("expr " * token.attr, opname)
|
||||
),
|
||||
],
|
||||
customize,
|
||||
)
|
||||
pass
|
||||
pass
|
||||
continue
|
||||
elif opname == 'SETUP_EXCEPT':
|
||||
if 'PyPy' in customize:
|
||||
self.add_unique_rules([
|
||||
"stmt ::= try_except_pypy",
|
||||
"try_except_pypy ::= SETUP_EXCEPT suite_stmts_opt except_handler_pypy",
|
||||
"except_handler_pypy ::= COME_FROM except_stmts END_FINALLY COME_FROM"
|
||||
], customize)
|
||||
elif opname == "SETUP_EXCEPT":
|
||||
if "PyPy" in customize:
|
||||
self.add_unique_rules(
|
||||
[
|
||||
"stmt ::= try_except_pypy",
|
||||
"try_except_pypy ::= SETUP_EXCEPT suite_stmts_opt except_handler_pypy",
|
||||
"except_handler_pypy ::= COME_FROM except_stmts END_FINALLY COME_FROM",
|
||||
],
|
||||
customize,
|
||||
)
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
||||
elif opname == 'SETUP_FINALLY':
|
||||
if 'PyPy' in customize:
|
||||
self.addRule("""
|
||||
elif opname == "SETUP_FINALLY":
|
||||
if "PyPy" in customize:
|
||||
self.addRule(
|
||||
"""
|
||||
stmt ::= tryfinallystmt_pypy
|
||||
tryfinallystmt_pypy ::= SETUP_FINALLY suite_stmts_opt COME_FROM_FINALLY
|
||||
suite_stmts_opt END_FINALLY""", nop_func)
|
||||
suite_stmts_opt END_FINALLY""",
|
||||
nop_func,
|
||||
)
|
||||
|
||||
custom_seen_ops.add(opname)
|
||||
continue
|
elif opname_base in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
elif opname_base in ("UNPACK_TUPLE", "UNPACK_SEQUENCE"):
custom_seen_ops.add(opname)
rule = 'unpack ::= ' + opname + ' store' * token.attr
elif opname_base == 'UNPACK_LIST':
rule = "unpack ::= " + opname + " store" * token.attr
elif opname_base == "UNPACK_LIST":
custom_seen_ops.add(opname)
rule = 'unpack_list ::= ' + opname + ' store' * token.attr
rule = "unpack_list ::= " + opname + " store" * token.attr
else:
continue
self.addRule(rule, nop_func)
pass

self.check_reduce['raise_stmt1'] = 'tokens'
self.check_reduce['aug_assign2'] = 'AST'
self.check_reduce['or'] = 'AST'
self.check_reduce["raise_stmt1"] = "tokens"
self.check_reduce["aug_assign2"] = "AST"
self.check_reduce["or"] = "AST"
# self.check_reduce['_stmts'] = 'AST'

# Dead code testing...
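# Worked example of the rule template above for a hypothetical token: an
# UNPACK_SEQUENCE with attr == 3 expands to
#   unpack ::= UNPACK_SEQUENCE_3 store store store
# The check_reduce entries then ask the parser to run reduce_is_invalid()
# before accepting a reduction of that nonterminal, passing the raw token
# run ("tokens") or the partial parse tree ("AST") as evidence -- my reading
# of the contract, hedged accordingly.
opname, attr = "UNPACK_SEQUENCE_3", 3
print("unpack ::= " + opname + " store" * attr)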
||||
@@ -541,24 +654,30 @@ class Python2Parser(PythonParser):
|
||||
# Dead code testing...
|
||||
# if lhs == 'while1elsestmt':
|
||||
# from trepan.api import debug; debug()
|
||||
if lhs in ('aug_assign1', 'aug_assign2') and ast[0] and ast[0][0] in ('and', 'or'):
|
||||
if (
|
||||
lhs in ("aug_assign1", "aug_assign2")
|
||||
and ast[0]
|
||||
and ast[0][0] in ("and", "or")
|
||||
):
|
||||
return True
|
||||
elif lhs in ('raise_stmt1',):
|
||||
elif lhs in ("raise_stmt1",):
|
||||
# We will assume 'LOAD_ASSERT' will be handled by an assert grammar rule
|
||||
return (tokens[first] == 'LOAD_ASSERT' and (last >= len(tokens)))
|
||||
elif rule == ('or', ('expr', 'jmp_true', 'expr', '\\e_come_from_opt')):
|
||||
return tokens[first] == "LOAD_ASSERT" and (last >= len(tokens))
|
||||
elif rule == ("or", ("expr", "jmp_true", "expr", "\\e_come_from_opt")):
|
||||
expr2 = ast[2]
|
||||
return expr2 == 'expr' and expr2[0] == 'LOAD_ASSERT'
|
||||
elif lhs in ('delete_subscript', 'del_expr'):
|
||||
return expr2 == "expr" and expr2[0] == "LOAD_ASSERT"
|
||||
elif lhs in ("delete_subscript", "del_expr"):
|
||||
op = ast[0][0]
|
||||
return op.kind in ('and', 'or')
|
||||
return op.kind in ("and", "or")
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class Python2ParserSingle(Python2Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Check grammar
|
||||
p = Python2Parser()
|
||||
p.check_grammar()
|
||||
|
@@ -146,6 +146,11 @@ class Python26Parser(Python2Parser):
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_cf_pop POP_BLOCK
|
||||
whilestmt ::= SETUP_LOOP testexpr returns POP_BLOCK COME_FROM
|
||||
|
||||
# In the "whilestmt" below, there isn't a COME_FROM when the
|
||||
# "while" is the last thing in the module or function.
|
||||
|
||||
whilestmt ::= SETUP_LOOP testexpr returns POP_TOP POP_BLOCK
|
||||
|
||||
whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt jb_pop POP_BLOCK
|
||||
else_suitel COME_FROM
|
||||
while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK else_suitel COME_FROM
|
||||
@@ -186,7 +191,11 @@ class Python26Parser(Python2Parser):
|
||||
jmp_false_then ::= JUMP_IF_FALSE THEN POP_TOP
|
||||
jmp_true_then ::= JUMP_IF_TRUE THEN POP_TOP
|
||||
|
||||
while1stmt ::= SETUP_LOOP returns COME_FROM
|
||||
# In the "while1stmt" below, there sometimes isn't a
|
||||
# "COME_FROM" when the "while1" is the last thing in the
|
||||
# module or function.
|
||||
|
||||
while1stmt ::= SETUP_LOOP returns come_from_opt
|
||||
for_block ::= returns _come_froms
|
||||
"""
|
||||
|
||||
@@ -241,8 +250,11 @@ class Python26Parser(Python2Parser):
|
||||
genexpr_func ::= setup_loop_lf FOR_ITER store comp_iter JUMP_ABSOLUTE come_froms
|
||||
POP_TOP jb_pop jb_pb_come_from
|
||||
|
||||
genexpr_func ::= setup_loop_lf FOR_ITER store comp_iter JUMP_BACK come_froms
|
||||
POP_TOP jb_pb_come_from
|
||||
|
||||
generator_exp ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1 COME_FROM
|
||||
list_if ::= list_if ::= expr jmp_false_then list_iter
|
||||
list_if ::= expr jmp_false_then list_iter
|
||||
'''
|
||||
|
||||
def p_ret26(self, args):
|
||||
@@ -455,7 +467,7 @@ if __name__ == '__main__':
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 2.6:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -41,8 +41,6 @@ class Python27Parser(Python2Parser):
|
||||
comp_body ::= set_comp_body
|
||||
comp_for ::= expr for_iter store comp_iter JUMP_BACK
|
||||
|
||||
comp_iter ::= comp_body
|
||||
|
||||
dict_comp_body ::= expr expr MAP_ADD
|
||||
set_comp_body ::= expr SET_ADD
|
||||
|
||||
@@ -294,7 +292,7 @@ if __name__ == '__main__':
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 2.7:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -86,10 +86,8 @@ class Python3Parser(PythonParser):
|
||||
dict_comp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
|
||||
comp_iter JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
comp_iter ::= comp_if
|
||||
comp_iter ::= comp_if_not
|
||||
comp_if_not ::= expr jmp_true comp_iter
|
||||
comp_iter ::= comp_body
|
||||
"""
|
||||
|
||||
def p_grammar(self, args):
|
||||
@@ -432,7 +430,7 @@ class Python3Parser(PythonParser):
|
||||
else:
|
||||
return "%s_0" % (token.kind)
|
||||
|
||||
def custom_build_class_rule(self, opname, i, token, tokens, customize):
|
||||
def custom_build_class_rule(self, opname, i, token, tokens, customize, is_pypy):
|
||||
"""
|
||||
# Should the first rule be somehow folded into the 2nd one?
|
||||
build_class ::= LOAD_BUILD_CLASS mkfunc
|
||||
@@ -483,10 +481,18 @@ class Python3Parser(PythonParser):
|
||||
call_function = call_fn_tok.kind
|
||||
if call_function.startswith("CALL_FUNCTION_KW"):
|
||||
self.addRule("classdef ::= build_class_kw store", nop_func)
|
||||
rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %sLOAD_CONST %s" % (
|
||||
"expr " * (call_fn_tok.attr - 1),
|
||||
call_function,
|
||||
)
|
||||
if is_pypy:
|
||||
args_pos, args_kw = self.get_pos_kw(call_fn_tok)
|
||||
rule = "build_class_kw ::= LOAD_BUILD_CLASS mkfunc %s%s%s" % (
|
||||
"expr " * (args_pos - 1),
|
||||
"kwarg " * (args_kw),
|
||||
call_function,
|
||||
)
|
||||
else:
|
||||
rule = (
|
||||
"build_class_kw ::= LOAD_BUILD_CLASS mkfunc %sLOAD_CONST %s"
|
||||
% ("expr " * (call_fn_tok.attr - 1), call_function)
|
||||
)
|
||||
else:
|
||||
call_function = self.call_fn_name(call_fn_tok)
|
||||
rule = "build_class ::= LOAD_BUILD_CLASS mkfunc %s%s" % (
|
||||
@@ -496,7 +502,7 @@ class Python3Parser(PythonParser):
|
||||
self.addRule(rule, nop_func)
|
||||
return
|
||||
|
||||
def custom_classfunc_rule(self, opname, token, customize, next_token):
|
||||
def custom_classfunc_rule(self, opname, token, customize, next_token, is_pypy):
|
||||
"""
|
||||
call ::= expr {expr}^n CALL_FUNCTION_n
|
||||
call ::= expr {expr}^n CALL_FUNCTION_VAR_n
|
||||
@@ -514,18 +520,28 @@ class Python3Parser(PythonParser):
|
||||
# Yes, this computation based on instruction name is a little bit hoaky.
|
||||
nak = (len(opname) - len("CALL_FUNCTION")) // 3
|
||||
|
||||
token.kind = self.call_fn_name(token)
|
||||
uniq_param = args_kw + args_pos
|
||||
|
||||
# Note: 3.5+ have subclassed this method; so we don't handle
|
||||
# 'CALL_FUNCTION_VAR' or 'CALL_FUNCTION_EX' here.
|
||||
rule = (
|
||||
"call ::= expr "
|
||||
+ ("pos_arg " * args_pos)
|
||||
+ ("kwarg " * args_kw)
|
||||
+ "expr " * nak
|
||||
+ token.kind
|
||||
)
|
||||
if is_pypy and self.version >= 3.6:
|
||||
if token == "CALL_FUNCTION":
|
||||
token.kind = self.call_fn_name(token)
|
||||
rule = (
|
||||
"call ::= expr "
|
||||
+ ("pos_arg " * args_pos)
|
||||
+ ("kwarg " * args_kw)
|
||||
+ token.kind
|
||||
)
|
||||
else:
|
||||
token.kind = self.call_fn_name(token)
|
||||
rule = (
|
||||
"call ::= expr "
|
||||
+ ("pos_arg " * args_pos)
|
||||
+ ("kwarg " * args_kw)
|
||||
+ "expr " * nak
|
||||
+ token.kind
|
||||
)
|
||||
|
||||
self.add_unique_rule(rule, token.kind, uniq_param, customize)
|
||||
|
||||
@@ -543,7 +559,12 @@ class Python3Parser(PythonParser):
|
||||
this has an effect on many rules.
|
||||
"""
|
||||
if self.version >= 3.3:
|
||||
new_rule = rule % (("LOAD_STR ") * 1)
|
||||
if PYTHON3 or not self.is_pypy:
|
||||
load_op = "LOAD_STR "
|
||||
else:
|
||||
load_op = "LOAD_CONST "
|
||||
|
||||
new_rule = rule % ((load_op) * 1)
|
||||
else:
|
||||
new_rule = rule % (("LOAD_STR ") * 0)
|
||||
self.add_unique_rule(new_rule, opname, attr, customize)
|
||||
@@ -571,7 +592,7 @@ class Python3Parser(PythonParser):
|
||||
|
||||
"""
|
||||
|
||||
is_pypy = False
|
||||
self.is_pypy = False
|
||||
|
||||
# For a rough break out on the first word. This may
|
||||
# include instructions that don't need customization,
|
||||
@@ -616,7 +637,7 @@ class Python3Parser(PythonParser):
|
||||
# a specific instruction seen.
|
||||
|
||||
if "PyPy" in customize:
|
||||
is_pypy = True
|
||||
self.is_pypy = True
|
||||
self.addRule(
|
||||
"""
|
||||
stmt ::= assign3_pypy
|
||||
@@ -821,11 +842,13 @@ class Python3Parser(PythonParser):
|
||||
"""
|
||||
self.addRule(rule, nop_func)
|
||||
|
||||
self.custom_classfunc_rule(opname, token, customize, tokens[i + 1])
|
||||
self.custom_classfunc_rule(
|
||||
opname, token, customize, tokens[i + 1], self.is_pypy
|
||||
)
|
||||
# Note: don't add to custom_ops_processed.
|
||||
|
||||
elif opname_base == "CALL_METHOD":
|
||||
# PyPy only - DRY with parse2
|
||||
# PyPy and Python 3.7+ only - DRY with parse2
|
||||
|
||||
args_pos, args_kw = self.get_pos_kw(token)
|
||||
|
||||
@@ -880,21 +903,30 @@ class Python3Parser(PythonParser):
|
||||
self.addRule(
|
||||
"""
|
||||
stmt ::= assert_pypy
|
||||
stmt ::= assert2_pypy", nop_func)
|
||||
stmt ::= assert_not_pypy
|
||||
stmt ::= assert2_pypy
|
||||
stmt ::= assert2_not_pypy
|
||||
assert_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true
|
||||
LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
|
||||
assert_not_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_false
|
||||
LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
|
||||
assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true
|
||||
LOAD_ASSERT expr CALL_FUNCTION_1
|
||||
RAISE_VARARGS_1 COME_FROM
|
||||
assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true
|
||||
LOAD_ASSERT expr CALL_FUNCTION_1
|
||||
RAISE_VARARGS_1 COME_FROM,
|
||||
RAISE_VARARGS_1 COME_FROM
|
||||
assert2_not_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_false
|
||||
LOAD_ASSERT expr CALL_FUNCTION_1
|
||||
RAISE_VARARGS_1 COME_FROM
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
custom_ops_processed.add(opname)
|
||||
elif opname == "LOAD_BUILD_CLASS":
|
||||
self.custom_build_class_rule(opname, i, token, tokens, customize)
|
||||
self.custom_build_class_rule(
|
||||
opname, i, token, tokens, customize, self.is_pypy
|
||||
)
|
||||
# Note: don't add to custom_ops_processed.
|
||||
elif opname == "LOAD_CLASSDEREF":
|
||||
# Python 3.4+
|
||||
@@ -967,7 +999,7 @@ class Python3Parser(PythonParser):
|
||||
j = 1
|
||||
else:
|
||||
j = 2
|
||||
if is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
|
||||
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
|
||||
rule_pat = "mklambda ::= %sload_closure LOAD_LAMBDA %%s%s" % (
|
||||
"pos_arg " * args_pos,
|
||||
opname,
|
||||
@@ -982,7 +1014,7 @@ class Python3Parser(PythonParser):
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
|
||||
if has_get_iter_call_function1:
|
||||
if is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
|
||||
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
|
||||
# In the tokens we saw:
|
||||
# LOAD_LISTCOMP LOAD_CONST MAKE_FUNCTION (>= 3.3) or
|
||||
# LOAD_LISTCOMP MAKE_FUNCTION (< 3.3) or
|
||||
@@ -996,7 +1028,7 @@ class Python3Parser(PythonParser):
|
||||
self.add_make_function_rule(
|
||||
rule_pat, opname, token.attr, customize
|
||||
)
|
||||
if is_pypy or (i >= j and tokens[i - j] == "LOAD_SETCOMP"):
|
||||
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_SETCOMP"):
|
||||
rule_pat = (
|
||||
"set_comp ::= %sload_closure LOAD_SETCOMP %%s%s expr "
|
||||
"GET_ITER CALL_FUNCTION_1"
|
||||
@@ -1005,7 +1037,7 @@ class Python3Parser(PythonParser):
|
||||
self.add_make_function_rule(
|
||||
rule_pat, opname, token.attr, customize
|
||||
)
|
||||
if is_pypy or (i >= j and tokens[i - j] == "LOAD_DICTCOMP"):
|
||||
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_DICTCOMP"):
|
||||
self.add_unique_rule(
|
||||
"dict_comp ::= %sload_closure LOAD_DICTCOMP %s "
|
||||
"expr GET_ITER CALL_FUNCTION_1"
|
||||
@@ -1051,17 +1083,24 @@ class Python3Parser(PythonParser):
|
||||
)
|
||||
|
||||
elif self.version >= 3.4:
|
||||
if PYTHON3 or not self.is_pypy:
|
||||
load_op = "LOAD_STR"
|
||||
else:
|
||||
load_op = "LOAD_CONST"
|
||||
|
||||
if annotate_args > 0:
|
||||
rule = "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure LOAD_CODE LOAD_STR %s" % (
|
||||
rule = "mkfunc_annotate ::= %s%s%sannotate_tuple load_closure %s %s %s" % (
|
||||
"pos_arg " * args_pos,
|
||||
kwargs_str,
|
||||
"annotate_arg " * (annotate_args - 1),
|
||||
load_op,
|
||||
opname,
|
||||
)
|
||||
else:
|
||||
rule = "mkfunc ::= %s%s load_closure LOAD_CODE LOAD_STR %s" % (
|
||||
rule = "mkfunc ::= %s%s load_closure LOAD_CODE %s %s" % (
|
||||
"pos_arg " * args_pos,
|
||||
kwargs_str,
|
||||
load_op,
|
||||
opname,
|
||||
)
|
||||
|
||||
@@ -1119,6 +1158,14 @@ class Python3Parser(PythonParser):
|
||||
opname,
|
||||
)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if not PYTHON3 and self.is_pypy:
|
||||
rule = "mkfunc ::= %s%s%s%s" % (
|
||||
"expr " * stack_count,
|
||||
"load_closure " * closure,
|
||||
"LOAD_CODE LOAD_CONST ",
|
||||
opname,
|
||||
)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
|
||||
if has_get_iter_call_function1:
|
||||
rule_pat = (
|
||||
@@ -1135,7 +1182,7 @@ class Python3Parser(PythonParser):
|
||||
self.add_make_function_rule(
|
||||
rule_pat, opname, token.attr, customize
|
||||
)
|
||||
if is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LISTCOMP"):
|
||||
if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LISTCOMP"):
|
||||
if self.version >= 3.6:
|
||||
# 3.6+ sometimes bundles all of the
|
||||
# 'exprs' in the rule above into a
|
||||
@@ -1156,7 +1203,7 @@ class Python3Parser(PythonParser):
|
||||
rule_pat, opname, token.attr, customize
|
||||
)
|
||||
|
||||
if is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
|
||||
if self.is_pypy or (i >= 2 and tokens[i - 2] == "LOAD_LAMBDA"):
|
||||
rule_pat = "mklambda ::= %s%sLOAD_LAMBDA %%s%s" % (
|
||||
("pos_arg " * args_pos),
|
||||
("kwarg " * args_kw),
|
||||
@@ -1184,7 +1231,7 @@ class Python3Parser(PythonParser):
|
||||
)
|
||||
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
|
||||
|
||||
if is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
|
||||
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LISTCOMP"):
|
||||
# In the tokens we saw:
|
||||
# LOAD_LISTCOMP LOAD_CONST MAKE_FUNCTION (>= 3.3) or
|
||||
# LOAD_LISTCOMP MAKE_FUNCTION (< 3.3) or
|
||||
@@ -1199,7 +1246,7 @@ class Python3Parser(PythonParser):
|
||||
)
|
||||
|
||||
# FIXME: Fold test into add_make_function_rule
|
||||
if is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
|
||||
if self.is_pypy or (i >= j and tokens[i - j] == "LOAD_LAMBDA"):
|
||||
rule_pat = "mklambda ::= %s%sLOAD_LAMBDA %%s%s" % (
|
||||
("pos_arg " * args_pos),
|
||||
("kwarg " * args_kw),
|
||||
@@ -1390,8 +1437,9 @@ class Python3Parser(PythonParser):
|
||||
except_handler COME_FROM else_suitel
|
||||
opt_come_from_except
|
||||
""",
|
||||
nop_func,
|
||||
nop_func
|
||||
)
|
||||
|
||||
custom_ops_processed.add(opname)
|
||||
elif opname_base in ("UNPACK_EX",):
|
||||
before_count, after_count = token.attr
|
||||
@@ -1494,12 +1542,24 @@ class Python3Parser(PythonParser):
|
||||
for i in range(cfl - 1, first, -1):
|
||||
if tokens[i] != "POP_BLOCK":
|
||||
break
|
||||
if tokens[i].kind not in ("JUMP_BACK", "RETURN_VALUE"):
|
||||
if tokens[i].kind not in ("JUMP_BACK", "RETURN_VALUE", "BREAK_LOOP"):
|
||||
if not tokens[i].kind.startswith("COME_FROM"):
|
||||
return True
|
||||
|
||||
# Check that the SETUP_LOOP jumps to the offset after the
|
||||
# COME_FROM_LOOP
|
||||
|
||||
# Python 3.0 has additional:
|
||||
# JUMP_FORWARD here
|
||||
# COME_FROM
|
||||
# POP_TOP
|
||||
# COME_FROM
|
||||
# here:
|
||||
# (target of SETUP_LOOP)
|
||||
# We won't check this.
|
||||
if self.version == 3.0:
|
||||
return False
|
||||
|
||||
if 0 <= last < len(tokens) and tokens[last] in (
|
||||
"COME_FROM_LOOP",
|
||||
"JUMP_BACK",
|
||||
|
@@ -12,13 +12,19 @@ class Python30Parser(Python31Parser):
|
||||
def p_30(self, args):
|
||||
"""
|
||||
|
||||
pt_bp ::= POP_TOP POP_BLOCK
|
||||
|
||||
assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 COME_FROM POP_TOP
|
||||
assert2 ::= assert_expr jmp_true LOAD_ASSERT expr CALL_FUNCTION_1 RAISE_VARARGS_1
|
||||
come_froms
|
||||
call_stmt ::= expr _come_froms POP_TOP
|
||||
|
||||
return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM POP_TOP
|
||||
compare_chained2 ::= expr COMPARE_OP RETURN_END_IF_LAMBDA
|
||||
|
||||
# FIXME: combine with parse3.2
|
||||
whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK
|
||||
COME_FROM_LOOP
|
||||
whileTruestmt ::= SETUP_LOOP l_stmts_opt
|
||||
jb_or_c COME_FROM_LOOP
|
||||
whileTruestmt ::= SETUP_LOOP returns
|
||||
COME_FROM_LOOP
|
||||
|
||||
@@ -43,10 +49,22 @@ class Python30Parser(Python31Parser):
|
||||
|
||||
else_suitel ::= l_stmts COME_FROM_LOOP JUMP_BACK
|
||||
|
||||
ifelsestmtl ::= testexpr c_stmts_opt jb_pop_top else_suitel
|
||||
jump_absolute_else ::= COME_FROM JUMP_ABSOLUTE COME_FROM POP_TOP
|
||||
|
||||
jump_cf_pop ::= _come_froms _jump _come_froms POP_TOP
|
||||
|
||||
ifelsestmt ::= testexpr c_stmts_opt jump_cf_pop else_suite COME_FROM
|
||||
ifelsestmtl ::= testexpr c_stmts_opt jump_cf_pop else_suitel
|
||||
ifelsestmtc ::= testexpr c_stmts_opt jump_absolute_else else_suitec
|
||||
ifelsestmtc ::= testexpr c_stmts_opt jump_cf_pop else_suitec
|
||||
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE COME_FROM
|
||||
iflaststmtl ::= testexpr c_stmts_opt jb_pop_top
|
||||
iflaststmtl ::= testexpr c_stmts_opt come_froms JUMP_BACK COME_FROM POP_TOP
|
||||
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE COME_FROM POP_TOP
|
||||
|
||||
|
||||
withasstmt ::= expr setupwithas store suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM_FINALLY
|
||||
LOAD_FAST DELETE_FAST WITH_CLEANUP END_FINALLY
|
||||
@@ -54,8 +72,8 @@ class Python30Parser(Python31Parser):
|
||||
setup_finally ::= STORE_FAST SETUP_FINALLY LOAD_FAST DELETE_FAST
|
||||
|
||||
# Need to keep LOAD_FAST as index 1
|
||||
set_comp_func_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
|
||||
set_comp_func ::= set_comp_func_header
|
||||
set_comp_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
|
||||
set_comp_func ::= set_comp_header
|
||||
LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK POP_TOP JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
@@ -63,8 +81,10 @@ class Python30Parser(Python31Parser):
|
||||
list_comp ::= list_comp_header
|
||||
LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK
|
||||
list_comp ::= list_comp_header
|
||||
LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK _come_froms POP_TOP JUMP_BACK
|
||||
|
||||
set_comp_header ::= BUILD_SET_0 DUP_TOP STORE_FAST
|
||||
set_comp ::= set_comp_header
|
||||
LOAD_FAST FOR_ITER store comp_iter
|
||||
JUMP_BACK
|
||||
@@ -73,6 +93,24 @@ class Python30Parser(Python31Parser):
|
||||
dict_comp ::= dict_comp_header
|
||||
LOAD_FAST FOR_ITER store dict_comp_iter
|
||||
JUMP_BACK
|
||||
dict_comp ::= dict_comp_header
|
||||
LOAD_FAST FOR_ITER store dict_comp_iter
|
||||
JUMP_BACK _come_froms POP_TOP JUMP_BACK
|
||||
|
||||
stmt ::= try_except30
|
||||
try_except30 ::= SETUP_EXCEPT suite_stmts_opt
|
||||
_come_froms pt_bp
|
||||
except_handler opt_come_from_except
|
||||
|
||||
# From Python 2.6
|
||||
|
||||
|
||||
list_iter ::= list_if JUMP_BACK
|
||||
list_iter ::= list_if JUMP_BACK _come_froms POP_TOP
|
||||
lc_body ::= LOAD_NAME expr LIST_APPEND
|
||||
lc_body ::= LOAD_FAST expr LIST_APPEND
|
||||
list_if ::= expr jmp_false_then list_iter
|
||||
#############
|
||||
|
||||
dict_comp_iter ::= expr expr ROT_TWO expr STORE_SUBSCR
|
||||
|
||||
@@ -88,19 +126,52 @@ class Python30Parser(Python31Parser):
|
||||
except_suite ::= c_stmts POP_EXCEPT jump_except POP_TOP
|
||||
except_suite_finalize ::= SETUP_FINALLY c_stmts_opt except_var_finalize END_FINALLY
|
||||
_jump COME_FROM POP_TOP
|
||||
jump_except ::= JUMP_FORWARD COME_FROM POP_TOP
|
||||
jump_except ::= JUMP_ABSOLUTE COME_FROM POP_TOP
|
||||
|
||||
except_handler ::= jmp_abs COME_FROM_EXCEPT except_stmts END_FINALLY
|
||||
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_FORWARD COME_FROM POP_TOP
|
||||
_ifstmts_jump ::= c_stmts_opt come_froms POP_TOP JUMP_FORWARD _come_froms
|
||||
|
||||
jump_except ::= _jump COME_FROM POP_TOP
|
||||
|
||||
or ::= expr jmp_false expr jmp_true expr
|
||||
or ::= expr jmp_true expr
|
||||
|
||||
import_from ::= LOAD_CONST LOAD_CONST IMPORT_NAME importlist _come_froms POP_TOP
|
||||
|
||||
################################################################################
|
||||
# In many ways 3.0 is like 2.6. One similarity is there is no JUMP_IF_TRUE and
|
||||
# JUMP_IF_FALSE
|
||||
# The below rules in fact are the same or similar.
|
||||
|
||||
jmp_true ::= JUMP_IF_TRUE POP_TOP
|
||||
jmp_false ::= JUMP_IF_FALSE _come_froms POP_TOP
|
||||
jmp_true ::= JUMP_IF_TRUE POP_TOP
|
||||
jmp_true_then ::= JUMP_IF_TRUE _come_froms POP_TOP
|
||||
jmp_false ::= JUMP_IF_FALSE _come_froms POP_TOP
|
||||
jmp_false_then ::= JUMP_IF_FALSE POP_TOP
|
||||
|
||||
# We don't have hacky THEN detection, so we do it
|
||||
# in the grammar below which is also somewhat hacky.
|
||||
|
||||
stmt ::= ifstmt30
|
||||
stmt ::= ifnotstmt30
|
||||
ifstmt30 ::= testfalse_then _ifstmts_jump30
|
||||
ifnotstmt30 ::= testtrue_then _ifstmts_jump30
|
||||
|
||||
testfalse_then ::= expr jmp_false_then
|
||||
testtrue_then ::= expr jmp_true_then
|
||||
call_stmt ::= expr COME_FROM
|
||||
_ifstmts_jump30 ::= c_stmts POP_TOP
|
||||
|
||||
gen_comp_body ::= expr YIELD_VALUE COME_FROM POP_TOP
|
||||
|
||||
except_handler ::= jmp_abs COME_FROM_EXCEPT except_stmts
|
||||
COME_FROM POP_TOP END_FINALLY
|
||||
|
||||
or ::= expr jmp_true_then expr come_from_opt
|
||||
ret_or ::= expr jmp_true_then expr come_from_opt
|
||||
ret_and ::= expr jump_false expr come_from_opt
|
||||
|
||||
################################################################################
|
||||
for_block ::= l_stmts_opt _come_froms POP_TOP JUMP_BACK
|
||||
|
||||
except_handler ::= JUMP_FORWARD COME_FROM_EXCEPT except_stmts
|
||||
@@ -108,12 +179,17 @@ class Python30Parser(Python31Parser):
|
||||
except_handler ::= jmp_abs COME_FROM_EXCEPT except_stmts
|
||||
POP_TOP END_FINALLY
|
||||
|
||||
return_if_stmt ::= ret_expr RETURN_END_IF COME_FROM POP_TOP
|
||||
and ::= expr jmp_false expr come_from_opt
|
||||
return_if_stmt ::= ret_expr RETURN_END_IF come_froms POP_TOP
|
||||
return_if_stmt ::= ret_expr RETURN_VALUE come_froms POP_TOP
|
||||
|
||||
and ::= expr jmp_false_then expr come_from_opt
|
||||
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt come_from_opt
|
||||
JUMP_BACK COME_FROM POP_TOP POP_BLOCK COME_FROM_LOOP
|
||||
JUMP_BACK _come_froms POP_TOP POP_BLOCK COME_FROM_LOOP
|
||||
whilestmt ::= SETUP_LOOP testexpr returns
|
||||
POP_TOP POP_BLOCK COME_FROM_LOOP
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt come_from_opt
|
||||
come_froms POP_TOP POP_BLOCK COME_FROM_LOOP
|
||||
|
||||
|
||||
# compare_chained is like x <= y <= z
|
||||
@@ -124,34 +200,162 @@ class Python30Parser(Python31Parser):
|
||||
compare_chained2 ::= expr COMPARE_OP RETURN_END_IF
|
||||
"""
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
super(Python30Parser, self).customize_grammar_rules(tokens, customize)
|
||||
|
||||
def remove_rules_30(self):
|
||||
self.remove_rules("""
|
||||
|
||||
# These were found using grammar coverage
|
||||
while1stmt ::= SETUP_LOOP l_stmts COME_FROM JUMP_BACK COME_FROM_LOOP
|
||||
whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM_LOOP
|
||||
whileelsestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK else_suitel COME_FROM_LOOP
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM_LOOP
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK JUMP_BACK COME_FROM_LOOP
|
||||
whilestmt ::= SETUP_LOOP testexpr returns POP_TOP POP_BLOCK COME_FROM_LOOP
|
||||
withasstmt ::= expr SETUP_WITH store suite_stmts_opt POP_BLOCK LOAD_CONST COME_FROM_WITH WITH_CLEANUP END_FINALLY
|
||||
withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt POP_BLOCK LOAD_CONST COME_FROM_WITH WITH_CLEANUP END_FINALLY
|
||||
##########################################################################################
|
||||
|
||||
iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK COME_FROM_LOOP
|
||||
ifelsestmtl ::= testexpr c_stmts_opt JUMP_BACK else_suitel
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_FORWARD _come_froms
|
||||
|
||||
jump_forward_else ::= JUMP_FORWARD ELSE
|
||||
jump_absolute_else ::= JUMP_ABSOLUTE ELSE
|
||||
whilestmt ::= SETUP_LOOP testexpr l_stmts_opt COME_FROM JUMP_BACK POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
whilestmt ::= SETUP_LOOP testexpr returns
|
||||
POP_BLOCK COME_FROM_LOOP
|
||||
|
||||
assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1
|
||||
|
||||
return_if_lambda ::= RETURN_END_IF_LAMBDA
|
||||
except_suite ::= c_stmts POP_EXCEPT jump_except
|
||||
whileelsestmt ::= SETUP_LOOP testexpr l_stmts JUMP_BACK POP_BLOCK
|
||||
else_suitel COME_FROM_LOOP
|
||||
|
||||
# No JUMP_IF_FALSE_OR_POP
|
||||
################################################################
|
||||
# No JUMP_IF_FALSE_OR_POP, JUMP_IF_TRUE_OR_POP,
|
||||
# POP_JUMP_IF_FALSE, or POP_JUMP_IF_TRUE
|
||||
|
||||
jmp_false ::= POP_JUMP_IF_FALSE
|
||||
jmp_true ::= JUMP_IF_TRUE_OR_POP POP_TOP
|
||||
jmp_true ::= POP_JUMP_IF_TRUE
|
||||
|
||||
compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
compare_chained1 COME_FROM
|
||||
compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
compare_chained2 COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF
|
||||
COME_FROM ret_expr_or_cond
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
and ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
""")
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
super(Python30Parser, self).customize_grammar_rules(tokens, customize)
|
||||
self.remove_rules_30()
|
||||
|
||||
self.check_reduce["iflaststmtl"] = "AST"
|
||||
self.check_reduce['ifstmt'] = "AST"
|
||||
self.check_reduce["ifelsestmtc"] = "AST"
|
||||
self.check_reduce["ifelsestmt"] = "AST"
|
||||
# self.check_reduce["and"] = "stmt"
|
||||
return
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python30Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
return invalid
|
||||
lhs = rule[0]
|
||||
if (
|
||||
lhs in ("iflaststmtl", "ifstmt",
|
||||
"ifelsestmt", "ifelsestmtc") and ast[0] == "testexpr"
|
||||
):
|
||||
testexpr = ast[0]
|
||||
if testexpr[0] == "testfalse":
|
||||
testfalse = testexpr[0]
|
||||
if lhs == "ifelsestmtc" and ast[2] == "jump_absolute_else":
|
||||
jump_absolute_else = ast[2]
|
||||
come_from = jump_absolute_else[2]
|
||||
return come_from == "COME_FROM" and come_from.attr < tokens[first].offset
|
||||
pass
|
||||
elif lhs in ("ifelsestmt", "ifelsestmtc") and ast[2] == "jump_cf_pop":
|
||||
jump_cf_pop = ast[2]
|
||||
come_froms = jump_cf_pop[0]
|
||||
for come_from in come_froms:
|
||||
if come_from.attr < tokens[first].offset:
|
||||
return True
|
||||
come_froms = jump_cf_pop[2]
|
||||
if come_froms == "COME_FROM":
|
||||
if come_froms.attr < tokens[first].offset:
|
||||
return True
|
||||
pass
|
||||
elif come_froms == "_come_froms":
|
||||
for come_from in come_froms:
|
||||
if come_from.attr < tokens[first].offset:
|
||||
return True
|
||||
|
||||
return False
|
||||
elif testfalse[1] == "jmp_false":
|
||||
jmp_false = testfalse[1]
|
||||
if last == len(tokens):
|
||||
last -= 1
|
||||
while (isinstance(tokens[first].offset, str) and first < last):
|
||||
first += 1
|
||||
if first == last:
|
||||
return True
|
||||
while (first < last and isinstance(tokens[last].offset, str)):
|
||||
last -= 1
|
||||
if rule[0] == "iflaststmtl":
|
||||
return not (jmp_false[0].attr <= tokens[last].offset)
|
||||
else:
|
||||
jmp_false_target = jmp_false[0].attr
|
||||
if tokens[first].offset > jmp_false_target:
|
||||
return True
|
||||
return (
|
||||
(jmp_false_target > tokens[last].offset) and tokens[last] != "JUMP_FORWARD")
|
||||
pass
|
||||
pass
|
||||
pass
|
||||
# elif lhs == "and":
|
||||
# return tokens[last+1] == "JUMP_FORWARD"
|
||||
|
||||
pass
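# Sketch of the idea behind the offset checks above (my reading): token
# offsets increase through the body, and a COME_FROM token's .attr records
# the offset of the jump that targets it. A COME_FROM inside a candidate
# if/else reduction that was jumped to from *before* the statement's first
# token belongs to an enclosing construct, so the reduction is vetoed:
def jumps_from_outside(come_from_attr, first_offset):
    return come_from_attr < first_offset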
|
||||
|
||||
class Python30ParserSingle(Python30Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python30Parser()
|
||||
p.remove_rules_30()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.0:
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
"""JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
|
||||
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
|
||||
LAMBDA_MARKER RETURN_LAST
|
||||
""".split()))
|
||||
## FIXME: try this
|
||||
remain_tokens = set(tokens) - opcode_set
|
||||
import re
|
||||
remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
|
||||
remain_tokens = set(remain_tokens) - opcode_set
|
||||
print(remain_tokens)
|
||||
import sys
|
||||
if len(sys.argv) > 1:
|
||||
from spark_parser.spark import rule2str
|
||||
for rule in sorted(p.rule2name.items()):
|
||||
print(rule2str(rule[0]))
|
||||
|
@@ -32,11 +32,45 @@ class Python31Parser(Python32Parser):
|
||||
load ::= LOAD_FAST
|
||||
load ::= LOAD_NAME
|
||||
"""
|
||||
def remove_rules_31(self):
|
||||
self.remove_rules("""
|
||||
# DUP_TOP_TWO is DUP_TOPX in 3.1 and earlier
|
||||
subscript2 ::= expr expr DUP_TOP_TWO BINARY_SUBSCR
|
||||
""")
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
super(Python31Parser, self).customize_grammar_rules(tokens, customize)
|
||||
self.remove_rules_31()
|
||||
return
|
||||
pass
|
||||
|
||||
class Python31ParserSingle(Python31Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python31Parser()
|
||||
p.remove_rules_31()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.1:
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
"""JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
|
||||
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
|
||||
LAMBDA_MARKER RETURN_LAST
|
||||
""".split()))
|
||||
## FIXME: try this
|
||||
remain_tokens = set(tokens) - opcode_set
|
||||
import re
|
||||
remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
|
||||
remain_tokens = set(remain_tokens) - opcode_set
|
||||
print(remain_tokens)
|
||||
import sys
|
||||
if len(sys.argv) > 1:
|
||||
from spark_parser.spark import rule2str
|
||||
for rule in sorted(p.rule2name.items()):
|
||||
print(rule2str(rule[0]))
|
||||
|
@@ -8,9 +8,15 @@ from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse3 import Python3Parser
|
||||
|
||||
class Python32Parser(Python3Parser):
|
||||
def p_30to33(self, args):
|
||||
"""
|
||||
# Store locals is only in Python 3.0 to 3.3
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
"""
|
||||
|
||||
def p_32to35(self, args):
|
||||
"""
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr jump_forward_else expr COME_FROM
|
||||
|
||||
# compare_chained2 is used in a "chained_compare": x <= y <= z
|
||||
@@ -18,10 +24,6 @@ class Python32Parser(Python3Parser):
|
||||
compare_chained2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
compare_chained2 ::= expr COMPARE_OP RETURN_VALUE_LAMBDA
|
||||
|
||||
# Store locals is only in Python 3.0 to 3.3
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
|
||||
# Python < 3.5 no POP BLOCK
|
||||
whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM_LOOP
|
||||
|
||||
|
@@ -14,23 +14,6 @@ class Python33Parser(Python32Parser):
|
||||
# Python 3.3+ adds yield from.
|
||||
expr ::= yield_from
|
||||
yield_from ::= expr expr YIELD_FROM
|
||||
|
||||
# We do the grammar hackery below for semantics
|
||||
# actions that want c_stmts_opt at index 1
|
||||
|
||||
# Python 3.5+ has jump optimization to remove the redundant
|
||||
# jump_excepts. But in 3.3 we need them added
|
||||
|
||||
try_except ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
except_handler
|
||||
jump_excepts come_from_except_clauses
|
||||
"""
|
||||
|
||||
def p_30to33(self, args):
|
||||
"""
|
||||
# Store locals is only in Python 3.0 to 3.3
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
"""
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
|
@@ -72,7 +72,7 @@ if __name__ == '__main__':
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.4:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -144,9 +144,15 @@ class Python35Parser(Python34Parser):
|
||||
super(Python35Parser, self).customize_grammar_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.kind
|
||||
if opname == 'LOAD_ASSERT':
|
||||
if 'PyPy' in customize:
|
||||
rules_str = """
|
||||
stmt ::= JUMP_IF_NOT_DEBUG stmts COME_FROM
|
||||
"""
|
||||
self.add_unique_doc_rules(rules_str, customize)
|
||||
# FIXME: I suspect this is wrong for 3.6 and 3.5, but
|
||||
# I haven't verified what the 3.7ish fix is
|
||||
if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
|
||||
elif opname == 'BUILD_MAP_UNPACK_WITH_CALL':
|
||||
if self.version < 3.7:
|
||||
self.addRule("expr ::= unmapexpr", nop_func)
|
||||
nargs = token.attr % 256
|
||||
@@ -257,7 +263,7 @@ if __name__ == '__main__':
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.5:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -188,13 +188,7 @@ class Python36Parser(Python35Parser):
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.kind
|
||||
|
||||
if opname == 'LOAD_ASSERT':
|
||||
if 'PyPy' in customize:
|
||||
rules_str = """
|
||||
stmt ::= JUMP_IF_NOT_DEBUG stmts COME_FROM
|
||||
"""
|
||||
self.add_unique_doc_rules(rules_str, customize)
|
||||
elif opname == 'FORMAT_VALUE':
|
||||
if opname == 'FORMAT_VALUE':
|
||||
rules_str = """
|
||||
expr ::= formatted_value1
|
||||
formatted_value1 ::= expr FORMAT_VALUE
|
||||
@@ -316,7 +310,7 @@ class Python36Parser(Python35Parser):
|
||||
pass
|
||||
return
|
||||
|
||||
def custom_classfunc_rule(self, opname, token, customize, next_token):
|
||||
def custom_classfunc_rule(self, opname, token, customize, next_token, is_pypy):
|
||||
|
||||
args_pos, args_kw = self.get_pos_kw(token)
|
||||
|
||||
@@ -338,10 +332,14 @@ class Python36Parser(Python35Parser):
|
||||
self.add_unique_rule('expr ::= async_call', token.kind, uniq_param, customize)
|
||||
|
||||
if opname.startswith('CALL_FUNCTION_KW'):
|
||||
self.addRule("expr ::= call_kw36", nop_func)
|
||||
values = 'expr ' * token.attr
|
||||
rule = "call_kw36 ::= expr {values} LOAD_CONST {opname}".format(**locals())
|
||||
self.add_unique_rule(rule, token.kind, token.attr, customize)
|
||||
if is_pypy:
|
||||
# PYPY doesn't follow CPython 3.6 CALL_FUNCTION_KW conventions
|
||||
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token, is_pypy)
|
||||
else:
|
||||
self.addRule("expr ::= call_kw36", nop_func)
|
||||
values = 'expr ' * token.attr
|
||||
rule = "call_kw36 ::= expr {values} LOAD_CONST {opname}".format(**locals())
|
||||
self.add_unique_rule(rule, token.kind, token.attr, customize)
|
||||
elif opname == 'CALL_FUNCTION_EX_KW':
|
||||
# Note: this doesn't exist in 3.7 and later
|
||||
self.addRule("""expr ::= call_ex_kw4
|
||||
@@ -406,7 +404,7 @@ class Python36Parser(Python35Parser):
|
||||
""", nop_func)
|
||||
pass
|
||||
else:
|
||||
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token)
|
||||
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize, next_token, is_pypy)
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
invalid = super(Python36Parser,
|
||||
@@ -443,7 +441,7 @@ if __name__ == '__main__':
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.6:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -141,31 +141,6 @@ class Python37Parser(Python36Parser):
|
||||
"""
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
self.remove_rules("""
|
||||
async_forelse_stmt ::= SETUP_LOOP expr
|
||||
GET_AITER
|
||||
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
|
||||
YIELD_FROM
|
||||
store
|
||||
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
|
||||
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
|
||||
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
|
||||
JUMP_ABSOLUTE END_FINALLY COME_FROM
|
||||
for_block POP_BLOCK
|
||||
else_suite COME_FROM_LOOP
|
||||
stmt ::= async_for_stmt36
|
||||
async_for_stmt36 ::= SETUP_LOOP expr
|
||||
GET_AITER
|
||||
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
|
||||
YIELD_FROM
|
||||
store
|
||||
POP_BLOCK JUMP_BACK COME_FROM_EXCEPT DUP_TOP
|
||||
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
|
||||
END_FINALLY continues COME_FROM
|
||||
POP_TOP POP_TOP POP_TOP POP_EXCEPT
|
||||
POP_TOP POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
""")
|
||||
super(Python37Parser, self).customize_grammar_rules(tokens, customize)
|
||||
|
||||
class Python37ParserSingle(Python37Parser, PythonParserSingle):
|
||||
@@ -173,22 +148,33 @@ class Python37ParserSingle(Python37Parser, PythonParserSingle):
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
# FIXME: DRY this with other parseXX.py routines
|
||||
p = Python37Parser()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
|
||||
if PYTHON_VERSION == 3.7:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
"""JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
|
||||
opcode_set = set(s.opc.opname).union(
|
||||
set(
|
||||
"""JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
|
||||
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
|
||||
LAMBDA_MARKER RETURN_LAST
|
||||
""".split()))
|
||||
""".split()
|
||||
)
|
||||
)
|
||||
remain_tokens = set(tokens) - opcode_set
|
||||
import re
|
||||
remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
|
||||
|
||||
remain_tokens = set([re.sub(r"_\d+$", "", t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens])
|
||||
remain_tokens = set(remain_tokens) - opcode_set
|
||||
print(remain_tokens)
|
||||
# print(sorted(p.rule2name.items()))
|
||||
import sys
|
||||
if len(sys.argv) > 1:
|
||||
from spark_parser.spark import rule2str
|
||||
for rule in sorted(p.rule2name.items()):
|
||||
print(rule2str(rule[0]))
|
||||
|
@@ -92,6 +92,7 @@ class Python38Parser(Python37Parser):
|
||||
for38 ::= expr get_iter store for_block JUMP_BACK
|
||||
for38 ::= expr for_iter store for_block JUMP_BACK
|
||||
for38 ::= expr for_iter store for_block JUMP_BACK POP_BLOCK
|
||||
for38 ::= expr for_iter store for_block
|
||||
|
||||
forelsestmt38 ::= expr for_iter store for_block POP_BLOCK else_suite
|
||||
forelselaststmt38 ::= expr for_iter store for_block POP_BLOCK else_suitec
|
||||
@@ -158,7 +159,7 @@ class Python38Parser(Python37Parser):
|
||||
super(Python38Parser, self).__init__(debug_parser)
|
||||
self.customized = {}
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
def remove_rules_38(self):
|
||||
self.remove_rules("""
|
||||
stmt ::= async_for_stmt37
|
||||
stmt ::= for
|
||||
@@ -225,7 +226,10 @@ class Python38Parser(Python37Parser):
|
||||
|
||||
|
||||
""")
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
super(Python37Parser, self).customize_grammar_rules(tokens, customize)
|
||||
self.remove_rules_38()
|
||||
self.check_reduce['ifstmt'] = 'tokens'
|
||||
self.check_reduce['whileTruestmt38'] = 'tokens'
|
||||
|
||||
@@ -233,6 +237,7 @@ class Python38Parser(Python37Parser):
|
||||
invalid = super(Python38Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
self.remove_rules_38()
|
||||
if invalid:
|
||||
return invalid
|
||||
if rule[0] == 'ifstmt':
|
||||
@@ -263,24 +268,34 @@ class Python38Parser(Python37Parser):
|
||||
class Python38ParserSingle(Python38Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
# Check grammar
|
||||
# FIXME: DRY this with other parseXX.py routines
|
||||
p = Python38Parser()
|
||||
p.remove_rules_38()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
|
||||
if PYTHON_VERSION == 3.8:
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
"""JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
|
||||
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
|
||||
LAMBDA_MARKER RETURN_LAST
|
||||
opcode_set = set(s.opc.opname).union(
|
||||
set(
|
||||
"""JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
|
||||
LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
|
||||
LAMBDA_MARKER RETURN_LAST
|
||||
""".split()))
|
||||
remain_tokens = set(tokens) - opcode_set
|
||||
import re
|
||||
remain_tokens = set([re.sub(r'_\d+$', '', t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
|
||||
|
||||
remain_tokens = set([re.sub(r"_\d+$", "", t) for t in remain_tokens])
|
||||
remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens])
|
||||
remain_tokens = set(remain_tokens) - opcode_set
|
||||
print(remain_tokens)
|
||||
# print(sorted(p.rule2name.items()))
|
||||
import sys
|
||||
if len(sys.argv) > 1:
|
||||
from spark_parser.spark import rule2str
|
||||
for rule in sorted(p.rule2name.items()):
|
||||
print(rule2str(rule[0]))
|
||||
|
@@ -7,6 +7,10 @@ if PYTHON3:
|
||||
intern = sys.intern
|
||||
|
||||
class SyntaxTree(spark_AST):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(SyntaxTree, self).__init__(*args, **kwargs)
|
||||
self.transformed_by = None
|
||||
|
||||
def isNone(self):
|
||||
"""An SyntaxTree None token. We can't use regular list comparisons
|
||||
because SyntaxTree token offsets might be different"""
|
||||
@@ -23,6 +27,11 @@ class SyntaxTree(spark_AST):
|
||||
if len(self) > 1:
|
||||
rv += " (%d)" % (len(self))
|
||||
enumerate_children = True
|
||||
if self.transformed_by is not None:
|
||||
if self.transformed_by is True:
|
||||
rv += " (transformed)"
|
||||
else:
|
||||
rv += " (transformed by %s)" % self.transformed_by
|
||||
rv = indent + rv
|
||||
indent += ' '
|
||||
i = 0
|
||||
@@ -45,3 +54,11 @@ class SyntaxTree(spark_AST):
|
||||
rv += "\n" + child
|
||||
i += 1
|
||||
return rv
|
||||
|
||||
def first_child(self):
|
||||
if len(self) > 0:
|
||||
child = self[0]
|
||||
if not isinstance(child, SyntaxTree):
|
||||
return child
|
||||
return self[0].first_child()
|
||||
return self
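# In other words (a gloss on the method above): first_child() walks the
# leftmost spine of the tree and returns the first leaf it finds (a Token
# rather than a SyntaxTree); on an empty SyntaxTree it returns the tree
# itself.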
|
||||
|
@@ -30,22 +30,44 @@ import sys
|
||||
from uncompyle6 import PYTHON3, IS_PYPY
|
||||
from uncompyle6.scanners.tok import Token
|
||||
import xdis
|
||||
from xdis.bytecode import (
|
||||
Bytecode, instruction_size, extended_arg_val, next_offset)
|
||||
from xdis.bytecode import Bytecode, instruction_size, extended_arg_val, next_offset
|
||||
from xdis.magics import canonic_python_version
|
||||
from xdis.util import code2num
|
||||
|
||||
# The byte code versions we support.
|
||||
# Note: these all have to be floats
|
||||
PYTHON_VERSIONS = frozenset((1.3, 1.4, 1.5,
|
||||
2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
|
||||
3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8))
|
||||
PYTHON_VERSIONS = frozenset(
|
||||
(
|
||||
1.0,
|
||||
1.1,
|
||||
1.3,
|
||||
1.4,
|
||||
1.5,
|
||||
1.6,
|
||||
2.1,
|
||||
2.2,
|
||||
2.3,
|
||||
2.4,
|
||||
2.5,
|
||||
2.6,
|
||||
2.7,
|
||||
3.0,
|
||||
3.1,
|
||||
3.2,
|
||||
3.3,
|
||||
3.4,
|
||||
3.5,
|
||||
3.6,
|
||||
3.7,
|
||||
3.8,
|
||||
)
|
||||
)
|
||||
|
||||
CANONIC2VERSION = dict((canonic_python_version[str(v)], v) for v in PYTHON_VERSIONS)
|
||||
|
||||
# Magic changed mid version for Python 3.5.2. Compatibility was added for
|
||||
# the older 3.5 interpreter magic.
|
||||
CANONIC2VERSION['3.5.2'] = 3.5
|
||||
CANONIC2VERSION["3.5.2"] = 3.5
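# Usage sketch, assuming these names stay module-level in uncompyle6.scanner:
from uncompyle6.scanner import CANONIC2VERSION, PYTHON_VERSIONS
assert 3.8 in PYTHON_VERSIONS
assert CANONIC2VERSION["3.5.2"] == 3.5  # late-3.5 magic folds into plain 3.5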
|
||||
|
||||
|
||||
# FIXME: DRY
|
||||
@@ -55,24 +77,28 @@ if PYTHON3:
|
||||
|
||||
def long(l):
|
||||
return l
|
||||
|
||||
|
||||
else:
|
||||
L65536 = long(65536) # NOQA
|
||||
L65536 = long(65536) # NOQA
|
||||
|
||||
|
||||
class Code(object):
|
||||
'''
|
||||
"""
|
||||
Class for representing code-objects.
|
||||
|
||||
This is similar to the original code object, but additionally
|
||||
the disassembled code is stored in the attribute '_tokens'.
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, co, scanner, classname=None):
|
||||
for i in dir(co):
|
||||
if i.startswith('co_'):
|
||||
if i.startswith("co_"):
|
||||
setattr(self, i, getattr(co, i))
|
||||
self._tokens, self._customize = scanner.ingest(co, classname)
|
||||
|
||||
class Scanner(object):
|
||||
|
||||
class Scanner(object):
|
||||
def __init__(self, version, show_asm=None, is_pypy=False):
|
||||
self.version = version
|
||||
self.show_asm = show_asm
|
||||
@@ -100,7 +126,7 @@ class Scanner(object):
|
||||
"""
|
||||
# FIXME: remove this when all subsidiary functions have been removed.
|
||||
# We should be able to get everything from the self.insts list.
|
||||
self.code = array('B', co.co_code)
|
||||
self.code = array("B", co.co_code)
|
||||
|
||||
bytecode = Bytecode(co, self.opc)
|
||||
self.build_prev_op()
|
||||
@@ -128,7 +154,7 @@ class Scanner(object):
|
||||
# 'List-map' which shows line number of current op and offset of
|
||||
# first op on following line, given offset of op as index
|
||||
lines = []
|
||||
LineTuple = namedtuple('LineTuple', ['l_no', 'next'])
|
||||
LineTuple = namedtuple("LineTuple", ["l_no", "next"])
|
||||
|
||||
# Iterate through available linestarts, and fill
|
||||
# the data for all code offsets encountered until
|
||||
@@ -171,14 +197,14 @@ class Scanner(object):
|
||||
goes forward.
|
||||
"""
|
||||
opname = self.get_inst(offset).opname
|
||||
if opname == 'JUMP_FORWARD':
|
||||
if opname == "JUMP_FORWARD":
|
||||
return True
|
||||
if opname != 'JUMP_ABSOLUTE':
|
||||
if opname != "JUMP_ABSOLUTE":
|
||||
return False
|
||||
return offset < self.get_target(offset)
|
||||
|
||||
def prev_offset(self, offset):
|
||||
return self.insts[self.offset2inst_index[offset]-1].offset
|
||||
return self.insts[self.offset2inst_index[offset] - 1].offset
|
||||
|
||||
def get_inst(self, offset):
|
||||
# Instructions can get moved as a result of EXTENDED_ARGS removal.
|
||||
@@ -205,7 +231,7 @@ class Scanner(object):
|
||||
return target
|
||||
|
||||
def get_argument(self, pos):
|
||||
arg = self.code[pos+1] + self.code[pos+2] * 256
|
||||
arg = self.code[pos + 1] + self.code[pos + 2] * 256
|
||||
return arg
|
||||
|
||||
def next_offset(self, op, offset):
|
||||
@@ -216,9 +242,9 @@ class Scanner(object):
|
||||
op = self.code[i]
|
||||
if op in self.JUMP_OPS:
|
||||
dest = self.get_target(i, op)
|
||||
print('%i\t%s\t%i' % (i, self.opname[op], dest))
|
||||
print("%i\t%s\t%i" % (i, self.opname[op], dest))
|
||||
else:
|
||||
print('%i\t%s\t' % (i, self.opname[op]))
|
||||
print("%i\t%s\t" % (i, self.opname[op]))
|
||||
|
||||
def first_instr(self, start, end, instr, target=None, exact=True):
|
||||
"""
|
||||
@@ -232,11 +258,9 @@ class Scanner(object):
|
||||
Return index to it or None if not found.
|
||||
"""
|
||||
code = self.code
|
||||
assert(start >= 0 and end <= len(code))
|
||||
assert start >= 0 and end <= len(code)
|
||||
|
||||
try:
|
||||
None in instr
|
||||
except:
|
||||
if not isinstance(instr, list):
|
||||
instr = [instr]
|
||||
|
||||
result_offset = None
|
||||
@@ -274,9 +298,7 @@ class Scanner(object):
|
||||
if not (start >= 0 and end <= len(code)):
|
||||
return None
|
||||
|
||||
try:
|
||||
None in instr
|
||||
except:
|
||||
if not isinstance(instr, list):
|
||||
instr = [instr]
|
||||
|
||||
result_offset = None
|
||||
@@ -287,7 +309,7 @@ class Scanner(object):
|
||||
op = code[offset]
|
||||
|
||||
if op == self.opc.EXTENDED_ARG:
|
||||
arg = code2num(code, offset+1) | extended_arg
|
||||
arg = code2num(code, offset + 1) | extended_arg
|
||||
extended_arg = extended_arg_val(self.opc, arg)
|
||||
continue
|
||||
|
||||
@@ -365,7 +387,7 @@ class Scanner(object):
|
||||
"""
|
||||
|
||||
code = self.code
|
||||
assert(start >= 0 and end <= len(code))
|
||||
assert start >= 0 and end <= len(code)
|
||||
|
||||
try:
|
||||
None in instr
|
||||
@@ -379,7 +401,7 @@ class Scanner(object):
|
||||
op = code[offset]
|
||||
|
||||
if op == self.opc.EXTENDED_ARG:
|
||||
arg = code2num(code, offset+1) | extended_arg
|
||||
arg = code2num(code, offset + 1) | extended_arg
|
||||
extended_arg = extended_arg_val(self.opc, arg)
|
||||
continue
|
||||
|
||||
@@ -423,8 +445,11 @@ class Scanner(object):
|
||||
last_was_extarg = False
|
||||
n = len(instructions)
|
||||
for i, inst in enumerate(instructions):
|
||||
if (inst.opname == 'EXTENDED_ARG'
|
||||
and i+1 < n and instructions[i+1].opname != 'MAKE_FUNCTION'):
|
||||
if (
|
||||
inst.opname == "EXTENDED_ARG"
|
||||
and i + 1 < n
|
||||
and instructions[i + 1].opname != "MAKE_FUNCTION"
|
||||
):
|
||||
last_was_extarg = True
|
||||
starts_line = inst.starts_line
|
||||
is_jump_target = inst.is_jump_target
|
||||
@@ -435,13 +460,15 @@ class Scanner(object):
|
||||
# j = self.stmts.index(inst.offset)
|
||||
# self.lines[j] = offset
|
||||
|
||||
new_inst = inst._replace(starts_line=starts_line,
|
||||
is_jump_target=is_jump_target,
|
||||
offset=offset)
|
||||
new_inst = inst._replace(
|
||||
starts_line=starts_line,
|
||||
is_jump_target=is_jump_target,
|
||||
offset=offset,
|
||||
)
|
||||
inst = new_inst
|
||||
if i < n:
|
||||
new_prev = self.prev_op[instructions[i].offset]
|
||||
j = instructions[i+1].offset
|
||||
j = instructions[i + 1].offset
|
||||
old_prev = self.prev_op[j]
|
||||
while self.prev_op[j] == old_prev and j < n:
|
||||
self.prev_op[j] = new_prev
|
||||
@@ -463,9 +490,12 @@ class Scanner(object):
|
||||
for i in ifs:
|
||||
# For each offset, if line number of current and next op
|
||||
# is the same
|
||||
if self.lines[i].l_no == self.lines[i+3].l_no:
|
||||
if self.lines[i].l_no == self.lines[i + 3].l_no:
|
||||
# Skip last op on line if it is some sort of POP_JUMP.
|
||||
if self.code[self.prev[self.lines[i].next]] in (self.opc.PJIT, self.opc.PJIF):
|
||||
if self.code[self.prev[self.lines[i].next]] in (
|
||||
self.opc.PJIT,
|
||||
self.opc.PJIF,
|
||||
):
|
||||
continue
|
||||
filtered.append(i)
|
||||
return filtered
|
||||
@@ -475,8 +505,8 @@ class Scanner(object):
|
||||
|
||||
def restrict_to_parent(self, target, parent):
|
||||
"""Restrict target to parent structure boundaries."""
|
||||
if not (parent['start'] < target < parent['end']):
|
||||
target = parent['end']
|
||||
if not (parent["start"] < target < parent["end"]):
|
||||
target = parent["end"]
|
||||
return target
|
||||
|
||||
def setTokenClass(self, tokenClass):
|
||||
@@ -484,6 +514,7 @@ class Scanner(object):
|
||||
self.Token = tokenClass
|
||||
return self.Token
|
||||
|
||||
|
||||
def parse_fn_counts(argc):
|
||||
return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)
|
||||
|
||||
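parse_fn_counts unpacks the packed MAKE_FUNCTION argument that CPython 3.0-3.5 uses: positional-default count in the low byte, keyword-only-default count in the next byte, annotation count above that. For example:

def parse_fn_counts(argc):
    return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)

# 3 positional defaults, 1 keyword-only default, 2 annotations:
print(parse_fn_counts(0x00020103))  # -> (3, 1, 2)
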
@@ -496,8 +527,10 @@ def get_scanner(version, is_pypy=False, show_asm=None):
|
||||
raise RuntimeError("Unknown Python version in xdis %s" % version)
|
||||
canonic_version = canonic_python_version[version]
|
||||
if canonic_version not in CANONIC2VERSION:
|
||||
raise RuntimeError("Unsupported Python version %s (canonic %s)"
|
||||
% (version, canonic_version))
|
||||
raise RuntimeError(
|
||||
"Unsupported Python version %s (canonic %s)"
|
||||
% (version, canonic_version)
|
||||
)
|
||||
version = CANONIC2VERSION[canonic_version]
|
||||
|
||||
# Pick up appropriate scanner
|
||||
@@ -505,24 +538,34 @@ def get_scanner(version, is_pypy=False, show_asm=None):
|
||||
v_str = "%s" % (int(version * 10))
|
||||
try:
|
||||
import importlib
|
||||
|
||||
if is_pypy:
|
||||
scan = importlib.import_module("uncompyle6.scanners.pypy%s" % v_str)
|
||||
else:
|
||||
scan = importlib.import_module("uncompyle6.scanners.scanner%s" % v_str)
|
||||
if False: print(scan) # Avoid unused scan
|
||||
if False:
|
||||
print(scan) # Avoid unused scan
|
||||
except ImportError:
|
||||
if is_pypy:
|
||||
exec("import uncompyle6.scanners.pypy%s as scan" % v_str,
|
||||
locals(), globals())
|
||||
exec(
|
||||
"import uncompyle6.scanners.pypy%s as scan" % v_str,
|
||||
locals(),
|
||||
globals(),
|
||||
)
|
||||
else:
|
||||
exec("import uncompyle6.scanners.scanner%s as scan" % v_str,
|
||||
locals(), globals())
|
||||
exec(
|
||||
"import uncompyle6.scanners.scanner%s as scan" % v_str,
|
||||
locals(),
|
||||
globals(),
|
||||
)
|
||||
if is_pypy:
|
||||
scanner = eval("scan.ScannerPyPy%s(show_asm=show_asm)" % v_str,
|
||||
locals(), globals())
|
||||
scanner = eval(
|
||||
"scan.ScannerPyPy%s(show_asm=show_asm)" % v_str, locals(), globals()
|
||||
)
|
||||
else:
|
||||
scanner = eval("scan.Scanner%s(show_asm=show_asm)" % v_str,
|
||||
locals(), globals())
|
||||
scanner = eval(
|
||||
"scan.Scanner%s(show_asm=show_asm)" % v_str, locals(), globals()
|
||||
)
|
||||
else:
|
||||
raise RuntimeError("Unsupported Python version %s" % version)
|
||||
return scanner
|
||||
@@ -530,8 +573,9 @@ def get_scanner(version, is_pypy=False, show_asm=None):
|
||||
|
||||
if __name__ == "__main__":
|
||||
import inspect, uncompyle6
|
||||
|
||||
co = inspect.currentframe().f_code
|
||||
# scanner = get_scanner('2.7.13', True)
|
||||
# scanner = get_scanner(sys.version[:5], False)
|
||||
scanner = get_scanner(uncompyle6.PYTHON_VERSION, IS_PYPY, True)
|
||||
tokens, customize = scanner.ingest(co, {}, show_asm='after')
|
||||
tokens, customize = scanner.ingest(co, {}, show_asm="after")
|
||||
|
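Typical use, adapted from the module's own __main__ block above (a sketch; the version passed must match the bytecode being ingested):

import inspect
import uncompyle6
from uncompyle6 import IS_PYPY
from uncompyle6.scanner import get_scanner

scanner = get_scanner(uncompyle6.PYTHON_VERSION, is_pypy=IS_PYPY)
co = inspect.currentframe().f_code
tokens, customize = scanner.ingest(co, show_asm="after")
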
@@ -9,8 +9,8 @@ make things easier for decompilation.
import uncompyle6.scanners.scanner32 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_32 as opc # is this right?
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
from xdis.opcodes import opcode_32pypy as opc
JUMP_OPs = opc.JUMP_OPS

# We base this off of 3.2
class ScannerPyPy32(scan.Scanner32):
@@ -19,4 +19,5 @@ class ScannerPyPy32(scan.Scanner32):
        # pypy 3.2 and 3.2
        scan.Scanner32.__init__(self, show_asm, is_pypy=True)
        self.version = 3.2
        self.opc = opc
        return

uncompyle6/scanners/pypy33.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Copyright (c) 2019 by Rocky Bernstein
"""
Python PyPy 3.3 decompiler scanner.

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.
"""

import uncompyle6.scanners.scanner33 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_33pypy as opc
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

# We base this off of 3.3
class ScannerPyPy33(scan.Scanner33):
    def __init__(self, show_asm):
        # There are no differences in initialization between
        # pypy 3.3 and 3.3
        scan.Scanner33.__init__(self, show_asm, is_pypy=True)
        self.version = 3.3
        self.opc = opc
        return
uncompyle6/scanners/scanner10.py (new file, 35 lines)
@@ -0,0 +1,35 @@
# Copyright (c) 2019 by Rocky Bernstein
"""
Python 1.0 bytecode decompiler massaging.

This massages tokenized 1.0 bytecode to make it more amenable for
grammar parsing.
"""

import uncompyle6.scanners.scanner11 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_10

JUMP_OPS = opcode_10.JUMP_OPS

# We base this off of 1.1 instead of the other way around
# because we cleaned things up this way.
# The history is that 2.7 support is the cleanest,
# then from that we got 2.6 and so on.
class Scanner10(scan.Scanner11):
    def __init__(self, show_asm=False):
        scan.Scanner11.__init__(self, show_asm)
        self.opc = opcode_10
        self.opname = opcode_10.opname
        self.version = 1.0
        return

    # def ingest(self, co, classname=None, code_objects={}, show_asm=None):
    #     tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
    #     tokens = [t for t in tokens if t.kind != 'SET_LINENO']

    #     # for t in tokens:
    #     #     print(t)
    #
    #     return tokens, customize
uncompyle6/scanners/scanner11.py (new file, 35 lines)
@@ -0,0 +1,35 @@
# Copyright (c) 2019 by Rocky Bernstein
"""
Python 1.1 bytecode decompiler massaging.

This massages tokenized 1.1 bytecode to make it more amenable for
grammar parsing.
"""

import uncompyle6.scanners.scanner13 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_11

JUMP_OPS = opcode_11.JUMP_OPS

# We base this off of 1.2 instead of the other way around
# because we cleaned things up this way.
# The history is that 2.7 support is the cleanest,
# then from that we got 2.6 and so on.
class Scanner11(scan.Scanner13):  # no scanner 1.2
    def __init__(self, show_asm=False):
        scan.Scanner13.__init__(self, show_asm)
        self.opc = opcode_11
        self.opname = opcode_11.opname
        self.version = 1.1
        return

    # def ingest(self, co, classname=None, code_objects={}, show_asm=None):
    #     tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
    #     tokens = [t for t in tokens if t.kind != 'SET_LINENO']

    #     # for t in tokens:
    #     #     print(t)
    #
    #     return tokens, customize
uncompyle6/scanners/scanner12.py (new file, 36 lines)
@@ -0,0 +1,36 @@
# Copyright (c) 2019 by Rocky Bernstein
"""
Python 1.2 bytecode decompiler massaging.

This massages tokenized 1.2 bytecode to make it more amenable for
grammar parsing.

"""

import uncompyle6.scanners.scanner13 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_11

JUMP_OPS = opcode_11.JUMP_OPS

# We base this off of 1.3 instead of the other way around
# because we cleaned things up this way.
# The history is that 2.7 support is the cleanest,
# then from that we got 2.6 and so on.
class Scanner12(scan.Scanner13):
    def __init__(self, show_asm=False):
        scan.Scanner14.__init__(self, show_asm)
        self.opc = opcode_11
        self.opname = opcode_11.opname
        self.version = 1.2  # Note: is the same as 1.1 bytecode
        return

    # def ingest(self, co, classname=None, code_objects={}, show_asm=None):
    #     tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
    #     tokens = [t for t in tokens if t.kind != 'SET_LINENO']

    #     # for t in tokens:
    #     #     print(t)
    #
    #     return tokens, customize
@@ -7,10 +7,12 @@ grammar parsing.
"""

import uncompyle6.scanners.scanner14 as scan

# from uncompyle6.scanners.scanner26 import ingest as ingest26

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_13

JUMP_OPS = opcode_13.JUMP_OPS

# We base this off of 1.4 instead of the other way around

uncompyle6/scanners/scanner16.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# Copyright (c) 2019 by Rocky Bernstein
"""
Python 1.6 bytecode decompiler massaging.

This massages tokenized 1.6 bytecode to make it more amenable for
grammar parsing.
"""

import uncompyle6.scanners.scanner21 as scan
# from uncompyle6.scanners.scanner26 import ingest as ingest26

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_16
JUMP_OPS = opcode_16.JUMP_OPS

# We base this off of 2.2 instead of the other way around
# because we cleaned things up this way.
# The history is that 2.7 support is the cleanest,
# then from that we got 2.6 and so on.
class Scanner16(scan.Scanner21):
    def __init__(self, show_asm=False):
        scan.Scanner21.__init__(self, show_asm)
        self.opc = opcode_16
        self.opname = opcode_16.opname
        self.version = 1.6
        self.genexpr_name = '<generator expression>'
        return

    def ingest(self, co, classname=None, code_objects={}, show_asm=None):
        """
        Pick out tokens from an uncompyle6 code object, and transform them,
        returning a list of uncompyle6 Token's.

        The transformations are made to assist the deparsing grammar.
        """
        tokens, customize = scan.Scanner21.ingest(self, co, classname, code_objects, show_asm)
        for t in tokens:
            if t.op == self.opc.UNPACK_LIST:
                t.kind = 'UNPACK_LIST_%d' % t.attr
            pass
        return tokens, customize

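The ingest override above folds an instruction's argument into the token name (UNPACK_LIST 2 becomes UNPACK_LIST_2), so the grammar can match a fixed arity. A toy illustration, with a hypothetical Tok class and a stand-in opcode number:

class Tok:
    def __init__(self, kind, op, attr):
        self.kind, self.op, self.attr = kind, op, attr

UNPACK_LIST = 92  # stand-in opcode number, for illustration only
tokens = [Tok("UNPACK_LIST", UNPACK_LIST, 2)]
for t in tokens:
    if t.op == UNPACK_LIST:
        t.kind = "UNPACK_LIST_%d" % t.attr
print(tokens[0].kind)  # -> UNPACK_LIST_2
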
@@ -257,22 +257,31 @@ class Scanner3(Scanner):
                # RAISE_VARARGS then we have a "raise" statement
                # else we have an "assert" statement.
                if self.version == 3.0:
                    # There is a an implied JUMP_IF_TRUE that we are not testing for (yet?) here
                    # Like 2.6, 3.0 doesn't have POP_JUMP_IF... so we have
                    # to go through more machinations
                    assert_can_follow = inst.opname == "POP_TOP" and i + 1 < n
                    if assert_can_follow:
                        prev_inst = self.insts[i - 1]
                        assert_can_follow = (
                            prev_inst.opname in ("JUMP_IF_TRUE", "JUMP_IF_FALSE")
                            and i + 1 < n )
                        jump_if_inst = prev_inst
                else:
                    assert_can_follow = inst.opname == "POP_JUMP_IF_TRUE" and i + 1 < n
                    assert_can_follow = (
                        inst.opname in ("POP_JUMP_IF_TRUE", "POP_JUMP_IF_FALSE")
                        and i + 1 < n
                    )
                    jump_if_inst = inst
                if assert_can_follow:
                    next_inst = self.insts[i + 1]
                    if (
                        next_inst.opname == "LOAD_GLOBAL"
                        and next_inst.argval == "AssertionError"
                        and inst.argval
                        and jump_if_inst.argval
                    ):
                        raise_idx = self.offset2inst_index[self.prev_op[inst.argval]]
                        raise_idx = self.offset2inst_index[self.prev_op[jump_if_inst.argval]]
                        raise_inst = self.insts[raise_idx]
                        if raise_inst.opname.startswith(
                            "RAISE_VARARGS"
                        ):
                        if raise_inst.opname.startswith("RAISE_VARARGS"):
                            self.load_asserts.add(next_inst.offset)
                        pass
                    pass
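The bytecode pattern being matched is easy to see with dis; on CPython 3.6-3.8 an assert compiles to a conditional jump over a LOAD_GLOBAL of AssertionError followed by RAISE_VARARGS (3.9+ uses LOAD_ASSERTION_ERROR instead):

import dis

dis.dis(compile("assert x", "<s>", "exec"))
# 3.6-3.8 output (abridged):
#   LOAD_NAME            x
#   POP_JUMP_IF_TRUE     ...
#   LOAD_GLOBAL          AssertionError
#   RAISE_VARARGS        1
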
@@ -428,11 +437,16 @@ class Scanner3(Scanner):
                else:
                    opname = "%s_%d" % (opname, pos_args)

            elif self.is_pypy and opname == "JUMP_IF_NOT_DEBUG":
                # The value in the dict is in special cases in semantic actions, such
                # as JUMP_IF_NOT_DEBUG. The value is not used in these cases, so we put
                # in arbitrary value 0.
                customize[opname] = 0
            elif self.is_pypy and opname in ("JUMP_IF_NOT_DEBUG", "CALL_FUNCTION"):
                if opname == "JUMP_IF_NOT_DEBUG":
                    # The value in the dict is in special cases in semantic actions, such
                    # as JUMP_IF_NOT_DEBUG. The value is not used in these cases, so we put
                    # in arbitrary value 0.
                    customize[opname] = 0
                elif self.version >= 3.6 and argval > 255:
                    opname = "CALL_FUNCTION_KW"
                pass

            elif opname == "UNPACK_EX":
                # FIXME: try with scanner and parser by
                # changing argval
@@ -468,6 +482,12 @@ class Scanner3(Scanner):
                    and self.insts[i + 1].opname == "JUMP_FORWARD"
                )

                if (self.version == 3.0 and self.insts[i + 1].opname == "JUMP_FORWARD"
                    and not is_continue):
                    target_prev = self.offset2inst_index[self.prev_op[target]]
                    is_continue = (
                        self.insts[target_prev].opname == "SETUP_LOOP")

                if is_continue or (
                    inst.offset in self.stmts
                    and (

@@ -1,4 +1,4 @@
# Copyright (c) 2015-2018 by Rocky Bernstein
# Copyright (c) 2015-2019 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -28,7 +28,7 @@ JUMP_OPS = opc.JUMP_OPS
from uncompyle6.scanners.scanner3 import Scanner3
class Scanner33(Scanner3):

    def __init__(self, show_asm=False):
    def __init__(self, show_asm=False, is_pypy=False):
        Scanner3.__init__(self, 3.3, show_asm)
        return
    pass

@@ -418,7 +418,6 @@ TABLE_DIRECT = {
    'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),

    'pass': ( '%|pass\n', ),
    'STORE_FAST': ( '%{pattr}', ),
    'kv': ( '%c: %c', 3, 1 ),
    'kv2': ( '%c: %c', 1, 2 ),
    'import': ( '%|import %c\n', 2),

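TABLE_DIRECT entries pair a format string with child indices: %| emits the current indentation, %c recursively renders the child at the next index, %{attr} interpolates a token attribute, and %+/%- adjust the indent level. A toy expander for just %| and %c, showing how 'kv': ('%c: %c', 3, 1) turns into "key: value" (an illustrative sketch, not the real engine):

def expand(entry, children, indent="    "):
    template, args = entry[0], list(entry[1:])
    out, i = "", 0
    while i < len(template):
        if template[i] == "%":
            spec = template[i + 1]
            if spec == "|":
                out += indent                  # current indentation
            elif spec == "c":
                out += children[args.pop(0)]   # render child at next index
            i += 2
        else:
            out += template[i]
            i += 1
    return out

print(expand(("%|%c: %c", 3, 1), {1: "value", 3: "key"}))  # -> "    key: value"
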
@@ -16,20 +16,27 @@
"""Isolate Python version-specific semantic actions here.
"""

from uncompyle6.semantics.consts import (
    TABLE_R, TABLE_DIRECT)
from uncompyle6.semantics.consts import PRECEDENCE, TABLE_R, TABLE_DIRECT

from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.scanners.tok import Token


def customize_for_version(self, is_pypy, version):
    if is_pypy:
        ########################
        # PyPy changes
        #######################
        TABLE_DIRECT.update({
            'assert_pypy': ( '%|assert %c\n' , 1 ),
            'assert2_pypy': ( '%|assert %c, %c\n' , 1, 4 ),
            'assert_pypy': ( '%|assert %c\n' , (1, 'assert_expr') ),
            # This is as a result of an if transoration
            'assert0_pypy': ( '%|assert %c\n' , (0, 'assert_expr') ),

            'assert_not_pypy': ( '%|assert not %c\n' , (1, 'assert_exp') ),
            'assert2_not_pypy': ( '%|assert not %c, %c\n' , (1, 'assert_exp'),
                                  (4, 'expr') ),
            'assert2_pypy': ( '%|assert %c, %c\n' , (1, 'assert_expr'),
                              (4, 'expr') ),
            'try_except_pypy': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
            'tryfinallystmt_pypy': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 3 ),
            'assign3_pypy': ( '%|%c, %c, %c = %c, %c, %c\n', 5, 4, 3, 0, 1, 2 ),
@@ -40,98 +47,130 @@ def customize_for_version(self, is_pypy, version):
        # Without PyPy
        #######################
        TABLE_DIRECT.update({
            'assert': ( '%|assert %c\n' , 0 ),
            'assert2': ( '%|assert %c, %c\n' , 0, 3 ),
            'try_except': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
            'assign2': ( '%|%c, %c = %c, %c\n',
                         3, 4, 0, 1 ),
            'assign3': ( '%|%c, %c, %c = %c, %c, %c\n',
                         5, 6, 7, 0, 1, 2 ),
            })
        if version >= 3.0:
            "assert": ("%|assert %c\n", (0, "assert_expr")),
            "assert2": ("%|assert %c, %c\n", (0, "assert_expr"), 3),

            # Created only via transformation
            "assertnot": ("%|assert not %p\n", (0, PRECEDENCE['unary_not'])),
            "assert2not": ( "%|assert not %p, %c\n" ,
                            (0, PRECEDENCE['unary_not']), 3 ),

            "assign2": ("%|%c, %c = %c, %c\n", 3, 4, 0, 1),
            "assign3": ("%|%c, %c, %c = %c, %c, %c\n", 5, 6, 7, 0, 1, 2),
            "try_except": ("%|try:\n%+%c%-%c\n\n", 1, 3),
        })
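The %p spec in the new assertnot entry renders a child at a given precedence, so parentheses appear exactly when the operand binds looser than unary not. The difference matters:

a, b = False, True
print(not (a or b))  # False: "or" binds looser, so the parentheses are required
print(not a or b)    # True: without them this parses as (not a) or b
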
    if version >= 3.0:
        if version >= 3.2:
            TABLE_DIRECT.update({
                'del_deref_stmt': ( '%|del %c\n', 0),
                'DELETE_DEREF': ( '%{pattr}', 0 ),
                })
            TABLE_DIRECT.update(
                {"del_deref_stmt": ("%|del %c\n", 0), "DELETE_DEREF": ("%{pattr}", 0)}
            )
        from uncompyle6.semantics.customize3 import customize_for_version3

        customize_for_version3(self, version)
    else:  # < 3.0
        TABLE_DIRECT.update({
            'except_cond3' : ( '%|except %c, %c:\n',
                               (1, 'expr'), (-2, 'store') )
            })
        TABLE_DIRECT.update(
            {"except_cond3": ("%|except %c, %c:\n", (1, "expr"), (-2, "store"))}
        )
        if 2.4 <= version <= 2.6:
            TABLE_DIRECT.update({
                'comp_for': ( ' for %c in %c', 3, 1 ),
                })
            TABLE_DIRECT.update({"comp_for": (" for %c in %c", 3, 1)})
        else:
            TABLE_DIRECT.update({
                'comp_for': ( ' for %c in %c%c', 2, 0, 3 ),
                })
            TABLE_DIRECT.update({"comp_for": (" for %c in %c%c", 2, 0, 3)})

        if version >= 2.5:
            from uncompyle6.semantics.customize25 import customize_for_version25

            customize_for_version25(self, version)

            if version >= 2.6:
                from uncompyle6.semantics.customize26_27 import customize_for_version26_27
                from uncompyle6.semantics.customize26_27 import (
                    customize_for_version26_27,
                )

                customize_for_version26_27(self, version)
                pass
        else:  # < 2.5
            global NAME_MODULE
            NAME_MODULE = SyntaxTree('stmt',
                [ SyntaxTree('assign',
                    [ SyntaxTree('expr',
                        [Token('LOAD_GLOBAL', pattr='__name__',
                               offset=0, has_arg=True)]),
                      SyntaxTree('store',
                        [ Token('STORE_NAME', pattr='__module__',
                                offset=3, has_arg=True)])
                    ])])
            TABLE_DIRECT.update({
                'importmultiple': ( '%|import %c%c\n', 2, 3),
                'import_cont' : ( ', %c', 2),
                'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-',
                                    (1, 'suite_stmts_opt') ,
                                    (5, 'suite_stmts_opt') )
                })
            NAME_MODULE = SyntaxTree(
                "stmt",
                [
                    SyntaxTree(
                        "assign",
                        [
                            SyntaxTree(
                                "expr",
                                [
                                    Token(
                                        "LOAD_GLOBAL",
                                        pattr="__name__",
                                        offset=0,
                                        has_arg=True,
                                    )
                                ],
                            ),
                            SyntaxTree(
                                "store",
                                [
                                    Token(
                                        "STORE_NAME",
                                        pattr="__module__",
                                        offset=3,
                                        has_arg=True,
                                    )
                                ],
                            ),
                        ],
                    )
                ],
            )
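For reference, the NAME_MODULE tree above is a template of the assignment Python 2 compiles implicitly at the top of a class body, so the decompiler has a tree to compare against when it sees one:

__module__ = __name__
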
            TABLE_DIRECT.update(
                {
                    "importmultiple": ("%|import %c%c\n", 2, 3),
                    "import_cont": (", %c", 2),
                    "tryfinallystmt": (
                        "%|try:\n%+%c%-%|finally:\n%+%c%-",
                        (1, "suite_stmts_opt"),
                        (5, "suite_stmts_opt"),
                    ),
                }
            )
            if version == 2.4:

                def n_iftrue_stmt24(node):
                    self.template_engine(('%c', 0), node)
                    self.template_engine(("%c", 0), node)
                    self.default(node)
                    self.prune()

                self.n_iftrue_stmt24 = n_iftrue_stmt24
            else:  # version <= 2.3:
                TABLE_DIRECT.update({
                    'if1_stmt': ( '%|if 1\n%+%c%-', 5 )
                    })
                TABLE_DIRECT.update({"if1_stmt": ("%|if 1\n%+%c%-", 5)})
                if version <= 2.1:
                    TABLE_DIRECT.update({
                        'importmultiple': ( '%c', 2 ),
                        # FIXME: not quite right. We have indiividual imports
                        # when there is in fact one: "import a, b, ..."
                        'imports_cont': ( '%C%,', (1, 100, '\n') ),
                        })
                    TABLE_DIRECT.update(
                        {
                            "importmultiple": ("%c", 2),
                            # FIXME: not quite right. We have indiividual imports
                            # when there is in fact one: "import a, b, ..."
                            "imports_cont": ("%C%,", (1, 100, "\n")),
                        }
                    )
                    pass
                pass
            pass  # < 2.5
            pass  # < 2.5

        # < 3.0 continues

        TABLE_R.update({
            'STORE_SLICE+0': ( '%c[:]', 0 ),
            'STORE_SLICE+1': ( '%c[%p:]', 0, (1, -1) ),
            'STORE_SLICE+2': ( '%c[:%p]', 0, (1, -1) ),
            'STORE_SLICE+3': ( '%c[%p:%p]', 0, (1, -1), (2, -1) ),
            'DELETE_SLICE+0': ( '%|del %c[:]\n', 0 ),
            'DELETE_SLICE+1': ( '%|del %c[%c:]\n', 0, 1 ),
            'DELETE_SLICE+2': ( '%|del %c[:%c]\n', 0, 1 ),
            'DELETE_SLICE+3': ( '%|del %c[%c:%c]\n', 0, 1, 2 ),
            })
        TABLE_DIRECT.update({
            'raise_stmt2': ( '%|raise %c, %c\n', 0, 1),
            })
        TABLE_R.update(
            {
                "STORE_SLICE+0": ("%c[:]", 0),
                "STORE_SLICE+1": ("%c[%p:]", 0, (1, -1)),
                "STORE_SLICE+2": ("%c[:%p]", 0, (1, -1)),
                "STORE_SLICE+3": ("%c[%p:%p]", 0, (1, -1), (2, -1)),
                "DELETE_SLICE+0": ("%|del %c[:]\n", 0),
                "DELETE_SLICE+1": ("%|del %c[%c:]\n", 0, 1),
                "DELETE_SLICE+2": ("%|del %c[:%c]\n", 0, 1),
                "DELETE_SLICE+3": ("%|del %c[%c:%c]\n", 0, 1, 2),
            }
        )
        TABLE_DIRECT.update({"raise_stmt2": ("%|raise %c, %c\n", 0, 1)})

        # exec as a built-in statement is only in Python 2.x
        def n_exec_stmt(node):
@@ -139,17 +178,19 @@ def customize_for_version(self, is_pypy, version):
            exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
            exec_stmt ::= expr exprlist EXEC_STMT
            """
            self.write(self.indent, 'exec ')
            self.write(self.indent, "exec ")
            self.preorder(node[0])
            if not node[1][0].isNone():
                sep = ' in '
                sep = " in "
                for subnode in node[1]:
                    self.write(sep); sep = ", "
                    self.write(sep)
                    sep = ", "
                    self.preorder(subnode)
            self.println()
            self.prune() # stop recursing
            self.prune()  # stop recursing

        self.n_exec_smt = n_exec_stmt

        pass # < 3.0
        pass  # < 3.0

        return

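The grammar in n_exec_stmt's docstring covers the Python 2 exec-statement forms (Python 2 only; in Python 3 exec is an ordinary function and this grammar goes away):

# Python 2 only:
g, l = {}, {}
exec "x = 1"
exec "x = 1" in g
exec "x = 1" in g, l
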
@@ -40,6 +40,7 @@ def customize_for_version26_27(self, version):
        'testtrue_then': ( 'not %p', (0, 22) ),
    })

    # FIXME: this should be a transformation
    def n_call(node):
        mapping = self._get_mapping(node)
        key = node

@@ -37,7 +37,6 @@ def customize_for_version3(self, version):
            (0, "expr"),
            (4, "expr"),
        ),
        "except_cond2": ("%|except %c as %c:\n", 1, 5),
        "function_def_annotate": ("\n\n%|def %c%c\n", -1, 0),
        # When a generator is a single parameter of a function,
        # it doesn't need the surrounding parenethesis.
@@ -182,6 +181,19 @@ def customize_for_version3(self, version):
        # the iteration variable. These rules we can ignore
        # since we pick up the iteration variable some other way and
        # we definitely don't include in the source _[dd].
        TABLE_DIRECT.update({
            "ifstmt30": ( "%|if %c:\n%+%c%-",
                          (0, "testfalse_then"),
                          (1, "_ifstmts_jump30") ),
            "ifnotstmt30": ( "%|if not %c:\n%+%c%-",
                             (0, "testtrue_then"),
                             (1, "_ifstmts_jump30") ),
            "try_except30": ( "%|try:\n%+%c%-%c\n\n",
                              (1, "suite_stmts_opt"),
                              (4, "except_handler") ),
        })

        def n_comp_iter(node):
            if node[0] == "expr":
                n = node[0][0]
@@ -198,11 +210,14 @@ def customize_for_version3(self, version):
    # FIXME: perhaps this can be folded into the 3.4+ case?
    def n_yield_from(node):
        assert node[0] == "expr"
        assert node[0][0] == "get_iter"
        # Skip over yield_from.expr.get_iter which adds an
        # extra iter(). Maybe we can do in tranformation phase instead?
        template = ("yield from %c", (0, "expr"))
        self.template_engine(template, node[0][0])
        if node[0][0] == "get_iter":
            # Skip over yield_from.expr.get_iter which adds an
            # extra iter(). Maybe we can do in tranformation phase instead?
            template = ("yield from %c", (0, "expr"))
            self.template_engine(template, node[0][0])
        else:
            template = ("yield from %c", (0, "attribute"))
            self.template_engine(template, node[0][0][0])
        self.prune()

    self.n_yield_from = n_yield_from

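The get_iter special case exists because CPython wraps the operand of yield from in an implicit iter() call; printing the inner expr keeps the round trip faithful:

def gen(x):
    yield from x  # decompiles back as written, not as "yield from iter(x)"

print(list(gen([1, 2])))  # -> [1, 2]
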
@@ -56,7 +56,65 @@ def customize_for_version35(self, version):
            node.kind == 'async_call'
            self.prune()
        self.n_async_call = async_call
        self.n_build_list_unpack = self.n_list

        def n_build_list_unpack(node):
            """
            prettyprint a list or tuple
            """
            p = self.prec
            self.prec = 100
            lastnode = node.pop()
            lastnodetype = lastnode.kind

            # If this build list is inside a CALL_FUNCTION_VAR,
            # then the first * has already been printed.
            # Until I have a better way to check for CALL_FUNCTION_VAR,
            # will assume that if the text ends in *.
            last_was_star = self.f.getvalue().endswith("*")

            if lastnodetype.startswith("BUILD_LIST"):
                self.write("[")
                endchar = "]"

            flat_elems = flatten_list(node)

            self.indent_more(INDENT_PER_LEVEL)
            sep = ""
            for elem in flat_elems:
                if elem in ("ROT_THREE", "EXTENDED_ARG"):
                    continue
                assert elem == "expr"
                line_number = self.line_number
                use_star = True
                value = self.traverse(elem)
                if value.startswith("("):
                    assert value.endswith(")")
                    use_star = False
                    value = value[1:-1].rstrip(" ")  # Remove starting '(' and trailing ')' and additional spaces
                    if value == "":
                        pass
                    else:
                        if value.endswith(","):  # if args has only one item
                            value = value[:-1]
                if line_number != self.line_number:
                    sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
                else:
                    if sep != "":
                        sep += " "
                if not last_was_star and use_star:
                    sep += "*"
                    pass
                else:
                    last_was_star = False
                self.write(sep, value)
                sep = ","
            self.write(endchar)
            self.indent_less(INDENT_PER_LEVEL)

            self.prec = p
            self.prune()
            return
        self.n_build_list_unpack = n_build_list_unpack

        def n_call(node):
            mapping = self._get_mapping(node)

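n_build_list_unpack prints the BUILD_LIST_UNPACK opcode that Python 3.5-3.8 emits for starred displays; the loop above re-inserts the * prefixes that the bytecode flattened away. The construct it round-trips (on 3.9+, dis shows LIST_EXTEND instead):

import dis

a, b = [1], [2]
print([*a, *b])  # -> [1, 2]
dis.dis(compile("[*a, *b]", "<s>", "eval"))  # BUILD_LIST_UNPACK on 3.5-3.8
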
@@ -495,7 +495,10 @@ def customize_for_version36(self, version):
            # bytecode, the escaping of the braces has been
            # removed. So we need to put back the braces escaping in
            # reconstructing the source.
            assert expr[0] == 'LOAD_STR'
            assert (
                expr[0] == "LOAD_STR" or
                expr[0] == "LOAD_CONST" and isinstance(expr[0].attr, unicode)
            )
            value = value.replace("{", "{{").replace("}", "}}")

            # Remove leading quotes

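The brace re-escaping is needed because literal braces are written doubled in f-string source but stored undoubled in the compiled constant:

print(f"{{x}}")  # -> {x}: doubled in source, single in the constant
value = "a{b}c"  # what the stored constant looks like
print(value.replace("{", "{{").replace("}", "}}"))  # -> a{{b}}c, ready to re-emit
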
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
# Copyright (c) 2015-2018 by Rocky Bernstein
# Copyright (c) 2015-2019 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
# This program is free software: you can redistribute it and/or modify
@@ -23,8 +23,11 @@ from uncompyle6 import PYTHON3
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.parser import ParserError as ParserError2
from uncompyle6.semantics.helper import (
    print_docstring, find_all_globals, find_globals_and_nonlocals, find_none
)
    print_docstring,
    find_all_globals,
    find_globals_and_nonlocals,
    find_none,
)

if PYTHON3:
    from itertools import zip_longest
@@ -35,8 +38,9 @@ from uncompyle6.show import maybe_show_tree_param_default

# FIXME: DRY the below code...

def make_function3_annotate(self, node, is_lambda, nested=1,
                            code_node=None, annotate_last=-1):
def make_function3_annotate(
    self, node, is_lambda, nested=1, code_node=None, annotate_last=-1
):
    """
    Dump function defintion, doc string, and function
    body. This code is specialized for Python 3"""
@@ -47,33 +51,35 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
        - handle format tuple parameters
        """
        if default:
            value = self.traverse(default, indent='')
            value = self.traverse(default, indent="")
            maybe_show_tree_param_default(self, name, value)
            result = '%s=%s' % (name, value)
            if result[-2:] == '= ':  # default was 'LOAD_CONST None'
                result += 'None'
            result = "%s=%s" % (name, value)
            if result[-2:] == "= ":  # default was 'LOAD_CONST None'
                result += "None"
            return result
        else:
            return name

    # MAKE_FUNCTION_... or MAKE_CLOSURE_...
    assert node[-1].kind.startswith('MAKE_')
    assert node[-1].kind.startswith("MAKE_")

    annotate_tuple = None
    for annotate_last in range(len(node)-1, -1, -1):
        if node[annotate_last] == 'annotate_tuple':
    for annotate_last in range(len(node) - 1, -1, -1):
        if node[annotate_last] == "annotate_tuple":
            annotate_tuple = node[annotate_last]
            break
    annotate_args = {}

    if (annotate_tuple == 'annotate_tuple'
        and annotate_tuple[0] in ('LOAD_CONST', 'LOAD_NAME')
        and isinstance(annotate_tuple[0].attr, tuple)):
    if (
        annotate_tuple == "annotate_tuple"
        and annotate_tuple[0] in ("LOAD_CONST", "LOAD_NAME")
        and isinstance(annotate_tuple[0].attr, tuple)
    ):
        annotate_tup = annotate_tuple[0].attr
        i = -1
        j = annotate_last-1
        j = annotate_last - 1
        l = -len(node)
        while j >= l and node[j].kind in ('annotate_arg', 'annotate_tuple'):
        while j >= l and node[j].kind in ("annotate_arg", "annotate_tuple"):
            annotate_args[annotate_tup[i]] = node[j][0]
            i -= 1
            j -= 1
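What that bookkeeping reconstructs is visible on any annotated function; the names and values land in __annotations__, with return handled separately from the parameters (hence the annotate_argc adjustment below):

def f(x: int, y: "str" = "hi") -> bool:
    return bool(x)

print(f.__annotations__)
# -> {'x': <class 'int'>, 'y': 'str', 'return': <class 'bool'>}
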
@@ -81,20 +87,20 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
    args_node = node[-1]
    if isinstance(args_node.attr, tuple):
        # positional args are before kwargs
        defparams = node[:args_node.attr[0]]
        pos_args, kw_args, annotate_argc = args_node.attr
        if 'return' in annotate_args.keys():
        defparams = node[: args_node.attr[0]]
        pos_args, kw_args, annotate_argc = args_node.attr
        if "return" in annotate_args.keys():
            annotate_argc = len(annotate_args) - 1
    else:
        defparams = node[:args_node.attr]
        kw_args = 0
        defparams = node[: args_node.attr]
        kw_args = 0
        annotate_argc = 0
        pass

    annotate_dict = {}

    for name in annotate_args.keys():
        n = self.traverse(annotate_args[name], indent='')
        n = self.traverse(annotate_args[name], indent="")
        annotate_dict[name] = n

    if 3.0 <= self.version <= 3.2:
@@ -105,7 +111,7 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
    lambda_index = None

    if lambda_index and is_lambda and iscode(node[lambda_index].attr):
        assert node[lambda_index].kind == 'LOAD_LAMBDA'
        assert node[lambda_index].kind == "LOAD_LAMBDA"
        code = node[lambda_index].attr
    else:
        code = code_node.attr
@@ -119,13 +125,15 @@ def make_function3_annotate(self, node, is_lambda, nested=1,

    paramnames = list(code.co_varnames[:argc])
    if kwonlyargcount > 0:
        kwargs = list(code.co_varnames[argc:argc+kwonlyargcount])
        kwargs = list(code.co_varnames[argc : argc + kwonlyargcount])

    try:
        ast = self.build_ast(code._tokens,
                             code._customize,
                             is_lambda = is_lambda,
                             noneInNames = ('None' in code.co_names))
        ast = self.build_ast(
            code._tokens,
            code._customize,
            is_lambda=is_lambda,
            noneInNames=("None" in code.co_names),
        )
    except (ParserError, ParserError2) as p:
        self.write(str(p))
        if not self.tolerate_errors:
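The co_varnames slicing above relies on CPython's layout: positional parameters first, then keyword-only parameters, then *args/**kwargs and locals:

def f(a, b=1, *args, c, d=2, **kw):
    e = 0
    return e

co = f.__code__
argc = co.co_argcount                                     # 2
print(co.co_varnames[:argc])                              # -> ('a', 'b')
print(co.co_varnames[argc:argc + co.co_kwonlyargcount])   # -> ('c', 'd')
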
@@ -142,18 +150,18 @@ def make_function3_annotate(self, node, is_lambda, nested=1,

    last_line = self.f.getvalue().split("\n")[-1]
    l = len(last_line)
    indent = ' ' * l
    indent = " " * l
    line_number = self.line_number

    i = len(paramnames) - len(defparams)
    suffix = ''
    suffix = ""

    for param in paramnames[:i]:
        self.write(suffix, param)
        suffix = ', '
        suffix = ", "
        if param in annotate_dict:
            self.write(': %s' % annotate_dict[param])
            if (line_number != self.line_number):
            self.write(": %s" % annotate_dict[param])
            if line_number != self.line_number:
                suffix = ",\n" + indent
                line_number = self.line_number
        # value, string = annotate_args[param]
@@ -162,10 +170,9 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
        # else:
        #     self.write(': %s' % value)

    suffix = ', ' if i > 0 else ''
    suffix = ", " if i > 0 else ""
    for n in node:
        if n == 'pos_arg':
        if n == "pos_arg":
            self.write(suffix)
            param = paramnames[i]
            self.write(param)
@@ -175,25 +182,24 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
                aa = aa[0]
                self.write(': "%s"' % aa)
            elif isinstance(aa, SyntaxTree):
                self.write(': ')
                self.write(": ")
                self.preorder(aa)

            self.write('=')
            self.write("=")
            i += 1
            self.preorder(n)
            if (line_number != self.line_number):
            if line_number != self.line_number:
                suffix = ",\n" + indent
                line_number = self.line_number
            else:
                suffix = ', '

                suffix = ", "

    if code_has_star_arg(code):
        star_arg = code.co_varnames[argc + kwonlyargcount]
        if annotate_dict and star_arg in annotate_dict:
            self.write(suffix, '*%s: %s' % (star_arg, annotate_dict[star_arg]))
            self.write(suffix, "*%s: %s" % (star_arg, annotate_dict[star_arg]))
        else:
            self.write(suffix, '*%s' % star_arg)
            self.write(suffix, "*%s" % star_arg)
        argc += 1

    # self.println(indent, '#flags:\t', int(code.co_flags))
@@ -214,16 +220,16 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
        kw_args = [None] * kwonlyargcount

        for n in node:
            if n == 'kwargs':
            if n == "kwargs":
                n = n[0]
            if n == 'kwarg':
            if n == "kwarg":
                name = eval(n[0].pattr)
                idx = kwargs.index(name)
                default = self.traverse(n[1], indent='')
                default = self.traverse(n[1], indent="")
                if annotate_dict and name in annotate_dict:
                    kw_args[idx] = '%s: %s=%s' % (name, annotate_dict[name], default)
                    kw_args[idx] = "%s: %s=%s" % (name, annotate_dict[name], default)
                else:
                    kw_args[idx] = '%s=%s' % (name, default)
                    kw_args[idx] = "%s=%s" % (name, default)
                pass
            pass

@@ -233,11 +239,11 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
            if flag:
                n = kwargs[i]
                if n in annotate_dict:
                    kw_args[i] = "%s: %s" %(n, annotate_dict[n])
                    kw_args[i] = "%s: %s" % (n, annotate_dict[n])
                else:
                    kw_args[i] = "%s" % n

        self.write(', '.join(kw_args))
        self.write(", ".join(kw_args))
        ends_in_comma = False

    else:
@@ -246,52 +252,56 @@ def make_function3_annotate(self, node, is_lambda, nested=1,

    if code_has_star_star_arg(code):
        if not ends_in_comma:
            self.write(', ')
            self.write(", ")
        star_star_arg = code.co_varnames[argc + kwonlyargcount]
        if annotate_dict and star_star_arg in annotate_dict:
            self.write('**%s: %s' % (star_star_arg, annotate_dict[star_star_arg]))
            self.write("**%s: %s" % (star_star_arg, annotate_dict[star_star_arg]))
        else:
            self.write('**%s' % star_star_arg)
            self.write("**%s" % star_star_arg)

    if is_lambda:
        self.write(": ")
    else:
        self.write(')')
        if 'return' in annotate_tuple[0].attr:
        self.write(")")
        if "return" in annotate_tuple[0].attr:
            if (line_number != self.line_number) and not no_paramnames:
                self.write("\n" + indent)
                line_number = self.line_number
            self.write(' -> ')
            self.write(" -> ")
            # value, string = annotate_args['return']
            # if string:
            #     self.write(' -> "%s"' % value)
            # else:
            #     self.write(' -> %s' % value)
            self.preorder(node[annotate_last-1])
            self.preorder(node[annotate_last - 1])

        self.println(":")

    if (len(code.co_consts) > 0 and
        code.co_consts[0] is not None and not is_lambda): # ugly
    if (
        len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda
    ):  # ugly
        # docstring exists, dump it
        print_docstring(self, self.indent, code.co_consts[0])

    code._tokens = None # save memory
    assert ast == 'stmts'
    code._tokens = None  # save memory
    assert ast == "stmts"

    all_globals = find_all_globals(ast, set())
    globals, nonlocals = find_globals_and_nonlocals(ast, set(), set(),
                                                    code, self.version)
    globals, nonlocals = find_globals_and_nonlocals(
        ast, set(), set(), code, self.version
    )
    for g in sorted((all_globals & self.mod_globs) | globals):
        self.println(self.indent, 'global ', g)
        self.println(self.indent, "global ", g)
    for nl in sorted(nonlocals):
        self.println(self.indent, 'nonlocal ', nl)
        self.println(self.indent, "nonlocal ", nl)
    self.mod_globs -= all_globals
    has_none = 'None' in code.co_names
    has_none = "None" in code.co_names
    rn = has_none and not find_none(ast)
    self.gen_source(ast, code.co_name, code._customize, is_lambda=is_lambda,
                    returnNone=rn)
    code._tokens = code._customize = None # save memory
    self.gen_source(
        ast, code.co_name, code._customize, is_lambda=is_lambda, returnNone=rn
    )
    code._tokens = code._customize = None  # save memory


def make_function2(self, node, is_lambda, nested=1, code_node=None):
    """
@@ -309,38 +319,38 @@ def make_function2(self, node, is_lambda, nested=1, code_node=None):
    """
        # if formal parameter is a tuple, the paramater name
        # starts with a dot (eg. '.1', '.2')
        if name.startswith('.'):
        if name.startswith("."):
            # replace the name with the tuple-string
            name = self.get_tuple_parameter(ast, name)
            pass

        if default:
            value = self.traverse(default, indent='')
            value = self.traverse(default, indent="")
            maybe_show_tree_param_default(self.showast, name, value)
            result = '%s=%s' % (name, value)
            if result[-2:] == '= ':  # default was 'LOAD_CONST None'
                result += 'None'
            result = "%s=%s" % (name, value)
            if result[-2:] == "= ":  # default was 'LOAD_CONST None'
                result += "None"
            return result
        else:
            return name

    # MAKE_FUNCTION_... or MAKE_CLOSURE_...
    assert node[-1].kind.startswith('MAKE_')
    assert node[-1].kind.startswith("MAKE_")

    args_node = node[-1]
    if isinstance(args_node.attr, tuple):
        # positional args are after kwargs
        defparams = node[1:args_node.attr[0]+1]
        pos_args, kw_args, annotate_argc = args_node.attr
        defparams = node[1 : args_node.attr[0] + 1]
        pos_args, kw_args, annotate_argc = args_node.attr
    else:
        defparams = node[:args_node.attr]
        kw_args = 0
        defparams = node[: args_node.attr]
        kw_args = 0
        pass

    lambda_index = None

    if lambda_index and is_lambda and iscode(node[lambda_index].attr):
        assert node[lambda_index].kind == 'LOAD_LAMBDA'
        assert node[lambda_index].kind == "LOAD_LAMBDA"
        code = node[lambda_index].attr
    else:
        code = code_node.attr
@@ -353,13 +363,16 @@ def make_function2(self, node, is_lambda, nested=1, code_node=None):
    paramnames = list(code.co_varnames[:argc])

    # defaults are for last n parameters, thus reverse
    paramnames.reverse(); defparams.reverse()
    paramnames.reverse()
    defparams.reverse()

    try:
        ast = self.build_ast(code._tokens,
                             code._customize,
                             is_lambda = is_lambda,
                             noneInNames = ('None' in code.co_names))
        ast = self.build_ast(
            code._tokens,
            code._customize,
            is_lambda=is_lambda,
            noneInNames=("None" in code.co_names),
        )
    except (ParserError, ParserError2) as p:
        self.write(str(p))
        if not self.tolerate_errors:
@@ -370,12 +383,14 @@ def make_function2(self, node, is_lambda, nested=1, code_node=None):
    indent = self.indent

    # build parameters
    params = [build_param(ast, name, default) for
              name, default in zip_longest(paramnames, defparams, fillvalue=None)]
    params.reverse()  # back to correct order
    params = [
        build_param(ast, name, default)
        for name, default in zip_longest(paramnames, defparams, fillvalue=None)
    ]
    params.reverse()  # back to correct order

    if code_has_star_arg(code):
        params.append('*%s' % code.co_varnames[argc])
        params.append("*%s" % code.co_varnames[argc])
        argc += 1

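code_has_star_arg and code_has_star_star_arg are, presumably, thin wrappers over the stable co_flags bits:

import inspect

def f(*args, **kwargs):
    pass

co = f.__code__
print(bool(co.co_flags & inspect.CO_VARARGS))      # True: takes *args
print(bool(co.co_flags & inspect.CO_VARKEYWORDS))  # True: takes **kwargs
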
# dump parameter list (with default values)
|
||||
@@ -387,13 +402,15 @@ def make_function2(self, node, is_lambda, nested=1, code_node=None):
|
||||
# drop the (return) None since that was just put there
|
||||
# to have something to after the yield finishes.
|
||||
# FIXME: this is a bit hoaky and not general
|
||||
if (len(ast) > 1 and
|
||||
self.traverse(ast[-1]) == 'None' and
|
||||
self.traverse(ast[-2]).strip().startswith('yield')):
|
||||
if (
|
||||
len(ast) > 1
|
||||
and self.traverse(ast[-1]) == "None"
|
||||
and self.traverse(ast[-2]).strip().startswith("yield")
|
||||
):
|
||||
del ast[-1]
|
||||
# Now pick out the expr part of the last statement
|
||||
ast_expr = ast[-1]
|
||||
while ast_expr.kind != 'expr':
|
||||
while ast_expr.kind != "expr":
|
||||
ast_expr = ast_expr[0]
|
||||
ast[-1] = ast_expr
|
||||
pass
|
||||
@@ -411,7 +428,7 @@ def make_function2(self, node, is_lambda, nested=1, code_node=None):
|
||||
self.write(", ")
|
||||
|
||||
for n in node:
|
||||
if n == 'pos_arg':
|
||||
if n == "pos_arg":
|
||||
continue
|
||||
else:
|
||||
self.preorder(n)
|
||||
@@ -420,38 +437,43 @@ def make_function2(self, node, is_lambda, nested=1, code_node=None):
|
||||
|
||||
if code_has_star_star_arg(code):
|
||||
if argc > 0:
|
||||
self.write(', ')
|
||||
self.write('**%s' % code.co_varnames[argc + kw_pairs])
|
||||
self.write(", ")
|
||||
self.write("**%s" % code.co_varnames[argc + kw_pairs])
|
||||
|
||||
if is_lambda:
|
||||
self.write(": ")
|
||||
else:
|
||||
self.println("):")
|
||||
|
||||
if len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda: # ugly
|
||||
if (
|
||||
len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda
|
||||
): # ugly
|
||||
# docstring exists, dump it
|
||||
print_docstring(self, indent, code.co_consts[0])
|
||||
|
||||
code._tokens = None # save memory
|
||||
code._tokens = None # save memory
|
||||
if not is_lambda:
|
||||
assert ast == 'stmts'
|
||||
assert ast == "stmts"
|
||||
|
||||
all_globals = find_all_globals(ast, set())
|
||||
|
||||
globals, nonlocals = find_globals_and_nonlocals(ast, set(), set(),
|
||||
code, self.version)
|
||||
globals, nonlocals = find_globals_and_nonlocals(
|
||||
ast, set(), set(), code, self.version
|
||||
)
|
||||
|
||||
# Python 2 doesn't support the "nonlocal" statement
|
||||
assert self.version >= 3.0 or not nonlocals
|
||||
|
||||
for g in sorted((all_globals & self.mod_globs) | globals):
|
||||
self.println(self.indent, 'global ', g)
|
||||
self.println(self.indent, "global ", g)
|
||||
self.mod_globs -= all_globals
|
||||
has_none = 'None' in code.co_names
|
||||
has_none = "None" in code.co_names
|
||||
rn = has_none and not find_none(ast)
|
||||
self.gen_source(ast, code.co_name, code._customize, is_lambda=is_lambda,
|
||||
returnNone=rn)
|
||||
code._tokens = None; code._customize = None # save memory
|
||||
self.gen_source(
|
||||
ast, code.co_name, code._customize, is_lambda=is_lambda, returnNone=rn
|
||||
)
|
||||
code._tokens = None
|
||||
code._customize = None # save memory
|
||||
|
||||
|
||||
def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
@@ -495,23 +517,23 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
if self.version >= 3.6:
|
||||
value = default
|
||||
else:
|
||||
value = self.traverse(default, indent='')
|
||||
value = self.traverse(default, indent="")
|
||||
maybe_show_tree_param_default(self.showast, name, value)
|
||||
if annotation:
|
||||
result = '%s: %s=%s' % (name, annotation, value)
|
||||
result = "%s: %s=%s" % (name, annotation, value)
|
||||
else:
|
||||
result = '%s=%s' % (name, value)
|
||||
result = "%s=%s" % (name, value)
|
||||
|
||||
# The below can probably be removed. This is probably
|
||||
# a holdover from days when LOAD_CONST erroneously
|
||||
# didn't handle LOAD_CONST None properly
|
||||
if result[-2:] == '= ': # default was 'LOAD_CONST None'
|
||||
result += 'None'
|
||||
if result[-2:] == "= ": # default was 'LOAD_CONST None'
|
||||
result += "None"
|
||||
|
||||
return result
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].kind.startswith('MAKE_')
|
||||
assert node[-1].kind.startswith("MAKE_")
|
||||
|
||||
# Python 3.3+ adds a qualified name at TOS (-1)
|
||||
# moving down the LOAD_LAMBDA instruction
|
||||
@@ -531,11 +553,13 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
# not to be confused with keyword parameters which may appear after *.
|
||||
args_attr = args_node.attr
|
||||
|
||||
if isinstance(args_attr, tuple) or (self.version >= 3.6 and isinstance(args_attr, list)):
|
||||
if isinstance(args_attr, tuple) or (
|
||||
self.version >= 3.6 and isinstance(args_attr, list)
|
||||
):
|
||||
if len(args_attr) == 3:
|
||||
pos_args, kw_args, annotate_argc = args_attr
|
||||
pos_args, kw_args, annotate_argc = args_attr
|
||||
else:
|
||||
pos_args, kw_args, annotate_argc, closure = args_attr
|
||||
pos_args, kw_args, annotate_argc, closure = args_attr
|
||||
|
||||
i = -4
|
||||
kw_pairs = 0
|
||||
@@ -545,86 +569,103 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
if annotate_argc:
|
||||
# Turn into subroutine and DRY with other use
|
||||
annotate_node = node[i]
|
||||
if annotate_node == 'expr':
|
||||
if annotate_node == "expr":
|
||||
annotate_node = annotate_node[0]
|
||||
annotate_name_node = annotate_node[-1]
|
||||
if annotate_node == 'dict' and annotate_name_node.kind.startswith('BUILD_CONST_KEY_MAP'):
|
||||
types = [self.traverse(n, indent='') for n in annotate_node[:-2]]
|
||||
if annotate_node == "dict" and annotate_name_node.kind.startswith(
|
||||
"BUILD_CONST_KEY_MAP"
|
||||
):
|
||||
types = [
|
||||
self.traverse(n, indent="") for n in annotate_node[:-2]
|
||||
]
|
||||
names = annotate_node[-2].attr
|
||||
l = len(types)
|
||||
assert l == len(names)
|
||||
for i in range(l): annotate_dict[names[i]] = types[i]
|
||||
for i in range(l):
|
||||
annotate_dict[names[i]] = types[i]
|
||||
pass
|
||||
pass
|
||||
i -= 1
|
||||
if kw_args:
|
||||
kw_node = node[i]
|
||||
if kw_node == 'expr':
|
||||
if kw_node == "expr":
|
||||
kw_node = kw_node[0]
|
||||
if kw_node == 'dict':
|
||||
if kw_node == "dict":
|
||||
kw_pairs = kw_node[-1].attr
|
||||
|
||||
|
||||
# FIXME: there is probably a better way to classify this.
|
||||
have_kwargs = node[0].kind.startswith('kwarg') or node[0] == 'no_kwargs'
|
||||
have_kwargs = node[0].kind.startswith("kwarg") or node[0] == "no_kwargs"
|
||||
if len(node) >= 4:
|
||||
lc_index = -4
|
||||
else:
|
||||
lc_index = -3
|
||||
pass
|
||||
|
||||
if (3.0 <= self.version <= 3.3 and len(node) > 2 and
|
||||
node[lambda_index] != 'LOAD_LAMBDA' and
|
||||
(have_kwargs or node[lc_index].kind != 'load_closure')):
|
||||
if (
|
||||
3.0 <= self.version <= 3.3
|
||||
and len(node) > 2
|
||||
and node[lambda_index] != "LOAD_LAMBDA"
|
||||
and (have_kwargs or node[lc_index].kind != "load_closure")
|
||||
):
|
||||
|
||||
# Find the index in "node" where the first default
|
||||
# parameter value is located. Note this is in contrast to
|
||||
# key-word arguments, pairs of (name, value), which appear after "*".
|
||||
# "default_values_start" is this location.
|
||||
default_values_start = 0
|
||||
if node[0] == 'no_kwargs':
|
||||
if node[0] == "no_kwargs":
|
||||
default_values_start += 1
|
||||
# args are after kwargs; kwargs are bundled as one node
|
||||
if node[default_values_start] == 'kwargs':
|
||||
if node[default_values_start] == "kwargs":
|
||||
default_values_start += 1
|
||||
defparams = node[default_values_start:default_values_start+args_node.attr[0]]
|
||||
defparams = node[
|
||||
default_values_start : default_values_start + args_node.attr[0]
|
||||
]
|
||||
else:
|
||||
if self.version < 3.6:
|
||||
defparams = node[:args_node.attr[0]]
|
||||
kw_args = 0
|
||||
defparams = node[: args_node.attr[0]]
|
||||
kw_args = 0
|
||||
else:
|
||||
defparams = []
|
||||
# FIXME: DRY with code below
|
||||
default, kw_args, annotate_argc = args_node.attr[0:3]
|
||||
if default:
|
||||
expr_node = node[0]
|
||||
if node[0] == 'pos_arg':
|
||||
if node[0] == "pos_arg":
|
||||
expr_node = expr_node[0]
|
||||
assert expr_node == 'expr', "expecting mkfunc default node to be an expr"
|
||||
if (expr_node[0] == 'LOAD_CONST' and
|
||||
isinstance(expr_node[0].attr, tuple)):
|
||||
assert (
|
||||
expr_node == "expr"
|
||||
), "expecting mkfunc default node to be an expr"
|
||||
if expr_node[0] == "LOAD_CONST" and isinstance(
|
||||
expr_node[0].attr, tuple
|
||||
):
|
||||
defparams = [repr(a) for a in expr_node[0].attr]
|
||||
elif expr_node[0] in frozenset(('list', 'tuple', 'dict', 'set')):
|
||||
defparams = [self.traverse(n, indent='') for n in expr_node[0][:-1]]
|
||||
elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
|
||||
defparams = [
|
||||
self.traverse(n, indent="") for n in expr_node[0][:-1]
|
||||
]
|
||||
else:
|
||||
defparams = []
|
||||
pass
|
     else:
         if self.version < 3.6:
-            defparams = node[:args_node.attr]
-            kw_args = 0
+            defparams = node[: args_node.attr]
+            kw_args = 0
         else:
             default, kw_args, annotate, closure = args_node.attr
             if default:
                 expr_node = node[0]
-                if node[0] == 'pos_arg':
+                if node[0] == "pos_arg":
                     expr_node = expr_node[0]
-                assert expr_node == 'expr', "expecting mkfunc default node to be an expr"
-                if (expr_node[0] == 'LOAD_CONST' and
-                    isinstance(expr_node[0].attr, tuple)):
+                assert (
+                    expr_node == "expr"
+                ), "expecting mkfunc default node to be an expr"
+                if expr_node[0] == "LOAD_CONST" and isinstance(
+                    expr_node[0].attr, tuple
+                ):
                     defparams = [repr(a) for a in expr_node[0].attr]
-                elif expr_node[0] in frozenset(('list', 'tuple', 'dict', 'set')):
-                    defparams = [self.traverse(n, indent='') for n in expr_node[0][:-1]]
+                elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
+                    defparams = [self.traverse(n, indent="") for n in expr_node[0][:-1]]
                 else:
                     defparams = []

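In the 3.6+ branch, all-constant positional defaults show up as a single LOAD_CONST tuple, which is why the LOAD_CONST-with-tuple case above turns each element back into source text with repr(). A quick way to see that shape:

import dis

# With all-constant defaults, CPython 3.6+ pushes one tuple, e.g.
# LOAD_CONST (1, 2), before MAKE_FUNCTION; non-constant defaults are
# assembled with BUILD_TUPLE instead, hence the list/tuple/dict/set branch.
dis.dis(compile("def g(a=1, b=2): pass", "<demo>", "exec"))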
@@ -637,28 +678,33 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
             if annotate_argc:
                 # Turn into subroutine and DRY with other use
                 annotate_node = node[i]
-                if annotate_node == 'expr':
+                if annotate_node == "expr":
                     annotate_node = annotate_node[0]
                 annotate_name_node = annotate_node[-1]
-                if annotate_node == 'dict' and annotate_name_node.kind.startswith('BUILD_CONST_KEY_MAP'):
-                    types = [self.traverse(n, indent='') for n in annotate_node[:-2]]
+                if annotate_node == "dict" and annotate_name_node.kind.startswith(
+                    "BUILD_CONST_KEY_MAP"
+                ):
+                    types = [
+                        self.traverse(n, indent="") for n in annotate_node[:-2]
+                    ]
                     names = annotate_node[-2].attr
                     l = len(types)
                     assert l == len(names)
-                    for i in range(l): annotate_dict[names[i]] = types[i]
+                    for i in range(l):
+                        annotate_dict[names[i]] = types[i]
+                        pass
                     pass
                 i -= 1
             if kw_args:
                 kw_node = node[i]
-                if kw_node == 'expr':
+                if kw_node == "expr":
                     kw_node = kw_node[0]
-                if kw_node == 'dict':
+                if kw_node == "dict":
                     kw_pairs = kw_node[-1].attr
             pass

         if lambda_index and is_lambda and iscode(node[lambda_index].attr):
-            assert node[lambda_index].kind == 'LOAD_LAMBDA'
+            assert node[lambda_index].kind == "LOAD_LAMBDA"
             code = node[lambda_index].attr
         else:
             code = code_node.attr
@@ -672,17 +718,19 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):

         paramnames = list(scanner_code.co_varnames[:argc])
         if kwonlyargcount > 0:
-            kwargs = list(scanner_code.co_varnames[argc:argc+kwonlyargcount])
+            kwargs = list(scanner_code.co_varnames[argc : argc + kwonlyargcount])

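The slicing above relies on the fixed layout of co_varnames: positional parameters first, then keyword-only parameters, then the *args/**kwargs names, then ordinary locals. This is verifiable directly:

def demo(a, b, *args, k1=1, k2=2, **kw):
    local = a
    return local

c = demo.__code__
# co_varnames begins with the positional parameters, then the keyword-only
# ones, then the *args/**kw names, then plain locals -- the layout the
# paramnames/kwargs slices above depend on.
print(c.co_varnames[:c.co_argcount])  # ('a', 'b')
print(c.co_varnames[c.co_argcount : c.co_argcount + c.co_kwonlyargcount])  # ('k1', 'k2')
print(c.co_varnames[c.co_argcount + c.co_kwonlyargcount])  # 'args'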

         # defaults are for last n parameters, thus reverse
-        paramnames.reverse();
+        paramnames.reverse()
         defparams.reverse()

         try:
-            ast = self.build_ast(scanner_code._tokens,
-                                 scanner_code._customize,
-                                 is_lambda = is_lambda,
-                                 noneInNames = ('None' in code.co_names))
+            ast = self.build_ast(
+                scanner_code._tokens,
+                scanner_code._customize,
+                is_lambda=is_lambda,
+                noneInNames=("None" in code.co_names),
+            )
         except (ParserError, ParserError2) as p:
             self.write(str(p))
             if not self.tolerate_errors:
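The reverse/pair/reverse dance exists because default values always belong to the last n positional parameters. The pairing trick in miniature (plain Python, independent of uncompyle6):

paramnames = ["a", "b", "c"]  # declaration order
defparams = ["10", "20"]      # rendered defaults; they belong to b and c

names_rev = list(reversed(paramnames))
defs_rev = list(reversed(defparams))
params = ["%s=%s" % (n, d) for n, d in zip(names_rev, defs_rev)]
params += names_rev[len(defs_rev):]  # parameters without defaults
params.reverse()                     # back to declaration order
print(", ".join(params))             # a, b=10, c=20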
@@ -701,10 +749,13 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
         params = []
         if defparams:
             for i, defparam in enumerate(defparams):
-                params.append(build_param(ast, paramnames[i], defparam,
-                                          annotate_dict.get(paramnames[i])))
+                params.append(
+                    build_param(
+                        ast, paramnames[i], defparam, annotate_dict.get(paramnames[i])
+                    )
+                )

-            for param in paramnames[i+1:]:
+            for param in paramnames[i + 1 :]:
                 if param in annotate_dict:
                     params.append("%s: %s" % (param, annotate_dict[param]))
                 else:
@@ -716,17 +767,17 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
         else:
             params.append(param)

-        params.reverse() # back to correct order
+        params.reverse()  # back to correct order

         if code_has_star_arg(code):
             if self.version > 3.0:
                 star_arg = code.co_varnames[argc + kwonlyargcount]
                 if annotate_dict and star_arg in annotate_dict:
-                    params.append('*%s: %s' % (star_arg, annotate_dict[star_arg]))
+                    params.append("*%s: %s" % (star_arg, annotate_dict[star_arg]))
                 else:
-                    params.append('*%s' % star_arg)
+                    params.append("*%s" % star_arg)
             else:
-                params.append('*%s' % code.co_varnames[argc])
+                params.append("*%s" % code.co_varnames[argc])
             argc += 1

         # dump parameter list (with default values)
@@ -738,13 +789,15 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
             # drop the (return) None since that was just put there
             # to have something to after the yield finishes.
             # FIXME: this is a bit hoaky and not general
-            if (len(ast) > 1 and
-                self.traverse(ast[-1]) == 'None' and
-                self.traverse(ast[-2]).strip().startswith('yield')):
+            if (
+                len(ast) > 1
+                and self.traverse(ast[-1]) == "None"
+                and self.traverse(ast[-2]).strip().startswith("yield")
+            ):
                 del ast[-1]
                 # Now pick out the expr part of the last statement
                 ast_expr = ast[-1]
-                while ast_expr.kind != 'expr':
+                while ast_expr.kind != "expr":
                     ast_expr = ast_expr[0]
                 ast[-1] = ast_expr
                 pass
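The yield/None check above strips the implicit return that CPython appends to every generator body; without it, the tree would carry a stray trailing None expression. Disassembly makes the synthetic tail visible:

import dis

def gen():
    yield 1

# On most CPython versions the body ends with LOAD_CONST None /
# RETURN_VALUE after the yield; that is the node the pruning above
# deletes so it never reaches the generated source.
dis.dis(gen)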
@@ -776,7 +829,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
             if kw_nodes == "kwargs":
                 for n in kw_nodes:
                     name = eval(n[0].pattr)
-                    default = self.traverse(n[1], indent='')
+                    default = self.traverse(n[1], indent="")
                     idx = kwargs.index(name)
                     kw_args[idx] = "%s=%s" % (name, default)

@@ -785,7 +838,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
             for i, flag in enumerate(other_kw):
                 if flag:
                     kw_args[i] = "%s" % kwargs[i]
-            self.write(', '.join(kw_args))
+            self.write(", ".join(kw_args))
             ends_in_comma = False
         elif self.version >= 3.6:
             # argc = node[-1].attr
@@ -796,9 +849,9 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
             free_tup = ann_dict = kw_dict = default_tup = None
             fn_bits = node[-1].attr
-            index = -4 # Skip over:
-                       # MAKE_FUNCTION,
-                       # LOAD_CONST qualified name,
-                       # LOAD_CONST code object
+            index = -4  # Skip over:
+                        # MAKE_FUNCTION,
+                        # LOAD_CONST qualified name,
+                        # LOAD_CONST code object
             if fn_bits[-1]:
                 free_tup = node[index]
                 index -= 1
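fn_bits here is the decoded MAKE_FUNCTION flag mask of CPython 3.6+: 0x01 means a positional-defaults tuple was pushed, 0x02 a keyword-only-defaults dict, 0x04 annotations, 0x08 a closure tuple, in that order below the code object and qualified name; hence the walk starts at index -4 and steps downward from the closure. A hedged standalone sketch (the constant names are illustrative, CPython spells these only as numeric flags):

MF_DEFAULTS, MF_KWDEFAULTS, MF_ANNOTATIONS, MF_CLOSURE = 0x01, 0x02, 0x04, 0x08

def make_function_operands(oparg):
    # Nearest to the code object is the closure, then annotations,
    # then kw-only defaults, then positional defaults -- the same
    # order the "index -= 1" walk above consumes them in.
    present = []
    if oparg & MF_CLOSURE:
        present.append("closure tuple")
    if oparg & MF_ANNOTATIONS:
        present.append("annotations")
    if oparg & MF_KWDEFAULTS:
        present.append("kw-only defaults dict")
    if oparg & MF_DEFAULTS:
        present.append("positional defaults tuple")
    return present

print(make_function_operands(0x09))  # ['closure tuple', 'positional defaults tuple']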
@@ -811,18 +864,18 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
             if fn_bits[-4]:
                 default_tup = node[index]

-            if kw_dict == 'expr':
+            if kw_dict == "expr":
                 kw_dict = kw_dict[0]

             # FIXME: handle free_tup, annotate_dict, and default_tup
             kw_args = [None] * kwonlyargcount

             if kw_dict:
-                assert kw_dict == 'dict'
-                defaults = [self.traverse(n, indent='') for n in kw_dict[:-2]]
+                assert kw_dict == "dict"
+                defaults = [self.traverse(n, indent="") for n in kw_dict[:-2]]
                 names = eval(self.traverse(kw_dict[-2]))
                 assert len(defaults) == len(names)
-                sep = ''
+                sep = ""
                 # FIXME: possibly handle line breaks
                 for i, n in enumerate(names):
                     idx = kwargs.index(n)
@@ -841,11 +894,11 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
                 if flag:
                     n = kwargs[i]
                     if ann_dict and n in annotate_dict:
-                        kw_args[i] = "%s: %s" %(n, annotate_dict[n])
+                        kw_args[i] = "%s: %s" % (n, annotate_dict[n])
                     else:
                         kw_args[i] = "%s" % n

-            self.write(', '.join(kw_args))
+            self.write(", ".join(kw_args))
             ends_in_comma = False

         pass
@@ -855,41 +908,46 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):

         if code_has_star_star_arg(code):
             if not ends_in_comma:
-                self.write(', ')
+                self.write(", ")
             star_star_arg = code.co_varnames[argc + kwonlyargcount]
             if annotate_dict and star_star_arg in annotate_dict:
-                self.write('**%s: %s' % (star_star_arg, annotate_dict[star_star_arg]))
+                self.write("**%s: %s" % (star_star_arg, annotate_dict[star_star_arg]))
             else:
-                self.write('**%s' % star_star_arg)
+                self.write("**%s" % star_star_arg)

         if is_lambda:
             self.write(": ")
         else:
-            self.write(')')
-            if annotate_dict and 'return' in annotate_dict:
-                self.write(' -> %s' % annotate_dict['return'])
+            self.write(")")
+            if annotate_dict and "return" in annotate_dict:
+                self.write(" -> %s" % annotate_dict["return"])
             self.println(":")

-        if len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda: # ugly
+        if (
+            len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda
+        ):  # ugly
             # docstring exists, dump it
             print_docstring(self, self.indent, code.co_consts[0])

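The co_consts[0] test works because CPython reserves the first constant of a function's code object for its docstring, storing None when there is none:

def documented():
    """I live in co_consts[0]."""

def bare():
    pass

# co_consts[0] is the docstring slot: the string when present, None when not.
print(documented.__code__.co_consts[0])  # I live in co_consts[0].
print(bare.__code__.co_consts[0])        # None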
-        scanner_code._tokens = None # save memory
-        assert ast == 'stmts'
+        scanner_code._tokens = None  # save memory
+        assert ast == "stmts"

         all_globals = find_all_globals(ast, set())
-        globals, nonlocals = find_globals_and_nonlocals(ast, set(),
-                                                        set(), code, self.version)
+        globals, nonlocals = find_globals_and_nonlocals(
+            ast, set(), set(), code, self.version
+        )

         for g in sorted((all_globals & self.mod_globs) | globals):
-            self.println(self.indent, 'global ', g)
+            self.println(self.indent, "global ", g)

         for nl in sorted(nonlocals):
-            self.println(self.indent, 'nonlocal ', nl)
+            self.println(self.indent, "nonlocal ", nl)

         self.mod_globs -= all_globals
-        has_none = 'None' in code.co_names
+        has_none = "None" in code.co_names
         rn = has_none and not find_none(ast)
-        self.gen_source(ast, code.co_name, scanner_code._customize, is_lambda=is_lambda,
-                        returnNone=rn)
-        scanner_code._tokens = None; scanner_code._customize = None # save memory
+        self.gen_source(
+            ast, code.co_name, scanner_code._customize, is_lambda=is_lambda, returnNone=rn
+        )
+        scanner_code._tokens = None
+        scanner_code._customize = None  # save memory
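find_globals_and_nonlocals recovers which names need explicit global/nonlocal lines in the regenerated source; the signal is in the bytecode, e.g. a write through STORE_GLOBAL:

counter = 0

def bump():
    # Writing a module-level name from inside a function compiles to
    # STORE_GLOBAL only under a "global" declaration; the decompiler
    # must therefore re-emit this line to round-trip the behavior.
    global counter
    counter += 1

bump()
print(counter)  # 1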
(diff suppressed for one file because it is too large)

uncompyle6/semantics/transform.py (new file, 266 lines)
@@ -0,0 +1,266 @@
# Copyright (c) 2019 by Rocky Bernstein

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from uncompyle6.show import maybe_show_tree
from copy import copy
from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException

from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.scanners.tok import Token
from uncompyle6.semantics.consts import RETURN_NONE

def is_docstring(node):
    try:
        return node[0][0].kind == "assign" and node[0][0][1][0].pattr == "__doc__"
    except:
        return False

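is_docstring works because a docstring compiles to an ordinary store of a constant into __doc__, which the parser then sees as a plain assignment:

import dis

# LOAD_CONST 'module doc' / STORE_NAME __doc__ is all a docstring is at
# the bytecode level; after parsing, that is the assign-to-__doc__ shape
# probed by is_docstring().
dis.dis(compile('"""module doc"""\nx = 1\n', "<demo>", "exec"))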
class TreeTransform(GenericASTTraversal, object):
    def __init__(self, version, show_ast=None, is_pypy=False):
        self.version = version
        self.showast = show_ast
        self.is_pypy = is_pypy
        return

    def maybe_show_tree(self, ast):
        if isinstance(self.showast, dict) and self.showast:
            maybe_show_tree(self, ast)

    def preorder(self, node=None):
        """Walk the tree in roughly 'preorder' (a bit of a lie explained below).
        For each node with typestring name *name*, if the node has a method
        called n_*name*, call that before walking children.

        In typical use a node with children can call "preorder" in any
        order it wants, which may skip children or order them in ways
        other than first to last. In fact, this happens. So in that
        sense this function is not strictly preorder.
        """
        if node is None:
            node = self.ast

        try:
            name = "n_" + self.typestring(node)
            if hasattr(self, name):
                func = getattr(self, name)
                node = func(node)
        except GenericASTTraversalPruningException:
            # Prune: skip the children, but keep the (possibly rewritten)
            # node; a bare "return" here would splice None into the parent.
            return node

        for i, kid in enumerate(node):
            node[i] = self.preorder(kid)
        return node

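The dispatch in preorder is purely name-based: a node whose typestring is, say, ifstmt is handed to n_ifstmt when such a method exists, and only then are the children walked. The idea in a minimal standalone form (tuples stand in for SyntaxTree nodes; everything here is a sketch, not uncompyle6 API):

class MiniWalker:
    def typestring(self, node):
        return node[0]  # nodes are ("kind", child, ...) tuples

    def preorder(self, node):
        handler = getattr(self, "n_" + self.typestring(node), None)
        if handler:
            node = handler(node)  # rewrite before walking children
        return (node[0],) + tuple(
            self.preorder(kid) if isinstance(kid, tuple) else kid
            for kid in node[1:]
        )

    def n_ifstmt(self, node):
        return ("rewritten_ifstmt",) + node[1:]

print(MiniWalker().preorder(("ifstmt", ("expr", "x"))))
# ('rewritten_ifstmt', ('expr', 'x'))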
    def n_ifstmt(self, node):
        """Here we check if we can turn an "ifstmt" or "iflaststmtl" into
        some kind of "assert" statement."""

        testexpr = node[0]

        if testexpr.kind != "testexpr":
            return node
        if node.kind == "ifstmt":
            ifstmts_jump = node[1]
            if node[1] != "_ifstmts_jump":
                return node
            stmts = ifstmts_jump[0]
        else:
            # iflaststmtl works this way
            stmts = node[1]

        if stmts in ("c_stmts",) and len(stmts) == 1:
            stmt = stmts[0]
            raise_stmt = stmt[0]
            if raise_stmt == "raise_stmt1" and len(testexpr[0]) == 2:
                assert_expr = testexpr[0][0]
                assert_expr.kind = "assert_expr"
                jump_cond = testexpr[0][1]
                expr = raise_stmt[0]
                RAISE_VARARGS_1 = raise_stmt[1]
                if expr[0] == "call":
                    # ifstmt
                    #     0. testexpr
                    #         testtrue (2)
                    #             0. expr
                    #     1. _ifstmts_jump (2)
                    #         0. c_stmts
                    #             stmt
                    #                 raise_stmt1 (2)
                    #                     0. expr
                    #                         call (3)
                    #                     1. RAISE_VARARGS_1
                    # becomes:
                    # assert2 ::= assert_expr jmp_true LOAD_ASSERT expr RAISE_VARARGS_1 COME_FROM
                    if jump_cond == "jmp_true":
                        kind = "assert2"
                    else:
                        assert jump_cond == "jmp_false"
                        kind = "assert2not"

                    call = expr[0]
                    LOAD_ASSERT = call[0]
                    if isinstance(call[1], SyntaxTree):
                        expr = call[1][0]
                        node = SyntaxTree(
                            kind,
                            [assert_expr, jump_cond, LOAD_ASSERT, expr, RAISE_VARARGS_1],
                        )
                        pass
                    pass
                else:
                    # ifstmt
                    #     0. testexpr (2)
                    #         testtrue
                    #             0. expr
                    #     1. _ifstmts_jump (2)
                    #         0. c_stmts
                    #             stmts
                    #                 raise_stmt1 (2)
                    #                     0. expr
                    #                         LOAD_ASSERT
                    #                     1. RAISE_VARARGS_1
                    # becomes:
                    # assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
                    if jump_cond == "jmp_true":
                        if self.is_pypy:
                            kind = "assert0_pypy"
                        else:
                            kind = "assert"
                    else:
                        assert jump_cond == "jmp_false"
                        kind = "assertnot"

                    LOAD_ASSERT = expr[0]
                    node = SyntaxTree(
                        kind, [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
                    )
                # Note: a plain string, not a tuple; the trailing comma in the
                # original made transformed_by a one-element tuple.
                node.transformed_by = "n_ifstmt"
                pass
            pass
        return node

    n_iflaststmtl = n_ifstmt

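The assert recovery above is possible because CPython compiles assert statements into a conditional jump plus a raise of AssertionError; the decompiler first parses that as an if/raise and n_ifstmt folds it back:

import dis

# "assert x, 'boom'" lowers to roughly:
#     if not x: raise AssertionError('boom')
# so the recovered tree initially looks like an ifstmt wrapping a
# raise_stmt1, which is exactly the shape matched above.
dis.dis(compile("assert x, 'boom'", "<demo>", "exec"))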
    # preprocess is used for handling chains of
    # if elif elif
    def n_ifelsestmt(self, node, preprocess=False):
        """
        Here we turn:

          if ...
          else:
              if ...

        into:

          if ...
          elif ...
          [else ...]

        where appropriate.
        """
        else_suite = node[3]

        n = else_suite[0]
        old_stmts = None

        if len(n) == 1 == len(n[0]) and n[0] == "stmt":
            n = n[0][0]
        elif n[0].kind in ("lastc_stmt", "lastl_stmt"):
            n = n[0]
            if n[0].kind in (
                "ifstmt",
                "iflaststmt",
                "iflaststmtl",
                "ifelsestmtl",
                "ifelsestmtc",
            ):
                # This seems needed for Python 2.5-2.7
                n = n[0]
                pass
            pass
        elif len(n) > 1 and 1 == len(n[0]) and n[0] == "stmt" and n[1].kind == "stmt":
            else_suite_stmts = n[0]
            if else_suite_stmts[0].kind not in ("ifstmt", "iflaststmt", "ifelsestmtl"):
                return node
            old_stmts = n
            n = else_suite_stmts[0]
        else:
            return node

        if n.kind in ("ifstmt", "iflaststmt", "iflaststmtl"):
            node.kind = "ifelifstmt"
            n.kind = "elifstmt"
        elif n.kind in ("ifelsestmtr",):
            node.kind = "ifelifstmt"
            n.kind = "elifelsestmtr"
        elif n.kind in ("ifelsestmt", "ifelsestmtc", "ifelsestmtl"):
            node.kind = "ifelifstmt"
            self.n_ifelsestmt(n, preprocess=True)
            if n == "ifelifstmt":
                n.kind = "elifelifstmt"
            elif n.kind in ("ifelsestmt", "ifelsestmtc", "ifelsestmtl"):
                n.kind = "elifelsestmt"
        if not preprocess:
            if old_stmts:
                if n.kind == "elifstmt":
                    trailing_else = SyntaxTree("stmts", old_stmts[1:])
                    # We use elifelsestmtr because it has 3 nodes
                    elifelse_stmt = SyntaxTree(
                        "elifelsestmtr", [n[0], n[1], trailing_else]
                    )
                    node[3] = elifelse_stmt
                    pass
                else:
                    # Other cases for n.kind may happen here
                    pass
                pass
            node.transformed_by = "n_ifelsestmt"
            return node

    n_ifelsestmtc = n_ifelsestmtl = n_ifelsestmt

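The flattening is sound because elif is pure surface syntax: CPython produces the same instruction stream for a nested else-if and an elif chain, so the decompiler necessarily sees the nested form first:

nested = compile("if a:\n    f()\nelse:\n    if b:\n        g()\n", "<d>", "exec")
flat = compile("if a:\n    f()\nelif b:\n    g()\n", "<d>", "exec")

# Same instructions; only the line-number tables differ, so the tree the
# parser recovers for both is the nested if-in-else that n_ifelsestmt
# rewrites into an elif chain.
print(nested.co_code == flat.co_code)  # True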
    def n_list_for(self, list_for_node):
        expr = list_for_node[0]
        if expr == "expr" and expr[0] == "get_iter":
            # Remove extraneous get_iter() inside the "for" of a comprehension
            assert expr[0][0] == "expr"
            list_for_node[0] = expr[0][0]
            # A plain string again; the original trailing comma made this a tuple.
            list_for_node.transformed_by = "n_list_for"
        return list_for_node

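The get_iter pruning matters because every comprehension's source iterable passes through a GET_ITER instruction; left in the tree it would render as a visible iter(...) call:

import dis

# GET_ITER appears between evaluating seq and running the comprehension's
# code; n_list_for drops the wrapper node so the output stays
# "[x for x in seq]" rather than something like "[x for x in iter(seq)]".
dis.dis(compile("[x for x in seq]", "<demo>", "exec"))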
    def traverse(self, node, is_lambda=False):
        node = self.preorder(node)
        return node

    def transform(self, ast):
        self.maybe_show_tree(ast)
        self.ast = copy(ast)
        self.ast = self.traverse(self.ast, is_lambda=False)

        if self.ast[-1] == RETURN_NONE:
            self.ast.pop()  # remove last node
            # todo: if empty, add 'pass'

        return self.ast

    # Write template_engine
    # def template_engine
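The final RETURN_NONE pop in transform() covers the implicit return None that every code object ends with; at top level it would otherwise surface as a phantom statement:

import dis

# On CPython up through 3.11 this ends with LOAD_CONST None / RETURN_VALUE
# (3.12 uses RETURN_CONST); that synthetic statement is what transform()
# pops off the tail of the tree.
dis.dis(compile("x = 1\n", "<demo>", "exec"))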
(Some files were not shown because too many files have changed in this diff.)